[med-svn] [picard-tools] 03/05: Imported Upstream version 1.100

Charles Plessy plessy at alioth.debian.org
Sun Oct 20 01:39:07 UTC 2013


This is an automated email from the git hooks/post-receive script.

plessy pushed a commit to branch master
in repository picard-tools.

commit d02872e63a745b157a2d76a3ed614a8554875623
Author: Charles Plessy <plessy at debian.org>
Date:   Sat Oct 19 12:27:31 2013 +0900

    Imported Upstream version 1.100
---
 Picard-public.iml                                  |    8 +-
 Picard-public.ipr                                  |   17 +
 build.xml                                          |   21 +-
 .../analysis/AlignmentSummaryMetricsCollector.java |    4 +-
 .../analysis/CollectAlignmentSummaryMetrics.java   |    2 +-
 .../picard/analysis/CollectInsertSizeMetrics.java  |    2 +-
 .../sf/picard/analysis/CollectMultipleMetrics.java |    2 +-
 .../sf/picard/analysis/CollectRnaSeqMetrics.java   |    1 +
 .../net/sf/picard/analysis/MeanQualityByCycle.java |    2 +-
 .../picard/analysis/QualityScoreDistribution.java  |    2 +-
 .../analysis/directed/CollectTargetedMetrics.java  |    3 +-
 .../directed/InsertSizeMetricsCollector.java       |    2 +-
 .../analysis/directed/RnaSeqMetricsCollector.java  |    2 +-
 .../analysis/directed/TargetMetricsCollector.java  |    2 +-
 .../net/sf/picard/cmdline/CommandLineParser.java   |    2 +-
 src/java/net/sf/picard/filter/ReadNameFilter.java  |   35 +-
 .../filter/SecondaryOrSupplementaryFilter.java     |   32 ++
 .../picard/illumina/ExtractIlluminaBarcodes.java   |   16 +-
 .../illumina/IlluminaBasecallsConverter.java       |   14 +-
 .../picard/illumina/IlluminaBasecallsToFastq.java  |   13 +-
 .../sf/picard/illumina/IlluminaBasecallsToSam.java |   14 +-
 .../sf/picard/illumina/MarkIlluminaAdapters.java   |  235 ++++++++++
 .../sf/picard/illumina/parser/BarcodeParser.java   |    2 +-
 .../net/sf/picard/illumina/parser/BclParser.java   |   16 +-
 .../net/sf/picard/illumina/parser/CifParser.java   |    2 +-
 .../net/sf/picard/illumina/parser/CnfParser.java   |    2 +-
 .../sf/picard/illumina/parser/FilterParser.java    |    2 +-
 .../parser/IlluminaDataProviderFactory.java        |   12 +-
 .../picard/illumina/parser/IlluminaFileUtil.java   |    4 +-
 .../illumina/parser/IlluminaIntensityParser.java   |    2 +-
 .../illumina/parser/PerTilePerCycleParser.java     |    6 +-
 .../net/sf/picard/illumina/parser/PosParser.java   |    2 +-
 .../net/sf/picard/illumina/parser/QseqParser.java  |    1 +
 .../sf/picard/illumina/parser/TileMetricsUtil.java |    2 +-
 .../readers/BclQualityEvaluationStrategy.java      |   95 ++++
 .../picard/illumina/parser/readers/BclReader.java  |   11 +-
 src/java/net/sf/picard/io/IoUtil.java              |  114 ++++-
 .../picard/reference/IndexedFastaSequenceFile.java |   12 +-
 .../net/sf/picard/sam/AbstractAlignmentMerger.java |   51 +-
 .../sam/AbstractDuplicateFindingAlgorithm.java     |    5 +-
 .../net/sf/picard/sam/AddOrReplaceReadGroups.java  |    5 +-
 src/java/net/sf/picard/sam/CompareSAMs.java        |   18 +-
 src/java/net/sf/picard/sam/DownsampleSam.java      |    2 +-
 src/java/net/sf/picard/sam/FastqToSam.java         |    4 +-
 src/java/net/sf/picard/sam/FixMateInformation.java |    4 +-
 src/java/net/sf/picard/sam/HitsForInsert.java      |   25 +-
 src/java/net/sf/picard/sam/MarkDuplicates.java     |    4 +-
 src/java/net/sf/picard/sam/MergeBamAlignment.java  |    6 +-
 src/java/net/sf/picard/sam/MergeSamFiles.java      |    2 +-
 ...stDistantPrimaryAlignmentSelectionStrategy.java |    2 +-
 .../sf/picard/sam/MultiHitAlignedReadIterator.java |   25 +-
 src/java/net/sf/picard/sam/RevertSam.java          |    3 +-
 src/java/net/sf/picard/sam/SamAlignmentMerger.java |    3 +-
 src/java/net/sf/picard/sam/SamFileValidator.java   |    2 +-
 src/java/net/sf/picard/sam/SamToFastq.java         |    2 +-
 .../net/sf/picard/util/AbstractInputParser.java    |    1 +
 src/java/net/sf/picard/util/CsvInputParser.java    |   36 ++
 .../util/DelimitedTextFileWithHeaderIterator.java  |  111 +++++
 src/java/net/sf/picard/util/IntervalTreeMap.java   |    3 +-
 src/java/net/sf/picard/util/IterableAdapter.java   |   29 ++
 .../net/sf/picard/util/IterableOnceIterator.java   |   42 ++
 src/java/net/sf/picard/util/MathUtil.java          |   11 +-
 src/java/net/sf/picard/util/ProcessExecutor.java   |   60 +++
 src/java/net/sf/picard/util/RExecutor.java         |    1 +
 src/java/net/sf/picard/util/SamLocusIterator.java  |    2 +-
 src/java/net/sf/picard/util/TabbedInputParser.java |    3 +-
 .../util/TabbedTextFileWithHeaderParser.java       |    1 -
 src/java/net/sf/picard/vcf/BcfIterator.java        |   84 ----
 src/java/net/sf/picard/vcf/MakeSitesOnlyVcf.java   |   90 ++++
 src/java/net/sf/picard/vcf/MergeVcfs.java          |   55 ++-
 src/java/net/sf/picard/vcf/SplitVcfs.java          |   59 ++-
 .../net/sf/picard/vcf/VariantContextIterator.java  |   34 --
 .../picard/vcf/VariantContextIteratorFactory.java  |   49 --
 .../net/sf/picard/vcf/VariantContextUtils.java     |   64 ---
 src/java/net/sf/picard/vcf/VcfFormatConverter.java |   54 ++-
 src/java/net/sf/picard/vcf/VcfIterator.java        |   92 ----
 src/java/net/sf/samtools/SAMFileReader.java        |   17 +-
 src/java/net/sf/samtools/SAMRecord.java            |   56 ++-
 .../sf/samtools/SAMRecordQueryNameComparator.java  |    3 +
 src/java/net/sf/samtools/SAMSequenceRecord.java    |    2 +-
 src/java/net/sf/samtools/SAMValidationError.java   |    3 +
 .../SecondaryOrSupplementarySkippingIterator.java  |   41 ++
 .../seekablestream/SeekableStreamFactory.java      |    9 +
 .../util/AbstractIterator.java                     |   16 +-
 src/java/net/sf/samtools/util/BinaryCodec.java     |    3 +-
 .../net/sf/samtools/util/BufferedLineReader.java   |    2 +
 .../{picard => samtools}/util/CollectionUtil.java  |   14 +-
 src/java/net/sf/samtools/util/Tuple.java           |   16 +
 .../org/broad/tribble/AbstractFeatureCodec.java    |   16 +-
 .../org/broad/tribble/AbstractFeatureReader.java   |   29 +-
 src/java/org/broad/tribble/AsciiFeatureCodec.java  |  108 ++---
 src/java/org/broad/tribble/BinaryFeatureCodec.java |   42 ++
 .../broad/tribble/CloseableTribbleIterator.java    |    8 +-
 src/java/org/broad/tribble/FeatureCodec.java       |   93 ++--
 src/java/org/broad/tribble/TabixFeatureReader.java |   38 +-
 .../broad/tribble/TribbleIndexedFeatureReader.java |  190 +++++---
 src/java/org/broad/tribble/bed/BEDCodec.java       |   15 +-
 .../org/broad/tribble/dbsnp/OldDbSNPCodec.java     |    8 +-
 .../broad/tribble/example/ExampleBinaryCodec.java  |   27 +-
 .../org/broad/tribble/gelitext/GeliTextCodec.java  |   68 +--
 src/java/org/broad/tribble/index/IndexFactory.java |   93 ++--
 .../org/broad/tribble/readers/AsciiLineReader.java |   25 +-
 .../tribble/readers/AsciiLineReaderIterator.java   |   99 ++++
 .../tribble/readers/AsynchronousLineReader.java    |   97 ++++
 .../org/broad/tribble/readers/LineIterator.java    |   12 +
 .../broad/tribble/readers/LineIteratorImpl.java    |   33 ++
 .../org/broad/tribble/readers/LineReaderUtil.java  |   61 +++
 .../org/broad/tribble/readers/LocationAware.java   |   24 +
 .../tribble/readers/LongLineBufferedReader.java    |  490 ++++++++++++++++++++
 src/java/org/broad/tribble/readers/Positional.java |   20 +-
 .../tribble/readers/PositionalBufferedStream.java  |   13 +-
 .../tribble/readers/TabixIteratorLineReader.java   |    8 +-
 src/java/org/broad/tribble/util/IOUtil.java        |   66 ---
 .../org/broadinstitute/variant/bcf2/BCF2Codec.java |   14 +-
 .../org/broadinstitute/variant/bcf2/BCF2Utils.java |    2 +-
 .../variant/variantcontext/VariantContext.java     |   16 +-
 .../variantcontext}/VariantContextComparator.java  |    3 +-
 .../writer/IndexingVariantContextWriter.java       |    8 +-
 .../variant/variantcontext/writer/VCFWriter.java   |   16 +-
 .../writer/VariantContextWriterFactory.java        |    6 +-
 .../variant/vcf/AbstractVCFCodec.java              |    9 +-
 .../org/broadinstitute/variant/vcf/VCF3Codec.java  |   62 ++-
 .../org/broadinstitute/variant/vcf/VCFCodec.java   |   70 ++-
 .../broadinstitute/variant/vcf/VCFConstants.java   |    1 +
 .../variant/vcf/VCFContigHeaderLine.java           |   38 +-
 .../broadinstitute/variant/vcf/VCFFileReader.java  |   69 +++
 .../variant/vcf/VCFFilterHeaderLine.java           |    5 +
 .../variant/vcf/VCFFormatHeaderLine.java           |    5 +
 .../org/broadinstitute/variant/vcf/VCFHeader.java  |   63 ++-
 .../broadinstitute/variant/vcf/VCFHeaderLine.java  |   10 +-
 .../variant/vcf/VCFInfoHeaderLine.java             |    5 +
 .../variant/vcf/VCFSimpleHeaderLine.java           |   12 +-
 src/scripts/explain_sam_flags.py                   |    3 +-
 .../net/sf/picard/analysis/insertSizeHistogram.R   |  108 ++---
 .../net/sf/picard/analysis/rnaSeqCoverage.R        |   18 +-
 src/scripts/release_picard.sh                      |    6 +
 .../picard/analysis/MultiLevelCollectorTest.java   |    2 +-
 .../sf/picard/cmdline/CommandLineParserTest.java   |    2 +-
 .../illumina/CheckIlluminaDirectoryTest.java       |    2 +-
 .../illumina/ExtractIlluminaBarcodesTest.java      |    4 +-
 .../net/sf/picard/illumina/ReadStructureTest.java  |    5 +-
 .../sf/picard/illumina/parser/BclParserTest.java   |    5 +-
 .../net/sf/picard/illumina/parser/BinTdUtil.java   |    2 +-
 .../illumina/parser/CycleIlluminaFileMapTest.java  |    2 +-
 .../parser/IlluminaDataProviderFactoryTest.java    |    4 +-
 .../illumina/parser/IlluminaDataProviderTest.java  |   13 +-
 .../illumina/parser/IlluminaFileUtilTest.java      |    2 +-
 .../picard/illumina/parser/PerTileParserTest.java  |    2 +-
 .../illumina/parser/PerTilePerCycleParserTest.java |    3 +-
 .../sf/picard/illumina/parser/PosParserTest.java   |    2 +-
 .../sf/picard/illumina/parser/QseqParserTest.java  |    2 +-
 .../illumina/parser/readers/BclReaderTest.java     |  159 +++++--
 src/tests/java/net/sf/picard/io/IoUtilTest.java    |   15 +
 .../net/sf/picard/sam/MergeBamAlignmentTest.java   |   83 +++-
 .../DelimitedTextFileWithHeaderIteratorTest.java   |   95 ++++
 .../net/sf/picard/util/IntervalTreeMapTest.java}   |   42 +-
 .../net/sf/picard/util/SamLocusIteratorTest.java   |    2 -
 .../java/net/sf/picard/vcf/MergeVcfsTest.java      |   88 ++--
 .../java/net/sf/picard/vcf/SplitVcfsTest.java      |   35 +-
 .../picard/vcf/VariantContextComparatorTest.java   |    1 +
 .../sf/picard/vcf/VariantContextIteratorTest.java  |   24 -
 .../net/sf/picard/vcf/VcfFormatConverterTest.java  |   62 +--
 .../seekablestream/SeekableStreamFactoryTest.java  |   15 +
 .../broad/tribble/AbstractFeatureReaderTest.java   |   39 +-
 .../java/org/broad/tribble/BinaryFeaturesTest.java |    9 +-
 .../java/org/broad/tribble/FeatureReaderTest.java  |   51 +-
 .../org/broad/tribble/index/IndexFactoryTest.java  |    3 +-
 .../tribble/index/interval/IntervalTreeTest.java   |    2 +-
 .../readers/AsynchronousLineReaderTest.java        |   31 ++
 .../broad/tribble/readers/LineReaderUtilTest.java  |   27 ++
 .../readers/LongLineBufferedReaderTest.java        |   33 ++
 .../java/org/broad/tribble/readers/ReaderTest.java |    7 +-
 .../variant/bcf2/BCF2UtilsUnitTest.java            |   30 ++
 .../variantcontext/VariantContextTestProvider.java |  108 +++--
 .../variantcontext/VariantContextUnitTest.java     |   36 +-
 .../variantcontext/writer/VCFWriterUnitTest.java   |   51 +-
 .../writer/VariantContextWritersUnitTest.java      |   27 +-
 .../variant/vcf/IndexFactoryUnitTest.java          |    6 +-
 .../variant/vcf/VCFHeaderUnitTest.java             |    6 +-
 testdata/net/sf/picard/io/slurptest.txt            |    3 +
 .../sam/MergeBamAlignment/aligned.supplement.sam   |   21 +
 .../picard/vcf/CEUTrio-indels-bad-samples.vcf.idx  |  Bin 0 -> 15212 bytes
 .../vcf/CEUTrio-indels-dissimilar-contigs.vcf.idx  |  Bin 0 -> 13460 bytes
 testdata/net/sf/picard/vcf/CEUTrio-indels.vcf.idx  |  Bin 0 -> 15200 bytes
 .../picard/vcf/CEUTrio-merged-indels-snps.vcf.idx  |  Bin 0 -> 26611 bytes
 .../sf/picard/vcf/CEUTrio-random-scatter-0.vcf.idx |  Bin 0 -> 17156 bytes
 .../sf/picard/vcf/CEUTrio-random-scatter-1.vcf.idx |  Bin 0 -> 18707 bytes
 .../sf/picard/vcf/CEUTrio-random-scatter-2.vcf.idx |  Bin 0 -> 18693 bytes
 .../sf/picard/vcf/CEUTrio-random-scatter-3.vcf.idx |  Bin 0 -> 14381 bytes
 .../sf/picard/vcf/CEUTrio-random-scatter-4.vcf.idx |  Bin 0 -> 14620 bytes
 .../sf/picard/vcf/CEUTrio-random-scatter-5.vcf.idx |  Bin 0 -> 15823 bytes
 testdata/net/sf/picard/vcf/CEUTrio-snps.vcf.idx    |  Bin 0 -> 26434 bytes
 testdata/net/sf/picard/vcf/vcfFormatTest.bcf       |  Bin 28266 -> 86444 bytes
 testdata/net/sf/picard/vcf/vcfFormatTest.bcf.idx   |  Bin 0 -> 26598 bytes
 testdata/net/sf/picard/vcf/vcfFormatTest.vcf       |  264 +++++++++++
 testdata/net/sf/picard/vcf/vcfFormatTest.vcf.idx   |  Bin 0 -> 26586 bytes
 testdata/tribble/large.txt                         |  165 +++++++
 197 files changed, 4266 insertions(+), 1500 deletions(-)

diff --git a/Picard-public.iml b/Picard-public.iml
index 7cf2ee3..8cf5b94 100644
--- a/Picard-public.iml
+++ b/Picard-public.iml
@@ -6,12 +6,14 @@
     </facet>
   </component>
   <component name="NewModuleRootManager" inherit-compiler-output="false">
-    <output url="file://$MODULE_DIR$/classes" />
-    <output-test url="file://$MODULE_DIR$/testclasses" />
+    <output url="file://$MODULE_DIR$/intellij.classes" />
+    <output-test url="file://$MODULE_DIR$/intellij.testclasses" />
     <exclude-output />
     <content url="file://$MODULE_DIR$">
       <sourceFolder url="file://$MODULE_DIR$/src/java" isTestSource="false" />
       <sourceFolder url="file://$MODULE_DIR$/src/tests/java" isTestSource="true" />
+      <excludeFolder url="file://$MODULE_DIR$/.command_tmp" />
+      <excludeFolder url="file://$MODULE_DIR$/classes" />
     </content>
     <orderEntry type="jdk" jdkName="1.6" jdkType="JavaSDK" />
     <orderEntry type="sourceFolder" forTests="false" />
@@ -47,7 +49,7 @@
     <orderEntry type="module-library">
       <library>
         <CLASSES>
-          <root url="jar://$MODULE_DIR$/lib/cofoja-1.0-r139.jar!/" />
+          <root url="jar://$MODULE_DIR$/lib/cofoja/cofoja-1.0-r139.jar!/" />
         </CLASSES>
         <JAVADOC />
         <SOURCES />
diff --git a/Picard-public.ipr b/Picard-public.ipr
index 4d3916b..ab72fb3 100644
--- a/Picard-public.ipr
+++ b/Picard-public.ipr
@@ -55,6 +55,7 @@
       <profile version="1.0" is_locked="false">
         <option name="myName" value="Project Default" />
         <option name="myLocal" value="false" />
+        <inspection_tool class="Convert2Diamond" enabled="false" level="WARNING" enabled_by_default="false" />
         <inspection_tool class="FieldMayBeFinal" enabled="true" level="WARNING" enabled_by_default="true" />
         <inspection_tool class="JavaDoc" enabled="false" level="WARNING" enabled_by_default="false">
           <option name="TOP_LEVEL_CLASS_OPTIONS">
@@ -91,6 +92,7 @@
           <option name="REPORT_VARIABLES" value="true" />
           <option name="REPORT_PARAMETERS" value="true" />
         </inspection_tool>
+        <inspection_tool class="SqlNoDataSourceInspection" enabled="false" level="WARNING" enabled_by_default="false" />
         <inspection_tool class="UnusedDeclaration" enabled="false" level="WARNING" enabled_by_default="false">
           <option name="ADD_MAINS_TO_ENTRIES" value="true" />
           <option name="ADD_APPLET_TO_ENTRIES" value="true" />
@@ -262,11 +264,26 @@
     <option name="projectName" value="Picard-public" />
   </component>
   <component name="ProjectDictionaryState">
+    <dictionary name="jrose">
+      <words>
+        <w>ribosomal</w>
+      </words>
+    </dictionary>
     <dictionary name="mccowan">
       <words>
+        <w>bgzipped</w>
+        <w>codecs</w>
+        <w>endian</w>
+        <w>gzipped</w>
+        <w>indexable</w>
         <w>inferer</w>
         <w>inferrer</w>
+        <w>parsability</w>
         <w>phread</w>
+        <w>seekable</w>
+        <w>tabix</w>
+        <w>tokenizes</w>
+        <w>tribble</w>
       </words>
     </dictionary>
   </component>
diff --git a/build.xml b/build.xml
index 05c8751..5b6c3bd 100755
--- a/build.xml
+++ b/build.xml
@@ -43,15 +43,17 @@
     <!-- Get SVN revision, if available, otherwise leave it blank.  -->
     <exec executable="svnversion" outputproperty="repository.revision" failifexecutionfails="false"/>
     <property name="repository.revision" value=""/>
-    <property name="sam-version" value="1.95"/>
+    <property name="sam-version" value="1.100"/>
     <property name="picard-version" value="${sam-version}"/>
     <property name="tribble-version" value="${sam-version}"/>
     <property name="variant-version" value="${sam-version}"/>
     <property name="command_tmp" value=".command_tmp"/>
     <property name="command-line-html-dir" value="${dist}/html"/>
     <property name="testng.verbosity" value="2"/>
+    <property name="test.debug.port" value="5005" />  <!-- override on the command line if desired -->
 
     <property environment="env"/>
+    <property name="java6.home" value="${env.JAVA6_HOME}"/>
 
     <condition  property="isUnix">
         <os family="unix"/>
@@ -75,6 +77,10 @@
             <pathelement path="${classpath}"/>
             <pathelement location="${classes}"/>
         </path>
+
+        <path id="java6.lib.ref">
+            <fileset dir="${java6.home}/lib" includes="*.jar"/>
+        </path>
     </target>
 
     <!-- CLEAN -->
@@ -163,9 +169,14 @@
             description="Compile and run a single test.">
         <taskdef resource="testngtasks" classpathref="classpath"/>
         <fail unless="name" message="Please provide input test: -Dname=..." />
+
+        <condition property="debug.jvm.args" value="-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=${test.debug.port}" else="">
+          <isset property="test.debug" />
+        </condition>
+
         <testng suitename="samtools-single-test" classpathref="classpath" outputdir="${test.output}"
                 verbose="${testng.verbosity}">
-            <jvmarg line="-Xmx512M"/>
+            <jvmarg line="-Xmx512M ${debug.jvm.args}"/>
             <classpath>
                 <pathelement path="${classes}"/>
                 <pathelement path="${classes.test}"/>
@@ -292,6 +303,7 @@
         <package-and-document-command title="IlluminaBasecallsToSam"         main-class="net.sf.picard.illumina.IlluminaBasecallsToSam"/>
         <package-and-document-command title="CheckIlluminaDirectory"         main-class="net.sf.picard.illumina.CheckIlluminaDirectory"/>
         <package-and-document-command title="IntervalListTools"              main-class="net.sf.picard.util.IntervalListTools"/>
+        <package-and-document-command title="MakeSitesOnlyVcf"               main-class="net.sf.picard.vcf.MakeSitesOnlyVcf"/>
         <package-and-document-command title="MarkDuplicates"                 main-class="net.sf.picard.sam.MarkDuplicates"/>
         <package-and-document-command title="MeanQualityByCycle"             main-class="net.sf.picard.analysis.MeanQualityByCycle"/>
         <package-and-document-command title="MergeBamAlignment"              main-class="net.sf.picard.sam.MergeBamAlignment"/>
@@ -319,6 +331,7 @@
                 </fileset>
             </package-filesets>
         </package-and-document-command>
+        <package-and-document-command title="MarkIlluminaAdapters"         main-class="net.sf.picard.illumina.MarkIlluminaAdapters"/>
         <package-and-document-command title="SplitVcfs"                      main-class="net.sf.picard.vcf.SplitVcfs">
             <package-filesets>
                 <fileset dir=".command_tmp" >
@@ -455,6 +468,8 @@
                    target="${javac.target}">
                 <classpath refid="@{compile.classpath}"/>
                 <compilerarg line="@{compiler.args}" />
+                <!-- ensure that Java 6 bootclasspath is used -->
+                <compilerarg value="-Xbootclasspath/p:${toString:java6.lib.ref}"/>
             </javac>
         </sequential>
     </macrodef>
@@ -479,6 +494,8 @@
                     <pathelement location="${classes}"/>
                 </classpath>
                 <compilerarg line="@{compiler.args}"/>
+                <!-- ensure that Java 6 bootclasspath is used -->
+                <compilerarg value="-Xbootclasspath/p:${toString:java6.lib.ref}"/>
             </javac>
         </sequential>
     </macrodef>
diff --git a/src/java/net/sf/picard/analysis/AlignmentSummaryMetricsCollector.java b/src/java/net/sf/picard/analysis/AlignmentSummaryMetricsCollector.java
index 4e9d8cd..a22f958 100644
--- a/src/java/net/sf/picard/analysis/AlignmentSummaryMetricsCollector.java
+++ b/src/java/net/sf/picard/analysis/AlignmentSummaryMetricsCollector.java
@@ -62,7 +62,7 @@ public class AlignmentSummaryMetricsCollector extends SAMRecordAndReferenceMulti
 
     @Override
     public void acceptRecord(final SAMRecord rec, final ReferenceSequence ref) {
-        if (!rec.getNotPrimaryAlignmentFlag()) {
+        if (!rec.isSecondaryOrSupplementary()) {
             super.acceptRecord(rec, ref);
         }
     }
@@ -215,7 +215,7 @@ public class AlignmentSummaryMetricsCollector extends SAMRecordAndReferenceMulti
             }
 
             public void addRecord(final SAMRecord record, final ReferenceSequence ref) {
-                if (record.getNotPrimaryAlignmentFlag()) {
+                if (record.isSecondaryOrSupplementary()) {
                     // only want 1 count per read so skip non primary alignments
                     return;
                 }
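
For reference, the getNotPrimaryAlignmentFlag() checks replaced throughout this patch are assumed to be equivalent to testing both the secondary and the new supplementary flag; the actual implementation lives in the SAMRecord.java change included in this commit. A minimal sketch of that assumed equivalence (the helper class and method here are hypothetical, not part of the patch):

    import net.sf.samtools.SAMRecord;

    final class SecondaryOrSupplementarySketch {
        // Assumption: SAMRecord.isSecondaryOrSupplementary() combines the two flag getters.
        static boolean isSecondaryOrSupplementary(final SAMRecord rec) {
            return rec.getNotPrimaryAlignmentFlag() || rec.getSupplementaryAlignmentFlag();
        }
    }
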
diff --git a/src/java/net/sf/picard/analysis/CollectAlignmentSummaryMetrics.java b/src/java/net/sf/picard/analysis/CollectAlignmentSummaryMetrics.java
index 07e3f69..5bc4db0 100644
--- a/src/java/net/sf/picard/analysis/CollectAlignmentSummaryMetrics.java
+++ b/src/java/net/sf/picard/analysis/CollectAlignmentSummaryMetrics.java
@@ -25,7 +25,7 @@
 package net.sf.picard.analysis;
 
 import net.sf.picard.reference.ReferenceSequence;
-import net.sf.picard.util.CollectionUtil;
+import net.sf.samtools.util.CollectionUtil;
 import net.sf.picard.cmdline.Option;
 import net.sf.picard.cmdline.Usage;
 import net.sf.picard.io.IoUtil;
diff --git a/src/java/net/sf/picard/analysis/CollectInsertSizeMetrics.java b/src/java/net/sf/picard/analysis/CollectInsertSizeMetrics.java
index cbf5d4f..0c2fc75 100644
--- a/src/java/net/sf/picard/analysis/CollectInsertSizeMetrics.java
+++ b/src/java/net/sf/picard/analysis/CollectInsertSizeMetrics.java
@@ -34,7 +34,7 @@ import net.sf.picard.cmdline.Usage;
 import net.sf.picard.io.IoUtil;
 import net.sf.picard.metrics.MetricsFile;
 import net.sf.picard.reference.ReferenceSequence;
-import net.sf.picard.util.CollectionUtil;
+import net.sf.samtools.util.CollectionUtil;
 import net.sf.picard.util.Log;
 import net.sf.picard.util.RExecutor;
 import net.sf.samtools.SAMFileHeader;
diff --git a/src/java/net/sf/picard/analysis/CollectMultipleMetrics.java b/src/java/net/sf/picard/analysis/CollectMultipleMetrics.java
index 153c5e1..cda3b8d 100644
--- a/src/java/net/sf/picard/analysis/CollectMultipleMetrics.java
+++ b/src/java/net/sf/picard/analysis/CollectMultipleMetrics.java
@@ -4,7 +4,7 @@ import net.sf.picard.cmdline.CommandLineProgram;
 import net.sf.picard.cmdline.Option;
 import net.sf.picard.cmdline.StandardOptionDefinitions;
 import net.sf.picard.cmdline.Usage;
-import net.sf.picard.util.CollectionUtil;
+import net.sf.samtools.util.CollectionUtil;
 
 import java.io.File;
 import java.util.ArrayList;
diff --git a/src/java/net/sf/picard/analysis/CollectRnaSeqMetrics.java b/src/java/net/sf/picard/analysis/CollectRnaSeqMetrics.java
index c0bb7a1..bf35e4b 100644
--- a/src/java/net/sf/picard/analysis/CollectRnaSeqMetrics.java
+++ b/src/java/net/sf/picard/analysis/CollectRnaSeqMetrics.java
@@ -34,6 +34,7 @@ import net.sf.picard.metrics.*;
 import net.sf.picard.reference.ReferenceSequence;
 import net.sf.picard.util.*;
 import net.sf.samtools.*;
+import net.sf.samtools.util.CollectionUtil;
 
 import java.io.File;
 import java.util.*;
diff --git a/src/java/net/sf/picard/analysis/MeanQualityByCycle.java b/src/java/net/sf/picard/analysis/MeanQualityByCycle.java
index f09d218..5810418 100644
--- a/src/java/net/sf/picard/analysis/MeanQualityByCycle.java
+++ b/src/java/net/sf/picard/analysis/MeanQualityByCycle.java
@@ -169,7 +169,7 @@ public class MeanQualityByCycle extends SinglePassSamProgram {
         // Skip unwanted records
         if (PF_READS_ONLY && rec.getReadFailsVendorQualityCheckFlag()) return;
         if (ALIGNED_READS_ONLY && rec.getReadUnmappedFlag()) return;
-        if (rec.getNotPrimaryAlignmentFlag()) return;
+        if (rec.isSecondaryOrSupplementary()) return;
 
         q.addRecord(rec);
         oq.addRecord(rec);
diff --git a/src/java/net/sf/picard/analysis/QualityScoreDistribution.java b/src/java/net/sf/picard/analysis/QualityScoreDistribution.java
index 7a97d93..4b0a670 100644
--- a/src/java/net/sf/picard/analysis/QualityScoreDistribution.java
+++ b/src/java/net/sf/picard/analysis/QualityScoreDistribution.java
@@ -99,7 +99,7 @@ public class QualityScoreDistribution extends SinglePassSamProgram {
         // Skip unwanted records
         if (PF_READS_ONLY && rec.getReadFailsVendorQualityCheckFlag()) return;
         if (ALIGNED_READS_ONLY && rec.getReadUnmappedFlag()) return;
-        if (rec.getNotPrimaryAlignmentFlag()) return;
+        if (rec.isSecondaryOrSupplementary()) return;
 
         final byte[] bases = rec.getReadBases();
         final byte[] quals = rec.getBaseQualities();
diff --git a/src/java/net/sf/picard/analysis/directed/CollectTargetedMetrics.java b/src/java/net/sf/picard/analysis/directed/CollectTargetedMetrics.java
index dc4655a..c9cd0fe 100644
--- a/src/java/net/sf/picard/analysis/directed/CollectTargetedMetrics.java
+++ b/src/java/net/sf/picard/analysis/directed/CollectTargetedMetrics.java
@@ -8,7 +8,7 @@ import net.sf.picard.io.IoUtil;
 import net.sf.picard.metrics.MetricsFile;
 import net.sf.picard.reference.ReferenceSequenceFile;
 import net.sf.picard.reference.ReferenceSequenceFileFactory;
-import net.sf.picard.util.CollectionUtil;
+import net.sf.samtools.util.CollectionUtil;
 import net.sf.picard.util.IntervalList;
 import net.sf.picard.util.Log;
 import net.sf.picard.util.ProgressLogger;
@@ -16,7 +16,6 @@ import net.sf.samtools.SAMFileReader;
 import net.sf.samtools.SAMReadGroupRecord;
 import net.sf.samtools.SAMRecord;
 import net.sf.samtools.util.SequenceUtil;
-import net.sf.samtools.util.StopWatch;
 
 import java.io.File;
 import java.util.Iterator;
diff --git a/src/java/net/sf/picard/analysis/directed/InsertSizeMetricsCollector.java b/src/java/net/sf/picard/analysis/directed/InsertSizeMetricsCollector.java
index e5a2cec..3e2167b 100644
--- a/src/java/net/sf/picard/analysis/directed/InsertSizeMetricsCollector.java
+++ b/src/java/net/sf/picard/analysis/directed/InsertSizeMetricsCollector.java
@@ -63,7 +63,7 @@ public class InsertSizeMetricsCollector extends MultiLevelCollector<InsertSizeMe
                 record.getReadUnmappedFlag() ||
                 record.getMateUnmappedFlag() ||
                 record.getFirstOfPairFlag() ||
-                record.getNotPrimaryAlignmentFlag() ||
+                record.isSecondaryOrSupplementary() ||
                 record.getDuplicateReadFlag() ||
                 record.getInferredInsertSize() == 0) {
             return;
diff --git a/src/java/net/sf/picard/analysis/directed/RnaSeqMetricsCollector.java b/src/java/net/sf/picard/analysis/directed/RnaSeqMetricsCollector.java
index 8bda976..87ed62b 100644
--- a/src/java/net/sf/picard/analysis/directed/RnaSeqMetricsCollector.java
+++ b/src/java/net/sf/picard/analysis/directed/RnaSeqMetricsCollector.java
@@ -96,7 +96,7 @@ public class RnaSeqMetricsCollector extends SAMRecordMultiLevelCollector<RnaSeqM
 
         public void acceptRecord(SAMRecord rec) {
             // Filter out some reads, and collect the total number of PF bases
-            if (rec.getReadFailsVendorQualityCheckFlag() || rec.getNotPrimaryAlignmentFlag()) return;
+            if (rec.getReadFailsVendorQualityCheckFlag() || rec.isSecondaryOrSupplementary()) return;
 
             this.metrics.PF_BASES += rec.getReadLength();
             if (rec.getReadUnmappedFlag()) return;
diff --git a/src/java/net/sf/picard/analysis/directed/TargetMetricsCollector.java b/src/java/net/sf/picard/analysis/directed/TargetMetricsCollector.java
index 0c056a2..2084639 100644
--- a/src/java/net/sf/picard/analysis/directed/TargetMetricsCollector.java
+++ b/src/java/net/sf/picard/analysis/directed/TargetMetricsCollector.java
@@ -296,7 +296,7 @@ public abstract class TargetMetricsCollector<METRIC_TYPE extends MultilevelMetri
         /** Adds information about an individual SAMRecord to the statistics. */
         public void acceptRecord(final SAMRecord rec) {
             // Just plain avoid records that are marked as not-primary
-            if (rec.getNotPrimaryAlignmentFlag()) return;
+            if (rec.isSecondaryOrSupplementary()) return;
 
             this.metrics.TOTAL_READS += 1;
 
diff --git a/src/java/net/sf/picard/cmdline/CommandLineParser.java b/src/java/net/sf/picard/cmdline/CommandLineParser.java
index a75b36c..68a44fc 100644
--- a/src/java/net/sf/picard/cmdline/CommandLineParser.java
+++ b/src/java/net/sf/picard/cmdline/CommandLineParser.java
@@ -31,7 +31,7 @@ import java.lang.reflect.ParameterizedType;
 import java.lang.reflect.Type;
 import java.util.*;
 
-import net.sf.picard.util.CollectionUtil.MultiMap;
+import net.sf.samtools.util.CollectionUtil.MultiMap;
 import net.sf.samtools.util.StringUtil;
 import net.sf.samtools.util.CloserUtil;
 import net.sf.picard.PicardException;
diff --git a/src/java/net/sf/picard/filter/ReadNameFilter.java b/src/java/net/sf/picard/filter/ReadNameFilter.java
index 113c81f..07aafa8 100644
--- a/src/java/net/sf/picard/filter/ReadNameFilter.java
+++ b/src/java/net/sf/picard/filter/ReadNameFilter.java
@@ -23,21 +23,19 @@
  */
 package net.sf.picard.filter;
 
-import net.sf.picard.PicardException;
-import net.sf.picard.io.IoUtil;
-import net.sf.samtools.SAMRecord;
-
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.IOException;
 import java.util.HashSet;
-import java.util.Scanner;
 import java.util.Set;
+import net.sf.picard.PicardException;
+import net.sf.picard.io.IoUtil;
+import net.sf.samtools.SAMRecord;
 
 /**
  * Filter by a set of specified readnames
- *
- * $Id$
+ * <p/>
+ * $Id: ReadNameFilter.java 1557 2013-09-19 21:50:03Z cristyn $
  */
 public class ReadNameFilter implements SamRecordFilter {
 
@@ -49,25 +47,22 @@ public class ReadNameFilter implements SamRecordFilter {
         IoUtil.assertFileIsReadable(readNameFilterFile);
         IoUtil.assertFileSizeNonZero(readNameFilterFile);
 
-        final BufferedReader is;
-
         try {
-            is = IoUtil.openFileForBufferedReading(readNameFilterFile);
-        } catch (IOException e) {
-            throw new PicardException(e.getMessage(), e);
-        }
+            final BufferedReader in = IoUtil.openFileForBufferedReading(readNameFilterFile);
 
-        final Scanner scanner = new Scanner(is);
+            String line = null;
 
-        while (scanner.hasNext()) {
-            final String line = scanner.nextLine();
-
-            if (!line.trim().isEmpty()) {
-                readNameFilterSet.add(line.split("\\s+")[0]);
+            while ((line = in.readLine()) != null) {
+                if (!line.trim().isEmpty()) {
+                    readNameFilterSet.add(line.split("\\s+")[0]);
+                }
             }
+
+            in.close();
+        } catch (IOException e) {
+            throw new PicardException(e.getMessage(), e);
         }
 
-        scanner.close();
         this.includeReads = includeReads;
     }
 
diff --git a/src/java/net/sf/picard/filter/SecondaryOrSupplementaryFilter.java b/src/java/net/sf/picard/filter/SecondaryOrSupplementaryFilter.java
new file mode 100644
index 0000000..1aad60c
--- /dev/null
+++ b/src/java/net/sf/picard/filter/SecondaryOrSupplementaryFilter.java
@@ -0,0 +1,32 @@
+package net.sf.picard.filter;
+
+import net.sf.samtools.SAMRecord;
+
+/**
+ * Filters out SAMRecords that have the NotPrimaryAlignment or Supplementary flag set.
+ * This class should be viewed as a replacement for NotPrimarySkippingIterator,
+ * which was left unchanged so that its behavior continues to match its name.
+ * $Id$
+ */
+public class SecondaryOrSupplementaryFilter  implements SamRecordFilter {
+    /**
+     * @param record the SAMRecord to evaluate
+     * @return true if the SAMRecord matches the filter, otherwise false
+     */
+    public boolean filterOut(final SAMRecord record) {
+        return record.isSecondaryOrSupplementary();
+    }
+
+    /**
+     * Determines whether a pair of SAMRecords matches this filter
+     *
+     * @param first  the first SAMRecord to evaluate
+     * @param second the second SAMRecord to evaluate
+     *
+     * @return true if either SAMRecord matches the filter, otherwise false
+     */
+    public boolean filterOut(final SAMRecord first, final SAMRecord second) {
+        // if either fails, exclude them both
+        return first.isSecondaryOrSupplementary() || second.isSecondaryOrSupplementary();
+    }
+}
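
As a usage illustration (not part of the patch), the new filter can be applied while iterating over a SAM/BAM file; the sketch below relies only on the filterOut(SAMRecord) method defined above and the existing SAMFileReader API, and the file path is a placeholder:

    import java.io.File;
    import net.sf.picard.filter.SecondaryOrSupplementaryFilter;
    import net.sf.samtools.SAMFileReader;
    import net.sf.samtools.SAMRecord;

    public class PrimaryOnlyCount {
        public static void main(final String[] args) {
            final SAMFileReader in = new SAMFileReader(new File("input.bam"));
            final SecondaryOrSupplementaryFilter filter = new SecondaryOrSupplementaryFilter();
            long primary = 0;
            for (final SAMRecord rec : in) {
                // Keep only primary alignment records.
                if (!filter.filterOut(rec)) primary++;
            }
            in.close();
            System.out.println("Primary records: " + primary);
        }
    }
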
diff --git a/src/java/net/sf/picard/illumina/ExtractIlluminaBarcodes.java b/src/java/net/sf/picard/illumina/ExtractIlluminaBarcodes.java
index 8ffb5d3..60b7dfc 100644
--- a/src/java/net/sf/picard/illumina/ExtractIlluminaBarcodes.java
+++ b/src/java/net/sf/picard/illumina/ExtractIlluminaBarcodes.java
@@ -24,6 +24,7 @@
 package net.sf.picard.illumina;
 
 import net.sf.picard.illumina.parser.*;
+import net.sf.picard.illumina.parser.readers.BclQualityEvaluationStrategy;
 import net.sf.picard.util.IlluminaUtil;
 import net.sf.picard.util.Log;
 import net.sf.picard.util.TabbedTextFileWithHeaderParser;
@@ -108,6 +109,10 @@ public class ExtractIlluminaBarcodes extends CommandLineProgram {
     @Option(shortName="Q", doc="Minimum base quality. Any barcode bases falling below this quality will be considered a mismatch even in the bases match.")
     public int MINIMUM_BASE_QUALITY = 0;
 
+    @Option(doc="The minimum quality (after transforming 0s to 1s) expected from reads.  If qualities are lower than this value, an error is thrown." +
+            "The default of 2 is what the Illumina's spec describes as the minimum, but in practice the value has been observed lower.")
+    public int MINIMUM_QUALITY = BclQualityEvaluationStrategy.ILLUMINA_ALLEGED_MINIMUM_QUALITY;
+    
     @Option(shortName="GZIP", doc="Compress output s_l_t_barcode.txt files using gzip and append a .gz extension to the filenames.")
     public boolean COMPRESS_OUTPUTS = false;
 
@@ -130,6 +135,7 @@ public class ExtractIlluminaBarcodes extends CommandLineProgram {
     private BarcodeMetric noMatchMetric = null;
 
     private final NumberFormat tileNumberFormatter = NumberFormat.getNumberInstance();
+    private BclQualityEvaluationStrategy bclQualityEvaluationStrategy;
 
     public ExtractIlluminaBarcodes() {
         tileNumberFormatter.setMinimumIntegerDigits(4);
@@ -254,6 +260,12 @@ public class ExtractIlluminaBarcodes extends CommandLineProgram {
             }
         }
 
+        // Warn about minimum qualities and assert that we've achieved the minimum.
+        for (Map.Entry<Byte, Integer> entry : bclQualityEvaluationStrategy.getPoorQualityFrequencies().entrySet()) {
+            log.warn(String.format("Observed low quality of %s %s times.", entry.getKey(), entry.getValue()));
+        }
+        bclQualityEvaluationStrategy.assertMinimumQualities();
+        
         // Calculate the normalized matches
         if (totalPfReadsAssigned > 0) {
             final double mean = (double) totalPfReadsAssigned / (double) barcodeToMetrics.values().size();
@@ -289,6 +301,8 @@ public class ExtractIlluminaBarcodes extends CommandLineProgram {
     protected String[] customCommandLineValidation() {
         final ArrayList<String> messages = new ArrayList<String>();
 
+        this.bclQualityEvaluationStrategy = new BclQualityEvaluationStrategy(MINIMUM_QUALITY);
+        
         /**
          * In extract illumina barcodes we NEVER want to look at the template reads, therefore replace them with skips because
          * IlluminaDataProvider and its factory will not open these nor produce ClusterData with the template reads in them, thus reducing
@@ -298,7 +312,7 @@ public class ExtractIlluminaBarcodes extends CommandLineProgram {
         final IlluminaDataType[] datatypes = (MINIMUM_BASE_QUALITY > 0) ? 
                                              new IlluminaDataType[] {IlluminaDataType.BaseCalls, IlluminaDataType.PF, IlluminaDataType.QualityScores}:
                                              new IlluminaDataType[] {IlluminaDataType.BaseCalls, IlluminaDataType.PF};
-        factory = new IlluminaDataProviderFactory(BASECALLS_DIR, LANE, readStructure, datatypes);
+        factory = new IlluminaDataProviderFactory(BASECALLS_DIR, LANE, readStructure, bclQualityEvaluationStrategy, datatypes);
         outputReadStructure = factory.getOutputReadStructure();
 
         if (BARCODE_FILE != null) {
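
The MINIMUM_QUALITY option added above feeds the new BclQualityEvaluationStrategy, whose reporting methods are called at the end of a run in this commit. The following is a hedged sketch of that reporting step in isolation; the strategy class, its constructor, getPoorQualityFrequencies() and assertMinimumQualities() are taken from this patch, while the wrapper class is hypothetical:

    import java.util.Map;
    import net.sf.picard.illumina.parser.readers.BclQualityEvaluationStrategy;

    public class BclQualityCheckSketch {
        // Mirrors how ExtractIlluminaBarcodes and IlluminaBasecallsConverter
        // report and enforce the configured minimum quality after processing.
        public static void checkAndReport(final BclQualityEvaluationStrategy strategy) {
            for (final Map.Entry<Byte, Integer> entry : strategy.getPoorQualityFrequencies().entrySet()) {
                System.err.println("Observed low quality of " + entry.getKey() + " " + entry.getValue() + " times.");
            }
            strategy.assertMinimumQualities();
        }

        public static void main(final String[] args) {
            checkAndReport(new BclQualityEvaluationStrategy(
                    BclQualityEvaluationStrategy.ILLUMINA_ALLEGED_MINIMUM_QUALITY));
        }
    }
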
diff --git a/src/java/net/sf/picard/illumina/IlluminaBasecallsConverter.java b/src/java/net/sf/picard/illumina/IlluminaBasecallsConverter.java
index 831e369..3f5fd86 100644
--- a/src/java/net/sf/picard/illumina/IlluminaBasecallsConverter.java
+++ b/src/java/net/sf/picard/illumina/IlluminaBasecallsConverter.java
@@ -25,6 +25,7 @@ package net.sf.picard.illumina;
 
 import net.sf.picard.PicardException;
 import net.sf.picard.illumina.parser.*;
+import net.sf.picard.illumina.parser.readers.BclQualityEvaluationStrategy;
 import net.sf.picard.util.FileChannelJDKBugWorkAround;
 import net.sf.picard.util.Log;
 import net.sf.picard.util.ProgressLogger;
@@ -97,6 +98,7 @@ public class IlluminaBasecallsConverter<CLUSTER_OUTPUT_RECORD> {
 
     private final Comparator<CLUSTER_OUTPUT_RECORD> outputRecordComparator;
 
+    private final BclQualityEvaluationStrategy bclQualityEvaluationStrategy;
     private final Map<String, ? extends ConvertedClusterDataWriter<CLUSTER_OUTPUT_RECORD>> barcodeRecordWriterMap;
     private final int maxReadsInRamPerTile;
     private final boolean demultiplex;
@@ -142,6 +144,7 @@ public class IlluminaBasecallsConverter<CLUSTER_OUTPUT_RECORD> {
                                       final Comparator<CLUSTER_OUTPUT_RECORD> outputRecordComparator,
                                       final SortingCollection.Codec<CLUSTER_OUTPUT_RECORD> codecPrototype,
                                       final Class<CLUSTER_OUTPUT_RECORD> outputRecordClass,
+                                      final BclQualityEvaluationStrategy bclQualityEvaluationStrategy,
                                       final boolean applyEamssFiltering) {
         this.barcodeRecordWriterMap = barcodeRecordWriterMap;
         this.demultiplex = demultiplex;
@@ -150,6 +153,7 @@ public class IlluminaBasecallsConverter<CLUSTER_OUTPUT_RECORD> {
         this.outputRecordComparator = outputRecordComparator;
         this.codecPrototype = codecPrototype;
         this.outputRecordClass = outputRecordClass;
+        this.bclQualityEvaluationStrategy = bclQualityEvaluationStrategy;
 
         // If we're forcing garbage collection, collect every 5 minutes in a daemon thread.
         if (forceGc) {
@@ -169,7 +173,7 @@ public class IlluminaBasecallsConverter<CLUSTER_OUTPUT_RECORD> {
             gcTimerTask = null;
         }
 
-        this.factory = new IlluminaDataProviderFactory(basecallsDir, lane, readStructure, getDataTypesFromReadStructure(readStructure, demultiplex));
+        this.factory = new IlluminaDataProviderFactory(basecallsDir, lane, readStructure, bclQualityEvaluationStrategy, getDataTypesFromReadStructure(readStructure, demultiplex));
         this.factory.setApplyEamssFiltering(applyEamssFiltering);
 
         if (numProcessors == 0) {
@@ -239,10 +243,16 @@ public class IlluminaBasecallsConverter<CLUSTER_OUTPUT_RECORD> {
                 tileReadAggregator.awaitWorkComplete();
             } catch (InterruptedException e) {
                 log.error(e, "Failure encountered in worker thread; attempting to shut down remaining worker threads and terminate ...");
-                tileReadAggregator.shutdown();
                 throw new PicardException("Failure encountered in worker thread; see log for details.");
+            } finally {
+                tileReadAggregator.shutdown();
             }
 
+            for (Map.Entry<Byte, Integer> entry : bclQualityEvaluationStrategy.getPoorQualityFrequencies().entrySet()) {
+                log.warn(String.format("Observed low quality of %s %s times.", entry.getKey(), entry.getValue()));
+            }
+            bclQualityEvaluationStrategy.assertMinimumQualities();
+            
         } finally {
             try {
                 gcTimerTask.cancel();
diff --git a/src/java/net/sf/picard/illumina/IlluminaBasecallsToFastq.java b/src/java/net/sf/picard/illumina/IlluminaBasecallsToFastq.java
index e78ca22..3a5bdfa 100644
--- a/src/java/net/sf/picard/illumina/IlluminaBasecallsToFastq.java
+++ b/src/java/net/sf/picard/illumina/IlluminaBasecallsToFastq.java
@@ -32,8 +32,9 @@ import net.sf.picard.fastq.*;
 import net.sf.picard.illumina.parser.ClusterData;
 import net.sf.picard.illumina.parser.ReadData;
 import net.sf.picard.illumina.parser.ReadStructure;
+import net.sf.picard.illumina.parser.readers.BclQualityEvaluationStrategy;
 import net.sf.picard.io.IoUtil;
-import net.sf.picard.util.CollectionUtil;
+import net.sf.samtools.util.CollectionUtil;
 import net.sf.picard.util.IlluminaUtil;
 import net.sf.picard.util.Log;
 import net.sf.picard.util.TabbedTextFileWithHeaderParser;
@@ -112,6 +113,11 @@ public class IlluminaBasecallsToFastq extends CommandLineProgram {
             " run, each SortingCollection gets this value/number of indices.")
     public int MAX_READS_IN_RAM_PER_TILE = 1200000;
 
+    @Option(doc="The minimum quality (after transforming 0s to 1s) expected from reads.  If qualities are lower than this value, an error is thrown." +
+            "The default of 2 is what the Illumina's spec describes as the minimum, but in practice the value has been observed lower.")
+    public int MINIMUM_QUALITY = BclQualityEvaluationStrategy.ILLUMINA_ALLEGED_MINIMUM_QUALITY;
+
+
     private final Map<String, FastqRecordsWriter> barcodeFastqWriterMap = new HashMap<String, FastqRecordsWriter>();
     private ReadStructure readStructure;
     IlluminaBasecallsConverter<FastqRecordsForCluster> basecallsConverter;
@@ -125,6 +131,7 @@ public class IlluminaBasecallsToFastq extends CommandLineProgram {
                     r2.templateRecords[0].getReadHeader());
         }
     };
+    private BclQualityEvaluationStrategy bclQualityEvaluationStrategy;
 
 
     @Override
@@ -132,6 +139,7 @@ public class IlluminaBasecallsToFastq extends CommandLineProgram {
         initialize();
 
         basecallsConverter.doTileProcessing();
+
         return 0;
     }
 
@@ -139,6 +147,7 @@ public class IlluminaBasecallsToFastq extends CommandLineProgram {
      * Prepares loggers, initiates garbage collection thread, parses arguments and initializes variables appropriately.
      */
     private void initialize() {
+        bclQualityEvaluationStrategy = new BclQualityEvaluationStrategy(MINIMUM_QUALITY);
         readStructure = new ReadStructure(READ_STRUCTURE);
         if (MULTIPLEX_PARAMS != null) {
             IoUtil.assertFileIsReadable(MULTIPLEX_PARAMS);
@@ -156,7 +165,7 @@ public class IlluminaBasecallsToFastq extends CommandLineProgram {
                 barcodeFastqWriterMap, demultiplex, MAX_READS_IN_RAM_PER_TILE/readsPerCluster, TMP_DIR, NUM_PROCESSORS,
                 FORCE_GC, FIRST_TILE, TILE_LIMIT, queryNameComparator,
                 new FastqRecordsForClusterCodec(readStructure.templates.length(),
-                readStructure.barcodes.length()), FastqRecordsForCluster.class,
+                readStructure.barcodes.length()), FastqRecordsForCluster.class, bclQualityEvaluationStrategy,
                 this.APPLY_EAMSS_FILTER);
 
         log.info("READ STRUCTURE IS " + readStructure.toString());
diff --git a/src/java/net/sf/picard/illumina/IlluminaBasecallsToSam.java b/src/java/net/sf/picard/illumina/IlluminaBasecallsToSam.java
index af28526..54c47ab 100644
--- a/src/java/net/sf/picard/illumina/IlluminaBasecallsToSam.java
+++ b/src/java/net/sf/picard/illumina/IlluminaBasecallsToSam.java
@@ -30,8 +30,9 @@ import net.sf.picard.cmdline.Option;
 import net.sf.picard.cmdline.StandardOptionDefinitions;
 import net.sf.picard.cmdline.Usage;
 import net.sf.picard.illumina.parser.ReadStructure;
+import net.sf.picard.illumina.parser.readers.BclQualityEvaluationStrategy;
 import net.sf.picard.io.IoUtil;
-import net.sf.picard.util.CollectionUtil;
+import net.sf.samtools.util.CollectionUtil;
 import net.sf.picard.util.IlluminaUtil;
 import net.sf.picard.util.IlluminaUtil.IlluminaAdapterPair;
 import net.sf.picard.util.Log;
@@ -171,11 +172,16 @@ public class IlluminaBasecallsToSam extends CommandLineProgram {
             " run, each SortingCollection gets this value/number of indices.")
     public int MAX_READS_IN_RAM_PER_TILE = 1200000;
 
+    @Option(doc="The minimum quality (after transforming 0s to 1s) expected from reads.  If qualities are lower than this value, an error is thrown." +
+            "The default of 2 is what the Illumina's spec describes as the minimum, but in practice the value has been observed lower.")
+    public int MINIMUM_QUALITY = BclQualityEvaluationStrategy.ILLUMINA_ALLEGED_MINIMUM_QUALITY;
+
+
     private final Map<String, SAMFileWriterWrapper> barcodeSamWriterMap = new HashMap<String, SAMFileWriterWrapper>();
     private ReadStructure readStructure;
     IlluminaBasecallsConverter<SAMRecordsForCluster> basecallsConverter;
     private static final Log log = Log.getInstance(IlluminaBasecallsToSam.class);
-
+    private BclQualityEvaluationStrategy bclQualityEvaluationStrategy;
 
     @Override
     protected int doWork() {
@@ -188,6 +194,8 @@ public class IlluminaBasecallsToSam extends CommandLineProgram {
      * Prepares loggers, initiates garbage collection thread, parses arguments and initializes variables appropriately.
      */
     private void initialize() {
+        this.bclQualityEvaluationStrategy = new BclQualityEvaluationStrategy(MINIMUM_QUALITY);
+        
         if (OUTPUT != null) {
             IoUtil.assertFileIsWritable(OUTPUT);
         }
@@ -209,7 +217,7 @@ public class IlluminaBasecallsToSam extends CommandLineProgram {
         basecallsConverter = new IlluminaBasecallsConverter<SAMRecordsForCluster>(BASECALLS_DIR, LANE, readStructure,
                 barcodeSamWriterMap, true, MAX_READS_IN_RAM_PER_TILE/numOutputRecords, TMP_DIR, NUM_PROCESSORS, FORCE_GC,
                 FIRST_TILE, TILE_LIMIT, new QueryNameComparator(), new Codec(numOutputRecords), SAMRecordsForCluster.class,
-                this.APPLY_EAMSS_FILTER);
+                bclQualityEvaluationStrategy, this.APPLY_EAMSS_FILTER);
 
         log.info("DONE_READING STRUCTURE IS " + readStructure.toString());
 
diff --git a/src/java/net/sf/picard/illumina/MarkIlluminaAdapters.java b/src/java/net/sf/picard/illumina/MarkIlluminaAdapters.java
new file mode 100644
index 0000000..ea60091
--- /dev/null
+++ b/src/java/net/sf/picard/illumina/MarkIlluminaAdapters.java
@@ -0,0 +1,235 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2009 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+
+package net.sf.picard.illumina;
+
+import net.sf.picard.PicardException;
+import net.sf.picard.cmdline.CommandLineProgram;
+import net.sf.picard.cmdline.Option;
+import net.sf.picard.cmdline.StandardOptionDefinitions;
+import net.sf.picard.cmdline.Usage;
+import net.sf.picard.io.IoUtil;
+import net.sf.picard.metrics.MetricsFile;
+import net.sf.picard.sam.ReservedTagConstants;
+import net.sf.picard.util.*;
+import net.sf.samtools.*;
+import net.sf.samtools.util.SequenceUtil;
+import net.sf.samtools.util.StringUtil;
+
+import java.io.File;
+import java.util.Iterator;
+
+/**
+ * Command line program to mark the location of adapter sequences.
+ * This also outputs a histogram of metrics describing the clipped bases
+ *
+ * @author Tim Fennell (adapted by mborkan at broadinstitute.org)
+ */
+public class MarkIlluminaAdapters extends CommandLineProgram {
+
+    // The following attributes define the command-line arguments
+    @Usage
+    public String USAGE =
+            getStandardUsagePreamble() + "Reads a SAM or BAM file and rewrites it with new adapter-trimming tags.\n" +
+                    "Clears any existing adapter-trimming tags (XT:i:).\n" +
+                    "Only works for unaligned files in query-name order.\n" +
+                    "Note: This is a utility program and will not be run in the pipeline.\n";
+
+    @Option(shortName=StandardOptionDefinitions.INPUT_SHORT_NAME)
+    public File INPUT;
+    @Option(doc="If output is not specified, just the metrics are generated",
+            shortName=StandardOptionDefinitions.OUTPUT_SHORT_NAME, optional=true)
+    public File OUTPUT;
+    @Option(doc="Histogram showing counts of bases_clipped in how many reads", shortName="M")
+    public File METRICS;
+    @Option(doc="The minimum number of bases that must match the adapter that will be clipped. Defaults to " +
+            ClippingUtility.MIN_MATCH_PE_BASES + " if paired-end, otherwise" + ClippingUtility.MIN_MATCH_BASES +
+            "/nThe stricter match used when matching 2 reads will be twice this.",
+            optional=true)
+    public Integer MIN_MATCH_BASES;
+    @Option(doc="The percentage of errors allowed when matching the adapter sequence. Defaults to " +
+            ClippingUtility.MAX_PE_ERROR_RATE + " if paired-end, otherwise " + ClippingUtility.MAX_ERROR_RATE,
+            optional=true)
+    public Double MAX_ERROR_RATE;
+    @Option(doc="Whether this is a paired-end run. ", shortName="PE")
+    public Boolean PAIRED_RUN;
+    @Option(doc="Which adapters to use, PAIRED_END, INDEXED, or SINGLE_END",
+            mutex={"FIVE_PRIME_ADAPTER", "THREE_PRIME_ADAPTER"})
+    // this probably only makes sense for paired_run where you need to specify either PAIRED_END or INDEXED?
+    //                         or for non-paired_run where you need to specify either SINGLE_END or INDEXED?
+    // but we won't enforce this.
+    public IlluminaUtil.IlluminaAdapterPair ADAPTERS;
+
+    @Option(doc="For specifying adapters other than standard Illumina", mutex = {"ADAPTERS"})
+    public String FIVE_PRIME_ADAPTER;
+    @Option(doc="For specifying adapters other than standard Illumina", mutex = {"ADAPTERS"})
+    public String THREE_PRIME_ADAPTER;
+
+    private static final Log log = Log.getInstance(MarkIlluminaAdapters.class);
+
+    @Override
+    protected String[] customCommandLineValidation() {
+        // set default thresholds based on what kind of run
+        if (PAIRED_RUN){
+            if (MIN_MATCH_BASES == null) MIN_MATCH_BASES = ClippingUtility.MIN_MATCH_PE_BASES;
+            if (MAX_ERROR_RATE == null) MAX_ERROR_RATE = ClippingUtility.MAX_PE_ERROR_RATE;
+            // For paired runs, you may actually want to specify all 4 thresholds
+            // so the stricter test when mismatch can be controlled.
+            // We'll assume that the stricter test will be twice the min_match_bases
+        } else {
+            if (MIN_MATCH_BASES == null) MIN_MATCH_BASES = ClippingUtility.MIN_MATCH_BASES;
+            if (MAX_ERROR_RATE == null) MAX_ERROR_RATE = ClippingUtility.MAX_ERROR_RATE;
+        }
+        return null;
+    }
+
+    public static void main(String[] args) {
+        System.exit(new MarkIlluminaAdapters().instanceMain(args));
+    }
+
+    @Override
+    protected int doWork() {
+        IoUtil.assertFileIsReadable(INPUT);
+        IoUtil.assertFileIsWritable(METRICS);
+
+        SAMFileReader in = new SAMFileReader(INPUT);
+        SAMFileWriter out = null;
+        if (OUTPUT != null) {
+            IoUtil.assertFileIsWritable(OUTPUT);
+            out = new SAMFileWriterFactory().makeSAMOrBAMWriter(in.getFileHeader(), true, OUTPUT);
+        }
+
+        Histogram<Integer> histo = new Histogram<Integer>("clipped_bases", "read_count");
+
+        // check sort order in the header - must be queryName for paired end runs
+        if (PAIRED_RUN && !in.getFileHeader().getSortOrder().equals(SAMFileHeader.SortOrder.queryname)) {
+            throw new PicardException("Input BAM file must be sorted by queryname");
+        }
+
+        final AdapterPair adapters;
+        if (ADAPTERS != null) {
+            adapters = ADAPTERS;
+        } else {
+            adapters = new CustomAdapterPair(FIVE_PRIME_ADAPTER, THREE_PRIME_ADAPTER);
+        }
+        // The following loop is roughly the same as "for (SAMRecord rec : in){"
+        final ProgressLogger progress = new ProgressLogger(log, 1000000, "Read");
+        for (Iterator<SAMRecord> iter = in.iterator(); iter.hasNext();) {
+            SAMRecord rec = iter.next();
+
+            //  clear any existing trim on rec
+            rec.setAttribute(ReservedTagConstants.XT, null);
+
+            SAMRecord rec2 = null;
+            if (PAIRED_RUN) {
+                if (rec.getFirstOfPairFlag() || rec.getSecondOfPairFlag()) {
+                    // the secondOfPair should be the next record
+                    rec2 = iter.hasNext() ? iter.next() : null;
+                    if (rec2 == null) {
+                        throw new PicardException("Missing second read for " + rec);
+                    }
+
+                    // clear any existing trim on rec2
+                    rec2.setAttribute(ReservedTagConstants.XT, null);
+                    if (!rec.getReadName().equals(rec2.getReadName())){
+                        throw new PicardException("Read names of two paired reads differ: " +
+                                rec.getReadName() + ", " + rec2.getReadName());
+                    }
+
+                    // establish which of pair is first and which second
+                    SAMRecord firstRead;
+                    SAMRecord secondRead;
+                    if (rec.getFirstOfPairFlag()){
+                        firstRead = rec;
+                        secondRead = rec2;
+                    } else {
+                        firstRead = rec2;
+                        secondRead = rec;
+                    }
+                    if (!firstRead.getFirstOfPairFlag()){
+                        throw new PicardException("first of two reads doesn't have getFirstOfPairFlag()");
+                    }
+                    if (!secondRead.getSecondOfPairFlag()){
+                        throw new PicardException("second of two reads doesn't have getSecondOfPairFlag()");
+                    }
+
+                    String warnString = ClippingUtility.adapterTrimIlluminaPairedReads(firstRead, secondRead,
+                            adapters, MIN_MATCH_BASES, MAX_ERROR_RATE);
+                    if (warnString != null) {
+                        log.info("Adapter trimming " + warnString);
+                    }
+                } else {
+                    throw new PicardException("Non-paired reads in a paired run " + rec);
+                }
+            } else { // not a paired run
+                ClippingUtility.adapterTrimIlluminaSingleRead(rec,
+                        adapters, MIN_MATCH_BASES, MAX_ERROR_RATE);
+            }
+
+            if (out != null) out.addAlignment(rec);
+            if (out != null && rec2 != null) out.addAlignment(rec2);
+
+            Integer trimPoint = rec.getIntegerAttribute(ReservedTagConstants.XT);
+            if (trimPoint != null) {
+                histo.increment(rec.getReadLength() - trimPoint + 1);
+            }
+
+            progress.record(rec);
+        }
+
+        if (out != null) out.close();
+
+        MetricsFile<?,Integer> metricsFile = getMetricsFile();
+        metricsFile.setHistogram(histo);
+        metricsFile.write(METRICS);
+
+        return 0;
+    }
+
+    private class CustomAdapterPair implements AdapterPair {
+
+        final String fivePrime, threePrime, fivePrimeReadOrder;
+        final byte[]  fivePrimeBytes, threePrimeBytes, fivePrimeReadOrderBytes;
+
+        private CustomAdapterPair(final String fivePrime, final String threePrime) {
+            this.threePrime = threePrime;
+            this.threePrimeBytes = StringUtil.stringToBytes(threePrime);
+
+            this.fivePrime = fivePrime;
+            this.fivePrimeReadOrder = SequenceUtil.reverseComplement(fivePrime);
+            this.fivePrimeBytes = StringUtil.stringToBytes(fivePrime);
+            this.fivePrimeReadOrderBytes = StringUtil.stringToBytes(fivePrimeReadOrder);
+        }
+
+        public String get3PrimeAdapter(){ return threePrime; }
+        public String get5PrimeAdapter(){ return fivePrime; }
+        public String get3PrimeAdapterInReadOrder(){ return threePrime; }
+        public String get5PrimeAdapterInReadOrder() { return fivePrimeReadOrder; }
+        public byte[] get3PrimeAdapterBytes() { return threePrimeBytes; }
+        public byte[] get5PrimeAdapterBytes() { return fivePrimeBytes; }
+        public byte[] get3PrimeAdapterBytesInReadOrder() { return threePrimeBytes; }
+        public byte[] get5PrimeAdapterBytesInReadOrder()  { return fivePrimeReadOrderBytes; }
+        public String getName() { return "Custom adapter pair"; }
+    }
+}
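
A quick standalone sketch of how a single trimmed read feeds the clipped_bases histogram above and how that histogram is written out. The read length, the XT trim point (assumed here to be the 1-based position of the first clipped base, matching the arithmetic in the loop) and the output path are all hypothetical.

    import net.sf.picard.metrics.MetricBase;
    import net.sf.picard.metrics.MetricsFile;
    import net.sf.picard.util.Histogram;
    import java.io.File;

    public class ClippedBasesHistogramSketch {
        public static void main(final String[] args) {
            final Histogram<Integer> histo = new Histogram<Integer>("clipped_bases", "read_count");

            // Hypothetical read: 100 bp, XT set to 76 by adapter trimming, so
            // readLength - trimPoint + 1 = 25 bases are counted as clipped.
            final int readLength = 100;
            final int trimPoint = 76;
            histo.increment(readLength - trimPoint + 1);

            final MetricsFile<MetricBase, Integer> metricsFile = new MetricsFile<MetricBase, Integer>();
            metricsFile.setHistogram(histo);
            metricsFile.write(new File("mark_adapters.metrics"));   // hypothetical output path
        }
    }
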
diff --git a/src/java/net/sf/picard/illumina/parser/BarcodeParser.java b/src/java/net/sf/picard/illumina/parser/BarcodeParser.java
index b97d885..8fabb97 100644
--- a/src/java/net/sf/picard/illumina/parser/BarcodeParser.java
+++ b/src/java/net/sf/picard/illumina/parser/BarcodeParser.java
@@ -24,7 +24,7 @@
 package net.sf.picard.illumina.parser;
 
 import net.sf.picard.illumina.parser.readers.BarcodeFileReader;
-import net.sf.picard.util.CollectionUtil;
+import net.sf.samtools.util.CollectionUtil;
 import net.sf.samtools.util.CloseableIterator;
 
 import java.io.File;
diff --git a/src/java/net/sf/picard/illumina/parser/BclParser.java b/src/java/net/sf/picard/illumina/parser/BclParser.java
index 20cd8e4..ef34a2a 100644
--- a/src/java/net/sf/picard/illumina/parser/BclParser.java
+++ b/src/java/net/sf/picard/illumina/parser/BclParser.java
@@ -24,6 +24,7 @@
 package net.sf.picard.illumina.parser;
 
 
+import net.sf.picard.illumina.parser.readers.BclQualityEvaluationStrategy;
 import net.sf.picard.illumina.parser.readers.BclReader;
 
 import java.io.File;
@@ -31,7 +32,7 @@ import java.util.Collections;
 import java.util.NoSuchElementException;
 import java.util.Set;
 
-import static net.sf.picard.util.CollectionUtil.makeSet;
+import static net.sf.samtools.util.CollectionUtil.makeSet;
 
 /**
  * BclParser parses a number of BclFiles equal to the total of all the values in outputLengths and returns a BclData object
@@ -44,16 +45,19 @@ class BclParser extends PerTilePerCycleParser<BclData>{
     public static final byte MASKING_QUALITY = (byte) 0x02;
 
     private static final Set<IlluminaDataType> SUPPORTED_TYPES = Collections.unmodifiableSet(makeSet(IlluminaDataType.BaseCalls, IlluminaDataType.QualityScores));
-
+    
+    private final BclQualityEvaluationStrategy bclQualityEvaluationStrategy;
     private final boolean applyEamssFilter;
 
-    public BclParser(final File directory, final int lane, final CycleIlluminaFileMap tilesToCycleFiles, final OutputMapping outputMapping) {
-        this(directory, lane, tilesToCycleFiles, outputMapping, true);
+    public BclParser(final File directory, final int lane, final CycleIlluminaFileMap tilesToCycleFiles, final OutputMapping outputMapping, final BclQualityEvaluationStrategy bclQualityEvaluationStrategy) {
+        this(directory, lane, tilesToCycleFiles, outputMapping, true, bclQualityEvaluationStrategy);
     }
 
-    public BclParser(final File directory, final int lane, final CycleIlluminaFileMap tilesToCycleFiles, final OutputMapping outputMapping, final boolean applyEamssFilter) {
+    public BclParser(final File directory, final int lane, final CycleIlluminaFileMap tilesToCycleFiles, final OutputMapping outputMapping, final boolean applyEamssFilter, final BclQualityEvaluationStrategy bclQualityEvaluationStrategy) {
         super(directory, lane, tilesToCycleFiles, outputMapping);
+        this.bclQualityEvaluationStrategy = bclQualityEvaluationStrategy;
         this.applyEamssFilter = applyEamssFilter;
+        this.initialize();
     }
 
     /** Create the BclData object segmented by the given outputLengths */
@@ -72,7 +76,7 @@ class BclParser extends PerTilePerCycleParser<BclData>{
     protected CycleFileParser<BclData> makeCycleFileParser(final File file, final int cycle) {
         return new CycleFileParser<BclData>(){
             final OutputMapping.TwoDIndex cycleOutputIndex = outputMapping.getOutputIndexForCycle(cycle);
-            BclReader reader = new BclReader(file);
+            BclReader reader = new BclReader(file, bclQualityEvaluationStrategy);
 
             @Override
             public void close() {
diff --git a/src/java/net/sf/picard/illumina/parser/CifParser.java b/src/java/net/sf/picard/illumina/parser/CifParser.java
index a0af42d..b202d99 100644
--- a/src/java/net/sf/picard/illumina/parser/CifParser.java
+++ b/src/java/net/sf/picard/illumina/parser/CifParser.java
@@ -23,7 +23,7 @@
  */
 package net.sf.picard.illumina.parser;
 
-import net.sf.picard.util.CollectionUtil;
+import net.sf.samtools.util.CollectionUtil;
 
 import java.io.File;
 import java.util.Collections;
diff --git a/src/java/net/sf/picard/illumina/parser/CnfParser.java b/src/java/net/sf/picard/illumina/parser/CnfParser.java
index 223f56e..3b33359 100644
--- a/src/java/net/sf/picard/illumina/parser/CnfParser.java
+++ b/src/java/net/sf/picard/illumina/parser/CnfParser.java
@@ -23,7 +23,7 @@
  */
 package net.sf.picard.illumina.parser;
 
-import net.sf.picard.util.CollectionUtil;
+import net.sf.samtools.util.CollectionUtil;
 
 import java.io.File;
 import java.util.Collections;
diff --git a/src/java/net/sf/picard/illumina/parser/FilterParser.java b/src/java/net/sf/picard/illumina/parser/FilterParser.java
index 0355d9e..a49816f 100644
--- a/src/java/net/sf/picard/illumina/parser/FilterParser.java
+++ b/src/java/net/sf/picard/illumina/parser/FilterParser.java
@@ -26,7 +26,7 @@ package net.sf.picard.illumina.parser;
 import net.sf.picard.illumina.parser.readers.FilterFileReader;
 import net.sf.samtools.util.CloseableIterator;
 
-import static net.sf.picard.util.CollectionUtil.*;
+import static net.sf.samtools.util.CollectionUtil.*;
 
 import java.io.File;
 import java.util.*;
diff --git a/src/java/net/sf/picard/illumina/parser/IlluminaDataProviderFactory.java b/src/java/net/sf/picard/illumina/parser/IlluminaDataProviderFactory.java
index f5a7fe4..664f39c 100644
--- a/src/java/net/sf/picard/illumina/parser/IlluminaDataProviderFactory.java
+++ b/src/java/net/sf/picard/illumina/parser/IlluminaDataProviderFactory.java
@@ -28,12 +28,13 @@ import java.io.File;
 import java.util.*;
 
 import net.sf.picard.PicardException;
+import net.sf.picard.illumina.parser.readers.BclQualityEvaluationStrategy;
 import net.sf.picard.util.Log;
 import net.sf.picard.illumina.parser.IlluminaFileUtil.SupportedIlluminaFormat;
 import net.sf.samtools.util.StringUtil;
 
-import static net.sf.picard.util.CollectionUtil.makeList;
-import static net.sf.picard.util.CollectionUtil.makeSet;
+import static net.sf.samtools.util.CollectionUtil.makeList;
+import static net.sf.samtools.util.CollectionUtil.makeSet;
 
 /**
  * IlluminaDataProviderFactory accepts options for parsing Illumina data files for a lane and creates an
@@ -104,7 +105,7 @@ public class IlluminaDataProviderFactory {
     private final List<Integer> availableTiles;
 
     private final OutputMapping outputMapping;
-
+    private  final BclQualityEvaluationStrategy bclQualityEvaluationStrategy;
 
     /**
      * Create factory with the specified options, one that favors using QSeqs over all other files
@@ -116,9 +117,10 @@ public class IlluminaDataProviderFactory {
      *                          in a run's QSeq files
      * @param dataTypes         Which data types to read
      */
-    public IlluminaDataProviderFactory(final File basecallDirectory, final int lane, final ReadStructure readStructure, final IlluminaDataType... dataTypes) {
+    public IlluminaDataProviderFactory(final File basecallDirectory, final int lane, final ReadStructure readStructure,  final BclQualityEvaluationStrategy bclQualityEvaluationStrategy, final IlluminaDataType... dataTypes) {
         this.basecallDirectory     = basecallDirectory;
         this.intensitiesDirectory = basecallDirectory.getParentFile();
+        this.bclQualityEvaluationStrategy = bclQualityEvaluationStrategy;
 
         this.lane = lane;
         this.dataTypes = Collections.unmodifiableSet(new HashSet<IlluminaDataType>(Arrays.asList(dataTypes)));
@@ -308,7 +310,7 @@ public class IlluminaDataProviderFactory {
             case Bcl:
                 final CycleIlluminaFileMap bclFileMap = fileUtil.bcl().getFiles(requestedTiles, outputMapping.getOutputCycles());
                 bclFileMap.assertValid(requestedTiles, outputMapping.getOutputCycles());
-                parser = new BclParser(basecallDirectory, lane, bclFileMap, outputMapping, this.applyEamssFiltering);
+                parser = new BclParser(basecallDirectory, lane, bclFileMap, outputMapping, this.applyEamssFiltering, bclQualityEvaluationStrategy);
                 break;
 
             case Cif:
diff --git a/src/java/net/sf/picard/illumina/parser/IlluminaFileUtil.java b/src/java/net/sf/picard/illumina/parser/IlluminaFileUtil.java
index d7b4003..9d06e7c 100644
--- a/src/java/net/sf/picard/illumina/parser/IlluminaFileUtil.java
+++ b/src/java/net/sf/picard/illumina/parser/IlluminaFileUtil.java
@@ -576,7 +576,9 @@ public class IlluminaFileUtil {
                                         failures.add("0 Length tile file(" + cycleFile.getAbsolutePath() + ")");
                                     } else if(cycleSize == null) {
                                         cycleSize = cycleFile.length();
-                                    } else if(cycleSize != cycleFile.length()) {
+                                    } else if (!extension.equals(".bcl.gz") && cycleSize != cycleFile.length()) {
+                                        // TODO: The gzip bcl files might not be the same length despite having the same content,
+                                        // for now we're punting on this but this should be looked into at some point
                                         failures.add("File type " + extension + " has cycles files of different length.  Current cycle (" + currentCycle + ") " +
                                                      "Length of first non-empty file (" + cycleSize + ") length of current cycle (" + cycleFile.length() + ")"  + " File(" + cycleFile.getAbsolutePath() + ")");
                                     }
diff --git a/src/java/net/sf/picard/illumina/parser/IlluminaIntensityParser.java b/src/java/net/sf/picard/illumina/parser/IlluminaIntensityParser.java
index 08b7597..f61f74a 100644
--- a/src/java/net/sf/picard/illumina/parser/IlluminaIntensityParser.java
+++ b/src/java/net/sf/picard/illumina/parser/IlluminaIntensityParser.java
@@ -1,4 +1,3 @@
-
 package net.sf.picard.illumina.parser;
 
 import net.sf.picard.PicardException;
@@ -17,6 +16,7 @@ import java.util.Map;
 abstract class IlluminaIntensityParser<T extends IlluminaData> extends PerTilePerCycleParser<T> {
     public IlluminaIntensityParser(final File directory, final int lane, final CycleIlluminaFileMap tilesToCycleFiles, final OutputMapping outputMapping) {
         super(directory, lane, tilesToCycleFiles, outputMapping);
+        this.initialize();
     }
 
     /**
diff --git a/src/java/net/sf/picard/illumina/parser/PerTilePerCycleParser.java b/src/java/net/sf/picard/illumina/parser/PerTilePerCycleParser.java
index 4370af9..333ece2 100644
--- a/src/java/net/sf/picard/illumina/parser/PerTilePerCycleParser.java
+++ b/src/java/net/sf/picard/illumina/parser/PerTilePerCycleParser.java
@@ -68,10 +68,14 @@ abstract class PerTilePerCycleParser<ILLUMINA_DATA extends IlluminaData> impleme
         this.outputMapping = outputMapping;
 
         cycleFileParsers = new ArrayList<CycleFileParser<ILLUMINA_DATA>>(outputMapping.getTotalOutputCycles());
+    }
 
+    /** Do initialization work.  This was moved out of the constructor because subclasses need a chance to
+     * initialize their own member variables before it runs. */
+    protected void initialize() {
         seekToTile(tilesToCycleFiles.firstKey());
     }
-
+    
     /**
      * Per cluster makeData will make the relevant IlluminaData object with the given outputLengths
      * @param outputLengths The expected lengths of the output data
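
The initialize() comment above describes the usual Java constructor-ordering pitfall: work done in a superclass constructor runs before the subclass has assigned its own fields, so any overridable call made there (here, ultimately seekToTile()) can see them unset. A small illustrative sketch of the deferred-initialization pattern; the class and method names below are invented for illustration and are not taken from the diff.

    import java.io.File;

    abstract class ParserBase {
        ParserBase() {
            // Deliberately empty: calling an overridable method here would run
            // before any subclass fields have been assigned.
        }

        /** Subclasses call this once their own fields are set (mirrors PerTilePerCycleParser.initialize()). */
        protected void initialize() {
            seek();
        }

        protected abstract void seek();
    }

    class TileParser extends ParserBase {
        private final File tileFile;

        TileParser(final File tileFile) {
            super();
            this.tileFile = tileFile;   // assigned before initialize() dereferences it
            initialize();
        }

        @Override
        protected void seek() {
            System.out.println("seeking in " + tileFile.getName());
        }

        public static void main(final String[] args) {
            new TileParser(new File("s_1_1101.bcl"));   // hypothetical file name
        }
    }
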
diff --git a/src/java/net/sf/picard/illumina/parser/PosParser.java b/src/java/net/sf/picard/illumina/parser/PosParser.java
index 96acf02..c6c98b4 100644
--- a/src/java/net/sf/picard/illumina/parser/PosParser.java
+++ b/src/java/net/sf/picard/illumina/parser/PosParser.java
@@ -34,7 +34,7 @@ import java.io.File;
 import java.util.Collections;
 import java.util.Set;
 
-import static net.sf.picard.util.CollectionUtil.makeSet;
+import static net.sf.samtools.util.CollectionUtil.makeSet;
 
 /**
  * PosParser parses multiple files formatted as one of the three file formats that contain position information
diff --git a/src/java/net/sf/picard/illumina/parser/QseqParser.java b/src/java/net/sf/picard/illumina/parser/QseqParser.java
index 053d1b4..e4672f2 100644
--- a/src/java/net/sf/picard/illumina/parser/QseqParser.java
+++ b/src/java/net/sf/picard/illumina/parser/QseqParser.java
@@ -25,6 +25,7 @@ package net.sf.picard.illumina.parser;
 
 import net.sf.picard.util.*;
 import net.sf.picard.PicardException;
+import net.sf.samtools.util.CollectionUtil;
 import net.sf.samtools.util.StringUtil;
 
 import java.io.File;
diff --git a/src/java/net/sf/picard/illumina/parser/TileMetricsUtil.java b/src/java/net/sf/picard/illumina/parser/TileMetricsUtil.java
index 181a136..dd6b54f 100644
--- a/src/java/net/sf/picard/illumina/parser/TileMetricsUtil.java
+++ b/src/java/net/sf/picard/illumina/parser/TileMetricsUtil.java
@@ -3,7 +3,7 @@ package net.sf.picard.illumina.parser;
 import net.sf.picard.PicardException;
 import net.sf.picard.illumina.parser.readers.TileMetricsOutReader;
 import net.sf.picard.illumina.parser.readers.TileMetricsOutReader.IlluminaTileMetrics;
-import net.sf.picard.util.CollectionUtil;
+import net.sf.samtools.util.CollectionUtil;
 
 import java.io.File;
 import java.io.FileNotFoundException;
diff --git a/src/java/net/sf/picard/illumina/parser/readers/BclQualityEvaluationStrategy.java b/src/java/net/sf/picard/illumina/parser/readers/BclQualityEvaluationStrategy.java
new file mode 100644
index 0000000..686fdae
--- /dev/null
+++ b/src/java/net/sf/picard/illumina/parser/readers/BclQualityEvaluationStrategy.java
@@ -0,0 +1,95 @@
+package net.sf.picard.illumina.parser.readers;
+
+import net.sf.picard.PicardException;
+import net.sf.samtools.util.CollectionUtil;
+
+import java.util.*;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * Describes a mechanism for revising and evaluating qualities read from a BCL file.  This class accumulates observations about low quality
+ * scores that it evaluates, so distinct instances should be used for unrelated sets of BCL readers.
+ * 
+ * The mechanism for revising qualities is not configurable.  The qualities that are less than 1 are revised to 1, and other qualities are
+ * not affected.
+ *
+ * This class is thread-safe and a single instance can and should be passed to {@link BclReader}s running in separate threads.
+ * 
+ * To replicate the functionality of {@link BclReader}s prior to the introduction of this class, create a single instance passing 
+ * {@link #ILLUMINA_ALLEGED_MINIMUM_QUALITY} to the constructor, and then call {@link #assertMinimumQualities()} once the readers finish
+ * their work.
+ * 
+ * @author mccowan
+ */
+public class BclQualityEvaluationStrategy {
+    public static final int ILLUMINA_ALLEGED_MINIMUM_QUALITY = 2;
+    private final int minimumRevisedQuality;
+    /** A thread-safe defaulting map that injects an AtomicInteger starting at 0 when an uninitialized key is fetched with get(). */
+    private Map<Byte, AtomicInteger> qualityCountMap = Collections.synchronizedMap(new CollectionUtil.DefaultingMap<Byte, AtomicInteger>(
+            new CollectionUtil.DefaultingMap.Factory<AtomicInteger, Byte>() {
+                @Override
+                public AtomicInteger make(final Byte _) {
+                    return new AtomicInteger(0);
+                }
+            }, true));
+
+    /**
+     * @param minimumRevisedQuality The minimum quality that should be seen from revised qualities; controls whether or not an exception
+     *                              is thrown when calling {@link #assertMinimumQualities()}
+     */
+    public BclQualityEvaluationStrategy(final int minimumRevisedQuality) {
+        this.minimumRevisedQuality = minimumRevisedQuality;
+    }
+
+    /** The rule used to revise quality scores, which is: if it's less than 1, make it 1. */
+    private static byte generateRevisedQuality(final byte quality) { return (byte) Math.max(quality, 1); }
+    
+    /**
+     * Accepts a quality read from a BCL file and (1) returns a 1 if the value was 0 and (2) makes a note of the provided quality if it is
+     * low.  Because of (2) each record's quality should be passed only once to this method, otherwise it will be observed multiple times.
+     *
+     * @param quality The quality score read from the BCL
+     * @return The revised new quality score
+     */
+    public byte reviseAndConditionallyLogQuality(final byte quality) {
+        final byte revisedQuality = generateRevisedQuality(quality);
+        if (quality < ILLUMINA_ALLEGED_MINIMUM_QUALITY) {
+            qualityCountMap.get(quality).incrementAndGet();
+        }
+        return revisedQuality;
+    }
+
+    /**
+     * Reviews the qualities observed thus far and throws an exception if any are below the minimum quality threshold.
+     */
+    public void assertMinimumQualities() {
+        final Collection<String> errorTokens = new LinkedList<String>();
+        for (final Map.Entry<Byte, AtomicInteger> entry : this.qualityCountMap.entrySet()) {
+            /**
+             * We're comparing revised qualities here, not observed, but the qualities that are logged in qualityCountMap are observed
+             * qualities.  So as we iterate through it, convert observed qualities into their revised value. 
+             */
+            if (generateRevisedQuality(entry.getKey()) < minimumRevisedQuality) { 
+                errorTokens.add(String.format("quality %s observed %s times", entry.getKey(), entry.getValue()));
+            }
+        }
+        if (!errorTokens.isEmpty()) {
+            throw new PicardException(String.format(
+                    "Found BCL qualities that fell beneath minimum threshold of %s: %s.",
+                    minimumRevisedQuality, 
+                    CollectionUtil.join(errorTokens, "; ")
+            ));
+        }
+    }
+
+    /**
+     * Returns a view of the qualities that failed, where the key is the quality score and the value is the number of observations.
+     */
+    public Map<Byte, Integer> getPoorQualityFrequencies() {
+        final Map<Byte, Integer> qualityCountMapCopy = new HashMap<Byte, Integer>();
+        for (final Map.Entry<Byte, AtomicInteger> entry : qualityCountMap.entrySet()) {
+            qualityCountMapCopy.put(entry.getKey(), entry.getValue().intValue());
+        }
+        return Collections.unmodifiableMap(qualityCountMapCopy);
+    }
+}
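
A usage sketch for the new strategy class, following the recipe in its own javadoc (one shared instance passed to the readers, then assertMinimumQualities() after they finish); the BCL path is hypothetical and the read loop is schematic.

    import net.sf.picard.illumina.parser.readers.BclQualityEvaluationStrategy;
    import net.sf.picard.illumina.parser.readers.BclReader;
    import java.io.File;

    public class BclQualitySketch {
        public static void main(final String[] args) {
            // One shared, thread-safe strategy for all readers in the run.
            final BclQualityEvaluationStrategy strategy = new BclQualityEvaluationStrategy(
                    BclQualityEvaluationStrategy.ILLUMINA_ALLEGED_MINIMUM_QUALITY);

            // Hypothetical BCL file; in real use the paths come from the basecall directory layout.
            final BclReader reader = new BclReader(new File("s_1_1101.bcl"), strategy);
            while (reader.hasNext()) {
                reader.next();   // qualities below 1 are revised to 1 and tallied by the strategy
            }

            // Replicates the old hard-failure behaviour: throws if any revised quality fell below 2.
            strategy.assertMinimumQualities();
        }
    }
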
diff --git a/src/java/net/sf/picard/illumina/parser/readers/BclReader.java b/src/java/net/sf/picard/illumina/parser/readers/BclReader.java
index ec3577d..6fea411 100644
--- a/src/java/net/sf/picard/illumina/parser/readers/BclReader.java
+++ b/src/java/net/sf/picard/illumina/parser/readers/BclReader.java
@@ -64,6 +64,8 @@ import java.util.zip.GZIPInputStream;
 public class BclReader implements Iterator<BclReader.BclValue> {
     /** The size of the opening header (consisting solely of numClusters*/
     private static final int HEADER_SIZE = 4;
+    
+    private final BclQualityEvaluationStrategy bclQualityEvaluationStrategy;
 
     /** The number of clusters provided in this BCL */
     public final long numClusters;
@@ -91,7 +93,9 @@ public class BclReader implements Iterator<BclReader.BclValue> {
         }
     }
 
-    public BclReader(final File file) {
+    public BclReader(final File file, final BclQualityEvaluationStrategy bclQualityEvaluationStrategy) {
+        this.bclQualityEvaluationStrategy = bclQualityEvaluationStrategy;
+        
         filePath = file.getAbsolutePath();
         final boolean isGzip = filePath.endsWith(".gz");
 
@@ -196,10 +200,7 @@ public class BclReader implements Iterator<BclReader.BclValue> {
                     throw new PicardException("Impossible case! BCL Base value neither A, C, G, nor T! Value(" + (element & BASE_MASK) + ") + in file(" + filePath + ")");
             }
 
-            quality = (byte)(UnsignedTypeUtil.uByteToInt(element) >>> 2);
-            if(quality == 0 || quality == 1) {
-                throw new PicardException("If base is NOT a NO CALL then it should have a quality of 2 or greater!  Quality Found(" + quality + ")  Cluster(" + nextCluster + ")");
-            }
+            quality = bclQualityEvaluationStrategy.reviseAndConditionallyLogQuality((byte)(UnsignedTypeUtil.uByteToInt(element) >>> 2));
         }
 
         ++nextCluster;
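
For context on the revised quality line above, a small standalone sketch of the byte layout BclReader decodes: the low two bits select the base and the unsigned shift by two yields the raw quality, which is now routed through the shared BclQualityEvaluationStrategy rather than hard-failing below 2. The packed value here is arbitrary, the 0x03 base mask is assumed from the conventional BCL layout, and a byte of zero is the separate no-call case handled elsewhere in the reader.

    public class BclByteSketch {
        private static final char[] BASES = {'A', 'C', 'G', 'T'};

        public static void main(final String[] args) {
            final byte element = (byte) 0xA6;                        // hypothetical packed BCL byte
            final char base    = BASES[element & 0x03];              // low two bits -> 'G'
            final byte quality = (byte) ((element & 0xFF) >>> 2);    // high six bits -> 41
            System.out.println(base + " / Q" + quality);
        }
    }
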
diff --git a/src/java/net/sf/picard/io/IoUtil.java b/src/java/net/sf/picard/io/IoUtil.java
index 7552dae..647d826 100644
--- a/src/java/net/sf/picard/io/IoUtil.java
+++ b/src/java/net/sf/picard/io/IoUtil.java
@@ -24,12 +24,15 @@
 package net.sf.picard.io;
 
 import net.sf.picard.PicardException;
+import net.sf.picard.util.IterableOnceIterator;
 import net.sf.samtools.Defaults;
+import net.sf.samtools.util.CloserUtil;
+import net.sf.samtools.util.CollectionUtil;
 import net.sf.samtools.util.RuntimeIOException;
 
 import java.io.*;
 import java.nio.charset.Charset;
-import java.util.Arrays;
+import java.util.*;
 import java.util.regex.Pattern;
 import java.util.zip.GZIPInputStream;
 import java.util.zip.GZIPOutputStream;
@@ -547,6 +550,115 @@ public class IoUtil extends net.sf.samtools.util.IOUtil {
             throw new RuntimeIOException("Error reading stream", ioe);
         }
     }
+
+    /**
+     * Returns an iterator over the lines in a text file. The underlying resources are automatically
+     * closed when the iterator hits the end of the input, or manually by calling close().
+     *
+     * @param f a file that is to be read in as text
+     * @return an iterator over the lines in the text file
+     */
+    public static IterableOnceIterator<String> readLines(final File f) {
+        try {
+            final BufferedReader in = IoUtil.openFileForBufferedReading(f);
+
+            return new IterableOnceIterator<String>() {
+                private String next = in.readLine();
+
+                /** Returns true if there is another line to read or false otherwise. */
+                @Override public boolean hasNext() { return next != null; }
+
+                /** Returns the next line in the file or null if there are no more lines. */
+                @Override public String next() {
+                    try {
+                        final String tmp = next;
+                        next = in.readLine();
+                        if (next == null) in.close();
+                        return tmp;
+                    }
+                    catch (IOException ioe) { throw new RuntimeIOException(ioe); }
+                }
+
+                /** Closes the underlying input stream. Not required if end of stream has already been hit. */
+                @Override public void close() throws IOException { CloserUtil.close(in); }
+            };
+        }
+        catch (IOException e) {
+            throw new RuntimeIOException(e);
+        }
+    }
+
+    /** Returns all of the untrimmed lines in the provided file. */
+    public static List<String> slurpLines(final File file) throws FileNotFoundException {
+        return slurpLines(new FileInputStream(file));
+    }
+
+    public static List<String> slurpLines(final InputStream is) throws FileNotFoundException {
+        /** See {@link Scanner} source for origin of delimiter used here.  */
+        return tokenSlurp(is, Charset.defaultCharset(), "\r\n|[\n\r\u2028\u2029\u0085]");
+    }
+    
+    /** Convenience overload for {@link #slurp(java.io.InputStream, java.nio.charset.Charset)} using the default charset {@link Charset#defaultCharset()}. */
+    public static String slurp(final File file) throws FileNotFoundException {
+        return slurp(new FileInputStream(file));
+    }
+    
+    /** Convenience overload for {@link #slurp(java.io.InputStream, java.nio.charset.Charset)} using the default charset {@link Charset#defaultCharset()}. */
+    public static String slurp(final InputStream is) {
+        return slurp(is, Charset.defaultCharset());
+    }
+
+    /** Reads all of the stream into a String, decoding with the provided {@link Charset} then closes the stream quietly. */
+    public static String slurp(final InputStream is, final Charset charSet) {
+        return CollectionUtil.getSoleElement(tokenSlurp(is, charSet, "\\A"));
+    }
+
+    /** Tokenizes the provided input stream into memory using the given delimiter. */
+    private static List<String> tokenSlurp(final InputStream is, final Charset charSet, final String delimiterPattern) {
+        try {
+            final Scanner s = new Scanner(is, charSet.toString()).useDelimiter(delimiterPattern);
+            final LinkedList<String> tokens = new LinkedList<String>();
+            while (s.hasNext()) {
+                tokens.add(s.next());
+            }
+            return tokens;
+        } finally {
+            CloserUtil.close(is);
+        }
+    }
+
+    /**
+     * Go through the files provided and, if they have one of the provided file extensions, pass the file into the output;
+     * otherwise assume that the file is a list of filenames and unfold it into the output.
+     */
+    public static List<File> unrollFiles(final Collection<File> inputs, final String... extensions) {
+        if (extensions.length < 1) throw new IllegalArgumentException("Must provide at least one extension.");
+
+        final Stack<File> stack = new Stack<File>();
+        final List<File> output = new ArrayList<File>();
+        stack.addAll(inputs);
+
+        final Set<String> exts = new HashSet<String>();
+        Collections.addAll(exts, extensions);
+
+        while (!stack.empty()) {
+            final File f = stack.pop();
+            final String ext = IoUtil.fileSuffix(f);
+
+            if (exts.contains(ext)) {
+                output.add(f);
+            }
+            else {
+                IoUtil.assertFileIsReadable(f);
+
+                for (final String s : IoUtil.readLines(f)) {
+                    if (!s.trim().isEmpty()) stack.push(new File(s.trim()));
+                }
+            }
+        }
+
+        return output;
+    }
 }
 
 /**
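
A short usage sketch for the IoUtil additions above; all file names are hypothetical, and the extension strings passed to unrollFiles are assumed to include the leading dot so they match the fileSuffix comparison in the method.

    import net.sf.picard.io.IoUtil;
    import java.io.File;
    import java.util.Arrays;
    import java.util.List;

    public class IoUtilSketch {
        public static void main(final String[] args) throws Exception {
            // Stream a text file line by line; the underlying reader closes itself at end of input.
            for (final String line : IoUtil.readLines(new File("read_names.txt"))) {
                if (!line.trim().isEmpty()) System.out.println(line);
            }

            // Pull a small file fully into memory, as one string or as untrimmed lines.
            final String wholeFile = IoUtil.slurp(new File("header.txt"));
            final List<String> lines = IoUtil.slurpLines(new File("header.txt"));

            // Expand a mix of BAMs and list-of-BAM files into a flat list of BAMs.
            final List<File> bams = IoUtil.unrollFiles(
                    Arrays.asList(new File("inputs.list"), new File("sample1.bam")), ".bam");

            System.out.println(wholeFile.length() + " chars, " + lines.size() + " lines, " + bams.size() + " bam(s)");
        }
    }
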
diff --git a/src/java/net/sf/picard/reference/IndexedFastaSequenceFile.java b/src/java/net/sf/picard/reference/IndexedFastaSequenceFile.java
index 2e86ccf..9e37e30 100644
--- a/src/java/net/sf/picard/reference/IndexedFastaSequenceFile.java
+++ b/src/java/net/sf/picard/reference/IndexedFastaSequenceFile.java
@@ -29,10 +29,7 @@ import net.sf.picard.io.IoUtil;
 import net.sf.samtools.SAMSequenceDictionary;
 import net.sf.samtools.SAMSequenceRecord;
 
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
+import java.io.*;
 import java.nio.ByteBuffer;
 import java.nio.channels.FileChannel;
 import java.util.Iterator;
@@ -41,7 +38,7 @@ import java.util.Iterator;
  * A fasta file driven by an index for fast, concurrent lookups.  Supports two interfaces:
  * the ReferenceSequenceFile for old-style, stateful lookups and a direct getter.
  */
-public class IndexedFastaSequenceFile extends AbstractFastaSequenceFile {
+public class IndexedFastaSequenceFile extends AbstractFastaSequenceFile implements Closeable {
     /**
      * Size of the read buffer.
      */
@@ -258,4 +255,9 @@ public class IndexedFastaSequenceFile extends AbstractFastaSequenceFile {
     public String toString() {
         return this.file.getAbsolutePath();
     }
+
+    @Override
+    public void close() throws IOException {
+        channel.close();
+    }
 }
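
With IndexedFastaSequenceFile now implementing Closeable, callers can release the underlying channel deterministically. A minimal sketch; the reference path is hypothetical and assumes the usual .fai index sits next to the FASTA.

    import net.sf.picard.reference.IndexedFastaSequenceFile;
    import java.io.File;

    public class FastaCloseSketch {
        public static void main(final String[] args) throws Exception {
            final IndexedFastaSequenceFile fasta =
                    new IndexedFastaSequenceFile(new File("reference.fasta"));
            try {
                // Any lookups would go here; getSequenceDictionary() may be null without a .dict file.
                System.out.println(fasta.getSequenceDictionary() != null ? "dictionary present" : "no dictionary");
            } finally {
                fasta.close();   // releases the FileChannel via the close() added above
            }
        }
    }
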
diff --git a/src/java/net/sf/picard/sam/AbstractAlignmentMerger.java b/src/java/net/sf/picard/sam/AbstractAlignmentMerger.java
index 7a6924e..37a1e8e 100644
--- a/src/java/net/sf/picard/sam/AbstractAlignmentMerger.java
+++ b/src/java/net/sf/picard/sam/AbstractAlignmentMerger.java
@@ -119,7 +119,7 @@ public abstract class AbstractAlignmentMerger {
      * @param bisulfiteSequence Whether the reads are bisulfite sequence (used when calculating the
      *                          NM and UQ tags). Required.
      * @param alignedReadsOnly  Whether to output only those reads that have alignment data
-     * @param programRecord     Program record for taget file SAMRecords created.
+     * @param programRecord     Program record for target file SAMRecords created.
      * @param attributesToRetain  private attributes from the alignment record that should be
      *                          included when merging.  This overrides the exclusion of
      *                          attributes whose tags start with the reserved characters
@@ -253,6 +253,14 @@ public abstract class AbstractAlignmentMerger {
 
                 if (rec.getReadPairedFlag()) {
                     for (int i = 0; i < nextAligned.numHits(); ++i) {
+                        // firstAligned or secondAligned may be null, if there wasn't an alignment for the end,
+                        // or if the alignment was rejected by ignoreAlignment.
+                        final SAMRecord firstAligned = nextAligned.getFirstOfPair(i);
+                        final SAMRecord secondAligned = nextAligned.getSecondOfPair(i);
+
+                        final boolean isPrimaryAlignment = (firstAligned != null && !firstAligned.isSecondaryOrSupplementary()) ||
+                                (secondAligned != null && !secondAligned.isSecondaryOrSupplementary());
+
                         final SAMRecord firstToWrite;
                         final SAMRecord secondToWrite;
                         if (clone) {
@@ -262,13 +270,6 @@ public abstract class AbstractAlignmentMerger {
                             firstToWrite = rec;
                             secondToWrite = secondOfPair;
                         }
-                        // firstAligned or secondAligned may be null, if there wasn't an alignment for the end,
-                        // or if the alignment was rejected by ignoreAlignment.
-                        final SAMRecord firstAligned = nextAligned.getFirstOfPair(i);
-                        final SAMRecord secondAligned = nextAligned.getSecondOfPair(i);
-
-                        final boolean isPrimaryAlignment = (firstAligned != null && !firstAligned.getNotPrimaryAlignmentFlag()) ||
-                                (secondAligned != null && !secondAligned.getNotPrimaryAlignmentFlag());
 
                         transferAlignmentInfoToPairedRead(firstToWrite, secondToWrite, firstAligned, secondAligned);
 
@@ -284,6 +285,27 @@ public abstract class AbstractAlignmentMerger {
                             else ++unmapped;
                         }
                     }
+
+                    // This is already being checked at construction, but just to be sure ....
+                    if (nextAligned.getSupplementalFirstOfPairOrFragment().size() != nextAligned.getSupplementalSecondOfPair().size()) {
+                        throw new IllegalStateException("Supplemental first of pairs not the same size as second of pairs!");
+                    }
+                    // Take all of the supplemental reads which had been stashed and add them (as appropriate) to sorted
+                    for (int i = 0; i < nextAligned.getSupplementalFirstOfPairOrFragment().size(); i++) {
+                        final SAMRecord firstToWrite = clone(rec);
+                        final SAMRecord secondToWrite = clone(secondOfPair);
+                        transferAlignmentInfoToPairedRead(firstToWrite, secondToWrite,
+                                nextAligned.getSupplementalFirstOfPairOrFragment().get(i),
+                                nextAligned.getSupplementalSecondOfPair().get(i));
+                        addIfNotFiltered(sorted, firstToWrite);
+                        addIfNotFiltered(sorted, secondToWrite);
+
+                        if (firstToWrite.getReadUnmappedFlag()) ++unmapped;
+                        else ++aligned;
+
+                        if (secondToWrite.getReadUnmappedFlag()) ++unmapped;
+                        else ++aligned;
+                    }
                 } else {
                     for (int i = 0; i < nextAligned.numHits(); ++i) {
                         final SAMRecord recToWrite = clone ? clone(rec) : rec;
@@ -292,6 +314,15 @@ public abstract class AbstractAlignmentMerger {
                         if (recToWrite.getReadUnmappedFlag()) ++unmapped;
                         else ++aligned;
                     }
+                    // Take all of the supplemental reads which had been stashed and add them (as appropriate) to sorted
+                    for (final SAMRecord supplementalRec : nextAligned.getSupplementalFirstOfPairOrFragment()) {
+                        // always clone supplementals
+                        final SAMRecord recToWrite = clone(rec);
+                        transferAlignmentInfoToFragment(recToWrite, supplementalRec);
+                        addIfNotFiltered(sorted, recToWrite);
+                        if (recToWrite.getReadUnmappedFlag()) ++unmapped;
+                        else ++aligned;
+                    }
                 }
                 nextAligned = nextAligned();
             } else {
@@ -351,6 +382,9 @@ public abstract class AbstractAlignmentMerger {
         log.info("Wrote " + aligned + " alignment records and " + (alignedReadsOnly ? 0 : unmapped) + " unmapped reads.");
     }
 
+    /**
+     * Add record if it is primary or optionally secondary.
+     */
     private void addIfNotFiltered(final SortingCollection<SAMRecord> sorted, final SAMRecord rec) {
         if (includeSecondaryAlignments || !rec.getNotPrimaryAlignmentFlag()) {
             sorted.add(rec);
@@ -468,6 +502,7 @@ public abstract class AbstractAlignmentMerger {
         rec.setAlignmentStart(alignment.getAlignmentStart());
         rec.setReadNegativeStrandFlag(alignment.getReadNegativeStrandFlag());
         rec.setNotPrimaryAlignmentFlag(alignment.getNotPrimaryAlignmentFlag());
+        rec.setSupplementaryAlignmentFlag(alignment.getSupplementaryAlignmentFlag());
         if (!alignment.getReadUnmappedFlag()) {
             // only aligned reads should have cigar and mapping quality set
             rec.setCigar(alignment.getCigar());  // cigar may change when a
diff --git a/src/java/net/sf/picard/sam/AbstractDuplicateFindingAlgorithm.java b/src/java/net/sf/picard/sam/AbstractDuplicateFindingAlgorithm.java
index 95c91b8..75da479 100644
--- a/src/java/net/sf/picard/sam/AbstractDuplicateFindingAlgorithm.java
+++ b/src/java/net/sf/picard/sam/AbstractDuplicateFindingAlgorithm.java
@@ -25,7 +25,10 @@ public abstract class AbstractDuplicateFindingAlgorithm extends CommandLineProgr
     @Option(doc="Regular expression that can be used to parse read names in the incoming SAM file. Read names are " +
             "parsed to extract three variables: tile/region, x coordinate and y coordinate. These values are used " +
             "to estimate the rate of optical duplication in order to give a more accurate estimated library size. " +
-            "The regular expression should contain three capture groups for the three variables, in order.")
+            "The regular expression should contain three capture groups for the three variables, in order. " +
+            "It must match the entire read name. " +
+            "Note that if the default regex is specified, a regex match is not actually done, but instead the read name " +
+            " is split on colon character and the 2nd, 3rd and 4th elements are assumed to be tile, x and y values.")
     public String READ_NAME_REGEX = DEFAULT_READ_NAME_REGEX;
     
     @Option(doc="The maximum offset between two duplicte clusters in order to consider them optical duplicates. This " +
diff --git a/src/java/net/sf/picard/sam/AddOrReplaceReadGroups.java b/src/java/net/sf/picard/sam/AddOrReplaceReadGroups.java
index 413050e..58f2b45 100644
--- a/src/java/net/sf/picard/sam/AddOrReplaceReadGroups.java
+++ b/src/java/net/sf/picard/sam/AddOrReplaceReadGroups.java
@@ -11,7 +11,6 @@ import net.sf.samtools.*;
 import net.sf.samtools.SAMFileHeader.SortOrder;
 import net.sf.samtools.util.Iso8601Date;
 
-import javax.xml.bind.SchemaOutputResolver;
 import java.io.File;
 import java.util.Arrays;
 
@@ -59,6 +58,9 @@ public class AddOrReplaceReadGroups extends CommandLineProgram {
     @Option(shortName="DT", doc="Read Group run date", optional=true)
     public Iso8601Date RGDT;
 
+    @Option(shortName = "PI", doc = "Read Group predicted insert size", optional = true)
+    public Integer RGPI;
+
     private final Log log = Log.getInstance(AddOrReplaceReadGroups.class);
 
     /** Required main method implementation. */
@@ -81,6 +83,7 @@ public class AddOrReplaceReadGroups extends CommandLineProgram {
         if (RGCN != null) rg.setSequencingCenter(RGCN);
         if (RGDS != null) rg.setDescription(RGDS);
         if (RGDT != null) rg.setRunDate(RGDT);
+        if (RGPI != null) rg.setPredictedMedianInsertSize(RGPI);
 
         log.info(String.format("Created read group ID=%s PL=%s LB=%s SM=%s%n", rg.getId(), rg.getPlatform(), rg.getLibrary(), rg.getSample()));
 
diff --git a/src/java/net/sf/picard/sam/CompareSAMs.java b/src/java/net/sf/picard/sam/CompareSAMs.java
index 95ee1c4..e0e27f4 100644
--- a/src/java/net/sf/picard/sam/CompareSAMs.java
+++ b/src/java/net/sf/picard/sam/CompareSAMs.java
@@ -124,10 +124,10 @@ public class CompareSAMs extends CommandLineProgram {
 
 
     private boolean compareCoordinateSortedAlignments() {
-        final NotPrimarySkippingIterator itLeft =
-                new NotPrimarySkippingIterator(samReaders[0].iterator());
-        final NotPrimarySkippingIterator itRight =
-                new NotPrimarySkippingIterator(samReaders[1].iterator());
+        final SecondaryOrSupplementarySkippingIterator itLeft =
+                new SecondaryOrSupplementarySkippingIterator(samReaders[0].iterator());
+        final SecondaryOrSupplementarySkippingIterator itRight =
+                new SecondaryOrSupplementarySkippingIterator(samReaders[1].iterator());
 
         // Save any reads which haven't been matched during in-order scan.
         final Map<String, SAMRecord> leftUnmatched = new HashMap<String, SAMRecord>();
@@ -244,8 +244,8 @@ public class CompareSAMs extends CommandLineProgram {
     }
 
     private boolean compareQueryNameSortedAlignments() {
-        final NotPrimarySkippingIterator it1 = new NotPrimarySkippingIterator(samReaders[0].iterator());
-        final NotPrimarySkippingIterator it2 = new NotPrimarySkippingIterator(samReaders[1].iterator());
+        final SecondaryOrSupplementarySkippingIterator it1 = new SecondaryOrSupplementarySkippingIterator(samReaders[0].iterator());
+        final SecondaryOrSupplementarySkippingIterator it2 = new SecondaryOrSupplementarySkippingIterator(samReaders[1].iterator());
 
         boolean ret = true;
         while (it1.hasCurrent()) {
@@ -278,8 +278,8 @@ public class CompareSAMs extends CommandLineProgram {
     }
 
     private boolean compareUnsortedAlignments() {
-        final NotPrimarySkippingIterator it1 = new NotPrimarySkippingIterator(samReaders[0].iterator());
-        final NotPrimarySkippingIterator it2 = new NotPrimarySkippingIterator(samReaders[1].iterator());
+        final SecondaryOrSupplementarySkippingIterator it1 = new SecondaryOrSupplementarySkippingIterator(samReaders[0].iterator());
+        final SecondaryOrSupplementarySkippingIterator it2 = new SecondaryOrSupplementarySkippingIterator(samReaders[1].iterator());
         boolean ret = true;
         for (; it1.hasCurrent(); it1.advance(), it2.advance()) {
             if (!it2.hasCurrent()) {
@@ -301,7 +301,7 @@ public class CompareSAMs extends CommandLineProgram {
         return ret;
     }
 
-    private int countRemaining(final NotPrimarySkippingIterator it) {
+    private int countRemaining(final SecondaryOrSupplementarySkippingIterator it) {
         int i;
         for (i = 0; it.hasCurrent(); ++i) {
             it.advance();
diff --git a/src/java/net/sf/picard/sam/DownsampleSam.java b/src/java/net/sf/picard/sam/DownsampleSam.java
index 9b75db8..553e585 100644
--- a/src/java/net/sf/picard/sam/DownsampleSam.java
+++ b/src/java/net/sf/picard/sam/DownsampleSam.java
@@ -62,7 +62,7 @@ public class DownsampleSam extends CommandLineProgram {
         final ProgressLogger progress = new ProgressLogger(log, (int) 1e7, "Read");
 
         for (final SAMRecord rec : in) {
-            if (rec.getNotPrimaryAlignmentFlag()) continue;
+            if (rec.isSecondaryOrSupplementary()) continue;
             ++total;
 
             final String key = rec.getReadName();
diff --git a/src/java/net/sf/picard/sam/FastqToSam.java b/src/java/net/sf/picard/sam/FastqToSam.java
index dcaf777..21eabfd 100644
--- a/src/java/net/sf/picard/sam/FastqToSam.java
+++ b/src/java/net/sf/picard/sam/FastqToSam.java
@@ -247,7 +247,7 @@ public class FastqToSam extends CommandLineProgram {
         final SAMFileHeader header = new SAMFileHeader();
         header.addReadGroup(rgroup);
 
-        for (String comment : COMMENT) {
+        for (final String comment : COMMENT) {
             header.addComment(comment);
         }
 
@@ -255,7 +255,7 @@ public class FastqToSam extends CommandLineProgram {
         return header ;
     }
 
-    /** Based on the type of quality scores coming in, converts them to a numeric byte[] in prhred scale. */
+    /** Based on the type of quality scores coming in, converts them to a numeric byte[] in phred scale. */
     void convertQuality(final byte[] quals, final FastqQualityFormat version) {
         switch (version)  {
             case Standard:
diff --git a/src/java/net/sf/picard/sam/FixMateInformation.java b/src/java/net/sf/picard/sam/FixMateInformation.java
index e2b56df..3a566e7 100644
--- a/src/java/net/sf/picard/sam/FixMateInformation.java
+++ b/src/java/net/sf/picard/sam/FixMateInformation.java
@@ -175,7 +175,7 @@ public class FixMateInformation extends CommandLineProgram {
         final ProgressLogger progress = new ProgressLogger(log);
         while (iterator.hasNext()) {
             final SAMRecord rec1 = iterator.next();
-            if (rec1.getNotPrimaryAlignmentFlag()) {
+            if (rec1.isSecondaryOrSupplementary()) {
                 writeAlignment(rec1);
                 progress.record(rec1);
                 continue;
@@ -185,7 +185,7 @@ public class FixMateInformation extends CommandLineProgram {
             // or until there are no more SAMRecords.
             while (iterator.hasNext()) {
                 rec2 = iterator.peek();
-                if (rec2.getNotPrimaryAlignmentFlag()) {
+                if (rec2.isSecondaryOrSupplementary()) {
                     iterator.next();
                     writeAlignment(rec2);
                     progress.record(rec2);
diff --git a/src/java/net/sf/picard/sam/HitsForInsert.java b/src/java/net/sf/picard/sam/HitsForInsert.java
index 805de1f..cac30f2 100644
--- a/src/java/net/sf/picard/sam/HitsForInsert.java
+++ b/src/java/net/sf/picard/sam/HitsForInsert.java
@@ -59,6 +59,9 @@ class HitsForInsert {
     final List<SAMRecord> firstOfPairOrFragment = new ArrayList<SAMRecord>();
     final List<SAMRecord> secondOfPair = new ArrayList<SAMRecord>();
 
+    private final List<SAMRecord> supplementalFirstOfPairOrFragment = new ArrayList<SAMRecord>();
+    private final List<SAMRecord> supplementalSecondOfPair = new ArrayList<SAMRecord>();
+
     /**
      * @throws if numHits() == 0
      */
@@ -109,6 +112,14 @@ class HitsForInsert {
         secondOfPair.add(rec);
     }
 
+    public void addSupplementalFirstOfPairOrFragment(final SAMRecord rec) {
+        supplementalFirstOfPairOrFragment.add(rec);
+    }
+
+    public void addSupplementalSecondOfPair(final SAMRecord rec) {
+        supplementalSecondOfPair.add(rec);
+    }
+
     /**
      * @return The ith hit for a un-paired read.  Never returns null.
      * Do not call if paired read.
@@ -214,7 +225,7 @@ class HitsForInsert {
     private NumPrimaryAlignmentState tallyPrimaryAlignments(final List<SAMRecord> records) {
         boolean seenPrimary = false;
         for (int i = 0; i < records.size(); ++i) {
-            if (records.get(i) != null && !records.get(i).getNotPrimaryAlignmentFlag()) {
+            if (records.get(i) != null && !records.get(i).isSecondaryOrSupplementary()) {
                 if (seenPrimary) return NumPrimaryAlignmentState.MORE_THAN_ONE;
                 else seenPrimary = true;
             }
@@ -223,7 +234,7 @@ class HitsForInsert {
         else return NumPrimaryAlignmentState.NONE;
     }
 
-    public NumPrimaryAlignmentState tallyPrimaryAlignments(boolean firstEnd) {
+    public NumPrimaryAlignmentState tallyPrimaryAlignments(final boolean firstEnd) {
         if (firstEnd) return tallyPrimaryAlignments(firstOfPairOrFragment);
         else return tallyPrimaryAlignments(secondOfPair);
     }
@@ -231,7 +242,7 @@ class HitsForInsert {
     int findPrimaryAlignment(final List<SAMRecord> records) {
         int indexOfPrimaryAlignment = -1;
         for (int i = 0; i < records.size(); ++i) {
-            if (records.get(i) != null && !records.get(i).getNotPrimaryAlignmentFlag()) {
+            if (records.get(i) != null && !records.get(i).isSecondaryOrSupplementary()) {
                 if (indexOfPrimaryAlignment != -1) {
                     throw new IllegalStateException("Multiple primary alignments found for read " + getReadName());
                 }
@@ -256,4 +267,12 @@ class HitsForInsert {
             }
         }
     }
+
+    List<SAMRecord> getSupplementalFirstOfPairOrFragment() {
+        return supplementalFirstOfPairOrFragment;
+    }
+
+    List<SAMRecord> getSupplementalSecondOfPair() {
+        return supplementalSecondOfPair;
+    }
 }
diff --git a/src/java/net/sf/picard/sam/MarkDuplicates.java b/src/java/net/sf/picard/sam/MarkDuplicates.java
index aca7764..5d716a4 100644
--- a/src/java/net/sf/picard/sam/MarkDuplicates.java
+++ b/src/java/net/sf/picard/sam/MarkDuplicates.java
@@ -221,7 +221,7 @@ public class MarkDuplicates extends AbstractDuplicateFindingAlgorithm {
         final CloseableIterator<SAMRecord> iterator = headerAndIterator.iterator;
         while (iterator.hasNext()) {
             final SAMRecord rec = iterator.next();
-            if (!rec.getNotPrimaryAlignmentFlag()) {
+            if (!rec.isSecondaryOrSupplementary()) {
                 final String library = getLibraryName(header, rec);
                 DuplicationMetrics metrics = metricsByLibrary.get(library);
                 if (metrics == null) {
@@ -409,7 +409,7 @@ public class MarkDuplicates extends AbstractDuplicateFindingAlgorithm {
                 }
                 // If this read is unmapped but sorted with the mapped reads, just skip it.
             }
-            else if (!rec.getNotPrimaryAlignmentFlag()){
+            else if (!rec.isSecondaryOrSupplementary()){
                 final ReadEnds fragmentEnd = buildReadEnds(header, index, rec);
                 this.fragSort.add(fragmentEnd);
 
diff --git a/src/java/net/sf/picard/sam/MergeBamAlignment.java b/src/java/net/sf/picard/sam/MergeBamAlignment.java
index 465b679..5cb647f 100644
--- a/src/java/net/sf/picard/sam/MergeBamAlignment.java
+++ b/src/java/net/sf/picard/sam/MergeBamAlignment.java
@@ -217,13 +217,13 @@ public class MergeBamAlignment extends CommandLineProgram {
         }
         // TEMPORARY FIX until internal programs all specify EXPECTED_ORIENTATIONS
         if (JUMP_SIZE != null) {
-            EXPECTED_ORIENTATIONS = Arrays.asList(new SamPairUtil.PairOrientation[]{SamPairUtil.PairOrientation.RF});
+            EXPECTED_ORIENTATIONS = Arrays.asList(SamPairUtil.PairOrientation.RF);
         }
         else if (EXPECTED_ORIENTATIONS == null || EXPECTED_ORIENTATIONS.isEmpty()) {
-            EXPECTED_ORIENTATIONS = Arrays.asList(new SamPairUtil.PairOrientation[]{SamPairUtil.PairOrientation.FR});
+            EXPECTED_ORIENTATIONS = Arrays.asList(SamPairUtil.PairOrientation.FR);
         }
 
-        final SamAlignmentMerger merger = new SamAlignmentMerger (UNMAPPED_BAM, OUTPUT,
+        final SamAlignmentMerger merger = new SamAlignmentMerger(UNMAPPED_BAM, OUTPUT,
             REFERENCE_SEQUENCE, prod, CLIP_ADAPTERS, IS_BISULFITE_SEQUENCE, PAIRED_RUN,
             ALIGNED_READS_ONLY, ALIGNED_BAM, MAX_INSERTIONS_OR_DELETIONS,
             ATTRIBUTES_TO_RETAIN, READ1_TRIM, READ2_TRIM,
diff --git a/src/java/net/sf/picard/sam/MergeSamFiles.java b/src/java/net/sf/picard/sam/MergeSamFiles.java
index 1f6fdef..f17b3f4 100644
--- a/src/java/net/sf/picard/sam/MergeSamFiles.java
+++ b/src/java/net/sf/picard/sam/MergeSamFiles.java
@@ -61,7 +61,7 @@ public class MergeSamFiles extends CommandLineProgram {
     shortName = StandardOptionDefinitions.ASSUME_SORTED_SHORT_NAME)
     public boolean ASSUME_SORTED = false;
 
-    @Option(shortName="MSD", doc="Merge the seqeunce dictionaries", optional=true)
+    @Option(shortName="MSD", doc="Merge the sequence dictionaries", optional=true)
     public boolean MERGE_SEQUENCE_DICTIONARIES = false;
 
     @Option(doc="Option to create a background thread to encode, " +
diff --git a/src/java/net/sf/picard/sam/MostDistantPrimaryAlignmentSelectionStrategy.java b/src/java/net/sf/picard/sam/MostDistantPrimaryAlignmentSelectionStrategy.java
index 1ab3d27..2641939 100644
--- a/src/java/net/sf/picard/sam/MostDistantPrimaryAlignmentSelectionStrategy.java
+++ b/src/java/net/sf/picard/sam/MostDistantPrimaryAlignmentSelectionStrategy.java
@@ -23,7 +23,7 @@
  */
 package net.sf.picard.sam;
 
-import net.sf.picard.util.CollectionUtil;
+import net.sf.samtools.util.CollectionUtil;
 import net.sf.samtools.SAMRecord;
 import net.sf.samtools.SAMUtils;
 import net.sf.samtools.util.CoordMath;
diff --git a/src/java/net/sf/picard/sam/MultiHitAlignedReadIterator.java b/src/java/net/sf/picard/sam/MultiHitAlignedReadIterator.java
index 0d2d1a2..1c8e1e6 100644
--- a/src/java/net/sf/picard/sam/MultiHitAlignedReadIterator.java
+++ b/src/java/net/sf/picard/sam/MultiHitAlignedReadIterator.java
@@ -26,6 +26,7 @@ package net.sf.picard.sam;
 import net.sf.picard.PicardException;
 import net.sf.picard.filter.FilteringIterator;
 import net.sf.picard.filter.SamRecordFilter;
+import net.sf.picard.util.Log;
 import net.sf.picard.util.PeekableIterator;
 import net.sf.samtools.SAMRecord;
 import net.sf.samtools.SAMRecordQueryNameComparator;
@@ -43,7 +44,7 @@ import static net.sf.picard.sam.HitsForInsert.NumPrimaryAlignmentState;
  * Iterate over queryname-sorted SAM, and return each group of reads with the same queryname.  Unmapped reads
  * are filtered out, as are alignments that don't seem to match any part of the reference.
  * If there are multiple hits for the same read, and the first and second ends need to be correlated,
- * then they are sorted by hit index.
+ * then they are sorted by hit index. Supplementary alignments are stashed to one side and re-added once the primary alignment has been determined.
  * A set of hits for a single query may then be filtered with a caller-supplied filter, which will remove any
  * alignments that do not pass the filter.  If the primary alignment is removed, the best-mapping secondary alignment
  * or alignment pair will be marked as primary.
@@ -52,7 +53,6 @@ import static net.sf.picard.sam.HitsForInsert.NumPrimaryAlignmentState;
  * @throws IllegalStateException if the input is not queryname-sorted.
  */
 class MultiHitAlignedReadIterator implements CloseableIterator<HitsForInsert> {
-
     private final PeekableIterator<SAMRecord> peekIterator;
     private final SAMRecordQueryNameComparator queryNameComparator = new SAMRecordQueryNameComparator();
     private final PrimaryAlignmentSelectionStrategy primaryAlignmentSelectionStrategy;
@@ -132,13 +132,30 @@ class MultiHitAlignedReadIterator implements CloseableIterator<HitsForInsert> {
             } else if (isPaired != rec.getReadPairedFlag()) {
                 throw new PicardException("Got a mix of paired and unpaired alignments for read " + readName);
             }
+
+            // Records w/ a supplemental flag are stashed to the side until the primary alignment has
+            // been determined, and then re-added into the process later
             if (!rec.getReadPairedFlag() || rec.getFirstOfPairFlag()) {
-                hits.addFirstOfPairOrFragment(rec);
+                if (rec.getSupplementaryAlignmentFlag()) {
+                    hits.addSupplementalFirstOfPairOrFragment(rec);
+                } else {
+                    hits.addFirstOfPairOrFragment(rec);
+                }
             } else if (rec.getSecondOfPairFlag()) {
-                hits.addSecondOfPair(rec);
+                if (rec.getSupplementaryAlignmentFlag()) {
+                    hits.addSupplementalSecondOfPair(rec);
+                } else {
+                    hits.addSecondOfPair(rec);
+                }
             } else throw new PicardException("Read is marked as pair but neither first or second: " + readName);
         } while (peekIterator.hasNext() && peekIterator.peek().getReadName().equals(readName));
 
+        // If we've added to the second of pair supplementals, make sure it is the same size as the first of pairs
+        if (hits.getSupplementalSecondOfPair().size() > 0 &&
+                hits.getSupplementalSecondOfPair().size() != hits.getSupplementalFirstOfPairOrFragment().size()) {
+            throw new PicardException("Number of supplemental second of pairs do not equal the number of supplemental first of pairs");
+        }
+
         // If there is no more than one alignment for each end, no need to do any coordination.
         if (hits.numHits() <= 1) {
             // No HI tags needed if only a single hit
diff --git a/src/java/net/sf/picard/sam/RevertSam.java b/src/java/net/sf/picard/sam/RevertSam.java
index b7b4fb8..1d46ae2 100644
--- a/src/java/net/sf/picard/sam/RevertSam.java
+++ b/src/java/net/sf/picard/sam/RevertSam.java
@@ -74,6 +74,7 @@ public class RevertSam extends CommandLineProgram {
         add("PG");
         add("MD");
         add("MQ");
+        add("SA"); // Supplementary alignment metadata
     }};
 
     @Option(doc="The sample alias to use in the reverted output file.  This will override the existing " +
@@ -147,7 +148,7 @@ public class RevertSam extends CommandLineProgram {
 
         final ProgressLogger progress = new ProgressLogger(log, 1000000, "Reverted");
         for (final SAMRecord rec : in) {
-            if (rec.getNotPrimaryAlignmentFlag()) continue;
+            if (rec.isSecondaryOrSupplementary()) continue;
             if (RESTORE_ORIGINAL_QUALITIES) {
                 final byte[] oq = rec.getOriginalBaseQualities();
                 if (oq != null) {
diff --git a/src/java/net/sf/picard/sam/SamAlignmentMerger.java b/src/java/net/sf/picard/sam/SamAlignmentMerger.java
index e809c3e..d80053a 100644
--- a/src/java/net/sf/picard/sam/SamAlignmentMerger.java
+++ b/src/java/net/sf/picard/sam/SamAlignmentMerger.java
@@ -96,8 +96,7 @@ public class SamAlignmentMerger extends AbstractAlignmentMerger {
             for (final File f : alignedSamFile) {
                 IoUtil.assertFileIsReadable(f);
             }
-        }
-        else {
+        } else {
             for (final File f : read1AlignedSamFile) {
                 IoUtil.assertFileIsReadable(f);
             }
diff --git a/src/java/net/sf/picard/sam/SamFileValidator.java b/src/java/net/sf/picard/sam/SamFileValidator.java
index 7a269f5..77f9a90 100644
--- a/src/java/net/sf/picard/sam/SamFileValidator.java
+++ b/src/java/net/sf/picard/sam/SamFileValidator.java
@@ -426,7 +426,7 @@ public class SamFileValidator {
     }
 
     private void validateMateFields(final SAMRecord record, final long recordNumber) {
-        if (!record.getReadPairedFlag() || record.getNotPrimaryAlignmentFlag()) {
+        if (!record.getReadPairedFlag() || record.isSecondaryOrSupplementary()) {
             return;
         }
 
diff --git a/src/java/net/sf/picard/sam/SamToFastq.java b/src/java/net/sf/picard/sam/SamToFastq.java
index 518b198..7e62978 100755
--- a/src/java/net/sf/picard/sam/SamToFastq.java
+++ b/src/java/net/sf/picard/sam/SamToFastq.java
@@ -130,7 +130,7 @@ public class SamToFastq extends CommandLineProgram {
 
         final ProgressLogger progress = new ProgressLogger(log);
         for (final SAMRecord currentRecord : reader) {
-            if (currentRecord.getNotPrimaryAlignmentFlag() && !INCLUDE_NON_PRIMARY_ALIGNMENTS)
+            if (currentRecord.isSecondaryOrSupplementary() && !INCLUDE_NON_PRIMARY_ALIGNMENTS)
                 continue;
 
             // Skip non-PF reads as necessary
diff --git a/src/java/net/sf/picard/util/AbstractInputParser.java b/src/java/net/sf/picard/util/AbstractInputParser.java
index 2b29037..c861c48 100644
--- a/src/java/net/sf/picard/util/AbstractInputParser.java
+++ b/src/java/net/sf/picard/util/AbstractInputParser.java
@@ -27,6 +27,7 @@ import net.sf.picard.PicardException;
 
 import java.util.Iterator;
 
+import net.sf.samtools.util.AbstractIterator;
 import net.sf.samtools.util.CloseableIterator;
 
 /**
diff --git a/src/java/net/sf/picard/util/CsvInputParser.java b/src/java/net/sf/picard/util/CsvInputParser.java
new file mode 100644
index 0000000..58e6a2c
--- /dev/null
+++ b/src/java/net/sf/picard/util/CsvInputParser.java
@@ -0,0 +1,36 @@
+package net.sf.picard.util;
+
+import java.io.File;
+import java.io.InputStream;
+
+public class CsvInputParser extends BasicInputParser {
+    /**
+     * Constructor
+     *
+     * @param stream  The input stream(s) to parse
+     */
+    public CsvInputParser(final boolean treatGroupedDelimitersAsOne, final InputStream... stream) {
+        super(treatGroupedDelimitersAsOne, stream);
+    }
+
+    /**
+     * Constructor
+     *
+     * @param file  The file(s) to parse
+     */
+    public CsvInputParser(final boolean treatGroupedDelimitersAsOne, final File... file) {
+        super(treatGroupedDelimitersAsOne, file);
+    }
+
+    /**
+     * Determines whether a given character is a delimiter
+     *
+     * @param b the character to evaluate
+     * @return  true if <code>b</code> is a delimiter; otherwise false
+     */
+    @Override
+    protected boolean isDelimiter(final byte b) {
+        return b == ',';
+    }
+
+}
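
A minimal usage sketch for the new parser (samples.csv is a placeholder file name; the snippet is illustrative, not part of the patch). All of the iteration and line-tracking behaviour comes from BasicInputParser; CsvInputParser only supplies the comma delimiter:

    import java.io.File;
    import net.sf.picard.util.CsvInputParser;

    class CsvInputParserExample {
        public static void main(final String[] args) {
            // "samples.csv" is a hypothetical comma-delimited input file.
            final CsvInputParser parser = new CsvInputParser(false, new File("samples.csv"));
            try {
                while (parser.hasNext()) {
                    final String[] fields = parser.next(); // one row, already split on ','
                    System.out.println(parser.getCurrentLineNumber() + ": " + fields.length + " fields");
                }
            } finally {
                parser.close();
            }
        }
    }
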
diff --git a/src/java/net/sf/picard/util/DelimitedTextFileWithHeaderIterator.java b/src/java/net/sf/picard/util/DelimitedTextFileWithHeaderIterator.java
new file mode 100644
index 0000000..3681440
--- /dev/null
+++ b/src/java/net/sf/picard/util/DelimitedTextFileWithHeaderIterator.java
@@ -0,0 +1,111 @@
+package net.sf.picard.util;
+
+import net.sf.picard.PicardException;
+import net.sf.samtools.util.CloseableIterator;
+
+import java.util.*;
+
+/**
+ * Iterate through a delimited text file in which columns are found by looking at a header line rather than by position.
+ *
+ * TODO: This effectively replaces TabbedTextFileWithHeaderParser although the latter hasn't been modified to use this
+ * code instead.
+ *
+ * @author jgentry at broadinstitute.org
+ */
+public class DelimitedTextFileWithHeaderIterator implements CloseableIterator<DelimitedTextFileWithHeaderIterator.Row> {
+    public class Row {
+        private final String[] fields;
+        private final String currentLine;
+
+        Row(final String[] fields, final String source) {
+            this.fields = fields;
+            this.currentLine = source;
+        }
+
+        /**
+         * @return Array of fields in the order they appear in the file.
+         */
+        public String[] getFields() {
+            return fields;
+        }
+
+        public String getField(final String columnLabel) {
+            final Integer key = columnLabelIndices.get(columnLabel);
+            if (key == null) throw new NoSuchElementException(String.format("column %s in %s", columnLabel, parser.getFileName()));
+            return fields[key];
+        }
+
+        public Integer getIntegerField(final String columnLabel) {
+            if (fields[columnLabelIndices.get(columnLabel)] == null)  return null;
+            return Integer.parseInt(fields[columnLabelIndices.get(columnLabel)]);
+        }
+
+        public String getCurrentLine() {
+            return this.currentLine;
+        }
+    }
+
+    /**
+     * Map from column label to positional index.
+     */
+    private final Map<String, Integer> columnLabelIndices = new HashMap<String, Integer>();
+    private final BasicInputParser parser;
+
+    public DelimitedTextFileWithHeaderIterator(final BasicInputParser parser) {
+        this.parser = parser;
+        if (!parser.hasNext()) {
+            throw new PicardException("No header line found in file " + parser.getFileName());
+        }
+        final String[] columnLabels = parser.next();
+        for (int i = 0; i < columnLabels.length; ++i) {
+            columnLabelIndices.put(columnLabels[i], i);
+        }
+    }
+
+    /**
+     * @param columnLabel
+     * @return True if the given column label appears in the header.
+     */
+    public boolean hasColumn(final String columnLabel) {
+        return columnLabelIndices.containsKey(columnLabel);
+    }
+
+    /**
+     *
+     * @return The set of column labels for this file in no particular order.
+     */
+    public Set<String> columnLabels() {
+        return columnLabelIndices.keySet();
+    }
+
+    public int getCurrentLineNumber() {
+        return parser.getCurrentLineNumber();
+    }
+
+    public Set<String> getColumnNames() {
+        return Collections.unmodifiableSet(this.columnLabelIndices.keySet());
+    }
+
+    @Override
+    public boolean hasNext() {
+        return parser.hasNext();
+    }
+
+    @Override
+    public Row next() {
+        final String[] fields = parser.next();
+        final String source = parser.getCurrentLine();
+        return new Row(fields, source);
+    }
+
+    @Override
+    public void remove() {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public void close() {
+        parser.close();
+    }
+}
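
Combined with a parser such as the new CsvInputParser above, this iterator lets callers address columns by header label instead of position (hasColumn() can be used to probe for optional columns). A usage sketch, assuming a hypothetical metrics.csv whose header line declares SAMPLE and READS columns:

    import java.io.File;
    import net.sf.picard.util.CsvInputParser;
    import net.sf.picard.util.DelimitedTextFileWithHeaderIterator;

    class HeaderIteratorExample {
        public static void main(final String[] args) {
            final DelimitedTextFileWithHeaderIterator rows = new DelimitedTextFileWithHeaderIterator(
                    new CsvInputParser(false, new File("metrics.csv")));
            try {
                while (rows.hasNext()) {
                    final DelimitedTextFileWithHeaderIterator.Row row = rows.next();
                    // Columns are looked up by the labels found on the header line.
                    System.out.println(row.getField("SAMPLE") + "\t" + row.getIntegerField("READS"));
                }
            } finally {
                rows.close();
            }
        }
    }
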
diff --git a/src/java/net/sf/picard/util/IntervalTreeMap.java b/src/java/net/sf/picard/util/IntervalTreeMap.java
index f79d246..2210874 100644
--- a/src/java/net/sf/picard/util/IntervalTreeMap.java
+++ b/src/java/net/sf/picard/util/IntervalTreeMap.java
@@ -239,10 +239,11 @@ public class IntervalTreeMap<T>
                 throw new NoSuchElementException("Iterator exhausted");
             }
             final IntervalTree.Node<T> node = mTreeIterator.next();
+            final String sequence = mSequence;
             if (!mTreeIterator.hasNext()) {
                 advanceSequence();
             }
-            final Interval key = new Interval(mSequence, node.getStart(), node.getEnd());
+            final Interval key = new Interval(sequence, node.getStart(), node.getEnd());
             final T value = node.getValue();
             return new MapEntry(key, value);
         }
diff --git a/src/java/net/sf/picard/util/IterableAdapter.java b/src/java/net/sf/picard/util/IterableAdapter.java
new file mode 100644
index 0000000..aeb9a47
--- /dev/null
+++ b/src/java/net/sf/picard/util/IterableAdapter.java
@@ -0,0 +1,29 @@
+package net.sf.picard.util;
+
+import java.util.ConcurrentModificationException;
+import java.util.Iterator;
+
+/**
+ * Provides an adapter to wrap an Iterator with an Iterable, allowing it to be run through a foreach loop. iterator()
+ * may only be called a single time; the adapter is intended to be constructed inline.
+ *
+ * @author jgentry at broadinstitute.org
+ */
+public class IterableAdapter<T> implements Iterable<T> {
+    private boolean isIteratorCalled = false;
+    private final Iterator<T> theIterator;
+
+    public IterableAdapter(final Iterator<T> theIterator) {
+        this.theIterator = theIterator;
+    }
+
+    @Override
+    public Iterator<T> iterator() {
+        if (isIteratorCalled) {
+            throw new ConcurrentModificationException("iterator() can only be called once!");
+        }
+
+        isIteratorCalled = true;
+        return theIterator;
+    }
+}
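
A usage sketch for IterableAdapter: wrap an existing Iterator inline so that a for-each loop can consume it (remembering that iterator() may only be called once):

    import java.util.Arrays;
    import java.util.Iterator;
    import net.sf.picard.util.IterableAdapter;

    class IterableAdapterExample {
        public static void main(final String[] args) {
            final Iterator<String> names = Arrays.asList("a", "b", "c").iterator();
            // The adapter is constructed inline and consumed exactly once.
            for (final String name : new IterableAdapter<String>(names)) {
                System.out.println(name);
            }
        }
    }
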
diff --git a/src/java/net/sf/picard/util/IterableOnceIterator.java b/src/java/net/sf/picard/util/IterableOnceIterator.java
new file mode 100644
index 0000000..c4b651d
--- /dev/null
+++ b/src/java/net/sf/picard/util/IterableOnceIterator.java
@@ -0,0 +1,42 @@
+package net.sf.picard.util;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.Iterator;
+
+/**
+ * Abstract implementation of an iterator that also implements Iterable (to return itself)
+ * so that it can be used in for() loops.  Only supports calling iterator() once, since new
+ * iterators are not manufactured; the same object is returned each time.
+ *
+ * @author Tim Fennell
+ */
+public abstract class IterableOnceIterator<T> implements Iterable<T>, Iterator<T>, Closeable {
+    private boolean iterated = false;
+
+    /**
+     * On the first call returns this object which is also an iterator.  On subsequent calls throws
+     * an exception since new iterators cannot be generated.
+     */
+    @Override
+    public Iterator<T> iterator() {
+        if (iterated) {
+            throw new IllegalStateException("May not call iterator() more than once on IterableOnceIterator.");
+        }
+        else {
+            iterated = true;
+            return this;
+        }
+    }
+
+    /** Operation not supported. */
+    @Override
+    public void remove() {
+        throw new UnsupportedOperationException("remove() not supported");
+    }
+
+    /** Does nothing, intended to be overridden when needed. */
+    @Override public void close() throws IOException {
+        // Default do nothing implementation
+    }
+}
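
A usage sketch for IterableOnceIterator; the CountdownIterator subclass below is purely illustrative and shows that the same object serves as both Iterator and Iterable:

    import net.sf.picard.util.IterableOnceIterator;

    class CountdownIterator extends IterableOnceIterator<Integer> {
        private int remaining;

        CountdownIterator(final int start) { this.remaining = start; }

        @Override public boolean hasNext() { return remaining > 0; }
        @Override public Integer next()    { return remaining--; }

        public static void main(final String[] args) {
            // Prints 3, 2, 1; a second call to iterator() on the same object would throw.
            for (final int i : new CountdownIterator(3)) {
                System.out.println(i);
            }
        }
    }
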
diff --git a/src/java/net/sf/picard/util/MathUtil.java b/src/java/net/sf/picard/util/MathUtil.java
index d538920..77edaaa 100644
--- a/src/java/net/sf/picard/util/MathUtil.java
+++ b/src/java/net/sf/picard/util/MathUtil.java
@@ -34,6 +34,9 @@ import static java.lang.Math.pow;
  * @author Tim Fennell
  */
 public class MathUtil {
+    /** The double value closest to 1 while still being less than 1. */
+    public static final double MAX_PROB_BELOW_ONE = 0.9999999999999999d;
+
     /** Calculated the mean of an array of doubles. */
     public static double mean(final double[] in, final int start, final int stop) {
         double total = 0;
@@ -115,7 +118,8 @@ public class MathUtil {
 
     /**
      * Takes a complete set of mutually exclusive log likelihoods and converts them to probabilities
-     * that sum to 1 with as much fidelity as possible.
+     * that sum to 1 with as much fidelity as possible.  Limits probabilities to be in the space:
+     * 0.9999999999999999 >= p >= (1-0.9999999999999999)/(likelihoods.length-1)
      */
     public static double[] logLikelihoodsToProbs(final double[] likelihoods) {
         // Note: bumping all the LLs so that the biggest is 300 ensures that we have the
@@ -132,8 +136,13 @@ public class MathUtil {
             total += tmp[i];
         }
 
+        final double maxP = MAX_PROB_BELOW_ONE;
+        final double minP = (1-MAX_PROB_BELOW_ONE) / (tmp.length-1);
+
         for (int i=0; i<likelihoods.length; ++i) {
             tmp[i] /= total;
+            if      (tmp[i] > maxP) tmp[i] = maxP;
+            else if (tmp[i] < minP) tmp[i] = minP;
         }
 
         return tmp;
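
With this clamping, logLikelihoodsToProbs never returns exactly 0 or 1: results are limited to the range [(1 - MAX_PROB_BELOW_ONE) / (n - 1), MAX_PROB_BELOW_ONE]. A small sketch exercising the extreme case (the input values are arbitrary log likelihoods chosen so that naive normalization would otherwise produce 1.0 and 0.0):

    import net.sf.picard.util.MathUtil;

    class LogLikelihoodsToProbsExample {
        public static void main(final String[] args) {
            final double[] logLikelihoods = { 0d, -300d, -300d };
            final double[] probs = MathUtil.logLikelihoodsToProbs(logLikelihoods);
            // The dominant hypothesis is capped at MathUtil.MAX_PROB_BELOW_ONE and the
            // others are floored at (1 - MAX_PROB_BELOW_ONE) / (probs.length - 1).
            for (final double p : probs) {
                System.out.println(p);
            }
        }
    }
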
diff --git a/src/java/net/sf/picard/util/ProcessExecutor.java b/src/java/net/sf/picard/util/ProcessExecutor.java
index cc7d0c7..369353c 100644
--- a/src/java/net/sf/picard/util/ProcessExecutor.java
+++ b/src/java/net/sf/picard/util/ProcessExecutor.java
@@ -25,6 +25,7 @@
 package net.sf.picard.util;
 
 import net.sf.picard.PicardException;
+import net.sf.samtools.util.StringUtil;
 
 import java.io.*;
 import java.util.concurrent.*;
@@ -120,6 +121,65 @@ public class ProcessExecutor {
 
     }
 
+    public static class ExitStatusAndOutput {
+        public final int exitStatus;
+        public final String stdout;
+        /** Null if stderr was interleaved into stdout. */
+        public final String stderr;
+
+        public ExitStatusAndOutput(int exitStatus, String stdout, String stderr) {
+            this.exitStatus = exitStatus;
+            this.stdout = stdout;
+            this.stderr = stderr;
+        }
+    }
+
+    /**
+     * Execute the command and capture stdout and stderr.
+     * @return Exit status of command, and both stderr and stdout interleaved into stdout attribute.
+     */
+    public static ExitStatusAndOutput executeAndReturnInterleavedOutput(final String command) {
+        try {
+            final Process process = Runtime.getRuntime().exec(command);
+            return interleaveProcessOutput(process);
+
+        } catch (Throwable t) {
+            throw new PicardException("Unexpected exception executing [" + command + "]", t);
+        }
+    }
+
+    /**
+     * Execute the command and capture stdout and stderr.
+     * @return Exit status of command, and both stderr and stdout interleaved into stdout attribute.
+     */
+    public static ExitStatusAndOutput executeAndReturnInterleavedOutput(final String[] commandArray) {
+        try {
+            final Process process = Runtime.getRuntime().exec(commandArray);
+            return interleaveProcessOutput(process);
+
+        } catch (Throwable t) {
+            throw new PicardException("Unexpected exception executing [" + StringUtil.join(" ", commandArray) + "]", t);
+        }
+    }
+
+    private static ExitStatusAndOutput interleaveProcessOutput(final Process process) throws InterruptedException, IOException {
+        final BufferedReader stdoutReader = new BufferedReader(new InputStreamReader(process.getInputStream()));
+        final BufferedReader stderrReader = new BufferedReader(new InputStreamReader(process.getErrorStream()));
+        final StringBuilder sb = new StringBuilder();
+
+        String stdoutLine = null;
+        String stderrLine = null;
+        while ((stderrLine = stderrReader.readLine()) != null ||
+                (stdoutLine = stdoutReader.readLine()) != null) {
+            if (stderrLine != null) sb.append(stderrLine).append('\n');
+            if (stdoutLine != null) sb.append(stdoutLine).append('\n');
+            stderrLine = null;
+            stdoutLine = null;
+        }
+        return new ExitStatusAndOutput(process.waitFor(), sb.toString(), null);
+
+    }
+
     private static int readStreamsAndWaitFor(final Process process)
             throws InterruptedException, ExecutionException {
         final Future<?> stderrReader = executorService.submit(new LogErrorProcessOutputReader(process.getErrorStream()));
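
A usage sketch for the new executeAndReturnInterleavedOutput overloads ("ls" with "-l" is just an arbitrary command; anything on the PATH would do):

    import net.sf.picard.util.ProcessExecutor;

    class ProcessExecutorExample {
        public static void main(final String[] args) {
            final ProcessExecutor.ExitStatusAndOutput result =
                    ProcessExecutor.executeAndReturnInterleavedOutput(new String[] { "ls", "-l" });
            System.out.println("exit status: " + result.exitStatus);
            // stderr has been interleaved into stdout, so result.stderr is null here.
            System.out.print(result.stdout);
        }
    }
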
diff --git a/src/java/net/sf/picard/util/RExecutor.java b/src/java/net/sf/picard/util/RExecutor.java
index 00bd7f8..b41486e 100644
--- a/src/java/net/sf/picard/util/RExecutor.java
+++ b/src/java/net/sf/picard/util/RExecutor.java
@@ -32,6 +32,7 @@ import java.util.Arrays;
 
 import net.sf.picard.PicardException;
 import net.sf.picard.io.IoUtil;
+import net.sf.samtools.util.CollectionUtil;
 
 /**
  * Util class for executing R scripts.
diff --git a/src/java/net/sf/picard/util/SamLocusIterator.java b/src/java/net/sf/picard/util/SamLocusIterator.java
index b998709..9dd8a54 100644
--- a/src/java/net/sf/picard/util/SamLocusIterator.java
+++ b/src/java/net/sf/picard/util/SamLocusIterator.java
@@ -118,7 +118,7 @@ public class SamLocusIterator implements Iterable<SamLocusIterator.LocusInfo>, C
     private final SAMFileReader samReader;
     private final ReferenceSequenceMask referenceSequenceMask;
     private PeekableIterator<SAMRecord> samIterator;
-    private List<SamRecordFilter> samFilters = Arrays.asList(new NotPrimaryAlignmentFilter(),
+    private List<SamRecordFilter> samFilters = Arrays.asList(new SecondaryOrSupplementaryFilter(),
                                                              new DuplicateReadFilter());
     private final List<Interval> intervals;
     private final boolean useIndex;
diff --git a/src/java/net/sf/picard/util/TabbedInputParser.java b/src/java/net/sf/picard/util/TabbedInputParser.java
index ef32a36..4abbae4 100644
--- a/src/java/net/sf/picard/util/TabbedInputParser.java
+++ b/src/java/net/sf/picard/util/TabbedInputParser.java
@@ -57,7 +57,8 @@ public class TabbedInputParser extends BasicInputParser {
      * @param b the character to evaluate
      * @return  true if <code>b</code> is a delimiter; otherwise false
      */
-    protected boolean isDelimiter(byte b) {
+    @Override
+    protected boolean isDelimiter(final byte b) {
         return b == '\t';
     }
 }
diff --git a/src/java/net/sf/picard/util/TabbedTextFileWithHeaderParser.java b/src/java/net/sf/picard/util/TabbedTextFileWithHeaderParser.java
index b5df861..fe5ce2a 100644
--- a/src/java/net/sf/picard/util/TabbedTextFileWithHeaderParser.java
+++ b/src/java/net/sf/picard/util/TabbedTextFileWithHeaderParser.java
@@ -66,7 +66,6 @@ public class TabbedTextFileWithHeaderParser implements Iterable<TabbedTextFileWi
         public String getCurrentLine() {
             return this.currentLine;
         }
-
     }
 
     class TheIterator implements CloseableIterator<Row> {
diff --git a/src/java/net/sf/picard/vcf/BcfIterator.java b/src/java/net/sf/picard/vcf/BcfIterator.java
deleted file mode 100644
index 25e7e80..0000000
--- a/src/java/net/sf/picard/vcf/BcfIterator.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
-* Copyright (c) 2013 The Broad Institute
-*
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-*
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-*
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package net.sf.picard.vcf;
-
-import net.sf.picard.PicardException;
-import net.sf.samtools.util.CloserUtil;
-import org.broad.tribble.FeatureCodecHeader;
-import org.broad.tribble.readers.PositionalBufferedStream;
-import org.broadinstitute.variant.bcf2.BCF2Codec;
-import org.broadinstitute.variant.variantcontext.VariantContext;
-import org.broadinstitute.variant.vcf.VCFHeader;
-
-import java.io.*;
-import java.util.NoSuchElementException;
-
-public class BcfIterator implements VariantContextIterator {
-    private final BCF2Codec bcfCodec = new BCF2Codec();
-    private final PositionalBufferedStream inputStream;
-    private final FeatureCodecHeader codecHeader;
-
-    public BcfIterator(final InputStream bcfStream) {
-        inputStream = new PositionalBufferedStream(bcfStream);
-        codecHeader = bcfCodec.readHeader(inputStream);
-    }
-
-    @Override
-    public void close() {
-        CloserUtil.close(inputStream);
-    }
-
-    @Override
-    public boolean hasNext() {
-        final boolean isDone;
-        try {
-            isDone = inputStream.isDone();
-        } catch (IOException ioe) {
-            throw new PicardException("Unable to determine if BcfIterator is exhausted", ioe);
-        }
-        return !isDone;
-    }
-
-    @Override
-    public VariantContext next() {
-        if (!this.hasNext()) {
-            throw new NoSuchElementException("Called next() on an exhausted BcfIterator");
-        }
-        return bcfCodec.decode(inputStream);
-    }
-
-    public VCFHeader getHeader() {
-        return (VCFHeader)codecHeader.getHeaderValue();
-    }
-
-    /**
-     * Unsupported.
-     */
-    @Override
-    public void remove() {
-        throw new UnsupportedOperationException();
-    }
-}
diff --git a/src/java/net/sf/picard/vcf/MakeSitesOnlyVcf.java b/src/java/net/sf/picard/vcf/MakeSitesOnlyVcf.java
new file mode 100644
index 0000000..cc708ee
--- /dev/null
+++ b/src/java/net/sf/picard/vcf/MakeSitesOnlyVcf.java
@@ -0,0 +1,90 @@
+package net.sf.picard.vcf;
+
+import net.sf.picard.PicardException;
+import net.sf.picard.cmdline.CommandLineProgram;
+import net.sf.picard.cmdline.Option;
+import net.sf.picard.cmdline.StandardOptionDefinitions;
+import net.sf.picard.io.IoUtil;
+import net.sf.picard.util.Log;
+import net.sf.picard.util.ProgressLogger;
+import net.sf.samtools.SAMFileReader;
+import net.sf.samtools.SAMSequenceDictionary;
+import net.sf.samtools.util.CloseableIterator;
+import net.sf.samtools.util.CloserUtil;
+import org.broadinstitute.variant.variantcontext.VariantContext;
+import org.broadinstitute.variant.variantcontext.writer.Options;
+import org.broadinstitute.variant.variantcontext.writer.VariantContextWriter;
+import org.broadinstitute.variant.variantcontext.writer.VariantContextWriterFactory;
+import org.broadinstitute.variant.vcf.VCFFileReader;
+import org.broadinstitute.variant.vcf.VCFHeader;
+
+import java.io.File;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.Set;
+
+/**
+ * Writes out a VCF that contains all the site-level information for all records in the input VCF and no per-sample information.
+ *
+ * @author Tim Fennell
+ */
+public class MakeSitesOnlyVcf extends CommandLineProgram {
+    @Option(shortName= StandardOptionDefinitions.INPUT_SHORT_NAME, doc="Input VCF or BCF")
+    public File INPUT;
+
+    @Option(shortName=StandardOptionDefinitions.OUTPUT_SHORT_NAME, doc="Output VCF or BCF to emit without per-sample info.")
+    public File OUTPUT;
+
+    @Option(shortName=StandardOptionDefinitions.SEQUENCE_DICTIONARY_SHORT_NAME, doc="Sequence dictionary to use when indexing the VCF.", optional = true)
+    public File SEQUENCE_DICTIONARY;
+
+    private static final Set<String> NO_SAMPLES = Collections.emptySet();
+    
+    // Stock main method
+    public static void main(final String[] args) {
+        new MakeSitesOnlyVcf().instanceMainWithExit(args);
+    }
+
+	public MakeSitesOnlyVcf() {
+		CREATE_INDEX = true;
+	}
+
+    @Override
+    protected int doWork() {
+        IoUtil.assertFileIsReadable(INPUT);
+        if (SEQUENCE_DICTIONARY != null) IoUtil.assertFileIsReadable(SEQUENCE_DICTIONARY);
+        IoUtil.assertFileIsWritable(OUTPUT);
+
+	    final VCFFileReader reader = new VCFFileReader(INPUT);
+	    final VCFHeader header = new VCFHeader(reader.getFileHeader());
+	    final SAMSequenceDictionary sequenceDictionary =
+			    SEQUENCE_DICTIONARY != null
+			            ? SAMFileReader.getSequenceDictionary(SEQUENCE_DICTIONARY)
+					    : header.getSequenceDictionary();
+	    if (CREATE_INDEX && sequenceDictionary == null) {
+		    throw new PicardException("A sequence dictionary must be available (either through the input file or by setting it explicitly) when creating indexed output.");
+	    }
+	    final EnumSet<Options> options = CREATE_INDEX ? EnumSet.of(Options.INDEX_ON_THE_FLY) : EnumSet.noneOf(Options.class);
+	    final VariantContextWriter writer = VariantContextWriterFactory.create(OUTPUT, sequenceDictionary, options);
+
+	    writer.writeHeader(header);
+
+        final ProgressLogger progress = new ProgressLogger(Log.getInstance(MakeSitesOnlyVcf.class), 10000);
+
+	    final CloseableIterator<VariantContext> iterator = reader.iterator();
+	    while (iterator.hasNext()) {
+		    final VariantContext context = iterator.next();
+		    writer.add(context.subContextFromSamples(
+                    NO_SAMPLES, 
+                    false // Do not re-derive the alleles from the new, subsetted genotypes: our site-only VCF should retain these values.
+            ));
+            progress.record(context.getChr(), context.getStart());
+        }
+
+	    CloserUtil.close(iterator);
+	    CloserUtil.close(reader);
+	    writer.close();
+
+        return 0;
+    }
+}
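
MakeSitesOnlyVcf can be driven programmatically through the instanceMainWithExit entry point shown above; the file names below are placeholders. Note that the constructor turns CREATE_INDEX on, so the input VCF must carry a sequence dictionary (or SEQUENCE_DICTIONARY must be supplied) for the run to succeed:

    import net.sf.picard.vcf.MakeSitesOnlyVcf;

    class MakeSitesOnlyVcfExample {
        public static void main(final String[] args) {
            // Placeholder paths; options use the usual Picard NAME=value syntax.
            new MakeSitesOnlyVcf().instanceMainWithExit(new String[] {
                    "INPUT=calls.vcf",
                    "OUTPUT=sites_only.vcf"
            });
        }
    }
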
diff --git a/src/java/net/sf/picard/vcf/MergeVcfs.java b/src/java/net/sf/picard/vcf/MergeVcfs.java
index 33e6e98..d9a42c4 100644
--- a/src/java/net/sf/picard/vcf/MergeVcfs.java
+++ b/src/java/net/sf/picard/vcf/MergeVcfs.java
@@ -23,6 +23,7 @@
  */
 package net.sf.picard.vcf;
 
+import net.sf.picard.PicardException;
 import net.sf.picard.cmdline.CommandLineParser;
 import net.sf.picard.cmdline.CommandLineProgram;
 import net.sf.picard.cmdline.Option;
@@ -32,11 +33,16 @@ import net.sf.picard.io.IoUtil;
 import net.sf.picard.util.Log;
 import net.sf.picard.util.MergingIterator;
 import net.sf.picard.util.ProgressLogger;
+import net.sf.samtools.SAMFileReader;
 import net.sf.samtools.SAMSequenceDictionary;
 import net.sf.samtools.util.CloseableIterator;
+import net.sf.samtools.util.CloserUtil;
 import org.broadinstitute.variant.variantcontext.VariantContext;
+import org.broadinstitute.variant.variantcontext.VariantContextComparator;
 import org.broadinstitute.variant.variantcontext.writer.Options;
 import org.broadinstitute.variant.variantcontext.writer.VariantContextWriter;
+import org.broadinstitute.variant.variantcontext.writer.VariantContextWriterFactory;
+import org.broadinstitute.variant.vcf.VCFFileReader;
 import org.broadinstitute.variant.vcf.VCFHeader;
 import org.broadinstitute.variant.vcf.VCFUtils;
 
@@ -66,13 +72,13 @@ public class MergeVcfs extends CommandLineProgram {
 			"and, within contigs, by start position. The input files must have the same sample and " +
 			"contig lists. An index file is created and a sequence dictionary is required by default.";
 
-	@Option(shortName= StandardOptionDefinitions.INPUT_SHORT_NAME, doc="VCF or BCF input files", minElements=1)
+	@Option(shortName= StandardOptionDefinitions.INPUT_SHORT_NAME, doc="VCF or BCF input files. File format is determined by file extension.", minElements=1)
 	public List<File> INPUT;
 
-	@Option(shortName=StandardOptionDefinitions.OUTPUT_SHORT_NAME, doc="The merged VCF file")
+	@Option(shortName=StandardOptionDefinitions.OUTPUT_SHORT_NAME, doc="The merged VCF or BCF file. File format is determined by file extension.")
 	public File OUTPUT;
 
-	@Option(shortName="D", doc="The index sequence dictionary (required if CREATE_INDEX=true)", optional = true)
+	@Option(shortName="D", doc="The index sequence dictionary to use instead of the sequence dictionary in the input file", optional = true)
 	public File SEQUENCE_DICTIONARY;
 
 	private final Log log = Log.getInstance(MergeVcfs.class);
@@ -93,54 +99,55 @@ public class MergeVcfs extends CommandLineProgram {
 		final Collection<VCFHeader> headers = new HashSet<VCFHeader>(INPUT.size());
 
 		VariantContextComparator variantContextComparator = null;
+		SAMSequenceDictionary sequenceDictionary = null;
+
+		if (SEQUENCE_DICTIONARY != null) sequenceDictionary = SAMFileReader.getSequenceDictionary(SEQUENCE_DICTIONARY);
 
 		for (final File file : INPUT) {
 			IoUtil.assertFileIsReadable(file);
-			final VariantContextIterator variantIterator = VariantContextIteratorFactory.create(file);
-			final VCFHeader header = variantIterator.getHeader();
+			final VCFFileReader fileReader = new VCFFileReader(file);
+			final VCFHeader fileHeader = fileReader.getFileHeader();
+
 			if (variantContextComparator == null) {
-				variantContextComparator = new VariantContextComparator(header.getContigLines());
+				variantContextComparator = fileHeader.getVCFRecordComparator();
 			} else {
-				if ( ! variantContextComparator.isCompatible(header.getContigLines())) {
+				if ( ! variantContextComparator.isCompatible(fileHeader.getContigLines())) {
 					throw new IllegalArgumentException(
 							"The contig entries in input file " + file.getAbsolutePath() + " are not compatible with the others.");
 				}
 			}
 
+			if (sequenceDictionary == null) sequenceDictionary = fileHeader.getSequenceDictionary();
+
 			if (sampleList.isEmpty()) {
-				sampleList.addAll(header.getSampleNamesInOrder());
+				sampleList.addAll(fileHeader.getSampleNamesInOrder());
 			} else {
-				if ( ! sampleList.equals(header.getSampleNamesInOrder())) {
+				if ( ! sampleList.equals(fileHeader.getSampleNamesInOrder())) {
 					throw new IllegalArgumentException("Input file " + file.getAbsolutePath() + " has sample entries that don't match the other files.");
 				}
 			}
 
-			headers.add(header);
-			iteratorCollection.add(variantIterator);
+			headers.add(fileHeader);
+			iteratorCollection.add(fileReader.iterator());
 		}
 
+		if (CREATE_INDEX && sequenceDictionary == null) {
+			throw new PicardException("A sequence dictionary must be available (either through the input file or by setting it explicitly) when creating indexed output.");
+		}
 		final EnumSet<Options> options = CREATE_INDEX ? EnumSet.of(Options.INDEX_ON_THE_FLY) : EnumSet.noneOf(Options.class);
-		final SAMSequenceDictionary sequenceDictionary =
-				SEQUENCE_DICTIONARY != null ? VariantContextUtils.getSequenceDictionary(SEQUENCE_DICTIONARY) : null;
-		final VariantContextWriter out = VariantContextUtils.getConditionallyCompressingWriter(OUTPUT, sequenceDictionary, options);
+		final VariantContextWriter writer = VariantContextWriterFactory.create(OUTPUT, sequenceDictionary, options);
 
-		out.writeHeader(new VCFHeader(VCFUtils.smartMergeHeaders(headers, false), sampleList));
+		writer.writeHeader(new VCFHeader(VCFUtils.smartMergeHeaders(headers, false), sampleList));
 
 		final MergingIterator<VariantContext> mergingIterator = new MergingIterator<VariantContext>(variantContextComparator, iteratorCollection);
 		while (mergingIterator.hasNext()) {
 			final VariantContext context = mergingIterator.next();
-			out.add(context);
+			writer.add(context);
 			progress.record(context.getChr(), context.getStart());
 		}
 
-		out.close();
+		CloserUtil.close(mergingIterator);
+		writer.close();
 		return 0;
 	}
-
-	protected String[] customCommandLineValidation() {
-		if (this.CREATE_INDEX && (this.SEQUENCE_DICTIONARY == null)) {
-			return new String[] { "If CREATE_INDEX is set a sequence dictionary must be specified." };
-		}
-		return null;
-	}
 }
diff --git a/src/java/net/sf/picard/vcf/SplitVcfs.java b/src/java/net/sf/picard/vcf/SplitVcfs.java
index 15523ea..1295bea 100644
--- a/src/java/net/sf/picard/vcf/SplitVcfs.java
+++ b/src/java/net/sf/picard/vcf/SplitVcfs.java
@@ -1,5 +1,6 @@
 package net.sf.picard.vcf;
 
+import net.sf.picard.PicardException;
 import net.sf.picard.cmdline.CommandLineParser;
 import net.sf.picard.cmdline.CommandLineProgram;
 import net.sf.picard.cmdline.Option;
@@ -8,10 +9,15 @@ import net.sf.picard.cmdline.Usage;
 import net.sf.picard.io.IoUtil;
 import net.sf.picard.util.Log;
 import net.sf.picard.util.ProgressLogger;
+import net.sf.samtools.SAMFileReader;
 import net.sf.samtools.SAMSequenceDictionary;
+import net.sf.samtools.util.CloseableIterator;
+import net.sf.samtools.util.CloserUtil;
 import org.broadinstitute.variant.variantcontext.VariantContext;
 import org.broadinstitute.variant.variantcontext.writer.Options;
 import org.broadinstitute.variant.variantcontext.writer.VariantContextWriter;
+import org.broadinstitute.variant.variantcontext.writer.VariantContextWriterFactory;
+import org.broadinstitute.variant.vcf.VCFFileReader;
 import org.broadinstitute.variant.vcf.VCFHeader;
 
 import java.io.File;
@@ -36,13 +42,13 @@ public class SplitVcfs extends CommandLineProgram {
 	@Option(shortName = StandardOptionDefinitions.INPUT_SHORT_NAME, doc="The VCF or BCF input file")
 	public File INPUT;
 
-	@Option(doc="The VCF file to which SNP records should be written")
+	@Option(doc="The VCF or BCF file to which SNP records should be written. The file format is determined by file extension.")
 	public File SNP_OUTPUT;
 
-	@Option(doc="The VCF file to which indel records should be written")
+	@Option(doc="The VCF or BCF file to which indel records should be written. The file format is determined by file extension.")
 	public File INDEL_OUTPUT;
 
-	@Option(shortName="D", doc="The index sequence dictionary (required if CREATE_INDEX=true)", optional = true)
+	@Option(shortName="D", doc="The index sequence dictionary to use instead of the sequence dictionaries in the input files", optional = true)
 	public File SEQUENCE_DICTIONARY;
 
     @Option(doc="If true an exception will be thrown if an event type other than SNP or indel is encountered")
@@ -62,25 +68,32 @@ public class SplitVcfs extends CommandLineProgram {
 	protected int doWork() {
 		IoUtil.assertFileIsReadable(INPUT);
 		final ProgressLogger progress = new ProgressLogger(log, 10000);
-		final VariantContextIterator variantIterator = VariantContextIteratorFactory.create(INPUT);
-		final VCFHeader header = variantIterator.getHeader();
 
-		final EnumSet<Options> options = CREATE_INDEX ? EnumSet.of(Options.INDEX_ON_THE_FLY) : EnumSet.noneOf(Options.class);
+		final VCFFileReader fileReader = new VCFFileReader(INPUT);
+		final VCFHeader fileHeader = fileReader.getFileHeader();
+
 		final SAMSequenceDictionary sequenceDictionary =
-				SEQUENCE_DICTIONARY != null ? VariantContextUtils.getSequenceDictionary(SEQUENCE_DICTIONARY) : null;
+				SEQUENCE_DICTIONARY != null
+						? SAMFileReader.getSequenceDictionary(SEQUENCE_DICTIONARY)
+						: fileHeader.getSequenceDictionary();
+		if (CREATE_INDEX && sequenceDictionary == null) {
+			throw new PicardException("A sequence dictionary must be available (either through the input file or by setting it explicitly) when creating indexed output.");
+		}
 
-		final VariantContextWriter snpOutput = VariantContextUtils.getConditionallyCompressingWriter(SNP_OUTPUT, sequenceDictionary, options);
-		final VariantContextWriter indelOutput = VariantContextUtils.getConditionallyCompressingWriter(INDEL_OUTPUT, sequenceDictionary, options);
-		snpOutput.writeHeader(header);
-		indelOutput.writeHeader(header);
+		final EnumSet<Options> options = CREATE_INDEX ? EnumSet.of(Options.INDEX_ON_THE_FLY) : EnumSet.noneOf(Options.class);
 
-        int incorrectVariantCount = 0;
+		final VariantContextWriter snpWriter = VariantContextWriterFactory.create(SNP_OUTPUT, sequenceDictionary, options);
+		final VariantContextWriter indelWriter = VariantContextWriterFactory.create(INDEL_OUTPUT, sequenceDictionary, options);
+		snpWriter.writeHeader(fileHeader);
+		indelWriter.writeHeader(fileHeader);
 
-		while (variantIterator.hasNext()) {
-			final VariantContext context = variantIterator.next();
+        int incorrectVariantCount = 0;
 
-			if (context.isIndel()) indelOutput.add(context);
-			else if (context.isSNP()) snpOutput.add(context);
+		final CloseableIterator<VariantContext> iterator = fileReader.iterator();
+		while (iterator.hasNext()) {
+			final VariantContext context = iterator.next();
+			if (context.isIndel()) indelWriter.add(context);
+			else if (context.isSNP()) snpWriter.add(context);
 			else {
                 if (STRICT) throw new IllegalStateException("Found a record with type " + context.getType().name());
                 else incorrectVariantCount++;
@@ -88,20 +101,16 @@ public class SplitVcfs extends CommandLineProgram {
 
             progress.record(context.getChr(), context.getStart());
 		}
+
         if (incorrectVariantCount > 0) {
             log.debug("Found " + incorrectVariantCount + " records that didn't match SNP or INDEL");
         }
 
-		snpOutput.close();
-		indelOutput.close();
+		CloserUtil.close(iterator);
+		CloserUtil.close(fileReader);
+		snpWriter.close();
+		indelWriter.close();
 
 		return 0;
 	}
-
-	protected String[] customCommandLineValidation() {
-		if (this.CREATE_INDEX && (this.SEQUENCE_DICTIONARY == null)) {
-			return new String[] { "If CREATE_INDEX is set a sequence dictionary must be specified." };
-		}
-		return null;
-	}
 }
diff --git a/src/java/net/sf/picard/vcf/VariantContextIterator.java b/src/java/net/sf/picard/vcf/VariantContextIterator.java
deleted file mode 100644
index 7225f94..0000000
--- a/src/java/net/sf/picard/vcf/VariantContextIterator.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
-* Copyright (c) 2013 The Broad Institute
-*
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-*
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-*
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package net.sf.picard.vcf;
-
-import net.sf.samtools.util.CloseableIterator;
-import org.broadinstitute.variant.variantcontext.VariantContext;
-import org.broadinstitute.variant.vcf.VCFHeader;
-
-public interface VariantContextIterator extends CloseableIterator<VariantContext> {
-    public VCFHeader getHeader();
-}
diff --git a/src/java/net/sf/picard/vcf/VariantContextIteratorFactory.java b/src/java/net/sf/picard/vcf/VariantContextIteratorFactory.java
deleted file mode 100644
index a74f5f4..0000000
--- a/src/java/net/sf/picard/vcf/VariantContextIteratorFactory.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
-* Copyright (c) 2013 The Broad Institute
-*
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-*
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-*
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package net.sf.picard.vcf;
-
-import net.sf.picard.io.IoUtil;
-
-import java.io.File;
-import java.io.InputStream;
-
-/**
- * Creates an iterator for a VCF/BCF based on the filename
- */
-public class VariantContextIteratorFactory {
-    private VariantContextIteratorFactory() {}
-
-    public static VariantContextIterator create(final File location) {
-        final InputStream inputStream = IoUtil.openFileForReading(location);
-        // TODO: Both this and VariantContextWriterFactory base this on filename, in the future we may want to change this
-        if (location.getName().toLowerCase().endsWith(".bcf")) {
-            return new BcfIterator(inputStream);
-        } else {
-            return new VcfIterator(inputStream);
-        }
-    }
-}
-
diff --git a/src/java/net/sf/picard/vcf/VariantContextUtils.java b/src/java/net/sf/picard/vcf/VariantContextUtils.java
deleted file mode 100644
index 6b506ba..0000000
--- a/src/java/net/sf/picard/vcf/VariantContextUtils.java
+++ /dev/null
@@ -1,64 +0,0 @@
-package net.sf.picard.vcf;
-
-import net.sf.picard.PicardException;
-import net.sf.samtools.Defaults;
-import net.sf.samtools.SAMFileReader;
-import net.sf.samtools.SAMSequenceDictionary;
-import org.broadinstitute.variant.variantcontext.writer.Options;
-import org.broadinstitute.variant.variantcontext.writer.VariantContextWriter;
-import org.broadinstitute.variant.variantcontext.writer.VariantContextWriterFactory;
-
-import java.io.BufferedOutputStream;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.OutputStream;
-import java.util.EnumSet;
-import java.util.zip.GZIPOutputStream;
-
-public final class VariantContextUtils {
-
-	/**
-	 * Create a VariantContextWriter for the given output file. If the output file has a .gz extension
-	 * a GZIPOutputStream is used to compress the data on the fly, otherwise an "ordinary", non-
-	 * compressing VariantContextWriter is returned. If compressed, the returned ...Writer will create
-	 * .gz a file that conforms to (and can be decompressed by) ordinary gzip. No sequence dictionaries
-	 * are used.
-	 *
-	 * Default compression level for compressed files is 5: it seems to be a good tradeoff between
-	 * compression ratio and time.
-	 */
-	public static VariantContextWriter getConditionallyCompressingWriter(final File output, final SAMSequenceDictionary indexSequenceDictionary, final EnumSet<Options> options) {
-		return output.getName().endsWith(".gz")
-				? getCompressingWriter(output, indexSequenceDictionary, options)
-				: VariantContextWriterFactory.create(output, indexSequenceDictionary, options);
-	}
-
-	/**
-	 * Create a compressing VariantContextWriter for the given File, even if the extension on the File
-	 * is not .gz. The returned ...Writer will create a file that conforms to (and can be decompressed by)
-	 * ordinary gzip. No sequence dictionaries are used.
-	 *
-	 * Default compression level for compressed files is 5: it seems to be a good tradeoff between
-	 * compression ratio and time.
-	 */
-	public static VariantContextWriter getCompressingWriter(final File output, final SAMSequenceDictionary indexSequenceDictionary, final EnumSet<Options> options) {
-		try {
-			final GZIPOutputStream gzipOutputStream = new GZIPOutputStream(new FileOutputStream(output)) {{
-				def.setLevel(Defaults.COMPRESSION_LEVEL);
-			}};
-			final OutputStream outputStream = new BufferedOutputStream(gzipOutputStream);
-			return VariantContextWriterFactory.create(output, outputStream, indexSequenceDictionary, options);
-
-		} catch (final Exception e) {
-			throw new PicardException("Could not create a compressed output stream for the VCF writer: " + e.getMessage(), e);
-		}
-	}
-
-	/**
-	 * Returns the SAMSequenceDictionary from the provided FASTA.
-	 */
-	public static SAMSequenceDictionary getSequenceDictionary(final File dictionaryFile) {
-		final SAMFileReader samFileReader = new SAMFileReader(dictionaryFile);
-		return samFileReader.getFileHeader().getSequenceDictionary();
-	}
-}
diff --git a/src/java/net/sf/picard/vcf/VcfFormatConverter.java b/src/java/net/sf/picard/vcf/VcfFormatConverter.java
index d847792..e2fa253 100644
--- a/src/java/net/sf/picard/vcf/VcfFormatConverter.java
+++ b/src/java/net/sf/picard/vcf/VcfFormatConverter.java
@@ -25,6 +25,7 @@
 
 package net.sf.picard.vcf;
 
+import net.sf.picard.PicardException;
 import net.sf.picard.cmdline.CommandLineProgram;
 import net.sf.picard.cmdline.Option;
 import net.sf.picard.cmdline.StandardOptionDefinitions;
@@ -32,14 +33,21 @@ import net.sf.picard.cmdline.Usage;
 import net.sf.picard.io.IoUtil;
 import net.sf.picard.util.Log;
 import net.sf.picard.util.ProgressLogger;
+import net.sf.samtools.SAMSequenceDictionary;
+import net.sf.samtools.util.CloseableIterator;
 import net.sf.samtools.util.CloserUtil;
 import org.broadinstitute.variant.variantcontext.VariantContext;
+import org.broadinstitute.variant.variantcontext.writer.Options;
 import org.broadinstitute.variant.variantcontext.writer.VariantContextWriter;
 import org.broadinstitute.variant.variantcontext.writer.VariantContextWriterFactory;
+import org.broadinstitute.variant.vcf.VCFFileReader;
+import org.broadinstitute.variant.vcf.VCFHeader;
 
 import java.io.File;
+import java.util.EnumSet;
 
-/** Converts an ASCII VCF file to a binary BCF or vice versa
+/**
+ * Converts an ASCII VCF file to a binary BCF or vice versa.
  *
  * @author jgentry at broadinstitute.org
  */
@@ -48,16 +56,27 @@ public class VcfFormatConverter extends CommandLineProgram {
     public static final Log LOG = Log.getInstance(VcfFormatConverter.class);
     
     @Usage
-    public String USAGE = getStandardUsagePreamble() + "Convert a VCF file to a BCF file, or BCF to VCF.\n" + "" +
+    public String USAGE = getStandardUsagePreamble() +
+		    "Convert a VCF file to a BCF file, or BCF to VCF.\n" + "" +
             "Input and output formats are determined by file extension.";
 
-    @Option(doc="The BCF or VCF file to parse.", shortName= StandardOptionDefinitions.INPUT_SHORT_NAME) public File INPUT;
-    @Option(doc="The BCF or VCF output file. ", shortName=StandardOptionDefinitions.OUTPUT_SHORT_NAME) public File OUTPUT;
+    @Option(doc="The BCF or VCF input file. The file format is determined by file extension.", shortName= StandardOptionDefinitions.INPUT_SHORT_NAME)
+    public File INPUT;
+
+    @Option(doc="The BCF or VCF output file. The file format is determined by file extension.", shortName=StandardOptionDefinitions.OUTPUT_SHORT_NAME)
+    public File OUTPUT;
+
+	@Option(doc="Fail if an index is not available for the input VCF/BCF")
+	public Boolean REQUIRE_INDEX = true;
 
     public static void main(final String[] argv) {
         new VcfFormatConverter().instanceMainWithExit(argv);
     }
 
+	public VcfFormatConverter() {
+		this.CREATE_INDEX = true;
+	}
+
     @Override
     protected int doWork() {
         final ProgressLogger progress = new ProgressLogger(LOG, 10000);
@@ -65,19 +84,28 @@ public class VcfFormatConverter extends CommandLineProgram {
         IoUtil.assertFileIsReadable(INPUT);
         IoUtil.assertFileIsWritable(OUTPUT);
 
-        final VariantContextIterator readerIterator = VariantContextIteratorFactory.create(INPUT);
-        final VariantContextWriter writer = VariantContextWriterFactory.create(OUTPUT, null);
+	    final VCFFileReader reader = new VCFFileReader(INPUT, REQUIRE_INDEX);
+	    final VCFHeader header = new VCFHeader(reader.getFileHeader());
+	    final SAMSequenceDictionary sequenceDictionary = header.getSequenceDictionary();
+	    if (CREATE_INDEX && sequenceDictionary == null) {
+		    throw new PicardException("A sequence dictionary must be available in the input file when creating indexed output.");
+	    }
+	    final EnumSet<Options> options = CREATE_INDEX ? EnumSet.of(Options.INDEX_ON_THE_FLY) : EnumSet.noneOf(Options.class);
+        final VariantContextWriter writer = VariantContextWriterFactory.create(OUTPUT, sequenceDictionary, options);
 
-        writer.writeHeader(readerIterator.getHeader());
+        writer.writeHeader(header);
 
-        while (readerIterator.hasNext()) {
-            final VariantContext v = readerIterator.next();
-            writer.add(v);
-            progress.record(v.getChr(), v.getStart());
+	    final CloseableIterator<VariantContext> iterator = reader.iterator();
+	    while (iterator.hasNext()) {
+		    final VariantContext context = iterator.next();
+            writer.add(context);
+            progress.record(context.getChr(), context.getStart());
         }
 
-        CloserUtil.close(readerIterator);
-        CloserUtil.close(writer);
+	    CloserUtil.close(iterator);
+	    CloserUtil.close(reader);
+        writer.close();
+
         return 0;
     }
 }
diff --git a/src/java/net/sf/picard/vcf/VcfIterator.java b/src/java/net/sf/picard/vcf/VcfIterator.java
deleted file mode 100644
index ab43b82..0000000
--- a/src/java/net/sf/picard/vcf/VcfIterator.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
-* Copyright (c) 2013 The Broad Institute
-*
-* Permission is hereby granted, free of charge, to any person
-* obtaining a copy of this software and associated documentation
-* files (the "Software"), to deal in the Software without
-* restriction, including without limitation the rights to use,
-* copy, modify, merge, publish, distribute, sublicense, and/or sell
-* copies of the Software, and to permit persons to whom the
-* Software is furnished to do so, subject to the following
-* conditions:
-*
-* The above copyright notice and this permission notice shall be
-* included in all copies or substantial portions of the Software.
-*
-* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-
-package net.sf.picard.vcf;
-
-import net.sf.samtools.util.CloserUtil;
-import net.sf.samtools.util.RuntimeIOException;
-import org.broad.tribble.readers.AsciiLineReader;
-import org.broadinstitute.variant.variantcontext.VariantContext;
-import org.broadinstitute.variant.vcf.VCFCodec;
-import org.broadinstitute.variant.vcf.VCFHeader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.NoSuchElementException;
-
-public class VcfIterator implements VariantContextIterator {
-    private final VCFCodec vcfCodec = new VCFCodec();
-    private final VCFHeader vcfHeader;
-    private final AsciiLineReader reader;
-
-    private String line = null;
-
-    public VcfIterator(final InputStream vcfStream) {
-        this.reader = new AsciiLineReader(vcfStream);
-        final Object header = vcfCodec.readHeader(reader);
-        if (!(header instanceof VCFHeader)) {
-            throw new IllegalArgumentException("No VCF header found");
-        }
-        this.vcfHeader = (VCFHeader) header;
-    }
-
-    // TODO: Add a c'tor that reads intervals.
-
-    @Override
-    public void close() {
-        CloserUtil.close(reader);
-    }
-
-    public VCFHeader getHeader() {
-        return this.vcfHeader;
-    }
-
-    @Override
-    public boolean hasNext() {
-        try {
-            if (line == null) line = reader.readLine();
-        } catch (IOException e) {
-            throw new RuntimeIOException(e);
-        }
-        return line != null;
-    }
-
-    @Override
-    public VariantContext next() {
-        if ( ! this.hasNext()) throw new NoSuchElementException("Called next() on an exhausted VcfIterator");
-        final String tmp = line;
-        line = null;
-        return vcfCodec.decode(tmp);
-    }
-
-    /**
-     * Unsupported.
-     */
-    @Override
-    public void remove() {
-        throw new UnsupportedOperationException();
-    }
-
-
-}
diff --git a/src/java/net/sf/samtools/SAMFileReader.java b/src/java/net/sf/samtools/SAMFileReader.java
index f7c0ba6..4a16b9a 100644
--- a/src/java/net/sf/samtools/SAMFileReader.java
+++ b/src/java/net/sf/samtools/SAMFileReader.java
@@ -57,6 +57,16 @@ public class SAMFileReader implements Iterable<SAMRecord>, Closeable {
         SAMFileReader.defaultValidationStringency = defaultValidationStringency;
     }
 
+	/**
+	 * Returns the SAMSequenceDictionary from the provided FASTA.
+	 */
+	public static SAMSequenceDictionary getSequenceDictionary(final File dictionaryFile) {
+		final SAMFileReader samFileReader = new SAMFileReader(dictionaryFile);
+		final SAMSequenceDictionary dict = samFileReader.getFileHeader().getSequenceDictionary();
+		CloserUtil.close(samFileReader);
+		return dict;
+	}
+
     private boolean mIsBinary = false;
     private BAMIndex mIndex = null;
     private SAMRecordFactory samRecordFactory = new DefaultSAMRecordFactory();
@@ -524,7 +534,12 @@ public class SAMFileReader implements Iterable<SAMRecord>, Closeable {
     // Its too expensive to examine the remote file to determine type.
     // Rely on file extension.
     private boolean streamLooksLikeBam(SeekableStream strm) {
-        return strm.getSource() == null || strm.getSource().toLowerCase().endsWith(".bam");
+        String source = strm.getSource();
+        if(source == null) return true;
+        source = source.toLowerCase();
+        // Source will typically be a file path or URL.
+        // If it's a URL, we accept it when ".bam" is immediately followed by a query-string delimiter ('?', '&', or the URL-encoded '&', "%26").
+        return source.endsWith(".bam") || source.contains(".bam?") || source.contains(".bam&") || source.contains(".bam%26");
     }
 
     private void init(final InputStream stream, final File file, File indexFile, final boolean eagerDecode, final ValidationStringency validationStringency) {
diff --git a/src/java/net/sf/samtools/SAMRecord.java b/src/java/net/sf/samtools/SAMRecord.java
index 1d5807b..372b636 100644
--- a/src/java/net/sf/samtools/SAMRecord.java
+++ b/src/java/net/sf/samtools/SAMRecord.java
@@ -144,6 +144,7 @@ public class SAMRecord implements Cloneable
     private static final int NOT_PRIMARY_ALIGNMENT_FLAG = 0x100;
     private static final int READ_FAILS_VENDOR_QUALITY_CHECK_FLAG = 0x200;
     private static final int DUPLICATE_READ_FLAG = 0x400;
+    private static final int SUPPLEMENTARY_ALIGNMENT_FLAG = 0x800;
 
 
     private String mReadName = null;
@@ -323,7 +324,7 @@ public class SAMRecord implements Cloneable
             mReferenceIndex = NO_ALIGNMENT_REFERENCE_INDEX;
             return;
         } else if (mHeader != null) {
-            int referenceIndex = mHeader.getSequenceIndex(value);
+            final int referenceIndex = mHeader.getSequenceIndex(value);
             if (referenceIndex != -1) {
                 setReferenceIndex(referenceIndex);
                 return;
@@ -382,7 +383,7 @@ public class SAMRecord implements Cloneable
             mMateReferenceIndex = NO_ALIGNMENT_REFERENCE_INDEX;
             return;
         } else if (mHeader != null) {
-            int referenceIndex = mHeader.getSequenceIndex(mateReferenceName);
+            final int referenceIndex = mHeader.getSequenceIndex(mateReferenceName);
             if (referenceIndex != -1) {
                 setMateReferenceIndex(referenceIndex);
                 return;
@@ -428,14 +429,14 @@ public class SAMRecord implements Cloneable
     }
 
     /**
-     * @return 1-based inclusive leftmost position of the clippped sequence, or 0 if there is no position.
+     * @return 1-based inclusive leftmost position of the clipped sequence, or 0 if there is no position.
      */
     public int getAlignmentStart() {
         return mAlignmentStart;
     }
 
     /**
-     * @param value 1-based inclusive leftmost position of the clippped sequence, or 0 if there is no position.
+     * @param value 1-based inclusive leftmost position of the clipped sequence, or 0 if there is no position.
      */
     public void setAlignmentStart(final int value) {
         mAlignmentStart = value;
@@ -446,7 +447,7 @@ public class SAMRecord implements Cloneable
     }
 
     /**
-     * @return 1-based inclusive rightmost position of the clippped sequence, or 0 read if unmapped.
+     * @return 1-based inclusive rightmost position of the clipped sequence, or 0 read if unmapped.
      */
     public int getAlignmentEnd() {
         if (getReadUnmappedFlag()) {
@@ -508,7 +509,7 @@ public class SAMRecord implements Cloneable
     }
 
     /**
-     * @return 1-based inclusive reference position of the unclippped sequence at a given offset,
+     * @return 1-based inclusive reference position of the unclipped sequence at a given offset,
      *         or 0 if there is no position.
      *         For example, given the sequence NNNAAACCCGGG, cigar 3S9M, and an alignment start of 1,
      *         and a (1-based)offset 10 (start of GGG) it returns 7 (1-based offset starting after the soft clip.
@@ -542,7 +543,7 @@ public class SAMRecord implements Cloneable
     }
 
     /**
-     * @return 1-based inclusive leftmost position of the clippped mate sequence, or 0 if there is no position.
+     * @return 1-based inclusive leftmost position of the clipped mate sequence, or 0 if there is no position.
      */
     public int getMateAlignmentStart() {
         return mMateAlignmentStart;
@@ -651,7 +652,7 @@ public class SAMRecord implements Cloneable
     }
 
     /**
-     * It is preferrable to use the get*Flag() methods that handle the flag word symbolically.
+     * It is preferable to use the get*Flag() methods that handle the flag word symbolically.
      */
     public int getFlags() {
         return mFlags;
@@ -758,6 +759,13 @@ public class SAMRecord implements Cloneable
     }
 
     /**
+     * the alignment is supplementary (TODO: further explanation?).
+     */
+    public boolean getSupplementaryAlignmentFlag() {
+        return (mFlags & SUPPLEMENTARY_ALIGNMENT_FLAG) != 0;
+    }    
+    
+    /**
      * the read fails platform/vendor quality checks.
      */
     public boolean getReadFailsVendorQualityCheckFlag() {
@@ -846,6 +854,13 @@ public class SAMRecord implements Cloneable
     }
 
     /**
+     * the alignment is supplementary (TODO: further explanation?).
+     */
+    public void setSupplementaryAlignmentFlag(final boolean flag) {
+        setFlag(flag, SUPPLEMENTARY_ALIGNMENT_FLAG);
+    }
+
+    /**
      * the read fails platform/vendor quality checks.
      */
     public void setReadFailsVendorQualityCheckFlag(final boolean flag) {
@@ -859,6 +874,14 @@ public class SAMRecord implements Cloneable
         setFlag(flag, DUPLICATE_READ_FLAG);
     }
 
+    /**
+     * Tests if this record is a secondary and/or supplementary alignment;
+     * equivalent to {@code (getNotPrimaryAlignmentFlag() || getSupplementaryAlignmentFlag())}.
+     */
+    public boolean isSecondaryOrSupplementary() {
+        return getNotPrimaryAlignmentFlag() || getSupplementaryAlignmentFlag();
+    }
+    
     private void setFlag(final boolean flag, final int bit) {
         if (flag) {
             mFlags |= bit;
@@ -1134,7 +1157,7 @@ public class SAMRecord implements Cloneable
         setAttribute(tag, value, false);
     }
 
-    protected void setAttribute(final short tag, final Object value, boolean isUnsignedArray) {
+    protected void setAttribute(final short tag, final Object value, final boolean isUnsignedArray) {
         if (value != null &&
                 !(value instanceof Byte || value instanceof Short || value instanceof Integer ||
                 value instanceof String || value instanceof Character || value instanceof Float ||
@@ -1551,6 +1574,10 @@ public class SAMRecord implements Cloneable
                 if (ret == null) ret = new ArrayList<SAMValidationError>();
                 ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_FLAG_NOT_PRIM_ALIGNMENT, "Not primary alignment flag should not be set for unmapped read.", getReadName()));
             }
+            if (getSupplementaryAlignmentFlag()) {
+                if (ret == null) ret = new ArrayList<SAMValidationError>();
+                ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_FLAG_SUPPLEMENTARY_ALIGNMENT, "Supplementary alignment flag should not be set for unmapped read.", getReadName()));
+            }
             if (getMappingQuality() != 0) {
                 if (ret == null) ret = new ArrayList<SAMValidationError>();
                 ret.add(new SAMValidationError(SAMValidationError.Type.INVALID_MAPPING_QUALITY, "MAPQ should be 0 for unmapped read.", getReadName()));
@@ -1609,18 +1636,19 @@ public class SAMRecord implements Cloneable
             if (ret == null) ret = new ArrayList<SAMValidationError>();
             ret.addAll(errors);
         }
-        if (this.getReadLength() == 0 && !this.getNotPrimaryAlignmentFlag()) {
-            Object fz = getAttribute(SAMTagUtil.getSingleton().FZ);
+        // TODO(mccowan): Is this asking "is this the primary alignment"?
+        if (this.getReadLength() == 0 && !this.getNotPrimaryAlignmentFlag()) {  
+            final Object fz = getAttribute(SAMTagUtil.getSingleton().FZ);
             if (fz == null) {
-                String cq = (String)getAttribute(SAMTagUtil.getSingleton().CQ);
-                String cs = (String)getAttribute(SAMTagUtil.getSingleton().CS);
+                final String cq = (String)getAttribute(SAMTagUtil.getSingleton().CQ);
+                final String cs = (String)getAttribute(SAMTagUtil.getSingleton().CS);
                 if (cq == null || cq.length() == 0 || cs == null || cs.length() == 0) {
                     if (ret == null) ret = new ArrayList<SAMValidationError>();
                     ret.add(new SAMValidationError(SAMValidationError.Type.EMPTY_READ,
                             "Zero-length read without FZ, CS or CQ tag", getReadName()));
                 } else if (!getReadUnmappedFlag()) {
                     boolean hasIndel = false;
-                    for (CigarElement cigarElement : getCigar().getCigarElements()) {
+                    for (final CigarElement cigarElement : getCigar().getCigarElements()) {
                         if (cigarElement.getOperator() == CigarOperator.DELETION ||
                                 cigarElement.getOperator() == CigarOperator.INSERTION) {
                             hasIndel = true;
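
For the new 0x800 flag accessors, a short usage sketch; it assumes the usual public SAMRecord(SAMFileHeader) constructor and an invented read name:

    import net.sf.samtools.SAMFileHeader;
    import net.sf.samtools.SAMRecord;

    public class SupplementaryFlagDemo {
        public static void main(final String[] args) {
            final SAMRecord rec = new SAMRecord(new SAMFileHeader());
            rec.setReadName("read1");                                  // hypothetical read name
            rec.setSupplementaryAlignmentFlag(true);                   // sets bit 0x800
            System.out.println(rec.getSupplementaryAlignmentFlag());   // true
            System.out.println(rec.isSecondaryOrSupplementary());      // true: secondary OR supplementary
            System.out.println(Integer.toHexString(rec.getFlags()));   // 800
        }
    }
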
diff --git a/src/java/net/sf/samtools/SAMRecordQueryNameComparator.java b/src/java/net/sf/samtools/SAMRecordQueryNameComparator.java
index a517dd6..bcf7a3e 100644
--- a/src/java/net/sf/samtools/SAMRecordQueryNameComparator.java
+++ b/src/java/net/sf/samtools/SAMRecordQueryNameComparator.java
@@ -50,6 +50,9 @@ public class SAMRecordQueryNameComparator implements SAMRecordComparator {
         if (samRecord1.getNotPrimaryAlignmentFlag() != samRecord2.getNotPrimaryAlignmentFlag()) {
             return samRecord2.getNotPrimaryAlignmentFlag()? -1: 1;
         }
+        if (samRecord1.getSupplementaryAlignmentFlag() != samRecord2.getSupplementaryAlignmentFlag()) {
+            return samRecord2.getSupplementaryAlignmentFlag() ? -1 : 1;
+        }
         final Integer hitIndex1 = samRecord1.getIntegerAttribute(SAMTag.HI.name());
         final Integer hitIndex2 = samRecord2.getIntegerAttribute(SAMTag.HI.name());
         if (hitIndex1 != null) {
diff --git a/src/java/net/sf/samtools/SAMSequenceRecord.java b/src/java/net/sf/samtools/SAMSequenceRecord.java
index 843766c..629a70a 100644
--- a/src/java/net/sf/samtools/SAMSequenceRecord.java
+++ b/src/java/net/sf/samtools/SAMSequenceRecord.java
@@ -115,7 +115,7 @@ public class SAMSequenceRecord extends AbstractSAMHeaderRecord implements Clonea
     public int getSequenceIndex() { return mSequenceIndex; }
 
     // Private state used only by SAM implementation.
-    void setSequenceIndex(final int value) { mSequenceIndex = value; }
+    public void setSequenceIndex(final int value) { mSequenceIndex = value; }
 
     /**
      * Looser comparison than equals().  We look only at sequence index, sequence length, and MD5 tag value
diff --git a/src/java/net/sf/samtools/SAMValidationError.java b/src/java/net/sf/samtools/SAMValidationError.java
index 53290de..0034766 100644
--- a/src/java/net/sf/samtools/SAMValidationError.java
+++ b/src/java/net/sf/samtools/SAMValidationError.java
@@ -65,6 +65,9 @@ public class SAMValidationError {
         /** not primary alignment flag set for unmapped read */
         INVALID_FLAG_NOT_PRIM_ALIGNMENT,
 
+        /** supplementary alignment flag set for unmapped read */
+        INVALID_FLAG_SUPPLEMENTARY_ALIGNMENT,
+        
         /** mapped read flag not set for mapped read */
         INVALID_FLAG_READ_UNMAPPED,
 
diff --git a/src/java/net/sf/samtools/SecondaryOrSupplementarySkippingIterator.java b/src/java/net/sf/samtools/SecondaryOrSupplementarySkippingIterator.java
new file mode 100644
index 0000000..f5899f6
--- /dev/null
+++ b/src/java/net/sf/samtools/SecondaryOrSupplementarySkippingIterator.java
@@ -0,0 +1,41 @@
+package net.sf.samtools;
+
+import net.sf.samtools.util.CloseableIterator;
+import net.sf.samtools.util.PeekIterator;
+
+/**
+ * Wrapper around SAMRecord iterator that skips over secondary and supplementary elements.
+ * This iterator conflates a filtering iterator and a peekable iterator.  It would be cleaner to
+ * handle those concerns separately. This class should be viewed as a replacement for NotPrimarySkippingIterator;
+ * we chose not to change NotPrimarySkippingIterator itself so that its behavior continues to match its name.
+ */
+public class SecondaryOrSupplementarySkippingIterator {
+    private final PeekIterator<SAMRecord> it;
+
+    public SecondaryOrSupplementarySkippingIterator(final CloseableIterator<SAMRecord> underlyingIt) {
+        it = new PeekIterator<SAMRecord>(underlyingIt);
+        skipAnyNotprimary();
+    }
+
+    public boolean hasCurrent() {
+        return it.hasNext();
+    }
+
+    public SAMRecord getCurrent() {
+        assert(hasCurrent());
+        return it.peek();
+    }
+
+    public boolean advance() {
+        it.next();
+        skipAnyNotprimary();
+        return hasCurrent();
+    }
+
+    private void skipAnyNotprimary() {
+        while (it.hasNext() && it.peek().isSecondaryOrSupplementary()) {
+            it.next();
+        }
+    }
+
+}
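
Typical use of the new skipping iterator is to wrap the CloseableIterator<SAMRecord> returned by SAMFileReader.iterator(); "input.bam" below is a placeholder path:

    import java.io.File;
    import net.sf.samtools.SAMFileReader;
    import net.sf.samtools.SAMRecord;
    import net.sf.samtools.SecondaryOrSupplementarySkippingIterator;

    public class PrimaryOnlyDemo {
        public static void main(final String[] args) {
            final SAMFileReader reader = new SAMFileReader(new File("input.bam"));   // placeholder path
            final SecondaryOrSupplementarySkippingIterator it =
                    new SecondaryOrSupplementarySkippingIterator(reader.iterator());
            // Note the hasCurrent()/getCurrent()/advance() protocol instead of hasNext()/next().
            while (it.hasCurrent()) {
                final SAMRecord rec = it.getCurrent();
                System.out.println(rec.getReadName());   // only primary, non-supplementary records appear here
                it.advance();
            }
            reader.close();
        }
    }
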
diff --git a/src/java/net/sf/samtools/seekablestream/SeekableStreamFactory.java b/src/java/net/sf/samtools/seekablestream/SeekableStreamFactory.java
index 395e317..fc44448 100644
--- a/src/java/net/sf/samtools/seekablestream/SeekableStreamFactory.java
+++ b/src/java/net/sf/samtools/seekablestream/SeekableStreamFactory.java
@@ -37,6 +37,15 @@ public class SeekableStreamFactory {
         return getStreamFor(url.toExternalForm());
     }
 
+    /**
+     * Does this path point to a regular file on disk and not something like a URL?
+     * @param path the path to test
+     * @return true if the path is to a file on disk
+     */
+    public static boolean isFilePath(final String path) {
+        return ! ( path.startsWith("http:") || path.startsWith("https:") || path.startsWith("ftp:") );
+    }
+
     public static SeekableStream getStreamFor(String path) throws IOException {
         // todo -- add support for SeekableBlockInputStream
 
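
The new helper is a simple prefix test; for example (paths invented):

    import net.sf.samtools.seekablestream.SeekableStreamFactory;

    public class IsFilePathDemo {
        public static void main(final String[] args) {
            System.out.println(SeekableStreamFactory.isFilePath("/data/reads.bam"));        // true
            System.out.println(SeekableStreamFactory.isFilePath("http://host/reads.bam"));  // false
            System.out.println(SeekableStreamFactory.isFilePath("ftp://host/reads.bam"));   // false
        }
    }
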
diff --git a/src/java/net/sf/picard/util/AbstractIterator.java b/src/java/net/sf/samtools/util/AbstractIterator.java
similarity index 85%
rename from src/java/net/sf/picard/util/AbstractIterator.java
rename to src/java/net/sf/samtools/util/AbstractIterator.java
index d068bfa..7c3b741 100644
--- a/src/java/net/sf/picard/util/AbstractIterator.java
+++ b/src/java/net/sf/samtools/util/AbstractIterator.java
@@ -22,7 +22,7 @@
  * THE SOFTWARE.
  */
 
-package net.sf.picard.util;
+package net.sf.samtools.util;
 
 import java.util.Iterator;
 import java.util.NoSuchElementException;
@@ -34,13 +34,13 @@ import java.util.NoSuchElementException;
  * @author Doug Voet (dvoet at broadinstitute dot org)
  */
 public abstract class AbstractIterator<E> implements Iterator<E> {
-    private E next;
+    protected E next;
     private boolean iterating = false;
 
     @Override
     public boolean hasNext() {
         // If this is the start of iteration, queue up the first item
-        if(!iterating) {
+        if (!iterating) {
             next = advance();
             iterating = true;
         }
@@ -52,7 +52,7 @@ public abstract class AbstractIterator<E> implements Iterator<E> {
         if (!hasNext()) {
             throw new NoSuchElementException();
         }
-        
+
         E ret = next;
         next = advance();
         return ret;
@@ -69,6 +69,14 @@ public abstract class AbstractIterator<E> implements Iterator<E> {
     protected abstract E advance();
 
     /**
+     * Returns the next element in the iterator, if one exists.  Otherwise, returns null.  Invoking this method does not advance the iterator.
+     * @return The next element in the iterator, without advancing, or, if no other element exists, null.
+     */
+    public E peek() {
+        return next;
+    }
+
+    /**
      * @return true after the first time hasNext() or next() have been called
      */
     protected boolean isIterating() {
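
A minimal sketch of an AbstractIterator subclass under the relocated package, showing the advance()-returns-null-at-end contract and the new peek():

    import net.sf.samtools.util.AbstractIterator;

    /** Toy iterator over the integers 0..4; advance() returns null to signal exhaustion. */
    public class CountingIterator extends AbstractIterator<Integer> {
        private int i = 0;

        @Override
        protected Integer advance() {
            if (i < 5) return i++;
            return null;   // null ends iteration
        }

        public static void main(final String[] args) {
            final CountingIterator it = new CountingIterator();
            while (it.hasNext()) {
                // peek() shows the queued element without advancing; next() returns it and advances.
                System.out.println("peek=" + it.peek() + " next=" + it.next());
            }
        }
    }
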
diff --git a/src/java/net/sf/samtools/util/BinaryCodec.java b/src/java/net/sf/samtools/util/BinaryCodec.java
index 737b7a5..84b0a9c 100644
--- a/src/java/net/sf/samtools/util/BinaryCodec.java
+++ b/src/java/net/sf/samtools/util/BinaryCodec.java
@@ -370,7 +370,8 @@ public class BinaryCodec {
         do {
             final int numRead = readBytesOrFewer(buffer, offset + totalNumRead, length - totalNumRead);
             if (numRead < 0) {
-                throw new RuntimeEOFException(constructErrorMessage("Premature EOF"));
+                String msg = String.format("Premature EOF. Expected %d but only received %d", length, totalNumRead);
+                throw new RuntimeEOFException(constructErrorMessage(msg));
             } else {
                 totalNumRead += numRead;
             }
diff --git a/src/java/net/sf/samtools/util/BufferedLineReader.java b/src/java/net/sf/samtools/util/BufferedLineReader.java
index 8a7a7f9..07ab61f 100644
--- a/src/java/net/sf/samtools/util/BufferedLineReader.java
+++ b/src/java/net/sf/samtools/util/BufferedLineReader.java
@@ -34,6 +34,8 @@ import java.nio.charset.Charset;
  * than AsciiLineReaderImpl.  If you use AsciiLineReader rather than this class, it will detect the OS
  * and delegate to the preferred implementation.
  *
+ * TODO: Replace this with {@link java.io.LineNumberReader}?
+ * 
  * @author alecw at broadinstitute.org
  */
 public class BufferedLineReader implements LineReader {
diff --git a/src/java/net/sf/picard/util/CollectionUtil.java b/src/java/net/sf/samtools/util/CollectionUtil.java
similarity index 94%
rename from src/java/net/sf/picard/util/CollectionUtil.java
rename to src/java/net/sf/samtools/util/CollectionUtil.java
index 7fb2dc6..e8a4760 100755
--- a/src/java/net/sf/picard/util/CollectionUtil.java
+++ b/src/java/net/sf/samtools/util/CollectionUtil.java
@@ -21,9 +21,7 @@
  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
  * THE SOFTWARE.
  */
-package net.sf.picard.util;
-
-import net.sf.picard.PicardException;
+package net.sf.samtools.util;
 
 import java.util.*;
 
@@ -67,7 +65,7 @@ public class CollectionUtil {
 
     public static <T> T getSoleElement(final Collection<T> items) {
         if (items.size() != 1)
-            throw new PicardException(String.format("Expected a single element in %s, but found %s.", items, items.size()));
+            throw new IllegalArgumentException(String.format("Expected a single element in %s, but found %s.", items, items.size()));
         return items.iterator().next();
     }
     
@@ -161,12 +159,4 @@ public class CollectionUtil {
         }
     }
 
-    public static class Tuple<A, B> {
-        public final A a;
-        public final B b;
-        public Tuple(final A a, final B b) {
-            this.a = a;
-            this.b = b;
-        }
-    }
 }
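
getSoleElement() now lives in net.sf.samtools.util and throws a plain IllegalArgumentException rather than a PicardException; for example:

    import java.util.Arrays;
    import java.util.Collections;
    import net.sf.samtools.util.CollectionUtil;

    public class SoleElementDemo {
        public static void main(final String[] args) {
            System.out.println(CollectionUtil.getSoleElement(Collections.singletonList("only")));  // "only"
            try {
                CollectionUtil.getSoleElement(Arrays.asList("a", "b"));
            } catch (final IllegalArgumentException e) {
                System.out.println("rejected: " + e.getMessage());   // no longer a PicardException
            }
        }
    }
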
diff --git a/src/java/net/sf/samtools/util/Tuple.java b/src/java/net/sf/samtools/util/Tuple.java
new file mode 100644
index 0000000..98ce0cd
--- /dev/null
+++ b/src/java/net/sf/samtools/util/Tuple.java
@@ -0,0 +1,16 @@
+package net.sf.samtools.util;
+
+/**
+ * A simple tuple class.
+ *
+ * @author mccowan
+ */
+public class Tuple<A, B> {
+    public final A a;
+    public final B b;
+
+    public Tuple(final A a, final B b) {
+        this.a = a;
+        this.b = b;
+    }
+}
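
Tuple has moved out of CollectionUtil into its own top-level class; usage is unchanged (the lane/barcode pairing below is just an example):

    import net.sf.samtools.util.Tuple;

    public class TupleDemo {
        public static void main(final String[] args) {
            final Tuple<Integer, String> laneAndBarcode = new Tuple<Integer, String>(3, "ACGTACGT");
            System.out.println(laneAndBarcode.a + " -> " + laneAndBarcode.b);   // 3 -> ACGTACGT
        }
    }
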
diff --git a/src/java/org/broad/tribble/AbstractFeatureCodec.java b/src/java/org/broad/tribble/AbstractFeatureCodec.java
index 77b9fb5..cfc11d0 100644
--- a/src/java/org/broad/tribble/AbstractFeatureCodec.java
+++ b/src/java/org/broad/tribble/AbstractFeatureCodec.java
@@ -23,27 +23,25 @@
  */
 package org.broad.tribble;
 
-import org.broad.tribble.readers.PositionalBufferedStream;
-
 import java.io.IOException;
 
 /**
  * Simple basic class providing much of the basic functionality of codecs
  */
-public abstract class AbstractFeatureCodec<T extends Feature> implements FeatureCodec {
-    Class<T> myClass;
+public abstract class AbstractFeatureCodec<FEATURE_TYPE extends Feature, SOURCE> implements FeatureCodec<FEATURE_TYPE, SOURCE> {
+    private final Class<FEATURE_TYPE> myClass;
 
-    protected AbstractFeatureCodec(final Class<T> myClass) {
+    protected AbstractFeatureCodec(final Class<FEATURE_TYPE> myClass) {
         this.myClass = myClass;
     }
-
+    
     @Override
-    public Feature decodeLoc(final PositionalBufferedStream stream) throws IOException {
-        return decode(stream);
+    public Feature decodeLoc(final SOURCE source) throws IOException {
+        return decode(source);
     }
 
     @Override
-    public Class<T> getFeatureType() {
+    public Class<FEATURE_TYPE> getFeatureType() {
         return myClass;
     }
 
diff --git a/src/java/org/broad/tribble/AbstractFeatureReader.java b/src/java/org/broad/tribble/AbstractFeatureReader.java
index 195d180..2856578 100644
--- a/src/java/org/broad/tribble/AbstractFeatureReader.java
+++ b/src/java/org/broad/tribble/AbstractFeatureReader.java
@@ -29,7 +29,7 @@ import java.util.Iterator;
  * <p/>
  * the feature reader class, which uses indices and codecs to read in Tribble file formats.
  */
-public abstract class AbstractFeatureReader<T extends Feature> implements FeatureReader<T> {
+public abstract class AbstractFeatureReader<T extends Feature, SOURCE> implements FeatureReader<T> {
     // the logging destination for this source
     //private final static Logger log = Logger.getLogger("BasicFeatureSource");
 
@@ -38,13 +38,11 @@ public abstract class AbstractFeatureReader<T extends Feature> implements Featur
 
     // the query source, codec, and header
     // protected final QuerySource querySource;
-    protected final FeatureCodec codec;
+    protected final FeatureCodec<T, SOURCE> codec;
     protected FeatureCodecHeader header;
 
-    // A hook for the future, when we might allow clients to specify this.
-
-
-    public static final AbstractFeatureReader getFeatureReader(String featureFile, FeatureCodec codec) throws TribbleException {
+    /** Convenience overload which defaults to requiring an index. */
+    public static <FEATURE extends Feature, SOURCE> AbstractFeatureReader<FEATURE, SOURCE> getFeatureReader(final String featureFile, final FeatureCodec<FEATURE, SOURCE> codec) throws TribbleException {
         return getFeatureReader(featureFile, codec, true);
     }
 
@@ -54,19 +52,18 @@ public abstract class AbstractFeatureReader<T extends Feature> implements Featur
      * @param featureResource the feature file to create from
      * @param codec           the codec to read features with
      */
-    public static final AbstractFeatureReader getFeatureReader(String featureResource, FeatureCodec codec, boolean requireIndex) throws TribbleException {
+    public static <FEATURE extends Feature, SOURCE> AbstractFeatureReader<FEATURE, SOURCE> getFeatureReader(final String featureResource, final FeatureCodec<FEATURE, SOURCE> codec, final boolean requireIndex) throws TribbleException {
 
         try {
             // Test for tabix index
-            if (featureResource.endsWith(".gz") &&
-                    ParsingUtils.resourceExists(featureResource + ".tbi")) {
+            if (featureResource.endsWith(".gz") && ParsingUtils.resourceExists(featureResource + ".tbi")) {
                 if ( ! (codec instanceof AsciiFeatureCodec) )
                     throw new TribbleException("Tabix indexed files only work with ASCII codecs, but received non-Ascii codec " + codec.getClass().getSimpleName());
-                return new TabixFeatureReader(featureResource, (AsciiFeatureCodec)codec);
+                return new TabixFeatureReader<FEATURE, SOURCE>(featureResource, (AsciiFeatureCodec) codec);
             }
             // Not tabix => tribble index file (might be gzipped, but not block gzipped)
             else {
-                return new TribbleIndexedFeatureReader(featureResource, codec, requireIndex);
+                return new TribbleIndexedFeatureReader<FEATURE, SOURCE>(featureResource, codec, requireIndex);
             }
         } catch (IOException e) {
             throw new TribbleException.MalformedFeatureFile("Unable to create BasicFeatureReader using feature file ", featureResource, e);
@@ -85,16 +82,16 @@ public abstract class AbstractFeatureReader<T extends Feature> implements Featur
      * @return a reader for this data
      * @throws TribbleException
      */
-    public static final AbstractFeatureReader getFeatureReader(String featureResource, FeatureCodec codec, Index index) throws TribbleException {
+    public static <FEATURE extends Feature, SOURCE> AbstractFeatureReader<FEATURE, SOURCE> getFeatureReader(final String featureResource, final FeatureCodec<FEATURE, SOURCE>  codec, final Index index) throws TribbleException {
         try {
-            return new TribbleIndexedFeatureReader(featureResource, codec, index);
+            return new TribbleIndexedFeatureReader<FEATURE, SOURCE>(featureResource, codec, index);
         } catch (IOException e) {
             throw new TribbleException.MalformedFeatureFile("Unable to create AbstractFeatureReader using feature file ", featureResource, e);
         }
 
     }
 
-    protected AbstractFeatureReader(String path, FeatureCodec codec) {
+    protected AbstractFeatureReader(final String path, final FeatureCodec<T, SOURCE> codec) {
         this.path = path;
         this.codec = codec;
     }
@@ -109,10 +106,10 @@ public abstract class AbstractFeatureReader<T extends Feature> implements Featur
         return header.getHeaderValue();
     }
 
-    static class EmptyIterator<T extends Feature> implements CloseableTribbleIterator {
+    static class EmptyIterator<T extends Feature> implements CloseableTribbleIterator<T> {
         public Iterator iterator() { return this; }
         public boolean hasNext() { return false; }
-        public Object next() { return null; }
+        public T next() { return null; }
         public void remove() { }
         @Override public void close() { }
     }
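
With the factory methods now generic, callers get a typed reader back. A sketch using the stock org.broad.tribble.bed.BEDCodec (assumed available in this Tribble build) against a placeholder file, with requireIndex=false so no .idx is needed:

    import java.io.IOException;
    import org.broad.tribble.AbstractFeatureReader;
    import org.broad.tribble.CloseableTribbleIterator;
    import org.broad.tribble.bed.BEDCodec;
    import org.broad.tribble.bed.BEDFeature;
    import org.broad.tribble.readers.LineIterator;

    public class FeatureReaderDemo {
        public static void main(final String[] args) throws IOException {
            final AbstractFeatureReader<BEDFeature, LineIterator> reader =
                    AbstractFeatureReader.getFeatureReader("features.bed", new BEDCodec(), false);   // placeholder path
            final CloseableTribbleIterator<BEDFeature> it = reader.iterator();
            while (it.hasNext()) {
                final BEDFeature f = it.next();
                System.out.println(f.getChr() + ":" + f.getStart() + "-" + f.getEnd());
            }
            it.close();
            reader.close();
        }
    }
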
diff --git a/src/java/org/broad/tribble/AsciiFeatureCodec.java b/src/java/org/broad/tribble/AsciiFeatureCodec.java
index 0844782..8554997 100644
--- a/src/java/org/broad/tribble/AsciiFeatureCodec.java
+++ b/src/java/org/broad/tribble/AsciiFeatureCodec.java
@@ -18,110 +18,74 @@
 
 package org.broad.tribble;
 
-import org.broad.tribble.readers.AsciiLineReader;
-import org.broad.tribble.readers.LineReader;
-import org.broad.tribble.readers.PositionalBufferedStream;
-import org.broad.tribble.util.ParsingUtils;
+import net.sf.samtools.util.CloserUtil;
+import org.broad.tribble.readers.*;
 
 import java.io.IOException;
-import java.util.regex.Pattern;
+import java.io.InputStream;
 
 /**
  * A convenience base class for codecs that want to read in features from ASCII lines.
- *
+ * <p/>
  * This class overrides the general decode locs for streams and presents instead
  * Strings to decode(String) and readHeader(LineReader) functions.
  *
  * @param <T> The feature type this codec reads
  */
-public abstract class AsciiFeatureCodec<T extends Feature> extends AbstractFeatureCodec<T> {
-    /** A cached line reader we will use for decode and decodeLoc() */
-    private final AsciiLineReader lineReader = new AsciiLineReader();
-
-    /**
-     * regex used to identify what separates fields
-     */
-    protected Pattern splitPattern = Pattern.compile("\\t");
-
+public abstract class AsciiFeatureCodec<T extends Feature> extends AbstractFeatureCodec<T, LineIterator> {
     protected AsciiFeatureCodec(final Class<T> myClass) {
         super(myClass);
     }
-
+    
     @Override
-    public Feature decodeLoc(final PositionalBufferedStream stream) throws IOException {
-        String line = readLine(stream);
-        try{
-            return decodeLoc(line);
-        }catch (RuntimeException e){
-            String msg = "\nLine: " + line;
-            throw new RuntimeException(msg, e);
-        }
+    public void close(final LineIterator lineIterator) {
+        CloserUtil.close(lineIterator);
     }
 
     @Override
-    public T decode(final PositionalBufferedStream stream) throws IOException {
-        String line = readLine(stream);
-        try{
-            return decode(line);
-        }catch (RuntimeException e){
-            String msg = "\nLine: " + line;
-            throw new RuntimeException(msg, e);
-        }
+    public boolean isDone(final LineIterator lineIterator) {
+        return !lineIterator.hasNext();
     }
 
     @Override
-    public FeatureCodecHeader readHeader(final PositionalBufferedStream stream) throws IOException {
-        final AsciiLineReader br = new AsciiLineReader(stream);
-        // TODO -- track header end here
-        return new FeatureCodecHeader(readHeader(br), FeatureCodecHeader.NO_HEADER_END);
+    public LocationAware makeIndexableSourceFromStream(final InputStream bufferedInputStream) {
+        final PositionalBufferedStream pbs;
+        if (bufferedInputStream instanceof PositionalBufferedStream) {
+            pbs = (PositionalBufferedStream) bufferedInputStream;
+        } else {
+            pbs = new PositionalBufferedStream(bufferedInputStream);
+        }
+        return new AsciiLineReaderIterator(new AsciiLineReader(pbs));
     }
 
-    private final String readLine(final PositionalBufferedStream stream) throws IOException {
-        return lineReader.readLine(stream);
+    @Override
+    public LineIterator makeSourceFromStream(final InputStream bufferedInputStream) {
+        return new LineIteratorImpl(LineReaderUtil.fromBufferedStream(bufferedInputStream));
     }
 
-    /**
-     * Decode a line to obtain just its FeatureLoc for indexing -- contig, start, and stop.
-     *
-     * @param line the input line to decode
-     * @return  Return the FeatureLoc encoded by the line, or null if the line does not represent a feature (e.g. is
-     * a comment)
+    /** 
+     * Convenience method.  Decoding in ASCII files operates line-by-line, so we obviate the need to call 
+     * {@link org.broad.tribble.readers.LineIterator#next()} in implementing classes and, instead, have them implement
+     * {@link AsciiFeatureCodec#decode(String)}.
      */
-    public Feature decodeLoc(String line) {
-        return decode(line);
+    @Override
+    public T decode(final LineIterator lineIterator) {
+        return decode(lineIterator.next());
     }
 
-    /**
-     * Decode a set of tokens as a Feature.
-     * For backwards compatibility, the
-     * default implementation joins by tabs, and calls {@link #decode(String)}.
-     *
-     * It is recommended that you override {@link #decode(String[])}
-     * as well as {@link #decode(String)}
-     * @param tokens
-     * @return
-     */
-    public T decode(String[] tokens){
-        String line = ParsingUtils.join("\t", tokens);
-        return decode(line);
-    }
+    /** @see {@link AsciiFeatureCodec#decode(org.broad.tribble.readers.LineIterator)} */
+    public abstract T decode(String s);
 
-    /**
-     * Decode a line as a Feature.
-     *
-     * @param line the input line to decode
-     * @return  Return the Feature encoded by the line,  or null if the line does not represent a feature (e.g. is
-     * a comment)
-     */
-    abstract public T decode(String line);
+    @Override
+    public FeatureCodecHeader readHeader(final LineIterator lineIterator) throws IOException {
+        // TODO: Track header end here, rather than assuming there isn't one.
+        return new FeatureCodecHeader(readActualHeader(lineIterator), FeatureCodecHeader.NO_HEADER_END);
+    }
 
     /**
      * Read and return the header, or null if there is no header.
      *
      * @return the actual header data in the file, or null if none is available
      */
-    public Object readHeader(LineReader reader) {
-        return null;
-    }
-
+    abstract public Object readActualHeader(final LineIterator reader);
 }
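
A minimal sketch of a codec written against the reworked base class: the subclass only implements decode(String), readActualHeader(LineIterator), and canDecode(String). The three-column ".intervals" format here is invented for illustration.

    import org.broad.tribble.AsciiFeatureCodec;
    import org.broad.tribble.Feature;
    import org.broad.tribble.readers.LineIterator;

    /** Toy codec for a tab-delimited "chrom<TAB>start<TAB>end" format (hypothetical). */
    public class SimpleIntervalCodec extends AsciiFeatureCodec<SimpleInterval> {

        public SimpleIntervalCodec() {
            super(SimpleInterval.class);
        }

        /** Called with one line at a time by the inherited decode(LineIterator). */
        @Override
        public SimpleInterval decode(final String line) {
            if (line.startsWith("#")) return null;   // not a feature
            final String[] fields = line.split("\t");
            return new SimpleInterval(fields[0], Integer.parseInt(fields[1]), Integer.parseInt(fields[2]));
        }

        /** This toy format has no header: consume nothing, return null. */
        @Override
        public Object readActualHeader(final LineIterator reader) {
            return null;
        }

        @Override
        public boolean canDecode(final String path) {
            return path.toLowerCase().endsWith(".intervals");   // invented extension
        }
    }

    /** The feature type produced by the toy codec above. */
    class SimpleInterval implements Feature {
        private final String chr;
        private final int start;
        private final int end;

        SimpleInterval(final String chr, final int start, final int end) {
            this.chr = chr;
            this.start = start;
            this.end = end;
        }

        public String getChr()   { return chr; }
        public int    getStart() { return start; }
        public int    getEnd()   { return end; }
    }
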
diff --git a/src/java/org/broad/tribble/BinaryFeatureCodec.java b/src/java/org/broad/tribble/BinaryFeatureCodec.java
new file mode 100644
index 0000000..e28ccd9
--- /dev/null
+++ b/src/java/org/broad/tribble/BinaryFeatureCodec.java
@@ -0,0 +1,42 @@
+package org.broad.tribble;
+
+import net.sf.samtools.util.CloserUtil;
+import org.broad.tribble.readers.LocationAware;
+import org.broad.tribble.readers.PositionalBufferedStream;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+/**
+ * Implements common methods of {@link FeatureCodec}s that read from {@link PositionalBufferedStream}s.
+ * @author mccowan
+ */
+abstract public class BinaryFeatureCodec<T extends Feature> implements FeatureCodec<T, PositionalBufferedStream> {
+    @Override
+    public PositionalBufferedStream makeSourceFromStream(final InputStream bufferedInputStream) {
+        if (bufferedInputStream instanceof PositionalBufferedStream)
+            return (PositionalBufferedStream) bufferedInputStream;
+        else
+            return new PositionalBufferedStream(bufferedInputStream);
+    }
+
+    /** {@link PositionalBufferedStream} is already {@link LocationAware}. */
+    @Override
+    public LocationAware makeIndexableSourceFromStream(final InputStream bufferedInputStream) {
+        return makeSourceFromStream(bufferedInputStream);
+    }
+
+    @Override
+    public void close(final PositionalBufferedStream source) {
+        CloserUtil.close(source);
+    }
+
+    @Override
+    public boolean isDone(final PositionalBufferedStream source) {
+        try {
+            return source.isDone();
+        } catch (IOException e) {
+            throw new RuntimeException("Failure reading from stream.", e);
+        }
+    }
+}
diff --git a/src/java/org/broad/tribble/CloseableTribbleIterator.java b/src/java/org/broad/tribble/CloseableTribbleIterator.java
index ece4df6..70b1e0c 100644
--- a/src/java/org/broad/tribble/CloseableTribbleIterator.java
+++ b/src/java/org/broad/tribble/CloseableTribbleIterator.java
@@ -18,14 +18,10 @@
 
 package org.broad.tribble;
 
-import org.broad.tribble.Feature;
-
-import java.util.Iterator;
+import net.sf.samtools.util.CloseableIterator;
 
 /**
  * The basic iterator we use in Tribble, which allows closing and basic iteration.
  * @param <T> the feature type
  */
-public interface CloseableTribbleIterator<T extends Feature> extends Iterator<T>, Iterable<T> {
-    public void close();
-}
+public interface CloseableTribbleIterator<T extends Feature> extends CloseableIterator<T>, Iterable<T> {}
diff --git a/src/java/org/broad/tribble/FeatureCodec.java b/src/java/org/broad/tribble/FeatureCodec.java
index ff9bcd3..8b277b0 100644
--- a/src/java/org/broad/tribble/FeatureCodec.java
+++ b/src/java/org/broad/tribble/FeatureCodec.java
@@ -18,81 +18,94 @@
 
 package org.broad.tribble;
 
-import org.broad.tribble.readers.PositionalBufferedStream;
+import org.broad.tribble.readers.LocationAware;
 
 import java.io.IOException;
+import java.io.InputStream;
 
 /**
- * the base interface for classes that read in features.
- *
+ * The base interface for classes that read in features.
+ * <p/>
  * FeatureCodecs have to implement two key methods:
+ * <p/>
+ * {@link #readHeader(SOURCE)} - Reads the header, provided a {@link SOURCE} pointing at the beginning of the source input.
+ * {@link #decode(SOURCE)} - Reads a {@link Feature} record, provided a {@link SOURCE} pointing at the beginning of a record within the 
+ * source input.
+ * <p/>
+ * Note that it's not safe to carry state about the {@link SOURCE} within the codec.  There's no guarantee about its  state between calls.
  *
- * readHeader() => starting from the first line of the file, read the full header, if any, and
- *   return it, as well as the position in the file where the header stops and records begin.  The
- *   contract with the readers is that the header and decoders see fresh streams, so you can
- *   safely read into the first record of the file looking for the header.  It's always why
- *   you need to return the file position (via getPosition() in the stream) of the last
- *   header record.
- *
- * decode(stream) => parse out the next record, and return it.  Decode is always called on a
- *   fresh stream after the header is read.
- *
- * Note that it's not safe to carry state about the PositionalBufferedStream arguments here.  There's
- * no guarentee on the state of the stream between calls.
- *
- * The canDecode is used to determine if a file can be decoded by this codec.  Just open up the
- * file and check if it can be decoded with this codec.
- *
- * @param <T> The feature type this codec reads
+ * @param <FEATURE_TYPE> The type of {@link Feature} this codec generates
+ * @param <SOURCE> The type of the data source this codec reads from
  */
-public interface FeatureCodec<T extends Feature> {
+public interface FeatureCodec<FEATURE_TYPE extends Feature, SOURCE> {
     /**
      * Decode a line to obtain just its FeatureLoc for indexing -- contig, start, and stop.
      *
-     *
-     * @param stream the input stream from which to decode the next record
-     * @return  Return the FeatureLoc encoded by the line, or null if the line does not represent a feature (e.g. is
-     * a comment)
+     * @param source the input stream from which to decode the next record
+     * @return Return the FeatureLoc encoded by the line, or null if the line does not represent a feature (e.g. is
+     *         a comment)
      */
-    public Feature decodeLoc(final PositionalBufferedStream stream) throws IOException;
+    public Feature decodeLoc(final SOURCE source) throws IOException;
 
     /**
-     * Decode a line as a Feature.
-     *
+     * Decode a single {@link Feature} from the {@link SOURCE}, reading no further in the underlying source than beyond that feature.
      *
-     * @param stream the input stream from which to decode the next record
-     * @return  Return the Feature encoded by the line,  or null if the line does not represent a feature (e.g. is
-     * a comment)
+     * @param source the input stream from which to decode the next record
+     * @return Return the Feature encoded by the line,  or null if the line does not represent a feature (e.g. is
+     *         a comment)
      */
-    public T decode(final PositionalBufferedStream stream) throws IOException;
+    public FEATURE_TYPE decode(final SOURCE source) throws IOException;
 
     /**
      * Read and return the header, or null if there is no header.
+     * 
+     * Note: Implementers of this method must be careful to read exactly as much from {@link SOURCE} as needed to parse the header, and no 
+     * more. Otherwise, data that might otherwise be fed into parsing a {@link Feature} may be lost.
      *
-     *
-     *
-     * @param stream the input stream from which to decode the header
+     * @param source the source from which to decode the header
      * @return header object
      */
-    public FeatureCodecHeader readHeader(final PositionalBufferedStream stream) throws IOException;
+    public FeatureCodecHeader readHeader(final SOURCE source) throws IOException;
 
     /**
      * This function returns the object the codec generates.  This is allowed to be Feature in the case where
      * conditionally different types are generated.  Be as specific as you can though.
-     *
+     * <p/>
      * This function is used by reflections based tools, so we can know the underlying type
      *
      * @return the feature type this codec generates.
      */
-    public Class<T> getFeatureType();
+    public Class<FEATURE_TYPE> getFeatureType();
+
+    /**
+     * Generates a reader of type {@link SOURCE} appropriate for use by this codec from the generic input stream.  Implementers should
+     * assume the stream is buffered.
+     */
+    public SOURCE makeSourceFromStream(final InputStream bufferedInputStream);
+
+    /**
+     * Generates a {@link LocationAware} reader of type {@link SOURCE}.  Like {@link #makeSourceFromStream(java.io.InputStream)}, except
+     * the {@link LocationAware} compatibility is required for creating indexes.
+     * 
+     * Implementers of this method must return a type that is both {@link LocationAware} as well as {@link SOURCE}.  Note that this 
+     * requirement cannot be enforced via the method signature due to limitations in Java's generic typing system.  Instead, consumers
+     * should cast the call result into a {@link SOURCE} when applicable.
+     */
+    public LocationAware makeIndexableSourceFromStream(final InputStream bufferedInputStream);
+
+    /** Adapter method that assesses whether the provided {@link SOURCE} has more data. True if it does, false otherwise. */
+    public boolean isDone(final SOURCE source);
+
+    /** Adapter method that closes the provided {@link SOURCE}. */
+    public void close(final SOURCE source);
 
     /**
      * This function returns true iff the File potentialInput can be parsed by this
      * codec.
-     *
+     * <p/>
      * There is an assumption that there's never a situation where two different Codecs
      * return true for the same file.  If this occurs, the recommendation would be to error out.
-     *
+     * <p/>
      * Note this function must never throw an error.  All errors should be trapped
      * and false returned.
      *
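
The consumption pattern implied by the new interface is: build a SOURCE from a buffered stream, read the header exactly once, decode records until isDone(), then close the SOURCE through the codec. A generic sketch of that loop (the helper name and list-collecting behaviour are just for illustration):

    import java.io.BufferedInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.util.ArrayList;
    import java.util.List;
    import org.broad.tribble.Feature;
    import org.broad.tribble.FeatureCodec;

    /** Generic consumption pattern for the reworked FeatureCodec interface. */
    public class CodecDriver {
        public static <F extends Feature, S> List<F> readAllFeatures(final FeatureCodec<F, S> codec,
                                                                     final InputStream rawStream) throws IOException {
            // The interface expects a buffered stream; wrap defensively.
            final S source = codec.makeSourceFromStream(new BufferedInputStream(rawStream));
            final List<F> features = new ArrayList<F>();
            try {
                codec.readHeader(source);                 // must be read (and read only once) before decoding records
                while (!codec.isDone(source)) {
                    final F feature = codec.decode(source);
                    if (feature != null) features.add(feature);   // null means "line was not a feature"
                }
            } finally {
                codec.close(source);
            }
            return features;
        }
    }
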
diff --git a/src/java/org/broad/tribble/TabixFeatureReader.java b/src/java/org/broad/tribble/TabixFeatureReader.java
index 4ef9884..ad028e8 100644
--- a/src/java/org/broad/tribble/TabixFeatureReader.java
+++ b/src/java/org/broad/tribble/TabixFeatureReader.java
@@ -23,8 +23,8 @@
  */
 package org.broad.tribble;
 
-import org.broad.tribble.readers.*;
 import net.sf.samtools.util.BlockCompressedInputStream;
+import org.broad.tribble.readers.*;
 import org.broad.tribble.util.ParsingUtils;
 
 import java.io.IOException;
@@ -37,7 +37,7 @@ import java.util.List;
  * @author Jim Robinson
  * @since 2/11/12
  */
-public class TabixFeatureReader extends AbstractFeatureReader {
+public class TabixFeatureReader<T extends Feature, SOURCE> extends AbstractFeatureReader<T, SOURCE> {
 
     TabixReader tabixReader;
     List<String> sequenceNames;
@@ -48,7 +48,7 @@ public class TabixFeatureReader extends AbstractFeatureReader {
      * @param codec
      * @throws IOException
      */
-    public TabixFeatureReader(String featureFile, AsciiFeatureCodec codec) throws IOException {
+    public TabixFeatureReader(final String featureFile, final AsciiFeatureCodec codec) throws IOException {
         super(featureFile, codec);
         tabixReader = new TabixReader(featureFile);
         sequenceNames = new ArrayList<String>(tabixReader.mChr2tid.keySet());
@@ -63,15 +63,15 @@ public class TabixFeatureReader extends AbstractFeatureReader {
      * @throws IOException throws an IOException if we can't open the file
      */
     private void readHeader() throws IOException {
-        PositionalBufferedStream is = null;
+        SOURCE source = null;
         try {
-            is = new PositionalBufferedStream(new BlockCompressedInputStream(ParsingUtils.openInputStream(path)));
-            header = codec.readHeader(is);
+            source = codec.makeSourceFromStream(new PositionalBufferedStream(new BlockCompressedInputStream(ParsingUtils.openInputStream(path))));
+            header = codec.readHeader(source);
         } catch (Exception e) {
             throw new TribbleException.MalformedFeatureFile("Unable to parse header with error: " + e.getMessage(), path, e);
         } finally {
-            if (is != null) {
-                is.close();
+            if (source != null) {
+                codec.close(source);
             }
         }
     }
@@ -90,22 +90,22 @@ public class TabixFeatureReader extends AbstractFeatureReader {
      * @return
      * @throws IOException
      */
-    public CloseableTribbleIterator query(String chr, int start, int end) throws IOException {
-        List<String> mp = getSequenceNames();
+    public CloseableTribbleIterator<T> query(final String chr, final int start, final int end) throws IOException {
+        final List<String> mp = getSequenceNames();
         if (mp == null) throw new TribbleException.TabixReaderFailure("Unable to find sequence named " + chr +
                 " in the tabix index. ", path);
         if (!mp.contains(chr)) {
-            return new EmptyIterator();
+            return new EmptyIterator<T>();
         }
-        TabixIteratorLineReader lineReader = new TabixIteratorLineReader(tabixReader.query(tabixReader.mChr2tid.get(chr), start - 1, end));
-        return new FeatureIterator(lineReader, start - 1, end);
+        final TabixIteratorLineReader lineReader = new TabixIteratorLineReader(tabixReader.query(tabixReader.mChr2tid.get(chr), start - 1, end));
+        return new FeatureIterator<T>(lineReader, start - 1, end);
     }
 
-    public CloseableTribbleIterator iterator() throws IOException {
+    public CloseableTribbleIterator<T> iterator() throws IOException {
         final InputStream is = new BlockCompressedInputStream(ParsingUtils.openInputStream(path));
         final PositionalBufferedStream stream = new PositionalBufferedStream(is);
-        final LineReader reader = new AsciiLineReader(stream);
-        return new FeatureIterator(reader, 0, Integer.MAX_VALUE);
+        final LineReader reader = LineReaderUtil.fromBufferedStream(stream, LineReaderUtil.LineReaderOption.SYNCHRONOUS);
+        return new FeatureIterator<T>(reader, 0, Integer.MAX_VALUE);
     }
 
     public void close() throws IOException {
@@ -113,13 +113,13 @@ public class TabixFeatureReader extends AbstractFeatureReader {
     }
 
 
-    class FeatureIterator<T extends Feature> implements CloseableTribbleIterator {
+    class FeatureIterator<T extends Feature> implements CloseableTribbleIterator<T> {
         private T currentRecord;
         private LineReader lineReader;
         private int start;
         private int end;
 
-        public FeatureIterator(LineReader lineReader, int start, int end) throws IOException {
+        public FeatureIterator(final LineReader lineReader, final int start, final int end) throws IOException {
             this.lineReader = lineReader;
             this.start = start;
             this.end = end;
@@ -136,7 +136,7 @@ public class TabixFeatureReader extends AbstractFeatureReader {
             currentRecord = null;
             String nextLine;
             while (currentRecord == null && (nextLine = lineReader.readLine()) != null) {
-                Feature f = null;
+                final Feature f;
                 try {
                     f = ((AsciiFeatureCodec)codec).decode(nextLine);
                     if (f == null) {
diff --git a/src/java/org/broad/tribble/TribbleIndexedFeatureReader.java b/src/java/org/broad/tribble/TribbleIndexedFeatureReader.java
index 50b4b72..a243e4b 100644
--- a/src/java/org/broad/tribble/TribbleIndexedFeatureReader.java
+++ b/src/java/org/broad/tribble/TribbleIndexedFeatureReader.java
@@ -24,12 +24,12 @@
 package org.broad.tribble;
 
 import net.sf.samtools.seekablestream.SeekableStream;
+import net.sf.samtools.seekablestream.SeekableStreamFactory;
 import org.broad.tribble.index.Block;
 import org.broad.tribble.index.Index;
 import org.broad.tribble.index.IndexFactory;
 import org.broad.tribble.readers.PositionalBufferedStream;
 import org.broad.tribble.util.ParsingUtils;
-import net.sf.samtools.seekablestream.SeekableStreamFactory;
 
 import java.io.BufferedInputStream;
 import java.io.IOException;
@@ -48,10 +48,19 @@ import java.util.zip.GZIPInputStream;
  * @author Jim Robinson
  * @since 2/11/12
  */
-public class TribbleIndexedFeatureReader<T extends Feature> extends AbstractFeatureReader {
+public class TribbleIndexedFeatureReader<T extends Feature, SOURCE> extends AbstractFeatureReader<T, SOURCE> {
 
     private Index index;
 
+    /**
+     * is the path pointing to our source data a regular file?
+     */
+    private final boolean pathIsRegularFile;
+
+    /**
+     * a potentially reusable seekable stream for queries over regular files
+     */
+    private SeekableStream seekableStream = null;
 
     /**
      * @param featurePath  - path to the feature file, can be a local file path, http url, or ftp url
@@ -59,7 +68,7 @@ public class TribbleIndexedFeatureReader<T extends Feature> extends AbstractFeat
      * @param requireIndex - true if the reader will be queries for specific ranges.  An index (idx) file must exist
      * @throws IOException
      */
-    public TribbleIndexedFeatureReader(String featurePath, FeatureCodec codec, boolean requireIndex) throws IOException {
+    public TribbleIndexedFeatureReader(final String featurePath, final FeatureCodec<T, SOURCE> codec, final boolean requireIndex) throws IOException {
 
         super(featurePath, codec);
 
@@ -78,23 +87,60 @@ public class TribbleIndexedFeatureReader<T extends Feature> extends AbstractFeat
             }
         }
 
+        // does path point to a regular file?
+        this.pathIsRegularFile = SeekableStreamFactory.isFilePath(path);
+
         readHeader();
     }
 
     /**
+     * Get a seekable stream appropriate to read information from the current feature path
+     * <p/>
+     * This function ensures that if reuseStreamInQuery returns true then this function will only
+     * ever return a single unique instance of SeekableStream for all calls given this instance of
+     * TribbleIndexedFeatureReader.  If reuseStreamInQuery() returns false then the returned SeekableStream
+     * will be newly opened each time, and should be closed after each use.
+     *
+     * @return a SeekableStream
+     */
+    private SeekableStream getSeekableStream() throws IOException {
+        final SeekableStream result;
+        if (reuseStreamInQuery()) {
+            // if the stream points to an underlying file, only create the underlying seekable stream once
+            if (seekableStream == null) seekableStream = SeekableStreamFactory.getStreamFor(path);
+            result = seekableStream;
+        } else {
+            // we are not reusing the stream, so make a fresh copy each time we request it
+            result = SeekableStreamFactory.getStreamFor(path);
+        }
+
+        return result;
+    }
+
+    /**
+     * Are we attempting to reuse the underlying stream in query() calls?
+     *
+     * @return true if the underlying seekable stream should be reused across query() calls (i.e. the path is a regular file)
+     */
+    private boolean reuseStreamInQuery() {
+        return pathIsRegularFile;
+    }
+
+    /**
      * @param featureFile - path to the feature file, can be a local file path, http url, or ftp url
      * @param codec       - codec to decode the features
      * @param index       - a tribble Index object
      * @throws IOException
      */
-    public TribbleIndexedFeatureReader(String featureFile, FeatureCodec codec, Index index) throws IOException {
+    public TribbleIndexedFeatureReader(final String featureFile, final FeatureCodec<T, SOURCE> codec, final Index index) throws IOException {
         this(featureFile, codec, false); // required to read the header
         this.index = index;
     }
 
 
     public void close() throws IOException {
-        // Nothing to do -- streams are opened and closed in the iterator classes
+        // close the seekable stream if that's necessary
+        if (seekableStream != null) seekableStream.close();
     }
 
     /**
@@ -122,35 +168,45 @@ public class TribbleIndexedFeatureReader<T extends Feature> extends AbstractFeat
                 is = new GZIPInputStream(new BufferedInputStream(is));
             }
             pbs = new PositionalBufferedStream(is);
-            header = codec.readHeader(pbs);
+            final SOURCE source = codec.makeSourceFromStream(pbs);
+            header = codec.readHeader(source);
         } catch (Exception e) {
             throw new TribbleException.MalformedFeatureFile("Unable to parse header with error: " + e.getMessage(), path, e);
         } finally {
-            if ( pbs != null ) pbs.close();
+            if (pbs != null) pbs.close();
             else if (is != null) is.close();
         }
     }
 
     /**
      * Return an iterator to iterate over features overlapping the specified interval
+     * <p/>
+     * Note that TribbleIndexedFeatureReader only supports issuing and manipulating a single query
+     * for each reader.  That is, the behavior of the following code is undefined:
+     * <p/>
+     * reader = new TribbleIndexedFeatureReader()
+     * Iterator it1 = reader.query("x", 10, 20)
+     * Iterator it2 = reader.query("x", 1000, 1010)
+     * <p/>
+     * As a consequence of this, the TribbleIndexedFeatureReader are also not thread-safe.
      *
-     * @param chr contig
+     * @param chr   contig
      * @param start start position
-     * @param end end position
+     * @param end   end position
      * @return an iterator of records in this interval
      * @throws IOException
      */
-    public CloseableTribbleIterator query(String chr, int start, int end) throws IOException {
+    public CloseableTribbleIterator<T> query(final String chr, final int start, final int end) throws IOException {
 
         if (index == null) {
             throw new TribbleException("Index not found for: " + path);
         }
 
         if (index.containsChromosome(chr)) {
-            List<Block> blocks = index.getBlocks(chr, start - 1, end);
+            final List<Block> blocks = index.getBlocks(chr, start - 1, end);
             return new QueryIterator(chr, start, end, blocks);
         } else {
-            return new EmptyIterator();
+            return new EmptyIterator<T>();
         }
     }
 
@@ -159,18 +215,16 @@ public class TribbleIndexedFeatureReader<T extends Feature> extends AbstractFeat
      * @return Return an iterator to iterate over the entire file
      * @throws IOException
      */
-    public CloseableTribbleIterator iterator() throws IOException {
+    public CloseableTribbleIterator<T> iterator() throws IOException {
         return new WFIterator();
     }
 
     /**
      * Class to iterator over an entire file.
-     *
-     * @param <T>
      */
-    class WFIterator<T extends Feature> implements CloseableTribbleIterator {
+    class WFIterator implements CloseableTribbleIterator<T> {
         private T currentRecord;
-        private PositionalBufferedStream stream;
+        private SOURCE source;
 
         /**
          * Constructor for iterating over the entire file (seekableStream).
@@ -180,26 +234,32 @@ public class TribbleIndexedFeatureReader<T extends Feature> extends AbstractFeat
         public WFIterator() throws IOException {
             final InputStream inputStream = ParsingUtils.openInputStream(path);
 
+            final PositionalBufferedStream pbs;
             if (path.endsWith(".gz")) {
                 // Gzipped -- we need to buffer the GZIPInputStream methods as this class makes read() calls,
                 // and seekableStream does not support single byte reads
-                InputStream is = new GZIPInputStream(new BufferedInputStream(inputStream, 512000));
-                stream = new PositionalBufferedStream(is, 1000);  // Small buffer as this is buffered already.
+                final InputStream is = new GZIPInputStream(new BufferedInputStream(inputStream, 512000));
+                pbs = new PositionalBufferedStream(is, 1000);  // Small buffer as this is buffered already.
             } else {
-                stream = new PositionalBufferedStream(inputStream, 512000);
+                pbs = new PositionalBufferedStream(inputStream, 512000);
             }
-
-            if ( header.skipHeaderBytes() ) stream.skip(header.getHeaderEnd());
+            /**
+             * The header was already read from the original source in the constructor; don't read it again, since some codecs keep state
+             * about its initialization.  Instead, skip that part of the stream.
+             */
+            pbs.skip(header.getHeaderEnd());
+            source = codec.makeSourceFromStream(pbs);
             readNextRecord();
         }
 
-
+        @Override
         public boolean hasNext() {
             return currentRecord != null;
         }
 
+        @Override
         public T next() {
-            T ret = currentRecord;
+            final T ret = currentRecord;
             try {
                 readNextRecord();
             } catch (IOException e) {
@@ -218,64 +278,61 @@ public class TribbleIndexedFeatureReader<T extends Feature> extends AbstractFeat
         private void readNextRecord() throws IOException {
             currentRecord = null;
 
-            while (! stream.isDone() ) {
-                Feature f = null;
+            while (!codec.isDone(source)) {
+                final T f;
                 try {
-                    f = codec.decode(stream);
+                    f = codec.decode(source);
 
                     if (f == null) {
                         continue;
                     }
 
-                    currentRecord = (T) f;
+                    currentRecord = f;
                     return;
 
                 } catch (TribbleException e) {
                     e.setSource(path);
                     throw e;
                 } catch (NumberFormatException e) {
-                    String error = "Error parsing line at byte position: " + stream.getPosition();
+                    final String error = "Error parsing line at byte position: " + source;
                     throw new TribbleException.MalformedFeatureFile(error, path, e);
                 }
             }
         }
 
+        @Override
         public void remove() {
             throw new UnsupportedOperationException("Remove is not supported in Iterators");
         }
 
+        @Override
         public void close() {
-            stream.close();
+            codec.close(source);
         }
 
-        public WFIterator<T> iterator() {
+        @Override
+        public WFIterator iterator() {
             return this;
         }
     }
 
     /**
      * Iterator for a query interval
-     *
-     * @param <T>
      */
-    class QueryIterator<T extends Feature> implements CloseableTribbleIterator {
-
-        private String chr;
+    class QueryIterator implements CloseableTribbleIterator<T> {
         private String chrAlias;
         int start;
         int end;
         private T currentRecord;
-        private PositionalBufferedStream stream;
+        private SOURCE source;
+        private SeekableStream mySeekableStream;
         private Iterator<Block> blockIterator;
-        private SeekableStream seekableStream;
-
 
-        public QueryIterator(String chr, int start, int end, List<Block> blocks) throws IOException {
 
-            seekableStream = SeekableStreamFactory.getStreamFor(path);
-            this.chr = chr;
+        public QueryIterator(final String chr, final int start, final int end, final List<Block> blocks) throws IOException {
             this.start = start;
             this.end = end;
+            mySeekableStream = getSeekableStream();
             blockIterator = blocks.iterator();
             advanceBlock();
             readNextRecord();
@@ -292,7 +349,7 @@ public class TribbleIndexedFeatureReader<T extends Feature> extends AbstractFeat
         }
 
         public T next() {
-            T ret = currentRecord;
+            final T ret = currentRecord;
             try {
                 readNextRecord();
             } catch (IOException e) {
@@ -305,20 +362,19 @@ public class TribbleIndexedFeatureReader<T extends Feature> extends AbstractFeat
 
         private void advanceBlock() throws IOException {
             while (blockIterator != null && blockIterator.hasNext()) {
-                Block block = blockIterator.next();
+                final Block block = blockIterator.next();
                 if (block.getSize() > 0) {
-                    seekableStream.seek(block.getStartPosition());
-                    int bufferSize = Math.min(2000000, block.getSize() > 100000000 ? 10000000 : (int)block.getSize());
-                    stream = new PositionalBufferedStream(new BlockStreamWrapper(seekableStream, block), bufferSize);
+                    final int bufferSize = Math.min(2000000, block.getSize() > 100000000 ? 10000000 : (int) block.getSize());
+                    source = codec.makeSourceFromStream(new PositionalBufferedStream(new BlockStreamWrapper(mySeekableStream, block), bufferSize));
                     // note we don't have to skip the header here as the block should never start in the header
                     return;
                 }
             }
 
             // If we get here the blocks are exhausted, set reader to null
-            if ( stream != null ) {
-                stream.close();
-                stream = null;
+            if (source != null) {
+                codec.close(source);
+                source = null;
             }
         }
 
@@ -329,17 +385,17 @@ public class TribbleIndexedFeatureReader<T extends Feature> extends AbstractFeat
          */
         private void readNextRecord() throws IOException {
 
-            if(stream == null) {
+            if (source == null) {
                 return;  // <= no more features to read
             }
 
             currentRecord = null;
 
             while (true) {   // Loop through blocks
-                while (!stream.isDone()) {  // Loop through current block
-                    Feature f = null;
+                while (!codec.isDone(source)) {  // Loop through current block
+                    final T f;
                     try {
-                        f = codec.decode(stream);
+                        f = codec.decode(source);
                         if (f == null) {
                             continue;   // Skip
                         }
@@ -355,14 +411,14 @@ public class TribbleIndexedFeatureReader<T extends Feature> extends AbstractFeat
                             continue;   // Skip
                         }
 
-                        currentRecord = (T) f;     // Success
+                        currentRecord = f;     // Success
                         return;
 
                     } catch (TribbleException e) {
                         e.setSource(path);
                         throw e;
                     } catch (NumberFormatException e) {
-                        String error = "Error parsing line: " + stream.getPosition();
+                        final String error = "Error parsing line: " + source;
                         throw new TribbleException.MalformedFeatureFile(error, path, e);
                     }
                 }
@@ -381,11 +437,15 @@ public class TribbleIndexedFeatureReader<T extends Feature> extends AbstractFeat
 
 
         public void close() {
-            if ( stream != null ) stream.close();
-            try {
-                seekableStream.close(); // todo -- uncomment to fix bug
-            } catch (IOException e) {
-                throw new TribbleException("Couldn't close seekable stream", e);
+            // Note that this depends on BlockStreamWrapper not actually closing the underlying stream
+            codec.close(source);
+            if (!reuseStreamInQuery()) {
+                // if we are going to reuse the underlying stream, don't close it here.
+                try {
+                    mySeekableStream.close();
+                } catch (IOException e) {
+                    throw new TribbleException("Couldn't close seekable stream", e);
+                }
             }
         }
 
@@ -404,7 +464,7 @@ public class TribbleIndexedFeatureReader<T extends Feature> extends AbstractFeat
         SeekableStream seekableStream;
         long maxPosition;
 
-        BlockStreamWrapper(SeekableStream seekableStream, Block block) throws IOException {
+        BlockStreamWrapper(final SeekableStream seekableStream, final Block block) throws IOException {
             this.seekableStream = seekableStream;
             seekableStream.seek(block.getStartPosition());
             maxPosition = block.getEndPosition();
@@ -416,15 +476,15 @@ public class TribbleIndexedFeatureReader<T extends Feature> extends AbstractFeat
         }
 
         @Override
-        public int read(byte[] bytes, int off, int len) throws IOException {
+        public int read(final byte[] bytes, final int off, final int len) throws IOException {
             // note the careful treatment here to ensure we can continue to
             // read blocks longer than Integer.MAX_VALUE bytes
-            long maxBytes = maxPosition - seekableStream.position();
+            final long maxBytes = maxPosition - seekableStream.position();
             if (maxBytes <= 0) {
                 return -1;
             }
 
-            int bytesToRead = (int)Math.min(len, Math.min(maxBytes, Integer.MAX_VALUE));
+            final int bytesToRead = (int) Math.min(len, Math.min(maxBytes, Integer.MAX_VALUE));
             return seekableStream.read(bytes, off, bytesToRead);
 
         }
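
For reference, a minimal usage sketch of the query path served by the QueryIterator above (not part of the patch; the file path and interval are hypothetical, and an index is assumed to exist so that query() is available). It uses the BED codec touched later in this patch:

    import org.broad.tribble.AbstractFeatureReader;
    import org.broad.tribble.CloseableTribbleIterator;
    import org.broad.tribble.FeatureReader;
    import org.broad.tribble.bed.BEDCodec;
    import org.broad.tribble.bed.BEDFeature;

    import java.io.IOException;

    public class QueryExample {
        public static void main(final String[] args) throws IOException {
            // Third argument: require an index; interval queries need one.
            final FeatureReader<BEDFeature> reader =
                    AbstractFeatureReader.getFeatureReader(args[0], new BEDCodec(), true);
            final CloseableTribbleIterator<BEDFeature> it = reader.query("chr1", 1000000, 2000000);
            while (it.hasNext()) {
                final BEDFeature f = it.next();
                System.out.println(f.getChr() + ":" + f.getStart() + "-" + f.getEnd());
            }
            it.close();
            reader.close();
        }
    }
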
diff --git a/src/java/org/broad/tribble/bed/BEDCodec.java b/src/java/org/broad/tribble/bed/BEDCodec.java
index 0f47d66..90dcdfb 100644
--- a/src/java/org/broad/tribble/bed/BEDCodec.java
+++ b/src/java/org/broad/tribble/bed/BEDCodec.java
@@ -25,6 +25,7 @@ package org.broad.tribble.bed;
 
 import org.broad.tribble.AsciiFeatureCodec;
 import org.broad.tribble.annotation.Strand;
+import org.broad.tribble.readers.LineIterator;
 import org.broad.tribble.util.ParsingUtils;
 
 import java.util.regex.Pattern;
@@ -38,7 +39,8 @@ import java.util.regex.Pattern;
  */
 public class BEDCodec extends AsciiFeatureCodec<BEDFeature> {
 
-    private int startOffsetValue;
+    private static final Pattern SPLIT_PATTERN = Pattern.compile("\\t|( +)");
+    private final int startOffsetValue;
 
     /**
      * Calls {@link #BEDCodec(StartOffset)} with an argument
@@ -53,10 +55,9 @@ public class BEDCodec extends AsciiFeatureCodec<BEDFeature> {
      * BED format is 0-based, but Tribble is 1-based.
      * Set desired start position at either ZERO or ONE
      */
-    public BEDCodec(StartOffset startOffset) {
+    public BEDCodec(final StartOffset startOffset) {
         super(BEDFeature.class);
         this.startOffsetValue = startOffset.value();
-        splitPattern = Pattern.compile("\\t|( +)");
     }
 
 
@@ -76,11 +77,15 @@ public class BEDCodec extends AsciiFeatureCodec<BEDFeature> {
             return null;
         }
 
-        String[] tokens = splitPattern.split(line, -1);
+        String[] tokens = SPLIT_PATTERN.split(line, -1);
         return decode(tokens);
     }
 
     @Override
+    public Object readActualHeader(LineIterator reader) {
+        return null;
+    }
+
     public BEDFeature decode(String[] tokens) {
         int tokenCount = tokens.length;
 
@@ -200,8 +205,8 @@ public class BEDCodec extends AsciiFeatureCodec<BEDFeature> {
 
     /**
      * Indicate whether co-ordinates are 0-based or 1-based.
-     * Tribble uses 1-based, BED files use 0.
      * <p/>
+     * Tribble uses 1-based, BED files use 0.
      * e.g.:
      * start_position = bedline_start_position - startIndex.value()
      */
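
A short usage sketch of the codec above (not part of the patch; the BED line is made-up data). It relies only on decode(String) and the split pattern shown in this hunk:

    import org.broad.tribble.bed.BEDCodec;
    import org.broad.tribble.bed.BEDFeature;

    public class BedDecodeExample {
        public static void main(final String[] args) {
            // The no-arg constructor applies the one-based start offset documented above.
            final BEDCodec codec = new BEDCodec();
            // SPLIT_PATTERN ("\t|( +)") accepts tabs or runs of spaces between fields.
            final BEDFeature feature = codec.decode("chr1\t999\t2000\tmyRegion");
            // getStart() is reported in Tribble's 1-based convention, so it differs from the
            // raw 0-based BED value (999) by the configured start offset.
            System.out.println(feature.getChr() + ":" + feature.getStart() + "-" + feature.getEnd());
        }
    }
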
diff --git a/src/java/org/broad/tribble/dbsnp/OldDbSNPCodec.java b/src/java/org/broad/tribble/dbsnp/OldDbSNPCodec.java
index 040914e..5e241fb 100644
--- a/src/java/org/broad/tribble/dbsnp/OldDbSNPCodec.java
+++ b/src/java/org/broad/tribble/dbsnp/OldDbSNPCodec.java
@@ -26,6 +26,7 @@ package org.broad.tribble.dbsnp;
 import org.broad.tribble.AsciiFeatureCodec;
 import org.broad.tribble.Feature;
 import org.broad.tribble.annotation.Strand;
+import org.broad.tribble.readers.LineIterator;
 
 
 /**
@@ -39,7 +40,7 @@ public class OldDbSNPCodec extends AsciiFeatureCodec<OldDbSNPFeature> {
 
     // the number of tokens we expect to parse from a dbSNP line
     static final int expectedTokenCount = 18;
-
+    
     public OldDbSNPCodec() {
         super(OldDbSNPFeature.class);
     }
@@ -77,7 +78,6 @@ public class OldDbSNPCodec extends AsciiFeatureCodec<OldDbSNPFeature> {
      * 18 weight
      */
     public OldDbSNPFeature decode(String line) {
-
         // we may be asked to process a header line; ignore it
         if (line.startsWith("#")) return null;
 
@@ -87,6 +87,10 @@ public class OldDbSNPCodec extends AsciiFeatureCodec<OldDbSNPFeature> {
     }
 
     @Override
+    public Object readActualHeader(final LineIterator reader) {
+        return null;
+    }
+
     public OldDbSNPFeature decode(String[] tokens){
         // check to see if we've parsed the string into the right number of tokens (expectedTokenCount)
         if (tokens.length != expectedTokenCount)
diff --git a/src/java/org/broad/tribble/example/ExampleBinaryCodec.java b/src/java/org/broad/tribble/example/ExampleBinaryCodec.java
index 615197a..0e01df0 100644
--- a/src/java/org/broad/tribble/example/ExampleBinaryCodec.java
+++ b/src/java/org/broad/tribble/example/ExampleBinaryCodec.java
@@ -25,6 +25,7 @@ package org.broad.tribble.example;
 
 import org.broad.tribble.*;
 import org.broad.tribble.readers.AsciiLineReader;
+import org.broad.tribble.readers.LineIterator;
 import org.broad.tribble.readers.PositionalBufferedStream;
 
 import java.io.*;
@@ -37,7 +38,7 @@ import java.util.List;
  *
  * @author Mark DePristo
  */
-public class ExampleBinaryCodec implements FeatureCodec<Feature> {
+public class ExampleBinaryCodec extends BinaryFeatureCodec<Feature> {
     public final static String HEADER_LINE = "# BinaryTestFeature";
 
     @Override
@@ -56,17 +57,13 @@ public class ExampleBinaryCodec implements FeatureCodec<Feature> {
 
     @Override
     public FeatureCodecHeader readHeader(final PositionalBufferedStream stream) throws IOException {
-        final AsciiLineReader reader = new AsciiLineReader(stream);
-        String line;
-        List<String> headerLines = new ArrayList<String>();
+        // Construct a reader that does not read ahead (because we don't want to consume data from the stream that is not the header)
+        final AsciiLineReader nonReadAheadLineReader = new AsciiLineReader(stream);
+        final List<String> headerLines = new ArrayList<String>();
         long headerLengthInBytes = 0;
-        while ((line = reader.readLine()) != null) {
-            if (line.startsWith("#")) {
-                headerLines.add(line);
-                headerLengthInBytes = stream.getPosition();
-            } else {
-                break; // no more header lines
-            }
+        while (stream.peek() == ('#' & 0xff)) { // Look for header lines, which are prefixed by '#'.
+            headerLines.add(nonReadAheadLineReader.readLine());
+            headerLengthInBytes = stream.getPosition();
         }
         return new FeatureCodecHeader(headerLines, headerLengthInBytes);
     }
@@ -91,8 +88,8 @@ public class ExampleBinaryCodec implements FeatureCodec<Feature> {
      * @param codec of the source file features
      * @throws IOException
      */
-    public static void convertToBinaryTest(final File source, final File dest, final FeatureCodec<Feature> codec) throws IOException {
-        final FeatureReader<Feature> reader = AbstractFeatureReader.getFeatureReader(source.getAbsolutePath(), codec, false); // IndexFactory.loadIndex(idxFile));
+    public static <FEATURE_TYPE extends Feature> void convertToBinaryTest(final File source, final File dest, final FeatureCodec<FEATURE_TYPE, LineIterator> codec) throws IOException {
+        final FeatureReader<FEATURE_TYPE> reader = AbstractFeatureReader.getFeatureReader(source.getAbsolutePath(), codec, false); // IndexFactory.loadIndex(idxFile));
         final OutputStream output = new FileOutputStream(dest);
         ExampleBinaryCodec.convertToBinaryTest(reader, output);
     }
@@ -104,10 +101,10 @@ public class ExampleBinaryCodec implements FeatureCodec<Feature> {
      *
      * @throws IOException
      */
-    public static void convertToBinaryTest(final FeatureReader<Feature> reader, final OutputStream out) throws IOException {
+    public static <FEATURE_TYPE extends Feature> void convertToBinaryTest(final FeatureReader<FEATURE_TYPE> reader, final OutputStream out) throws IOException {
         DataOutputStream dos = new DataOutputStream(out);
         dos.writeBytes(HEADER_LINE + "\n");
-        Iterator<Feature> it = reader.iterator();
+        Iterator<FEATURE_TYPE> it = reader.iterator();
         while ( it.hasNext() ) {
             final Feature f = it.next();
             dos.writeUTF(f.getChr());
diff --git a/src/java/org/broad/tribble/gelitext/GeliTextCodec.java b/src/java/org/broad/tribble/gelitext/GeliTextCodec.java
index dc3b4d3..b8b0c6b 100644
--- a/src/java/org/broad/tribble/gelitext/GeliTextCodec.java
+++ b/src/java/org/broad/tribble/gelitext/GeliTextCodec.java
@@ -23,68 +23,70 @@
  */
 package org.broad.tribble.gelitext;
 
+import net.sf.samtools.util.CollectionUtil;
 import org.broad.tribble.AsciiFeatureCodec;
 import org.broad.tribble.Feature;
 import org.broad.tribble.exception.CodecLineParsingException;
+import org.broad.tribble.readers.LineIterator;
 
 import java.util.Arrays;
 
 
 /**
- *         <p/>
- *         A codec for parsing geli text files, which is the text version of the geli binary format.
- *         <p/>
- *
- *         GELI text has the following tab-seperated fields:
- *         contig             the contig (string)
- *         position           the position on the contig (long)
- *         refBase            the reference base (char)
- *         depthOfCoverage    the depth of coverage at this position (int)
- *         maximumMappingQual the maximum mapping quality of a read at this position (int)
- *         genotype           the called genotype (string)
- *         LODBestToReference the LOD score of the best to the reference (double)
- *         LODBestToNext      the LOD score of the best to the next best genotype (double)
- *         likelihoods        the array of all genotype likelihoods, in ordinal ordering (array of 10 doubles, in ordinal order)
+ * <p/>
+ * A codec for parsing geli text files, which is the text version of the geli binary format.
+ * <p/>
+ * <p/>
+ * GELI text has the following tab-separated fields:
+ * contig             the contig (string)
+ * position           the position on the contig (long)
+ * refBase            the reference base (char)
+ * depthOfCoverage    the depth of coverage at this position (int)
+ * maximumMappingQual the maximum mapping quality of a read at this position (int)
+ * genotype           the called genotype (string)
+ * LODBestToReference the LOD score of the best to the reference (double)
+ * LODBestToNext      the LOD score of the best to the next best genotype (double)
+ * likelihoods        the array of all genotype likelihoods, in ordinal ordering (array of 10 doubles, in ordinal order)
  *
  * @author aaron
  */
 public class GeliTextCodec extends AsciiFeatureCodec<GeliTextFeature> {
-    private String[] parts;
-    private String line;
-
     public GeliTextCodec() {
         super(GeliTextFeature.class);
     }
 
-    public Feature decodeLoc(String line) {
+    public Feature decodeLoc(final String line) {
         return decode(line);
     }
 
     @Override
-    public GeliTextFeature decode(String line) {
-            // clean out header lines and comments
-            if (line.startsWith("#") || line.startsWith("@"))
-                return null;
+    public GeliTextFeature decode(final String line) {
+        // clean out header lines and comments
+        if (line.startsWith("#") || line.startsWith("@"))
+            return null;
 
-            // parse into tokens
-            this.line= line;
-            parts = line.trim().split("\\s+");
-            return decode(parts);
+        // parse into tokens
+        final String[] parts = line.trim().split("\\s+");
+        return decode(parts);
     }
 
     @Override
-    public GeliTextFeature decode(String[] tokens) {
+    public Object readActualHeader(LineIterator reader) {
+        return null;
+    }
+
+    public GeliTextFeature decode(final String[] tokens) {
         try {
             // check that we got the correct number of tokens in the split
             if (tokens.length != 18)
-                throw new CodecLineParsingException("Invalid GeliTextFeature row found -- incorrect element count.  Expected 18, got " + tokens.length + " line = " + line);
+                throw new CodecLineParsingException("Invalid GeliTextFeature row found -- incorrect element count.  Expected 18, got " + tokens.length + " line = " + CollectionUtil.join(Arrays.asList(tokens), " "));
 
             // UPPER case and sort
-            char[] x = tokens[5].toUpperCase().toCharArray();
+            final char[] x = tokens[5].toUpperCase().toCharArray();
             Arrays.sort(x);
-            String bestGenotype = new String(x);
+            final String bestGenotype = new String(x);
 
-            double genotypeLikelihoods[] = new double[10];
+            final double[] genotypeLikelihoods = new double[10];
             for (int pieceIndex = 8, offset = 0; pieceIndex < 18; pieceIndex++, offset++) {
                 genotypeLikelihoods[offset] = Double.valueOf(tokens[pieceIndex]);
             }
@@ -99,10 +101,10 @@ public class GeliTextCodec extends AsciiFeatureCodec<GeliTextFeature> {
                     genotypeLikelihoods);
         } catch (CodecLineParsingException e) {
             e.printStackTrace();
-            throw new RuntimeException("Unable to parse line " + line, e);
+            throw new RuntimeException("Unable to parse line " + CollectionUtil.join(Arrays.asList(tokens), " "), e);
         } catch (NumberFormatException e) {
             e.printStackTrace();
-            throw new RuntimeException("Unable to parse line " + line, e);
+            throw new RuntimeException("Unable to parse line " + CollectionUtil.join(Arrays.asList(tokens), " "), e);
         }
     }
 }
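
For illustration, a small sketch of driving the codec above directly (not part of the patch; the GELI line is hypothetical data that simply matches the 18-field layout described in the class comment):

    import org.broad.tribble.gelitext.GeliTextCodec;
    import org.broad.tribble.gelitext.GeliTextFeature;

    public class GeliDecodeExample {
        public static void main(final String[] args) {
            // 18 whitespace-separated fields: contig, position, refBase, depthOfCoverage,
            // maximumMappingQual, genotype, LODBestToReference, LODBestToNext,
            // then the ten genotype likelihoods in ordinal order.
            final String line = "chr1 10001 A 12 60 AC 5.1 2.3 "
                    + "-10.0 -0.1 -9.0 -8.0 -7.0 -6.0 -5.0 -4.0 -3.0 -2.0";
            // Lines starting with '#' or '@' are treated as headers/comments and return null.
            final GeliTextFeature feature = new GeliTextCodec().decode(line);
            System.out.println(feature.getChr() + ":" + feature.getStart());
        }
    }
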
diff --git a/src/java/org/broad/tribble/index/IndexFactory.java b/src/java/org/broad/tribble/index/IndexFactory.java
index d20d42b..bccde40 100644
--- a/src/java/org/broad/tribble/index/IndexFactory.java
+++ b/src/java/org/broad/tribble/index/IndexFactory.java
@@ -28,6 +28,7 @@ import org.broad.tribble.index.interval.IntervalIndexCreator;
 import org.broad.tribble.index.interval.IntervalTreeIndex;
 import org.broad.tribble.index.linear.LinearIndex;
 import org.broad.tribble.index.linear.LinearIndexCreator;
+import org.broad.tribble.readers.LocationAware;
 import org.broad.tribble.readers.PositionalBufferedStream;
 import org.broad.tribble.util.LittleEndianInputStream;
 import org.broad.tribble.util.LittleEndianOutputStream;
@@ -44,11 +45,7 @@ import java.util.zip.GZIPInputStream;
  * correct index type from the input file or stream
  */
 public class IndexFactory {
-
-    /**
-     * We can optimize index-file-creation for different factors.
-     * As of this writing, those are index-file size or seeking time.
-     */
+    /** We can optimize index-file-creation for different factors. As of this writing, those are index-file size or seeking time. */
     public enum IndexBalanceApproach {
         FOR_SIZE,
         FOR_SEEK_TIME
@@ -85,7 +82,7 @@ public class IndexFactory {
             return indexCreatorClass != null;
         }
 
-        IndexType(int headerValue, Class creator, Class indexClass, int defaultBinSize) {
+        IndexType(final int headerValue, final Class creator, final Class indexClass, final int defaultBinSize) {
             indexValue = headerValue;
             indexCreatorClass = creator;
             indexType = indexClass;
@@ -106,8 +103,8 @@ public class IndexFactory {
          * @return The {@code IndexType} based on the {@code headerValue}
          * @throws TribbleException.UnableToCreateCorrectIndexType
          */
-        public static IndexType getIndexType(int headerValue) {
-            for (IndexType type : IndexType.values())
+        public static IndexType getIndexType(final int headerValue) {
+            for (final IndexType type : IndexType.values())
                 if (type.indexValue == headerValue) return type;
             throw new TribbleException.UnableToCreateCorrectIndexType("Unknown index type value " + headerValue);
         }
@@ -121,7 +118,7 @@ public class IndexFactory {
      * @param indexFile from which to load the index
      * @return the parsed index object
      */
-    public static Index loadIndex(String indexFile) {
+    public static Index loadIndex(final String indexFile) {
         Index idx = null;
         InputStream is = null;
         LittleEndianInputStream dis = null;
@@ -135,9 +132,9 @@ public class IndexFactory {
             dis = new LittleEndianInputStream(is);
 
             // Read the type and version,  then create the appropriate type
-            int magicNumber = dis.readInt();
-            int type = dis.readInt();
-            Class indexClass = IndexType.getIndexType(type).getIndexType();
+            final int magicNumber = dis.readInt();
+            final int type = dis.readInt();
+            final Class indexClass = IndexType.getIndexType(type).getIndexType();
 
             idx = (Index) indexClass.newInstance();
             idx.read(dis);
@@ -165,7 +162,7 @@ public class IndexFactory {
      * @param codec     the codec to use for decoding records
      * @return an index
      */
-    public static Index createLinearIndex(File inputFile, FeatureCodec codec) {
+    public static Index createLinearIndex(final File inputFile, final FeatureCodec codec) {
         return createIndex(inputFile, codec, IndexType.LINEAR);
     }
 
@@ -178,7 +175,7 @@ public class IndexFactory {
      * @param binSize   the bin size
      * @return an index
      */
-    public static Index createLinearIndex(File inputFile, FeatureCodec codec, int binSize) {
+    public static <FEATURE_TYPE extends Feature, SOURCE_TYPE> Index createLinearIndex(final File inputFile, final FeatureCodec<FEATURE_TYPE, SOURCE_TYPE> codec, final int binSize) {
         return createIndex(inputFile, codec, IndexType.LINEAR, binSize);
     }
 
@@ -189,7 +186,7 @@ public class IndexFactory {
      * @param codec to decode the features
      * @return
      */
-    public static Index createIntervalIndex(File inputFile, FeatureCodec codec) {
+    public static <FEATURE_TYPE extends Feature, SOURCE_TYPE> Index createIntervalIndex(final File inputFile, final FeatureCodec<FEATURE_TYPE, SOURCE_TYPE> codec) {
         return createIndex(inputFile, codec, IndexType.INTERVAL_TREE);
     }
 
@@ -202,7 +199,7 @@ public class IndexFactory {
      * @param binSize   the bin size
      * @return an index
      */
-    public static Index createIntervalIndex(File inputFile, FeatureCodec codec, int binSize) {
+    public static <FEATURE_TYPE extends Feature, SOURCE_TYPE> Index createIntervalIndex(final File inputFile, final FeatureCodec<FEATURE_TYPE, SOURCE_TYPE> codec, final int binSize) {
         return createIndex(inputFile, codec, IndexType.INTERVAL_TREE, binSize);
     }
 
@@ -213,7 +210,7 @@ public class IndexFactory {
      * @param codec     the codec to use for decoding records
      * @return an index
      */
-    public static Index createDynamicIndex(File inputFile, FeatureCodec codec) {
+    public static <FEATURE_TYPE extends Feature, SOURCE_TYPE> Index createDynamicIndex(final File inputFile, final FeatureCodec<FEATURE_TYPE, SOURCE_TYPE> codec) {
         return createDynamicIndex(inputFile, codec, IndexBalanceApproach.FOR_SEEK_TIME);
     }
 
@@ -225,7 +222,7 @@ public class IndexFactory {
      * @param type      the type of index to create
      * @return an index
      */
-    public static Index createIndex(File inputFile, FeatureCodec codec, IndexType type) {
+    public static <FEATURE_TYPE extends Feature, SOURCE_TYPE> Index createIndex(final File inputFile, final FeatureCodec<FEATURE_TYPE, SOURCE_TYPE> codec, final IndexType type) {
         return createIndex(inputFile, codec, type, type.getDefaultBinSize());
     }
 
@@ -238,13 +235,13 @@ public class IndexFactory {
      * @param binSize   the bin size
      * @return an index
      */
-    public static Index createIndex(File inputFile, FeatureCodec codec, IndexType type, final int binSize) {
+    public static <FEATURE_TYPE extends Feature, SOURCE_TYPE> Index createIndex(final File inputFile, final FeatureCodec<FEATURE_TYPE, SOURCE_TYPE> codec, final IndexType type, final int binSize) {
         if ( ! type.canCreate() )
             throw new TribbleException("Tribble can only read, not create indices of type " + type.name());
 
-        IndexCreator idx = type.getIndexCreator();
+        final IndexCreator idx = type.getIndexCreator();
         idx.initialize(inputFile, binSize);
-        return createIndex(inputFile, new FeatureIterator(inputFile, codec), idx);
+        return createIndex(inputFile, new FeatureIterator<FEATURE_TYPE, SOURCE_TYPE>(inputFile, codec), idx);
     }
 
     /**
@@ -274,26 +271,26 @@ public class IndexFactory {
      * @param iba       the index balancing approach
      * @return a index
      */
-    public static Index createDynamicIndex(File inputFile, FeatureCodec codec, IndexBalanceApproach iba) {
+    public static <FEATURE_TYPE extends Feature, SOURCE_TYPE> Index createDynamicIndex(final File inputFile, final FeatureCodec<FEATURE_TYPE, SOURCE_TYPE> codec, final IndexBalanceApproach iba) {
         // get a list of index creators
-        DynamicIndexCreator indexCreator = new DynamicIndexCreator(iba);
+        final DynamicIndexCreator indexCreator = new DynamicIndexCreator(iba);
         indexCreator.initialize(inputFile, indexCreator.defaultBinSize());
-        return createIndex(inputFile, new FeatureIterator(inputFile, codec), indexCreator);
+        return createIndex(inputFile, new FeatureIterator<FEATURE_TYPE, SOURCE_TYPE>(inputFile, codec), indexCreator);
     }
 
 
-    private static Index createIndex(File inputFile, FeatureIterator iterator, IndexCreator creator) {
+    private static Index createIndex(final File inputFile, final FeatureIterator iterator, final IndexCreator creator) {
         Feature lastFeature = null;
         Feature currentFeature;
-        Map<String, Feature> visitedChromos = new HashMap<String, Feature>(40);
+        final Map<String, Feature> visitedChromos = new HashMap<String, Feature>(40);
         while (iterator.hasNext()) {
-            long position = iterator.getPosition();
+            final long position = iterator.getPosition();
             currentFeature = iterator.next();
 
             checkSorted(inputFile, lastFeature, currentFeature);
             //should only visit chromosomes once
-            String curChr = currentFeature.getChr();
-            String lastChr = lastFeature != null ? lastFeature.getChr() : null;
+            final String curChr = currentFeature.getChr();
+            final String lastChr = lastFeature != null ? lastFeature.getChr() : null;
             if(!curChr.equals(lastChr)){
                 if(visitedChromos.containsKey(curChr)){
                     String msg = "Input file must have contiguous chromosomes.";
@@ -315,11 +312,11 @@ public class IndexFactory {
         return creator.finalizeIndex(iterator.getPosition());
     }
 
-    private static String featToString(Feature feature){
+    private static String featToString(final Feature feature){
         return feature.getChr() + ":" + feature.getStart() + "-" + feature.getEnd();
     }
 
-    private static void checkSorted(File inputFile, Feature lastFeature, Feature currentFeature){
+    private static void checkSorted(final File inputFile, final Feature lastFeature, final Feature currentFeature){
         // if the last currentFeature is after the current currentFeature, exception out
         if (lastFeature != null && currentFeature.getStart() < lastFeature.getStart() && lastFeature.getChr().equals(currentFeature.getChr()))
             throw new TribbleException.MalformedFeatureFile("Input file is not sorted by start position. \n" +
@@ -331,13 +328,13 @@ public class IndexFactory {
     /**
      * Iterator for reading features from a file, given a {@code FeatureCodec}.
      */
-    static class FeatureIterator implements CloseableTribbleIterator<Feature> {
+    static class FeatureIterator<FEATURE_TYPE extends Feature, SOURCE> implements CloseableTribbleIterator<Feature> {
         // the stream we use to get features
-        private PositionalBufferedStream stream;
+        private SOURCE source;
         // the next feature
         private Feature nextFeature;
         // our codec
-        private final FeatureCodec codec;
+        private final FeatureCodec<FEATURE_TYPE, SOURCE> codec;
         private final File inputFile;
 
         // we also need to cache our position
@@ -348,11 +345,11 @@ public class IndexFactory {
          * @param inputFile The file from which to read. Stream for reading is opened on construction.
          * @param codec
          */
-        public FeatureIterator(File inputFile, FeatureCodec codec) {
+        public FeatureIterator(final File inputFile, final FeatureCodec<FEATURE_TYPE, SOURCE> codec) {
             this.codec = codec;
             this.inputFile = inputFile;
-            FeatureCodecHeader header = readHeader();
-            stream = initStream(inputFile, header.getHeaderEnd());
+            final FeatureCodecHeader header = readHeader();
+            source = (SOURCE) codec.makeIndexableSourceFromStream(initStream(inputFile, header.getHeaderEnd()));
             readNextFeature();
         }
 
@@ -362,19 +359,19 @@ public class IndexFactory {
          */
         private FeatureCodecHeader readHeader() {
             try {
-                PositionalBufferedStream pbs = initStream(inputFile, 0);
-                FeatureCodecHeader header = this.codec.readHeader(pbs);
-                pbs.close();
+                final SOURCE source = this.codec.makeSourceFromStream(initStream(inputFile, 0));
+                final FeatureCodecHeader header = this.codec.readHeader(source);
+                codec.close(source);
                 return header;
             } catch (IOException e) {
                 throw new TribbleException.InvalidHeader("Error reading header " + e.getMessage());
             }
         }
 
-        private PositionalBufferedStream initStream(File inputFile, long skip) {
+        private PositionalBufferedStream initStream(final File inputFile, final long skip) {
             try {
                 final FileInputStream is = new FileInputStream(inputFile);
-                PositionalBufferedStream pbs = new PositionalBufferedStream(is);
+                final PositionalBufferedStream pbs = new PositionalBufferedStream(is);
                 if ( skip > 0 ) pbs.skip(skip);
                 return pbs;
             } catch (FileNotFoundException e) {
@@ -389,7 +386,7 @@ public class IndexFactory {
         }
 
         public Feature next() {
-            Feature ret = nextFeature;
+            final Feature ret = nextFeature;
             readNextFeature();
             return ret;
         }
@@ -406,7 +403,7 @@ public class IndexFactory {
          * @return the file position from the underlying reader
          */
         public long getPosition() {
-            return (hasNext()) ? cachedPosition : stream.getPosition();
+            return (hasNext()) ? cachedPosition : ((LocationAware) source).getPosition();
         }
 
         @Override
@@ -416,7 +413,7 @@ public class IndexFactory {
 
         @Override
         public void close() {
-            stream.close();
+            codec.close(source);
         }
 
         /**
@@ -424,11 +421,11 @@ public class IndexFactory {
          * @throws TribbleException.MalformedFeatureFile
          */
         private void readNextFeature() {
-            cachedPosition = stream.getPosition();
+            cachedPosition = ((LocationAware) source).getPosition();
             try {
                 nextFeature = null;
-                while ( ! stream.isDone() && nextFeature == null ) {
-                    nextFeature = codec.decodeLoc(stream);
+                while (nextFeature == null && !codec.isDone(source)) {
+                    nextFeature = codec.decodeLoc(source);
                 }
             } catch (IOException e) {
                 throw new TribbleException.MalformedFeatureFile("Unable to read a line from the file", inputFile.getAbsolutePath(), e);
diff --git a/src/java/org/broad/tribble/readers/AsciiLineReader.java b/src/java/org/broad/tribble/readers/AsciiLineReader.java
index d0666f7..37c48bb 100644
--- a/src/java/org/broad/tribble/readers/AsciiLineReader.java
+++ b/src/java/org/broad/tribble/readers/AsciiLineReader.java
@@ -24,9 +24,12 @@ import java.io.*;
 /**
  * A simple class that provides {@link #readLine()} functionality around a PositionalBufferedStream
  *
+ * {@link BufferedReader} and its {@link java.io.BufferedReader#readLine()} method should be used in preference to this class (when the
+ * {@link LocationAware} functionality is not required) because it offers greater performance.
+ * 
  * @author jrobinso
  */
-public class AsciiLineReader implements LineReader {
+public class AsciiLineReader implements LineReader, LocationAware {
     private static final int BUFFER_OVERFLOW_INCREASE_FACTOR = 2;
     private static final byte LINEFEED = (byte) ('\n' & 0xff);
     private static final byte CARRIAGE_RETURN = (byte) ('\r' & 0xff);
@@ -34,13 +37,6 @@ public class AsciiLineReader implements LineReader {
     PositionalBufferedStream is;
     char[] lineBuffer;
 
-    /**
-     * Initialize without a default stream
-     */
-    public AsciiLineReader() {
-        this(null);
-    }
-
     public AsciiLineReader(InputStream is){
         this(new PositionalBufferedStream(is));
     }
@@ -148,6 +144,19 @@ public class AsciiLineReader implements LineReader {
                 reader2.close();
             }
 
+            if ( includeBufferedReader ) {
+                LongLineBufferedReader longLineBufferedReader = new LongLineBufferedReader(new BufferedReader(new FileReader(testFile)));
+                t0 = System.currentTimeMillis();
+                lineCount = 0;
+                while (longLineBufferedReader.readLine() != null) {
+                    lineCount++;
+                }
+                dt = System.currentTimeMillis() - t0;
+                rate = ((double) lineCount) / dt;
+                printStatus("BufferedReader", lineCount, rate, dt);
+                longLineBufferedReader.close();
+            }
+            
             PositionalBufferedStream pbs = new PositionalBufferedStream(new FileInputStream(testFile));
             LineReader reader = new AsciiLineReader(pbs);
             t0 = System.currentTimeMillis();
diff --git a/src/java/org/broad/tribble/readers/AsciiLineReaderIterator.java b/src/java/org/broad/tribble/readers/AsciiLineReaderIterator.java
new file mode 100644
index 0000000..297a685
--- /dev/null
+++ b/src/java/org/broad/tribble/readers/AsciiLineReaderIterator.java
@@ -0,0 +1,99 @@
+package org.broad.tribble.readers;
+
+import net.sf.samtools.util.AbstractIterator;
+import net.sf.samtools.util.CloserUtil;
+import net.sf.samtools.util.Tuple;
+
+import java.io.Closeable;
+import java.io.IOException;
+
+/**
+ * A class that iterates over the lines and line positions in an {@link AsciiLineReader}.
+ * 
+ * This class is slower than other {@link LineIterator}s because it is driven by {@link AsciiLineReader}, but offers the benefit of 
+ * implementing {@link LocationAware}, which is required for indexing.  If you do not require {@link LocationAware}, consider using 
+ * {@link LineIteratorImpl} as an alternative to this class.
+ * 
+ * Note an important distinction between this class and its inner iterator: in the inner iterator, the position stored with
+ * a line is the position at the start of that line.  However, {@link #getPosition()} of the outer class must return the position at the
+ * end of the most-recently-returned line (or the start of the underlying {@link AsciiLineReader}, if no line has been read).  The latter
+ * rule is required to conform to the contract described by {@link org.broad.tribble.readers.LocationAware#getPosition()}.
+ * 
+ * @author mccowan
+ */
+public class AsciiLineReaderIterator implements LocationAware, LineIterator, Closeable {
+    private final AsciiLineReader asciiLineReader;
+    private final TupleIterator i;
+    private Tuple<String, Long> current = null;
+
+    public AsciiLineReaderIterator(final AsciiLineReader asciiLineReader) {
+        this.asciiLineReader = asciiLineReader;
+        this.i = new TupleIterator();
+    }
+
+    @Override
+    public void close() throws IOException {
+        CloserUtil.close(asciiLineReader);
+    }
+
+    @Override
+    public boolean hasNext() {
+        return i.hasNext();
+    }
+
+    @Override
+    public String next() {
+        current = i.next();
+        return current.a;
+    }
+
+    @Override
+    public void remove() {
+        i.remove();
+    }
+
+    /**
+     * Returns the byte position at the end of the most-recently-read line (a.k.a., the beginning of the next line) from {@link #next()} in
+     * the underlying {@link AsciiLineReader}.
+     */
+    @Override
+    public long getPosition() {
+        return i.getPosition();
+    }
+
+    @Override
+    public String peek() {
+        return i.peek().a;
+    }
+
+    /**
+     * This is stored internally since it iterates over {@link net.sf.samtools.util.Tuple}, not {@link String} (and the outer 
+     * class can't do both).
+     */
+    private class TupleIterator extends AbstractIterator<Tuple<String, Long>> implements LocationAware {
+        
+        public TupleIterator() {
+            hasNext(); // Initialize the iterator, which appears to be a requirement of the parent class.  TODO: Really?
+        }
+        
+        @Override
+        protected Tuple<String, Long> advance() {
+            final String line;
+            final long position = asciiLineReader.getPosition(); // A line's position is where it starts, so get it before reading the line.
+            try {
+                line = asciiLineReader.readLine();
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            }
+            return line == null ? null : new Tuple<String, Long>(line, position);
+        }
+
+        /** Returns the byte position at the beginning of the next line. */
+        @Override
+        public long getPosition() {
+            final Tuple<String, Long> peek = peek();
+            // Be careful: peek will be null at the end of the stream.
+            return peek != null ? peek.b : asciiLineReader.getPosition();
+        }
+    }
+}
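
A usage sketch of the new iterator (not part of the patch; the input path is hypothetical). It pairs each line with the position an index would record, per the class comment above:

    import org.broad.tribble.readers.AsciiLineReader;
    import org.broad.tribble.readers.AsciiLineReaderIterator;

    import java.io.FileInputStream;
    import java.io.IOException;

    public class LinePositionExample {
        public static void main(final String[] args) throws IOException {
            final AsciiLineReaderIterator it =
                    new AsciiLineReaderIterator(new AsciiLineReader(new FileInputStream(args[0])));
            while (it.hasNext()) {
                final String line = it.next();
                // getPosition() is the byte offset at the end of the line just returned,
                // i.e. where the next record begins: the value an index needs to store.
                System.out.println(it.getPosition() + "\t" + line);
            }
            it.close();
        }
    }
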
diff --git a/src/java/org/broad/tribble/readers/AsynchronousLineReader.java b/src/java/org/broad/tribble/readers/AsynchronousLineReader.java
new file mode 100644
index 0000000..b445be4
--- /dev/null
+++ b/src/java/org/broad/tribble/readers/AsynchronousLineReader.java
@@ -0,0 +1,97 @@
+package org.broad.tribble.readers;
+
+import net.sf.samtools.util.CloserUtil;
+import org.broad.tribble.TribbleException;
+
+import java.io.Reader;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * A LineReader implementation that delegates the work of reading and fetching lines to another thread.  The thread terminates when it
+ * encounters EOF in the underlying reader, or when this LineReader is closed.
+ *
+ * @author mccowan
+ */
+public class AsynchronousLineReader implements LineReader {
+    public static final int DEFAULT_NUMBER_LINES_BUFFER = 100;
+    
+    private final LongLineBufferedReader bufferedReader;
+    private final BlockingQueue<String> lineQueue;
+    private final Thread worker;
+    private volatile Exception workerException = null;
+    private volatile boolean eofReached = false;
+
+    public AsynchronousLineReader(final Reader reader, final int lineReadAheadSize) {
+        bufferedReader = new LongLineBufferedReader(reader);
+        lineQueue = new LinkedBlockingQueue<String>(lineReadAheadSize);
+        worker = new Thread(new Worker());
+        worker.start();
+    }
+
+    public AsynchronousLineReader(final Reader reader) {
+        this(reader, DEFAULT_NUMBER_LINES_BUFFER);
+    }
+
+    @Override
+    public String readLine() {
+        try {
+            // Continually poll until we get a result, unless the underlying reader is finished.
+            for (; ; ) {
+                checkAndThrowIfWorkerException();
+                final String pollResult = this.lineQueue.poll(100, TimeUnit.MILLISECONDS); // Not ideal for small files.
+                if (pollResult == null) {
+                    if (eofReached) {
+                        checkAndThrowIfWorkerException();
+                        return lineQueue.poll(); // If there is nothing left, returns null as expected.  Otherwise, grabs next element.
+                    }
+                } else {
+                    return pollResult;
+                }
+            }
+        } catch (InterruptedException e) {
+            throw new TribbleException("Line polling interrupted.", e);
+        }
+    }
+
+    private void checkAndThrowIfWorkerException() {
+        if (workerException != null) {
+            throw new TribbleException("Exception encountered in worker thread.", workerException);
+        }
+    }
+
+    @Override
+    public void close() {
+        this.worker.interrupt(); // Allow the worker to close gracefully.
+    } 
+
+    private class Worker implements Runnable {
+        @Override
+        public void run() {
+            try {
+                for (; ; ) {
+                    final String line = bufferedReader.readLine();
+                    if (line == null) {
+                        eofReached = true;
+                        break;
+                    } else {
+                        try {
+                            lineQueue.put(line);
+                        } catch (InterruptedException e) {
+                            /**
+                             * A thread interruption is not an exceptional state: it means a {@link AsynchronousLineReader#close()} has
+                             * been called, so shut down gracefully.
+                             */
+                            break;
+                        }
+                    }
+                }
+            } catch (Exception e) {
+                AsynchronousLineReader.this.workerException = e;
+            } finally {
+                CloserUtil.close(AsynchronousLineReader.this.bufferedReader);
+            }
+        }
+    }
+}
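
A usage sketch (not part of the patch; the file path is hypothetical). Reading is done by the worker thread; readLine() just drains the bounded queue:

    import org.broad.tribble.readers.AsynchronousLineReader;

    import java.io.FileReader;
    import java.io.IOException;

    public class AsyncReadExample {
        public static void main(final String[] args) throws IOException {
            final AsynchronousLineReader reader = new AsynchronousLineReader(new FileReader(args[0]));
            long count = 0;
            while (reader.readLine() != null) {
                count++;
            }
            System.out.println("lines read: " + count);
            // close() interrupts the worker, which then shuts down and closes the underlying reader.
            reader.close();
        }
    }
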
diff --git a/src/java/org/broad/tribble/readers/LineIterator.java b/src/java/org/broad/tribble/readers/LineIterator.java
new file mode 100644
index 0000000..bd8e3f0
--- /dev/null
+++ b/src/java/org/broad/tribble/readers/LineIterator.java
@@ -0,0 +1,12 @@
+package org.broad.tribble.readers;
+
+import java.util.Iterator;
+
+/**
+ * A very simple descriptor for line-iterables.
+ * @author mccowan
+ */
+public interface LineIterator extends Iterator<String> {
+    /** Peeks at the next line, without consuming any elements from the underlying iterator. */
+    public String peek();
+}
diff --git a/src/java/org/broad/tribble/readers/LineIteratorImpl.java b/src/java/org/broad/tribble/readers/LineIteratorImpl.java
new file mode 100644
index 0000000..4084e46
--- /dev/null
+++ b/src/java/org/broad/tribble/readers/LineIteratorImpl.java
@@ -0,0 +1,33 @@
+package org.broad.tribble.readers;
+
+import net.sf.samtools.util.AbstractIterator;
+import net.sf.samtools.util.CloserUtil;
+
+import java.io.Closeable;
+import java.io.IOException;
+
+/** A simple iterator over the elements in LineReader. */
+public class LineIteratorImpl extends AbstractIterator<String> implements LineIterator, Closeable {
+    private final LineReader lineReader;
+
+    /**
+     * @param lineReader The line reader whose elements are to be iterated over.
+     */
+    public LineIteratorImpl(final LineReader lineReader) {
+        this.lineReader = lineReader;
+    }
+
+    @Override
+    protected String advance() {
+        try {
+            return lineReader.readLine();
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    @Override
+    public void close() throws IOException {
+        CloserUtil.close(lineReader);
+    }
+}
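
A sketch combining the two classes above (not part of the patch; the path is hypothetical). LineIteratorImpl wraps any LineReader, and peek(), which it is assumed to inherit from net.sf.samtools.util.AbstractIterator in the same way TupleIterator does above, lets callers look at the next line without consuming it:

    import org.broad.tribble.readers.AsciiLineReader;
    import org.broad.tribble.readers.LineIteratorImpl;

    import java.io.FileInputStream;
    import java.io.IOException;

    public class PeekingLinesExample {
        public static void main(final String[] args) throws IOException {
            final LineIteratorImpl lines =
                    new LineIteratorImpl(new AsciiLineReader(new FileInputStream(args[0])));
            // Skip a leading '#'-prefixed header block without consuming the first data line.
            while (lines.hasNext() && lines.peek().startsWith("#")) {
                lines.next();
            }
            while (lines.hasNext()) {
                System.out.println(lines.next());
            }
            lines.close();
        }
    }
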
diff --git a/src/java/org/broad/tribble/readers/LineReaderUtil.java b/src/java/org/broad/tribble/readers/LineReaderUtil.java
new file mode 100644
index 0000000..bac947a
--- /dev/null
+++ b/src/java/org/broad/tribble/readers/LineReaderUtil.java
@@ -0,0 +1,61 @@
+package org.broad.tribble.readers;
+
+import net.sf.samtools.Defaults;
+import net.sf.samtools.util.CloserUtil;
+import org.broad.tribble.TribbleException;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+
+/**
+ * A collection of factories for generating {@link LineReader}s.
+ * 
+ * @author mccowan
+ */
+public class LineReaderUtil {
+    public enum LineReaderOption {
+        ASYNCHRONOUS, SYNCHRONOUS
+    }
+
+    /**
+     * Like {@link #fromBufferedStream(java.io.InputStream, org.broad.tribble.readers.LineReaderUtil.LineReaderOption)}, but the synchronicity
+     * option is determined by {@link net.sf.samtools.Defaults}: if asynchronous I/O is enabled, an asynchronous line reader will be
+     * returned.
+     */
+    public static LineReader fromBufferedStream(final InputStream stream) {
+        return fromBufferedStream(stream, Defaults.USE_ASYNC_IO ? LineReaderOption.ASYNCHRONOUS : LineReaderOption.SYNCHRONOUS);
+    }
+
+    /**
+     * Convenience factory for composing a LineReader from an InputStream.
+     */
+    public static LineReader fromBufferedStream(final InputStream bufferedStream, final LineReaderOption option) {
+        final InputStreamReader bufferedInputStreamReader = new InputStreamReader(bufferedStream);
+        switch (option) {
+            case ASYNCHRONOUS:
+                return new AsynchronousLineReader(bufferedInputStreamReader);
+            case SYNCHRONOUS:
+                return new LineReader() {
+                    final LongLineBufferedReader reader = new LongLineBufferedReader(bufferedInputStreamReader);
+
+                    @Override
+                    public String readLine() {
+                        try {
+                            return reader.readLine();
+                        } catch (IOException e) {
+                            throw new RuntimeException(e);
+                        }
+                    }
+
+                    @Override
+                    public void close() {
+                        CloserUtil.close(reader);
+                    }
+                };
+            default:
+                throw new TribbleException(String.format("Unrecognized LineReaderUtil option: %s.", option));
+        }
+    }
+
+}
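
A short sketch of the factory (not part of the patch; the path is hypothetical). Passing the option explicitly side-steps the Defaults.USE_ASYNC_IO check made by the one-argument overload:

    import org.broad.tribble.readers.LineReader;
    import org.broad.tribble.readers.LineReaderUtil;

    import java.io.BufferedInputStream;
    import java.io.FileInputStream;
    import java.io.IOException;

    public class LineReaderFactoryExample {
        public static void main(final String[] args) throws IOException {
            final LineReader reader = LineReaderUtil.fromBufferedStream(
                    new BufferedInputStream(new FileInputStream(args[0])),
                    LineReaderUtil.LineReaderOption.SYNCHRONOUS);
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line.length()); // do something with each line
            }
            reader.close();
        }
    }
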
diff --git a/src/java/org/broad/tribble/readers/LocationAware.java b/src/java/org/broad/tribble/readers/LocationAware.java
new file mode 100644
index 0000000..832b14f
--- /dev/null
+++ b/src/java/org/broad/tribble/readers/LocationAware.java
@@ -0,0 +1,24 @@
+package org.broad.tribble.readers;
+
+/**
+ * Describes API for getting current position in a stream, writer, or underlying file.
+ * 
+ * The expected functionality is simple: if you are an output stream / writer, and you've written 50 bytes to the stream, then 
+ * {@link #getPosition()} should return 50; if you are an input stream or file reader, and you've read 25 bytes from the object, then it 
+ * should return 25.
+ * 
+ * In the context of an iterator or any producer-like object that doesn't map directly to a byte stream, {@link #getPosition()} should
+ * return the position (in the underlying stream being read/written to) at the end of the most-recently read/written element.  For example,
+ * if you are reading lines from a file with a {@link AsciiLineReaderIterator}, calling {@link #getPosition()} should return the byte
+ * position of the end of the most recent line returned by {@link org.broad.tribble.readers.AsciiLineReaderIterator#next()}.
+ * 
+ * @author mccowan
+ */
+public interface LocationAware {
+    /**
+     * The current offset, in bytes, of this stream/writer/file.  Or, if this is an iterator/producer, the offset (in bytes) of the
+     * END of the most recently returned record (since a produced record corresponds to something that has been read already). See class
+     * javadoc for more.
+     */
+    public long getPosition();
+}
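
To make the contract concrete, here is a minimal hypothetical implementation (not part of the patch): a filter stream that reports the number of bytes consumed so far, matching the "25 bytes read, getPosition() returns 25" behaviour described above:

    import org.broad.tribble.readers.LocationAware;

    import java.io.FilterInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    // Hypothetical helper: counts bytes as they are read (mark/reset and skip ignored for brevity).
    public class ByteCountingInputStream extends FilterInputStream implements LocationAware {
        private long position = 0;

        public ByteCountingInputStream(final InputStream in) {
            super(in);
        }

        @Override
        public int read() throws IOException {
            final int b = super.read();
            if (b != -1) position++; // one byte consumed
            return b;
        }

        @Override
        public int read(final byte[] buf, final int off, final int len) throws IOException {
            final int n = super.read(buf, off, len);
            if (n > 0) position += n; // n bytes consumed
            return n;
        }

        @Override
        public long getPosition() {
            return position; // bytes read from this stream so far
        }
    }
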
diff --git a/src/java/org/broad/tribble/readers/LongLineBufferedReader.java b/src/java/org/broad/tribble/readers/LongLineBufferedReader.java
new file mode 100644
index 0000000..aced12f
--- /dev/null
+++ b/src/java/org/broad/tribble/readers/LongLineBufferedReader.java
@@ -0,0 +1,490 @@
+/*
+ * The MIT License
+ *
+ * Copyright (c) 2013 The Broad Institute
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ * THE SOFTWARE.
+ */
+
+package org.broad.tribble.readers;
+
+
+import java.io.IOException;
+import java.io.Reader;
+
+/**
+ * A variant of {@link java.io.BufferedReader} with improved performance reading files with long lines.
+ * 
+ * This class is almost identical to BufferedReader, but it retains a single line buffer for accumulating characters in a line, and allows
+ * its size to grow. By contrast, {@link java.io.BufferedReader} assumes each new line will be approximately 80 characters and its
+ * performance suffers when that is not the case.
+ * 
+ * Unlike {@link java.io.BufferedReader}, this class is not thread safe.
+ *
+ * @author mccowan
+ */
+public class LongLineBufferedReader extends Reader {
+
+    private Reader in;
+
+    private char cb[];
+    private int nChars, nextChar;
+
+    private static final int INVALIDATED = -2;
+    private static final int UNMARKED = -1;
+    private int markedChar = UNMARKED;
+    private int readAheadLimit = 0; /* Valid only when markedChar > 0 */
+
+    /**
+     * If the next character is a line feed, skip it
+     */
+    private boolean skipLF = false;
+
+    /**
+     * The skipLF flag when the mark was set
+     */
+    private boolean markedSkipLF = false;
+
+    private static final int DEFAULT_CHAR_BUFFER_SIZE = 8192;
+    private static final int DEFAULT_EXPECTED_LINE_LENGTH = 80;
+
+    private StringBuilder lineBuffer = new StringBuilder(DEFAULT_EXPECTED_LINE_LENGTH);
+
+    /**
+     * Creates a buffering character-input stream that uses an input buffer of
+     * the specified size.
+     *
+     * @param in A Reader
+     * @param sz Input-buffer size
+     * @throws IllegalArgumentException If sz is <= 0
+     */
+    public LongLineBufferedReader(Reader in, int sz) {
+        super(in);
+        if (sz <= 0)
+            throw new IllegalArgumentException("Buffer size <= 0");
+        this.in = in;
+        cb = new char[sz];
+        nextChar = nChars = 0;
+    }
+
+    /**
+     * Creates a buffering character-input stream that uses a default-sized
+     * input buffer.
+     *
+     * @param in A Reader
+     */
+    public LongLineBufferedReader(Reader in) {
+        this(in, DEFAULT_CHAR_BUFFER_SIZE);
+    }
+
+    /**
+     * Checks to make sure that the stream has not been closed
+     */
+    private void ensureOpen() throws IOException {
+        if (in == null)
+            throw new IOException("Stream closed");
+    }
+
+    /**
+     * Fills the input buffer, taking the mark into account if it is valid.
+     */
+    private void fill() throws IOException {
+        int dst;
+        if (markedChar <= UNMARKED) {
+        /* No mark */
+            dst = 0;
+        } else {
+        /* Marked */
+            int delta = nextChar - markedChar;
+            if (delta >= readAheadLimit) {
+        /* Gone past read-ahead limit: Invalidate mark */
+                markedChar = INVALIDATED;
+                readAheadLimit = 0;
+                dst = 0;
+            } else {
+                if (readAheadLimit <= cb.length) {
+		    /* Shuffle in the current buffer */
+                    System.arraycopy(cb, markedChar, cb, 0, delta);
+                    markedChar = 0;
+                    dst = delta;
+                } else {
+		    /* Reallocate buffer to accommodate read-ahead limit */
+                    char ncb[] = new char[readAheadLimit];
+                    System.arraycopy(cb, markedChar, ncb, 0, delta);
+                    cb = ncb;
+                    markedChar = 0;
+                    dst = delta;
+                }
+                nextChar = nChars = delta;
+            }
+        }
+
+        int n;
+        do {
+            n = in.read(cb, dst, cb.length - dst);
+        } while (n == 0);
+        if (n > 0) {
+            nChars = dst + n;
+            nextChar = dst;
+        }
+    }
+
+    /**
+     * Reads a single character.
+     *
+     * @return The character read, as an integer in the range
+     *         0 to 65535 (<tt>0x00-0xffff</tt>), or -1 if the
+     *         end of the stream has been reached
+     * @throws IOException If an I/O error occurs
+     */
+    public int read() throws IOException {
+        synchronized (lock) {
+            ensureOpen();
+            for (; ; ) {
+                if (nextChar >= nChars) {
+                    fill();
+                    if (nextChar >= nChars)
+                        return -1;
+                }
+                if (skipLF) {
+                    skipLF = false;
+                    if (cb[nextChar] == '\n') {
+                        nextChar++;
+                        continue;
+                    }
+                }
+                return cb[nextChar++];
+            }
+        }
+    }
+
+    /**
+     * Reads characters into a portion of an array, reading from the underlying
+     * stream if necessary.
+     */
+    private int read1(char[] cbuf, int off, int len) throws IOException {
+        if (nextChar >= nChars) {
+	    /* If the requested length is at least as large as the buffer, and
+	       if there is no mark/reset activity, and if line feeds are not
+	       being skipped, do not bother to copy the characters into the
+	       local buffer.  In this way buffered streams will cascade
+	       harmlessly. */
+            if (len >= cb.length && markedChar <= UNMARKED && !skipLF) {
+                return in.read(cbuf, off, len);
+            }
+            fill();
+        }
+        if (nextChar >= nChars) return -1;
+        if (skipLF) {
+            skipLF = false;
+            if (cb[nextChar] == '\n') {
+                nextChar++;
+                if (nextChar >= nChars)
+                    fill();
+                if (nextChar >= nChars)
+                    return -1;
+            }
+        }
+        int n = Math.min(len, nChars - nextChar);
+        System.arraycopy(cb, nextChar, cbuf, off, n);
+        nextChar += n;
+        return n;
+    }
+
+    /**
+     * Reads characters into a portion of an array.
+     * <p/>
+     * <p> This method implements the general contract of the corresponding
+     * <code>{@link Reader#read(char[], int, int) read}</code> method of the
+     * <code>{@link Reader}</code> class.  As an additional convenience, it
+     * attempts to read as many characters as possible by repeatedly invoking
+     * the <code>read</code> method of the underlying stream.  This iterated
+     * <code>read</code> continues until one of the following conditions becomes
+     * true: <ul>
+     * <p/>
+     * <li> The specified number of characters have been read,
+     * <p/>
+     * <li> The <code>read</code> method of the underlying stream returns
+     * <code>-1</code>, indicating end-of-file, or
+     * <p/>
+     * <li> The <code>ready</code> method of the underlying stream
+     * returns <code>false</code>, indicating that further input requests
+     * would block.
+     * <p/>
+     * </ul> If the first <code>read</code> on the underlying stream returns
+     * <code>-1</code> to indicate end-of-file then this method returns
+     * <code>-1</code>.  Otherwise this method returns the number of characters
+     * actually read.
+     * <p/>
+     * <p> Subclasses of this class are encouraged, but not required, to
+     * attempt to read as many characters as possible in the same fashion.
+     * <p/>
+     * <p> Ordinarily this method takes characters from this stream's character
+     * buffer, filling it from the underlying stream as necessary.  If,
+     * however, the buffer is empty, the mark is not valid, and the requested
+     * length is at least as large as the buffer, then this method will read
+     * characters directly from the underlying stream into the given array.
+     * Thus redundant <code>BufferedReaderTwo</code>s will not copy data
+     * unnecessarily.
+     *
+     * @param cbuf Destination buffer
+     * @param off  Offset at which to start storing characters
+     * @param len  Maximum number of characters to read
+     * @return The number of characters read, or -1 if the end of the
+     *         stream has been reached
+     * @throws IOException If an I/O error occurs
+     */
+    public int read(char cbuf[], int off, int len) throws IOException {
+        synchronized (lock) {
+            ensureOpen();
+            if ((off < 0) || (off > cbuf.length) || (len < 0) ||
+                    ((off + len) > cbuf.length) || ((off + len) < 0)) {
+                throw new IndexOutOfBoundsException();
+            } else if (len == 0) {
+                return 0;
+            }
+
+            int n = read1(cbuf, off, len);
+            if (n <= 0) return n;
+            while ((n < len) && in.ready()) {
+                int n1 = read1(cbuf, off + n, len - n);
+                if (n1 <= 0) break;
+                n += n1;
+            }
+            return n;
+        }
+    }
+
+    /**
+     * Reads a line of text.  A line is considered to be terminated by any one
+     * of a line feed ('\n'), a carriage return ('\r'), or a carriage return
+     * followed immediately by a linefeed.
+     *
+     * @param ignoreLF If true, the next '\n' will be skipped
+     * @return A String containing the contents of the line, not including
+     *         any line-termination characters, or null if the end of the
+     *         stream has been reached
+     * @throws IOException If an I/O error occurs
+     * @see java.io.LineNumberReader#readLine()
+     */
+    String readLine(boolean ignoreLF) throws IOException {
+        int startChar;
+        lineBuffer.setLength(0);
+        
+        synchronized (lock) {
+            ensureOpen();
+            boolean omitLF = ignoreLF || skipLF;
+
+            bufferLoop:
+            for (; ; ) {
+
+                if (nextChar >= nChars)
+                    fill();
+                if (nextChar >= nChars) { /* EOF */
+                    if (lineBuffer != null && lineBuffer.length() > 0)
+                        return lineBuffer.toString();
+                    else
+                        return null;
+                }
+                boolean eol = false;
+                char c = 0;
+                int i;
+
+                /* Skip a leftover '\n', if necessary */
+                if (omitLF && (cb[nextChar] == '\n'))
+                    nextChar++;
+                skipLF = false;
+                omitLF = false;
+
+                charLoop:
+                for (i = nextChar; i < nChars; i++) {
+                    c = cb[i];
+                    if ((c == '\n') || (c == '\r')) {
+                        eol = true;
+                        break charLoop;
+                    }
+                }
+
+                startChar = nextChar;
+                nextChar = i;
+
+                if (eol) {
+                    String str;
+                    lineBuffer.append(cb, startChar, i - startChar);
+                    str = lineBuffer.toString();
+                    nextChar++;
+                    if (c == '\r') {
+                        skipLF = true;
+                    }
+                    return str;
+                }
+
+                lineBuffer.append(cb, startChar, i - startChar);
+            }
+        }
+    }
+
+    /**
+     * Reads a line of text.  A line is considered to be terminated by any one
+     * of a line feed ('\n'), a carriage return ('\r'), or a carriage return
+     * followed immediately by a linefeed.
+     *
+     * @return A String containing the contents of the line, not including
+     *         any line-termination characters, or null if the end of the
+     *         stream has been reached
+     * @throws IOException If an I/O error occurs
+     */
+    public String readLine() throws IOException {
+        return readLine(false);
+    }
+
+    /**
+     * Skips characters.
+     *
+     * @param n The number of characters to skip
+     * @return The number of characters actually skipped
+     * @throws IllegalArgumentException If <code>n</code> is negative.
+     * @throws IOException              If an I/O error occurs
+     */
+    public long skip(long n) throws IOException {
+        if (n < 0L) {
+            throw new IllegalArgumentException("skip value is negative");
+        }
+        synchronized (lock) {
+            ensureOpen();
+            long r = n;
+            while (r > 0) {
+                if (nextChar >= nChars)
+                    fill();
+                if (nextChar >= nChars)	/* EOF */
+                    break;
+                if (skipLF) {
+                    skipLF = false;
+                    if (cb[nextChar] == '\n') {
+                        nextChar++;
+                    }
+                }
+                long d = nChars - nextChar;
+                if (r <= d) {
+                    nextChar += r;
+                    r = 0;
+                    break;
+                } else {
+                    r -= d;
+                    nextChar = nChars;
+                }
+            }
+            return n - r;
+        }
+    }
+
+    /**
+     * Tells whether this stream is ready to be read.  A buffered character
+     * stream is ready if the buffer is not empty, or if the underlying
+     * character stream is ready.
+     *
+     * @throws IOException If an I/O error occurs
+     */
+    public boolean ready() throws IOException {
+        synchronized (lock) {
+            ensureOpen();
+
+	    /* 
+	     * If newline needs to be skipped and the next char to be read
+	     * is a newline character, then just skip it right away.
+	     */
+            if (skipLF) {
+		/* Note that in.ready() will return true if and only if the next 
+		 * read on the stream will not block.
+		 */
+                if (nextChar >= nChars && in.ready()) {
+                    fill();
+                }
+                if (nextChar < nChars) {
+                    if (cb[nextChar] == '\n')
+                        nextChar++;
+                    skipLF = false;
+                }
+            }
+            return (nextChar < nChars) || in.ready();
+        }
+    }
+
+    /**
+     * Tells whether this stream supports the mark() operation, which it does.
+     */
+    public boolean markSupported() {
+        return true;
+    }
+
+    /**
+     * Marks the present position in the stream.  Subsequent calls to reset()
+     * will attempt to reposition the stream to this point.
+     *
+     * @param readAheadLimit Limit on the number of characters that may be
+     *                       read while still preserving the mark. An attempt
+     *                       to reset the stream after reading characters
+     *                       up to this limit or beyond may fail.
+     *                       A limit value larger than the size of the input
+     *                       buffer will cause a new buffer to be allocated
+     *                       whose size is no smaller than limit.
+     *                       Therefore large values should be used with care.
+     * @throws IllegalArgumentException If readAheadLimit is < 0
+     * @throws IOException              If an I/O error occurs
+     */
+    public void mark(int readAheadLimit) throws IOException {
+        if (readAheadLimit < 0) {
+            throw new IllegalArgumentException("Read-ahead limit < 0");
+        }
+        synchronized (lock) {
+            ensureOpen();
+            this.readAheadLimit = readAheadLimit;
+            markedChar = nextChar;
+            markedSkipLF = skipLF;
+        }
+    }
+
+    /**
+     * Resets the stream to the most recent mark.
+     *
+     * @throws IOException If the stream has never been marked,
+     *                     or if the mark has been invalidated
+     */
+    public void reset() throws IOException {
+        synchronized (lock) {
+            ensureOpen();
+            if (markedChar < 0)
+                throw new IOException((markedChar == INVALIDATED)
+                        ? "Mark invalid"
+                        : "Stream not marked");
+            nextChar = markedChar;
+            skipLF = markedSkipLF;
+        }
+    }
+
+    public void close() throws IOException {
+        synchronized (lock) {
+            if (in == null)
+                return;
+            in.close();
+            in = null;
+            cb = null;
+        }
+    }
+}
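
For reference, a minimal sketch of how callers typically drive a reader with the readLine()/mark()/reset() contract documented above. It uses java.io.BufferedReader purely for illustration, since it shares the same line-termination semantics ('\n', '\r', or '\r\n' all end a line and are stripped from the result); the demo class name is made up.

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.StringReader;

    public class ReadLineContractDemo {
        public static void main(final String[] args) throws IOException {
            final BufferedReader reader =
                    new BufferedReader(new StringReader("first\r\nsecond\nthird\r"));

            // Mark before consuming so we can rewind; the read-ahead limit must cover
            // everything read between mark() and reset().
            reader.mark(64);
            System.out.println(reader.readLine());   // prints "first"
            reader.reset();                          // rewind to the mark

            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);            // prints "first", "second", "third"
            }
            reader.close();
        }
    }
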
diff --git a/src/java/org/broad/tribble/readers/Positional.java b/src/java/org/broad/tribble/readers/Positional.java
index 9e47c19..0b5fb7d 100644
--- a/src/java/org/broad/tribble/readers/Positional.java
+++ b/src/java/org/broad/tribble/readers/Positional.java
@@ -26,22 +26,12 @@ package org.broad.tribble.readers;
 import java.io.IOException;
 
 /**
- * User: depristo
- * Date: Oct 7, 2010
- * Time: 10:53:20 AM
- *
- * Minimal interface for an object at support getting the current position in the stream / writer / file.
- *
- * The constrain here is simple.  If you are a output stream / writer, and you've written 50 bytes to the stream,
- * then getFilePointer() should return 50 bytes.  If you are an input stream or file reader, and you've read
- * 25 bytes from the object, then getFilePointer() should return 25.
+ * Minimal interface for an object that supports getting the current position in the stream / writer / file, as well as a handful of other
+ * reader-like features.
+ * 
+ * @author depristo
  */
-public interface Positional {
-    /**
-     * @return the current offset, in bytes, in the stream / writer / file.
-     */
-    public long getPosition();
-
+public interface Positional extends LocationAware {
     /**
      * Is the stream done?  Equivalent to ! hasNext() for an iterator?
      * @return true if the stream has reached EOF, false otherwise
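
To make the Positional/LocationAware position contract concrete, here is a hypothetical byte-counting stream (illustration only, not the library's implementation): after N bytes have been consumed through read(), getPosition() reports N.

    import java.io.FilterInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    class CountingInputStream extends FilterInputStream {
        private long position = 0;

        CountingInputStream(final InputStream in) {
            super(in);
        }

        @Override
        public int read() throws IOException {
            final int b = super.read();
            if (b != -1) position++;          // count only bytes actually delivered
            return b;
        }

        @Override
        public int read(final byte[] buf, final int off, final int len) throws IOException {
            final int n = super.read(buf, off, len);
            if (n > 0) position += n;
            return n;
        }

        /** Current offset, in bytes, from the start of the underlying stream. */
        public long getPosition() {
            return position;
        }
    }
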
diff --git a/src/java/org/broad/tribble/readers/PositionalBufferedStream.java b/src/java/org/broad/tribble/readers/PositionalBufferedStream.java
index d064310..743f19f 100644
--- a/src/java/org/broad/tribble/readers/PositionalBufferedStream.java
+++ b/src/java/org/broad/tribble/readers/PositionalBufferedStream.java
@@ -22,9 +22,9 @@ import org.broad.tribble.TribbleException;
 import java.io.*;
 
 /**
- * A wrapper around an {@code InputStream} which performs it's own buffering, and keeps track
- * of the position
- * @author Mark DePristo
+ * A wrapper around an {@code InputStream} which performs its own buffering, and keeps track of the position.
+ * 
+ * @author depristo
  */
 public final class PositionalBufferedStream extends InputStream implements Positional {
     final InputStream is;
@@ -71,8 +71,9 @@ public final class PositionalBufferedStream extends InputStream implements Posit
             while ( remaining > 0 ) {
                 // Try to Refill buffer if at the end of current buffer
                 if ( nChars == nextChar )
-                    if ( fill() < 0 )
+                    if ( fill() < 0 ) { // EOF
                         break;
+                    }
 
                 // we copy as many bytes from the buffer as possible, up to the number of need
                 final int nCharsToCopy = Math.min(nChars - nextChar, remaining);
@@ -86,7 +87,9 @@ public final class PositionalBufferedStream extends InputStream implements Posit
 
             // make sure we update our position tracker to reflect having advanced by nRead bytes
             position += nRead;
-            return nRead;
+            
+            /** Conform to {@link InputStream#read(byte[], int, int)} contract by returning -1 if EOF and no data was read. */
+            return nRead == 0 ? -1 : nRead;
         }
     }
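
The -1 return matters because standard copy loops treat -1 as the only end-of-stream signal, as the InputStream contract requires; a read that returned 0 at EOF would turn such loops into a busy spin. A minimal sketch of that kind of caller (the class and input are illustrative only):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.InputStream;

    public class CopyLoopDemo {
        static long copy(final InputStream in, final ByteArrayOutputStream out) throws IOException {
            final byte[] buffer = new byte[4096];
            long total = 0;
            int n;
            // Terminates only when read() reports -1 at end of stream.
            while ((n = in.read(buffer, 0, buffer.length)) != -1) {
                out.write(buffer, 0, n);
                total += n;
            }
            return total;
        }

        public static void main(final String[] args) throws IOException {
            final InputStream in = new ByteArrayInputStream("hello".getBytes("ISO-8859-1"));
            final ByteArrayOutputStream out = new ByteArrayOutputStream();
            System.out.println(copy(in, out) + " bytes copied");  // 5 bytes copied
        }
    }
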
 
diff --git a/src/java/org/broad/tribble/readers/TabixIteratorLineReader.java b/src/java/org/broad/tribble/readers/TabixIteratorLineReader.java
index 73cc5d7..cd7fb93 100644
--- a/src/java/org/broad/tribble/readers/TabixIteratorLineReader.java
+++ b/src/java/org/broad/tribble/readers/TabixIteratorLineReader.java
@@ -38,8 +38,12 @@ public class TabixIteratorLineReader implements LineReader {
         this.iterator = iterator;
     }
 
-    public String readLine() throws IOException {
-        return iterator != null ? iterator.next() : null;
+    public String readLine() {
+        try {
+            return iterator != null ? iterator.next() : null;
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        }
     }
 
     public void close() {
diff --git a/src/java/org/broad/tribble/util/IOUtil.java b/src/java/org/broad/tribble/util/IOUtil.java
deleted file mode 100644
index bd88356..0000000
--- a/src/java/org/broad/tribble/util/IOUtil.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * The MIT License
- *
- * Copyright (c) 2013 The Broad Institute
- *
- * Permission is hereby granted, free of charge, to any person obtaining a copy
- * of this software and associated documentation files (the "Software"), to deal
- * in the Software without restriction, including without limitation the rights
- * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- * copies of the Software, and to permit persons to whom the Software is
- * furnished to do so, subject to the following conditions:
- *
- * The above copyright notice and this permission notice shall be included in
- * all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
- * THE SOFTWARE.
- */
-package org.broad.tribble.util;
-
-import java.io.BufferedInputStream;
-import java.io.File;
-import java.io.InputStream;
-
-/**
- * Miscellaneous stateless static IO-oriented methods.
- */
-public class IOUtil {
-    /**
-     * Wrap the given stream in a BufferedInputStream, if it isn't already wrapper
-     * @param stream stream to be wrapped
-     * @return A BufferedInputStream wrapping stream, or stream itself if stream instanceof BufferedInputStream.
-     */
-    public static BufferedInputStream toBufferedStream(final InputStream stream) {
-        if (stream instanceof BufferedInputStream) {
-            return (BufferedInputStream) stream;
-        } else {
-            return new BufferedInputStream(stream);
-        }
-    }
-
-    /**
-     * Delete a list of files, and write a warning message if one could not be deleted.
-     * @param files Files to be deleted.
-     */
-    public static void deleteFiles(final File... files) {
-        for (final File f : files) {
-            if (!f.delete()) {
-                System.err.println("Could not delete file " + f);
-            }
-        }
-    }
-
-    public static void deleteFiles(final Iterable<File> files) {
-        for (final File f : files) {
-            if (!f.delete()) {
-                System.err.println("Could not delete file " + f);
-            }
-        }
-    }
-}
\ No newline at end of file
diff --git a/src/java/org/broadinstitute/variant/bcf2/BCF2Codec.java b/src/java/org/broadinstitute/variant/bcf2/BCF2Codec.java
index 098b2a5..63ca54c 100644
--- a/src/java/org/broadinstitute/variant/bcf2/BCF2Codec.java
+++ b/src/java/org/broadinstitute/variant/bcf2/BCF2Codec.java
@@ -27,15 +27,17 @@ package org.broadinstitute.variant.bcf2;
 
 import com.google.java.contract.Ensures;
 import com.google.java.contract.Requires;
+import org.broad.tribble.BinaryFeatureCodec;
 import org.broad.tribble.Feature;
-import org.broad.tribble.FeatureCodec;
 import org.broad.tribble.FeatureCodecHeader;
 import org.broad.tribble.TribbleException;
-import org.broad.tribble.readers.AsciiLineReader;
+import org.broad.tribble.readers.LineIterator;
+import org.broad.tribble.readers.LineIteratorImpl;
+import org.broad.tribble.readers.LineReaderUtil;
 import org.broad.tribble.readers.PositionalBufferedStream;
 import org.broadinstitute.variant.utils.GeneralUtils;
-import org.broadinstitute.variant.vcf.*;
 import org.broadinstitute.variant.variantcontext.*;
+import org.broadinstitute.variant.vcf.*;
 
 import java.io.ByteArrayInputStream;
 import java.io.FileInputStream;
@@ -49,7 +51,7 @@ import java.util.Map;
 /**
  * Decode BCF2 files
  */
-public final class BCF2Codec implements FeatureCodec<VariantContext> {
+public final class BCF2Codec extends BinaryFeatureCodec<VariantContext> {
     private final static int ALLOWED_MAJOR_VERSION = 2;
     private final static int MIN_MINOR_VERSION = 1;
 
@@ -161,9 +163,9 @@ public final class BCF2Codec implements FeatureCodec<VariantContext> {
                 error("Couldn't read all of the bytes specified in the header length = " + headerSizeInBytes);
 
             final PositionalBufferedStream bps = new PositionalBufferedStream(new ByteArrayInputStream(headerBytes));
-            final AsciiLineReader headerReader = new AsciiLineReader(bps);
+            final LineIterator lineIterator = new LineIteratorImpl(LineReaderUtil.fromBufferedStream(bps, LineReaderUtil.LineReaderOption.SYNCHRONOUS));
             final VCFCodec headerParser = new VCFCodec();
-            this.header = (VCFHeader)headerParser.readHeader(headerReader);
+            this.header = (VCFHeader) headerParser.readActualHeader(lineIterator);
             bps.close();
         } catch ( IOException e ) {
             throw new TribbleException("I/O error while reading BCF2 header");
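
A sketch of the same header-parsing path applied to an in-memory VCF header. The sample header text and demo class are made up for illustration; the LineReaderUtil / LineIteratorImpl / readActualHeader calls are the ones used by the patched BCF2Codec above.

    import org.broad.tribble.readers.LineIterator;
    import org.broad.tribble.readers.LineIteratorImpl;
    import org.broad.tribble.readers.LineReaderUtil;
    import org.broad.tribble.readers.PositionalBufferedStream;
    import org.broadinstitute.variant.vcf.VCFCodec;
    import org.broadinstitute.variant.vcf.VCFHeader;

    import java.io.ByteArrayInputStream;
    import java.io.IOException;

    public class ParseVcfHeaderDemo {
        public static void main(final String[] args) throws IOException {
            final String headerText =
                    "##fileformat=VCFv4.1\n" +
                    "#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n";

            final PositionalBufferedStream bps = new PositionalBufferedStream(
                    new ByteArrayInputStream(headerText.getBytes("ISO-8859-1")));
            final LineIterator lineIterator = new LineIteratorImpl(
                    LineReaderUtil.fromBufferedStream(bps, LineReaderUtil.LineReaderOption.SYNCHRONOUS));

            // readActualHeader consumes only the header lines from the iterator.
            final VCFHeader header = (VCFHeader) new VCFCodec().readActualHeader(lineIterator);
            System.out.println("Parsed " + header.getMetaDataInInputOrder().size() + " metadata line(s)");
            bps.close();
        }
    }
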
diff --git a/src/java/org/broadinstitute/variant/bcf2/BCF2Utils.java b/src/java/org/broadinstitute/variant/bcf2/BCF2Utils.java
index 0b16fd5..a0eba5a 100644
--- a/src/java/org/broadinstitute/variant/bcf2/BCF2Utils.java
+++ b/src/java/org/broadinstitute/variant/bcf2/BCF2Utils.java
@@ -84,7 +84,7 @@ public final class BCF2Utils {
 
         // set up the strings dictionary
         for ( VCFHeaderLine line : header.getMetaDataInInputOrder() ) {
-            if ( line instanceof VCFIDHeaderLine && ! (line instanceof VCFContigHeaderLine) ) {
+            if ( line.shouldBeAddedToDictionary() ) {
                 final VCFIDHeaderLine idLine = (VCFIDHeaderLine)line;
                 if ( ! seen.contains(idLine.getID())) {
                     dict.add(idLine.getID());
diff --git a/src/java/org/broadinstitute/variant/variantcontext/VariantContext.java b/src/java/org/broadinstitute/variant/variantcontext/VariantContext.java
index 1fce894..52eb957 100644
--- a/src/java/org/broadinstitute/variant/variantcontext/VariantContext.java
+++ b/src/java/org/broadinstitute/variant/variantcontext/VariantContext.java
@@ -522,7 +522,7 @@ public class VariantContext implements Feature { // to enable tribble integratio
      */
     public boolean isSimpleInsertion() {
         // can't just call !isSimpleDeletion() because of complex indels
-        return getType() == Type.INDEL && isBiallelic() && getReference().length() == 1;
+        return isSimpleIndel() && getReference().length() == 1;
     }
 
     /**
@@ -530,7 +530,19 @@ public class VariantContext implements Feature { // to enable tribble integratio
      */
     public boolean isSimpleDeletion() {
         // can't just call !isSimpleInsertion() because of complex indels
-        return getType() == Type.INDEL && isBiallelic() && getAlternateAllele(0).length() == 1;
+        return isSimpleIndel() && getAlternateAllele(0).length() == 1;
+    }
+
+    /**
+     * @return true if the alleles indicate a simple indel, false otherwise.
+     */
+    public boolean isSimpleIndel() {
+        return getType() == Type.INDEL                   // allelic lengths differ
+                && isBiallelic()                         // exactly 2 alleles
+                && getReference().length() > 0           // ref is not null or symbolic
+                && getAlternateAllele(0).length() > 0    // alt is not null or symbolic
+                && getReference().getBases()[0] == getAlternateAllele(0).getBases()[0]    // leading bases match for both alleles
+                && (getReference().length() == 1 || getAlternateAllele(0).length() == 1);
     }
 
     /**
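
To illustrate which allele pairs the new predicates accept, here is a hypothetical standalone re-implementation over plain REF/ALT strings (it stands in for the INDEL-type and biallelic checks and is not the library code itself):

    public class SimpleIndelDemo {
        static boolean isSimpleIndel(final String ref, final String alt) {
            return ref.length() != alt.length()              // allelic lengths differ (indel-like)
                    && ref.length() > 0 && alt.length() > 0  // neither null nor symbolic
                    && ref.charAt(0) == alt.charAt(0)        // leading bases match
                    && (ref.length() == 1 || alt.length() == 1);
        }

        public static void main(final String[] args) {
            System.out.println(isSimpleIndel("A", "ACG"));   // true  -> simple insertion
            System.out.println(isSimpleIndel("ACG", "A"));   // true  -> simple deletion
            System.out.println(isSimpleIndel("AC", "ATG"));  // false -> complex indel, neither length is 1
            System.out.println(isSimpleIndel("C", "T"));     // false -> SNP, lengths are equal
        }
    }
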
diff --git a/src/java/net/sf/picard/vcf/VariantContextComparator.java b/src/java/org/broadinstitute/variant/variantcontext/VariantContextComparator.java
similarity index 97%
rename from src/java/net/sf/picard/vcf/VariantContextComparator.java
rename to src/java/org/broadinstitute/variant/variantcontext/VariantContextComparator.java
index 6773e29..1f51e5c 100644
--- a/src/java/net/sf/picard/vcf/VariantContextComparator.java
+++ b/src/java/org/broadinstitute/variant/variantcontext/VariantContextComparator.java
@@ -1,6 +1,5 @@
-package net.sf.picard.vcf;
+package org.broadinstitute.variant.variantcontext;
 
-import org.broadinstitute.variant.variantcontext.VariantContext;
 import org.broadinstitute.variant.vcf.VCFContigHeaderLine;
 
 import java.util.Collection;
diff --git a/src/java/org/broadinstitute/variant/variantcontext/writer/IndexingVariantContextWriter.java b/src/java/org/broadinstitute/variant/variantcontext/writer/IndexingVariantContextWriter.java
index 6766706..800cb84 100644
--- a/src/java/org/broadinstitute/variant/variantcontext/writer/IndexingVariantContextWriter.java
+++ b/src/java/org/broadinstitute/variant/variantcontext/writer/IndexingVariantContextWriter.java
@@ -93,7 +93,10 @@ abstract class IndexingVariantContextWriter implements VariantContextWriter {
      */
     public void close() {
         try {
-            // try to close the index stream (keep it separate to help debugging efforts)
+            // close the underlying output stream
+            outputStream.close();
+
+            // close the index stream (keep it separate to help debugging efforts)
             if (indexer != null) {
                 final Index index = indexer.finalizeIndex(positionalOutputStream.getPosition());
                 setIndexSequenceDictionary(index, refDict);
@@ -101,8 +104,7 @@ abstract class IndexingVariantContextWriter implements VariantContextWriter {
                 idxStream.close();
             }
 
-            // close the underlying output stream as well
-            outputStream.close();
+
         } catch (IOException e) {
             throw new RuntimeException("Unable to close index for " + getStreamName(), e);
         }
diff --git a/src/java/org/broadinstitute/variant/variantcontext/writer/VCFWriter.java b/src/java/org/broadinstitute/variant/variantcontext/writer/VCFWriter.java
index e794e92..c91dc79 100644
--- a/src/java/org/broadinstitute/variant/variantcontext/writer/VCFWriter.java
+++ b/src/java/org/broadinstitute/variant/variantcontext/writer/VCFWriter.java
@@ -42,6 +42,11 @@ import java.util.*;
 class VCFWriter extends IndexingVariantContextWriter {
     private final static String VERSION_LINE = VCFHeader.METADATA_INDICATOR + VCFHeaderVersion.VCF4_1.getFormatString() + "=" + VCFHeaderVersion.VCF4_1.getVersionString();
 
+    /**
+     * The encoding used for VCF files.  ISO-8859-1
+     */
+    static final private Charset charset = Charset.forName("ISO-8859-1");
+    
     // should we write genotypes or just sites?
     final protected boolean doNotWriteGenotypes;
 
@@ -60,13 +65,8 @@ class VCFWriter extends IndexingVariantContextWriter {
      */
     private static final int INITIAL_BUFFER_SIZE = 1024 * 16;
     private final ByteArrayOutputStream lineBuffer = new ByteArrayOutputStream(INITIAL_BUFFER_SIZE);
-    private final Writer writer;
-
-    /**
-     * The encoding used for VCF files.  ISO-8859-1
-     */
-    final private Charset charset;
-
+    /** Wrapping in a {@link BufferedWriter} avoids frequent conversions with individual writes to OutputStreamWriter. */
+    private final Writer writer = new BufferedWriter(new OutputStreamWriter(lineBuffer, charset));
     private IntGenotypeFieldAccessors intGenotypeFieldAccessors = new IntGenotypeFieldAccessors();
 
     public VCFWriter(final File location, final OutputStream output, final SAMSequenceDictionary refDict,
@@ -75,8 +75,6 @@ class VCFWriter extends IndexingVariantContextWriter {
         super(writerName(location, output), location, output, refDict, enableOnTheFlyIndexing);
         this.doNotWriteGenotypes = doNotWriteGenotypes;
         this.allowMissingFieldsInHeader = allowMissingFieldsInHeader;
-        this.charset = Charset.forName("ISO-8859-1");
-        this.writer = new OutputStreamWriter(lineBuffer, charset);
     }
 
     // --------------------------------------------------------------------------------
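
A sketch of the line-buffer arrangement introduced here: appending fields through a BufferedWriter batches the many small writes before the ISO-8859-1 conversion, and the finished line is then read back from the byte buffer. The field values are made up for illustration.

    import java.io.BufferedWriter;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.OutputStreamWriter;
    import java.io.Writer;
    import java.nio.charset.Charset;

    public class LineBufferDemo {
        public static void main(final String[] args) throws IOException {
            final Charset charset = Charset.forName("ISO-8859-1");
            final ByteArrayOutputStream lineBuffer = new ByteArrayOutputStream(1024);
            final Writer writer = new BufferedWriter(new OutputStreamWriter(lineBuffer, charset));

            // Many small field-by-field writes, as a record encoder would perform.
            writer.write("20");
            writer.write('\t');
            writer.write("14370");
            writer.write('\t');
            writer.write("rs6054257");
            writer.write('\n');

            writer.flush();                              // push buffered chars into lineBuffer
            final byte[] encodedLine = lineBuffer.toByteArray();
            System.out.println(encodedLine.length + " bytes: " + new String(encodedLine, charset));
            lineBuffer.reset();                          // ready for the next record
        }
    }
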
diff --git a/src/java/org/broadinstitute/variant/variantcontext/writer/VariantContextWriterFactory.java b/src/java/org/broadinstitute/variant/variantcontext/writer/VariantContextWriterFactory.java
index 542c7e4..57e69d2 100644
--- a/src/java/org/broadinstitute/variant/variantcontext/writer/VariantContextWriterFactory.java
+++ b/src/java/org/broadinstitute/variant/variantcontext/writer/VariantContextWriterFactory.java
@@ -25,12 +25,10 @@
 
 package org.broadinstitute.variant.variantcontext.writer;
 
+import net.sf.samtools.Defaults;
 import net.sf.samtools.SAMSequenceDictionary;
 
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.OutputStream;
+import java.io.*;
 import java.util.EnumSet;
 
 /**
diff --git a/src/java/org/broadinstitute/variant/vcf/AbstractVCFCodec.java b/src/java/org/broadinstitute/variant/vcf/AbstractVCFCodec.java
index a4ccd05..b23aaf6 100644
--- a/src/java/org/broadinstitute/variant/vcf/AbstractVCFCodec.java
+++ b/src/java/org/broadinstitute/variant/vcf/AbstractVCFCodec.java
@@ -25,12 +25,11 @@
 
 package org.broadinstitute.variant.vcf;
 
+import net.sf.samtools.util.BlockCompressedInputStream;
 import org.broad.tribble.AsciiFeatureCodec;
 import org.broad.tribble.Feature;
 import org.broad.tribble.NameAwareCodec;
 import org.broad.tribble.TribbleException;
-import org.broad.tribble.readers.LineReader;
-import net.sf.samtools.util.BlockCompressedInputStream;
 import org.broad.tribble.util.ParsingUtils;
 import org.broadinstitute.variant.utils.GeneralUtils;
 import org.broadinstitute.variant.variantcontext.*;
@@ -116,12 +115,6 @@ public abstract class AbstractVCFCodec extends AsciiFeatureCodec<VariantContext>
     }
 
     /**
-     * @param reader the line reader to take header lines from
-     * @return the number of header lines
-     */
-    public abstract Object readHeader(LineReader reader);
-
-    /**
      * parse the filter string, first checking to see if we already have parsed it in a previous attempt
      * @param filterString the string to parse
      * @return a set of the filters applied
diff --git a/src/java/org/broadinstitute/variant/vcf/VCF3Codec.java b/src/java/org/broadinstitute/variant/vcf/VCF3Codec.java
index 5e2cfb2..6c468c9 100644
--- a/src/java/org/broadinstitute/variant/vcf/VCF3Codec.java
+++ b/src/java/org/broadinstitute/variant/vcf/VCF3Codec.java
@@ -26,10 +26,13 @@
 package org.broadinstitute.variant.vcf;
 
 import org.broad.tribble.TribbleException;
+import org.broad.tribble.readers.LineIterator;
 import org.broad.tribble.readers.LineReader;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
 
 
 /**
@@ -51,46 +54,41 @@ import java.util.*;
 public class VCF3Codec extends AbstractVCFCodec {
     public final static String VCF3_MAGIC_HEADER = "##fileformat=VCFv3";
 
-
     /**
      * @param reader the line reader to take header lines from
      * @return the number of header lines
      */
-    public Object readHeader(LineReader reader) {
-        List<String> headerStrings = new ArrayList<String>();
+    public Object readActualHeader(final LineIterator reader) {
+        final List<String> headerStrings = new ArrayList<String>();
 
-        String line;
         VCFHeaderVersion version = null;
-        try {
-            boolean foundHeaderVersion = false;
-            while ((line = reader.readLine()) != null) {
-                lineNo++;
-                if (line.startsWith(VCFHeader.METADATA_INDICATOR)) {
-                    String[] lineFields = line.substring(2).split("=");
-                    if (lineFields.length == 2 && VCFHeaderVersion.isFormatString(lineFields[0]) ) {
-                        if ( !VCFHeaderVersion.isVersionString(lineFields[1]) )
-                            throw new TribbleException.InvalidHeader(lineFields[1] + " is not a supported version");
-                        foundHeaderVersion = true;
-                        version = VCFHeaderVersion.toHeaderVersion(lineFields[1]);
-                        if ( version != VCFHeaderVersion.VCF3_3 && version != VCFHeaderVersion.VCF3_2 )
-                            throw new TribbleException.InvalidHeader("This codec is strictly for VCFv3 and does not support " + lineFields[1]);
-                    }
-                    headerStrings.add(line);
-                }
-                else if (line.startsWith(VCFHeader.HEADER_INDICATOR)) {
-                    if (!foundHeaderVersion) {
-                        throw new TribbleException.InvalidHeader("We never saw a header line specifying VCF version");
-                    }
-                    headerStrings.add(line);
-                    return super.parseHeaderFromLines(headerStrings, version);
+        boolean foundHeaderVersion = false;
+        while (reader.hasNext()) {
+            lineNo++;
+            final String line = reader.peek();
+            if (line.startsWith(VCFHeader.METADATA_INDICATOR)) {
+                final String[] lineFields = line.substring(2).split("=");
+                if (lineFields.length == 2 && VCFHeaderVersion.isFormatString(lineFields[0]) ) {
+                    if ( !VCFHeaderVersion.isVersionString(lineFields[1]) )
+                        throw new TribbleException.InvalidHeader(lineFields[1] + " is not a supported version");
+                    foundHeaderVersion = true;
+                    version = VCFHeaderVersion.toHeaderVersion(lineFields[1]);
+                    if ( version != VCFHeaderVersion.VCF3_3 && version != VCFHeaderVersion.VCF3_2 )
+                        throw new TribbleException.InvalidHeader("This codec is strictly for VCFv3 and does not support " + lineFields[1]);
                 }
-                else {
-                    throw new TribbleException.InvalidHeader("We never saw the required CHROM header line (starting with one #) for the input VCF file");
+                headerStrings.add(reader.next());
+            }
+            else if (line.startsWith(VCFHeader.HEADER_INDICATOR)) {
+                if (!foundHeaderVersion) {
+                    throw new TribbleException.InvalidHeader("We never saw a header line specifying VCF version");
                 }
-
+                headerStrings.add(reader.next());
+                return super.parseHeaderFromLines(headerStrings, version);
+            }
+            else {
+                throw new TribbleException.InvalidHeader("We never saw the required CHROM header line (starting with one #) for the input VCF file");
             }
-        } catch (IOException e) {
-            throw new RuntimeException("IO Exception ", e);
+
         }
         throw new TribbleException.InvalidHeader("We never saw the required CHROM header line (starting with one #) for the input VCF file");
     }
diff --git a/src/java/org/broadinstitute/variant/vcf/VCFCodec.java b/src/java/org/broadinstitute/variant/vcf/VCFCodec.java
index adb8b08..50a4f08 100644
--- a/src/java/org/broadinstitute/variant/vcf/VCFCodec.java
+++ b/src/java/org/broadinstitute/variant/vcf/VCFCodec.java
@@ -26,9 +26,8 @@
 package org.broadinstitute.variant.vcf;
 
 import org.broad.tribble.TribbleException;
-import org.broad.tribble.readers.LineReader;
+import org.broad.tribble.readers.LineIterator;
 
-import java.io.IOException;
 import java.util.*;
 
 /**
@@ -73,46 +72,45 @@ public class VCFCodec extends AbstractVCFCodec {
     public final static String VCF4_MAGIC_HEADER = "##fileformat=VCFv4";
 
     /**
-     * @param reader the line reader to take header lines from
-     * @return the number of header lines
+     * Reads all of the header from the provided iterator, but reads no further.
+     * @param lineIterator the line reader to take header lines from
+     * @return The parsed header
      */
-    public Object readHeader(LineReader reader) {
-        List<String> headerStrings = new ArrayList<String>();
+    @Override
+    public Object readActualHeader(final LineIterator lineIterator) {
+        final List<String> headerStrings = new ArrayList<String>();
 
         String line;
-        try {
-            boolean foundHeaderVersion = false;
-            while ((line = reader.readLine()) != null) {
-                lineNo++;
-                if (line.startsWith(VCFHeader.METADATA_INDICATOR)) {
-                    String[] lineFields = line.substring(2).split("=");
-                    if (lineFields.length == 2 && VCFHeaderVersion.isFormatString(lineFields[0]) ) {
-                        if ( !VCFHeaderVersion.isVersionString(lineFields[1]) )
-                            throw new TribbleException.InvalidHeader(lineFields[1] + " is not a supported version");
-                        foundHeaderVersion = true;
-                        version = VCFHeaderVersion.toHeaderVersion(lineFields[1]);
-                        if ( version == VCFHeaderVersion.VCF3_3 || version == VCFHeaderVersion.VCF3_2 )
-                            throw new TribbleException.InvalidHeader("This codec is strictly for VCFv4; please use the VCF3 codec for " + lineFields[1]);
-                        if ( version != VCFHeaderVersion.VCF4_0 && version != VCFHeaderVersion.VCF4_1 )
-                            throw new TribbleException.InvalidHeader("This codec is strictly for VCFv4 and does not support " + lineFields[1]);
-                    }
-                    headerStrings.add(line);
-                }
-                else if (line.startsWith(VCFHeader.HEADER_INDICATOR)) {
-                    if (!foundHeaderVersion) {
-                        throw new TribbleException.InvalidHeader("We never saw a header line specifying VCF version");
-                    }
-                    headerStrings.add(line);
-                    super.parseHeaderFromLines(headerStrings, version);
-                    return this.header;
+        boolean foundHeaderVersion = false;
+        while (lineIterator.hasNext()) {
+            line = lineIterator.peek();
+            lineNo++;
+            if (line.startsWith(VCFHeader.METADATA_INDICATOR)) {
+                final String[] lineFields = line.substring(2).split("=");
+                if (lineFields.length == 2 && VCFHeaderVersion.isFormatString(lineFields[0]) ) {
+                    if ( !VCFHeaderVersion.isVersionString(lineFields[1]) )
+                        throw new TribbleException.InvalidHeader(lineFields[1] + " is not a supported version");
+                    foundHeaderVersion = true;
+                    version = VCFHeaderVersion.toHeaderVersion(lineFields[1]);
+                    if ( version == VCFHeaderVersion.VCF3_3 || version == VCFHeaderVersion.VCF3_2 )
+                        throw new TribbleException.InvalidHeader("This codec is strictly for VCFv4; please use the VCF3 codec for " + lineFields[1]);
+                    if ( version != VCFHeaderVersion.VCF4_0 && version != VCFHeaderVersion.VCF4_1 )
+                        throw new TribbleException.InvalidHeader("This codec is strictly for VCFv4 and does not support " + lineFields[1]);
                 }
-                else {
-                    throw new TribbleException.InvalidHeader("We never saw the required CHROM header line (starting with one #) for the input VCF file");
+                headerStrings.add(lineIterator.next());
+            }
+            else if (line.startsWith(VCFHeader.HEADER_INDICATOR)) {
+                if (!foundHeaderVersion) {
+                    throw new TribbleException.InvalidHeader("We never saw a header line specifying VCF version");
                 }
-
+                headerStrings.add(lineIterator.next());
+                super.parseHeaderFromLines(headerStrings, version);
+                return this.header;
+            }
+            else {
+                throw new TribbleException.InvalidHeader("We never saw the required CHROM header line (starting with one #) for the input VCF file");
             }
-        } catch (IOException e) {
-            throw new RuntimeException("IO Exception ", e);
+
         }
         throw new TribbleException.InvalidHeader("We never saw the required CHROM header line (starting with one #) for the input VCF file");
     }
diff --git a/src/java/org/broadinstitute/variant/vcf/VCFConstants.java b/src/java/org/broadinstitute/variant/vcf/VCFConstants.java
index 41659d7..b45edea 100644
--- a/src/java/org/broadinstitute/variant/vcf/VCFConstants.java
+++ b/src/java/org/broadinstitute/variant/vcf/VCFConstants.java
@@ -93,6 +93,7 @@ public final class VCFConstants {
     public static final String ALT_HEADER_START = "##ALT";
     public static final String CONTIG_HEADER_KEY = "contig";
     public static final String CONTIG_HEADER_START = "##" + CONTIG_HEADER_KEY;
+    public static final String GATK_COMMAND_START = "##GATKCommandLine";
 
     // old indel alleles
     public static final char DELETION_ALLELE_v3 = 'D';
diff --git a/src/java/org/broadinstitute/variant/vcf/VCFContigHeaderLine.java b/src/java/org/broadinstitute/variant/vcf/VCFContigHeaderLine.java
index 5e6a73b..abb6a67 100644
--- a/src/java/org/broadinstitute/variant/vcf/VCFContigHeaderLine.java
+++ b/src/java/org/broadinstitute/variant/vcf/VCFContigHeaderLine.java
@@ -25,17 +25,20 @@
 
 package org.broadinstitute.variant.vcf;
 
+import net.sf.samtools.SAMSequenceRecord;
+import org.broad.tribble.TribbleException;
+
+import java.util.HashMap;
 import java.util.Map;
 
 /**
- * A special class representing a contig VCF header line.  Nows the true contig order and sorts on that
+ * A special class representing a contig VCF header line.  Knows the true contig order and sorts on that
  *
  * @author mdepristo
  */
 public class VCFContigHeaderLine extends VCFSimpleHeaderLine {
     final Integer contigIndex;
 
-
     /**
      * create a VCF contig header line
      *
@@ -43,25 +46,42 @@ public class VCFContigHeaderLine extends VCFSimpleHeaderLine {
      * @param version   the vcf header version
      * @param key            the key for this header line
      */
-    public VCFContigHeaderLine(final String line, final VCFHeaderVersion version, final String key, int contigIndex) {
+    public VCFContigHeaderLine(final String line, final VCFHeaderVersion version, final String key, final int contigIndex) {
         super(line, version, key, null);
+	    if (contigIndex < 0) throw new TribbleException("The contig index is less than zero.");
         this.contigIndex = contigIndex;
     }
 
-    public VCFContigHeaderLine(final Map<String, String> mapping, int contigIndex) {
-        super(VCFHeader.CONTIG_KEY, mapping, null);
+    public VCFContigHeaderLine(final Map<String, String> mapping, final int contigIndex) {
+        super(VCFHeader.CONTIG_KEY, mapping);
+	    if (contigIndex < 0) throw new TribbleException("The contig index is less than zero.");
         this.contigIndex = contigIndex;
     }
 
+	VCFContigHeaderLine(final SAMSequenceRecord sequenceRecord, final String assembly) {
+		super(sequenceRecord.getId(), new HashMap<String, String>() {{
+			// Now inside an init block in an anon HashMap subclass
+			this.put("ID", sequenceRecord.getSequenceName());
+			this.put("length", Integer.toString(sequenceRecord.getSequenceLength()));
+			if ( assembly != null ) this.put("assembly", assembly);
+		}});
+		this.contigIndex = sequenceRecord.getSequenceIndex();
+	}
+
     public Integer getContigIndex() {
         return contigIndex;
     }
 
+	public SAMSequenceRecord getSAMSequenceRecord() {
+		final String lengthString = this.getGenericFieldValue("length");
+		if (lengthString == null) throw new TribbleException("Contig " + this.getID() + " does not have a length field.");
+		final SAMSequenceRecord record = new SAMSequenceRecord(this.getID(), Integer.valueOf(lengthString));
+		record.setSequenceIndex(this.contigIndex);
+		return record;
+	}
+
     /**
-     * IT IS CRITIAL THAT THIS BE OVERRIDDEN SO WE SORT THE CONTIGS IN THE CORRECT ORDER
-     *
-     * @param other
-     * @return
+     * IT IS CRITICAL THAT THIS BE OVERRIDDEN SO WE SORT THE CONTIGS IN THE CORRECT ORDER
      */
     @Override
     public int compareTo(final Object other) {
diff --git a/src/java/org/broadinstitute/variant/vcf/VCFFileReader.java b/src/java/org/broadinstitute/variant/vcf/VCFFileReader.java
new file mode 100644
index 0000000..3f3eb3a
--- /dev/null
+++ b/src/java/org/broadinstitute/variant/vcf/VCFFileReader.java
@@ -0,0 +1,69 @@
+package org.broadinstitute.variant.vcf;
+
+import net.sf.samtools.SAMSequenceDictionary;
+import net.sf.samtools.util.CloseableIterator;
+import net.sf.samtools.util.CloserUtil;
+import org.broad.tribble.AbstractFeatureReader;
+import org.broad.tribble.FeatureReader;
+import org.broad.tribble.TribbleException;
+import org.broadinstitute.variant.bcf2.BCF2Codec;
+import org.broadinstitute.variant.variantcontext.VariantContext;
+
+import java.io.Closeable;
+import java.io.File;
+import java.io.IOException;
+
+public class VCFFileReader implements Closeable {
+
+	private final FeatureReader<VariantContext> reader;
+
+	/**
+	 * Returns true if the given file appears to be a BCF file.
+	 */
+	public static boolean isBCF(final File file) {
+		return file.getAbsolutePath().endsWith(".bcf");
+	}
+
+	/**
+	 * Returns the SAMSequenceDictionary from the provided VCF file.
+	 */
+	public static SAMSequenceDictionary getSequenceDictionary(final File file) {
+		final SAMSequenceDictionary dict = new VCFFileReader(file).getFileHeader().getSequenceDictionary();
+		CloserUtil.close(file);
+		return dict;
+	}
+
+	public VCFFileReader(final File file) {
+		this(file, true);
+	}
+
+	public VCFFileReader(final File file, boolean requireIndex) {
+		this.reader =
+				AbstractFeatureReader.getFeatureReader(
+						file.getAbsolutePath(),
+						isBCF(file)
+								? new BCF2Codec()
+								: new VCFCodec(),
+						requireIndex);
+	}
+
+	public VCFHeader getFileHeader() {
+		return (VCFHeader) reader.getHeader();
+	}
+
+	public CloseableIterator<VariantContext> iterator() {
+		try {
+			return reader.iterator();
+		} catch (final IOException ioe) {
+			throw new TribbleException("Could not create an iterator from a feature reader: " + ioe.getMessage(), ioe);
+		}
+	}
+
+	public void close() {
+		try {
+			this.reader.close();
+		} catch (final IOException ioe) {
+			throw new TribbleException("Could not close a variant context feature reader: " + ioe.getMessage(), ioe);
+		}
+	}
+}
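
A sketch of how the new VCFFileReader is intended to be used; the input path is a placeholder, and requireIndex is set to false so no .idx/.tbi index is needed.

    import net.sf.samtools.util.CloseableIterator;
    import org.broadinstitute.variant.variantcontext.VariantContext;
    import org.broadinstitute.variant.vcf.VCFFileReader;
    import org.broadinstitute.variant.vcf.VCFHeader;

    import java.io.File;

    public class VCFFileReaderDemo {
        public static void main(final String[] args) {
            final File vcf = new File(args.length > 0 ? args[0] : "example.vcf");  // placeholder input

            final VCFFileReader reader = new VCFFileReader(vcf, false);
            final VCFHeader header = reader.getFileHeader();
            System.out.println("Has sequence dictionary: " + (header.getSequenceDictionary() != null));

            long count = 0;
            final CloseableIterator<VariantContext> iterator = reader.iterator();
            while (iterator.hasNext()) {
                iterator.next();
                count++;
            }
            iterator.close();
            reader.close();
            System.out.println(count + " records in " + vcf);
        }
    }
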
diff --git a/src/java/org/broadinstitute/variant/vcf/VCFFilterHeaderLine.java b/src/java/org/broadinstitute/variant/vcf/VCFFilterHeaderLine.java
index c853033..72f70e4 100644
--- a/src/java/org/broadinstitute/variant/vcf/VCFFilterHeaderLine.java
+++ b/src/java/org/broadinstitute/variant/vcf/VCFFilterHeaderLine.java
@@ -60,4 +60,9 @@ public class VCFFilterHeaderLine extends VCFSimpleHeaderLine  {
     public VCFFilterHeaderLine(String line, VCFHeaderVersion version) {
         super(line, version, "FILTER", Arrays.asList("ID", "Description"));
     }
+
+    @Override
+    public boolean shouldBeAddedToDictionary() {
+        return true;
+    }
 }
\ No newline at end of file
diff --git a/src/java/org/broadinstitute/variant/vcf/VCFFormatHeaderLine.java b/src/java/org/broadinstitute/variant/vcf/VCFFormatHeaderLine.java
index 0e88e02..ea05753 100644
--- a/src/java/org/broadinstitute/variant/vcf/VCFFormatHeaderLine.java
+++ b/src/java/org/broadinstitute/variant/vcf/VCFFormatHeaderLine.java
@@ -54,4 +54,9 @@ public class VCFFormatHeaderLine extends VCFCompoundHeaderLine {
     boolean allowFlagValues() {
         return false;
     }
+
+    @Override
+    public boolean shouldBeAddedToDictionary() {
+        return true;
+    }
 }
\ No newline at end of file
diff --git a/src/java/org/broadinstitute/variant/vcf/VCFHeader.java b/src/java/org/broadinstitute/variant/vcf/VCFHeader.java
index d00eaea..59c5f27 100644
--- a/src/java/org/broadinstitute/variant/vcf/VCFHeader.java
+++ b/src/java/org/broadinstitute/variant/vcf/VCFHeader.java
@@ -25,11 +25,25 @@
 
 package org.broadinstitute.variant.vcf;
 
+import net.sf.samtools.SAMSequenceDictionary;
+import net.sf.samtools.SAMSequenceRecord;
 import org.broad.tribble.TribbleException;
 import org.broad.tribble.util.ParsingUtils;
 import org.broadinstitute.variant.utils.GeneralUtils;
-
-import java.util.*;
+import org.broadinstitute.variant.variantcontext.VariantContextComparator;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;
 
 
 /**
@@ -105,12 +119,11 @@ public class VCFHeader {
     }
 
     /**
-     * Creates a shallow copy of the meta data in VCF header toCopy
-     *
-     * @param toCopy
+     * Creates a deep copy of the given VCFHeader, duplicating all its metadata and
+     * sample names.
      */
     public VCFHeader(final VCFHeader toCopy) {
-        this(toCopy.mMetaData);
+        this(toCopy.mMetaData, toCopy.mGenotypeSampleNames);
     }
 
     /**
@@ -156,9 +169,9 @@ public class VCFHeader {
 
 
     /**
-     * Adds a header line to the header metadata.
-     *
-     * @param headerLine Line to add to the existing metadata component.
+     * Sets a header line in the header metadata. This is essentially a Set.add call, which means that
+     * equals() and hashCode() are used to determine whether an additional header line is added or an
+     * existing header line is replaced.
      */
     public void addMetaDataLine(final VCFHeaderLine headerLine) {
         mMetaData.add(headerLine);
@@ -172,6 +185,36 @@ public class VCFHeader {
         return Collections.unmodifiableList(contigMetaData);
     }
 
+	/**
+	 * Returns the contigs in this VCF file as a SAMSequenceDictionary. Returns null if contig lines are
+	 * not present in the header. Throws TribbleException if one or more contig lines do not have length
+	 * information.
+	 */
+	public SAMSequenceDictionary getSequenceDictionary() {
+		final List<VCFContigHeaderLine> contigHeaderLines = this.getContigLines();
+		if (contigHeaderLines.isEmpty()) return null;
+
+		final List<SAMSequenceRecord> sequenceRecords = new ArrayList<SAMSequenceRecord>(contigHeaderLines.size());
+		for (final VCFContigHeaderLine contigHeaderLine : contigHeaderLines) {
+			sequenceRecords.add(contigHeaderLine.getSAMSequenceRecord());
+		}
+
+		return new SAMSequenceDictionary(sequenceRecords);
+	}
+
+	/**
+	 * Completely replaces the contig records in this header with those in the given SAMSequenceDictionary.
+	 */
+	public void setSequenceDictionary(final SAMSequenceDictionary dictionary) {
+		this.contigMetaData.clear();
+		for (final SAMSequenceRecord record : dictionary.getSequences()) {
+			this.contigMetaData.add(new VCFContigHeaderLine(record, null));
+		}
+	}
+
+	public VariantContextComparator getVCFRecordComparator() {
+		return new VariantContextComparator(this.getContigLines());
+	}
 
     /**
      * @return all of the VCF FILTER lines in their original file order, or an empty list if none were present
@@ -252,7 +295,7 @@ public class VCFHeader {
      * @param line
      * @param <T>
      */
-    private final <T extends VCFCompoundHeaderLine> void addMetaDataMapBinding(final Map<String, T> map, T line) {
+    private <T extends VCFCompoundHeaderLine> void addMetaDataMapBinding(final Map<String, T> map, final T line) {
         final String key = line.getID();
         if ( map.containsKey(key) ) {
             if ( GeneralUtils.DEBUG_MODE_ENABLED ) {
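
A sketch of the new dictionary round trip: push a SAMSequenceDictionary into a header as ##contig lines, then read it back. It assumes the no-argument VCFHeader constructor, and the contig names and lengths are made up for illustration. The same contig lines also back getVCFRecordComparator(), which orders VariantContexts by contig index.

    import net.sf.samtools.SAMSequenceDictionary;
    import net.sf.samtools.SAMSequenceRecord;
    import org.broadinstitute.variant.vcf.VCFHeader;

    import java.util.Arrays;

    public class HeaderDictionaryDemo {
        public static void main(final String[] args) {
            final SAMSequenceDictionary dictionary = new SAMSequenceDictionary(Arrays.asList(
                    new SAMSequenceRecord("chr1", 249250621),
                    new SAMSequenceRecord("chr2", 243199373)));

            final VCFHeader header = new VCFHeader();       // assumed no-arg constructor
            header.setSequenceDictionary(dictionary);       // one ##contig line per record

            final SAMSequenceDictionary roundTripped = header.getSequenceDictionary();
            System.out.println(roundTripped.getSequences().size() + " contigs, first = "
                    + roundTripped.getSequence(0).getSequenceName());
        }
    }
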
diff --git a/src/java/org/broadinstitute/variant/vcf/VCFHeaderLine.java b/src/java/org/broadinstitute/variant/vcf/VCFHeaderLine.java
index 222ec19..a096cf1 100644
--- a/src/java/org/broadinstitute/variant/vcf/VCFHeaderLine.java
+++ b/src/java/org/broadinstitute/variant/vcf/VCFHeaderLine.java
@@ -44,7 +44,6 @@ public class VCFHeaderLine implements Comparable {
     private String mKey = null;
     private String mValue = null;
 
-
     /**
      * create a VCF header line
      *
@@ -80,6 +79,15 @@ public class VCFHeaderLine implements Comparable {
         return mValue;
     }
 
+    /**
+     * By default the header lines won't be added to the dictionary, unless this method is overridden (for example in FORMAT, INFO or FILTER header lines)
+     *
+     * @return false
+     */
+    public boolean shouldBeAddedToDictionary() {
+        return false;
+    }
+
     public String toString() {
         return toStringEncoding();
     }
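
Since BCF2Utils now keys on shouldBeAddedToDictionary() rather than an instanceof check, a header line type can opt into the BCF2 string dictionary by overriding it. A hypothetical line type, for illustration only: it also implements VCFIDHeaderLine because dictionary entries need an ID, and it assumes the VCFHeaderLine(String, String) constructor is accessible.

    import org.broadinstitute.variant.vcf.VCFHeaderLine;
    import org.broadinstitute.variant.vcf.VCFIDHeaderLine;

    public class MyDictionaryHeaderLine extends VCFHeaderLine implements VCFIDHeaderLine {
        private final String id;

        public MyDictionaryHeaderLine(final String key, final String id, final String value) {
            super(key, value);
            this.id = id;
        }

        public String getID() {
            return id;                 // the value BCF2Utils would place in the dictionary
        }

        @Override
        public boolean shouldBeAddedToDictionary() {
            return true;               // VCFHeaderLine's default is false
        }
    }
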
diff --git a/src/java/org/broadinstitute/variant/vcf/VCFInfoHeaderLine.java b/src/java/org/broadinstitute/variant/vcf/VCFInfoHeaderLine.java
index 8ecf522..5fc64ac 100644
--- a/src/java/org/broadinstitute/variant/vcf/VCFInfoHeaderLine.java
+++ b/src/java/org/broadinstitute/variant/vcf/VCFInfoHeaderLine.java
@@ -51,4 +51,9 @@ public class VCFInfoHeaderLine extends VCFCompoundHeaderLine {
     boolean allowFlagValues() {
         return true;
     }
+
+    @Override
+    public boolean shouldBeAddedToDictionary() {
+        return true;
+    }
 }
diff --git a/src/java/org/broadinstitute/variant/vcf/VCFSimpleHeaderLine.java b/src/java/org/broadinstitute/variant/vcf/VCFSimpleHeaderLine.java
index dde308c..2ee8261 100644
--- a/src/java/org/broadinstitute/variant/vcf/VCFSimpleHeaderLine.java
+++ b/src/java/org/broadinstitute/variant/vcf/VCFSimpleHeaderLine.java
@@ -62,15 +62,23 @@ public class VCFSimpleHeaderLine extends VCFHeaderLine implements VCFIDHeaderLin
      * @param expectedTagOrdering the tag ordering expected for this header line
      */
     public VCFSimpleHeaderLine(final String line, final VCFHeaderVersion version, final String key, final List<String> expectedTagOrdering) {
-        this(key, VCFHeaderLineTranslator.parseLine(version, line, expectedTagOrdering), expectedTagOrdering);
+        this(key, VCFHeaderLineTranslator.parseLine(version, line, expectedTagOrdering));
     }
 
-    public VCFSimpleHeaderLine(final String key, final Map<String, String> mapping, final List<String> expectedTagOrdering) {
+    public VCFSimpleHeaderLine(final String key, final Map<String, String> mapping) {
         super(key, "");
         name = mapping.get("ID");
         initialize(name, mapping);
     }
 
+	/**
+	 * Returns the String value associated with the given key. Returns null if there is no value. Key
+	 * must not be null.
+	 */
+	String getGenericFieldValue(final String key) {
+		return this.genericFields.get(key);
+	}
+
     protected void initialize(String name, Map<String, String> genericFields) {
         if ( name == null || genericFields == null || genericFields.isEmpty() )
             throw new IllegalArgumentException(String.format("Invalid VCFSimpleHeaderLine: key=%s name=%s", super.getKey(), name));
diff --git a/src/scripts/explain_sam_flags.py b/src/scripts/explain_sam_flags.py
index f1ebaab..cb6ad02 100755
--- a/src/scripts/explain_sam_flags.py
+++ b/src/scripts/explain_sam_flags.py
@@ -42,7 +42,8 @@ lstFlags = [
     ("second in pair", 0x80),
     ("not primary alignment", 0x100),
     ("read fails platform/vendor quality checks", 0x200),
-    ("read is PCR or optical duplicate", 0x400)
+    ("read is PCR or optical duplicate", 0x400),
+    ("supplementary alignment", 0x800)
     ]
     
 
diff --git a/src/scripts/net/sf/picard/analysis/insertSizeHistogram.R b/src/scripts/net/sf/picard/analysis/insertSizeHistogram.R
index 514e8ed..a2cdd32 100644
--- a/src/scripts/net/sf/picard/analysis/insertSizeHistogram.R
+++ b/src/scripts/net/sf/picard/analysis/insertSizeHistogram.R
@@ -1,53 +1,55 @@
-# script to generate histogram of insert sizes from metrics file
-# expecting 3 arguments:
-# first is the metrics file with the histogram info
-# second is the output file
-# third is a name for the plot
+## script to generate histogram of insert sizes from metrics file
+## expecting 3 arguments:
+## first is the metrics file with the histogram info
+## second is the output file
+## third is a name for the plot
 
-args <- commandArgs(trailing=T)
+args <- commandArgs(trailing=TRUE)
 metricsFile <- args[1]
 pdfFile <- args[2]
 bamName <- args[3]
 histoWidth <- ifelse(length(args) < 4, 0, as.numeric(args[4]))
 
-
 startFinder <- scan(metricsFile, what="character", sep="\n", quiet=TRUE, blank.lines.skip=FALSE)
 
 firstBlankLine=0
 
-for (i in 1:length(startFinder))
-{
-        if (startFinder[i] == "") {
-                if (firstBlankLine==0) {
-                        firstBlankLine=i+1
-                } else {
-                        secondBlankLine=i+1
-                        break
-                }
-        } 
+for (i in 1:length(startFinder)) {
+  if (startFinder[i] == "") {
+    if (firstBlankLine==0) {
+      firstBlankLine=i+1
+    } else {
+      secondBlankLine=i+1
+      break
+    }
+  }
 }
 
-histogram <- read.table(metricsFile, header=T, sep="\t", skip=secondBlankLine, comment.char="",quote='')
+histogram <- read.table(metricsFile, header=TRUE, sep="\t", skip=secondBlankLine, comment.char="", quote='', check.names=FALSE)
 
-# The histogram has a fr_count/rf_count/tandem_count for each metric "level"
-# This code parses out the distinct levels so we can output one graph per level
+## The histogram has a fr_count/rf_count/tandem_count for each metric "level"
+## This code parses out the distinct levels so we can output one graph per level
 headers <- sapply(sub(".fr_count","",names(histogram),fixed=TRUE), "[[" ,1)
 headers <- sapply(sub(".rf_count","",headers,fixed=TRUE), "[[" ,1)
 headers <- sapply(sub(".tandem_count","",headers,fixed=TRUE), "[[" ,1)
-levels <- c()
-for (i in 2:length(headers)) {
+
+## Duplicated header names cause this to barf. KT & Yossi report that this is going to be extremely difficult to
+## resolve and it's unlikely that anyone cares anyway. Trap this situation and avoid the PDF so it won't cause
+## the workflow to fail
+if (any(duplicated(headers))) {
+  print(paste("Not creating insert size PDF as there are duplicated header names:", headers[which(duplicated(headers))]))
+} else {
+  levels <- c()
+  for (i in 2:length(headers)) {
     if (!(headers[i] %in% levels)) {
-        levels[length(levels)+1] <- headers[i]
+      levels[length(levels)+1] <- headers[i]
     }
-}
-
-
-pdf(pdfFile)
-
+  }
 
-for (i in 1:length(levels)) {
+  pdf(pdfFile)
 
-    # Reconstitutes the histogram column headers for this level
+  for (i in 1:length(levels)) {
+    ## Reconstitutes the histogram column headers for this level
     fr <- paste(levels[i], "fr_count", sep=".")
     rf <- paste(levels[i], "rf_count", sep=".")
     tandem <- paste(levels[i], "tandem_count", sep=".")
@@ -60,39 +62,37 @@ for (i in 1:length(levels)) {
     xrange <- ifelse(histoWidth > 0, histoWidth, max(histogram$insert_size))
 
     plot(x=NULL, y=NULL,
-        type="n",
-        main=paste("Insert Size Histogram for", levels[i], "\nin file", bamName),
-        xlab="Insert Size",
-        ylab="Count",
-        xlim=range(0, xrange),
-        ylim=range(0, yrange))
+         type="n",
+         main=paste("Insert Size Histogram for", levels[i], "\nin file", bamName),
+         xlab="Insert Size",
+         ylab="Count",
+         xlim=range(0, xrange),
+         ylim=range(0, yrange))
 
     colors <- c()
     labels <- c()
 
-    if( fr %in% names(histogram) )
-    {
-        lines(histogram$insert_size, as.matrix(histogram[fr]),  type="h", col="red")
-        colors <- c(colors, "red")
-        labels <- c(labels, "FR")
+    if (fr %in% names(histogram) ) {
+      lines(histogram$insert_size, as.matrix(histogram[fr]),  type="h", col="red")
+      colors <- c(colors, "red")
+      labels <- c(labels, "FR")
     }
-    if( rf %in% names(histogram) )
-    {
-        lines(histogram$insert_size, as.matrix(histogram[rf]),  type="h", col="blue")
-        colors <- c(colors, "blue")
-        labels <- c(labels, "RF")
+    if (rf %in% names(histogram)) {
+      lines(histogram$insert_size, as.matrix(histogram[rf]),  type="h", col="blue")
+      colors <- c(colors, "blue")
+      labels <- c(labels, "RF")
     }
 
-    if( tandem %in% names(histogram) )
-    {
-        lines(histogram$insert_size, as.matrix(histogram[tandem]),  type="h", col="orange")
-        colors <- c(colors, "orange")
-        labels <- c(labels, "TANDEM")
+    if (tandem %in% names(histogram)) {
+      lines(histogram$insert_size, as.matrix(histogram[tandem]),  type="h", col="orange")
+      colors <- c(colors, "orange")
+      labels <- c(labels, "TANDEM")
     }
 
-    # Create the legend
-    legend("topright", labels, fill=colors, col=colors, cex=0.7);
+    ## Create the legend
+    legend("topright", labels, fill=colors, col=colors, cex=0.7)
+  }
 
+  dev.off()
 }
-dev.off()
 
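For readers unfamiliar with the metrics-file layout both R scripts rely on: the histogram is a tab-delimited table that begins after the second blank line of the file, which is exactly what the startFinder/secondBlankLine scan above locates before read.table is called with skip=secondBlankLine. The following is a minimal Java sketch of that same scan, using only the standard library; the input path is a placeholder and a well-formed metrics file is assumed.

    import java.io.BufferedReader;
    import java.io.FileReader;
    import java.io.IOException;

    public class HistogramSectionScanner {
        public static void main(final String[] args) throws IOException {
            final BufferedReader in = new BufferedReader(new FileReader(args[0])); // path to a metrics file
            int blanks = 0;
            String line;
            // Find the second blank line, exactly as the R loop above does.
            while (blanks < 2 && (line = in.readLine()) != null) {
                if (line.isEmpty()) blanks++;
            }
            // Skip one more line so that, like skip=secondBlankLine above, blank-index + 1 lines are skipped.
            in.readLine();
            // The next line is then the histogram header row (insert_size plus the per-level count columns).
            final String[] header = in.readLine().split("\t");
            int rows = 0;
            while ((line = in.readLine()) != null && !line.isEmpty()) {
                rows++;
            }
            System.out.println(header.length + " histogram columns, " + rows + " data rows");
            in.close();
        }
    }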
diff --git a/src/scripts/net/sf/picard/analysis/rnaSeqCoverage.R b/src/scripts/net/sf/picard/analysis/rnaSeqCoverage.R
index d16c911..db64b0f 100644
--- a/src/scripts/net/sf/picard/analysis/rnaSeqCoverage.R
+++ b/src/scripts/net/sf/picard/analysis/rnaSeqCoverage.R
@@ -25,16 +25,24 @@ for (i in 1:length(startFinder)) {
         }
 }
 
-data <- read.table(metricsFile, header=T, sep="\t", skip=secondBlankLine)
+data <- read.table(metricsFile, header=T, sep="\t", skip=secondBlankLine, check.names=FALSE)
 pdf(outputFile)
 
 # The histogram has a normalized_position and normalized_coverage column for each metric "level"
 # This code parses out the distinct levels so we can output one graph per level
 headers <- sapply(sub(".normalized_coverage","",names(data),fixed=TRUE), "[[" ,1)
-levels <- c()
-for (i in 2:length(headers)) {
-    if (!(headers[i] %in% levels)) {
-        levels[length(levels)+1] <- headers[i]
+
+## Duplicated header names cause this to barf. KT & Yossi report that this is going to be extremely difficult to
+## resolve and it's unlikely that anyone cares anyways. Trap this situation and avoid the PDF so it won't cause
+## the workflow to fail
+if (any(duplicated(headers))) {
+  print(paste("Not creating the coverage PDF as there are duplicated header names:", headers[which(duplicated(headers))]))
+} else {
+    levels <- c()
+    for (i in 2:length(headers)) {
+        if (!(headers[i] %in% levels)) {
+            levels[length(levels)+1] <- headers[i]
+        }
     }
 }
 
diff --git a/src/scripts/release_picard.sh b/src/scripts/release_picard.sh
index 70b7601..a77c9a0 100755
--- a/src/scripts/release_picard.sh
+++ b/src/scripts/release_picard.sh
@@ -65,6 +65,12 @@ then echo "EDITOR environment variable must be set." >&2
        exit 1
 fi
 
+# Require actual Java 1.6.  This is not necessary for compiling, because 1.7 can compile with -target 1.6,
+# but this is necessary in order to force unit tests to run with 1.6.
+(echo $JAVA_HOME | fgrep -q 1.6 ) || { echo "JAVA_HOME $JAVA_HOME is not 1.6" ; exit 1; }
+java_version=`java -version 2>&1 | fgrep -i version`
+(echo $java_version | fgrep -q 1.6. ) || { echo "java -version: $java_version is not 1.6"; exit 1; }
+
 SVNROOT=svn+ssh://$USERNAME@svn.code.sf.net/p/picard/code
 
 RELEASE_ID=$1
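The guard added above enforces a 1.6 JVM from the shell side so the unit tests run under the same runtime that is targeted. Purely as an illustration of the same idea, and not part of this patch, the running JVM version could also be asserted from Java itself; RequiredJavaVersion below is a hypothetical helper.

    public class RequiredJavaVersion {
        /** Throws if the current JVM is not a 1.6 runtime, mirroring the shell guard above. */
        public static void assertJava16() {
            final String version = System.getProperty("java.version"); // e.g. "1.6.0_45"
            if (!version.startsWith("1.6.")) {
                throw new IllegalStateException("Unit tests must run under Java 1.6, but found " + version);
            }
        }

        public static void main(final String[] args) {
            assertJava16();
            System.out.println("Running under " + System.getProperty("java.version"));
        }
    }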
diff --git a/src/tests/java/net/sf/picard/analysis/MultiLevelCollectorTest.java b/src/tests/java/net/sf/picard/analysis/MultiLevelCollectorTest.java
index e2a94ef..bd616ab 100644
--- a/src/tests/java/net/sf/picard/analysis/MultiLevelCollectorTest.java
+++ b/src/tests/java/net/sf/picard/analysis/MultiLevelCollectorTest.java
@@ -6,7 +6,7 @@ import net.sf.picard.reference.ReferenceSequence;
 import net.sf.samtools.SAMFileReader;
 import net.sf.samtools.SAMReadGroupRecord;
 import net.sf.samtools.SAMRecord;
-import static net.sf.picard.util.CollectionUtil.*;
+import static net.sf.samtools.util.CollectionUtil.*;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
diff --git a/src/tests/java/net/sf/picard/cmdline/CommandLineParserTest.java b/src/tests/java/net/sf/picard/cmdline/CommandLineParserTest.java
index 56add7d..66dda4e 100644
--- a/src/tests/java/net/sf/picard/cmdline/CommandLineParserTest.java
+++ b/src/tests/java/net/sf/picard/cmdline/CommandLineParserTest.java
@@ -23,7 +23,7 @@
  */
 package net.sf.picard.cmdline;
 
-import net.sf.picard.util.CollectionUtil;
+import net.sf.samtools.util.CollectionUtil;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
diff --git a/src/tests/java/net/sf/picard/illumina/CheckIlluminaDirectoryTest.java b/src/tests/java/net/sf/picard/illumina/CheckIlluminaDirectoryTest.java
index 4235cd7..0975f7d 100644
--- a/src/tests/java/net/sf/picard/illumina/CheckIlluminaDirectoryTest.java
+++ b/src/tests/java/net/sf/picard/illumina/CheckIlluminaDirectoryTest.java
@@ -6,7 +6,7 @@ import java.nio.MappedByteBuffer;
 import java.nio.channels.FileChannel;
 import java.util.*;
 
-import static net.sf.picard.util.CollectionUtil.makeList;
+import static net.sf.samtools.util.CollectionUtil.makeList;
 import static net.sf.picard.illumina.parser.IlluminaFileUtil.SupportedIlluminaFormat;
 import static net.sf.picard.illumina.parser.IlluminaFileUtil.SupportedIlluminaFormat.*;
 import static net.sf.picard.illumina.parser.IlluminaDataType.*;
diff --git a/src/tests/java/net/sf/picard/illumina/ExtractIlluminaBarcodesTest.java b/src/tests/java/net/sf/picard/illumina/ExtractIlluminaBarcodesTest.java
index eacdba2..d1bdcbd 100644
--- a/src/tests/java/net/sf/picard/illumina/ExtractIlluminaBarcodesTest.java
+++ b/src/tests/java/net/sf/picard/illumina/ExtractIlluminaBarcodesTest.java
@@ -23,6 +23,7 @@
  */
 package net.sf.picard.illumina;
 
+import net.sf.picard.illumina.parser.readers.BclQualityEvaluationStrategy;
 import net.sf.picard.util.BasicInputParser;
 import org.testng.annotations.BeforeTest;
 import org.testng.annotations.AfterTest;
@@ -226,7 +227,8 @@ public class ExtractIlluminaBarcodesTest {
 
         // Tack on test of barcode-informed Illumina Basecall parsing
         final ReadStructure rs = new ReadStructure("36T6B");
-        final IlluminaDataProviderFactory factory = new IlluminaDataProviderFactory(basecallsDir, lane, rs,
+        final IlluminaDataProviderFactory factory = new IlluminaDataProviderFactory(basecallsDir, lane, rs, 
+                new BclQualityEvaluationStrategy(BclQualityEvaluationStrategy.ILLUMINA_ALLEGED_MINIMUM_QUALITY),
                 IlluminaDataType.BaseCalls, IlluminaDataType.QualityScores, IlluminaDataType.Barcodes);
         testParsing(factory, rs, metricACAGTG, barcodePosition);
     }
diff --git a/src/tests/java/net/sf/picard/illumina/ReadStructureTest.java b/src/tests/java/net/sf/picard/illumina/ReadStructureTest.java
index c964cec..4bd5cb5 100644
--- a/src/tests/java/net/sf/picard/illumina/ReadStructureTest.java
+++ b/src/tests/java/net/sf/picard/illumina/ReadStructureTest.java
@@ -1,11 +1,8 @@
 package net.sf.picard.illumina;
 
-import net.sf.picard.PicardException;
-import net.sf.picard.illumina.parser.Range;
 import net.sf.picard.illumina.parser.ReadStructure;
 import net.sf.picard.illumina.parser.ReadDescriptor;
 import net.sf.picard.illumina.parser.ReadType;
-import net.sf.samtools.util.CoordMath;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
@@ -13,7 +10,7 @@ import org.testng.annotations.Test;
 import java.util.ArrayList;
 import java.util.List;
 import static net.sf.picard.illumina.parser.ReadType.*;
-import static net.sf.picard.util.CollectionUtil.*;
+import static net.sf.samtools.util.CollectionUtil.*;
 
 public class ReadStructureTest {
 
diff --git a/src/tests/java/net/sf/picard/illumina/parser/BclParserTest.java b/src/tests/java/net/sf/picard/illumina/parser/BclParserTest.java
index 64de9be..77d594b 100644
--- a/src/tests/java/net/sf/picard/illumina/parser/BclParserTest.java
+++ b/src/tests/java/net/sf/picard/illumina/parser/BclParserTest.java
@@ -7,6 +7,7 @@ import static net.sf.picard.illumina.parser.BinTdUtil.T;
 import static net.sf.picard.illumina.parser.BinTdUtil.P;
 
 import net.sf.picard.PicardException;
+import net.sf.picard.illumina.parser.readers.BclQualityEvaluationStrategy;
 import net.sf.picard.io.IoUtil;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
@@ -83,7 +84,7 @@ public class BclParserTest {
         final ReadStructure rs = new ReadStructure(readStructure);
         final OutputMapping outputMapping = new OutputMapping(rs);
 
-        final BclParser bclParser = new BclParser(dir, 3, makeCycleIlluminaFileMap(dir, tiles, outputMapping.getOutputCycles()), outputMapping);
+        final BclParser bclParser = new BclParser(dir, 3, makeCycleIlluminaFileMap(dir, tiles, outputMapping.getOutputCycles()), outputMapping, new BclQualityEvaluationStrategy(BclQualityEvaluationStrategy.ILLUMINA_ALLEGED_MINIMUM_QUALITY));
         final Map<Integer, ClusterData> testData = BinTdUtil.clusterData(LANE, Arrays.asList(boxArr(tiles)), readStructure, DATA_TYPES);
 
         int count = 0;
@@ -401,6 +402,6 @@ public class BclParserTest {
 
 class TestBclParser extends BclParser{
     public TestBclParser() {
-        super(null, 1, null, new OutputMapping(new ReadStructure("1T")));
+        super(null, 1, null, new OutputMapping(new ReadStructure("1T")), new BclQualityEvaluationStrategy(BclQualityEvaluationStrategy.ILLUMINA_ALLEGED_MINIMUM_QUALITY));
     }
 }
diff --git a/src/tests/java/net/sf/picard/illumina/parser/BinTdUtil.java b/src/tests/java/net/sf/picard/illumina/parser/BinTdUtil.java
index f66e113..20090ac 100644
--- a/src/tests/java/net/sf/picard/illumina/parser/BinTdUtil.java
+++ b/src/tests/java/net/sf/picard/illumina/parser/BinTdUtil.java
@@ -1,6 +1,6 @@
 package net.sf.picard.illumina.parser;
 
-import static net.sf.picard.util.CollectionUtil.makeList;
+import static net.sf.samtools.util.CollectionUtil.makeList;
 
 import java.io.File;
 import java.util.*;
diff --git a/src/tests/java/net/sf/picard/illumina/parser/CycleIlluminaFileMapTest.java b/src/tests/java/net/sf/picard/illumina/parser/CycleIlluminaFileMapTest.java
index 966a234..8c5b887 100644
--- a/src/tests/java/net/sf/picard/illumina/parser/CycleIlluminaFileMapTest.java
+++ b/src/tests/java/net/sf/picard/illumina/parser/CycleIlluminaFileMapTest.java
@@ -33,7 +33,7 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
-import static net.sf.picard.util.CollectionUtil.makeList;
+import static net.sf.samtools.util.CollectionUtil.makeList;
 
 /**
 * @author jburke at broadinstitute.org
diff --git a/src/tests/java/net/sf/picard/illumina/parser/IlluminaDataProviderFactoryTest.java b/src/tests/java/net/sf/picard/illumina/parser/IlluminaDataProviderFactoryTest.java
index c1e0316..e317d96 100644
--- a/src/tests/java/net/sf/picard/illumina/parser/IlluminaDataProviderFactoryTest.java
+++ b/src/tests/java/net/sf/picard/illumina/parser/IlluminaDataProviderFactoryTest.java
@@ -1,6 +1,8 @@
 package net.sf.picard.illumina.parser;
 
 
+import net.sf.picard.illumina.parser.readers.BclQualityEvaluationStrategy;
+
 import java.io.File;
 import java.util.List;
 
@@ -8,7 +10,7 @@ public class IlluminaDataProviderFactoryTest {
 
     class TestFactory extends IlluminaDataProviderFactory{
         public TestFactory(final File basecallDirectory, final int lane, final ReadStructure readStructure, final IlluminaDataType... dataTypes) {
-            super(basecallDirectory, lane, readStructure, dataTypes);
+            super(basecallDirectory, lane, readStructure, new BclQualityEvaluationStrategy(BclQualityEvaluationStrategy.ILLUMINA_ALLEGED_MINIMUM_QUALITY), dataTypes);
         }
 
     }
diff --git a/src/tests/java/net/sf/picard/illumina/parser/IlluminaDataProviderTest.java b/src/tests/java/net/sf/picard/illumina/parser/IlluminaDataProviderTest.java
index 20a92c5..15c1c0b 100644
--- a/src/tests/java/net/sf/picard/illumina/parser/IlluminaDataProviderTest.java
+++ b/src/tests/java/net/sf/picard/illumina/parser/IlluminaDataProviderTest.java
@@ -24,7 +24,7 @@
 package net.sf.picard.illumina.parser;
 
 import net.sf.picard.PicardException;
-import net.sf.picard.util.SolexaQualityConverter;
+import net.sf.picard.illumina.parser.readers.BclQualityEvaluationStrategy;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
@@ -35,7 +35,7 @@ import java.util.List;
 import java.util.Map;
 import static net.sf.picard.illumina.parser.QSeqTdUtil.*;
 import static net.sf.picard.illumina.parser.QSeqTdUtil.getTiledReadData;
-import static net.sf.picard.util.CollectionUtil.*;
+import static net.sf.samtools.util.CollectionUtil.*;
 
 /**
 * @author jburke at broadinstitute.org
@@ -43,6 +43,7 @@ import static net.sf.picard.util.CollectionUtil.*;
 
 public class IlluminaDataProviderTest {
 
+    public static final BclQualityEvaluationStrategy bclQualityEvaluationStrategy = new BclQualityEvaluationStrategy(BclQualityEvaluationStrategy.ILLUMINA_ALLEGED_MINIMUM_QUALITY);
     public static final File PARSING_TEST_BASECALLS_DIR = new File("testdata/net/sf/picard/illumina/IlluminaBarcodeParsingTest/BaseCalls");
     public static final File TEST_DATA_LOCATION = new File("testdata/net/sf/picard/illumina/IlluminaTests/BasecallsDir");
     public static final File BINARY_TD_LOCATION = new File("testdata/net/sf/picard/illumina/CompleteIlluminaDir/Intensities/BaseCalls");
@@ -61,7 +62,7 @@ public class IlluminaDataProviderTest {
             throws Exception {
 
         final IlluminaDataType [] dts = getDataTypes(extraDataTypes);
-        final IlluminaDataProviderFactory factory = new IlluminaDataProviderFactory(basecallsDirectory, lane, new ReadStructure(illuminaConfigStr), dts);
+        final IlluminaDataProviderFactory factory = new IlluminaDataProviderFactory(basecallsDirectory, lane, new ReadStructure(illuminaConfigStr), bclQualityEvaluationStrategy, dts);
         final IlluminaDataProvider dataProvider   = factory.makeDataProvider();
 
         runTest(testName, size, readNoToClusterData, seekAfterFirstRead, seekTestDataReadOffset, dataProvider);
@@ -237,7 +238,7 @@ public class IlluminaDataProviderTest {
 
     @Test
     public void barcodeParsingTest() {
-        runBarcodeParsingTest(new IlluminaDataProviderFactory(PARSING_TEST_BASECALLS_DIR, 6, new ReadStructure("76T76T6B"), IlluminaDataType.Barcodes));
+        runBarcodeParsingTest(new IlluminaDataProviderFactory(PARSING_TEST_BASECALLS_DIR, 6, new ReadStructure("76T76T6B"), bclQualityEvaluationStrategy, IlluminaDataType.Barcodes));
     }
 
     @DataProvider(name="binaryData")
@@ -339,7 +340,7 @@ public class IlluminaDataProviderTest {
         final IlluminaDataType [] dts = getDataTypes(extraDataTypes);
 
         Map<Integer, ClusterData> readNoToClusterData = BinTdUtil.clusterData(lane, tiles, illuminaConfigStr, dts);
-        final IlluminaDataProviderFactory factory = new IlluminaDataProviderFactory(basecallsDirectory, lane, new ReadStructure(illuminaConfigStr), dts);
+        final IlluminaDataProviderFactory factory = new IlluminaDataProviderFactory(basecallsDirectory, lane, new ReadStructure(illuminaConfigStr), bclQualityEvaluationStrategy, dts);
         final IlluminaDataProvider dataProvider   = factory.makeDataProvider();
 
         runTest(testName, size, readNoToClusterData, seekAfterFirstRead, seekTestDataReadOffset, dataProvider);
@@ -409,7 +410,7 @@ public class IlluminaDataProviderTest {
             final String illuminaConfigStr,
             final File basecallsDirectory)
             throws Exception {
-        final IlluminaDataProviderFactory factory = new IlluminaDataProviderFactory(basecallsDirectory, lane, new ReadStructure(illuminaConfigStr), actualDts);
+        final IlluminaDataProviderFactory factory = new IlluminaDataProviderFactory(basecallsDirectory, lane, new ReadStructure(illuminaConfigStr), bclQualityEvaluationStrategy, actualDts);
         final IlluminaDataProvider dataProvider   = factory.makeDataProvider();
     }
 }
diff --git a/src/tests/java/net/sf/picard/illumina/parser/IlluminaFileUtilTest.java b/src/tests/java/net/sf/picard/illumina/parser/IlluminaFileUtilTest.java
index 23c01b8..8306556 100644
--- a/src/tests/java/net/sf/picard/illumina/parser/IlluminaFileUtilTest.java
+++ b/src/tests/java/net/sf/picard/illumina/parser/IlluminaFileUtilTest.java
@@ -2,7 +2,7 @@ package net.sf.picard.illumina.parser;
 
 import net.sf.picard.PicardException;
 import net.sf.picard.io.IoUtil;
-import static net.sf.picard.util.CollectionUtil.makeList;
+import static net.sf.samtools.util.CollectionUtil.makeList;
 import org.testng.Assert;
 import org.testng.annotations.*;
 
diff --git a/src/tests/java/net/sf/picard/illumina/parser/PerTileParserTest.java b/src/tests/java/net/sf/picard/illumina/parser/PerTileParserTest.java
index 55362db..b686ef7 100644
--- a/src/tests/java/net/sf/picard/illumina/parser/PerTileParserTest.java
+++ b/src/tests/java/net/sf/picard/illumina/parser/PerTileParserTest.java
@@ -1,6 +1,6 @@
 package net.sf.picard.illumina.parser;
 
-import static net.sf.picard.util.CollectionUtil.makeList;
+import static net.sf.samtools.util.CollectionUtil.makeList;
 
 import net.sf.picard.PicardException;
 import net.sf.samtools.util.CloseableIterator;
diff --git a/src/tests/java/net/sf/picard/illumina/parser/PerTilePerCycleParserTest.java b/src/tests/java/net/sf/picard/illumina/parser/PerTilePerCycleParserTest.java
index 1acaa64..4e57a2c 100644
--- a/src/tests/java/net/sf/picard/illumina/parser/PerTilePerCycleParserTest.java
+++ b/src/tests/java/net/sf/picard/illumina/parser/PerTilePerCycleParserTest.java
@@ -1,6 +1,6 @@
 package net.sf.picard.illumina.parser;
 
-import static net.sf.picard.util.CollectionUtil.makeList;
+import static net.sf.samtools.util.CollectionUtil.makeList;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
@@ -37,6 +37,7 @@ public class PerTilePerCycleParserTest {
         public MockPerTilePerCycleParser(final File directory, final int lane, final CycleIlluminaFileMap tilesToCycleFiles, final OutputMapping outputMapping) {
             super(directory, lane, tilesToCycleFiles, outputMapping);
             expectedOutputLengths = outputMapping.getOutputReadLengths();
+            this.initialize();
         }
 
         @Override
diff --git a/src/tests/java/net/sf/picard/illumina/parser/PosParserTest.java b/src/tests/java/net/sf/picard/illumina/parser/PosParserTest.java
index 2e03042..9a28f92 100644
--- a/src/tests/java/net/sf/picard/illumina/parser/PosParserTest.java
+++ b/src/tests/java/net/sf/picard/illumina/parser/PosParserTest.java
@@ -6,7 +6,7 @@ import org.testng.annotations.Test;
 
 import java.io.File;
 import java.util.*;
-import static net.sf.picard.util.CollectionUtil.makeList;
+import static net.sf.samtools.util.CollectionUtil.makeList;
 
 public class PosParserTest {
 
diff --git a/src/tests/java/net/sf/picard/illumina/parser/QseqParserTest.java b/src/tests/java/net/sf/picard/illumina/parser/QseqParserTest.java
index 21f1712..c31a3b2 100644
--- a/src/tests/java/net/sf/picard/illumina/parser/QseqParserTest.java
+++ b/src/tests/java/net/sf/picard/illumina/parser/QseqParserTest.java
@@ -6,7 +6,7 @@ import org.testng.annotations.DataProvider;
 
 import java.util.*;
 
-import static net.sf.picard.util.CollectionUtil.makeList;
+import static net.sf.samtools.util.CollectionUtil.makeList;
 import static net.sf.picard.illumina.parser.QSeqTdUtil.*;
 import static net.sf.picard.illumina.parser.OutputMapping.TwoDIndex;
 
diff --git a/src/tests/java/net/sf/picard/illumina/parser/readers/BclReaderTest.java b/src/tests/java/net/sf/picard/illumina/parser/readers/BclReaderTest.java
index fad97c4..446fc5c 100644
--- a/src/tests/java/net/sf/picard/illumina/parser/readers/BclReaderTest.java
+++ b/src/tests/java/net/sf/picard/illumina/parser/readers/BclReaderTest.java
@@ -6,79 +6,156 @@ import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
 import java.io.File;
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.concurrent.*;
 
 public class BclReaderTest {
 
     public static File TestDataDir = new File("testdata/net/sf/picard/illumina/readerTests");
-    public static final File PASSING_BCL_FILE  = new File(TestDataDir, "bcl_passing.bcl");
-    public static final File QUAL_0FAILING_BCL_FILE  = new File(TestDataDir, "bcl_failing.bcl");
-    public static final File QUAL_1FAILING_BCL_FILE  = new File(TestDataDir, "bcl_failing2.bcl");
-    public static final File FILE_TOO_LARGE  = new File(TestDataDir, "bcl_tooLarge.bcl");
-    public static final File FILE_TOO_SHORT  = new File(TestDataDir, "bcl_tooShort.bcl");
-
-    public static final char [] expectedBases = new char[]{
-            'C', 'A', 'A', 'A',     'T', 'C', 'T', 'G',     'T', 'A', 'A', 'G',     'C', 'C', 'A', 'A', 
-            'C', 'A', 'C', 'C',     'A', 'A', 'C', 'G',     'A', 'T', 'A', 'C',     'A', 'A', 'C', 'A', 
-            'T', 'G', 'C', 'A',     'C', 'A', 'A', 'C',     'G', 'C', 'A', 'A',     'G', 'T', 'G', 'C', 
-            'A', 'C', 'G', 'T',     'A', 'C', 'A', 'A',     'C', 'G', 'C', 'A',     'C', 'A', 'T', 'T', 
-            'T', 'A', 'A', 'G',     'C', 'G', 'T', 'C',     'A', 'T', 'G', 'A',     'G', 'C', 'T', 'C', 
-            'T', 'A', 'C', 'G',     'A', 'A', 'C', 'C',     'C', 'A', 'T', 'A',     'T', 'G', 'G', 'G', 
-            'C', 'T', 'G', 'A',     'A', '.', '.', 'G',     'A', 'C', 'C', 'G',     'T', 'A', 'C', 'A', 
-            'G', 'T', 'G', 'T',     'A', '.'
+    public static final File PASSING_BCL_FILE = new File(TestDataDir, "bcl_passing.bcl");
+    public static final File QUAL_0FAILING_BCL_FILE = new File(TestDataDir, "bcl_failing.bcl");
+    public static final File QUAL_1FAILING_BCL_FILE = new File(TestDataDir, "bcl_failing2.bcl");
+    public static final File FILE_TOO_LARGE = new File(TestDataDir, "bcl_tooLarge.bcl");
+    public static final File FILE_TOO_SHORT = new File(TestDataDir, "bcl_tooShort.bcl");
+
+    public static final char[] expectedBases = new char[]{
+            'C', 'A', 'A', 'A', 'T', 'C', 'T', 'G', 'T', 'A', 'A', 'G', 'C', 'C', 'A', 'A',
+            'C', 'A', 'C', 'C', 'A', 'A', 'C', 'G', 'A', 'T', 'A', 'C', 'A', 'A', 'C', 'A',
+            'T', 'G', 'C', 'A', 'C', 'A', 'A', 'C', 'G', 'C', 'A', 'A', 'G', 'T', 'G', 'C',
+            'A', 'C', 'G', 'T', 'A', 'C', 'A', 'A', 'C', 'G', 'C', 'A', 'C', 'A', 'T', 'T',
+            'T', 'A', 'A', 'G', 'C', 'G', 'T', 'C', 'A', 'T', 'G', 'A', 'G', 'C', 'T', 'C',
+            'T', 'A', 'C', 'G', 'A', 'A', 'C', 'C', 'C', 'A', 'T', 'A', 'T', 'G', 'G', 'G',
+            'C', 'T', 'G', 'A', 'A', '.', '.', 'G', 'A', 'C', 'C', 'G', 'T', 'A', 'C', 'A',
+            'G', 'T', 'G', 'T', 'A', '.'
     };
 
-    public static final int [] expectedQuals = new int[]{
-        18, 29,  8, 17,     27, 25, 28, 27,      9, 29,  8, 20,     25, 24, 27, 27,
-        30,  8, 19, 24,     29, 29, 25, 28,      8, 29, 26, 24,     29,  8, 18,  8,
-        29, 28, 26, 29,     25, 8,  26, 25,     28, 25,  8, 28,     28, 27, 29, 26,
-        25, 26, 27, 25,      8, 18,  8, 26,     24, 29, 25,  8,     24,  8, 25, 27,
-        27, 25,  8, 28,     24, 27, 25, 25,      8, 27, 25,  8,     16, 24, 28, 25,
-        28,  8, 24, 27,     25,  8, 20, 29,     24, 27, 28,  8,     23, 10, 23, 11,
-        15, 11, 10, 12,     12,  2,  2, 31,     24,  8,  4, 36,     12, 17, 21,  4,
-         8, 12, 18, 23,     27,  2
+    public static final int[] expectedQuals = new int[]{
+            18, 29, 8, 17, 27, 25, 28, 27, 9, 29, 8, 20, 25, 24, 27, 27,
+            30, 8, 19, 24, 29, 29, 25, 28, 8, 29, 26, 24, 29, 8, 18, 8,
+            29, 28, 26, 29, 25, 8, 26, 25, 28, 25, 8, 28, 28, 27, 29, 26,
+            25, 26, 27, 25, 8, 18, 8, 26, 24, 29, 25, 8, 24, 8, 25, 27,
+            27, 25, 8, 28, 24, 27, 25, 25, 8, 27, 25, 8, 16, 24, 28, 25,
+            28, 8, 24, 27, 25, 8, 20, 29, 24, 27, 28, 8, 23, 10, 23, 11,
+            15, 11, 10, 12, 12, 2, 2, 31, 24, 8, 4, 36, 12, 17, 21, 4,
+            8, 12, 18, 23, 27, 2
     };
 
-    public byte [] qualsAsBytes() {
-        final byte [] byteVals = new byte[expectedQuals.length];
-        for(int i = 0; i < byteVals.length; i++) {
-            byteVals[i] = (byte)expectedQuals[i];
+    public byte[] qualsAsBytes() {
+        final byte[] byteVals = new byte[expectedQuals.length];
+        for (int i = 0; i < byteVals.length; i++) {
+            byteVals[i] = (byte) expectedQuals[i];
         }
         return byteVals;
     }
 
     @Test
     public void readValidFile() {
-        final BclReader reader = new BclReader(PASSING_BCL_FILE);
-        final byte [] quals = qualsAsBytes();
+        final BclQualityEvaluationStrategy bclQualityEvaluationStrategy = new BclQualityEvaluationStrategy(BclQualityEvaluationStrategy.ILLUMINA_ALLEGED_MINIMUM_QUALITY);
+        final BclReader reader = new BclReader(PASSING_BCL_FILE, bclQualityEvaluationStrategy);
+        final byte[] quals = qualsAsBytes();
 
         Assert.assertEquals(reader.numClusters, expectedBases.length);
 
         int readNum = 0;
-        while(readNum < reader.numClusters) {
+        while (readNum < reader.numClusters) {
             final BclReader.BclValue bv = reader.next();
-            Assert.assertEquals(bv.base,    expectedBases[readNum], " On num cluster: " + readNum);
+            Assert.assertEquals(bv.base, expectedBases[readNum], " On num cluster: " + readNum);
             Assert.assertEquals(bv.quality, quals[readNum], " On num cluster: " + readNum);
             ++readNum;
         }
+        bclQualityEvaluationStrategy.assertMinimumQualities();
     }
 
-    @DataProvider(name="failingFiles")
+    @DataProvider(name = "failingFiles")
     public Object[][] failingFiles() {
-        return new Object[][] {
-            {QUAL_0FAILING_BCL_FILE},
-            {QUAL_1FAILING_BCL_FILE},
-            {new File(TestDataDir, "SomeNoneExistantFile.bcl")},
-            {FILE_TOO_LARGE},
-            {FILE_TOO_SHORT}
+        return new Object[][]{
+                {QUAL_0FAILING_BCL_FILE},
+                {QUAL_1FAILING_BCL_FILE},
+                {new File(TestDataDir, "SomeNoneExistantFile.bcl")},
+                {FILE_TOO_LARGE},
+                {FILE_TOO_SHORT}
         };
     }
 
     @Test(expectedExceptions = PicardException.class, dataProvider = "failingFiles")
     public void failingFileTest(final File failingFile) {
-        final BclReader reader = new BclReader(failingFile);
+        final BclQualityEvaluationStrategy bclQualityEvaluationStrategy = new BclQualityEvaluationStrategy(BclQualityEvaluationStrategy.ILLUMINA_ALLEGED_MINIMUM_QUALITY);
+        final BclReader reader = new BclReader(failingFile, bclQualityEvaluationStrategy);
         Assert.assertEquals(reader.numClusters, expectedBases.length);
-        while(reader.hasNext()) {
+        while (reader.hasNext()) {
             reader.next();
         }
+        bclQualityEvaluationStrategy.assertMinimumQualities();
+    }
+
+    /**
+     * Asserts appropriate functionality of a quality-minimum-customized BCL reader, such that (1) if sub-Q2 qualities are found, the BCL
+     * reader does not throw an exception, (2) sub-minimum calls are set to quality 1 and (3) sub-minimum calls are counted up properly.
+     */
+    @Test
+    public void lowQualityButPassingTest() throws ExecutionException, InterruptedException {
+        final BclQualityEvaluationStrategy bclQualityEvaluationStrategy = new BclQualityEvaluationStrategy(1);
+
+        // Build a list of callables, then submit them and check for errors.
+        final Collection<Callable<Void>> callables = new LinkedList<Callable<Void>>();
+        for (int i = 0; i < 10; i++) {
+            final boolean even_i = i % 2 == 0;
+            callables.add(new Callable<Void>() {
+                @Override
+                public Void call() throws Exception {
+                    final BclReader reader = new BclReader(even_i ? QUAL_1FAILING_BCL_FILE : QUAL_0FAILING_BCL_FILE, bclQualityEvaluationStrategy);
+                    Assert.assertEquals(reader.numClusters, expectedBases.length);
+                    while (reader.hasNext()) {
+                        reader.next();
+                    }
+                    return null;
+                }
+            });
+        }
+        final ExecutorService executorService = Executors.newFixedThreadPool(callables.size());
+        final Collection<Future<Void>> futures = new LinkedList<Future<Void>>();
+        for (final Callable<Void> callable : callables) {
+            futures.add(executorService.submit(callable));
+        }
+        for (final Future<Void> future : futures) {
+            future.get();
+        }
+        bclQualityEvaluationStrategy.assertMinimumQualities();
+        Assert.assertEquals((int) bclQualityEvaluationStrategy.getPoorQualityFrequencies().get((byte) 0), 25);
+        Assert.assertEquals((int) bclQualityEvaluationStrategy.getPoorQualityFrequencies().get((byte) 1), 25);
+    }
+
+    @Test(expectedExceptions = PicardException.class)
+    public void lowQualityAndFailingTest() throws ExecutionException, InterruptedException {
+        final BclQualityEvaluationStrategy bclQualityEvaluationStrategy = new BclQualityEvaluationStrategy(BclQualityEvaluationStrategy.ILLUMINA_ALLEGED_MINIMUM_QUALITY);
+
+        // Build a list of callables, then submit them and check for errors.
+        final Collection<Callable<Void>> callables = new LinkedList<Callable<Void>>();
+        for (int i = 0; i < 10; i++) {
+            final boolean even_i = i % 2 == 0;
+            callables.add(new Callable<Void>() {
+                @Override
+                public Void call() throws Exception {
+                    final BclReader reader = new BclReader(even_i ? QUAL_1FAILING_BCL_FILE : QUAL_0FAILING_BCL_FILE, bclQualityEvaluationStrategy);
+                    Assert.assertEquals(reader.numClusters, expectedBases.length);
+                    while (reader.hasNext()) {
+                        reader.next();
+                    }
+                    return null;
+                }
+            });
+        }
+        final ExecutorService executorService = Executors.newFixedThreadPool(callables.size());
+        final Collection<Future<Void>> futures = new LinkedList<Future<Void>>();
+        for (final Callable<Void> callable : callables) {
+            futures.add(executorService.submit(callable));
+        }
+        for (final Future<Void> future : futures) {
+            future.get();
+        }
+        Assert.assertEquals((int) bclQualityEvaluationStrategy.getPoorQualityFrequencies().get((byte) 0), 25);
+        Assert.assertEquals((int) bclQualityEvaluationStrategy.getPoorQualityFrequencies().get((byte) 1), 25);
+        bclQualityEvaluationStrategy.assertMinimumQualities();
     }
 }
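Taken together, the test changes above all follow the same pattern: a BclQualityEvaluationStrategy is constructed with a minimum quality, handed to the reader or factory, and interrogated once parsing is done. The sketch below condenses that pattern using only the constructor and methods that appear in the hunks above; the file path and the iteration details are placeholders, not a definitive usage.

    import java.io.File;
    import net.sf.picard.illumina.parser.readers.BclQualityEvaluationStrategy;
    import net.sf.picard.illumina.parser.readers.BclReader;

    public class BclQualityCheckSketch {
        public static void main(final String[] args) {
            // The strategy tracks base calls whose quality falls below the configured minimum.
            final BclQualityEvaluationStrategy strategy =
                    new BclQualityEvaluationStrategy(BclQualityEvaluationStrategy.ILLUMINA_ALLEGED_MINIMUM_QUALITY);

            final BclReader reader = new BclReader(new File(args[0]), strategy);
            while (reader.hasNext()) {
                reader.next(); // each BclValue carries a base and a quality
            }

            // Report how many sub-minimum qualities were seen across all readers sharing the strategy.
            System.out.println("Poor-quality frequencies: " + strategy.getPoorQualityFrequencies());
            // With the Illumina minimum this throws if any were below it (lowQualityAndFailingTest);
            // with a lower minimum such as 1 they are merely counted (lowQualityButPassingTest).
            strategy.assertMinimumQualities();
        }
    }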
diff --git a/src/tests/java/net/sf/picard/io/IoUtilTest.java b/src/tests/java/net/sf/picard/io/IoUtilTest.java
index 1139993..04cdf2d 100644
--- a/src/tests/java/net/sf/picard/io/IoUtilTest.java
+++ b/src/tests/java/net/sf/picard/io/IoUtilTest.java
@@ -25,14 +25,20 @@ package net.sf.picard.io;
 
 import net.sf.picard.util.ProcessExecutor;
 import net.sf.samtools.util.CloserUtil;
+import net.sf.samtools.util.CollectionUtil;
 import org.testng.annotations.Test;
 import org.testng.Assert;
 
 import java.io.*;
 import java.nio.charset.Charset;
+import java.util.Arrays;
+import java.util.List;
 
 public class IoUtilTest {
 
+    private static final File SLURP_TEST_FILE = new File("testdata/net/sf/picard/io/slurptest.txt");
+    private static final List<String> SLURP_TEST_LINES = Arrays.asList("bacon   and rice   ","for breakfast  ","wont you join me");
+    private static final String SLURP_TEST_LINE_SEPARATOR = "\n";
     private static final String TEST_FILE_PREFIX = "foo";
     private static final String TEST_FILE_EXTENSIONS[] = { ".txt", ".txt.gz", ".txt.bz2" };
     private static final String TEST_STRING = "bar!";
@@ -109,4 +115,13 @@ public class IoUtilTest {
         }
     }
 
+    @Test
+    public void slurpLinesTest() throws FileNotFoundException {
+        Assert.assertEquals(IoUtil.slurpLines(SLURP_TEST_FILE), SLURP_TEST_LINES);
+    }
+    
+    @Test
+    public void slurpTest() throws FileNotFoundException {
+        Assert.assertEquals(IoUtil.slurp(SLURP_TEST_FILE), CollectionUtil.join(SLURP_TEST_LINES, SLURP_TEST_LINE_SEPARATOR));
+    }
 }
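As the new tests imply, IoUtil.slurpLines reads a file into a list of its lines and IoUtil.slurp reads it into a single String (compared in the test against the newline-joined lines). A small usage sketch under those assumptions; the path is a placeholder.

    import java.io.File;
    import java.io.FileNotFoundException;
    import java.util.List;
    import net.sf.picard.io.IoUtil;

    public class SlurpSketch {
        public static void main(final String[] args) throws FileNotFoundException {
            final File file = new File(args[0]); // any small text file
            final List<String> lines = IoUtil.slurpLines(file);
            System.out.println("Read " + lines.size() + " lines; first: " + lines.get(0));

            final String whole = IoUtil.slurp(file); // entire contents as one String
            System.out.println("Total characters: " + whole.length());
        }
    }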
diff --git a/src/tests/java/net/sf/picard/sam/MergeBamAlignmentTest.java b/src/tests/java/net/sf/picard/sam/MergeBamAlignmentTest.java
index 7b7863e..0f4d095 100644
--- a/src/tests/java/net/sf/picard/sam/MergeBamAlignmentTest.java
+++ b/src/tests/java/net/sf/picard/sam/MergeBamAlignmentTest.java
@@ -54,6 +54,7 @@ public class MergeBamAlignmentTest {
     private static final File firstReadAlignedBam_secondHalf = new File(TEST_DATA_DIR, "secondhalf.read1.trimmed.aligned.sam");
     private static final File secondReadAlignedBam_firstHalf = new File(TEST_DATA_DIR, "firsthalf.read2.trimmed.aligned.sam");
     private static final File secondReadAlignedBam_secondHalf = new File(TEST_DATA_DIR, "secondhalf.read2.trimmed.aligned.sam");
+    private static final File supplementalReadAlignedBam = new File(TEST_DATA_DIR, "aligned.supplement.sam");
     private static final File alignedQuerynameSortedBam =
             new File("testdata/net/sf/picard/sam/aligned_queryname_sorted.sam");
     private static final File fasta = new File("testdata/net/sf/picard/sam/merger.fasta");
@@ -66,6 +67,82 @@ public class MergeBamAlignmentTest {
     private static final String ONE_OF_THE_BEST_TAG = "YB";
 
     @Test
+    public void testMergerWithSupplemental() throws Exception {
+        final File outputWithSupplemental = File.createTempFile("mergeWithSupplementalTest", ".sam");
+        outputWithSupplemental.deleteOnExit();
+        final MergeBamAlignment merger = new MergeBamAlignment();
+        merger.UNMAPPED_BAM = unmappedBam;
+        merger.ALIGNED_BAM = Arrays.asList(supplementalReadAlignedBam);
+        merger.ALIGNED_READS_ONLY = false;
+        merger.CLIP_ADAPTERS = true;
+        merger.IS_BISULFITE_SEQUENCE = false;
+        merger.MAX_INSERTIONS_OR_DELETIONS = 1;
+        merger.PROGRAM_RECORD_ID = "0";
+        merger.PROGRAM_GROUP_VERSION = "1.0";
+        merger.PROGRAM_GROUP_COMMAND_LINE = "align!";
+        merger.PROGRAM_GROUP_NAME = "myAligner";
+        merger.PAIRED_RUN = true;
+        merger.REFERENCE_SEQUENCE = fasta;
+        merger.OUTPUT = outputWithSupplemental;
+        merger.EXPECTED_ORIENTATIONS=Arrays.asList(SamPairUtil.PairOrientation.FR);
+
+        Assert.assertEquals(merger.doWork(), 0, "Merge did not succeed");
+        final SAMFileReader result = new SAMFileReader(outputWithSupplemental);
+
+        final List<Integer> clipAdapterFlags = new ArrayList<Integer>(Arrays.asList(99, 2147, 147, 2195));
+        final List<Integer> foundClipAdapterFlags = new ArrayList<Integer>();
+
+        for (final SAMRecord sam : result) {
+            if (sam.getReadName().equals("both_reads_align_clip_adapter")) {
+                foundClipAdapterFlags.add(sam.getFlags());
+            }
+
+            // This tests that we clip both (a) when the adapter is marked in the unmapped BAM file and
+            // (b) when the insert size is less than the read length
+            if (sam.getReadName().equals("both_reads_align_clip_adapter") ||
+                    sam.getReadName().equals("both_reads_align_clip_marked")) {
+                Assert.assertEquals(sam.getReferenceName(), "chr7");
+                if (sam.getReadNegativeStrandFlag()) {
+                    Assert.assertEquals(sam.getCigarString(), "5S96M", "Incorrect CIGAR string for " +
+                            sam.getReadName());
+                } else {
+                    Assert.assertEquals(sam.getCigarString(), "96M5S", "Incorrect CIGAR string for " +
+                            sam.getReadName());
+                }
+            }
+            // This tests that we DON'T clip when we run off the end if there are equal to or more than
+            // MIN_ADAPTER_BASES hanging off the end
+            else if (sam.getReadName().equals("both_reads_align_min_adapter_bases_exceeded")) {
+                Assert.assertEquals(sam.getReferenceName(), "chr7");
+                Assert.assertTrue(!sam.getCigarString().contains("S"),
+                        "Read was clipped when it should not be.");
+            } else if (sam.getReadName().equals("neither_read_aligns_or_present")) {
+                Assert.assertTrue(sam.getReadUnmappedFlag(), "Read should be unmapped but isn't");
+            }
+            // Two pairs in which only the first read should align
+            else if (sam.getReadName().equals("both_reads_present_only_first_aligns") ||
+                    sam.getReadName().equals("read_2_too_many_gaps")) {
+                if (sam.getFirstOfPairFlag()) {
+                    Assert.assertEquals(sam.getReferenceName(), "chr7", "Read should be mapped but isn't");
+                } else {
+                    Assert.assertTrue(sam.getReadUnmappedFlag(), "Read should not be mapped but is");
+                }
+            } else {
+                throw new Exception("Unexpected read name: " + sam.getReadName());
+            }
+        }
+
+        // Make sure that we have the appropriate primary and supplementary reads in the new file
+        Assert.assertEquals(clipAdapterFlags.size(), foundClipAdapterFlags.size());
+        Collections.sort(clipAdapterFlags);
+        Collections.sort(foundClipAdapterFlags);
+        for (int i = 0; i < clipAdapterFlags.size(); i++) {
+            Assert.assertEquals(clipAdapterFlags.get(i), foundClipAdapterFlags.get(i));
+        }
+
+    }
+
+    @Test
     public void testMerger() throws Exception {
         final File output = File.createTempFile("mergeTest", ".sam");
         output.deleteOnExit();
@@ -265,7 +342,6 @@ public class MergeBamAlignmentTest {
         };
     }
 
-
     /**
      * Minimal test of merging data from separate read 1 and read 2 alignments
      */
@@ -297,8 +373,7 @@ public class MergeBamAlignmentTest {
          final SAMFileReader result = new SAMFileReader(output);
          final SAMProgramRecord pg = result.getFileHeader().getProgramRecords().get(0);
 
-
-        for (final SAMRecord sam : result) {
+         for (final SAMRecord sam : result) {
             // Get the alignment record
             final List<File> rFiles = sam.getFirstOfPairFlag() ? r1Align : r2Align;
             SAMRecord alignment = null;
@@ -1152,7 +1227,7 @@ public class MergeBamAlignmentTest {
      * Test that clipping of FR reads for fragments shorter than read length happens only when it should.
      */
     @Test
-    public  void testShortFragmentClipping() throws Exception {
+    public void testShortFragmentClipping() throws Exception {
         final File output = File.createTempFile("testShortFragmentClipping", ".sam");
         output.deleteOnExit();
         final MergeBamAlignment merger = new MergeBamAlignment();
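The expected flag list in testMergerWithSupplemental (99, 2147, 147, 2195) is simply the primary FR pair flags with the supplementary-alignment bit (0x800 = 2048) added for the extra records. The sketch below decodes them, assuming nothing beyond the standard SAM flag bits.

    public class FlagDecodeSketch {
        private static final int SUPPLEMENTARY = 0x800; // 2048
        private static final int FIRST_OF_PAIR = 0x40;  // 64

        public static void main(final String[] args) {
            final int[] flags = {99, 2147, 147, 2195};
            for (final int flag : flags) {
                final boolean supplementary = (flag & SUPPLEMENTARY) != 0;
                final boolean firstOfPair = (flag & FIRST_OF_PAIR) != 0;
                System.out.println(flag + ": " + (firstOfPair ? "read 1" : "read 2")
                        + (supplementary ? ", supplementary" : ", primary"));
            }
            // 2147 = 99 + 2048 and 2195 = 147 + 2048, i.e. the supplementary copies
            // of the two primary alignments the test expects to find.
        }
    }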
diff --git a/src/tests/java/net/sf/picard/util/DelimitedTextFileWithHeaderIteratorTest.java b/src/tests/java/net/sf/picard/util/DelimitedTextFileWithHeaderIteratorTest.java
new file mode 100644
index 0000000..510ef19
--- /dev/null
+++ b/src/tests/java/net/sf/picard/util/DelimitedTextFileWithHeaderIteratorTest.java
@@ -0,0 +1,95 @@
+package net.sf.picard.util;
+
+import net.sf.samtools.util.StringUtil;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileWriter;
+
+public class DelimitedTextFileWithHeaderIteratorTest {
+    @Test
+    public void basicParsingTest() throws Exception {
+        final String[][] data = new String[][] {
+                new String[] {"FOO", "BAR", "SPLAT"},
+                new String[] {"1", "2", "3"},
+                new String[] {"a", "b", "c"},
+                new String[] {"foo", "bar", "splat"},
+        };
+
+        final File tmp = File.createTempFile("tabbedTextTest.", ".txt");
+        tmp.deleteOnExit();
+        final BufferedWriter out = new BufferedWriter(new FileWriter(tmp));
+
+        for (final String[] fields : data) {
+            out.write(StringUtil.join("\t", fields));
+            out.newLine();
+        }
+
+        out.close();
+
+        final TabbedInputParser parser = new TabbedInputParser(false, tmp);
+        final DelimitedTextFileWithHeaderIterator fileIterator = new DelimitedTextFileWithHeaderIterator(parser);
+        for (final String col : data[0]) Assert.assertTrue(fileIterator.hasColumn(col));
+
+        int i=1;
+        for (final DelimitedTextFileWithHeaderIterator.Row row : new IterableAdapter<DelimitedTextFileWithHeaderIterator.Row>(fileIterator)) {
+            final String[] expected = data[i++];
+            Assert.assertEquals(row.getFields(), expected);
+            Assert.assertEquals(row.getCurrentLine(), StringUtil.join("\t", expected));
+        }
+
+        Assert.assertEquals(i, data.length);
+    }
+
+    @Test
+    public void parsingWithColumnHeadersTest() throws Exception {
+        final String[] headers = {"STRING", "STRING2", "NUMBER"};
+
+        final String[][] data = new String[][] {
+                headers,
+                new String[] {"1", "2", "3"},
+                new String[] {"a", "b", "2"},
+                new String[] {"foo", "bar", ""},
+        };
+
+
+        final File tmp = File.createTempFile("tabbedTextTest.", ".txt");
+        tmp.deleteOnExit();
+        final BufferedWriter out = new BufferedWriter(new FileWriter(tmp));
+
+        for (final String[] fields : data) {
+            out.write(StringUtil.join("\t", fields));
+            out.newLine();
+        }
+
+        out.close();
+
+        final TabbedInputParser parser = new TabbedInputParser(false, tmp);
+        final DelimitedTextFileWithHeaderIterator fileIterator = new DelimitedTextFileWithHeaderIterator(parser);
+        for (final String col : headers) Assert.assertTrue(fileIterator.hasColumn(col));
+
+        int i=1;
+        for (final DelimitedTextFileWithHeaderIterator.Row row : new IterableAdapter<DelimitedTextFileWithHeaderIterator.Row>(fileIterator)) {
+            final String[] expected = data[i++];
+            final String[] actual = row.getFields();
+            for (int j = 0; j < expected.length; j++) {
+                Assert.assertTrue((expected[j].equals("") && actual[j] == null) || expected[j].equals(actual[j]));
+            }
+            Assert.assertEquals(row.getCurrentLine(), StringUtil.join("\t", expected));
+            try {
+                row.getField(headers[0]);
+                row.getField(headers[1]);
+                row.getIntegerField(headers[2]);
+            }
+            catch(Exception e) {
+                Assert.fail("Failed to parse one of the fields in " + row.getCurrentLine() + ": " + e.getMessage());
+            }
+        }
+
+        Assert.assertEquals(i, data.length);
+    }
+
+
+}
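For reference, the new iterator wraps a TabbedInputParser and exposes each line after the header as a Row keyed by the header's column names. The condensed sketch below uses only the calls exercised by the tests above; the input path is a placeholder and the column names follow the test's example headers, so treat it as an illustration rather than the class's full contract.

    import java.io.File;
    import net.sf.picard.util.DelimitedTextFileWithHeaderIterator;
    import net.sf.picard.util.IterableAdapter;
    import net.sf.picard.util.TabbedInputParser;

    public class HeaderedTsvSketch {
        public static void main(final String[] args) {
            // The first line of the file supplies the column names; remaining lines become Rows.
            final TabbedInputParser parser = new TabbedInputParser(false, new File(args[0]));
            final DelimitedTextFileWithHeaderIterator rows = new DelimitedTextFileWithHeaderIterator(parser);

            for (final DelimitedTextFileWithHeaderIterator.Row row :
                    new IterableAdapter<DelimitedTextFileWithHeaderIterator.Row>(rows)) {
                if (rows.hasColumn("NUMBER")) {
                    System.out.println(row.getField("STRING") + " -> " + row.getIntegerField("NUMBER"));
                } else {
                    System.out.println(row.getCurrentLine());
                }
            }
        }
    }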
diff --git a/src/java/org/broad/tribble/AbstractFeatureCodec.java b/src/tests/java/net/sf/picard/util/IntervalTreeMapTest.java
similarity index 61%
copy from src/java/org/broad/tribble/AbstractFeatureCodec.java
copy to src/tests/java/net/sf/picard/util/IntervalTreeMapTest.java
index 77b9fb5..2f2defa 100644
--- a/src/java/org/broad/tribble/AbstractFeatureCodec.java
+++ b/src/tests/java/net/sf/picard/util/IntervalTreeMapTest.java
@@ -21,35 +21,27 @@
  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
  * THE SOFTWARE.
  */
-package org.broad.tribble;
+package net.sf.picard.util;
 
-import org.broad.tribble.readers.PositionalBufferedStream;
+import org.testng.Assert;
+import org.testng.annotations.Test;
 
-import java.io.IOException;
+import java.util.Iterator;
+import java.util.Set;
 
-/**
- * Simple basic class providing much of the basic functionality of codecs
- */
-public abstract class AbstractFeatureCodec<T extends Feature> implements FeatureCodec {
-    Class<T> myClass;
-
-    protected AbstractFeatureCodec(final Class<T> myClass) {
-        this.myClass = myClass;
-    }
+public class IntervalTreeMapTest {
+    @Test
+    public void testBasic() {
+        IntervalTreeMap<Interval> m=new IntervalTreeMap<Interval>();
 
-    @Override
-    public Feature decodeLoc(final PositionalBufferedStream stream) throws IOException {
-        return decode(stream);
-    }
+        Interval chr1Interval = new Interval("chr1", 1,100);
+        m.put(chr1Interval, chr1Interval);
+        Interval chr2Interval = new Interval("chr2", 1,200);
+        m.put(chr2Interval, chr2Interval);
 
-    @Override
-    public Class<T> getFeatureType() {
-        return myClass;
+        final Iterator<Interval> iterator = m.keySet().iterator();
+        Assert.assertEquals(iterator.next(), chr1Interval);
+        Assert.assertEquals(iterator.next(), chr2Interval);
+        Assert.assertFalse(iterator.hasNext());
     }
-
-    @Override
-    public boolean canDecode(final String path) {
-        return false;
-    }
-
 }
diff --git a/src/tests/java/net/sf/picard/util/SamLocusIteratorTest.java b/src/tests/java/net/sf/picard/util/SamLocusIteratorTest.java
index 16cfd69..702aa1e 100644
--- a/src/tests/java/net/sf/picard/util/SamLocusIteratorTest.java
+++ b/src/tests/java/net/sf/picard/util/SamLocusIteratorTest.java
@@ -80,8 +80,6 @@ public class SamLocusIteratorTest {
         final SAMFileReader samReader = createSamFileReader(exampleSam);
         final SamLocusIterator sli = new SamLocusIterator(samReader);
 
-
-
         // make sure we accumulated depth of 2 for each position
         int pos = 1;
         final int coveredStart = 165;
diff --git a/src/tests/java/net/sf/picard/vcf/MergeVcfsTest.java b/src/tests/java/net/sf/picard/vcf/MergeVcfsTest.java
index 9af5c17..8378255 100644
--- a/src/tests/java/net/sf/picard/vcf/MergeVcfsTest.java
+++ b/src/tests/java/net/sf/picard/vcf/MergeVcfsTest.java
@@ -1,9 +1,10 @@
 package net.sf.picard.vcf;
 
+import net.sf.samtools.util.CloseableIterator;
+import org.broad.tribble.TribbleException;
 import org.broadinstitute.variant.variantcontext.VariantContext;
-import org.broadinstitute.variant.variantcontext.writer.VariantContextWriter;
-import org.broadinstitute.variant.variantcontext.writer.VariantContextWriterFactory;
-import org.broadinstitute.variant.vcf.VCFContigHeaderLine;
+import org.broadinstitute.variant.variantcontext.VariantContextComparator;
+import org.broadinstitute.variant.vcf.VCFFileReader;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
@@ -44,14 +45,13 @@ public class MergeVcfsTest {
 		mergeVcfs.instanceMain(new String[0]);
 	}
 
-	@Test (expectedExceptions = IllegalArgumentException.class)
+	@Test (expectedExceptions = TribbleException.class)
 	public void testFailsOnNoContigList() {
 		final File contiglessIndelFile = new File(TEST_DATA_PATH + "CEUTrio-indels-no-contigs.vcf");
 		final File snpInputFile = new File(TEST_DATA_PATH, "CEUTrio-snps.vcf");
 
 		final MergeVcfs mergeVcfs = new MergeVcfs();
 		mergeVcfs.OUTPUT = new File("/dev/null/blah");
-		mergeVcfs.CREATE_INDEX = false;
 		mergeVcfs.INPUT = Arrays.asList(contiglessIndelFile, snpInputFile);
 
 		mergeVcfs.instanceMain(new String[0]);
@@ -64,7 +64,6 @@ public class MergeVcfsTest {
 
 		final MergeVcfs mergeVcfs = new MergeVcfs();
 		mergeVcfs.OUTPUT = new File("/dev/null/blah");
-		mergeVcfs.CREATE_INDEX = false;
 		mergeVcfs.INPUT = Arrays.asList(badSampleIndelFile, snpInputFile);
 
 		mergeVcfs.instanceMain(new String[0]);
@@ -76,18 +75,11 @@ public class MergeVcfsTest {
 		final File snpInputFile = new File(TEST_DATA_PATH + "CEUTrio-snps.vcf");
 		final File output = new File(TEST_DATA_PATH + "merge-indels-snps-test-output-delete-me.vcf");
 
-		final VariantContextIterator indelIterator = VariantContextIteratorFactory.create(indelInputFile);
-		final VariantContextIterator snpIterator = VariantContextIteratorFactory.create(snpInputFile);
-
-		final Queue<String> indelContigPositions = getContigPositions(indelIterator);
-		final Queue<String> snpContigPositions = getContigPositions(snpIterator);
-
-		indelIterator.close();
-		snpIterator.close();
+		final Queue<String> indelContigPositions = loadContigPositions(indelInputFile);
+		final Queue<String> snpContigPositions = loadContigPositions(snpInputFile);
 
 		final MergeVcfs mergeVcfs = new MergeVcfs();
 		mergeVcfs.OUTPUT = output;
-		mergeVcfs.CREATE_INDEX = false;
 		mergeVcfs.INPUT = Arrays.asList(indelInputFile, snpInputFile);
 
 		final int returnCode = mergeVcfs.instanceMain(new String[0]);
@@ -98,11 +90,12 @@ public class MergeVcfsTest {
 		// if the context is an indel (snp), the next genomic position in the indel
 		// (snp) queue is the same. Also make sure that the context is in the order
 		// specified by the input files.
-		final VariantContextIterator outputIterator = VariantContextIteratorFactory.create(output);
-		final VariantContextComparator outputComparator = new VariantContextComparator(getContigs(outputIterator));
+		final VCFFileReader outputReader = new VCFFileReader(output);
+		final VariantContextComparator outputComparator = outputReader.getFileHeader().getVCFRecordComparator();
 		VariantContext last = null;
-		while (outputIterator.hasNext()) {
-			final VariantContext outputContext = outputIterator.next();
+		final CloseableIterator<VariantContext> iterator = outputReader.iterator();
+		while (iterator.hasNext()) {
+			final VariantContext outputContext = iterator.next();
 			if (outputContext.isIndel()) Assert.assertEquals(getContigPosition(outputContext), indelContigPositions.poll());
 			if (outputContext.isSNP()) Assert.assertEquals(getContigPosition(outputContext), snpContigPositions.poll());
 			if (last != null) Assert.assertTrue(outputComparator.compare(last, outputContext) < 0);
@@ -126,28 +119,29 @@ public class MergeVcfsTest {
 		final File five = new File(TEST_DATA_PATH, "CEUTrio-random-scatter-5.vcf");
 
 		final List<Queue<String>> positionQueues = new ArrayList<Queue<String>>(6);
-		positionQueues.add(0, getContigPositions(VariantContextIteratorFactory.create(zero)));
-		positionQueues.add(1, getContigPositions(VariantContextIteratorFactory.create(one)));
-		positionQueues.add(2, getContigPositions(VariantContextIteratorFactory.create(two)));
-		positionQueues.add(3, getContigPositions(VariantContextIteratorFactory.create(three)));
-		positionQueues.add(4, getContigPositions(VariantContextIteratorFactory.create(four)));
-		positionQueues.add(5, getContigPositions(VariantContextIteratorFactory.create(five)));
+		positionQueues.add(0, loadContigPositions(zero));
+		positionQueues.add(1, loadContigPositions(one));
+		positionQueues.add(2, loadContigPositions(two));
+		positionQueues.add(3, loadContigPositions(three));
+		positionQueues.add(4, loadContigPositions(four));
+		positionQueues.add(5, loadContigPositions(five));
 
 		final File output = new File(TEST_DATA_PATH + "random-scatter-test-output-delete-me.vcf");
+		output.deleteOnExit();
 
 		final MergeVcfs mergeVcfs = new MergeVcfs();
 		mergeVcfs.OUTPUT = output;
-		mergeVcfs.CREATE_INDEX = false;
 		mergeVcfs.INPUT = Arrays.asList(zero, one, two, three, four, five);
 
 		final int returnCode = mergeVcfs.instanceMain(new String[0]);
 		Assert.assertEquals(returnCode, 0);
 
-		final VariantContextIterator outputIterator = VariantContextIteratorFactory.create(output);
-		final VariantContextComparator outputComparator = new VariantContextComparator(getContigs(outputIterator));
+		final VCFFileReader outputReader = new VCFFileReader(output);
+		final VariantContextComparator outputComparator = outputReader.getFileHeader().getVCFRecordComparator();
 		VariantContext last = null;
-		while (outputIterator.hasNext()) {
-			final VariantContext outputContext = outputIterator.next();
+		final CloseableIterator<VariantContext> iterator = outputReader.iterator();
+		while (iterator.hasNext()) {
+			final VariantContext outputContext = iterator.next();
 			final String position = getContigPosition(outputContext);
 			for (final Queue<String> positionQueue : positionQueues) {
 				if (position.equals(positionQueue.peek())) {
@@ -163,43 +157,19 @@ public class MergeVcfsTest {
 		for (final Queue<String> positionQueue : positionQueues) {
 			Assert.assertEquals(positionQueue.size(), 0);
 		}
-
-		output.deleteOnExit();
-	}
-
-	@Test (enabled = false)
-	public void dumpHeaders() {
-		final File[] files = new File[] {
-				new File("/Volumes/Disko Segundo/mergevcfs/t2d_genes_contam_test4_per_sample_plus_five.snps.recalibrated.vcf"),
-				new File("/Volumes/Disko Segundo/mergevcfs/t2d_genes_contam_test4_per_sample_plus_five.indels.filtered.vcf"),
-				new File("/Volumes/Disko Segundo/mergevcfs/t2d_genes_contam_test4_per_sample_plus_five.unannotated.vcf"),
-				new File("/Users/jrose/development/long-merge-test.vcf")
-		};
-		for (final File file : files) {
-			final VariantContextIterator iterator = VariantContextIteratorFactory.create(file);
-			final File output = new File("/Volumes/Disko Segundo/mergevcfs/", file.getName() + ".header");
-			final VariantContextWriter writer = VariantContextWriterFactory.create(output, null, VariantContextWriterFactory.NO_OPTIONS);
-			writer.writeHeader(iterator.getHeader());
-			writer.close();
-		}
 	}
 
-	static Queue<String> getContigPositions(final VariantContextIterator iterator) {
+	static Queue<String> loadContigPositions(final File inputFile) {
+		final VCFFileReader reader = new VCFFileReader(inputFile);
 		final Queue<String> contigPositions = new LinkedList<String>();
+		final CloseableIterator<VariantContext> iterator = reader.iterator();
 		while (iterator.hasNext()) contigPositions.add(getContigPosition(iterator.next()));
+		iterator.close();
+		reader.close();
 		return contigPositions;
 	}
 
 	static String getContigPosition(final VariantContext context) {
 		return context.getChr() + "-" + Integer.toString(context.getStart());
 	}
-
-	static List<String> getContigs(final VariantContextIterator iterator) {
-		final List<String> contigList = new ArrayList<String>();
-		for (final VCFContigHeaderLine contigHeaderLine : iterator.getHeader().getContigLines()) {
-			contigList.add(contigHeaderLine.getID());
-		}
-
-		return contigList;
-	}
 }
diff --git a/src/tests/java/net/sf/picard/vcf/SplitVcfsTest.java b/src/tests/java/net/sf/picard/vcf/SplitVcfsTest.java
index 072a78d..ad0092a 100644
--- a/src/tests/java/net/sf/picard/vcf/SplitVcfsTest.java
+++ b/src/tests/java/net/sf/picard/vcf/SplitVcfsTest.java
@@ -1,9 +1,11 @@
 package net.sf.picard.vcf;
 
+import net.sf.samtools.util.CloseableIterator;
 import org.broadinstitute.variant.variantcontext.VariantContext;
 import org.broadinstitute.variant.variantcontext.VariantContext.Type;
 import org.broadinstitute.variant.variantcontext.writer.VariantContextWriter;
 import org.broadinstitute.variant.variantcontext.writer.VariantContextWriterFactory;
+import org.broadinstitute.variant.vcf.VCFFileReader;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
@@ -22,24 +24,24 @@ public class SplitVcfsTest {
 		final File snpOutputFile = new File(TEST_DATA_PATH + "split-vcfs-test-snps-delete-me.vcf");
 		final File input = new File(TEST_DATA_PATH + "CEUTrio-merged-indels-snps.vcf");
 
+		indelOutputFile.deleteOnExit();
+		snpOutputFile.deleteOnExit();
+
 		final SplitVcfs splitVcfs = new SplitVcfs();
 		splitVcfs.SNP_OUTPUT = snpOutputFile;
 		splitVcfs.INDEL_OUTPUT = indelOutputFile;
 		splitVcfs.INPUT = input;
-		splitVcfs.CREATE_INDEX = false;
 
 		final int returnCode = splitVcfs.instanceMain(new String[0]);
 		Assert.assertEquals(returnCode, 0);
 
-		final VariantContextIterator indelIterator = VariantContextIteratorFactory.create(indelOutputFile);
-		final VariantContextIterator snpIterator = VariantContextIteratorFactory.create(snpOutputFile);
-
-		final Queue<String> indelContigPositions = MergeVcfsTest.getContigPositions(indelIterator);
-		final Queue<String> snpContigPositions = MergeVcfsTest.getContigPositions(snpIterator);
+		final Queue<String> indelContigPositions = MergeVcfsTest.loadContigPositions(indelOutputFile);
+		final Queue<String> snpContigPositions = MergeVcfsTest.loadContigPositions(snpOutputFile);
 
-		final VariantContextIterator inputIterator = VariantContextIteratorFactory.create(input);
-		while (inputIterator.hasNext()) {
-			final VariantContext inputContext = inputIterator.next();
+		final VCFFileReader reader = new VCFFileReader(input);
+		final CloseableIterator<VariantContext> iterator = reader.iterator();
+		while (iterator.hasNext()) {
+			final VariantContext inputContext = iterator.next();
 			if (inputContext.isIndel()) Assert.assertEquals(MergeVcfsTest.getContigPosition(inputContext), indelContigPositions.poll());
 			if (inputContext.isSNP()) Assert.assertEquals(MergeVcfsTest.getContigPosition(inputContext), snpContigPositions.poll());
 		}
@@ -47,9 +49,6 @@ public class SplitVcfsTest {
 		// We should have polled everything off the indel (snp) queues
 		Assert.assertEquals(indelContigPositions.size(), 0);
 		Assert.assertEquals(snpContigPositions.size(), 0);
-
-		indelOutputFile.deleteOnExit();
-		snpOutputFile.deleteOnExit();
 	}
 
 	@Test (enabled = false)
@@ -62,18 +61,18 @@ public class SplitVcfsTest {
 		final Map<Type, Integer> inputCounts = new HashMap<Type, Integer>();
 		final Map<Type, Integer> outputCounts = new HashMap<Type, Integer>();
 		final File INPUT = new File("/Volumes/Disko Segundo/splitvcfs/CEUTrio.HiSeq.WGS.b37.snps_and_indels.recalibrated.filtered.phased.CURRENT.vcf.gz");
-		final VariantContextIterator variantContextIterator = VariantContextIteratorFactory.create(INPUT);
+		final VCFFileReader reader = new VCFFileReader(INPUT);
 
 		final VariantContextWriter OUTPUT =
 				VariantContextWriterFactory.create(
 						new File("/Volumes/shm/CEUTrio-REDUCED.vcf"),
 						null,
 						VariantContextWriterFactory.NO_OPTIONS);
-		OUTPUT.writeHeader(variantContextIterator.getHeader());
-
-		while (variantContextIterator.hasNext()) {
-			final VariantContext variantContext = variantContextIterator.next();
+		OUTPUT.writeHeader(reader.getFileHeader());
 
+		final CloseableIterator<VariantContext> iterator = reader.iterator();
+		while (iterator.hasNext()) {
+			final VariantContext variantContext = iterator.next();
 			totalIn++;
 
 			final Integer inputCount = inputCounts.get(variantContext.getType());
@@ -89,7 +88,7 @@ public class SplitVcfsTest {
 			}
 		}
 
-		variantContextIterator.close();
+		reader.close();
 		OUTPUT.close();
 
 		System.out.println("INPUT: " + totalIn + "; OUTPUT: " + totalOut);
diff --git a/src/tests/java/net/sf/picard/vcf/VariantContextComparatorTest.java b/src/tests/java/net/sf/picard/vcf/VariantContextComparatorTest.java
index 85dff08..18588d3 100644
--- a/src/tests/java/net/sf/picard/vcf/VariantContextComparatorTest.java
+++ b/src/tests/java/net/sf/picard/vcf/VariantContextComparatorTest.java
@@ -26,6 +26,7 @@ package net.sf.picard.vcf;
 import org.broadinstitute.variant.variantcontext.Allele;
 import org.broadinstitute.variant.variantcontext.VariantContext;
 import org.broadinstitute.variant.variantcontext.VariantContextBuilder;
+import org.broadinstitute.variant.variantcontext.VariantContextComparator;
 import org.testng.Assert;
 import org.testng.annotations.Test;
 
diff --git a/src/tests/java/net/sf/picard/vcf/VariantContextIteratorTest.java b/src/tests/java/net/sf/picard/vcf/VariantContextIteratorTest.java
deleted file mode 100644
index d9c8e8a..0000000
--- a/src/tests/java/net/sf/picard/vcf/VariantContextIteratorTest.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package net.sf.picard.vcf;
-
-import net.sf.samtools.util.CloseableIterator;
-import org.broadinstitute.variant.variantcontext.VariantContext;
-import org.broadinstitute.variant.vcf.VCFHeader;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import java.io.File;
-
-public class VariantContextIteratorTest {
-
-	private static final String TEST_DATA_PATH = "testdata/net/sf/picard/vcf/";
-
-	@Test
-	public void testNext() {
-		final File input = new File(TEST_DATA_PATH + "CEUTrio-merged-indels-snps.vcf");
-		final CloseableIterator<VariantContext> variantContextIterator = VariantContextIteratorFactory.create(input);
-
-		Assert.assertNotNull(variantContextIterator.next());
-		Assert.assertTrue(variantContextIterator.hasNext());
-		Assert.assertNotNull(variantContextIterator.next());
-	}
-}
diff --git a/src/tests/java/net/sf/picard/vcf/VcfFormatConverterTest.java b/src/tests/java/net/sf/picard/vcf/VcfFormatConverterTest.java
index 7bbcc6d..1dadb88 100644
--- a/src/tests/java/net/sf/picard/vcf/VcfFormatConverterTest.java
+++ b/src/tests/java/net/sf/picard/vcf/VcfFormatConverterTest.java
@@ -32,57 +32,54 @@ import org.testng.annotations.Test;
 import java.io.File;
 import java.io.IOException;
 
-/**
- * Test methods for VcfFormatConverter
- *
- * @author jgentry at broadinstitute.org
- */
 public class VcfFormatConverterTest {
     private static final String TEST_DATA_PATH = "testdata/net/sf/picard/vcf/";
     private static final String TEST_FILE_BASE = "vcfFormatTest";
 
-    private static final File TEST_VCF = new File(TEST_DATA_PATH, TEST_FILE_BASE + VCF_FORMAT.VCF.getExtension());
-    private static final File TEST_BCF = new File(TEST_DATA_PATH, TEST_FILE_BASE + VCF_FORMAT.BCF.getExtension());
+	private static final String VCF = ".vcf";
+	private static final String BCF = ".bcf";
 
+    private static final File TEST_VCF = new File(TEST_DATA_PATH, TEST_FILE_BASE + VCF);
+    private static final File TEST_BCF = new File(TEST_DATA_PATH, TEST_FILE_BASE + BCF);
 
     @Test
     public void testVcfToVcf() {
-        runLikeTest(TEST_VCF, VCF_FORMAT.VCF);
+        runLikeTest(TEST_VCF, VCF);
     }
 
     @Test
     public void testVcfToBcf() {
-        runBackAndForthTest(TEST_VCF, VCF_FORMAT.BCF);
+        runBackAndForthTest(TEST_VCF, BCF);
     }
 
     @Test
     public void testBcfToBcf() {
-        runLikeTest(TEST_BCF, VCF_FORMAT.BCF);
+        runLikeTest(TEST_BCF, BCF);
     }
 
     @Test
     public void testBcfToVcf() {
-        runBackAndForthTest(TEST_BCF, VCF_FORMAT.VCF);
+        runBackAndForthTest(TEST_BCF, VCF);
     }
 
-    private void runLikeTest(final File input, final VCF_FORMAT vcfFormat) {
-        final File outputFile = convertFile(input, "likeTest", vcfFormat);
+    private void runLikeTest(final File input, final String format) {
+        final File outputFile = convertFile(input, "likeTest", format);
         compareFiles(input, outputFile);
     }
 
-    private void runBackAndForthTest(final File input, final VCF_FORMAT vcfFormat) {
+    private void runBackAndForthTest(final File input, final String format) {
         final String tempPrefix = "backAndForth";
 
-        final File backAndForth = convertFile(input, tempPrefix, vcfFormat);
-        final File backAndForthSeries2 = convertFile(backAndForth, tempPrefix, VCF_FORMAT.getOppositeFormat(vcfFormat));
+        final File backAndForth = convertFile(input, tempPrefix, format);
+        final File backAndForthSeries2 = convertFile(backAndForth, tempPrefix, getOppositeFormat(format));
 
         compareFiles(input, backAndForthSeries2);
     }
 
-    private File convertFile(final File input, final String prefix, final VCF_FORMAT vcfFormat) {
+    private File convertFile(final File input, final String prefix, final String format) {
         final File outputFile;
         try {
-            outputFile = File.createTempFile(prefix, vcfFormat.getExtension());
+            outputFile = File.createTempFile(prefix, format);
         } catch (IOException ioe) {
             throw new PicardException("Unable to create temp file!");
         }
@@ -95,7 +92,7 @@ public class VcfFormatConverterTest {
         return outputFile;
     }
 
-   private void compareFiles(final File file1, final File file2) {
+    private void compareFiles(final File file1, final File file2) {
         // Ok, so this isn't exactly comparing md5 checksums or anything, but it should be good enough
         // for our purposes.
         Assert.assertTrue(file1.exists());
@@ -103,26 +100,9 @@ public class VcfFormatConverterTest {
         Assert.assertEquals(file1.length(), file2.length());
     }
 
-    private enum VCF_FORMAT {
-        VCF(".vcf"),
-        BCF(".bcf");
-
-        VCF_FORMAT(final String extension) {
-            this.extension = extension;
-        }
-
-        private final String extension;
-
-        public String getExtension() {
-            return extension;
-        }
-
-        public static VCF_FORMAT getOppositeFormat(final VCF_FORMAT curFormat) {
-            if (curFormat.equals(VCF)) {
-                return BCF;
-            } else {
-                return VCF;
-            }
-        }
-    }
+	public static String getOppositeFormat(final String curFormat) {
+		if (curFormat.equals(VCF)) return BCF;
+		else if (curFormat.equals(BCF)) return VCF;
+		else throw new IllegalArgumentException("Unrecognized format: " + curFormat);
+	}
 }
diff --git a/src/tests/java/net/sf/samtools/seekablestream/SeekableStreamFactoryTest.java b/src/tests/java/net/sf/samtools/seekablestream/SeekableStreamFactoryTest.java
new file mode 100644
index 0000000..66aeacc
--- /dev/null
+++ b/src/tests/java/net/sf/samtools/seekablestream/SeekableStreamFactoryTest.java
@@ -0,0 +1,15 @@
+package net.sf.samtools.seekablestream;
+
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+public class SeekableStreamFactoryTest {
+    @Test
+    public void testIsFilePath() throws Exception {
+        Assert.assertEquals(SeekableStreamFactory.isFilePath("x"), true);
+        Assert.assertEquals(SeekableStreamFactory.isFilePath(""), true);
+        Assert.assertEquals(SeekableStreamFactory.isFilePath("http://broadinstitute.org"), false);
+        Assert.assertEquals(SeekableStreamFactory.isFilePath("https://broadinstitute.org"), false);
+        Assert.assertEquals(SeekableStreamFactory.isFilePath("ftp://broadinstitute.org"), false);
+    }
+}
diff --git a/src/tests/java/org/broad/tribble/AbstractFeatureReaderTest.java b/src/tests/java/org/broad/tribble/AbstractFeatureReaderTest.java
index 7302ff8..0f409f6 100644
--- a/src/tests/java/org/broad/tribble/AbstractFeatureReaderTest.java
+++ b/src/tests/java/org/broad/tribble/AbstractFeatureReaderTest.java
@@ -1,9 +1,16 @@
 package org.broad.tribble;
 
 import org.broad.tribble.bed.BEDCodec;
+import org.broad.tribble.bed.BEDFeature;
+import org.broad.tribble.readers.LineIterator;
+import org.broadinstitute.variant.VariantBaseTest;
+import org.broadinstitute.variant.variantcontext.VariantContext;
+import org.broadinstitute.variant.vcf.VCFCodec;
 import org.testng.annotations.Test;
 
-import static org.testng.Assert.assertNotNull;
+import java.io.IOException;
+
+import static org.testng.Assert.*;
 
 /**
  * @author jacob
@@ -11,15 +18,33 @@ import static org.testng.Assert.assertNotNull;
  */
 public class AbstractFeatureReaderTest {
 
+    final static String HTTP_INDEXED_VCF_PATH = "http://www.broadinstitute.org/~picard/testdata/ex2.vcf";
+    final static String LOCAL_MIRROR_HTTP_INDEXED_VCF_PATH = VariantBaseTest.variantTestDataRoot + "ex2.vcf";
+
+    /**
+     * Asserts readability and correctness of VCF over HTTP.  The VCF is indexed and requires an index.
+     */
+    @Test
+    public void testVcfOverHTTP() throws IOException {
+        final VCFCodec codec = new VCFCodec();
+        final AbstractFeatureReader<VariantContext, LineIterator> featureReaderHttp =
+                AbstractFeatureReader.getFeatureReader(HTTP_INDEXED_VCF_PATH, codec, true); // Require an index to be present
+        final AbstractFeatureReader<VariantContext, LineIterator> featureReaderLocal =
+                AbstractFeatureReader.getFeatureReader(LOCAL_MIRROR_HTTP_INDEXED_VCF_PATH, codec, false);
+        final CloseableTribbleIterator<VariantContext> localIterator = featureReaderLocal.iterator();
+        for (final Feature feat : featureReaderHttp.iterator()) {
+            assertEquals(feat.toString(), localIterator.next().toString());
+        }
+        assertFalse(localIterator.hasNext());
+    }
+
     @Test
     public void testLoadBEDFTP() throws Exception {
-        String path = "ftp://ftp.broadinstitute.org/distribution/igv/TEST/cpgIslands with spaces.hg18.bed";
-        FeatureCodec codec = new BEDCodec();
-        AbstractFeatureReader<Feature> bfs = AbstractFeatureReader.getFeatureReader(path, codec, false);
-        for(Feature feat: bfs.iterator()){
+        final String path = "ftp://ftp.broadinstitute.org/distribution/igv/TEST/cpgIslands with spaces.hg18.bed";
+        final BEDCodec codec = new BEDCodec();
+        final AbstractFeatureReader<BEDFeature, LineIterator> bfs = AbstractFeatureReader.getFeatureReader(path, codec, false);
+        for (final Feature feat : bfs.iterator()) {
             assertNotNull(feat);
         }
-
     }
-
 }
diff --git a/src/tests/java/org/broad/tribble/BinaryFeaturesTest.java b/src/tests/java/org/broad/tribble/BinaryFeaturesTest.java
index a4d9ad6..a0235f7 100644
--- a/src/tests/java/org/broad/tribble/BinaryFeaturesTest.java
+++ b/src/tests/java/org/broad/tribble/BinaryFeaturesTest.java
@@ -2,11 +2,14 @@ package org.broad.tribble;
 
 import org.broad.tribble.bed.BEDCodec;
 import org.broad.tribble.example.ExampleBinaryCodec;
+import org.broad.tribble.readers.LineIterator;
+import org.broad.tribble.readers.LineReader;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
-import java.io.*;
+import java.io.File;
+import java.io.IOException;
 import java.util.Iterator;
 import java.util.List;
 
@@ -22,7 +25,7 @@ public class BinaryFeaturesTest {
     }
 
     @Test(enabled = true, dataProvider = "BinaryFeatureSources")
-    public void testBinaryCodec(final File source, final FeatureCodec<Feature> codec) throws IOException {
+    public void testBinaryCodec(final File source, final FeatureCodec<Feature, LineIterator> codec) throws IOException {
         final File tmpFile = File.createTempFile("testBinaryCodec", ".binary.bed");
         ExampleBinaryCodec.convertToBinaryTest(source, tmpFile, codec);
         tmpFile.deleteOnExit();
@@ -31,7 +34,7 @@ public class BinaryFeaturesTest {
         final FeatureReader<Feature> binaryReader = AbstractFeatureReader.getFeatureReader(tmpFile.getAbsolutePath(), new ExampleBinaryCodec(), false);
 
         // make sure the header is what we expect
-        final List<String> header = (List<String>)binaryReader.getHeader();
+        final List<String> header = (List<String>) binaryReader.getHeader();
         Assert.assertEquals(header.size(), 1, "We expect exactly one header line");
         Assert.assertEquals(header.get(0), ExampleBinaryCodec.HEADER_LINE, "Failed to read binary header line");
 
diff --git a/src/tests/java/org/broad/tribble/FeatureReaderTest.java b/src/tests/java/org/broad/tribble/FeatureReaderTest.java
index 196a03c..e4530d3 100644
--- a/src/tests/java/org/broad/tribble/FeatureReaderTest.java
+++ b/src/tests/java/org/broad/tribble/FeatureReaderTest.java
@@ -1,11 +1,13 @@
 package org.broad.tribble;
 
 import net.sf.samtools.seekablestream.SeekableFileStream;
+import net.sf.samtools.util.CloserUtil;
 import org.broad.tribble.bed.BEDCodec;
 import org.broad.tribble.example.ExampleBinaryCodec;
 import org.broad.tribble.index.Block;
 import org.broad.tribble.index.Index;
 import org.broad.tribble.index.IndexFactory;
+import org.broad.tribble.readers.LocationAware;
 import org.broad.tribble.util.ParsingUtils;
 import org.testng.Assert;
 import org.testng.annotations.AfterClass;
@@ -48,8 +50,8 @@ public class FeatureReaderTest {
     }
 
     @Test(dataProvider = "indexProvider")
-    public void testBedQuery(final File featureFile, IndexFactory.IndexType indexType, FeatureCodec<Feature> codec) throws IOException {
-        final AbstractFeatureReader<Feature> reader = getReader(featureFile, indexType, codec);
+    public void testBedQuery(final File featureFile, final IndexFactory.IndexType indexType, final FeatureCodec<Feature, LocationAware> codec) throws IOException {
+        final AbstractFeatureReader<Feature, ?> reader = getReader(featureFile, indexType, codec);
 
         // Query
         testQuery(reader, "chr1", 1, 500, 3);
@@ -73,18 +75,17 @@ public class FeatureReaderTest {
     }
 
     @Test(dataProvider = "indexProvider")
-    public void testLargeNumberOfQueries(final File featureFile, IndexFactory.IndexType indexType, FeatureCodec<Feature> codec) throws IOException {
-        final AbstractFeatureReader<Feature> reader = getReader(featureFile, indexType, codec);
-
-        final List<Integer> sites = Arrays.asList(500, 200, 201, 600, 100000);
+    public void testLargeNumberOfQueries(final File featureFile, final IndexFactory.IndexType indexType, final FeatureCodec<Feature, LocationAware> codec) throws IOException {
+        final AbstractFeatureReader<Feature, LocationAware> reader = getReader(featureFile, indexType, codec);
         for (int i = 0; i < 2000; i++) {
-            for (int start : sites) {
-                int end = start + 1; // query so we find something
-                if (start < end) {
-                    for (final String chr : Arrays.asList("chr1", "chr2", "chr3")) {
-                        CloseableTribbleIterator<Feature> iter = reader.query(chr, start, end);
-                        Assert.assertTrue(iter != null, "Failed to create non-null iterator");
-                        iter.close();
+            for (final int start : Arrays.asList(500, 200, 201, 600, 100000)) {
+                for (final String chr : Arrays.asList("chr1", "chr2", "chr3")) {
+                    CloseableTribbleIterator<Feature> iter = null;
+                    try {
+                        iter = reader.query(chr, start, start + 1);
+                        Assert.assertNotNull(iter, "Failed to create non-null iterator");
+                    } finally {
+                        CloserUtil.close(iter);
                     }
                 }
             }
@@ -94,8 +95,8 @@ public class FeatureReaderTest {
         reader.close();
     }
 
-    private void testQuery(AbstractFeatureReader<Feature> reader, final String chr, int start, int stop, int expectedNumRecords) throws IOException {
-        Iterator<Feature> iter = reader.query(chr, start, stop);
+    private void testQuery(final AbstractFeatureReader<Feature, ?> reader, final String chr, final int start, final int stop, final int expectedNumRecords) throws IOException {
+        final Iterator<Feature> iter = reader.query(chr, start, stop);
         int count = 0;
         while (iter.hasNext()) {
             final Feature f = iter.next();
@@ -106,32 +107,32 @@ public class FeatureReaderTest {
     }
 
     @Test(dataProvider = "indexProvider")
-    public void testBedNames(final File featureFile, IndexFactory.IndexType indexType, FeatureCodec<Feature> codec) throws IOException {
-        final AbstractFeatureReader<Feature> reader = getReader(featureFile, indexType, codec);
-        String[] expectedSequences = {"chr1", "chr2"};
+    public void testBedNames(final File featureFile, final IndexFactory.IndexType indexType, final FeatureCodec<Feature, LocationAware> codec) throws IOException {
+        final AbstractFeatureReader<Feature, ?> reader = getReader(featureFile, indexType, codec);
+        final String[] expectedSequences = {"chr1", "chr2"};
 
-        List<String> seqNames = reader.getSequenceNames();
+        final List<String> seqNames = reader.getSequenceNames();
         Assert.assertEquals(seqNames.size(), expectedSequences.length,
                 "Expected sequences " + ParsingUtils.join(",", expectedSequences) + " but saw " + ParsingUtils.join(",", seqNames));
 
-        for (String s : expectedSequences) {
+        for (final String s : expectedSequences) {
             Assert.assertTrue(seqNames.contains(s));
         }
     }
 
-    private AbstractFeatureReader<Feature> getReader(final File featureFile,
-                                                     IndexFactory.IndexType indexType,
-                                                     FeatureCodec<Feature> codec)
+    private static <FEATURE extends Feature, SOURCE extends LocationAware> AbstractFeatureReader<FEATURE, SOURCE> getReader(final File featureFile,
+                                                                                                                            final IndexFactory.IndexType indexType,
+                                                                                                                            final FeatureCodec<FEATURE, SOURCE> codec)
             throws IOException {
         if (indexType.canCreate()) {
             // for types we can create make a new index each time
-            File idxFile = Tribble.indexFile(featureFile);
+            final File idxFile = Tribble.indexFile(featureFile);
 
             // delete an already existing index
             if (idxFile.exists()) {
                 idxFile.delete();
             }
-            Index idx = IndexFactory.createIndex(featureFile, codec, indexType);
+            final Index idx = IndexFactory.createIndex(featureFile, codec, indexType);
             IndexFactory.writeIndex(idx, idxFile);
 
             idxFile.deleteOnExit();
diff --git a/src/tests/java/org/broad/tribble/index/IndexFactoryTest.java b/src/tests/java/org/broad/tribble/index/IndexFactoryTest.java
index e098da0..5e81282 100644
--- a/src/tests/java/org/broad/tribble/index/IndexFactoryTest.java
+++ b/src/tests/java/org/broad/tribble/index/IndexFactoryTest.java
@@ -23,7 +23,6 @@
  */
 package org.broad.tribble.index;
 
-import org.broad.tribble.FeatureCodec;
 import org.broad.tribble.TestUtils;
 import org.broad.tribble.TribbleException;
 import org.broad.tribble.bed.BEDCodec;
@@ -43,7 +42,7 @@ public class IndexFactoryTest {
     final File sortedBedFile = new File(TestUtils.DATA_DIR + "bed/Unigene.sample.bed");
     final File unsortedBedFile = new File(TestUtils.DATA_DIR + "bed/unsorted.bed");
     final File discontinuousFile = new File(TestUtils.DATA_DIR + "bed/disconcontigs.bed");
-    final FeatureCodec bedCodec = new BEDCodec();
+    final BEDCodec bedCodec = new BEDCodec();
 
     @Test
     public void testCreateLinearIndex() throws Exception {
diff --git a/src/tests/java/org/broad/tribble/index/interval/IntervalTreeTest.java b/src/tests/java/org/broad/tribble/index/interval/IntervalTreeTest.java
index 95c6c3a..e15ac18 100644
--- a/src/tests/java/org/broad/tribble/index/interval/IntervalTreeTest.java
+++ b/src/tests/java/org/broad/tribble/index/interval/IntervalTreeTest.java
@@ -131,7 +131,7 @@ public class IntervalTreeTest {
 
         // Interval tree index
         int batchSize = 1;
-        Index idx = IndexFactory.createIntervalIndex(new File(bedFile), new BEDCodec(),batchSize);
+        Index idx = IndexFactory.createIntervalIndex(new File(bedFile), new BEDCodec(), batchSize);
 
         FeatureReader<BEDFeature> bfr = AbstractFeatureReader.getFeatureReader(bedFile, new BEDCodec(), idx);
         CloseableTribbleIterator<BEDFeature>iter = bfr.query(chr, start, end);
diff --git a/src/tests/java/org/broad/tribble/readers/AsynchronousLineReaderTest.java b/src/tests/java/org/broad/tribble/readers/AsynchronousLineReaderTest.java
new file mode 100644
index 0000000..f388aba
--- /dev/null
+++ b/src/tests/java/org/broad/tribble/readers/AsynchronousLineReaderTest.java
@@ -0,0 +1,31 @@
+package org.broad.tribble.readers;
+
+import org.broad.tribble.TestUtils;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.*;
+
+/**
+ * @author mccowan
+ */
+public class AsynchronousLineReaderTest {
+
+    /**
+     * Test that the AsynchronousLineReader returns the same lines, in the same
+     * order, as a standard BufferedReader reading the same file
+     * @throws Exception
+     */
+    @Test
+    public void testReadLines() throws Exception {
+        final File filePath = new File(TestUtils.DATA_DIR + "large.txt");
+        final AsynchronousLineReader reader = new AsynchronousLineReader(new InputStreamReader(new FileInputStream(filePath)));
+        final BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(filePath)));
+
+        String nextLine;
+        while ((nextLine = br.readLine()) != null) {
+            Assert.assertEquals(nextLine, reader.readLine());
+        }
+        Assert.assertNull(reader.readLine());
+    }
+}
diff --git a/src/tests/java/org/broad/tribble/readers/LineReaderUtilTest.java b/src/tests/java/org/broad/tribble/readers/LineReaderUtilTest.java
new file mode 100644
index 0000000..539cfa9
--- /dev/null
+++ b/src/tests/java/org/broad/tribble/readers/LineReaderUtilTest.java
@@ -0,0 +1,27 @@
+package org.broad.tribble.readers;
+
+import org.broad.tribble.TestUtils;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+
+/**
+ * @author mccowan
+ */
+public class LineReaderUtilTest {
+    @Test
+    public void testLineReaderIterator() throws Exception {
+        final File filePath = new File(TestUtils.DATA_DIR + "gwas/smallp.gwas");
+        final LineIterator lineIterator = new LineIteratorImpl(LineReaderUtil.fromBufferedStream(new PositionalBufferedStream(new FileInputStream(filePath))));
+        final BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(filePath)));
+
+        while (lineIterator.hasNext()) {
+            Assert.assertEquals(lineIterator.next(), br.readLine());
+        }
+        Assert.assertNull(br.readLine());
+    }
+}
diff --git a/src/tests/java/org/broad/tribble/readers/LongLineBufferedReaderTest.java b/src/tests/java/org/broad/tribble/readers/LongLineBufferedReaderTest.java
new file mode 100644
index 0000000..eaed097
--- /dev/null
+++ b/src/tests/java/org/broad/tribble/readers/LongLineBufferedReaderTest.java
@@ -0,0 +1,33 @@
+package org.broad.tribble.readers;
+
+import org.broad.tribble.TestUtils;
+import org.testng.Assert;
+import org.testng.annotations.Test;
+
+import java.io.BufferedReader;
+import java.io.FileInputStream;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+
+/**
+ * @author mccowan
+ */
+public class LongLineBufferedReaderTest {
+
+    /**
+     * Test that LongLineBufferedReader returns the same lines, in the same
+     * order, as a standard BufferedReader reading the same file
+     * @throws Exception
+     */
+    @Test
+    public void testReadLines() throws Exception {
+        String filePath = TestUtils.DATA_DIR + "large.txt";
+        BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(filePath)));
+        LongLineBufferedReader testReader = new LongLineBufferedReader(new InputStreamReader(new FileInputStream(filePath)));
+        String line;
+        while((line = reader.readLine()) != null){
+            Assert.assertEquals(testReader.readLine(), line);
+        }
+        Assert.assertNull(testReader.readLine());
+    }
+}
diff --git a/src/tests/java/org/broad/tribble/readers/ReaderTest.java b/src/tests/java/org/broad/tribble/readers/ReaderTest.java
index d0345aa..6746698 100644
--- a/src/tests/java/org/broad/tribble/readers/ReaderTest.java
+++ b/src/tests/java/org/broad/tribble/readers/ReaderTest.java
@@ -144,9 +144,14 @@ public class ReaderTest {
         final byte[] bytes = lines.getBytes();
         final InputStream is = new ByteArrayInputStream(bytes);
         final PositionalBufferedStream pbs = new PositionalBufferedStream(is);
-        final AsciiLineReader alr = new AsciiLineReader(pbs);
+        final LineReader alr = new AsciiLineReader(pbs); // AsciiLineReader must be used here because it does not read ahead.
 
         int bytePos = 0, linePos = 0;
+        /** 
+         * TODO: Requires revision: we're calling readLine() here, but making assumptions about how the underlying input stream operates.
+         * Specifically, these tests assume the underlying stream only advances exactly the required number of characters to find the
+         * newline, which is not true for most buffered readers.
+         */
         while ( ! pbs.isDone() ) {
             Assert.assertTrue(bytePos < bytes.length);
 
diff --git a/src/tests/java/org/broadinstitute/variant/bcf2/BCF2UtilsUnitTest.java b/src/tests/java/org/broadinstitute/variant/bcf2/BCF2UtilsUnitTest.java
index 5d01a45..1e0cf97 100644
--- a/src/tests/java/org/broadinstitute/variant/bcf2/BCF2UtilsUnitTest.java
+++ b/src/tests/java/org/broadinstitute/variant/bcf2/BCF2UtilsUnitTest.java
@@ -26,10 +26,13 @@
 package org.broadinstitute.variant.bcf2;
 
 import org.broadinstitute.variant.VariantBaseTest;
+import org.broadinstitute.variant.bcf2.BCF2Utils;
 import org.broadinstitute.variant.utils.GeneralUtils;
 import org.broadinstitute.variant.vcf.*;
 
 import java.util.*;
+
+import org.broadinstitute.variant.vcf.VCFSimpleHeaderLine;
 import org.testng.Assert;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
@@ -58,6 +61,29 @@ public final class BCF2UtilsUnitTest extends VariantBaseTest {
             Assert.assertEquals(BCF2Utils.explodeStringList(actualCollapsed), in);
     }
 
+    @Test
+    public void testCreateDictionary() {
+        final List<VCFHeaderLine> inputLines = new ArrayList<VCFHeaderLine>();
+        int counter = 0;
+        inputLines.add(new VCFFilterHeaderLine(String.valueOf(counter++)));
+        inputLines.add(new VCFFilterHeaderLine(String.valueOf(counter++)));
+        inputLines.add(new VCFContigHeaderLine(Collections.singletonMap("ID", String.valueOf(counter++)), counter));
+        inputLines.add(new VCFContigHeaderLine(Collections.singletonMap("ID", String.valueOf(counter++)), counter));
+        inputLines.add(new VCFInfoHeaderLine(String.valueOf(counter++), VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.Integer, "x"));
+        inputLines.add(new VCFInfoHeaderLine(String.valueOf(counter++), VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.Integer, "x"));
+        inputLines.add(new VCFHeaderLine("x", "misc"));
+        inputLines.add(new VCFHeaderLine("y", "misc"));
+        inputLines.add(new VCFSimpleHeaderLine("GATKCommandLine","z","misc"));
+        inputLines.add(new VCFFormatHeaderLine(String.valueOf(counter++), VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.Integer, "x"));
+        inputLines.add(new VCFFormatHeaderLine(String.valueOf(counter++), VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.Integer, "x"));
+        final int inputLineCounter = counter;
+        final VCFHeader inputHeader = new VCFHeader(new LinkedHashSet<VCFHeaderLine>(inputLines));
+        final ArrayList<String> dict = BCF2Utils.makeDictionary(inputHeader);
+        final int dictSize = dict.size();
+        Assert.assertEquals(dictSize, 7);
+    }
+
+
     @DataProvider(name = "HeaderOrderTestProvider")
     public Object[][] makeHeaderOrderTestProvider() {
         final List<VCFHeaderLine> inputLines = new ArrayList<VCFHeaderLine>();
@@ -150,4 +176,8 @@ public final class BCF2UtilsUnitTest extends VariantBaseTest {
         final boolean actualOrderConsistency = BCF2Utils.headerLinesAreOrderedConsistently(testHeader, inputHeader);
         Assert.assertEquals(actualOrderConsistency, expectedConsistent);
     }
+
+
+
+
 }
diff --git a/src/tests/java/org/broadinstitute/variant/variantcontext/VariantContextTestProvider.java b/src/tests/java/org/broadinstitute/variant/variantcontext/VariantContextTestProvider.java
index 7014b21..2ea0bed 100644
--- a/src/tests/java/org/broadinstitute/variant/variantcontext/VariantContextTestProvider.java
+++ b/src/tests/java/org/broadinstitute/variant/variantcontext/VariantContextTestProvider.java
@@ -27,18 +27,19 @@ package org.broadinstitute.variant.variantcontext;
 
 import org.broad.tribble.FeatureCodec;
 import org.broad.tribble.FeatureCodecHeader;
+import org.broad.tribble.readers.LineIterator;
+import org.broad.tribble.readers.LineIteratorImpl;
+import org.broad.tribble.readers.LineReaderUtil;
 import org.broad.tribble.readers.PositionalBufferedStream;
 import org.broadinstitute.variant.VariantBaseTest;
 import org.broadinstitute.variant.bcf2.BCF2Codec;
 import org.broadinstitute.variant.utils.GeneralUtils;
-import org.broadinstitute.variant.vcf.*;
 import org.broadinstitute.variant.variantcontext.writer.Options;
 import org.broadinstitute.variant.variantcontext.writer.VariantContextWriter;
+import org.broadinstitute.variant.vcf.*;
 import org.testng.Assert;
 
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
+import java.io.*;
 import java.util.*;
 
 /**
@@ -91,14 +92,16 @@ public class VariantContextTestProvider {
         }
     }
 
-    public abstract static class VariantContextIOTest {
+    public abstract static class VariantContextIOTest<CODECTYPE> {
         public String toString() {
             return "VariantContextIOTest:" + getExtension();
         }
         public abstract String getExtension();
-        public abstract FeatureCodec<VariantContext> makeCodec();
+        public abstract CODECTYPE makeCodec();
         public abstract VariantContextWriter makeWriter(final File outputFile, final EnumSet<Options> baseOptions);
 
+        public abstract VariantContextContainer readAllVCs(final File input) throws IOException;
+        
         public List<VariantContext> preprocess(final VCFHeader header, List<VariantContext> vcsBeforeIO) {
             return vcsBeforeIO;
         }
@@ -633,7 +636,7 @@ public class VariantContextTestProvider {
             writeVCsToFile(writer, header, data.vcs);
 
             // ensure writing of expected == actual
-            final VariantContextContainer p = readAllVCs(tmpFile, tester.makeCodec());
+            final VariantContextContainer p = tester.readAllVCs(tmpFile);
             final Iterable<VariantContext> actual = p.getVCs();
 
             int i = 0;
@@ -672,13 +675,13 @@ public class VariantContextTestProvider {
         writeVCsToFile(writer, header, vcs);
 
         // ensure writing of expected == actual
-        final VariantContextContainer p = readAllVCs(tmpFile, tester.makeCodec());
+        final VariantContextContainer p = tester.readAllVCs(tmpFile);
         final Iterable<VariantContext> actual = p.getVCs();
         assertEquals(actual, expected);
 
         if ( recurse ) {
             // if we are doing a recursive test, grab a fresh iterator over the written values
-            final Iterable<VariantContext> read = readAllVCs(tmpFile, tester.makeCodec()).getVCs();
+            final Iterable<VariantContext> read = tester.readAllVCs(tmpFile).getVCs();
             testReaderWriter(tester, p.getHeader(), expected, read, false);
         }
     }
@@ -692,34 +695,11 @@ public class VariantContextTestProvider {
         writer.close();
     }
 
-    /**
-     * Utility class to read all of the VC records from a file
-     *
-     * @param source
-     * @param codec
-     * @return
-     * @throws IOException
-     */
-    public final static VariantContextContainer readAllVCs( final File source, final FeatureCodec<VariantContext> codec ) throws IOException {
-        // read in the features
-        PositionalBufferedStream pbs = new PositionalBufferedStream(new FileInputStream(source));
-        FeatureCodecHeader header = codec.readHeader(pbs);
-        pbs.close();
-
-        pbs = new PositionalBufferedStream(new FileInputStream(source));
-        pbs.skip(header.getHeaderEnd());
-
-        final VCFHeader vcfHeader = (VCFHeader)header.getHeaderValue();
-        return new VariantContextContainer(vcfHeader, new VCIterable(pbs, codec, vcfHeader));
-    }
-
-    public static class VCIterable implements Iterable<VariantContext>, Iterator<VariantContext> {
-        final PositionalBufferedStream pbs;
-        final FeatureCodec<VariantContext> codec;
+    public static abstract class VCIterable<SOURCE> implements Iterable<VariantContext>, Iterator<VariantContext> {
+        final FeatureCodec<VariantContext, SOURCE> codec;
         final VCFHeader header;
 
-        private VCIterable(final PositionalBufferedStream pbs, final FeatureCodec<VariantContext> codec, final VCFHeader header) {
-            this.pbs = pbs;
+        public VCIterable(final FeatureCodec<VariantContext, SOURCE> codec, final VCFHeader header) {
             this.codec = codec;
             this.header = header;
         }
@@ -730,18 +710,14 @@ public class VariantContextTestProvider {
         }
 
         @Override
-        public boolean hasNext() {
-            try {
-                return ! pbs.isDone();
-            } catch ( IOException e ) {
-                throw new RuntimeException(e);
-            }
-        }
+        public abstract boolean hasNext();
 
+        public abstract SOURCE nextSource();
+        
         @Override
         public VariantContext next() {
             try {
-                final VariantContext vc = codec.decode(pbs);
+                final VariantContext vc = codec.decode(nextSource());
                 return vc == null ? null : vc.fullyDecode(header, false);
             } catch ( IOException e ) {
                 throw new RuntimeException(e);
@@ -749,11 +725,51 @@ public class VariantContextTestProvider {
         }
 
         @Override
-        public void remove() {
-            //To change body of implemented methods use File | Settings | File Templates.
-        }
+        public void remove() { }
     }
 
+    public static VariantContextContainer readAllVCs(final File input, final BCF2Codec codec) throws IOException {
+        PositionalBufferedStream headerPbs = new PositionalBufferedStream(new FileInputStream(input));
+        FeatureCodecHeader header = codec.readHeader(headerPbs);
+        headerPbs.close();
+
+        final PositionalBufferedStream pbs = new PositionalBufferedStream(new FileInputStream(input));
+        pbs.skip(header.getHeaderEnd());
+
+        final VCFHeader vcfHeader = (VCFHeader)header.getHeaderValue();
+        return new VariantContextTestProvider.VariantContextContainer(vcfHeader, new VariantContextTestProvider.VCIterable(codec, vcfHeader) {
+            @Override
+            public boolean hasNext() {
+                try {
+                    return !pbs.isDone();
+                } catch (IOException e) {
+                    throw new RuntimeException(e);
+                }
+            }
+
+            @Override
+            public Object nextSource() {
+                return pbs;
+            }
+        });
+    }
+
+    public static VariantContextContainer readAllVCs(final File input, final VCFCodec codec) throws FileNotFoundException {
+        final LineIterator lineIterator = new LineIteratorImpl(LineReaderUtil.fromBufferedStream(new BufferedInputStream(new FileInputStream(input))));
+        final VCFHeader vcfHeader = (VCFHeader) codec.readActualHeader(lineIterator);
+        return new VariantContextTestProvider.VariantContextContainer(vcfHeader, new VariantContextTestProvider.VCIterable<LineIterator>(codec, vcfHeader) {
+            @Override
+            public boolean hasNext() {
+                return lineIterator.hasNext();
+            }
+
+            @Override
+            public LineIterator nextSource() {
+                return lineIterator;
+            }
+        });
+    }
+    
     public static void assertVCFandBCFFilesAreTheSame(final File vcfFile, final File bcfFile) throws IOException {
         final VariantContextContainer vcfData = readAllVCs(vcfFile, new VCFCodec());
         final VariantContextContainer bcfData = readAllVCs(bcfFile, new BCF2Codec());
diff --git a/src/tests/java/org/broadinstitute/variant/variantcontext/VariantContextUnitTest.java b/src/tests/java/org/broadinstitute/variant/variantcontext/VariantContextUnitTest.java
index 44f7862..4564496 100644
--- a/src/tests/java/org/broadinstitute/variant/variantcontext/VariantContextUnitTest.java
+++ b/src/tests/java/org/broadinstitute/variant/variantcontext/VariantContextUnitTest.java
@@ -184,6 +184,7 @@ public class VariantContextUnitTest extends VariantBaseTest {
         Assert.assertFalse(vc.isIndel());
         Assert.assertFalse(vc.isSimpleInsertion());
         Assert.assertFalse(vc.isSimpleDeletion());
+        Assert.assertFalse(vc.isSimpleIndel());
         Assert.assertFalse(vc.isMixed());
         Assert.assertTrue(vc.isBiallelic());
         Assert.assertEquals(vc.getNAlleles(), 2);
@@ -211,6 +212,7 @@ public class VariantContextUnitTest extends VariantBaseTest {
         Assert.assertFalse(vc.isIndel());
         Assert.assertFalse(vc.isSimpleInsertion());
         Assert.assertFalse(vc.isSimpleDeletion());
+        Assert.assertFalse(vc.isSimpleIndel());
         Assert.assertFalse(vc.isMixed());
         Assert.assertFalse(vc.isBiallelic());
         Assert.assertEquals(vc.getNAlleles(), 1);
@@ -237,6 +239,7 @@ public class VariantContextUnitTest extends VariantBaseTest {
         Assert.assertTrue(vc.isIndel());
         Assert.assertFalse(vc.isSimpleInsertion());
         Assert.assertTrue(vc.isSimpleDeletion());
+        Assert.assertTrue(vc.isSimpleIndel());
         Assert.assertFalse(vc.isMixed());
         Assert.assertTrue(vc.isBiallelic());
         Assert.assertEquals(vc.getNAlleles(), 2);
@@ -252,6 +255,34 @@ public class VariantContextUnitTest extends VariantBaseTest {
     }
 
     @Test
+    public void testCreatingComplexSubstitutionVariantContext() {
+        List<Allele> alleles = Arrays.asList(Tref, ATC);
+        VariantContext vc = new VariantContextBuilder("test", insLoc, insLocStart, insLocStop, alleles).make();
+
+        Assert.assertEquals(vc.getChr(), insLoc);
+        Assert.assertEquals(vc.getStart(), insLocStart);
+        Assert.assertEquals(vc.getEnd(), insLocStop);
+        Assert.assertEquals(vc.getType(), VariantContext.Type.INDEL);
+        Assert.assertFalse(vc.isSNP());
+        Assert.assertTrue(vc.isIndel());
+        Assert.assertFalse(vc.isSimpleInsertion());
+        Assert.assertFalse(vc.isSimpleDeletion());
+        Assert.assertFalse(vc.isSimpleIndel());
+        Assert.assertFalse(vc.isMixed());
+        Assert.assertTrue(vc.isBiallelic());
+        Assert.assertEquals(vc.getNAlleles(), 2);
+
+        Assert.assertEquals(vc.getReference(), Tref);
+        Assert.assertEquals(vc.getAlleles().size(), 2);
+        Assert.assertEquals(vc.getAlternateAlleles().size(), 1);
+        Assert.assertEquals(vc.getAlternateAllele(0), ATC);
+
+        Assert.assertFalse(vc.hasGenotypes());
+
+        Assert.assertEquals(vc.getSampleNames().size(), 0);
+    }
+
+    @Test
     public void testMatchingAlleles() {
         List<Allele> alleles = Arrays.asList(ATCref, del);
         VariantContext vc = new VariantContextBuilder("test", delLoc, delLocStart, delLocStop, alleles).make();
@@ -274,6 +305,7 @@ public class VariantContextUnitTest extends VariantBaseTest {
         Assert.assertTrue(vc.isIndel());
         Assert.assertTrue(vc.isSimpleInsertion());
         Assert.assertFalse(vc.isSimpleDeletion());
+        Assert.assertTrue(vc.isSimpleIndel());
         Assert.assertFalse(vc.isMixed());
         Assert.assertTrue(vc.isBiallelic());
         Assert.assertEquals(vc.getNAlleles(), 2);
@@ -867,7 +899,7 @@ public class VariantContextUnitTest extends VariantBaseTest {
         VariantContext vc = new VariantContextBuilder("genotypes", snpLoc, snpLocStart, snpLocStop, Arrays.asList(Aref, T)).genotypes(gc).make();
 
         // same sample names => success
-        Assert.assertEquals(vc.getSampleNames(), new HashSet<String>(cfg.sampleNames), "vc.getSampleNames() = " + vc.getSampleNames());
+        Assert.assertTrue(vc.getSampleNames().equals(new HashSet<String>(cfg.sampleNames)), "vc.getSampleNames() = " + vc.getSampleNames());
         Assert.assertEquals(vc.getSampleNamesOrderedByName(), cfg.sampleNamesInOrder, "vc.getSampleNamesOrderedByName() = " + vc.getSampleNamesOrderedByName());
 
         assertGenotypesAreInOrder(vc.getGenotypesOrderedByName(), cfg.sampleNamesInOrder);
@@ -924,4 +956,4 @@ public class VariantContextUnitTest extends VariantBaseTest {
             Assert.assertEquals(vc.getMixedCount(), nMixed);
         }
     }
-}
\ No newline at end of file
+}
diff --git a/src/tests/java/org/broadinstitute/variant/variantcontext/writer/VCFWriterUnitTest.java b/src/tests/java/org/broadinstitute/variant/variantcontext/writer/VCFWriterUnitTest.java
index 840e946..5b963dd 100644
--- a/src/tests/java/org/broadinstitute/variant/variantcontext/writer/VCFWriterUnitTest.java
+++ b/src/tests/java/org/broadinstitute/variant/variantcontext/writer/VCFWriterUnitTest.java
@@ -25,7 +25,8 @@
 
 package org.broadinstitute.variant.variantcontext.writer;
 
-import net.sf.picard.reference.IndexedFastaSequenceFile;
+import net.sf.samtools.SAMSequenceDictionary;
+import net.sf.samtools.util.TestUtil;
 import org.broad.tribble.AbstractFeatureReader;
 import org.broad.tribble.FeatureReader;
 import org.broad.tribble.Tribble;
@@ -36,13 +37,10 @@ import org.broadinstitute.variant.vcf.VCFHeaderLine;
 import org.broadinstitute.variant.vcf.VCFHeaderVersion;
 import org.broadinstitute.variant.variantcontext.*;
 import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
+import java.io.*;
 import java.util.*;
 
 
@@ -114,6 +112,11 @@ public class VCFWriterUnitTest extends VariantBaseTest {
      * @return a VCFRecord
      */
     private VariantContext createVC(VCFHeader header) {
+
+        return createVCGeneral(header, "chr1", 1);
+    }
+
+    private VariantContext createVCGeneral(VCFHeader header, String chrom, int position) {
         List<Allele> alleles = new ArrayList<Allele>();
         Set<String> filters = null;
         Map<String, Object> attributes = new HashMap<String,Object>();
@@ -127,7 +130,7 @@ public class VCFWriterUnitTest extends VariantBaseTest {
             Genotype gt = new GenotypeBuilder(name,alleles.subList(1,2)).GQ(0).attribute("BB", "1").phased(true).make();
             genotypes.add(gt);
         }
-        return new VariantContextBuilder("RANDOM", "chr1", 1, 1, alleles)
+        return new VariantContextBuilder("RANDOM", chrom, position, position, alleles)
                 .genotypes(genotypes).attributes(attributes).make();
     }
 
@@ -184,5 +187,41 @@ public class VCFWriterUnitTest extends VariantBaseTest {
     public void testVCFWriterDoubleFormatTestData(final double d, final String expected) {
         Assert.assertEquals(VCFWriter.formatVCFDouble(d), expected, "Failed to pretty print double in VCFWriter");
     }
+
+    @Test(enabled=true)
+    public void TestWritingLargeVCF() throws FileNotFoundException, InterruptedException {
+
+        final Set<String> Columns = new HashSet<String>();
+        for (int i = 0; i < 123; i++) {
+
+            Columns.add(String.format("SAMPLE_%d", i));
+        }
+
+        final VCFHeader header = createFakeHeader(metaData,Columns);
+        final EnumSet<Options> options = EnumSet.of(Options.ALLOW_MISSING_FIELDS_IN_HEADER,Options.INDEX_ON_THE_FLY);
+
+        final File tempDir = TestUtil.getTempDirecory("VCFWriter", "StaleIndex");
+
+        tempDir.deleteOnExit();
+
+        final File vcf = new File(tempDir, "test.vcf");
+        final File vcfIndex = new File(tempDir, "test.vcf.idx");
+        final SAMSequenceDictionary dict=createArtificialSequenceDictionary();
+
+        for(int count=1;count<2; count++){
+            final VariantContextWriter writer = VariantContextWriterFactory.create(vcf, dict, options);
+            writer.writeHeader(header);
+
+            for (int i = 1; i < 17; i++) { // write 16 chromosomes
+                for (int j = 1; j < 10; j++) { // 9 records each
+                    writer.add(createVCGeneral(header, String.format("chr%d", i), j * 100));
+                }
+            }
+            writer.close();
+
+            Assert.assertTrue(vcf.lastModified() <= vcfIndex.lastModified());
+        }
+    }
+
 }
 
diff --git a/src/tests/java/org/broadinstitute/variant/variantcontext/writer/VariantContextWritersUnitTest.java b/src/tests/java/org/broadinstitute/variant/variantcontext/writer/VariantContextWritersUnitTest.java
index ff75474..af8ac99 100644
--- a/src/tests/java/org/broadinstitute/variant/variantcontext/writer/VariantContextWritersUnitTest.java
+++ b/src/tests/java/org/broadinstitute/variant/variantcontext/writer/VariantContextWritersUnitTest.java
@@ -29,20 +29,19 @@ package org.broadinstitute.variant.variantcontext.writer;
 // the imports for unit testing.
 
 
-import net.sf.picard.reference.IndexedFastaSequenceFile;
 import net.sf.samtools.SAMSequenceDictionary;
-import org.broad.tribble.FeatureCodec;
 import org.broadinstitute.variant.VariantBaseTest;
 import org.broadinstitute.variant.bcf2.BCF2Codec;
-import org.broadinstitute.variant.vcf.VCFCodec;
-import org.broadinstitute.variant.vcf.VCFHeader;
 import org.broadinstitute.variant.variantcontext.VariantContext;
 import org.broadinstitute.variant.variantcontext.VariantContextTestProvider;
+import org.broadinstitute.variant.vcf.VCFCodec;
+import org.broadinstitute.variant.vcf.VCFHeader;
 import org.testng.annotations.BeforeSuite;
 import org.testng.annotations.DataProvider;
 import org.testng.annotations.Test;
 
 import java.io.File;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.EnumSet;
@@ -82,14 +81,14 @@ public class VariantContextWritersUnitTest extends VariantBaseTest {
         VariantContextTestProvider.testReaderWriterWithMissingGenotypes(new BCFIOTester(), testData);
     }
 
-    private class BCFIOTester extends VariantContextTestProvider.VariantContextIOTest {
+    private class BCFIOTester extends VariantContextTestProvider.VariantContextIOTest<BCF2Codec> {
         @Override
         public String getExtension() {
             return ".bcf";
         }
 
         @Override
-        public FeatureCodec<VariantContext> makeCodec() {
+        public BCF2Codec makeCodec() {
             return new BCF2Codec();
         }
 
@@ -97,6 +96,12 @@ public class VariantContextWritersUnitTest extends VariantBaseTest {
         public VariantContextWriter makeWriter(final File file, final EnumSet<Options> baseOptions) {
             return VariantContextWriterFactory.create(file, dictionary, baseOptions);
         }
+
+        @Override
+        public VariantContextTestProvider.VariantContextContainer readAllVCs(File input) throws IOException {
+            final BCF2Codec codec = this.makeCodec();
+            return VariantContextTestProvider.readAllVCs(input, codec);
+        }
     }
 
     // --------------------------------------------------------------------------------
@@ -115,7 +120,7 @@ public class VariantContextWritersUnitTest extends VariantBaseTest {
         VariantContextTestProvider.testReaderWriterWithMissingGenotypes(new VCFIOTester(), testData);
     }
 
-    private class VCFIOTester extends VariantContextTestProvider.VariantContextIOTest {
+    private class VCFIOTester extends VariantContextTestProvider.VariantContextIOTest<VCFCodec> {
         @Override
         public String getExtension() {
             return ".vcf";
@@ -132,7 +137,7 @@ public class VariantContextWritersUnitTest extends VariantBaseTest {
         }
 
         @Override
-        public FeatureCodec<VariantContext> makeCodec() {
+        public VCFCodec makeCodec() {
             return new VCFCodec();
         }
 
@@ -140,5 +145,11 @@ public class VariantContextWritersUnitTest extends VariantBaseTest {
         public VariantContextWriter makeWriter(final File file, final EnumSet<Options> baseOptions) {
             return VariantContextWriterFactory.create(file, dictionary, baseOptions);
         }
+
+        @Override
+        public VariantContextTestProvider.VariantContextContainer readAllVCs(File input) throws FileNotFoundException {
+            final VCFCodec codec = this.makeCodec();
+            return VariantContextTestProvider.readAllVCs(input, codec);
+        }
     }
 }
\ No newline at end of file
diff --git a/src/tests/java/org/broadinstitute/variant/vcf/IndexFactoryUnitTest.java b/src/tests/java/org/broadinstitute/variant/vcf/IndexFactoryUnitTest.java
index 5de13af..080153d 100644
--- a/src/tests/java/org/broadinstitute/variant/vcf/IndexFactoryUnitTest.java
+++ b/src/tests/java/org/broadinstitute/variant/vcf/IndexFactoryUnitTest.java
@@ -25,7 +25,6 @@
 
 package org.broadinstitute.variant.vcf;
 
-import net.sf.picard.reference.IndexedFastaSequenceFile;
 import net.sf.samtools.SAMSequenceDictionary;
 import org.broad.tribble.AbstractFeatureReader;
 import org.broad.tribble.CloseableTribbleIterator;
@@ -41,7 +40,6 @@ import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
 
 import java.io.File;
-import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.EnumSet;
@@ -67,14 +65,14 @@ public class IndexFactoryUnitTest extends VariantBaseTest {
     //
     @Test
     public void testOnTheFlyIndexing1() throws IOException {
-        Index indexFromInputFile = IndexFactory.createDynamicIndex(inputFile, new VCFCodec());
+        final Index indexFromInputFile = IndexFactory.createDynamicIndex(inputFile, new VCFCodec());
         if ( outputFileIndex.exists() ) {
             System.err.println("Deleting " + outputFileIndex);
             outputFileIndex.delete();
         }
 
         for ( int maxRecords : Arrays.asList(0, 1, 10, 100, 1000, -1)) {
-            AbstractFeatureReader<VariantContext> source = AbstractFeatureReader.getFeatureReader(inputFile.getAbsolutePath(), new VCFCodec(), indexFromInputFile);
+            final AbstractFeatureReader source = AbstractFeatureReader.getFeatureReader(inputFile.getAbsolutePath(), new VCFCodec(), indexFromInputFile);
 
             int counter = 0;
             final EnumSet<Options> options = EnumSet.of(Options.ALLOW_MISSING_FIELDS_IN_HEADER);
diff --git a/src/tests/java/org/broadinstitute/variant/vcf/VCFHeaderUnitTest.java b/src/tests/java/org/broadinstitute/variant/vcf/VCFHeaderUnitTest.java
index 7d6b119..1c90769 100644
--- a/src/tests/java/org/broadinstitute/variant/vcf/VCFHeaderUnitTest.java
+++ b/src/tests/java/org/broadinstitute/variant/vcf/VCFHeaderUnitTest.java
@@ -25,11 +25,11 @@
 
 package org.broadinstitute.variant.vcf;
 
-import org.broad.tribble.readers.AsciiLineReader;
+import org.broad.tribble.readers.LineIteratorImpl;
+import org.broad.tribble.readers.LineReaderUtil;
 import org.broad.tribble.readers.PositionalBufferedStream;
 import org.broadinstitute.variant.VariantBaseTest;
 import org.testng.Assert;
-
 import org.testng.annotations.Test;
 
 import java.io.*;
@@ -48,7 +48,7 @@ public class VCFHeaderUnitTest extends VariantBaseTest {
 
     private VCFHeader createHeader(String headerStr) {
         VCFCodec codec = new VCFCodec();
-        VCFHeader header = (VCFHeader)codec.readHeader(new AsciiLineReader(new PositionalBufferedStream(new StringBufferInputStream(headerStr))));
+        VCFHeader header = (VCFHeader) codec.readActualHeader(new LineIteratorImpl(LineReaderUtil.fromBufferedStream(new PositionalBufferedStream(new StringBufferInputStream(headerStr)), LineReaderUtil.LineReaderOption.SYNCHRONOUS)));
         Assert.assertEquals(header.getMetaDataInInputOrder().size(), VCF4headerStringCount);
         return header;
     }
diff --git a/testdata/net/sf/picard/io/slurptest.txt b/testdata/net/sf/picard/io/slurptest.txt
new file mode 100644
index 0000000..44718c3
--- /dev/null
+++ b/testdata/net/sf/picard/io/slurptest.txt
@@ -0,0 +1,3 @@
+bacon   and rice   
+for breakfast  
+wont you join me
\ No newline at end of file
diff --git a/testdata/net/sf/picard/sam/MergeBamAlignment/aligned.supplement.sam b/testdata/net/sf/picard/sam/MergeBamAlignment/aligned.supplement.sam
new file mode 100644
index 0000000..3b9dd0e
--- /dev/null
+++ b/testdata/net/sf/picard/sam/MergeBamAlignment/aligned.supplement.sam
@@ -0,0 +1,21 @@
+@HD	VN:1.0	SO:queryname
+@SQ	SN:chr1	LN:101
+@SQ	SN:chr2	LN:101
+@SQ	SN:chr3	LN:101
+@SQ	SN:chr4	LN:101
+@SQ	SN:chr5	LN:101
+@SQ	SN:chr6	LN:101
+@SQ	SN:chr7	LN:404
+@SQ	SN:chr8	LN:202
+@RG	ID:0	SM:Hi,Mom!
+@PG	ID:1	PN:Hey!	VN:2.0
+both_reads_align_clip_adapter	99	chr7	21	255	101M	=	16	96	CAACAGAAGCNGGNATCTGTGTTTGTGTTTCGGATTTCCTGCTGAANNGNTTNTCGNNTCNNNNNNNNATCCCGATTTCNTTCCGCAGCTNACCTCCCAAN	)'.*.+2,))&&'&*/)-&*-)&.-)&)&),/-&&..)./.,.).*&&,&.&&-)&&&0*&&&&&&&&/32/,01460&&/6/*0*/2/283//36868/&	RG:Z:0
+both_reads_align_clip_adapter	147	chr7	16	255	101M	=	21	-96	CAACAGAAGCNGGNATCTGTGTTTGTGTTTCGGATTTCCTGCTGAANNGNTTNTCGNNTCNNNNNNNNATCCCGATTTCNTTCCGCAGCTNACCTCCCAAN	)'.*.+2,))&&'&*/)-&*-)&.-)&)&),/-&&..)./.,.).*&&,&.&&-)&&&0*&&&&&&&&/32/,01460&&/6/*0*/2/283//36868/&	RG:Z:0
+both_reads_align_clip_adapter	2147	chr7	21	255	101M	=	16	96	AAACAGAAGCNGGNATCTGTGTTTGTGTTTCGGATTTCCTGCTGAANNGNTTNTCGNNTCNNNNNNNNATCCCGATTTCNTTCCGCAGCTNACCTCCCAAN	)'.*.+2,))&&'&*/)-&*-)&.-)&)&),/-&&..)./.,.).*&&,&.&&-)&&&0*&&&&&&&&/32/,01460&&/6/*0*/2/283//36868/&	RG:Z:0
+both_reads_align_clip_adapter	2195	chr7	16	255	101M	=	21	-96	AAACAGAAGCNGGNATCTGTGTTTGTGTTTCGGATTTCCTGCTGAANNGNTTNTCGNNTCNNNNNNNNATCCCGATTTCNTTCCGCAGCTNACCTCCCAAN	)'.*.+2,))&&'&*/)-&*-)&.-)&)&),/-&&..)./.,.).*&&,&.&&-)&&&0*&&&&&&&&/32/,01460&&/6/*0*/2/283//36868/&	RG:Z:0
+both_reads_align_clip_marked	83	chr7	1	255	101M	=	302	201	CAACAGAAGCNGGNATCTGTGTTTGTGTTTCGGATTTCCTGCTGAANNGNTTNTCGNNTCNNNNNNNNATCCCGATTTCNTTCCGCAGCTNACCTCCCAAN	)'.*.+2,))&&'&*/)-&*-)&.-)&)&),/-&&..)./.,.).*&&,&.&&-)&&&0*&&&&&&&&/32/,01460&&/6/*0*/2/283//36868/&	RG:Z:0
+both_reads_align_clip_marked	163	chr7	302	255	101M	=	1	-201	NCGCGGCATCNCGATTTCTTTCCGCAGCTAACCTCCCGACAGATCGGCAGCGCGTCGTGTAGGTTATTATGGTACATCTTGTCGTGCGGCNAGAGCATACA	&/15445666651/566666553+2/14/&/555512+3/)-'/-&-'*+))*''13+3)'//++''/'))/3+&*5++)&'2+&+/*&-&&*)&-./1'1	RG:Z:0
+both_reads_present_only_first_aligns	89	chr7	1	255	101M	*	0	0	CAACAGAAGCNGGNATCTGTGTTTGTGTTTCGGATTTCCTGCTGAANNGNTTNTCGNNTCNNNNNNNNATCCCGATTTCNTTCCGCAGCTNACCTCCCAAN	)'.*.+2,))&&'&*/)-&*-)&.-)&)&),/-&&..)./.,.).*&&,&.&&-)&&&0*&&&&&&&&/32/,01460&&/6/*0*/2/283//36868/&	RG:Z:0
+both_reads_present_only_first_aligns	165	*	0	0	*	chr7	1	0	NCGCGGCATCNCGATTTCTTTCCGCAGCTAACCTCCCGACAGATCGGCAGCGCGTCGTGTAGGTTATTATGGTACATCTTGTCGTGCGGCNAGAGCATACA	&/15445666651/566666553+2/14/&/555512+3/)-'/-&-'*+))*''13+3)'//++''/'))/3+&*5++)&'2+&+/*&-&&*)&-./1'1	RG:Z:0
+read_2_too_many_gaps	83	chr7	1	255	101M	=	302	201	CAACAGAAGCNGGNATCTGTGTTTGTGTTTCGGATTTCCTGCTGAANNGNTTNTCGNNTCNNNNNNNNATCCCGATTTCNTTCCGCAGCTNACCTCCCAAN	)'.*.+2,))&&'&*/)-&*-)&.-)&)&),/-&&..)./.,.).*&&,&.&&-)&&&0*&&&&&&&&/32/,01460&&/6/*0*/2/283//36868/&	RG:Z:0
+read_2_too_many_gaps	163	chr7	302	255	10M1D10M5I76M	=	1	-201	NCGCGGCATCNCGATTTCTTTCCGCAGCTAACCTCCCGACAGATCGGCAGCGCGTCGTGTAGGTTATTATGGTACATCTTGTCGTGCGGCNAGAGCATACA	&/15445666651/566666553+2/14/&/555512+3/)-'/-&-'*+))*''13+3)'//++''/'))/3+&*5++)&'2+&+/*&-&&*)&-./1'1	RG:Z:0
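
In the new aligned.supplement.sam fixture the duplicated both_reads_align_clip_adapter records carry flags 2147 and 2195, i.e. the familiar paired-end flags 99 and 147 with the supplementary-alignment bit 0x800 added. A purely illustrative sketch that decodes those values with plain bit tests (the constants are local names taken from the SAM flag definitions, not a library API):

public class SamFlagSketch {
    // Standard SAM flag bits; local constants for illustration only.
    private static final int PAIRED = 0x1, PROPER_PAIR = 0x2, READ_REVERSE = 0x10,
            MATE_REVERSE = 0x20, FIRST_OF_PAIR = 0x40, SECOND_OF_PAIR = 0x80,
            SECONDARY = 0x100, SUPPLEMENTARY = 0x800;

    public static void main(final String[] args) {
        for (final int flag : new int[]{99, 147, 2147, 2195}) {
            System.out.printf(
                    "%4d paired=%b proper=%b readRev=%b mateRev=%b first=%b second=%b secondary=%b supplementary=%b%n",
                    flag,
                    (flag & PAIRED) != 0, (flag & PROPER_PAIR) != 0,
                    (flag & READ_REVERSE) != 0, (flag & MATE_REVERSE) != 0,
                    (flag & FIRST_OF_PAIR) != 0, (flag & SECOND_OF_PAIR) != 0,
                    (flag & SECONDARY) != 0, (flag & SUPPLEMENTARY) != 0);
        }
    }
}
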
diff --git a/testdata/net/sf/picard/vcf/CEUTrio-indels-bad-samples.vcf.idx b/testdata/net/sf/picard/vcf/CEUTrio-indels-bad-samples.vcf.idx
new file mode 100644
index 0000000..22b72e2
Binary files /dev/null and b/testdata/net/sf/picard/vcf/CEUTrio-indels-bad-samples.vcf.idx differ
diff --git a/testdata/net/sf/picard/vcf/CEUTrio-indels-dissimilar-contigs.vcf.idx b/testdata/net/sf/picard/vcf/CEUTrio-indels-dissimilar-contigs.vcf.idx
new file mode 100644
index 0000000..0d44aa4
Binary files /dev/null and b/testdata/net/sf/picard/vcf/CEUTrio-indels-dissimilar-contigs.vcf.idx differ
diff --git a/testdata/net/sf/picard/vcf/CEUTrio-indels.vcf.idx b/testdata/net/sf/picard/vcf/CEUTrio-indels.vcf.idx
new file mode 100644
index 0000000..f976157
Binary files /dev/null and b/testdata/net/sf/picard/vcf/CEUTrio-indels.vcf.idx differ
diff --git a/testdata/net/sf/picard/vcf/CEUTrio-merged-indels-snps.vcf.idx b/testdata/net/sf/picard/vcf/CEUTrio-merged-indels-snps.vcf.idx
new file mode 100644
index 0000000..829f573
Binary files /dev/null and b/testdata/net/sf/picard/vcf/CEUTrio-merged-indels-snps.vcf.idx differ
diff --git a/testdata/net/sf/picard/vcf/CEUTrio-random-scatter-0.vcf.idx b/testdata/net/sf/picard/vcf/CEUTrio-random-scatter-0.vcf.idx
new file mode 100644
index 0000000..50c7e37
Binary files /dev/null and b/testdata/net/sf/picard/vcf/CEUTrio-random-scatter-0.vcf.idx differ
diff --git a/testdata/net/sf/picard/vcf/CEUTrio-random-scatter-1.vcf.idx b/testdata/net/sf/picard/vcf/CEUTrio-random-scatter-1.vcf.idx
new file mode 100644
index 0000000..45d6501
Binary files /dev/null and b/testdata/net/sf/picard/vcf/CEUTrio-random-scatter-1.vcf.idx differ
diff --git a/testdata/net/sf/picard/vcf/CEUTrio-random-scatter-2.vcf.idx b/testdata/net/sf/picard/vcf/CEUTrio-random-scatter-2.vcf.idx
new file mode 100644
index 0000000..b8f989e
Binary files /dev/null and b/testdata/net/sf/picard/vcf/CEUTrio-random-scatter-2.vcf.idx differ
diff --git a/testdata/net/sf/picard/vcf/CEUTrio-random-scatter-3.vcf.idx b/testdata/net/sf/picard/vcf/CEUTrio-random-scatter-3.vcf.idx
new file mode 100644
index 0000000..1114912
Binary files /dev/null and b/testdata/net/sf/picard/vcf/CEUTrio-random-scatter-3.vcf.idx differ
diff --git a/testdata/net/sf/picard/vcf/CEUTrio-random-scatter-4.vcf.idx b/testdata/net/sf/picard/vcf/CEUTrio-random-scatter-4.vcf.idx
new file mode 100644
index 0000000..912e4df
Binary files /dev/null and b/testdata/net/sf/picard/vcf/CEUTrio-random-scatter-4.vcf.idx differ
diff --git a/testdata/net/sf/picard/vcf/CEUTrio-random-scatter-5.vcf.idx b/testdata/net/sf/picard/vcf/CEUTrio-random-scatter-5.vcf.idx
new file mode 100644
index 0000000..49d4772
Binary files /dev/null and b/testdata/net/sf/picard/vcf/CEUTrio-random-scatter-5.vcf.idx differ
diff --git a/testdata/net/sf/picard/vcf/CEUTrio-snps.vcf.idx b/testdata/net/sf/picard/vcf/CEUTrio-snps.vcf.idx
new file mode 100644
index 0000000..823ce49
Binary files /dev/null and b/testdata/net/sf/picard/vcf/CEUTrio-snps.vcf.idx differ
diff --git a/testdata/net/sf/picard/vcf/vcfFormatTest.bcf b/testdata/net/sf/picard/vcf/vcfFormatTest.bcf
index 9082e04..9842bf8 100644
Binary files a/testdata/net/sf/picard/vcf/vcfFormatTest.bcf and b/testdata/net/sf/picard/vcf/vcfFormatTest.bcf differ
diff --git a/testdata/net/sf/picard/vcf/vcfFormatTest.bcf.idx b/testdata/net/sf/picard/vcf/vcfFormatTest.bcf.idx
new file mode 100644
index 0000000..30842a6
Binary files /dev/null and b/testdata/net/sf/picard/vcf/vcfFormatTest.bcf.idx differ
diff --git a/testdata/net/sf/picard/vcf/vcfFormatTest.vcf b/testdata/net/sf/picard/vcf/vcfFormatTest.vcf
index 6eb8ca6..38e231d 100644
--- a/testdata/net/sf/picard/vcf/vcfFormatTest.vcf
+++ b/testdata/net/sf/picard/vcf/vcfFormatTest.vcf
@@ -129,54 +129,318 @@
 ##reference=file:///humgen/gsa-hpprojects/GATK/bundle/current/b37/human_g1k_v37.fasta
 ##source=PhaseByTransmission
 #CHROM	POS	ID	REF	ALT	QUAL	FILTER	INFO	FORMAT	NA12878	NA12891	NA12892
+1	8216712	rs11121115	A	G	1540.26	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=0.917;DB;DP=131;Dels=0.00;FS=11.67;HaplotypeScore=3.35;MLEAC=3;MLEAF=0.500;MQ=57.74;MQ0=1;MQRankSum=0.427;QD=11.76;ReadPosRankSum=-2.190e-01;SB=-9.390e+02;VQSLOD=5.53;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0/1:23,28:51:99:681,0,668:127	0/1:16,18:34:99:338,0,244:127	0/1:24,22:46:99:560,0,323:127
+1	17032814	rs2773183	T	C	2828.26	VQSRTrancheSNP99.00to99.90	AC=3;AF=0.500;AN=6;BaseQRankSum=-3.879e+00;DB;DP=322;Dels=0.00;FS=2.43;HaplotypeScore=15.45;MLEAC=3;MLEAF=0.500;MQ=56.86;MQ0=0;MQRankSum=2.92;QD=8.78;ReadPosRankSum=-1.245e+00;SB=-1.943e+03;VQSLOD=-1.421e+00;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0/1:63,59:122:99:1434,0,1831:127	0/1:53,56:109:99:910,0,871:127	0/1:61,30:91:99:523,0,1257:127
 1	25154033	rs55822956	GT	G	743.25	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=3.32;DB;DP=214;FS=2.27;HaplotypeScore=77.33;MLEAC=3;MLEAF=0.500;MQ=55.48;MQ0=0;MQRankSum=0.114;QD=3.47;RPA=19,18;RU=T;ReadPosRankSum=0.884;SB=-3.070e+02;STR;VQSLOD=2.10;culprit=QD	GT:AD:DP:GQ:PL:TP	0/1:60,12:82:67:292,0,67:18	0/1:55,7:69:42:245,0,42:18	0/1:37,21:60:18:254,0,18:18
+1	36900350	rs72008966	C	CA	137.53	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=-3.837e+00;DB;DP=139;FS=0.00;HaplotypeScore=32.79;MLEAC=3;MLEAF=0.500;MQ=49.20;MQ0=3;MQRankSum=0.772;QD=0.990;RPA=22,23;RU=A;ReadPosRankSum=-3.220e+00;SB=-4.273e+01;STR;VQSLOD=1.87;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0/1:35,7:44:41:70,0,41:9	0/1:48,0:47:20:20,0,41:9	0/1:32,12:42:9:95,0,9:9
 1	47818917	rs12749507	A	G	1285.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-1.707e+00;DB;DP=252;Dels=0.00;FS=2.14;HaplotypeScore=0.666;MLEAC=1;MLEAF=0.167;MQ=59.83;MQ0=0;MQRankSum=-4.670e-01;QD=13.39;ReadPosRankSum=0.535;SB=-6.730e+02;VQSLOD=9.27;culprit=MQ	GT:AD:DP:GQ:PL:TP	0|0:81,0:81:99:0,241,3171:127	0|1:53,42:96:99:1320,0,1615:127	0|0:75,0:75:99:0,217,2762:127
+1	59635894	rs331648	T	A	839.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-4.038e+00;DB;DP=211;Dels=0.00;FS=1.43;HaplotypeScore=1.55;MLEAC=1;MLEAF=0.167;MQ=58.68;MQ0=0;MQRankSum=-4.220e-01;QD=11.82;ReadPosRankSum=-1.800e-01;SB=-5.520e+02;VQSLOD=7.95;culprit=QD	GT:AD:DP:GQ:PL:TP	0|0:78,0:78:99:0,199,2578:127	0|0:62,0:62:99:0,169,2205:127	0|1:33,38:71:99:874,0,897:127
+1	69502962	rs12724640	T	C	1868.23	VQSRTrancheSNP99.00to99.90	AC=6;AF=1.00;AN=6;BaseQRankSum=-1.732e+00;DB;DP=110;Dels=0.00;FS=4.72;HaplotypeScore=1.93;MLEAC=6;MLEAF=1.00;MQ=29.46;MQ0=22;MQRankSum=1.65;QD=16.98;ReadPosRankSum=-5.640e-01;SB=-8.110e+02;VQSLOD=0.210;culprit=MQ	GT:AD:DP:GQ:PL:TP	1|1:5,23:29:63:693,63,0:50	1|1:5,38:43:66:675,66,0:50	1|1:9,29:38:51:538,51,0:50
+1	80481528	rs61774278	G	C	1775.24	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=-1.614e+00;DB;DP=168;Dels=0.00;FS=8.29;HaplotypeScore=2.05;MLEAC=3;MLEAF=0.500;MQ=54.28;MQ0=0;MQRankSum=-5.200e-02;QD=16.75;ReadPosRankSum=1.54;SB=-1.133e+03;VQSLOD=4.77;culprit=MQ	GT:AD:DP:GQ:PL:TP	0|1:35,26:61:99:767,0,778:80	0|0:62,0:62:84:0,84,1046:80	1|1:0,45:45:84:1053,84,0:80
+1	90146546	rs7512447	C	T	8721.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=241;Dels=0.00;FS=0.00;HaplotypeScore=0.00;MLEAC=6;MLEAF=1.00;MQ=59.40;MQ0=0;QD=36.19;SB=-4.084e+03;VQSLOD=11.32;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|1:0,71:71:99:2599,196,0:127	1|1:0,86:86:99:3042,235,0:127	1|1:0,84:84:99:3080,238,0:127
 1	100763317	rs6681810	G	C	5583.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=196;Dels=0.00;FS=0.00;HaplotypeScore=3.72;MLEAC=6;MLEAF=1.00;MQ=58.52;MQ0=0;QD=28.48;SB=-2.737e+03;VQSLOD=7.92;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|1:1,76:77:99:2211,192,0:127	1|1:0,56:56:99:1593,141,0:127	1|1:0,62:62:99:1779,156,0:127
+1	110091282	rs1279195	C	T	952.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=2.65;DB;DP=101;Dels=0.00;FS=19.67;HaplotypeScore=1.92;MLEAC=2;MLEAF=0.333;MQ=56.72;MQ0=0;MQRankSum=-3.450e-01;QD=12.37;ReadPosRankSum=0.567;SB=-5.350e+02;VQSLOD=4.17;culprit=FS	GT:AD:DP:GQ:PL:TP	1|0:14,29:46:99:726,0,219:42	1|0:13,12:31:99:265,0,298:42	0|0:18,0:24:42:0,42,522:42
+1	120811174	.	C	CA	74.77	PASS	AC=5;AF=0.833;AN=6;BaseQRankSum=-1.393e+00;DP=148;FS=0.00;HaplotypeScore=20.35;MLEAC=5;MLEAF=0.833;MQ=15.79;MQ0=88;MQRankSum=-3.480e-01;QD=0.510;RPA=24,25;RU=A;ReadPosRankSum=-1.044e+00;SB=-1.531e+01;STR;VQSLOD=5.09;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:47,5:9:11:53,11,0:3	1|0:51,0:15:5:37,0,5:3	1|1:43,0:8:6:32,6,0:3
+1	146610795	rs58742099	C	T	2831.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=1.30;DB;DP=261;Dels=0.00;FS=7.03;HaplotypeScore=13.91;MLEAC=2;MLEAF=0.333;MQ=59.62;MQ0=0;MQRankSum=0.753;QD=16.27;ReadPosRankSum=-1.405e+00;SB=-1.481e+03;VQSLOD=3.42;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|0:39,43:82:99:1329,0,1080:127	1|0:40,52:92:99:1541,0,1241:127	0|0:87,0:87:99:0,238,3098:127
+1	156248265	rs12038203	C	T	643.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-3.110e-01;DB;DP=145;Dels=0.00;FS=4.57;HaplotypeScore=0.277;MLEAC=1;MLEAF=0.167;MQ=59.08;MQ0=0;MQRankSum=0.862;QD=12.86;ReadPosRankSum=0.177;SB=-3.450e+02;VQSLOD=7.50;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|0:47,0:47:99:0,123,1604:110	0|0:48,0:48:99:0,111,1433:110	0|1:25,25:50:99:678,0,694:110
+1	165431906	rs7528908	G	T	2381.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=6.01;DB;DP=259;Dels=0.00;FS=4.73;HaplotypeScore=0.986;MLEAC=2;MLEAF=0.333;MQ=59.75;MQ0=0;MQRankSum=0.916;QD=14.34;ReadPosRankSum=-9.070e-01;SB=-9.050e+02;VQSLOD=8.13;culprit=MQ	GT:AD:DP:GQ:PL:TP	0|1:48,40:88:99:1142,0,1428:127	0|0:93,0:93:99:0,259,3166:127	1|0:38,40:78:99:1278,0,1075:127
+1	175207612	rs11484568	C	T	1920.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=5.64;DB;DP=216;Dels=0.00;FS=0.537;HaplotypeScore=2.25;MLEAC=2;MLEAF=0.333;MQ=58.99;MQ0=0;MQRankSum=0.383;QD=14.12;ReadPosRankSum=0.169;SB=-1.064e+03;VQSLOD=8.59;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|1:34,40:74:99:1152,0,995:127	0|0:80,0:80:99:0,208,2602:127	1|0:34,28:62:99:807,0,998:127
 1	186656152	rs4422959	G	A	1342.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=7.23;DB;DP=270;Dels=0.00;FS=7.29;HaplotypeScore=1.17;MLEAC=1;MLEAF=0.167;MQ=59.71;MQ0=0;MQRankSum=0.399;QD=17.21;ReadPosRankSum=-8.690e-01;SB=-6.320e+02;VQSLOD=7.67;culprit=MQ	GT:AD:DP:GQ:PL:TP	0|0:99,0:99:99:0,283,3341:127	0|0:93,0:93:99:0,256,3022:127	0|1:33,45:78:99:1377,0,917:127
+1	195392852	rs2119441	T	G	5369.01	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=-2.284e+00;DB;DP=241;Dels=0.00;FS=3.31;HaplotypeScore=1.87;MLEAC=4;MLEAF=0.667;MQ=58.35;MQ0=0;MQRankSum=-6.430e-01;QD=22.28;ReadPosRankSum=0.178;SB=-2.332e+03;VQSLOD=7.85;culprit=FS	GT:AD:DP:GQ:PL:TP	1|0:37,42:79:99:1233,0,1145:127	1|1:0,84:84:99:2741,211,0:127	0|1:31,47:78:99:1395,0,804:127
+1	204945934	rs6657372	A	G	1311.24	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=-6.582e+00;DB;DP=126;Dels=0.00;FS=0.00;HaplotypeScore=2.71;MLEAC=3;MLEAF=0.500;MQ=58.78;MQ0=0;MQRankSum=0.480;QD=15.43;ReadPosRankSum=1.28;SB=-6.370e+02;VQSLOD=7.84;culprit=FS	GT:AD:DP:GQ:PL:TP	0|1:22,28:50:99:629,0,675:68	0|0:40,0:41:84:0,84,1057:68	1|1:0,35:35:69:727,69,0:68
+1	216407409	rs3767692	A	G	8820.01	PASS	AC=5;AF=0.833;AN=6;BaseQRankSum=-8.030e-01;DB;DP=295;Dels=0.00;FS=1.35;HaplotypeScore=2.58;MLEAC=5;MLEAF=0.833;MQ=59.28;MQ0=0;MQRankSum=-3.750e-01;QD=29.90;ReadPosRankSum=-3.630e-01;SB=-4.445e+03;VQSLOD=7.05;culprit=DP	GT:AD:DP:GQ:PL:TP	1|1:1,115:116:99:4251,328,0:127	1|0:39,51:90:99:1430,0,1275:127	1|1:0,89:89:99:3139,241,0:127
+1	226163330	rs10915914	C	G	747.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=-3.123e+00;DB;DP=121;Dels=0.00;FS=0.863;HaplotypeScore=0.829;MLEAC=2;MLEAF=0.333;MQ=58.96;MQ0=0;MQRankSum=-4.780e-01;QD=9.70;ReadPosRankSum=-1.340e-01;SB=-2.780e+02;VQSLOD=8.08;culprit=QD	GT:AD:DP:GQ:PL:TP	0|0:43,1:44:99:0,117,1447:116	0|1:17,15:32:99:289,0,390:116	0|1:26,19:45:99:497,0,677:116
+1	235750933	rs111686144	GA	G	679.19	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=2.04;DB;DP=64;FS=7.39;HaplotypeScore=400.20;MLEAC=4;MLEAF=0.667;MQ=32.98;MQ0=0;MQRankSum=-5.900e-02;QD=10.61;ReadPosRankSum=2.44;SB=-1.575e+02;VQSLOD=-3.140e-01;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|0:12,0:12:6:110,0,6:6	1|1:9,9:19:31:491,31,0:6	0|1:31,0:32:99:128,0,160:6
 1	243135892	rs7517403	C	T	2313.25	VQSRTrancheSNP99.00to99.90	AC=4;AF=0.667;AN=6;BaseQRankSum=7.05;DB;DP=168;Dels=0.00;FS=0.00;HaplotypeScore=0.588;MLEAC=4;MLEAF=0.667;MQ=37.62;MQ0=11;MQRankSum=-5.659e+00;QD=13.77;ReadPosRankSum=-9.770e-01;SB=-9.200e+02;VQSLOD=2.25;culprit=MQ	GT:AD:DP:GQ:PL:TP	1|0:31,24:55:99:501,0,891:123	1|1:0,58:58:99:1263,123,0:123	0|1:28,27:55:99:591,0,714:123
 2	1143476	rs4998209	C	T	1483.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=-4.814e+00;DB;DP=189;Dels=0.00;FS=5.61;HaplotypeScore=0.324;MLEAC=2;MLEAF=0.333;MQ=58.36;MQ0=0;MQRankSum=1.58;QD=12.06;ReadPosRankSum=0.326;SB=-9.320e+02;VQSLOD=6.81;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|0:66,0:66:99:0,178,2264:127	0|1:33,38:71:99:844,0,1024:127	0|1:26,26:52:99:678,0,719:127
+2	9240279	rs56249990	A	G	3978.01	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=1.70;DB;DP=213;Dels=0.00;FS=7.83;HaplotypeScore=1.19;MLEAC=3;MLEAF=0.500;MQ=59.40;MQ0=0;MQRankSum=0.143;QD=27.25;ReadPosRankSum=-9.700e-02;SB=-1.991e+03;VQSLOD=9.14;culprit=FS	GT:AD:DP:GQ:PL:TP	0|1:33,42:75:99:1400,0,1031:127	0|0:67,0:67:99:0,178,2277:127	1|1:0,71:71:99:2578,199,0:127
+2	18016237	rs12710649	A	T	3950.01	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=-3.700e-02;DB;DP=250;Dels=0.00;FS=2.16;HaplotypeScore=2.08;MLEAC=3;MLEAF=0.500;MQ=59.14;MQ0=0;MQRankSum=0.494;QD=15.80;ReadPosRankSum=0.705;SB=-2.142e+03;VQSLOD=8.14;culprit=FS	GT:AD:DP:GQ:PL:TP	0/1:46,59:105:99:1733,0,1505:127	0/1:39,37:76:99:925,0,1164:127	0/1:24,44:68:99:1292,0,628:127
 2	28790711	rs34787802	T	G	731.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-4.421e+00;DB;DP=232;Dels=0.00;FS=2.40;HaplotypeScore=1.52;MLEAC=1;MLEAF=0.167;MQ=57.98;MQ0=0;MQRankSum=0.447;QD=10.30;ReadPosRankSum=-1.833e+00;SB=-4.550e+02;VQSLOD=6.89;culprit=QD	GT:AD:DP:GQ:PL:TP	0|0:81,0:81:99:0,223,2877:127	0|1:36,35:71:99:766,0,947:127	0|0:79,0:80:99:0,205,2640:127
+2	36994439	rs11124542	A	C	1302.26	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=-7.406e+00;DB;DP=191;Dels=0.00;FS=3.34;HaplotypeScore=2.18;MLEAC=3;MLEAF=0.500;MQ=58.93;MQ0=0;MQRankSum=-7.250e-01;QD=6.82;ReadPosRankSum=0.302;SB=-1.006e+03;VQSLOD=7.95;culprit=QD	GT:AD:DP:GQ:PL:TP	0/1:32,28:60:99:710,0,1044:127	0/1:34,25:59:99:309,0,772:127	0/1:45,27:72:99:322,0,1096:127
+2	45429089	rs13418430	C	A	2341.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=0.861;DB;DP=283;Dels=0.00;FS=2.85;HaplotypeScore=1.33;MLEAC=2;MLEAF=0.333;MQ=59.62;MQ0=0;MQRankSum=-9.390e-01;QD=12.52;ReadPosRankSum=-1.869e+00;SB=-1.156e+03;VQSLOD=7.15;culprit=MQ	GT:AD:DP:GQ:PL:TP	1|0:60,49:109:99:1406,0,2061:127	1|0:41,37:78:99:974,0,1238:127	0|0:96,0:96:99:0,256,3312:127
+2	54408283	rs55993481	T	G	2128.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=1.96;DB;DP=222;Dels=0.00;FS=1.74;HaplotypeScore=3.96;MLEAC=2;MLEAF=0.333;MQ=59.69;MQ0=0;MQRankSum=0.670;QD=14.28;ReadPosRankSum=1.31;SB=-1.076e+03;VQSLOD=6.51;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|0:35,46:81:99:1392,0,921:127	1|0:35,33:68:99:775,0,878:127	0|0:73,0:73:99:0,175,2164:127
+2	64848970	rs35473866	T	C	655.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=0.263;DB;DP=218;Dels=0.00;FS=8.01;HaplotypeScore=2.58;MLEAC=1;MLEAF=0.167;MQ=58.36;MQ0=0;MQRankSum=-1.680e-01;QD=8.29;ReadPosRankSum=-3.070e-01;SB=-3.420e+02;VQSLOD=5.92;culprit=QD	GT:AD:DP:GQ:PL:TP	0|0:78,0:78:99:0,208,2627:127	0|0:61,0:61:99:0,135,1725:127	0|1:35,42:79:99:690,0,1042:127
+2	75490647	rs112304545	T	TGG	1000.21	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=0.322;DB;DP=188;FS=0.785;HaplotypeScore=58.72;MLEAC=4;MLEAF=0.667;MQ=57.19;MQ0=0;MQRankSum=-2.290e-01;QD=5.32;RPA=10,12;RU=G;ReadPosRankSum=-2.534e+00;SB=-4.650e+02;STR;VQSLOD=2.69;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:33,35:76:57:976,57,0:56	1|0:39,3:49:38:38,0,730:56	1|0:50,4:63:37:37,0,900:56
+2	85513238	rs62162679	C	T	742.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=1.16;DB;DP=137;Dels=0.00;FS=4.47;HaplotypeScore=1.39;MLEAC=2;MLEAF=0.333;MQ=57.50;MQ0=0;MQRankSum=0.479;QD=7.57;ReadPosRankSum=-2.720e-01;SB=-5.520e+02;VQSLOD=7.13;culprit=QD	GT:AD:DP:GQ:PL:TP	0|0:39,0:39:99:0,105,1267:104	0|1:23,23:47:99:430,0,551:104	0|1:31,20:51:99:351,0,633:104
+2	96514373	rs10186946	C	T	206.26	VQSRTrancheSNP99.00to99.90	AC=2;AF=0.333;AN=6;BaseQRankSum=4.49;DB;DP=175;Dels=0.00;FS=2.81;HaplotypeScore=2.42;MLEAC=2;MLEAF=0.333;MQ=39.07;MQ0=7;MQRankSum=-7.529e+00;QD=1.61;ReadPosRankSum=0.813;SB=-2.501e+01;VQSLOD=-8.655e-01;culprit=QD	GT:AD:DP:GQ:PL:TP	1|0:35,23:58:62:62,0,484:61	1|0:22,46:70:99:183,0,387:61	0|0:46,0:47:99:0,99,1162:61
 2	107234623	rs35291621	GTA	G	7569	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=2.65;DB;DP=242;FS=7.84;HaplotypeScore=19.31;MLEAC=4;MLEAF=0.667;MQ=60.04;MQ0=1;MQRankSum=-1.570e-01;QD=31.28;ReadPosRankSum=1.66;SB=-3.633e+03;VQSLOD=4.30;culprit=FS	GT:AD:DP:GQ:PL:TP	0|1:46,30:76:99:1290,0,2120:127	0|1:35,66:102:99:2998,0,1435:127	1|1:0,63:63:99:3281,190,0:127
 2	117966808	rs333847	C	T	9037.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=269;Dels=0.00;FS=0.00;HaplotypeScore=3.70;MLEAC=6;MLEAF=1.00;MQ=58.21;MQ0=0;QD=33.59;SB=-4.673e+03;VQSLOD=7.05;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|1:0,78:78:99:2652,205,0:127	1|1:0,101:101:99:3158,247,0:127	1|1:0,89:90:99:3227,250,0:127
+2	127445050	rs113647072	A	G	1773.25	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=-3.856e+00;DB;DP=154;Dels=0.00;FS=4.20;HaplotypeScore=2.27;MLEAC=4;MLEAF=0.667;MQ=59.16;MQ0=0;MQRankSum=0.759;QD=11.51;ReadPosRankSum=1.29;SB=-1.402e+03;VQSLOD=7.92;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|1:26,28:54:99:652,0,472:78	0|1:26,22:49:99:350,0,462:78	1|1:0,50:51:78:813,78,0:78
+2	133338909	rs72548244	CT	C	2103.21	PASS	AC=5;AF=0.833;AN=6;BaseQRankSum=0.444;DB;DP=212;FS=5.22;HaplotypeScore=27.63;MLEAC=5;MLEAF=0.833;MQ=59.64;MQ0=0;MQRankSum=2.43;QD=9.92;RPA=15,14;RU=T;ReadPosRankSum=2.78;SB=-1.050e+03;STR;VQSLOD=2.66;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|1:17,39:59:61:837,61,0:60	1|0:47,22:72:99:241,0,444:60	1|1:20,58:81:74:1076,74,0:60
+2	144289763	rs7607879	C	T	3308.01	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=1.39;DB;DP=225;Dels=0.00;FS=9.72;HaplotypeScore=1.50;MLEAC=3;MLEAF=0.500;MQ=59.53;MQ0=0;MQRankSum=0.283;QD=14.70;ReadPosRankSum=1.39;SB=-1.760e+03;VQSLOD=8.24;culprit=FS	GT:AD:DP:GQ:PL:TP	0/1:41,39:81:99:1190,0,1312:127	0/1:28,36:64:99:1057,0,834:127	0/1:44,36:80:99:1061,0,1275:127
+2	155726628	.	T	G	1351.23	VQSRTrancheSNP99.00to99.90	AC=6;AF=1.00;AN=6;BaseQRankSum=0.207;DP=193;Dels=0.00;FS=9.25;HaplotypeScore=0.522;MLEAC=6;MLEAF=1.00;MQ=16.17;MQ0=87;MQRankSum=2.40;QD=7.00;ReadPosRankSum=1.07;SB=-4.420e+02;VQSLOD=-2.787e+00;culprit=MQ	GT:AD:DP:GQ:PL:TP	1|1:3,56:59:60:591,60,0:27	1|1:8,49:57:27:245,27,0:27	1|1:12,65:77:57:553,57,0:27
+2	167166899	.	T	A	1064.78	VQSRTrancheSNP99.00to99.90	AC=5;AF=0.833;AN=6;BaseQRankSum=0.023;DP=278;Dels=0.00;FS=9.89;HaplotypeScore=0.277;MLEAC=6;MLEAF=1.00;MQ=10.47;MQ0=239;MQRankSum=1.64;QD=3.83;ReadPosRankSum=0.347;SB=-1.083e-02;VQSLOD=-6.945e+00;culprit=MQ	GT:AD:DP:GQ:PL:TP	1|1:174,12:186:36:401,36,0:4	1|1:37,14:52:39:431,39,0:4	1|0:31,9:40:3:271,0,3:4
+2	177175707	rs7355385	A	G	2590.24	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=-8.660e-01;DB;DP=206;Dels=0.00;FS=14.87;HaplotypeScore=0.311;MLEAC=3;MLEAF=0.500;MQ=59.12;MQ0=0;MQRankSum=-9.370e-01;QD=20.40;ReadPosRankSum=-2.870e-01;SB=-8.970e+02;VQSLOD=6.82;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|0:33,32:65:99:856,0,1069:127	1|1:0,62:62:99:1779,150,0:127	0|0:78,1:79:99:0,193,2411:127
 2	187694833	rs10191094	T	C	1006.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-7.181e+00;DB;DP=236;Dels=0.00;FS=9.08;HaplotypeScore=2.27;MLEAC=1;MLEAF=0.167;MQ=59.73;MQ0=0;MQRankSum=-2.180e-01;QD=13.07;ReadPosRankSum=-8.770e-01;SB=-5.830e+02;VQSLOD=7.60;culprit=MQ	GT:AD:DP:GQ:PL:TP	0|0:82,0:82:99:0,232,3021:127	0|0:77,0:77:99:0,217,2790:127	0|1:33,44:77:99:1041,0,988:127
+2	198567638	rs12619333	C	G	2201.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=-2.800e-02;DB;DP=240;Dels=0.00;FS=4.76;HaplotypeScore=1.37;MLEAC=2;MLEAF=0.333;MQ=59.37;MQ0=1;MQRankSum=0.131;QD=13.42;ReadPosRankSum=0.123;SB=-1.051e+03;VQSLOD=8.13;culprit=FS	GT:AD:DP:GQ:PL:TP	1|0:38,45:84:99:1298,0,1249:127	1|0:45,35:80:99:942,0,1493:127	0|0:76,0:76:99:0,196,2542:127
+2	209758643	rs12992166	C	T	1534.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=6.18;DB;DP=163;Dels=0.00;FS=8.81;HaplotypeScore=2.36;MLEAC=2;MLEAF=0.333;MQ=58.93;MQ0=0;MQRankSum=1.99;QD=12.08;ReadPosRankSum=1.08;SB=-4.720e+02;VQSLOD=6.82;culprit=FS	GT:AD:DP:GQ:PL:TP	1|0:35,22:59:99:610,0,886:93	1|0:32,36:68:99:963,0,797:93	0|0:36,0:36:93:0,93,1058:93
+2	218911764	.	TA	T	229.22	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=0.591;DP=218;FS=1.76;HaplotypeScore=32.57;MLEAC=3;MLEAF=0.500;MQ=57.41;MQ0=0;MQRankSum=1.79;QD=1.05;RPA=19,18;RU=A;ReadPosRankSum=0.676;SB=-1.060e+02;STR;VQSLOD=2.31;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0/1:56,5:65:99:101,0,313:70	0/1:59,18:82:99:105,0,395:70	0/1:64,4:70:71:71,0,263:70
+2	228532558	rs6436725	G	A	3484.01	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=7.16;DB;DP=263;Dels=0.00;FS=1.55;HaplotypeScore=4.97;MLEAC=3;MLEAF=0.500;MQ=58.60;MQ0=0;MQRankSum=1.14;QD=21.91;ReadPosRankSum=-4.160e-01;SB=-1.472e+03;VQSLOD=5.44;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|1:42,47:89:99:1549,0,932:127	0|0:104,0:104:99:0,247,2939:127	1|1:0,69:70:99:1935,156,0:127
+2	237156689	rs13390270	G	A	852.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-1.150e+00;DB;DP=205;Dels=0.00;FS=0.710;HaplotypeScore=1.73;MLEAC=1;MLEAF=0.167;MQ=59.06;MQ0=0;MQRankSum=-1.930e-01;QD=11.67;ReadPosRankSum=1.26;SB=-4.790e+02;VQSLOD=8.50;culprit=FS	GT:AD:DP:GQ:PL:TP	0|0:73,0:73:99:0,202,2606:108	0|1:39,34:73:99:887,0,1027:108	0|0:59,0:59:99:0,108,1384:108
+3	1609737	rs2648459	A	G	6492.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=187;Dels=0.00;FS=0.00;HaplotypeScore=1.18;MLEAC=6;MLEAF=1.00;MQ=59.90;MQ0=0;QD=34.72;SB=-3.251e+03;VQSLOD=11.50;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,53:53:99:1943,150,0:127	1|1:0,62:62:99:2206,172,0:127	1|1:0,72:72:99:2343,181,0:127
+3	8399330	rs6770171	G	A	6816.01	PASS	AC=5;AF=0.833;AN=6;BaseQRankSum=4.69;DB;DP=253;Dels=0.00;FS=0.637;HaplotypeScore=1.28;MLEAC=5;MLEAF=0.833;MQ=58.42;MQ0=0;MQRankSum=0.607;QD=26.94;ReadPosRankSum=-5.030e-01;SB=-3.369e+03;VQSLOD=8.01;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	1|1:0,106:106:99:3784,292,0:127	1|1:0,72:72:99:2164,169,0:127	1|0:42,33:75:99:868,0,1146:127
+3	17501103	.	T	G	45.26	VQSRTrancheSNP99.00to99.90	AC=2;AF=0.333;AN=6;BaseQRankSum=-6.477e+00;DP=156;Dels=0.00;FS=71.75;HaplotypeScore=1.53;MLEAC=2;MLEAF=0.333;MQ=58.02;MQ0=0;MQRankSum=-1.560e-01;QD=0.430;ReadPosRankSum=-3.974e+00;SB=-1.474e-02;VQSLOD=-1.415e+01;culprit=FS	GT:AD:DP:GQ:PL:TP	0|0:43,8:51:99:0,114,1252:33	0|1:22,24:46:34:34,0,492:33	0|1:24,34:59:50:50,0,492:33
+3	26960556	rs4422260	T	C	878.26	VQSRTrancheSNP99.00to99.90	AC=2;AF=0.333;AN=6;BaseQRankSum=-2.140e+00;DB;DP=220;Dels=0.00;FS=1.11;HaplotypeScore=1.72;MLEAC=2;MLEAF=0.333;MQ=41.87;MQ0=3;MQRankSum=-6.679e+00;QD=5.97;ReadPosRankSum=-3.100e-02;SB=-4.600e+02;VQSLOD=0.717;culprit=MQ	GT:AD:DP:GQ:PL:TP	0|0:73,0:73:99:0,208,2660:127	0|1:32,48:80:99:759,0,915:127	0|1:37,30:67:99:158,0,1092:127
+3	37334187	rs112182713	G	GGTT	1235.20	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=0.420;DB;DP=233;FS=10.65;HaplotypeScore=68.31;MLEAC=1;MLEAF=0.167;MQ=56.19;MQ0=0;MQRankSum=-1.868e+00;QD=17.65;RPA=1,2;RU=GTT;ReadPosRankSum=1.38;SB=-4.490e+02;STR;VQSLOD=3.14;culprit=FS	GT:AD:DP:GQ:PL:TP	0|0:83,0:83:99:0,244,4702:127	0|0:79,0:77:99:0,220,4149:127	0|1:40,23:68:99:1279,0,1696:127
+3	48757773	rs61137521	A	G	3103.25	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=-7.940e-01;DB;DP=172;Dels=0.00;FS=0.00;HaplotypeScore=1.90;MLEAC=4;MLEAF=0.667;MQ=59.34;MQ0=0;MQRankSum=-6.930e-01;QD=18.04;ReadPosRankSum=-1.667e+00;SB=-1.874e+03;VQSLOD=7.92;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,50:51:99:1723,141,0:127	1|0:31,34:65:99:745,0,814:127	1|0:24,32:56:99:677,0,603:127
+3	60283791	rs10154846	T	C	669.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-3.270e+00;DB;DP=204;Dels=0.00;FS=1.59;HaplotypeScore=1.66;MLEAC=1;MLEAF=0.167;MQ=59.39;MQ0=0;MQRankSum=0.370;QD=10.14;ReadPosRankSum=1.64;SB=-2.690e+02;VQSLOD=8.95;culprit=QD	GT:AD:DP:GQ:PL:TP	0|0:63,0:63:99:0,190,2440:127	0|1:34,32:66:99:704,0,1020:127	0|0:74,0:75:99:0,190,2372:127
+3	68765638	rs1504297	C	G	4623.01	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=-5.561e+00;DB;DP=225;Dels=0.00;FS=3.18;HaplotypeScore=0.843;MLEAC=4;MLEAF=0.667;MQ=59.80;MQ0=0;MQRankSum=-6.190e-01;QD=20.55;ReadPosRankSum=-1.001e+00;SB=-2.371e+03;VQSLOD=8.74;culprit=MQ	GT:AD:DP:GQ:PL:TP	1|0:40,39:79:99:1220,0,1330:127	1|1:0,75:75:99:2518,205,0:127	0|1:37,34:71:99:885,0,1185:127
+3	76722766	rs264537	C	G	845.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=3.67;DB;DP=202;Dels=0.00;FS=0.00;HaplotypeScore=0.277;MLEAC=1;MLEAF=0.167;MQ=59.79;MQ0=0;MQRankSum=-7.950e-01;QD=13.21;ReadPosRankSum=1.05;SB=-2.910e+02;VQSLOD=8.53;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|0:70,0:70:99:0,208,2570:127	0|0:68,0:68:99:0,193,2332:127	0|1:34,30:64:99:880,0,995:127
 3	86234718	rs2324883	T	C	7655.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=234;Dels=0.00;FS=0.00;HaplotypeScore=0.649;MLEAC=6;MLEAF=1.00;MQ=59.51;MQ0=0;QD=32.71;SB=-4.142e+03;VQSLOD=11.21;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,80:80:99:2857,223,0:127	1|1:0,76:77:99:2512,196,0:127	1|1:0,77:77:99:2286,178,0:127
+3	99106655	rs2623376	A	G	1093.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=1.49;DB;DP=191;Dels=0.00;FS=2.28;HaplotypeScore=0.627;MLEAC=2;MLEAF=0.333;MQ=59.76;MQ0=0;MQRankSum=-1.141e+00;QD=9.51;ReadPosRankSum=0.079;SB=-5.610e+02;VQSLOD=8.15;culprit=QD	GT:AD:DP:GQ:PL:TP	0|1:30,22:52:99:653,0,970:127	0|0:76,0:76:99:0,193,2462:127	1|0:44,19:63:99:479,0,1261:127
+3	108612711	rs2399252	T	A	3430.01	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=-7.830e-01;DB;DP=239;Dels=0.00;FS=4.44;HaplotypeScore=0.659;MLEAC=3;MLEAF=0.500;MQ=59.64;MQ0=0;MQRankSum=-2.690e-01;QD=14.35;ReadPosRankSum=-7.010e-01;SB=-1.733e+03;VQSLOD=8.51;culprit=MQ	GT:AD:DP:GQ:PL:TP	0/1:48,34:82:99:1066,0,1620:127	0/1:45,43:88:99:1370,0,1452:127	0/1:36,32:69:99:994,0,1128:127
+3	118367863	rs71617664	TAGATAGATTA	T	12039	PASS	AC=6;AF=1.00;AN=6;BaseQRankSum=1.12;DB;DP=201;FS=9.45;HaplotypeScore=212.50;MLEAC=6;MLEAF=1.00;MQ=55.55;MQ0=1;MQRankSum=1.58;QD=59.90;ReadPosRankSum=0.721;SB=-5.599e+03;VQSLOD=-4.074e-01;culprit=QD	GT:AD:DP:GQ:PL:TP	1|1:13,57:71:99:4299,164,0:127	1|1:60,4:64:99:3312,174,0:127	1|1:9,50:64:99:4428,177,0:127
+3	128140891	rs73201490	C	T	1315.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=5.39;DB;DP=142;Dels=0.00;FS=0.667;HaplotypeScore=3.49;MLEAC=2;MLEAF=0.333;MQ=58.69;MQ0=0;MQRankSum=1.73;QD=14.30;ReadPosRankSum=0.758;SB=-6.400e+02;VQSLOD=6.70;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|1:29,24:53:99:636,0,670:104	0|0:50,0:50:99:0,105,1188:104	1|0:12,27:39:99:718,0,250:104
+3	137840974	rs2622698	C	A	6792.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=232;Dels=0.00;FS=0.00;HaplotypeScore=2.65;MLEAC=6;MLEAF=1.00;MQ=59.04;MQ0=0;QD=29.28;SB=-3.999e+03;VQSLOD=9.77;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,79:79:99:2640,205,0:127	1|1:0,78:78:99:2229,178,0:127	1|1:0,75:75:99:1923,153,0:127
+3	148344553	rs62274102	C	A	3191.24	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=-1.860e+00;DB;DP=232;Dels=0.00;FS=12.06;HaplotypeScore=2.37;MLEAC=3;MLEAF=0.500;MQ=59.32;MQ0=0;MQRankSum=-5.510e-01;QD=19.58;ReadPosRankSum=2.41;SB=-1.757e+03;VQSLOD=6.29;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	1|0:46,39:85:99:985,0,1339:127	1|1:0,78:78:99:2251,175,0:127	0|0:69,0:69:99:0,153,2018:127
+3	158557110	rs34445802	G	A	2374.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=1.80;DB;DP=237;Dels=0.00;FS=1.02;HaplotypeScore=1.66;MLEAC=2;MLEAF=0.333;MQ=57.48;MQ0=0;MQRankSum=0.558;QD=14.05;ReadPosRankSum=-1.147e+00;SB=-1.505e+03;VQSLOD=7.13;culprit=MQ	GT:AD:DP:GQ:PL:TP	1|0:43,56:99:99:1624,0,1344:127	1|0:33,37:70:99:789,0,916:127	0|0:68,0:68:99:0,156,2014:127
+3	168108331	rs35075740	TA	T	51.36	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=1.78;DB;DP=162;FS=3.97;HaplotypeScore=20.38;MLEAC=3;MLEAF=0.500;MQ=55.92;MQ0=0;MQRankSum=3.47;QD=0.320;RPA=18,17;RU=A;ReadPosRankSum=1.96;SB=-7.120e+00;STR;VQSLOD=2.10;culprit=QD	GT:AD:DP:GQ:PL:TP	0/1:46,5:53:16:16,0,313:15	0/1:46,12:60:61:61,0,267:15	0/1:40,6:47:22:22,0,255:15
+3	179020937	rs13082816	C	T	1116.25	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=5.60;DB;DP=82;Dels=0.00;FS=0.00;HaplotypeScore=0.636;MLEAC=4;MLEAF=0.667;MQ=54.57;MQ0=1;MQRankSum=0.374;QD=13.61;ReadPosRankSum=-3.700e-02;SB=-2.010e+02;VQSLOD=6.09;culprit=FS	GT:AD:DP:GQ:PL:TP	1|0:13,13:26:99:388,0,178:36	1|1:0,24:24:36:442,36,0:36	0|1:13,19:32:99:328,0,179:36
 3	189981901	.	A	G	179.25	VQSRTrancheSNP99.00to99.90	AC=1;AF=0.167;AN=6;BaseQRankSum=2.26;DP=274;Dels=0.00;FS=73.74;HaplotypeScore=1.83;MLEAC=1;MLEAF=0.167;MQ=54.08;MQ0=6;MQRankSum=-2.651e+00;QD=1.97;ReadPosRankSum=-6.101e+00;SB=-6.400e+01;VQSLOD=-1.820e+01;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	1|0:64,27:91:99:214,0,679:51	1|0:78,10:88:0:0,21,1685:51	0|0:86,9:95:74:0,74,1891:51
 3	197056886	rs7612900	G	A	7880.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=229;Dels=0.00;FS=0.00;HaplotypeScore=0.611;MLEAC=6;MLEAF=1.00;MQ=59.18;MQ0=0;QD=34.41;SB=-3.459e+03;VQSLOD=11.81;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,75:75:99:2786,214,0:127	1|1:0,81:81:99:2598,202,0:127	1|1:0,73:73:99:2496,193,0:127
+4	6333669	rs35678078	G	T	2186.26	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=1.22;DB;DP=198;Dels=0.00;FS=1.75;HaplotypeScore=0.778;MLEAC=3;MLEAF=0.500;MQ=59.27;MQ0=0;MQRankSum=-2.560e-01;QD=11.04;ReadPosRankSum=-4.690e-01;SB=-1.621e+03;VQSLOD=9.41;culprit=QD	GT:AD:DP:GQ:PL:TP	0/1:33,34:67:99:875,0,848:127	0/1:27,32:59:99:602,0,534:127	0/1:33,38:72:99:748,0,676:127
 4	12652343	rs13114660	G	T	610.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-3.176e+00;DB;DP=127;Dels=0.00;FS=0.00;HaplotypeScore=1.57;MLEAC=1;MLEAF=0.167;MQ=58.86;MQ0=0;MQRankSum=-4.680e-01;QD=16.06;ReadPosRankSum=1.25;SB=-3.060e+02;VQSLOD=7.84;culprit=FS	GT:AD:DP:GQ:PL:TP	0|0:43,0:43:99:0,120,1561:102	0|1:15,23:38:99:645,0,407:102	0|0:44,1:46:99:0,103,1539:102
+4	21819269	rs4285068	C	G	7252.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=216;Dels=0.00;FS=0.00;HaplotypeScore=0.651;MLEAC=6;MLEAF=1.00;MQ=55.91;MQ0=0;QD=33.57;SB=-3.284e+03;VQSLOD=8.54;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,83:83:99:2859,226,0:127	1|1:0,73:73:99:2413,193,0:127	1|1:0,59:59:99:1980,156,0:127
+4	30898977	rs4386561	G	A	2449.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=3.82;DB;DP=231;Dels=0.00;FS=0.00;HaplotypeScore=0.938;MLEAC=2;MLEAF=0.333;MQ=58.94;MQ0=0;MQRankSum=0.173;QD=15.70;ReadPosRankSum=1.05;SB=-1.131e+03;VQSLOD=8.36;culprit=FS	GT:AD:DP:GQ:PL:TP	0|1:33,46:79:99:1380,0,944:127	0|0:75,0:75:99:0,202,2608:127	1|0:43,34:77:99:1108,0,1335:127
+4	39062189	rs2566127	T	C	2641.26	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=-2.019e+00;DB;DP=201;Dels=0.00;FS=1.10;HaplotypeScore=2.88;MLEAC=3;MLEAF=0.500;MQ=58.96;MQ0=0;MQRankSum=0.932;QD=13.14;ReadPosRankSum=-5.960e-01;SB=-1.401e+03;VQSLOD=7.97;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0/1:35,29:64:99:880,0,1168:127	0/1:37,32:70:99:796,0,1021:127	0/1:31,36:67:99:1004,0,877:127
+4	48922864	rs7697262	G	C	1383.23	VQSRTrancheSNP99.00to99.90	AC=6;AF=1.00;AN=6;DB;DP=100;Dels=0.00;FS=0.00;HaplotypeScore=0.00;MLEAC=6;MLEAF=1.00;MQ=20.20;MQ0=24;QD=13.83;SB=-3.860e+02;VQSLOD=-3.210e-01;culprit=MQ	GT:AD:DP:GQ:PL:TP	1|1:3,38:41:57:625,57,0:32	1|1:0,28:28:33:368,33,0:32	1|1:2,29:31:39:428,39,0:32
+4	57135985	rs1715504	G	A	1213.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=3.55;DB;DP=173;Dels=0.00;FS=4.10;HaplotypeScore=1.63;MLEAC=2;MLEAF=0.333;MQ=58.44;MQ0=0;MQRankSum=-1.006e+00;QD=10.28;ReadPosRankSum=-9.330e-01;SB=-4.500e+02;VQSLOD=7.41;culprit=QD	GT:AD:DP:GQ:PL:TP	0|0:54,0:55:99:0,135,1546:127	0|1:30,23:54:99:563,0,608:127	0|1:35,28:64:99:689,0,562:127
+4	64522064	rs4860541	A	G	5942.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=188;Dels=0.00;FS=0.00;HaplotypeScore=0.289;MLEAC=6;MLEAF=1.00;MQ=56.20;MQ0=0;QD=31.61;SB=-3.348e+03;VQSLOD=8.54;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,68:68:99:2119,169,0:127	1|1:0,52:53:99:1661,132,0:127	1|1:0,67:67:99:2162,172,0:127
+4	71449060	rs60358213	G	A	2269.26	VQSRTrancheSNP99.00to99.90	AC=2;AF=0.333;AN=6;BaseQRankSum=5.70;DB;DP=227;Dels=0.00;FS=13.05;HaplotypeScore=20.14;MLEAC=2;MLEAF=0.333;MQ=55.35;MQ0=0;MQRankSum=0.477;QD=13.19;ReadPosRankSum=2.30;SB=-8.910e+02;VQSLOD=1.89;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|0:60,40:100:99:1221,0,1476:127	1|0:37,35:72:99:1087,0,987:127	0|0:53,1:54:99:0,157,2026:127
+4	82427976	rs6841516	G	A	561.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-1.022e+00;DB;DP=166;Dels=0.00;FS=0.00;HaplotypeScore=1.47;MLEAC=1;MLEAF=0.167;MQ=59.56;MQ0=0;MQRankSum=-1.555e+00;QD=11.00;ReadPosRankSum=0.016;SB=-3.880e+02;VQSLOD=8.10;culprit=FS	GT:AD:DP:GQ:PL:TP	0|0:51,0:51:99:0,132,1705:127	0|1:22,29:51:99:596,0,483:127	0|0:64,0:64:99:0,135,1755:127
+4	92648168	rs61329801	A	ATTTG	7717	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=0.947;DB;DP=186;FS=8.29;HaplotypeScore=7.88;MLEAC=4;MLEAF=0.667;MQ=56.58;MQ0=0;MQRankSum=-1.740e-01;QD=41.49;ReadPosRankSum=-4.440e-01;SB=-3.591e+03;VQSLOD=3.52;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|0:45,35:80:99:2213,0,2213:127	1|1:3,59:62:99:4128,181,0:127	0|1:22,22:44:99:1376,0,1181:127
+4	103859292	rs7676943	C	G	7573.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=235;Dels=0.00;FS=0.00;HaplotypeScore=0.00;MLEAC=6;MLEAF=1.00;MQ=58.63;MQ0=0;QD=32.23;SB=-3.953e+03;VQSLOD=10.39;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|1:1,95:96:99:3289,256,0:127	1|1:0,73:73:99:2137,166,0:127	1|1:0,66:66:99:2147,166,0:127
+4	114869733	rs11381039	A	AT	2798.22	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=0.957;DB;DP=267;FS=4.37;HaplotypeScore=26.36;MLEAC=3;MLEAF=0.500;MQ=59.97;MQ0=0;MQRankSum=0.673;QD=10.48;RPA=8,9;RU=T;ReadPosRankSum=-1.351e+00;SB=-1.173e+03;STR;VQSLOD=5.55;culprit=QD	GT:AD:DP:GQ:PL:TP	0/1:47,41:89:99:994,0,1219:127	0/1:50,41:92:99:1063,0,1043:127	0/1:50,34:86:99:789,0,1153:127
 4	122900130	rs4833786	C	T	2361.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=-3.900e-01;DB;DP=244;Dels=0.00;FS=0.495;HaplotypeScore=0.543;MLEAC=2;MLEAF=0.333;MQ=59.91;MQ0=0;MQRankSum=-6.280e-01;QD=15.23;ReadPosRankSum=1.22;SB=-1.282e+03;VQSLOD=8.81;culprit=MQ	GT:AD:DP:GQ:PL:TP	0|0:89,0:89:99:0,250,3286:127	0|1:42,44:86:99:1257,0,1325:127	0|1:32,37:69:99:1143,0,941:127
+4	132671861	rs6856716	C	G	2622.26	VQSRTrancheSNP99.90to100.00	AC=2;AF=0.333;AN=6;BaseQRankSum=3.14;DB;DP=617;DS;Dels=0.00;FS=5.81;HaplotypeScore=34.41;MLEAC=2;MLEAF=0.333;MQ=24.78;MQ0=42;MQRankSum=3.29;QD=5.89;ReadPosRankSum=-7.360e-01;SB=-1.412e+03;VQSLOD=-7.735e+01;culprit=DP	GT:AD:DP:GQ:PL:TP	0|1:125,98:223:99:1761,0,1486:127	0|0:171,0:172:99:0,300,2641:127	1|0:152,70:222:99:900,0,2086:127
+4	141514835	rs55984232	C	T	1645.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=7.14;DB;DP=191;Dels=0.00;FS=2.76;HaplotypeScore=1.07;MLEAC=2;MLEAF=0.333;MQ=59.26;MQ0=0;MQRankSum=0.726;QD=13.06;ReadPosRankSum=-7.890e-01;SB=-8.550e+02;VQSLOD=8.94;culprit=FS	GT:AD:DP:GQ:PL:TP	0|1:32,28:60:99:913,0,820:127	0|0:65,0:65:99:0,156,1886:127	1|0:34,32:66:99:771,0,750:127
+4	152029268	.	CT	C	358.22	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=2.69;DP=199;FS=13.72;HaplotypeScore=32.57;MLEAC=2;MLEAF=0.333;MQ=55.17;MQ0=0;MQRankSum=-9.150e-01;QD=2.42;RPA=17,16;RU=T;ReadPosRankSum=0.548;SB=-1.419e+02;STR;VQSLOD=0.393;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|0:50,15:75:99:301,0,225:38	1|0:56,11:72:99:105,0,209:38	0|0:46,2:51:38:0,38,284:38
 4	161180100	rs167176	G	C	2497.26	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=6.81;DB;DP=191;Dels=0.00;FS=5.37;HaplotypeScore=0.781;MLEAC=3;MLEAF=0.500;MQ=59.78;MQ0=0;MQRankSum=-5.840e-01;QD=13.07;ReadPosRankSum=0.756;SB=-1.206e+03;VQSLOD=8.00;culprit=MQ	GT:AD:DP:GQ:PL:TP	0/1:32,36:68:99:1100,0,895:127	0/1:41,35:76:99:987,0,1028:127	0/1:29,17:47:99:449,0,820:127
+4	169085855	rs2251558	A	G	1161.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=0.721;DB;DP=244;Dels=0.00;FS=5.25;HaplotypeScore=0.489;MLEAC=1;MLEAF=0.167;MQ=59.91;MQ0=0;MQRankSum=1.57;QD=13.99;ReadPosRankSum=0.454;SB=-5.590e+02;VQSLOD=7.42;culprit=MQ	GT:AD:DP:GQ:PL:TP	0|0:77,0:77:99:0,226,2915:127	0|1:43,40:83:99:1196,0,1294:127	0|0:84,0:84:99:0,232,2950:127
+4	177855678	rs1545090	A	G	7065.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=229;Dels=0.00;FS=0.00;HaplotypeScore=1.17;MLEAC=6;MLEAF=1.00;MQ=59.50;MQ0=0;QD=30.85;SB=-3.799e+03;VQSLOD=11.01;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,84:84:99:2701,235,0:127	1|1:0,79:80:99:2401,211,0:127	1|1:0,65:65:99:1963,165,0:127
+4	184811263	rs2871379	A	G	6376.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=211;Dels=0.00;FS=0.00;HaplotypeScore=0.508;MLEAC=6;MLEAF=1.00;MQ=58.38;MQ0=0;QD=30.22;SB=-3.472e+03;VQSLOD=10.49;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,78:78:99:2539,199,0:127	1|1:0,67:67:99:2072,165,0:127	1|1:0,66:66:99:1765,135,0:127
+4	190538070	rs62323772	G	A	106.69	VQSRTrancheSNP99.00to99.90	AC=3;AF=0.500;AN=6;BaseQRankSum=0.099;DB;DP=19;Dels=0.00;FS=0.00;HaplotypeScore=4.36;MLEAC=3;MLEAF=0.500;MQ=33.71;MQ0=0;MQRankSum=0.591;QD=5.93;ReadPosRankSum=-3.940e-01;SB=-7.601e+01;VQSLOD=-1.392e+00;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|1:4,9:13:69:69,0,92:3	0|0:1,0:1:3:0,3,33:3	1|1:0,4:5:6:77,6,0:3
+5	3276081	rs4866540	G	A	3653.01	VQSRTrancheSNP99.00to99.90	AC=4;AF=0.667;AN=6;BaseQRankSum=5.00;DB;DP=176;Dels=0.00;FS=2.78;HaplotypeScore=19.00;MLEAC=4;MLEAF=0.667;MQ=57.74;MQ0=0;MQRankSum=-5.690e-01;QD=20.76;ReadPosRankSum=1.31;SB=-6.610e+02;VQSLOD=3.18;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|1:1,71:72:99:2337,175,0:127	1|0:26,25:51:99:719,0,601:127	1|0:26,27:53:99:597,0,662:127
+5	10958957	rs852590	T	C	8430.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=243;Dels=0.00;FS=0.00;HaplotypeScore=1.83;MLEAC=6;MLEAF=1.00;MQ=59.31;MQ0=0;QD=34.69;SB=-4.118e+03;VQSLOD=10.87;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:1,96:97:99:3594,277,0:127	1|1:1,73:74:99:2318,181,0:127	1|1:0,72:72:99:2518,196,0:127
+5	20435939	rs12697584	A	C	2286.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=-5.396e+00;DB;DP=246;Dels=0.00;FS=4.68;HaplotypeScore=0.874;MLEAC=2;MLEAF=0.333;MQ=59.74;MQ0=0;MQRankSum=-8.500e-01;QD=14.20;ReadPosRankSum=0.391;SB=-1.305e+03;VQSLOD=8.13;culprit=MQ	GT:AD:DP:GQ:PL:TP	0|0:85,0:85:99:0,241,3183:127	0|1:41,37:78:99:1062,0,1304:127	0|1:39,44:83:99:1263,0,1222:127
+5	29069162	rs658439	G	A	499.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=1.98;DB;DP=199;Dels=0.00;FS=11.20;HaplotypeScore=1.60;MLEAC=1;MLEAF=0.167;MQ=52.70;MQ0=0;MQRankSum=-6.080e-01;QD=9.42;ReadPosRankSum=-1.103e+00;SB=-2.310e+02;VQSLOD=4.02;culprit=QD	GT:AD:DP:GQ:PL:TP	0|0:86,0:86:99:0,211,2509:110	0|0:60,0:60:99:0,111,1240:110	0|1:30,23:53:99:534,0,523:110
+5	36740339	rs36676	T	G	6995.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=230;Dels=0.00;FS=0.00;HaplotypeScore=2.08;MLEAC=6;MLEAF=1.00;MQ=59.46;MQ0=0;QD=30.41;SB=-3.034e+03;VQSLOD=10.92;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,75:75:99:2261,195,0:127	1|1:0,83:83:99:2579,217,0:127	1|1:0,71:72:99:2155,181,0:127
+5	49438828	rs137880658	G	A	424.24	VQSRTrancheSNP99.90to100.00	AC=1;AF=0.167;AN=6;BaseQRankSum=-4.376e+00;DB;DP=748;DS;Dels=0.00;FS=7.18;HaplotypeScore=20.66;MLEAC=1;MLEAF=0.167;MQ=19.63;MQ0=158;MQRankSum=3.82;QD=1.70;ReadPosRankSum=1.19;SB=-2.220e+02;VQSLOD=-9.822e+01;culprit=DP	GT:AD:DP:GQ:PL:TP	0|0:249,0:249:99:0,555,4848:127	0|1:204,46:250:99:459,0,2894:127	0|0:249,0:249:99:0,480,4326:127
+5	58243302	rs457566	A	C	6237.01	PASS	AC=5;AF=0.833;AN=6;BaseQRankSum=-1.663e+00;DB;DP=233;Dels=0.00;FS=3.43;HaplotypeScore=0.886;MLEAC=5;MLEAF=0.833;MQ=59.70;MQ0=0;MQRankSum=-2.320e-01;QD=26.77;ReadPosRankSum=0.957;SB=-2.566e+03;VQSLOD=9.35;culprit=MQ	GT:AD:DP:GQ:PL:TP	1|1:0,87:87:99:2923,229,0:127	1|0:36,50:86:99:1429,0,1006:127	1|1:0,60:60:99:1885,147,0:127
+5	70856663	rs157045	T	C	2040.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=-7.809e+00;DB;DP=234;Dels=0.00;FS=1.82;HaplotypeScore=1.05;MLEAC=2;MLEAF=0.333;MQ=58.65;MQ0=0;MQRankSum=0.989;QD=13.16;ReadPosRankSum=0.719;SB=-8.820e+02;VQSLOD=7.35;culprit=FS	GT:AD:DP:GQ:PL:TP	1|0:44,41:85:99:1087,0,1448:127	1|0:28,42:70:99:992,0,789:127	0|0:79,0:79:99:0,181,2342:127
+5	80900719	rs2917540	G	T	4938.01	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=3.26;DB;DP=240;Dels=0.00;FS=0.489;HaplotypeScore=0.596;MLEAC=4;MLEAF=0.667;MQ=59.63;MQ0=0;MQRankSum=-2.115e+00;QD=20.58;ReadPosRankSum=0.288;SB=-2.548e+03;VQSLOD=8.44;culprit=MQ	GT:AD:DP:GQ:PL:TP	0|1:52,40:92:99:1207,0,1768:127	0|1:37,33:70:99:960,0,1205:127	1|1:0,78:78:99:2771,211,0:127
+5	93119893	rs6879620	G	A	4262.01	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=4.93;DB;DP=239;Dels=0.00;FS=1.05;HaplotypeScore=2.16;MLEAC=4;MLEAF=0.667;MQ=59.81;MQ0=0;MQRankSum=0.891;QD=17.83;ReadPosRankSum=-2.670e+00;SB=-2.596e+03;VQSLOD=7.07;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	0|1:49,42:91:99:1098,0,1410:127	0|1:36,38:74:99:890,0,1008:127	1|1:0,74:74:99:2274,178,0:127
+5	102962771	rs6866105	A	G	5087.01	PASS	AC=5;AF=0.833;AN=6;BaseQRankSum=-2.665e+00;DB;DP=200;Dels=0.00;FS=8.08;HaplotypeScore=2.28;MLEAC=5;MLEAF=0.833;MQ=58.71;MQ0=0;MQRankSum=-5.030e-01;QD=25.44;ReadPosRankSum=-4.820e-01;SB=-2.172e+03;VQSLOD=7.22;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,75:75:99:2373,184,0:127	1|1:0,59:59:99:1776,141,0:127	1|0:31,35:66:99:938,0,803:127
+5	112750674	rs9716322	A	C	598.24	VQSRTrancheSNP99.00to99.90	AC=1;AF=0.167;AN=6;BaseQRankSum=-2.930e-01;DB;DP=199;Dels=0.00;FS=2.95;HaplotypeScore=0.333;MLEAC=1;MLEAF=0.167;MQ=45.61;MQ0=2;MQRankSum=-2.398e+00;QD=12.21;ReadPosRankSum=1.16;SB=-8.101e+01;VQSLOD=2.77;culprit=MQ	GT:AD:DP:GQ:PL:TP	0|0:81,0:81:99:0,196,2534:127	0|1:23,26:49:99:633,0,479:127	0|0:68,1:69:99:0,174,2191:127
+5	121302662	.	G	T	143.72	VQSRTrancheSNP99.90to100.00	AC=3;AF=0.500;AN=6;BaseQRankSum=1.41;DP=150;Dels=0.020;FS=161.48;HaplotypeScore=2.86;MLEAC=3;MLEAF=0.500;MQ=35.76;MQ0=1;MQRankSum=0.422;QD=0.960;ReadPosRankSum=-5.675e+00;SB=-1.466e+00;VQSLOD=-7.724e+01;culprit=FS	GT:AD:DP:GQ:PL:TP	0/1:32,21:53:99:120,0,251:5	0/1:35,12:47:57:57,0,271:5	0/1:35,12:47:4:4,0,259:5
+5	132377304	rs72801439	A	T	858.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-1.113e+00;DB;DP=219;Dels=0.00;FS=10.63;HaplotypeScore=0.664;MLEAC=1;MLEAF=0.167;MQ=59.41;MQ0=0;MQRankSum=-8.810e-01;QD=11.29;ReadPosRankSum=0.435;SB=-3.050e+02;VQSLOD=7.68;culprit=FS	GT:AD:DP:GQ:PL:TP	0|0:67,0:67:99:0,163,2149:127	0|1:43,33:76:99:893,0,1280:127	0|0:76,0:76:99:0,199,2553:127
 5	144321895	rs35068704	A	T	1535.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=6.05;DB;DP=203;Dels=0.00;FS=13.29;HaplotypeScore=0.507;MLEAC=2;MLEAF=0.333;MQ=57.36;MQ0=0;MQRankSum=2.19;QD=11.37;ReadPosRankSum=-1.361e+00;SB=-7.640e+02;VQSLOD=5.72;culprit=FS	GT:AD:DP:GQ:PL:TP	0|1:34,25:59:99:595,0,976:127	0|0:68,0:68:99:0,190,2435:127	1|0:39,37:76:99:979,0,1274:127
+5	154441640	rs467330	A	C	1133.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-1.127e+00;DB;DP=235;Dels=0.00;FS=2.10;HaplotypeScore=1.19;MLEAC=1;MLEAF=0.167;MQ=59.65;MQ0=0;MQRankSum=0.766;QD=12.59;ReadPosRankSum=-5.350e-01;SB=-6.500e+02;VQSLOD=9.83;culprit=MQ	GT:AD:DP:GQ:PL:TP	0|0:61,0:61:99:0,172,2172:127	0|1:45,45:90:99:1168,0,1368:127	0|0:82,1:84:99:0,226,2825:127
+5	164600452	rs7711548	C	A	588.32	VQSRTrancheSNP99.00to99.90	AC=6;AF=1.00;AN=6;BaseQRankSum=-3.291e+00;DB;DP=192;Dels=0.020;FS=2.32;HaplotypeScore=32.67;MLEAC=6;MLEAF=1.00;MQ=57.55;MQ0=0;MQRankSum=1.54;QD=3.06;ReadPosRankSum=-1.533e+00;SB=-2.085e+03;VQSLOD=-1.433e+00;culprit=QD	GT:AD:DP:GQ:PL:TP	1|1:15,52:69:18:155,18,0:15	1|1:11,55:66:36:296,36,0:15	1|1:18,36:54:15:175,15,0:15
+5	174249229	rs11739079	C	G	1613.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=-4.540e+00;DB;DP=190;Dels=0.00;FS=6.48;HaplotypeScore=2.04;MLEAC=2;MLEAF=0.333;MQ=59.51;MQ0=0;MQRankSum=-8.980e-01;QD=11.86;ReadPosRankSum=0.601;SB=-8.320e+02;VQSLOD=7.78;culprit=FS	GT:AD:DP:GQ:PL:TP	0|1:32,43:75:99:1069,0,928:123	0|0:54,0:54:99:0,123,1554:123	1|0:33,27:61:99:583,0,883:123
+6	1620147	rs2569882	T	C	252.30	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=-8.620e-01;DB;DP=38;Dels=0.00;FS=8.02;HaplotypeScore=0.222;MLEAC=5;MLEAF=0.833;MQ=57.65;MQ0=0;MQRankSum=-9.180e-01;QD=9.34;ReadPosRankSum=0.584;SB=-4.005e+01;VQSLOD=4.33;culprit=DP	GT:AD:DP:GQ:PL:TP	1|1:0,17:17:21:254,21,0:4	1|0:3,8:11:2:0,3,40:4	1|1:0,10:10:3:39,3,0:4
 6	9408051	rs11751621	C	G	5140.01	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=-2.350e+00;DB;DP=241;Dels=0.00;FS=0.00;HaplotypeScore=0.664;MLEAC=4;MLEAF=0.667;MQ=59.81;MQ0=0;MQRankSum=-2.050e-01;QD=21.33;ReadPosRankSum=-3.300e-02;SB=-2.552e+03;VQSLOD=8.99;culprit=FS	GT:AD:DP:GQ:PL:TP	0|1:42,40:82:99:1156,0,1352:127	0|1:36,45:81:99:1253,0,1134:127	1|1:0,78:78:99:2731,214,0:127
+6	18687376	rs9383429	A	T	1162.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-2.746e+00;DB;DP=232;Dels=0.00;FS=0.638;HaplotypeScore=0.277;MLEAC=1;MLEAF=0.167;MQ=59.42;MQ0=0;MQRankSum=-7.330e-01;QD=15.71;ReadPosRankSum=-1.460e-01;SB=-6.490e+02;VQSLOD=9.43;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|0:82,0:82:99:0,241,3181:127	0|0:76,0:76:99:0,217,2832:127	0|1:33,41:74:99:1197,0,1008:127
 6	26687649	.	G	C	1654.26	VQSRTrancheSNP99.00to99.90	AC=2;AF=0.333;AN=6;BaseQRankSum=9.72;DP=410;Dels=0.00;FS=5.97;HaplotypeScore=5.08;MLEAC=2;MLEAF=0.333;MQ=28.19;MQ0=32;MQRankSum=0.048;QD=6.17;ReadPosRankSum=0.408;SB=-5.110e+02;VQSLOD=-8.735e+00;culprit=DP	GT:AD:DP:GQ:PL:TP	0|1:117,49:166:99:1013,0,2495:127	0|0:141,1:142:99:0,244,2576:127	1|0:70,32:102:99:680,0,1048:127
+6	31390139	rs72548006	T	TA	2159.22	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=0.491;DB;DP=257;FS=2.38;HaplotypeScore=26.89;MLEAC=2;MLEAF=0.333;MQ=59.99;MQ0=0;MQRankSum=1.59;QD=12.85;RPA=5,6;RU=A;ReadPosRankSum=1.85;SB=-9.710e+02;STR;VQSLOD=5.53;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|1:51,37:89:99:1076,0,1364:127	0|0:89,0:89:99:0,256,3112:127	1|0:41,38:79:99:1131,0,1103:127
+6	35346891	.	C	T	483.26	VQSRTrancheSNP99.90to100.00	AC=3;AF=0.500;AN=6;BaseQRankSum=-6.402e+00;DP=205;Dels=0.020;FS=214.15;HaplotypeScore=4.51;MLEAC=3;MLEAF=0.500;MQ=51.07;MQ0=1;MQRankSum=-2.038e+00;QD=2.36;ReadPosRankSum=-2.250e+00;SB=-1.065e-02;VQSLOD=-1.170e+02;culprit=FS	GT:AD:DP:GQ:PL:TP	0/1:48,31:79:99:257,0,635:80	0/1:40,22:62:99:185,0,308:80	0/1:44,16:60:80:80,0,534:80
+6	45421630	rs1343799	C	T	5422.01	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=3.42;DB;DP=240;Dels=0.00;FS=10.69;HaplotypeScore=0.798;MLEAC=4;MLEAF=0.667;MQ=59.46;MQ0=0;MQRankSum=1.09;QD=22.59;ReadPosRankSum=1.25;SB=-2.647e+03;VQSLOD=7.70;culprit=FS	GT:AD:DP:GQ:PL:TP	0|1:29,43:72:99:1445,0,843:127	0|1:43,37:80:99:1121,0,1302:127	1|1:0,88:88:99:2856,220,0:127
+6	55412503	rs12201934	T	C	882.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-1.280e+00;DB;DP=182;Dels=0.00;FS=0.744;HaplotypeScore=0.333;MLEAC=1;MLEAF=0.167;MQ=59.64;MQ0=0;MQRankSum=-7.650e-01;QD=13.57;ReadPosRankSum=0.147;SB=-4.590e+02;VQSLOD=9.38;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|0:61,0:61:99:0,181,2371:127	0|1:35,30:65:99:917,0,1046:127	0|0:56,0:56:99:0,150,1969:127
 6	63303894	rs34270846	T	TC	7366	PASS	AC=5;AF=0.833;AN=6;BaseQRankSum=1.31;DB;DP=202;FS=2.81;HaplotypeScore=88.29;MLEAC=5;MLEAF=0.833;MQ=61.10;MQ0=0;MQRankSum=-3.900e-01;QD=36.47;ReadPosRankSum=-3.150e-01;SB=-2.762e+03;VQSLOD=3.86;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,89:89:99:3885,265,0:127	1|0:29,25:54:99:999,0,1050:127	1|1:0,57:59:99:2482,169,0:127
+6	71018445	rs1321063	C	A	518.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=2.67;DB;DP=185;Dels=0.00;FS=1.62;HaplotypeScore=0.692;MLEAC=1;MLEAF=0.167;MQ=58.93;MQ0=0;MQRankSum=0.355;QD=8.78;ReadPosRankSum=1.31;SB=-4.020e+02;VQSLOD=7.75;culprit=QD	GT:AD:DP:GQ:PL:TP	0|0:67,0:67:99:0,184,2404:127	0|0:59,0:59:99:0,160,2065:127	0|1:29,30:59:99:553,0,809:127
+6	79887872	rs6454101	C	T	2130.26	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=-3.355e+00;DB;DP=204;Dels=0.00;FS=11.82;HaplotypeScore=2.38;MLEAC=3;MLEAF=0.500;MQ=59.68;MQ0=0;MQRankSum=1.01;QD=10.44;ReadPosRankSum=-7.690e-01;SB=-1.370e+03;VQSLOD=7.52;culprit=MQ	GT:AD:DP:GQ:PL:TP	0/1:31,31:62:99:806,0,990:127	0/1:42,31:73:99:606,0,921:127	0/1:29,40:69:99:757,0,749:127
+6	89947117	rs9942462	A	G	5019.01	PASS	AC=6;AF=1.00;AN=6;BaseQRankSum=1.65;DB;DP=181;Dels=0.00;FS=0.00;HaplotypeScore=3.32;MLEAC=6;MLEAF=1.00;MQ=53.30;MQ0=0;MQRankSum=0.428;QD=27.73;ReadPosRankSum=1.09;SB=-3.151e+03;VQSLOD=5.68;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|1:0,59:60:99:1715,138,0:92	1|1:1,55:56:93:1401,93,0:92	1|1:0,65:65:99:1903,153,0:92
+6	100462085	rs113348222	G	GT	5284	PASS	AC=5;AF=0.833;AN=6;BaseQRankSum=-4.600e-02;DB;DP=285;FS=7.48;HaplotypeScore=252.09;MLEAC=5;MLEAF=0.833;MQ=60.07;MQ0=0;MQRankSum=1.24;QD=18.54;RPA=10,11;RU=T;ReadPosRankSum=-2.539e+00;SB=-2.505e+03;STR;VQSLOD=2.21;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	1|1:8,79:94:99:2218,200,0:127	1|0:54,37:97:99:818,0,914:127	1|1:10,83:94:99:2248,221,0:127
+6	110345430	rs56272355	T	C	882.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-7.241e+00;DB;DP=230;Dels=0.00;FS=2.16;HaplotypeScore=1.76;MLEAC=1;MLEAF=0.167;MQ=59.70;MQ0=0;MQRankSum=-2.000e-01;QD=11.46;ReadPosRankSum=-4.800e-01;SB=-5.580e+02;VQSLOD=8.98;culprit=MQ	GT:AD:DP:GQ:PL:TP	0|0:77,0:77:99:0,220,2860:127	0|0:75,0:76:99:0,184,2321:127	0|1:35,42:77:99:917,0,926:127
+6	120734422	rs62424426	T	C	1091.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-3.268e+00;DB;DP=208;Dels=0.00;FS=4.35;HaplotypeScore=0.703;MLEAC=1;MLEAF=0.167;MQ=59.71;MQ0=0;MQRankSum=-1.880e-01;QD=14.36;ReadPosRankSum=-1.490e-01;SB=-5.560e+02;VQSLOD=8.65;culprit=MQ	GT:AD:DP:GQ:PL:TP	0|0:70,1:71:99:0,205,2692:127	0|0:61,0:61:99:0,175,2298:127	0|1:39,37:76:99:1126,0,1140:127
+6	131725921	.	G	A	64.24	VQSRTrancheSNP99.00to99.90	AC=1;AF=0.167;AN=6;BaseQRankSum=3.65;DP=182;Dels=0.00;FS=3.54;HaplotypeScore=2.24;MLEAC=1;MLEAF=0.167;MQ=50.85;MQ0=1;MQRankSum=-6.039e+00;QD=1.31;ReadPosRankSum=-1.757e+00;SB=-4.901e+01;VQSLOD=-6.667e-01;culprit=QD	GT:AD:DP:GQ:PL:TP	0|0:59,0:60:99:0,168,1897:98	0|1:33,16:49:99:99,0,790:98	0|0:73,0:73:99:0,156,1824:98
 6	143691852	rs17792267	C	T	3641.01	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=-1.465e+00;DB;DP=203;Dels=0.00;FS=2.41;HaplotypeScore=0.322;MLEAC=3;MLEAF=0.500;MQ=59.54;MQ0=0;MQRankSum=1.20;QD=27.17;ReadPosRankSum=-1.140e+00;SB=-1.772e+03;VQSLOD=8.87;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|0:29,33:62:99:1131,0,803:127	1|1:0,72:72:99:2510,196,0:127	0|0:69,0:69:99:0,190,2480:127
+6	154006585	rs7752561	G	A	919.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-2.323e+00;DB;DP=223;Dels=0.00;FS=0.00;HaplotypeScore=0.825;MLEAC=1;MLEAF=0.167;MQ=59.35;MQ0=0;MQRankSum=-7.490e-01;QD=10.45;ReadPosRankSum=-1.550e-01;SB=-5.260e+02;VQSLOD=8.84;culprit=FS	GT:AD:DP:GQ:PL:TP	0|0:70,0:70:99:0,202,2601:127	0|1:51,37:88:99:954,0,1475:127	0|0:65,0:65:99:0,175,2269:127
+6	162459664	rs2849564	C	T	1819.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=8.40;DB;DP=201;Dels=0.00;FS=3.30;HaplotypeScore=2.56;MLEAC=2;MLEAF=0.333;MQ=59.38;MQ0=0;MQRankSum=1.79;QD=12.90;ReadPosRankSum=-1.045e+00;SB=-1.068e+03;VQSLOD=8.12;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|0:28,40:68:99:1007,0,793:120	1|0:38,35:73:99:851,0,831:120	0|0:60,0:60:99:0,120,1421:120
+6	169196022	.	T	TG	229.23	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=0.887;DP=84;FS=7.13;HaplotypeScore=6.43;MLEAC=2;MLEAF=0.333;MQ=39.46;MQ0=16;MQRankSum=2.28;QD=3.82;RPA=6,7;RU=G;ReadPosRankSum=-9.830e-01;SB=-2.849e+01;STR;VQSLOD=1.35;culprit=QD	GT:AD:DP:GQ:PL:TP	1|0:19,10:15:78:78,0,88:27	1|0:17,13:21:99:199,0,172:27	0|0:24,0:15:27:0,27,308:27
+7	4683258	rs62450857	G	A	1127.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-9.910e-01;DB;DP=191;Dels=0.00;FS=3.24;HaplotypeScore=0.277;MLEAC=1;MLEAF=0.167;MQ=57.16;MQ0=0;MQRankSum=1.30;QD=14.09;ReadPosRankSum=0.469;SB=-4.730e+02;VQSLOD=6.62;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|0:66,0:66:99:0,196,2495:120	0|1:38,42:80:99:1162,0,1016:120	0|0:45,0:45:99:0,120,1531:120
+7	12502849	rs848025	C	G	4109.01	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=-8.809e+00;DB;DP=220;Dels=0.00;FS=1.14;HaplotypeScore=1.80;MLEAC=4;MLEAF=0.667;MQ=59.56;MQ0=0;MQRankSum=0.625;QD=18.68;ReadPosRankSum=-1.900e-02;SB=-1.989e+03;VQSLOD=8.71;culprit=FS	GT:AD:DP:GQ:PL:TP	1|0:47,45:92:99:1216,0,1501:127	1|1:0,71:71:99:2145,184,0:127	0|1:27,30:57:99:748,0,743:127
+7	20767781	rs150977587	TA	T	2754.21	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=6.30;DB;DP=259;FS=0.00;HaplotypeScore=122.50;MLEAC=4;MLEAF=0.667;MQ=59.41;MQ0=1;MQRankSum=-6.400e-01;QD=10.63;RPA=13,12;RU=A;ReadPosRankSum=0.189;SB=-1.344e+03;STR;VQSLOD=3.30;culprit=QD	GT:AD:DP:GQ:PL:TP	1|0:59,45:107:99:923,0,795:116	1|1:15,54:72:99:1313,116,0:116	0|1:44,31:79:99:569,0,617:116
+7	29544610	rs6462145	A	T	4020.01	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=0.326;DB;DP=189;Dels=0.00;FS=5.39;HaplotypeScore=1.30;MLEAC=4;MLEAF=0.667;MQ=58.54;MQ0=0;MQRankSum=1.29;QD=21.27;ReadPosRankSum=-1.212e+00;SB=-1.878e+03;VQSLOD=7.32;culprit=FS	GT:AD:DP:GQ:PL:TP	1|0:27,49:77:99:1466,0,733:127	1|1:0,64:64:99:1888,147,0:127	0|1:18,27:48:99:666,0,410:127
+7	38984187	rs118081099	A	C	569.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-1.100e-01;DB;DP=182;Dels=0.00;FS=6.20;HaplotypeScore=0.553;MLEAC=1;MLEAF=0.167;MQ=54.22;MQ0=0;MQRankSum=1.67;QD=9.33;ReadPosRankSum=1.16;SB=-1.260e+02;VQSLOD=4.89;culprit=QD	GT:AD:DP:GQ:PL:TP	0|0:72,0:72:99:0,196,2446:127	0|1:38,23:61:99:604,0,1196:127	0|0:48,1:49:99:0,138,1711:127
 7	48862174	rs965346	A	G	877.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-6.250e+00;DB;DP=194;Dels=0.00;FS=6.11;HaplotypeScore=0.649;MLEAC=1;MLEAF=0.167;MQ=57.54;MQ0=0;MQRankSum=2.06;QD=16.25;ReadPosRankSum=-1.204e+00;SB=-4.040e+02;VQSLOD=6.24;culprit=FS	GT:AD:DP:GQ:PL:TP	0|0:82,0:82:99:0,232,3007:127	0|0:58,0:58:99:0,160,2067:127	0|1:22,32:54:99:912,0,686:127
+7	56584972	rs34475770	C	T	2465.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=-1.030e-01;DB;DP=255;Dels=0.00;FS=2.23;HaplotypeScore=0.884;MLEAC=2;MLEAF=0.333;MQ=58.76;MQ0=0;MQRankSum=1.77;QD=15.03;ReadPosRankSum=1.13;SB=-1.412e+03;VQSLOD=6.57;culprit=FS	GT:AD:DP:GQ:PL:TP	0|0:91,0:91:99:0,259,3369:127	0|1:36,49:85:99:1444,0,1000:127	0|1:38,41:79:99:1060,0,1084:127
+7	62232046	.	C	T	1155.24	VQSRTrancheSNP99.00to99.90	AC=1;AF=0.167;AN=6;BaseQRankSum=-2.643e+00;DP=230;Dels=0.00;FS=6.30;HaplotypeScore=12.77;MLEAC=1;MLEAF=0.167;MQ=58.61;MQ0=3;MQRankSum=-6.970e-01;QD=15.20;ReadPosRankSum=-1.202e+00;SB=-4.440e+02;VQSLOD=2.57;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|0:77,1:78:99:0,217,2824:127	0|0:76,0:76:99:0,202,2604:127	0|1:34,42:76:99:1190,0,994:127
+7	69334430	rs917719	C	T	1223.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=5.84;DB;DP=193;Dels=0.00;FS=6.66;HaplotypeScore=0.815;MLEAC=2;MLEAF=0.333;MQ=59.81;MQ0=0;MQRankSum=1.63;QD=9.63;ReadPosRankSum=1.46;SB=-7.370e+02;VQSLOD=7.00;culprit=MQ	GT:AD:DP:GQ:PL:TP	1|0:38,39:77:99:770,0,1258:127	1|0:26,24:50:99:492,0,840:127	0|0:66,0:66:99:0,172,2210:127
+7	79508376	rs2525826	G	T	716.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=2.15;DB;DP=196;Dels=0.00;FS=0.00;HaplotypeScore=0.866;MLEAC=1;MLEAF=0.167;MQ=59.48;MQ0=0;MQRankSum=1.42;QD=13.51;ReadPosRankSum=0.263;SB=-4.080e+02;VQSLOD=9.57;culprit=FS	GT:AD:DP:GQ:PL:TP	0|0:65,0:65:99:0,181,2312:127	0|0:78,0:78:99:0,226,2916:127	0|1:27,26:53:99:751,0,845:127
+7	89752110	rs39232	T	G	3944.01	VQSRTrancheSNP99.00to99.90	AC=6;AF=1.00;AN=6;DB;DP=216;Dels=0.00;FS=0.00;HaplotypeScore=1.45;MLEAC=6;MLEAF=1.00;MQ=38.19;MQ0=21;QD=18.26;SB=-2.716e+03;VQSLOD=2.66;culprit=MQ	GT:AD:DP:GQ:PL:TP	1|1:0,70:70:99:1564,132,0:101	1|1:0,79:79:99:1258,108,0:101	1|1:0,66:66:99:1122,102,0:101
+7	101161506	rs2158739	C	T	4522.01	PASS	AC=5;AF=0.833;AN=6;BaseQRankSum=1.70;DB;DP=172;Dels=0.00;FS=4.28;HaplotypeScore=1.51;MLEAC=5;MLEAF=0.833;MQ=58.18;MQ0=0;MQRankSum=-2.950e-01;QD=26.29;ReadPosRankSum=0.974;SB=-2.287e+03;VQSLOD=8.28;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,72:73:99:2443,202,0:110	1|0:23,26:50:99:711,0,586:110	1|1:0,48:49:99:1368,111,0:110
+7	110843795	rs214455	A	G	8548.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=258;Dels=0.00;FS=0.00;HaplotypeScore=2.36;MLEAC=6;MLEAF=1.00;MQ=59.35;MQ0=0;QD=33.13;SB=-4.197e+03;VQSLOD=10.76;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,106:106:99:3535,301,0:127	1|1:0,66:66:99:2270,193,0:127	1|1:0,86:86:99:2743,229,0:127
 7	124589684	rs12113765	G	C	4948.01	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=-2.607e+00;DB;DP=243;Dels=0.00;FS=1.04;HaplotypeScore=3.38;MLEAC=4;MLEAF=0.667;MQ=58.74;MQ0=0;MQRankSum=1.25;QD=20.36;ReadPosRankSum=-3.870e-01;SB=-2.411e+03;VQSLOD=7.05;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|0:44,27:71:99:796,0,1350:127	1|1:0,84:85:99:2760,217,0:127	0|1:41,46:87:99:1392,0,1180:127
+7	134269995	rs67235184	G	GA	2917.20	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=0.835;DB;DP=219;FS=1.09;HaplotypeScore=39.14;MLEAC=3;MLEAF=0.500;MQ=59.44;MQ0=0;MQRankSum=-3.700e-02;QD=19.19;RPA=7,8;RU=A;ReadPosRankSum=-9.210e-01;SB=-1.316e+03;STR;VQSLOD=5.11;culprit=FS	GT:AD:DP:GQ:PL:TP	1|0:51,33:84:99:916,0,1151:127	1|1:2,65:68:99:2055,192,0:127	0|0:65,0:67:99:0,186,2051:127
+7	144383888	rs73161092	T	C	689.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-7.434e+00;DB;DP=254;Dels=0.00;FS=8.63;HaplotypeScore=0.00;MLEAC=1;MLEAF=0.167;MQ=59.54;MQ0=0;MQRankSum=0.163;QD=8.62;ReadPosRankSum=-1.802e+00;SB=-3.560e+02;VQSLOD=6.45;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|0:90,0:90:99:0,268,3553:127	0|1:47,33:80:99:724,0,1392:127	0|0:84,0:84:99:0,202,2644:127
+7	152208470	rs6464236	G	C	4321.01	PASS	AC=5;AF=0.833;AN=6;BaseQRankSum=5.23;DB;DP=169;Dels=0.00;FS=1.63;HaplotypeScore=1.36;MLEAC=5;MLEAF=0.833;MQ=57.18;MQ0=1;MQRankSum=1.91;QD=25.57;ReadPosRankSum=0.892;SB=-2.137e+03;VQSLOD=6.76;culprit=MQ	GT:AD:DP:GQ:PL:TP	1|1:1,56:57:99:1949,150,0:122	1|1:0,54:54:99:1557,123,0:122	1|0:30,28:58:99:815,0,715:122
+7	158686803	rs55765467	T	C	200.26	VQSRTrancheSNP99.00to99.90	AC=3;AF=0.500;AN=6;BaseQRankSum=-1.011e+01;DB;DP=310;Dels=0.00;FS=2.60;HaplotypeScore=1.28;MLEAC=3;MLEAF=0.500;MQ=47.32;MQ0=0;MQRankSum=-4.959e+00;QD=0.650;ReadPosRankSum=-1.096e+01;SB=-1.206e+03;VQSLOD=-1.764e+01;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	0/1:64,33:97:99:148,0,1949:32	0/1:73,45:118:58:58,0,2287:32	0/1:65,30:95:33:33,0,1910:32
+8	4048009	rs7007410	C	T	2713.26	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=0.954;DB;DP=197;Dels=0.00;FS=2.48;HaplotypeScore=0.712;MLEAC=3;MLEAF=0.500;MQ=57.03;MQ0=0;MQRankSum=0.882;QD=13.77;ReadPosRankSum=-4.960e-01;SB=-1.406e+03;VQSLOD=6.50;culprit=FS	GT:AD:DP:GQ:PL:TP	0/1:38,30:68:99:847,0,1168:127	0/1:30,30:60:99:916,0,921:127	0/1:32,36:69:99:989,0,882:127
+8	8974322	rs28440961	G	A	1682.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=1.72;DB;DP=190;Dels=0.00;FS=0.579;HaplotypeScore=1.54;MLEAC=2;MLEAF=0.333;MQ=59.33;MQ0=0;MQRankSum=-6.520e-01;QD=12.65;ReadPosRankSum=0.746;SB=-7.040e+02;VQSLOD=9.37;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	1|0:32,32:64:99:790,0,1002:125	1|0:33,36:69:99:931,0,866:125	0|0:57,0:57:99:0,126,1622:125
+8	14463368	rs1355302	T	A	2523.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=6.79;DB;DP=236;Dels=0.00;FS=3.78;HaplotypeScore=0.324;MLEAC=2;MLEAF=0.333;MQ=59.14;MQ0=0;MQRankSum=-3.810e-01;QD=15.97;ReadPosRankSum=-4.500e-02;SB=-1.421e+03;VQSLOD=7.69;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|1:36,43:79:99:1332,0,1176:127	0|0:78,0:78:99:0,214,2749:127	1|0:35,44:79:99:1230,0,1162:127
+8	21736923	rs13256921	G	A	217.24	VQSRTrancheSNP99.00to99.90	AC=1;AF=0.167;AN=6;BaseQRankSum=1.88;DB;DP=93;Dels=0.00;FS=5.53;HaplotypeScore=6.50;MLEAC=1;MLEAF=0.167;MQ=42.13;MQ0=3;MQRankSum=0.517;QD=6.79;ReadPosRankSum=1.71;SB=-1.980e+02;VQSLOD=-2.643e+00;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|0:34,0:34:66:0,66,691:41	0|1:15,17:32:99:252,0,211:41	0|0:27,0:27:42:0,42,425:41
+8	30862954	rs149290124	C	CA	3883	PASS	AC=5;AF=0.833;AN=6;BaseQRankSum=-1.321e+00;DB;DP=261;FS=0.755;HaplotypeScore=37.27;MLEAC=5;MLEAF=0.833;MQ=58.08;MQ0=0;MQRankSum=0.330;QD=14.88;RPA=11,12;RU=A;ReadPosRankSum=-5.780e-01;SB=-1.926e+03;STR;VQSLOD=5.37;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:21,76:105:99:1980,179,0:91	1|0:51,22:75:99:597,0,485:91	1|1:24,54:81:92:1306,92,0:91
+8	42044954	rs1058720	G	A	2760.25	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=4.31;DB;DP=157;Dels=0.00;FS=0.00;HaplotypeScore=2.10;MLEAC=4;MLEAF=0.667;MQ=58.82;MQ0=0;MQRankSum=-2.021e+00;QD=17.58;ReadPosRankSum=-4.620e-01;SB=-1.377e+03;VQSLOD=7.58;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,56:56:99:1820,147,0:127	1|0:24,27:51:99:555,0,233:127	1|0:26,24:50:99:427,0,458:127
+8	56608897	rs57623198	C	T	1911.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=-1.515e+00;DB;DP=229;Dels=0.00;FS=0.00;HaplotypeScore=1.13;MLEAC=2;MLEAF=0.333;MQ=59.80;MQ0=0;MQRankSum=0.336;QD=12.66;ReadPosRankSum=-2.326e+00;SB=-9.730e+02;VQSLOD=8.56;culprit=FS	GT:AD:DP:GQ:PL:TP	0|0:78,0:78:99:0,229,2989:127	0|1:44,30:74:99:897,0,1444:127	0|1:44,33:77:99:1053,0,1400:127
+8	66327344	rs13282622	G	A	3918.01	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=4.51;DB;DP=192;Dels=0.00;FS=3.53;HaplotypeScore=1.65;MLEAC=4;MLEAF=0.667;MQ=58.91;MQ0=0;MQRankSum=0.706;QD=20.41;ReadPosRankSum=-3.270e-01;SB=-2.091e+03;VQSLOD=8.63;culprit=FS	GT:AD:DP:GQ:PL:TP	0|1:34,37:71:99:1137,0,923:127	0|1:32,28:61:99:806,0,988:127	1|1:0,60:60:99:1975,153,0:127
+8	78181000	rs1563030	A	G	5178.01	PASS	AC=5;AF=0.833;AN=6;BaseQRankSum=-3.000e-02;DB;DP=192;Dels=0.00;FS=4.68;HaplotypeScore=2.95;MLEAC=5;MLEAF=0.833;MQ=59.62;MQ0=0;MQRankSum=-4.670e-01;QD=26.97;ReadPosRankSum=-8.550e-01;SB=-2.617e+03;VQSLOD=7.34;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|1:0,59:59:99:1954,150,0:127	1|0:32,37:70:99:1198,0,888:127	1|1:0,63:63:99:2026,156,0:127
+8	90278211	rs11998540	A	T	357.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=-1.457e+00;DB;DP=69;Dels=0.00;FS=1.29;HaplotypeScore=0.00;MLEAC=2;MLEAF=0.333;MQ=58.96;MQ0=0;MQRankSum=1.41;QD=8.12;ReadPosRankSum=1.26;SB=-9.501e+01;VQSLOD=5.96;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|1:12,7:19:99:185,0,416:60	0|0:25,0:25:60:0,60,805:60	1|0:16,9:25:99:211,0,500:60
+8	102355800	rs10103956	G	T	1756.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=0.927;DB;DP=189;Dels=0.00;FS=5.92;HaplotypeScore=1.54;MLEAC=2;MLEAF=0.333;MQ=59.66;MQ0=0;MQRankSum=-4.200e-01;QD=13.41;ReadPosRankSum=-1.900e-02;SB=-5.470e+02;VQSLOD=8.16;culprit=MQ	GT:AD:DP:GQ:PL:TP	1|0:32,38:70:99:1078,0,999:127	1|0:35,26:61:99:717,0,1103:127	0|0:57,0:58:99:0,156,2011:127
 8	113376378	rs41340951	T	C	4379.01	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=-8.019e+00;DB;DP=211;Dels=0.00;FS=1.83;HaplotypeScore=0.263;MLEAC=4;MLEAF=0.667;MQ=59.85;MQ0=0;MQRankSum=1.27;QD=20.75;ReadPosRankSum=0.144;SB=-1.893e+03;VQSLOD=8.84;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|1:0,67:67:99:2221,187,0:127	1|0:32,37:69:99:1083,0,1066:127	1|0:35,40:75:99:1075,0,1108:127
+8	123499998	rs9693135	T	C	926.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=-7.820e+00;DB;DP=207;Dels=0.00;FS=2.68;HaplotypeScore=1.26;MLEAC=2;MLEAF=0.333;MQ=56.08;MQ0=0;MQRankSum=-2.729e+00;QD=6.34;ReadPosRankSum=-2.690e-01;SB=-8.230e+02;VQSLOD=4.30;culprit=QD	GT:AD:DP:GQ:PL:TP	0|0:61,0:61:99:0,172,2301:127	0|1:37,35:72:99:622,0,769:127	0|1:48,26:74:99:343,0,1280:127
 8	132727531	rs7000409	G	A	2050.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=5.47;DB;DP=220;Dels=0.00;FS=5.20;HaplotypeScore=1.94;MLEAC=2;MLEAF=0.333;MQ=59.71;MQ0=0;MQRankSum=1.23;QD=14.14;ReadPosRankSum=-8.710e-01;SB=-1.080e+03;VQSLOD=7.79;culprit=MQ	GT:AD:DP:GQ:PL:TP	0|1:29,35:64:99:1215,0,785:127	0|0:75,0:75:99:0,190,2423:127	1|0:50,31:81:99:874,0,1611:127
 8	140651779	rs1469039	G	A	1879.26	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=-2.189e+00;DB;DP=145;Dels=0.00;FS=0.00;HaplotypeScore=1.77;MLEAC=3;MLEAF=0.500;MQ=58.39;MQ0=0;MQRankSum=0.745;QD=12.96;ReadPosRankSum=-1.590e-01;SB=-9.360e+02;VQSLOD=9.01;culprit=FS	GT:AD:DP:GQ:PL:TP	0/1:23,24:47:99:612,0,740:127	0/1:21,31:52:99:684,0,478:127	0/1:20,26:46:99:622,0,367:127
 9	2072356	rs4741641	T	G	2054.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=-7.235e+00;DB;DP=243;Dels=0.00;FS=0.00;HaplotypeScore=2.21;MLEAC=2;MLEAF=0.333;MQ=59.64;MQ0=0;MQRankSum=1.17;QD=13.00;ReadPosRankSum=-4.750e-01;SB=-1.004e+03;VQSLOD=9.02;culprit=FS	GT:AD:DP:GQ:PL:TP	0|1:49,45:94:99:1245,0,1615:127	0|0:85,0:85:99:0,217,2818:127	1|0:32,32:64:99:848,0,936:127
+9	9066127	rs57751104	ATATT	A	4412	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=-8.800e-02;DB;DP=214;FS=8.47;HaplotypeScore=16.59;MLEAC=2;MLEAF=0.333;MQ=58.75;MQ0=0;MQRankSum=0.462;QD=29.61;RPA=2,1;RU=TATT;ReadPosRankSum=1.38;SB=-1.837e+03;STR;VQSLOD=4.42;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|0:41,36:79:99:2129,0,2123:127	1|0:32,38:70:99:2283,0,1679:127	0|0:65,0:65:99:0,184,4023:127
+9	16302655	rs9298755	T	G	1394.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=3.36;DB;DP=200;Dels=0.00;FS=0.00;HaplotypeScore=3.19;MLEAC=2;MLEAF=0.333;MQ=59.33;MQ0=0;MQRankSum=0.586;QD=10.03;ReadPosRankSum=-7.710e-01;SB=-8.720e+02;VQSLOD=7.77;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|0:60,1:61:99:0,165,2004:127	0|1:29,28:57:99:494,0,759:127	0|1:41,40:82:99:939,0,1069:127
+9	25201692	rs9408058	C	T	8643.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=245;Dels=0.00;FS=0.00;HaplotypeScore=0.898;MLEAC=6;MLEAF=1.00;MQ=59.13;MQ0=0;QD=35.28;SB=-4.398e+03;VQSLOD=12.17;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,89:89:99:3312,256,0:127	1|1:0,81:81:99:2771,223,0:127	1|1:0,75:75:99:2560,202,0:127
+9	33446281	.	C	CT	31.72	VQSRTrancheINDEL99.90to100.00	AC=1;AF=0.167;AN=6;BaseQRankSum=-2.119e+00;DP=178;FS=127.57;HaplotypeScore=21.55;MLEAC=1;MLEAF=0.167;MQ=58.69;MQ0=0;MQRankSum=1.61;QD=0.620;RPA=2,3;RU=T;ReadPosRankSum=-5.268e+00;SB=-4.493e+00;STR;VQSLOD=-1.057e+01;culprit=FS	GT:AD:DP:GQ:PL:TP	0|0:77,0:79:73:0,73,2094:6	0|0:46,0:48:5:0,5,958:6	0|1:35,15:51:75:75,0,1073:6
 9	44998848	.	C	G	254.43	VQSRTrancheSNP99.00to99.90	AC=5;AF=0.833;AN=6;BaseQRankSum=2.20;DP=193;Dels=0.00;FS=10.82;HaplotypeScore=0.245;MLEAC=5;MLEAF=0.833;MQ=6.48;MQ0=163;MQRankSum=-1.206e+00;QD=1.32;ReadPosRankSum=0.790;SB=-1.372e+02;VQSLOD=-6.632e+00;culprit=QD	GT:AD:DP:GQ:PL:TP	1|0:39,27:66:23:132,0,23:8	1|1:36,16:52:15:118,15,0:8	0|1:53,22:75:1:45,6,0:8
+9	69810010	rs113063015	A	C	280.26	VQSRTrancheSNP99.90to100.00	AC=3;AF=0.500;AN=6;BaseQRankSum=-1.223e+00;DB;DP=716;DS;Dels=0.00;FS=9.00;HaplotypeScore=39.83;MLEAC=3;MLEAF=0.500;MQ=37.32;MQ0=64;MQRankSum=-3.574e+00;QD=0.390;ReadPosRankSum=-8.970e-01;SB=-1.065e-02;VQSLOD=-6.817e+01;culprit=DP	GT:AD:DP:GQ:PL:TP	0/1:218,23:241:73:73,0,3591:70	0/1:188,39:227:99:172,0,4233:70	0/1:217,31:248:74:74,0,5042:70
 9	78741390	rs7049138	G	A	1797.24	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=6.24;DB;DP=168;Dels=0.00;FS=4.00;HaplotypeScore=1.09;MLEAC=3;MLEAF=0.500;MQ=55.32;MQ0=2;MQRankSum=-1.673e+00;QD=16.34;ReadPosRankSum=-6.260e-01;SB=-7.740e+02;VQSLOD=5.19;culprit=MQ	GT:AD:DP:GQ:PL:TP	1|0:37,33:70:99:828,0,918:80	1|1:0,40:40:81:1014,81,0:80	0|0:58,0:58:99:0,129,1403:80
+9	89010476	rs164937	T	C	736.24	VQSRTrancheSNP99.00to99.90	AC=1;AF=0.167;AN=6;BaseQRankSum=-3.977e+00;DB;DP=216;Dels=0.00;FS=1.48;HaplotypeScore=1.06;MLEAC=1;MLEAF=0.167;MQ=39.79;MQ0=9;MQRankSum=-3.028e+00;QD=9.56;ReadPosRankSum=0.542;SB=-4.240e+02;VQSLOD=2.08;culprit=MQ	GT:AD:DP:GQ:PL:TP	0|0:66,0:66:99:0,174,2097:127	0|1:40,37:77:99:771,0,876:127	0|0:73,0:73:99:0,183,2147:127
+9	98491693	rs12337935	C	A	1474.25	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=2.10;DB;DP=97;Dels=0.00;FS=14.28;HaplotypeScore=1.77;MLEAC=4;MLEAF=0.667;MQ=58.64;MQ0=0;MQRankSum=-2.240e-01;QD=15.20;ReadPosRankSum=1.17;SB=-6.430e+02;VQSLOD=5.80;culprit=FS	GT:AD:DP:GQ:PL:TP	1|0:14,12:26:99:348,0,352:63	1|1:0,28:29:63:781,63,0:63	0|1:17,25:42:99:387,0,364:63
+9	109619207	rs4743032	A	T	3810.01	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=-2.859e+00;DB;DP=214;Dels=0.00;FS=10.28;HaplotypeScore=0.322;MLEAC=3;MLEAF=0.500;MQ=59.59;MQ0=0;MQRankSum=0.557;QD=25.07;ReadPosRankSum=-8.360e-01;SB=-2.019e+03;VQSLOD=8.14;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|0:37,39:76:99:1185,0,1225:127	1|1:0,76:76:99:2625,202,0:127	0|0:62,0:62:99:0,156,2049:127
 9	118670009	rs66523513	CT	C	5633	PASS	AC=6;AF=1.00;AN=6;BaseQRankSum=-6.600e-02;DB;DP=222;FS=6.40;HaplotypeScore=30.72;MLEAC=6;MLEAF=1.00;MQ=59.97;MQ0=0;MQRankSum=0.490;QD=25.37;RPA=11,10;RU=T;ReadPosRankSum=2.43;SB=-2.667e+03;STR;VQSLOD=3.39;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|1:6,71:78:99:2079,216,0:120	1|1:10,54:65:99:1524,120,0:120	1|1:9,69:79:99:2030,209,0:120
+9	130235282	.	A	G	166.26	VQSRTrancheSNP99.90to100.00	AC=2;AF=0.333;AN=6;BaseQRankSum=-8.543e+00;DP=232;Dels=0.00;FS=233.04;HaplotypeScore=3.00;MLEAC=2;MLEAF=0.333;MQ=59.04;MQ0=0;MQRankSum=-2.038e+00;QD=1.15;ReadPosRankSum=-7.649e+00;SB=-3.100e-03;VQSLOD=-1.398e+02;culprit=FS	GT:AD:DP:GQ:PL:TP	0|0:74,14:88:99:0,108,2160:59	0|1:42,36:79:60:60,0,966:59	0|1:47,18:65:99:145,0,922:59
+9	138617823	rs55724592	C	T	53.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-3.800e-01;DB;DP=72;Dels=0.00;FS=8.08;HaplotypeScore=1.02;MLEAC=1;MLEAF=0.167;MQ=58.90;MQ0=0;MQRankSum=0.851;QD=2.66;ReadPosRankSum=-3.440e-01;SB=-9.096e+01;VQSLOD=4.27;culprit=QD	GT:AD:DP:GQ:PL:TP	0|0:20,0:20:48:0,48,580:44	0|0:32,0:32:48:0,48,577:44	0|1:10,10:20:88:88,0,199:44
+10	4588547	rs313427	C	T	1985.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=3.69;DB;DP=204;Dels=0.00;FS=1.81;HaplotypeScore=0.614;MLEAC=2;MLEAF=0.333;MQ=59.73;MQ0=0;MQRankSum=0.241;QD=14.18;ReadPosRankSum=0.717;SB=-1.082e+03;VQSLOD=9.78;culprit=MQ	GT:AD:DP:GQ:PL:TP	1|0:39,35:74:99:1029,0,1182:127	1|0:32,34:66:99:995,0,1024:127	0|0:64,0:64:99:0,175,2242:127
+10	11560326	rs4424580	C	T	1027.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=-1.780e-01;DB;DP=207;Dels=0.00;FS=0.573;HaplotypeScore=2.20;MLEAC=2;MLEAF=0.333;MQ=59.24;MQ0=0;MQRankSum=-9.880e-01;QD=7.78;ReadPosRankSum=-4.920e-01;SB=-7.450e+02;VQSLOD=8.46;culprit=QD	GT:AD:DP:GQ:PL:TP	1|0:41,32:73:99:609,0,930:96	1|0:29,30:59:99:457,0,570:96	0|0:74,1:75:96:0,96,1185:96
+10	19567676	rs61458006	G	T	599.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-2.610e+00;DB;DP=195;Dels=0.00;FS=0.844;HaplotypeScore=0.879;MLEAC=1;MLEAF=0.167;MQ=59.57;MQ0=0;MQRankSum=-1.910e-01;QD=11.31;ReadPosRankSum=-1.635e+00;SB=-2.780e+02;VQSLOD=9.36;culprit=FS	GT:AD:DP:GQ:PL:TP	0|0:75,0:75:99:0,208,2645:127	0|1:30,23:53:99:634,0,915:127	0|0:67,0:67:99:0,199,2540:127
+10	29082980	.	G	C	42.53	VQSRTrancheSNP99.00to99.90	AC=3;AF=0.500;AN=6;BaseQRankSum=-1.216e+00;DP=107;Dels=0.00;FS=60.98;HaplotypeScore=1.09;MLEAC=3;MLEAF=0.500;MQ=55.72;MQ0=0;MQRankSum=-6.330e-01;QD=0.400;ReadPosRankSum=-4.307e+00;SB=-2.798e-01;VQSLOD=-1.117e+01;culprit=FS	GT:AD:DP:GQ:PL:TP	0/1:30,12:43:12:12,0,757:12	0/1:19,11:30:35:35,0,429:12	0/1:25,9:34:34:34,0,388:12
+10	38560911	rs80189231	T	A	1072.26	VQSRTrancheSNP99.00to99.90	AC=3;AF=0.500;AN=6;BaseQRankSum=0.103;DB;DP=356;Dels=0.00;FS=12.04;HaplotypeScore=5.40;MLEAC=3;MLEAF=0.500;MQ=49.74;MQ0=6;MQRankSum=-2.686e+00;QD=3.01;ReadPosRankSum=0.797;SB=-1.690e+02;VQSLOD=-3.513e+00;culprit=QD	GT:AD:DP:GQ:PL:TP	0/1:109,26:135:99:389,0,2879:127	0/1:84,27:111:99:539,0,2214:127	0/1:93,17:110:99:183,0,2277:127
+10	46605359	.	CT	C	883.20	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=1.62;DP=136;FS=9.66;HaplotypeScore=4.70;MLEAC=3;MLEAF=0.500;MQ=22.12;MQ0=1;MQRankSum=0.368;QD=8.57;RPA=7,6;RU=T;ReadPosRankSum=-1.076e+00;SB=-1.190e+02;STR;VQSLOD=2.55;culprit=QD	GT:AD:DP:GQ:PL:TP	0|0:33,0:32:90:0,90,1016:25	0|1:66,15:81:99:303,0,1810:25	0|1:2,19:20:0:634,54,0:25
 10	56495833	rs6481118	G	A	7633.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=212;Dels=0.00;FS=0.00;HaplotypeScore=0.333;MLEAC=6;MLEAF=1.00;MQ=54.64;MQ0=0;QD=36.00;SB=-2.091e+03;VQSLOD=8.67;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,62:62:99:2383,184,0:127	1|1:1,67:68:99:2375,184,0:127	1|1:0,82:82:99:2875,223,0:127
+10	65355538	rs10733794	A	G	2464.24	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=-8.392e+00;DB;DP=193;Dels=0.00;FS=7.47;HaplotypeScore=2.54;MLEAC=3;MLEAF=0.500;MQ=58.06;MQ0=2;MQRankSum=0.999;QD=18.96;ReadPosRankSum=-1.229e+00;SB=-1.065e+03;VQSLOD=6.39;culprit=FS	GT:AD:DP:GQ:PL:TP	1|0:33,32:65:99:830,0,1039:127	1|1:1,64:65:99:1679,153,0:127	0|0:63,0:63:99:0,159,2042:127
+10	74020452	rs72806301	C	A	1116.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=-2.155e+00;DB;DP=135;Dels=0.00;FS=3.28;HaplotypeScore=0.872;MLEAC=2;MLEAF=0.333;MQ=58.76;MQ0=0;MQRankSum=-8.180e-01;QD=11.88;ReadPosRankSum=1.48;SB=-6.800e+02;VQSLOD=7.27;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	0|1:20,27:47:99:658,0,446:104	0|0:41,0:41:99:0,105,1324:104	1|0:25,22:47:99:497,0,482:104
 10	85258657	rs4454662	C	A	7638.01	PASS	AC=6;AF=1.00;AN=6;BaseQRankSum=1.72;DB;DP=223;Dels=0.00;FS=4.52;HaplotypeScore=1.13;MLEAC=6;MLEAF=1.00;MQ=58.32;MQ0=0;MQRankSum=-1.230e+00;QD=34.25;ReadPosRankSum=1.15;SB=-2.841e+03;VQSLOD=7.65;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:1,64:65:99:2156,169,0:127	1|1:0,89:89:99:2982,238,0:127	1|1:0,69:69:99:2500,196,0:127
+10	95435201	rs2422323	C	T	4017.01	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=6.44;DB;DP=203;Dels=0.00;FS=13.13;HaplotypeScore=3.24;MLEAC=4;MLEAF=0.667;MQ=55.24;MQ0=0;MQRankSum=1.17;QD=19.79;ReadPosRankSum=-6.550e-01;SB=-2.177e+03;VQSLOD=4.60;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|0:38,45:83:99:1327,0,931:127	1|1:1,63:64:99:1966,156,0:127	0|1:30,25:56:99:724,0,807:127
 10	106444111	rs72813617	A	G	632.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-8.380e-01;DB;DP=197;Dels=0.00;FS=4.12;HaplotypeScore=1.82;MLEAC=1;MLEAF=0.167;MQ=59.35;MQ0=0;MQRankSum=-1.142e+00;QD=12.40;ReadPosRankSum=-1.947e+00;SB=-2.390e+02;VQSLOD=7.62;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	0|0:84,0:84:99:0,241,3129:127	0|0:62,0:62:99:0,159,2008:127	0|1:27,24:51:99:667,0,584:127
+10	116646860	rs4752347	T	A	4159.01	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=-3.459e+00;DB;DP=197;Dels=0.00;FS=3.52;HaplotypeScore=0.631;MLEAC=4;MLEAF=0.667;MQ=59.27;MQ0=0;MQRankSum=1.05;QD=21.11;ReadPosRankSum=-1.271e+00;SB=-2.347e+03;VQSLOD=8.33;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	1|1:0,63:63:99:2191,169,0:127	1|0:31,35:66:99:1031,0,863:127	1|0:31,37:68:99:937,0,774:127
+10	125452232	rs7900707	A	G	3211.24	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=-6.610e+00;DB;DP=189;Dels=0.00;FS=17.74;HaplotypeScore=0.00;MLEAC=4;MLEAF=0.667;MQ=59.66;MQ0=0;MQRankSum=0.614;QD=16.99;ReadPosRankSum=-3.210e-01;SB=-1.553e+03;VQSLOD=6.90;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|1:0,61:61:99:1944,168,0:127	1|0:44,26:70:99:627,0,1357:127	1|0:29,29:58:99:682,0,862:127
+10	132949760	rs10829924	A	C	684.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-2.328e+00;DB;DP=215;Dels=0.00;FS=15.46;HaplotypeScore=1.32;MLEAC=1;MLEAF=0.167;MQ=59.70;MQ0=0;MQRankSum=-1.053e+00;QD=10.53;ReadPosRankSum=-2.893e+00;SB=-2.920e+02;VQSLOD=5.89;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	0|0:80,0:81:99:0,229,2999:127	0|0:69,0:69:99:0,175,2268:127	0|1:35,30:65:99:719,0,1046:127
 11	3839556	rs5789310	CA	C	803.10	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=0.138;DB;DP=170;FS=4.50;HaplotypeScore=36.64;MLEAC=3;MLEAF=0.500;MQ=55.95;MQ0=2;MQRankSum=2.28;QD=4.72;RPA=17,16;RU=A;ReadPosRankSum=3.21;SB=-3.839e+02;STR;VQSLOD=1.81;culprit=QD	GT:AD:DP:GQ:PL:TP	0/1:35,17:55:32:329,0,32:3	0/1:45,9:57:8:276,0,8:3	0/1:30,19:52:1:244,0,1:3
+11	10741836	rs72853043	G	A	175.24	VQSRTrancheSNP99.00to99.90	AC=1;AF=0.167;AN=6;BaseQRankSum=0.127;DB;DP=174;Dels=0.00;FS=7.09;HaplotypeScore=7.46;MLEAC=1;MLEAF=0.167;MQ=55.72;MQ0=1;MQRankSum=-1.291e+00;QD=3.44;ReadPosRankSum=-1.185e+00;SB=-2.220e+02;VQSLOD=1.61;culprit=QD	GT:AD:DP:GQ:PL:TP	0|0:78,0:78:99:0,105,1278:65	0|0:42,0:45:66:0,66,802:65	0|1:21,29:51:99:210,0,469:65
+11	19325064	rs793250	G	A	5752.01	PASS	AC=5;AF=0.833;AN=6;BaseQRankSum=5.08;DB;DP=203;Dels=0.00;FS=2.53;HaplotypeScore=0.817;MLEAC=5;MLEAF=0.833;MQ=59.35;MQ0=0;MQRankSum=-6.100e-02;QD=28.34;ReadPosRankSum=0.095;SB=-3.093e+03;VQSLOD=9.46;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,71:71:99:2527,190,0:127	1|1:0,78:78:99:2626,205,0:127	1|0:32,22:54:99:599,0,768:127
+11	26026566	rs7942277	T	C	5414.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=176;Dels=0.00;FS=0.00;HaplotypeScore=0.245;MLEAC=6;MLEAF=1.00;MQ=58.89;MQ0=0;QD=30.76;SB=-2.812e+03;VQSLOD=11.05;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|1:0,53:53:99:1707,132,0:122	1|1:0,53:53:99:1581,123,0:122	1|1:0,70:70:99:2126,163,0:122
+11	35980461	rs263087	A	T	3494.01	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=-7.010e-01;DB;DP=186;Dels=0.00;FS=0.00;HaplotypeScore=2.96;MLEAC=4;MLEAF=0.667;MQ=59.34;MQ0=0;MQRankSum=0.836;QD=18.79;ReadPosRankSum=7.000e-03;SB=-1.535e+03;VQSLOD=7.64;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|1:36,35:71:99:1001,0,1165:127	0|1:30,26:56:99:710,0,796:127	1|1:0,59:59:99:1783,144,0:127
+11	44985620	rs7106313	C	T	338.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=2.18;DB;DP=162;Dels=0.00;FS=4.84;HaplotypeScore=2.00;MLEAC=1;MLEAF=0.167;MQ=56.94;MQ0=0;MQRankSum=2.02;QD=8.67;ReadPosRankSum=0.983;SB=-2.190e+02;VQSLOD=6.66;culprit=QD	GT:AD:DP:GQ:PL:TP	0|0:79,0:80:99:0,174,2084:72	0|0:43,0:43:72:0,72,877:72	0|1:20,18:39:99:373,0,365:72
+11	55040918	rs187118824	A	T	1019.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=1.08;DB;DP=65;Dels=0.020;FS=12.72;HaplotypeScore=0.977;MLEAC=2;MLEAF=0.333;MQ=56.64;MQ0=1;MQRankSum=0.323;QD=25.48;ReadPosRankSum=0.873;SB=-2.060e+01;VQSLOD=4.42;culprit=DP	GT:AD:DP:GQ:PL:TP	0|1:2,9:11:48:325,0,48:50	0|0:24,1:25:63:0,63,833:50	1|0:6,22:28:51:733,0,51:50
+11	65339346	rs1194104	C	T	1804.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=-2.465e+00;DB;DP=189;Dels=0.00;FS=8.43;HaplotypeScore=2.27;MLEAC=2;MLEAF=0.333;MQ=59.31;MQ0=0;MQRankSum=-1.200e-01;QD=13.67;ReadPosRankSum=-3.060e-01;SB=-1.061e+03;VQSLOD=7.95;culprit=FS	GT:AD:DP:GQ:PL:TP	0|0:57,0:57:99:0,157,2050:127	0|1:30,35:65:99:781,0,822:127	0|1:23,44:67:99:1062,0,690:127
+11	75978490	rs655877	G	C	2986.25	PASS	AC=5;AF=0.833;AN=6;BaseQRankSum=-8.460e-01;DB;DP=127;Dels=0.00;FS=5.43;HaplotypeScore=0.263;MLEAC=5;MLEAF=0.833;MQ=59.42;MQ0=0;MQRankSum=0.026;QD=23.51;ReadPosRankSum=0.168;SB=-1.613e+03;VQSLOD=7.04;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|1:0,50:50:99:1405,111,0:101	1|1:0,45:46:99:1269,102,0:101	1|0:17,14:31:99:354,0,469:101
+11	86061661	rs11825046	T	C	6014.01	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=-3.250e+00;DB;DP=282;Dels=0.00;FS=0.960;HaplotypeScore=1.64;MLEAC=4;MLEAF=0.667;MQ=59.57;MQ0=0;MQRankSum=1.06;QD=21.33;ReadPosRankSum=-1.763e+00;SB=-3.081e+03;VQSLOD=7.35;culprit=DP	GT:AD:DP:GQ:PL:TP	1|0:54,49:103:99:1498,0,1848:127	1|1:0,83:83:99:2924,223,0:127	0|1:44,52:96:99:1592,0,1380:127
+11	96559202	rs72048706	C	CAA	373.24	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=-7.960e-01;DB;DP=121;FS=1.54;HaplotypeScore=64.55;MLEAC=4;MLEAF=0.667;MQ=39.96;MQ0=6;MQRankSum=-5.390e-01;QD=3.08;RPA=21,23;RU=A;ReadPosRankSum=-3.583e+00;SB=-1.985e+02;STR;VQSLOD=1.31;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	1|0:20,8:32:24:111,0,24:22	1|1:40,0:41:29:180,29,0:22	0|1:39,0:39:41:133,0,41:22
+11	103989190	rs1545865	T	C	4915.01	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=-4.030e+00;DB;DP=219;Dels=0.00;FS=1.23;HaplotypeScore=1.21;MLEAC=4;MLEAF=0.667;MQ=59.37;MQ0=0;MQRankSum=1.10;QD=22.44;ReadPosRankSum=0.773;SB=-2.213e+03;VQSLOD=8.91;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	0|1:23,40:63:99:1203,0,669:127	0|1:36,52:88:99:1433,0,1136:127	1|1:0,68:68:99:2279,178,0:127
+11	113825129	rs1150234	G	A	2441.26	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=3.98;DB;DP=210;Dels=0.00;FS=0.00;HaplotypeScore=2.07;MLEAC=3;MLEAF=0.500;MQ=59.49;MQ0=0;MQRankSum=0.877;QD=11.63;ReadPosRankSum=0.262;SB=-1.513e+03;VQSLOD=9.04;culprit=FS	GT:AD:DP:GQ:PL:TP	0/1:30,33:63:99:881,0,774:127	0/1:46,33:80:99:726,0,1020:127	0/1:32,35:67:99:873,0,759:127
+11	123470819	rs1148095	T	C	6964.01	PASS	AC=5;AF=0.833;AN=6;BaseQRankSum=-8.500e-01;DB;DP=242;Dels=0.00;FS=1.48;HaplotypeScore=1.39;MLEAC=5;MLEAF=0.833;MQ=59.25;MQ0=0;MQRankSum=-1.734e+00;QD=28.78;ReadPosRankSum=-7.490e-01;SB=-3.242e+03;VQSLOD=8.79;culprit=MQRankSum	GT:AD:DP:GQ:PL:TP	1|1:0,92:92:99:3408,265,0:127	1|0:35,51:86:99:1520,0,1059:127	1|1:0,64:64:99:2036,157,0:127
+11	132780959	rs61906922	C	T	1342.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=6.22;DB;DP=263;Dels=0.00;FS=0.601;HaplotypeScore=1.84;MLEAC=1;MLEAF=0.167;MQ=59.58;MQ0=0;MQRankSum=0.997;QD=15.25;ReadPosRankSum=-7.460e-01;SB=-6.890e+02;VQSLOD=8.81;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	0|0:87,0:87:99:0,250,2877:127	0|0:87,0:88:99:0,253,3009:127	0|1:43,45:88:99:1377,0,1138:127
+12	5922552	rs17786352	C	G	1874.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=-7.386e+00;DB;DP=231;Dels=0.00;FS=0.00;HaplotypeScore=0.305;MLEAC=2;MLEAF=0.333;MQ=54.49;MQ0=0;MQRankSum=2.66;QD=13.11;ReadPosRankSum=1.04;SB=-9.400e+02;VQSLOD=4.75;culprit=FS	GT:AD:DP:GQ:PL:TP	1|0:44,35:79:99:897,0,1383:127	1|0:28,36:64:99:1016,0,734:127	0|0:88,0:88:99:0,238,3004:127
+12	12560642	rs10772559	G	T	501.28	VQSRTrancheSNP99.00to99.90	AC=4;AF=0.667;AN=6;BaseQRankSum=0.747;DB;DP=229;Dels=0.040;FS=17.41;HaplotypeScore=19.70;MLEAC=4;MLEAF=0.667;MQ=49.37;MQ0=0;MQRankSum=-3.656e+00;QD=2.19;ReadPosRankSum=-5.397e+00;SB=-2.196e+03;VQSLOD=-1.020e+01;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	1|1:0,66:66:24:273,24,0:24	1|0:29,45:74:99:148,0,702:24	1|0:33,46:80:99:122,0,781:24
 12	21328424	rs4149040	G	C	2565.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=0.888;DB;DP=252;Dels=0.00;FS=9.75;HaplotypeScore=0.608;MLEAC=2;MLEAF=0.333;MQ=59.49;MQ0=0;MQRankSum=1.17;QD=15.18;ReadPosRankSum=0.966;SB=-1.181e+03;VQSLOD=7.55;culprit=FS	GT:AD:DP:GQ:PL:TP	0|1:40,49:89:99:1534,0,1308:127	0|0:83,0:83:99:0,241,3117:127	1|0:45,35:80:99:1070,0,1402:127
 12	29935607	rs7305813	A	C	810.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=0.307;DB;DP=203;Dels=0.00;FS=14.33;HaplotypeScore=1.98;MLEAC=1;MLEAF=0.167;MQ=59.57;MQ0=0;MQRankSum=-1.640e-01;QD=12.86;ReadPosRankSum=-7.330e-01;SB=-2.950e+02;VQSLOD=7.90;culprit=FS	GT:AD:DP:GQ:PL:TP	0|0:71,0:71:99:0,193,2479:127	0|0:69,0:69:99:0,196,2464:127	0|1:28,35:63:99:845,0,847:127
 12	39118960	rs79977305	G	A	1330.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=6.31;DB;DP=263;Dels=0.00;FS=0.597;HaplotypeScore=1.46;MLEAC=1;MLEAF=0.167;MQ=58.83;MQ0=0;MQRankSum=1.92;QD=13.57;ReadPosRankSum=-5.480e-01;SB=-7.360e+02;VQSLOD=7.37;culprit=MQRankSum	GT:AD:DP:GQ:PL:TP	0|0:81,0:81:99:0,241,2869:127	0|0:84,0:84:99:0,229,2712:127	0|1:51,47:98:99:1365,0,1343:127
+12	48120010	rs3829317	A	C	4015.01	PASS	AC=5;AF=0.833;AN=6;BaseQRankSum=1.66;DB;DP=162;Dels=0.00;FS=1.83;HaplotypeScore=2.18;MLEAC=5;MLEAF=0.833;MQ=59.48;MQ0=0;MQRankSum=0.154;QD=24.78;ReadPosRankSum=0.482;SB=-2.240e+03;VQSLOD=8.13;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,63:63:99:2024,163,0:110	1|0:24,27:51:99:550,0,540:110	1|1:0,48:48:99:1441,111,0:110
+12	58867437	rs140632614	G	T	829.25	VQSRTrancheSNP99.00to99.90	AC=4;AF=0.667;AN=6;BaseQRankSum=1.41;DB;DP=144;Dels=0.00;FS=68.61;HaplotypeScore=0.867;MLEAC=4;MLEAF=0.667;MQ=35.15;MQ0=16;MQRankSum=-7.440e+00;QD=5.76;ReadPosRankSum=-2.450e-01;SB=-1.011e+00;VQSLOD=-1.649e+01;culprit=FS	GT:AD:DP:GQ:PL:TP	0|1:34,24:58:99:291,0,1100:44	0|1:42,8:50:61:61,0,1319:44	1|1:1,35:36:45:519,45,0:44
+12	68921960	.	TATTTTC	T	803.22	VQSRTrancheINDEL99.90to100.00	AC=3;AF=0.500;AN=6;BaseQRankSum=9.86;DP=270;FS=190.60;HaplotypeScore=223.07;MLEAC=3;MLEAF=0.500;MQ=54.84;MQ0=1;MQRankSum=-4.552e+00;QD=2.97;ReadPosRankSum=-6.674e+00;SB=-1.330e-03;VQSLOD=-1.777e+01;culprit=FS	GT:AD:DP:GQ:PL:TP	0/1:69,22:95:99:394,0,4974:101	0/1:87,0:86:99:355,0,5072:101	0/1:83,0:84:99:102,0,5546:101
+12	79238948	rs10778234	C	T	3719.01	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=8.19;DB;DP=241;Dels=0.00;FS=7.07;HaplotypeScore=0.245;MLEAC=3;MLEAF=0.500;MQ=59.41;MQ0=0;MQRankSum=-3.810e-01;QD=15.43;ReadPosRankSum=0.579;SB=-1.864e+03;VQSLOD=7.81;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0/1:42,35:77:99:1049,0,1213:127	0/1:37,32:69:99:988,0,957:127	0/1:42,53:95:99:1682,0,1062:127
+12	90856710	rs825945	T	C	1096.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=-6.342e+00;DB;DP=193;Dels=0.00;FS=2.16;HaplotypeScore=0.00;MLEAC=2;MLEAF=0.333;MQ=59.64;MQ0=0;MQRankSum=2.07;QD=9.70;ReadPosRankSum=-1.697e+00;SB=-5.710e+02;VQSLOD=7.31;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|0:80,0:80:99:0,232,3042:127	0|1:30,21:51:99:511,0,921:127	0|1:36,26:62:99:624,0,1085:127
+12	101460237	rs7965836	C	T	3485.01	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=-2.070e+00;DB;DP=247;Dels=0.00;FS=2.89;HaplotypeScore=0.656;MLEAC=3;MLEAF=0.500;MQ=59.83;MQ0=0;MQRankSum=-6.550e-01;QD=14.11;ReadPosRankSum=-9.830e-01;SB=-1.581e+03;VQSLOD=8.49;culprit=MQ	GT:AD:DP:GQ:PL:TP	0/1:40,43:83:99:1287,0,1179:127	0/1:44,36:80:99:1068,0,1363:127	0/1:43,41:84:99:1130,0,1321:127
+12	112830546	rs150699511	C	A	487.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-1.014e+00;DB;DP=179;Dels=0.00;FS=0.860;HaplotypeScore=2.34;MLEAC=1;MLEAF=0.167;MQ=59.16;MQ0=0;MQRankSum=1.14;QD=8.86;ReadPosRankSum=0.950;SB=-2.870e+02;VQSLOD=8.11;culprit=QD	GT:AD:DP:GQ:PL:TP	0|0:62,0:62:99:0,166,2141:127	0|1:34,21:55:99:522,0,751:127	0|0:62,0:62:99:0,141,1808:127
+12	122942070	rs10744217	A	G	1812.24	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=-3.353e+00;DB;DP=179;Dels=0.00;FS=0.663;HaplotypeScore=1.42;MLEAC=3;MLEAF=0.500;MQ=58.69;MQ0=0;MQRankSum=0.992;QD=15.10;ReadPosRankSum=1.08;SB=-5.860e+02;VQSLOD=8.48;culprit=FS	GT:AD:DP:GQ:PL:TP	1|0:35,36:71:99:762,0,1100:92	1|1:0,48:49:93:1095,93,0:92	0|0:59,0:59:99:0,120,1560:92
+12	130522866	rs1613499	C	T	3975.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=130;Dels=0.00;FS=0.00;HaplotypeScore=0.633;MLEAC=6;MLEAF=1.00;MQ=59.11;MQ0=0;QD=30.58;SB=-1.386e+03;VQSLOD=10.23;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,57:57:99:1939,144,0:73	1|1:0,34:34:75:990,75,0:73	1|1:0,39:39:78:1046,78,0:73
+13	21520045	rs9509464	A	C	5103.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=195;Dels=0.00;FS=0.00;HaplotypeScore=1.61;MLEAC=6;MLEAF=1.00;MQ=47.37;MQ0=0;QD=26.17;SB=-2.582e+03;VQSLOD=5.19;culprit=MQ	GT:AD:DP:GQ:PL:TP	1|1:0,71:71:99:2106,186,0:98	1|1:0,78:78:99:1856,162,0:98	1|1:0,46:46:99:1141,99,0:98
 13	28463938	rs954750	C	T	2740.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=3.21;DB;DP=242;Dels=0.00;FS=0.00;HaplotypeScore=4.01;MLEAC=2;MLEAF=0.333;MQ=59.46;MQ0=0;MQRankSum=0.880;QD=16.61;ReadPosRankSum=-6.170e-01;SB=-1.312e+03;VQSLOD=6.77;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|0:40,43:83:99:1362,0,1242:127	1|0:35,47:82:99:1417,0,1010:127	0|0:76,0:77:99:0,205,2635:127
+13	37484693	rs1571317	T	C	971.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-2.315e+00;DB;DP=229;Dels=0.00;FS=3.05;HaplotypeScore=0.660;MLEAC=1;MLEAF=0.167;MQ=58.51;MQ0=0;MQRankSum=1.88;QD=12.78;ReadPosRankSum=-3.430e-01;SB=-5.750e+02;VQSLOD=7.31;culprit=FS	GT:AD:DP:GQ:PL:TP	0|0:94,0:94:99:0,253,3258:127	0|1:35,41:76:99:1006,0,924:127	0|0:59,0:59:99:0,147,1872:127
+13	46407442	rs11147990	G	A	2668.24	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=1.23;DB;DP=199;Dels=0.00;FS=0.530;HaplotypeScore=1.80;MLEAC=3;MLEAF=0.500;MQ=58.99;MQ0=0;MQRankSum=0.023;QD=20.21;ReadPosRankSum=-4.520e-01;SB=-1.651e+03;VQSLOD=9.20;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	0|1:36,33:69:99:994,0,1056:127	0|0:67,0:67:99:0,141,1821:127	1|1:0,63:63:99:1719,135,0:127
+13	56456878	rs2997102	C	A	9454.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=257;Dels=0.00;FS=0.00;HaplotypeScore=0.954;MLEAC=6;MLEAF=1.00;MQ=59.57;MQ0=0;QD=36.79;SB=-4.826e+03;VQSLOD=11.51;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,99:99:99:3692,280,0:127	1|1:0,88:88:99:3239,253,0:127	1|1:0,70:70:99:2523,193,0:127
+13	65275154	rs359338	G	A	1963.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=1.73;DB;DP=173;Dels=0.00;FS=3.63;HaplotypeScore=0.948;MLEAC=2;MLEAF=0.333;MQ=59.49;MQ0=0;MQRankSum=-6.760e-01;QD=14.54;ReadPosRankSum=0.399;SB=-1.054e+03;VQSLOD=8.45;culprit=FS	GT:AD:DP:GQ:PL:TP	1|0:31,28:59:99:883,0,990:108	1|0:39,37:76:99:1119,0,1112:108	0|0:38,0:38:99:0,108,1390:108
+13	74611546	rs73214804	C	T	639.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-4.870e-01;DB;DP=164;Dels=0.00;FS=0.00;HaplotypeScore=1.41;MLEAC=1;MLEAF=0.167;MQ=58.95;MQ0=0;MQRankSum=0.902;QD=11.21;ReadPosRankSum=-5.410e-01;SB=-3.000e+02;VQSLOD=8.88;culprit=FS	GT:AD:DP:GQ:PL:TP	0|0:53,0:53:99:0,135,1773:110	0|0:53,1:54:99:0,111,1440:110	0|1:24,33:57:99:674,0,388:110
+13	83670546	.	G	A	169.26	VQSRTrancheSNP99.00to99.90	AC=2;AF=0.333;AN=6;BaseQRankSum=5.30;DP=178;Dels=0.00;FS=0.00;HaplotypeScore=36.51;MLEAC=2;MLEAF=0.333;MQ=42.20;MQ0=11;MQRankSum=-6.654e+00;QD=1.38;ReadPosRankSum=-7.610e-01;SB=-1.340e+02;VQSLOD=-2.041e+01;culprit=QD	GT:AD:DP:GQ:PL:TP	0|1:37,16:53:99:139,0,523:90	0|0:51,4:55:91:0,91,1393:90	1|0:58,12:70:69:69,0,1214:90
 13	92742124	rs2148446	G	A	1884.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=-2.340e+00;DB;DP=213;Dels=0.00;FS=0.00;HaplotypeScore=0.316;MLEAC=2;MLEAF=0.333;MQ=59.37;MQ0=0;MQRankSum=-2.935e+00;QD=12.99;ReadPosRankSum=-5.790e-01;SB=-9.540e+02;VQSLOD=7.28;culprit=MQRankSum	GT:AD:DP:GQ:PL:TP	0|1:38,38:76:99:1130,0,1190:127	0|0:68,0:68:99:0,190,2454:127	1|0:38,31:69:99:793,0,1094:127
+13	101976707	rs527328	A	T	9240.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=259;Dels=0.00;FS=0.00;HaplotypeScore=0.245;MLEAC=6;MLEAF=1.00;MQ=53.28;MQ0=0;QD=35.68;SB=-4.696e+03;VQSLOD=7.52;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|1:0,92:92:99:3410,262,0:127	1|1:0,88:88:99:3195,247,0:127	1|1:1,78:79:99:2635,202,0:127
+13	109990322	rs7328109	T	C	7198.01	PASS	AC=5;AF=0.833;AN=6;BaseQRankSum=-2.487e+00;DB;DP=245;Dels=0.00;FS=5.89;HaplotypeScore=1.88;MLEAC=5;MLEAF=0.833;MQ=59.60;MQ0=0;MQRankSum=-2.360e-01;QD=29.38;ReadPosRankSum=0.136;SB=-3.593e+03;VQSLOD=8.89;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,79:79:99:2825,220,0:127	1|0:33,54:87:99:1591,0,933:127	1|1:0,79:79:99:2782,214,0:127
+14	20313371	rs10137604	A	C	2154.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=-4.894e+00;DB;DP=255;Dels=0.00;FS=0.00;HaplotypeScore=2.05;MLEAC=2;MLEAF=0.333;MQ=56.32;MQ0=1;MQRankSum=1.61;QD=13.72;ReadPosRankSum=0.126;SB=-1.124e+03;VQSLOD=5.62;culprit=FS	GT:AD:DP:GQ:PL:TP	0|1:40,48:88:99:1426,0,1229:127	0|0:98,0:98:99:0,268,3527:127	1|0:43,26:69:99:767,0,1284:127
+14	28032147	rs67442200	AT	A	9959	PASS	AC=6;AF=1.00;AN=6;DB;DP=259;FS=0.00;HaplotypeScore=15.29;MLEAC=6;MLEAF=1.00;MQ=60.66;MQ0=0;QD=38.45;RPA=3,2;RU=T;SB=-4.646e+03;STR;VQSLOD=6.01;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:2,86:91:99:3493,259,0:127	1|1:3,76:79:99:3025,226,0:127	1|1:3,86:89:99:3441,256,0:127
+14	38160457	rs2181345	A	C	4683.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=169;Dels=0.00;FS=0.00;HaplotypeScore=2.54;MLEAC=6;MLEAF=1.00;MQ=59.00;MQ0=0;QD=27.71;SB=-2.390e+03;VQSLOD=9.99;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,56:56:99:1818,141,0:107	1|1:0,58:58:99:1523,123,0:107	1|1:0,53:55:99:1342,108,0:107
+14	47568119	rs1952206	T	C	3399.01	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=-9.721e+00;DB;DP=230;Dels=0.00;FS=0.483;HaplotypeScore=0.658;MLEAC=3;MLEAF=0.500;MQ=59.54;MQ0=0;MQRankSum=-4.790e-01;QD=14.78;ReadPosRankSum=1.26;SB=-1.667e+03;VQSLOD=9.16;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	0/1:41,43:84:99:1239,0,1315:127	0/1:28,43:71:99:1178,0,816:127	0/1:40,35:75:99:982,0,1314:127
+14	56951735	rs1189276	G	A	6939.01	PASS	AC=5;AF=0.833;AN=6;BaseQRankSum=3.88;DB;DP=235;Dels=0.00;FS=9.37;HaplotypeScore=0.906;MLEAC=5;MLEAF=0.833;MQ=59.52;MQ0=0;MQRankSum=-5.500e-01;QD=29.53;ReadPosRankSum=1.10;SB=-3.438e+03;VQSLOD=8.76;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:1,92:93:99:3558,274,0:127	1|1:0,60:60:99:2120,165,0:127	1|0:40,42:82:99:1261,0,1194:127
+14	68051087	rs11628035	G	A	4028.01	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=3.93;DB;DP=236;Dels=0.00;FS=8.18;HaplotypeScore=1.33;MLEAC=3;MLEAF=0.500;MQ=58.83;MQ0=0;MQRankSum=1.83;QD=25.49;ReadPosRankSum=2.59;SB=-1.989e+03;VQSLOD=6.79;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	1|0:38,39:77:99:1265,0,1112:127	1|1:0,81:81:99:2763,214,0:127	0|0:78,0:78:99:0,208,2673:127
+14	77813103	rs59474357	G	GT	328.58	PASS	AC=5;AF=0.833;AN=6;BaseQRankSum=-2.653e+00;DB;DP=164;FS=5.22;HaplotypeScore=117.86;MLEAC=5;MLEAF=0.833;MQ=52.18;MQ0=0;MQRankSum=1.26;QD=2.00;RPA=24,25;RU=T;ReadPosRankSum=-1.030e+00;SB=-1.639e+02;STR;VQSLOD=2.16;culprit=QD	GT:AD:DP:GQ:PL:TP	1|0:51,1:53:18:105,0,18:6	1|1:58,1:59:13:150,13,0:6	0|1:34,13:52:1:123,6,0:6
+14	87263523	rs2542229	A	C	8104.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=239;Dels=0.00;FS=0.00;HaplotypeScore=0.631;MLEAC=6;MLEAF=1.00;MQ=59.17;MQ0=0;QD=33.91;SB=-4.393e+03;VQSLOD=11.30;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,77:77:99:2810,214,0:127	1|1:0,85:85:99:2877,223,0:127	1|1:0,77:77:99:2417,187,0:127
+14	96490389	rs72702859	C	G	3394.01	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=-6.529e+00;DB;DP=264;Dels=0.00;FS=2.75;HaplotypeScore=3.13;MLEAC=3;MLEAF=0.500;MQ=59.49;MQ0=0;MQRankSum=-1.258e+00;QD=18.25;ReadPosRankSum=-9.610e-01;SB=-1.966e+03;VQSLOD=6.34;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|0:34,52:86:99:1358,0,886:127	1|1:0,100:100:99:2036,180,0:127	0|0:78,0:78:99:0,184,2393:127
+14	106085773	rs2955055	C	T	2115.26	VQSRTrancheSNP99.00to99.90	AC=3;AF=0.500;AN=6;BaseQRankSum=-7.460e-01;DB;DP=172;Dels=0.00;FS=18.42;HaplotypeScore=2.95;MLEAC=3;MLEAF=0.500;MQ=35.68;MQ0=18;MQRankSum=3.25;QD=12.30;ReadPosRankSum=0.100;SB=-5.530e+02;VQSLOD=1.29;culprit=MQ	GT:AD:DP:GQ:PL:TP	0/1:32,33:65:99:600,0,554:80	0/1:30,42:72:99:1001,0,400:80	0/1:12,23:35:80:553,0,80:80
+15	23743658	rs142070302	T	A	1007.26	VQSRTrancheSNP99.90to100.00	AC=3;AF=0.500;AN=6;BaseQRankSum=-3.164e+00;DB;DP=408;Dels=0.00;FS=166.85;HaplotypeScore=53.54;MLEAC=3;MLEAF=0.500;MQ=38.96;MQ0=14;MQRankSum=-6.283e+00;QD=2.47;ReadPosRankSum=2.49;SB=-1.079e-02;VQSLOD=-3.048e+02;culprit=FS	GT:AD:DP:GQ:PL:TP	0/1:104,39:143:99:365,0,2802:45	0/1:99,40:139:99:636,0,2335:45	0/1:97,28:126:45:45,0,2324:45
+15	32615218	.	G	GA	82.61	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=0.361;DP=225;FS=0.00;HaplotypeScore=2.49;MLEAC=4;MLEAF=0.667;MQ=5.76;MQ0=211;MQRankSum=-8.760e-01;QD=0.560;RPA=9,10;RU=A;ReadPosRankSum=-1.159e+00;SB=-1.330e-03;STR;VQSLOD=3.74;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:41,19:3:9:88,9,0:5	1|0:62,24:5:44:44,0,77:5	1|0:76,0:1:2:0,3,31:5
+15	42065422	rs2925337	A	C	5231.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=176;Dels=0.00;FS=0.00;HaplotypeScore=0.00;MLEAC=6;MLEAF=1.00;MQ=59.68;MQ0=0;QD=29.72;SB=-3.112e+03;VQSLOD=11.03;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|1:0,54:54:99:1770,138,0:127	1|1:0,68:68:99:1750,138,0:127	1|1:0,54:54:99:1711,132,0:127
+15	53677501	.	T	G	770.26	VQSRTrancheSNP99.90to100.00	AC=3;AF=0.500;AN=6;BaseQRankSum=-8.969e+00;DP=210;Dels=0.00;FS=347.47;HaplotypeScore=2.09;MLEAC=3;MLEAF=0.500;MQ=58.31;MQ0=0;MQRankSum=0.438;QD=3.67;ReadPosRankSum=-6.409e+00;SB=-5.499e-03;VQSLOD=-3.071e+02;culprit=FS	GT:AD:DP:GQ:PL:TP	0/1:59,20:79:94:94,0,1389:94	0/1:41,31:72:99:438,0,721:94	0/1:37,21:59:99:277,0,521:94
+15	62088952	rs35033959	A	T	3546.01	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=-3.220e+00;DB;DP=241;Dels=0.00;FS=14.29;HaplotypeScore=0.277;MLEAC=3;MLEAF=0.500;MQ=59.77;MQ0=0;MQRankSum=-1.969e+00;QD=14.71;ReadPosRankSum=0.341;SB=-1.970e+03;VQSLOD=6.38;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0/1:45,44:89:99:1265,0,1533:127	0/1:34,38:72:99:1133,0,1137:127	0/1:41,39:80:99:1148,0,1303:127
+15	71657506	.	A	G	140.40	VQSRTrancheSNP99.00to99.90	AC=2;AF=0.333;AN=6;BaseQRankSum=-1.611e+00;DP=133;Dels=0.00;FS=35.59;HaplotypeScore=4.06;MLEAC=2;MLEAF=0.333;MQ=51.29;MQ0=1;MQRankSum=-3.344e+00;QD=1.53;ReadPosRankSum=-3.999e+00;SB=-5.015e+01;VQSLOD=-4.805e+00;culprit=QD	GT:AD:DP:GQ:PL:TP	1|0:46,11:57:93:93,0,851:15	1|0:23,12:35:86:86,0,299:15	0|0:33,8:41:15:0,15,503:15
+15	83241984	rs11336258	AT	A	1748.21	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=-1.821e+00;DB;DP=199;FS=5.74;HaplotypeScore=29.94;MLEAC=4;MLEAF=0.667;MQ=59.67;MQ0=0;MQRankSum=1.12;QD=8.78;RPA=15,14;RU=T;ReadPosRankSum=-1.763e+00;SB=-7.830e+02;STR;VQSLOD=3.49;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|1:41,27:72:99:434,0,434:118	0|1:36,26:66:99:470,0,275:118	1|1:11,44:61:99:895,118,0:118
+15	92857342	rs12101550	A	G	2734.26	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=-7.995e+00;DB;DP=226;Dels=0.00;FS=3.76;HaplotypeScore=1.57;MLEAC=3;MLEAF=0.500;MQ=59.41;MQ0=0;MQRankSum=0.761;QD=12.10;ReadPosRankSum=-5.870e-01;SB=-1.469e+03;VQSLOD=9.15;culprit=FS	GT:AD:DP:GQ:PL:TP	0/1:45,37:82:99:853,0,1356:127	0/1:29,40:69:99:966,0,852:127	0/1:38,37:75:99:954,0,1085:127
+15	100799787	rs4965610	C	T	2133.26	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=8.37;DB;DP=230;Dels=0.00;FS=1.04;HaplotypeScore=0.543;MLEAC=3;MLEAF=0.500;MQ=58.84;MQ0=0;MQRankSum=0.169;QD=9.28;ReadPosRankSum=-9.800e-02;SB=-1.328e+03;VQSLOD=7.88;culprit=QD	GT:AD:DP:GQ:PL:TP	0/1:40,36:76:99:948,0,701:127	0/1:45,38:83:99:761,0,635:127	0/1:35,36:71:99:463,0,706:127
+16	5754758	rs11644707	C	T	1012.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=1.78;DB;DP=161;Dels=0.00;FS=0.00;HaplotypeScore=2.46;MLEAC=2;MLEAF=0.333;MQ=59.71;MQ0=0;MQRankSum=0.699;QD=9.64;ReadPosRankSum=2.62;SB=-5.580e+02;VQSLOD=7.10;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	0|1:31,26:57:99:627,0,943:125	0|0:56,0:56:99:0,126,1597:125	1|0:32,16:48:99:424,0,785:125
 16	11397080	rs1019552	T	C	2295.44	PASS	AC=6;AF=1.00;AN=6;DB;DP=169;Dels=1.000e-02;FS=0.00;HaplotypeScore=8.22;MLEAC=6;MLEAF=1.00;MQ=59.32;MQ0=0;QD=13.58;SB=-2.379e+03;VQSLOD=4.38;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|1:0,71:71:96:1220,96,0:9	1|1:0,48:48:9:78,9,0:9	1|1:0,48:48:81:1035,81,0:9
+16	19633462	rs11371821	C	CT	239.23	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=-4.097e+00;DB;DP=182;FS=1.34;HaplotypeScore=21.26;MLEAC=4;MLEAF=0.667;MQ=50.55;MQ0=0;MQRankSum=-1.358e+00;QD=1.31;RPA=22,23;RU=T;ReadPosRankSum=-3.963e+00;SB=-9.798e+01;STR;VQSLOD=2.31;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	1|0:61,0:63:45:70,0,45:24	1|1:44,15:58:28:192,28,0:24	0|1:52,0:60:28:28,0,37:24
+16	30509723	rs7193693	A	G	497.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-4.054e+00;DB;DP=167;Dels=0.00;FS=0.00;HaplotypeScore=1.16;MLEAC=1;MLEAF=0.167;MQ=57.89;MQ0=0;MQRankSum=-5.450e-01;QD=9.38;ReadPosRankSum=0.541;SB=-2.410e+02;VQSLOD=8.43;culprit=FS	GT:AD:DP:GQ:PL:TP	0|0:57,0:57:99:0,153,2009:101	0|0:57,0:57:99:0,102,1304:101	0|1:26,27:53:99:532,0,526:101
+16	34610294	rs72812776	C	T	603.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-1.361e+00;DB;DP=270;Dels=0.00;FS=6.07;HaplotypeScore=3.16;MLEAC=1;MLEAF=0.167;MQ=59.66;MQ0=0;MQRankSum=1.60;QD=7.83;ReadPosRankSum=0.296;SB=-2.820e+02;VQSLOD=3.37;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|0:123,0:123:99:0,343,4443:127	0|0:69,1:70:99:0,184,2397:127	0|1:48,29:77:99:638,0,1483:127
+16	55910673	rs17265788	G	C	2345.24	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=2.66;DB;DP=208;Dels=0.00;FS=3.16;HaplotypeScore=1.92;MLEAC=3;MLEAF=0.500;MQ=59.67;MQ0=0;MQRankSum=-3.330e-01;QD=16.63;ReadPosRankSum=-4.250e-01;SB=-1.468e+03;VQSLOD=8.37;culprit=MQ	GT:AD:DP:GQ:PL:TP	0|1:33,33:66:99:769,0,761:122	0|0:67,0:67:99:0,123,1506:122	1|1:0,75:75:99:1621,132,0:122
+16	65776985	rs72784548	T	C	3527.01	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=5.44;DB;DP=195;Dels=0.00;FS=1.23;HaplotypeScore=0.879;MLEAC=4;MLEAF=0.667;MQ=58.78;MQ0=0;MQRankSum=1.63;QD=18.09;ReadPosRankSum=1.36;SB=-1.904e+03;VQSLOD=7.68;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	1|1:0,68:68:99:2351,181,0:127	1|0:33,28:61:99:586,0,710:127	1|0:31,35:66:99:590,0,468:127
+16	77418810	rs71137811	T	TG	7389	PASS	AC=5;AF=0.833;AN=6;BaseQRankSum=0.643;DB;DP=244;FS=2.25;HaplotypeScore=114.41;MLEAC=5;MLEAF=0.833;MQ=61.09;MQ0=0;MQRankSum=2.49;QD=30.28;ReadPosRankSum=2.98;SB=-2.597e+03;VQSLOD=2.91;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|1:8,84:93:99:3558,264,0:127	1|1:8,65:75:99:2723,179,0:127	1|0:42,32:76:99:1108,0,899:127
+16	83153777	rs4395063	T	C	3143.25	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=-1.771e+00;DB;DP=205;Dels=0.00;FS=1.80;HaplotypeScore=1.04;MLEAC=4;MLEAF=0.667;MQ=59.55;MQ0=0;MQRankSum=-1.380e+00;QD=15.33;ReadPosRankSum=0.424;SB=-2.104e+03;VQSLOD=8.82;culprit=FS	GT:AD:DP:GQ:PL:TP	0|1:31,31:62:99:867,0,865:127	0|1:45,28:73:99:412,0,801:127	1|1:0,70:70:99:1906,150,0:127
+16	88914235	rs12932521	C	T	329.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=1.98;DB;DP=114;Dels=0.00;FS=1.93;HaplotypeScore=0.940;MLEAC=1;MLEAF=0.167;MQ=58.60;MQ0=0;MQRankSum=0.854;QD=7.84;ReadPosRankSum=-3.390e-01;SB=-2.970e+02;VQSLOD=8.10;culprit=QD	GT:AD:DP:GQ:PL:TP	0|0:43,0:43:99:0,102,1192:39	0|0:29,0:29:39:0,39,468:39	0|1:15,26:42:99:364,0,105:39
+17	5764764	rs2309483	C	G	4071.01	PASS	AC=5;AF=0.833;AN=6;BaseQRankSum=4.24;DB;DP=154;Dels=0.00;FS=0.00;HaplotypeScore=4.41;MLEAC=5;MLEAF=0.833;MQ=58.88;MQ0=0;MQRankSum=0.456;QD=26.44;ReadPosRankSum=0.385;SB=-1.791e+03;VQSLOD=6.95;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|1:0,61:62:99:2049,165,0:107	1|0:23,26:49:99:648,0,482:107	1|1:0,43:43:99:1374,108,0:107
+17	14096866	.	C	T	489.26	VQSRTrancheSNP99.00to99.90	AC=3;AF=0.500;AN=6;BaseQRankSum=4.54;DP=181;Dels=0.00;FS=33.90;HaplotypeScore=1.08;MLEAC=3;MLEAF=0.500;MQ=24.52;MQ0=71;MQRankSum=0.353;QD=2.70;ReadPosRankSum=0.385;SB=-1.450e+02;VQSLOD=-6.217e+00;culprit=QD	GT:AD:DP:GQ:PL:TP	0/1:48,17:66:99:228,0,822:63	0/1:42,16:58:99:237,0,389:63	0/1:44,13:57:63:63,0,523:63
+17	21547285	rs62049731	A	C	1322.26	VQSRTrancheSNP99.90to100.00	AC=3;AF=0.500;AN=6;BaseQRankSum=-9.870e-01;DB;DP=569;Dels=0.00;FS=2.32;HaplotypeScore=42.45;MLEAC=3;MLEAF=0.500;MQ=44.59;MQ0=24;MQRankSum=-6.240e+00;QD=2.32;ReadPosRankSum=-5.171e+00;SB=-6.369e+02;VQSLOD=-3.953e+01;culprit=DP	GT:AD:DP:GQ:PL:TP	0/1:146,28:174:99:217,0,4187:127	0/1:162,56:218:99:804,0,4298:127	0/1:139,38:177:99:340,0,3842:127
+17	33513649	rs3744366	A	G	3743.01	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=-3.729e+00;DB;DP=213;Dels=0.00;FS=0.526;HaplotypeScore=2.91;MLEAC=4;MLEAF=0.667;MQ=58.99;MQ0=0;MQRankSum=-1.520e-01;QD=17.57;ReadPosRankSum=0.530;SB=-2.067e+03;VQSLOD=8.02;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|0:44,40:84:99:1083,0,1341:127	1|1:0,67:67:99:2074,162,0:127	0|1:37,24:62:99:586,0,1015:127
+17	44809197	rs116187585	A	G	8703.01	PASS	AC=6;AF=1.00;AN=6;BaseQRankSum=-5.540e-01;DB;DP=254;Dels=0.00;FS=3.49;HaplotypeScore=4.06;MLEAC=6;MLEAF=1.00;MQ=59.58;MQ0=0;MQRankSum=1.68;QD=34.26;ReadPosRankSum=1.48;SB=-4.228e+03;VQSLOD=6.61;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|1:1,96:97:99:3470,268,0:127	1|1:0,82:84:99:2846,220,0:127	1|1:0,73:73:99:2387,184,0:127
+17	55158811	rs7217371	G	A	2849.26	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=8.62;DB;DP=206;Dels=0.00;FS=0.511;HaplotypeScore=1.10;MLEAC=3;MLEAF=0.500;MQ=59.66;MQ0=0;MQRankSum=0.103;QD=13.83;ReadPosRankSum=0.958;SB=-1.448e+03;VQSLOD=9.94;culprit=MQ	GT:AD:DP:GQ:PL:TP	0/1:40,31:72:99:982,0,1147:127	0/1:35,40:75:99:1236,0,984:127	0/1:32,27:59:99:670,0,882:127
 17	66991216	rs61697543	T	C	738.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-4.530e+00;DB;DP=252;Dels=0.00;FS=1.49;HaplotypeScore=1.71;MLEAC=1;MLEAF=0.167;MQ=59.36;MQ0=0;MQRankSum=0.771;QD=10.86;ReadPosRankSum=-7.240e-01;SB=-3.930e+02;VQSLOD=8.89;culprit=QD	GT:AD:DP:GQ:PL:TP	0|0:95,0:95:99:0,268,3511:127	0|0:89,0:89:99:0,235,3058:127	0|1:33,35:68:99:773,0,965:127
+17	75679403	.	TGTG	T	1024.22	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=8.62;DP=161;FS=1.69;HaplotypeScore=259.32;MLEAC=2;MLEAF=0.333;MQ=51.57;MQ0=0;MQRankSum=-8.617e+00;QD=9.48;ReadPosRankSum=-4.980e-01;SB=-4.680e+02;VQSLOD=1.76;culprit=QD	GT:AD:DP:GQ:PL:TP	1|0:29,19:58:99:711,0,1547:127	1|0:34,11:50:99:361,0,1922:127	0|0:53,0:53:99:0,157,3186:127
+18	1629264	rs5025369	A	G	86.26	VQSRTrancheSNP99.00to99.90	AC=2;AF=0.333;AN=6;BaseQRankSum=-1.739e+00;DB;DP=149;Dels=0.00;FS=15.36;HaplotypeScore=7.14;MLEAC=2;MLEAF=0.333;MQ=39.72;MQ0=11;MQRankSum=-2.543e+00;QD=0.810;ReadPosRankSum=0.088;SB=-3.380e+01;VQSLOD=-6.205e+00;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|1:51,15:66:50:50,0,1143:49	0|0:37,5:42:81:0,81,941:49	1|0:35,6:41:75:75,0,818:49
+18	10010426	rs4797393	T	C	4197.01	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=0.425;DB;DP=210;Dels=0.00;FS=1.13;HaplotypeScore=1.27;MLEAC=4;MLEAF=0.667;MQ=59.43;MQ0=0;MQRankSum=-2.916e+00;QD=19.99;ReadPosRankSum=0.379;SB=-2.001e+03;VQSLOD=7.03;culprit=MQRankSum	GT:AD:DP:GQ:PL:TP	1|1:0,65:65:99:2247,181,0:127	1|0:35,39:75:99:1170,0,967:127	1|0:41,29:70:99:780,0,1052:127
+18	20025030	rs4800406	A	G	1113.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-3.700e+00;DB;DP=230;Dels=0.00;FS=0.00;HaplotypeScore=1.68;MLEAC=1;MLEAF=0.167;MQ=59.56;MQ0=0;MQRankSum=1.64;QD=12.65;ReadPosRankSum=-1.166e+00;SB=-6.160e+02;VQSLOD=8.66;culprit=FS	GT:AD:DP:GQ:PL:TP	0|0:78,0:78:99:0,223,2978:127	0|1:46,42:88:99:1148,0,1460:127	0|0:64,0:64:99:0,175,2303:127
+18	30361151	rs35024407	T	TTG	9768	PASS	AC=6;AF=1.00;AN=6;DB;DP=234;FS=0.00;HaplotypeScore=13.10;MLEAC=6;MLEAF=1.00;MQ=60.40;MQ0=2;QD=41.74;RPA=3,4;RU=TG;SB=-4.721e+03;STR;VQSLOD=5.50;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:7,80:86:99:3686,241,0:127	1|1:8,77:85:99:3454,229,0:127	1|1:3,58:61:99:2628,172,0:127
+18	41449098	rs2045671	C	T	8380.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=237;Dels=0.00;FS=0.00;HaplotypeScore=1.35;MLEAC=6;MLEAF=1.00;MQ=59.65;MQ0=0;QD=35.36;SB=-4.462e+03;VQSLOD=11.65;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,76:76:99:2756,214,0:127	1|1:0,75:75:99:2635,202,0:127	1|1:0,86:86:99:2989,229,0:127
 18	50973169	rs11661305	A	G	5320.01	PASS	AC=5;AF=0.833;AN=6;BaseQRankSum=2.75;DB;DP=196;Dels=0.00;FS=2.57;HaplotypeScore=1.13;MLEAC=5;MLEAF=0.833;MQ=57.73;MQ0=0;MQRankSum=0.246;QD=27.14;ReadPosRankSum=0.186;SB=-2.411e+03;VQSLOD=7.50;culprit=MQ	GT:AD:DP:GQ:PL:TP	1|1:0,74:74:99:2621,202,0:127	1|0:30,37:67:99:1009,0,842:127	1|1:0,55:55:99:1690,132,0:127
+18	59836086	.	A	AT	575.46	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=-1.815e+00;DP=231;FS=20.46;HaplotypeScore=140.20;MLEAC=3;MLEAF=0.500;MQ=56.52;MQ0=0;MQRankSum=1.20;QD=2.49;RPA=20,21;RU=T;ReadPosRankSum=-1.968e+00;SB=-2.575e+02;STR;VQSLOD=-3.166e-01;culprit=FS	GT:AD:DP:GQ:PL:TP	0/1:52,29:86:11:309,0,11:9	0/1:72,2:82:63:109,0,63:9	0/1:31,24:63:15:205,0,15:9
+18	67950442	rs62091919	T	G	1779.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=-2.517e+00;DB;DP=195;Dels=0.00;FS=0.562;HaplotypeScore=1.55;MLEAC=2;MLEAF=0.333;MQ=59.26;MQ0=0;MQRankSum=6.000e-03;QD=12.89;ReadPosRankSum=-2.680e-01;SB=-9.170e+02;VQSLOD=9.63;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	0|1:40,32:72:99:967,0,1318:127	0|0:55,2:57:99:0,141,1865:127	1|0:36,30:66:99:851,0,1042:127
+18	75204349	rs9948476	C	T	646.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=1.97;DB;DP=185;Dels=0.00;FS=0.817;HaplotypeScore=1.40;MLEAC=1;MLEAF=0.167;MQ=59.35;MQ0=0;MQRankSum=0.276;QD=10.95;ReadPosRankSum=-1.580e-01;SB=-3.210e+02;VQSLOD=9.37;culprit=FS	GT:AD:DP:GQ:PL:TP	0|0:65,0:66:99:0,168,2026:127	0|0:60,0:60:99:0,147,1768:127	0|1:34,25:59:99:681,0,893:127
+19	3739001	rs56138006	C	T	976.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=4.63;DB;DP=118;Dels=0.00;FS=3.68;HaplotypeScore=1.44;MLEAC=2;MLEAF=0.333;MQ=58.66;MQ0=0;MQRankSum=0.753;QD=11.22;ReadPosRankSum=1.11;SB=-6.380e+02;VQSLOD=7.45;culprit=QD	GT:AD:DP:GQ:PL:TP	0|1:30,22:52:99:612,0,559:45	0|0:31,0:31:45:0,45,527:45	1|0:12,23:35:93:403,0,93:45
 19	11730690	rs4804613	C	T	2578.26	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=5.06;DB;DP=197;Dels=0.00;FS=0.529;HaplotypeScore=0.00;MLEAC=3;MLEAF=0.500;MQ=59.36;MQ0=0;MQRankSum=0.416;QD=13.09;ReadPosRankSum=-5.540e-01;SB=-1.389e+03;VQSLOD=9.39;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0/1:21,26:47:99:586,0,623:127	0/1:42,45:87:99:1192,0,1055:127	0/1:32,30:63:99:839,0,932:127
+19	20782710	rs10413568	T	C	6304.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=203;Dels=0.00;FS=0.00;HaplotypeScore=0.692;MLEAC=6;MLEAF=1.00;MQ=59.56;MQ0=0;QD=31.05;SB=-2.367e+03;VQSLOD=10.77;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,74:74:99:2375,214,0:127	1|1:0,80:80:99:2480,217,0:127	1|1:0,49:49:99:1449,129,0:127
+19	28150587	rs4404191	A	G	7419.01	PASS	AC=6;AF=1.00;AN=6;DB;DP=209;Dels=0.00;FS=0.00;HaplotypeScore=0.245;MLEAC=6;MLEAF=1.00;MQ=59.61;MQ0=0;QD=35.50;SB=-3.717e+03;VQSLOD=11.99;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|1:0,70:70:99:2584,202,0:127	1|1:0,74:74:99:2509,193,0:127	1|1:0,65:65:99:2326,181,0:127
+19	37226152	rs3108217	G	C	2076.25	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=0.215;DB;DP=145;Dels=0.00;FS=2.16;HaplotypeScore=0.587;MLEAC=4;MLEAF=0.667;MQ=58.59;MQ0=0;MQRankSum=1.16;QD=14.32;ReadPosRankSum=-7.250e-01;SB=-1.027e+03;VQSLOD=7.49;culprit=FS	GT:AD:DP:GQ:PL:TP	1|0:25,30:55:99:789,0,709:63	1|1:0,29:29:63:772,63,0:63	0|1:35,25:61:99:557,0,789:63
+19	47012249	rs60424854	ATTTTT	A	1121.22	VQSRTrancheINDEL99.00to99.90	AC=2;AF=0.333;AN=6;BaseQRankSum=-1.922e+00;DB;DP=144;FS=61.85;HaplotypeScore=338.74;MLEAC=2;MLEAF=0.333;MQ=44.50;MQ0=1;MQRankSum=-2.345e+00;QD=10.29;RPA=16,11;RU=T;ReadPosRankSum=5.64;SB=-1.090e+02;STR;VQSLOD=-2.399e+00;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	1|0:30,9:43:99:371,0,961:89	1|0:44,15:63:99:798,0,1293:89	0|0:32,0:33:89:0,89,1424:89
 19	54644879	rs36636	G	A	1790.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=2.51;DB;DP=187;Dels=0.00;FS=4.41;HaplotypeScore=1.50;MLEAC=2;MLEAF=0.333;MQ=59.51;MQ0=0;MQRankSum=-1.266e+00;QD=13.88;ReadPosRankSum=-1.226e+00;SB=-1.081e+03;VQSLOD=7.80;culprit=FS	GT:AD:DP:GQ:PL:TP	1|0:30,27:57:99:811,0,726:127	1|0:31,41:72:99:1018,0,730:127	0|0:58,0:58:99:0,144,1815:127
+20	3199373	rs2422861	G	A	896.26	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=0.452;DB;DP=135;Dels=0.00;FS=0.664;HaplotypeScore=2.84;MLEAC=3;MLEAF=0.500;MQ=58.47;MQ0=0;MQRankSum=-2.170e-01;QD=6.64;ReadPosRankSum=0.111;SB=-6.080e+02;VQSLOD=7.71;culprit=QD	GT:AD:DP:GQ:PL:TP	0/1:32,24:56:99:472,0,640:127	0/1:25,15:40:99:229,0,603:127	0/1:20,19:39:99:234,0,379:127
+20	12166868	rs7272217	C	T	926.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-3.444e+00;DB;DP=200;Dels=0.00;FS=2.63;HaplotypeScore=0.841;MLEAC=1;MLEAF=0.167;MQ=58.94;MQ0=0;MQRankSum=-1.362e+00;QD=13.23;ReadPosRankSum=-1.055e+00;SB=-4.040e+02;VQSLOD=7.62;culprit=FS	GT:AD:DP:GQ:PL:TP	0|0:68,0:68:99:0,190,2486:127	0|0:62,0:62:99:0,169,2218:127	0|1:33,37:70:99:961,0,870:127
 20	21523349	rs73127889	A	G	2286.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=-6.469e+00;DB;DP=253;Dels=0.00;FS=3.73;HaplotypeScore=1.54;MLEAC=2;MLEAF=0.333;MQ=59.31;MQ0=0;MQRankSum=0.821;QD=13.69;ReadPosRankSum=1.51;SB=-8.640e+02;VQSLOD=7.84;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	0|1:44,48:92:99:1319,0,1356:127	0|0:86,0:86:99:0,235,2994:127	1|0:37,38:75:99:1006,0,1111:127
+20	29566945	rs6119037	C	G	1427.26	VQSRTrancheSNP99.00to99.90	AC=2;AF=0.333;AN=6;BaseQRankSum=-3.826e+00;DB;DP=271;Dels=0.00;FS=1.15;HaplotypeScore=2.76;MLEAC=2;MLEAF=0.333;MQ=47.42;MQ0=21;MQRankSum=2.42;QD=8.06;ReadPosRankSum=0.363;SB=-7.560e+02;VQSLOD=1.79;culprit=MQ	GT:AD:DP:GQ:PL:TP	1|0:59,32:91:99:782,0,1210:127	1|0:55,31:86:99:684,0,1422:127	0|0:94,0:94:99:0,208,2592:127
+20	41560394	rs761027	A	G	5634.01	PASS	AC=5;AF=0.833;AN=6;BaseQRankSum=-4.444e+00;DB;DP=222;Dels=0.00;FS=0.829;HaplotypeScore=1.14;MLEAC=5;MLEAF=0.833;MQ=59.60;MQ0=0;MQRankSum=0.392;QD=25.38;ReadPosRankSum=-6.400e-02;SB=-3.048e+03;VQSLOD=10.04;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,72:72:99:2228,187,0:127	1|1:0,82:82:99:2435,211,0:127	1|0:25,43:68:99:971,0,567:127
+20	51353834	rs241795	G	A	6115.01	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=7.91;DB;DP=268;Dels=0.00;FS=0.00;HaplotypeScore=0.245;MLEAC=4;MLEAF=0.667;MQ=59.38;MQ0=0;MQRankSum=-5.180e-01;QD=22.82;ReadPosRankSum=-2.530e-01;SB=-3.328e+03;VQSLOD=8.40;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|0:46,54:100:99:1663,0,1247:127	1|1:0,91:91:99:3189,247,0:127	0|1:36,41:77:99:1263,0,901:127
 20	59978756	rs1892320	T	C	3967.01	PASS	AC=4;AF=0.667;AN=6;BaseQRankSum=-2.562e+00;DB;DP=206;Dels=0.00;FS=14.25;HaplotypeScore=1.38;MLEAC=4;MLEAF=0.667;MQ=59.89;MQ0=0;MQRankSum=-2.890e-01;QD=19.26;ReadPosRankSum=0.221;SB=-2.292e+03;VQSLOD=7.62;culprit=MQ	GT:AD:DP:GQ:PL:TP	1|1:0,72:72:99:2337,181,0:127	1|0:38,29:67:99:743,0,1180:127	1|0:31,36:67:99:887,0,873:127
 21	10750991	.	C	T	602.26	VQSRTrancheSNP99.90to100.00	AC=2;AF=0.333;AN=6;BaseQRankSum=10.10;DP=735;DS;Dels=0.00;FS=27.02;HaplotypeScore=107.27;MLEAC=2;MLEAF=0.333;MQ=44.40;MQ0=2;MQRankSum=-1.456e+01;QD=1.24;ReadPosRankSum=0.063;SB=-8.430e+02;VQSLOD=-1.009e+02;culprit=DP	GT:AD:DP:GQ:PL:TP	1|0:164,78:243:99:357,0,5193:103	1|0:187,55:242:99:284,0,5252:103	0|0:199,50:250:99:0,103,5864:103
+21	18433036	rs969905	A	G	781.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-6.610e+00;DB;DP=229;Dels=0.00;FS=2.23;HaplotypeScore=1.29;MLEAC=1;MLEAF=0.167;MQ=59.55;MQ0=0;MQRankSum=0.300;QD=10.15;ReadPosRankSum=-3.840e-01;SB=-4.990e+02;VQSLOD=9.58;culprit=QD	GT:AD:DP:GQ:PL:TP	0|0:62,0:63:99:0,181,2380:127	0|0:89,0:89:99:0,244,3177:127	0|1:39,38:77:99:816,0,1256:127
 21	25702950	rs2260875	C	G	3462.01	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=0.156;DB;DP=237;Dels=0.00;FS=2.90;HaplotypeScore=1.54;MLEAC=3;MLEAF=0.500;MQ=59.04;MQ0=0;MQRankSum=0.386;QD=14.61;ReadPosRankSum=-1.455e+00;SB=-1.683e+03;VQSLOD=8.05;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	0/1:37,40:77:99:1282,0,1101:127	0/1:43,36:79:99:1114,0,1317:127	0/1:45,36:81:99:1066,0,1329:127
 21	34546711	rs11701692	T	C	1030.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=-1.884e+00;DB;DP=152;Dels=0.00;FS=8.32;HaplotypeScore=1.93;MLEAC=2;MLEAF=0.333;MQ=57.61;MQ0=0;MQRankSum=-2.910e-01;QD=10.51;ReadPosRankSum=0.258;SB=-4.600e+02;VQSLOD=6.46;culprit=FS	GT:AD:DP:GQ:PL:TP	1|0:35,25:61:99:574,0,1137:127	1|0:19,18:37:99:495,0,498:127	0|0:54,0:54:99:0,141,1811:127
+21	42833227	rs460904	T	C	3179.23	PASS	AC=6;AF=1.00;AN=6;DB;DP=155;Dels=0.00;FS=0.00;HaplotypeScore=2.46;MLEAC=6;MLEAF=1.00;MQ=58.18;MQ0=0;QD=20.51;SB=-2.173e+03;VQSLOD=7.70;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,60:60:90:1085,90,0:74	1|1:0,54:54:96:1172,96,0:74	1|1:0,41:41:75:960,75,0:74
+22	17351507	rs5748687	G	A	4244.01	VQSRTrancheSNP99.00to99.90	AC=5;AF=0.833;AN=6;BaseQRankSum=-2.240e-01;DB;DP=261;Dels=0.00;FS=0.710;HaplotypeScore=1.72;MLEAC=5;MLEAF=0.833;MQ=28.84;MQ0=57;MQRankSum=-7.852e+00;QD=16.26;ReadPosRankSum=-1.243e+00;SB=-2.174e+03;VQSLOD=-1.710e+00;culprit=MQ	GT:AD:DP:GQ:PL:TP	1|1:0,92:92:99:1529,141,0:127	1|0:34,49:83:99:727,0,1065:127	1|1:0,86:86:99:1988,180,0:127
+22	24416101	rs66532277	C	CTT	458.98	VQSRTrancheINDEL99.00to99.90	AC=3;AF=0.500;AN=6;BaseQRankSum=-4.190e+00;DB;DP=251;FS=68.55;HaplotypeScore=55.31;MLEAC=3;MLEAF=0.500;MQ=51.85;MQ0=3;MQRankSum=2.42;QD=1.83;RPA=23,25;RU=T;ReadPosRankSum=2.88;SB=-3.571e+01;STR;VQSLOD=-2.028e+00;culprit=QD	GT:AD:DP:GQ:PL:TP	0/1:68,20:91:65:207,0,65:3	0/1:84,0:83:0:184,0,0:3	0/1:72,0:72:83:114,0,83:3
+22	34204930	rs13053313	T	C	435.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-2.517e+00;DB;DP=82;Dels=0.00;FS=5.84;HaplotypeScore=0.533;MLEAC=1;MLEAF=0.167;MQ=58.65;MQ0=0;MQRankSum=1.12;QD=12.80;ReadPosRankSum=-1.270e+00;SB=-1.170e+02;VQSLOD=6.44;culprit=DP	GT:AD:DP:GQ:PL:TP	0|0:12,0:12:27:0,27,360:27	0|1:16,18:34:99:470,0,403:27	0|0:36,0:36:93:0,93,1230:27
+22	43356130	rs6519353	A	C	3526.01	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=0.809;DB;DP=228;Dels=0.00;FS=6.34;HaplotypeScore=1.63;MLEAC=3;MLEAF=0.500;MQ=59.41;MQ0=0;MQRankSum=-1.460e-01;QD=22.46;ReadPosRankSum=2.03;SB=-1.954e+03;VQSLOD=8.50;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	1|0:40,41:81:99:1020,0,1222:127	1|1:0,76:76:99:2506,196,0:127	0|0:71,0:71:99:0,165,2091:127
+22	50309915	rs9616205	A	T	1712.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=-9.360e-01;DB;DP=162;Dels=0.00;FS=2.86;HaplotypeScore=2.15;MLEAC=2;MLEAF=0.333;MQ=56.21;MQ0=5;MQRankSum=0.892;QD=13.92;ReadPosRankSum=-1.040e+00;SB=-9.940e+02;VQSLOD=6.28;culprit=MQ	GT:AD:DP:GQ:PL:TP	0|1:39,33:72:99:777,0,994:78	0|0:39,0:39:78:0,78,1010:78	1|0:16,35:51:99:974,0,257:78
 X	4712340	rs5915814	C	A	2233.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=8.78;DB;DP=176;Dels=0.00;FS=0.00;HaplotypeScore=0.443;MLEAC=2;MLEAF=0.333;MQ=59.50;MQ0=0;MQRankSum=-1.782e+00;QD=15.73;ReadPosRankSum=0.359;SB=-1.240e+03;VQSLOD=8.39;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|1:32,37:69:99:1114,0,859:81	0|0:34,0:34:81:0,81,948:81	1|0:32,41:73:99:1158,0,575:81
+X	23948720	rs35152285	C	CA	535.35	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=-3.538e+00;DB;DP=148;FS=0.00;HaplotypeScore=24.32;MLEAC=3;MLEAF=0.500;MQ=53.02;MQ0=0;MQRankSum=0.234;QD=3.62;RPA=16,17;RU=A;ReadPosRankSum=-1.638e+00;SB=-2.841e+02;STR;VQSLOD=3.02;culprit=FS	GT:AD:DP:GQ:PL:TP	0/1:37,27:68:62:391,0,62:13	0/1:16,8:25:13:102,0,13:13	0/1:47,4:53:90:90,0,142:13
+X	42192093	rs58959554	T	C	1058.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-5.464e+00;DB;DP=190;Dels=0.00;FS=6.81;HaplotypeScore=1.88;MLEAC=1;MLEAF=0.167;MQ=59.41;MQ0=0;MQRankSum=-2.120e-01;QD=15.34;ReadPosRankSum=-1.730e-01;SB=-3.970e+02;VQSLOD=8.06;culprit=FS	GT:AD:DP:GQ:PL:TP	0|0:70,1:71:99:0,182,2683:127	0|0:50,0:50:99:0,138,1794:127	0|1:31,38:69:99:1093,0,947:127
+X	66290558	rs1606094	C	T	2602.24	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=2.17;DB;DP=213;Dels=0.00;FS=13.13;HaplotypeScore=0.00;MLEAC=3;MLEAF=0.500;MQ=59.59;MQ0=0;MQRankSum=1.34;QD=18.86;ReadPosRankSum=2.27;SB=-1.122e+03;VQSLOD=6.00;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	1|0:54,47:101:99:1381,0,1762:99	1|1:0,37:37:99:1266,99,0:99	0|0:75,0:75:99:0,205,2705:99
+X	84165195	rs5923187	C	A	1009.24	PASS	AC=1;AF=0.167;AN=6;BaseQRankSum=-2.103e+00;DB;DP=194;Dels=0.00;FS=3.07;HaplotypeScore=2.82;MLEAC=1;MLEAF=0.167;MQ=59.31;MQ0=0;MQRankSum=-6.750e-01;QD=12.01;ReadPosRankSum=-1.463e+00;SB=-5.890e+02;VQSLOD=8.24;culprit=HaplotypeScore	GT:AD:DP:GQ:PL:TP	0|0:79,1:80:99:0,223,2856:60	0|0:30,0:30:60:0,60,764:60	0|1:40,44:84:99:1044,0,1007:60
+X	95868251	.	G	A	108.36	VQSRTrancheSNP99.00to99.90	AC=2;AF=0.333;AN=6;BaseQRankSum=1.44;DP=90;Dels=0.00;FS=4.21;HaplotypeScore=4.94;MLEAC=2;MLEAF=0.333;MQ=44.60;MQ0=8;MQRankSum=-2.939e+00;QD=1.55;ReadPosRankSum=2.87;SB=-7.411e+01;VQSLOD=-3.242e+00;culprit=QD	GT:AD:DP:GQ:PL:TP	0|1:29,9:38:99:126,0,251:23	0|0:20,0:20:24:0,24,266:23	1|0:27,5:32:21:21,0,97:23
+X	116855730	rs980389	A	G	2366.24	PASS	AC=3;AF=0.500;AN=6;BaseQRankSum=-3.939e+00;DB;DP=173;Dels=0.00;FS=3.51;HaplotypeScore=0.908;MLEAC=3;MLEAF=0.500;MQ=59.87;MQ0=0;MQRankSum=0.653;QD=21.51;ReadPosRankSum=0.730;SB=-1.218e+03;VQSLOD=7.96;culprit=MQ	GT:AD:DP:GQ:PL:TP	1|0:37,33:70:99:965,0,1249:114	1|1:0,40:40:99:1446,114,0:114	0|0:63,0:63:99:0,175,2284:114
 X	135282092	rs5974586	G	C	2193.26	PASS	AC=2;AF=0.333;AN=6;BaseQRankSum=4.26;DB;DP=203;Dels=0.00;FS=0.540;HaplotypeScore=0.659;MLEAC=2;MLEAF=0.333;MQ=59.47;MQ0=0;MQRankSum=1.00;QD=13.46;ReadPosRankSum=1.34;SB=-1.069e+03;VQSLOD=9.09;culprit=ReadPosRankSum	GT:AD:DP:GQ:PL:TP	0|1:47,33:80:99:1026,0,1466:116	0|0:40,0:40:99:0,117,1486:116	1|0:43,40:83:99:1206,0,1311:116
+X	150239197	rs6627349	T	C	4677.01	PASS	AC=5;AF=0.833;AN=6;BaseQRankSum=0.731;DB;DP=179;Dels=0.00;FS=0.00;HaplotypeScore=1.61;MLEAC=5;MLEAF=0.833;MQ=59.69;MQ0=0;MQRankSum=0.676;QD=26.13;ReadPosRankSum=0.494;SB=-2.442e+03;VQSLOD=10.55;culprit=FS	GT:AD:DP:GQ:PL:TP	1|1:0,66:66:99:2387,190,0:99	1|1:0,40:40:99:1243,99,0:99	1|0:36,37:73:99:1047,0,1039:99
+Y	13265732	.	C	T	356.72	VQSRTrancheSNP99.00to99.90	AC=2;AF=0.333;AN=6;BaseQRankSum=2.82;DP=142;Dels=0.00;FS=7.94;HaplotypeScore=19.82;MLEAC=2;MLEAF=0.333;MQ=38.22;MQ0=11;MQRankSum=-2.145e+00;QD=5.10;ReadPosRankSum=1.49;SB=-1.466e+00;VQSLOD=-1.443e+01;culprit=MQ	GT:AD:DP:GQ:PL:TP	0|1:35,15:50:99:327,0,624:5	0|0:65,7:72:4:0,4,1831:5	1|0:15,5:20:67:67,0,227:5
+Y	59024923	.	A	T	1977.26	VQSRTrancheSNP99.90to100.00	AC=3;AF=0.500;AN=6;BaseQRankSum=1.84;DP=745;DS;Dels=0.00;FS=0.367;HaplotypeScore=8.30;MLEAC=3;MLEAF=0.500;MQ=43.53;MQ0=19;MQRankSum=2.50;QD=2.65;ReadPosRankSum=0.098;SB=-9.720e+02;VQSLOD=-3.676e+01;culprit=DP	GT:AD:DP:GQ:PL:TP	0/1:208,42:250:99:730,0,6428:127	0/1:210,35:245:99:588,0,6218:127	0/1:208,42:250:99:698,0,6060:127
+GL000214.1	115144	.	C	T	102.07	VQSRTrancheSNP99.00to99.90	AC=2;AF=0.333;AN=6;BaseQRankSum=0.854;DP=103;Dels=0.00;FS=14.19;HaplotypeScore=14.95;MLEAC=2;MLEAF=0.333;MQ=38.45;MQ0=1;MQRankSum=-2.314e+00;QD=1.05;ReadPosRankSum=1.48;SB=-6.153e+01;VQSLOD=-1.476e+01;culprit=QD	GT:AD:DP:GQ:PL:TP	0|0:6,0:6:3:0,3,22:4	0|1:9,11:20:16:81,0,16:4	0|1:63,14:77:58:58,0,908:4
+GL000215.1	142616	.	T	C	976.25	VQSRTrancheSNP99.00to99.90	AC=6;AF=1.00;AN=6;DP=80;Dels=0.00;FS=0.00;HaplotypeScore=0.00;MLEAC=6;MLEAF=1.00;MQ=21.04;MQ0=42;QD=12.20;SB=-8.001e+01;VQSLOD=-4.104e-01;culprit=MQ	GT:AD:DP:GQ:PL:TP	1|1:0,33:33:42:516,42,0:19	1|1:0,21:21:24:272,24,0:19	1|1:0,26:26:21:226,21,0:19
+GL000212.1	28454	.	T	C	2248.99	VQSRTrancheSNP99.00to99.90	AC=4;AF=1.00;AN=4;BaseQRankSum=1.69;DP=94;Dels=0.00;FS=3.27;HaplotypeScore=0.962;MLEAC=4;MLEAF=1.00;MQ=36.99;MQ0=7;MQRankSum=-4.280e-01;QD=24.18;ReadPosRankSum=-1.284e+00;SB=-1.053e+03;VQSLOD=1.24;culprit=MQ	GT:AD:DP:GQ:PL:TP	1|1:0,29:29:63:755,63,0:63	./.	1|1:1,63:64:99:1530,135,0:63
diff --git a/testdata/net/sf/picard/vcf/vcfFormatTest.vcf.idx b/testdata/net/sf/picard/vcf/vcfFormatTest.vcf.idx
new file mode 100644
index 0000000..e90c25c
Binary files /dev/null and b/testdata/net/sf/picard/vcf/vcfFormatTest.vcf.idx differ
diff --git a/testdata/tribble/large.txt b/testdata/tribble/large.txt
new file mode 100644
index 0000000..5f4e1d4
--- /dev/null
+++ b/testdata/tribble/large.txt
@@ -0,0 +1,165 @@
+##fileformat=VCFv4.1
+##ApplyRecalibration="analysis_type=ApplyRecalibration input_file=[] read_buffer_size=null phone_home=STANDARD gatk_key=null tag=NA read_filter=[] intervals=[/seq/dax/t2d_genes/v1/t2d_genes.padded.interval_list] excludeIntervals=null interval_set_rule=UNION interval_merging=ALL interval_padding=0 reference_sequence=/seq/references/Homo_sapiens_assembly19/v1/Homo_sapiens_assembly19.fasta nonDeterministicRandomSeed=false disableRandomization=false maxRuntime=-1 maxRuntimeUnits=MINUTES down [...]
+##CombineVariants="analysis_type=CombineVariants input_file=[] read_buffer_size=null phone_home=STANDARD gatk_key=null tag=NA read_filter=[] intervals=[/seq/dax/t2d_genes/v1/t2d_genes.padded.interval_list] excludeIntervals=null interval_set_rule=UNION interval_merging=ALL interval_padding=0 reference_sequence=/seq/references/Homo_sapiens_assembly19/v1/Homo_sapiens_assembly19.fasta nonDeterministicRandomSeed=false disableRandomization=false maxRuntime=-1 maxRuntimeUnits=MINUTES downsampli [...]
+##FILTER=<ID=Indel_FS,Description="FS>200.0">
+##FILTER=<ID=Indel_InbreedingCoeff,Description="InbreedingCoeff<-0.8">
+##FILTER=<ID=Indel_QD,Description="QD<2.0">
+##FILTER=<ID=Indel_ReadPosRankSum,Description="ReadPosRankSum<-20.0">
+##FILTER=<ID=LowQual,Description="Low quality">
+##FILTER=<ID=VQSRTrancheSNP98.50to98.60,Description="Truth sensitivity tranche level for SNP model at VQS Lod: -0.9687 <= x < -0.8298">
+##FILTER=<ID=VQSRTrancheSNP98.60to98.80,Description="Truth sensitivity tranche level for SNP model at VQS Lod: -1.2821 <= x < -0.9687">
+##FILTER=<ID=VQSRTrancheSNP98.80to98.90,Description="Truth sensitivity tranche level for SNP model at VQS Lod: -1.5011 <= x < -1.2821">
+##FILTER=<ID=VQSRTrancheSNP98.90to99.00,Description="Truth sensitivity tranche level for SNP model at VQS Lod: -1.7494 <= x < -1.5011">
+##FILTER=<ID=VQSRTrancheSNP99.00to99.30,Description="Truth sensitivity tranche level for SNP model at VQS Lod: -2.9782 <= x < -1.7494">
+##FILTER=<ID=VQSRTrancheSNP99.30to99.50,Description="Truth sensitivity tranche level for SNP model at VQS Lod: -4.7694 <= x < -2.9782">
+##FILTER=<ID=VQSRTrancheSNP99.50to99.90,Description="Truth sensitivity tranche level for SNP model at VQS Lod: -15.7985 <= x < -4.7694">
+##FILTER=<ID=VQSRTrancheSNP99.90to100.00+,Description="Truth sensitivity tranche level for SNP model at VQS Lod < -4789.109">
+##FILTER=<ID=VQSRTrancheSNP99.90to100.00,Description="Truth sensitivity tranche level for SNP model at VQS Lod: -4789.109 <= x < -15.7985">
+##FORMAT=<ID=AD,Number=.,Type=Integer,Description="Allelic depths for the ref and alt alleles in the order listed">
+##FORMAT=<ID=DP,Number=1,Type=Integer,Description="Approximate read depth (reads with MQ=255 or with bad mates are filtered)">
+##FORMAT=<ID=GQ,Number=1,Type=Integer,Description="Genotype Quality">
+##FORMAT=<ID=GT,Number=1,Type=String,Description="Genotype">
+##FORMAT=<ID=PL,Number=G,Type=Integer,Description="Normalized, Phred-scaled likelihoods for genotypes as defined in the VCF specification">
+##INFO=<ID=AC,Number=A,Type=Integer,Description="Allele count in genotypes, for each ALT allele, in the same order as listed">
+##INFO=<ID=AF,Number=A,Type=Float,Description="Allele Frequency, for each ALT allele, in the same order as listed">
+##INFO=<ID=AN,Number=1,Type=Integer,Description="Total number of alleles in called genotypes">
+##INFO=<ID=BaseQRankSum,Number=1,Type=Float,Description="Z-score from Wilcoxon rank sum test of Alt Vs. Ref base qualities">
+##INFO=<ID=DB,Number=0,Type=Flag,Description="dbSNP Membership">
+##INFO=<ID=DP,Number=1,Type=Integer,Description="Approximate read depth; some reads may have been filtered">
+##INFO=<ID=DS,Number=0,Type=Flag,Description="Were any of the samples downsampled?">
+##INFO=<ID=Dels,Number=1,Type=Float,Description="Fraction of Reads Containing Spanning Deletions">
+##INFO=<ID=END,Number=1,Type=Integer,Description="Stop position of the interval">
+##INFO=<ID=FS,Number=1,Type=Float,Description="Phred-scaled p-value using Fisher's exact test to detect strand bias">
+##INFO=<ID=HaplotypeScore,Number=1,Type=Float,Description="Consistency of the site with at most two segregating haplotypes">
+##INFO=<ID=InbreedingCoeff,Number=1,Type=Float,Description="Inbreeding coefficient as estimated from the genotype likelihoods per-sample when compared against the Hardy-Weinberg expectation">
+##INFO=<ID=MLEAC,Number=A,Type=Integer,Description="Maximum likelihood expectation (MLE) for the allele counts (not necessarily the same as the AC), for each ALT allele, in the same order as listed">
+##INFO=<ID=MLEAF,Number=A,Type=Float,Description="Maximum likelihood expectation (MLE) for the allele frequency (not necessarily the same as the AF), for each ALT allele, in the same order as listed">
+##INFO=<ID=MQ,Number=1,Type=Float,Description="RMS Mapping Quality">
+##INFO=<ID=MQ0,Number=1,Type=Integer,Description="Total Mapping Quality Zero Reads">
+##INFO=<ID=MQRankSum,Number=1,Type=Float,Description="Z-score From Wilcoxon rank sum test of Alt vs. Ref read mapping qualities">
+##INFO=<ID=QD,Number=1,Type=Float,Description="Variant Confidence/Quality by Depth">
+##INFO=<ID=RPA,Number=.,Type=Integer,Description="Number of times tandem repeat unit is repeated, for each allele (including reference)">
+##INFO=<ID=RU,Number=1,Type=String,Description="Tandem repeat unit (bases)">
+##INFO=<ID=ReadPosRankSum,Number=1,Type=Float,Description="Z-score from Wilcoxon rank sum test of Alt vs. Ref read position bias">
+##INFO=<ID=SNPEFF_AMINO_ACID_CHANGE,Number=1,Type=String,Description="Old/New amino acid for the highest-impact effect resulting from the current variant (in HGVS style)">
+##INFO=<ID=SNPEFF_CODON_CHANGE,Number=1,Type=String,Description="Old/New codon for the highest-impact effect resulting from the current variant">
+##INFO=<ID=SNPEFF_EFFECT,Number=1,Type=String,Description="The highest-impact effect resulting from the current variant (or one of the highest-impact effects, if there is a tie)">
+##INFO=<ID=SNPEFF_EXON_ID,Number=1,Type=String,Description="Exon ID for the highest-impact effect resulting from the current variant">
+##INFO=<ID=SNPEFF_FUNCTIONAL_CLASS,Number=1,Type=String,Description="Functional class of the highest-impact effect resulting from the current variant: [NONE, SILENT, MISSENSE, NONSENSE]">
+##INFO=<ID=SNPEFF_GENE_BIOTYPE,Number=1,Type=String,Description="Gene biotype for the highest-impact effect resulting from the current variant">
+##INFO=<ID=SNPEFF_GENE_NAME,Number=1,Type=String,Description="Gene name for the highest-impact effect resulting from the current variant">
+##INFO=<ID=SNPEFF_IMPACT,Number=1,Type=String,Description="Impact of the highest-impact effect resulting from the current variant [MODIFIER, LOW, MODERATE, HIGH]">
+##INFO=<ID=SNPEFF_TRANSCRIPT_ID,Number=1,Type=String,Description="Transcript ID for the highest-impact effect resulting from the current variant">
+##INFO=<ID=STR,Number=0,Type=Flag,Description="Variant is a short tandem repeat">
+##INFO=<ID=VQSLOD,Number=1,Type=Float,Description="Log odds ratio of being a true variant versus being false under the trained gaussian mixture model">
+##INFO=<ID=culprit,Number=1,Type=String,Description="The annotation which was the worst performing in the Gaussian mixture model, likely the reason why the variant was filtered out">
+##INFO=<ID=set,Number=1,Type=String,Description="Source VCF for the merged record in CombineVariants">
+##OriginalSnpEffCmd="SnpEff eff -v -onlyCoding true -c /seq/references/Homo_sapiens_assembly19/v1/snpEff/Homo_sapiens_assembly19.snpEff.config -i vcf -o vcf GRCh37.64 /seq/dax/t2d_genes/v1/t2d_genes.unannotated.vcf "
+##OriginalSnpEffVersion="2.0.5 (build 2011-12-24), by Pablo Cingolani"
+##SelectVariants="analysis_type=SelectVariants input_file=[] read_buffer_size=null phone_home=STANDARD gatk_key=null tag=NA read_filter=[] intervals=[/seq/dax/t2d_genes/v1/t2d_genes.padded.interval_list] excludeIntervals=null interval_set_rule=UNION interval_merging=ALL interval_padding=0 reference_sequence=/seq/references/Homo_sapiens_assembly19/v1/Homo_sapiens_assembly19.fasta nonDeterministicRandomSeed=false disableRandomization=false maxRuntime=-1 maxRuntimeUnits=MINUTES downsampling [...]
+##UnifiedGenotyper="analysis_type=UnifiedGenotyper input_file=[/seq/dax/t2d_genes/v1/t2d_genes.bam.list] read_buffer_size=null phone_home=STANDARD gatk_key=null tag=NA read_filter=[] intervals=[/seq/dax/t2d_genes/v1/scatter/temp_0001_of_2500/scattered.intervals] excludeIntervals=null interval_set_rule=UNION interval_merging=ALL interval_padding=0 reference_sequence=/seq/references/Homo_sapiens_assembly19/v1/Homo_sapiens_assembly19.fasta nonDeterministicRandomSeed=false disableRandomizati [...]
+##VariantAnnotator="analysis_type=VariantAnnotator input_file=[] read_buffer_size=null phone_home=STANDARD gatk_key=null tag=NA read_filter=[] intervals=[/seq/references/HybSelOligos/whole_exome_agilent_1.1_refseq_plus_3_boosters/whole_exome_agilent_1.1_refseq_plus_3_boosters.Homo_sapiens_assembly19.targets.interval_list] excludeIntervals=null interval_set_rule=UNION interval_merging=ALL interval_padding=50 reference_sequence=/seq/references/Homo_sapiens_assembly19/v1/Homo_sapiens_assemb [...]
+##VariantFiltration="analysis_type=VariantFiltration input_file=[] read_buffer_size=null phone_home=STANDARD gatk_key=null tag=NA read_filter=[] intervals=[/seq/dax/t2d_genes/v1/t2d_genes.padded.interval_list] excludeIntervals=null interval_set_rule=UNION interval_merging=ALL interval_padding=0 reference_sequence=/seq/references/Homo_sapiens_assembly19/v1/Homo_sapiens_assembly19.fasta nonDeterministicRandomSeed=false disableRandomization=false maxRuntime=-1 maxRuntimeUnits=MINUTES downsa [...]
+##contig=<ID=1,length=249250621>
+##contig=<ID=2,length=243199373>
+##contig=<ID=3,length=198022430>
+##contig=<ID=4,length=191154276>
+##contig=<ID=5,length=180915260>
+##contig=<ID=6,length=171115067>
+##contig=<ID=7,length=159138663>
+##contig=<ID=8,length=146364022>
+##contig=<ID=9,length=141213431>
+##contig=<ID=10,length=135534747>
+##contig=<ID=11,length=135006516>
+##contig=<ID=12,length=133851895>
+##contig=<ID=13,length=115169878>
+##contig=<ID=14,length=107349540>
+##contig=<ID=15,length=102531392>
+##contig=<ID=16,length=90354753>
+##contig=<ID=17,length=81195210>
+##contig=<ID=18,length=78077248>
+##contig=<ID=19,length=59128983>
+##contig=<ID=20,length=63025520>
+##contig=<ID=21,length=48129895>
+##contig=<ID=22,length=51304566>
+##contig=<ID=X,length=155270560>
+##contig=<ID=Y,length=59373566>
+##contig=<ID=MT,length=16569>
+##contig=<ID=GL000207.1,length=4262>
+##contig=<ID=GL000226.1,length=15008>
+##contig=<ID=GL000229.1,length=19913>
+##contig=<ID=GL000231.1,length=27386>
+##contig=<ID=GL000210.1,length=27682>
+##contig=<ID=GL000239.1,length=33824>
+##contig=<ID=GL000235.1,length=34474>
+##contig=<ID=GL000201.1,length=36148>
+##contig=<ID=GL000247.1,length=36422>
+##contig=<ID=GL000245.1,length=36651>
+##contig=<ID=GL000197.1,length=37175>
+##contig=<ID=GL000203.1,length=37498>
+##contig=<ID=GL000246.1,length=38154>
+##contig=<ID=GL000249.1,length=38502>
+##contig=<ID=GL000196.1,length=38914>
+##contig=<ID=GL000248.1,length=39786>
+##contig=<ID=GL000244.1,length=39929>
+##contig=<ID=GL000238.1,length=39939>
+##contig=<ID=GL000202.1,length=40103>
+##contig=<ID=GL000234.1,length=40531>
+##contig=<ID=GL000232.1,length=40652>
+##contig=<ID=GL000206.1,length=41001>
+##contig=<ID=GL000240.1,length=41933>
+##contig=<ID=GL000236.1,length=41934>
+##contig=<ID=GL000241.1,length=42152>
+##contig=<ID=GL000243.1,length=43341>
+##contig=<ID=GL000242.1,length=43523>
+##contig=<ID=GL000230.1,length=43691>
+##contig=<ID=GL000237.1,length=45867>
+##contig=<ID=GL000233.1,length=45941>
+##contig=<ID=GL000204.1,length=81310>
+##contig=<ID=GL000198.1,length=90085>
+##contig=<ID=GL000208.1,length=92689>
+##contig=<ID=GL000191.1,length=106433>
+##contig=<ID=GL000227.1,length=128374>
+##contig=<ID=GL000228.1,length=129120>
+##contig=<ID=GL000214.1,length=137718>
+##contig=<ID=GL000221.1,length=155397>
+##contig=<ID=GL000209.1,length=159169>
+##contig=<ID=GL000218.1,length=161147>
+##contig=<ID=GL000220.1,length=161802>
+##contig=<ID=GL000213.1,length=164239>
+##contig=<ID=GL000211.1,length=166566>
+##contig=<ID=GL000199.1,length=169874>
+##contig=<ID=GL000217.1,length=172149>
+##contig=<ID=GL000216.1,length=172294>
+##contig=<ID=GL000215.1,length=172545>
+##contig=<ID=GL000205.1,length=174588>
+##contig=<ID=GL000219.1,length=179198>
+##contig=<ID=GL000224.1,length=179693>
+##contig=<ID=GL000223.1,length=180455>
+##contig=<ID=GL000195.1,length=182896>
+##contig=<ID=GL000212.1,length=186858>
+##contig=<ID=GL000222.1,length=186861>
+##contig=<ID=GL000200.1,length=187035>
+##contig=<ID=GL000193.1,length=189789>
+##contig=<ID=GL000194.1,length=191469>
+##contig=<ID=GL000225.1,length=211173>
+##contig=<ID=GL000192.1,length=547496>
+##contig=<ID=NC_007605,length=171823>
+##reference=file:///seq/references/Homo_sapiens_assembly19/v1/Homo_sapiens_assembly19.fasta
+##source=SelectVariants
+#CHROM	POS	ID	REF	ALT	QUAL	FILTER	INFO	FORMAT	100	1000-10D	1001	1001-10D	10018	1002-10D	10029	1003-10D	10032	1004-10D	10046	10049	1005	1005-10D	10050	10059	1006-10D	10069	1007-10BD	1008-10D	10088	1009-10D	10090	1010-10D	10105	10109	1011-10D	10110	10114	10118	1012-10D	1013-10D	1014-10D	10148	1015-10D	1016-10D	1018-10D	1019-10D	10191	10194	10196	1020-10D	10204.0	1021-10D	10210	10211.0	10221	10268	1028	10282	10304	10326	10330	10339	10356	10371	10376	10388.0	10397	10402	10434	10447	10451	104 [...]
+1	69270	.	A	G	67802.61	VQSRTrancheSNP99.50to99.90	AC=4763;AF=0.736;AN=6468;BaseQRankSum=-29.966;DP=1452155;Dels=0.00;FS=0.000;HaplotypeScore=0.0519;InbreedingCoeff=0.3564;MLEAC=3470;MLEAF=0.536;MQ=2.72;MQ0=522186;MQRankSum=17.776;QD=0.32;ReadPosRankSum=-9.274;SNPEFF_AMINO_ACID_CHANGE=S108;SNPEFF_CODON_CHANGE=tcA/tcG;SNPEFF_EFFECT=SYNONYMOUS_CODING;SNPEFF_EXON_ID=exon_1_69037_69829;SNPEFF_FUNCTIONAL_CLASS=SILENT;SNPEFF_GENE_BIOTYPE=protein_coding;SNPEFF_GENE_NAME=OR4F5;SNPEFF_IMPACT=LOW;S [...]
+1	69335	.	G	A	97.21	VQSRTrancheSNP99.90to100.00	AC=1;AF=6.701e-05;AN=14924;BaseQRankSum=3.221;DP=1123713;Dels=0.00;FS=0.000;HaplotypeScore=0.0941;InbreedingCoeff=-0.0589;MLEAC=1;MLEAF=6.701e-05;MQ=4.99;MQ0=459016;MQRankSum=0.427;QD=1.30;ReadPosRankSum=-1.122;SNPEFF_AMINO_ACID_CHANGE=R130H;SNPEFF_CODON_CHANGE=cGc/cAc;SNPEFF_EFFECT=NON_SYNONYMOUS_CODING;SNPEFF_EXON_ID=exon_1_69037_69829;SNPEFF_FUNCTIONAL_CLASS=MISSENSE;SNPEFF_GENE_BIOTYPE=protein_coding;SNPEFF_GENE_NAME=OR4F5;SNPEFF_IMPACT [...]
+1	69366	.	T	G	871.42	VQSRTrancheSNP99.50to99.90	AC=2;AF=1.239e-04;AN=16146;BaseQRankSum=-1.719;DP=990333;Dels=0.00;FS=0.000;HaplotypeScore=0.0933;InbreedingCoeff=-0.0247;MLEAC=2;MLEAF=1.239e-04;MQ=5.96;MQ0=407294;MQRankSum=0.001;QD=21.25;ReadPosRankSum=0.517;SNPEFF_AMINO_ACID_CHANGE=V140;SNPEFF_CODON_CHANGE=gtT/gtG;SNPEFF_EFFECT=SYNONYMOUS_CODING;SNPEFF_EXON_ID=exon_1_69037_69829;SNPEFF_FUNCTIONAL_CLASS=SILENT;SNPEFF_GENE_BIOTYPE=protein_coding;SNPEFF_GENE_NAME=OR4F5;SNPEFF_IMPACT=LOW;SN [...]
+1	69409	.	A	G	4406.28	VQSRTrancheSNP99.50to99.90	AC=8;AF=4.147e-04;AN=19290;BaseQRankSum=13.621;DP=1362430;Dels=0.00;FS=26.031;HaplotypeScore=0.1448;InbreedingCoeff=0.0053;MLEAC=8;MLEAF=4.147e-04;MQ=11.68;MQ0=219922;MQRankSum=5.323;QD=9.71;ReadPosRankSum=-9.497;SNPEFF_AMINO_ACID_CHANGE=I107V;SNPEFF_CODON_CHANGE=Atc/Gtc;SNPEFF_EFFECT=NON_SYNONYMOUS_CODING;SNPEFF_EXON_ID=exon_1_69091_70008;SNPEFF_FUNCTIONAL_CLASS=MISSENSE;SNPEFF_GENE_BIOTYPE=protein_coding;SNPEFF_GENE_NAME=OR4F5;SNPEFF_IMP [...]
+1	69428	rs140739101	T	G	419908.06	VQSRTrancheSNP99.50to99.90	AC=453;AF=0.022;AN=20242;BaseQRankSum=49.526;DB;DP=1394601;Dels=0.00;FS=975.380;HaplotypeScore=0.1195;InbreedingCoeff=0.2461;MLEAC=334;MLEAF=0.017;MQ=16.42;MQ0=138676;MQRankSum=-2.177;QD=17.57;ReadPosRankSum=-10.322;SNPEFF_AMINO_ACID_CHANGE=F113C;SNPEFF_CODON_CHANGE=tTt/tGt;SNPEFF_EFFECT=NON_SYNONYMOUS_CODING;SNPEFF_EXON_ID=exon_1_69091_70008;SNPEFF_FUNCTIONAL_CLASS=MISSENSE;SNPEFF_GENE_BIOTYPE=protein_coding;SNPEFF_GENE_NAME=O [...]
+1	69438	.	T	C	3107.59	VQSRTrancheSNP99.50to99.90	AC=2;AF=9.729e-05;AN=20558;BaseQRankSum=-2.899;DP=1379680;Dels=0.00;FS=8.925;HaplotypeScore=0.1499;InbreedingCoeff=0.0001;MLEAC=2;MLEAF=9.729e-05;MQ=22.72;MQ0=95489;MQRankSum=0.850;QD=24.66;ReadPosRankSum=-0.870;SNPEFF_AMINO_ACID_CHANGE=Y116;SNPEFF_CODON_CHANGE=taT/taC;SNPEFF_EFFECT=SYNONYMOUS_CODING;SNPEFF_EXON_ID=exon_1_69091_70008;SNPEFF_FUNCTIONAL_CLASS=SILENT;SNPEFF_GENE_BIOTYPE=protein_coding;SNPEFF_GENE_NAME=OR4F5;SNPEFF_IMPACT=LOW; [...]
+1	69453	rs142004627	G	A	923.30	VQSRTrancheSNP99.00to99.30	AC=141;AF=7.059e-03;AN=19974;BaseQRankSum=-19.711;DB;DP=1292464;Dels=0.00;FS=234.751;HaplotypeScore=0.2262;InbreedingCoeff=0.1247;MLEAC=44;MLEAF=2.203e-03;MQ=31.12;MQ0=15075;MQRankSum=-11.276;QD=1.14;ReadPosRankSum=-4.900;SNPEFF_AMINO_ACID_CHANGE=K121;SNPEFF_CODON_CHANGE=aaG/aaA;SNPEFF_EFFECT=SYNONYMOUS_CODING;SNPEFF_EXON_ID=exon_1_69091_70008;SNPEFF_FUNCTIONAL_CLASS=SILENT;SNPEFF_GENE_BIOTYPE=protein_coding;SNPEFF_GENE_NAME=OR4F5 [...]
+1	69478	.	T	C	625.46	VQSRTrancheSNP99.50to99.90	AC=2;AF=9.713e-05;AN=20590;BaseQRankSum=-5.701;DP=1224140;Dels=0.00;FS=9.248;HaplotypeScore=0.1621;InbreedingCoeff=-0.0073;MLEAC=1;MLEAF=4.857e-05;MQ=28.40;MQ0=31089;MQRankSum=-0.497;QD=7.19;ReadPosRankSum=-4.289;SNPEFF_AMINO_ACID_CHANGE=C130R;SNPEFF_CODON_CHANGE=Tgt/Cgt;SNPEFF_EFFECT=NON_SYNONYMOUS_CODING;SNPEFF_EXON_ID=exon_1_69091_70008;SNPEFF_FUNCTIONAL_CLASS=MISSENSE;SNPEFF_GENE_BIOTYPE=protein_coding;SNPEFF_GENE_NAME=OR4F5;SNPEFF_IMPA [...]
+1	69489	.	A	C	1035.55	VQSRTrancheSNP99.50to99.90	AC=3;AF=1.469e-04;AN=20422;BaseQRankSum=4.705;DP=1117540;Dels=0.00;FS=2.442;HaplotypeScore=0.1773;InbreedingCoeff=-0.0003;MLEAC=2;MLEAF=9.793e-05;MQ=28.74;MQ0=29993;MQRankSum=-1.170;QD=12.78;ReadPosRankSum=-2.427;SNPEFF_AMINO_ACID_CHANGE=A133;SNPEFF_CODON_CHANGE=gcA/gcC;SNPEFF_EFFECT=SYNONYMOUS_CODING;SNPEFF_EXON_ID=exon_1_69091_70008;SNPEFF_FUNCTIONAL_CLASS=SILENT;SNPEFF_GENE_BIOTYPE=protein_coding;SNPEFF_GENE_NAME=OR4F5;SNPEFF_IMPACT=LOW [...]
+1	69496	rs150690004	G	A	74883.53	VQSRTrancheSNP99.00to99.30	AC=33;AF=1.628e-03;AN=20272;BaseQRankSum=-5.450;DB;DP=993841;Dels=0.00;FS=22.218;HaplotypeScore=0.2924;InbreedingCoeff=0.0259;MLEAC=17;MLEAF=8.386e-04;MQ=29.16;MQ0=27895;MQRankSum=10.649;QD=70.12;ReadPosRankSum=0.884;SNPEFF_AMINO_ACID_CHANGE=G136S;SNPEFF_CODON_CHANGE=Ggc/Agc;SNPEFF_EFFECT=NON_SYNONYMOUS_CODING;SNPEFF_EXON_ID=exon_1_69091_70008;SNPEFF_FUNCTIONAL_CLASS=MISSENSE;SNPEFF_GENE_BIOTYPE=protein_coding;SNPEFF_GENE_NAME=O [...]
+1	69496	.	GGCATTATGGCTGTCA	G	555413.58	Indel_FS	AC=2988;AF=0.148;AN=20226;BaseQRankSum=43.899;DP=993841;FS=Infinity;HaplotypeScore=20.7358;InbreedingCoeff=0.7977;MLEAC=3420;MLEAF=0.169;MQ=29.16;MQ0=0;MQRankSum=-13.694;QD=3.63;ReadPosRankSum=37.478;SNPEFF_AMINO_ACID_CHANGE=GIMAVT136A;SNPEFF_CODON_CHANGE=ggcattatggctgtcaca/gca;SNPEFF_EFFECT=CODON_CHANGE_PLUS_CODON_DELETION;SNPEFF_EXON_ID=exon_1_69091_70008;SNPEFF_FUNCTIONAL_CLASS=NONE;SNPEFF_GENE_BIOTYPE=protein_coding;SNPEFF_GENE_NAME=OR4 [...]
+1	69502	.	A	G	509.73	VQSRTrancheSNP99.50to99.90	AC=2;AF=9.975e-05;AN=20050;BaseQRankSum=6.603;DP=919807;Dels=0.00;FS=1.954;HaplotypeScore=0.3679;InbreedingCoeff=-0.0070;MLEAC=1;MLEAF=4.988e-05;MQ=29.88;MQ0=25457;MQRankSum=-2.320;QD=6.22;ReadPosRankSum=-2.827;SNPEFF_AMINO_ACID_CHANGE=M138V;SNPEFF_CODON_CHANGE=Atg/Gtg;SNPEFF_EFFECT=NON_SYNONYMOUS_CODING;SNPEFF_EXON_ID=exon_1_69091_70008;SNPEFF_FUNCTIONAL_CLASS=MISSENSE;SNPEFF_GENE_BIOTYPE=protein_coding;SNPEFF_GENE_NAME=OR4F5;SNPEFF_IMPACT [...]
+1	69511	rs75062661	A	G	31383854.34	VQSRTrancheSNP99.50to99.90	AC=16694;AF=0.858;AN=19460;BaseQRankSum=60.147;DB;DP=773705;Dels=0.00;FS=138.417;HaplotypeScore=0.2818;InbreedingCoeff=0.5035;MLEAC=16887;MLEAF=0.868;MQ=31.39;MQ0=16811;MQRankSum=-164.893;QD=52.42;ReadPosRankSum=-36.782;SNPEFF_AMINO_ACID_CHANGE=T141A;SNPEFF_CODON_CHANGE=Aca/Gca;SNPEFF_EFFECT=NON_SYNONYMOUS_CODING;SNPEFF_EXON_ID=exon_1_69091_70008;SNPEFF_FUNCTIONAL_CLASS=MISSENSE;SNPEFF_GENE_BIOTYPE=protein_coding;SNPEFF_GENE_N [...]
+1	69521	.	T	C	722.77	VQSRTrancheSNP99.50to99.90	AC=2;AF=1.011e-04;AN=19776;BaseQRankSum=2.538;DP=915452;Dels=0.00;FS=1.110;HaplotypeScore=0.2063;InbreedingCoeff=-0.0088;MLEAC=2;MLEAF=1.011e-04;MQ=34.73;MQ0=15188;MQRankSum=-1.312;QD=4.82;ReadPosRankSum=-4.246;SNPEFF_AMINO_ACID_CHANGE=I144T;SNPEFF_CODON_CHANGE=aTt/aCt;SNPEFF_EFFECT=NON_SYNONYMOUS_CODING;SNPEFF_EXON_ID=exon_1_69091_70008;SNPEFF_FUNCTIONAL_CLASS=MISSENSE;SNPEFF_GENE_BIOTYPE=protein_coding;SNPEFF_GENE_NAME=OR4F5;SNPEFF_IMPACT [...]
+1	69534	rs190717287	T	C	10791.59	VQSRTrancheSNP99.50to99.90	AC=14;AF=7.034e-04;AN=19902;BaseQRankSum=15.572;DB;DP=1059436;Dels=0.00;FS=1.036;HaplotypeScore=0.1863;InbreedingCoeff=-0.0037;MLEAC=14;MLEAF=7.034e-04;MQ=35.59;MQ0=12762;MQRankSum=-2.121;QD=11.54;ReadPosRankSum=-6.516;SNPEFF_AMINO_ACID_CHANGE=H148;SNPEFF_CODON_CHANGE=caT/caC;SNPEFF_EFFECT=SYNONYMOUS_CODING;SNPEFF_EXON_ID=exon_1_69091_70008;SNPEFF_FUNCTIONAL_CLASS=SILENT;SNPEFF_GENE_BIOTYPE=protein_coding;SNPEFF_GENE_NAME=OR4F5; [...]

-- 
Alioth's /git/debian-med/git-commit-notice on /srv/git.debian.org/git/debian-med/picard-tools.git


