[med-svn] [sambamba] 02/04: New upstream version 0.6.6

Andreas Tille tille at debian.org
Sun Mar 5 12:22:37 UTC 2017


This is an automated email from the git hooks/post-receive script.

tille pushed a commit to branch master
in repository sambamba.

commit 7d9214a6930ce3d6c09b4b3dd35b97fc2e56fdf1
Author: Andreas Tille <tille at debian.org>
Date:   Sun Mar 5 13:11:54 2017 +0100

    New upstream version 0.6.6
---
 .gitignore                                    |   2 +
 .test_suite.sh                                |   7 +-
 .travis.yml                                   |  12 ++-
 Makefile                                      |  37 +++++--
 Makefile.guix                                 |  83 +++++++++++++++
 README.md                                     |  65 +++++++++---
 bioconda_push.sh                              |  14 +++
 bioconda_yaml_gen.py                          |   3 +-
 cram/reader.d                                 |   5 +-
 cram/wrappers.d                               |  28 ++---
 cram/writer.d                                 |   4 +-
 gen_ldc_version_info.py                       |  22 ++++
 main.d                                        |  13 +++
 man/README.md                                 |  18 ++++
 man/sambamba-index.1                          |  24 ++---
 man/sambamba-index.1.ronn                     |  10 +-
 sambamba-ldmd-debug.rsp                       |   2 +-
 sambamba-ldmd-release.rsp                     |   2 +-
 sambamba/depth.d                              |   4 +-
 sambamba/index.d                              |   2 +-
 sambamba/markdup.d                            |   2 +-
 sambamba/merge.d                              |   4 +-
 sambamba/pileup.d                             | 142 +++++++++++++++-----------
 sambamba/slice.d                              |  30 +++---
 sambamba/sort.d                               |  18 ++--
 sambamba/utils/common/bed.d                   |   5 +-
 sambamba/utils/common/file.d                  |   4 +-
 sambamba/utils/common/filtering.d             |  35 +++++--
 sambamba/utils/view/alignmentrangeprocessor.d |   8 +-
 sambamba/view.d                               |  22 ++++
 utils/lz4.d                                   |   4 +-
 utils/strip_bcf_header.d                      |   2 +-
 utils/version_.d                              |   2 +-
 33 files changed, 451 insertions(+), 184 deletions(-)

diff --git a/.gitignore b/.gitignore
index 84a1605..7e1d516 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,5 @@
 build/
+undeaD/
 shunit*
 /*.sam
 /*.bam
@@ -6,3 +7,4 @@ shunit*
 /*.cram
 /*.crai
 /*.txt
+/utils/ldc_version_info_.d
diff --git a/.test_suite.sh b/.test_suite.sh
index 87bcf8b..9fe581e 100755
--- a/.test_suite.sh
+++ b/.test_suite.sh
@@ -113,4 +113,9 @@ testIssue225() {
     assertEquals 0 $?
 }
 
-. shunit2-2.0.3/src/shell/shunit2
+shunit2=`which shunit2`
+if [ -x "$shunit2" ]; then
+    . $shunit2
+else
+    . shunit2-2.0.3/src/shell/shunit2
+fi
diff --git a/.travis.yml b/.travis.yml
index ffd849c..53a111d 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -4,18 +4,20 @@ language: objective-c
 env:
   global:
   - ACCESS_LEVEL=sandbox
+  - LDC_RELEASE=1.0.0
+  - LDC_VERSION=ldc2-${LDC_RELEASE}-osx-x86_64
   - secure: blVtZH4Ia+4MkeTm0gRwL07r/bzhVRJJjUEbtvt0pIQNAtDq5aYuKnCI9sIw99XmEbv9+tSLc6DXq9d6LrDDR0EG7UUB4sTP6XW1XNJXm/kLIuoOCxTnVk6WBHZ4jusMWE0MNNDVmIS8nHrpoDp2ERXbfasD/9GT+3avVUvnsMg=
   - secure: Kzdt/6PLGBhD5f8yOPaPdX5yxHisCPvmfYEKUcEVksKOEurQoIpsNjzAky/yzso4R21T6xycjawFvDXvlVA83m/WJZSDDiFLZVgGoaMoZl9UYOPESeZlJR57/HkjgCss0iz/AX9abUzJ1jGfRt9ujtwcoXB15PqUhbjk2iHF0f0=
   - secure: K0J2r5R8xQMlAUwCaPVWHZUO4U+c+czK6H9bnN5RZPYs5BXpYjdV7/RgkOHsN9b+UksFx7LQRstgcVVcC+mQJRV8slGrx/GSmIA93Dp2mskU9eR1aQjxcUK10VMbIQI6qbXsZvvRSWPirNDQY4s3CjEKEv34t2IGALTkwaU1NmI=
   - secure: J1ta2/K8lj5dbHRvk+jTUAMICSnepfyc8ILCfM/HFCRaXMJQJTX1HDzEMwK3tAzzKaKEp9Tbm2b3IwE1tCtFaAfYUGozmHLirkFfKqCu/jNUkxL807M8NiqMx+H7tu2aR6t9opYyBjNhtuthbIdir3c42nldYdpse0ZLhzQTLfs=
 
 before_install:
-  - brew install xz
-  - wget https://github.com/ldc-developers/ldc/releases/download/v0.17.1/ldc2-0.17.1-osx-x86_64.tar.xz
-  - tar xJf ldc2-0.17.1-osx-x86_64.tar.xz
-  - export PATH=`pwd`/ldc2-0.17.1-osx-x86_64/bin:$PATH
-  - export LIBRARY_PATH=`pwd`/ldc2-0.17.1-osx-x86_64/lib
+  - brew install xz libconfig
+  - curl -L "https://github.com/ldc-developers/ldc/releases/download/v${LDC_RELEASE}/${LDC_VERSION}.tar.xz" | tar Jx
+  - export PATH=$(pwd)/${LDC_VERSION}/bin:${PATH}
+  - export LIBRARY_PATH=$(pwd)/${LDC_VERSION}/lib:${LIBRARY_PATH}
   - git submodule update --init --recursive
+  - git clone https://github.com/dlang/undeaD.git
   - wget https://github.com/craigcitro/r-travis/raw/master/scripts/dropbox.sh
   - chmod +x dropbox.sh
   - curl -L "https://dl.dropboxusercontent.com/u/7916095/shunit2-2.0.3.tgz" | tar zx
diff --git a/Makefile b/Makefile
index 28a0c50..07f7898 100644
--- a/Makefile
+++ b/Makefile
@@ -1,5 +1,6 @@
 D_COMPILER=dmd
-D_FLAGS=--compiler=dmd -IBioD -g -d#-O -release -inline # -version=serial
+D_FLAGS=--compiler=dmd -IBioD -IundeaD/src -g -d#-O -release -inline # -version=serial
+LDMD=ldmd2
 
 STATIC_LIB_PATH=-Lhtslib -Llz4/lib
 STATIC_LIB_SUBCMD=$(STATIC_LIB_PATH) -Wl,-Bstatic -lhts -llz4 -Wl,-Bdynamic
@@ -8,30 +9,52 @@ RDMD_FLAGS=--force --build-only --compiler=$(D_COMPILER) $(D_FLAGS)
 PLATFORM := $(shell uname -s)
 
 ifeq "$(PLATFORM)" "Darwin"
+
 LINK_CMD=gcc -dead_strip -lphobos2-ldc -ldruntime-ldc -lm -lpthread htslib/libhts.a lz4/lib/liblz4.a build/sambamba.o -o build/sambamba
 DMD_STATIC_LIBS=htslib/libhts.a lz4/lib/liblz4.a
+
+define split-debug
+dsymutil build/sambamba -o build/sambamba.dSYM
+strip -S build/sambamba
+endef
+
 else
+
 LINK_CMD=gcc -Wl,--gc-sections -o build/sambamba build/sambamba.o $(STATIC_LIB_SUBCMD) -l:libphobos2-ldc.a -l:libdruntime-ldc.a  -lrt -lpthread -lm
 DMD_STATIC_LIBS=-L-Lhtslib -L-l:libhts.a -L-l:libphobos2.a -L-Llz4/lib -L-l:liblz4.a
+
+define split-debug
+objcopy --only-keep-debug build/sambamba sambamba.debug
+objcopy --strip-debug build/sambamba
+objcopy --add-gnu-debuglink=sambamba.debug build/sambamba
+mv sambamba.debug build/
+endef
+
 endif
 
+PREREQS := ldc-version-info htslib-static lz4-static
+
 # DMD only - this goal is used during development because of its fast compilation speed
-all: htslib-static lz4-static
+all: $(PREREQS)
 	mkdir -p build/
 	rdmd --force --build-only $(D_FLAGS) $(DMD_STATIC_LIBS) -ofbuild/sambamba main.d
 
 # This is the main Makefile goal, used for building releases (best performance)
-sambamba-ldmd2-64: htslib-static lz4-static
+sambamba-ldmd2-64: $(PREREQS)
 	mkdir -p build/
-	ldmd2 @sambamba-ldmd-release.rsp
+	$(LDMD) @sambamba-ldmd-release.rsp
 	$(LINK_CMD)
+	$(split-debug)
 
 # For debugging; GDB & Valgrind are friendlier to executables created with LDC/GDC than with DMD
-sambamba-ldmd2-debug: htslib-static lz4-static
+sambamba-ldmd2-debug: $(PREREQS)
 	mkdir -p build/
-	ldmd2 @sambamba-ldmd-debug.rsp
+	$(LDMD) @sambamba-ldmd-debug.rsp
 	$(LINK_CMD)
 
+ldc-version-info:
+	./gen_ldc_version_info.py $(shell which $(LDMD)) > utils/ldc_version_info_.d
+
 htslib-static:
 	cd htslib && $(MAKE)
 
@@ -78,7 +101,7 @@ sambamba-pileup:
 	mkdir -p build/
 	rdmd $(RDMD_FLAGS) -L-lhts -version=standalone -ofbuild/sambamba-pileup sambamba/pileup.d
 
-.PHONY: clean
+.PHONY: clean ldc-version-info
 
 clean:
 	rm -rf build/ ; $(MAKE) -C htslib clean ; $(MAKE) -C lz4 clean
diff --git a/Makefile.guix b/Makefile.guix
new file mode 100644
index 0000000..e296f4c
--- /dev/null
+++ b/Makefile.guix
@@ -0,0 +1,83 @@
+# GNU Guix makefile
+#
+# To build sambamba on GNU Guix:
+#
+#   make -f Makefile.guix
+#
+# run with
+#
+#   ./build/sambamba
+
+# The following two are modified by the Guix package:
+D_COMPILER=ldc2
+LDC_LIB_PATH=$(HOME)/.guix-profile/lib
+
+DFLAGS = -wi -I. -IBioD -IundeaD/src
+DLIBS  = $(LDC_LIB_PATH)/libphobos2-ldc.a $(LDC_LIB_PATH)/libdruntime-ldc.a
+DLIBS_DEBUG = $(LDC_LIB_PATH)/libphobos2-ldc-debug.a $(LDC_LIB_PATH)/libdruntime-ldc-debug.a
+RPATH  = -L--rpath=$(dir $(realpath $(LDC_LIB_PATH)/libz.so)):$(dir $(realpath $(LDC_LIB_PATH)/liblz4.so))
+LIBS   = htslib/libhts.a -L-L$(LDC_LIB_PATH) -L-lrt -L-lpthread -L-lm -L-lz -L-llz4
+SRC    = $(wildcard main.d utils/*.d thirdparty/*.d cram/*.d) $(wildcard undeaD/src/undead/*.d) $(wildcard BioD/bio/*/*.d BioD/bio/*/*/*.d) $(wildcard sambamba/*.d sambamba/*/*.d sambamba/*/*/*.d)
+OBJ    = $(SRC:.d=.o) utils/ldc_version_info_.o
+OUT    = build/sambamba
+
+# The Guix targets resolve the RPATH automatically
+guix:        DFLAGS += -O -g -inline
+
+guix-debug:  DFLAGS += -O0 -g -d-debug
+
+# The development options are run from ~/.guix-profile and need to inject the RPATH
+debug:       DFLAGS += -O0 -g -d-debug $(RPATH) -link-debuglib
+
+release:     DFLAGS += -O -release -inline -noboundscheck $(RPATH)
+
+profile:     DFLAGS += -g -O -profile $(RPATH)
+
+guix release:             LIBS += $(DLIBS)
+
+guix-debug debug profile: LIBS += $(DLIBS_DEBUG)
+
+.PHONY: all guix guix-debug debug release profile clean test
+
+all: debug
+
+htslib-static:
+	cd htslib && $(MAKE)
+
+ldc-version-info:
+	./gen_ldc_version_info.py $(shell which ldmd2) > utils/ldc_version_info_.d
+
+utils/ldc_version_info_.o: ldc-version-info
+	$(D_COMPILER) $(DFLAGS) -c utils/ldc_version_info_.d -od=$(dir $@)
+
+build-setup: htslib-static ldc-version-info
+	mkdir -p build/
+
+guix guix-debug default debug release profile: $(OUT)
+
+# ---- Compile step
+%.o: %.d
+	$(D_COMPILER) $(DFLAGS) -c $< -od=$(dir $@)
+
+# ---- Link step
+$(OUT): build-setup $(OBJ)
+	$(D_COMPILER) $(DFLAGS) -of=build/sambamba $(OBJ) $(LIBS)
+
+test:
+	./.run_tests.sh
+
+debug-strip: debug
+	objcopy --only-keep-debug build/sambamba sambamba.debug
+	objcopy --strip-debug build/sambamba
+	objcopy --add-gnu-debuglink=sambamba.debug build/sambamba
+	mv sambamba.debug build/
+
+install:
+	install -m 0755 build/sambamba $(prefix)/bin
+
+clean: clean-d
+	cd htslib ; make clean
+
+clean-d:
+	rm -rf build/*
+	rm -f $(OBJ) $(OUT) trace.{def,log}
diff --git a/README.md b/README.md
index d9995ad..2630678 100644
--- a/README.md
+++ b/README.md
@@ -7,10 +7,10 @@ Sambamba is a high performance modern robust and fast tool (and
 library), written in the D programming language, for working with SAM
 and BAM files.  Current parallelised functionality is an important
 subset of samtools functionality, including view, index, sort,
-markdup, and depth. 
+markdup, and depth.
 
 Because of its efficient use of modern multicore CPUs, `sambamba` is usually much faster
-than `samtools`. For example, indexing a 2.5 Gb BAM file (fully cached into RAM) 
+than `samtools`. For example, indexing a 2.5 Gb BAM file (fully cached into RAM)
 on an 8 core machine utilizes all cores at 64% CPU:
 
     time sambamba index merged_NIT20120138_F3_20130715.bam -t8
@@ -32,13 +32,13 @@ Even so, it makes a big difference, shifting the focus to I/O optimization, i.e.
 fewer temporary files, more UNIX pipes, faster disk storage, filesystem tweaking, etc.
 Most tools in `sambamba` support piping: just specify `/dev/stdin` or `/dev/stdout` as filenames.
 
-Notice that `samtools` implements parallel BAM compression in `sort` and `merge`, 
-but `sambamba` should be faster for these tasks (given same amount of memory) 
+Notice that `samtools` implements parallel BAM compression in `sort` and `merge`,
+but `sambamba` should be faster for these tasks (given the same amount of memory)
 due to a more cache-friendly approach to parallelization.
 If that is not the case for you, please file a bug.
 
 Sambamba is free and open source software, licensed under GPLv2+.
-See manual pages [online](https://lomereiter.github.io/sambamba/docs/sambamba-view.html) 
+See manual pages [online](https://lomereiter.github.io/sambamba/docs/sambamba-view.html)
 to learn more about what is available and how to use it.
 
 For more information on Sambamba you can contact Artem Tarasov and Pjotr Prins.
@@ -60,43 +60,78 @@ which targets LLVM.
 ## Compiling for Linux
 
 The LDC compiler's github repository also provides binary images. The current
-preferred release for sambamba is LDC - the LLVM D compiler (>= 0.16.1). After
+preferred release for sambamba is LDC - the LLVM D compiler (>= 1.1.0). After
 installing LDC:
 
 ```sh
     git clone --recursive https://github.com/lomereiter/sambamba.git
     cd sambamba
+    git clone https://github.com/dlang/undeaD
     make sambamba-ldmd2-64
 ```
 
 Installing LDC only means unpacking an archive and setting some environment variables, e.g. unpacking into `$HOME`:
 ```sh
 cd
-wget https://github.com/ldc-developers/ldc/releases/download/v0.17.1/ldc2-0.17.1-linux-x86_64.tar.xz
-tar xJf ldc2-0.17.1-linux-x86_64.tar.xz
-export PATH=~/ldc2-0.17.1-linux-x86_64/bin/:$PATH
-export LIBRARY_PATH=~/ldc2-0.17.1-linux-x86_64/lib/
+wget https://github.com/ldc-developers/ldc/releases/download/$ver/ldc2-$ver-linux-x86_64.tar.xz
+tar xJf ldc2-$ver-linux-x86_64.tar.xz
+export PATH=~/ldc2-$ver-linux-x86_64/bin/:$PATH
+export LIBRARY_PATH=~/ldc2-$ver-linux-x86_64/lib/
 ```
 
+### GNU Guix
+
+A GNU Guix package for LDC is also available:
+
+```sh
+guix package -i ldc
+```
+
+
 ## Compiling for Mac OS X
 
 ```sh
     brew install ldc
     git clone --recursive https://github.com/lomereiter/sambamba.git
     cd sambamba
+    git clone https://github.com/dlang/undeaD
     make sambamba-ldmd2-64
 ```
 
 # Troubleshooting
 
-In case of crashes it's helpful to have GDB stacktraces (`bt` command).
+In case of crashes it's helpful to have GDB stacktraces (`bt` command). A full stacktrace
+for all threads:
+
+```
+thread apply all backtrace full
+```
 
 Note that GDB should be made aware of the D garbage collector:
 ```
-handle SIGUSR1 nostop
-handle SIGUSR1 noprint
-handle SIGUSR2 nostop
-handle SIGUSR2 noprint
+handle SIGUSR1 SIGUSR2 nostop noprint
+```
+
+A relocatable binary install of sambamba with debug information can be fetched from:
+
+```sh
+wget http://biogems.info/contrib/genenetwork/s7l4l5jnrwvvyr3pva242yakvmbfpm06-sambamba-0.6.6-pre3-6ae174b-debug-x86_64.tar.bz2
+md5sum *sambamba*.tar.bz2   # expect ca64fd6f2fa2ba901937afc6b189e98d
+mkdir tmp
+cd tmp
+tar xvjf ../*sambamba*.tar.bz2
+```
+
+Then run the contained install.sh script with a TARGET directory:
+
+```
+./install.sh ~/sambamba-test
+```
+
+Run sambamba in gdb with:
+
+```
+gdb --args ~/sambamba-test/sambamba-*/bin/sambamba view --throw-error
 ```
 
 # Development
diff --git a/bioconda_push.sh b/bioconda_push.sh
new file mode 100644
index 0000000..f3e0b7d
--- /dev/null
+++ b/bioconda_push.sh
@@ -0,0 +1,14 @@
+RECIPES=~/github/bioconda-recipes # location of the cloned fork
+REMOTE=bioconda                   # bioconda/bioconda-recipes remote
+
+UPDATED_RECIPE=/tmp/sambamba.yaml
+python bioconda_yaml_gen.py > $UPDATED_RECIPE
+VERSION=`grep version $UPDATED_RECIPE | cut -d\' -f2`
+
+cd $RECIPES
+git checkout master
+git pull $REMOTE master
+git checkout -b sambamba-${VERSION}
+cp $UPDATED_RECIPE recipes/sambamba/meta.yaml
+git commit -am "sambamba ${VERSION}"
+git push origin sambamba-${VERSION}
diff --git a/bioconda_yaml_gen.py b/bioconda_yaml_gen.py
index c9d24fe..1362a10 100644
--- a/bioconda_yaml_gen.py
+++ b/bioconda_yaml_gen.py
@@ -25,8 +25,7 @@ test:
 about:
   home: https://github.com/lomereiter/sambamba
   license: GPLv2
-  summary: Tools for working with SAM/BAM data
-"""
+  summary: Tools for working with SAM/BAM data"""
 
 import json
 from urllib2 import urlopen
diff --git a/cram/reader.d b/cram/reader.d
index f3a3583..ef87060 100644
--- a/cram/reader.d
+++ b/cram/reader.d
@@ -75,7 +75,6 @@ class CramReader : IBamSamReader {
 
     BamRead[] unmappedReads() {
         throw new Exception("* region unimplemented for CRAM files");
-        return [];
     }
 
     SamHeader header() @property {
@@ -99,8 +98,8 @@ class CramReader : IBamSamReader {
 
     auto reads() {
         alias R = CramFilterResult;
-        auto s = fd().slices(c => c.length > 0 ? R.pass : R.skip, 
-                             null, 
+        auto s = fd().slices(c => c.length > 0 ? R.pass : R.skip,
+                             null,
                              _task_pool);
         // no trust for delegates implementation => use zip-repeat trick
         auto reads = s.zip(repeat(this), repeat(bamReadAlloc(_seq_op)))
diff --git a/cram/wrappers.d b/cram/wrappers.d
index 56b892c..8d9d7f3 100644
--- a/cram/wrappers.d
+++ b/cram/wrappers.d
@@ -19,7 +19,7 @@ struct RcPtr(T, alias Free) {
         }
 
         T* ptr;
-        this(T* ptr) { 
+        this(T* ptr) {
             this.ptr = ptr;
             debug {
                 payload_id = ++payload_counter;
@@ -27,7 +27,7 @@ struct RcPtr(T, alias Free) {
             }
         }
 
-        ~this() { 
+        ~this() {
             debug {
                 stderr.writeln("Free ", T.stringof, "* #", payload_id);
             }
@@ -51,14 +51,14 @@ struct RcPtr(T, alias Free) {
     this(this)
     {
         static if (is(T == cram_slice)) {
-            debug writeln("COPIED #", data.ptr.id + 1);
+            debug writeln("COPIED #", data.payload_id + 1);
         }
     }
 
     this(T* ptr) { data = Data(ptr); }
 }
 
-auto nullChecked(alias func, T...)(string err_msg, cram_fd* fd, 
+auto nullChecked(alias func, T...)(string err_msg, cram_fd* fd,
                                    auto ref T other_params)
 {
     auto ptr = func(fd, other_params);
@@ -91,7 +91,7 @@ CramFd openCram(string filename) {
     cram_set_option(fd, cram_option.CRAM_OPT_DECODE_MD);
 
     // initialize locks, but we will use the pool from D standard library
-    // instead of the htslib implementation 
+    // instead of the htslib implementation
     import core.sys.posix.pthread;
     pthread_mutex_init(&fd.metrics_lock, null);
     pthread_mutex_init(&fd.ref_lock, null);
@@ -193,7 +193,7 @@ struct CramContainerRange {
             throw new CramException(err_msg);
 
         err_msg = "Failed to decode compression header";
-        front.comp_hdr = cram_decode_compression_header(_fd, 
+        front.comp_hdr = cram_decode_compression_header(_fd,
                                                         front.comp_hdr_block);
         if (front.comp_hdr is null)
             throw new CramException(err_msg);
@@ -256,7 +256,7 @@ class UndecodedSliceRange {
         return false;
     }
 
-    private void setupFront(cram_slice* ptr) { 
+    private void setupFront(cram_slice* ptr) {
         front = CramSlice(_fd, _container, RcCramSlice(ptr));
     }
 
@@ -273,7 +273,7 @@ class UndecodedSliceRange {
     }
 }
 
-auto undecodedSlices(CramFd fd, CramContainerFilter cf, 
+auto undecodedSlices(CramFd fd, CramContainerFilter cf,
                      UndecodedSliceFilter sf=null)
 {
     return new UndecodedSliceRange(fd, cf, sf);
@@ -281,7 +281,7 @@ auto undecodedSlices(CramFd fd, CramContainerFilter cf,
 
 void decodeSlice(cram_fd* fd, cram_container* c, cram_slice* s) {
     auto err_msg = "Failure in cram_decode_slice";
-    debug writeln("DECODING slice #", s.id + 1);
+    // debug writeln("DECODING slice #", s.id + 1);
     int ret = cram_decode_slice(fd, c, s, fd.header);
     if (ret != 0)
         throw new CramException(err_msg);
@@ -293,7 +293,7 @@ void decodeSlice(CramSlice slice) {
 
 import bio.core.utils.roundbuf;
 
-struct CramSliceDecoder(R) 
+struct CramSliceDecoder(R)
     if (isInputRange!R && is(ElementType!R == CramSlice))
 {
     private {
@@ -302,7 +302,7 @@ struct CramSliceDecoder(R)
 
         // FIXME: D arrays don't call element destructors when GC-d :(
         RoundBuf!CramSlice _input_queue;
-        alias DecodeTask = Task!(decodeSlice, 
+        alias DecodeTask = Task!(decodeSlice,
                                  cram_fd*, cram_container*, cram_slice*)*;
         RoundBuf!DecodeTask _output_queue;
 
@@ -315,7 +315,7 @@ struct CramSliceDecoder(R)
             cram_fd* fd = slice.fd;
             cram_container* c = slice.container;
             cram_slice* s = slice;
-            debug writeln("PUT slice #", s.id + 1, " into queue");
+            // debug writeln("PUT slice #", s.id + 1, " into queue");
             version (serial) {
                 decodeSlice(fd, c, s);
             } else {
@@ -356,13 +356,13 @@ struct CramSliceDecoder(R)
 
         front = _input_queue.front;
         _input_queue.popFront();
-        debug writeln("GET slice #", front.id + 1, " from queue");
+        // debug writeln("GET slice #", front.id + 1, " from queue");
         if (!_slices.empty)
             putNextSliceIntoQueue();
     }
 }
 
-auto decode(R)(R slices, std.parallelism.TaskPool pool) 
+auto decode(R)(R slices, std.parallelism.TaskPool pool)
     if(isInputRange!R && is(ElementType!R == CramSlice))
 {
     return CramSliceDecoder!R(slices, pool);
diff --git a/cram/writer.d b/cram/writer.d
index 0b7583d..6113db0 100644
--- a/cram/writer.d
+++ b/cram/writer.d
@@ -1,6 +1,6 @@
 module cram.writer;
 
-import std.stream;
+import undead.stream;
 import std.string;
 
 import cram.htslib;
@@ -84,7 +84,7 @@ final class CramWriter {
     }
 
     void writeRecord(R)(auto ref R read) if(isBamRead!R) {
-        // somewhat messy code because of the decision 
+        // somewhat messy code because of the decision
         // to use ending zero byte of qname for flags
         auto offset = 32 + read.name.length;
         ubyte old_byte = read.raw_data[offset];
diff --git a/gen_ldc_version_info.py b/gen_ldc_version_info.py
new file mode 100755
index 0000000..386103b
--- /dev/null
+++ b/gen_ldc_version_info.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+from __future__ import print_function
+import re, sys, subprocess
+
+if len(sys.argv) < 2:
+    print("Usage: {} <path to ldmd2 executable>".format(sys.argv[0]))
+    sys.exit(1)
+
+ldc = sys.argv[1].replace("ldmd2", "ldc2")
+ldc_output = subprocess.check_output([ldc, "-version"])
+version_re = r"""^.+\((?P<LDC>[^\)]+)\):\n\s*based on DMD (?P<DMD>\S+) and LLVM (?P<LLVM>\S+)\n(?:\s*built with (?P<BOOTSTRAP>.*)\n)?"""
+match = re.match(version_re, ldc_output, re.MULTILINE)
+
+if not match:
+    sys.exit("ERROR: failed to generated LDC version information")
+
+print("module utils.ldc_version_info_;")
+for component, version in match.groupdict().items():
+    if version is None:
+        version = "version not available"
+    print("immutable {}_VERSION_STRING = \"{}\";".format(component, version))
diff --git a/main.d b/main.d
index 8093d3c..5ac82d9 100644
--- a/main.d
+++ b/main.d
@@ -33,6 +33,7 @@ import sambamba.utils.common.ldc_gc_workaround;
 import utils.strip_bcf_header;
 import utils.lz4;
 import utils.version_ : VERSION;
+import utils.ldc_version_info_ : LDC_VERSION_STRING, DMD_VERSION_STRING, LLVM_VERSION_STRING, BOOTSTRAP_VERSION_STRING;
 
 import std.stdio;
 
@@ -50,6 +51,17 @@ void printUsage() {
     stderr.writeln();
 }
 
+void printVersion() {
+    stderr.writeln("sambamba " ~ VERSION);
+    stderr.writeln();
+    stderr.writeln("This version was built with:");
+    stderr.writeln("    LDC " ~ LDC_VERSION_STRING);
+    stderr.writeln("    using DMD " ~ DMD_VERSION_STRING);
+    stderr.writeln("    using LLVM " ~ LLVM_VERSION_STRING);
+    stderr.writeln("    bootstrapped with " ~ BOOTSTRAP_VERSION_STRING);
+    stderr.writeln();
+}
+
 int main(string[] args) {
     if (args.length == 1) {
         printUsage();
@@ -73,6 +85,7 @@ int main(string[] args) {
         case "fixbins":  return fixbins_main(_args);
         case "strip_bcf_header": return strip_bcf_header_main(_args);
         case "lz4compress": return lz4compress_main();
+        case "--version": printVersion(); return 0;
         default:
             printUsage();
             return 1;
diff --git a/man/README.md b/man/README.md
new file mode 100644
index 0000000..befdce8
--- /dev/null
+++ b/man/README.md
@@ -0,0 +1,18 @@
+# Guide for developers
+
+## Ronn
+
+All documentation is written in Markdown format with `.ronn` extension.
+
+[Ronn](https://rtomayko.github.io/ronn/) is a generator that can produce both man pages and HTML snippets. 
+You can install it as a Ruby gem or through your package manager (`ruby-ronn` on Ubuntu).
+
+During editing, it's very helpful to check that the output looks as intended;
+this is done by running `ronn -m <filename.ronn>`, which launches the man pager.
+
+## Generated files
+
+* Man pages, generated by `ronn -r`, currently go here, alongside `.ronn` files.
+* HTML snippets are generated by `ronn -f` and go into the `gh-pages` branch.
+The branch should be cloned to a separate directory on your machine,
+and the destination for the snippets (`.html_fragment` extension) is the `_includes` directory.
diff --git a/man/sambamba-index.1 b/man/sambamba-index.1
index 9918c9a..78377e0 100644
--- a/man/sambamba-index.1
+++ b/man/sambamba-index.1
@@ -1,13 +1,13 @@
 .\" generated with Ronn/v0.7.3
 .\" http://github.com/rtomayko/ronn/tree/0.7.3
 .
-.TH "SAMBAMBA\-INDEX" "1" "February 2015" "" ""
+.TH "SAMBAMBA\-INDEX" "1" "2016-10-24" "" ""
 .
 .SH "NAME"
 \fBsambamba\-index\fR \- tool for building standard index files for BAM data
 .
 .SH "SYNOPSIS"
-\fBsambamba index\fR [\fB\-p\fR|\fB\-\-show\-progress\fR] [\fB\-n\fR|\fB\-\-threads\fR=NTHREADS] <input\.bam> [<output\.bai>]
+\fBsambamba index\fR [\fB\-p\fR|\fB\-\-show\-progress\fR] [\fB\-t\fR|\fB\-\-nthreads\fR=NTHREADS] <input\.bam> [<output\.bai>]
 .
 .SH "DESCRIPTION"
 \fBsambamba index\fR builds an index for a coordinate\-sorted BAM file\. This step is required for effective region querying in most tools for working with BAM data\.
@@ -28,26 +28,14 @@ Number of threads to use\.
 .SH "EXAMPLES"
 Build index file example\.bam\.bai given a sorted BAM file example\.bam:
 .
-.IP "" 4
-.
-.nf
-
+.P
 $ sambamba index example\.bam
 .
-.fi
-.
-.IP "" 0
-.
 .P
 Build index file at custom location showing progress:
 .
-.IP "" 4
-.
-.nf
-
+.P
 $ sambamba index \-\-show\-progress example\.bam /tmp/example\.bam\.bai
 .
-.fi
-.
-.IP "" 0
-For more information on the original samtools INDEX behaviour, check out the [samtools documentation](http://samtools\.sourceforge\.net/samtools\.shtml
+.SH "SEE ALSO"
+For more information on the original samtools INDEX behaviour, check out the samtools documentation \fIhttp://samtools\.sourceforge\.net/samtools\.shtml\fR
diff --git a/man/sambamba-index.1.ronn b/man/sambamba-index.1.ronn
index 625d09b..7cc2c2c 100644
--- a/man/sambamba-index.1.ronn
+++ b/man/sambamba-index.1.ronn
@@ -3,7 +3,7 @@ sambamba-index(1) -- tool for building standard index files for BAM data
 
 ## SYNOPSIS
 
-`sambamba index` [`-p`|`--show-progress`] [`-n`|`--threads`=NTHREADS] <input.bam> [<output.bai>]
+`sambamba index` [`-p`|`--show-progress`] [`-t`|`--nthreads`=NTHREADS] <input.bam> [<output.bai>]
 
 ## DESCRIPTION
 
@@ -28,14 +28,14 @@ argument.
 
 Build index file example.bam.bai given a sorted BAM file example.bam:
 
-    $ sambamba index example.bam
+  $ sambamba index example.bam
 
 Build index file at custom location showing progress:
 
-    $ sambamba index --show-progress example.bam /tmp/example.bam.bai
+  $ sambamba index --show-progress example.bam /tmp/example.bam.bai
 
-# SEE ALSO
+## SEE ALSO
 
 For more information on the original samtools INDEX behaviour, check
-out the [samtools documentation](http://samtools.sourceforge.net/samtools.shtml
+out the [samtools documentation](http://samtools.sourceforge.net/samtools.shtml)
 
diff --git a/sambamba-ldmd-debug.rsp b/sambamba-ldmd-debug.rsp
index 31e9f0a..0a87fa8 100644
--- a/sambamba-ldmd-debug.rsp
+++ b/sambamba-ldmd-debug.rsp
@@ -1 +1 @@
-"-g" "-c" "-m64" "-noboundscheck" "-IBioD/" "-ofbuild/sambamba.o" "-odbuild" "-I." "main.d" "BioD/bio/bam/baifile.d" "sambamba/depth.d" "BioD/bio/core/utils/switchendianness.d" "sambamba/utils/common/readstorage.d" "BioD/bio/core/utils/tmpfile.d" "sambamba/utils/common/bed.d" "BioD/bio/bam/utils/samheadermerger.d" "thirdparty/mergesort.d" "BioD/bio/bam/readrange.d" "cram/exception.d" "sambamba/utils/view/headerserializer.d" "BioD/bio/bam/splitter.d" "cram/htslib.d" "BioD/bio/core/utils/r [...]
+"-g" "-c" "-m64" "-noboundscheck" "-IBioD/" "-ofbuild/sambamba.o" "-odbuild" "-I." "main.d" "BioD/bio/bam/baifile.d" "sambamba/depth.d" "BioD/bio/core/utils/switchendianness.d" "sambamba/utils/common/readstorage.d" "BioD/bio/core/utils/tmpfile.d" "sambamba/utils/common/bed.d" "BioD/bio/bam/utils/samheadermerger.d" "thirdparty/mergesort.d" "BioD/bio/bam/readrange.d" "cram/exception.d" "sambamba/utils/view/headerserializer.d" "BioD/bio/bam/splitter.d" "cram/htslib.d" "BioD/bio/core/utils/r [...]
diff --git a/sambamba-ldmd-release.rsp b/sambamba-ldmd-release.rsp
index 8a2e180..89b6d79 100644
--- a/sambamba-ldmd-release.rsp
+++ b/sambamba-ldmd-release.rsp
@@ -1 +1 @@
-"-O2" "-c" "-m64" "-noboundscheck" "-release" "-inline" "-IBioD/" "-ofbuild/sambamba.o" "-odbuild" "-I." "main.d" "BioD/bio/bam/baifile.d" "sambamba/depth.d" "BioD/bio/core/utils/switchendianness.d" "sambamba/utils/common/readstorage.d" "BioD/bio/core/utils/tmpfile.d" "sambamba/utils/common/bed.d" "BioD/bio/bam/utils/samheadermerger.d" "thirdparty/mergesort.d" "BioD/bio/bam/readrange.d" "cram/exception.d" "sambamba/utils/view/headerserializer.d" "BioD/bio/bam/splitter.d" "cram/htslib.d"  [...]
+"-g" "-O2" "-c" "-m64" "-release" "-IBioD/" "-IundeaD/src/" "-ofbuild/sambamba.o" "-odbuild" "-I." "main.d" "BioD/bio/bam/baifile.d" "sambamba/depth.d" "BioD/bio/core/utils/switchendianness.d" "sambamba/utils/common/readstorage.d" "BioD/bio/core/utils/tmpfile.d" "sambamba/utils/common/bed.d" "BioD/bio/bam/utils/samheadermerger.d" "thirdparty/mergesort.d" "BioD/bio/bam/readrange.d" "cram/exception.d" "sambamba/utils/view/headerserializer.d" "BioD/bio/bam/splitter.d" "cram/htslib.d" "BioD/ [...]
diff --git a/sambamba/depth.d b/sambamba/depth.d
index e6bd6d5..8c5dc0c 100644
--- a/sambamba/depth.d
+++ b/sambamba/depth.d
@@ -1202,6 +1202,8 @@ int depth_main(string[] args) {
                 printer.raw_bed_lines = [s];
                 printer.setBed(bed);
             }
+            // the following line breaks in debug mode on a BAM file not sorted by sambamba
+            // because of https://github.com/biod/BioD/blob/1248586b54af4bd4dfb28ebfebfc6bf012e7a587/bio/bam/read.d#L1761
             reads = inputRangeObject(bam.getReadsOverlapping(bed)
                     .map!(r => CustomBamRead(r, rg2id)));
         } else {
@@ -1236,6 +1238,4 @@ int depth_main(string[] args) {
         }
         return 1;
     }
-
-    return 0;
 }
diff --git a/sambamba/index.d b/sambamba/index.d
index bfc2965..0879a42 100644
--- a/sambamba/index.d
+++ b/sambamba/index.d
@@ -20,7 +20,7 @@
 module sambamba.index;
 
 import std.stdio;
-import std.stream;
+import undead.stream;
 import std.range;
 import std.parallelism;
 import std.getopt;
diff --git a/sambamba/markdup.d b/sambamba/markdup.d
index 028e88a..8630644 100644
--- a/sambamba/markdup.d
+++ b/sambamba/markdup.d
@@ -34,7 +34,7 @@ import bio.bam.reader, bio.bam.readrange, bio.bam.writer, bio.bam.referenceinfo,
        bio.bam.multireader;
 import std.traits, std.typecons, std.range, std.algorithm, std.parallelism,
        std.exception, std.file, std.typetuple, std.conv, std.array, std.bitmanip,
-       std.c.stdlib, std.datetime, std.stream : BufferedFile, FileMode;
+       std.c.stdlib, std.datetime, undead.stream : BufferedFile, FileMode;
 
 /// Read + its index (0-based)
 struct IndexedBamRead {
diff --git a/sambamba/merge.d b/sambamba/merge.d
index 6cfad25..d01634d 100644
--- a/sambamba/merge.d
+++ b/sambamba/merge.d
@@ -85,7 +85,7 @@ import std.typecons;
 import std.traits;
 import std.numeric;
 import std.parallelism;
-import std.stream;
+import undead.stream;
 import std.getopt;
 
 import core.atomic;
@@ -386,7 +386,7 @@ int merge_main(string[] args) {
                         writer.writeRecord(read);
                     break;
                 case SortingOrder.coordinate:
-                    foreach (read; nWayUnion!compareCoordinates(modifiedranges))
+                    foreach (read; nWayUnion!compareCoordinatesAndStrand(modifiedranges))
                         writer.writeRecord(read);
                     break;
                 default: assert(0);
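
The switch from compareCoordinates to compareCoordinatesAndStrand (here and in
sambamba/sort.d below) makes ties at the same position deterministic by also
considering strand. The actual comparator lives in BioD; the following is only a
minimal standalone sketch of such an ordering, assuming forward-strand reads sort
before reverse-strand ones.

```d
// Sketch only, not BioD's implementation: order by reference, then position,
// then strand (forward before reverse is an assumption for illustration).
struct Rec { int ref_id; int pos; bool is_reverse; }

bool coordAndStrandLess(Rec a, Rec b) {
    if (a.ref_id != b.ref_id) return a.ref_id < b.ref_id;
    if (a.pos != b.pos) return a.pos < b.pos;
    return !a.is_reverse && b.is_reverse;
}

unittest {
    // same coordinate: the forward-strand read comes first
    assert(coordAndStrandLess(Rec(0, 100, false), Rec(0, 100, true)));
    assert(!coordAndStrandLess(Rec(0, 100, true), Rec(0, 100, false)));
}
```
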
diff --git a/sambamba/pileup.d b/sambamba/pileup.d
index aed9a40..1d52430 100644
--- a/sambamba/pileup.d
+++ b/sambamba/pileup.d
@@ -40,17 +40,19 @@ import bio.core.utils.stream;
 import std.process;
 import std.stdio;
 import std.parallelism;
-import std.file : rmdirRecurse;
+import std.file : rmdirRecurse, exists;
 import std.algorithm;
 import std.array;
+import std.exception;
 import std.getopt;
 import std.string : strip, indexOf, toStringz;
-import std.c.stdlib;
+import core.stdc.stdlib;
 import std.typecons;
-import std.stream;
+import undead.stream;
 import std.range;
 import std.algorithm;
 import std.path;
+import std.regex;
 import std.traits;
 import std.typecons;
 import std.conv;
@@ -65,7 +67,8 @@ import core.stdc.errno;
 extern(C) char* mkdtemp(char* template_);
 extern(C) int mkfifo(immutable(char)* fn, int mode);
 
-string samtoolsBin     = null;  // cached path to samtools binary
+// Cached values
+string samtoolsBin     = null;
 string samtoolsVersion = null;
 string bcftoolsBin     = null;
 string bcftoolsVersion = null;
@@ -73,44 +76,46 @@ string bcftoolsVersion = null;
 // Return path to samtools after testing whether it exists and supports mpileup
 auto samtoolsInfo()
 {
-  if (samtoolsBin == null) {
+  if (samtoolsBin is null) {
     auto paths = environment["PATH"].split(":");
-    auto a = array(filter!(path => std.file.exists(path ~ "/samtools"))(paths));
-    if (a.length == 0)
-      throw new Exception("failed to locate samtools executable in PATH");
+    auto a = array(filter!(path => exists(path ~ "/samtools"))(paths));
+    enforce(!a.empty, "failed to locate samtools executable in PATH");
     samtoolsBin = a[0] ~ "/samtools";
-    // we found the path, now test the binary
+  }
+  enforce(exists(samtoolsBin), samtoolsBin ~ " is invalid");
+  if (samtoolsVersion is null) {
     auto samtools = execute([samtoolsBin]);
-    if (samtools.status != 1)
-      throw new Exception("samtools failed: ", samtools.output);
+    enforce(samtools.status==1, "samtools failed: " ~ samtools.output);
     samtoolsVersion = samtools.output.split("\n")[2];
-    if (samtoolsVersion.startsWith("Version: 0."))
-      throw new Exception("versions 0.* of samtools/bcftools are unsupported");
+    enforce(samtoolsVersion.startsWith("Version: 1."), "version " ~ samtoolsVersion ~ " of samtools is unsupported");
   }
   return [samtoolsBin, samtoolsVersion];
 }
 
 auto samtoolsPath() { return samtoolsInfo()[0]; }
 
-auto bcftoolsPath()
+auto bcftoolsInfo()
 {
-  if (bcftoolsBin == null) {
+  if (bcftoolsBin is null) {
     auto paths = environment["PATH"].split(":");
-    auto a = array(filter!(path => std.file.exists(path ~ "/bcftools"))(paths));
-    if (a.length == 0)
-      throw new Exception("failed to locate bcftools executable in PATH");
+    auto a = array(filter!(path => exists(path ~ "/bcftools"))(paths));
+    enforce(!a.empty, "failed to locate bcftools executable in PATH");
     bcftoolsBin = a[0] ~ "/bcftools";
-    // we found the path, now test the binary
+  }
+  enforce(exists(bcftoolsBin), bcftoolsBin ~ " is invalid");
+  if (bcftoolsVersion is null) {
     auto bcftools = execute([bcftoolsBin]);
-    if (bcftools.status != 1)
-      throw new Exception("bcftools failed: ", bcftools.output);
-    bcftoolsVersion = bcftools.output.split("\n")[2];
-    if (bcftoolsVersion.startsWith("Version: 0."))
-      throw new Exception("versions 0.* of samtools/bcftools are unsupported");
+    enforce(bcftools.status == 1, "bcftools failed: " ~ bcftools.output);
+    auto r = regex(r"Version: 1\.\d\.\d[^\n]+");
+    enforce(matchFirst(bcftools.output, r), "Cannot find version in " ~ bcftools.output);
+    bcftoolsVersion = matchFirst(bcftools.output, r).hit;
+    enforce(bcftoolsVersion.startsWith("Version: 1."), "version " ~ bcftoolsVersion ~ " of bcftools is unsupported");
   }
   return [bcftoolsBin, bcftoolsVersion];
 }
 
+auto bcftoolsPath() { return bcftoolsInfo()[0]; }
+
 void makeFifo(string filename) {
     auto s = toStringz(filename);
     int ret = mkfifo(s, octal!"666");
@@ -163,17 +168,17 @@ private {
 void init() {
     lz4decompressor = new LZ4Decompressor();
 
-    recipes[FileFormat.pileup] =          Recipe(this_app~" strip_bcf_header --vcf",
-                                                 this_app~" lz4compress",
+    recipes[FileFormat.pileup] =          Recipe(this_app ~ " strip_bcf_header --vcf",
+                                                 this_app ~ " lz4compress",
                                                  &lz4decompress);
-    recipes[FileFormat.BCF] =             Recipe(this_app~" strip_bcf_header --bcf",
+    recipes[FileFormat.BCF] =             Recipe(this_app ~ " strip_bcf_header --bcf",
                                                  null,
                                                  &dump);
-    recipes[FileFormat.uncompressedBCF] = Recipe(this_app~" strip_bcf_header --ubcf",
-                                                 this_app~" lz4compress",
+    recipes[FileFormat.uncompressedBCF] = Recipe(this_app ~ " strip_bcf_header --ubcf",
+                                                 this_app ~ " lz4compress",
                                                  &lz4decompress);
-    recipes[FileFormat.VCF] =             Recipe(this_app~" strip_bcf_header --vcf",
-                                                 this_app~" lz4compress",
+    recipes[FileFormat.VCF] =             Recipe(this_app ~ " strip_bcf_header --vcf",
+                                                 this_app ~ " lz4compress",
                                                  &lz4decompress);
 }
 
@@ -197,14 +202,14 @@ struct Args {
     string[] bcftools_args;
     FileFormat input_format;
 
-    this(string[] samtools_args_, string[] bcftools_args_) {
+    this(string[] samtools_args_, bool use_bcftools, string[] bcftools_args_) {
         samtools_args = unbundle(samtools_args_);
         bcftools_args = unbundle(bcftools_args_, "O"); // keep -Ov|-Ob|...
-        auto samtools_output_fmt = fixSamtoolsArgs(samtools_args, !bcftools_args.empty);
-        auto bcftools_output_fmt = fixBcftoolsArgs(bcftools_args);
+        auto samtools_output_fmt = fixSamtoolsArgs(use_bcftools, samtools_args);
+        auto bcftools_output_fmt = fixBcftoolsArgs(use_bcftools, bcftools_args, samtools_args);
 
         input_format = samtools_output_fmt;
-        if (bcftools_args.length > 0)
+        if (use_bcftools)
             input_format = bcftools_output_fmt;
     }
 
@@ -214,7 +219,7 @@ struct Args {
         auto samtools_cmd = (basic_args ~ samtools_args).join(" ");
         string cmd = samtools_cmd;
         if (bcftools_args.length > 0) {
-            auto bcftools_cmd = bcftoolsPath()[0] ~ " " ~ bcftools_args.join(" ");
+            auto bcftools_cmd = bcftoolsPath() ~ " " ~ bcftools_args.join(" ");
             cmd = samtools_cmd ~ " | " ~ bcftools_cmd;
         }
 
@@ -257,7 +262,7 @@ string[] unbundle(string[] args, string exclude="") {
 
 // input: unbundled samtools arguments
 // output: detected output format
-FileFormat fixSamtoolsArgs(ref string[] args, bool use_caller) {
+FileFormat fixSamtoolsArgs(bool use_bcftools, ref string[] args) {
     bool vcf = false;
     bool bcf = false;
     bool uncompressed = false;
@@ -269,7 +274,7 @@ FileFormat fixSamtoolsArgs(ref string[] args, bool use_caller) {
         if (args[i] == "-g") {
             bcf = true; keep ~= true;
         } else if (args[i] == "-v") {
-            vcf = true; keep ~= !use_caller;
+            vcf = true; keep ~= !use_bcftools;
         } else if (args[i] == "-u") {
             bcf = true; uncompressed = true; keep ~= true;
         } else {
@@ -283,18 +288,19 @@ FileFormat fixSamtoolsArgs(ref string[] args, bool use_caller) {
             fixed_args ~= args[i];
     }
 
+    // When bcftools is used, make sure samtools emits uncompressed BCF (-g -u)
     bool fixes_applied;
-    if (vcf && use_caller) {
+    if (vcf && use_bcftools) {
         fixed_args ~= ["-g", "-u"];
         fixes_applied = true;
-    } else if (bcf && use_caller && !uncompressed) {
+    } else if (bcf && use_bcftools && !uncompressed) {
         fixed_args ~= "-u";
         fixes_applied = true;
     }
 
     args = fixed_args;
 
-    if (fixes_applied && use_caller) {
+    if (fixes_applied && use_bcftools) {
         stderr.writeln("NOTE: changed samtools output format to uncompressed BCF for better performance (-gu)");
     }
 
@@ -316,7 +322,7 @@ FileFormat fixSamtoolsArgs(ref string[] args, bool use_caller) {
 
 // input: unbundled bcftools arguments
 // output: detected output format
-FileFormat fixBcftoolsArgs(ref string[] args) {
+FileFormat fixBcftoolsArgs(bool use_bcftools, ref string[] args, ref string[] samtools_args) {
     FileFormat fmt = FileFormat.VCF;
     bool[] keep;
     foreach (i; 0 .. args.length) {
@@ -328,7 +334,7 @@ FileFormat fixBcftoolsArgs(ref string[] args) {
         } else if (args[i] == "-Oz") {
             // TODO
             throw new Exception("compressed VCF is not supported, please use bgzip and uncompressed VCF");
-            fmt = FileFormat.gzippedVCF; keep ~= false;
+            // fmt = FileFormat.gzippedVCF; keep ~= false;
         } else if (args[i] == "-Ob") {
             fmt = FileFormat.BCF; keep ~= true;
         } else if (args[i] == "-Ou") {
@@ -343,7 +349,11 @@ FileFormat fixBcftoolsArgs(ref string[] args) {
         if (keep[i])
             fixed_args ~= args[i];
     }
-
+    // When bcftools is used without arguments, default to "view -" and make samtools emit uncompressed BCF
+    if (use_bcftools && fixed_args.empty) {
+      fixed_args = ["view", "-"];
+      samtools_args ~= [ "-g", "-u" ];
+    }
     args = fixed_args;
     return fmt;
 }
@@ -489,7 +499,7 @@ class ChunkDispatcher(ChunkRange) {
             }
             decompressIntoFile(result.data, format_, output_file_);
             stderr.writeln("[chunk dumped] ", result.num);
-            std.c.stdlib.free(result.data.ptr);
+            core.stdc.stdlib.free(result.data.ptr);
         }
     }
 
@@ -542,7 +552,7 @@ void worker(Dispatcher)(Dispatcher d,
 
         size_t capa = 1_024_576;
         size_t used = 0;
-        char* output = cast(char*)std.c.stdlib.malloc(capa);
+        char* output = cast(char*)core.stdc.stdlib.malloc(capa);
 
         char[4096] buffer = void;
         while (true) {
@@ -551,7 +561,7 @@ void worker(Dispatcher)(Dispatcher d,
                 break;
             if (used + buf.length > capa) {
                 capa = max(capa * 2, used + buf.length);
-                output = cast(char*)std.c.stdlib.realloc(cast(void*)output, capa);
+                output = cast(char*)core.stdc.stdlib.realloc(cast(void*)output, capa);
                 if (output is null)
                     throw new Exception("failed to allocate " ~ capa.to!string ~ " bytes");
             }
@@ -577,7 +587,9 @@ void printUsage() {
     stderr.writeln("                       [--samtools <samtools mpileup args>]");
     stderr.writeln("                       [--bcftools <bcftools call args>]");
     stderr.writeln();
-    stderr.writeln("This subcommand relies on external tools and acts as a multi-core implementation of samtools and bcftools.");
+    stderr.writeln("This subcommand relies on external tools and acts as a multi-core");
+    stderr.writeln("implementation of samtools and bcftools.");
+    stderr.writeln();
     stderr.writeln("Therefore, the following tools should be present in $PATH:");
     stderr.writeln("    * samtools");
     stderr.writeln("    * bcftools (when used)");
@@ -610,6 +622,12 @@ void printUsage() {
     stderr.writeln("                    chunk size (in bytes)");
     stderr.writeln("         -B, --output-buffer-size=512_000_000");
     stderr.writeln("                    output buffer size (in bytes)");
+    stderr.writeln();
+    stderr.writeln("Sambamba paths:\n");
+    samtoolsInfo();
+    stderr.writeln("         samtools: ", samtoolsBin, " ", samtoolsVersion);
+    bcftoolsInfo();
+    stderr.writeln("         bcftools: ", bcftoolsBin, " ", bcftoolsVersion);
 }
 
 version(standalone) {
@@ -627,21 +645,21 @@ int pileup_main(string[] args) {
     init();
 
     auto bcftools_args = find(args, "--bcftools");
-    auto args1 = (bcftools_args.length>0 ? args[0 .. $-bcftools_args.length] : args );
+    immutable use_bcftools = !bcftools_args.empty;
+    auto args1 = (use_bcftools ? args[0 .. $-bcftools_args.length] : args );
     auto samtools_args = find(args1, "--samtools");
     auto own_args = (samtools_args.length>0 ? args1[0 .. $-samtools_args.length] : args1 );
 
     if (!samtools_args.empty) {
-        samtools_args.popFront();
+        samtools_args.popFront(); // remove --samtools switch
     } else {
         samtools_args = [];
     }
 
-    if (!bcftools_args.empty) {
+    if (use_bcftools) {
         bcftools_args.popFront(); // remove the switch --bcftools
     }
 
-    //string query;
     uint n_threads = defaultPoolThreads;
     std.stdio.File output_file = stdout;
     size_t buffer_size = 64_000_000;
@@ -665,8 +683,12 @@ int pileup_main(string[] args) {
             return 0;
         }
 
-        stderr.writeln("samtools mpileup options: ",samtools_args.join(" "));
-        if (bcftools_args.length>0)
+        samtoolsInfo();      // initialize samtools path before threading
+        if (use_bcftools)
+            bcftoolsInfo();  // initialize bcftools path before threading
+
+        stderr.writeln("samtools mpileup options: ", samtools_args.join(" "));
+        if (use_bcftools)
             stderr.writeln("bcftools options: ", bcftools_args.join(" "));
 
         if (output_filename != null) {
@@ -681,7 +703,7 @@ int pileup_main(string[] args) {
         string tmp_dir = randomSubdir(tmp_dir_prefix);
         scope(exit) rmdirRecurse(tmp_dir);
 
-        auto bundled_args = Args(samtools_args, bcftools_args);
+        auto bundled_args = Args(samtools_args, use_bcftools, bcftools_args);
 
         InputRange!BamRead reads;
         if (bed_filename is null) {
@@ -715,11 +737,15 @@ int pileup_main(string[] args) {
     } catch (Exception e) {
         stderr.writeln("sambamba-pileup: ", e.msg);
 
-        version(development) {
+        debug {
+          throw e;
+        }
+        else {
+          version(development) {
             throw e;
+          }
+          return 1;
         }
-
-        return 1;
     }
 
     return 0;
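
The version probe added to bcftoolsInfo() above extracts the "Version:" line from the
tool's usage output with std.regex. A standalone check of that pattern follows; the
sample output string is made up for illustration.

```d
// Standalone check of the version regex used in bcftoolsInfo(); the sample
// bcftools output is illustrative only.
import std.regex : regex, matchFirst;

unittest {
    auto sample = "Program: bcftools\nVersion: 1.3.1 (using htslib 1.3.1)\nUsage: ...";
    auto r = regex(r"Version: 1\.\d\.\d[^\n]+");
    auto m = matchFirst(sample, r);
    assert(!m.empty);
    assert(m.hit == "Version: 1.3.1 (using htslib 1.3.1)");
}
```
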
diff --git a/sambamba/slice.d b/sambamba/slice.d
index bc55ccd..d22a3e9 100644
--- a/sambamba/slice.d
+++ b/sambamba/slice.d
@@ -28,7 +28,7 @@ import bio.core.utils.stream;
 import bio.core.region;
 
 import std.array;
-import std.stream;
+import undead.stream;
 import std.getopt;
 import std.parallelism;
 import std.conv;
@@ -47,15 +47,15 @@ import sambamba.utils.common.overwrite;
         s2_start_offset = virtual offset of the first read whose position >= beg
         s2_end_offset = virtual offset of the first read whose position >= end
 
-                /\/\/\/\/\/\/\/\/\/\/\/\/#########==============  ...  =============#####      
+                /\/\/\/\/\/\/\/\/\/\/\/\/#########==============  ...  =============#####
 BGZF blocks   ..........)[...........)[..........)[..........)[.  ...  ....)[.........)[.......
-                .                        .                                               .     
+                .                        .                                               .
                 s1_start_offset          s2_start_offset                          s2_end_offset
-                                                                                               
-                /\/\/\/\/\/\/\/\/\/\/\/\/######                   ...                          
+
+                /\/\/\/\/\/\/\/\/\/\/\/\/######                   ...
          ...)[..........)[...........)[..........)[..........)[.  ...  ....)[.........)[.......
-                .                        .     .                                               
-                s1_start_offset   s2_start_offset, s2_end_offset                               
+                .                        .     .
+                s1_start_offset   s2_start_offset, s2_end_offset
 
     These numbers are not correctly defined in some cases, so let's extend their
     definitions.
@@ -76,7 +76,7 @@ BGZF blocks   ..........)[...........)[..........)[..........)[.  ...  ....)[...
     Now we divide the algorithm into subcases.
 
     1) Both R1 and R2 are empty.
-        
+
         Output BAM file with no reads.
 
     2) R1 is not empty, R2 is empty.
@@ -91,7 +91,7 @@ BGZF blocks   ..........)[...........)[..........)[..........)[.  ...  ....)[...
         from the left, and output it. Set start_offset to the start file offset of the next BGZF block.
         Take last read from R2. Adjust its last BGZF block by chomping everything after the end of
         the alignment record. Set end_offset to the start file offset of this BGZF block.
-        
+
         Output first adjusted block, then copy of file since start_offset till end_offset, then
         second adjusted block.
 
@@ -147,7 +147,7 @@ void fetchRegion(BamReader bam, Region region, ref Stream stream)
     if (reads1.empty && reads2.empty) {
         // are there any reads with position >= beg?
         s2_end_offset = s2_start_offset;
-    } else if (!reads1.empty && reads2.empty) { 
+    } else if (!reads1.empty && reads2.empty) {
         // are there any reads with position >= end?
         s2_end_offset = bam[chr].endVirtualOffset();
     } else {
@@ -181,7 +181,7 @@ void fetchUnmapped(BamReader bam, Stream stream) {
 
     auto unmapped_reads = bam.unmappedReads();
     if (!unmapped_reads.empty) {
-        copyAsIs(bam, stream, 
+        copyAsIs(bam, stream,
                  unmapped_reads.front.start_virtual_offset,
                  bam.eofVirtualOffset());
     }
@@ -194,12 +194,12 @@ version (Linux) {
     extern(C) int posix_fadvise(int, off_t, off_t, int);
 }
 
-void copyAsIs(BamReader bam, Stream stream, 
+void copyAsIs(BamReader bam, Stream stream,
               VirtualOffset s2_start_offset, VirtualOffset s2_end_offset)
 {
     // R2 is non-empty
     if (s2_start_offset < s2_end_offset) {
-       
+
         // Either R2 is fully contained in one BGZF block...
         if (s2_start_offset.coffset == s2_end_offset.coffset) {
             // write chomped block
@@ -302,7 +302,7 @@ int slice_main(string[] args) {
         scope(exit) stream.close();
 
         if (output_filename != null) {
-            stream = new std.stream.BufferedFile(output_filename, FileMode.OutNew);
+            stream = new undead.stream.BufferedFile(output_filename, FileMode.OutNew);
         } else {
             immutable BUFSIZE = 1_048_576;
             version (Posix) {
@@ -312,7 +312,7 @@ int slice_main(string[] args) {
                 import core.sys.windows.windows;
                 auto handle = GetStdHandle(STD_OUTPUT_HANDLE);
             }
-            stream = new std.stream.BufferedFile(handle, FileMode.Out, BUFSIZE);
+            stream = new undead.stream.BufferedFile(handle, FileMode.Out, BUFSIZE);
         }
 
         if (args[2] == "*") {
diff --git a/sambamba/sort.d b/sambamba/sort.d
index 4eed5f7..d83a93b 100644
--- a/sambamba/sort.d
+++ b/sambamba/sort.d
@@ -37,7 +37,7 @@ import std.parallelism;
 import std.getopt;
 import std.path;
 import std.file;
-import std.stream;
+import undead.stream;
 import std.stdio;
 import std.typecons;
 import core.atomic;
@@ -146,7 +146,7 @@ class Sorter {
                 auto len = read.raw_data.length;
                 if (len + _used > max_sz)
                     break;
-                
+
                 if (_n_reads == _reads_capa) {
                     auto realloc_reads = cast(BamRead*)std.c.stdlib.realloc(_reads, 2 * _reads_capa * BamRead.sizeof);
                     if (realloc_reads is null) {
@@ -217,7 +217,7 @@ class Sorter {
         } else if (natural_sort) {
             mergeSort!(mixedCompareReadNames, false)(chunk, task_pool, tmp);
         } else {
-            mergeSort!(compareCoordinates, false)(chunk, task_pool, tmp);
+            mergeSort!(compareCoordinatesAndStrand, false)(chunk, task_pool, tmp);
         }
         version (development) {
         stderr.writeln("Finished sorting of chunk #", n, " in ", sw.peek().seconds, "s");
@@ -266,7 +266,7 @@ class Sorter {
         else if (natural_sort)
             mergeSortedChunks!mixedCompareReadNames();
         else
-            mergeSortedChunks!compareCoordinates();
+            mergeSortedChunks!compareCoordinatesAndStrand();
     }
 
     private void createHeader() {
@@ -356,7 +356,7 @@ class Sorter {
 
         string fn;
 
-        if (k <= 1) { 
+        if (k <= 1) {
             level = compression_level;
             fn = output_filename;
         } else {
@@ -405,7 +405,7 @@ class Sorter {
 
         auto input_buf_size = min(16_000_000, memory_limit / 4 / num_of_chunks);
         auto output_buf_size = min(64_000_000, memory_limit / 6);
-        auto stream = bufferedFile(output_filename, FileMode.OutNew, 
+        auto stream = bufferedFile(output_filename, FileMode.OutNew,
                                    output_buf_size);
         scope(failure) stream.close();
 
@@ -435,8 +435,8 @@ class Sorter {
                 alignmentranges[i] = bamfile.readsWithProgress(
                         // WTF is going on here? See this thread:
                         // http://forum.dlang.org/thread/mailman.112.1341467786.31962.digitalmars-d@puremagic.com
-                        (size_t j) { 
-                        return (lazy float progress) { 
+                        (size_t j) {
+                        return (lazy float progress) {
                         atomicStore(merging_progress[j], progress);
                         synchronized (bar) {
                         bar.update(dotProduct(merging_progress, weights));
@@ -582,7 +582,7 @@ size_t parseMemory(string str) {
 /// Params:
 ///     unsorted_fn - filename of unsorted BAM
 ///     chunk_num   - 0-based index of the chunk
-///                                               
+///
 string chunkBaseName(string unsorted_fn, size_t chunk_num) {
     return baseName(unsorted_fn) ~ "." ~ to!string(chunk_num);
 }
diff --git a/sambamba/utils/common/bed.d b/sambamba/utils/common/bed.d
index bb6835a..3e5009f 100644
--- a/sambamba/utils/common/bed.d
+++ b/sambamba/utils/common/bed.d
@@ -23,6 +23,7 @@ import std.stdio;
 import std.algorithm;
 import std.string;
 import std.conv;
+import std.file;
 import std.math;
 import std.array;
 import std.range;
@@ -58,7 +59,7 @@ alias Interval[][string] BedIndex;
 BedIndex readIntervals(string bed_filename, bool non_overlapping=true, string[]* lines=null, Tuple!(string, Interval)[]* intervals=null) {
     BedIndex index;
 
-    auto bed = cast(string)(std.file.readText(bed_filename));
+    auto bed = cast(string)(readText(bed_filename));
     foreach (str; bed.splitter('\n')) {
         auto fields = split(str);
         if (fields.length < 2)
@@ -126,7 +127,7 @@ public import bio.bam.region;
 
 BamRegion[] parseBed(Reader)(string bed_filename, Reader bam, bool non_overlapping=true, string[]* bed_lines=null) {
     Tuple!(string, Interval)[] ivs;
-    auto index = sambamba.utils.common.bed.readIntervals(bed_filename, non_overlapping, bed_lines, &ivs);
+    auto index = readIntervals(bed_filename, non_overlapping, bed_lines, &ivs);
     BamRegion[] regions;
     if (non_overlapping) {
         foreach (reference, intervals; index) {
diff --git a/sambamba/utils/common/file.d b/sambamba/utils/common/file.d
index ac3418b..8ba3ef7 100644
--- a/sambamba/utils/common/file.d
+++ b/sambamba/utils/common/file.d
@@ -1,8 +1,8 @@
 module sambamba.utils.common.file;
-import std.stream;
+import undead.stream;
 import std.stdio;
 
-BufferedFile bufferedFile(string fn, std.stream.FileMode mode, size_t buffer_size=8192) {
+BufferedFile bufferedFile(string fn, undead.stream.FileMode mode, size_t buffer_size=8192) {
     if (fn == "-")
         return new BufferedFile(std.stdio.stdout.fileno, mode, buffer_size);
     else
diff --git a/sambamba/utils/common/filtering.d b/sambamba/utils/common/filtering.d
index d60e64d..58f9bb8 100644
--- a/sambamba/utils/common/filtering.d
+++ b/sambamba/utils/common/filtering.d
@@ -51,7 +51,7 @@ Filter createFilterFromQuery(string query) {
 }
 
 /**
-  Set of filters for alignments. 
+  Set of filters for alignments.
   All share a common interface and can be easily combined.
 */
 
@@ -76,7 +76,7 @@ final class NullFilter : Filter {
 
 /// Validating filter
 final class ValidAlignmentFilter : Filter {
-    
+
     bool accepts(ref BamRead a) {
         return isValid(a);
     }
@@ -173,6 +173,19 @@ final class ChimericFilter : Filter {
     }
 }
 
+final class FlagBitFilter : Filter {
+    private ushort _bits_set, _bits_unset;
+    this(ushort bits_set, ushort bits_unset) {
+        _bits_set = bits_set;
+        _bits_unset = bits_unset;
+    }
+
+    bool accepts(ref BamRead a) {
+        return ((a.flag & _bits_set) == _bits_set) &&
+               ((a.flag & _bits_unset) == 0);
+    }
+}
+
 float avg_base_quality(BamRead r) {
     return reduce!"a+b"(0.0f, r.base_qualities)/r.sequence_length;
 }
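
The new FlagBitFilter mirrors samtools' -f/-F semantics: a read is accepted only if every bit of bits_set is present in its FLAG and no bit of bits_unset is. A hedged usage sketch (module paths as in this tree; the wrapper function is hypothetical):

    import sambamba.utils.common.filtering;
    import bio.bam.read : BamRead;

    // Keep properly-paired reads (0x2) that are neither unmapped (0x4) nor
    // secondary (0x100) -- roughly `samtools view -f 2 -F 260`.
    bool keepProperPrimary(ref BamRead r) {
        auto filter = new FlagBitFilter(0x2, 0x4 | 0x100);
        return filter.accepts(r);
    }
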
@@ -209,9 +222,9 @@ final class TagExistenceFilter(string op) : Filter {
     }
     bool accepts(ref BamRead a) {
         auto v = a[_tagname];
-        if (_should_exist) 
+        if (_should_exist)
             return !v.is_nothing;
-        else 
+        else
             return v.is_nothing;
     }
 }
@@ -228,7 +241,7 @@ final class IntegerTagFilter(string op) : Filter {
 
     bool accepts(ref BamRead a) {
         auto v = a[_tagname];
-        if (!v.is_integer && !v.is_float) 
+        if (!v.is_integer && !v.is_float)
             return false;
         if (v.is_float) {
             mixin(`return cast(float)v` ~ op ~ `_value;`);
@@ -287,15 +300,17 @@ final class StringTagFilter(string op) : Filter {
 final class RegexpFieldFilter : Filter {
     private string _fieldname;
     private Regex!char _pattern;
-    
+
     this(string fieldname, Regex!char pattern) {
-        _fieldname = fieldname; 
+        _fieldname = fieldname;
         _pattern = pattern;
     }
 
     bool accepts(ref BamRead a) {
         switch(_fieldname) {
             case "read_name": return !match(a.name, cast()_pattern).empty;
+            case "ref_name": return !match(a.ref_name, cast()_pattern).empty;
+            case "mate_ref_name": return !match(a.mate_ref_name, cast()_pattern).empty;
             case "sequence": return !match(to!string(a.sequence), cast()_pattern).empty;
             case "cigar": return !match(a.cigarString(), cast()_pattern).empty;
             default: throw new Exception("unknown string field '" ~ _fieldname ~ "'");
@@ -304,10 +319,10 @@ final class RegexpFieldFilter : Filter {
 }
 
 /// Filtering string tags with a regular expression
-final class RegexpTagFilter : Filter { 
+final class RegexpTagFilter : Filter {
     private string _tagname;
     private Regex!char _pattern;
-    
+
     this(string tagname, Regex!char pattern) {
         _tagname = tagname;
         _pattern = pattern;
@@ -325,7 +340,7 @@ final class RegexpTagFilter : Filter {
 final class SubsampleFilter : Filter {
     private ulong _threshold;
     private ulong _seed;
-    
+
     this(double subsample_frac, ulong seed) {
         _threshold = (0x100000000UL * subsample_frac).to!ulong;
         _seed = seed;
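
On the subsampling side, the constructor above maps the requested fraction onto a 32-bit cut-off: with 2^32 = 0x100000000, a fraction f becomes floor(f * 2^32), which presumably gets compared against a per-read 32-bit hash (the accepts body is not part of this hunk). A small sketch of just that arithmetic (the helper name is made up for illustration):

    import std.conv : to;

    ulong subsampleThreshold(double subsample_frac) {
        return (0x100000000UL * subsample_frac).to!ulong;
    }

    unittest {
        assert(subsampleThreshold(0.25) == 0x40000000);    // roughly every fourth read
        assert(subsampleThreshold(1.0) == 0x100000000UL);  // keep everything
    }
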
diff --git a/sambamba/utils/view/alignmentrangeprocessor.d b/sambamba/utils/view/alignmentrangeprocessor.d
index ff96219..12b090b 100644
--- a/sambamba/utils/view/alignmentrangeprocessor.d
+++ b/sambamba/utils/view/alignmentrangeprocessor.d
@@ -34,14 +34,14 @@ import std.range;
 import std.array;
 import std.format;
 import std.traits;
-import std.stream : Stream, BufferedFile, FileMode;
+import undead.stream : Stream, BufferedFile, FileMode;
 import std.conv;
 import std.algorithm;
 import std.parallelism;
 
 class ReadCounter {
     size_t number_of_reads;
-    
+
     void process(R, SB)(R reads, SB bam) {
         number_of_reads = walkLength(reads);
     }
@@ -147,7 +147,7 @@ final class BamSerializer {
 
     enum is_serial = true;
 
-    void process(R, SB)(R reads, SB bam) 
+    void process(R, SB)(R reads, SB bam)
     {
         version (Posix) {
             auto handle = _f.fileno;
@@ -156,7 +156,7 @@ final class BamSerializer {
             import core.stdc.stdio : _fdToHandle;
             auto handle = _fdToHandle(_f.fileno);
         }
-        Stream output_stream = new BufferedFile(handle, FileMode.OutNew, 
+        Stream output_stream = new BufferedFile(handle, FileMode.OutNew,
                                                 BUFSIZE);
         auto writer = new BamWriter(output_stream, _level, _task_pool);
         writer.setFilename(output_filename);
diff --git a/sambamba/view.d b/sambamba/view.d
index 66ac917..f60357d 100644
--- a/sambamba/view.d
+++ b/sambamba/view.d
@@ -33,6 +33,7 @@ import sambamba.utils.common.progressbar;
 import sambamba.utils.view.alignmentrangeprocessor;
 import sambamba.utils.view.headerserializer;
 import sambamba.utils.common.bed;
+// import core.sys.posix.stdlib; // for exit
 
 import bio.core.utils.format;
 import utils.version_ : addPG;
@@ -54,6 +55,9 @@ void printUsage() {
     stderr.writeln();
     stderr.writeln("Options: -F, --filter=FILTER");
     stderr.writeln("                    set custom filter for alignments");
+    stderr.writeln("         --num-filter=NUMFILTER");
+    stderr.writeln("                    filter flag bits; 'i1/i2' corresponds to -f i1 -F i2 samtools arguments;");
+    stderr.writeln("                    either of the numbers can be omitted");
     stderr.writeln("         -f, --format=sam|bam|cram|json");
     stderr.writeln("                    specify which format to use for output (default is SAM)");
     stderr.writeln("         -h, --with-header");
@@ -112,6 +116,7 @@ void outputReferenceInfoJson(T)(T bam) {
 
 string format = "sam";
 string query;
+string numfilter;
 string ref_fn;
 bool with_header;
 bool header_only;
@@ -139,6 +144,14 @@ version(standalone) {
 }
 
 int view_main(string[] args) {
+    foreach(arg; args) {
+      if (arg == "--throw-error") {
+        // undocumented: can throw a null pointer exception for testing debugger(s)
+        char *p = null;
+        *p = 'X'; // force an exception
+      }
+    }
+
     n_threads = totalCPUs;
 
     subsampling_seed = unpredictableSeed;
@@ -152,6 +165,7 @@ int view_main(string[] args) {
         getopt(args,
                std.getopt.config.caseSensitive,
                "filter|F",            &query,
+               "num-filter",          &numfilter,
                "format|f",            &format,
                "with-header|h",       &with_header,
                "header|H",            &header_only,
@@ -249,6 +263,14 @@ int sambambaMain(T)(T _bam, TaskPool pool, string[] args)
         read_filter = new AndFilter(read_filter, new ValidAlignmentFilter());
     }
 
+    if (numfilter !is null) {
+        ushort i1, i2;
+        auto masks = numfilter.splitter("/").array();
+        if (masks.length > 0 && masks[0].length > 0) i1 = masks[0].to!ushort;
+        if (masks.length > 1 && masks[1].length > 0) i2 = masks[1].to!ushort;
+        read_filter = new AndFilter(read_filter, new FlagBitFilter(i1, i2));
+    }
+
     if (query !is null) {
         auto query_filter = createFilterFromQuery(query);
         if (query_filter is null)
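
The new --num-filter value 'i1/i2' is split on '/': i1 plays the role of samtools' -f mask, i2 of -F, and either side may be left empty; the resulting pair feeds the FlagBitFilter added in filtering.d above. A standalone sketch of the same parsing logic (the helper name is hypothetical):

    import std.algorithm : splitter;
    import std.array : array;
    import std.conv : to;
    import std.typecons : Tuple, tuple;

    // "2/260" -> (2, 260), "/4" -> (0, 4), "2" -> (2, 0)
    Tuple!(ushort, ushort) parseNumFilter(string numfilter) {
        ushort i1, i2;
        auto masks = numfilter.splitter("/").array();
        if (masks.length > 0 && masks[0].length > 0) i1 = masks[0].to!ushort;
        if (masks.length > 1 && masks[1].length > 0) i2 = masks[1].to!ushort;
        return tuple(i1, i2);
    }

    unittest {
        auto p = parseNumFilter("/4");
        assert(p[0] == 0 && p[1] == 4);
    }
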
diff --git a/utils/lz4.d b/utils/lz4.d
index bda43fc..faf06f1 100644
--- a/utils/lz4.d
+++ b/utils/lz4.d
@@ -18,7 +18,7 @@
 
 */
 module utils.lz4;
-import std.stdio, std.stream, std.conv;
+import std.stdio, undead.stream, std.conv;
 
 extern(C) {
   alias size_t LZ4F_errorCode_t;
@@ -279,7 +279,7 @@ class LZ4Decompressor {
     out_buff.length = 256 << 10;
   }
 
-  void decompress(std.stream.InputStream input_file,
+  void decompress(undead.stream.InputStream input_file,
                   std.stdio.File output_file) {
     size_t bytes_read, bytes_written;
 
diff --git a/utils/strip_bcf_header.d b/utils/strip_bcf_header.d
index 48a6a0f..e871933 100644
--- a/utils/strip_bcf_header.d
+++ b/utils/strip_bcf_header.d
@@ -25,7 +25,7 @@ void stripBcfHeader(File input_file, File output_file) {
   import bio.core.bgzf.constants;
   import bio.core.bgzf.block;
   import bio.core.bgzf.compress;
-  import std.cstream, std.exception;
+  import undead.cstream, std.exception;
 
   auto stream = new CFile(input_file.getFP(), FileMode.In);
   auto supplier = new StreamSupplier(stream);
diff --git a/utils/version_.d b/utils/version_.d
index 329f63d..9db2bb1 100644
--- a/utils/version_.d
+++ b/utils/version_.d
@@ -1,6 +1,6 @@
 module utils.version_;
 
-immutable string VERSION = "0.6.5";
+immutable string VERSION = "0.6.6";
 
 import bio.sam.header;
 import std.array : join;

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/sambamba.git


