[med-svn] [Git][med-team/python-dnaio][upstream] New upstream version 0.4

Steffen Möller gitlab at salsa.debian.org
Wed Oct 30 11:08:06 GMT 2019



Steffen Möller pushed to branch upstream at Debian Med / python-dnaio


Commits:
5add7f8d by Steffen Moeller at 2019-10-30T10:30:15Z
New upstream version 0.4
- - - - -


25 changed files:

- .editorconfig
- .gitattributes
- .gitignore
- .travis.yml
- README.md
- + pyproject.toml
- − setup.cfg
- setup.py
- src/dnaio/__init__.py
- src/dnaio/_core.pyx
- − src/dnaio/_version.py
- src/dnaio/chunks.py
- src/dnaio/writers.py
- + tests/data/simple.fasta.bz2
- + tests/data/simple.fasta.gz
- + tests/data/simple.fasta.xz
- + tests/data/simple.fastq.bz2
- + tests/data/simple.fastq.gz
- + tests/data/simple.fastq.xz
- − tests/test_api.py
- tests/test_chunks.py
- tests/test_internal.py
- + tests/test_open.py
- tox.ini
- − versioneer.py


Changes:

=====================================
.editorconfig
=====================================
@@ -1,4 +1,4 @@
-[*.py]
+[*.{py,pyx}]
 charset=utf-8
 end_of_line=lf
 insert_final_newline=true


=====================================
.gitattributes
=====================================
@@ -1,3 +1,2 @@
 *.fastq -crlf
 *.fasta -crlf
-src/dnaio/_version.py export-subst


=====================================
.gitignore
=====================================
@@ -9,3 +9,4 @@ __pycache__
 /src/*/*.so
 /src/*.egg-info/
 /.tox/
+/src/dnaio/_version.py


=====================================
.travis.yml
=====================================
@@ -1,5 +1,7 @@
-sudo: false
 language: python
+
+dist: xenial
+
 cache:
   directories:
     - $HOME/.cache/pip
@@ -8,13 +10,19 @@ python:
   - "3.4"
   - "3.5"
   - "3.6"
+  - "3.7"
   - "nightly"
 
 install:
+  - pip install --upgrade coverage codecov
   - pip install .[dev]
 
 script:
-  - pytest
+  - coverage run -m pytest
+
+after_success:
+  - coverage combine
+  - codecov
 
 env:
   global:
@@ -24,13 +32,7 @@ env:
 
 jobs:
   include:
-    - stage: test
-      python: "3.7"
-      sudo: true  # This may possibly be removed in the future
-      dist: xenial
-
     - stage: deploy
-      sudo: required
       services:
         - docker
       python: "3.6"


=====================================
README.md
=====================================
@@ -1,10 +1,11 @@
 [![Travis](https://travis-ci.org/marcelm/dnaio.svg?branch=master)](https://travis-ci.org/marcelm/dnaio)
 [![PyPI](https://img.shields.io/pypi/v/dnaio.svg?branch=master)](https://pypi.python.org/pypi/dnaio)
+[![Codecov](https://codecov.io/gh/marcelm/dnaio/branch/master/graph/badge.svg)](https://codecov.io/gh/marcelm/dnaio)
 
 # dnaio parses FASTQ and FASTA
 
 `dnaio` is a Python 3 library for fast parsing of FASTQ and also FASTA files. The code was previously part of the
-[cutadapt](https://cutadapt.readthedocs.io/) tool and has been improved since it has been split out.
+[Cutadapt](https://cutadapt.readthedocs.io/) tool and has been improved since it has been split out.
 
 
 ## Example usage


=====================================
pyproject.toml
=====================================
@@ -0,0 +1,2 @@
+[build-system]
+requires = ["setuptools", "wheel", "setuptools_scm", "Cython"]


=====================================
setup.cfg deleted
=====================================
@@ -1,7 +0,0 @@
-[versioneer]
-VCS = git
-style = pep440
-versionfile_source = src/dnaio/_version.py
-versionfile_build = dnaio/_version.py
-tag_prefix = v
-parentdir_prefix = dnaio-


=====================================
setup.py
=====================================
@@ -1,9 +1,8 @@
 import sys
 import os.path
-from setuptools import setup, Extension
+from setuptools import setup, Extension, find_packages
 from distutils.command.sdist import sdist as _sdist
 from distutils.command.build_ext import build_ext as _build_ext
-import versioneer
 
 if sys.version_info[:2] < (3, 4):
     sys.stdout.write('Python 3.4 or later is required\n')
@@ -30,12 +29,8 @@ extensions = [
     Extension('dnaio._core', sources=['src/dnaio/_core.pyx']),
 ]
 
-cmdclass = versioneer.get_cmdclass()
-versioneer_build_ext = cmdclass.get('build_ext', _build_ext)
-versioneer_sdist = cmdclass.get('sdist', _sdist)
 
-
-class build_ext(versioneer_build_ext):
+class BuildExt(_build_ext):
     def run(self):
         # If we encounter a PKG-INFO file, then this is likely a .tar.gz/.zip
         # file retrieved from PyPI that already includes the pre-cythonized
@@ -47,26 +42,24 @@ class build_ext(versioneer_build_ext):
             # only sensible thing is to require Cython to be installed.
             from Cython.Build import cythonize
             self.extensions = cythonize(self.extensions)
-        versioneer_build_ext.run(self)
+        super().run()
 
 
-class sdist(versioneer_sdist):
+class SDist(_sdist):
     def run(self):
         # Make sure the compiled Cython files in the distribution are up-to-date
         from Cython.Build import cythonize
         cythonize(extensions)
-        versioneer_sdist.run(self)
-
+        super().run()
 
-cmdclass['build_ext'] = build_ext
-cmdclass['sdist'] = sdist
 
 with open('README.md', encoding='utf-8') as f:
     long_description = f.read()
 
 setup(
     name='dnaio',
-    version=versioneer.get_version(),
+    setup_requires=['setuptools_scm'],  # Support pip versions that don't know about pyproject.toml
+    use_scm_version={'write_to': 'src/dnaio/_version.py'},
     author='Marcel Martin',
     author_email='marcel.martin@scilifelab.se',
     url='https://github.com/marcelm/dnaio/',
@@ -74,14 +67,15 @@ setup(
     long_description=long_description,
     long_description_content_type='text/markdown',
     license='MIT',
-    packages=['dnaio'],
     package_dir={'': 'src'},
+    packages=find_packages('src'),
     extras_require={
         'dev': ['Cython', 'pytest'],
     },
     ext_modules=extensions,
-    cmdclass=cmdclass,
-    install_requires=['xopen'],
+    cmdclass={'build_ext': BuildExt, 'sdist': SDist},
+    install_requires=['xopen>=0.8.2'],
+    python_requires='>=3.4',
     classifiers=[
             "Development Status :: 3 - Alpha",
             "Intended Audience :: Science/Research",


=====================================
src/dnaio/__init__.py
=====================================
@@ -21,6 +21,7 @@ __all__ = [
 import os
 from contextlib import ExitStack
 import functools
+import pathlib
 
 from xopen import xopen
 
@@ -29,10 +30,19 @@ from .readers import FastaReader, FastqReader
 from .writers import FastaWriter, FastqWriter
 from .exceptions import UnknownFileFormat, FileFormatError, FastaFormatError, FastqFormatError
 from .chunks import read_chunks, read_paired_chunks
+from ._version import version as __version__
 
-from ._version import get_versions
-__version__ = get_versions()['version']
-del get_versions
+
+try:
+    from os import fspath  # Exists in Python 3.6+
+except ImportError:
+    def fspath(path):
+        if hasattr(path, "__fspath__"):
+            return path.__fspath__()
+        # Python 3.4 and 3.5 do not support the file system path protocol
+        if isinstance(path, pathlib.Path):
+            return str(path)
+        return path
 
 
 def open(file1, *, file2=None, fileformat=None, interleaved=False, mode='r', qualities=None):
@@ -42,10 +52,10 @@ def open(file1, *, file2=None, fileformat=None, interleaved=False, mode='r', qua
     classes also defined in this module.
 
     file1, file2 -- Paths to regular or compressed files or file-like
-        objects. Use file1 if data is single-end. If also file2 is provided,
-        sequences are paired.
+        objects (as str or as pathlib.Path). Use only file1 if data is single-end.
+        If sequences are paired, use also file2.
 
-    mode -- Either 'r' for reading or 'w' for writing.
+    mode -- Either 'r' for reading, 'w' for writing or 'a' for appending.
 
     interleaved -- If True, then file1 contains interleaved paired-end data.
         file2 must be None in this case.
@@ -62,20 +72,26 @@ def open(file1, *, file2=None, fileformat=None, interleaved=False, mode='r', qua
         * When False (no qualities available), an exception is raised when the
           auto-detected output format is FASTQ.
     """
-    if mode not in ('r', 'w'):
-        raise ValueError("Mode must be 'r' or 'w'")
+    if mode not in ("r", "w", "a"):
+        raise ValueError("Mode must be 'r', 'w' or 'a'")
     if interleaved and file2 is not None:
         raise ValueError("When interleaved is set, file2 must be None")
     if file2 is not None:
-        if mode == 'r':
+        if mode in "wa" and file1 == file2:
+            raise ValueError("The paired-end output files are identical")
+        if mode == "r":
             return PairedSequenceReader(file1, file2, fileformat)
-        else:
+        elif mode == "w":
             return PairedSequenceWriter(file1, file2, fileformat, qualities)
+        else:
+            return PairedSequenceAppender(file1, file2, fileformat, qualities)
     if interleaved:
-        if mode == 'r':
+        if mode == "r":
             return InterleavedSequenceReader(file1, fileformat)
-        else:
+        elif mode == "w":
             return InterleavedSequenceWriter(file1, fileformat, qualities)
+        else:
+            return InterleavedSequenceAppender(file1, fileformat, qualities)
 
     # The multi-file options have been dealt with, delegate rest to the
     # single-file function.
@@ -106,15 +122,21 @@ def _open_single(file, *, fileformat=None, mode='r', qualities=None):
     """
     Open a single sequence file. See description of open() above.
     """
-    if mode not in ('r', 'w'):
-        raise ValueError("Mode must be 'r' or 'w'")
-    if isinstance(file, str):
-        file = xopen(file, mode + 'b')
+    if mode not in ("r", "w", "a"):
+        raise ValueError("Mode must be 'r', 'w' or 'a'")
+
+    if isinstance(file, (str, pathlib.Path)):
+        path = fspath(file)
+        file = xopen(path, mode + 'b')
         close_file = True
     else:
         if mode == 'r' and not hasattr(file, 'readinto'):
             raise ValueError(
                 'When passing in an open file-like object, it must have been opened in binary mode')
+        if hasattr(file, "name") and isinstance(file.name, str):
+            path = file.name
+        else:
+            path = None
         close_file = False
     if mode == 'r':
         fastq_handler = FastqReader
@@ -122,59 +144,56 @@ def _open_single(file, *, fileformat=None, mode='r', qualities=None):
     else:
         fastq_handler = FastqWriter
         fasta_handler = FastaWriter
-    fastq_handler = functools.partial(fastq_handler, _close_file=close_file)
-    fasta_handler = functools.partial(fasta_handler, _close_file=close_file)
-
-    if fileformat:  # Explict file format given
-        fileformat = fileformat.lower()
-        if fileformat == 'fasta':
-            return fasta_handler(file)
-        elif fileformat == 'fastq':
-            return fastq_handler(file)
-        else:
+    handlers = {
+        'fastq': functools.partial(fastq_handler, _close_file=close_file),
+        'fasta': functools.partial(fasta_handler, _close_file=close_file),
+    }
+
+    if fileformat:
+        try:
+            handler = handlers[fileformat.lower()]
+        except KeyError:
             raise UnknownFileFormat(
                 "File format {!r} is unknown (expected 'fasta' or 'fastq').".format(fileformat))
+        return handler(file)
 
-    # First, try to detect the file format from the file name only
-    format = None
-    if hasattr(file, "name") and isinstance(file.name, str):
-        format = _detect_format_from_name(file.name)
-    if format is None and mode == 'w' and qualities is not None:
+    if path is not None:
+        fileformat = _detect_format_from_name(path)
+    if fileformat is None and mode == 'w' and qualities is not None:
         # Format not recognized, but we know whether to use a format with or without qualities
-        format = 'fastq' if qualities else 'fasta'
+        fileformat = 'fastq' if qualities else 'fasta'
 
-    if mode == 'r' and format is None:
+    if mode == 'r' and fileformat is None:
         # No format detected so far. Try to read from the file.
         if file.seekable():
             first_char = file.read(1)
             file.seek(-1, 1)
         else:
             first_char = file.peek(1)[0:1]
-        if first_char == b'#':
-            # A comment char - only valid for some FASTA variants (csfasta)
-            format = 'fasta'
-        elif first_char == b'>':
-            format = 'fasta'
-        elif first_char == b'@':
-            format = 'fastq'
-        elif first_char == b'':
-            # Empty input. Pretend this is FASTQ
-            format = 'fastq'
-        else:
+        formats = {
+            b'@': 'fastq',
+            b'>': 'fasta',
+            b'#': 'fasta',  # Some FASTA variants allow comments
+            b'': 'fastq',  # Pretend FASTQ for empty input
+        }
+        try:
+            fileformat = formats[first_char]
+        except KeyError:
             raise UnknownFileFormat(
                 'Could not determine whether file {!r} is FASTA or FASTQ. The file extension was '
                 'not available or not recognized and the first character in the file ({!r}) is '
                 'unexpected.'.format(file, first_char))
 
-    if format is None:
+    if fileformat is None:
         assert mode == 'w'
-        raise UnknownFileFormat('Cannot determine whether to write in FASTA or FASTQ format')
+        extra = " because the output file name is not available" if path is None else ""
+        raise UnknownFileFormat("Auto-detection of the output file format (FASTA/FASTQ) failed" + extra)
 
-    if format == 'fastq' and mode == 'w' and qualities is False:
+    if fileformat == 'fastq' and mode in "wa" and qualities is False:
         raise ValueError(
             'Output format cannot be FASTQ since no quality values are available.')
 
-    return fastq_handler(file) if format == 'fastq' else fasta_handler(file)
+    return handlers[fileformat](file)
 
 
 def _sequence_names_match(r1, r2):
@@ -280,11 +299,13 @@ class InterleavedSequenceReader:
 
 
 class PairedSequenceWriter:
+    _mode = "w"
+
     def __init__(self, file1, file2, fileformat='fastq', qualities=None):
         with ExitStack() as stack:
-            self._writer1 = stack.enter_context(_open_single(file1, fileformat=fileformat, mode='w',
+            self._writer1 = stack.enter_context(_open_single(file1, fileformat=fileformat, mode=self._mode,
                 qualities=qualities))
-            self._writer2 = stack.enter_context(_open_single(file2, fileformat=fileformat, mode='w',
+            self._writer2 = stack.enter_context(_open_single(file2, fileformat=fileformat, mode=self._mode,
                 qualities=qualities))
             self._close = stack.pop_all().close
 
@@ -303,15 +324,20 @@ class PairedSequenceWriter:
         self.close()
 
 
+class PairedSequenceAppender(PairedSequenceWriter):
+    _mode = "a"
+
+
 class InterleavedSequenceWriter:
     """
     Write paired-end reads to an interleaved FASTA or FASTQ file
     """
+    _mode = "w"
 
     def __init__(self, file, fileformat='fastq', qualities=None):
 
         self._writer = _open_single(
-            file, fileformat=fileformat, mode='w', qualities=qualities)
+            file, fileformat=fileformat, mode=self._mode, qualities=qualities)
 
     def write(self, read1, read2):
         self._writer.write(read1)
@@ -326,3 +352,7 @@ class InterleavedSequenceWriter:
 
     def __exit__(self, *args):
         self.close()
+
+
+class InterleavedSequenceAppender(InterleavedSequenceWriter):
+    _mode = "a"


=====================================
src/dnaio/_core.pyx
=====================================
@@ -8,59 +8,59 @@ from ._util import shorten
 
 
 cdef class Sequence:
-	"""
-	A record in a FASTA or FASTQ file. For FASTA, the qualities attribute
-	is None. For FASTQ, qualities is a string and it contains the qualities
-	encoded as ascii(qual+33).
-	"""
-	cdef:
-		public str name
-		public str sequence
-		public str qualities
-
-	def __cinit__(self, str name, str sequence, str qualities=None):
-		"""Set qualities to None if there are no quality values"""
-		self.name = name
-		self.sequence = sequence
-		self.qualities = qualities
-
-		if qualities is not None and len(qualities) != len(sequence):
-			rname = shorten(name)
-			raise ValueError("In read named {!r}: length of quality sequence "
-				"({}) and length of read ({}) do not match".format(
-					rname, len(qualities), len(sequence)))
-
-	def __getitem__(self, key):
-		"""slicing"""
-		return self.__class__(
-			self.name,
-			self.sequence[key],
-			self.qualities[key] if self.qualities is not None else None)
-
-	def __repr__(self):
-		qstr = ''
-		if self.qualities is not None:
-			qstr = ', qualities={!r}'.format(shorten(self.qualities))
-		return '<Sequence(name={!r}, sequence={!r}{})>'.format(
-			shorten(self.name), shorten(self.sequence), qstr)
-
-	def __len__(self):
-		return len(self.sequence)
-
-	def __richcmp__(self, other, int op):
-		if 2 <= op <= 3:
-			eq = self.name == other.name and \
-				self.sequence == other.sequence and \
-				self.qualities == other.qualities
-			if op == 2:
-				return eq
-			else:
-				return not eq
-		else:
-			raise NotImplementedError()
-
-	def __reduce__(self):
-		return (Sequence, (self.name, self.sequence, self.qualities))
+    """
+    A record in a FASTA or FASTQ file. For FASTA, the qualities attribute
+    is None. For FASTQ, qualities is a string and it contains the qualities
+    encoded as ascii(qual+33).
+    """
+    cdef:
+        public str name
+        public str sequence
+        public str qualities
+
+    def __cinit__(self, str name, str sequence, str qualities=None):
+        """Set qualities to None if there are no quality values"""
+        self.name = name
+        self.sequence = sequence
+        self.qualities = qualities
+
+        if qualities is not None and len(qualities) != len(sequence):
+            rname = shorten(name)
+            raise ValueError("In read named {!r}: length of quality sequence "
+                "({}) and length of read ({}) do not match".format(
+                    rname, len(qualities), len(sequence)))
+
+    def __getitem__(self, key):
+        """slicing"""
+        return self.__class__(
+            self.name,
+            self.sequence[key],
+            self.qualities[key] if self.qualities is not None else None)
+
+    def __repr__(self):
+        qstr = ''
+        if self.qualities is not None:
+            qstr = ', qualities={!r}'.format(shorten(self.qualities))
+        return '<Sequence(name={!r}, sequence={!r}{})>'.format(
+            shorten(self.name), shorten(self.sequence), qstr)
+
+    def __len__(self):
+        return len(self.sequence)
+
+    def __richcmp__(self, other, int op):
+        if 2 <= op <= 3:
+            eq = self.name == other.name and \
+                self.sequence == other.sequence and \
+                self.qualities == other.qualities
+            if op == 2:
+                return eq
+            else:
+                return not eq
+        else:
+            raise NotImplementedError()
+
+    def __reduce__(self):
+        return (Sequence, (self.name, self.sequence, self.qualities))
 
 
 # It would be nice to be able to have the first parameter be an
@@ -69,216 +69,216 @@ cdef class Sequence:
 # See <https://stackoverflow.com/questions/28203670/>
 
 ctypedef fused bytes_or_bytearray:
-	bytes
-	bytearray
+    bytes
+    bytearray
 
 
 def paired_fastq_heads(bytes_or_bytearray buf1, bytes_or_bytearray buf2, Py_ssize_t end1, Py_ssize_t end2):
-	"""
-	Skip forward in the two buffers by multiples of four lines.
-
-	Return a tuple (length1, length2) such that buf1[:length1] and
-	buf2[:length2] contain the same number of lines (where the
-	line number is divisible by four).
-	"""
-	cdef:
-		Py_ssize_t pos1 = 0, pos2 = 0
-		Py_ssize_t linebreaks = 0
-		unsigned char* data1 = buf1
-		unsigned char* data2 = buf2
-		Py_ssize_t record_start1 = 0
-		Py_ssize_t record_start2 = 0
-
-	while True:
-		while pos1 < end1 and data1[pos1] != b'\n':
-			pos1 += 1
-		if pos1 == end1:
-			break
-		pos1 += 1
-		while pos2 < end2 and data2[pos2] != b'\n':
-			pos2 += 1
-		if pos2 == end2:
-			break
-		pos2 += 1
-		linebreaks += 1
-		if linebreaks == 4:
-			linebreaks = 0
-			record_start1 = pos1
-			record_start2 = pos2
-
-	# Hit the end of the data block
-	return record_start1, record_start2
+    """
+    Skip forward in the two buffers by multiples of four lines.
+
+    Return a tuple (length1, length2) such that buf1[:length1] and
+    buf2[:length2] contain the same number of lines (where the
+    line number is divisible by four).
+    """
+    cdef:
+        Py_ssize_t pos1 = 0, pos2 = 0
+        Py_ssize_t linebreaks = 0
+        unsigned char* data1 = buf1
+        unsigned char* data2 = buf2
+        Py_ssize_t record_start1 = 0
+        Py_ssize_t record_start2 = 0
+
+    while True:
+        while pos1 < end1 and data1[pos1] != b'\n':
+            pos1 += 1
+        if pos1 == end1:
+            break
+        pos1 += 1
+        while pos2 < end2 and data2[pos2] != b'\n':
+            pos2 += 1
+        if pos2 == end2:
+            break
+        pos2 += 1
+        linebreaks += 1
+        if linebreaks == 4:
+            linebreaks = 0
+            record_start1 = pos1
+            record_start2 = pos2
+
+    # Hit the end of the data block
+    return record_start1, record_start2
 
 
 def fastq_iter(file, sequence_class, Py_ssize_t buffer_size):
-	"""
-	Parse a FASTQ file and yield Sequence objects
-
-	The *first value* that the generator yields is a boolean indicating whether
-	the first record in the FASTQ has a repeated header (in the third row
-	after the ``+``).
-
-	file -- a file-like object, opened in binary mode (it must have a readinto
-	method)
-
-	buffer_size -- size of the initial buffer. This is automatically grown
-	    if a FASTQ record is encountered that does not fit.
-	"""
-	cdef:
-		bytearray buf = bytearray(buffer_size)
-		char[:] buf_view = buf
-		char* c_buf = buf
-		int endskip
-		str name
-		char* name_encoded
-		Py_ssize_t bufstart, bufend, pos, record_start, sequence_start
-		Py_ssize_t second_header_start, sequence_length, qualities_start
-		Py_ssize_t second_header_length, name_length
-		bint custom_class = sequence_class is not Sequence
-		Py_ssize_t n_records = 0
-		bint extra_newline = False
-
-	if buffer_size < 1:
-		raise ValueError("Starting buffer size too small")
-
-	# buf is a byte buffer that is re-used in each iteration. Its layout is:
-	#
-	# |-- complete records --|
-	# +---+------------------+---------+-------+
-	# |   |                  |         |       |
-	# +---+------------------+---------+-------+
-	# ^   ^                  ^         ^       ^
-	# 0   bufstart           end       bufend  len(buf)
-	#
-	# buf[0:bufstart] is the 'leftover' data that could not be processed
-	# in the previous iteration because it contained an incomplete
-	# FASTQ record.
-
-	readinto = file.readinto
-	bufstart = 0
-
-	# The input file is processed in chunks that each fit into buf
-	while True:
-		assert bufstart < len(buf_view)
-		bufend = readinto(buf_view[bufstart:]) + bufstart
-		if bufstart == bufend:
-			# End of file
-			if bufstart > 0 and buf_view[bufstart-1] != b'\n':
-				# There is still data in the buffer and its last character is
-				# not a newline: This is a file that is missing the final
-				# newline. Append a newline and continue.
-				buf_view[bufstart] = b'\n'
-				bufstart += 1
-				bufend += 1
-				extra_newline = True
-			else:
-				break
-
-		# Parse all complete FASTQ records in this chunk
-		pos = 0
-		record_start = 0
-		while True:
-			# Parse the name (line 0)
-			if c_buf[pos] != b'@':
-				raise FastqFormatError("Line expected to "
-					"start with '@', but found {!r}".format(chr(c_buf[pos])),
-					line=n_records * 4)
-			pos += 1
-			while pos < bufend and c_buf[pos] != b'\n':
-				pos += 1
-			if pos == bufend:
-				break
-			endskip = 1 if c_buf[pos-1] == b'\r' else 0
-			name_length = pos - endskip - record_start - 1
-			name_encoded = c_buf + record_start + 1
-			# .decode('latin-1') is 50% faster than .decode('ascii')
-			name = c_buf[record_start+1:pos-endskip].decode('latin-1')
-
-			pos += 1
-
-			# Parse the sequence (line 1)
-			sequence_start = pos
-			while pos < bufend and c_buf[pos] != b'\n':
-				pos += 1
-			if pos == bufend:
-				break
-			endskip = 1 if c_buf[pos-1] == b'\r' else 0
-			sequence = c_buf[sequence_start:pos-endskip].decode('latin-1')
-			sequence_length = pos - endskip - sequence_start
-			pos += 1
-
-			# Parse second header (line 2)
-			second_header_start = pos
-			if pos == bufend:
-				break
-			if c_buf[pos] != b'+':
-				raise FastqFormatError("Line expected to "
-					"start with '+', but found {!r}".format(chr(c_buf[pos])),
-					line=n_records * 4 + 2)
-			pos += 1  # skip over the '+'
-			while pos < bufend and c_buf[pos] != b'\n':
-				pos += 1
-			if pos == bufend:
-				break
-			endskip = 1 if c_buf[pos-1] == b'\r' else 0
-			second_header_length = pos - endskip - second_header_start - 1
-			if second_header_length == 0:
-				second_header = False
-			else:
-				if (name_length != second_header_length or
-						strncmp(c_buf+second_header_start+1,
-							name_encoded, second_header_length) != 0):
-					raise FastqFormatError(
-						"Sequence descriptions don't match ('{}' != '{}').\n"
-						"The second sequence description must be either "
-						"empty or equal to the first description.".format(
-							name_encoded[:name_length].decode('latin-1'),
-							c_buf[second_header_start+1:pos-endskip]
-							.decode('latin-1')), line=n_records * 4 + 2)
-				second_header = True
-			pos += 1
-
-			# Parse qualities (line 3)
-			qualities_start = pos
-			while pos < bufend and c_buf[pos] != b'\n':
-				pos += 1
-			if pos == bufend:
-				break
-			endskip = 1 if c_buf[pos-1] == b'\r' else 0
-			qualities = c_buf[qualities_start:pos-endskip].decode('latin-1')
-			if pos - endskip - qualities_start != sequence_length:
-				raise FastqFormatError("Length of sequence and "
-					"qualities differ", line=n_records * 4 + 3)
-			pos += 1
-			if n_records == 0:
-				yield second_header  # first yielded value is special
-			if custom_class:
-				yield sequence_class(name, sequence, qualities)
-			else:
-				yield Sequence.__new__(Sequence, name, sequence, qualities)
-			n_records += 1
-			record_start = pos
-			if pos == bufend:
-				break
-		if pos == bufend:
-			if record_start == 0 and bufend == len(buf):
-				# buffer too small, double it
-				buffer_size *= 2
-				prev_buf = buf
-				buf = bytearray(buffer_size)
-				buf[0:bufend] = prev_buf
-				del prev_buf
-				bufstart = bufend
-				buf_view = buf
-				c_buf = buf
-			else:
-				bufstart = bufend - record_start
-				buf[0:bufstart] = buf[record_start:bufend]
-	if pos > record_start:
-		if extra_newline:
-			pos -= 1
-		lines = buf[record_start:pos].count(b'\n')
-		raise FastqFormatError(
-			'Premature end of file encountered. The incomplete final record was: '
-			'{!r}'.format(shorten(buf[record_start:pos].decode('latin-1'), 500)),
-			line=n_records * 4 + lines)
+    """
+    Parse a FASTQ file and yield Sequence objects
+
+    The *first value* that the generator yields is a boolean indicating whether
+    the first record in the FASTQ has a repeated header (in the third row
+    after the ``+``).
+
+    file -- a file-like object, opened in binary mode (it must have a readinto
+    method)
+
+    buffer_size -- size of the initial buffer. This is automatically grown
+        if a FASTQ record is encountered that does not fit.
+    """
+    cdef:
+        bytearray buf = bytearray(buffer_size)
+        char[:] buf_view = buf
+        char* c_buf = buf
+        int endskip
+        str name
+        char* name_encoded
+        Py_ssize_t bufstart, bufend, pos, record_start, sequence_start
+        Py_ssize_t second_header_start, sequence_length, qualities_start
+        Py_ssize_t second_header_length, name_length
+        bint custom_class = sequence_class is not Sequence
+        Py_ssize_t n_records = 0
+        bint extra_newline = False
+
+    if buffer_size < 1:
+        raise ValueError("Starting buffer size too small")
+
+    # buf is a byte buffer that is re-used in each iteration. Its layout is:
+    #
+    # |-- complete records --|
+    # +---+------------------+---------+-------+
+    # |   |                  |         |       |
+    # +---+------------------+---------+-------+
+    # ^   ^                  ^         ^       ^
+    # 0   bufstart           end       bufend  len(buf)
+    #
+    # buf[0:bufstart] is the 'leftover' data that could not be processed
+    # in the previous iteration because it contained an incomplete
+    # FASTQ record.
+
+    readinto = file.readinto
+    bufstart = 0
+
+    # The input file is processed in chunks that each fit into buf
+    while True:
+        assert bufstart < len(buf_view)
+        bufend = readinto(buf_view[bufstart:]) + bufstart
+        if bufstart == bufend:
+            # End of file
+            if bufstart > 0 and buf_view[bufstart-1] != b'\n':
+                # There is still data in the buffer and its last character is
+                # not a newline: This is a file that is missing the final
+                # newline. Append a newline and continue.
+                buf_view[bufstart] = b'\n'
+                bufstart += 1
+                bufend += 1
+                extra_newline = True
+            else:
+                break
+
+        # Parse all complete FASTQ records in this chunk
+        pos = 0
+        record_start = 0
+        while True:
+            # Parse the name (line 0)
+            if c_buf[pos] != b'@':
+                raise FastqFormatError("Line expected to "
+                    "start with '@', but found {!r}".format(chr(c_buf[pos])),
+                    line=n_records * 4)
+            pos += 1
+            while pos < bufend and c_buf[pos] != b'\n':
+                pos += 1
+            if pos == bufend:
+                break
+            endskip = 1 if c_buf[pos-1] == b'\r' else 0
+            name_length = pos - endskip - record_start - 1
+            name_encoded = c_buf + record_start + 1
+            # .decode('latin-1') is 50% faster than .decode('ascii')
+            name = c_buf[record_start+1:pos-endskip].decode('latin-1')
+
+            pos += 1
+
+            # Parse the sequence (line 1)
+            sequence_start = pos
+            while pos < bufend and c_buf[pos] != b'\n':
+                pos += 1
+            if pos == bufend:
+                break
+            endskip = 1 if c_buf[pos-1] == b'\r' else 0
+            sequence = c_buf[sequence_start:pos-endskip].decode('latin-1')
+            sequence_length = pos - endskip - sequence_start
+            pos += 1
+
+            # Parse second header (line 2)
+            second_header_start = pos
+            if pos == bufend:
+                break
+            if c_buf[pos] != b'+':
+                raise FastqFormatError("Line expected to "
+                    "start with '+', but found {!r}".format(chr(c_buf[pos])),
+                    line=n_records * 4 + 2)
+            pos += 1  # skip over the '+'
+            while pos < bufend and c_buf[pos] != b'\n':
+                pos += 1
+            if pos == bufend:
+                break
+            endskip = 1 if c_buf[pos-1] == b'\r' else 0
+            second_header_length = pos - endskip - second_header_start - 1
+            if second_header_length == 0:
+                second_header = False
+            else:
+                if (name_length != second_header_length or
+                        strncmp(c_buf+second_header_start+1,
+                            name_encoded, second_header_length) != 0):
+                    raise FastqFormatError(
+                        "Sequence descriptions don't match ('{}' != '{}').\n"
+                        "The second sequence description must be either "
+                        "empty or equal to the first description.".format(
+                            name_encoded[:name_length].decode('latin-1'),
+                            c_buf[second_header_start+1:pos-endskip]
+                            .decode('latin-1')), line=n_records * 4 + 2)
+                second_header = True
+            pos += 1
+
+            # Parse qualities (line 3)
+            qualities_start = pos
+            while pos < bufend and c_buf[pos] != b'\n':
+                pos += 1
+            if pos == bufend:
+                break
+            endskip = 1 if c_buf[pos-1] == b'\r' else 0
+            qualities = c_buf[qualities_start:pos-endskip].decode('latin-1')
+            if pos - endskip - qualities_start != sequence_length:
+                raise FastqFormatError("Length of sequence and "
+                    "qualities differ", line=n_records * 4 + 3)
+            pos += 1
+            if n_records == 0:
+                yield second_header  # first yielded value is special
+            if custom_class:
+                yield sequence_class(name, sequence, qualities)
+            else:
+                yield Sequence.__new__(Sequence, name, sequence, qualities)
+            n_records += 1
+            record_start = pos
+            if pos == bufend:
+                break
+        if pos == bufend:
+            if record_start == 0 and bufend == len(buf):
+                # buffer too small, double it
+                buffer_size *= 2
+                prev_buf = buf
+                buf = bytearray(buffer_size)
+                buf[0:bufend] = prev_buf
+                del prev_buf
+                bufstart = bufend
+                buf_view = buf
+                c_buf = buf
+            else:
+                bufstart = bufend - record_start
+                buf[0:bufstart] = buf[record_start:bufend]
+    if pos > record_start:
+        if extra_newline:
+            pos -= 1
+        lines = buf[record_start:pos].count(b'\n')
+        raise FastqFormatError(
+            'Premature end of file encountered. The incomplete final record was: '
+            '{!r}'.format(shorten(buf[record_start:pos].decode('latin-1'), 500)),
+            line=n_records * 4 + lines)
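
The hunk above only re-indents _core.pyx from tabs to four spaces; behaviour is unchanged. For reference, a small sketch of the Sequence API defined here (record contents are made up):

from dnaio import Sequence

s = Sequence('read1', 'ACGTACGT', 'IIIIHHHH')
assert len(s) == 8                        # __len__ is the sequence length
head = s[:4]                              # slicing keeps the name, trims sequence and qualities
assert (head.sequence, head.qualities) == ('ACGT', 'IIII')
assert s == Sequence('read1', 'ACGTACGT', 'IIIIHHHH')  # equality compares all three fields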


=====================================
src/dnaio/_version.py deleted
=====================================
@@ -1,520 +0,0 @@
-
-# This file helps to compute a version number in source trees obtained from
-# git-archive tarball (such as those provided by githubs download-from-tag
-# feature). Distribution tarballs (built by setup.py sdist) and build
-# directories (produced by setup.py build) will contain a much shorter file
-# that just contains the computed version number.
-
-# This file is released into the public domain. Generated by
-# versioneer-0.18 (https://github.com/warner/python-versioneer)
-
-"""Git implementation of _version.py."""
-
-import errno
-import os
-import re
-import subprocess
-import sys
-
-
-def get_keywords():
-    """Get the keywords needed to look up the version information."""
-    # these strings will be replaced by git during git-archive.
-    # setup.py/versioneer.py will grep for the variable names, so they must
-    # each be defined on a line of their own. _version.py will just call
-    # get_keywords().
-    git_refnames = " (tag: v0.3)"
-    git_full = "a16a94708e8b7b974a8598e56da4ffbf892f8898"
-    git_date = "2018-10-03 14:11:31 +0200"
-    keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
-    return keywords
-
-
-class VersioneerConfig:
-    """Container for Versioneer configuration parameters."""
-
-
-def get_config():
-    """Create, populate and return the VersioneerConfig() object."""
-    # these strings are filled in when 'setup.py versioneer' creates
-    # _version.py
-    cfg = VersioneerConfig()
-    cfg.VCS = "git"
-    cfg.style = "pep440"
-    cfg.tag_prefix = "v"
-    cfg.parentdir_prefix = "dnaio-"
-    cfg.versionfile_source = "src/dnaio/_version.py"
-    cfg.verbose = False
-    return cfg
-
-
-class NotThisMethod(Exception):
-    """Exception raised if a method is not valid for the current scenario."""
-
-
-LONG_VERSION_PY = {}
-HANDLERS = {}
-
-
-def register_vcs_handler(vcs, method):  # decorator
-    """Decorator to mark a method as the handler for a particular VCS."""
-    def decorate(f):
-        """Store f in HANDLERS[vcs][method]."""
-        if vcs not in HANDLERS:
-            HANDLERS[vcs] = {}
-        HANDLERS[vcs][method] = f
-        return f
-    return decorate
-
-
-def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
-                env=None):
-    """Call the given command(s)."""
-    assert isinstance(commands, list)
-    p = None
-    for c in commands:
-        try:
-            dispcmd = str([c] + args)
-            # remember shell=False, so use git.cmd on windows, not just git
-            p = subprocess.Popen([c] + args, cwd=cwd, env=env,
-                                 stdout=subprocess.PIPE,
-                                 stderr=(subprocess.PIPE if hide_stderr
-                                         else None))
-            break
-        except EnvironmentError:
-            e = sys.exc_info()[1]
-            if e.errno == errno.ENOENT:
-                continue
-            if verbose:
-                print("unable to run %s" % dispcmd)
-                print(e)
-            return None, None
-    else:
-        if verbose:
-            print("unable to find command, tried %s" % (commands,))
-        return None, None
-    stdout = p.communicate()[0].strip()
-    if sys.version_info[0] >= 3:
-        stdout = stdout.decode()
-    if p.returncode != 0:
-        if verbose:
-            print("unable to run %s (error)" % dispcmd)
-            print("stdout was %s" % stdout)
-        return None, p.returncode
-    return stdout, p.returncode
-
-
-def versions_from_parentdir(parentdir_prefix, root, verbose):
-    """Try to determine the version from the parent directory name.
-
-    Source tarballs conventionally unpack into a directory that includes both
-    the project name and a version string. We will also support searching up
-    two directory levels for an appropriately named parent directory
-    """
-    rootdirs = []
-
-    for i in range(3):
-        dirname = os.path.basename(root)
-        if dirname.startswith(parentdir_prefix):
-            return {"version": dirname[len(parentdir_prefix):],
-                    "full-revisionid": None,
-                    "dirty": False, "error": None, "date": None}
-        else:
-            rootdirs.append(root)
-            root = os.path.dirname(root)  # up a level
-
-    if verbose:
-        print("Tried directories %s but none started with prefix %s" %
-              (str(rootdirs), parentdir_prefix))
-    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
-
-
- at register_vcs_handler("git", "get_keywords")
-def git_get_keywords(versionfile_abs):
-    """Extract version information from the given file."""
-    # the code embedded in _version.py can just fetch the value of these
-    # keywords. When used from setup.py, we don't want to import _version.py,
-    # so we do it with a regexp instead. This function is not used from
-    # _version.py.
-    keywords = {}
-    try:
-        f = open(versionfile_abs, "r")
-        for line in f.readlines():
-            if line.strip().startswith("git_refnames ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["refnames"] = mo.group(1)
-            if line.strip().startswith("git_full ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["full"] = mo.group(1)
-            if line.strip().startswith("git_date ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["date"] = mo.group(1)
-        f.close()
-    except EnvironmentError:
-        pass
-    return keywords
-
-
- at register_vcs_handler("git", "keywords")
-def git_versions_from_keywords(keywords, tag_prefix, verbose):
-    """Get version information from git keywords."""
-    if not keywords:
-        raise NotThisMethod("no keywords at all, weird")
-    date = keywords.get("date")
-    if date is not None:
-        # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
-        # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
-        # -like" string, which we must then edit to make compliant), because
-        # it's been around since git-1.5.3, and it's too difficult to
-        # discover which version we're using, or to work around using an
-        # older one.
-        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
-    refnames = keywords["refnames"].strip()
-    if refnames.startswith("$Format"):
-        if verbose:
-            print("keywords are unexpanded, not using")
-        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
-    refs = set([r.strip() for r in refnames.strip("()").split(",")])
-    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
-    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
-    TAG = "tag: "
-    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
-    if not tags:
-        # Either we're using git < 1.8.3, or there really are no tags. We use
-        # a heuristic: assume all version tags have a digit. The old git %d
-        # expansion behaves like git log --decorate=short and strips out the
-        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
-        # between branches and tags. By ignoring refnames without digits, we
-        # filter out many common branch names like "release" and
-        # "stabilization", as well as "HEAD" and "master".
-        tags = set([r for r in refs if re.search(r'\d', r)])
-        if verbose:
-            print("discarding '%s', no digits" % ",".join(refs - tags))
-    if verbose:
-        print("likely tags: %s" % ",".join(sorted(tags)))
-    for ref in sorted(tags):
-        # sorting will prefer e.g. "2.0" over "2.0rc1"
-        if ref.startswith(tag_prefix):
-            r = ref[len(tag_prefix):]
-            if verbose:
-                print("picking %s" % r)
-            return {"version": r,
-                    "full-revisionid": keywords["full"].strip(),
-                    "dirty": False, "error": None,
-                    "date": date}
-    # no suitable tags, so version is "0+unknown", but full hex is still there
-    if verbose:
-        print("no suitable tags, using unknown + full revision id")
-    return {"version": "0+unknown",
-            "full-revisionid": keywords["full"].strip(),
-            "dirty": False, "error": "no suitable tags", "date": None}
-
-
- at register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
-    """Get version from 'git describe' in the root of the source tree.
-
-    This only gets called if the git-archive 'subst' keywords were *not*
-    expanded, and _version.py hasn't already been rewritten with a short
-    version string, meaning we're inside a checked out source tree.
-    """
-    GITS = ["git"]
-    if sys.platform == "win32":
-        GITS = ["git.cmd", "git.exe"]
-
-    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
-                          hide_stderr=True)
-    if rc != 0:
-        if verbose:
-            print("Directory %s not under git control" % root)
-        raise NotThisMethod("'git rev-parse --git-dir' returned error")
-
-    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
-    # if there isn't one, this yields HEX[-dirty] (no NUM)
-    describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
-                                          "--always", "--long",
-                                          "--match", "%s*" % tag_prefix],
-                                   cwd=root)
-    # --long was added in git-1.5.5
-    if describe_out is None:
-        raise NotThisMethod("'git describe' failed")
-    describe_out = describe_out.strip()
-    full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
-    if full_out is None:
-        raise NotThisMethod("'git rev-parse' failed")
-    full_out = full_out.strip()
-
-    pieces = {}
-    pieces["long"] = full_out
-    pieces["short"] = full_out[:7]  # maybe improved later
-    pieces["error"] = None
-
-    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
-    # TAG might have hyphens.
-    git_describe = describe_out
-
-    # look for -dirty suffix
-    dirty = git_describe.endswith("-dirty")
-    pieces["dirty"] = dirty
-    if dirty:
-        git_describe = git_describe[:git_describe.rindex("-dirty")]
-
-    # now we have TAG-NUM-gHEX or HEX
-
-    if "-" in git_describe:
-        # TAG-NUM-gHEX
-        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
-        if not mo:
-            # unparseable. Maybe git-describe is misbehaving?
-            pieces["error"] = ("unable to parse git-describe output: '%s'"
-                               % describe_out)
-            return pieces
-
-        # tag
-        full_tag = mo.group(1)
-        if not full_tag.startswith(tag_prefix):
-            if verbose:
-                fmt = "tag '%s' doesn't start with prefix '%s'"
-                print(fmt % (full_tag, tag_prefix))
-            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
-                               % (full_tag, tag_prefix))
-            return pieces
-        pieces["closest-tag"] = full_tag[len(tag_prefix):]
-
-        # distance: number of commits since tag
-        pieces["distance"] = int(mo.group(2))
-
-        # commit: short hex revision ID
-        pieces["short"] = mo.group(3)
-
-    else:
-        # HEX: no tags
-        pieces["closest-tag"] = None
-        count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
-                                    cwd=root)
-        pieces["distance"] = int(count_out)  # total number of commits
-
-    # commit date: see ISO-8601 comment in git_versions_from_keywords()
-    date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
-                       cwd=root)[0].strip()
-    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
-
-    return pieces
-
-
-def plus_or_dot(pieces):
-    """Return a + if we don't already have one, else return a ."""
-    if "+" in pieces.get("closest-tag", ""):
-        return "."
-    return "+"
-
-
-def render_pep440(pieces):
-    """Build up version string, with post-release "local version identifier".
-
-    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
-    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
-
-    Exceptions:
-    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"] or pieces["dirty"]:
-            rendered += plus_or_dot(pieces)
-            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
-            if pieces["dirty"]:
-                rendered += ".dirty"
-    else:
-        # exception #1
-        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
-                                          pieces["short"])
-        if pieces["dirty"]:
-            rendered += ".dirty"
-    return rendered
-
-
-def render_pep440_pre(pieces):
-    """TAG[.post.devDISTANCE] -- No -dirty.
-
-    Exceptions:
-    1: no tags. 0.post.devDISTANCE
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"]:
-            rendered += ".post.dev%d" % pieces["distance"]
-    else:
-        # exception #1
-        rendered = "0.post.dev%d" % pieces["distance"]
-    return rendered
-
-
-def render_pep440_post(pieces):
-    """TAG[.postDISTANCE[.dev0]+gHEX] .
-
-    The ".dev0" means dirty. Note that .dev0 sorts backwards
-    (a dirty tree will appear "older" than the corresponding clean one),
-    but you shouldn't be releasing software with -dirty anyways.
-
-    Exceptions:
-    1: no tags. 0.postDISTANCE[.dev0]
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"] or pieces["dirty"]:
-            rendered += ".post%d" % pieces["distance"]
-            if pieces["dirty"]:
-                rendered += ".dev0"
-            rendered += plus_or_dot(pieces)
-            rendered += "g%s" % pieces["short"]
-    else:
-        # exception #1
-        rendered = "0.post%d" % pieces["distance"]
-        if pieces["dirty"]:
-            rendered += ".dev0"
-        rendered += "+g%s" % pieces["short"]
-    return rendered
-
-
-def render_pep440_old(pieces):
-    """TAG[.postDISTANCE[.dev0]] .
-
-    The ".dev0" means dirty.
-
-    Eexceptions:
-    1: no tags. 0.postDISTANCE[.dev0]
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"] or pieces["dirty"]:
-            rendered += ".post%d" % pieces["distance"]
-            if pieces["dirty"]:
-                rendered += ".dev0"
-    else:
-        # exception #1
-        rendered = "0.post%d" % pieces["distance"]
-        if pieces["dirty"]:
-            rendered += ".dev0"
-    return rendered
-
-
-def render_git_describe(pieces):
-    """TAG[-DISTANCE-gHEX][-dirty].
-
-    Like 'git describe --tags --dirty --always'.
-
-    Exceptions:
-    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"]:
-            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
-    else:
-        # exception #1
-        rendered = pieces["short"]
-    if pieces["dirty"]:
-        rendered += "-dirty"
-    return rendered
-
-
-def render_git_describe_long(pieces):
-    """TAG-DISTANCE-gHEX[-dirty].
-
-    Like 'git describe --tags --dirty --always -long'.
-    The distance/hash is unconditional.
-
-    Exceptions:
-    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
-    else:
-        # exception #1
-        rendered = pieces["short"]
-    if pieces["dirty"]:
-        rendered += "-dirty"
-    return rendered
-
-
-def render(pieces, style):
-    """Render the given version pieces into the requested style."""
-    if pieces["error"]:
-        return {"version": "unknown",
-                "full-revisionid": pieces.get("long"),
-                "dirty": None,
-                "error": pieces["error"],
-                "date": None}
-
-    if not style or style == "default":
-        style = "pep440"  # the default
-
-    if style == "pep440":
-        rendered = render_pep440(pieces)
-    elif style == "pep440-pre":
-        rendered = render_pep440_pre(pieces)
-    elif style == "pep440-post":
-        rendered = render_pep440_post(pieces)
-    elif style == "pep440-old":
-        rendered = render_pep440_old(pieces)
-    elif style == "git-describe":
-        rendered = render_git_describe(pieces)
-    elif style == "git-describe-long":
-        rendered = render_git_describe_long(pieces)
-    else:
-        raise ValueError("unknown style '%s'" % style)
-
-    return {"version": rendered, "full-revisionid": pieces["long"],
-            "dirty": pieces["dirty"], "error": None,
-            "date": pieces.get("date")}
-
-
-def get_versions():
-    """Get version information or return default if unable to do so."""
-    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
-    # __file__, we can work backwards from there to the root. Some
-    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
-    # case we can only use expanded keywords.
-
-    cfg = get_config()
-    verbose = cfg.verbose
-
-    try:
-        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
-                                          verbose)
-    except NotThisMethod:
-        pass
-
-    try:
-        root = os.path.realpath(__file__)
-        # versionfile_source is the relative path from the top of the source
-        # tree (where the .git directory might live) to this file. Invert
-        # this to find the root from __file__.
-        for i in cfg.versionfile_source.split('/'):
-            root = os.path.dirname(root)
-    except NameError:
-        return {"version": "0+unknown", "full-revisionid": None,
-                "dirty": None,
-                "error": "unable to find root of source tree",
-                "date": None}
-
-    try:
-        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
-        return render(pieces, cfg.style)
-    except NotThisMethod:
-        pass
-
-    try:
-        if cfg.parentdir_prefix:
-            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
-    except NotThisMethod:
-        pass
-
-    return {"version": "0+unknown", "full-revisionid": None,
-            "dirty": None,
-            "error": "unable to compute version", "date": None}


=====================================
src/dnaio/chunks.py
=====================================
@@ -1,4 +1,5 @@
 """Chunked reading of FASTA and FASTQ files"""
+
 from ._core import paired_fastq_heads as _paired_fastq_heads
 from .exceptions import FileFormatError, FastaFormatError, UnknownFileFormat
 
@@ -53,6 +54,9 @@ def read_chunks(f, buffer_size=4*1024**2):
         head = _fastq_head
     elif start == 1 and (buf[0:1] == b'#' or buf[0:1] == b'>'):
         head = _fasta_head
+    elif start == 0:
+        # Empty file
+        return
     else:
         raise UnknownFileFormat('Input file format unknown')
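
With the added branch, read_chunks() now returns without yielding anything for empty input instead of raising UnknownFileFormat. A minimal sketch mirroring the new test added below:

from io import BytesIO
from dnaio import read_chunks

# Empty input simply yields no chunks rather than an error
assert list(read_chunks(BytesIO(b''))) == []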
 


=====================================
src/dnaio/writers.py
=====================================
@@ -1,4 +1,3 @@
-import io
 from xopen import xopen
 
 


=====================================
tests/data/simple.fasta.bz2
=====================================
Binary files /dev/null and b/tests/data/simple.fasta.bz2 differ


=====================================
tests/data/simple.fasta.gz
=====================================
Binary files /dev/null and b/tests/data/simple.fasta.gz differ


=====================================
tests/data/simple.fasta.xz
=====================================
Binary files /dev/null and b/tests/data/simple.fasta.xz differ


=====================================
tests/data/simple.fastq.bz2
=====================================
Binary files /dev/null and b/tests/data/simple.fastq.bz2 differ


=====================================
tests/data/simple.fastq.gz
=====================================
Binary files /dev/null and b/tests/data/simple.fastq.gz differ


=====================================
tests/data/simple.fastq.xz
=====================================
Binary files /dev/null and b/tests/data/simple.fastq.xz differ


=====================================
tests/test_api.py deleted
=====================================
@@ -1,57 +0,0 @@
-import dnaio
-from xopen import xopen
-
-
-def test_version():
-    _ = dnaio.__version__
-
-
-def test_open():
-    with dnaio.open('tests/data/simple.fasta') as f:
-        records = list(f)
-        assert len(records) == 2
-
-
-def test_detect_fastq_from_content():
-    """FASTQ file that is not named .fastq"""
-    with dnaio.open('tests/data/missingextension') as f:
-        record = next(iter(f))
-        assert record.name == 'prefix:1_13_573/1'
-
-
-def test_detect_compressed_fastq_from_content():
-    """Compressed FASTQ file that is not named .fastq.gz"""
-    with dnaio.open('tests/data/missingextension.gz') as f:
-        record = next(iter(f))
-        assert record.name == 'prefix:1_13_573/1'
-
-
-def test_write(tmpdir):
-    s = dnaio.Sequence('name', 'ACGT', 'HHHH')
-    out_fastq = tmpdir.join('out.fastq')
-    with dnaio.open(str(out_fastq), mode='w') as f:
-        f.write(s)
-    assert out_fastq.read() == '@name\nACGT\n+\nHHHH\n'
-
-
-def test_write_gz(tmpdir):
-    s = dnaio.Sequence('name', 'ACGT', 'HHHH')
-    out_fastq = tmpdir.join('out.fastq.gz')
-    with dnaio.open(str(out_fastq), mode='w') as f:
-        f.write(s)
-
-    import gzip
-    with gzip.open(str(out_fastq)) as f:
-        assert f.read() == b'@name\nACGT\n+\nHHHH\n'
-
-
-def test_write_gz_with_xopen(tmpdir):
-    s = dnaio.Sequence('name', 'ACGT', 'HHHH')
-    out_fastq = tmpdir.join('out.fastq.gz')
-    with xopen(str(out_fastq), 'wb') as gzf:
-        with dnaio.open(gzf, mode='w') as f:
-            f.write(s)
-
-    import gzip
-    with gzip.open(str(out_fastq)) as f:
-        assert f.read() == b'@name\nACGT\n+\nHHHH\n'


=====================================
tests/test_chunks.py
=====================================
@@ -52,3 +52,7 @@ def test_read_chunks():
         # Buffer too small
         with raises(OverflowError):
             list(read_chunks(BytesIO(data), buffer_size=4))
+
+
+def test_read_chunks_empty():
+    assert list(read_chunks(BytesIO(b''))) == []


=====================================
tests/test_internal.py
=====================================
@@ -6,16 +6,16 @@ from io import BytesIO
 from tempfile import mkdtemp
 from textwrap import dedent
 
+from pytest import raises, mark
+
 import dnaio
 from dnaio import (
     FileFormatError, FastaFormatError, FastqFormatError,
     FastaReader, FastqReader, InterleavedSequenceReader,
     FastaWriter, FastqWriter, InterleavedSequenceWriter,
     PairedSequenceReader)
-from dnaio import _sequence_names_match
-from dnaio._core import Sequence
+from dnaio import _sequence_names_match, Sequence
 
-from pytest import raises, mark
 
 # files tests/data/simple.fast{q,a}
 simple_fastq = [
@@ -112,7 +112,7 @@ class TestFastqReader:
     def test_fastqreader_buffersize_too_small(self):
         with raises(ValueError):
             with FastqReader("tests/data/simple.fastq", buffer_size=0) as f:
-                reads = list(f)
+                reads = list(f)  # pragma: no cover
 
     def test_fastqreader_dos(self):
         # DOS line breaks
@@ -127,7 +127,7 @@ class TestFastqReader:
     def test_fastq_wrongformat(self):
         with raises(FastqFormatError) as info:
             with FastqReader("tests/data/withplus.fastq") as f:
-                list(f)
+                list(f)  # pragma: no cover
         assert info.value.line == 2
 
     def test_empty_fastq(self):
@@ -171,7 +171,7 @@ class TestFastqReader:
         buffer_size = len('@r\nACG\n+\n')
         with raises(FastqFormatError) as info:
             with FastqReader(fastq, buffer_size=buffer_size) as fq:
-                list(fq)
+                list(fq)  # pragma: no cover
         assert 'Length of sequence and qualities differ' in info.value.message
         assert info.value.line == 3
 
@@ -231,7 +231,7 @@ class TestFastqReader:
         fastq = BytesIO(b'@r1\nACG\n+xy\n')
         with raises(FastqFormatError) as info:
             with FastqReader(fastq) as fq:
-                list(fq)
+                list(fq)  # pragma: no cover
         assert "Sequence descriptions don't match" in info.value.message
 
 
@@ -281,7 +281,8 @@ class TestOpen:
             assert isinstance(f, FastaWriter)
             for seq in simple_fastq:
                 f.write(seq)
-        assert list(dnaio.open(path)) == simple_fasta
+        with dnaio.open(path) as f:
+            assert list(f) == simple_fasta
 
     def test_autodetect_fastq_format(self):
         path = os.path.join(self._tmpdir, 'tmp.fastq')
@@ -289,7 +290,19 @@ class TestOpen:
             assert isinstance(f, FastqWriter)
             for seq in simple_fastq:
                 f.write(seq)
-        assert list(dnaio.open(path)) == simple_fastq
+        with dnaio.open(path) as f:
+            assert list(f) == simple_fastq
+
+    def test_autodetect_fastq_weird_name(self):
+        path = os.path.join(self._tmpdir, 'tmp.fastq.gz')
+        with dnaio.open(path, mode='w') as f:
+            assert isinstance(f, FastqWriter)
+            for seq in simple_fastq:
+                f.write(seq)
+        weird_path = os.path.join(self._tmpdir, 'tmp.weird.gz')
+        os.rename(path, weird_path)
+        with dnaio.open(weird_path) as f:
+            assert list(f) == simple_fastq
 
     def test_fastq_qualities_missing(self):
         path = os.path.join(self._tmpdir, 'tmp.fastq')
@@ -436,6 +449,14 @@ class TestInterleavedWriter:
 
 
 class TestPairedSequenceReader:
+    def test_read(self):
+        s1 = BytesIO(b'@r1\nACG\n+\nHHH\n')
+        s2 = BytesIO(b'@r2\nGTT\n+\n858\n')
+        with PairedSequenceReader(s1, s2) as psr:
+            assert [
+                (Sequence("r1", "ACG", "HHH"), Sequence("r2", "GTT", "858")),
+            ] == list(psr)
+
     def test_sequence_names_match(self):
         def match(name1, name2):
             seq1 = Sequence(name1, 'ACGT')


=====================================
tests/test_open.py
=====================================
@@ -0,0 +1,231 @@
+from pathlib import Path
+
+import dnaio
+from xopen import xopen
+
+import pytest
+
+
+@pytest.fixture(params=["", ".gz", ".bz2", ".xz"])
+def extension(request):
+    return request.param
+
+
+@pytest.fixture(params=["fasta", "fastq"])
+def fileformat(request):
+    return request.param
+
+
+SIMPLE_RECORDS = {
+    "fasta": [
+        dnaio.Sequence("first_sequence", "SEQUENCE1"),
+        dnaio.Sequence("second_sequence", "SEQUENCE2"),
+    ],
+    "fastq": [
+        dnaio.Sequence("first_sequence", "SEQUENCE1", ":6;;8<=:<"),
+        dnaio.Sequence("second_sequence", "SEQUENCE2", "83<??:(61"),
+    ],
+}
+
+
+def formatted_sequence(record, fileformat):
+    if fileformat == "fastq":
+        return "@{}\n{}\n+\n{}\n".format(record.name, record.sequence, record.qualities)
+    else:
+        return ">{}\n{}\n".format(record.name, record.sequence)
+
+
+def formatted_sequences(records, fileformat):
+    return "".join(formatted_sequence(record, fileformat) for record in records)
+
+
+def test_formatted_sequence():
+    s = dnaio.Sequence("s1", "ACGT", "HHHH")
+    assert ">s1\nACGT\n" == formatted_sequence(s, "fasta")
+    assert "@s1\nACGT\n+\nHHHH\n" == formatted_sequence(s, "fastq")
+
+
+def test_version():
+    _ = dnaio.__version__
+
+
+def test_read(fileformat, extension):
+    with dnaio.open("tests/data/simple." + fileformat + extension) as f:
+        records = list(f)
+    assert records == SIMPLE_RECORDS[fileformat]
+
+
+def test_read_pathlib_path(fileformat, extension):
+    path = Path("tests/data/simple." + fileformat + extension)
+    with dnaio.open(path) as f:
+        records = list(f)
+    assert records == SIMPLE_RECORDS[fileformat]
+
+
+def test_detect_fastq_from_content():
+    """FASTQ file that is not named .fastq"""
+    with dnaio.open('tests/data/missingextension') as f:
+        record = next(iter(f))
+        assert record.name == 'prefix:1_13_573/1'
+
+
+def test_detect_compressed_fastq_from_content():
+    """Compressed FASTQ file that is not named .fastq.gz"""
+    with dnaio.open('tests/data/missingextension.gz') as f:
+        record = next(iter(f))
+    assert record.name == 'prefix:1_13_573/1'
+
+
+def test_write(tmpdir, extension):
+    s = dnaio.Sequence('name', 'ACGT', 'HHHH')
+    out_fastq = tmpdir.join("out.fastq" + extension)
+    with dnaio.open(str(out_fastq), mode='w') as f:
+        f.write(s)
+    with xopen(out_fastq) as f:
+        assert f.read() == '@name\nACGT\n+\nHHHH\n'
+
+
+def test_write_with_xopen(tmpdir, fileformat, extension):
+    s = dnaio.Sequence('name', 'ACGT', 'HHHH')
+    out_fastq = str(tmpdir.join("out." + fileformat + extension))
+    with xopen(out_fastq, 'wb') as outer_f:
+        with dnaio.open(outer_f, mode='w', fileformat=fileformat) as f:
+            f.write(s)
+
+    with xopen(out_fastq) as f:
+        if fileformat == "fasta":
+            assert f.read() == ">name\nACGT\n"
+        else:
+            assert f.read() == "@name\nACGT\n+\nHHHH\n"
+
+
+def test_write_pathlib(tmpdir, fileformat, extension):
+    s1 = dnaio.Sequence("s1", "ACGT", "HHHH")
+    path = Path(str(tmpdir / ("out." + fileformat + extension)))
+    with dnaio.open(path, mode="w") as f:
+        f.write(s1)
+    if fileformat == "fasta":
+        expected = b">s1\nACGT\n"
+    else:
+        expected = b"@s1\nACGT\n+\nHHHH\n"
+    with xopen(path, "rb") as f:
+        assert f.read() == expected
+
+
+def test_write_paired_same_path(tmpdir):
+    path1 = str(tmpdir / "same.fastq")
+    path2 = str(tmpdir / "same.fastq")
+    with pytest.raises(ValueError) as e:
+        with dnaio.open(file1=path1, file2=path2, mode="w") as f:
+            pass
+
+
+def test_write_paired(tmpdir, fileformat, extension):
+    r1 = [
+        dnaio.Sequence("s1", "ACGT", "HHHH"),
+        dnaio.Sequence("s2", "CGCA", "8383"),
+    ]
+    r2 = [
+        dnaio.Sequence("t1", "TCGT", "5HHH"),
+        dnaio.Sequence("t2", "TGCA", "5383"),
+    ]
+    path1 = str(tmpdir / ("out.1." + fileformat + extension))
+    path2 = str(tmpdir / ("out.2." + fileformat + extension))
+
+    with dnaio.open(path1, file2=path2, fileformat=fileformat, mode="w") as f:
+        f.write(r1[0], r2[0])
+        f.write(r1[1], r2[1])
+    with xopen(path1) as f:
+        assert formatted_sequences(r1, fileformat) == f.read()
+    with xopen(path2) as f:
+        assert formatted_sequences(r2, fileformat) == f.read()
+
+
+def test_write_interleaved(tmpdir, fileformat, extension):
+    r1 = [
+        dnaio.Sequence("s1", "ACGT", "HHHH"),
+        dnaio.Sequence("s2", "CGCA", "8383"),
+    ]
+    r2 = [
+        dnaio.Sequence("t1", "TCGT", "5HHH"),
+        dnaio.Sequence("t2", "TGCA", "5383"),
+    ]
+    path = str(tmpdir / ("out.interleaved." + fileformat + extension))
+
+    with dnaio.open(path, interleaved=True, fileformat=fileformat, mode="w") as f:
+        f.write(r1[0], r2[0])
+        f.write(r1[1], r2[1])
+    expected = [r1[0], r2[0], r1[1], r2[1]]
+    with xopen(path) as f:
+        assert formatted_sequences(expected, fileformat) == f.read()
+
+
+def test_append(tmpdir, fileformat, extension):
+    s1 = dnaio.Sequence("s1", "ACGT", "HHHH")
+    s2 = dnaio.Sequence("s2", "CGCA", "8383")
+    path = str(tmpdir / ("out." + fileformat + extension))
+    with dnaio.open(path, mode="w") as f:
+        f.write(s1)
+    with dnaio.open(path, mode="a") as f:
+        f.write(s2)
+    with xopen(path) as f:
+        assert formatted_sequences([s1, s2], fileformat) == f.read()
+
+
+def test_append_paired(tmpdir, fileformat, extension):
+    r1 = [
+        dnaio.Sequence("s1", "ACGT", "HHHH"),
+        dnaio.Sequence("s2", "CGCA", "8383"),
+    ]
+    r2 = [
+        dnaio.Sequence("t1", "TCGT", "5HHH"),
+        dnaio.Sequence("t2", "TGCA", "5383"),
+    ]
+    path1 = str(tmpdir / ("out.1." + fileformat + extension))
+    path2 = str(tmpdir / ("out.2." + fileformat + extension))
+
+    with dnaio.open(path1, file2=path2, fileformat=fileformat, mode="w") as f:
+        f.write(r1[0], r2[0])
+    with dnaio.open(path1, file2=path2, fileformat=fileformat, mode="a") as f:
+        f.write(r1[1], r2[1])
+    with xopen(path1) as f:
+        assert formatted_sequences(r1, fileformat) == f.read()
+    with xopen(path2) as f:
+        assert formatted_sequences(r2, fileformat) == f.read()
+
+
+def test_append_interleaved(tmpdir, fileformat, extension):
+    r1 = [
+        dnaio.Sequence("s1", "ACGT", "HHHH"),
+        dnaio.Sequence("s2", "CGCA", "8383"),
+    ]
+    r2 = [
+        dnaio.Sequence("t1", "TCGT", "5HHH"),
+        dnaio.Sequence("t2", "TGCA", "5383"),
+    ]
+    path = str(tmpdir / ("out.interleaved." + fileformat + extension))
+
+    with dnaio.open(path, interleaved=True, fileformat=fileformat, mode="w") as f:
+        f.write(r1[0], r2[0])
+    with dnaio.open(path, interleaved=True, fileformat=fileformat, mode="a") as f:
+        f.write(r1[1], r2[1])
+    expected = [r1[0], r2[0], r1[1], r2[1]]
+    with xopen(path) as f:
+        assert formatted_sequences(expected, fileformat) == f.read()
+
+
+def make_random_fasta(path, n_records):
+    from random import choice
+    with xopen(path, "w") as f:
+        for i in range(n_records):
+            name = "sequence_{}".format(i)
+            sequence = "".join(choice("ACGT") for _ in range(300))
+            print(">", name, "\n", sequence, sep="", file=f)
+
+
+def test_islice_gzip_does_not_fail(tmpdir):
+    path = str(tmpdir / "file.fasta.gz")
+    make_random_fasta(path, 100)
+    f = dnaio.open(path)
+    next(iter(f))
+    f.close()

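For quick reference, a minimal sketch of the interleaved reading/writing API that the new tests above exercise (the file name and record contents are made up for illustration):

    import dnaio

    r1 = dnaio.Sequence("read1/1", "ACGT", "HHHH")
    r2 = dnaio.Sequence("read1/2", "TCGT", "5HHH")

    # Write both mates of each pair into a single interleaved FASTQ file ...
    with dnaio.open("interleaved.fastq", interleaved=True, mode="w") as writer:
        writer.write(r1, r2)

    # ... and read them back as (R1, R2) tuples.
    with dnaio.open("interleaved.fastq", interleaved=True) as reader:
        for mate1, mate2 in reader:
            print(mate1.name, mate2.name)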

=====================================
tox.ini
=====================================
@@ -4,4 +4,19 @@ envlist = py34,py35,py36,py37
 [testenv]
 deps =
     pytest
-commands = pytest
+    coverage
+commands =
+    coverage run --concurrency=multiprocessing -m pytest --doctest-modules --pyargs tests/
+    coverage combine
+    coverage report
+
+[coverage:run]
+parallel = True
+include =
+    */site-packages/dnaio/*
+    tests/*
+
+[coverage:paths]
+source =
+    src/
+    */site-packages/


=====================================
versioneer.py deleted
=====================================
@@ -1,1822 +0,0 @@
-
-# Version: 0.18
-
-"""The Versioneer - like a rocketeer, but for versions.
-
-The Versioneer
-==============
-
-* like a rocketeer, but for versions!
-* https://github.com/warner/python-versioneer
-* Brian Warner
-* License: Public Domain
-* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy
-* [![Latest Version]
-(https://pypip.in/version/versioneer/badge.svg?style=flat)
-](https://pypi.python.org/pypi/versioneer/)
-* [![Build Status]
-(https://travis-ci.org/warner/python-versioneer.png?branch=master)
-](https://travis-ci.org/warner/python-versioneer)
-
-This is a tool for managing a recorded version number in distutils-based
-python projects. The goal is to remove the tedious and error-prone "update
-the embedded version string" step from your release process. Making a new
-release should be as easy as recording a new tag in your version-control
-system, and maybe making new tarballs.
-
-
-## Quick Install
-
-* `pip install versioneer` to somewhere in your $PATH
-* add a `[versioneer]` section to your setup.cfg (see below)
-* run `versioneer install` in your source tree, commit the results
-
-## Version Identifiers
-
-Source trees come from a variety of places:
-
-* a version-control system checkout (mostly used by developers)
-* a nightly tarball, produced by build automation
-* a snapshot tarball, produced by a web-based VCS browser, like github's
-  "tarball from tag" feature
-* a release tarball, produced by "setup.py sdist", distributed through PyPI
-
-Within each source tree, the version identifier (either a string or a number,
-this tool is format-agnostic) can come from a variety of places:
-
-* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows
-  about recent "tags" and an absolute revision-id
-* the name of the directory into which the tarball was unpacked
-* an expanded VCS keyword ($Id$, etc)
-* a `_version.py` created by some earlier build step
-
-For released software, the version identifier is closely related to a VCS
-tag. Some projects use tag names that include more than just the version
-string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool
-needs to strip the tag prefix to extract the version identifier. For
-unreleased software (between tags), the version identifier should provide
-enough information to help developers recreate the same tree, while also
-giving them an idea of roughly how old the tree is (after version 1.2, before
-version 1.3). Many VCS systems can report a description that captures this,
-for example `git describe --tags --dirty --always` reports things like
-"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the
-0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
-uncommitted changes).
-
-The version identifier is used for multiple purposes:
-
-* to allow the module to self-identify its version: `myproject.__version__`
-* to choose a name and prefix for a 'setup.py sdist' tarball
-
-## Theory of Operation
-
-Versioneer works by adding a special `_version.py` file into your source
-tree, where your `__init__.py` can import it. This `_version.py` knows how to
-dynamically ask the VCS tool for version information at import time.
-
-`_version.py` also contains `$Revision$` markers, and the installation
-process marks `_version.py` to have this marker rewritten with a tag name
-during the `git archive` command. As a result, generated tarballs will
-contain enough information to get the proper version.
-
-To allow `setup.py` to compute a version too, a `versioneer.py` is added to
-the top level of your source tree, next to `setup.py` and the `setup.cfg`
-that configures it. This overrides several distutils/setuptools commands to
-compute the version when invoked, and changes `setup.py build` and `setup.py
-sdist` to replace `_version.py` with a small static file that contains just
-the generated version data.
-
-## Installation
-
-See [INSTALL.md](./INSTALL.md) for detailed installation instructions.
-
-## Version-String Flavors
-
-Code which uses Versioneer can learn about its version string at runtime by
-importing `_version` from your main `__init__.py` file and running the
-`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can
-import the top-level `versioneer.py` and run `get_versions()`.
-
-Both functions return a dictionary with different flavors of version
-information:
-
-* `['version']`: A condensed version string, rendered using the selected
-  style. This is the most commonly used value for the project's version
-  string. The default "pep440" style yields strings like `0.11`,
-  `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section
-  below for alternative styles.
-
-* `['full-revisionid']`: detailed revision identifier. For Git, this is the
-  full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac".
-
-* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the
-  commit date in ISO 8601 format. This will be None if the date is not
-  available.
-
-* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that
-  this is only accurate if run in a VCS checkout, otherwise it is likely to
-  be False or None
-
-* `['error']`: if the version string could not be computed, this will be set
-  to a string describing the problem, otherwise it will be None. It may be
-  useful to throw an exception in setup.py if this is set, to avoid e.g.
-  creating tarballs with a version string of "unknown".
-
-Some variants are more useful than others. Including `full-revisionid` in a
-bug report should allow developers to reconstruct the exact code being tested
-(or indicate the presence of local changes that should be shared with the
-developers). `version` is suitable for display in an "about" box or a CLI
-`--version` output: it can be easily compared against release notes and lists
-of bugs fixed in various releases.
-
-The installer adds the following text to your `__init__.py` to place a basic
-version in `YOURPROJECT.__version__`:
-
-    from ._version import get_versions
-    __version__ = get_versions()['version']
-    del get_versions
-
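For illustration, a minimal sketch of the `get_versions()` call described above, as it was used from `setup.py` before this removal (the values in the comments are examples, not real output):

    import versioneer  # the top-level versioneer.py that sits next to setup.py

    info = versioneer.get_versions()
    info["version"]          # e.g. "0.11+2.g1076c97.dirty" (default "pep440" style)
    info["full-revisionid"]  # full SHA1 commit id, or None
    info["dirty"]            # True if the tree has uncommitted changes
    info["error"]            # None, or a string describing why lookup failed
    info["date"]             # ISO 8601 commit date of HEAD, or None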
-## Styles
-
-The setup.cfg `style=` configuration controls how the VCS information is
-rendered into a version string.
-
-The default style, "pep440", produces a PEP440-compliant string, equal to the
-un-prefixed tag name for actual releases, and containing an additional "local
-version" section with more detail for in-between builds. For Git, this is
-TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags
---dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the
-tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and
-that this commit is two revisions ("+2") beyond the "0.11" tag. For released
-software (exactly equal to a known tag), the identifier will only contain the
-stripped tag, e.g. "0.11".
-
-Other styles are available. See [details.md](details.md) in the Versioneer
-source tree for descriptions.
-
-## Debugging
-
-Versioneer tries to avoid fatal errors: if something goes wrong, it will tend
-to return a version of "0+unknown". To investigate the problem, run `setup.py
-version`, which will run the version-lookup code in a verbose mode, and will
-display the full contents of `get_versions()` (including the `error` string,
-which may help identify what went wrong).
-
-## Known Limitations
-
-Some situations are known to cause problems for Versioneer. This details the
-most significant ones. More can be found on Github
-[issues page](https://github.com/warner/python-versioneer/issues).
-
-### Subprojects
-
-Versioneer has limited support for source trees in which `setup.py` is not in
-the root directory (e.g. `setup.py` and `.git/` are *not* siblings). There are
-two common reasons why `setup.py` might not be in the root:
-
-* Source trees which contain multiple subprojects, such as
-  [Buildbot](https://github.com/buildbot/buildbot), which contains both
-  "master" and "slave" subprojects, each with their own `setup.py`,
-  `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
-  distributions (and upload multiple independently-installable tarballs).
-* Source trees whose main purpose is to contain a C library, but which also
-  provide bindings to Python (and perhaps other languages) in subdirectories.
-
-Versioneer will look for `.git` in parent directories, and most operations
-should get the right version string. However `pip` and `setuptools` have bugs
-and implementation details which frequently cause `pip install .` from a
-subproject directory to fail to find a correct version string (so it usually
-defaults to `0+unknown`).
-
-`pip install --editable .` should work correctly. `setup.py install` might
-work too.
-
-Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
-some later version.
-
-[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking
-this issue. The discussion in
-[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the
-issue from the Versioneer side in more detail.
-[pip PR#3176](https://github.com/pypa/pip/pull/3176) and
-[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
-pip to let Versioneer work correctly.
-
-Versioneer-0.16 and earlier only looked for a `.git` directory next to the
-`setup.cfg`, so subprojects were completely unsupported with those releases.
-
-### Editable installs with setuptools <= 18.5
-
-`setup.py develop` and `pip install --editable .` allow you to install a
-project into a virtualenv once, then continue editing the source code (and
-test) without re-installing after every change.
-
-"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a
-convenient way to specify executable scripts that should be installed along
-with the python package.
-
-These both work as expected when using modern setuptools. When using
-setuptools-18.5 or earlier, however, certain operations will cause
-`pkg_resources.DistributionNotFound` errors when running the entrypoint
-script, which must be resolved by re-installing the package. This happens
-when the install happens with one version, then the egg_info data is
-regenerated while a different version is checked out. Many setup.py commands
-cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into
-a different virtualenv), so this can be surprising.
-
-[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes
-this one, but upgrading to a newer version of setuptools should probably
-resolve it.
-
-### Unicode version strings
-
-While Versioneer works (and is continually tested) with both Python 2 and
-Python 3, it is not entirely consistent with bytes-vs-unicode distinctions.
-Newer releases probably generate unicode version strings on py2. It's not
-clear that this is wrong, but it may be surprising for applications when they
-write these strings to a network connection or include them in bytes-oriented
-APIs like cryptographic checksums.
-
-[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates
-this question.
-
-
-## Updating Versioneer
-
-To upgrade your project to a new release of Versioneer, do the following:
-
-* install the new Versioneer (`pip install -U versioneer` or equivalent)
-* edit `setup.cfg`, if necessary, to include any new configuration settings
-  indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details.
-* re-run `versioneer install` in your source tree, to replace
-  `SRC/_version.py`
-* commit any changed files
-
-## Future Directions
-
-This tool is designed to make it easily extended to other version-control
-systems: all VCS-specific components are in separate directories like
-src/git/ . The top-level `versioneer.py` script is assembled from these
-components by running make-versioneer.py . In the future, make-versioneer.py
-will take a VCS name as an argument, and will construct a version of
-`versioneer.py` that is specific to the given VCS. It might also take the
-configuration arguments that are currently provided manually during
-installation by editing setup.py . Alternatively, it might go the other
-direction and include code from all supported VCS systems, reducing the
-number of intermediate scripts.
-
-
-## License
-
-To make Versioneer easier to embed, all its code is dedicated to the public
-domain. The `_version.py` that it creates is also in the public domain.
-Specifically, both are released under the Creative Commons "Public Domain
-Dedication" license (CC0-1.0), as described in
-https://creativecommons.org/publicdomain/zero/1.0/ .
-
-"""
-
-from __future__ import print_function
-try:
-    import configparser
-except ImportError:
-    import ConfigParser as configparser
-import errno
-import json
-import os
-import re
-import subprocess
-import sys
-
-
-class VersioneerConfig:
-    """Container for Versioneer configuration parameters."""
-
-
-def get_root():
-    """Get the project root directory.
-
-    We require that all commands are run from the project root, i.e. the
-    directory that contains setup.py, setup.cfg, and versioneer.py .
-    """
-    root = os.path.realpath(os.path.abspath(os.getcwd()))
-    setup_py = os.path.join(root, "setup.py")
-    versioneer_py = os.path.join(root, "versioneer.py")
-    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
-        # allow 'python path/to/setup.py COMMAND'
-        root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
-        setup_py = os.path.join(root, "setup.py")
-        versioneer_py = os.path.join(root, "versioneer.py")
-    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
-        err = ("Versioneer was unable to run the project root directory. "
-               "Versioneer requires setup.py to be executed from "
-               "its immediate directory (like 'python setup.py COMMAND'), "
-               "or in a way that lets it use sys.argv[0] to find the root "
-               "(like 'python path/to/setup.py COMMAND').")
-        raise VersioneerBadRootError(err)
-    try:
-        # Certain runtime workflows (setup.py install/develop in a setuptools
-        # tree) execute all dependencies in a single python process, so
-        # "versioneer" may be imported multiple times, and python's shared
-        # module-import table will cache the first one. So we can't use
-        # os.path.dirname(__file__), as that will find whichever
-        # versioneer.py was first imported, even in later projects.
-        me = os.path.realpath(os.path.abspath(__file__))
-        me_dir = os.path.normcase(os.path.splitext(me)[0])
-        vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
-        if me_dir != vsr_dir:
-            print("Warning: build in %s is using versioneer.py from %s"
-                  % (os.path.dirname(me), versioneer_py))
-    except NameError:
-        pass
-    return root
-
-
-def get_config_from_root(root):
-    """Read the project setup.cfg file to determine Versioneer config."""
-    # This might raise EnvironmentError (if setup.cfg is missing), or
-    # configparser.NoSectionError (if it lacks a [versioneer] section), or
-    # configparser.NoOptionError (if it lacks "VCS="). See the docstring at
-    # the top of versioneer.py for instructions on writing your setup.cfg .
-    setup_cfg = os.path.join(root, "setup.cfg")
-    parser = configparser.SafeConfigParser()
-    with open(setup_cfg, "r") as f:
-        parser.readfp(f)
-    VCS = parser.get("versioneer", "VCS")  # mandatory
-
-    def get(parser, name):
-        if parser.has_option("versioneer", name):
-            return parser.get("versioneer", name)
-        return None
-    cfg = VersioneerConfig()
-    cfg.VCS = VCS
-    cfg.style = get(parser, "style") or ""
-    cfg.versionfile_source = get(parser, "versionfile_source")
-    cfg.versionfile_build = get(parser, "versionfile_build")
-    cfg.tag_prefix = get(parser, "tag_prefix")
-    if cfg.tag_prefix in ("''", '""'):
-        cfg.tag_prefix = ""
-    cfg.parentdir_prefix = get(parser, "parentdir_prefix")
-    cfg.verbose = get(parser, "verbose")
-    return cfg
-
-
-class NotThisMethod(Exception):
-    """Exception raised if a method is not valid for the current scenario."""
-
-
-# these dictionaries contain VCS-specific tools
-LONG_VERSION_PY = {}
-HANDLERS = {}
-
-
-def register_vcs_handler(vcs, method):  # decorator
-    """Decorator to mark a method as the handler for a particular VCS."""
-    def decorate(f):
-        """Store f in HANDLERS[vcs][method]."""
-        if vcs not in HANDLERS:
-            HANDLERS[vcs] = {}
-        HANDLERS[vcs][method] = f
-        return f
-    return decorate
-
-
-def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
-                env=None):
-    """Call the given command(s)."""
-    assert isinstance(commands, list)
-    p = None
-    for c in commands:
-        try:
-            dispcmd = str([c] + args)
-            # remember shell=False, so use git.cmd on windows, not just git
-            p = subprocess.Popen([c] + args, cwd=cwd, env=env,
-                                 stdout=subprocess.PIPE,
-                                 stderr=(subprocess.PIPE if hide_stderr
-                                         else None))
-            break
-        except EnvironmentError:
-            e = sys.exc_info()[1]
-            if e.errno == errno.ENOENT:
-                continue
-            if verbose:
-                print("unable to run %s" % dispcmd)
-                print(e)
-            return None, None
-    else:
-        if verbose:
-            print("unable to find command, tried %s" % (commands,))
-        return None, None
-    stdout = p.communicate()[0].strip()
-    if sys.version_info[0] >= 3:
-        stdout = stdout.decode()
-    if p.returncode != 0:
-        if verbose:
-            print("unable to run %s (error)" % dispcmd)
-            print("stdout was %s" % stdout)
-        return None, p.returncode
-    return stdout, p.returncode
-
-
-LONG_VERSION_PY['git'] = '''
-# This file helps to compute a version number in source trees obtained from
-# git-archive tarball (such as those provided by githubs download-from-tag
-# feature). Distribution tarballs (built by setup.py sdist) and build
-# directories (produced by setup.py build) will contain a much shorter file
-# that just contains the computed version number.
-
-# This file is released into the public domain. Generated by
-# versioneer-0.18 (https://github.com/warner/python-versioneer)
-
-"""Git implementation of _version.py."""
-
-import errno
-import os
-import re
-import subprocess
-import sys
-
-
-def get_keywords():
-    """Get the keywords needed to look up the version information."""
-    # these strings will be replaced by git during git-archive.
-    # setup.py/versioneer.py will grep for the variable names, so they must
-    # each be defined on a line of their own. _version.py will just call
-    # get_keywords().
-    git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s"
-    git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s"
-    git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s"
-    keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
-    return keywords
-
-
-class VersioneerConfig:
-    """Container for Versioneer configuration parameters."""
-
-
-def get_config():
-    """Create, populate and return the VersioneerConfig() object."""
-    # these strings are filled in when 'setup.py versioneer' creates
-    # _version.py
-    cfg = VersioneerConfig()
-    cfg.VCS = "git"
-    cfg.style = "%(STYLE)s"
-    cfg.tag_prefix = "%(TAG_PREFIX)s"
-    cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s"
-    cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s"
-    cfg.verbose = False
-    return cfg
-
-
-class NotThisMethod(Exception):
-    """Exception raised if a method is not valid for the current scenario."""
-
-
-LONG_VERSION_PY = {}
-HANDLERS = {}
-
-
-def register_vcs_handler(vcs, method):  # decorator
-    """Decorator to mark a method as the handler for a particular VCS."""
-    def decorate(f):
-        """Store f in HANDLERS[vcs][method]."""
-        if vcs not in HANDLERS:
-            HANDLERS[vcs] = {}
-        HANDLERS[vcs][method] = f
-        return f
-    return decorate
-
-
-def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
-                env=None):
-    """Call the given command(s)."""
-    assert isinstance(commands, list)
-    p = None
-    for c in commands:
-        try:
-            dispcmd = str([c] + args)
-            # remember shell=False, so use git.cmd on windows, not just git
-            p = subprocess.Popen([c] + args, cwd=cwd, env=env,
-                                 stdout=subprocess.PIPE,
-                                 stderr=(subprocess.PIPE if hide_stderr
-                                         else None))
-            break
-        except EnvironmentError:
-            e = sys.exc_info()[1]
-            if e.errno == errno.ENOENT:
-                continue
-            if verbose:
-                print("unable to run %%s" %% dispcmd)
-                print(e)
-            return None, None
-    else:
-        if verbose:
-            print("unable to find command, tried %%s" %% (commands,))
-        return None, None
-    stdout = p.communicate()[0].strip()
-    if sys.version_info[0] >= 3:
-        stdout = stdout.decode()
-    if p.returncode != 0:
-        if verbose:
-            print("unable to run %%s (error)" %% dispcmd)
-            print("stdout was %%s" %% stdout)
-        return None, p.returncode
-    return stdout, p.returncode
-
-
-def versions_from_parentdir(parentdir_prefix, root, verbose):
-    """Try to determine the version from the parent directory name.
-
-    Source tarballs conventionally unpack into a directory that includes both
-    the project name and a version string. We will also support searching up
-    two directory levels for an appropriately named parent directory
-    """
-    rootdirs = []
-
-    for i in range(3):
-        dirname = os.path.basename(root)
-        if dirname.startswith(parentdir_prefix):
-            return {"version": dirname[len(parentdir_prefix):],
-                    "full-revisionid": None,
-                    "dirty": False, "error": None, "date": None}
-        else:
-            rootdirs.append(root)
-            root = os.path.dirname(root)  # up a level
-
-    if verbose:
-        print("Tried directories %%s but none started with prefix %%s" %%
-              (str(rootdirs), parentdir_prefix))
-    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
-
-
-@register_vcs_handler("git", "get_keywords")
-def git_get_keywords(versionfile_abs):
-    """Extract version information from the given file."""
-    # the code embedded in _version.py can just fetch the value of these
-    # keywords. When used from setup.py, we don't want to import _version.py,
-    # so we do it with a regexp instead. This function is not used from
-    # _version.py.
-    keywords = {}
-    try:
-        f = open(versionfile_abs, "r")
-        for line in f.readlines():
-            if line.strip().startswith("git_refnames ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["refnames"] = mo.group(1)
-            if line.strip().startswith("git_full ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["full"] = mo.group(1)
-            if line.strip().startswith("git_date ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["date"] = mo.group(1)
-        f.close()
-    except EnvironmentError:
-        pass
-    return keywords
-
-
-@register_vcs_handler("git", "keywords")
-def git_versions_from_keywords(keywords, tag_prefix, verbose):
-    """Get version information from git keywords."""
-    if not keywords:
-        raise NotThisMethod("no keywords at all, weird")
-    date = keywords.get("date")
-    if date is not None:
-        # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant
-        # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601
-        # -like" string, which we must then edit to make compliant), because
-        # it's been around since git-1.5.3, and it's too difficult to
-        # discover which version we're using, or to work around using an
-        # older one.
-        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
-    refnames = keywords["refnames"].strip()
-    if refnames.startswith("$Format"):
-        if verbose:
-            print("keywords are unexpanded, not using")
-        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
-    refs = set([r.strip() for r in refnames.strip("()").split(",")])
-    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
-    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
-    TAG = "tag: "
-    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
-    if not tags:
-        # Either we're using git < 1.8.3, or there really are no tags. We use
-        # a heuristic: assume all version tags have a digit. The old git %%d
-        # expansion behaves like git log --decorate=short and strips out the
-        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
-        # between branches and tags. By ignoring refnames without digits, we
-        # filter out many common branch names like "release" and
-        # "stabilization", as well as "HEAD" and "master".
-        tags = set([r for r in refs if re.search(r'\d', r)])
-        if verbose:
-            print("discarding '%%s', no digits" %% ",".join(refs - tags))
-    if verbose:
-        print("likely tags: %%s" %% ",".join(sorted(tags)))
-    for ref in sorted(tags):
-        # sorting will prefer e.g. "2.0" over "2.0rc1"
-        if ref.startswith(tag_prefix):
-            r = ref[len(tag_prefix):]
-            if verbose:
-                print("picking %%s" %% r)
-            return {"version": r,
-                    "full-revisionid": keywords["full"].strip(),
-                    "dirty": False, "error": None,
-                    "date": date}
-    # no suitable tags, so version is "0+unknown", but full hex is still there
-    if verbose:
-        print("no suitable tags, using unknown + full revision id")
-    return {"version": "0+unknown",
-            "full-revisionid": keywords["full"].strip(),
-            "dirty": False, "error": "no suitable tags", "date": None}
-
-
-@register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
-    """Get version from 'git describe' in the root of the source tree.
-
-    This only gets called if the git-archive 'subst' keywords were *not*
-    expanded, and _version.py hasn't already been rewritten with a short
-    version string, meaning we're inside a checked out source tree.
-    """
-    GITS = ["git"]
-    if sys.platform == "win32":
-        GITS = ["git.cmd", "git.exe"]
-
-    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
-                          hide_stderr=True)
-    if rc != 0:
-        if verbose:
-            print("Directory %%s not under git control" %% root)
-        raise NotThisMethod("'git rev-parse --git-dir' returned error")
-
-    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
-    # if there isn't one, this yields HEX[-dirty] (no NUM)
-    describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
-                                          "--always", "--long",
-                                          "--match", "%%s*" %% tag_prefix],
-                                   cwd=root)
-    # --long was added in git-1.5.5
-    if describe_out is None:
-        raise NotThisMethod("'git describe' failed")
-    describe_out = describe_out.strip()
-    full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
-    if full_out is None:
-        raise NotThisMethod("'git rev-parse' failed")
-    full_out = full_out.strip()
-
-    pieces = {}
-    pieces["long"] = full_out
-    pieces["short"] = full_out[:7]  # maybe improved later
-    pieces["error"] = None
-
-    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
-    # TAG might have hyphens.
-    git_describe = describe_out
-
-    # look for -dirty suffix
-    dirty = git_describe.endswith("-dirty")
-    pieces["dirty"] = dirty
-    if dirty:
-        git_describe = git_describe[:git_describe.rindex("-dirty")]
-
-    # now we have TAG-NUM-gHEX or HEX
-
-    if "-" in git_describe:
-        # TAG-NUM-gHEX
-        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
-        if not mo:
-            # unparseable. Maybe git-describe is misbehaving?
-            pieces["error"] = ("unable to parse git-describe output: '%%s'"
-                               %% describe_out)
-            return pieces
-
-        # tag
-        full_tag = mo.group(1)
-        if not full_tag.startswith(tag_prefix):
-            if verbose:
-                fmt = "tag '%%s' doesn't start with prefix '%%s'"
-                print(fmt %% (full_tag, tag_prefix))
-            pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'"
-                               %% (full_tag, tag_prefix))
-            return pieces
-        pieces["closest-tag"] = full_tag[len(tag_prefix):]
-
-        # distance: number of commits since tag
-        pieces["distance"] = int(mo.group(2))
-
-        # commit: short hex revision ID
-        pieces["short"] = mo.group(3)
-
-    else:
-        # HEX: no tags
-        pieces["closest-tag"] = None
-        count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
-                                    cwd=root)
-        pieces["distance"] = int(count_out)  # total number of commits
-
-    # commit date: see ISO-8601 comment in git_versions_from_keywords()
-    date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"],
-                       cwd=root)[0].strip()
-    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
-
-    return pieces
-
-
-def plus_or_dot(pieces):
-    """Return a + if we don't already have one, else return a ."""
-    if "+" in pieces.get("closest-tag", ""):
-        return "."
-    return "+"
-
-
-def render_pep440(pieces):
-    """Build up version string, with post-release "local version identifier".
-
-    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
-    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
-
-    Exceptions:
-    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"] or pieces["dirty"]:
-            rendered += plus_or_dot(pieces)
-            rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
-            if pieces["dirty"]:
-                rendered += ".dirty"
-    else:
-        # exception #1
-        rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"],
-                                          pieces["short"])
-        if pieces["dirty"]:
-            rendered += ".dirty"
-    return rendered
-
-
-def render_pep440_pre(pieces):
-    """TAG[.post.devDISTANCE] -- No -dirty.
-
-    Exceptions:
-    1: no tags. 0.post.devDISTANCE
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"]:
-            rendered += ".post.dev%%d" %% pieces["distance"]
-    else:
-        # exception #1
-        rendered = "0.post.dev%%d" %% pieces["distance"]
-    return rendered
-
-
-def render_pep440_post(pieces):
-    """TAG[.postDISTANCE[.dev0]+gHEX] .
-
-    The ".dev0" means dirty. Note that .dev0 sorts backwards
-    (a dirty tree will appear "older" than the corresponding clean one),
-    but you shouldn't be releasing software with -dirty anyways.
-
-    Exceptions:
-    1: no tags. 0.postDISTANCE[.dev0]
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"] or pieces["dirty"]:
-            rendered += ".post%%d" %% pieces["distance"]
-            if pieces["dirty"]:
-                rendered += ".dev0"
-            rendered += plus_or_dot(pieces)
-            rendered += "g%%s" %% pieces["short"]
-    else:
-        # exception #1
-        rendered = "0.post%%d" %% pieces["distance"]
-        if pieces["dirty"]:
-            rendered += ".dev0"
-        rendered += "+g%%s" %% pieces["short"]
-    return rendered
-
-
-def render_pep440_old(pieces):
-    """TAG[.postDISTANCE[.dev0]] .
-
-    The ".dev0" means dirty.
-
-    Exceptions:
-    1: no tags. 0.postDISTANCE[.dev0]
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"] or pieces["dirty"]:
-            rendered += ".post%%d" %% pieces["distance"]
-            if pieces["dirty"]:
-                rendered += ".dev0"
-    else:
-        # exception #1
-        rendered = "0.post%%d" %% pieces["distance"]
-        if pieces["dirty"]:
-            rendered += ".dev0"
-    return rendered
-
-
-def render_git_describe(pieces):
-    """TAG[-DISTANCE-gHEX][-dirty].
-
-    Like 'git describe --tags --dirty --always'.
-
-    Exceptions:
-    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"]:
-            rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
-    else:
-        # exception #1
-        rendered = pieces["short"]
-    if pieces["dirty"]:
-        rendered += "-dirty"
-    return rendered
-
-
-def render_git_describe_long(pieces):
-    """TAG-DISTANCE-gHEX[-dirty].
-
-    Like 'git describe --tags --dirty --always --long'.
-    The distance/hash is unconditional.
-
-    Exceptions:
-    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
-    else:
-        # exception #1
-        rendered = pieces["short"]
-    if pieces["dirty"]:
-        rendered += "-dirty"
-    return rendered
-
-
-def render(pieces, style):
-    """Render the given version pieces into the requested style."""
-    if pieces["error"]:
-        return {"version": "unknown",
-                "full-revisionid": pieces.get("long"),
-                "dirty": None,
-                "error": pieces["error"],
-                "date": None}
-
-    if not style or style == "default":
-        style = "pep440"  # the default
-
-    if style == "pep440":
-        rendered = render_pep440(pieces)
-    elif style == "pep440-pre":
-        rendered = render_pep440_pre(pieces)
-    elif style == "pep440-post":
-        rendered = render_pep440_post(pieces)
-    elif style == "pep440-old":
-        rendered = render_pep440_old(pieces)
-    elif style == "git-describe":
-        rendered = render_git_describe(pieces)
-    elif style == "git-describe-long":
-        rendered = render_git_describe_long(pieces)
-    else:
-        raise ValueError("unknown style '%%s'" %% style)
-
-    return {"version": rendered, "full-revisionid": pieces["long"],
-            "dirty": pieces["dirty"], "error": None,
-            "date": pieces.get("date")}
-
-
-def get_versions():
-    """Get version information or return default if unable to do so."""
-    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
-    # __file__, we can work backwards from there to the root. Some
-    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
-    # case we can only use expanded keywords.
-
-    cfg = get_config()
-    verbose = cfg.verbose
-
-    try:
-        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
-                                          verbose)
-    except NotThisMethod:
-        pass
-
-    try:
-        root = os.path.realpath(__file__)
-        # versionfile_source is the relative path from the top of the source
-        # tree (where the .git directory might live) to this file. Invert
-        # this to find the root from __file__.
-        for i in cfg.versionfile_source.split('/'):
-            root = os.path.dirname(root)
-    except NameError:
-        return {"version": "0+unknown", "full-revisionid": None,
-                "dirty": None,
-                "error": "unable to find root of source tree",
-                "date": None}
-
-    try:
-        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
-        return render(pieces, cfg.style)
-    except NotThisMethod:
-        pass
-
-    try:
-        if cfg.parentdir_prefix:
-            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
-    except NotThisMethod:
-        pass
-
-    return {"version": "0+unknown", "full-revisionid": None,
-            "dirty": None,
-            "error": "unable to compute version", "date": None}
-'''
-
-
-@register_vcs_handler("git", "get_keywords")
-def git_get_keywords(versionfile_abs):
-    """Extract version information from the given file."""
-    # the code embedded in _version.py can just fetch the value of these
-    # keywords. When used from setup.py, we don't want to import _version.py,
-    # so we do it with a regexp instead. This function is not used from
-    # _version.py.
-    keywords = {}
-    try:
-        f = open(versionfile_abs, "r")
-        for line in f.readlines():
-            if line.strip().startswith("git_refnames ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["refnames"] = mo.group(1)
-            if line.strip().startswith("git_full ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["full"] = mo.group(1)
-            if line.strip().startswith("git_date ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["date"] = mo.group(1)
-        f.close()
-    except EnvironmentError:
-        pass
-    return keywords
-
-
-@register_vcs_handler("git", "keywords")
-def git_versions_from_keywords(keywords, tag_prefix, verbose):
-    """Get version information from git keywords."""
-    if not keywords:
-        raise NotThisMethod("no keywords at all, weird")
-    date = keywords.get("date")
-    if date is not None:
-        # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
-        # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
-        # -like" string, which we must then edit to make compliant), because
-        # it's been around since git-1.5.3, and it's too difficult to
-        # discover which version we're using, or to work around using an
-        # older one.
-        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
-    refnames = keywords["refnames"].strip()
-    if refnames.startswith("$Format"):
-        if verbose:
-            print("keywords are unexpanded, not using")
-        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
-    refs = set([r.strip() for r in refnames.strip("()").split(",")])
-    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
-    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
-    TAG = "tag: "
-    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
-    if not tags:
-        # Either we're using git < 1.8.3, or there really are no tags. We use
-        # a heuristic: assume all version tags have a digit. The old git %d
-        # expansion behaves like git log --decorate=short and strips out the
-        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
-        # between branches and tags. By ignoring refnames without digits, we
-        # filter out many common branch names like "release" and
-        # "stabilization", as well as "HEAD" and "master".
-        tags = set([r for r in refs if re.search(r'\d', r)])
-        if verbose:
-            print("discarding '%s', no digits" % ",".join(refs - tags))
-    if verbose:
-        print("likely tags: %s" % ",".join(sorted(tags)))
-    for ref in sorted(tags):
-        # sorting will prefer e.g. "2.0" over "2.0rc1"
-        if ref.startswith(tag_prefix):
-            r = ref[len(tag_prefix):]
-            if verbose:
-                print("picking %s" % r)
-            return {"version": r,
-                    "full-revisionid": keywords["full"].strip(),
-                    "dirty": False, "error": None,
-                    "date": date}
-    # no suitable tags, so version is "0+unknown", but full hex is still there
-    if verbose:
-        print("no suitable tags, using unknown + full revision id")
-    return {"version": "0+unknown",
-            "full-revisionid": keywords["full"].strip(),
-            "dirty": False, "error": "no suitable tags", "date": None}
-
-
-@register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
-    """Get version from 'git describe' in the root of the source tree.
-
-    This only gets called if the git-archive 'subst' keywords were *not*
-    expanded, and _version.py hasn't already been rewritten with a short
-    version string, meaning we're inside a checked out source tree.
-    """
-    GITS = ["git"]
-    if sys.platform == "win32":
-        GITS = ["git.cmd", "git.exe"]
-
-    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
-                          hide_stderr=True)
-    if rc != 0:
-        if verbose:
-            print("Directory %s not under git control" % root)
-        raise NotThisMethod("'git rev-parse --git-dir' returned error")
-
-    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
-    # if there isn't one, this yields HEX[-dirty] (no NUM)
-    describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
-                                          "--always", "--long",
-                                          "--match", "%s*" % tag_prefix],
-                                   cwd=root)
-    # --long was added in git-1.5.5
-    if describe_out is None:
-        raise NotThisMethod("'git describe' failed")
-    describe_out = describe_out.strip()
-    full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
-    if full_out is None:
-        raise NotThisMethod("'git rev-parse' failed")
-    full_out = full_out.strip()
-
-    pieces = {}
-    pieces["long"] = full_out
-    pieces["short"] = full_out[:7]  # maybe improved later
-    pieces["error"] = None
-
-    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
-    # TAG might have hyphens.
-    git_describe = describe_out
-
-    # look for -dirty suffix
-    dirty = git_describe.endswith("-dirty")
-    pieces["dirty"] = dirty
-    if dirty:
-        git_describe = git_describe[:git_describe.rindex("-dirty")]
-
-    # now we have TAG-NUM-gHEX or HEX
-
-    if "-" in git_describe:
-        # TAG-NUM-gHEX
-        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
-        if not mo:
-            # unparseable. Maybe git-describe is misbehaving?
-            pieces["error"] = ("unable to parse git-describe output: '%s'"
-                               % describe_out)
-            return pieces
-
-        # tag
-        full_tag = mo.group(1)
-        if not full_tag.startswith(tag_prefix):
-            if verbose:
-                fmt = "tag '%s' doesn't start with prefix '%s'"
-                print(fmt % (full_tag, tag_prefix))
-            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
-                               % (full_tag, tag_prefix))
-            return pieces
-        pieces["closest-tag"] = full_tag[len(tag_prefix):]
-
-        # distance: number of commits since tag
-        pieces["distance"] = int(mo.group(2))
-
-        # commit: short hex revision ID
-        pieces["short"] = mo.group(3)
-
-    else:
-        # HEX: no tags
-        pieces["closest-tag"] = None
-        count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
-                                    cwd=root)
-        pieces["distance"] = int(count_out)  # total number of commits
-
-    # commit date: see ISO-8601 comment in git_versions_from_keywords()
-    date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
-                       cwd=root)[0].strip()
-    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
-
-    return pieces
-
-
-def do_vcs_install(manifest_in, versionfile_source, ipy):
-    """Git-specific installation logic for Versioneer.
-
-    For Git, this means creating/changing .gitattributes to mark _version.py
-    for export-subst keyword substitution.
-    """
-    GITS = ["git"]
-    if sys.platform == "win32":
-        GITS = ["git.cmd", "git.exe"]
-    files = [manifest_in, versionfile_source]
-    if ipy:
-        files.append(ipy)
-    try:
-        me = __file__
-        if me.endswith(".pyc") or me.endswith(".pyo"):
-            me = os.path.splitext(me)[0] + ".py"
-        versioneer_file = os.path.relpath(me)
-    except NameError:
-        versioneer_file = "versioneer.py"
-    files.append(versioneer_file)
-    present = False
-    try:
-        f = open(".gitattributes", "r")
-        for line in f.readlines():
-            if line.strip().startswith(versionfile_source):
-                if "export-subst" in line.strip().split()[1:]:
-                    present = True
-        f.close()
-    except EnvironmentError:
-        pass
-    if not present:
-        f = open(".gitattributes", "a+")
-        f.write("%s export-subst\n" % versionfile_source)
-        f.close()
-        files.append(".gitattributes")
-    run_command(GITS, ["add", "--"] + files)
-
-
-def versions_from_parentdir(parentdir_prefix, root, verbose):
-    """Try to determine the version from the parent directory name.
-
-    Source tarballs conventionally unpack into a directory that includes both
-    the project name and a version string. We will also support searching up
-    two directory levels for an appropriately named parent directory
-    """
-    rootdirs = []
-
-    for i in range(3):
-        dirname = os.path.basename(root)
-        if dirname.startswith(parentdir_prefix):
-            return {"version": dirname[len(parentdir_prefix):],
-                    "full-revisionid": None,
-                    "dirty": False, "error": None, "date": None}
-        else:
-            rootdirs.append(root)
-            root = os.path.dirname(root)  # up a level
-
-    if verbose:
-        print("Tried directories %s but none started with prefix %s" %
-              (str(rootdirs), parentdir_prefix))
-    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
-
-
-SHORT_VERSION_PY = """
-# This file was generated by 'versioneer.py' (0.18) from
-# revision-control system data, or from the parent directory name of an
-# unpacked source archive. Distribution tarballs contain a pre-generated copy
-# of this file.
-
-import json
-
-version_json = '''
-%s
-'''  # END VERSION_JSON
-
-
-def get_versions():
-    return json.loads(version_json)
-"""
-
-
-def versions_from_file(filename):
-    """Try to determine the version from _version.py if present."""
-    try:
-        with open(filename) as f:
-            contents = f.read()
-    except EnvironmentError:
-        raise NotThisMethod("unable to read _version.py")
-    mo = re.search(r"version_json = '''\n(.*)'''  # END VERSION_JSON",
-                   contents, re.M | re.S)
-    if not mo:
-        mo = re.search(r"version_json = '''\r\n(.*)'''  # END VERSION_JSON",
-                       contents, re.M | re.S)
-    if not mo:
-        raise NotThisMethod("no version_json in _version.py")
-    return json.loads(mo.group(1))
-
-
-def write_to_version_file(filename, versions):
-    """Write the given version number to the given _version.py file."""
-    os.unlink(filename)
-    contents = json.dumps(versions, sort_keys=True,
-                          indent=1, separators=(",", ": "))
-    with open(filename, "w") as f:
-        f.write(SHORT_VERSION_PY % contents)
-
-    print("set %s to '%s'" % (filename, versions["version"]))
-
-
-def plus_or_dot(pieces):
-    """Return a + if we don't already have one, else return a ."""
-    if "+" in pieces.get("closest-tag", ""):
-        return "."
-    return "+"
-
-
-def render_pep440(pieces):
-    """Build up version string, with post-release "local version identifier".
-
-    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
-    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
-
-    Exceptions:
-    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"] or pieces["dirty"]:
-            rendered += plus_or_dot(pieces)
-            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
-            if pieces["dirty"]:
-                rendered += ".dirty"
-    else:
-        # exception #1
-        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
-                                          pieces["short"])
-        if pieces["dirty"]:
-            rendered += ".dirty"
-    return rendered
-
-
-def render_pep440_pre(pieces):
-    """TAG[.post.devDISTANCE] -- No -dirty.
-
-    Exceptions:
-    1: no tags. 0.post.devDISTANCE
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"]:
-            rendered += ".post.dev%d" % pieces["distance"]
-    else:
-        # exception #1
-        rendered = "0.post.dev%d" % pieces["distance"]
-    return rendered
-
-
-def render_pep440_post(pieces):
-    """TAG[.postDISTANCE[.dev0]+gHEX] .
-
-    The ".dev0" means dirty. Note that .dev0 sorts backwards
-    (a dirty tree will appear "older" than the corresponding clean one),
-    but you shouldn't be releasing software with -dirty anyway.
-
-    Exceptions:
-    1: no tags. 0.postDISTANCE[.dev0]
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"] or pieces["dirty"]:
-            rendered += ".post%d" % pieces["distance"]
-            if pieces["dirty"]:
-                rendered += ".dev0"
-            rendered += plus_or_dot(pieces)
-            rendered += "g%s" % pieces["short"]
-    else:
-        # exception #1
-        rendered = "0.post%d" % pieces["distance"]
-        if pieces["dirty"]:
-            rendered += ".dev0"
-        rendered += "+g%s" % pieces["short"]
-    return rendered
-
-
-def render_pep440_old(pieces):
-    """TAG[.postDISTANCE[.dev0]] .
-
-    The ".dev0" means dirty.
-
-    Exceptions:
-    1: no tags. 0.postDISTANCE[.dev0]
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"] or pieces["dirty"]:
-            rendered += ".post%d" % pieces["distance"]
-            if pieces["dirty"]:
-                rendered += ".dev0"
-    else:
-        # exception #1
-        rendered = "0.post%d" % pieces["distance"]
-        if pieces["dirty"]:
-            rendered += ".dev0"
-    return rendered
-
-
-def render_git_describe(pieces):
-    """TAG[-DISTANCE-gHEX][-dirty].
-
-    Like 'git describe --tags --dirty --always'.
-
-    Exceptions:
-    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        if pieces["distance"]:
-            rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
-    else:
-        # exception #1
-        rendered = pieces["short"]
-    if pieces["dirty"]:
-        rendered += "-dirty"
-    return rendered
-
-
-def render_git_describe_long(pieces):
-    """TAG-DISTANCE-gHEX[-dirty].
-
-    Like 'git describe --tags --dirty --always --long'.
-    The distance/hash is unconditional.
-
-    Exceptions:
-    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
-    """
-    if pieces["closest-tag"]:
-        rendered = pieces["closest-tag"]
-        rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
-    else:
-        # exception #1
-        rendered = pieces["short"]
-    if pieces["dirty"]:
-        rendered += "-dirty"
-    return rendered
-
-
-def render(pieces, style):
-    """Render the given version pieces into the requested style."""
-    if pieces["error"]:
-        return {"version": "unknown",
-                "full-revisionid": pieces.get("long"),
-                "dirty": None,
-                "error": pieces["error"],
-                "date": None}
-
-    if not style or style == "default":
-        style = "pep440"  # the default
-
-    if style == "pep440":
-        rendered = render_pep440(pieces)
-    elif style == "pep440-pre":
-        rendered = render_pep440_pre(pieces)
-    elif style == "pep440-post":
-        rendered = render_pep440_post(pieces)
-    elif style == "pep440-old":
-        rendered = render_pep440_old(pieces)
-    elif style == "git-describe":
-        rendered = render_git_describe(pieces)
-    elif style == "git-describe-long":
-        rendered = render_git_describe_long(pieces)
-    else:
-        raise ValueError("unknown style '%s'" % style)
-
-    return {"version": rendered, "full-revisionid": pieces["long"],
-            "dirty": pieces["dirty"], "error": None,
-            "date": pieces.get("date")}
-
-
-class VersioneerBadRootError(Exception):
-    """The project root directory is unknown or missing key files."""
-
-
-def get_versions(verbose=False):
-    """Get the project version from whatever source is available.
-
-    Returns dict with two keys: 'version' and 'full'.
-    """
-    if "versioneer" in sys.modules:
-        # see the discussion in cmdclass.py:get_cmdclass()
-        del sys.modules["versioneer"]
-
-    root = get_root()
-    cfg = get_config_from_root(root)
-
-    assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
-    handlers = HANDLERS.get(cfg.VCS)
-    assert handlers, "unrecognized VCS '%s'" % cfg.VCS
-    verbose = verbose or cfg.verbose
-    assert cfg.versionfile_source is not None, \
-        "please set versioneer.versionfile_source"
-    assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"
-
-    versionfile_abs = os.path.join(root, cfg.versionfile_source)
-
-    # extract version from first of: _version.py, VCS command (e.g. 'git
-    # describe'), parentdir. This is meant to work for developers using a
-    # source checkout, for users of a tarball created by 'setup.py sdist',
-    # and for users of a tarball/zipball created by 'git archive' or github's
-    # download-from-tag feature or the equivalent in other VCSes.
-
-    get_keywords_f = handlers.get("get_keywords")
-    from_keywords_f = handlers.get("keywords")
-    if get_keywords_f and from_keywords_f:
-        try:
-            keywords = get_keywords_f(versionfile_abs)
-            ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
-            if verbose:
-                print("got version from expanded keyword %s" % ver)
-            return ver
-        except NotThisMethod:
-            pass
-
-    try:
-        ver = versions_from_file(versionfile_abs)
-        if verbose:
-            print("got version from file %s %s" % (versionfile_abs, ver))
-        return ver
-    except NotThisMethod:
-        pass
-
-    from_vcs_f = handlers.get("pieces_from_vcs")
-    if from_vcs_f:
-        try:
-            pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
-            ver = render(pieces, cfg.style)
-            if verbose:
-                print("got version from VCS %s" % ver)
-            return ver
-        except NotThisMethod:
-            pass
-
-    try:
-        if cfg.parentdir_prefix:
-            ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
-            if verbose:
-                print("got version from parentdir %s" % ver)
-            return ver
-    except NotThisMethod:
-        pass
-
-    if verbose:
-        print("unable to compute version")
-
-    return {"version": "0+unknown", "full-revisionid": None,
-            "dirty": None, "error": "unable to compute version",
-            "date": None}
-
-
-def get_version():
-    """Get the short version string for this project."""
-    return get_versions()["version"]
-
-
-def get_cmdclass():
-    """Get the custom setuptools/distutils subclasses used by Versioneer."""
-    if "versioneer" in sys.modules:
-        del sys.modules["versioneer"]
-        # this fixes the "python setup.py develop" case (also 'install' and
-        # 'easy_install .'), in which subdependencies of the main project are
-        # built (using setup.py bdist_egg) in the same python process. Assume
-        # a main project A and a dependency B, which use different versions
-        # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
-        # sys.modules by the time B's setup.py is executed, causing B to run
-        # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
-        # sandbox that restores sys.modules to its pre-build state, so the
-        # parent is protected against the child's "import versioneer". By
-        # removing ourselves from sys.modules here, before the child build
-        # happens, we protect the child from the parent's versioneer too.
-        # Also see https://github.com/warner/python-versioneer/issues/52
-
-    cmds = {}
-
-    # we add "version" to both distutils and setuptools
-    from distutils.core import Command
-
-    class cmd_version(Command):
-        description = "report generated version string"
-        user_options = []
-        boolean_options = []
-
-        def initialize_options(self):
-            pass
-
-        def finalize_options(self):
-            pass
-
-        def run(self):
-            vers = get_versions(verbose=True)
-            print("Version: %s" % vers["version"])
-            print(" full-revisionid: %s" % vers.get("full-revisionid"))
-            print(" dirty: %s" % vers.get("dirty"))
-            print(" date: %s" % vers.get("date"))
-            if vers["error"]:
-                print(" error: %s" % vers["error"])
-    cmds["version"] = cmd_version
-
-    # we override "build_py" in both distutils and setuptools
-    #
-    # most invocation pathways end up running build_py:
-    #  distutils/build -> build_py
-    #  distutils/install -> distutils/build ->..
-    #  setuptools/bdist_wheel -> distutils/install ->..
-    #  setuptools/bdist_egg -> distutils/install_lib -> build_py
-    #  setuptools/install -> bdist_egg ->..
-    #  setuptools/develop -> ?
-    #  pip install:
-    #   copies source tree to a tempdir before running egg_info/etc
-    #   if .git isn't copied too, 'git describe' will fail
-    #   then does setup.py bdist_wheel, or sometimes setup.py install
-    #  setup.py egg_info -> ?
-
-    # we override different "build_py" commands for both environments
-    if "setuptools" in sys.modules:
-        from setuptools.command.build_py import build_py as _build_py
-    else:
-        from distutils.command.build_py import build_py as _build_py
-
-    class cmd_build_py(_build_py):
-        def run(self):
-            root = get_root()
-            cfg = get_config_from_root(root)
-            versions = get_versions()
-            _build_py.run(self)
-            # now locate _version.py in the new build/ directory and replace
-            # it with an updated value
-            if cfg.versionfile_build:
-                target_versionfile = os.path.join(self.build_lib,
-                                                  cfg.versionfile_build)
-                print("UPDATING %s" % target_versionfile)
-                write_to_version_file(target_versionfile, versions)
-    cmds["build_py"] = cmd_build_py
-
-    if "cx_Freeze" in sys.modules:  # cx_freeze enabled?
-        from cx_Freeze.dist import build_exe as _build_exe
-        # nczeczulin reports that py2exe won't like the pep440-style string
-        # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
-        # setup(console=[{
-        #   "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION
-        #   "product_version": versioneer.get_version(),
-        #   ...
-
-        class cmd_build_exe(_build_exe):
-            def run(self):
-                root = get_root()
-                cfg = get_config_from_root(root)
-                versions = get_versions()
-                target_versionfile = cfg.versionfile_source
-                print("UPDATING %s" % target_versionfile)
-                write_to_version_file(target_versionfile, versions)
-
-                _build_exe.run(self)
-                os.unlink(target_versionfile)
-                with open(cfg.versionfile_source, "w") as f:
-                    LONG = LONG_VERSION_PY[cfg.VCS]
-                    f.write(LONG %
-                            {"DOLLAR": "$",
-                             "STYLE": cfg.style,
-                             "TAG_PREFIX": cfg.tag_prefix,
-                             "PARENTDIR_PREFIX": cfg.parentdir_prefix,
-                             "VERSIONFILE_SOURCE": cfg.versionfile_source,
-                             })
-        cmds["build_exe"] = cmd_build_exe
-        del cmds["build_py"]
-
-    if 'py2exe' in sys.modules:  # py2exe enabled?
-        try:
-            from py2exe.distutils_buildexe import py2exe as _py2exe  # py3
-        except ImportError:
-            from py2exe.build_exe import py2exe as _py2exe  # py2
-
-        class cmd_py2exe(_py2exe):
-            def run(self):
-                root = get_root()
-                cfg = get_config_from_root(root)
-                versions = get_versions()
-                target_versionfile = cfg.versionfile_source
-                print("UPDATING %s" % target_versionfile)
-                write_to_version_file(target_versionfile, versions)
-
-                _py2exe.run(self)
-                os.unlink(target_versionfile)
-                with open(cfg.versionfile_source, "w") as f:
-                    LONG = LONG_VERSION_PY[cfg.VCS]
-                    f.write(LONG %
-                            {"DOLLAR": "$",
-                             "STYLE": cfg.style,
-                             "TAG_PREFIX": cfg.tag_prefix,
-                             "PARENTDIR_PREFIX": cfg.parentdir_prefix,
-                             "VERSIONFILE_SOURCE": cfg.versionfile_source,
-                             })
-        cmds["py2exe"] = cmd_py2exe
-
-    # we override different "sdist" commands for both environments
-    if "setuptools" in sys.modules:
-        from setuptools.command.sdist import sdist as _sdist
-    else:
-        from distutils.command.sdist import sdist as _sdist
-
-    class cmd_sdist(_sdist):
-        def run(self):
-            versions = get_versions()
-            self._versioneer_generated_versions = versions
-            # unless we update this, the command will keep using the old
-            # version
-            self.distribution.metadata.version = versions["version"]
-            return _sdist.run(self)
-
-        def make_release_tree(self, base_dir, files):
-            root = get_root()
-            cfg = get_config_from_root(root)
-            _sdist.make_release_tree(self, base_dir, files)
-            # now locate _version.py in the new base_dir directory
-            # (remembering that it may be a hardlink) and replace it with an
-            # updated value
-            target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
-            print("UPDATING %s" % target_versionfile)
-            write_to_version_file(target_versionfile,
-                                  self._versioneer_generated_versions)
-    cmds["sdist"] = cmd_sdist
-
-    return cmds
-
-
-CONFIG_ERROR = """
-setup.cfg is missing the necessary Versioneer configuration. You need
-a section like:
-
- [versioneer]
- VCS = git
- style = pep440
- versionfile_source = src/myproject/_version.py
- versionfile_build = myproject/_version.py
- tag_prefix =
- parentdir_prefix = myproject-
-
-You will also need to edit your setup.py to use the results:
-
- import versioneer
- setup(version=versioneer.get_version(),
-       cmdclass=versioneer.get_cmdclass(), ...)
-
-Please read the docstring in ./versioneer.py for configuration instructions,
-edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
-"""
-
-SAMPLE_CONFIG = """
-# See the docstring in versioneer.py for instructions. Note that you must
-# re-run 'versioneer.py setup' after changing this section, and commit the
-# resulting files.
-
-[versioneer]
-#VCS = git
-#style = pep440
-#versionfile_source =
-#versionfile_build =
-#tag_prefix =
-#parentdir_prefix =
-
-"""
-
-INIT_PY_SNIPPET = """
-from ._version import get_versions
-__version__ = get_versions()['version']
-del get_versions
-"""
-
-
-def do_setup():
-    """Main VCS-independent setup function for installing Versioneer."""
-    root = get_root()
-    try:
-        cfg = get_config_from_root(root)
-    except (EnvironmentError, configparser.NoSectionError,
-            configparser.NoOptionError) as e:
-        if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
-            print("Adding sample versioneer config to setup.cfg",
-                  file=sys.stderr)
-            with open(os.path.join(root, "setup.cfg"), "a") as f:
-                f.write(SAMPLE_CONFIG)
-        print(CONFIG_ERROR, file=sys.stderr)
-        return 1
-
-    print(" creating %s" % cfg.versionfile_source)
-    with open(cfg.versionfile_source, "w") as f:
-        LONG = LONG_VERSION_PY[cfg.VCS]
-        f.write(LONG % {"DOLLAR": "$",
-                        "STYLE": cfg.style,
-                        "TAG_PREFIX": cfg.tag_prefix,
-                        "PARENTDIR_PREFIX": cfg.parentdir_prefix,
-                        "VERSIONFILE_SOURCE": cfg.versionfile_source,
-                        })
-
-    ipy = os.path.join(os.path.dirname(cfg.versionfile_source),
-                       "__init__.py")
-    if os.path.exists(ipy):
-        try:
-            with open(ipy, "r") as f:
-                old = f.read()
-        except EnvironmentError:
-            old = ""
-        if INIT_PY_SNIPPET not in old:
-            print(" appending to %s" % ipy)
-            with open(ipy, "a") as f:
-                f.write(INIT_PY_SNIPPET)
-        else:
-            print(" %s unmodified" % ipy)
-    else:
-        print(" %s doesn't exist, ok" % ipy)
-        ipy = None
-
-    # Make sure both the top-level "versioneer.py" and versionfile_source
-    # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
-    # they'll be copied into source distributions. Pip won't be able to
-    # install the package without this.
-    manifest_in = os.path.join(root, "MANIFEST.in")
-    simple_includes = set()
-    try:
-        with open(manifest_in, "r") as f:
-            for line in f:
-                if line.startswith("include "):
-                    for include in line.split()[1:]:
-                        simple_includes.add(include)
-    except EnvironmentError:
-        pass
-    # That doesn't cover everything MANIFEST.in can do
-    # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
-    # it might give some false negatives. Appending redundant 'include'
-    # lines is safe, though.
-    if "versioneer.py" not in simple_includes:
-        print(" appending 'versioneer.py' to MANIFEST.in")
-        with open(manifest_in, "a") as f:
-            f.write("include versioneer.py\n")
-    else:
-        print(" 'versioneer.py' already in MANIFEST.in")
-    if cfg.versionfile_source not in simple_includes:
-        print(" appending versionfile_source ('%s') to MANIFEST.in" %
-              cfg.versionfile_source)
-        with open(manifest_in, "a") as f:
-            f.write("include %s\n" % cfg.versionfile_source)
-    else:
-        print(" versionfile_source already in MANIFEST.in")
-
-    # Make VCS-specific changes. For git, this means creating/changing
-    # .gitattributes to mark _version.py for export-subst keyword
-    # substitution.
-    do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
-    return 0
-
-
-def scan_setup_py():
-    """Validate the contents of setup.py against Versioneer's expectations."""
-    found = set()
-    setters = False
-    errors = 0
-    with open("setup.py", "r") as f:
-        for line in f.readlines():
-            if "import versioneer" in line:
-                found.add("import")
-            if "versioneer.get_cmdclass()" in line:
-                found.add("cmdclass")
-            if "versioneer.get_version()" in line:
-                found.add("get_version")
-            if "versioneer.VCS" in line:
-                setters = True
-            if "versioneer.versionfile_source" in line:
-                setters = True
-    if len(found) != 3:
-        print("")
-        print("Your setup.py appears to be missing some important items")
-        print("(but I might be wrong). Please make sure it has something")
-        print("roughly like the following:")
-        print("")
-        print(" import versioneer")
-        print(" setup( version=versioneer.get_version(),")
-        print("        cmdclass=versioneer.get_cmdclass(),  ...)")
-        print("")
-        errors += 1
-    if setters:
-        print("You should remove lines like 'versioneer.VCS = ' and")
-        print("'versioneer.versionfile_source = ' . This configuration")
-        print("now lives in setup.cfg, and should be removed from setup.py")
-        print("")
-        errors += 1
-    return errors
-
-
-if __name__ == "__main__":
-    cmd = sys.argv[1]
-    if cmd == "setup":
-        errors = do_setup()
-        errors += scan_setup_py()
-        if errors:
-            sys.exit(1)



View it on GitLab: https://salsa.debian.org/med-team/python-dnaio/commit/5add7f8d997f9a1a9485831d991f435507b500fc


