[Git][debian-gis-team/pyepr][upstream] New upstream version 1.0.0

Antonio Valentino gitlab at salsa.debian.org
Sun Sep 8 20:46:30 BST 2019



Antonio Valentino pushed to branch upstream at Debian GIS Project / pyepr


Commits:
f7a06a44 by Antonio Valentino at 2019-09-08T18:58:03Z
New upstream version 1.0.0
- - - - -


27 changed files:

- + .coveragerc
- .gitignore
- .travis.yml
- Makefile
- README.rst
- appveyor.yml
- doc/Makefile
- doc/NEWS.rst
- doc/_templates/appveyor.html
- + doc/_templates/codecov.html
- doc/_templates/ohloh.html
- doc/_templates/pypi.html
- + doc/_templates/readthedocs.html
- doc/_templates/travis-ci.html
- doc/conf.py
- doc/gdal_export_example.rst
- doc/index.rst
- doc/interactive_use.rst
- doc/make.bat
- doc/reference.rst
- doc/sphinxext/ipython_console_highlighting.py
- doc/usermanual.rst
- requirements.txt
- setup.py
- src/epr.pxd
- src/epr.pyx
- tests/test_all.py


Changes:

=====================================
.coveragerc
=====================================
@@ -0,0 +1,5 @@
+[run]
+plugins = Cython.Coverage
+source = src
+branch = True
+# omit = */Cython/Includes/*
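
For reference, a minimal sketch of how this new coverage configuration could
be exercised locally (illustrative only; it assumes coverage.py is installed
and that the extension was built with Cython line tracing enabled, as the
Makefile "coverage" target below does):

    # illustrative sketch, not part of the commit
    import coverage

    cov = coverage.Coverage(config_file='.coveragerc')  # loads Cython.Coverage
    cov.start()

    import epr                      # the compiled extension built from src/
    print(epr.__version__)          # exercise a few code paths

    cov.stop()
    cov.save()
    cov.report()                    # lines/branches mapped back to epr.pyx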


=====================================
.gitignore
=====================================
@@ -1,2 +1,4 @@
 SciTEDirectory.properties
 .idea
+.DS_Store
+


=====================================
.travis.yml
=====================================
@@ -1,24 +1,30 @@
 language: python
 
 python:
-  - "2.6"
   - "2.7"
-  - "3.3"
   - "3.4"
   - "3.5"
   - "3.6"
-  # - "3.7"
-  # - "3.8-dev"
-  # - "pypy2.7"
-  - "pypy3.5"
+  - "3.7"
+  - "3.8-dev"
+  - "pypy"
+  - "pypy3"
+
+matrix:
+  allow_failures:
+    - python: "pypy3"
 
 before_install:
-  - sudo apt-get update -qq
-  - sudo apt-get install -qq libepr-api-dev
+  - sudo apt-get update
+  - sudo apt-get install -y libepr-api-dev
 
 install:
   - pip install -r requirements.txt
+  - pip install sphinx coverage codecov
   - if [[ $TRAVIS_PYTHON_VERSION < '3.4' ]]; then pip install -U unittest2; fi
-  - python setup.py build_ext --inplace
 
-script: make PYTHON=python check
+script:
+  - if [[ $TRAVIS_PYTHON_VERSION = '3.7' ]]; then make PYTHON=python coverage; else make PYTHON=python check; fi
+
+after_success:
+  - if [[ $TRAVIS_PYTHON_VERSION = '3.7' ]]; then codecov; fi


=====================================
Makefile
=====================================
@@ -1,7 +1,7 @@
 #!/usr/bin/make -f
 # -*- coding: utf-8 -*-
 
-# Copyright (C) 2011-2018, Antonio Valentino <antonio.valentino at tiscali.it>
+# Copyright (C) 2011-2019, Antonio Valentino <antonio.valentino at tiscali.it>
 #
 # This file is part of PyEPR.
 #
@@ -26,7 +26,7 @@ TEST_DATSET = tests/MER_LRC_2PTGMV20000620_104318_00000104X000_00000_00000_0001.
 EPRAPIROOT = ../epr-api
 
 .PHONY: default ext cythonize sdist eprsrc fullsdist doc clean distclean \
-        check debug data upload manylinux
+        check debug data upload manylinux coverage ext-coverage coverage-report
 
 default: ext
 
@@ -71,6 +71,10 @@ clean:
 	$(MAKE) -C doc clean
 	$(RM) -r doc/_build
 	find . -name '*~' -delete
+	$(RM) *.c *.o *.html .coverage coverage.xml
+	$(RM) src/epr.html
+	$(RM) -r htmlcov
+	$(RM) epr.p*        # workaround for Cython.Coverage bug #1985
 
 distclean: clean
 	$(RM) $(TEST_DATSET)
@@ -78,9 +82,26 @@ distclean: clean
 	$(RM) -r LICENSES epr-api-src
 	$(MAKE) -C tests -f checksetup.mak distclean
 
-check: ext $(TEST_DATSET)
+check: ext data
 	env PYTHONPATH=. $(PYTHON) tests/test_all.py --verbose
 
+ext-coverage: src/epr.pyx
+	env PYEPR_COVERAGE=TRUE $(PYTHON) setup.py build_ext --inplace
+
+coverage: clean ext-coverage data
+	ln -s src/epr.p* .  # workaround for Cython.Coverage bug #1985
+	env PYEPR_COVERAGE=TRUE PYTHONPATH=. \
+		$(PYTHON) -m coverage run --branch --source=src setup.py test
+	env PYTHONPATH=. $(PYTHON) -m coverage report
+
+coverage-report: coverage
+	env PYTHONPATH=. $(PYTHON) -m coverage xml -i
+	env PYTHONPATH=. $(PYTHON) -m cython -E CYTHON_TRACE_NOGIL=1 \
+		-X linetrace=True -X language_level=3str \
+		--annotate-coverage coverage.xml src/epr.pyx
+	env PYTHONPATH=. $(PYTHON) -m coverage html -i
+	cp src/epr.html htmlcov
+
 debug:
 	$(PYTHON) setup.py build_ext --inplace --debug
 
@@ -92,5 +113,5 @@ $(TEST_DATSET):
 
 manylinux:
 	# make fullsdist
-	# docker pull quay.io/pypa/manylinux1_x86_64
-	docker run --rm -v $(shell pwd):/io quay.io/pypa/manylinux1_x86_64 sh /io/build-manylinux-wheels.sh
+	# docker pull quay.io/pypa/manylinux2010_x86_64
+	docker run --rm -v $(shell pwd):/io quay.io/pypa/manylinux2010_x86_64 sh /io/build-manylinux-wheels.sh


=====================================
README.rst
=====================================
@@ -2,11 +2,43 @@
 ENVISAT Product Reader Python API
 =================================
 
-:HomePage:  http://avalentino.github.io/pyepr
+:HomePage:  https://avalentino.github.io/pyepr
 :Author:    Antonio Valentino
 :Contact:   antonio.valentino at tiscali.it
-:Copyright: 2011-2018, Antonio Valentino <antonio.valentino at tiscali.it>
-:Version:   0.9.5
+:Copyright: 2011-2019, Antonio Valentino <antonio.valentino at tiscali.it>
+:Version:   1.0.0
+
+.. image:: https://travis-ci.org/avalentino/pyepr.svg?branch=master
+    :alt: Travis-CI status page
+    :target: https://travis-ci.org/avalentino/pyepr
+
+.. image:: https://ci.appveyor.com/api/projects/status/github/avalentino/pyepr?branch=master&svg=true
+    :alt: AppVeyor status page
+    :target: https://ci.appveyor.com/project/avalentino/pyepr
+
+.. image:: https://img.shields.io/pypi/v/pyepr
+    :alt: Latest Version
+    :target: https://pypi.org/project/pyepr
+
+.. image:: https://img.shields.io/pypi/pyversions/pyepr
+    :alt: Supported Python versions
+    :target: https://pypi.org/project/pyepr
+
+.. image:: https://img.shields.io/pypi/l/pyepr
+    :alt: License
+    :target: https://pypi.org/project/pyepr
+
+.. image:: https://img.shields.io/pypi/wheel/pyepr
+    :alt: Wheel Status
+    :target: https://pypi.org/project/pyepr
+
+.. image:: https://readthedocs.org/projects/pyepr/badge
+    :alt: Documentation Status
+    :target: https://pyepr.readthedocs.io/en/latest
+
+.. image:: https://codecov.io/gh/avalentino/pyepr/branch/master/graph/badge.svg
+    :alt: Coverage Status
+    :target: https://codecov.io/gh/avalentino/pyepr
 
 
 Introduction
@@ -36,28 +68,28 @@ In order to use PyEPR it is needed that the following software are
 correctly installed and configured:
 
 * Python2_ >= 2.6 or Python3_ >= 3.1 (including PyPy_)
-* numpy_ >= 1.5.0
+* numpy_ >= 1.7.0
 * `EPR API`_ >= 2.2 (optional, since PyEPR 0.7 the source tar-ball comes
-  with a copy of the PER C API sources)
+  with a copy of the EPR C API sources)
 * a reasonably updated C compiler (build only)
-* Cython_ >= 0.15 (build only)
+* Cython_ >= 0.19 (build only)
 * unittest2_ (only required for Python < 3.4)
 
 .. _Python2: Python_
 .. _Python3: Python_
-.. _PyPy: http://pypy.org
-.. _numpy: http://www.numpy.org
-.. _gcc: http://gcc.gnu.org
-.. _Cython: http://cython.org
-.. _unittest2: https://pypi.python.org/pypi/unittest2
+.. _PyPy: https://pypy.org
+.. _numpy: https://www.numpy.org
+.. _gcc: https://gcc.gnu.org
+.. _Cython: https://cython.org
+.. _unittest2: https://pypi.org/project/unittest2
 
 
 Download
 ========
 
-Official source tarballs can be downloaded form PyPi_:
+Official source tar-balls can be downloaded from PyPi_:
 
-    https://pypi.python.org/pypi/pyepr
+    https://pypi.org/project/pyepr
 
 The source code of the development versions is available on the GitHub_
 project page
@@ -68,9 +100,9 @@ To clone the git_ repository the following command can be used::
 
     $ git clone https://github.com/avalentino/pyepr.git
 
-.. _PyPi: https://pypi.python.org/pypi
+.. _PyPi: https://pypi.org
 .. _GitHub: https://github.com
-.. _git: http://git-scm.com
+.. _git: https://git-scm.com
 
 
 Installation
@@ -100,7 +132,7 @@ To install PyEPR_ in a non-standard path::
 License
 =======
 
-Copyright (C) 2011-2018 Antonio Valentino <antonio.valentino at tiscali.it>
+Copyright (C) 2011-2019 Antonio Valentino <antonio.valentino at tiscali.it>
 
 PyEPR is free software: you can redistribute it and/or modify
 it under the terms of the `GNU General Public License`_ as published by


=====================================
appveyor.yml
=====================================
@@ -6,59 +6,34 @@
 environment:
 
   global:
-    PYTHON: "C:\\conda"
-    MINICONDA_VERSION: "latest"
     CMD_IN_ENV: "cmd /E:ON /V:ON /C .\\ci-helpers\\appveyor\\windows_sdk.cmd"
-    # PYTHON_ARCH: "64" # needs to be set for CMD_IN_ENV to succeed. If a mix
-                        # of 32 bit and 64 bit builds are needed, move this
-                        # to the matrix section.
+    PYTHON_ARCH: "64" # needs to be set for CMD_IN_ENV to succeed. If a mix
+                      # of 32 bit and 64 bit builds is needed, move this
+                      # to the matrix section.
     CONDA_DEPENDENCIES: "setuptools numpy Cython unittest2"
     # DEBUG: True
     # NUMPY_VERSION: "stable"
 
-
   matrix:
-    - platform: x86
+    - PYTHON: "C:\\Miniconda-x64"
       PYTHON_VERSION: "2.7"
-      PYTHON_ARCH: "32"
-
-    - PYTHON_VERSION: "2.7"
-      PYTHON_ARCH: "64"
-
-    - platform: x86
-      PYTHON_VERSION: "3.4"
-      PYTHON_ARCH: "32"
-
-    - PYTHON_VERSION: "3.4"
-      PYTHON_ARCH: "64"
 
-    - platform: x86
-      PYTHON_VERSION: "3.5"
-      PYTHON_ARCH: "32"
-
-    - PYTHON_VERSION: "3.5"
-      PYTHON_ARCH: "64"
-
-    - platform: x86
+    - PYTHON: "C:\\Miniconda36-x64"
       PYTHON_VERSION: "3.6"
-      PYTHON_ARCH: "32"
 
-    - PYTHON_VERSION: "3.6"
-      PYTHON_ARCH: "64"
-
-    - platform: x86
+    - PYTHON: "C:\\Miniconda37-x64"
       PYTHON_VERSION: "3.7"
-      PYTHON_ARCH: "32"
 
-    - PYTHON_VERSION: "3.7"
-      PYTHON_ARCH: "64"
+platform:
+    - x64
 
 install:
-    # conda
+     # Set up ci-helpers
     - "git clone git://github.com/astropy/ci-helpers.git"
     - "powershell ci-helpers/appveyor/install-miniconda.ps1"
     - "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"
     - "activate test"
+
     # epr-api
     - "git clone -b pyepr https://github.com/avalentino/epr-api.git"
 


=====================================
doc/Makefile
=====================================
@@ -1,10 +1,10 @@
 # Minimal makefile for Sphinx documentation
 #
 
-# You can set these variables from the command line.
-SPHINXOPTS    =
-SPHINXBUILD   = sphinx-build
-SPHINXPROJ    = PyEPR
+# You can set these variables from the command line, and also
+# from the environment for the first two.
+SPHINXOPTS    ?=
+SPHINXBUILD   ?= sphinx-build
 SOURCEDIR     = .
 BUILDDIR      = _build
 


=====================================
doc/NEWS.rst
=====================================
@@ -1,11 +1,23 @@
 Change history
 ==============
 
+PyEPR 1.0.0 (08/09/2019)
+------------------------
+
+* Do not use deprecated numpy_ API (requires Cython_ >= 0.29)
+* Minimal numpy_ version is now v1.7
+* Set cython_ 'language_level' explicitly to '3str' if cython_ >= v0.29,
+  to '2' otherwise
+* Python v2.6, v3.2, v3.3 and v3.4 are now deprecated.
+  Support for the deprecated Python versions will be removed in future
+  releases of PyEPR
+
+
 PyEPR 0.9.5 (23/08/2018)
 ------------------------
 
-* Fix compatibility with numpy >= 1.14: :func:`np.fromstring`
-  is deprecated.
+* Fix compatibility with numpy_ >= 1.14: :func:`np.fromstring`
+  is deprecated
 * Update the pypi sidebar in the documentation
 * Use `.rst` extension for doc source files
 * Fix setup script to not use system libs if epr-api sources are available
@@ -124,8 +136,8 @@ PyEPR 0.9 (27/02/2015)
 
 .. _pip: https://pip.pypa.io
 .. _setuptools: https://bitbucket.org/pypa/setuptools
-.. _numpy: http://www.numpy.org
-.. _Windows: http://windows.microsoft.com
+.. _numpy: https://www.numpy.org
+.. _Windows: https://windows.microsoft.com
 .. _AppVeyor: https://www.appveyor.com
 .. _PyPI: https://pypi.org/project/pyepr
 
@@ -279,7 +291,7 @@ PyEPR 0.5 (25/04/2011)
 
 .. _`Python 3`: https://docs.python.org/3
 .. _intersphinx: http://www.sphinx-doc.org/en/master/ext/intersphinx.html
-.. _cython: http://cython.org
+.. _cython: https://cython.org
 
 
 PyEPR 0.4 (10/04/2011)


=====================================
doc/_templates/appveyor.html
=====================================
@@ -1,5 +1,7 @@
 <div>
-<p>
-<a href="https://ci.appveyor.com/project/avalentino/pyepr"><img src="https://ci.appveyor.com/api/projects/status/xy8sb0tso761ths5?svg=true" alt="AppVeyor status page"/></a>
-</p>
+  <p>
+    <a href="https://ci.appveyor.com/project/avalentino/pyepr">
+      <img src="https://ci.appveyor.com/api/projects/status/xy8sb0tso761ths5?svg=true" alt="AppVeyor status page"/>
+    </a>
+  </p>
 </div>


=====================================
doc/_templates/codecov.html
=====================================
@@ -0,0 +1,7 @@
+<div>
+  <p>
+    <a href="https://codecov.io/gh/avalentino/pyepr">
+      <img src="https://codecov.io/gh/avalentino/pyepr/branch/master/graph/badge.svg" alt="codecov status"/>
+    </a>
+  </p>
+</div>


=====================================
doc/_templates/ohloh.html
=====================================
@@ -1,3 +1,4 @@
 <div>
-<script type="text/javascript" src="http://www.openhub.net/p/588314/widgets/project_thin_badge.js"></script>
+  <script type="text/javascript" src="http://www.openhub.net/p/588314/widgets/project_thin_badge.js">
+  </script>
 </div>


=====================================
doc/_templates/pypi.html
=====================================
@@ -1,14 +1,22 @@
 <div>
-<p>
-<a href="https://pypi.org/project/pyepr"><img src="https://img.shields.io/pypi/v/pyepr.svg" alt="Latest Version"/></a>
-</p>
-<p>
-<a href="https://pypi.org/project/pyepr"><img src="https://img.shields.io/pypi/pyversions/pyepr.svg" alt="Supported Python versions"/></a>
-</p>
-<p>
-<a href="https://pypi.org/project/pyepr"><img src="https://img.shields.io/pypi/l/pyepr.svg" alt="License"/></a>
-</p>
-<p>
-<a href="https://pypi.org/project/pyepr"><img src="https://img.shields.io/pypi/wheel/pyepr.svg" alt="Wheel Status"/></a>
-</p>
+  <p>
+    <a href="https://pypi.org/project/pyepr">
+      <img src="https://img.shields.io/pypi/v/pyepr.svg" alt="Latest Version"/>
+    </a>
+  </p>
+  <p>
+    <a href="https://pypi.org/project/pyepr">
+      <img src="https://img.shields.io/pypi/pyversions/pyepr.svg" alt="Supported Python versions"/>
+    </a>
+  </p>
+  <p>
+    <a href="https://pypi.org/project/pyepr">
+      <img src="https://img.shields.io/pypi/l/pyepr.svg" alt="License"/>
+    </a>
+  </p>
+  <p>
+    <a href="https://pypi.org/project/pyepr">
+      <img src="https://img.shields.io/pypi/wheel/pyepr.svg" alt="Wheel Status"/>
+    </a>
+  </p>
 </div>


=====================================
doc/_templates/readthedocs.html
=====================================
@@ -0,0 +1,7 @@
+<div>
+  <p>
+    <a href="https://pyepr.readthedocs.io">
+      <img src="https://readthedocs.org/projects/pyepr/badge" alt="readthedocs status"/>
+    </a>
+  </p>
+</div>


=====================================
doc/_templates/travis-ci.html
=====================================
@@ -1,5 +1,7 @@
 <div>
-<p>
-<a href="https://travis-ci.org/avalentino/pyepr"><img src="https://travis-ci.org/avalentino/pyepr.png" alt="travis-ci status page"/></a>
-</p>
+  <p>
+    <a href="https://travis-ci.org/avalentino/pyepr">
+      <img src="https://travis-ci.org/avalentino/pyepr.png" alt="travis-ci status page"/>
+    </a>
+  </p>
 </div>


=====================================
doc/conf.py
=====================================
@@ -1,17 +1,12 @@
-#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 #
-# PyEPR documentation build configuration file, created by
-# sphinx-quickstart on Sun Apr 29 18:26:52 2018.
+# Configuration file for the Sphinx documentation builder.
 #
-# This file is execfile()d with the current directory set to its
-# containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
+# This file only contains a selection of the most common options. For a full
+# list see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
+
+# -- Path setup --------------------------------------------------------------
 
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
@@ -21,72 +16,75 @@ import os
 import sys
 sys.path.insert(0, os.path.abspath('sphinxext'))
 
+# -- Project information -----------------------------------------------------
+
+project = 'PyEPR'
+copyright = '2011-2019, Antonio Valentino'
+author = 'Antonio Valentino'
+
+def get_version(filename='../src/epr.pyx', release=False):
+    import re
+    from distutils.version import LooseVersion
+
+    s = open(filename).read()
+    mobj = re.search("^__version__ = '(?P<version>.*)'$", s, re.MULTILINE)
+    mobj.group('version')
+
+    v = LooseVersion(mobj.group('version'))
+
+    if release:
+        return v.vstring
+    else:
+        return '.'.join(map(str, v.version[:3]))
 
-# -- General configuration ------------------------------------------------
+# The short X.Y version.
+version = get_version()
 
-# If your documentation needs a minimal Sphinx version, state it here.
+# The full version, including alpha/beta/rc tags.
+release = get_version(release=True)
 
-needs_sphinx = '1.0'
+# -- General configuration ---------------------------------------------------
 
 # Add any Sphinx extension module names here, as strings. They can be
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
 # ones.
 extensions = [
     # 'sphinx.ext.autodoc',
+    # 'sphinx.ext.autosectionlabel',
     # 'sphinx.ext.autosummary',
+    # 'sphinx.ext.coverage',
     # 'sphinx.ext.doctest',
+    'sphinx.ext.extlinks',
+    # 'sphinx.ext.githubpages',
+    # 'sphinx.ext.graphviz',
+    'sphinx.ext.ifconfig',
+    # 'sphinx.ext.imgconverter',
+    # 'sphinx.ext.inheritance_diagram',
     'sphinx.ext.intersphinx',
+    # 'sphinx.ext.linkcode',
+    # 'sphinx.ext.napoleon',
     'sphinx.ext.todo',
-    # 'sphinx.ext.coverage',
+    'sphinx.ext.viewcode',
+
+    # Math support for HTML outputs in Sphinx
     'sphinx.ext.imgmath',
-    # 'sphinx.ext.jsmath',
     # 'sphinx.ext.mathjax',
-    # 'sphinx.ext.graphviz',
-    # 'sphinx.ext.inheritance_diagram',
-    # 'sphinx.ext.refcounting',
-    'sphinx.ext.ifconfig',
-    'sphinx.ext.viewcode',
-    # 'sphinx.ext.githubpages',
-    'sphinx.ext.extlinks',
+    # 'sphinx.ext.jsmath',
+
+    # Additional extensions
     'ipython_console_highlighting',
+    # 'IPython.sphinxext.ipython_console_highlighting',
 ]
 
 # Add any paths that contain templates here, relative to this directory.
 templates_path = ['_templates']
 
-# The suffix(es) of source filenames.
-# You can specify multiple suffix as a list of string:
-#
-# source_suffix = ['.rst', '.md']
-source_suffix = '.rst'
-
 # The master toctree document.
 master_doc = 'index'
 
-# General information about the project.
-project = u'PyEPR'
-copyright = u'2011-2018, Antonio Valentino'
-author = u'Antonio Valentino'
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The short X.Y version.
-version = '0.9.5'
-# The full version, including alpha/beta/rc tags.
-release = version + '.dev0'
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-#
-# This is also used if you do content translation via gettext catalogs.
-# Usually you set "language" from the command line for these cases.
-language = None
-
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
-# This patterns also effect to html_static_path and html_extra_path
+# This pattern also affects html_static_path and html_extra_path.
 exclude_patterns = [
     '_build',
     'Thumbs.db',
@@ -98,8 +96,7 @@ exclude_patterns = [
 # The name of the Pygments (syntax highlighting) style to use.
 pygments_style = 'sphinx'
 
-
-# -- Options for HTML output ----------------------------------------------
+# -- Options for HTML output -------------------------------------------------
 
 # The theme to use for HTML and HTML Help pages.  See the documentation for
 # a list of builtin themes.
@@ -143,19 +140,19 @@ html_sidebars = {
         'pypi.html',
         'travis-ci.html',
         'appveyor.html',
+        'readthedocs.html',
+        'codecov.html',
     ],
 }
 
 # If false, no module index is generated.
 html_domain_indices = False
 
-
 # -- Options for HTMLHelp output ------------------------------------------
 
 # Output file base name for HTML help builder.
 htmlhelp_basename = 'PyEPRdoc'
 
-
 # -- Options for LaTeX output ---------------------------------------------
 
 latex_elements = {
@@ -187,7 +184,6 @@ latex_documents = [
 # If false, no module index is generated.
 latex_domain_indices = False
 
-
 # -- Options for manual page output ---------------------------------------
 
 # One entry per manual page. List of tuples
@@ -197,7 +193,6 @@ man_pages = [
      [author], 1)
 ]
 
-
 # -- Options for Texinfo output -------------------------------------------
 
 # Grouping the document tree into Texinfo files. List of tuples
@@ -209,31 +204,23 @@ texinfo_documents = [
      'Miscellaneous'),
 ]
 
-
 # -- Options for Epub output ----------------------------------------------
 
-# Bibliographic Dublin Core info.
-epub_title = project
-epub_author = author
-epub_publisher = author
-epub_copyright = copyright
-
-# The unique identifier of the text. This can be a ISBN number
-# or the project homepage.
-#
-# epub_identifier = ''
-
-# A unique identification for the text.
-#
-# epub_uid = ''
-
 # A list of files that should not be packed into the epub file.
 epub_exclude_files = ['search.html']
 
 
-# -- Extensions configuration --------------------------------------------------
+# -- Extension configuration -------------------------------------------------
+
+# -- Options for intersphinx extension ---------------------------------------
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {
+    'python': ('https://docs.python.org/3', None),
+    'numpy':  ('https://docs.scipy.org/doc/numpy', None),
+}
 
-# Autodoc configuration
+# -- Options for autodoc extension -------------------------------------------
 #autoclass_content = 'both'
 #autodoc_default_flags = ['members', 'undoc-members', 'show-inheritance']
 #                        #,'inherited-members']
@@ -241,18 +228,13 @@ epub_exclude_files = ['search.html']
 # Auto summary generation
 #autosummary_generate = ['reference']
 
-
+# -- Options for extlinks extension ------------------------------------------
 # External links configuration
 extlinks = {
     'issue': ('https://github.com/avalentino/pyepr/issues/%s', 'gh-'),
 }
 
-# Example configuration for intersphinx: refer to the Python standard library.
-intersphinx_mapping = {
-    'python': ('https://docs.python.org/3', None),
-    'numpy':  ('https://docs.scipy.org/doc/numpy', None),
-}
+# -- Options for todo extension ----------------------------------------------
 
 # If true, `todo` and `todoList` produce output, else they produce nothing.
 todo_include_todos = True
-
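
For reference, a standalone sketch of what the new get_version() helper in
doc/conf.py computes (illustrative; it assumes src/epr.pyx defines
__version__ = '1.0.0'):

    import re
    from distutils.version import LooseVersion

    text = "__version__ = '1.0.0'\n"
    mobj = re.search(r"^__version__ = '(?P<version>.*)'$", text, re.MULTILINE)
    v = LooseVersion(mobj.group('version'))

    release = v.vstring                           # full version string, '1.0.0'
    version = '.'.join(map(str, v.version[:3]))   # short X.Y.Z form, '1.0.0'
    print(version, release)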


=====================================
doc/gdal_export_example.rst
=====================================
@@ -225,7 +225,7 @@ Complete listing
    :language: python
 
 
-.. _GDAL: https://www.gdal.org
+.. _GDAL: https://gdal.org
 .. _PyEPR: https://github.com/avalentino/pyepr
 .. _ENVISAT: https://envisat.esa.int
 


=====================================
doc/index.rst
=====================================
@@ -12,7 +12,7 @@ ENVISAT Product Reader Python API
 :HomePage:  http://avalentino.github.io/pyepr
 :Author:    Antonio Valentino
 :Contact:   antonio.valentino at tiscali.it
-:Copyright: 2011-2018, Antonio Valentino
+:Copyright: 2011-2019, Antonio Valentino
 :Version:   |release|
 
 
@@ -60,7 +60,8 @@ ENVISAT Product Reader Python API
     Online documentation for other PyEpr_ versions:
 
     * `latest <https://pyepr.readthedocs.io/en/latest/>`_ development
-    * `0.9.5 <https://pyepr.readthedocs.io/en/v0.9.5/>`_ (latest stable)
+    * `1.0.0 <https://pyepr.readthedocs.io/en/v1.0.0/>`_ (latest stable)
+    * `0.9.5 <https://pyepr.readthedocs.io/en/v0.9.5/>`_
     * `0.9.4 <https://pyepr.readthedocs.io/en/v0.9.4/>`_
     * `0.9.3 <https://pyepr.readthedocs.io/en/v0.9.3/>`_
     * `0.9.2 <https://pyepr.readthedocs.io/en/v0.9.2/>`_
@@ -80,7 +81,7 @@ License
 
 .. index:: license
 
-Copyright (C) 2011-2018 Antonio Valentino <antonio.valentino at tiscali.it>
+Copyright (C) 2011-2019 Antonio Valentino <antonio.valentino at tiscali.it>
 
 PyEPR is free software: you can redistribute it and/or modify
 it under the terms of the `GNU General Public License`_ as published by


=====================================
doc/interactive_use.rst
=====================================
@@ -20,7 +20,7 @@ ESA_ web site.
 .. _PyEPR: https://github.com/avalentino/pyepr
 .. _ENVISAT: https://envisat.esa.int
 .. _ASAR: https://earth.esa.int/handbooks/asar/CNTR.html
-.. _Jupyter: http://jupyter.org/
+.. _Jupyter: https://jupyter.org/
 .. _matplotlib: https://matplotlib.org
 .. _`free sample`: https://earth.esa.int/services/sample_products/asar/IMP/ASA_IMP_1PNUPA20060202_062233_000000152044_00435_20529_3110.N1.gz
 .. _ESA: https://earth.esa.int
@@ -43,7 +43,7 @@ available classes and functions::
 
     Jupyter console 5.2.0
 
-    Python 3.6.5 (default, Apr  1 2018, 05:46:30) 
+    Python 3.6.5 (default, Apr  1 2018, 05:46:30)
     Type "copyright", "credits" or "license" for more information.
 
     IPython 5.5.0 -- An enhanced Interactive Python.
@@ -81,7 +81,7 @@ available classes and functions::
         .. _ESA: https://earth.esa.int
 
     In [3]: epr.__version__, epr.EPR_C_API_VERSION
-    Out[3]: ('0.9.1', '2.3dev')
+    Out[3]: ('1.0.0', '2.3dev')
 
 .. index:: __version__
 


=====================================
doc/make.bat
=====================================
@@ -9,7 +9,6 @@ if "%SPHINXBUILD%" == "" (
 )
 set SOURCEDIR=.
 set BUILDDIR=_build
-set SPHINXPROJ=PyEPR
 
 if "%1" == "" goto help
 
@@ -26,11 +25,11 @@ if errorlevel 9009 (
 	exit /b 1
 )
 
-%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
 goto end
 
 :help
-%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
 
 :end
 popd


=====================================
doc/reference.rst
=====================================
@@ -192,7 +192,6 @@ Product
             "(", ")", "NOT", "AND", "OR". Valid bit-mask expression are
             for example ``flags.LAND OR flags.CLOUD`` or
             ``NOT flags.WATER AND flags.TURBID_S``
-
       :param xoffset:
             across-track co-ordinate in pixel co-ordinates (zero-based)
             of the upper right corner of the source-region
@@ -208,7 +207,7 @@ Product
       .. seealso:: :func:`create_bitmask_raster`.
 
 
-    .. method:: close
+   .. method:: close
 
        Closes the :class:`Product` product and free the underlying
        file descriptor.
@@ -221,7 +220,7 @@ Product
        once; only the first call, however, will have an effect.
 
 
-    .. method:: flush()
+   .. method:: flush()
 
        Flush the file stream.
 
@@ -1350,7 +1349,7 @@ EPRError
 
       :param message:
             error message
-      :pram code:
+      :param code:
             EPR error code
 
 


=====================================
doc/sphinxext/ipython_console_highlighting.py
=====================================
@@ -1,114 +1,543 @@
-"""reST directive for syntax-highlighting ipython interactive sessions.
-
-XXX - See what improvements can be made based on the new (as of Sept 2009)
-'pycon' lexer for the python console.  At the very least it will give better
-highlighted tracebacks.
+### IPython/lib/lexers.py ####################################################
+# -*- coding: utf-8 -*-
 """
+Defines a variety of Pygments lexers for highlighting IPython code.
+
+This includes:
+
+    IPythonLexer, IPython3Lexer
+        Lexers for pure IPython (python + magic/shell commands)
+
+    IPythonPartialTracebackLexer, IPythonTracebackLexer
+        Supports 2.x and 3.x via keyword `python3`.  The partial traceback
+        lexer reads everything but the Python code appearing in a traceback.
+        The full lexer combines the partial lexer with an IPython lexer.
 
+    IPythonConsoleLexer
+        A lexer for IPython console sessions, with support for tracebacks.
+
+    IPyLexer
+        A friendly lexer which examines the first line of text and from it,
+        decides whether to use an IPython lexer or an IPython console lexer.
+        This is probably the only lexer that needs to be explicitly added
+        to Pygments.
+
+"""
+#-----------------------------------------------------------------------------
+# Copyright (c) 2013, the IPython Development Team.
+#
+# Distributed under the terms of the Modified BSD License.
+#
+# The full license is in the file COPYING.txt, distributed with this software.
 #-----------------------------------------------------------------------------
-# Needed modules
 
 # Standard library
 import re
 
 # Third party
-from pygments.lexer import Lexer, do_insertions
-from pygments.lexers.agile import (PythonConsoleLexer, PythonLexer, 
-                                   PythonTracebackLexer)
-from pygments.token import Comment, Generic
+from pygments.lexers import BashLexer, PythonLexer, Python3Lexer
+from pygments.lexer import (
+    Lexer, DelegatingLexer, RegexLexer, do_insertions, bygroups, using,
+)
+from pygments.token import (
+    Generic, Keyword, Literal, Name, Operator, Other, Text, Error,
+)
+from pygments.util import get_bool_opt
 
-from sphinx import highlighting
+# Local
 
-#-----------------------------------------------------------------------------
-# Global constants
 line_re = re.compile('.*?\n')
 
-#-----------------------------------------------------------------------------
-# Code begins - classes and functions
+__all__ = ['build_ipy_lexer', 'IPython3Lexer', 'IPythonLexer',
+           'IPythonPartialTracebackLexer', 'IPythonTracebackLexer',
+           'IPythonConsoleLexer', 'IPyLexer']
+
+ipython_tokens = [
+  (r"(?s)(\s*)(%%)(\w+)(.*)", bygroups(Text, Operator, Keyword, Text)),
+  (r'(?s)(^\s*)(%%!)([^\n]*\n)(.*)', bygroups(Text, Operator, Text, using(BashLexer))),
+  (r"(%%?)(\w+)(\?\??)$",  bygroups(Operator, Keyword, Operator)),
+  (r"\b(\?\??)(\s*)$",  bygroups(Operator, Text)),
+  (r'(%)(sx|sc|system)(.*)(\n)', bygroups(Operator, Keyword,
+                                       using(BashLexer), Text)),
+  (r'(%)(\w+)(.*\n)', bygroups(Operator, Keyword, Text)),
+  (r'^(!!)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
+  (r'(!)(?!=)(.+)(\n)', bygroups(Operator, using(BashLexer), Text)),
+  (r'^(\s*)(\?\??)(\s*%{0,2}[\w\.\*]*)', bygroups(Text, Operator, Text)),
+  (r'(\s*%{0,2}[\w\.\*]*)(\?\??)(\s*)$', bygroups(Text, Operator, Text)),
+]
+
+def build_ipy_lexer(python3):
+    """Builds IPython lexers depending on the value of `python3`.
+
+    The lexer inherits from an appropriate Python lexer and then adds
+    information about IPython specific keywords (i.e. magic commands,
+    shell commands, etc.)
+
+    Parameters
+    ----------
+    python3 : bool
+        If `True`, then build an IPython lexer from a Python 3 lexer.
+
+    """
+    # It would be nice to have a single IPython lexer class which takes
+    # a boolean `python3`.  But since there are two Python lexer classes,
+    # we will also have two IPython lexer classes.
+    if python3:
+        PyLexer = Python3Lexer
+        name = 'IPython3'
+        aliases = ['ipython3']
+        doc = """IPython3 Lexer"""
+    else:
+        PyLexer = PythonLexer
+        name = 'IPython'
+        aliases = ['ipython2', 'ipython']
+        doc = """IPython Lexer"""
+
+    tokens = PyLexer.tokens.copy()
+    tokens['root'] = ipython_tokens + tokens['root']
+
+    attrs = {'name': name, 'aliases': aliases, 'filenames': [],
+             '__doc__': doc, 'tokens': tokens}
+
+    return type(name, (PyLexer,), attrs)
+
+
+IPython3Lexer = build_ipy_lexer(python3=True)
+IPythonLexer = build_ipy_lexer(python3=False)
+
+
+class IPythonPartialTracebackLexer(RegexLexer):
+    """
+    Partial lexer for IPython tracebacks.
+
+    Handles all the non-python output. This works for both Python 2.x and 3.x.
+
+    """
+    name = 'IPython Partial Traceback'
+
+    tokens = {
+        'root': [
+            # Tracebacks for syntax errors have a different style.
+            # For both types of tracebacks, we mark the first line with
+            # Generic.Traceback.  For syntax errors, we mark the filename
+            # as we mark the filenames for non-syntax tracebacks.
+            #
+            # These two regexps define how IPythonConsoleLexer finds a
+            # traceback.
+            #
+            ## Non-syntax traceback
+            (r'^(\^C)?(-+\n)', bygroups(Error, Generic.Traceback)),
+            ## Syntax traceback
+            (r'^(  File)(.*)(, line )(\d+\n)',
+             bygroups(Generic.Traceback, Name.Namespace,
+                      Generic.Traceback, Literal.Number.Integer)),
+
+            # (Exception Identifier)(Whitespace)(Traceback Message)
+            (r'(?u)(^[^\d\W]\w*)(\s*)(Traceback.*?\n)',
+             bygroups(Name.Exception, Generic.Whitespace, Text)),
+            # (Module/Filename)(Text)(Callee)(Function Signature)
+            # Better options for callee and function signature?
+            (r'(.*)( in )(.*)(\(.*\)\n)',
+             bygroups(Name.Namespace, Text, Name.Entity, Name.Tag)),
+            # Regular line: (Whitespace)(Line Number)(Python Code)
+            (r'(\s*?)(\d+)(.*?\n)',
+             bygroups(Generic.Whitespace, Literal.Number.Integer, Other)),
+            # Emphasized line: (Arrow)(Line Number)(Python Code)
+            # Using Exception token so arrow color matches the Exception.
+            (r'(-*>?\s?)(\d+)(.*?\n)',
+             bygroups(Name.Exception, Literal.Number.Integer, Other)),
+            # (Exception Identifier)(Message)
+            (r'(?u)(^[^\d\W]\w*)(:.*?\n)',
+             bygroups(Name.Exception, Text)),
+            # Tag everything else as Other, will be handled later.
+            (r'.*\n', Other),
+        ],
+    }
+
+
+class IPythonTracebackLexer(DelegatingLexer):
+    """
+    IPython traceback lexer.
+
+    For doctests, the tracebacks can be snipped as much as desired with the
+    exception to the lines that designate a traceback. For non-syntax error
+    tracebacks, this is the line of hyphens. For syntax error tracebacks,
+    this is the line which lists the File and line number.
+
+    """
+    # The lexer inherits from DelegatingLexer.  The "root" lexer is an
+    # appropriate IPython lexer, which depends on the value of the boolean
+    # `python3`.  First, we parse with the partial IPython traceback lexer.
+    # Then, any code marked with the "Other" token is delegated to the root
+    # lexer.
+    #
+    name = 'IPython Traceback'
+    aliases = ['ipythontb']
+
+    def __init__(self, **options):
+        self.python3 = get_bool_opt(options, 'python3', False)
+        if self.python3:
+            self.aliases = ['ipython3tb']
+        else:
+            self.aliases = ['ipython2tb', 'ipythontb']
+
+        if self.python3:
+            IPyLexer = IPython3Lexer
+        else:
+            IPyLexer = IPythonLexer
+
+        DelegatingLexer.__init__(self, IPyLexer,
+                                 IPythonPartialTracebackLexer, **options)
 
 class IPythonConsoleLexer(Lexer):
     """
-    For IPython console output or doctests, such as:
+    An IPython console lexer for IPython code-blocks and doctests, such as:
 
-    .. sourcecode:: ipython
+    .. code-block:: rst
 
-      In [1]: a = 'foo'
+        .. code-block:: ipythonconsole
 
-      In [2]: a
-      Out[2]: 'foo'
+            In [1]: a = 'foo'
 
-      In [3]: print a
-      foo
+            In [2]: a
+            Out[2]: 'foo'
 
-      In [4]: 1 / 0
+            In [3]: print a
+            foo
 
-    Notes:
+            In [4]: 1 / 0
 
-      - Tracebacks are not currently supported.
 
-      - It assumes the default IPython prompts, not customized ones.
+    Support is also provided for IPython exceptions:
+
+    .. code-block:: rst
+
+        .. code-block:: ipythonconsole
+
+            In [1]: raise Exception
+
+            ---------------------------------------------------------------------------
+            Exception                                 Traceback (most recent call last)
+            <ipython-input-1-fca2ab0ca76b> in <module>()
+            ----> 1 raise Exception
+
+            Exception:
+
     """
-    
     name = 'IPython console session'
-    aliases = ['ipython']
+    aliases = ['ipythonconsole']
     mimetypes = ['text/x-ipython-console']
-    input_prompt = re.compile("(In \[[0-9]+\]: )|(   \.\.\.+:)")
-    output_prompt = re.compile("(Out\[[0-9]+\]: )|(   \.\.\.+:)")
-    continue_prompt = re.compile("   \.\.\.+:")
-    tb_start = re.compile("\-+")
 
-    def get_tokens_unprocessed(self, text):
-        pylexer = PythonLexer(**self.options)
-        tblexer = PythonTracebackLexer(**self.options)
+    # The regexps used to determine what is input and what is output.
+    # The default prompts for IPython are:
+    #
+    #    in           = 'In [#]: '
+    #    continuation = '   .D.: '
+    #    template     = 'Out[#]: '
+    #
+    # Where '#' is the 'prompt number' or 'execution count' and 'D'
+    # is a number of dots matching the width of the execution count.
+    #
+    in1_regex = r'In \[[0-9]+\]: '
+    in2_regex = r'   \.\.+\.: '
+    out_regex = r'Out\[[0-9]+\]: '
+
+    #: The regex to determine when a traceback starts.
+    ipytb_start = re.compile(r'^(\^C)?(-+\n)|^(  File)(.*)(, line )(\d+\n)')
+
+    def __init__(self, **options):
+        """Initialize the IPython console lexer.
+
+        Parameters
+        ----------
+        python3 : bool
+            If `True`, then the console inputs are parsed using a Python 3
+            lexer. Otherwise, they are parsed using a Python 2 lexer.
+        in1_regex : RegexObject
+            The compiled regular expression used to detect the start
+            of inputs. Although the IPython configuration setting may have a
+            trailing whitespace, do not include it in the regex. If `None`,
+            then the default input prompt is assumed.
+        in2_regex : RegexObject
+            The compiled regular expression used to detect the continuation
+            of inputs. Although the IPython configuration setting may have a
+            trailing whitespace, do not include it in the regex. If `None`,
+            then the default input prompt is assumed.
+        out_regex : RegexObject
+            The compiled regular expression used to detect outputs. If `None`,
+            then the default output prompt is assumed.
+
+        """
+        self.python3 = get_bool_opt(options, 'python3', False)
+        if self.python3:
+            self.aliases = ['ipython3console']
+        else:
+            self.aliases = ['ipython2console', 'ipythonconsole']
+
+        in1_regex = options.get('in1_regex', self.in1_regex)
+        in2_regex = options.get('in2_regex', self.in2_regex)
+        out_regex = options.get('out_regex', self.out_regex)
+
+        # So that we can work with input and output prompts which have been
+        # rstrip'd (possibly by editors) we also need rstrip'd variants. If
+        # we do not do this, then such prompts will be tagged as 'output'.
+        # The reason we can't just use the rstrip'd variants instead is that
+        # we want any whitespace associated with the prompt to be inserted
+        # with the token. This allows formatted code to be modified so as to
+        # hide the appearance of prompts, with the whitespace included. One example
+        # use of this is in copybutton.js from the standard lib Python docs.
+        in1_regex_rstrip = in1_regex.rstrip() + '\n'
+        in2_regex_rstrip = in2_regex.rstrip() + '\n'
+        out_regex_rstrip = out_regex.rstrip() + '\n'
 
-        curcode = ''
-        insertions = []
+        # Compile and save them all.
+        attrs = ['in1_regex', 'in2_regex', 'out_regex',
+                 'in1_regex_rstrip', 'in2_regex_rstrip', 'out_regex_rstrip']
+        for attr in attrs:
+            self.__setattr__(attr, re.compile(locals()[attr]))
+
+        Lexer.__init__(self, **options)
+
+        if self.python3:
+            pylexer = IPython3Lexer
+            tblexer = IPythonTracebackLexer
+        else:
+            pylexer = IPythonLexer
+            tblexer = IPythonTracebackLexer
+
+        self.pylexer = pylexer(**options)
+        self.tblexer = tblexer(**options)
+
+        self.reset()
+
+    def reset(self):
+        self.mode = 'output'
+        self.index = 0
+        self.buffer = u''
+        self.insertions = []
+
+    def buffered_tokens(self):
+        """
+        Generator of unprocessed tokens after doing insertions and before
+        changing to a new state.
+
+        """
+        if self.mode == 'output':
+            tokens = [(0, Generic.Output, self.buffer)]
+        elif self.mode == 'input':
+            tokens = self.pylexer.get_tokens_unprocessed(self.buffer)
+        else: # traceback
+            tokens = self.tblexer.get_tokens_unprocessed(self.buffer)
+
+        for i, t, v in do_insertions(self.insertions, tokens):
+            # All token indexes are relative to the buffer.
+            yield self.index + i, t, v
+
+        # Clear it all
+        self.index += len(self.buffer)
+        self.buffer = u''
+        self.insertions = []
+
+    def get_mci(self, line):
+        """
+        Parses the line and returns a 3-tuple: (mode, code, insertion).
+
+        `mode` is the next mode (or state) of the lexer, and is always equal
+        to 'input', 'output', or 'tb'.
+
+        `code` is a portion of the line that should be added to the buffer
+        corresponding to the next mode and eventually lexed by another lexer.
+        For example, `code` could be Python code if `mode` were 'input'.
+
+        `insertion` is a 3-tuple (index, token, text) representing an
+        unprocessed "token" that will be inserted into the stream of tokens
+        that are created from the buffer once we change modes. This is usually
+        the input or output prompt.
+
+        In general, the next mode depends on current mode and on the contents
+        of `line`.
+
+        """
+        # To reduce the number of regex match checks, we have multiple
+        # 'if' blocks instead of 'if-elif' blocks.
+
+        # Check for possible end of input
+        in2_match = self.in2_regex.match(line)
+        in2_match_rstrip = self.in2_regex_rstrip.match(line)
+        if (in2_match and in2_match.group().rstrip() == line.rstrip()) or \
+           in2_match_rstrip:
+            end_input = True
+        else:
+            end_input = False
+        if end_input and self.mode != 'tb':
+            # Only look for an end of input when not in tb mode.
+            # An ellipsis could appear within the traceback.
+            mode = 'output'
+            code = u''
+            insertion = (0, Generic.Prompt, line)
+            return mode, code, insertion
+
+        # Check for output prompt
+        out_match = self.out_regex.match(line)
+        out_match_rstrip = self.out_regex_rstrip.match(line)
+        if out_match or out_match_rstrip:
+            mode = 'output'
+            if out_match:
+                idx = out_match.end()
+            else:
+                idx = out_match_rstrip.end()
+            code = line[idx:]
+            # Use the 'heading' token for output.  We cannot use Generic.Error
+            # since it would conflict with exceptions.
+            insertion = (0, Generic.Heading, line[:idx])
+            return mode, code, insertion
+
+
+        # Check for input or continuation prompt (non stripped version)
+        in1_match = self.in1_regex.match(line)
+        if in1_match or (in2_match and self.mode != 'tb'):
+            # New input or when not in tb, continued input.
+            # We do not check for continued input when in tb since it is
+            # allowable to replace a long stack with an ellipsis.
+            mode = 'input'
+            if in1_match:
+                idx = in1_match.end()
+            else: # in2_match
+                idx = in2_match.end()
+            code = line[idx:]
+            insertion = (0, Generic.Prompt, line[:idx])
+            return mode, code, insertion
+
+        # Check for input or continuation prompt (stripped version)
+        in1_match_rstrip = self.in1_regex_rstrip.match(line)
+        if in1_match_rstrip or (in2_match_rstrip and self.mode != 'tb'):
+            # New input or when not in tb, continued input.
+            # We do not check for continued input when in tb since it is
+            # allowable to replace a long stack with an ellipsis.
+            mode = 'input'
+            if in1_match_rstrip:
+                idx = in1_match_rstrip.end()
+            else: # in2_match
+                idx = in2_match_rstrip.end()
+            code = line[idx:]
+            insertion = (0, Generic.Prompt, line[:idx])
+            return mode, code, insertion
+
+        # Check for traceback
+        if self.ipytb_start.match(line):
+            mode = 'tb'
+            code = line
+            insertion = None
+            return mode, code, insertion
+
+        # All other stuff...
+        if self.mode in ('input', 'output'):
+            # We assume all other text is output. Multiline input that
+            # does not use the continuation marker cannot be detected.
+            # For example, the 3 in the following is clearly output:
+            #
+            #    In [1]: print 3
+            #    3
+            #
+            # But the following second line is part of the input:
+            #
+            #    In [2]: while True:
+            #        print True
+            #
+            # In both cases, the 2nd line will be 'output'.
+            #
+            mode = 'output'
+        else:
+            mode = 'tb'
+
+        code = line
+        insertion = None
+
+        return mode, code, insertion
+
+    def get_tokens_unprocessed(self, text):
+        self.reset()
         for match in line_re.finditer(text):
             line = match.group()
-            input_prompt = self.input_prompt.match(line)
-            continue_prompt = self.continue_prompt.match(line.rstrip())
-            output_prompt = self.output_prompt.match(line)
-            if line.startswith("#"):
-                insertions.append((len(curcode),
-                                   [(0, Comment, line)]))
-            elif input_prompt is not None:
-                insertions.append((len(curcode),
-                                   [(0, Generic.Prompt, input_prompt.group())]))
-                curcode += line[input_prompt.end():]
-            elif continue_prompt is not None:
-                insertions.append((len(curcode),
-                                   [(0, Generic.Prompt, continue_prompt.group())]))
-                curcode += line[continue_prompt.end():]
-            elif output_prompt is not None:
-                # Use the 'error' token for output.  We should probably make
-                # our own token, but error is typicaly in a bright color like
-                # red, so it works fine for our output prompts.
-                insertions.append((len(curcode),
-                                   [(0, Generic.Error, output_prompt.group())]))
-                curcode += line[output_prompt.end():]
-            else:
-                if curcode:
-                    for item in do_insertions(insertions,
-                                              pylexer.get_tokens_unprocessed(curcode)):
-                        yield item
-                        curcode = ''
-                        insertions = []
-                yield match.start(), Generic.Output, line
-        if curcode:
-            for item in do_insertions(insertions,
-                                      pylexer.get_tokens_unprocessed(curcode)):
-                yield item
+            mode, code, insertion = self.get_mci(line)
+
+            if mode != self.mode:
+                # Yield buffered tokens before transitioning to new mode.
+                for token in self.buffered_tokens():
+                    yield token
+                self.mode = mode
+
+            if insertion:
+                self.insertions.append((len(self.buffer), [insertion]))
+            self.buffer += code
+
+        for token in self.buffered_tokens():
+            yield token
+
+class IPyLexer(Lexer):
+    """
+    Primary lexer for all IPython-like code.
+
+    This is a simple helper lexer.  If the first line of the text begins with
+    "In \[[0-9]+\]:", then the entire text is parsed with an IPython console
+    lexer. If not, then the entire text is parsed with an IPython lexer.
+
+    The goal is to reduce the number of lexers that are registered
+    with Pygments.
+
+    """
+    name = 'IPy session'
+    aliases = ['ipy']
+
+    def __init__(self, **options):
+        self.python3 = get_bool_opt(options, 'python3', False)
+        if self.python3:
+            self.aliases = ['ipy3']
+        else:
+            self.aliases = ['ipy2', 'ipy']
+
+        Lexer.__init__(self, **options)
 
+        self.IPythonLexer = IPythonLexer(**options)
+        self.IPythonConsoleLexer = IPythonConsoleLexer(**options)
+
+    def get_tokens_unprocessed(self, text):
+        # Search for the input prompt anywhere...this allows code blocks to
+        # begin with comments as well.
+        if re.match(r'.*(In \[[0-9]+\]:)', text.strip(), re.DOTALL):
+            lex = self.IPythonConsoleLexer
+        else:
+            lex = self.IPythonLexer
+        for token in lex.get_tokens_unprocessed(text):
+            yield token
+
+
+### IPython/sphinxext/ipython_console_highighting.py #########################
+"""
+reST directive for syntax-highlighting ipython interactive sessions.
+
+"""
+
+from sphinx import highlighting
+# from IPython.lib.lexers import IPyLexer
 
 def setup(app):
     """Setup as a sphinx extension."""
 
     # This is only a lexer, so adding it below to pygments appears sufficient.
-    # But if somebody knows that the right API usage should be to do that via
+    # But if somebody knows what the right API usage should be to do that via
     # sphinx, by all means fix it here.  At least having this setup.py
     # suppresses the sphinx warning we'd get without it.
-    pass
+    metadata = {'parallel_read_safe': True, 'parallel_write_safe': True}
+    return metadata
 
-#-----------------------------------------------------------------------------
-# Register the extension as a valid pygments lexer
-highlighting.lexers['ipython'] = IPythonConsoleLexer()
+# Register the extension as a valid pygments lexer.
+# Alternatively, we could register the lexer with pygments instead. This would
+# require using setuptools entrypoints: http://pygments.org/docs/plugins
+
+ipy2 = IPyLexer(python3=False)
+ipy3 = IPyLexer(python3=True)
+
+highlighting.lexers['ipython'] = ipy2
+highlighting.lexers['ipython2'] = ipy2
+highlighting.lexers['ipython3'] = ipy3


=====================================
doc/usermanual.rst
=====================================
@@ -63,7 +63,7 @@ In order to use PyEPR it is needed that the following software are
 correctly installed and configured:
 
 * Python2_ >= 2.6 or Python3_ >= 3.1 (including PyPy_)
-* numpy_ >= 1.5.0
+* numpy_ >= 1.7.0
 * `EPR API`_ >= 2.2 (optional, since PyEPR 0.7 the source tar-ball comes
   with a copy of the EPR C API sources)
 * a reasonably updated C compiler [#]_ (build only)
@@ -82,9 +82,9 @@ correctly installed and configured:
 .. _Python2: Python_
 .. _Python3: Python_
 .. _PyPy: http://pypy.org
-.. _numpy: http://www.numpy.org
+.. _numpy: https://www.numpy.org
 .. _gcc: http://gcc.gnu.org
-.. _Cython: http://cython.org
+.. _Cython: https://cython.org
 .. _unittest2: https://pypi.org/project/unittest2
 
 


=====================================
requirements.txt
=====================================
@@ -1,3 +1,3 @@
-numpy>=1.5
+numpy>=1.7
 cython>=0.19
 unittest2;python_version<"3.4"


=====================================
setup.py
=====================================
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-# Copyright (C) 2011-2018, Antonio Valentino <antonio.valentino at tiscali.it>
+# Copyright (C) 2011-2019, Antonio Valentino <antonio.valentino at tiscali.it>
 #
 # This file is part of PyEPR.
 #
@@ -26,12 +26,15 @@ import sys
 import glob
 
 
+PYEPR_COVERAGE = False
+
+
 def get_version(filename):
     with open(filename) as fd:
         data = fd.read()
 
     mobj = re.search(
-        '''^__version__\s*=\s*(?P<q>['"])(?P<version>\d+(\.\d+)*.*)(?P=q)''',
+        r'''^__version__\s*=\s*(?P<q>['"])(?P<version>\d+(\.\d+)*.*)(?P=q)''',
         data, re.MULTILINE)
 
     return mobj.group('version')
@@ -62,10 +65,14 @@ print('HAVE_SETUPTOOLS: {0}'.format(HAVE_SETUPTOOLS))
 
 try:
     from Cython.Build import cythonize
+    from Cython import __version__ as CYTHON_VERSION
     HAVE_CYTHON = True
 except ImportError:
     HAVE_CYTHON = False
+    CYTHON_VERSION = None
 print('HAVE_CYTHON: {0}'.format(HAVE_CYTHON))
+if HAVE_CYTHON:
+    print('CYTHON_VERSION: {0}'.format(CYTHON_VERSION))
 
 
 # @COMPATIBILITY: Extension is an old style class in Python 2
@@ -151,18 +158,44 @@ def get_extension():
             sys.argv.remove(arg)
             break
 
+    define_macros = []
+
+    # @NOTE: uses the CYTHON_VERSION global variable
+    if HAVE_CYTHON and CYTHON_VERSION >= '0.29':
+        define_macros.append(
+            ('NPY_NO_DEPRECATED_API', 'NPY_1_7_API_VERSION'),
+        )
+
     ext = PyEprExtension(
         'epr',
         sources=[os.path.join('src', 'epr.pyx')],
         # libraries=['m'],
-        # define_macros=[('NPY_NO_DEPRECATED_API', 'NPY_1_7_API_VERSION'),],
+        define_macros=define_macros,
         eprsrcdir=eprsrcdir,
     )
 
-    # @NOTE: uses the HAVE_CYTHON global variable
+    # @NOTE: uses the HAVE_CYTHON and CYTHON_VERSION global variables
     if HAVE_CYTHON:
-        extlist = cythonize([ext])
+        if CYTHON_VERSION >= '0.29':
+            language_level = '3str'
+        else:
+            language_level = '2'
+        print('CYTHON_LANGUAGE_LEVEL: {0}'.format(language_level))
+
+        compiler_directives = dict(
+            language_level=language_level,
+        )
+
+        if PYEPR_COVERAGE:
+            compiler_directives['linetrace'] = True
+
+        extlist = cythonize([ext], compiler_directives=compiler_directives)
         ext = extlist[0]
+
+        if PYEPR_COVERAGE:
+            ext.define_macros.extend([
+                ('CYTHON_TRACE_NOGIL', '1'),
+            ])
     else:
         ext.convert_pyx_sources_to_lang()
 
@@ -203,12 +236,12 @@ any data field contained in a product file.
         'Operating System :: POSIX',
         'Programming Language :: Python',
         'Programming Language :: Python :: 2',
-        'Programming Language :: Python :: 2.6',
+        'Programming Language :: Python :: 2.6',    # deprecated
         'Programming Language :: Python :: 2.7',
         'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.2',
-        'Programming Language :: Python :: 3.3',
-        'Programming Language :: Python :: 3.4',
+        'Programming Language :: Python :: 3.2',    # deprecated
+        'Programming Language :: Python :: 3.3',    # deprecated
+        'Programming Language :: Python :: 3.4',    # deprecated
         'Programming Language :: Python :: 3.5',
         'Programming Language :: Python :: 3.6',
         'Programming Language :: Python :: 3.7',
@@ -239,8 +272,8 @@ def setup_package():
 
     if HAVE_SETUPTOOLS:
         config['test_suite'] = get_collector()
-        config.setdefault('setup_requires', []).append('numpy>=1.5')
-        config.setdefault('install_requires', []).append('numpy>=1.5')
+        config.setdefault('setup_requires', []).append('numpy>=1.7')
+        config.setdefault('install_requires', []).append('numpy>=1.7')
         if ext.setup_requires_cython:
             config['setup_requires'].append('cython>=0.19')
 
@@ -248,4 +281,11 @@ def setup_package():
 
 
 if __name__ == '__main__':
+    if '--coverage' in sys.argv or 'PYEPR_COVERAGE' in os.environ:
+        PYEPR_COVERAGE = True
+        if '--coverage' in sys.argv:
+            sys.argv.remove('--coverage')
+
+    print('PYEPR_COVERAGE:', PYEPR_COVERAGE)
+
     setup_package()


=====================================
src/epr.pxd
=====================================
@@ -2,7 +2,7 @@
 
 # PyEPR - Python bindings for ENVISAT Product Reader API
 #
-# Copyright (C) 2011-2018, Antonio Valentino <antonio.valentino at tiscali.it>
+# Copyright (C) 2011-2019, Antonio Valentino <antonio.valentino at tiscali.it>
 #
 # This file is part of PyEPR.
 #


=====================================
src/epr.pyx
=====================================
@@ -2,7 +2,7 @@
 
 # PyEPR - Python bindings for ENVISAT Product Reader API
 #
-# Copyright (C) 2011-2018, Antonio Valentino <antonio.valentino at tiscali.it>
+# Copyright (C) 2011-2019, Antonio Valentino <antonio.valentino at tiscali.it>
 #
 # This file is part of PyEPR.
 #
@@ -33,15 +33,15 @@ products. It provides access to the data either on a geophysical
 The raw data access makes it possible to read any data field contained
 in a product file.
 
-.. _PyEPR: http://avalentino.github.com/pyepr
-.. _Python: http://www.python.org
+.. _PyEPR: https://avalentino.github.io/pyepr
+.. _Python: https://www.python.org
 .. _`EPR API`: https://github.com/bcdev/epr-api
 .. _ENVISAT: http://envisat.esa.int
 .. _ESA: http://earth.esa.int
 
 """
 
-__version__ = '0.9.5'
+__version__ = '1.0.0'
 
 from libc cimport errno
 from libc cimport stdio
@@ -69,6 +69,7 @@ np.import_array()
 
 import os
 import sys
+import atexit
 from collections import namedtuple
 
 import numpy as np
@@ -195,6 +196,48 @@ _MODEL_MAP = {
 }
 
 
+ctypedef fused T:
+    np.uint8_t
+    np.int8_t
+    np.uint16_t
+    np.int16_t
+    np.uint32_t
+    np.int32_t
+    np.float32_t
+    np.float64_t
+    np.npy_byte
+
+
+cdef const void* _view_to_ptr(T[:] a):
+    return &a[0]
+
+
+cdef const void* _to_ptr(np.ndarray a, EPR_DataTypeId etype):
+    cdef const void *p = NULL
+    if etype == e_tid_uchar:
+        p = _view_to_ptr[np.uint8_t](a)
+    elif etype == e_tid_char:
+        p = _view_to_ptr[np.int8_t](a)
+    elif etype == e_tid_ushort:
+        p = _view_to_ptr[np.uint16_t](a)
+    elif etype == e_tid_short:
+        p = _view_to_ptr[np.int16_t](a)
+    elif etype == e_tid_uint:
+        p = _view_to_ptr[np.uint32_t](a)
+    elif etype == e_tid_int:
+        p = _view_to_ptr[np.int32_t](a)
+    elif etype == e_tid_float:
+        p = _view_to_ptr[np.float32_t](a)
+    elif etype == e_tid_double:
+        p = _view_to_ptr[np.float64_t](a)
+    elif etype == e_tid_string:
+        p = _view_to_ptr[np.npy_byte](a)
+    else:
+        raise ValueError('unexpected type ID: %d' % etype)
+
+    return p
+
+
 class EPRError(Exception):
     """EPR API error."""
 
@@ -275,11 +318,11 @@ cdef class _CLib:
         cdef bytes msg
 
         # @TODO: check
-        #if EPR_C_API_VERSION != '2.2':
-        #    raise ImportError('C library version not supported: "%s"' %
-        #                                                    EPR_C_API_VERSION)
+        # if EPR_C_API_VERSION != '2.2':
+        #     raise ImportError(
+        #         'C library version not supported: "%s"' % EPR_C_API_VERSION)
 
-        #if epr_init_api(e_log_warning, epr_log_message, NULL):
+        # if epr_init_api(e_log_warning, epr_log_message, NULL):
         if epr_init_api(e_log_warning, NULL, NULL):
             msg = <char*>epr_get_last_err_message()
             epr_clear_err()
@@ -388,7 +431,7 @@ cdef class DSD(EprObject):
         if isinstance(self, Dataset):
             (<Dataset>self._parent).check_closed_product()
         else:
-            #elif isinstance(self, Product):
+            # elif isinstance(self, Product):
             (<Product>self._parent).check_closed_product()
 
     property index:
@@ -560,7 +603,6 @@ cdef class Field(EprObject):
 
         return offset
 
-
     def print_(self, ostream=None):
         """print_(self, ostream=None)
 
@@ -589,9 +631,9 @@ cdef class Field(EprObject):
 
         pyepr_check_errors()
 
-    #def dump_field(self):
-    #    epr_dump_field(self._ptr)
-    #    pyepr_check_errors()
+    # def dump_field(self):
+    #     epr_dump_field(self._ptr)
+    #     pyepr_check_errors()
 
     def get_unit(self):
         """get_unit(self)
@@ -704,8 +746,8 @@ cdef class Field(EprObject):
             if index != 0:
                 raise ValueError('invalid index: %d' % index)
             val = <char*>epr_get_field_elem_as_str(self._ptr)
-        #elif etype == e_tid_spare:
-        #    val = epr_get_field_elem_as_str(self._ptr)
+        # elif etype == e_tid_spare:
+        #     val = epr_get_field_elem_as_str(self._ptr)
         elif etype == e_tid_time:
             if index != 0:
                 raise ValueError('invalid index: %d' % index)
@@ -810,15 +852,15 @@ cdef class Field(EprObject):
             buf = <char*>epr_get_field_elem_as_str(self._ptr)
             if buf is NULL:
                 pyepr_null_ptr_error(msg)
-        #elif etype == e_tid_unknown:
-        #    pass
-        #elif etype = e_tid_spare:
-        #    pass
+        # elif etype == e_tid_unknown:
+        #     pass
+        # elif etype = e_tid_spare:
+        #     pass
         else:
             raise ValueError('invalid field type')
 
         out = np.PyArray_SimpleNewFromData(nd, shape, dtype, <void*>buf)
-        #np.PyArray_CLEARFLAG(out, NPY_ARRAY_WRITEABLE)  # new in numpy 1.7
+        # np.PyArray_CLEARFLAG(out, NPY_ARRAY_WRITEABLE)  # new in numpy 1.7
         # Make the ndarray keep a reference to this object
         np.set_array_base(out, self)
 
@@ -837,10 +879,11 @@ cdef class Field(EprObject):
         cdef long field_offset
         cdef char* buf
         cdef EPR_DataTypeId etype = epr_get_field_type(self._ptr)
+        cdef const void* p = NULL
 
         dtype = _DTYPE_MAP[etype]
 
-        elems = elems.astype(dtype)
+        elems = np.ascontiguousarray(elems, dtype=dtype)
 
         record = self._parent
         dataset = record._parent
@@ -853,16 +896,18 @@ cdef class Field(EprObject):
         field_offset = index * elemsize
         file_offset = self._get_offset(absolute=1)
         buf = <char*>self._ptr.elems + field_offset
+        p = _to_ptr(elems, etype)
 
-        cstring.memcpy(<void*>buf, <const void*>elems.data, datasize)
+        with nogil:
+            cstring.memcpy(<void*>buf, p, datasize)
 
         if SWAP_BYTES:
             elems = elems.byteswap()
+            p = _to_ptr(elems, etype)
 
         with nogil:
             stdio.fseek(istream, file_offset + field_offset, stdio.SEEK_SET)
-            ret = stdio.fwrite(elems.data, elemsize, nelems,
-                               product._ptr.istream)
+            ret = stdio.fwrite(p, elemsize, nelems, product._ptr.istream)
         if ret != nelems:
             raise IOError(
                 'write error: %d of %d bytes written' % (ret, datasize))
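
The rewritten write path first coerces the input to a C-contiguous array of the dtype mapped from the EPR type ID, takes a typed pointer through the fused-type helper _to_ptr(), and byteswaps before the fwrite when SWAP_BYTES is set. A rough NumPy-level sketch of that preparation, with an illustrative swap test in place of the module's SWAP_BYTES flag and _DTYPE_MAP:

    import sys
    import numpy as np

    # Illustrative only: the real code uses _DTYPE_MAP, SWAP_BYTES and a
    # C-level memcpy/fwrite instead of tobytes().
    def prepare_for_write(elems, dtype, file_is_big_endian=True):
        elems = np.ascontiguousarray(elems, dtype=dtype)   # typed, C-contiguous
        host_is_little = (sys.byteorder == 'little')
        if host_is_little == file_is_big_endian:           # byte orders differ
            elems = elems.byteswap()                        # match on-disk order
        return elems.tobytes()                              # bytes handed to the writer

    raw = prepare_for_write([1, 2, 3], np.uint16)
    print(len(raw))  # -> 6
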
@@ -939,7 +984,6 @@ cdef class Field(EprObject):
             cdef EPR_FieldInfo* info = <EPR_FieldInfo*>self._ptr.info
             return info.tot_size
 
-
     # --- high level interface ------------------------------------------------
     def __repr__(self):
         return 'epr.Field("%s") %d %s elements' % (self.get_name(),
@@ -1021,7 +1065,7 @@ cdef class Field(EprObject):
                 n = epr_get_data_type_size(epr_get_field_type(p1))
                 if n != 0:
                     n *= epr_get_field_num_elems(p1)
-                #pyepr_check_errors()
+                # pyepr_check_errors()
                 if n <= 0:
                     # @TODO: check
                     return True
@@ -1053,7 +1097,7 @@ cdef class Field(EprObject):
                 n = epr_get_data_type_size(epr_get_field_type(p1))
                 if n != 0:
                     n *= epr_get_field_num_elems(p1)
-                #pyepr_check_errors()
+                # pyepr_check_errors()
                 if n <= 0:
                     # @TODO: check
                     return False
@@ -1127,14 +1171,14 @@ cdef class Record(EprObject):
         if isinstance(self._parent, Dataset):
             (<Dataset>self._parent).check_closed_product()
         else:
-            #elif isinstance(self._parent, Product):
+            # elif isinstance(self._parent, Product):
             (<Product>self._parent).check_closed_product()
 
     cdef inline _check_write_mode(self):
         if isinstance(self._parent, Dataset):
             (<Dataset>self._parent)._check_write_mode()
         else:
-            #elif isinstance(self._parent, Product):
+            # elif isinstance(self._parent, Product):
             (<Product>self._parent)._check_write_mode()
 
     cdef inline uint _get_offset(self, bint absolure=0):
@@ -1491,7 +1535,7 @@ cdef class Raster(EprObject):
         """
 
         if (x < 0 or <uint>x >= self._ptr.raster_width or
-            y < 0  or <uint>y >= self._ptr.raster_height):
+            y < 0 or <uint>y >= self._ptr.raster_height):
             raise ValueError('index out of range: x=%d, y=%d' % (x, y))
 
         cdef EPR_EDataTypeId dtype = self._ptr.data_type
@@ -2131,7 +2175,6 @@ cdef class Band(EprObject):
             self.check_closed_product()
             return self._ptr.magic
 
-
     property _field_index:
         """Index or the field (within the dataset) containing the raw
         data used to create the band's pixel values.
@@ -2470,8 +2513,8 @@ cdef class Product(EprObject):
         """
 
         if self._ptr is not NULL:
-            #if '+' in self.mode:
-            #    stdio.fflush(self._ptr.istream)
+            # if '+' in self.mode:
+            #     stdio.fflush(self._ptr.istream)
             epr_close_product(self._ptr)
             pyepr_check_errors()
             self._ptr = NULL
@@ -2880,8 +2923,8 @@ cdef class Product(EprObject):
         return [self.get_band_at(idx) for idx in range(num_bands)]
 
     # @TODO: iter on both datasets and bands (??)
-    #def __iter__(self):
-    #    return itertools.chain((self.datasets(), self.bands()))
+    # def __iter__(self):
+    #     return itertools.chain((self.datasets(), self.bands()))
 
     def __repr__(self):
         return 'epr.Product(%s) %d datasets, %d bands' % (self.id_string,
@@ -2940,9 +2983,6 @@ def open(filename, mode='rb'):
 _EPR_C_LIB = _CLib.__new__(_CLib)
 
 
-import atexit
-
-
 @atexit.register
 def _close_api():
     # ensure that all EprObject(s) are collected before removing the last
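
The atexit import now sits at the top of the module with the other imports, while the shutdown hook itself is unchanged. A minimal illustration of the same registration pattern, with a hypothetical cleanup function:

    import atexit

    # Hypothetical hook; in epr.pyx the registered function is _close_api().
    @atexit.register
    def _cleanup():
        # runs once, at normal interpreter shutdown
        print('releasing resources')
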


=====================================
tests/test_all.py
=====================================
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-# Copyright (C) 2011-2018, Antonio Valentino <antonio.valentino at tiscali.it>
+# Copyright (C) 2011-2019, Antonio Valentino <antonio.valentino at tiscali.it>
 #
 # This file is part of PyEPR.
 #
@@ -26,6 +26,7 @@ import gzip
 import shutil
 import numbers
 import operator
+import platform
 import tempfile
 import functools
 import contextlib
@@ -40,6 +41,8 @@ try:
     from unittest import skipIf as _skipIf, TestCase as _TestCase
     if not hasattr(_TestCase, 'subTest'):
         raise ImportError
+    if not hasattr(_TestCase, 'assertRaisesRegex'):
+        raise ImportError
 except ImportError:
     import unittest2 as unittest
 else:
@@ -520,9 +523,9 @@ class TestProductHighLevelAPI(unittest.TestCase):
     #     pass
 
     def test_repr(self):
-        pattern = ('epr\.Product\((?P<name>\w+)\) '
-                   '(?P<n_datasets>\d+) datasets, '
-                   '(?P<n_bands>\d+) bands')
+        pattern = (r'epr\.Product\((?P<name>\w+)\) '
+                   r'(?P<n_datasets>\d+) datasets, '
+                   r'(?P<n_bands>\d+) bands')
 
         mobj = re.match(pattern, repr(self.product))
         self.assertNotEqual(mobj, None)
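
The test patterns are now raw strings as well, so sequences like \. and \d are passed to the re module verbatim instead of being treated as string escapes, which newer Python versions flag with a DeprecationWarning. A quick check against a hypothetical repr value that follows the 'epr.Product(%s) %d datasets, %d bands' format shown in the epr.pyx changes above:

    import re

    # Hypothetical repr text; the tests match the repr of a real product instead.
    text = 'epr.Product(SAMPLE) 3 datasets, 5 bands'
    pattern = (r'epr\.Product\((?P<name>\w+)\) '
               r'(?P<n_datasets>\d+) datasets, '
               r'(?P<n_bands>\d+) bands')
    mobj = re.match(pattern, text)
    print(mobj.group('name'), mobj.group('n_datasets'), mobj.group('n_bands'))
    # -> SAMPLE 3 5
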
@@ -748,7 +751,7 @@ class TestDatasetHighLevelAPI(unittest.TestCase):
         self.assertEqual(index, self.dataset.get_num_records())
 
     def test_repr(self):
-        pattern = 'epr\.Dataset\((?P<name>\w+)\) (?P<num>\d+) records'
+        pattern = r'epr\.Dataset\((?P<name>\w+)\) (?P<num>\d+) records'
         mobj = re.match(pattern, repr(self.dataset))
         self.assertNotEqual(mobj, None)
         self.assertEqual(mobj.group('name'), self.dataset.get_name())
@@ -1321,8 +1324,8 @@ class TestBandHighLevelAPI(unittest.TestCase):
         self.product.close()
 
     def test_repr(self):
-        pattern = ('epr.Band\((?P<name>\w+)\) of '
-                   'epr.Product\((?P<product_id>\w+)\)')
+        pattern = (r'epr.Band\((?P<name>\w+)\) of '
+                   r'epr.Product\((?P<product_id>\w+)\)')
         for band in self.product.bands():
             mobj = re.match(pattern, repr(band))
             self.assertNotEqual(mobj, None)
@@ -1642,8 +1645,8 @@ class TestRasterHighLevelAPI(unittest.TestCase):
                                         self.RASTER_WIDTH, self.RASTER_HEIGHT)
 
     def test_repr(self):
-        pattern = ('<epr.Raster object at 0x\w+> (?P<data_type>\w+) '
-                   '\((?P<lines>\d+)L x (?P<pixels>\d+)P\)')
+        pattern = (r'<epr.Raster object at 0x\w+> (?P<data_type>\w+) '
+                   r'\((?P<lines>\d+)L x (?P<pixels>\d+)P\)')
         mobj = re.match(pattern, repr(self.raster))
         self.assertNotEqual(mobj, None)
         self.assertEqual(mobj.group('data_type'),
@@ -1865,7 +1868,7 @@ class TestMultipleRecordsHighLevelAPI(unittest.TestCase):
         self.product.close()
 
     def test_repr(self):
-        pattern = '<epr\.Record object at 0x\w+> (?P<num>\d+) fields'
+        pattern = r'<epr\.Record object at 0x\w+> (?P<num>\d+) fields'
         for record in self.dataset:
             mobj = re.match(pattern, repr(record))
             self.assertNotEqual(mobj, None)
@@ -2429,8 +2432,8 @@ class TestFieldHighLevelAPI(unittest.TestCase):
         self.record = dataset.read_record(0)
 
     def test_repr(self):
-        pattern = ('epr\.Field\("(?P<name>.+)"\) (?P<num>\d+) '
-                   '(?P<type>\w+) elements')
+        pattern = (r'epr\.Field\("(?P<name>.+)"\) (?P<num>\d+) '
+                   r'(?P<type>\w+) elements')
         for field in self.record:
             mobj = re.match(pattern, repr(field))
             self.assertNotEqual(mobj, None)
@@ -2678,7 +2681,7 @@ class TestDsdHighLevelAPI(unittest.TestCase):
         self.dsd = product.get_dsd_at(0)
 
     def test_repr(self):
-        pattern = 'epr\.DSD\("(?P<name>.+)"\)'
+        pattern = r'epr\.DSD\("(?P<name>.+)"\)'
         mobj = re.match(pattern, repr(self.dsd))
         self.assertNotEqual(mobj, None)
         self.assertEqual(mobj.group('name'), self.dsd.ds_name)
@@ -2854,34 +2857,6 @@ class TestSampleModelFunctions(unittest.TestCase):
 class TestDirectInstantiation(unittest.TestCase):
     MSG_PATTERN = '"%s" class cannot be instantiated from Python'
 
-    if sys.version_info[:2] >= (3, 2):
-        # @COMPATIBILITY: python >= 3.2
-        pass
-    elif sys.version_info[:2] in ((2, 7), (3, 1)):
-        # @COMPATIBILITY: unittest2, python2.7, python3.1
-        assertRaisesRegex = unittest.TestCase.assertRaisesRegexp
-    else:
-
-        # @COMPATIBILITY: python < 2.7
-        def assertRaisesRegex(self, expected_exception, expected_regexp,
-                              callable_obj=None, *args, **kwargs):
-            try:
-                callable_obj(*args, **kwargs)
-            except expected_exception as exc_value:
-                import types
-                if isinstance(expected_regexp, types.StringTypes):
-                    expected_regexp = re.compile(expected_regexp)
-                if not expected_regexp.search(str(exc_value)):
-                    raise self.failureException(
-                        '"%s" does not match "%s"' % (expected_regexp.pattern,
-                                                      str(exc_value)))
-            else:
-                if hasattr(expected_exception, '__name__'):
-                    excName = expected_exception.__name__
-                else:
-                    excName = str(expected_exception)
-                raise self.failureException("%s not raised" % excName)
-
     def test_direct_dsd_instantiation(self):
         pattern = self.MSG_PATTERN % epr.DSD.__name__
         self.assertRaisesRegex(TypeError, pattern, epr.DSD)
@@ -2915,6 +2890,9 @@ class TestLibVersion(unittest.TestCase):
         self.assertTrue(isinstance(epr.EPR_C_API_VERSION, str))
 
 
+# only PyPy 3 seems to be affected
+@unittest.skipIf(platform.python_implementation() == 'PyPy',
+                 'skip memory leak check on PyPy')
 @unittest.skipIf(resource is None, '"resource" module not available')
 class TestMemoryLeaks(unittest.TestCase):
     # See gh-10 (https://github.com/avalentino/pyepr/issues/10)
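
The new guard skips the memory leak checks on PyPy by keying off the interpreter implementation. A minimal sketch of the same pattern with a hypothetical test class:

    import platform
    import unittest

    # Hypothetical test class; the real check lives in TestMemoryLeaks above.
    @unittest.skipIf(platform.python_implementation() == 'PyPy',
                     'skip memory leak check on PyPy')
    class HypotheticalMemoryCheck(unittest.TestCase):
        def test_placeholder(self):
            self.assertTrue(True)

    if __name__ == '__main__':
        unittest.main()
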



View it on GitLab: https://salsa.debian.org/debian-gis-team/pyepr/commit/f7a06a4415b5e15ddcf3845fb1534d79b385589e
