[Git][debian-gis-team/pyorbital][upstream] New upstream version 1.8.0

Antonio Valentino (@antonio.valentino) gitlab@salsa.debian.org
Fri Jul 14 09:01:06 BST 2023



Antonio Valentino pushed to branch upstream at Debian GIS Project / pyorbital


Commits:
58c06b41 by Antonio Valentino at 2023-07-14T06:14:15+00:00
New upstream version 1.8.0
- - - - -


20 changed files:

- + .github/dependabot.yml
- .github/workflows/ci.yaml
- .github/workflows/deploy-sdist.yaml
- CHANGELOG.md
- doc/source/index.rst
- + pyorbital/check_platform.py
- pyorbital/etc/platforms.txt
- pyorbital/geoloc_example.py
- pyorbital/geoloc_instrument_definitions.py
- + pyorbital/logger.py
- pyorbital/orbital.py
- pyorbital/tests/test_aiaa.py
- pyorbital/tests/test_astronomy.py
- pyorbital/tests/test_geoloc.py
- pyorbital/tests/test_orbital.py
- pyorbital/tests/test_tlefile.py
- pyorbital/tlefile.py
- pyorbital/version.py
- setup.py
- versioneer.py


Changes:

=====================================
.github/dependabot.yml
=====================================
@@ -0,0 +1,11 @@
+# To get started with Dependabot version updates, you'll need to specify which
+# package ecosystems to update and where the package manifests are located.
+# Please see the documentation for all configuration options:
+# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
+
+version: 2
+updates:
+  - package-ecosystem: "github-actions" # See documentation for possible values
+    directory: "/" # Location of package manifests
+    schedule:
+      interval: "weekly"


=====================================
.github/workflows/ci.yaml
=====================================
@@ -2,47 +2,111 @@ name: CI
 
 on: [push, pull_request]
 
+env:
+  CACHE_NUMBER: 1
+
 jobs:
   test:
     runs-on: ${{ matrix.os }}
+    continue-on-error: ${{ matrix.experimental }}
     strategy:
       fail-fast: true
       matrix:
-        os: ["ubuntu-latest", "macos-latest", "windows-latest"]
-        python-version: ["3.8", "3.9", "3.10"]
+        os: ["windows-latest", "ubuntu-latest", "macos-latest"]
+        python-version: ["3.9", "3.10", "3.11"]
+        experimental: [false]
+        include:
+          - python-version: "3.11"
+            os: "ubuntu-latest"
+            experimental: true
 
     env:
       PYTHON_VERSION: ${{ matrix.python-version }}
       OS: ${{ matrix.os }}
+      UNSTABLE: ${{ matrix.experimental }}
       ACTIONS_ALLOW_UNSECURE_COMMANDS: true
 
     steps:
       - name: Checkout source
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
 
       - name: Setup Conda Environment
        uses: conda-incubator/setup-miniconda@v2
         with:
-          miniconda-version: "latest"
+          miniforge-variant: Mambaforge
+          miniforge-version: latest
+          use-mamba: true
           python-version: ${{ matrix.python-version }}
-          mamba-version: "*"
-          channels: conda-forge,defaults
-          environment-file: continuous_integration/environment.yaml
           activate-environment: test-environment
 
+      - name: Set cache environment variables
+        shell: bash -l {0}
+        run: |
+          echo "DATE=$(date +'%Y%m%d')" >> $GITHUB_ENV
+          CONDA_PREFIX=$(python -c "import sys; print(sys.prefix)")
+          echo "CONDA_PREFIX=$CONDA_PREFIX" >> $GITHUB_ENV
+
+      - uses: actions/cache at v3
+        with:
+          path: ${{ env.CONDA_PREFIX }}
+          key: ${{ matrix.os }}-${{matrix.python-version}}-conda-${{ hashFiles('continuous_integration/environment.yaml') }}-${{ env.DATE }}-${{matrix.experimental}}-${{ env.CACHE_NUMBER }}
+        id: cache
+
+      - name: Update environment
+        run: mamba env update -n test-environment -f continuous_integration/environment.yaml
+        if: steps.cache.outputs.cache-hit != 'true'
+
+      - name: Install unstable dependencies
+        if: matrix.experimental == true
+        shell: bash -l {0}
+        # We must get LD_PRELOAD for stdlibc++ or else the manylinux wheels
+        # may break the conda-forge libraries trying to use newer glibc versions
+        run: |
+          python -m pip install \
+          --index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple/ \
+          --trusted-host pypi.anaconda.org \
+          --no-deps --pre --upgrade \
+          matplotlib \
+          numpy \
+          pandas \
+          scipy; \
+          python -m pip install \
+          --no-deps --upgrade \
+          git+https://github.com/dask/dask \
+          git+https://github.com/pydata/xarray;
+          LD_PRELOAD=$(python -c "import sys; print(sys.prefix)")/lib/libstdc++.so
+          echo "LD_PRELOAD=${LD_PRELOAD}" >> $GITHUB_ENV
+
       - name: Install Pyorbital
         shell: bash -l {0}
         run: |
-          pip install --no-deps -e .
+          python -m pip install --no-deps -e .
 
       - name: Run unit tests
         shell: bash -l {0}
         run: |
-          pytest --cov=pyorbital pyorbital/tests --cov-report=xml
+          export LD_PRELOAD=${{ env.LD_PRELOAD }};
+          pytest --cov=pyorbital pyorbital/tests --cov-report=xml --cov-report=
 
       - name: Upload unittest coverage to Codecov
-        uses: codecov/codecov-action@v1
+        uses: codecov/codecov-action@v3
         with:
           flags: unittests
           file: ./coverage.xml
           env_vars: OS,PYTHON_VERSION,UNSTABLE
+
+      - name: Coveralls Parallel
+        uses: AndreMiras/coveralls-python-action@develop
+        with:
+          flag-name: run-${{ matrix.test_number }}
+          parallel: true
+        if: runner.os == 'Linux'
+
+  coveralls:
+    needs: [test]
+    runs-on: ubuntu-latest
+    steps:
+      - name: Coveralls Finished
+        uses: AndreMiras/coveralls-python-action@develop
+        with:
+          parallel-finished: true


=====================================
.github/workflows/deploy-sdist.yaml
=====================================
@@ -11,7 +11,7 @@ jobs:
 
     steps:
       - name: Checkout source
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
 
       - name: Create sdist
         shell: bash -l {0}
@@ -19,7 +19,7 @@ jobs:
 
       - name: Publish package to PyPI
         if: github.event.action == 'published'
-        uses: pypa/gh-action-pypi-publish@v1.4.1
+        uses: pypa/gh-action-pypi-publish@v1.8.7
         with:
           user: __token__
           password: ${{ secrets.pypi_password }}
\ No newline at end of file


=====================================
CHANGELOG.md
=====================================
@@ -1,3 +1,31 @@
+## Version 1.8.0 (2023/07/12)
+
+### Issues Closed
+
+* [Issue 112](https://github.com/pytroll/pyorbital/issues/112) - Is the TLES environment variable described? ([PR 113](https://github.com/pytroll/pyorbital/pull/113) by [@adybbroe](https://github.com/adybbroe))
+
+In this release 1 issue was closed.
+
+### Pull Requests Merged
+
+#### Bugs fixed
+
+* [PR 129](https://github.com/pytroll/pyorbital/pull/129) - Fix bug getting local tlefiles
+* [PR 128](https://github.com/pytroll/pyorbital/pull/128) - Fix typo in VIIRS geoloc definition
+* [PR 121](https://github.com/pytroll/pyorbital/pull/121) - fixed geoloc_example and added variable descriptions
+
+#### Features added
+
+* [PR 120](https://github.com/pytroll/pyorbital/pull/120) - Update versioneer to stop using deprecated distutils module.
+* [PR 113](https://github.com/pytroll/pyorbital/pull/113) - Make use of env variables free from satpy ([112](https://github.com/pytroll/pyorbital/issues/112))
+
+#### Documentation changes
+
+* [PR 113](https://github.com/pytroll/pyorbital/pull/113) - Make use of env variables free from satpy ([112](https://github.com/pytroll/pyorbital/issues/112))
+
+In this release 6 pull requests were closed.
+
+
 ## Version 1.7.3 (2022/07/11)
 
 ### Pull Requests Merged


=====================================
doc/source/index.rst
=====================================
@@ -8,14 +8,72 @@ Pyorbital
 
 Pyorbital is a python package to compute orbital parameters for satellites from
 TLE files as well as astronomical parameters of interest for satellite remote sensing.
-Currently pyorbital only supports low earth orbit satellites.
+Currently Pyorbital only supports low earth orbit satellites.
+
 
 Installation
 ------------
-Pyorbital comes with a file platforms.txt that maps satellite name to NORAD identifier.
-This file needs to be copied to the appropriate satpy etc directory ($PPP_CONFIG_DIR).
-It is wise to check it contains your satellites of interest. The NORAD identifier can
-be found as the first number of each line in the Two-Line Elements (eg. from celestrak).
+
+Pyorbital is available from the Python Package Index (PyPI) via pip or from
+the conda-forge conda channel. To install from PyPI in an existing environment:
+
+.. code-block:: bash
+
+   pip install pyorbital
+   
+Or in an existing conda-based environment:
+
+.. code-block:: bash
+
+   conda install -c conda-forge pyorbital
+
+From Source
+^^^^^^^^^^^
+
+Pyorbital can also be installed from source. If you want to install pyorbital
+from the latest in-development version on GitHub you can run:
+
+.. code-block:: bash
+
+   pip install git+https://github.com/pytroll/pyorbital.git
+    
+However, if you instead want to edit the source code and see the changes reflected
+when you run the code you can clone the git repository and install it in
+"editable" mode:
+
+.. code-block:: bash
+
+   git clone git://github.com/pytroll/pyorbital.git
+   cd pyorbital
+   pip install -e .
+
+
+Add platform missing information
+--------------------------------
+
+Pyorbital comes with a file *platforms.txt* that maps a satellite name to the NORAD identifier.
+
+This file already contain many low earth orbiting environmental or
+meteorological satellites and thus likely be sufficient for your purpose.
+
+But should it not contain your satellites of interest make a copy of the
+`platforms.txt <https://github.com/pytroll/pyorbital/blob/main/pyorbital/etc/platforms.txt>`_
+file and add the missing satellites and their NORAD identifiers and place
+the file in the directory pointed to by :envvar:`PYORBITAL_CONFIG_PATH`.
+
+The NORAD identifier can be found as the first number of each line in the
+Two-Line Elements files (eg. from `celestrak`_).
+
+Pyorbital comes with a small script ``check_platform.py`` to check whether a
+satellite is already supported.
+
+.. code::
+
+   python -m pyorbital.check_platform -s NOAA-21
+
+   [INFO: 2023-01-22 21:20:25 : pyorbital.tlefile] Satellite NOAA-21 is supported. NORAD number: 54234
+   [INFO: 2023-01-22 21:20:25 : pyorbital.tlefile] Satellite names and NORAD numbers are defined in /path/to/pyorbital/etc/directory/platforms.txt
+
 
 TLE files
 ---------
@@ -26,7 +84,12 @@ Pyorbital has a module for parsing NORAD TLE-files
     >>> tle.inclination
     99.043499999999995
 
-If no path is given pyorbital tries to read the earth observation TLE-files from celestrak.com
+If no path is provided pyorbital first tries to read any local TLE files in the
+directory given by the environment variable :envvar:`TLES`. If this variable is not
+set Pyorbital will try get the earth observation TLE files over the internet
+from `celestrak`_. Note this downloading only happens if no
+specific TLE file is provided or if the :envvar:`TLES` environment variable is not set.
+
 
 TLE download and database
 ~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -35,7 +98,7 @@ The historical TLE files can be requested from
 `celestrak <https://celestrak.com/NORAD/archives/request.php>`_.
 
 There is also a script, ``fetch_tles.py``, that can be used to collect
-TLE data from several locations.  Then currently supported locaions
+TLE data from several locations. The currently supported locations
 are:
 
 * generic network locations without login
@@ -78,13 +141,13 @@ But since we are interested in knowing the position of the Suomi-NPP more than
 two and half years from now (September 26, 2017) we can not rely on the current
 TLEs, but rather need a TLE closer to the time of interest:
 
-    >>> snpp = Orbital('Suomi NPP', tle_file='/data/lang/satellit/polar/orbital_elements/TLE/201502/tle-20150207.txt')
+    >>> snpp = Orbital('Suomi NPP', tle_file='/path/to/tle/files/tle-20150207.txt')
     >>> snpp.get_lonlatalt(dtobj)
     (105.37373804512762, 79.160752404540133, 838.94605490133154)
 
 If we take a TLE from one week earlier we get a slightly different result:
 
-    >>> snpp = Orbital('Suomi NPP', tle_file='/data/lang/satellit/polar/orbital_elements/TLE/201501/tle-20150131.txt')
+    >>> snpp = Orbital('Suomi NPP', tle_file='/path/to/tle/files/tle-20150131.txt')
     >>> snpp.get_lonlatalt(dtobj)
     (104.1539184988462, 79.328272480878141, 838.81555967963391)
 
@@ -101,6 +164,35 @@ The astronomy module enables computation of certain parameters of interest for s
     >>> astronomy.sun_zenith_angle(utc_time, lon, lat)
     62.685986438071602
 
+
+.. envvar:: PYORBITAL_CONFIG_PATH
+
+   It is possible (but not mandatory) to define this environment variable to
+   have full control of certain static data used by Pyorbital:
+
+   Pyorbital comes with a file *platforms.txt* that maps a satellite name to the
+   NORAD identifier. This internal file is accessed by Pyorbital without the
+   user having to do anything. But if you need to change or update this file
+   you can make your own copy and place in the directory pointed to by this
+   environment variable.
+
+.. envvar:: TLES
+
+   Two Line Element (TLE) files are accessed automatically over the internet
+   without the user having to do anything. When doing that Pyorbital will fetch
+   the most recent TLE data which may not be the most optimal for historic data
+   for instance. Also, it may not be sustainable in a production environment.
+
+   However, it is possible to let Pyorbital look for the necessary and more
+   optimal TLE data locally, by specifying the directory where such local TLE
+   files are located. If the TLES environment variable is set to point at an
+   existing local directory Pyorbital will first search for the needed TLEs
+   there. This can both be useful in an operational setup where access to the
+   internet is restricted, and when processing old/historic satellite data.
+
+   It is possible (but not mandatory) to define this environment variable.
+
+
 API
 ---
 
@@ -135,3 +227,7 @@ Astronomical computations
    * :ref:`modindex`
    * :ref:`search`
 
+
+
+.. _celestrak: Celestrak <https://celestrak.com>
+.. _github: http://github.com/pytroll/pyorbital


=====================================
pyorbital/check_platform.py
=====================================
@@ -0,0 +1,41 @@
+# Copyright (c) 2023 Pyorbital Developers
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+"""Check if a satellite is supported on default.
+
+If not the name and its NORAD number needs to be added to a local copy of the
+platforms.txt file, which then needs to be placed in the directory pointed to
+by the environment variable PYORBITAL_CONFIG_PATH.
+
+"""
+
+import argparse
+import logging
+from pyorbital.tlefile import check_is_platform_supported
+from pyorbital.logger import logging_on
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(
+        description='Check if a satellite is supported.')
+    parser.add_argument("-s", "--satellite",
+                        help=("Name of the Satellite - following WMO Oscar naming."),
+                        default=None,
+                        required=True,
+                        type=str)
+
+    args = parser.parse_args()
+    satellite_name = args.satellite
+
+    logging_on(logging.INFO)
+    check_is_platform_supported(satellite_name)


=====================================
pyorbital/etc/platforms.txt
=====================================
@@ -1,5 +1,6 @@
-# The platform numbers are given in a file $PPP_CONFIG_DIR/platforms.txt
-# in the following format.  Copy this file to $PPP_CONFIG_DIR
+# The satellite platform names and international designators are given in a
+# file $PYORBITAL_CONFIG_PATH/platforms.txt in the following format. Copy this
+# file to $PYORBITAL_CONFIG_PATH
 #
 # Mappings between satellite catalogue numbers and corresponding
 # platform names from OSCAR.
@@ -65,6 +66,7 @@ NOAA-17 27453
 NOAA-18 28654
 NOAA-19 33591
 NOAA-20 43013
+NOAA-21 54234
 RadarSat-2 32382
 Sentinel-1A 39634
 Sentinel-3A 41335


=====================================
pyorbital/geoloc_example.py
=====================================
@@ -29,34 +29,42 @@ from pyorbital.geoloc import ScanGeometry, compute_pixels, get_lonlatalt
 from mpl_toolkits.basemap import Basemap
 import matplotlib.pyplot as plt
 
+# Couple of example Two Line Elements
 tle1 = "1 33591U 09005A   12345.45213434  .00000391  00000-0  24004-3 0  6113"
 tle2 = "2 33591 098.8821 283.2036 0013384 242.4835 117.4960 14.11432063197875"
 
+# Choosing a specific time, this should be relatively close to the issue date of the TLE
 t = datetime(2012, 12, 12, 4, 16, 1, 575000)
-
-scanline_nb = 351
-
+# this is the number of full scan rotations
+scans_nb = 10
 # we take only every 40th point for plotting clarity
 scan_points = np.arange(24, 2048, 40)
-
+# This the maximum scan angle away from nadir for the given TLE that still sees earth.
+scan_angle = 55.37
+# period of one full rotation 1/6 s
+scan_p = 0.16666667
+# integration time of instrument
+int_t = 0.000025
 
 # build the avhrr instrument (scan angles)
-avhrr = np.vstack(((scan_points - 1023.5) / 1024 * np.deg2rad(-55.37),
-                   np.zeros((len(scan_points),)))).transpose()
-avhrr = np.tile(avhrr, [scanline_nb, 1])
+# creates list of radian angles centered around nadir based on the scan points that see earth
+avhrr = np.vstack(((scan_points / 1023.5-1) * np.deg2rad(-scan_angle),
+                   np.zeros((len(scan_points),))))
+avhrr = np.tile(
+        avhrr[:, np.newaxis, :], [1, scans_nb, 1])
 
 # building the corresponding times array
-offset = np.arange(scanline_nb) * 0.1666667
-times = (np.tile(scan_points * 0.000025 + 0.0025415, [scanline_nb, 1])
-         + np.expand_dims(offset, 1))
+times = np.tile(scan_points * int_t, [scans_nb, 1])
+offset = np.arange(scans_nb) * scan_p
+times += np.expand_dims(offset, 1)
 
 # build the scan geometry object
-sgeom = ScanGeometry(avhrr, times.ravel())
+sgeom = ScanGeometry(avhrr, times)
 
-# roll, pitch, yaw in radians
+# roll, pitch, yaw in radians. This is a static offset.
 rpy = (0, 0, 0)
 
-# print the lonlats for the pixel positions
+# print the longitude and latitude for the pixel positions
 s_times = sgeom.times(t)
 pixels_pos = compute_pixels((tle1, tle2), sgeom, s_times, rpy)
 pos_time = get_lonlatalt(pixels_pos, s_times)


=====================================
pyorbital/geoloc_instrument_definitions.py
=====================================
@@ -156,7 +156,7 @@ def viirs(scans_nb, scan_indices=slice(0, None),
     """
 
     entire_width = np.arange(chn_pixels)
-    scan_points = entire_width[scan_indices.astype('int')]
+    scan_points = entire_width[scan_indices].astype('int')
     scan_pixels = len(scan_points)
 
     # Initial angle 55.84 deg replaced with 56.28 deg found in


=====================================
pyorbital/logger.py
=====================================
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2023 Pyorbital developers
+
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+"""Functionality to support standard logging."""
+
+import logging
+
+
+def debug_on():
+    """Turn debugging logging on."""
+    logging_on(logging.DEBUG)
+
+
+_is_logging_on = False
+
+
+def logging_on(level=logging.WARNING):
+    """Turn logging on."""
+    global _is_logging_on
+
+    if not _is_logging_on:
+        console = logging.StreamHandler()
+        console.setFormatter(logging.Formatter("[%(levelname)s: %(asctime)s :"
+                                               " %(name)s] %(message)s",
+                                               '%Y-%m-%d %H:%M:%S'))
+        console.setLevel(level)
+        logging.getLogger('').addHandler(console)
+        _is_logging_on = True
+
+    log = logging.getLogger('')
+    log.setLevel(level)
+    for h in log.handlers:
+        h.setLevel(level)
+
+
+class NullHandler(logging.Handler):
+    """Empty handler."""
+
+    def emit(self, record):
+        """Record a message."""
+
+
+def logging_off():
+    """Turn logging off."""
+    logging.getLogger('').handlers = [NullHandler()]
+
+
+def get_logger(name):
+    """Return logger with null handle."""
+    log = logging.getLogger(name)
+    if not log.handlers:
+        log.addHandler(NullHandler())
+    return log


=====================================
pyorbital/orbital.py
=====================================
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-# Copyright (c) 2011, 2012, 2013, 2014, 2015.
+# Copyright (c) 2011-2023 Pyorbital developers
 
 # Author(s):
 
@@ -631,11 +631,11 @@ class _SGDP4(object):
         self.xn_0 = orbit_elements.mean_motion
         # A30 = -XJ3 * AE**3
 
-        if not(0 < self.eo < ECC_LIMIT_HIGH):
+        if not (0 < self.eo < ECC_LIMIT_HIGH):
             raise OrbitalError('Eccentricity out of range: %e' % self.eo)
-        elif not((0.0035 * 2 * np.pi / XMNPDA) < self.xn_0 < (18 * 2 * np.pi / XMNPDA)):
+        elif not ((0.0035 * 2 * np.pi / XMNPDA) < self.xn_0 < (18 * 2 * np.pi / XMNPDA)):
             raise OrbitalError('Mean motion out of range: %e' % self.xn_0)
-        elif not(0 < self.xincl < np.pi):
+        elif not (0 < self.xincl < np.pi):
             raise OrbitalError('Inclination out of range: %e' % self.xincl)
 
         if self.eo < 0:


=====================================
pyorbital/tests/test_aiaa.py
=====================================
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-# Copyright (c) 2011 - 2021 Pytroll Community
+# Copyright (c) 2011 - 2023 Pytroll Community
 
 # Author(s):
 
@@ -55,10 +55,10 @@ def get_results(satnumber, delay):
     path = os.path.dirname(os.path.abspath(__file__))
     with open(os.path.join(path, "aiaa_results")) as f_2:
         line = f_2.readline()
-        while(line):
+        while line:
             if line.endswith(" xx\n") and int(line[:-3]) == satnumber:
                 line = f_2.readline()
-                while(not line.startswith("%.8f" % delay)):
+                while (not line.startswith("%.8f" % delay)):
                     line = f_2.readline()
                 sline = line.split()
                 if delay == 0:
@@ -94,7 +94,7 @@ class AIAAIntegrationTest(unittest.TestCase):
         path = os.path.dirname(os.path.abspath(__file__))
         with open(os.path.join(path, "SGP4-VER.TLE")) as f__:
             test_line = f__.readline()
-            while(test_line):
+            while test_line:
                 if test_line.startswith("#"):
                     test_name = test_line
                 if test_line.startswith("1 "):
@@ -147,13 +147,3 @@ class AIAAIntegrationTest(unittest.TestCase):
                             self.assertTrue(abs(dt) < delta_time)
 
                 test_line = f__.readline()
-
-
-def suite():
-    """The suite for test_aiaa
-    """
-    loader = unittest.TestLoader()
-    mysuite = unittest.TestSuite()
-    mysuite.addTest(loader.loadTestsFromTestCase(AIAAIntegrationTest))
-
-    return mysuite


=====================================
pyorbital/tests/test_astronomy.py
=====================================
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-# Copyright (c) 2013, 2014 Martin Raspaud
+# Copyright (c) 2013, 2014, 2022 Pytroll Community
 
 # Author(s):
 
@@ -56,12 +56,3 @@ class TestAstronomy(unittest.TestCase):
         corr = astr.sun_earth_distance_correction(utc_time)
         corr_exp = 1.0156952156742332
         self.assertAlmostEqual(corr, corr_exp, places=8)
-
-
-def suite():
-    """The suite for test_astronomy."""
-    loader = unittest.TestLoader()
-    mysuite = unittest.TestSuite()
-    mysuite.addTest(loader.loadTestsFromTestCase(TestAstronomy))
-
-    return mysuite


=====================================
pyorbital/tests/test_geoloc.py
=====================================
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-# Copyright (c) 2014, 2017, 2018, 2021 Martin Raspaud
+# Copyright (c) 2014-2023 Pytroll Community
 
 # Author(s):
 
@@ -19,19 +19,17 @@
 
 # You should have received a copy of the GNU General Public License
 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
-"""Test the geoloc module.
-"""
 
-import unittest
-from datetime import datetime
+"""Test the geoloc module."""
 
+from datetime import datetime
 import numpy as np
 
 from pyorbital.geoloc import ScanGeometry, geodetic_lat, qrotate, subpoint
 from pyorbital.geoloc_instrument_definitions import avhrr, viirs, amsua, mhs, hirs4, atms, ascat
 
 
-class TestQuaternion(unittest.TestCase):
+class TestQuaternion:
     """Test the quaternion rotation."""
 
     def test_qrotate(self):
@@ -39,35 +37,39 @@ class TestQuaternion(unittest.TestCase):
         vector = np.array([[1, 0, 0]]).T
         axis = np.array([[0, 1, 0]]).T
         angle = np.deg2rad(90)
-        self.assertTrue(np.allclose(qrotate(vector, axis, angle),
-                                    np.array([[0, 0, 1]]).T))
+
+        result = qrotate(vector, axis, angle)[:, 0]
+        expected = np.array([0, 0, 1])
+        np.testing.assert_allclose(result, expected, rtol=1e-8, atol=1e-8)
 
         axis = np.array([0, 1, 0])
-        self.assertTrue(np.allclose(qrotate(vector, axis, angle),
-                                    np.array([[0, 0, 1]]).T))
+        result = qrotate(vector, axis, angle)
+        expected = np.array([[0, 0, 1]]).T
+        np.testing.assert_allclose(result, expected, rtol=1e-8, atol=1e-8)
 
         vector = np.array([[1, 0, 0],
                            [0, 0, 1]]).T
         axis = np.array([0, 1, 0])
         angle = np.deg2rad(90)
-        self.assertTrue(np.allclose(qrotate(vector, axis, angle),
-                                    np.array([[0, 0, 1],
-                                              [-1, 0, 0]]).T))
+        result = qrotate(vector, axis, angle)
+        expected = np.array([[0, 0, 1],
+                             [-1, 0, 0]]).T
+
+        np.testing.assert_allclose(result, expected, rtol=1e-8, atol=1e-8)
 
         axis = np.array([[0, 1, 0]]).T
-        self.assertTrue(np.allclose(qrotate(vector, axis, angle),
-                                    np.array([[0, 0, 1],
-                                              [-1, 0, 0]]).T))
+        result = qrotate(vector, axis, angle)
+        expected = np.array([[0, 0, 1],
+                             [-1, 0, 0]]).T
 
+        np.testing.assert_allclose(result, expected, rtol=1e-8, atol=1e-8)
 
-class TestGeoloc(unittest.TestCase):
 
-    """Test for the core computing part.
-    """
+class TestGeoloc:
+    """Test for the core computing part."""
 
     def test_scan_geometry(self):
-        """Test the ScanGeometry object.
-        """
+        """Test the ScanGeometry object."""
         scans_nb = 1
 
         xy = np.vstack((np.deg2rad(np.array([10, 0, -10])),
@@ -78,8 +80,7 @@ class TestGeoloc(unittest.TestCase):
 
         instrument = ScanGeometry(xy, times)
 
-        self.assertTrue(np.allclose(np.rad2deg(instrument.fovs[0]),
-                                    np.array([[10, 0, -10]])))
+        np.testing.assert_allclose(np.rad2deg(instrument.fovs[0]), np.array([[10, 0, -10]]))
 
         # Test vectors
 
@@ -90,38 +91,39 @@ class TestGeoloc(unittest.TestCase):
 
         vec = instrument.vectors(pos, vel)
 
-        self.assertTrue(np.allclose(np.array([[0, 0, -1]]),
-                                    vec[:, 0, 1]))
+        result = vec[:, 0, 1]
+        expected = np.array([0.0, 0.0, -1.0])
+        np.testing.assert_allclose(result, expected, rtol=1e-8, atol=1e-8)
 
         # minus sin because we use trigonometrical direction of angles
+        result = vec[:, 0, 0]
+        expected = np.array([0, -np.sin(np.deg2rad(10)), -np.cos(np.deg2rad(10))])
+        np.testing.assert_allclose(result, expected, rtol=1e-7, atol=1e-7)
 
-        self.assertTrue(np.allclose(np.array([[0,
-                                               -np.sin(np.deg2rad(10)),
-                                               -np.cos(np.deg2rad(10))]]),
-                                    vec[:, 0, 0]))
-        self.assertTrue(np.allclose(np.array([[0,
-                                               -np.sin(np.deg2rad(-10)),
-                                               -np.cos(np.deg2rad(-10))]]),
-                                    vec[:, 0, 2]))
+        result = vec[:, 0, 2]
+        expected = np.array([0, -np.sin(np.deg2rad(-10)), -np.cos(np.deg2rad(-10))])
+        np.testing.assert_allclose(result, expected, rtol=1e-7, atol=1e-7)
 
         # Test times
 
         start_of_scan = np.datetime64(datetime(2014, 1, 8, 11, 30))
         times = instrument.times(start_of_scan)
 
-        self.assertEqual(times[0, 1], start_of_scan)
-        self.assertEqual(times[0, 0], start_of_scan -
-                         np.timedelta64(100, 'ms'))
-        self.assertEqual(times[0, 2], start_of_scan +
-                         np.timedelta64(100, 'ms'))
+        assert times[0, 1] == start_of_scan
+        assert times[0, 0] == start_of_scan - np.timedelta64(100, 'ms')
+        assert times[0, 2] == start_of_scan + np.timedelta64(100, 'ms')
 
     def test_geodetic_lat(self):
         """Test the determination of the geodetic latitude."""
-        point = np.array([7000, 0, 7000])
-        self.assertEqual(geodetic_lat(point), 0.78755832699854733)
+        point = np.array([[7000, 0, 7000]]).T
+        np.testing.assert_allclose(geodetic_lat(point),
+                                   np.array([0.78755832699854733]), rtol=1e-8, atol=1e-8)
+
         points = np.array([[7000, 0, 7000],
                            [7000, 0, 7000]]).T
-        self.assertTrue(np.allclose(geodetic_lat(points), np.array([0.78755832699854733, 0.78755832699854733])))
+        result = geodetic_lat(points)
+        expected = np.array([0.78755832699854733, 0.78755832699854733])
+        np.testing.assert_allclose(result, expected, rtol=1e-8, atol=1e-8)
 
     def test_subpoint(self):
         """Test nadir determination."""
@@ -129,71 +131,77 @@ class TestGeoloc(unittest.TestCase):
         b = 6356.75231414  # km, GRS80
         point = np.array([0, 0, 7000])
         nadir = subpoint(point, a, b)
-        self.assertTrue(np.allclose(nadir, np.array([[0, 0, b]])))
+        np.testing.assert_allclose(nadir, np.array([0, 0, b]), rtol=1e-7, atol=1e-7)
 
         point = np.array([7000, 0, 7000])
         nadir = subpoint(point, a, b)
-        self.assertTrue(np.allclose(nadir,
-                                    np.array([[4507.85431429,
-                                               0,
-                                               4497.06396339]])))
+        np.testing.assert_allclose(nadir,
+                                   np.array([4507.85431429,
+                                             0,
+                                             4497.06396339]), rtol=1e-8, atol=1e-8)
         points = np.array([[7000, 0, 7000],
                            [7000, 0, 7000]]).T
         nadir = subpoint(points, a, b)
-        self.assertTrue(np.allclose(nadir[:, 0],
-                                    np.array([[4507.85431429,
-                                               0,
-                                               4497.06396339]])))
-        self.assertTrue(np.allclose(nadir[:, 1],
-                                    np.array([[4507.85431429,
-                                               0,
-                                               4497.06396339]])))
+        np.testing.assert_allclose(nadir[:, 0],
+                                   np.array([4507.85431429,
+                                             0,
+                                             4497.06396339]), rtol=1e-8, atol=1e-8)
+        np.testing.assert_allclose(nadir[:, 1],
+                                   np.array([4507.85431429,
+                                             0,
+                                             4497.06396339]), rtol=1e-8, atol=1e-8)
 
 
-class TestGeolocDefs(unittest.TestCase):
-
-    """Test the instrument definitions.
-    """
+class TestGeolocDefs:
+    """Test the instrument definitions."""
 
     def test_avhrr(self):
-        """Test the definition of the avhrr instrument
-        """
+        """Test the definition of the avhrr instrument."""
         avh = avhrr(1, np.array([0, 1023.5, 2047]))
-        self.assertTrue(np.allclose(np.rad2deg(avh.fovs[0]),
-                                    np.array([55.37, 0, -55.37])))
+        result = np.rad2deg(avh.fovs[0])
+        expected = np.array([[55.37, 0, -55.37]])
+        np.testing.assert_allclose(result, expected, rtol=1e-7, atol=1e-7)
 
         avh = avhrr(1, np.array([0, 1023.5, 2047]), 10)
-        self.assertTrue(np.allclose(np.rad2deg(avh.fovs[0]),
-                                    np.array([10, 0, -10])))
+        np.testing.assert_allclose(np.rad2deg(avh.fovs[0]),
+                                   np.array([[10, 0, -10]]))
 
         # This is perhaps a bit odd, to require avhrr to accept floats for
         # the number of scans? FIXME!
         avh = avhrr(1.1, np.array([0, 1023.5, 2047]), 10)
-        self.assertTrue(np.allclose(np.rad2deg(avh.fovs[0]),
-                                    np.array([10, 0, -10])))
+        np.testing.assert_allclose(np.rad2deg(avh.fovs[0]),
+                                   np.array([[10, 0, -10]]))
 
     def test_viirs(self):
-        """Test the definition of the viirs instrument
-        """
+        """Test the definition of the viirs instrument."""
         geom = viirs(1, np.array([0, 3200, 6399]))
         expected_fovs = np.array([
             np.tile(np.array([[0.98, -0., -0.98]]), [32, 1]),
             np.tile(np.array([[0., -0., 0]]), [32, 1])], dtype=np.float64)
 
-        self.assertTrue(np.allclose(geom.fovs,
-                                    expected_fovs, rtol=1e-2, atol=1e-2))
+        np.testing.assert_allclose(geom.fovs,
+                                   expected_fovs, rtol=1e-2, atol=1e-2)
 
         geom = viirs(2, np.array([0, 3200, 6399]))
         expected_fovs = np.array([
             np.tile(np.array([[0.98, -0., -0.98]]), [32*2, 1]),
             np.tile(np.array([[0., -0., 0]]), [32*2, 1])], dtype=np.float64)
 
-        self.assertTrue(np.allclose(geom.fovs,
-                                    expected_fovs, rtol=1e-2, atol=1e-2))
+        np.testing.assert_allclose(geom.fovs,
+                                   expected_fovs, rtol=1e-2, atol=1e-2)
+
+    def test_viirs_defaults(self):
+        """Test the definition of the viirs instrument with default slicing."""
+        geom = viirs(1, chn_pixels=3)
+        expected_fovs = np.array([
+            np.tile(np.array([[0.98, -0., -0.98]]), [32, 1]),
+            np.tile(np.array([[0., -0., 0]]), [32, 1])], dtype=np.float64)
+
+        np.testing.assert_allclose(geom.fovs,
+                                   expected_fovs, rtol=1e-2, atol=1e-2)
 
     def test_amsua(self):
-        """Test the definition of the amsua instrument
-        """
+        """Test the definition of the amsua instrument."""
         geom = amsua(1)
         expected_fovs = np.array([
             [[0.84,  0.78,  0.73,  0.67,  0.61,  0.55,  0.49,  0.44,  0.38,
@@ -201,11 +209,10 @@ class TestGeolocDefs(unittest.TestCase):
               -0.2, -0.26, -0.32, -0.38, -0.44, -0.49, -0.55, -0.61, -0.67,
               -0.73, -0.78, -0.84]],
             np.zeros((1, 30))], dtype=np.float64)
-        self.assertTrue(np.allclose(geom.fovs, expected_fovs, rtol=1e-2, atol=1e-2))
+        np.testing.assert_allclose(geom.fovs, expected_fovs, rtol=1e-2, atol=1e-2)
 
     def test_mhs(self):
-        """Test the definition of the mhs instrument
-        """
+        """Test the definition of the mhs instrument."""
         geom = mhs(1)
         expected_fovs = np.array([
             [[0.86,  0.84,  0.82,  0.8,  0.79,  0.77,  0.75,  0.73,  0.71,
@@ -219,12 +226,11 @@ class TestGeolocDefs(unittest.TestCase):
               -0.53, -0.55, -0.57, -0.59, -0.61, -0.63, -0.65, -0.67, -0.69,
               -0.71, -0.73, -0.75, -0.77, -0.79, -0.8, -0.82, -0.84, -0.86]],
             np.zeros((1, 90))], dtype=np.float64)
-        self.assertTrue(np.allclose(geom.fovs,
-                                    expected_fovs, rtol=1e-2, atol=1e-2))
+        np.testing.assert_allclose(geom.fovs,
+                                   expected_fovs, rtol=1e-2, atol=1e-2)
 
     def test_hirs4(self):
-        """Test the definition of the hirs4 instrument
-        """
+        """Test the definition of the hirs4 instrument."""
         geom = hirs4(1)
         expected_fovs = np.array([
             [[0.86,  0.83,  0.8,  0.77,  0.74,  0.71,  0.68,  0.64,  0.61,
@@ -235,12 +241,11 @@ class TestGeolocDefs(unittest.TestCase):
               -0.55, -0.58, -0.61, -0.64, -0.68, -0.71, -0.74, -0.77, -0.8,
               -0.83, -0.86]],
             np.zeros((1, 56))], dtype=np.float64)
-        self.assertTrue(np.allclose(geom.fovs,
-                                    expected_fovs, rtol=1e-2, atol=1e-2))
+        np.testing.assert_allclose(geom.fovs,
+                                   expected_fovs, rtol=1e-2, atol=1e-2)
 
     def test_atms(self):
-        """Test the definition of the atms instrument
-        """
+        """Test the definition of the atms instrument."""
         geom = atms(1)
         expected_fovs = np.array([
             [[0.92,  0.9,  0.88,  0.86,  0.84,  0.82,  0.8,  0.78,  0.76,
@@ -255,12 +260,11 @@ class TestGeolocDefs(unittest.TestCase):
               -0.65, -0.67, -0.69, -0.71, -0.73, -0.75, -0.76, -0.78, -0.8,
               -0.82, -0.84, -0.86, -0.88, -0.9, -0.92]],
             np.zeros((1, 96))], dtype=np.float64)
-        self.assertTrue(np.allclose(geom.fovs,
-                                    expected_fovs, rtol=1e-2, atol=1e-2))
+        np.testing.assert_allclose(geom.fovs,
+                                   expected_fovs, rtol=1e-2, atol=1e-2)
 
     def test_ascat(self):
-        """Test the definition of the ASCAT instrument onboard Metop"""
-
+        """Test the definition of the ASCAT instrument onboard Metop."""
         geom = ascat(1)
         expected_fovs = np.array([
             [[0.9250245,  0.90058989,  0.87615528,  0.85172067,
@@ -275,26 +279,14 @@ class TestGeolocDefs(unittest.TestCase):
               -0.80285146, -0.82728607, -0.85172067, -0.87615528,
               -0.90058989, -0.9250245]], np.zeros((1, 42))], dtype=np.float64)
 
-        self.assertTrue(np.allclose(
-            geom.fovs, expected_fovs, rtol=1e-2, atol=1e-2))
+        np.testing.assert_allclose(
+            geom.fovs, expected_fovs, rtol=1e-2, atol=1e-2)
         geom = ascat(1, np.array([0, 41]))
         expected_fovs = np.array([[[0.9250245,  -0.9250245]],
                                   [[0.,  0.]]], dtype=np.float64)
-        self.assertTrue(np.allclose(
-            geom.fovs, expected_fovs, rtol=1e-2, atol=1e-2))
+        np.testing.assert_allclose(
+            geom.fovs, expected_fovs, rtol=1e-2, atol=1e-2)
 
         geom = ascat(1, np.array([0, -1]))
-        self.assertTrue(np.allclose(
-            geom.fovs, expected_fovs, rtol=1e-2, atol=1e-2))
-
-
-def suite():
-    """The suite for test_geoloc
-    """
-    loader = unittest.TestLoader()
-    mysuite = unittest.TestSuite()
-    mysuite.addTest(loader.loadTestsFromTestCase(TestQuaternion))
-    mysuite.addTest(loader.loadTestsFromTestCase(TestGeoloc))
-    mysuite.addTest(loader.loadTestsFromTestCase(TestGeolocDefs))
-
-    return mysuite
+        np.testing.assert_allclose(
+            geom.fovs, expected_fovs, rtol=1e-2, atol=1e-2)


=====================================
pyorbital/tests/test_orbital.py
=====================================
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-# Copyright (c) 2012-2014 Martin Raspaud
+# Copyright (c) 2012-2014, 2022 Pytroll Community
 
 # Author(s):
 
@@ -410,16 +410,3 @@ class TestRegressions(unittest.TestCase):
                       line2="2 37849  98.7092 229.3263 0000715  98.5313 290.6262 14.19554485413345")
         orb.get_next_passes(parser.parse("2019-10-21 16:00:00"), 12, 123.29736, -13.93763, 0)
         warnings.filterwarnings('default')
-
-
-def suite():
-    """The suite for test_orbital
-    """
-    loader = unittest.TestLoader()
-    mysuite = unittest.TestSuite()
-    mysuite.addTest(loader.loadTestsFromTestCase(Test))
-    mysuite.addTest(loader.loadTestsFromTestCase(TestGetObserverLook))
-    mysuite.addTest(loader.loadTestsFromTestCase(TestGetObserverLookNadir))
-    mysuite.addTest(loader.loadTestsFromTestCase(TestRegressions))
-
-    return mysuite


=====================================
pyorbital/tests/test_tlefile.py
=====================================
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 #
-# Copyright (c) 2014 Martin Raspaud
+# Copyright (c) 2014-2023 Pytroll Community
 #
 # Author(s):
 #
@@ -25,11 +25,22 @@
 
 
 from pyorbital.tlefile import Tle
+from pyorbital.tlefile import (_get_config_path,
+                               read_platform_numbers,
+                               _get_local_tle_path_from_env,
+                               _get_uris_and_open_func,
+                               check_is_platform_supported,
+                               PKG_CONFIG_DIR)
+
+import logging
 import datetime
 import unittest
+from unittest.mock import patch
 from unittest import mock
+import pytest
 import os
 from contextlib import suppress
+import time
 
 line0 = "ISS (ZARYA)"
 line1 = "1 25544U 98067A   08264.51782528 -.00002182  00000-0 -11606-4 0  2927"
@@ -44,6 +55,7 @@ NOAA19_2LINES = """1 33591U 09005A   21355.91138073  .00000074  00000+0  65091-4
 """
 NOAA19_3LINES = "NOAA 19\n" + NOAA19_2LINES
 
+
 tle_xml = '\n'.join(
     ('<?xml version="1.0" encoding="UTF-8"?>',
         '<multi-mission-administrative-message>',
@@ -66,6 +78,215 @@ tle_xml = '\n'.join(
         '</multi-mission-administrative-message>'))
 
 
+ at pytest.fixture
+def fake_platforms_file(tmp_path):
+    """Return file path to a fake platforms.txt file."""
+    file_path = tmp_path / 'platforms.txt'
+    lines = ['# Some header lines - line 1\n',
+             '# Some header lines - line 2\n',
+             'NOAA-21 54234\n',
+             'NOAA-20 43013\n',
+             'UNKNOWN SATELLITE 99999\n'
+             ]
+    with open(file_path, 'w') as fpt:
+        fpt.writelines(lines)
+
+    yield file_path
+
+
+ at pytest.fixture(scope="session")
+def fake_local_tles_dir(tmp_path_factory):
+    """Make a list of fake tle files in a directory."""
+    tle_dir = tmp_path_factory.mktemp('tle_files')
+    file_path = tle_dir / 'tle-202211180230.txt'
+    file_path.touch()
+    time.sleep(1)
+    file_path = tle_dir / 'tle-202211180430.txt'
+    file_path.touch()
+    time.sleep(1)
+    file_path = tle_dir / 'tle-202211180630.txt'
+    file_path.touch()
+    time.sleep(1)
+    file_path = tle_dir / 'tle-202211180830.txt'
+    file_path.touch()
+
+    yield tle_dir
+
+
+ at pytest.fixture
+def mock_env_ppp_config_dir(monkeypatch):
+    """Mock environment variable PPP_CONFIG_DIR."""
+    monkeypatch.setenv('PPP_CONFIG_DIR', '/path/to/old/mpop/config/dir')
+
+
+ at pytest.fixture
+def mock_env_ppp_config_dir_missing(monkeypatch):
+    """Mock that the environment variable PPP_CONFIG_DIR is missing."""
+    monkeypatch.delenv('PPP_CONFIG_DIR', raising=False)
+
+
+ at pytest.fixture
+def mock_env_tles_missing(monkeypatch):
+    """Mock that the environment variable TLES is missing."""
+    monkeypatch.delenv('TLES', raising=False)
+
+
+ at pytest.fixture
+def mock_env_tles(monkeypatch):
+    """Mock environment variable TLES."""
+    monkeypatch.setenv('TLES', '/path/to/local/tles')
+
+
+def test_get_config_path_no_env_defined(caplog, mock_env_ppp_config_dir_missing):
+    """Test getting the config path."""
+    with caplog.at_level(logging.WARNING):
+        res = _get_config_path()
+
+    assert res == PKG_CONFIG_DIR
+    assert caplog.text == ''
+
+
+def test_check_is_platform_supported_existing(caplog, mock_env_ppp_config_dir_missing):
+    """Test the function to check if an existing platform is supported on default."""
+    with caplog.at_level(logging.INFO):
+        check_is_platform_supported('NOAA-21')
+
+    logoutput_lines = caplog.text.split('\n')
+
+    expected1 = "Satellite NOAA-21 is supported. NORAD number: 54234"
+    expected2 = "Satellite names and NORAD numbers are defined in {path}".format(path=PKG_CONFIG_DIR)
+
+    assert expected1 in logoutput_lines[0]
+    assert expected2 in logoutput_lines[1]
+
+
+def test_check_is_platform_supported_unknown(caplog, mock_env_ppp_config_dir_missing):
+    """Test the function to check if an unknown  platform is supported on default."""
+    sat = 'UNKNOWN'
+    with caplog.at_level(logging.INFO):
+        check_is_platform_supported(sat)
+
+    logoutput_lines = caplog.text.split('\n')
+
+    expected1 = "Satellite {satellite} is NOT supported.".format(satellite=sat)
+    expected2 = ("Please add it to a local copy of the platforms.txt file and put in " +
+                 "the directory pointed to by the environment variable PYORBITAL_CONFIG_PATH")
+    expected3 = "Satellite names and NORAD numbers are defined in {path}".format(path=PKG_CONFIG_DIR)
+
+    assert expected1 in logoutput_lines[0]
+    assert expected2 in logoutput_lines[1]
+    assert expected3 in logoutput_lines[2]
+
+
+ at patch(
+    'pyorbital.version.get_versions',
+    return_value=dict([('version', '1.9.1+1.some-futur.dirty'),
+                       ('full-revisionid', 'some-future-git-version-hash'),
+                       ('dirty', True),
+                       ('error', None),
+                       ('date', '2023-01-20T09:37:30+0100')
+                       ])
+)
+def test_get_config_path_ppp_config_set_but_not_pyorbital_future(mock, caplog, monkeypatch):
+    """Test getting the config path."""
+    monkeypatch.setenv('SATPY_CONFIG_PATH', '/path/to/satpy/etc')
+    monkeypatch.setenv('PPP_CONFIG_DIR', '/path/to/old/mpop/config/dir')
+
+    with caplog.at_level(logging.WARNING):
+        res = _get_config_path()
+
+    log_output = ("The use of PPP_CONFIG_DIR is no longer supported! " +
+                  "Please use PYORBITAL_CONFIG_PATH if you need a custom config path for pyorbital!")
+    assert log_output in caplog.text
+    assert res == PKG_CONFIG_DIR
+
+
+def test_get_config_path_ppp_config_set_but_not_pyorbital_is_deprecated(caplog, monkeypatch):
+    """Test getting the config path.
+
+    Here the case is tested when the new Pyorbital environment variable is not
+    set but the deprecated (old) Satpy/MPOP one is set.
+
+    """
+    monkeypatch.setenv('SATPY_CONFIG_PATH', '/path/to/satpy/etc')
+    monkeypatch.setenv('PPP_CONFIG_DIR', '/path/to/old/mpop/config/dir')
+
+    with caplog.at_level(logging.WARNING):
+        res = _get_config_path()
+
+    assert res == '/path/to/old/mpop/config/dir'
+
+    log_output = ('The use of PPP_CONFIG_DIR is deprecated and will be removed in version 1.9!' +
+                  ' Please use PYORBITAL_CONFIG_PATH if you need a custom config path for pyorbital!')
+
+    assert log_output in caplog.text
+
+
+def test_get_config_path_ppp_config_set_and_pyorbital(caplog, monkeypatch):
+    """Test getting the config path."""
+    pyorbital_config_dir = '/path/to/pyorbital/config/dir'
+    monkeypatch.setenv('PYORBITAL_CONFIG_PATH', pyorbital_config_dir)
+    monkeypatch.setenv('PPP_CONFIG_DIR', '/path/to/old/mpop/config/dir')
+
+    with caplog.at_level(logging.WARNING):
+        res = _get_config_path()
+
+    assert res == pyorbital_config_dir
+    assert caplog.text == ''
+
+
+def test_get_config_path_pyorbital_ppp_missing(caplog, monkeypatch, mock_env_ppp_config_dir_missing):
+    """Test getting the config path.
+
+    The old mpop PPP_CONFIG_DIR is not set but the PYORBITAL one is.
+    """
+    pyorbital_config_dir = '/path/to/pyorbital/config/dir'
+    monkeypatch.setenv('PYORBITAL_CONFIG_PATH', pyorbital_config_dir)
+
+    with caplog.at_level(logging.DEBUG):
+        res = _get_config_path()
+
+    assert res == pyorbital_config_dir
+    log_output = ("Path to the Pyorbital configuration (where e.g. " +
+                  "platforms.txt is found): {path}".format(path=pyorbital_config_dir))
+    assert log_output in caplog.text
+
+
+def test_read_platform_numbers(fake_platforms_file):
+    """Test reading the platform names and associated catalougue numbers."""
+    res = read_platform_numbers(str(fake_platforms_file))
+    assert res == {'NOAA-21': '54234', 'NOAA-20': '43013', 'UNKNOWN SATELLITE': '99999'}
+
+
+def test_get_local_tle_path_tle_env_missing(mock_env_tles_missing):
+    """Test getting the path to local TLE files - env TLES missing."""
+    res = _get_local_tle_path_from_env()
+    assert res is None
+
+
+def test_get_local_tle_path(mock_env_tles):
+    """Test getting the path to local TLE files."""
+    res = _get_local_tle_path_from_env()
+    assert res == '/path/to/local/tles'
+
+
+def test_get_uris_and_open_func_using_tles_env(caplog, fake_local_tles_dir, monkeypatch):
+    """Test getting the uris and associated open-function for reading tles.
+
+    Test providing no tle file but using the TLES env to find local tle files.
+    """
+    from collections.abc import Sequence
+
+    monkeypatch.setenv('TLES', str(fake_local_tles_dir))
+    with caplog.at_level(logging.DEBUG):
+        uris, _ = _get_uris_and_open_func()
+
+    assert isinstance(uris, Sequence)
+    assert uris[0] == str(fake_local_tles_dir / 'tle-202211180830.txt')
+    log_message = "Reading TLE from {msg}".format(msg=str(fake_local_tles_dir))
+    assert log_message in caplog.text
+
+
 class TLETest(unittest.TestCase):
     """Test TLE reading.
 
@@ -222,7 +443,7 @@ class TestDownloader(unittest.TestCase):
 
     @mock.patch('pyorbital.tlefile.requests')
     def test_fetch_plain_tle_server_is_a_teapot(self, requests):
-        """Test downloading and a TLE file from internet."""
+        """Test downloading a TLE file from internet."""
         requests.get = mock.MagicMock()
         # No data returned because the server is a teapot
         requests.get.return_value = _get_req_response(418)
@@ -241,7 +462,7 @@ class TestDownloader(unittest.TestCase):
 
     @mock.patch('pyorbital.tlefile.requests')
     def test_fetch_spacetrack_login_fails(self, requests):
-        """Test downloading and TLEs from space-track.org."""
+        """Test downloading TLEs from space-track.org."""
         mock_post = mock.MagicMock()
         mock_session = mock.MagicMock()
         mock_session.post = mock_post
@@ -264,7 +485,7 @@ class TestDownloader(unittest.TestCase):
 
     @mock.patch('pyorbital.tlefile.requests')
     def test_fetch_spacetrack_get_fails(self, requests):
-        """Test downloading and TLEs from space-track.org."""
+        """Test downloading TLEs from space-track.org."""
         mock_post = mock.MagicMock()
         mock_get = mock.MagicMock()
         mock_session = mock.MagicMock()
@@ -288,7 +509,7 @@ class TestDownloader(unittest.TestCase):
 
     @mock.patch('pyorbital.tlefile.requests')
     def test_fetch_spacetrack_success(self, requests):
-        """Test downloading and TLEs from space-track.org."""
+        """Test downloading TLEs from space-track.org."""
         mock_post = mock.MagicMock()
         mock_get = mock.MagicMock()
         mock_session = mock.MagicMock()
@@ -516,6 +737,8 @@ class TestSQLiteTLE(unittest.TestCase):
         # Do not write the satellite name
         self.db.writer_config["write_always"] = True
         self.db.writer_config["write_name"] = False
+        # Wait a bit to ensure different filename
+        time.sleep(2)
         self.db.write_tle_txt()
         files = sorted(glob.glob(os.path.join(tle_dir, 'tle_*txt')))
         self.assertEqual(len(files), 2)
@@ -524,14 +747,3 @@ class TestSQLiteTLE(unittest.TestCase):
         self.assertEqual(len(data), 2)
         self.assertEqual(data[0], line1)
         self.assertEqual(data[1], line2)
-
-
-def suite():
-    """Create the test suite for test_tlefile."""
-    loader = unittest.TestLoader()
-    mysuite = unittest.TestSuite()
-    mysuite.addTest(loader.loadTestsFromTestCase(TLETest))
-    mysuite.addTest(loader.loadTestsFromTestCase(TestDownloader))
-    mysuite.addTest(loader.loadTestsFromTestCase(TestSQLiteTLE))
-
-    return mysuite


=====================================
pyorbital/tlefile.py
=====================================
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 #
-# Copyright (c) 2011 - 2018
+# Copyright (c) 2011-2023 Pytroll Community
 #
 # Author(s):
 #
@@ -53,51 +53,98 @@ LOGGER = logging.getLogger(__name__)
 PKG_CONFIG_DIR = os.path.join(os.path.realpath(os.path.dirname(__file__)), 'etc')
 
 
-def read_platform_numbers(in_upper=False, num_as_int=False):
-    """Read platform numbers from $PPP_CONFIG_DIR/platforms.txt."""
-    out_dict = {}
-    os.getenv('PPP_CONFIG_DIR', PKG_CONFIG_DIR)
-    platform_file = None
-    if 'PPP_CONFIG_DIR' in os.environ:
-        platform_file = os.path.join(os.environ['PPP_CONFIG_DIR'], 'platforms.txt')
-    if not platform_file or not os.path.isfile(platform_file):
+def _check_support_limit_ppp_config_dir():
+    """Check the version where PPP_CONFIG_DIR will no longer be supported."""
+    from pyorbital import version
+    return version.get_versions()['version'] >= '1.9'
+
+
+def _get_config_path():
+    """Get the config path for Pyorbital."""
+    if 'PPP_CONFIG_DIR' in os.environ and 'PYORBITAL_CONFIG_PATH' not in os.environ:
+        if _check_support_limit_ppp_config_dir():
+            LOGGER.warning(
+                'The use of PPP_CONFIG_DIR is no longer supported!' +
+                ' Please use PYORBITAL_CONFIG_PATH if you need a custom config path for pyorbital!')
+            LOGGER.debug('Using the package default for configuration: %s', PKG_CONFIG_DIR)
+            return PKG_CONFIG_DIR
+        else:
+            LOGGER.warning(
+                'The use of PPP_CONFIG_DIR is deprecated and will be removed in version 1.9!' +
+                ' Please use PYORBITAL_CONFIG_PATH if you need a custom config path for pyorbital!')
+            pyorbital_config_path = os.getenv('PPP_CONFIG_DIR', PKG_CONFIG_DIR)
+    else:
+        pyorbital_config_path = os.getenv('PYORBITAL_CONFIG_PATH', PKG_CONFIG_DIR)
+
+    LOGGER.debug("Path to the Pyorbital configuration (where e.g. platforms.txt is found): %s",
+                 str(pyorbital_config_path))
+    return pyorbital_config_path
+
+
+def get_platforms_filepath():
+    """Get the platforms.txt file path.
+
+    Check that the file exists or raise an error.
+    """
+    config_path = _get_config_path()
+    platform_file = os.path.join(config_path, 'platforms.txt')
+    if not os.path.isfile(platform_file):
         platform_file = os.path.join(PKG_CONFIG_DIR, 'platforms.txt')
+        if not os.path.isfile(platform_file):
+            raise OSError("Platform file {filepath} does not exist!".format(filepath=platform_file))
 
-    try:
-        fid = open(platform_file, 'r')
-    except IOError:
-        LOGGER.error("Platform file %s not found.", platform_file)
-        return out_dict
-    for row in fid:
-        # skip comment lines
-        if not row.startswith('#'):
-            parts = row.split()
-            if len(parts) < 2:
-                continue
-            # The satellite name might have whitespace
-            platform = ' '.join(parts[:-1])
-            num = parts[-1]
-            if in_upper:
-                platform = platform.upper()
-            if num_as_int:
-                num = int(num)
-            out_dict[platform] = num
-    fid.close()
+    return platform_file
+
+
+def read_platform_numbers(filename, in_upper=False, num_as_int=False):
+    """Read platform numbers from $PYORBITAL_CONFIG_PATH/platforms.txt."""
+    out_dict = {}
+
+    with open(filename, 'r') as fid:
+        for row in fid:
+            # skip comment lines
+            if not row.startswith('#'):
+                parts = row.split()
+                if len(parts) < 2:
+                    continue
+                # The satellite name might have whitespace
+                platform = ' '.join(parts[:-1])
+                num = parts[-1]
+                if in_upper:
+                    platform = platform.upper()
+                if num_as_int:
+                    num = int(num)
+                out_dict[platform] = num
 
     return out_dict
 
 
-SATELLITES = read_platform_numbers(in_upper=True, num_as_int=False)
+SATELLITES = read_platform_numbers(get_platforms_filepath(),
+                                   in_upper=True, num_as_int=False)
 """
-The platform numbers are given in a file $PPP_CONFIG/platforms.txt
+The platform numbers are given in a file $PYORBITAL_CONFIG_PATH/platforms.txt
 in the following format:
 
-.. literalinclude:: ../../etc/platforms.txt
+.. literalinclude:: ../../pyorbital/etc/platforms.txt
   :language: text
-  :lines: 4-
+  :lines: 5-
 """
 
 
+def check_is_platform_supported(satname):
+    """Check if satellite is supported and print info."""
+    if satname in SATELLITES:
+        LOGGER.info("Satellite {name} is supported. NORAD number: {norad}".format(
+            name=satname, norad=SATELLITES[satname]))
+    else:
+        LOGGER.info("Satellite {name} is NOT supported.".format(name=satname))
+        LOGGER.info("Please add it to a local copy of the platforms.txt file and put in " +
+                    "the directory pointed to by the environment variable PYORBITAL_CONFIG_PATH")
+
+    LOGGER.info("Satellite names and NORAD numbers are defined in {filepath}".format(
+        filepath=get_platforms_filepath()))
+
+
 def _dummy_open_stringio(stream):
     return stream
 
@@ -251,10 +298,18 @@ class Tle(object):
         return s_var.getvalue()[:-1]
 
 
+def _get_local_tle_path_from_env():
+    """Get the path to possible local TLE files using the environment variable."""
+    return os.environ.get('TLES')
+
+
 def _get_uris_and_open_func(tle_file=None):
+    """Get the uri's and the adequate file open call for the TLE files."""
     def _open(filename):
         return io.open(filename, 'rb')
 
+    local_tle_path = _get_local_tle_path_from_env()
+
     if tle_file:
         if isinstance(tle_file, io.StringIO):
             uris = (tle_file,)
@@ -265,11 +320,11 @@ def _get_uris_and_open_func(tle_file=None):
         else:
             uris = (tle_file,)
             open_func = _open
-    elif "TLES" in os.environ:
+    elif local_tle_path:
         # TODO: get the TLE file closest in time to the actual satellite
         # overpass, NOT the latest!
-        uris = (max(glob.glob(os.environ["TLES"]),
-                    key=os.path.getctime), )
+        list_of_tle_files = glob.glob(os.path.join(local_tle_path, '*'))
+        uris = (max(list_of_tle_files, key=os.path.getctime), )
         LOGGER.debug("Reading TLE from %s", uris[0])
         open_func = _open
     else:
@@ -432,6 +487,7 @@ def collect_filenames(paths):
 
 
 def read_tles_from_mmam_xml_files(paths):
+    """Read TLEs from EUMETSAT MMAM XML files."""
     # Collect filenames
     fnames = collect_filenames(paths)
     tles = []
@@ -444,6 +500,7 @@ def read_tles_from_mmam_xml_files(paths):
 
 
 def read_tle_from_mmam_xml_file(fname):
+    """Read TLEs from a EUMETSAT MMAM XML file."""
     tree = ET.parse(fname)
     root = tree.getroot()
     data = []
@@ -455,7 +512,7 @@ def read_tle_from_mmam_xml_file(fname):
 
 
 def _group_iterable_to_chunks(n, iterable, fillvalue=None):
-    "Collect data into fixed-length chunks or blocks"
+    """Collect data into fixed-length chunks or blocks."""
     # _group_iterable_to_chunks(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
     args = [iter(iterable)] * n
     return zip_longest(fillvalue=fillvalue, *args)


=====================================
pyorbital/version.py
=====================================
@@ -5,8 +5,9 @@
 # directories (produced by setup.py build) will contain a much shorter file
 # that just contains the computed version number.
 
-# This file is released into the public domain. Generated by
-# versioneer-0.18 (https://github.com/warner/python-versioneer)
+# This file is released into the public domain.
+# Generated by versioneer-0.28
+# https://github.com/python-versioneer/python-versioneer
 
 """Git implementation of _version.py."""
 
@@ -15,6 +16,8 @@ import os
 import re
 import subprocess
 import sys
+from typing import Callable, Dict
+import functools
 
 
 def get_keywords():
@@ -23,9 +26,9 @@ def get_keywords():
     # setup.py/versioneer.py will grep for the variable names, so they must
     # each be defined on a line of their own. _version.py will just call
     # get_keywords().
-    git_refnames = " (HEAD -> main, tag: v1.7.3)"
-    git_full = "f7038d4b2a94a226fbbccc6680a797a7f62d0680"
-    git_date = "2022-07-11 13:51:48 -0500"
+    git_refnames = " (HEAD -> main, tag: v1.8.0)"
+    git_full = "aa3a2169cb695068fd57cb257a5070736cc50098"
+    git_date = "2023-07-12 08:37:37 -0500"
     keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
     return keywords
 
@@ -52,12 +55,12 @@ class NotThisMethod(Exception):
     """Exception raised if a method is not valid for the current scenario."""
 
 
-LONG_VERSION_PY = {}
-HANDLERS = {}
+LONG_VERSION_PY: Dict[str, str] = {}
+HANDLERS: Dict[str, Dict[str, Callable]] = {}
 
 
 def register_vcs_handler(vcs, method):  # decorator
-    """Decorator to mark a method as the handler for a particular VCS."""
+    """Create decorator to mark a method as the handler of a VCS."""
     def decorate(f):
         """Store f in HANDLERS[vcs][method]."""
         if vcs not in HANDLERS:
@@ -71,17 +74,25 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
                 env=None):
     """Call the given command(s)."""
     assert isinstance(commands, list)
-    p = None
-    for c in commands:
+    process = None
+
+    popen_kwargs = {}
+    if sys.platform == "win32":
+        # This hides the console window if pythonw.exe is used
+        startupinfo = subprocess.STARTUPINFO()
+        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
+        popen_kwargs["startupinfo"] = startupinfo
+
+    for command in commands:
         try:
-            dispcmd = str([c] + args)
+            dispcmd = str([command] + args)
             # remember shell=False, so use git.cmd on windows, not just git
-            p = subprocess.Popen([c] + args, cwd=cwd, env=env,
-                                 stdout=subprocess.PIPE,
-                                 stderr=(subprocess.PIPE if hide_stderr
-                                         else None))
+            process = subprocess.Popen([command] + args, cwd=cwd, env=env,
+                                       stdout=subprocess.PIPE,
+                                       stderr=(subprocess.PIPE if hide_stderr
+                                               else None), **popen_kwargs)
             break
-        except EnvironmentError:
+        except OSError:
             e = sys.exc_info()[1]
             if e.errno == errno.ENOENT:
                 continue
@@ -93,15 +104,13 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
         if verbose:
             print("unable to find command, tried %s" % (commands,))
         return None, None
-    stdout = p.communicate()[0].strip()
-    if sys.version_info[0] >= 3:
-        stdout = stdout.decode()
-    if p.returncode != 0:
+    stdout = process.communicate()[0].strip().decode()
+    if process.returncode != 0:
         if verbose:
             print("unable to run %s (error)" % dispcmd)
             print("stdout was %s" % stdout)
-        return None, p.returncode
-    return stdout, p.returncode
+        return None, process.returncode
+    return stdout, process.returncode
 
 
 def versions_from_parentdir(parentdir_prefix, root, verbose):
@@ -113,15 +122,14 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
     """
     rootdirs = []
 
-    for i in range(3):
+    for _ in range(3):
         dirname = os.path.basename(root)
         if dirname.startswith(parentdir_prefix):
             return {"version": dirname[len(parentdir_prefix):],
                     "full-revisionid": None,
                     "dirty": False, "error": None, "date": None}
-        else:
-            rootdirs.append(root)
-            root = os.path.dirname(root)  # up a level
+        rootdirs.append(root)
+        root = os.path.dirname(root)  # up a level
 
     if verbose:
         print("Tried directories %s but none started with prefix %s" %
@@ -138,22 +146,21 @@ def git_get_keywords(versionfile_abs):
     # _version.py.
     keywords = {}
     try:
-        f = open(versionfile_abs, "r")
-        for line in f.readlines():
-            if line.strip().startswith("git_refnames ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["refnames"] = mo.group(1)
-            if line.strip().startswith("git_full ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["full"] = mo.group(1)
-            if line.strip().startswith("git_date ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["date"] = mo.group(1)
-        f.close()
-    except EnvironmentError:
+        with open(versionfile_abs, "r") as fobj:
+            for line in fobj:
+                if line.strip().startswith("git_refnames ="):
+                    mo = re.search(r'=\s*"(.*)"', line)
+                    if mo:
+                        keywords["refnames"] = mo.group(1)
+                if line.strip().startswith("git_full ="):
+                    mo = re.search(r'=\s*"(.*)"', line)
+                    if mo:
+                        keywords["full"] = mo.group(1)
+                if line.strip().startswith("git_date ="):
+                    mo = re.search(r'=\s*"(.*)"', line)
+                    if mo:
+                        keywords["date"] = mo.group(1)
+    except OSError:
         pass
     return keywords
 
@@ -161,10 +168,14 @@ def git_get_keywords(versionfile_abs):
 @register_vcs_handler("git", "keywords")
 def git_versions_from_keywords(keywords, tag_prefix, verbose):
     """Get version information from git keywords."""
-    if not keywords:
-        raise NotThisMethod("no keywords at all, weird")
+    if "refnames" not in keywords:
+        raise NotThisMethod("Short version file found")
     date = keywords.get("date")
     if date is not None:
+        # Use only the last line.  Previous lines may contain GPG signature
+        # information.
+        date = date.splitlines()[-1]
+
         # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
         # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
         # -like" string, which we must then edit to make compliant), because
@@ -177,11 +188,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
         if verbose:
             print("keywords are unexpanded, not using")
         raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
-    refs = set([r.strip() for r in refnames.strip("()").split(",")])
+    refs = {r.strip() for r in refnames.strip("()").split(",")}
     # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
     # just "foo-1.0". If we see a "tag: " prefix, prefer those.
     TAG = "tag: "
-    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
+    tags = {r[len(TAG):] for r in refs if r.startswith(TAG)}
     if not tags:
         # Either we're using git < 1.8.3, or there really are no tags. We use
         # a heuristic: assume all version tags have a digit. The old git %d
@@ -190,7 +201,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
         # between branches and tags. By ignoring refnames without digits, we
         # filter out many common branch names like "release" and
         # "stabilization", as well as "HEAD" and "master".
-        tags = set([r for r in refs if re.search(r'\d', r)])
+        tags = {r for r in refs if re.search(r'\d', r)}
         if verbose:
             print("discarding '%s', no digits" % ",".join(refs - tags))
     if verbose:
@@ -199,6 +210,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
         # sorting will prefer e.g. "2.0" over "2.0rc1"
         if ref.startswith(tag_prefix):
             r = ref[len(tag_prefix):]
+            # Filter out refs that exactly match prefix or that don't start
+            # with a number once the prefix is stripped (mostly a concern
+            # when prefix is '')
+            if not re.match(r'\d', r):
+                continue
             if verbose:
                 print("picking %s" % r)
             return {"version": r,
@@ -214,7 +230,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
 
 
 @register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
+def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command):
     """Get version from 'git describe' in the root of the source tree.
 
     This only gets called if the git-archive 'subst' keywords were *not*
@@ -225,8 +241,15 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
     if sys.platform == "win32":
         GITS = ["git.cmd", "git.exe"]
 
-    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
-                          hide_stderr=True)
+    # GIT_DIR can interfere with correct operation of Versioneer.
+    # It may be intended to be passed to the Versioneer-versioned project,
+    # but that should not change where we get our version from.
+    env = os.environ.copy()
+    env.pop("GIT_DIR", None)
+    runner = functools.partial(runner, env=env)
+
+    _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root,
+                   hide_stderr=not verbose)
     if rc != 0:
         if verbose:
             print("Directory %s not under git control" % root)
@@ -234,15 +257,15 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
 
     # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
     # if there isn't one, this yields HEX[-dirty] (no NUM)
-    describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
-                                          "--always", "--long",
-                                          "--match", "%s*" % tag_prefix],
-                                   cwd=root)
+    describe_out, rc = runner(GITS, [
+        "describe", "--tags", "--dirty", "--always", "--long",
+        "--match", f"{tag_prefix}[[:digit:]]*"
+    ], cwd=root)
     # --long was added in git-1.5.5
     if describe_out is None:
         raise NotThisMethod("'git describe' failed")
     describe_out = describe_out.strip()
-    full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
+    full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
     if full_out is None:
         raise NotThisMethod("'git rev-parse' failed")
     full_out = full_out.strip()
@@ -252,6 +275,39 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
     pieces["short"] = full_out[:7]  # maybe improved later
     pieces["error"] = None
 
+    branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"],
+                             cwd=root)
+    # --abbrev-ref was added in git-1.6.3
+    if rc != 0 or branch_name is None:
+        raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
+    branch_name = branch_name.strip()
+
+    if branch_name == "HEAD":
+        # If we aren't exactly on a branch, pick a branch which represents
+        # the current commit. If all else fails, we are on a branchless
+        # commit.
+        branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
+        # --contains was added in git-1.5.4
+        if rc != 0 or branches is None:
+            raise NotThisMethod("'git branch --contains' returned error")
+        branches = branches.split("\n")
+
+        # Remove the first line if we're running detached
+        if "(" in branches[0]:
+            branches.pop(0)
+
+        # Strip off the leading "* " from the list of branches.
+        branches = [branch[2:] for branch in branches]
+        if "master" in branches:
+            branch_name = "master"
+        elif not branches:
+            branch_name = None
+        else:
+            # Pick the first branch that is returned. Good or bad.
+            branch_name = branches[0]
+
+    pieces["branch"] = branch_name
+
     # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
     # TAG might have hyphens.
     git_describe = describe_out
@@ -268,7 +324,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
         # TAG-NUM-gHEX
         mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
         if not mo:
-            # unparseable. Maybe git-describe is misbehaving?
+            # unparsable. Maybe git-describe is misbehaving?
             pieces["error"] = ("unable to parse git-describe output: '%s'"
                                % describe_out)
             return pieces
@@ -293,13 +349,14 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
     else:
         # HEX: no tags
         pieces["closest-tag"] = None
-        count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
-                                    cwd=root)
-        pieces["distance"] = int(count_out)  # total number of commits
+        out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root)
+        pieces["distance"] = len(out.split())  # total number of commits
 
     # commit date: see ISO-8601 comment in git_versions_from_keywords()
-    date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
-                       cwd=root)[0].strip()
+    date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip()
+    # Use only the last line.  Previous lines may contain GPG signature
+    # information.
+    date = date.splitlines()[-1]
     pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
 
     return pieces
@@ -337,19 +394,67 @@ def render_pep440(pieces):
     return rendered
 
 
-def render_pep440_pre(pieces):
-    """TAG[.post.devDISTANCE] -- No -dirty.
+def render_pep440_branch(pieces):
+    """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .
+
+    The ".dev0" means not master branch. Note that .dev0 sorts backwards
+    (a feature branch will appear "older" than the master branch).
 
     Exceptions:
-    1: no tags. 0.post.devDISTANCE
+    1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
     """
     if pieces["closest-tag"]:
         rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            if pieces["branch"] != "master":
+                rendered += ".dev0"
+            rendered += plus_or_dot(pieces)
+            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
+            if pieces["dirty"]:
+                rendered += ".dirty"
+    else:
+        # exception #1
+        rendered = "0"
+        if pieces["branch"] != "master":
+            rendered += ".dev0"
+        rendered += "+untagged.%d.g%s" % (pieces["distance"],
+                                          pieces["short"])
+        if pieces["dirty"]:
+            rendered += ".dirty"
+    return rendered
+
+
+def pep440_split_post(ver):
+    """Split pep440 version string at the post-release segment.
+
+    Returns the release segments before the post-release and the
+    post-release version number (or -1 if no post-release segment is present).
+    """
+    vc = str.split(ver, ".post")
+    return vc[0], int(vc[1] or 0) if len(vc) == 2 else None
+
+
+def render_pep440_pre(pieces):
+    """TAG[.postN.devDISTANCE] -- No -dirty.
+
+    Exceptions:
+    1: no tags. 0.post0.devDISTANCE
+    """
+    if pieces["closest-tag"]:
         if pieces["distance"]:
-            rendered += ".post.dev%d" % pieces["distance"]
+            # update the post release segment
+            tag_version, post_version = pep440_split_post(pieces["closest-tag"])
+            rendered = tag_version
+            if post_version is not None:
+                rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"])
+            else:
+                rendered += ".post0.dev%d" % (pieces["distance"])
+        else:
+            # no commits, use the tag as the version
+            rendered = pieces["closest-tag"]
     else:
         # exception #1
-        rendered = "0.post.dev%d" % pieces["distance"]
+        rendered = "0.post0.dev%d" % pieces["distance"]
     return rendered
 
 
@@ -380,12 +485,41 @@ def render_pep440_post(pieces):
     return rendered
 
 
+def render_pep440_post_branch(pieces):
+    """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .
+
+    The ".dev0" means not master branch.
+
+    Exceptions:
+    1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += ".post%d" % pieces["distance"]
+            if pieces["branch"] != "master":
+                rendered += ".dev0"
+            rendered += plus_or_dot(pieces)
+            rendered += "g%s" % pieces["short"]
+            if pieces["dirty"]:
+                rendered += ".dirty"
+    else:
+        # exception #1
+        rendered = "0.post%d" % pieces["distance"]
+        if pieces["branch"] != "master":
+            rendered += ".dev0"
+        rendered += "+g%s" % pieces["short"]
+        if pieces["dirty"]:
+            rendered += ".dirty"
+    return rendered
+
+
 def render_pep440_old(pieces):
     """TAG[.postDISTANCE[.dev0]] .
 
     The ".dev0" means dirty.
 
-    Eexceptions:
+    Exceptions:
     1: no tags. 0.postDISTANCE[.dev0]
     """
     if pieces["closest-tag"]:
@@ -456,10 +590,14 @@ def render(pieces, style):
 
     if style == "pep440":
         rendered = render_pep440(pieces)
+    elif style == "pep440-branch":
+        rendered = render_pep440_branch(pieces)
     elif style == "pep440-pre":
         rendered = render_pep440_pre(pieces)
     elif style == "pep440-post":
         rendered = render_pep440_post(pieces)
+    elif style == "pep440-post-branch":
+        rendered = render_pep440_post_branch(pieces)
     elif style == "pep440-old":
         rendered = render_pep440_old(pieces)
     elif style == "git-describe":
@@ -495,7 +633,7 @@ def get_versions():
         # versionfile_source is the relative path from the top of the source
         # tree (where the .git directory might live) to this file. Invert
         # this to find the root from __file__.
-        for i in cfg.versionfile_source.split('/'):
+        for _ in cfg.versionfile_source.split('/'):
             root = os.path.dirname(root)
     except NameError:
         return {"version": "0+unknown", "full-revisionid": None,


=====================================
setup.py
=====================================
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 #
-# Copyright (c) 2011-2021 Pytroll Community
+# Copyright (c) 2011-2023 Pytroll Community
 #
 # Author(s):
 #
@@ -52,11 +52,10 @@ setup(name='pyorbital',
       url="https://github.com/pytroll/pyorbital",
       long_description=long_description,
       long_description_content_type='text/markdown',
-      test_suite='pyorbital.tests.suite',
       packages=find_packages(),
       package_data={'pyorbital': [os.path.join('etc', 'platforms.txt')]},
       scripts=['bin/fetch_tles.py', ],
       install_requires=['numpy>=1.19.0', 'scipy', 'requests'],
-      python_requires='>=3.8',
+      python_requires='>=3.9',
       zip_safe=False,
       )


=====================================
versioneer.py
=====================================
@@ -1,5 +1,5 @@
 
-# Version: 0.18
+# Version: 0.28
 
 """The Versioneer - like a rocketeer, but for versions.
 
@@ -7,18 +7,14 @@ The Versioneer
 ==============
 
 * like a rocketeer, but for versions!
-* https://github.com/warner/python-versioneer
+* https://github.com/python-versioneer/python-versioneer
 * Brian Warner
-* License: Public Domain
-* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy
-* [![Latest Version]
-(https://pypip.in/version/versioneer/badge.svg?style=flat)
-](https://pypi.python.org/pypi/versioneer/)
-* [![Build Status]
-(https://travis-ci.org/warner/python-versioneer.png?branch=master)
-](https://travis-ci.org/warner/python-versioneer)
-
-This is a tool for managing a recorded version number in distutils-based
+* License: Public Domain (Unlicense)
+* Compatible with: Python 3.7, 3.8, 3.9, 3.10 and pypy3
+* [![Latest Version][pypi-image]][pypi-url]
+* [![Build Status][travis-image]][travis-url]
+
+This is a tool for managing a recorded version number in setuptools-based
 python projects. The goal is to remove the tedious and error-prone "update
 the embedded version string" step from your release process. Making a new
 release should be as easy as recording a new tag in your version-control
@@ -27,9 +23,38 @@ system, and maybe making new tarballs.
 
 ## Quick Install
 
-* `pip install versioneer` to somewhere to your $PATH
-* add a `[versioneer]` section to your setup.cfg (see below)
-* run `versioneer install` in your source tree, commit the results
+Versioneer provides two installation modes. The "classic" vendored mode installs
+a copy of versioneer into your repository. The experimental build-time dependency mode
+is intended to allow you to skip this step and simplify the process of upgrading.
+
+### Vendored mode
+
+* `pip install versioneer` to somewhere in your $PATH
+   * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is
+     available, so you can also use `conda install -c conda-forge versioneer`
+* add a `[tool.versioneer]` section to your `pyproject.toml` or a
+  `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md))
+   * Note that you will need to add `tomli; python_version < "3.11"` to your
+     build-time dependencies if you use `pyproject.toml`
+* run `versioneer install --vendor` in your source tree, commit the results
+* verify version information with `python setup.py version`
+
+### Build-time dependency mode
+
+* `pip install versioneer` to somewhere in your $PATH
+   * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is
+     available, so you can also use `conda install -c conda-forge versioneer`
+* add a `[tool.versioneer]` section to your `pyproject.toml` or a
+  `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md))
+* add `versioneer` (with `[toml]` extra, if configuring in `pyproject.toml`)
+  to the `requires` key of the `build-system` table in `pyproject.toml`:
+  ```toml
+  [build-system]
+  requires = ["setuptools", "versioneer[toml]"]
+  build-backend = "setuptools.build_meta"
+  ```
+* run `versioneer install --no-vendor` in your source tree, commit the results
+* verify version information with `python setup.py version`
 
 ## Version Identifiers
 
@@ -61,7 +86,7 @@ version 1.3). Many VCS systems can report a description that captures this,
 for example `git describe --tags --dirty --always` reports things like
 "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the
 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
-uncommitted changes.
+uncommitted changes).
 
 The version identifier is used for multiple purposes:
 
@@ -166,7 +191,7 @@ which may help identify what went wrong).
 
 Some situations are known to cause problems for Versioneer. This details the
 most significant ones. More can be found on Github
-[issues page](https://github.com/warner/python-versioneer/issues).
+[issues page](https://github.com/python-versioneer/python-versioneer/issues).
 
 ### Subprojects
 
@@ -180,7 +205,7 @@ two common reasons why `setup.py` might not be in the root:
   `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
   distributions (and upload multiple independently-installable tarballs).
 * Source trees whose main purpose is to contain a C library, but which also
-  provide bindings to Python (and perhaps other langauges) in subdirectories.
+  provide bindings to Python (and perhaps other languages) in subdirectories.
 
 Versioneer will look for `.git` in parent directories, and most operations
 should get the right version string. However `pip` and `setuptools` have bugs
@@ -194,9 +219,9 @@ work too.
 Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
 some later version.
 
-[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking
+[Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking
 this issue. The discussion in
-[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the
+[PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the
 issue from the Versioneer side in more detail.
 [pip PR#3176](https://github.com/pypa/pip/pull/3176) and
 [pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
@@ -224,31 +249,20 @@ regenerated while a different version is checked out. Many setup.py commands
 cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into
 a different virtualenv), so this can be surprising.
 
-[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes
+[Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes
 this one, but upgrading to a newer version of setuptools should probably
 resolve it.
 
-### Unicode version strings
-
-While Versioneer works (and is continually tested) with both Python 2 and
-Python 3, it is not entirely consistent with bytes-vs-unicode distinctions.
-Newer releases probably generate unicode version strings on py2. It's not
-clear that this is wrong, but it may be surprising for applications when then
-write these strings to a network connection or include them in bytes-oriented
-APIs like cryptographic checksums.
-
-[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates
-this question.
-
 
 ## Updating Versioneer
 
 To upgrade your project to a new release of Versioneer, do the following:
 
 * install the new Versioneer (`pip install -U versioneer` or equivalent)
-* edit `setup.cfg`, if necessary, to include any new configuration settings
-  indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details.
-* re-run `versioneer install` in your source tree, to replace
+* edit `setup.cfg` and `pyproject.toml`, if necessary,
+  to include any new configuration settings indicated by the release notes.
+  See [UPGRADING](./UPGRADING.md) for details.
+* re-run `versioneer install --[no-]vendor` in your source tree, to replace
   `SRC/_version.py`
 * commit any changed files
 
@@ -265,28 +279,54 @@ installation by editing setup.py . Alternatively, it might go the other
 direction and include code from all supported VCS systems, reducing the
 number of intermediate scripts.
 
+## Similar projects
+
+* [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time
+  dependency
+* [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of
+  versioneer
+* [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools
+  plugin
 
 ## License
 
 To make Versioneer easier to embed, all its code is dedicated to the public
 domain. The `_version.py` that it creates is also in the public domain.
-Specifically, both are released under the Creative Commons "Public Domain
-Dedication" license (CC0-1.0), as described in
-https://creativecommons.org/publicdomain/zero/1.0/ .
+Specifically, both are released under the "Unlicense", as described in
+https://unlicense.org/.
+
+[pypi-image]: https://img.shields.io/pypi/v/versioneer.svg
+[pypi-url]: https://pypi.python.org/pypi/versioneer/
+[travis-image]:
+https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg
+[travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer
 
 """
+# pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring
+# pylint:disable=missing-class-docstring,too-many-branches,too-many-statements
+# pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error
+# pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with
+# pylint:disable=attribute-defined-outside-init,too-many-arguments
 
-from __future__ import print_function
-try:
-    import configparser
-except ImportError:
-    import ConfigParser as configparser
+import configparser
 import errno
 import json
 import os
 import re
 import subprocess
 import sys
+from pathlib import Path
+from typing import Callable, Dict
+import functools
+
+have_tomllib = True
+if sys.version_info >= (3, 11):
+    import tomllib
+else:
+    try:
+        import tomli as tomllib
+    except ImportError:
+        have_tomllib = False
 
 
 class VersioneerConfig:
@@ -321,12 +361,12 @@ def get_root():
         # module-import table will cache the first one. So we can't use
         # os.path.dirname(__file__), as that will find whichever
         # versioneer.py was first imported, even in later projects.
-        me = os.path.realpath(os.path.abspath(__file__))
-        me_dir = os.path.normcase(os.path.splitext(me)[0])
+        my_path = os.path.realpath(os.path.abspath(__file__))
+        me_dir = os.path.normcase(os.path.splitext(my_path)[0])
         vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
-        if me_dir != vsr_dir:
+        if me_dir != vsr_dir and "VERSIONEER_PEP518" not in globals():
             print("Warning: build in %s is using versioneer.py from %s"
-                  % (os.path.dirname(me), versioneer_py))
+                  % (os.path.dirname(my_path), versioneer_py))
     except NameError:
         pass
     return root
@@ -334,30 +374,39 @@ def get_root():
 
 def get_config_from_root(root):
     """Read the project setup.cfg file to determine Versioneer config."""
-    # This might raise EnvironmentError (if setup.cfg is missing), or
+    # This might raise OSError (if setup.cfg is missing), or
     # configparser.NoSectionError (if it lacks a [versioneer] section), or
     # configparser.NoOptionError (if it lacks "VCS="). See the docstring at
     # the top of versioneer.py for instructions on writing your setup.cfg .
-    setup_cfg = os.path.join(root, "setup.cfg")
-    parser = configparser.SafeConfigParser()
-    with open(setup_cfg, "r") as f:
-        parser.readfp(f)
-    VCS = parser.get("versioneer", "VCS")  # mandatory
-
-    def get(parser, name):
-        if parser.has_option("versioneer", name):
-            return parser.get("versioneer", name)
-        return None
+    root = Path(root)
+    pyproject_toml = root / "pyproject.toml"
+    setup_cfg = root / "setup.cfg"
+    section = None
+    if pyproject_toml.exists() and have_tomllib:
+        try:
+            with open(pyproject_toml, 'rb') as fobj:
+                pp = tomllib.load(fobj)
+            section = pp['tool']['versioneer']
+        except (tomllib.TOMLDecodeError, KeyError):
+            pass
+    if not section:
+        parser = configparser.ConfigParser()
+        with open(setup_cfg) as cfg_file:
+            parser.read_file(cfg_file)
+        parser.get("versioneer", "VCS")  # raise error if missing
+
+        section = parser["versioneer"]
+
     cfg = VersioneerConfig()
-    cfg.VCS = VCS
-    cfg.style = get(parser, "style") or ""
-    cfg.versionfile_source = get(parser, "versionfile_source")
-    cfg.versionfile_build = get(parser, "versionfile_build")
-    cfg.tag_prefix = get(parser, "tag_prefix")
-    if cfg.tag_prefix in ("''", '""'):
+    cfg.VCS = section['VCS']
+    cfg.style = section.get("style", "")
+    cfg.versionfile_source = section.get("versionfile_source")
+    cfg.versionfile_build = section.get("versionfile_build")
+    cfg.tag_prefix = section.get("tag_prefix")
+    if cfg.tag_prefix in ("''", '""', None):
         cfg.tag_prefix = ""
-    cfg.parentdir_prefix = get(parser, "parentdir_prefix")
-    cfg.verbose = get(parser, "verbose")
+    cfg.parentdir_prefix = section.get("parentdir_prefix")
+    cfg.verbose = section.get("verbose")
     return cfg
 
 
@@ -366,17 +415,15 @@ class NotThisMethod(Exception):
 
 
 # these dictionaries contain VCS-specific tools
-LONG_VERSION_PY = {}
-HANDLERS = {}
+LONG_VERSION_PY: Dict[str, str] = {}
+HANDLERS: Dict[str, Dict[str, Callable]] = {}
 
 
 def register_vcs_handler(vcs, method):  # decorator
-    """Decorator to mark a method as the handler for a particular VCS."""
+    """Create decorator to mark a method as the handler of a VCS."""
     def decorate(f):
         """Store f in HANDLERS[vcs][method]."""
-        if vcs not in HANDLERS:
-            HANDLERS[vcs] = {}
-        HANDLERS[vcs][method] = f
+        HANDLERS.setdefault(vcs, {})[method] = f
         return f
     return decorate
 
@@ -385,17 +432,25 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
                 env=None):
     """Call the given command(s)."""
     assert isinstance(commands, list)
-    p = None
-    for c in commands:
+    process = None
+
+    popen_kwargs = {}
+    if sys.platform == "win32":
+        # This hides the console window if pythonw.exe is used
+        startupinfo = subprocess.STARTUPINFO()
+        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
+        popen_kwargs["startupinfo"] = startupinfo
+
+    for command in commands:
         try:
-            dispcmd = str([c] + args)
+            dispcmd = str([command] + args)
             # remember shell=False, so use git.cmd on windows, not just git
-            p = subprocess.Popen([c] + args, cwd=cwd, env=env,
-                                 stdout=subprocess.PIPE,
-                                 stderr=(subprocess.PIPE if hide_stderr
-                                         else None))
+            process = subprocess.Popen([command] + args, cwd=cwd, env=env,
+                                       stdout=subprocess.PIPE,
+                                       stderr=(subprocess.PIPE if hide_stderr
+                                               else None), **popen_kwargs)
             break
-        except EnvironmentError:
+        except OSError:
             e = sys.exc_info()[1]
             if e.errno == errno.ENOENT:
                 continue
@@ -407,26 +462,25 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
         if verbose:
             print("unable to find command, tried %s" % (commands,))
         return None, None
-    stdout = p.communicate()[0].strip()
-    if sys.version_info[0] >= 3:
-        stdout = stdout.decode()
-    if p.returncode != 0:
+    stdout = process.communicate()[0].strip().decode()
+    if process.returncode != 0:
         if verbose:
             print("unable to run %s (error)" % dispcmd)
             print("stdout was %s" % stdout)
-        return None, p.returncode
-    return stdout, p.returncode
+        return None, process.returncode
+    return stdout, process.returncode
 
 
-LONG_VERSION_PY['git'] = '''
+LONG_VERSION_PY['git'] = r'''
 # This file helps to compute a version number in source trees obtained from
 # git-archive tarball (such as those provided by githubs download-from-tag
 # feature). Distribution tarballs (built by setup.py sdist) and build
 # directories (produced by setup.py build) will contain a much shorter file
 # that just contains the computed version number.
 
-# This file is released into the public domain. Generated by
-# versioneer-0.18 (https://github.com/warner/python-versioneer)
+# This file is released into the public domain.
+# Generated by versioneer-0.28
+# https://github.com/python-versioneer/python-versioneer
 
 """Git implementation of _version.py."""
 
@@ -435,6 +489,8 @@ import os
 import re
 import subprocess
 import sys
+from typing import Callable, Dict
+import functools
 
 
 def get_keywords():
@@ -472,12 +528,12 @@ class NotThisMethod(Exception):
     """Exception raised if a method is not valid for the current scenario."""
 
 
-LONG_VERSION_PY = {}
-HANDLERS = {}
+LONG_VERSION_PY: Dict[str, str] = {}
+HANDLERS: Dict[str, Dict[str, Callable]] = {}
 
 
 def register_vcs_handler(vcs, method):  # decorator
-    """Decorator to mark a method as the handler for a particular VCS."""
+    """Create decorator to mark a method as the handler of a VCS."""
     def decorate(f):
         """Store f in HANDLERS[vcs][method]."""
         if vcs not in HANDLERS:
@@ -491,17 +547,25 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
                 env=None):
     """Call the given command(s)."""
     assert isinstance(commands, list)
-    p = None
-    for c in commands:
+    process = None
+
+    popen_kwargs = {}
+    if sys.platform == "win32":
+        # This hides the console window if pythonw.exe is used
+        startupinfo = subprocess.STARTUPINFO()
+        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
+        popen_kwargs["startupinfo"] = startupinfo
+
+    for command in commands:
         try:
-            dispcmd = str([c] + args)
+            dispcmd = str([command] + args)
             # remember shell=False, so use git.cmd on windows, not just git
-            p = subprocess.Popen([c] + args, cwd=cwd, env=env,
-                                 stdout=subprocess.PIPE,
-                                 stderr=(subprocess.PIPE if hide_stderr
-                                         else None))
+            process = subprocess.Popen([command] + args, cwd=cwd, env=env,
+                                       stdout=subprocess.PIPE,
+                                       stderr=(subprocess.PIPE if hide_stderr
+                                               else None), **popen_kwargs)
             break
-        except EnvironmentError:
+        except OSError:
             e = sys.exc_info()[1]
             if e.errno == errno.ENOENT:
                 continue
@@ -513,15 +577,13 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
         if verbose:
             print("unable to find command, tried %%s" %% (commands,))
         return None, None
-    stdout = p.communicate()[0].strip()
-    if sys.version_info[0] >= 3:
-        stdout = stdout.decode()
-    if p.returncode != 0:
+    stdout = process.communicate()[0].strip().decode()
+    if process.returncode != 0:
         if verbose:
             print("unable to run %%s (error)" %% dispcmd)
             print("stdout was %%s" %% stdout)
-        return None, p.returncode
-    return stdout, p.returncode
+        return None, process.returncode
+    return stdout, process.returncode
 
 
 def versions_from_parentdir(parentdir_prefix, root, verbose):
@@ -533,15 +595,14 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
     """
     rootdirs = []
 
-    for i in range(3):
+    for _ in range(3):
         dirname = os.path.basename(root)
         if dirname.startswith(parentdir_prefix):
             return {"version": dirname[len(parentdir_prefix):],
                     "full-revisionid": None,
                     "dirty": False, "error": None, "date": None}
-        else:
-            rootdirs.append(root)
-            root = os.path.dirname(root)  # up a level
+        rootdirs.append(root)
+        root = os.path.dirname(root)  # up a level
 
     if verbose:
         print("Tried directories %%s but none started with prefix %%s" %%
@@ -558,22 +619,21 @@ def git_get_keywords(versionfile_abs):
     # _version.py.
     keywords = {}
     try:
-        f = open(versionfile_abs, "r")
-        for line in f.readlines():
-            if line.strip().startswith("git_refnames ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["refnames"] = mo.group(1)
-            if line.strip().startswith("git_full ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["full"] = mo.group(1)
-            if line.strip().startswith("git_date ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["date"] = mo.group(1)
-        f.close()
-    except EnvironmentError:
+        with open(versionfile_abs, "r") as fobj:
+            for line in fobj:
+                if line.strip().startswith("git_refnames ="):
+                    mo = re.search(r'=\s*"(.*)"', line)
+                    if mo:
+                        keywords["refnames"] = mo.group(1)
+                if line.strip().startswith("git_full ="):
+                    mo = re.search(r'=\s*"(.*)"', line)
+                    if mo:
+                        keywords["full"] = mo.group(1)
+                if line.strip().startswith("git_date ="):
+                    mo = re.search(r'=\s*"(.*)"', line)
+                    if mo:
+                        keywords["date"] = mo.group(1)
+    except OSError:
         pass
     return keywords
 
@@ -581,10 +641,14 @@ def git_get_keywords(versionfile_abs):
 @register_vcs_handler("git", "keywords")
 def git_versions_from_keywords(keywords, tag_prefix, verbose):
     """Get version information from git keywords."""
-    if not keywords:
-        raise NotThisMethod("no keywords at all, weird")
+    if "refnames" not in keywords:
+        raise NotThisMethod("Short version file found")
     date = keywords.get("date")
     if date is not None:
+        # Use only the last line.  Previous lines may contain GPG signature
+        # information.
+        date = date.splitlines()[-1]
+
         # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant
         # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601
         # -like" string, which we must then edit to make compliant), because
@@ -597,11 +661,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
         if verbose:
             print("keywords are unexpanded, not using")
         raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
-    refs = set([r.strip() for r in refnames.strip("()").split(",")])
+    refs = {r.strip() for r in refnames.strip("()").split(",")}
     # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
     # just "foo-1.0". If we see a "tag: " prefix, prefer those.
     TAG = "tag: "
-    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
+    tags = {r[len(TAG):] for r in refs if r.startswith(TAG)}
     if not tags:
         # Either we're using git < 1.8.3, or there really are no tags. We use
         # a heuristic: assume all version tags have a digit. The old git %%d
@@ -610,7 +674,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
         # between branches and tags. By ignoring refnames without digits, we
         # filter out many common branch names like "release" and
         # "stabilization", as well as "HEAD" and "master".
-        tags = set([r for r in refs if re.search(r'\d', r)])
+        tags = {r for r in refs if re.search(r'\d', r)}
         if verbose:
             print("discarding '%%s', no digits" %% ",".join(refs - tags))
     if verbose:
@@ -619,6 +683,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
         # sorting will prefer e.g. "2.0" over "2.0rc1"
         if ref.startswith(tag_prefix):
             r = ref[len(tag_prefix):]
+            # Filter out refs that exactly match prefix or that don't start
+            # with a number once the prefix is stripped (mostly a concern
+            # when prefix is '')
+            if not re.match(r'\d', r):
+                continue
             if verbose:
                 print("picking %%s" %% r)
             return {"version": r,
@@ -634,7 +703,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
 
 
 @register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
+def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command):
     """Get version from 'git describe' in the root of the source tree.
 
     This only gets called if the git-archive 'subst' keywords were *not*
@@ -645,8 +714,15 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
     if sys.platform == "win32":
         GITS = ["git.cmd", "git.exe"]
 
-    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
-                          hide_stderr=True)
+    # GIT_DIR can interfere with correct operation of Versioneer.
+    # It may be intended to be passed to the Versioneer-versioned project,
+    # but that should not change where we get our version from.
+    env = os.environ.copy()
+    env.pop("GIT_DIR", None)
+    runner = functools.partial(runner, env=env)
+
+    _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root,
+                   hide_stderr=not verbose)
     if rc != 0:
         if verbose:
             print("Directory %%s not under git control" %% root)
@@ -654,15 +730,15 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
 
     # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
     # if there isn't one, this yields HEX[-dirty] (no NUM)
-    describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
-                                          "--always", "--long",
-                                          "--match", "%%s*" %% tag_prefix],
-                                   cwd=root)
+    describe_out, rc = runner(GITS, [
+        "describe", "--tags", "--dirty", "--always", "--long",
+        "--match", f"{tag_prefix}[[:digit:]]*"
+    ], cwd=root)
     # --long was added in git-1.5.5
     if describe_out is None:
         raise NotThisMethod("'git describe' failed")
     describe_out = describe_out.strip()
-    full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
+    full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
     if full_out is None:
         raise NotThisMethod("'git rev-parse' failed")
     full_out = full_out.strip()
@@ -672,6 +748,39 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
     pieces["short"] = full_out[:7]  # maybe improved later
     pieces["error"] = None
 
+    branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"],
+                             cwd=root)
+    # --abbrev-ref was added in git-1.6.3
+    if rc != 0 or branch_name is None:
+        raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
+    branch_name = branch_name.strip()
+
+    if branch_name == "HEAD":
+        # If we aren't exactly on a branch, pick a branch which represents
+        # the current commit. If all else fails, we are on a branchless
+        # commit.
+        branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
+        # --contains was added in git-1.5.4
+        if rc != 0 or branches is None:
+            raise NotThisMethod("'git branch --contains' returned error")
+        branches = branches.split("\n")
+
+        # Remove the first line if we're running detached
+        if "(" in branches[0]:
+            branches.pop(0)
+
+        # Strip off the leading "* " from the list of branches.
+        branches = [branch[2:] for branch in branches]
+        if "master" in branches:
+            branch_name = "master"
+        elif not branches:
+            branch_name = None
+        else:
+            # Pick the first branch that is returned. Good or bad.
+            branch_name = branches[0]
+
+    pieces["branch"] = branch_name
+
     # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
     # TAG might have hyphens.
     git_describe = describe_out
@@ -688,7 +797,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
         # TAG-NUM-gHEX
         mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
         if not mo:
-            # unparseable. Maybe git-describe is misbehaving?
+            # unparsable. Maybe git-describe is misbehaving?
             pieces["error"] = ("unable to parse git-describe output: '%%s'"
                                %% describe_out)
             return pieces
@@ -713,13 +822,14 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
     else:
         # HEX: no tags
         pieces["closest-tag"] = None
-        count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
-                                    cwd=root)
-        pieces["distance"] = int(count_out)  # total number of commits
+        out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root)
+        pieces["distance"] = len(out.split())  # total number of commits
 
     # commit date: see ISO-8601 comment in git_versions_from_keywords()
-    date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"],
-                       cwd=root)[0].strip()
+    date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip()
+    # Use only the last line.  Previous lines may contain GPG signature
+    # information.
+    date = date.splitlines()[-1]
     pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
 
     return pieces
@@ -757,19 +867,67 @@ def render_pep440(pieces):
     return rendered
 
 
-def render_pep440_pre(pieces):
-    """TAG[.post.devDISTANCE] -- No -dirty.
+def render_pep440_branch(pieces):
+    """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .
+
+    The ".dev0" means not master branch. Note that .dev0 sorts backwards
+    (a feature branch will appear "older" than the master branch).
 
     Exceptions:
-    1: no tags. 0.post.devDISTANCE
+    1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
     """
     if pieces["closest-tag"]:
         rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            if pieces["branch"] != "master":
+                rendered += ".dev0"
+            rendered += plus_or_dot(pieces)
+            rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
+            if pieces["dirty"]:
+                rendered += ".dirty"
+    else:
+        # exception #1
+        rendered = "0"
+        if pieces["branch"] != "master":
+            rendered += ".dev0"
+        rendered += "+untagged.%%d.g%%s" %% (pieces["distance"],
+                                          pieces["short"])
+        if pieces["dirty"]:
+            rendered += ".dirty"
+    return rendered
+
+
+def pep440_split_post(ver):
+    """Split pep440 version string at the post-release segment.
+
+    Returns the release segments before the post-release and the
+    post-release version number (or -1 if no post-release segment is present).
+    """
+    vc = str.split(ver, ".post")
+    return vc[0], int(vc[1] or 0) if len(vc) == 2 else None
+
+
+def render_pep440_pre(pieces):
+    """TAG[.postN.devDISTANCE] -- No -dirty.
+
+    Exceptions:
+    1: no tags. 0.post0.devDISTANCE
+    """
+    if pieces["closest-tag"]:
         if pieces["distance"]:
-            rendered += ".post.dev%%d" %% pieces["distance"]
+            # update the post release segment
+            tag_version, post_version = pep440_split_post(pieces["closest-tag"])
+            rendered = tag_version
+            if post_version is not None:
+                rendered += ".post%%d.dev%%d" %% (post_version + 1, pieces["distance"])
+            else:
+                rendered += ".post0.dev%%d" %% (pieces["distance"])
+        else:
+            # no commits, use the tag as the version
+            rendered = pieces["closest-tag"]
     else:
         # exception #1
-        rendered = "0.post.dev%%d" %% pieces["distance"]
+        rendered = "0.post0.dev%%d" %% pieces["distance"]
     return rendered
 
 
@@ -800,12 +958,41 @@ def render_pep440_post(pieces):
     return rendered
 
 
+def render_pep440_post_branch(pieces):
+    """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .
+
+    The ".dev0" means not master branch.
+
+    Exceptions:
+    1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += ".post%%d" %% pieces["distance"]
+            if pieces["branch"] != "master":
+                rendered += ".dev0"
+            rendered += plus_or_dot(pieces)
+            rendered += "g%%s" %% pieces["short"]
+            if pieces["dirty"]:
+                rendered += ".dirty"
+    else:
+        # exception #1
+        rendered = "0.post%%d" %% pieces["distance"]
+        if pieces["branch"] != "master":
+            rendered += ".dev0"
+        rendered += "+g%%s" %% pieces["short"]
+        if pieces["dirty"]:
+            rendered += ".dirty"
+    return rendered
+
+
 def render_pep440_old(pieces):
     """TAG[.postDISTANCE[.dev0]] .
 
     The ".dev0" means dirty.
 
-    Eexceptions:
+    Exceptions:
     1: no tags. 0.postDISTANCE[.dev0]
     """
     if pieces["closest-tag"]:
@@ -876,10 +1063,14 @@ def render(pieces, style):
 
     if style == "pep440":
         rendered = render_pep440(pieces)
+    elif style == "pep440-branch":
+        rendered = render_pep440_branch(pieces)
     elif style == "pep440-pre":
         rendered = render_pep440_pre(pieces)
     elif style == "pep440-post":
         rendered = render_pep440_post(pieces)
+    elif style == "pep440-post-branch":
+        rendered = render_pep440_post_branch(pieces)
     elif style == "pep440-old":
         rendered = render_pep440_old(pieces)
     elif style == "git-describe":
@@ -915,7 +1106,7 @@ def get_versions():
         # versionfile_source is the relative path from the top of the source
         # tree (where the .git directory might live) to this file. Invert
         # this to find the root from __file__.
-        for i in cfg.versionfile_source.split('/'):
+        for _ in cfg.versionfile_source.split('/'):
             root = os.path.dirname(root)
     except NameError:
         return {"version": "0+unknown", "full-revisionid": None,
@@ -950,22 +1141,21 @@ def git_get_keywords(versionfile_abs):
     # _version.py.
     keywords = {}
     try:
-        f = open(versionfile_abs, "r")
-        for line in f.readlines():
-            if line.strip().startswith("git_refnames ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["refnames"] = mo.group(1)
-            if line.strip().startswith("git_full ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["full"] = mo.group(1)
-            if line.strip().startswith("git_date ="):
-                mo = re.search(r'=\s*"(.*)"', line)
-                if mo:
-                    keywords["date"] = mo.group(1)
-        f.close()
-    except EnvironmentError:
+        with open(versionfile_abs, "r") as fobj:
+            for line in fobj:
+                if line.strip().startswith("git_refnames ="):
+                    mo = re.search(r'=\s*"(.*)"', line)
+                    if mo:
+                        keywords["refnames"] = mo.group(1)
+                if line.strip().startswith("git_full ="):
+                    mo = re.search(r'=\s*"(.*)"', line)
+                    if mo:
+                        keywords["full"] = mo.group(1)
+                if line.strip().startswith("git_date ="):
+                    mo = re.search(r'=\s*"(.*)"', line)
+                    if mo:
+                        keywords["date"] = mo.group(1)
+    except OSError:
         pass
     return keywords
 
@@ -973,10 +1163,14 @@ def git_get_keywords(versionfile_abs):
 @register_vcs_handler("git", "keywords")
 def git_versions_from_keywords(keywords, tag_prefix, verbose):
     """Get version information from git keywords."""
-    if not keywords:
-        raise NotThisMethod("no keywords at all, weird")
+    if "refnames" not in keywords:
+        raise NotThisMethod("Short version file found")
     date = keywords.get("date")
     if date is not None:
+        # Use only the last line.  Previous lines may contain GPG signature
+        # information.
+        date = date.splitlines()[-1]
+
         # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
         # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
         # -like" string, which we must then edit to make compliant), because
@@ -989,11 +1183,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
         if verbose:
             print("keywords are unexpanded, not using")
         raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
-    refs = set([r.strip() for r in refnames.strip("()").split(",")])
+    refs = {r.strip() for r in refnames.strip("()").split(",")}
     # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
     # just "foo-1.0". If we see a "tag: " prefix, prefer those.
     TAG = "tag: "
-    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
+    tags = {r[len(TAG):] for r in refs if r.startswith(TAG)}
     if not tags:
         # Either we're using git < 1.8.3, or there really are no tags. We use
         # a heuristic: assume all version tags have a digit. The old git %d
@@ -1002,7 +1196,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
         # between branches and tags. By ignoring refnames without digits, we
         # filter out many common branch names like "release" and
         # "stabilization", as well as "HEAD" and "master".
-        tags = set([r for r in refs if re.search(r'\d', r)])
+        tags = {r for r in refs if re.search(r'\d', r)}
         if verbose:
             print("discarding '%s', no digits" % ",".join(refs - tags))
     if verbose:
@@ -1011,6 +1205,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
         # sorting will prefer e.g. "2.0" over "2.0rc1"
         if ref.startswith(tag_prefix):
             r = ref[len(tag_prefix):]
+            # Filter out refs that exactly match prefix or that don't start
+            # with a number once the prefix is stripped (mostly a concern
+            # when prefix is '')
+            if not re.match(r'\d', r):
+                continue
             if verbose:
                 print("picking %s" % r)
             return {"version": r,
@@ -1026,7 +1225,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose):
 
 
 @register_vcs_handler("git", "pieces_from_vcs")
-def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
+def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command):
     """Get version from 'git describe' in the root of the source tree.
 
     This only gets called if the git-archive 'subst' keywords were *not*
@@ -1037,8 +1236,15 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
     if sys.platform == "win32":
         GITS = ["git.cmd", "git.exe"]
 
-    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
-                          hide_stderr=True)
+    # GIT_DIR can interfere with correct operation of Versioneer.
+    # It may be intended to be passed to the Versioneer-versioned project,
+    # but that should not change where we get our version from.
+    env = os.environ.copy()
+    env.pop("GIT_DIR", None)
+    runner = functools.partial(runner, env=env)
+
+    _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root,
+                   hide_stderr=not verbose)
     if rc != 0:
         if verbose:
             print("Directory %s not under git control" % root)
@@ -1046,15 +1252,15 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
 
     # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
     # if there isn't one, this yields HEX[-dirty] (no NUM)
-    describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
-                                          "--always", "--long",
-                                          "--match", "%s*" % tag_prefix],
-                                   cwd=root)
+    describe_out, rc = runner(GITS, [
+        "describe", "--tags", "--dirty", "--always", "--long",
+        "--match", f"{tag_prefix}[[:digit:]]*"
+    ], cwd=root)
     # --long was added in git-1.5.5
     if describe_out is None:
         raise NotThisMethod("'git describe' failed")
     describe_out = describe_out.strip()
-    full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
+    full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
     if full_out is None:
         raise NotThisMethod("'git rev-parse' failed")
     full_out = full_out.strip()
@@ -1064,6 +1270,39 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
     pieces["short"] = full_out[:7]  # maybe improved later
     pieces["error"] = None
 
+    branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"],
+                             cwd=root)
+    # --abbrev-ref was added in git-1.6.3
+    if rc != 0 or branch_name is None:
+        raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
+    branch_name = branch_name.strip()
+
+    if branch_name == "HEAD":
+        # If we aren't exactly on a branch, pick a branch which represents
+        # the current commit. If all else fails, we are on a branchless
+        # commit.
+        branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
+        # --contains was added in git-1.5.4
+        if rc != 0 or branches is None:
+            raise NotThisMethod("'git branch --contains' returned error")
+        branches = branches.split("\n")
+
+        # Remove the first line if we're running detached
+        if "(" in branches[0]:
+            branches.pop(0)
+
+        # Strip off the leading "* " from the list of branches.
+        branches = [branch[2:] for branch in branches]
+        if "master" in branches:
+            branch_name = "master"
+        elif not branches:
+            branch_name = None
+        else:
+            # Pick the first branch that is returned. Good or bad.
+            branch_name = branches[0]
+
+    pieces["branch"] = branch_name
+
     # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
     # TAG might have hyphens.
     git_describe = describe_out
@@ -1080,7 +1319,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
         # TAG-NUM-gHEX
         mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
         if not mo:
-            # unparseable. Maybe git-describe is misbehaving?
+            # unparsable. Maybe git-describe is misbehaving?
             pieces["error"] = ("unable to parse git-describe output: '%s'"
                                % describe_out)
             return pieces
@@ -1105,19 +1344,20 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
     else:
         # HEX: no tags
         pieces["closest-tag"] = None
-        count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
-                                    cwd=root)
-        pieces["distance"] = int(count_out)  # total number of commits
+        out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root)
+        pieces["distance"] = len(out.split())  # total number of commits
 
     # commit date: see ISO-8601 comment in git_versions_from_keywords()
-    date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
-                       cwd=root)[0].strip()
+    date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip()
+    # Use only the last line.  Previous lines may contain GPG signature
+    # information.
+    date = date.splitlines()[-1]
     pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
 
     return pieces
 
 
-def do_vcs_install(manifest_in, versionfile_source, ipy):
+def do_vcs_install(versionfile_source, ipy):
     """Git-specific installation logic for Versioneer.
 
     For Git, this means creating/changing .gitattributes to mark _version.py
@@ -1126,31 +1366,31 @@ def do_vcs_install(manifest_in, versionfile_source, ipy):
     GITS = ["git"]
     if sys.platform == "win32":
         GITS = ["git.cmd", "git.exe"]
-    files = [manifest_in, versionfile_source]
+    files = [versionfile_source]
     if ipy:
         files.append(ipy)
-    try:
-        me = __file__
-        if me.endswith(".pyc") or me.endswith(".pyo"):
-            me = os.path.splitext(me)[0] + ".py"
-        versioneer_file = os.path.relpath(me)
-    except NameError:
-        versioneer_file = "versioneer.py"
-    files.append(versioneer_file)
+    if "VERSIONEER_PEP518" not in globals():
+        try:
+            my_path = __file__
+            if my_path.endswith((".pyc", ".pyo")):
+                my_path = os.path.splitext(my_path)[0] + ".py"
+            versioneer_file = os.path.relpath(my_path)
+        except NameError:
+            versioneer_file = "versioneer.py"
+        files.append(versioneer_file)
     present = False
     try:
-        f = open(".gitattributes", "r")
-        for line in f.readlines():
-            if line.strip().startswith(versionfile_source):
-                if "export-subst" in line.strip().split()[1:]:
-                    present = True
-        f.close()
-    except EnvironmentError:
+        with open(".gitattributes", "r") as fobj:
+            for line in fobj:
+                if line.strip().startswith(versionfile_source):
+                    if "export-subst" in line.strip().split()[1:]:
+                        present = True
+                        break
+    except OSError:
         pass
     if not present:
-        f = open(".gitattributes", "a+")
-        f.write("%s export-subst\n" % versionfile_source)
-        f.close()
+        with open(".gitattributes", "a+") as fobj:
+            fobj.write(f"{versionfile_source} export-subst\n")
         files.append(".gitattributes")
     run_command(GITS, ["add", "--"] + files)
 
@@ -1164,15 +1404,14 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
     """
     rootdirs = []
 
-    for i in range(3):
+    for _ in range(3):
         dirname = os.path.basename(root)
         if dirname.startswith(parentdir_prefix):
             return {"version": dirname[len(parentdir_prefix):],
                     "full-revisionid": None,
                     "dirty": False, "error": None, "date": None}
-        else:
-            rootdirs.append(root)
-            root = os.path.dirname(root)  # up a level
+        rootdirs.append(root)
+        root = os.path.dirname(root)  # up a level
 
     if verbose:
         print("Tried directories %s but none started with prefix %s" %
@@ -1181,7 +1420,7 @@ def versions_from_parentdir(parentdir_prefix, root, verbose):
 
 
 SHORT_VERSION_PY = """
-# This file was generated by 'versioneer.py' (0.18) from
+# This file was generated by 'versioneer.py' (0.28) from
 # revision-control system data, or from the parent directory name of an
 # unpacked source archive. Distribution tarballs contain a pre-generated copy
 # of this file.
@@ -1203,7 +1442,7 @@ def versions_from_file(filename):
     try:
         with open(filename) as f:
             contents = f.read()
-    except EnvironmentError:
+    except OSError:
         raise NotThisMethod("unable to read _version.py")
     mo = re.search(r"version_json = '''\n(.*)'''  # END VERSION_JSON",
                    contents, re.M | re.S)
@@ -1258,19 +1497,67 @@ def render_pep440(pieces):
     return rendered
 
 
-def render_pep440_pre(pieces):
-    """TAG[.post.devDISTANCE] -- No -dirty.
+def render_pep440_branch(pieces):
+    """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .
+
+    The ".dev0" means not master branch. Note that .dev0 sorts backwards
+    (a feature branch will appear "older" than the master branch).
 
     Exceptions:
-    1: no tags. 0.post.devDISTANCE
+    1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
     """
     if pieces["closest-tag"]:
         rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            if pieces["branch"] != "master":
+                rendered += ".dev0"
+            rendered += plus_or_dot(pieces)
+            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
+            if pieces["dirty"]:
+                rendered += ".dirty"
+    else:
+        # exception #1
+        rendered = "0"
+        if pieces["branch"] != "master":
+            rendered += ".dev0"
+        rendered += "+untagged.%d.g%s" % (pieces["distance"],
+                                          pieces["short"])
+        if pieces["dirty"]:
+            rendered += ".dirty"
+    return rendered
+
+
+def pep440_split_post(ver):
+    """Split pep440 version string at the post-release segment.
+
+    Returns the release segments before the post-release and the
+    post-release version number (or -1 if no post-release segment is present).
+    """
+    vc = str.split(ver, ".post")
+    return vc[0], int(vc[1] or 0) if len(vc) == 2 else None
+
+
+def render_pep440_pre(pieces):
+    """TAG[.postN.devDISTANCE] -- No -dirty.
+
+    Exceptions:
+    1: no tags. 0.post0.devDISTANCE
+    """
+    if pieces["closest-tag"]:
         if pieces["distance"]:
-            rendered += ".post.dev%d" % pieces["distance"]
+            # update the post release segment
+            tag_version, post_version = pep440_split_post(pieces["closest-tag"])
+            rendered = tag_version
+            if post_version is not None:
+                rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"])
+            else:
+                rendered += ".post0.dev%d" % (pieces["distance"])
+        else:
+            # no commits, use the tag as the version
+            rendered = pieces["closest-tag"]
     else:
         # exception #1
-        rendered = "0.post.dev%d" % pieces["distance"]
+        rendered = "0.post0.dev%d" % pieces["distance"]
     return rendered
 
 
@@ -1301,12 +1588,41 @@ def render_pep440_post(pieces):
     return rendered
 
 
+def render_pep440_post_branch(pieces):
+    """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .
+
+    The ".dev0" means not master branch.
+
+    Exceptions:
+    1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += ".post%d" % pieces["distance"]
+            if pieces["branch"] != "master":
+                rendered += ".dev0"
+            rendered += plus_or_dot(pieces)
+            rendered += "g%s" % pieces["short"]
+            if pieces["dirty"]:
+                rendered += ".dirty"
+    else:
+        # exception #1
+        rendered = "0.post%d" % pieces["distance"]
+        if pieces["branch"] != "master":
+            rendered += ".dev0"
+        rendered += "+g%s" % pieces["short"]
+        if pieces["dirty"]:
+            rendered += ".dirty"
+    return rendered
+
+
 def render_pep440_old(pieces):
     """TAG[.postDISTANCE[.dev0]] .
 
     The ".dev0" means dirty.
 
-    Eexceptions:
+    Exceptions:
     1: no tags. 0.postDISTANCE[.dev0]
     """
     if pieces["closest-tag"]:
@@ -1377,10 +1693,14 @@ def render(pieces, style):
 
     if style == "pep440":
         rendered = render_pep440(pieces)
+    elif style == "pep440-branch":
+        rendered = render_pep440_branch(pieces)
     elif style == "pep440-pre":
         rendered = render_pep440_pre(pieces)
     elif style == "pep440-post":
         rendered = render_pep440_post(pieces)
+    elif style == "pep440-post-branch":
+        rendered = render_pep440_post_branch(pieces)
     elif style == "pep440-old":
         rendered = render_pep440_old(pieces)
     elif style == "git-describe":
@@ -1480,8 +1800,12 @@ def get_version():
     return get_versions()["version"]
 
 
-def get_cmdclass():
-    """Get the custom setuptools/distutils subclasses used by Versioneer."""
+def get_cmdclass(cmdclass=None):
+    """Get the custom setuptools subclasses used by Versioneer.
+
+    If the package uses a different cmdclass (e.g. one from numpy), it
+    should be provide as an argument.
+    """
     if "versioneer" in sys.modules:
         del sys.modules["versioneer"]
         # this fixes the "python setup.py develop" case (also 'install' and
@@ -1495,12 +1819,12 @@ def get_cmdclass():
         # parent is protected against the child's "import versioneer". By
         # removing ourselves from sys.modules here, before the child build
         # happens, we protect the child from the parent's versioneer too.
-        # Also see https://github.com/warner/python-versioneer/issues/52
+        # Also see https://github.com/python-versioneer/python-versioneer/issues/52
 
-    cmds = {}
+    cmds = {} if cmdclass is None else cmdclass.copy()
 
-    # we add "version" to both distutils and setuptools
-    from distutils.core import Command
+    # we add "version" to setuptools
+    from setuptools import Command
 
     class cmd_version(Command):
         description = "report generated version string"
@@ -1523,7 +1847,7 @@ def get_cmdclass():
                 print(" error: %s" % vers["error"])
     cmds["version"] = cmd_version
 
-    # we override "build_py" in both distutils and setuptools
+    # we override "build_py" in setuptools
     #
     # most invocation pathways end up running build_py:
     #  distutils/build -> build_py
@@ -1538,11 +1862,14 @@ def get_cmdclass():
     #   then does setup.py bdist_wheel, or sometimes setup.py install
     #  setup.py egg_info -> ?
 
+    # pip install -e . and setuptool/editable_wheel will invoke build_py
+    # but the build_py command is not expected to copy any files.
+
     # we override different "build_py" commands for both environments
-    if "setuptools" in sys.modules:
-        from setuptools.command.build_py import build_py as _build_py
+    if 'build_py' in cmds:
+        _build_py = cmds['build_py']
     else:
-        from distutils.command.build_py import build_py as _build_py
+        from setuptools.command.build_py import build_py as _build_py
 
     class cmd_build_py(_build_py):
         def run(self):
@@ -1550,6 +1877,10 @@ def get_cmdclass():
             cfg = get_config_from_root(root)
             versions = get_versions()
             _build_py.run(self)
+            if getattr(self, "editable_mode", False):
+                # During editable installs `.py` and data files are
+                # not copied to build_lib
+                return
             # now locate _version.py in the new build/ directory and replace
             # it with an updated value
             if cfg.versionfile_build:
@@ -1559,6 +1890,38 @@ def get_cmdclass():
                 write_to_version_file(target_versionfile, versions)
     cmds["build_py"] = cmd_build_py
 
+    if 'build_ext' in cmds:
+        _build_ext = cmds['build_ext']
+    else:
+        from setuptools.command.build_ext import build_ext as _build_ext
+
+    class cmd_build_ext(_build_ext):
+        def run(self):
+            root = get_root()
+            cfg = get_config_from_root(root)
+            versions = get_versions()
+            _build_ext.run(self)
+            if self.inplace:
+                # build_ext --inplace will only build extensions in
+                # build/lib<..> dir with no _version.py to write to.
+                # As in place builds will already have a _version.py
+                # in the module dir, we do not need to write one.
+                return
+            # now locate _version.py in the new build/ directory and replace
+            # it with an updated value
+            if not cfg.versionfile_build:
+                return
+            target_versionfile = os.path.join(self.build_lib,
+                                              cfg.versionfile_build)
+            if not os.path.exists(target_versionfile):
+                print(f"Warning: {target_versionfile} does not exist, skipping "
+                      "version update. This can happen if you are running build_ext "
+                      "without first running build_py.")
+                return
+            print("UPDATING %s" % target_versionfile)
+            write_to_version_file(target_versionfile, versions)
+    cmds["build_ext"] = cmd_build_ext
+
     if "cx_Freeze" in sys.modules:  # cx_freeze enabled?
         from cx_Freeze.dist import build_exe as _build_exe
         # nczeczulin reports that py2exe won't like the pep440-style string
@@ -1593,9 +1956,9 @@ def get_cmdclass():
 
     if 'py2exe' in sys.modules:  # py2exe enabled?
         try:
-            from py2exe.distutils_buildexe import py2exe as _py2exe  # py3
+            from py2exe.setuptools_buildexe import py2exe as _py2exe
         except ImportError:
-            from py2exe.build_exe import py2exe as _py2exe  # py2
+            from py2exe.distutils_buildexe import py2exe as _py2exe
 
         class cmd_py2exe(_py2exe):
             def run(self):
@@ -1619,11 +1982,48 @@ def get_cmdclass():
                              })
         cmds["py2exe"] = cmd_py2exe
 
+    # sdist farms its file list building out to egg_info
+    if 'egg_info' in cmds:
+        _egg_info = cmds['egg_info']
+    else:
+        from setuptools.command.egg_info import egg_info as _egg_info
+
+    class cmd_egg_info(_egg_info):
+        def find_sources(self):
+            # egg_info.find_sources builds the manifest list and writes it
+            # in one shot
+            super().find_sources()
+
+            # Modify the filelist and normalize it
+            root = get_root()
+            cfg = get_config_from_root(root)
+            self.filelist.append('versioneer.py')
+            if cfg.versionfile_source:
+                # There are rare cases where versionfile_source might not be
+                # included by default, so we must be explicit
+                self.filelist.append(cfg.versionfile_source)
+            self.filelist.sort()
+            self.filelist.remove_duplicates()
+
+            # The write method is hidden in the manifest_maker instance that
+            # generated the filelist and was thrown away
+            # We will instead replicate their final normalization (to unicode,
+            # and POSIX-style paths)
+            from setuptools import unicode_utils
+            normalized = [unicode_utils.filesys_decode(f).replace(os.sep, '/')
+                          for f in self.filelist.files]
+
+            manifest_filename = os.path.join(self.egg_info, 'SOURCES.txt')
+            with open(manifest_filename, 'w') as fobj:
+                fobj.write('\n'.join(normalized))
+
+    cmds['egg_info'] = cmd_egg_info
+
     # we override different "sdist" commands for both environments
-    if "setuptools" in sys.modules:
-        from setuptools.command.sdist import sdist as _sdist
+    if 'sdist' in cmds:
+        _sdist = cmds['sdist']
     else:
-        from distutils.command.sdist import sdist as _sdist
+        from setuptools.command.sdist import sdist as _sdist
 
     class cmd_sdist(_sdist):
         def run(self):
@@ -1687,21 +2087,26 @@ SAMPLE_CONFIG = """
 
 """
 
-INIT_PY_SNIPPET = """
+OLD_SNIPPET = """
 from ._version import get_versions
 __version__ = get_versions()['version']
 del get_versions
 """
 
+INIT_PY_SNIPPET = """
+from . import {0}
+__version__ = {0}.get_versions()['version']
+"""
+
 
 def do_setup():
-    """Main VCS-independent setup function for installing Versioneer."""
+    """Do main VCS-independent setup function for installing Versioneer."""
     root = get_root()
     try:
         cfg = get_config_from_root(root)
-    except (EnvironmentError, configparser.NoSectionError,
+    except (OSError, configparser.NoSectionError,
             configparser.NoOptionError) as e:
-        if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
+        if isinstance(e, (OSError, configparser.NoSectionError)):
             print("Adding sample versioneer config to setup.cfg",
                   file=sys.stderr)
             with open(os.path.join(root, "setup.cfg"), "a") as f:
@@ -1725,54 +2130,28 @@ def do_setup():
         try:
             with open(ipy, "r") as f:
                 old = f.read()
-        except EnvironmentError:
+        except OSError:
             old = ""
-        if INIT_PY_SNIPPET not in old:
+        module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0]
+        snippet = INIT_PY_SNIPPET.format(module)
+        if OLD_SNIPPET in old:
+            print(" replacing boilerplate in %s" % ipy)
+            with open(ipy, "w") as f:
+                f.write(old.replace(OLD_SNIPPET, snippet))
+        elif snippet not in old:
             print(" appending to %s" % ipy)
             with open(ipy, "a") as f:
-                f.write(INIT_PY_SNIPPET)
+                f.write(snippet)
         else:
             print(" %s unmodified" % ipy)
     else:
         print(" %s doesn't exist, ok" % ipy)
         ipy = None
 
-    # Make sure both the top-level "versioneer.py" and versionfile_source
-    # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
-    # they'll be copied into source distributions. Pip won't be able to
-    # install the package without this.
-    manifest_in = os.path.join(root, "MANIFEST.in")
-    simple_includes = set()
-    try:
-        with open(manifest_in, "r") as f:
-            for line in f:
-                if line.startswith("include "):
-                    for include in line.split()[1:]:
-                        simple_includes.add(include)
-    except EnvironmentError:
-        pass
-    # That doesn't cover everything MANIFEST.in can do
-    # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
-    # it might give some false negatives. Appending redundant 'include'
-    # lines is safe, though.
-    if "versioneer.py" not in simple_includes:
-        print(" appending 'versioneer.py' to MANIFEST.in")
-        with open(manifest_in, "a") as f:
-            f.write("include versioneer.py\n")
-    else:
-        print(" 'versioneer.py' already in MANIFEST.in")
-    if cfg.versionfile_source not in simple_includes:
-        print(" appending versionfile_source ('%s') to MANIFEST.in" %
-              cfg.versionfile_source)
-        with open(manifest_in, "a") as f:
-            f.write("include %s\n" % cfg.versionfile_source)
-    else:
-        print(" versionfile_source already in MANIFEST.in")
-
     # Make VCS-specific changes. For git, this means creating/changing
     # .gitattributes to mark _version.py for export-subst keyword
     # substitution.
-    do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
+    do_vcs_install(cfg.versionfile_source, ipy)
     return 0
 
 
@@ -1813,10 +2192,14 @@ def scan_setup_py():
     return errors
 
 
+def setup_command():
+    """Set up Versioneer and exit with appropriate error code."""
+    errors = do_setup()
+    errors += scan_setup_py()
+    sys.exit(1 if errors else 0)
+
+
 if __name__ == "__main__":
     cmd = sys.argv[1]
     if cmd == "setup":
-        errors = do_setup()
-        errors += scan_setup_py()
-        if errors:
-            sys.exit(1)
+        setup_command()



View it on GitLab: https://salsa.debian.org/debian-gis-team/pyorbital/-/commit/58c06b4102f3ff47d0eca92ac026b183113e5cfc

-- 
View it on GitLab: https://salsa.debian.org/debian-gis-team/pyorbital/-/commit/58c06b4102f3ff47d0eca92ac026b183113e5cfc
You're receiving this email because of your account on salsa.debian.org.


-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/pkg-grass-devel/attachments/20230714/8cfb8cfc/attachment-0001.htm>


More information about the Pkg-grass-devel mailing list