[med-svn] [Git][med-team/nitime][upstream] New upstream version 0.12.1

Étienne Mollier (@emollier) gitlab at salsa.debian.org
Sat Jan 3 16:11:17 GMT 2026



Étienne Mollier pushed to branch upstream at Debian Med / nitime


Commits:
14c8b1a5 by Étienne Mollier at 2026-01-03T15:42:11+01:00
New upstream version 0.12.1
- - - - -


28 changed files:

- .git_archival.txt
- + .github/dependabot.yml
- .github/workflows/codespell.yml
- .github/workflows/test.yml
- .github/workflows/wheels.yml
- .gitignore
- README.txt → README.rst
- THANKS
- doc/news.rst
- min-requirements.txt
- + nitime/_compat.py
- − nitime/_mpl_units.py
- nitime/algorithms/tests/test_autoregressive.py
- nitime/analysis/coherence.py
- nitime/analysis/correlation.py
- nitime/analysis/spectral.py
- + nitime/conftest.py
- nitime/index_utils.py
- nitime/lazyimports.py
- nitime/tests/test_algorithms.py
- nitime/tests/test_timeseries.py
- nitime/timeseries.py
- nitime/utils.py
- nitime/viz.py
- pyproject.toml
- requirements.txt
- setup.py
- + tools/audit_wheel.sh


Changes:

=====================================
.git_archival.txt
=====================================
@@ -1,4 +1,4 @@
-node: dd505830dd890b477414a0dad8f587235010bf9c
-node-date: 2024-06-14T07:59:20-07:00
-describe-name: 0.11
-ref-names: tag: 0.11
+node: 6c3b1a3460b207b3dab33044f487cd5f7ddf14e5
+node-date: 2025-11-06T14:31:12-05:00
+describe-name: 0.12.1
+ref-names: tag: 0.12.1


=====================================
.github/dependabot.yml
=====================================
@@ -0,0 +1,10 @@
+version: 2
+updates:
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "monthly"
+    groups:
+      actions:
+        patterns:
+          - "*"


=====================================
.github/workflows/codespell.yml
=====================================
@@ -17,6 +17,6 @@ jobs:
 
     steps:
       - name: Checkout
-        uses: actions/checkout at v3
+        uses: actions/checkout at v5
       - name: Codespell
         uses: codespell-project/actions-codespell at v2


=====================================
.github/workflows/test.yml
=====================================
@@ -23,17 +23,17 @@ jobs:
     strategy:
       matrix:
         # We test NumPy dev on 3.11
-        python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
+        python-version: ['3.10', '3.11', '3.12', '3.13', '3.14']
         requires: ['requirements.txt']
         include:
-          - python-version: '3.8'
+          - python-version: '3.10'
             requires: 'min-requirements.txt'
 
     steps:
     - name: Checkout repo
-      uses: actions/checkout at v3
+      uses: actions/checkout at v5
     - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python at v4
+      uses: actions/setup-python at v6
       with:
         python-version: ${{ matrix.python-version }}
         allow-prereleases: true


=====================================
.github/workflows/wheels.yml
=====================================
@@ -15,90 +15,47 @@ concurrency:
   cancel-in-progress: true
 
 jobs:
-  job_metadata:
-    runs-on: ubuntu-latest
-    outputs:
-      commit_message: ${{ steps.get_commit_message.outputs.commit_message }}
-    steps:
-      - name: Checkout
-        uses: actions/checkout at v3
-        with:
-          fetch-depth: 2
-      - name: Print head git commit message
-        id: get_commit_message
-        run: |
-          if [[ -z "$COMMIT_MSG" ]]; then
-            COMMIT_MSG=$(git show -s --format=%s $REF)
-          fi
-          echo commit_message=$COMMIT_MSG | tee -a $GITHUB_OUTPUT
-        env:
-          COMMIT_MSG: ${{ github.event.head_commit.message }}
-          REF: ${{ github.event.pull_request.head.sha }}
-
   build-sdist:
     name: Build sdist
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout at v3
+      - uses: actions/checkout at v5
         with:
           fetch-depth: 0
       - name: Build sdist
         run: pipx run build -s
-      - uses: actions/upload-artifact at v3
+      - uses: actions/upload-artifact at v5
         with:
-          name: sdist
+          name: source-dist
           path: ./dist/*.tar.gz
 
   build-wheel:
-    name: Build wheel for ${{ matrix.python }}-${{ matrix.buildplat[1] }}
-    needs: [job_metadata]
+    name: Build wheel for ${{ matrix.buildplat[1] }}
     runs-on: ${{ matrix.buildplat[0] }}
-    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') || contains(needs.job_metadata.outputs.commit_message, '[build wheels]')
     strategy:
       fail-fast: false
       matrix:
         buildplat:
           - [ubuntu-latest, musllinux_x86_64]
-          - [ubuntu-latest, manylinux_aarch64]
-          - [macos-13, macosx_x86_64]  # native Intel hardware
+          - [ubuntu-latest, manylinux_x86_64]
+          - [ubuntu-24.04-arm, manylinux_aarch64]
+          - [macos-latest, macosx_arm64]
+          - [macos-15-intel, macosx_x86_64]
           - [windows-latest, win_amd64]
-        python: ["cp38", "cp39", "cp310", "cp311", "cp312"]
-        # No NumPy wheels on 3.8 aarch64 or musl
-        exclude:
-          - buildplat: [ubuntu-latest, manylinux_aarch64]
-            python: "cp38"
-          - buildplat: [ubuntu-latest, musllinux_x86_64]
-            python: "cp38"
-        include:
-          # Manylinux and arm64 builds (on native hardware) are cheap, do all in one
-          - { buildplat: ["ubuntu-latest", "manylinux_x86_64"], python: "*" }
-          - { buildplat: ["macos-14", "macosx_arm64"], python: "*" }
 
     steps:
-      - uses: actions/checkout at v3
+      - uses: actions/checkout at v5
         with:
           fetch-depth: 0
 
-      - uses: actions/setup-python at v3
-
-      - name: Update pip/pipx
-        run: pip install --upgrade pip pipx
-
-      # For aarch64 support
-      # https://cibuildwheel.pypa.io/en/stable/faq/#emulation
-      - uses: docker/setup-qemu-action at v3
-        with:
-          platforms: all
-        if: runner.os == 'Linux' && endsWith(matrix.buildplat[1], 'aarch64')
-
       - name: Build wheel(s)
-        run: pipx run --spec "cibuildwheel>=2.15" cibuildwheel
+        uses: pypa/cibuildwheel at v3.2.1
         env:
-          CIBW_BUILD: ${{ matrix.python }}-${{ matrix.buildplat[1] }}
+          CIBW_BUILD: "cp310-${{ matrix.buildplat[1] }} cp311-${{ matrix.buildplat[1] }} "
 
-      - uses: actions/upload-artifact at v3
+      - uses: actions/upload-artifact at v5
         with:
-          name: ${{ matrix.python == '*' && 'all' || matrix.python }}-${{ startsWith(matrix.buildplat[1], 'macosx') && 'macosx' || matrix.buildplat[1] }}
+          name: ${{ matrix.buildplat[1] }}-dist
           path: ./wheelhouse/*.whl
 
   test-sdist:
@@ -106,11 +63,11 @@ jobs:
     needs: [build-sdist]
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/download-artifact at v3
+      - uses: actions/download-artifact at v6
         with:
-          name: sdist
+          name: source-dist
           path: ./dist
-      - uses: actions/setup-python at v4
+      - uses: actions/setup-python at v6
         with:
           python-version: "3.11"
       - name: Display Python version
@@ -127,16 +84,12 @@ jobs:
     runs-on: ubuntu-latest
     needs: [test-sdist, build-wheel]
     steps:
-      - uses: actions/download-artifact at v3
+      - uses: actions/download-artifact at v6
         with:
           path: dist/
-      - name: Check artifacts
-        run: ls -lR
-      - name: Consolidate and re-check
-        run: |
-          mv dist/*/*.{tar.gz,whl} dist
-          rmdir dist/*/
-          ls -lR
+          pattern: '*-dist'
+          merge-multiple: true
+      - run: ls -lR dist/
       - run: pipx run twine check dist/*
 
   publish:
@@ -145,13 +98,12 @@ jobs:
     needs: [pre-publish]
     if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
     steps:
-      - uses: actions/download-artifact at v3
+      - uses: actions/download-artifact at v6
         with:
           path: dist/
-      - name: Consolidate artifacts
-        run: |
-          mv dist/*/*.{tar.gz,whl} dist
-          rmdir dist/*/
+          pattern: '*-dist'
+          merge-multiple: true
+      - run: ls -lR dist/
       - uses: pypa/gh-action-pypi-publish at release/v1
         with:
           user: __token__


=====================================
.gitignore
=====================================
@@ -14,3 +14,10 @@ dist/
 
 # setuptools_scm
 nitime/_version.py
+
+# coverage
+.coverage
+coverage.xml
+
+# tox
+.tox


=====================================
README.txt → README.rst
=====================================


=====================================
THANKS
=====================================
@@ -8,7 +8,7 @@ Below is a partial list.  If you've been left off, please let us know
 Tim Blanche
 Matthew Brett
 Christopher Burns
-Michael Castelle 
+Michael Castelle
 Philippe Ciuciu
 Dav Clark
 Yann Cointepas
@@ -20,7 +20,9 @@ Brian Hawthorne
 Paul Ivanov
 Kilian Koepsell
 Tim Leslie
+Eric Larson
 Cindee Madison
+Chris Markiewicz
 Jarrod Millman
 Fernando Perez
 Josef Perktold


=====================================
doc/news.rst
=====================================
@@ -2,9 +2,29 @@
  Nitime news
 =============
 
-February 7th, 2016: version 0.6 released
+November 6 2025: Version 0.12.1 released.
 
-June 13, 2014: version 0.5 released.
+November 6 2025: Version 0.12 released.
+
+June 17 2024: Version 0.11 released.
+
+October 31 2023: Version 0.10.2 released.
+
+April 4 2023: Version 0.10.1 released.
+
+April 4 2023: Version 0.10 released.
+
+December 19 2020: Version 0.9 released.
+
+June 25 2019: Version 0.8.1 released.
+
+June 22 2019: Version 0.8 released.
+
+December 15, 2016: Version 0.7 released.
+
+February 7 2016: Version 0.6 released.
+
+June 13 2014: Version 0.5 released.
 
 June 19 2012: Version 0.4 released.
 


=====================================
min-requirements.txt
=====================================
@@ -1,8 +1,8 @@
 # Auto-generated by tools/update_requirements.py
 --only-binary numpy,scipy
 --extra-index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple
-matplotlib==3.5
-numpy==1.22
-scipy==1.8
-networkx==2.7
-nibabel==4.0
+matplotlib==3.7
+numpy==1.24
+scipy==1.10
+networkx==3.0
+nibabel==5.0


=====================================
nitime/_compat.py
=====================================
@@ -0,0 +1,5 @@
+# np.trapezoid was introduced and np.trapz deprecated in numpy 2.0
+try:  # NP2
+    from numpy import trapezoid
+except ImportError:  # NP1
+    from numpy import trapz as trapezoid


=====================================
nitime/_mpl_units.py deleted
=====================================
@@ -1,226 +0,0 @@
-"""
-
-This is a fixed copy of a module from Matplotlib v1.3 (https://github.com/matplotlib/matplotlib/pull/2591).
-
-It was taken verbatim from Matplotlib's github repository and is, as is all of
-MPL v1.3.1, copyright (c) 2012-2013 Matplotlib Development Team; All Rights
-Reserved. 
-
-1. This LICENSE AGREEMENT is between the Matplotlib Development Team
-("MDT"), and the Individual or Organization ("Licensee") accessing and
-otherwise using matplotlib software in source or binary form and its
-associated documentation.
-
-2. Subject to the terms and conditions of this License Agreement, MDT
-hereby grants Licensee a nonexclusive, royalty-free, world-wide license
-to reproduce, analyze, test, perform and/or display publicly, prepare
-derivative works, distribute, and otherwise use matplotlib 1.3.1
-alone or in any derivative version, provided, however, that MDT's
-License Agreement and MDT's notice of copyright, i.e., "Copyright (c)
-2012-2013 Matplotlib Development Team; All Rights Reserved" are retained in
-matplotlib 1.3.1 alone or in any derivative version prepared by
-Licensee.
-
-3. In the event Licensee prepares a derivative work that is based on or
-incorporates matplotlib 1.3.1 or any part thereof, and wants to
-make the derivative work available to others as provided herein, then
-Licensee hereby agrees to include in any such work a brief summary of
-the changes made to matplotlib 1.3.1.
-
-4. MDT is making matplotlib 1.3.1 available to Licensee on an "AS
-IS" basis.  MDT MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, MDT MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF MATPLOTLIB 1.3.1
-WILL NOT INFRINGE ANY THIRD PARTY RIGHTS.
-
-5. MDT SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF MATPLOTLIB
-1.3.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR
-LOSS AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING
-MATPLOTLIB 1.3.1, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF
-THE POSSIBILITY THEREOF.
-
-6. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-7. Nothing in this License Agreement shall be deemed to create any
-relationship of agency, partnership, or joint venture between MDT and
-Licensee.  This License Agreement does not grant permission to use MDT
-trademarks or trade name in a trademark sense to endorse or promote
-products or services of Licensee, or any third party.
-
-8. By copying, installing or otherwise using matplotlib 1.3.1,
-Licensee agrees to be bound by the terms and conditions of this License
-Agreement.
-
-
-It is distributed under the following license:
-The classes here provide support for using custom classes with
-matplotlib, eg those that do not expose the array interface but know
-how to converter themselves to arrays.  It also supoprts classes with
-units and units conversion.  Use cases include converters for custom
-objects, eg a list of datetime objects, as well as for objects that
-are unit aware.  We don't assume any particular units implementation,
-rather a units implementation must provide a ConversionInterface, and
-the register with the Registry converter dictionary.  For example,
-here is a complete implementation which supports plotting with native
-datetime objects::
-
-
-    import matplotlib.units as units
-    import matplotlib.dates as dates
-    import matplotlib.ticker as ticker
-    import datetime
-
-    class DateConverter(units.ConversionInterface):
-
-        @staticmethod
-        def convert(value, unit, axis):
-            'convert value to a scalar or array'
-            return dates.date2num(value)
-
-        @staticmethod
-        def axisinfo(unit, axis):
-            'return major and minor tick locators and formatters'
-            if unit!='date': return None
-            majloc = dates.AutoDateLocator()
-            majfmt = dates.AutoDateFormatter(majloc)
-            return AxisInfo(majloc=majloc,
-                            majfmt=majfmt,
-                            label='date')
-
-        @staticmethod
-        def default_units(x, axis):
-            'return the default unit for x or None'
-            return 'date'
-
-    # finally we register our object type with a converter
-    units.registry[datetime.date] = DateConverter()
-
-"""
-from __future__ import print_function
-from matplotlib.cbook import iterable, is_numlike
-import numpy as np
-
-
-class AxisInfo:
-    """information to support default axis labeling and tick labeling, and
-       default limits"""
-    def __init__(self, majloc=None, minloc=None,
-                 majfmt=None, minfmt=None, label=None,
-                 default_limits=None):
-        """
-        majloc and minloc: TickLocators for the major and minor ticks
-        majfmt and minfmt: TickFormatters for the major and minor ticks
-        label: the default axis label
-        default_limits: the default min, max of the axis if no data is present
-        If any of the above are None, the axis will simply use the default
-        """
-        self.majloc = majloc
-        self.minloc = minloc
-        self.majfmt = majfmt
-        self.minfmt = minfmt
-        self.label = label
-        self.default_limits = default_limits
-
-
-class ConversionInterface:
-    """
-    The minimal interface for a converter to take custom instances (or
-    sequences) and convert them to values mpl can use
-    """
-    @staticmethod
-    def axisinfo(unit, axis):
-        'return an units.AxisInfo instance for axis with the specified units'
-        return None
-
-    @staticmethod
-    def default_units(x, axis):
-        'return the default unit for x or None for the given axis'
-        return None
-
-    @staticmethod
-    def convert(obj, unit, axis):
-        """
-        convert obj using unit for the specified axis.  If obj is a sequence,
-        return the converted sequence.  The output must be a sequence of scalars
-        that can be used by the numpy array layer
-        """
-        return obj
-
-    @staticmethod
-    def is_numlike(x):
-        """
-        The matplotlib datalim, autoscaling, locators etc work with
-        scalars which are the units converted to floats given the
-        current unit.  The converter may be passed these floats, or
-        arrays of them, even when units are set.  Derived conversion
-        interfaces may opt to pass plain-ol unitless numbers through
-        the conversion interface and this is a helper function for
-        them.
-        """
-        if iterable(x):
-            for thisx in x:
-                return is_numlike(thisx)
-        else:
-            return is_numlike(x)
-
-
-class Registry(dict):
-    """
-    register types with conversion interface
-    """
-    def __init__(self):
-        dict.__init__(self)
-        self._cached = {}
-
-    def get_converter(self, x):
-        'get the converter interface instance for x, or None'
-
-        if not len(self):
-            return None  # nothing registered
-        #DISABLED idx = id(x)
-        #DISABLED cached = self._cached.get(idx)
-        #DISABLED if cached is not None: return cached
-
-        converter = None
-        classx = getattr(x, '__class__', None)
-
-        if classx is not None:
-            converter = self.get(classx)
-
-        if isinstance(x, np.ndarray) and x.size:
-            xravel = x.ravel()
-            try:
-                # pass the first value of x that is not masked back to
-                # get_converter
-                if not np.all(xravel.mask):
-                    # some elements are not masked
-                    converter = self.get_converter(
-                        xravel[np.argmin(xravel.mask)])
-                    return converter
-            except AttributeError:
-                # not a masked_array
-                # Make sure we don't recurse forever -- it's possible for
-                # ndarray subclasses to continue to return subclasses and
-                # not ever return a non-subclass for a single element.
-                next_item = xravel[0]
-                if (not isinstance(next_item, np.ndarray) or
-                    next_item.shape != x.shape):
-                    converter = self.get_converter(next_item)
-                return converter
-            
-        if converter is None and iterable(x):
-            for thisx in x:
-                # Make sure that recursing might actually lead to a solution,
-                # if we are just going to re-examine another item of the same
-                # kind, then do not look at it.
-                if classx and classx != getattr(thisx, '__class__', None):
-                    converter = self.get_converter(thisx)
-                    return converter
-
-        #DISABLED self._cached[idx] = converter
-        return converter
-
-
-registry = Registry()


=====================================
nitime/algorithms/tests/test_autoregressive.py
=====================================
@@ -3,6 +3,7 @@ import numpy.testing as npt
 
 import nitime.algorithms as tsa
 import nitime.utils as utils
+from nitime._compat import trapezoid
 
 # Set the random seed:
 np.random.seed(1)
@@ -46,14 +47,14 @@ def test_AR_YW():
 
     # evaluate this integral numerically from 0 to pi
     dw = np.pi / len(psd)
-    avg_pwr_est = np.trapz(psd, dx=dw) / (2 * np.pi)
+    avg_pwr_est = trapezoid(psd, dx=dw) / (2 * np.pi)
     # consistency on the order of 10**0 is pretty good for this test
     npt.assert_almost_equal(avg_pwr, avg_pwr_est, decimal=0)
 
     # Test for providing the autocovariance as an input:
     ak, sigma_v = tsa.AR_est_YW(arsig, order, utils.autocov(arsig))
     w, psd = tsa.AR_psd(ak, sigma_v)
-    avg_pwr_est = np.trapz(psd, dx=dw) / (2 * np.pi)
+    avg_pwr_est = trapezoid(psd, dx=dw) / (2 * np.pi)
     npt.assert_almost_equal(avg_pwr, avg_pwr_est, decimal=0)
 
 
@@ -76,13 +77,13 @@ def test_AR_LD():
 
     # evaluate this integral numerically from 0 to pi
     dw = np.pi / len(psd)
-    avg_pwr_est = np.trapz(psd, dx=dw) / (2 * np.pi)
+    avg_pwr_est = trapezoid(psd, dx=dw) / (2 * np.pi)
     npt.assert_almost_equal(avg_pwr, avg_pwr_est, decimal=0)
 
     # Test for providing the autocovariance as an input:
     ak, sigma_v = tsa.AR_est_LD(arsig, order, utils.autocov(arsig))
     w, psd = tsa.AR_psd(ak, sigma_v)
-    avg_pwr_est = np.trapz(psd, dx=dw) / (2 * np.pi)
+    avg_pwr_est = trapezoid(psd, dx=dw) / (2 * np.pi)
     npt.assert_almost_equal(avg_pwr, avg_pwr_est, decimal=0)
 
 


=====================================
nitime/analysis/coherence.py
=====================================
@@ -39,7 +39,6 @@ class CoherenceAnalyzer(BaseAnalyzer):
         Examples
         --------
         >>> import nitime.timeseries as ts
-        >>> np.set_printoptions(precision=4)  # for doctesting
         >>> t1 = ts.TimeSeries(data = np.arange(0,1024,1).reshape(2,512),
         ...                                 sampling_rate=np.pi)
         >>> c1 = CoherenceAnalyzer(t1)
@@ -48,11 +47,11 @@ class CoherenceAnalyzer(BaseAnalyzer):
         >>> c1.method['this_method']
         'welch'
         >>> c1.coherence[0,1]
-        array([ 0.9024,  0.9027,  0.9652,  0.9433,  0.9297,  0.9213,  0.9161,
-                0.9126,  0.9102,  0.9085,  0.9072,  0.9063,  0.9055,  0.905 ,
-                0.9045,  0.9041,  0.9038,  0.9036,  0.9034,  0.9032,  0.9031,
-                0.9029,  0.9028,  0.9027,  0.9027,  0.9026,  0.9026,  0.9025,
-                0.9025,  0.9025,  0.9025,  0.9026,  1.    ])
+        array([0.9024, 0.9027, 0.9652, 0.9433, 0.9297, 0.9213, 0.9161, 0.9126,
+               0.9102, 0.9085, 0.9072, 0.9063, 0.9055, 0.905 , 0.9045, 0.9041,
+               0.9038, 0.9036, 0.9034, 0.9032, 0.9031, 0.9029, 0.9028, 0.9027,
+               0.9027, 0.9026, 0.9026, 0.9025, 0.9025, 0.9025, 0.9025, 0.9026,
+               1.    ])
         >>> c1.phase[0,1]
         array([ 0.    , -0.035 , -0.4839, -0.4073, -0.3373, -0.2828, -0.241 ,
                -0.2085, -0.1826, -0.1615, -0.144 , -0.1292, -0.1164, -0.1054,


=====================================
nitime/analysis/correlation.py
=====================================
@@ -24,7 +24,6 @@ class CorrelationAnalyzer(BaseAnalyzer):
 
         Examples
         --------
-        >>> np.set_printoptions(precision=4)  # for doctesting
         >>> t1 = ts.TimeSeries(data = np.sin(np.arange(0,
         ...                    10*np.pi,10*np.pi/100)).reshape(2,50),
         ...                                      sampling_rate=np.pi)


=====================================
nitime/analysis/spectral.py
=====================================
@@ -43,7 +43,6 @@ class SpectralAnalyzer(BaseAnalyzer):
 
         Examples
         --------
-        >>> np.set_printoptions(precision=4)  # for doctesting
         >>> t1 = ts.TimeSeries(data = np.arange(0,1024,1).reshape(2,512),
         ... sampling_rate=np.pi)
         >>> s1 = SpectralAnalyzer(t1)
@@ -53,13 +52,13 @@ class SpectralAnalyzer(BaseAnalyzer):
         3.1415926535... Hz
         >>> f,s = s1.psd
         >>> f
-        array([ 0.    ,  0.0491,  0.0982,  0.1473,  0.1963,  0.2454,  0.2945,
-                0.3436,  0.3927,  0.4418,  0.4909,  0.54  ,  0.589 ,  0.6381,
-                0.6872,  0.7363,  0.7854,  0.8345,  0.8836,  0.9327,  0.9817,
-                1.0308,  1.0799,  1.129 ,  1.1781,  1.2272,  1.2763,  1.3254,
-                1.3744,  1.4235,  1.4726,  1.5217,  1.5708])
+        array([0.    , 0.0491, 0.0982, 0.1473, 0.1963, 0.2454, 0.2945, 0.3436,
+               0.3927, 0.4418, 0.4909, 0.54  , 0.589 , 0.6381, 0.6872, 0.7363,
+               0.7854, 0.8345, 0.8836, 0.9327, 0.9817, 1.0308, 1.0799, 1.129 ,
+               1.1781, 1.2272, 1.2763, 1.3254, 1.3744, 1.4235, 1.4726, 1.5217,
+               1.5708])
         >>> s[0,0]   # doctest: +ELLIPSIS
-        1128276.92538360...
+        1128276.9253836009
         """
         BaseAnalyzer.__init__(self, input)
 


=====================================
nitime/conftest.py
=====================================
@@ -0,0 +1,7 @@
+import numpy as np
+import pytest
+
+
+ at pytest.fixture(scope='session', autouse=True)
+def legacy_printoptions():
+    np.set_printoptions(legacy='1.21', precision=4)


=====================================
nitime/index_utils.py
=====================================
@@ -44,9 +44,9 @@ def tri(N, M=None, k=0, dtype=float):
            [1, 1, 1, 1, 1]])
 
     >>> np.tri(3, 5, -1)
-    array([[ 0.,  0.,  0.,  0.,  0.],
-           [ 1.,  0.,  0.,  0.,  0.],
-           [ 1.,  1.,  0.,  0.,  0.]])
+    array([[0., 0., 0., 0., 0.],
+           [1., 0., 0., 0., 0.],
+           [1., 1., 0., 0., 0.]])
 
     """
     if M is None: M = N


=====================================
nitime/lazyimports.py
=====================================
@@ -36,20 +36,20 @@ class LazyImport(types.ModuleType):
     respect to introspection and tab completion) with the *exception* of
     reload()- reloading a :class:`LazyImport` raises an :class:`ImportError`.
 
-    >>> mlab = LazyImport('matplotlib.mlab')
+    >>> numpy = LazyImport('numpy')
 
     No import happens on the above line, until we do something like call an
-    ``mlab`` method or try to do tab completion or introspection on ``mlab``
+    ``numpy`` method or try to do tab completion or introspection on ``numpy``
     in IPython.
 
-    >>> mlab
-    <module 'matplotlib.mlab' will be lazily loaded>
+    >>> numpy
+    <module 'numpy' will be lazily loaded>
 
-    Now the :class:`LazyImport` will do an actual import, and call the dist
+    Now the :class:`LazyImport` will do an actual import, and call the hypot
     function of the imported module.
 
-    >>> mlab.dist(1969,2011)
-    42.0
+    >>> numpy.diff([1969, 2011])
+    array([42])
     """
     def __getattribute__(self,x):
         # This method will be called only once, since we'll change


=====================================
nitime/tests/test_algorithms.py
=====================================
@@ -8,6 +8,7 @@ from scipy import fftpack, signal
 import nitime
 from nitime import algorithms as tsa
 from nitime import utils as ut
+from nitime._compat import trapezoid
 
 #Define globally
 test_dir_path = os.path.join(nitime.__path__[0], 'tests')
@@ -70,7 +71,7 @@ def test_periodogram():
     avg_pwr = (arsig * arsig.conjugate()).mean()
     f, psd = tsa.periodogram(arsig, N=2048)
     df = 2. * np.pi / 2048
-    avg_pwr_est = np.trapz(psd, dx=df)
+    avg_pwr_est = trapezoid(psd, dx=df)
     npt.assert_almost_equal(avg_pwr, avg_pwr_est, decimal=1)
 
 


=====================================
nitime/tests/test_timeseries.py
=====================================
@@ -131,10 +131,10 @@ def test_TimeArray_repr():
     """
 >>> a = ts.TimeArray([1.1,2,3])
 >>> a
-TimeArray([ 1.1,  2. ,  3. ], time_unit='s')
+TimeArray([1.1, 2. , 3. ], time_unit='s')
 >>> t = ts.TimeArray(a,time_unit='ms')
 >>> t
-TimeArray([ 1100.,  2000.,  3000.], time_unit='ms')
+TimeArray([1100., 2000., 3000.], time_unit='ms')
 >>> t[0]
 1100.0 ms
     """
@@ -195,12 +195,12 @@ def test_TimeArray_convert_unit():
     >>> a = ts.TimeArray([1,2,3,4])
     >>> a.convert_unit('ms')
     >>> a
-    TimeArray([ 1000.,  2000.,  3000.,  4000.], time_unit='ms')
+    TimeArray([1000., 2000., 3000., 4000.], time_unit='ms')
     >>> a.time_unit
     'ms'
     >>> b = ts.TimeArray([1,2,3,4],'s')
     >>> a==b
-    array([ True,  True,  True,  True], dtype=bool)
+    array([ True,  True,  True,  True])
     """
 
 
@@ -431,13 +431,13 @@ def test_UniformTime_repr():
     >>> time1.sampling_rate
     1000.0 Hz
     >>> time1
-    UniformTime([ 0.,  1.,  2.], time_unit='ms')
+    UniformTime([0., 1., 2.], time_unit='ms')
 
     >>> time2= ts.UniformTime(sampling_rate=1000,time_unit='s',length=3)
     >>> time2.sampling_rate
     1000.0 Hz
     >>> time2
-    UniformTime([ 0.   ,  0.001,  0.002], time_unit='s')
+    UniformTime([0.   , 0.001, 0.002], time_unit='s')
 
     >>> a = ts.UniformTime(length=5,sampling_rate=1,time_unit='ms')
 
@@ -452,7 +452,7 @@ def test_UniformTime_repr():
     >>> b = ts.UniformTime(a,time_unit='s')
 
     >>> b
-    UniformTime([ 0.,  1.,  2.,  3.,  4.], time_unit='s')
+    UniformTime([0., 1., 2., 3., 4.], time_unit='s')
 
     >>> a = ts.UniformTime(length=1,sampling_rate=2)
 
@@ -574,7 +574,7 @@ def test_TimeSeries_repr():
     3.0 Hz
     >>> tseries1 = ts.TimeSeries(data=[3,5,8],sampling_rate=3)
     >>> tseries1.time
-    UniformTime([ 0.    ,  0.3333,  0.6667], time_unit='s')
+    UniformTime([0.    , 0.3333, 0.6667], time_unit='s')
     >>> tseries1.sampling_rate
     3.0 Hz
     >>> tseries1.sampling_interval


=====================================
nitime/timeseries.py
=====================================
@@ -192,13 +192,16 @@ class TimeArray(np.ndarray, TimeInterface):
         time._conversion_factor = time_unit_conversion[time_unit]
         return time
 
-    def __array_wrap__(self, out_arr, context=None):
+    def __array_wrap__(self, out_arr, context=None, return_scalar=False):
         # When doing comparisons between TimeArrays, make sure that you return
         # a boolean array, not a time array:
         if out_arr.dtype == bool:
-            return np.asarray(out_arr)
+            ret = np.asarray(out_arr)
+            if return_scalar:
+                ret = ret[()]
+            return ret
         else:
-            return np.ndarray.__array_wrap__(self, out_arr, context)
+            return np.ndarray.__array_wrap__(self, out_arr, context, return_scalar)
 
     def __array_finalize__(self, obj):
         """XXX """
@@ -691,13 +694,16 @@ class UniformTime(np.ndarray, TimeInterface):
 
         return time
 
-    def __array_wrap__(self, out_arr, context=None):
+    def __array_wrap__(self, out_arr, context=None, return_scalar=False):
         # When doing comparisons between UniformTime, make sure that you return
         # a boolean array, not a time array:
         if out_arr.dtype == bool:
-            return np.asarray(out_arr)
+            ret = np.asarray(out_arr)
+            if return_scalar:
+                ret = ret[()]
+            return ret
         else:
-            return np.ndarray.__array_wrap__(self, out_arr, context)
+            return np.ndarray.__array_wrap__(self, out_arr, context, return_scalar)
 
     def __array_finalize__(self, obj):
         """XXX """
@@ -1101,7 +1107,7 @@ class TimeSeries(TimeSeriesBase):
 
         >>> ts = TimeSeries([1,2,3],sampling_interval=0.25)
         >>> ts.time
-        UniformTime([ 0.  ,  0.25,  0.5 ], time_unit='s')
+        UniformTime([0.  , 0.25, 0.5 ], time_unit='s')
         >>> ts.t0
         0.0 s
         >>> ts.sampling_rate
@@ -1111,7 +1117,7 @@ class TimeSeries(TimeSeriesBase):
 
         >>> ts = TimeSeries([1,2,3],sampling_rate=2)
         >>> ts.time
-        UniformTime([ 0. ,  0.5,  1. ], time_unit='s')
+        UniformTime([0. , 0.5, 1. ], time_unit='s')
         >>> ts.t0
         0.0 s
         >>> ts.sampling_interval
@@ -1123,7 +1129,7 @@ class TimeSeries(TimeSeriesBase):
         >>> ts.data
         array([1, 2, 3])
         >>> ts.time
-        UniformTime([ 4.25,  4.75,  5.25], time_unit='s')
+        UniformTime([4.25, 4.75, 5.25], time_unit='s')
         >>> ts.t0
         4.25 s
         >>> ts.sampling_interval
@@ -1135,7 +1141,7 @@ class TimeSeries(TimeSeriesBase):
         >>> ts.data
         array([1, 2, 3])
         >>> ts.time
-        UniformTime([ 4.25,  4.75,  5.25], time_unit='s')
+        UniformTime([4.25, 4.75, 5.25], time_unit='s')
         >>> ts.t0
         4.25 s
         >>> ts.sampling_interval
@@ -1517,7 +1523,7 @@ class Events(TimeInterface):
         if not np.iterable(time):
             time = [time]
 
-        # First initilaize the TimeArray from the time-stamps
+        # First initialize the TimeArray from the time-stamps
         self.time = TimeArray(time, time_unit=time_unit)
         self.time_unit = self.time.time_unit
 


=====================================
nitime/utils.py
=====================================
@@ -1381,7 +1381,7 @@ def fill_diagonal(a, val):
     else:
         # For more than d=2, the strided formula is only valid for arrays with
         # all dimensions equal, so we check first.
-        if not np.alltrue(np.diff(a.shape) == 0):
+        if not np.all(np.diff(a.shape) == 0):
             raise ValueError("All dimensions of input must be of equal length")
         step = np.cumprod((1,) + a.shape[:-1]).sum()
 
@@ -1460,7 +1460,7 @@ def diag_indices_from(arr):
         raise ValueError("input array must be at least 2-d")
     # For more than d=2, the strided formula is only valid for arrays with
     # all dimensions equal, so we check first.
-    if not np.alltrue(np.diff(arr.shape) == 0):
+    if not np.all(np.diff(arr.shape) == 0):
         raise ValueError("All dimensions of input must be of equal length")
 
     return diag_indices(arr.shape[0], arr.ndim)
@@ -1704,12 +1704,11 @@ def structured_rand_arr(size, sample_func=np.random.random,
     Examples
     --------
     >>> np.random.seed(0)  # for doctesting
-    >>> np.set_printoptions(precision=4)  # for doctesting
     >>> structured_rand_arr(4)
-    array([[ 0.5488,  0.7152,  0.6028,  0.5449],
-           [ 0.7152,  0.6459,  0.4376,  0.8918],
-           [ 0.6028,  0.4376,  0.7917,  0.5289],
-           [ 0.5449,  0.8918,  0.5289,  0.0871]])
+    array([[0.5488, 0.7152, 0.6028, 0.5449],
+           [0.7152, 0.6459, 0.4376, 0.8918],
+           [0.6028, 0.4376, 0.7917, 0.5289],
+           [0.5449, 0.8918, 0.5289, 0.0871]])
     >>> structured_rand_arr(4,ltfac=-10,utfac=10,fill_diag=0.5)
     array([[ 0.5   ,  8.3262,  7.7816,  8.7001],
            [-8.3262,  0.5   ,  4.6148,  7.8053],
@@ -1765,17 +1764,16 @@ def symm_rand_arr(size, sample_func=np.random.random, fill_diag=None):
     Examples
     --------
     >>> np.random.seed(0)  # for doctesting
-    >>> np.set_printoptions(precision=4)  # for doctesting
     >>> symm_rand_arr(4)
-    array([[ 0.5488,  0.7152,  0.6028,  0.5449],
-           [ 0.7152,  0.6459,  0.4376,  0.8918],
-           [ 0.6028,  0.4376,  0.7917,  0.5289],
-           [ 0.5449,  0.8918,  0.5289,  0.0871]])
+    array([[0.5488, 0.7152, 0.6028, 0.5449],
+           [0.7152, 0.6459, 0.4376, 0.8918],
+           [0.6028, 0.4376, 0.7917, 0.5289],
+           [0.5449, 0.8918, 0.5289, 0.0871]])
     >>> symm_rand_arr(4,fill_diag=4)
-    array([[ 4.    ,  0.8326,  0.7782,  0.87  ],
-           [ 0.8326,  4.    ,  0.4615,  0.7805],
-           [ 0.7782,  0.4615,  4.    ,  0.9447],
-           [ 0.87  ,  0.7805,  0.9447,  4.    ]])
+    array([[4.    , 0.8326, 0.7782, 0.87  ],
+           [0.8326, 4.    , 0.4615, 0.7805],
+           [0.7782, 0.4615, 4.    , 0.9447],
+           [0.87  , 0.7805, 0.9447, 4.    ]])
       """
     return structured_rand_arr(size, sample_func, fill_diag=fill_diag)
 
@@ -1797,7 +1795,6 @@ def antisymm_rand_arr(size, sample_func=np.random.random):
     Examples
     --------
     >>> np.random.seed(0)  # for doctesting
-    >>> np.set_printoptions(precision=4)  # for doctesting
     >>> antisymm_rand_arr(4)
     array([[ 0.    ,  0.7152,  0.6028,  0.5449],
            [-0.7152,  0.    ,  0.4376,  0.8918],
@@ -1830,16 +1827,15 @@ def threshold_arr(cmat, threshold=0.0, threshold2=None):
 
     Examples
     --------
-    >>> np.set_printoptions(precision=4)  # For doctesting
     >>> a = np.linspace(0,0.2,5)
     >>> a
-    array([ 0.  ,  0.05,  0.1 ,  0.15,  0.2 ])
+    array([0.  , 0.05, 0.1 , 0.15, 0.2 ])
     >>> threshold_arr(a,0.1)
-    (array([3, 4]), array([ 0.15,  0.2 ]))
+    (array([3, 4]), array([0.15, 0.2 ]))
 
     With two thresholds:
     >>> threshold_arr(a,0.1,0.2)
-    (array([0, 1]), array([ 0.  ,  0.05]))
+    (array([0, 1]), array([0.  , 0.05]))
     """
     # Select thresholds
     if threshold2 is None:
@@ -1906,7 +1902,7 @@ def rescale_arr(arr, amin, amax):
     >>> a = np.arange(5)
 
     >>> rescale_arr(a,3,6)
-    array([ 3.  ,  3.75,  4.5 ,  5.25,  6.  ])
+    array([3.  , 3.75, 4.5 , 5.25, 6.  ])
     """
 
     # old bounds
@@ -1941,16 +1937,15 @@ def minmax_norm(arr, mode='direct', folding_edges=None):
 
     Examples
     --------
-    >>> np.set_printoptions(precision=4)  # for doctesting
     >>> a = np.linspace(0.3,0.8,4)
     >>> minmax_norm(a)
-    array([ 0.    ,  0.3333,  0.6667,  1.    ])
+    array([0.    , 0.3333, 0.6667, 1.    ])
     >>> b = np.concatenate([np.linspace(-0.7,-0.3,3),
     ...                             np.linspace(0.3,0.8,3)])
     >>> b
     array([-0.7 , -0.5 , -0.3 ,  0.3 ,  0.55,  0.8 ])
     >>> minmax_norm(b,'folding',[-0.3,0.3])
-    array([ 0.8,  0.4,  0. ,  0. ,  0.5,  1. ])
+    array([0.8, 0.4, 0. , 0. , 0.5, 1. ])
     """
     if mode == 'direct':
         return rescale_arr(arr, 0, 1)


=====================================
nitime/viz.py
=====================================
@@ -3,8 +3,6 @@
 Depends on matplotlib. Some functions depend also on networkx
 
 """
-from __future__ import print_function
-
 # If you are running nosetests right now, you might want to use 'agg' as a backend:
 import sys
 
@@ -22,14 +20,6 @@ import nitime.utils as tsu
 from nitime.utils import threshold_arr, minmax_norm, rescale_arr
 import nitime.analysis as nta
 
-# Matplotlib 1.3 has a bug in it, so if that's what you have, we'll replace it
-# for you with a fixed version of that module:
-import matplotlib
-if matplotlib.__version__[:3] == '1.3' or matplotlib.__version__[:3] == '1.4':
-    import nitime._mpl_units as mpl_units
-    import matplotlib.axis as ax
-    ax.munits = mpl_units
-
 from nitime.utils import tril_indices
 
 #Some visualization functions require networkx. Import that if possible:


=====================================
pyproject.toml
=====================================
@@ -4,9 +4,7 @@ requires = [
   "setuptools_scm[toml]>=6.2",
   "cython",
   # Wheels need to be built with NumPy 2.0 to be compatible with 2.0 and 1.x
-  "numpy>=2.0.0rc1,<3; python_version > '3.8'",
-  # NEP29-minimum as of Aug 17, 2023 (1.25 doesn't support 3.8)
-  "numpy==1.22; python_version == '3.8'",
+  "numpy>=2",
 ]
 build-backend = "setuptools.build_meta"
 
@@ -14,9 +12,9 @@ build-backend = "setuptools.build_meta"
 name = "nitime"
 dynamic = ["version"]
 description = "Nitime: timeseries analysis for neuroscience data"
-readme = "README.txt"
+readme = "README.rst"
 license = { file = "LICENSE" }
-requires-python = ">=3.8"
+requires-python = ">=3.10"
 authors = [
     { name = "Nitime developers", email = "neuroimaging at python.org" },
 ]
@@ -33,15 +31,15 @@ classifiers = [
     "Topic :: Scientific/Engineering",
 ]
 dependencies = [
-    "matplotlib>=3.5",
-    "numpy>=1.22",
-    "scipy>=1.8",
+    "matplotlib>=3.7",
+    "numpy>=1.24",
+    "scipy>=1.10",
 ]
 
 [project.optional-dependencies]
 full = [
-    "networkx>=2.7",
-    "nibabel>=4.0",
+    "networkx>=3.0",
+    "nibabel>=5.0",
 ]
 
 [project.urls]
@@ -54,14 +52,24 @@ include = ["nitime*"]
 [tool.setuptools_scm]
 write_to = "nitime/_version.py"
 
-[tool.cibuildwheel]
-# Disable PyPy, and no NumPy wheels for 3.8 Linux aarch64 or musl
-skip = "pp* cp38-*_aarch64 cp38-musllinux_*"
+[dependency-groups]
+test = [
+    "pytest>=8",
+    "pytest-cov>=2.11",
+    "pytest-doctestplus>=1.5",
+    "networkx>=3.0",
+    "nibabel>=5.0",
+]
 
+[tool.cibuildwheel]
+# Disable PyPy
+skip = "pp*"
+# 3.11 is abi3
+build = "cp310-* cp311-*"
 # 64-bit builds only; 32-bit builds seem pretty niche these days, so
 # don't bother unless someone asks
 archs = ["native"]
-
+before-build = "pip install abi3audit"
 test-requires = [
     "pytest",
     "nitime[full]",  # Enable all optional behavior
@@ -69,7 +77,43 @@ test-requires = [
 test-command = "pytest -rsx --pyargs nitime"
 
 [tool.cibuildwheel.linux]
-archs = ["x86_64", "aarch64"]
+repair-wheel-command = [
+  "auditwheel repair -w {dest_dir} {wheel}",
+  "bash tools/audit_wheel.sh {wheel}",
+]
+
+[tool.cibuildwheel.macos]
+archs = ["native"]
+repair-wheel-command = [
+  "delocate-wheel --require-archs {delocate_archs} -w {dest_dir} -v {wheel}",
+  "bash tools/audit_wheel.sh {wheel}",
+]
+
+[tool.cibuildwheel.windows]
+before-build = "pip install delvewheel abi3audit"
+repair-wheel-command = [
+  "delvewheel repair -w {dest_dir} {wheel}",
+  "bash tools/audit_wheel.sh {wheel}",
+]
+
+[tool.pytest.ini_options]
+minversion = "8"
+testpaths = ["nitime"]
+log_cli_level = "INFO"
+xfail_strict = true
+norecursedirs = [".git", "dist", "build", ".tox", ".venv"]
+addopts = [
+  "-rsx",
+  "--strict-config",
+  "--strict-markers",
+  "--doctest-modules",
+  # Config pytest-cov
+  "--cov=nitime",
+  "--cov-report=xml",
+  # Comment out above and uncomment below to autofix doctests
+  # "--doctest-only",
+  # "--doctest-plus-generate-diff=overwrite",
+]
 
 [tool.codespell]
 skip = '.git,*.pdf,*.svg,go.sum,*.css'


=====================================
requirements.txt
=====================================
@@ -1,8 +1,8 @@
 # Auto-generated by tools/update_requirements.py
 --only-binary numpy,scipy
 --extra-index-url https://pypi.anaconda.org/scientific-python-nightly-wheels/simple
-matplotlib>=3.5
-numpy>=1.22
-scipy>=1.8
-networkx>=2.7
-nibabel>=4.0
+matplotlib>=3.7
+numpy>=1.24
+scipy>=1.10
+networkx>=3.0
+nibabel>=5.0


=====================================
setup.py
=====================================
@@ -4,26 +4,48 @@
 This file only contains cython components.
 See pyproject.toml for the remaining configuration.
 """
-from setuptools import setup
-
-try:
-    from setuptools import Extension
-    from Cython.Build import cythonize
-    from numpy import get_include
-
-    # add Cython extensions to the setup options
-    exts = [
-        Extension(
-            'nitime._utils',
-            ['nitime/_utils.pyx'],
-            include_dirs=[get_include()],
-            define_macros=[('NPY_NO_DEPRECATED_API', 'NPY_1_7_API_VERSION')],
-        )
-    ]
-    opts = {'ext_modules': cythonize(exts, language_level='3')}
-except ImportError:
-    # no loop for you!
-    opts = {}
+import platform
+import sys
+
+from Cython.Build import cythonize
+from numpy import get_include
+from setuptools import setup, Extension
+from wheel.bdist_wheel import bdist_wheel
+
+# add Cython extensions to the setup options
+
+
+# https://github.com/joerick/python-abi3-package-sample/blob/main/setup.py
+class bdist_wheel_abi3(bdist_wheel):  # noqa: D101
+    def get_tag(self):  # noqa: D102
+        python, abi, plat = super().get_tag()
+
+        if python.startswith("cp"):
+            return "cp311", "abi3", plat
+
+        return python, abi, plat
+
+
+macros = [('NPY_NO_DEPRECATED_API', 'NPY_1_7_API_VERSION')]
+ext_kwargs = {}
+setup_kwargs = {}
+if sys.version_info.minor >= 11 and platform.python_implementation() == "CPython":
+    # Can create an abi3 wheel (typed memoryviews first available in 3.11)!
+    macros.append(("Py_LIMITED_API", "0x030B0000"))
+    ext_kwargs["py_limited_api"] = True
+    setup_kwargs["cmdclass"] = {"bdist_wheel": bdist_wheel_abi3}
+
+
+exts = [
+    Extension(
+        'nitime._utils',
+        ['nitime/_utils.pyx'],
+        include_dirs=[get_include()],
+        define_macros=macros,
+        **ext_kwargs,
+    )
+]
+opts = {'ext_modules': cythonize(exts, language_level='3'), **setup_kwargs}
 
 # Now call the actual setup function
 if __name__ == '__main__':


=====================================
tools/audit_wheel.sh
=====================================
@@ -0,0 +1,9 @@
+#!/bin/bash
+set -exo pipefail
+
+PY_MINOR=$(python -c "import sys; print(sys.version_info.minor)")
+if [ "$PY_MINOR" -lt 11 ]; then
+  echo "Not checking abi3audit for Python $PY_MINOR < 3.11"
+  exit 0
+fi
+abi3audit --strict --report --verbose "$1"



View it on GitLab: https://salsa.debian.org/med-team/nitime/-/commit/14c8b1a546763902c7d9ed38e2e3edf7b4aeacce

-- 
View it on GitLab: https://salsa.debian.org/med-team/nitime/-/commit/14c8b1a546763902c7d9ed38e2e3edf7b4aeacce
You're receiving this email because of your account on salsa.debian.org.


-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/debian-med-commit/attachments/20260103/5ca02ad3/attachment-0001.htm>


More information about the debian-med-commit mailing list