[med-svn] [Git][med-team/nitime][master] 7 commits: New upstream version 0.10.1

Étienne Mollier (@emollier) gitlab at salsa.debian.org
Tue Aug 22 09:00:45 BST 2023



Étienne Mollier pushed to branch master at Debian Med / nitime


Commits:
4eafb15f by Étienne Mollier at 2023-08-22T09:17:01+02:00
New upstream version 0.10.1
- - - - -
523e2e28 by Étienne Mollier at 2023-08-22T09:17:01+02:00
routine-update: New upstream version

- - - - -
93442062 by Étienne Mollier at 2023-08-22T09:17:16+02:00
Update upstream source from tag 'upstream/0.10.1'

Update to upstream version '0.10.1'
with Debian dir f6b5fedcafe9edc1045f7d4a908dcaf049b57140
- - - - -
60fe51ac by Étienne Mollier at 2023-08-22T09:24:12+02:00
do-not-set-lowerbound-zero-in-iir.patch: delete: fixed upstream.

- - - - -
ea584219 by Étienne Mollier at 2023-08-22T09:26:10+02:00
fix-psd-test.patch: delete: fixed upstream.

- - - - -
40bf8023 by Étienne Mollier at 2023-08-22T09:27:15+02:00
numpy_1.24.patch: remove: applied upstream.

- - - - -
5f810533 by Étienne Mollier at 2023-08-22T09:59:46+02:00
d/changelog: update with warning notice.

- - - - -


22 changed files:

- .github/workflows/test.yml
- + .github/workflows/wheels.yml
- debian/changelog
- − debian/patches/do-not-set-lowerbound-zero-in-iir.patch
- − debian/patches/fix-psd-test.patch
- − debian/patches/numpy_1.24.patch
- debian/patches/series
- doc/examples/multi_taper_coh.py
- nitime/algorithms/cohere.py
- nitime/algorithms/spectral.py
- nitime/analysis/coherence.py
- nitime/analysis/correlation.py
- nitime/analysis/spectral.py
- nitime/fmri/io.py
- nitime/fmri/tests/test_io.py
- nitime/tests/test_algorithms.py
- nitime/tests/test_analysis.py
- nitime/version.py
- nitime/viz.py
- + pyproject.toml
- requirements-dev.txt
- setup.py


Changes:

=====================================
.github/workflows/test.yml
=====================================
@@ -1,6 +1,19 @@
 name: Test suite
 
-on: [push, pull_request]
+on:
+  push:
+    branches:
+      - master
+    tags:
+      - "*"
+  pull_request:
+    branches:
+      - master
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
 
 jobs:
   build:
@@ -9,13 +22,13 @@ jobs:
     strategy:
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8]
+        python-version: ['3.7', '3.8', '3.9', '3.10', '3.11']
 
     steps:
     - name: Checkout repo
-      uses: actions/checkout@v1
+      uses: actions/checkout@v3
    - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v1
+      uses: actions/setup-python@v4
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install
@@ -26,7 +39,7 @@ jobs:
         python -m pip install .
     - name: Lint
       run: |
-        flake8 --ignore N802,N806,W504 --select W503 `find . -name \*.py | grep -v setup.py | grep -v version.py | grep -v __init__.py | grep -v /docs/`
+        pipx run flake8 --ignore N802,N806,W504 --select W503 nitime/ tools/
     - name: Test
       run: |
         cd && mkdir for_test && cd for_test && pytest --pyargs nitime --cov-report term-missing --cov=AFQ


=====================================
.github/workflows/wheels.yml
=====================================
@@ -0,0 +1,134 @@
+name: Build
+
+on:
+  push:
+    branches:
+      - master
+    tags:
+      - "*"
+  pull_request:
+    branches:
+      - master
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  job_metadata:
+    runs-on: ubuntu-latest
+    outputs:
+      commit_message: ${{ steps.get_commit_message.outputs.commit_message }}
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+        with:
+          fetch-depth: 2
+      - name: Print head git commit message
+        id: get_commit_message
+        run: |
+          if [[ -z "$COMMIT_MSG" ]]; then
+            COMMIT_MSG=$(git show -s --format=%s $REF)
+          fi
+          echo commit_message=$COMMIT_MSG | tee -a $GITHUB_OUTPUT
+        env:
+          COMMIT_MSG: ${{ github.event.head_commit.message }}
+          REF: ${{ github.event.pull_request.head.sha }}
+
+  build-sdist:
+    name: Build sdist
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - name: Build sdist
+        run: pipx run build -s
+      - uses: actions/upload-artifact@v3
+        with:
+          name: sdist
+          path: ./dist/*.tar.gz
+
+  build-wheel:
+    name: Build wheel for ${{ matrix.python }}-${{ matrix.buildplat[1] }}
+    needs: [job_metadata]
+    runs-on: ${{ matrix.buildplat[0] }}
+    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') || contains(needs.job_metadata.outputs.commit_message, '[build wheels]')
+    strategy:
+      fail-fast: false
+      matrix:
+        buildplat:
+          - [ubuntu-20.04, musllinux_x86_64]
+          - [macos-12, macosx_*]
+          - [windows-2019, win_amd64]
+        python: ["cp37", "cp38", "cp39", "cp310", "cp311"]
+        include:
+          # Manylinux builds are cheap, do all in one
+          - { buildplat: ["ubuntu-20.04", "manylinux_x86_64"], python: "*" }
+
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Build wheel(s)
+        run: pipx run cibuildwheel
+        env:
+          CIBW_BUILD: ${{ matrix.python }}-${{ matrix.buildplat[1] }}
+
+      - uses: actions/upload-artifact@v3
+        with:
+          name: ${{ matrix.python == '*' && 'all' || matrix.python }}-${{ startsWith(matrix.buildplat[1], 'macosx') && 'macosx' || matrix.buildplat[1] }}
+          path: ./wheelhouse/*.whl
+
+  test-sdist:
+    name: Test sdist
+    needs: [build-sdist]
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/download-artifact@v3
+        with:
+          name: sdist
+          path: ./dist
+      - uses: actions/setup-python@v4
+        with:
+          python-version: "3.11"
+      - name: Display Python version
+        run: python -c "import sys; print(sys.version)"
+      - name: Install sdist
+        run: pip install dist/*.tar.gz
+      - run: python -c 'import nitime; print(nitime.__version__)'
+      - name: Install pytest
+        run: pip install pytest
+      - name: Run tests
+        run: pytest -v --pyargs nitime
+
+  pre-publish:
+    runs-on: ubuntu-latest
+    needs: [test-sdist, build-wheel]
+    steps:
+      - uses: actions/download-artifact@v3
+        with:
+          path: dist/
+      - name: Check artifacts
+        run: ls -lR
+      - name: Consolidate and re-check
+        run: |
+          mv dist/*/*.{tar.gz,whl} dist
+          rmdir dist/*/
+          ls -lR
+      - run: pipx run twine check dist/*
+
+  publish:
+    runs-on: ubuntu-latest
+    environment: "Package deployment"
+    needs: [pre-publish]
+    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
+    steps:
+      - uses: actions/download-artifact@v3
+        with:
+          path: dist/
+      - name: Consolidate artifacts
+        run: |
+          mv dist/*/*.{tar.gz,whl} dist
+          rmdir dist/*/
+      - uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          user: __token__
+          password: ${{ secrets.PYPI_API_TOKEN }}


=====================================
debian/changelog
=====================================
@@ -1,9 +1,20 @@
-nitime (0.9-6) UNRELEASED; urgency=medium
+nitime (0.10.1-1) UNRELEASED; urgency=medium
+
+ WARNING: a test is failing, pending resolution of github issue[1].
+ [1]: https://github.com/nipy/nitime/issues/207
 
   * Team upload.
+
+  [ Andreas Tille ]
   * Drop debian/blends which is unused
 
- -- Andreas Tille <tille at debian.org>  Sat, 21 Jan 2023 09:17:51 +0100
+  [ Étienne Mollier ]
+  * New upstream version 0.10.1  (Closes: #1042247)
+  * do-not-set-lowerbound-zero-in-iir.patch: delete: fixed upstream.
+  * fix-psd-test.patch: delete: fixed upstream.
+  * numpy_1.24.patch: remove: applied upstream.
+
+ -- Étienne Mollier <emollier at debian.org>  Tue, 22 Aug 2023 09:58:31 +0200
 
 nitime (0.9-5) unstable; urgency=medium
 


=====================================
debian/patches/do-not-set-lowerbound-zero-in-iir.patch deleted
=====================================
@@ -1,15 +0,0 @@
-Description: While doing iir filter, lower bound for ws can be set to 0 which scipy no longer seems to allow
- Hence set it to a very small non-zero val.
-Author: Nilesh Patra <nilesh at debian.org>
-Last-Update: 2022-07-11
---- a/nitime/analysis/spectral.py
-+++ b/nitime/analysis/spectral.py
-@@ -418,7 +418,7 @@
- 
-             wp = [lb_frac, ub_frac]
- 
--            ws = [np.max([lb_frac - 0.1, 0]),
-+            ws = [np.max([lb_frac - 0.1, 1e-10]),
-                   np.min([ub_frac + 0.1, 1.0])]
- 
-         # For the lowpass:


=====================================
debian/patches/fix-psd-test.patch deleted
=====================================
@@ -1,26 +0,0 @@
-Description: noverlap param in psd function should be an integer, cast it explicitly
- Also, look in axes_grid1 instead of axes_grid in mpl_toolkits
-Author: Nilesh Patra <nilesh at debian.org>
-Last-Update: 2023-01-02
---- a/nitime/tests/test_algorithms.py
-+++ b/nitime/tests/test_algorithms.py
-@@ -149,7 +149,7 @@
- 
-     NFFT = 256
-     Fs = 1.0
--    noverlap = NFFT / 2
-+    noverlap = int(NFFT / 2)
- 
-     fxx, f = mlab.psd(ts0, NFFT=NFFT, Fs=Fs, noverlap=noverlap,
-                       scale_by_freq=True)
---- a/doc/conf.py
-+++ b/doc/conf.py
-@@ -23,7 +23,7 @@
- # is a string that should be a valid (possibly dotted) package name, and the
- # second a list (possibly empty) of names to import from that package.
- doc_deps = [['networkx', []],
--            ['mpl_toolkits.axes_grid',  ['make_axes_locatable']],
-+            ['mpl_toolkits.axes_grid1',  ['make_axes_locatable']],
-             ]
- 
- # Analyze the dependencies, and fail if  any is unmet, with a hopefully


=====================================
debian/patches/numpy_1.24.patch deleted
=====================================
@@ -1,71 +0,0 @@
-Description: Adapt to numpy 1.24
-Bug-Debian: https://bugs.debian.org/1029245
-Author: Andreas Tille <tille at debian.org>
-Last-Update: Fri, 20 Jan 2023 19:21:34 +0100
-
---- a/nitime/algorithms/cohere.py
-+++ b/nitime/algorithms/cohere.py
-@@ -1005,7 +1005,7 @@ def cache_fft(time_series, ij, lb=0, ub=
-     FFT_conj_slices = {}
- 
-     for i_channel in all_channels:
--        Slices = np.zeros((n_slices, n_freqs), dtype=np.complex)
-+        Slices = np.zeros((n_slices, n_freqs), dtype=complex)
-         for iSlice in range(n_slices):
-             thisSlice = time_series[i_channel,
-                                     i_times[iSlice]:i_times[iSlice] + NFFT]
-@@ -1161,7 +1161,7 @@ def cache_to_relative_phase(cache, ij):
-     channels_i = max(1, max(ij_array[:, 0]) + 1)
-     channels_j = max(1, max(ij_array[:, 1]) + 1)
-     # Pre-allocate for speed:
--    Phi_xy = np.zeros((channels_i, channels_j, freqs), dtype=np.complex)
-+    Phi_xy = np.zeros((channels_i, channels_j, freqs), dtype=complex)
- 
-     # These checks take time, so do them up front, not in every iteration:
-     if list(FFT_slices.items())[0][1].shape[0] > 1:
-@@ -1221,7 +1221,7 @@ def cache_to_coherency(cache, ij):
- 
-     channels_i = max(1, max(ij_array[:, 0]) + 1)
-     channels_j = max(1, max(ij_array[:, 1]) + 1)
--    Cxy = np.zeros((channels_i, channels_j, freqs), dtype=np.complex)
-+    Cxy = np.zeros((channels_i, channels_j, freqs), dtype=complex)
- 
-     #These checks take time, so do them up front, not in every iteration:
-     if list(FFT_slices.items())[0][1].shape[0] > 1:
---- a/nitime/analysis/coherence.py
-+++ b/nitime/analysis/coherence.py
-@@ -653,10 +653,10 @@ class SeedCoherenceAnalyzer(object):
-         if len(self.seed.shape) > 1:
-             Cxy = np.empty((self.seed.data.shape[0],
-                             self.target.data.shape[0],
--                            self.frequencies.shape[0]), dtype=np.complex)
-+                            self.frequencies.shape[0]), dtype=complex)
-         else:
-             Cxy = np.empty((self.target.data.shape[0],
--                            self.frequencies.shape[0]), dtype=np.complex)
-+                            self.frequencies.shape[0]), dtype=complex)
- 
-         #Get the fft window cache for the target time-series:
-         cache = self.target_cache
---- a/nitime/tests/test_algorithms.py
-+++ b/nitime/tests/test_algorithms.py
-@@ -145,7 +145,7 @@ def test_psd_matlab():
-     ts = np.loadtxt(os.path.join(test_dir_path, 'tseries12.txt'))
- 
-     #Complex signal!
--    ts0 = ts[1] + ts[0] * np.complex(0, 1)
-+    ts0 = ts[1] + ts[0] * complex(0, 1)
- 
-     NFFT = 256
-     Fs = 1.0
---- a/nitime/analysis/correlation.py
-+++ b/nitime/analysis/correlation.py
-@@ -147,7 +147,7 @@ class SeedCorrelationAnalyzer(object):
- 
-             # Preallocate results
-             Cxy = np.empty((self.seed.data.shape[0],
--                            self.target.data.shape[0]), dtype=np.float)
-+                            self.target.data.shape[0]), dtype=float)
- 
-             for seed_idx, this_seed in enumerate(self.seed.data):
- 


=====================================
debian/patches/series
=====================================
@@ -1,7 +1,4 @@
 deb_no_sources_for_docs
 python3
 sphinx_ignore_github.patch
-do-not-set-lowerbound-zero-in-iir.patch
-fix-psd-test.patch
-numpy_1.24.patch
 numpydoc_1.24.patch


=====================================
doc/examples/multi_taper_coh.py
=====================================
@@ -152,7 +152,7 @@ Looping over the ROIs:
 """
 
 for i in range(nseq):
-    for j in range(i):
+    for j in range(i, nseq):
 
         """
 


=====================================
nitime/algorithms/cohere.py
=====================================
@@ -1005,7 +1005,7 @@ def cache_fft(time_series, ij, lb=0, ub=None,
     FFT_conj_slices = {}
 
     for i_channel in all_channels:
-        Slices = np.zeros((n_slices, n_freqs), dtype=np.complex)
+        Slices = np.zeros((n_slices, n_freqs), dtype=complex)
         for iSlice in range(n_slices):
             thisSlice = time_series[i_channel,
                                     i_times[iSlice]:i_times[iSlice] + NFFT]
@@ -1161,7 +1161,7 @@ def cache_to_relative_phase(cache, ij):
     channels_i = max(1, max(ij_array[:, 0]) + 1)
     channels_j = max(1, max(ij_array[:, 1]) + 1)
     # Pre-allocate for speed:
-    Phi_xy = np.zeros((channels_i, channels_j, freqs), dtype=np.complex)
+    Phi_xy = np.zeros((channels_i, channels_j, freqs), dtype=complex)
 
     # These checks take time, so do them up front, not in every iteration:
     if list(FFT_slices.items())[0][1].shape[0] > 1:
@@ -1221,7 +1221,7 @@ def cache_to_coherency(cache, ij):
 
     channels_i = max(1, max(ij_array[:, 0]) + 1)
     channels_j = max(1, max(ij_array[:, 1]) + 1)
-    Cxy = np.zeros((channels_i, channels_j, freqs), dtype=np.complex)
+    Cxy = np.zeros((channels_i, channels_j, freqs), dtype=complex)
 
     #These checks take time, so do them up front, not in every iteration:
     if list(FFT_slices.items())[0][1].shape[0] > 1:


=====================================
nitime/algorithms/spectral.py
=====================================
@@ -249,7 +249,8 @@ def periodogram(s, Fs=2 * np.pi, Sk=None, N=None,
         Fl = (N + 1) // 2
         pshape[-1] = Fn
         P = np.zeros(pshape, 'd')
-        freqs = np.linspace(0, Fs // 2, Fn)
+        #freqs = np.linspace(0, Fs // 2, Fn)
+        freqs = np.fft.rfftfreq(N) * Fs
         P[..., 0] = (Sk[..., 0] * Sk[..., 0].conj()).real
         P[..., 1:Fl] = 2 * (Sk[..., 1:Fl] * Sk[..., 1:Fl].conj()).real
         if Fn > Fl:


=====================================
nitime/analysis/coherence.py
=====================================
@@ -304,7 +304,7 @@ class MTCoherenceAnalyzer(BaseAnalyzer):
     @desc.setattr_on_read
     def df(self):
         # The degrees of freedom:
-        return 2 * self.NW - 1
+        return int(2 * self.NW - 1)
 
     @desc.setattr_on_read
     def spectra(self):
@@ -328,9 +328,9 @@ class MTCoherenceAnalyzer(BaseAnalyzer):
         else:
             wshape = [1] * len(self.spectra.shape)
             wshape[0] = channel_n
-            wshape[-2] = int(self.df)
+            wshape[-2] = self.df
             pre_w = np.sqrt(self.eigs) + np.zeros((wshape[0],
-                                                    self.eigs.shape[0]))
+                                                   self.eigs.shape[0]))
 
             w = pre_w.reshape(*wshape)
 
@@ -351,7 +351,7 @@ class MTCoherenceAnalyzer(BaseAnalyzer):
                                              self.weights[i],
                                              sides='onesided')
                 syy = tsa.mtm_cross_spectrum(self.spectra[j], self.spectra[j],
-                                             self.weights[i],
+                                             self.weights[j],
                                              sides='onesided')
                 psd_mat[0, i, j] = sxx
                 psd_mat[1, i, j] = syy
@@ -653,10 +653,10 @@ class SeedCoherenceAnalyzer(object):
         if len(self.seed.shape) > 1:
             Cxy = np.empty((self.seed.data.shape[0],
                             self.target.data.shape[0],
-                            self.frequencies.shape[0]), dtype=np.complex)
+                            self.frequencies.shape[0]), dtype=complex)
         else:
             Cxy = np.empty((self.target.data.shape[0],
-                            self.frequencies.shape[0]), dtype=np.complex)
+                            self.frequencies.shape[0]), dtype=complex)
 
         #Get the fft window cache for the target time-series:
         cache = self.target_cache


=====================================
nitime/analysis/correlation.py
=====================================
@@ -147,7 +147,7 @@ class SeedCorrelationAnalyzer(object):
 
             # Preallocate results
             Cxy = np.empty((self.seed.data.shape[0],
-                            self.target.data.shape[0]), dtype=np.float)
+                            self.target.data.shape[0]), dtype=float)
 
             for seed_idx, this_seed in enumerate(self.seed.data):
 


=====================================
nitime/analysis/spectral.py
=====================================
@@ -418,8 +418,8 @@ class FilterAnalyzer(desc.ResetMixin):
 
             wp = [lb_frac, ub_frac]
 
-            ws = [np.max([lb_frac - 0.1, 0]),
-                  np.min([ub_frac + 0.1, 1.0])]
+            ws = [np.max([lb_frac - 0.1, 0.001]),
+                  np.min([ub_frac + 0.1, 0.999])]
 
         # For the lowpass:
         elif lb_frac == 0:


=====================================
nitime/fmri/io.py
=====================================
@@ -88,7 +88,7 @@ def time_series_from_file(nifti_files, coords=None, TR=None, normalize=None,
         if verbose:
             print("Reading %s" % nifti_files)
         im = load(nifti_files)
-        data = im.get_data()
+        data = im.get_fdata()
         # If coordinates are provided as input, read data only from these coordinates:
         if coords is not None:
             #If the input is the coords of several ROIs
@@ -118,7 +118,7 @@ def time_series_from_file(nifti_files, coords=None, TR=None, normalize=None,
             if verbose:
                 print("Reading %s" % f)
             im = load(f)
-            data = im.get_data()
+            data = im.get_fdata()
             if coords is not None:
                 #If the input is the coords of several ROIs
                 if isinstance(coords, tuple) or isinstance(coords, list):


=====================================
nitime/fmri/tests/test_io.py
=====================================
@@ -65,7 +65,7 @@ def test_time_series_from_file():
     npt.assert_equal(t4.sampling_interval,nitime.TimeArray(1.35))
 
     # Test the default behavior:
-    data = io.load(fmri_file1).get_data()
+    data = io.load(fmri_file1).get_fdata()
     t5 = ts_ff(fmri_file1)
     npt.assert_equal(t5.shape, data.shape)
     npt.assert_equal(t5.sampling_interval, ts.TimeArray(1, time_unit='s'))


=====================================
nitime/tests/test_algorithms.py
=====================================
@@ -145,11 +145,11 @@ def test_psd_matlab():
     ts = np.loadtxt(os.path.join(test_dir_path, 'tseries12.txt'))
 
     #Complex signal!
-    ts0 = ts[1] + ts[0] * np.complex(0, 1)
+    ts0 = ts[1] + ts[0] * complex(0, 1)
 
     NFFT = 256
     Fs = 1.0
-    noverlap = NFFT / 2
+    noverlap = NFFT // 2
 
     fxx, f = mlab.psd(ts0, NFFT=NFFT, Fs=Fs, noverlap=noverlap,
                       scale_by_freq=True)


=====================================
nitime/tests/test_analysis.py
=====================================
@@ -314,3 +314,15 @@ def test_MorletWaveletAnalyzer():
     npt.assert_almost_equal(np.sin(HL.phase.data[10:-10]),
                             np.sin(WL.phase.data[10:-10]),
                             decimal=0)
+
+
+def test_MTCoherenceAnalyzer():
+    """
+    Based on gh-188
+    """
+    my_signal = np.random.randn(10, int(np.round(30.02*89)))
+    multitaper_bandwidth = 0.1  # [Hz]
+    TS = ts.TimeSeries(my_signal, sampling_rate=30.02)
+    # T.metadata['roi'] = vessel_names
+    C2 = nta.MTCoherenceAnalyzer(TS, bandwidth=multitaper_bandwidth)
+    npt.assert_equal(C2.coherence.shape, (10, 10, 1337))


=====================================
nitime/version.py
=====================================
@@ -2,9 +2,9 @@
 
 # Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z"
 _version_major = 0
-_version_minor = 9
-_version_micro = ''  # use '' for first of series, number for 1 and above
-#  _version_extra = 'dev'
+_version_minor = 10
+_version_micro = 1  # use '' for first of series, number for 1 and above
+# _version_extra = 'dev'
 _version_extra = ''  # Uncomment this for full releases
 
 # Construct full version string from these.
@@ -96,4 +96,4 @@ MICRO = _version_micro
 VERSION = __version__
 PACKAGE_DATA = {"nitime": ["LICENSE", "tests/*.txt", "tests/*.npy",
                            "data/*.nii.gz", "data/*.txt", "data/*.csv"]}
-PYTHON_REQUIRES = ">=3.5"
+PYTHON_REQUIRES = ">=3.7"


=====================================
nitime/viz.py
=====================================
@@ -30,7 +30,7 @@ if matplotlib.__version__[:3] == '1.3' or matplotlib.__version__[:3] == '1.4':
     import matplotlib.axis as ax
     ax.munits = mpl_units
 
-from nitime.utils import triu_indices
+from nitime.utils import tril_indices
 
 #Some visualization functions require networkx. Import that if possible:
 try:
@@ -272,10 +272,12 @@ def drawmatrix_channels(in_m, channel_names=None, fig=None, x_tick_rot=0,
     # data provided
     m = in_m.copy()
 
-    # Null the upper triangle, so that you don't get the redundant and the
+    # Null the **lower** triangle, so that you don't get the redundant and the
     # diagonal values:
-    idx_null = triu_indices(m.shape[0])
+    idx_null = tril_indices(m.shape[0])
     m[idx_null] = np.nan
+    # tranpose the upper triangle to lower
+    m = m.T
 
     # Extract the minimum and maximum values for scaling of the
     # colormap/colorbar:
@@ -678,7 +680,7 @@ def draw_graph(G,
 
     # Build a 'weighted degree' array obtained by adding the (absolute value)
     # of the weights for all edges pointing to each node:
-    amat = nx.adj_matrix(G).A  # get a normal array out of it
+    amat = nx.adjacency_matrix(G).todense()  # get a normal array out of it
     degarr = abs(amat).sum(0)  # weights are sums across rows
 
     # Map the degree to the 0-1 range so we can use it for sizing the nodes.


=====================================
pyproject.toml
=====================================
@@ -0,0 +1,20 @@
+[build-system]
+requires = [
+  "setuptools",
+  "cython",
+  # Newer than NEP29-minimum: compile against oldest numpy available
+  "numpy==1.24; python_version >= '3.11'",
+  "numpy==1.22; python_version >= '3.10' and python_version < '3.11'",
+  # NEP29-minimum as of Jan 31, 2023
+  "numpy==1.21; python_version >= '3.7' and python_version < '3.10'",
+]
+build-backend = "setuptools.build_meta"
+
+[tool.cibuildwheel]
+# Disable CPython 3.6 here; if project.requires-python gets defined,
+# cp36* can be removed
+skip = "pp* cp36*"
+
+# 64-bit builds only; 32-bit builds seem pretty niche these days, so
+# don't bother unless someone asks
+archs = ["auto64"]


=====================================
requirements-dev.txt
=====================================
@@ -3,4 +3,3 @@ pytest
 pytest-cov
 nibabel
 networkx
-flake8


=====================================
setup.py
=====================================
@@ -9,8 +9,7 @@ import sys
 if os.path.exists('MANIFEST'):
     os.remove('MANIFEST')
 
-from setuptools import find_packages
-from distutils.core import setup
+from setuptools import find_packages, setup
 
 # Get version and release info, which is all stored in nitime/version.py
 ver_file = os.path.join('nitime', 'version.py')
@@ -49,14 +48,13 @@ opts = dict(name=NAME,
             )
 
 try:
-    from distutils.extension import Extension
-    from Cython.Distutils import build_ext as build_pyx_ext
+    from setuptools import Extension
+    from Cython.Build import cythonize
     from numpy import get_include
     # add Cython extensions to the setup options
     exts = [Extension('nitime._utils', ['nitime/_utils.pyx'],
                       include_dirs=[get_include()])]
-    opts['cmdclass'] = dict(build_ext=build_pyx_ext)
-    opts['ext_modules'] = exts
+    opts['ext_modules'] = cythonize(exts, language_level='3')
 except ImportError:
     # no loop for you!
     pass



View it on GitLab: https://salsa.debian.org/med-team/nitime/-/compare/ec1527261742d26b342b276d7239524fab80eee5...5f81053387a3fbba3d1952359eef147f1ec8d8d7

-- 
View it on GitLab: https://salsa.debian.org/med-team/nitime/-/compare/ec1527261742d26b342b276d7239524fab80eee5...5f81053387a3fbba3d1952359eef147f1ec8d8d7
You're receiving this email because of your account on salsa.debian.org.


-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/debian-med-commit/attachments/20230822/ac3e8985/attachment-0001.htm>


More information about the debian-med-commit mailing list