[med-svn] [Git][med-team/nitime][upstream] New upstream version 0.10.1
Étienne Mollier (@emollier)
gitlab@salsa.debian.org
Tue Aug 22 09:00:59 BST 2023
Étienne Mollier pushed to branch upstream at Debian Med / nitime
Commits:
4eafb15f by Étienne Mollier at 2023-08-22T09:17:01+02:00
New upstream version 0.10.1
- - - - -
17 changed files:
- .github/workflows/test.yml
- + .github/workflows/wheels.yml
- doc/examples/multi_taper_coh.py
- nitime/algorithms/cohere.py
- nitime/algorithms/spectral.py
- nitime/analysis/coherence.py
- nitime/analysis/correlation.py
- nitime/analysis/spectral.py
- nitime/fmri/io.py
- nitime/fmri/tests/test_io.py
- nitime/tests/test_algorithms.py
- nitime/tests/test_analysis.py
- nitime/version.py
- nitime/viz.py
- + pyproject.toml
- requirements-dev.txt
- setup.py
Changes:
=====================================
.github/workflows/test.yml
=====================================
@@ -1,6 +1,19 @@
name: Test suite
-on: [push, pull_request]
+on:
+ push:
+ branches:
+ - master
+ tags:
+ - "*"
+ pull_request:
+ branches:
+ - master
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+
jobs:
build:
@@ -9,13 +22,13 @@ jobs:
strategy:
max-parallel: 4
matrix:
- python-version: [3.7, 3.8]
+ python-version: ['3.7', '3.8', '3.9', '3.10', '3.11']
steps:
- name: Checkout repo
- uses: actions/checkout@v1
+ uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v1
+ uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Install
@@ -26,7 +39,7 @@ jobs:
python -m pip install .
- name: Lint
run: |
- flake8 --ignore N802,N806,W504 --select W503 `find . -name \*.py | grep -v setup.py | grep -v version.py | grep -v __init__.py | grep -v /docs/`
+ pipx run flake8 --ignore N802,N806,W504 --select W503 nitime/ tools/
- name: Test
run: |
cd && mkdir for_test && cd for_test && pytest --pyargs nitime --cov-report term-missing --cov=AFQ
=====================================
.github/workflows/wheels.yml
=====================================
@@ -0,0 +1,134 @@
+name: Build
+
+on:
+ push:
+ branches:
+ - master
+ tags:
+ - "*"
+ pull_request:
+ branches:
+ - master
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+
+jobs:
+ job_metadata:
+ runs-on: ubuntu-latest
+ outputs:
+ commit_message: ${{ steps.get_commit_message.outputs.commit_message }}
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+ with:
+ fetch-depth: 2
+ - name: Print head git commit message
+ id: get_commit_message
+ run: |
+ if [[ -z "$COMMIT_MSG" ]]; then
+ COMMIT_MSG=$(git show -s --format=%s $REF)
+ fi
+ echo commit_message=$COMMIT_MSG | tee -a $GITHUB_OUTPUT
+ env:
+ COMMIT_MSG: ${{ github.event.head_commit.message }}
+ REF: ${{ github.event.pull_request.head.sha }}
+
+ build-sdist:
+ name: Build sdist
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - name: Build sdist
+ run: pipx run build -s
+ - uses: actions/upload-artifact@v3
+ with:
+ name: sdist
+ path: ./dist/*.tar.gz
+
+ build-wheel:
+ name: Build wheel for ${{ matrix.python }}-${{ matrix.buildplat[1] }}
+ needs: [job_metadata]
+ runs-on: ${{ matrix.buildplat[0] }}
+ if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') || contains(needs.job_metadata.outputs.commit_message, '[build wheels]')
+ strategy:
+ fail-fast: false
+ matrix:
+ buildplat:
+ - [ubuntu-20.04, musllinux_x86_64]
+ - [macos-12, macosx_*]
+ - [windows-2019, win_amd64]
+ python: ["cp37", "cp38", "cp39", "cp310", "cp311"]
+ include:
+ # Manylinux builds are cheap, do all in one
+ - { buildplat: ["ubuntu-20.04", "manylinux_x86_64"], python: "*" }
+
+ steps:
+ - uses: actions/checkout@v3
+
+ - name: Build wheel(s)
+ run: pipx run cibuildwheel
+ env:
+ CIBW_BUILD: ${{ matrix.python }}-${{ matrix.buildplat[1] }}
+
+ - uses: actions/upload-artifact@v3
+ with:
+ name: ${{ matrix.python == '*' && 'all' || matrix.python }}-${{ startsWith(matrix.buildplat[1], 'macosx') && 'macosx' || matrix.buildplat[1] }}
+ path: ./wheelhouse/*.whl
+
+ test-sdist:
+ name: Test sdist
+ needs: [build-sdist]
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/download-artifact@v3
+ with:
+ name: sdist
+ path: ./dist
+ - uses: actions/setup-python@v4
+ with:
+ python-version: "3.11"
+ - name: Display Python version
+ run: python -c "import sys; print(sys.version)"
+ - name: Install sdist
+ run: pip install dist/*.tar.gz
+ - run: python -c 'import nitime; print(nitime.__version__)'
+ - name: Install pytest
+ run: pip install pytest
+ - name: Run tests
+ run: pytest -v --pyargs nitime
+
+ pre-publish:
+ runs-on: ubuntu-latest
+ needs: [test-sdist, build-wheel]
+ steps:
+ - uses: actions/download-artifact@v3
+ with:
+ path: dist/
+ - name: Check artifacts
+ run: ls -lR
+ - name: Consolidate and re-check
+ run: |
+ mv dist/*/*.{tar.gz,whl} dist
+ rmdir dist/*/
+ ls -lR
+ - run: pipx run twine check dist/*
+
+ publish:
+ runs-on: ubuntu-latest
+ environment: "Package deployment"
+ needs: [pre-publish]
+ if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
+ steps:
+ - uses: actions/download-artifact@v3
+ with:
+ path: dist/
+ - name: Consolidate artifacts
+ run: |
+ mv dist/*/*.{tar.gz,whl} dist
+ rmdir dist/*/
+ - uses: pypa/gh-action-pypi-publish@release/v1
+ with:
+ user: __token__
+ password: ${{ secrets.PYPI_API_TOKEN }}
=====================================
doc/examples/multi_taper_coh.py
=====================================
@@ -152,7 +152,7 @@ Looping over the ROIs:
"""
for i in range(nseq):
- for j in range(i):
+ for j in range(i, nseq):
"""
=====================================
nitime/algorithms/cohere.py
=====================================
@@ -1005,7 +1005,7 @@ def cache_fft(time_series, ij, lb=0, ub=None,
FFT_conj_slices = {}
for i_channel in all_channels:
- Slices = np.zeros((n_slices, n_freqs), dtype=np.complex)
+ Slices = np.zeros((n_slices, n_freqs), dtype=complex)
for iSlice in range(n_slices):
thisSlice = time_series[i_channel,
i_times[iSlice]:i_times[iSlice] + NFFT]
@@ -1161,7 +1161,7 @@ def cache_to_relative_phase(cache, ij):
channels_i = max(1, max(ij_array[:, 0]) + 1)
channels_j = max(1, max(ij_array[:, 1]) + 1)
# Pre-allocate for speed:
- Phi_xy = np.zeros((channels_i, channels_j, freqs), dtype=np.complex)
+ Phi_xy = np.zeros((channels_i, channels_j, freqs), dtype=complex)
# These checks take time, so do them up front, not in every iteration:
if list(FFT_slices.items())[0][1].shape[0] > 1:
@@ -1221,7 +1221,7 @@ def cache_to_coherency(cache, ij):
channels_i = max(1, max(ij_array[:, 0]) + 1)
channels_j = max(1, max(ij_array[:, 1]) + 1)
- Cxy = np.zeros((channels_i, channels_j, freqs), dtype=np.complex)
+ Cxy = np.zeros((channels_i, channels_j, freqs), dtype=complex)
#These checks take time, so do them up front, not in every iteration:
if list(FFT_slices.items())[0][1].shape[0] > 1:
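
These dtype changes track NumPy's removal of its aliases for the Python builtins: np.complex (like np.float below) was deprecated in NumPy 1.20 and removed in 1.24, so dtype=np.complex now raises AttributeError. A minimal sketch of the equivalent preallocation, with hypothetical sizes:

    import numpy as np

    n_slices, n_freqs = 4, 129  # hypothetical sizes
    # dtype=complex resolves to np.complex128; np.complex was only ever
    # an alias for the Python builtin, not a distinct NumPy type.
    Slices = np.zeros((n_slices, n_freqs), dtype=complex)
    assert Slices.dtype == np.complex128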
=====================================
nitime/algorithms/spectral.py
=====================================
@@ -249,7 +249,8 @@ def periodogram(s, Fs=2 * np.pi, Sk=None, N=None,
Fl = (N + 1) // 2
pshape[-1] = Fn
P = np.zeros(pshape, 'd')
- freqs = np.linspace(0, Fs // 2, Fn)
+ #freqs = np.linspace(0, Fs // 2, Fn)
+ freqs = np.fft.rfftfreq(N) * Fs
P[..., 0] = (Sk[..., 0] * Sk[..., 0].conj()).real
P[..., 1:Fl] = 2 * (Sk[..., 1:Fl] * Sk[..., 1:Fl].conj()).real
if Fn > Fl:
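
The old grid had two problems: Fs // 2 floor-divides, so with the default Fs = 2 * np.pi the top frequency came out as 3.0 rather than pi, and np.linspace then pins the last bin to that wrong Nyquist value. np.fft.rfftfreq(N) returns the one-sided FFT bin centers in cycles per sample, so scaling by Fs gives the correct physical grid. A quick check, assuming N = 8:

    import numpy as np

    N, Fs = 8, 2 * np.pi
    Fn = N // 2 + 1
    old = np.linspace(0, Fs // 2, Fn)   # Fs // 2 == 3.0: truncated Nyquist
    new = np.fft.rfftfreq(N) * Fs       # 0 ... pi in steps of Fs / N
    print(old[-1], new[-1])             # 3.0 vs. 3.141592653589793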
=====================================
nitime/analysis/coherence.py
=====================================
@@ -304,7 +304,7 @@ class MTCoherenceAnalyzer(BaseAnalyzer):
@desc.setattr_on_read
def df(self):
# The degrees of freedom:
- return 2 * self.NW - 1
+ return int(2 * self.NW - 1)
@desc.setattr_on_read
def spectra(self):
@@ -328,9 +328,9 @@ class MTCoherenceAnalyzer(BaseAnalyzer):
else:
wshape = [1] * len(self.spectra.shape)
wshape[0] = channel_n
- wshape[-2] = int(self.df)
+ wshape[-2] = self.df
pre_w = np.sqrt(self.eigs) + np.zeros((wshape[0],
- self.eigs.shape[0]))
+ self.eigs.shape[0]))
w = pre_w.reshape(*wshape)
@@ -351,7 +351,7 @@ class MTCoherenceAnalyzer(BaseAnalyzer):
self.weights[i],
sides='onesided')
syy = tsa.mtm_cross_spectrum(self.spectra[j], self.spectra[j],
- self.weights[i],
+ self.weights[j],
sides='onesided')
psd_mat[0, i, j] = sxx
psd_mat[1, i, j] = syy
@@ -653,10 +653,10 @@ class SeedCoherenceAnalyzer(object):
if len(self.seed.shape) > 1:
Cxy = np.empty((self.seed.data.shape[0],
self.target.data.shape[0],
- self.frequencies.shape[0]), dtype=np.complex)
+ self.frequencies.shape[0]), dtype=complex)
else:
Cxy = np.empty((self.target.data.shape[0],
- self.frequencies.shape[0]), dtype=np.complex)
+ self.frequencies.shape[0]), dtype=complex)
#Get the fft window cache for the target time-series:
cache = self.target_cache
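
Two separate fixes here: df now returns an int, since 2 * NW - 1 is used directly as an array dimension above, and syy is now weighted by self.weights[j] (the j channel's own taper weights) rather than self.weights[i]. The 2 * NW - 1 rule is the usual multitaper choice: for time-bandwidth product NW, roughly the first 2NW - 1 Slepian tapers have concentration ratios near 1. A sketch using SciPy's DPSS implementation for illustration (nitime ships its own dpss_windows):

    import numpy as np
    from scipy.signal.windows import dpss

    NW = 4
    K = int(2 * NW - 1)  # number of well-concentrated tapers
    tapers, eigs = dpss(512, NW, Kmax=K, return_ratios=True)
    print(tapers.shape)  # (7, 512)
    print(eigs)          # concentration ratios, all close to 1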
=====================================
nitime/analysis/correlation.py
=====================================
@@ -147,7 +147,7 @@ class SeedCorrelationAnalyzer(object):
# Preallocate results
Cxy = np.empty((self.seed.data.shape[0],
- self.target.data.shape[0]), dtype=np.float)
+ self.target.data.shape[0]), dtype=float)
for seed_idx, this_seed in enumerate(self.seed.data):
=====================================
nitime/analysis/spectral.py
=====================================
@@ -418,8 +418,8 @@ class FilterAnalyzer(desc.ResetMixin):
wp = [lb_frac, ub_frac]
- ws = [np.max([lb_frac - 0.1, 0]),
- np.min([ub_frac + 0.1, 1.0])]
+ ws = [np.max([lb_frac - 0.1, 0.001]),
+ np.min([ub_frac + 0.1, 0.999])]
# For the lowpass:
elif lb_frac == 0:
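
The stopband edges ws are fractions of the Nyquist frequency and, for IIR design (the wp/ws pair here follows scipy.signal.iirdesign's convention), must lie strictly inside (0, 1); clamping to exactly 0.0 or 1.0 produced degenerate edges. A minimal sketch with hypothetical band fractions:

    from scipy import signal

    lb_frac, ub_frac = 0.05, 0.4  # hypothetical passband edges
    wp = [lb_frac, ub_frac]
    ws = [max(lb_frac - 0.1, 0.001), min(ub_frac + 0.1, 0.999)]
    b, a = signal.iirdesign(wp, ws, gpass=1, gstop=60)  # ws strictly in (0, 1)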
=====================================
nitime/fmri/io.py
=====================================
@@ -88,7 +88,7 @@ def time_series_from_file(nifti_files, coords=None, TR=None, normalize=None,
if verbose:
print("Reading %s" % nifti_files)
im = load(nifti_files)
- data = im.get_data()
+ data = im.get_fdata()
# If coordinates are provided as input, read data only from these coordinates:
if coords is not None:
#If the input is the coords of several ROIs
@@ -118,7 +118,7 @@ def time_series_from_file(nifti_files, coords=None, TR=None, normalize=None,
if verbose:
print("Reading %s" % f)
im = load(f)
- data = im.get_data()
+ data = im.get_fdata()
if coords is not None:
#If the input is the coords of several ROIs
if isinstance(coords, tuple) or isinstance(coords, list):
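
img.get_data() was deprecated throughout the nibabel 2.x/3.x series and errors out as of nibabel 4.0; get_fdata() is the replacement and always returns floating-point data (float64 by default) with any on-disk scaling applied. A minimal sketch, with a hypothetical file path:

    import numpy as np
    import nibabel as nib

    img = nib.load("bold.nii.gz")  # hypothetical 4D fMRI file
    data = img.get_fdata()         # scaled, floating-point voxel data
    assert data.dtype == np.float64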
=====================================
nitime/fmri/tests/test_io.py
=====================================
@@ -65,7 +65,7 @@ def test_time_series_from_file():
npt.assert_equal(t4.sampling_interval,nitime.TimeArray(1.35))
# Test the default behavior:
- data = io.load(fmri_file1).get_data()
+ data = io.load(fmri_file1).get_fdata()
t5 = ts_ff(fmri_file1)
npt.assert_equal(t5.shape, data.shape)
npt.assert_equal(t5.sampling_interval, ts.TimeArray(1, time_unit='s'))
=====================================
nitime/tests/test_algorithms.py
=====================================
@@ -145,11 +145,11 @@ def test_psd_matlab():
ts = np.loadtxt(os.path.join(test_dir_path, 'tseries12.txt'))
#Complex signal!
- ts0 = ts[1] + ts[0] * np.complex(0, 1)
+ ts0 = ts[1] + ts[0] * complex(0, 1)
NFFT = 256
Fs = 1.0
- noverlap = NFFT / 2
+ noverlap = NFFT // 2
fxx, f = mlab.psd(ts0, NFFT=NFFT, Fs=Fs, noverlap=noverlap,
scale_by_freq=True)
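
Both changes here are straight Python 3 / NumPy modernizations: np.complex(0, 1) used the removed alias (the builtin complex(0, 1), i.e. 1j, is equivalent), and NFFT / 2 is a float under true division, while mlab.psd expects an integer noverlap. A one-liner check:

    NFFT = 256
    print(NFFT / 2, NFFT // 2, complex(0, 1))  # 128.0 128 1j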
=====================================
nitime/tests/test_analysis.py
=====================================
@@ -314,3 +314,15 @@ def test_MorletWaveletAnalyzer():
npt.assert_almost_equal(np.sin(HL.phase.data[10:-10]),
np.sin(WL.phase.data[10:-10]),
decimal=0)
+
+
+def test_MTCoherenceAnalyzer():
+ """
+ Based on gh-188
+ """
+ my_signal = np.random.randn(10, int(np.round(30.02*89)))
+ multitaper_bandwidth = 0.1 # [Hz]
+ TS = ts.TimeSeries(my_signal, sampling_rate=30.02)
+ # T.metadata['roi'] = vessel_names
+ C2 = nta.MTCoherenceAnalyzer(TS, bandwidth=multitaper_bandwidth)
+ npt.assert_equal(C2.coherence.shape, (10, 10, 1337))
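
The expected 1337 frequency bins fall out of the test's own numbers: the series has int(np.round(30.02 * 89)) = 2672 time points, and a one-sided spectrum of an N-point signal has N // 2 + 1 bins. A worked check:

    import numpy as np

    n_samples = int(np.round(30.02 * 89))  # 2672
    n_freqs = n_samples // 2 + 1           # 1337 one-sided frequency bins
    print(n_samples, n_freqs)              # 2672 1337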
=====================================
nitime/version.py
=====================================
@@ -2,9 +2,9 @@
# Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z"
_version_major = 0
-_version_minor = 9
-_version_micro = '' # use '' for first of series, number for 1 and above
-# _version_extra = 'dev'
+_version_minor = 10
+_version_micro = 1 # use '' for first of series, number for 1 and above
+# _version_extra = 'dev'
_version_extra = '' # Uncomment this for full releases
# Construct full version string from these.
@@ -96,4 +96,4 @@ MICRO = _version_micro
VERSION = __version__
PACKAGE_DATA = {"nitime": ["LICENSE", "tests/*.txt", "tests/*.npy",
"data/*.nii.gz", "data/*.txt", "data/*.csv"]}
-PYTHON_REQUIRES = ">=3.5"
+PYTHON_REQUIRES = ">=3.7"
=====================================
nitime/viz.py
=====================================
@@ -30,7 +30,7 @@ if matplotlib.__version__[:3] == '1.3' or matplotlib.__version__[:3] == '1.4':
import matplotlib.axis as ax
ax.munits = mpl_units
-from nitime.utils import triu_indices
+from nitime.utils import tril_indices
#Some visualization functions require networkx. Import that if possible:
try:
@@ -272,10 +272,12 @@ def drawmatrix_channels(in_m, channel_names=None, fig=None, x_tick_rot=0,
# data provided
m = in_m.copy()
- # Null the upper triangle, so that you don't get the redundant and the
+ # Null the **lower** triangle, so that you don't get the redundant and the
# diagonal values:
- idx_null = triu_indices(m.shape[0])
+ idx_null = tril_indices(m.shape[0])
m[idx_null] = np.nan
+ # transpose the upper triangle to lower
+ m = m.T
# Extract the minimum and maximum values for scaling of the
# colormap/colorbar:
@@ -678,7 +680,7 @@ def draw_graph(G,
# Build a 'weighted degree' array obtained by adding the (absolute value)
# of the weights for all edges pointing to each node:
- amat = nx.adj_matrix(G).A # get a normal array out of it
+ amat = nx.adjacency_matrix(G).todense() # get a normal array out of it
degarr = abs(amat).sum(0) # weights are sums across rows
# Map the degree to the 0-1 range so we can use it for sizing the nodes.
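
Two repairs in viz.py: drawmatrix_channels now masks the lower triangle and transposes, so the upper triangle of the input (the one the fixed example above fills) is what gets displayed, and nx.adj_matrix, removed in networkx 3.0, becomes nx.adjacency_matrix, which returns a SciPy sparse matrix (hence .todense() instead of .A). A minimal sketch of the weighted-degree computation on a toy graph:

    import networkx as nx

    G = nx.Graph()
    G.add_edge(0, 1, weight=0.5)
    G.add_edge(1, 2, weight=-2.0)
    amat = nx.adjacency_matrix(G).todense()  # dense adjacency, signed weights
    degarr = abs(amat).sum(0)                # weighted degree per node
    print(degarr)                            # [0.5 2.5 2. ]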
=====================================
pyproject.toml
=====================================
@@ -0,0 +1,20 @@
+[build-system]
+requires = [
+ "setuptools",
+ "cython",
+ # Newer than NEP29-minimum: compile against oldest numpy available
+ "numpy==1.24; python_version >= '3.11'",
+ "numpy==1.22; python_version >= '3.10' and python_version < '3.11'",
+ # NEP29-minimum as of Jan 31, 2023
+ "numpy==1.21; python_version >= '3.7' and python_version < '3.10'",
+]
+build-backend = "setuptools.build_meta"
+
+[tool.cibuildwheel]
+# Disable CPython 3.6 here; if project.requires-python gets defined,
+# cp36* can be removed
+skip = "pp* cp36*"
+
+# 64-bit builds only; 32-bit builds seem pretty niche these days, so
+# don't bother unless someone asks
+archs = ["auto64"]
=====================================
requirements-dev.txt
=====================================
@@ -3,4 +3,3 @@ pytest
pytest-cov
nibabel
networkx
-flake8
=====================================
setup.py
=====================================
@@ -9,8 +9,7 @@ import sys
if os.path.exists('MANIFEST'):
os.remove('MANIFEST')
-from setuptools import find_packages
-from distutils.core import setup
+from setuptools import find_packages, setup
# Get version and release info, which is all stored in nitime/version.py
ver_file = os.path.join('nitime', 'version.py')
@@ -49,14 +48,13 @@ opts = dict(name=NAME,
)
try:
- from distutils.extension import Extension
- from Cython.Distutils import build_ext as build_pyx_ext
+ from setuptools import Extension
+ from Cython.Build import cythonize
from numpy import get_include
# add Cython extensions to the setup options
exts = [Extension('nitime._utils', ['nitime/_utils.pyx'],
include_dirs=[get_include()])]
- opts['cmdclass'] = dict(build_ext=build_pyx_ext)
- opts['ext_modules'] = exts
+ opts['ext_modules'] = cythonize(exts, language_level='3')
except ImportError:
# no loop for you!
pass
View it on GitLab: https://salsa.debian.org/med-team/nitime/-/commit/4eafb15fea887b16c766bd0a7f086d487081d780