[Git][debian-gis-team/netcdf4-python][master] 6 commits: New upstream version 1.7.3
Bas Couwenberg (@sebastic)
gitlab@salsa.debian.org
Tue Oct 14 04:48:28 BST 2025
Bas Couwenberg pushed to branch master at Debian GIS Project / netcdf4-python
Commits:
0b058d7c by Bas Couwenberg at 2025-10-14T05:36:57+02:00
New upstream version 1.7.3
- - - - -
12e426b3 by Bas Couwenberg at 2025-10-14T05:36:59+02:00
Update upstream source from tag 'upstream/1.7.3'
Update to upstream version '1.7.3'
with Debian dir 139b694b66ab310dc67a74ab23df6c6531ab7f3f
- - - - -
4008c90d by Bas Couwenberg at 2025-10-14T05:37:19+02:00
New upstream release.
- - - - -
abd2d64f by Bas Couwenberg at 2025-10-14T05:40:26+02:00
Update Upstream-Contact email address.
- - - - -
e3525c0c by Bas Couwenberg at 2025-10-14T05:42:42+02:00
Refresh patches.
- - - - -
0af5ef74 by Bas Couwenberg at 2025-10-14T05:43:56+02:00
Set distribution to unstable.
- - - - -
22 changed files:
- .github/workflows/build_latest.yml
- .github/workflows/build_master.yml
- .github/workflows/build_old.yml
- .github/workflows/cibuildwheel.yml
- .github/workflows/miniconda.yml
- Changelog
- MANIFEST.in
- README.md
- debian/changelog
- debian/copyright
- debian/patches/rpath.patch
- docs/index.html
- include/membuf.pyx
- pyproject.toml
- setup.py
- src/netCDF4/__init__.pyi
- src/netCDF4/_netCDF4.pyx
- src/netCDF4/utils.py
- test/test_cdl.py
- + test/test_no_iter_contains.py
- test/test_open_mem.py
- test/test_vlen.py
Changes:
=====================================
.github/workflows/build_latest.yml
=====================================
@@ -5,30 +5,32 @@ jobs:
name: Python (${{ matrix.python-version }})
runs-on: ubuntu-latest
env:
- PNETCDF_VERSION: 1.12.1
- NETCDF_VERSION: 4.9.2
+ PNETCDF_VERSION: 1.14.1
+ NETCDF_VERSION: 4.9.3
NETCDF_DIR: ${{ github.workspace }}/..
NETCDF_EXTRA_CONFIG: --enable-pnetcdf
- CC: mpicc.mpich
+ #CC: mpicc.mpich
+ CC: mpicc
#NO_NET: 1
strategy:
matrix:
- python-version: ["3.12"]
+ python-version: ["3.14"]
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
submodules: true
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
- name: Install Ubuntu Dependencies
run: |
sudo apt-get update
- sudo apt-get install mpich libmpich-dev libhdf5-mpich-dev libcurl4-openssl-dev bzip2 libsnappy-dev libblosc-dev libzstd-dev
+ #sudo apt-get install mpich libmpich-dev libhdf5-mpich-dev openmpi-bin openmpi-common libopenmpi-dev libhdf5-openmpi-dev libcurl4-openssl-dev bzip2 libsnappy-dev libblosc-dev libzstd-dev
+ sudo apt-get install openmpi-common libopenmpi-dev openmpi-bin libhdf5-openmpi-dev libcurl4-openssl-dev bzip2 libsnappy-dev libblosc-dev libzstd-dev
echo "Download and build PnetCDF version ${PNETCDF_VERSION}"
wget https://parallel-netcdf.github.io/Release/pnetcdf-${PNETCDF_VERSION}.tar.gz
tar -xzf pnetcdf-${PNETCDF_VERSION}.tar.gz
@@ -41,9 +43,12 @@ jobs:
wget https://downloads.unidata.ucar.edu/netcdf-c/${NETCDF_VERSION}/netcdf-c-${NETCDF_VERSION}.tar.gz
tar -xzf netcdf-c-${NETCDF_VERSION}.tar.gz
pushd netcdf-c-${NETCDF_VERSION}
- export CPPFLAGS="-I/usr/include/hdf5/mpich -I${NETCDF_DIR}/include"
+ #export CPPFLAGS="-I/usr/include/hdf5/mpich -I${NETCDF_DIR}/include"
+ export CPPFLAGS="-I/usr/include/hdf5/openmpi -I${NETCDF_DIR}/include"
export LDFLAGS="-L${NETCDF_DIR}/lib"
- export LIBS="-lhdf5_mpich_hl -lhdf5_mpich -lm -lz"
+ #export LIBS="-lhdf5_mpich_hl -lhdf5_mpich -lm -lz"
+ export LIBS="-lhdf5_openmpi_hl -lhdf5_openmpi -lm -lz"
+ which $CC
./configure --prefix $NETCDF_DIR --enable-netcdf-4 --enable-shared --enable-dap --enable-parallel4 $NETCDF_EXTRA_CONFIG
make -j 2
sudo make install
@@ -58,13 +63,14 @@ jobs:
- name: Install python dependencies via pip
run: |
python -m pip install --upgrade pip
- pip install numpy cython cftime pytest twine wheel check-manifest mpi4py typing-extensions
+ python -m pip install numpy cython cftime pytest twine wheel check-manifest mpi4py typing-extensions
- name: Install netcdf4-python
run: |
export PATH=${NETCDF_DIR}/bin:${PATH}
export NETCDF_PLUGIN_DIR=${{ github.workspace }}/netcdf-c-${NETCDF_VERSION}/plugins/plugindir
- python setup.py install
+ python -m pip install . --no-build-isolation
+
- name: Test
run: |
export PATH=${NETCDF_DIR}/bin:${PATH}
@@ -74,21 +80,24 @@ jobs:
python run_all.py
# parallel (hdf5 for netcdf4, pnetcdf for netcdf3)
cd ../examples
- mpirun.mpich -np 4 python mpi_example.py
+ #mpirun.mpich -np 4 python mpi_example.py
+ mpirun -np 4 --oversubscribe python mpi_example.py
if [ $? -ne 0 ] ; then
echo "hdf5 mpi test failed!"
exit 1
else
echo "hdf5 mpi test passed!"
fi
- mpirun.mpich -np 4 python mpi_example_compressed.py
+ #mpirun.mpich -np 4 python mpi_example_compressed.py
+ mpirun -np 4 --oversubscribe python mpi_example_compressed.py
if [ $? -ne 0 ] ; then
echo "hdf5 compressed mpi test failed!"
exit 1
else
echo "hdf5 compressed mpi test passed!"
fi
- mpirun.mpich -np 4 python mpi_example.py NETCDF3_64BIT_DATA
+ #mpirun.mpich -np 4 python mpi_example.py NETCDF3_64BIT_DATA
+ mpirun -np 4 --oversubscribe python mpi_example.py NETCDF3_64BIT_DATA
if [ $? -ne 0 ] ; then
echo "pnetcdf mpi test failed!"
exit 1
=====================================
.github/workflows/build_master.yml
=====================================
@@ -6,32 +6,36 @@ jobs:
runs-on: ubuntu-latest
env:
NETCDF_DIR: ${{ github.workspace }}/..
- CC: mpicc.mpich
+ #CC: mpicc.mpich
+ CC: mpicc
#NO_NET: 1
strategy:
matrix:
- python-version: ["3.12"]
+ python-version: ["3.14"]
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
submodules: true
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
- name: Install Ubuntu Dependencies
run: |
sudo apt-get update
- sudo apt-get install mpich libmpich-dev libhdf5-mpich-dev libcurl4-openssl-dev bzip2 libsnappy-dev libblosc-dev libzstd-dev
+ #sudo apt-get install mpich libmpich-dev libhdf5-mpich-dev libcurl4-openssl-dev bzip2 libsnappy-dev libblosc-dev libzstd-dev
+ sudo apt-get install openmpi-common libopenmpi-dev openmpi-bin libhdf5-openmpi-dev libcurl4-openssl-dev bzip2 libsnappy-dev libblosc-dev libzstd-dev
echo "Download and build netCDF github master"
git clone https://github.com/Unidata/netcdf-c
pushd netcdf-c
- export CPPFLAGS="-I/usr/include/hdf5/mpich -I${NETCDF_DIR}/include"
+ #export CPPFLAGS="-I/usr/include/hdf5/mpich -I${NETCDF_DIR}/include"
+ export CPPFLAGS="-I/usr/include/hdf5/openmpi -I${NETCDF_DIR}/include"
export LDFLAGS="-L${NETCDF_DIR}/lib"
- export LIBS="-lhdf5_mpich_hl -lhdf5_mpich -lm -lz"
+ #export LIBS="-lhdf5_mpich_hl -lhdf5_mpich -lm -lz"
+ export LIBS="-lhdf5_openmpi_hl -lhdf5_openmpi -lm -lz"
autoreconf -i
./configure --prefix $NETCDF_DIR --enable-netcdf-4 --enable-shared --enable-dap --enable-parallel4
make -j 2
@@ -47,13 +51,13 @@ jobs:
- name: Install python dependencies via pip
run: |
python -m pip install --upgrade pip
- pip install numpy cython cftime pytest twine wheel check-manifest mpi4py mypy types-setuptools typing-extensions
+ python -m pip install numpy cython cftime pytest twine wheel check-manifest mpi4py mypy types-setuptools typing-extensions
- name: Install netcdf4-python
run: |
export PATH=${NETCDF_DIR}/bin:${PATH}
export NETCDF_PLUGIN_DIR=${{ github.workspace }}/netcdf-c/plugins/plugindir
- python setup.py install
+ python -m pip install . --no-build-isolation
- name: Test
run: |
@@ -65,14 +69,16 @@ jobs:
python run_all.py
# parallel
cd ../examples
- mpirun.mpich -np 4 python mpi_example.py
+ #mpirun.mpich -np 4 python mpi_example.py
+ mpirun -np 4 --oversubscribe python mpi_example.py
if [ $? -ne 0 ] ; then
echo "hdf5 mpi test failed!"
exit 1
else
echo "hdf5 mpi test passed!"
fi
- mpirun.mpich -np 4 python mpi_example_compressed.py
+ #mpirun.mpich -np 4 python mpi_example_compressed.py
+ mpirun -np 4 --oversubscribe python mpi_example_compressed.py
if [ $? -ne 0 ] ; then
echo "hdf5 compressed mpi test failed!"
exit 1
=====================================
.github/workflows/build_old.yml
=====================================
@@ -9,26 +9,28 @@ jobs:
NETCDF_VERSION: 4.7.4
NETCDF_DIR: ${{ github.workspace }}/..
NETCDF_EXTRA_CONFIG: --enable-pnetcdf
- CC: mpicc.mpich
+ #CC: mpicc.mpich
+ CC: mpicc
#NO_NET: 1
strategy:
matrix:
- python-version: ["3.12"]
+ python-version: ["3.14"]
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
submodules: true
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
- name: Install Ubuntu Dependencies
run: |
sudo apt-get update
- sudo apt-get install mpich libmpich-dev libhdf5-mpich-dev libcurl4-openssl-dev bzip2 libsnappy-dev libblosc-dev libzstd-dev
+ #sudo apt-get install mpich libmpich-dev libhdf5-mpich-dev libcurl4-openssl-dev bzip2 libsnappy-dev libblosc-dev libzstd-dev
+ sudo apt-get install openmpi-common libopenmpi-dev openmpi-bin libhdf5-openmpi-dev libcurl4-openssl-dev bzip2 libsnappy-dev libblosc-dev libzstd-dev
echo "Download and build PnetCDF version ${PNETCDF_VERSION}"
wget https://parallel-netcdf.github.io/Release/pnetcdf-${PNETCDF_VERSION}.tar.gz
tar -xzf pnetcdf-${PNETCDF_VERSION}.tar.gz
@@ -42,9 +44,11 @@ jobs:
wget https://www.gfd-dennou.org/arch/netcdf/unidata-mirror/netcdf-c-${NETCDF_VERSION}.tar.gz
tar -xzf netcdf-c-${NETCDF_VERSION}.tar.gz
pushd netcdf-c-${NETCDF_VERSION}
- export CPPFLAGS="-I/usr/include/hdf5/mpich -I${NETCDF_DIR}/include"
+ #export CPPFLAGS="-I/usr/include/hdf5/mpich -I${NETCDF_DIR}/include"
+ export CPPFLAGS="-I/usr/include/hdf5/openmpi -I${NETCDF_DIR}/include"
export LDFLAGS="-L${NETCDF_DIR}/lib"
- export LIBS="-lhdf5_mpich_hl -lhdf5_mpich -lm -lz"
+ #export LIBS="-lhdf5_mpich_hl -lhdf5_mpich -lm -lz"
+ export LIBS="-lhdf5_openmpi_hl -lhdf5_openmpi -lm -lz"
./configure --prefix $NETCDF_DIR --enable-netcdf-4 --enable-shared --enable-dap --enable-parallel4 $NETCDF_EXTRA_CONFIG
make -j 2
sudo make install
@@ -59,13 +63,14 @@ jobs:
- name: Install python dependencies via pip
run: |
python -m pip install --upgrade pip
- pip install numpy cython cftime pytest twine wheel check-manifest mpi4py typing-extensions
+ python -m pip install numpy cython cftime pytest twine wheel check-manifest mpi4py typing-extensions
- name: Install netcdf4-python
run: |
export PATH=${NETCDF_DIR}/bin:${PATH}
export NETCDF_PLUGIN_DIR=${{ github.workspace }}/netcdf-c-${NETCDF_VERSION}/plugins/plugindir
- python setup.py install
+ python -m pip install . --no-build-isolation
+
- name: Test
run: |
export PATH=${NETCDF_DIR}/bin:${PATH}
@@ -75,21 +80,24 @@ jobs:
python run_all.py
# parallel (hdf5 for netcdf4, pnetcdf for netcdf3)
cd ../examples
- mpirun.mpich -np 4 python mpi_example.py
+ #mpirun.mpich -np 4 python mpi_example.py
+ mpirun -np 4 --oversubscribe python mpi_example.py
if [ $? -ne 0 ] ; then
echo "hdf5 mpi test failed!"
exit 1
else
echo "hdf5 mpi test passed!"
fi
- mpirun.mpich -np 4 python mpi_example_compressed.py
+ #mpirun.mpich -np 4 python mpi_example_compressed.py
+ mpirun -np 4 --oversubscribe python mpi_example_compressed.py
if [ $? -ne 0 ] ; then
echo "hdf5 compressed mpi test failed!"
exit 1
else
echo "hdf5 compressed mpi test passed!"
fi
- mpirun.mpich -np 4 python mpi_example.py NETCDF3_64BIT_DATA
+ #mpirun.mpich -np 4 python mpi_example.py NETCDF3_64BIT_DATA
+ mpirun -np 4 --oversubscribe python mpi_example.py NETCDF3_64BIT_DATA
if [ $? -ne 0 ] ; then
echo "pnetcdf mpi test failed!"
exit 1
=====================================
.github/workflows/cibuildwheel.yml
=====================================
@@ -18,11 +18,11 @@ jobs:
name: Build source distribution
runs-on: ubuntu-22.04
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
fetch-depth: 0
- - uses: actions/setup-python@v5
+ - uses: actions/setup-python@v6
name: Install Python
with:
python-version: 3.x
@@ -59,13 +59,11 @@ jobs:
arch: aarch64
- os: macos-14
arch: arm64
- CIBW_ENVIRONMENT: MACOSX_DEPLOYMENT_TARGET=14.0
- - os: macos-12
+ - os: macos-13
arch: x86_64
- CIBW_ENVIRONMENT: MACOSX_DEPLOYMENT_TARGET=12.0
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
fetch-depth: 0
@@ -80,33 +78,20 @@ jobs:
shell: bash
# On PRs we run only oldest and newest Python versions to reduce CI load.
# Skips pypy and musllinux everywhere.
- # We are buiding 38 and 312 for now.
+ # We are building 310, 311 and 314 for now.
+ # (3.11 is the oldest version for which we support abi3 wheels)
# These needs to rotate every new Python release.
run: |
- if [[ "${{ github.event_name }}" == "pull_request" ]]; then
- CIBW_SKIP="pp* cp36-* cp37-* *-musllinux* cp39-* cp310-* cp311-*"
- else
- CIBW_SKIP="pp* cp36-* cp37-* *-musllinux*"
- fi
- echo "CIBW_SKIP=$CIBW_SKIP" >> $GITHUB_ENV
- echo "Setting CIBW_SKIP=$CIBW_SKIP"
+ set -x
+ echo "CIBW_BUILD=cp310-* cp311-* cp314-*" >> $GITHUB_ENV
+ set +x
+
+ if: ${{ github.event_name }} == "pull_request"
- name: "Building ${{ matrix.os }} (${{ matrix.arch }}) wheels"
- uses: pypa/cibuildwheel@v2.21.3
+ uses: pypa/cibuildwheel@v3.2.0
env:
- CIBW_SKIP: ${{ env.CIBW_SKIP }}
CIBW_ARCHS: ${{ matrix.arch }}
- CIBW_BUILD_FRONTEND: build
- CIBW_MANYLINUX_X86_64_IMAGE: ghcr.io/ocefpaf/manylinux2014_x86_64-netcdf
- CIBW_MANYLINUX_AARCH64_IMAGE: ghcr.io/ocefpaf/manylinux2014_aarch64-netcdf
- # Emulation testing is slow, testing only latest Python.
- CIBW_TEST_SKIP: "cp38-*_aarch64 cp39-*_aarch64 cp310-*_aarch64 cp311-*_aarch64"
- CIBW_ENVIRONMENT: ${{ matrix.CIBW_ENVIRONMENT }}
- CIBW_BEFORE_BUILD_MACOS: brew install hdf5 netcdf
- CIBW_TEST_REQUIRES: pytest cython packaging typing-extensions
- CIBW_TEST_COMMAND: >
- python -c "import netCDF4; print(f'netCDF4 v{netCDF4.__version__}')"
- && pytest -s -rxs -v {project}/test
- uses: actions/upload-artifact@v4
with:
@@ -120,14 +105,14 @@ jobs:
strategy:
matrix:
os: [windows-latest]
- arch: [win_amd64]
+ arch: [AMD64]
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
fetch-depth: 0
- - uses: actions/setup-python@v5
+ - uses: actions/setup-python@v6
name: Install Python
with:
python-version: 3.x
@@ -140,25 +125,12 @@ jobs:
create-args: >-
python=${{ matrix.python-version }} libnetcdf=4.9.2 --channel conda-forge
- - name: Install cibuildwheel
- run: |
- python -m pip install --upgrade cibuildwheel delvewheel
-
- name: Build wheels for Windows (${{ matrix.arch }})
- run: cibuildwheel --output-dir wheelhouse
+ uses: pypa/cibuildwheel@v3.2.0
env:
- CIBW_BUILD: "cp39-${{ matrix.arch }} cp310-${{ matrix.arch }} cp311-${{ matrix.arch }} cp312-${{ matrix.arch }}"
- CIBW_ENVIRONMENT_WINDOWS: >
- HDF5_DIR="C:\\Users\\runneradmin\\micromamba\\envs\\build\\Library"
- netCDF4_DIR="C:\\Users\\runneradmin\\micromamba\\envs\\build\\Library"
- PATH="C:\\Users\\runneradmin\\micromamba\\envs\\build\\Library\\bin;${PATH}"
- CIBW_REPAIR_WHEEL_COMMAND_WINDOWS: >
- delvewheel show {wheel}
- && delvewheel repair -w {dest_dir} {wheel}
- CIBW_TEST_REQUIRES: pytest cython packaging typing-extensions
- CIBW_TEST_COMMAND: >
- python -c "import netCDF4; print(f'netCDF4 v{netCDF4.__version__}')"
- && pytest -s -rxs -v {project}\\test
+ CIBW_ARCHS: ${{ matrix.arch }}
+ # cannot build cftime for this target (missing a wheel at the time of writing)
+ CIBW_SKIP: "cp314*"
- uses: actions/upload-artifact@v4
with:
@@ -171,7 +143,7 @@ jobs:
name: "Show artifacts"
runs-on: ubuntu-22.04
steps:
- - uses: actions/download-artifact@v4
+ - uses: actions/download-artifact@v5
with:
pattern: pypi-artifacts*
path: ${{ github.workspace }}/dist
@@ -189,7 +161,7 @@ jobs:
# upload to PyPI for every tag starting with 'v'
if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v')
steps:
- - uses: actions/download-artifact@v4
+ - uses: actions/download-artifact@v5
with:
pattern: pypi-artifacts*
path: ${{ github.workspace }}/dist
=====================================
.github/workflows/miniconda.yml
=====================================
@@ -12,7 +12,7 @@ jobs:
# NO_NET: 1
strategy:
matrix:
- python-version: [ "3.9", "3.10", "3.11", "3.12", "3.13" ]
+ python-version: [ "3.10", "3.11", "3.12", "3.13", "3.14" ]
os: [windows-latest, ubuntu-latest, macos-latest]
platform: [x64, x32]
exclude:
@@ -24,7 +24,7 @@ jobs:
shell: bash -l {0}
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
submodules: true
@@ -58,7 +58,7 @@ jobs:
run:
shell: bash -l {0}
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
submodules: true
=====================================
Changelog
=====================================
@@ -1,3 +1,14 @@
+ version 1.7.3 (tag v1.7.3rel)
+ =============================
+ * Python 3.14 wheels (issue #1432)
+ * support os.PathLike arguments for `Dataset.fromcdl` and raise a `FileNotFoundError`
+ if the cdl is missing and a `FileExistsError` if the nc file already exists (PR #1387)
+ * raise more informative error when trying to iterate or
+ perform a membership operation on a Dataset (issue #1383)
+ * fix type hint for createEnumType (issue #1378)
+ * add python 3.13 to windows wheel builds (PR #1377)
+ * allow slicing of vlen and string variables with non-unitary strides (issue #1408).
+
version 1.7.2 (tag v1.7.2rel)
=============================
* add static type hints (PRs #1302, #1349)
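
A minimal sketch of the headline change from issue #1408, assuming netCDF4 1.7.3 is installed: slicing a vlen variable with a non-unit stride, which the IndexError guards removed further down in this diff show was previously rejected (the filename is hypothetical):

    import numpy as np
    import netCDF4

    # Non-unit strides on vlen variables (issue #1408): before 1.7.3 the
    # last line raised IndexError ("strides must all be 1 for vlen variables").
    with netCDF4.Dataset("vlen_stride_demo.nc", "w") as ds:  # hypothetical file
        ds.createDimension("x", 6)
        vlen_t = ds.createVLType(np.int32, "vlen_int")
        ragged = ds.createVariable("ragged", vlen_t, ("x",))
        for i in range(6):
            ragged[i] = np.arange(i + 1, dtype=np.int32)
        print(ragged[::2])  # every other ragged row, now served via nc_get_vars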
=====================================
MANIFEST.in
=====================================
@@ -27,4 +27,3 @@ include *.md
include *.py
include *.release
include *.sh
-include LICENSE
=====================================
README.md
=====================================
@@ -10,6 +10,8 @@
## News
For details on the latest updates, see the [Changelog](https://github.com/Unidata/netcdf4-python/blob/master/Changelog).
+10/13/2025: Version [1.7.3](https://pypi.python.org/pypi/netCDF4/1.7.3) released. Minor updates/bugfixes and python 3.14 wheels, see Changelog for details.
+
10/22/2024: Version [1.7.2](https://pypi.python.org/pypi/netCDF4/1.7.2) released. Minor updates/bugfixes and python 3.13 wheels, see Changelog for details.
06/17/2024: Version [1.7.1](https://pypi.python.org/pypi/netCDF4/1.7.1) released. Fixes for wheels, no code changes.
=====================================
debian/changelog
=====================================
@@ -1,10 +1,13 @@
-netcdf4-python (1.7.2-2) UNRELEASED; urgency=medium
+netcdf4-python (1.7.3-1) unstable; urgency=medium
+ * New upstream release.
* Bump Standards-Version to 4.7.2, no changes.
* Update lintian overrides.
* Drop Rules-Requires-Root: no, default since dpkg 1.22.13.
+ * Update Upstream-Contact email address.
+ * Refresh patches.
- -- Bas Couwenberg <sebastic@debian.org> Thu, 20 Mar 2025 06:13:05 +0100
+ -- Bas Couwenberg <sebastic@debian.org> Tue, 14 Oct 2025 05:43:07 +0200
netcdf4-python (1.7.2-1) unstable; urgency=medium
=====================================
debian/copyright
=====================================
@@ -1,6 +1,6 @@
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: netcdf4-python
-Upstream-Contact: Jeff Whitaker <jeffrey.s.whitaker@noaa.gov>
+Upstream-Contact: Jeff Whitaker <whitaker.jeffrey@gmail.com>
Source: https://github.com/Unidata/netcdf4-python
Files: *
=====================================
debian/patches/rpath.patch
=====================================
@@ -4,11 +4,11 @@ Forwarded: not-needed
--- a/setup.py
+++ b/setup.py
-@@ -351,11 +351,6 @@ else:
+@@ -371,11 +371,6 @@ else:
lib_dirs.append(curl_libdir)
inc_dirs.append(curl_incdir)
--if sys.platform == 'win32':
+-if sys.platform == 'win32' or sys.platform == 'cygwin':
- runtime_lib_dirs = []
-else:
- runtime_lib_dirs = lib_dirs
@@ -16,13 +16,11 @@ Forwarded: not-needed
# Do not require numpy for just querying the package
# Taken from the h5py setup file.
if any('--' + opt in sys.argv for opt in Distribution.display_option_names +
-@@ -433,8 +428,7 @@ if 'sdist' not in sys.argv[1:] and 'clea
- define_macros=DEFINE_MACROS,
+@@ -461,7 +456,6 @@ if 'sdist' not in sys.argv[1:] and 'clea
libraries=libs,
library_dirs=lib_dirs,
-- include_dirs=include_dirs,
-- runtime_library_dirs=runtime_lib_dirs)]
-+ include_dirs=include_dirs)]
+ include_dirs=include_dirs,
+- runtime_library_dirs=runtime_lib_dirs,
+ py_limited_api=USE_PY_LIMITED_API)]
# set language_level directive to 3
for e in ext_modules:
- e.cython_directives = {'language_level': "3"} #
=====================================
docs/index.html
=====================================
@@ -297,7 +297,7 @@ supplied by the <a href="http://numpy.scipy.org">numpy module</a>. However,
unlike numpy arrays, netCDF4 variables can be appended to along one or
more 'unlimited' dimensions. To create a netCDF variable, use the
<code><a title="netCDF4.Dataset.createVariable" href="#netCDF4.Dataset.createVariable">Dataset.createVariable()</a></code> method of a <code><a title="netCDF4.Dataset" href="#netCDF4.Dataset">Dataset</a></code> or
-<code><a title="netCDF4.Group" href="#netCDF4.Group">Group</a></code> instance. The <code><a title="netCDF4.Dataset.createVariable" href="#netCDF4.Dataset.createVariable">Dataset.createVariable()</a></code>j method
+<code><a title="netCDF4.Group" href="#netCDF4.Group">Group</a></code> instance. The <code><a title="netCDF4.Dataset.createVariable" href="#netCDF4.Dataset.createVariable">Dataset.createVariable()</a></code> method
has two mandatory arguments, the variable name (a Python string), and
the variable datatype. The variable's dimensions are given by a tuple
containing the dimension names (defined previously with
@@ -305,19 +305,75 @@ containing the dimension names (defined previously with
variable, simply leave out the dimensions keyword. The variable
primitive datatypes correspond to the dtype attribute of a numpy array.
You can specify the datatype as a numpy dtype object, or anything that
-can be converted to a numpy dtype object.
-Valid datatype specifiers
-include: <code>'f4'</code> (32-bit floating point), <code>'f8'</code> (64-bit floating
-point), <code>'i4'</code> (32-bit signed integer), <code>'i2'</code> (16-bit signed
-integer), <code>'i8'</code> (64-bit signed integer), <code>'i1'</code> (8-bit signed
-integer), <code>'u1'</code> (8-bit unsigned integer), <code>'u2'</code> (16-bit unsigned
-integer), <code>'u4'</code> (32-bit unsigned integer), <code>'u8'</code> (64-bit unsigned
-integer), or <code>'S1'</code> (single-character string).
-The old Numeric
-single-character typecodes (<code>'f'</code>,<code>'d'</code>,<code>'h'</code>,
-<code>'s'</code>,<code>'b'</code>,<code>'B'</code>,<code>'c'</code>,<code>'i'</code>,<code>'l'</code>), corresponding to
-(<code>'f4'</code>,<code>'f8'</code>,<code>'i2'</code>,<code>'i2'</code>,<code>'i1'</code>,<code>'i1'</code>,<code>'S1'</code>,<code>'i4'</code>,<code>'i4'</code>),
-will also work. The unsigned integer types and the 64-bit integer type
+can be converted to a numpy dtype object. Valid datatype specifiers
+include:</p>
+<table>
+<thead>
+<tr>
+<th>Specifier</th>
+<th>Datatype</th>
+<th>Old typecodes</th>
+</tr>
+</thead>
+<tbody>
+<tr>
+<td><code>'f4'</code></td>
+<td>32-bit floating point</td>
+<td><code>'f'</code></td>
+</tr>
+<tr>
+<td><code>'f8'</code></td>
+<td>64-bit floating point</td>
+<td><code>'d'</code></td>
+</tr>
+<tr>
+<td><code>'i4'</code></td>
+<td>32-bit signed integer</td>
+<td><code>'i'</code> <code>'l'</code></td>
+</tr>
+<tr>
+<td><code>'i2'</code></td>
+<td>16-bit signed integer</td>
+<td><code>'h'</code> <code>'s'</code></td>
+</tr>
+<tr>
+<td><code>'i8'</code></td>
+<td>64-bit signed integer</td>
+<td></td>
+</tr>
+<tr>
+<td><code>'i1'</code></td>
+<td>8-bit signed integer</td>
+<td><code>'b'</code> <code>'B'</code></td>
+</tr>
+<tr>
+<td><code>'u1'</code></td>
+<td>8-bit unsigned integer</td>
+<td></td>
+</tr>
+<tr>
+<td><code>'u2'</code></td>
+<td>16-bit unsigned integer</td>
+<td></td>
+</tr>
+<tr>
+<td><code>'u4'</code></td>
+<td>32-bit unsigned integer</td>
+<td></td>
+</tr>
+<tr>
+<td><code>'u8'</code></td>
+<td>64-bit unsigned integer</td>
+<td></td>
+</tr>
+<tr>
+<td><code>'S1'</code></td>
+<td>single-character string</td>
+<td><code>'c'</code></td>
+</tr>
+</tbody>
+</table>
+<p>The unsigned integer types and the 64-bit integer type
can only be used if the file format is <code>NETCDF4</code>.</p>
<p>The dimensions themselves are usually also defined as variables, called
coordinate variables. The <code><a title="netCDF4.Dataset.createVariable" href="#netCDF4.Dataset.createVariable">Dataset.createVariable()</a></code>
@@ -1552,8 +1608,8 @@ will clobber an existing file with the same name.
if <code>False</code>, an
exception will be raised if a file with the same name already exists.
mode=<code>x</code> is identical to mode=<code>w</code> with clobber=False.</p>
-<p><strong><code>format</code></strong>: underlying file format (one of <code>'NETCDF4',
-'NETCDF4_CLASSIC', 'NETCDF3_CLASSIC'<code>, </code>'NETCDF3_64BIT_OFFSET'</code> or
+<p><strong><code>format</code></strong>: underlying file format (one of <code>'NETCDF4'</code>,
+<code>'NETCDF4_CLASSIC'</code>, <code>'NETCDF3_CLASSIC'</code>, <code>'NETCDF3_64BIT_OFFSET'</code> or
<code>'NETCDF3_64BIT_DATA'</code>.
Only relevant if <code>mode = 'w'</code> (if <code>mode = 'r','a'</code> or <code>'r+'</code> the file format
is automatically detected). Default <code>'NETCDF4'</code>, which means the data is
@@ -1637,8 +1693,8 @@ CDL file.</p>
suffix replaced by <code>.nc</code> is used..</p>
<p><strong><code>mode</code></strong>:
Access mode to open Dataset (Default <code>'a'</code>).</p>
-<p><strong><code>format</code></strong>: underlying file format to use (one of <code>'NETCDF4',
-'NETCDF4_CLASSIC', 'NETCDF3_CLASSIC'<code>, </code>'NETCDF3_64BIT_OFFSET'</code> or
+<p><strong><code>format</code></strong>: underlying file format to use (one of <code>'NETCDF4'</code>,
+<code>'NETCDF4_CLASSIC'</code>, <code>'NETCDF3_CLASSIC'</code>, <code>'NETCDF3_64BIT_OFFSET'</code> or
<code>'NETCDF3_64BIT_DATA'</code>. Default <code>'NETCDF4'</code>.</p>
<p>Dataset instance for <code>ncfilename</code> is returned.</p></div>
</dd>
@@ -1912,8 +1968,8 @@ Dataset standard attributes: <code>dimensions, dtype, shape, ndim, name</code> a
<code>least_significant_digit</code>. Application programs should never modify
these attributes. The <code>dimensions</code> attribute is a tuple containing the
names of the dimensions associated with this variable. The <code>dtype</code>
-attribute is a string describing the variable's data type (<code>i4, f8,
-S1,<code> etc). The </code>shape</code> attribute is a tuple describing the current
+attribute is a string describing the variable's data type (<code>i4</code>, <code>f8</code>,
+<code>S1</code>, etc). The <code>shape</code> attribute is a tuple describing the current
sizes of all the variable's dimensions. The <code>name</code> attribute is a
string containing the name of the Variable instance.
The <code>least_significant_digit</code>
@@ -2347,8 +2403,8 @@ instances, raises OSError.</p></div>
<dd>
<div class="desc"><p>Class for reading multi-file netCDF Datasets, making variables
spanning multiple files appear as if they were in one file.
-Datasets must be in <code>NETCDF4_CLASSIC, NETCDF3_CLASSIC, NETCDF3_64BIT_OFFSET
-or NETCDF3_64BIT_DATA<code> format (</code>NETCDF4</code> Datasets won't work).</p>
+Datasets must be in <code>NETCDF4_CLASSIC</code>, <code>NETCDF3_CLASSIC</code>, <code>NETCDF3_64BIT_OFFSET</code>
+or <code>NETCDF3_64BIT_DATA</code> format (<code>NETCDF4</code> Datasets won't work).</p>
<p>Adapted from <a href="http://pysclint.sourceforge.net/pycdf">pycdf</a> by Andre Gosselin.</p>
<p>Example usage (See <code><a title="netCDF4.MFDataset" href="#netCDF4.MFDataset">MFDataset</a></code> for more details):</p>
<pre><code class="language-python">>>> import numpy as np
=====================================
include/membuf.pyx
=====================================
@@ -14,7 +14,7 @@ cdef memview_fromptr(void *memory, size_t size):
# private extension type that implements buffer protocol.
cdef class _MemBuf:
- cdef const void *memory
+ cdef void *memory
cdef size_t size
def __getbuffer__(self, Py_buffer *buf, int flags):
PyBuffer_FillInfo(buf, self, <void *>self.memory, self.size, 1, flags)
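
For context, membuf.pyx backs the in-memory open path that test_open_mem.py exercises; a minimal sketch of that path, assuming an existing example.nc on disk:

    import netCDF4

    # Open a dataset from a bytes buffer; the _MemBuf extension type above
    # wraps the memory that netcdf-c returns. "example.nc" is an assumption.
    with open("example.nc", "rb") as f:
        nc_bytes = f.read()
    with netCDF4.Dataset("any_name", memory=nc_bytes) as nc:
        print(nc.filepath(), list(nc.variables))  # filepath() echoes "any_name"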
=====================================
pyproject.toml
=====================================
@@ -1,9 +1,9 @@
[build-system]
requires = [
"Cython>=0.29",
- "oldest-supported-numpy ; python_version < '3.9'",
- "numpy>=2.0.0rc1 ; python_version >= '3.9'",
- "setuptools>=61", "setuptools_scm[toml]>=3.4"
+ "numpy>=2.0.0",
+ "setuptools>=77.0.1",
+ "setuptools_scm[toml]>=3.4",
]
build-backend = "setuptools.build_meta"
@@ -11,24 +11,24 @@ build-backend = "setuptools.build_meta"
name = "netCDF4"
description = "Provides an object-oriented python interface to the netCDF version 4 library"
authors = [
- {name = "Jeff Whitaker", email = "jeffrey.s.whitaker at noaa.gov"},
+ {name = "Jeff Whitaker", email = "whitaker.jeffrey at gmail.com"},
]
-requires-python = ">=3.8"
+requires-python = ">=3.10"
keywords = [
"numpy", "netcdf", "data", "science", "network", "oceanography",
"meteorology", "climate",
]
-license = {text = "MIT"}
+license = "MIT"
+license-files = ["LICENSE"]
classifiers = [
"Development Status :: 3 - Alpha",
"Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.8",
- "Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
+ "Programming Language :: Python :: 3.14",
"Intended Audience :: Science/Research",
- "License :: OSI Approved :: MIT License",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: System :: Archiving :: Compression",
"Operating System :: OS Independent",
@@ -45,6 +45,10 @@ tests = [
"Cython",
"packaging",
"pytest",
+ "typing-extensions>=4.15.0",
+]
+parallel = [
+ "mpi4py",
]
[project.readme]
@@ -99,3 +103,48 @@ module = [
"filter_availability",
"matplotlib.*"
]
+
+[tool.cibuildwheel]
+build-verbosity = 1
+build-frontend = "build"
+skip = [
+ "*-musllinux*",
+ "cp314t-*",
+]
+test-extras = "tests"
+test-sources = [
+ "test",
+ "pyproject.toml"
+]
+test-command = [
+ '''python -c "import netCDF4; print(f'netCDF4 v{netCDF4.__version__}')"''',
+ "pytest -s -rxs -v test",
+]
+manylinux-x86_64-image = "ghcr.io/ocefpaf/manylinux_2_28_x86_64-netcdf"
+manylinux-aarch64-image = "ghcr.io/ocefpaf/manylinux_2_28_aarch64-netcdf"
+environment = {NETCDF4_LIMITED_API="1"}
+
+[tool.cibuildwheel.macos]
+before-build = "brew install hdf5 netcdf"
+
+[[tool.cibuildwheel.overrides]]
+select = "*-macosx_x86_64"
+inherit.environment = "append"
+environment = {MACOSX_DEPLOYMENT_TARGET="13.0"}
+
+[[tool.cibuildwheel.overrides]]
+select = "*-macosx_arm64"
+inherit.environment = "append"
+environment = {MACOSX_DEPLOYMENT_TARGET="14.0"}
+
+[tool.cibuildwheel.windows]
+before-build = "python -m pip install delvewheel"
+repair-wheel-command = [
+ "delvewheel show {wheel}",
+ "delvewheel repair -w {dest_dir} {wheel}",
+]
+
+[[tool.cibuildwheel.overrides]]
+select = "*-win_*"
+inherit.environment = "append"
+environment = {HDF5_DIR='C:\\\\Users\\runneradmin\\micromamba\\envs\\build\\Library', netCDF4_DIR='C:\\\\Users\\runneradmin\\micromamba\\envs\\build\\Library', PATH='C:\\\\Users\\runneradmin\\micromamba\\envs\\build\\Library\\bin;${PATH}' }
=====================================
setup.py
=====================================
@@ -3,10 +3,28 @@ import os.path as osp
import pathlib
import shutil
import configparser
+import sysconfig
from setuptools import setup, Extension
from setuptools.dist import Distribution
from typing import List
+
+USE_PY_LIMITED_API = (
+ # require opt-in (builds are specialized by default)
+ os.getenv('NETCDF4_LIMITED_API', '0') == '1'
+ # Cython + numpy + limited API de facto requires Python >=3.11
+ and sys.version_info >= (3, 11)
+ # as of Python 3.14t, free-threaded builds don't support the limited API
+ and not sysconfig.get_config_var("Py_GIL_DISABLED")
+)
+ABI3_TARGET_VERSION = "".join(str(_) for _ in sys.version_info[:2])
+ABI3_TARGET_HEX = hex(sys.hexversion & 0xFFFF00F0)
+
+if USE_PY_LIMITED_API:
+ SETUP_OPTIONS = {"bdist_wheel": {"py_limited_api": f"cp{ABI3_TARGET_VERSION}"}}
+else:
+ SETUP_OPTIONS = {}
+
open_kwargs = {'encoding': 'utf-8'}
@@ -260,6 +278,8 @@ if USE_NCCONFIG and HAS_NCCONFIG and ncconfig is not None:
for direc in inc_dirs:
hdf5_version = get_hdf5_version(direc)
if hdf5_version is not None:
+ if sys.platform == "cygwin":
+ _populate_hdf5_info(dirstosearch, inc_dirs, libs, lib_dirs)
break
# if hdf5 not found, search other standard locations (including those specified in env vars).
if hdf5_version is None:
@@ -351,7 +371,7 @@ else:
lib_dirs.append(curl_libdir)
inc_dirs.append(curl_incdir)
-if sys.platform == 'win32':
+if sys.platform == 'win32' or sys.platform == 'cygwin':
runtime_lib_dirs = []
else:
runtime_lib_dirs = lib_dirs
@@ -397,7 +417,12 @@ if 'sdist' not in sys.argv[1:] and 'clean' not in sys.argv[1:] and '--version' n
print(f"netcdf lib {has_has_not} parallel functions")
if has_parallel_support:
- import mpi4py
+ try:
+ import mpi4py
+ except ImportError:
+ msg = "Parallel support requires mpi4py but it is not installed."
+ raise ImportError(msg)
+
inc_dirs.append(mpi4py.get_include())
# mpi_incdir should not be needed if using nc-config
# (should be included in nc-config --cflags)
@@ -427,6 +452,8 @@ if 'sdist' not in sys.argv[1:] and 'clean' not in sys.argv[1:] and '--version' n
str(nc_complex_dir / "include/generated_fallbacks"),
]
DEFINE_MACROS += [("NC_COMPLEX_NO_EXPORT", "1")]
+ if USE_PY_LIMITED_API:
+ DEFINE_MACROS.append(("Py_LIMITED_API", ABI3_TARGET_HEX))
ext_modules = [Extension("netCDF4._netCDF4",
source_files,
@@ -434,7 +461,8 @@ if 'sdist' not in sys.argv[1:] and 'clean' not in sys.argv[1:] and '--version' n
libraries=libs,
library_dirs=lib_dirs,
include_dirs=include_dirs,
- runtime_library_dirs=runtime_lib_dirs)]
+ runtime_library_dirs=runtime_lib_dirs,
+ py_limited_api=USE_PY_LIMITED_API)]
# set language_level directive to 3
for e in ext_modules:
e.cython_directives = {'language_level': "3"} #
@@ -468,6 +496,7 @@ setup(
name="netCDF4", # need by GitHub dependency graph
version=extract_version(netcdf4_src_pyx),
ext_modules=ext_modules,
+ options=SETUP_OPTIONS,
)
# remove plugin files copied from outside source tree
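
To make the new stable-ABI plumbing concrete, a minimal sketch (not part of the package) of how setup.py derives the wheel tag and the Py_LIMITED_API macro value from the running interpreter:

    import sys

    # Mirror of the ABI3_TARGET_VERSION / ABI3_TARGET_HEX computation above.
    # On CPython 3.11.4, sys.hexversion == 0x030B04F0.
    target_version = "".join(str(v) for v in sys.version_info[:2])  # "311"
    target_hex = hex(sys.hexversion & 0xFFFF00F0)                   # "0x30b00f0"
    print(f"bdist_wheel tag: cp{target_version}, Py_LIMITED_API={target_hex}")
    # Masking out the micro version and release serial pins the limited API
    # to "this minor version, .0 final", i.e. 3.11.0 in this example.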
=====================================
src/netCDF4/__init__.pyi
=====================================
@@ -34,6 +34,7 @@ from typing import (
TYPE_CHECKING,
Any,
Callable,
+ final,
Final,
Generic,
Iterable,
@@ -51,7 +52,7 @@ from typing import (
import cftime
import numpy as np
import numpy.typing as npt
-from typing_extensions import Buffer, Self, TypeAlias
+from typing_extensions import Buffer, Self, TypeAlias, disjoint_base
__all__ = [
"Dataset",
@@ -217,6 +218,7 @@ class NetCDF4MissingFeatureException(Exception):
def dtype_is_complex(dtype: str) -> bool: ...
+ at disjoint_base
class Dataset:
def __init__(
self,
@@ -354,7 +356,7 @@ class Dataset:
def createVLType(self, datatype: npt.DTypeLike, datatype_name: str) -> VLType: ...
def createEnumType(
self,
- datatype: np.dtype[np.integer] | type[np.integer] | type[int],
+ datatype: np.dtype[np.integer] | type[np.integer] | type[int] | str,
datatype_name: str,
enum_dict: Mapping[str, int | np.integer],
) -> EnumType: ...
@@ -373,7 +375,7 @@ class Dataset:
def get_variables_by_attributes(self, **kwargs: Callable[[Any], bool] | Any) -> list[Variable]: ...
@staticmethod
def fromcdl(
- cdlfilename: str, ncfilename: str | None = None, mode: AccessMode = "a", format: Format = "NETCDF4"
+ cdlfilename: str | os.PathLike, ncfilename: str | os.PathLike | None = None, mode: AccessMode = "a", format: Format = "NETCDF4"
) -> Dataset: ...
@overload
def tocdl(self, coordvars: bool = False, data: bool = False, outfile: None = None) -> str: ...
@@ -384,6 +386,9 @@ class Dataset:
def has_bzip2_filter(self) -> bool: ...
def has_szip_filter(self) -> bool: ...
def __getitem__(self, elem: str) -> Any: ... # should be Group | Variable, but this causes too many problems
+ # __iter__ and __contains__ always error because iteration and membership ops are not allowed
+ def __iter__(self) -> NoReturn: ...
+ def __contains__(self, key) -> NoReturn: ...
def __setattr__(self, name: str, value: Any) -> None: ...
def __getattr__(self, name: str) -> Any: ...
def __delattr__(self, name: str): ...
@@ -395,6 +400,7 @@ class Group(Dataset):
def __init__(self, parent: Dataset, name: str, **kwargs: Any) -> None: ...
def close(self) -> NoReturn: ...
+ at final
class Dimension:
def __init__(self, grp: Dataset, name: str, size: int | None = None, **kwargs: Any) -> None: ...
@property
@@ -427,6 +433,7 @@ class _VarDtypeProperty:
@overload
def __get__(self, instance: Variable, owner: Any) -> Any: ... # actual return type np.dtype | Type[str]
+ at final
class Variable(Generic[VarT]):
# Overloads of __new__ are provided for some cases where the Variable's type may be statically inferred from the datatype arg
@overload
@@ -587,6 +594,7 @@ class Variable(Generic[VarT]):
def __len__(self) -> int: ...
def __iter__(self) -> Iterator[Any]: ... # faux method so mypy believes Variable is iterable
+ at final
class CompoundType:
dtype: np.dtype
dtype_view: np.dtype
@@ -597,6 +605,7 @@ class CompoundType:
) -> None: ...
def __reduce__(self) -> NoReturn: ...
+ at final
class VLType:
dtype: np.dtype
name: str | None
@@ -604,6 +613,7 @@ class VLType:
def __init__(self, grp: Dataset, dt: npt.DTypeLike, dtype_name: str, **kwargs: Any) -> None: ...
def __reduce__(self) -> NoReturn: ...
+ at final
class EnumType:
dtype: np.dtype[np.integer]
name: str
=====================================
src/netCDF4/_netCDF4.pyx
=====================================
@@ -1,4 +1,4 @@
-"""Version 1.7.2
+"""Version 1.7.3
-------------
# Introduction
@@ -295,7 +295,7 @@ supplied by the [numpy module](http://numpy.scipy.org). However,
unlike numpy arrays, netCDF4 variables can be appended to along one or
more 'unlimited' dimensions. To create a netCDF variable, use the
`Dataset.createVariable` method of a `Dataset` or
-`Group` instance. The `Dataset.createVariable`j method
+`Group` instance. The `Dataset.createVariable` method
has two mandatory arguments, the variable name (a Python string), and
the variable datatype. The variable's dimensions are given by a tuple
containing the dimension names (defined previously with
@@ -303,17 +303,24 @@ containing the dimension names (defined previously with
variable, simply leave out the dimensions keyword. The variable
primitive datatypes correspond to the dtype attribute of a numpy array.
You can specify the datatype as a numpy dtype object, or anything that
-can be converted to a numpy dtype object. Valid datatype specifiers
-include: `'f4'` (32-bit floating point), `'f8'` (64-bit floating
-point), `'i4'` (32-bit signed integer), `'i2'` (16-bit signed
-integer), `'i8'` (64-bit signed integer), `'i1'` (8-bit signed
-integer), `'u1'` (8-bit unsigned integer), `'u2'` (16-bit unsigned
-integer), `'u4'` (32-bit unsigned integer), `'u8'` (64-bit unsigned
-integer), or `'S1'` (single-character string). The old Numeric
-single-character typecodes (`'f'`,`'d'`,`'h'`,
-`'s'`,`'b'`,`'B'`,`'c'`,`'i'`,`'l'`), corresponding to
-(`'f4'`,`'f8'`,`'i2'`,`'i2'`,`'i1'`,`'i1'`,`'S1'`,`'i4'`,`'i4'`),
-will also work. The unsigned integer types and the 64-bit integer type
+can be converted to a numpy dtype object. Valid datatype specifiers
+include:
+
+| Specifier | Datatype | Old typecodes |
+|-----------|-------------------------|---------------|
+| `'f4'` | 32-bit floating point | `'f'` |
+| `'f8'` | 64-bit floating point | `'d'` |
+| `'i4'` | 32-bit signed integer | `'i'` `'l'` |
+| `'i2'` | 16-bit signed integer | `'h'` `'s'` |
+| `'i8'` | 64-bit signed integer | |
+| `'i1'` | 8-bit signed integer | `'b'` `'B'` |
+| `'u1'` | 8-bit unsigned integer | |
+| `'u2'` | 16-bit unsigned integer | |
+| `'u4'` | 32-bit unsigned integer | |
+| `'u8'` | 64-bit unsigned integer | |
+| `'S1'` | single-character string | `'c'` |
+
+The unsigned integer types and the 64-bit integer type
can only be used if the file format is `NETCDF4`.
The dimensions themselves are usually also defined as variables, called
@@ -1248,7 +1255,7 @@ Support for complex numbers is handled via the
further details.
-**contact**: Jeffrey Whitaker <jeffrey.s.whitaker@noaa.gov>
+**contact**: Jeffrey Whitaker <whitaker.jeffrey@gmail.com>
**copyright**: 2008 by Jeffrey Whitaker.
@@ -1272,7 +1279,7 @@ import sys
import functools
from typing import Union
-__version__ = "1.7.2"
+__version__ = "1.7.3"
# Initialize numpy
import posixpath
@@ -1485,17 +1492,6 @@ _needsworkaround_issue485 = __netcdf4libversion__ < "4.4.0" or \
(__netcdf4libversion__.startswith("4.4.0") and \
"-development" in __netcdf4libversion__)
-# issue warning for hdf5 1.10 (issue #549)
-if __netcdf4libversion__[0:5] < "4.4.1" and\
- __hdf5libversion__.startswith("1.10"):
- msg = """
-WARNING: Backwards incompatible files will be created with HDF5 1.10.x
-and netCDF < 4.4.1. Upgrading to netCDF4 >= 4.4.1 or downgrading to
-to HDF5 version 1.8.x is highly recommended
-(see https://github.com/Unidata/netcdf-c/issues/250)."""
- warnings.warn(msg)
-
-
class NetCDF4MissingFeatureException(Exception):
"""Custom exception when trying to use features missing from the linked netCDF library"""
def __init__(self, feature: str, version: str):
@@ -2331,8 +2327,8 @@ strings.
exception will be raised if a file with the same name already exists.
mode=`x` is identical to mode=`w` with clobber=False.
- **`format`**: underlying file format (one of `'NETCDF4',
- 'NETCDF4_CLASSIC', 'NETCDF3_CLASSIC'`, `'NETCDF3_64BIT_OFFSET'` or
+ **`format`**: underlying file format (one of `'NETCDF4'`,
+ `'NETCDF4_CLASSIC'`, `'NETCDF3_CLASSIC'`, `'NETCDF3_64BIT_OFFSET'` or
`'NETCDF3_64BIT_DATA'`.
Only relevant if `mode = 'w'` (if `mode = 'r','a'` or `'r+'` the file format
is automatically detected). Default `'NETCDF4'`, which means the data is
@@ -2569,6 +2565,17 @@ strings.
else:
raise IndexError('%s not found in %s' % (lastname,group.path))
+ def __iter__(self):
+ raise TypeError(
+ "Dataset is not iterable. Consider iterating on Dataset.variables."
+ )
+
+ def __contains__(self, key):
+ raise TypeError(
+ "Dataset does not support membership operations. Perhaps try 'varname in"
+ " dataset.variables' or 'dimname in dataset.dimensions'."
+ )
+
def filepath(self,encoding=None):
"""**`filepath(self,encoding=None)`**
@@ -2974,8 +2981,8 @@ Dataset standard attributes: `dimensions, dtype, shape, ndim, name` and
`least_significant_digit`. Application programs should never modify
these attributes. The `dimensions` attribute is a tuple containing the
names of the dimensions associated with this variable. The `dtype`
-attribute is a string describing the variable's data type (`i4, f8,
-S1,` etc). The `shape` attribute is a tuple describing the current
+attribute is a string describing the variable's data type (`i4`, `f8`,
+`S1`, etc). The `shape` attribute is a tuple describing the current
sizes of all the variable's dimensions. The `name` attribute is a
string containing the name of the Variable instance.
The `least_significant_digit`
@@ -3482,8 +3489,8 @@ suffix replaced by `.nc` is used..
**`mode`**: Access mode to open Dataset (Default `'a'`).
-**`format`**: underlying file format to use (one of `'NETCDF4',
-'NETCDF4_CLASSIC', 'NETCDF3_CLASSIC'`, `'NETCDF3_64BIT_OFFSET'` or
+**`format`**: underlying file format to use (one of `'NETCDF4'`,
+`'NETCDF4_CLASSIC'`, `'NETCDF3_CLASSIC'`, `'NETCDF3_64BIT_OFFSET'` or
`'NETCDF3_64BIT_DATA'`. Default `'NETCDF4'`.
Dataset instance for `ncfilename` is returned.
@@ -3491,19 +3498,27 @@ Dataset instance for `ncfilename` is returned.
[ncgen]: https://www.unidata.ucar.edu/software/netcdf/docs/netcdf_utilities_guide.html#ncgen_guide
[cdl]: https://www.unidata.ucar.edu/software/netcdf/docs/netcdf_utilities_guide.html#cdl_guide
"""
+ filepath = pathlib.Path(cdlfilename)
if ncfilename is None:
- filepath = pathlib.Path(cdlfilename)
ncfilename = filepath.with_suffix('.nc')
+ else:
+ ncfilename = pathlib.Path(ncfilename)
formatcodes = {'NETCDF4': 4,
'NETCDF4_CLASSIC': 7,
'NETCDF3_CLASSIC': 3,
'NETCDF3_64BIT': 6, # legacy
'NETCDF3_64BIT_OFFSET': 6,
'NETCDF3_64BIT_DATA': 5}
+
if format not in formatcodes:
raise ValueError('illegal format requested')
+ if not filepath.exists():
+ raise FileNotFoundError(filepath)
+ if ncfilename.exists():
+ raise FileExistsError(ncfilename)
+
ncgenargs="-knc%s" % formatcodes[format]
- subprocess.run(["ncgen", ncgenargs, "-o", ncfilename, cdlfilename], check=True)
+ subprocess.run(["ncgen", ncgenargs, "-o", str(ncfilename), str(filepath)], check=True)
return Dataset(ncfilename, mode=mode)
def tocdl(self,coordvars=False,data=False,outfile=None):
@@ -4041,7 +4056,7 @@ behavior is similar to Fortran or Matlab, but different than numpy.
If fill_value is set to `False`, then the variable is not pre-filled.
The default netCDF fill values can be found in the dictionary `netCDF4.default_fillvals`.
If not set, the default fill value will be used but no `_FillValue` attribute will be created
- (this is the default behavior of the netcdf-c library). If you want to use the
+ (this is the default behavior of the netcdf-c library). If you want to use the
default fill value, but have the `_FillValue` attribute set, use
`fill_value='default'` (note - this only works for primitive data types). `Variable.get_fill_value`
can be used to retrieve the fill value, even if the `_FillValue` attribute is not set.
@@ -5940,10 +5955,9 @@ NC_CHAR).
ierr = nc_put_vara(self._grpid, self._varid,
startp, countp, strdata)
else:
- raise IndexError('strides must all be 1 for string variables')
- #with nogil:
- # ierr = nc_put_vars(self._grpid, self._varid,
- # startp, countp, stridep, strdata)
+ with nogil:
+ ierr = nc_put_vars(self._grpid, self._varid,
+ startp, countp, stridep, strdata)
_ensure_nc_success(ierr)
free(strdata)
else:
@@ -5969,10 +5983,9 @@ NC_CHAR).
ierr = nc_put_vara(self._grpid, self._varid,
startp, countp, vldata)
else:
- raise IndexError('strides must all be 1 for vlen variables')
- #with nogil:
- # ierr = nc_put_vars(self._grpid, self._varid,
- # startp, countp, stridep, vldata)
+ with nogil:
+ ierr = nc_put_vars(self._grpid, self._varid,
+ startp, countp, stridep, vldata)
_ensure_nc_success(ierr)
# free the pointer array.
free(vldata)
@@ -6065,11 +6078,9 @@ NC_CHAR).
ierr = nc_get_vara(self._grpid, self._varid,
startp, countp, strdata)
else:
- # FIXME: is this a bug in netCDF4?
- raise IndexError('strides must all be 1 for string variables')
- #with nogil:
- # ierr = nc_get_vars(self._grpid, self._varid,
- # startp, countp, stridep, strdata)
+ with nogil:
+ ierr = nc_get_vars(self._grpid, self._varid,
+ startp, countp, stridep, strdata)
if ierr == NC_EINVALCOORDS:
raise IndexError
elif ierr != NC_NOERR:
@@ -6104,10 +6115,9 @@ NC_CHAR).
ierr = nc_get_vara(self._grpid, self._varid,
startp, countp, vldata)
else:
- raise IndexError('strides must all be 1 for vlen variables')
- #with nogil:
- # ierr = nc_get_vars(self._grpid, self._varid,
- # startp, countp, stridep, vldata)
+ with nogil:
+ ierr = nc_get_vars(self._grpid, self._varid,
+ startp, countp, stridep, vldata)
if ierr == NC_EINVALCOORDS:
raise IndexError
elif ierr != NC_NOERR:
@@ -6822,8 +6832,8 @@ class MFDataset(Dataset):
"""
Class for reading multi-file netCDF Datasets, making variables
spanning multiple files appear as if they were in one file.
-Datasets must be in `NETCDF4_CLASSIC, NETCDF3_CLASSIC, NETCDF3_64BIT_OFFSET
-or NETCDF3_64BIT_DATA` format (`NETCDF4` Datasets won't work).
+Datasets must be in `NETCDF4_CLASSIC`, `NETCDF3_CLASSIC`, `NETCDF3_64BIT_OFFSET`
+or `NETCDF3_64BIT_DATA` format (`NETCDF4` Datasets won't work).
Adapted from [pycdf](http://pysclint.sourceforge.net/pycdf) by Andre Gosselin.
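
A short sketch of the new guard rails from issue #1383, assuming 1.7.3: iterating a Dataset or testing membership on it now raises a TypeError that points at the intended collections (the filename is hypothetical):

    import netCDF4

    with netCDF4.Dataset("guardrail_demo.nc", "w") as ds:  # hypothetical file
        ds.createVariable("t", "f4")
        try:
            "t" in ds  # membership on the Dataset itself now raises
        except TypeError as err:
            print(err)  # hints at "varname in dataset.variables"
        print("t" in ds.variables)  # the supported spelling -> True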
=====================================
src/netCDF4/utils.py
=====================================
@@ -44,7 +44,7 @@ def _find_dim(grp, dimname):
except:
raise ValueError("cannot find dimension %s in this group or parent groups" % dimname)
if dim is None:
- raise KeyError("dimension %s not defined in group %s or any group in it's family tree" % (dimname, grp.path))
+ raise KeyError("dimension %s not defined in group %s or any group in its family tree" % (dimname, grp.path))
else:
return dim
@@ -426,11 +426,11 @@ Boolean array must have the same shape as the data along this dimension."""
# ITERABLE #
elif np.iterable(e) and np.array(e).dtype.kind in 'i': # Sequence of integers
- start[...,i] = np.apply_along_axis(lambda x: e*x, i, np.ones(sdim[:-1]))
- indices[...,i] = np.apply_along_axis(lambda x: np.arange(sdim[i])*x, i, np.ones(sdim[:-1], int))
-
- count[...,i] = 1
- stride[...,i] = 1
+ if start[...,i].size:
+ start[...,i] = np.apply_along_axis(lambda x: e*x, i, np.ones(sdim[:-1]))
+ indices[...,i] = np.apply_along_axis(lambda x: np.arange(sdim[i])*x, i, np.ones(sdim[:-1], int))
+ count[...,i] = 1
+ stride[...,i] = 1
# all that's left is SCALAR INTEGER #
else:
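
The guard added above presumably sidesteps a numpy restriction: np.apply_along_axis refuses to iterate over zero-size dimensions, so the unguarded code would fail whenever the indexed variable has a zero-length dimension. A minimal reproduction of the numpy behavior being avoided:

    import numpy as np

    # np.apply_along_axis cannot iterate over empty dimensions, which is
    # presumably why utils.py now skips it when start[..., i].size == 0.
    empty = np.ones((0, 2))
    try:
        np.apply_along_axis(lambda x: x * 2, 1, empty)
    except ValueError as err:
        print(err)  # "Cannot apply_along_axis when any iteration dimensions are 0"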
=====================================
test/test_cdl.py
=====================================
@@ -68,8 +68,18 @@ class Test_CDL(unittest.TestCase):
assert len(f1.dimensions["d"]) == len(f2.dimensions["d"])
assert (f1["ub"][:] == f2["ub"][:]).all()
assert (f1["sb"][:] == f2["sb"][:]).all()
+
+ # test if os.PathLike works
+ with netCDF4.Dataset.fromcdl(pathlib.Path("ubyte.cdl"), ncfilename=pathlib.Path("ubyte3.nc")) as f3:
+ assert f1.variables.keys() == f3.variables.keys()
+ # check if correct errors are raised
+ self.assertRaises(FileNotFoundError, netCDF4.Dataset.fromcdl, "doesnotexist.cdl")
+ self.assertRaises(FileExistsError, netCDF4.Dataset.fromcdl, "ubyte.cdl", ncfilename="ubyte2.nc")
+
+ # cleanup
os.remove("ubyte2.nc")
+ os.remove("ubyte3.nc")
def tearDown(self):
# Remove the temporary files
=====================================
test/test_no_iter_contains.py
=====================================
@@ -0,0 +1,34 @@
+import os
+import tempfile
+import unittest
+
+import netCDF4
+
+FILE_NAME = tempfile.NamedTemporaryFile(suffix='.nc', delete=False).name
+
+
+class TestNoIterNoContains(unittest.TestCase):
+ def setUp(self) -> None:
+ self.file = FILE_NAME
+ with netCDF4.Dataset(self.file, "w") as dataset:
+ # just create a simple variable
+ dataset.createVariable("var1", int)
+
+ def tearDown(self) -> None:
+ os.remove(self.file)
+
+ def test_no_iter(self) -> None:
+ """Verify that iteration is explicitly not supported"""
+ with netCDF4.Dataset(self.file, "r") as dataset:
+ with self.assertRaises(TypeError):
+ for _ in dataset: # type: ignore # type checker catches that this doesn't work
+ pass
+
+ def test_no_contains(self) -> None:
+ """Verify the membership operations are explicity not supported"""
+ with netCDF4.Dataset(self.file, "r") as dataset:
+ with self.assertRaises(TypeError):
+ _ = "var1" in dataset
+
+if __name__ == "__main__":
+ unittest.main(verbosity=2)
=====================================
test/test_open_mem.py
=====================================
@@ -17,12 +17,6 @@ class TestOpenMem(unittest.TestCase):
netCDF4.Dataset('foo_bar', memory=nc_bytes)
return
- # Needs: https://github.com/Unidata/netcdf-c/pull/400
- if netCDF4.__netcdf4libversion__ < '4.4.1.2':
- with self.assertRaises(OSError):
- netCDF4.Dataset('foo_bar', memory=nc_bytes)
- return
-
with netCDF4.Dataset('foo_bar', memory=nc_bytes) as nc:
assert nc.filepath() == 'foo_bar'
assert nc.project_summary == 'Dummy netCDF file'
=====================================
test/test_vlen.py
=====================================
@@ -76,6 +76,15 @@ class VariablesTestCase(unittest.TestCase):
assert_array_equal(data2[j,i], data[j,i])
assert datas[j,i] == data2s[j,i]
assert_array_equal(datas, vs_alt[:])
+ # issue #1408
+ data2a = data2[::2,::2]
+ data2b = v[::2,::2]
+ data2sa = data2s[::2,::2]
+ data2sb = vs[::2,::2]
+ for i in range(nlons//2):
+ for j in range(nlats//2):
+ assert_array_equal(data2a[j,i], data2b[j,i])
+ assert_array_equal(data2sa[j,i], data2sb[j,i])
f.close()
View it on GitLab: https://salsa.debian.org/debian-gis-team/netcdf4-python/-/compare/c9fb890b157de5058f31e0d76ba011bcce1629d1...0af5ef740fe441cea2724321f75bf0e69cc0c270