[Git][debian-gis-team/netcdf4-python][master] 4 commits: New upstream version 1.7.4
Bas Couwenberg (@sebastic)
gitlab at salsa.debian.org
Mon Jan 5 04:46:03 GMT 2026
Bas Couwenberg pushed to branch master at Debian GIS Project / netcdf4-python
Commits:
0c53a681 by Bas Couwenberg at 2026-01-05T05:38:54+01:00
New upstream version 1.7.4
- - - - -
2b36974c by Bas Couwenberg at 2026-01-05T05:38:56+01:00
Update upstream source from tag 'upstream/1.7.4'
Update to upstream version '1.7.4'
with Debian dir 1c50cb9c32fd03c9140cde7c49cea57066dbf0e2
- - - - -
625d9e66 by Bas Couwenberg at 2026-01-05T05:39:35+01:00
New upstream release.
- - - - -
f11d3e88 by Bas Couwenberg at 2026-01-05T05:40:50+01:00
Set distribution to unstable.
- - - - -
25 changed files:
- .github/workflows/build_latest.yml
- .github/workflows/build_master.yml
- .github/workflows/build_old.yml
- .github/workflows/cibuildwheel.yml
- .github/workflows/miniconda.yml
- Changelog
- README.md
- debian/changelog
- docs/index.html
- include/netcdf-compat.h
- pyproject.toml
- setup.py
- src/netCDF4/__init__.py
- src/netCDF4/__init__.pyi
- src/netCDF4/_netCDF4.pyx
- test/run_all.py
- test/test_compression_blosc.py
- test/test_masked2.py
- test/test_masked3.py
- test/test_masked4.py
- test/test_masked5.py
- test/test_masked6.py
- test/test_scaled.py
- test/test_stringarr.py
- test/test_types.py
Changes:
=====================================
.github/workflows/build_latest.yml
=====================================
@@ -17,7 +17,7 @@ jobs:
python-version: ["3.14"]
steps:
- - uses: actions/checkout at v5
+ - uses: actions/checkout at v6
with:
submodules: true
=====================================
.github/workflows/build_master.yml
=====================================
@@ -14,7 +14,7 @@ jobs:
python-version: ["3.14"]
steps:
- - uses: actions/checkout at v5
+ - uses: actions/checkout at v6
with:
submodules: true
=====================================
.github/workflows/build_old.yml
=====================================
@@ -17,7 +17,7 @@ jobs:
python-version: ["3.14"]
steps:
- - uses: actions/checkout at v5
+ - uses: actions/checkout at v6
with:
submodules: true
=====================================
.github/workflows/cibuildwheel.yml
=====================================
@@ -18,7 +18,7 @@ jobs:
name: Build source distribution
runs-on: ubuntu-22.04
steps:
- - uses: actions/checkout at v5
+ - uses: actions/checkout at v6
with:
fetch-depth: 0
@@ -38,7 +38,7 @@ jobs:
pip install build
&& python -m build --sdist . --outdir dist
- - uses: actions/upload-artifact at v4
+ - uses: actions/upload-artifact at v6
with:
name: pypi-artifacts
path: ${{ github.workspace }}/dist/*.tar.gz
@@ -55,15 +55,11 @@ jobs:
include:
- os: ubuntu-22.04
arch: x86_64
- - os: ubuntu-22.04
+ - os: ubuntu-24.04-arm
arch: aarch64
- - os: macos-14
- arch: arm64
- - os: macos-13
- arch: x86_64
steps:
- - uses: actions/checkout at v5
+ - uses: actions/checkout at v6
with:
fetch-depth: 0
@@ -83,32 +79,38 @@ jobs:
# These needs to rotate every new Python release.
run: |
set -x
- echo "CIBW_BUILD=cp310-* cp311-* cp314-*" >> $GITHUB_ENV
+ echo "CIBW_BUILD=cp310-* cp311-* cp314-* cp314t-*" >> $GITHUB_ENV
set +x
if: ${{ github.event_name }} == "pull_request"
- name: "Building ${{ matrix.os }} (${{ matrix.arch }}) wheels"
- uses: pypa/cibuildwheel at v3.2.0
+ uses: pypa/cibuildwheel at v3.3.0
env:
CIBW_ARCHS: ${{ matrix.arch }}
- - uses: actions/upload-artifact at v4
+ - uses: actions/upload-artifact at v6
with:
name: pypi-artifacts-${{ matrix.os }}-${{ matrix.arch }}
path: ${{ github.workspace }}/wheelhouse/*.whl
- build_wheels_windows:
+ build_wheels_winmac:
name: Build wheels for ${{matrix.arch}} on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
+ fail-fast: false
matrix:
- os: [windows-latest]
- arch: [AMD64]
+ include:
+ - os: windows-latest
+ arch: AMD64
+ - os: macos-14
+ arch: arm64
+ - os: macos-15-intel
+ arch: x86_64
steps:
- - uses: actions/checkout at v5
+ - uses: actions/checkout at v6
with:
fetch-depth: 0
@@ -123,27 +125,65 @@ jobs:
environment-name: build
init-shell: bash
create-args: >-
- python=${{ matrix.python-version }} libnetcdf=4.9.2 --channel conda-forge
+ python=${{ matrix.python-version }} libnetcdf=4.9.3 --channel conda-forge
- - name: Build wheels for Windows (${{ matrix.arch }})
- uses: pypa/cibuildwheel at v3.2.0
+ - name: Build wheels for Windows/Mac
+ uses: pypa/cibuildwheel at v3.3.0
env:
CIBW_ARCHS: ${{ matrix.arch }}
- # cannot build cftime for this target (missing a wheel at the time of writing)
- CIBW_SKIP: "cp314*"
- - uses: actions/upload-artifact at v4
+ - uses: actions/upload-artifact at v6
with:
name: pypi-artifacts-${{ matrix.os }}-${{ matrix.arch }}
path: ${{ github.workspace }}/wheelhouse/*.whl
+ build_wheels_windows_arm:
+ name: Build wheels for ARM64 on Windows
+ runs-on: windows-11-arm
+ steps:
+ - uses: actions/checkout at v6
+ with:
+ fetch-depth: 0
+
+ - uses: actions/setup-python at v6
+ name: Install Python
+ with:
+ python-version: 3.x
+
+ - name: Install vcpkg dependencies
+ shell: pwsh
+ run: |
+ # Install vcpkg
+ git clone https://github.com/Microsoft/vcpkg.git C:\vcpkg
+ cd C:\vcpkg
+ .\bootstrap-vcpkg.bat
+
+ # Install netcdf and dependencies
+ .\vcpkg.exe install hdf5:arm64-windows netcdf-c:arm64-windows zlib:arm64-windows
+
+ # Set environment variables for build
+ echo "HDF5_DIR=C:\vcpkg\installed\arm64-windows" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
+ echo "NETCDF4_DIR=C:\vcpkg\installed\arm64-windows" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
+
+ - name: Build wheels for Windows ARM64
+ uses: pypa/cibuildwheel at v3.3.0
+ env:
+ CIBW_ARCHS: ARM64
+ CIBW_SKIP: "cp310-*"
+
+ - uses: actions/upload-artifact at v6
+ with:
+ name: pypi-artifacts-windows-11-arm-ARM64
+ path: ${{ github.workspace }}/wheelhouse/*.whl
+
+
show-artifacts:
- needs: [build_bdist, build_sdist, build_wheels_windows]
+ needs: [build_bdist, build_sdist, build_wheels_winmac, build_wheels_windows_arm]
name: "Show artifacts"
runs-on: ubuntu-22.04
steps:
- - uses: actions/download-artifact at v5
+ - uses: actions/download-artifact at v7
with:
pattern: pypi-artifacts*
path: ${{ github.workspace }}/dist
@@ -155,13 +195,13 @@ jobs:
publish-artifacts-pypi:
- needs: [build_bdist, build_sdist, build_wheels_windows]
+ needs: [build_bdist, build_sdist, build_wheels_winmac, build_wheels_windows_arm]
name: "Publish to PyPI"
runs-on: ubuntu-22.04
# upload to PyPI for every tag starting with 'v'
if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v')
steps:
- - uses: actions/download-artifact at v5
+ - uses: actions/download-artifact at v7
with:
pattern: pypi-artifacts*
path: ${{ github.workspace }}/dist
=====================================
.github/workflows/miniconda.yml
=====================================
@@ -24,7 +24,7 @@ jobs:
shell: bash -l {0}
steps:
- - uses: actions/checkout at v5
+ - uses: actions/checkout at v6
with:
submodules: true
@@ -45,6 +45,11 @@ jobs:
- name: Tests
run: |
+ if [ "$RUNNER_OS" == "Windows" ]; then
+ export HDF5_PLUGIN_PATH="${CONDA_PREFIX}\\Library\\hdf5\\lib\\plugin"
+ else
+ export HDF5_PLUGIN_PATH="${CONDA_PREFIX}/hdf5/lib/plugin/"
+ fi
pytest -s -rxs -v test
run-mpi:
@@ -58,7 +63,7 @@ jobs:
run:
shell: bash -l {0}
steps:
- - uses: actions/checkout at v5
+ - uses: actions/checkout at v6
with:
submodules: true
=====================================
Changelog
=====================================
@@ -1,3 +1,14 @@
+ version 1.7.4 (tag v1.7.4rel)
+ ================================
+ * Make sure automatic conversion of character arrays <--> string arrays works for Unicode strings (issue #1440).
+ (previously only worked correctly for encoding="ascii").
+ * Add netcdf plugins (blosc, zstd, bzip2) in wheels. Blosc plugin doesn't work in Windows wheels.
+ Macos wheels now use conda provided libs. (PR #1450)
+ * Add windows/arm (PR #1453) and free-threaded python wheels (issue #1454). Windows wheels now use netcdf-c 4.9.3.
WARNING: netcdf-c is not thread-safe and netcdf4-python does not have internal locking so expect segfaults if you
+ use netcdf4-python on multiple threads with free-threaded python. Users must exercise care to only call netcdf from
+ a single thread.
+
version 1.7.3 (tag v1.7.3rel)
=============================
* Python 3.14 wheels (issue #1432)
=====================================
README.md
=====================================
@@ -1,7 +1,7 @@
# [netcdf4-python](http://unidata.github.io/netcdf4-python)
[Python](http://python.org)/[numpy](http://numpy.org) interface to the netCDF [C library](https://github.com/Unidata/netcdf-c).
-[](https://github.com/Unidata/netcdf4-python/actions)
+[](https://github.com/Unidata/netcdf4-python/actions/workflows/github-code-scanning/codeql)
[](http://python.org/pypi/netCDF4)
[](https://anaconda.org/conda-forge/netCDF4)
[](https://doi.org/10.5281/zenodo.2592290)
@@ -10,6 +10,12 @@
## News
For details on the latest updates, see the [Changelog](https://github.com/Unidata/netcdf4-python/blob/master/Changelog).
+1/5/2026: Version [1.7.4](https://pypi.python.org/pypi/netCDF4/1.7.4) released. Compression plugins now included in wheels, windows/arm64 and
+free-threaded python wheels provided. Automatic conversion of character arrays <--> string arrays works for Unicode (not just ascii) strings.
+WARNING: netcdf-c is not thread-safe and netcdf4-python does not have internal locking so expect segfaults if you
+use netcdf4-python on multiple threads with free-threaded python. Users must exercise care to only call netcdf from
+a single thread.
+
10/13/2025: Version [1.7.3](https://pypi.python.org/pypi/netCDF4/1.7.3) released. Minor updates/bugfixes and python 3.14 wheels, see Changelog for details.
10/22/2024: Version [1.7.2](https://pypi.python.org/pypi/netCDF4/1.7.2) released. Minor updates/bugfixes and python 3.13 wheels, see Changelog for details.
=====================================
debian/changelog
=====================================
@@ -1,12 +1,13 @@
-netcdf4-python (1.7.3-2) UNRELEASED; urgency=medium
+netcdf4-python (1.7.4-1) unstable; urgency=medium
+ * New upstream release.
* Use test-build-validate-cleanup instead of test-build-twice.
* Use pytest in dh_auto_test instead of custom command.
* Make pytest output verbose.
* Drop Priority: optional, default since dpkg 1.22.13.
* Bump Standards-Version to 4.7.3, changes: priority.
- -- Bas Couwenberg <sebastic at debian.org> Sat, 25 Oct 2025 13:00:45 +0200
+ -- Bas Couwenberg <sebastic at debian.org> Mon, 05 Jan 2026 05:40:40 +0100
netcdf4-python (1.7.3-1) unstable; urgency=medium
=====================================
docs/index.html
=====================================
@@ -3,20 +3,31 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
-<meta name="generator" content="pdoc3 0.11.1">
+<meta name="generator" content="pdoc3 0.11.6">
<title>netCDF4 API documentation</title>
-<meta name="description" content="Version 1.7.2
+<meta name="description" content="Version 1.7.4
…">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/typography.min.css" integrity="sha512-Y1DYSb995BAfxobCkKepB1BqJJTPrOp3zPL74AWFugHHmmdcvO+C48WLrUOlhGMc0QG7AE3f7gmvvcrmX2fDoA==" crossorigin>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/styles/default.min.css" crossorigin>
-<style>:root{--highlight-color:#fe9}.flex{display:flex !important}body{line-height:1.5em}#content{padding:20px}#sidebar{padding:1.5em;overflow:hidden}#sidebar > *:last-child{margin-bottom:2cm}.http-server-breadcrumbs{font-size:130%;margin:0 0 15px 0}#footer{font-size:.75em;padding:5px 30px;border-top:1px solid #ddd;text-align:right}#footer p{margin:0 0 0 1em;display:inline-block}#footer p:last-child{margin-right:30px}h1,h2,h3,h4,h5{font-weight:300}h1{font-size:2.5em;line-height:1.1em}h2{font-size:1.75em;margin:2em 0 .50em 0}h3{font-size:1.4em;margin:1.6em 0 .7em 0}h4{margin:0;font-size:105%}h1:target,h2:target,h3:target,h4:target,h5:target,h6:target{background:var(--highlight-color);padding:.2em 0}a{color:#058;text-decoration:none;transition:color .2s ease-in-out}a:visited{color:#503}a:hover{color:#b62}.title code{font-weight:bold}h2[id^="header-"]{margin-top:2em}.ident{color:#900;font-weight:bold}pre code{font-size:.8em;line-height:1.4em;padding:1em;display:block}code{background:#f3f3f3;font-family:"DejaVu Sans Mono",monospace;padding:1px 4px;overflow-wrap:break-word}h1 code{background:transparent}pre{border-top:1px solid #ccc;border-bottom:1px solid #ccc;margin:1em 0}#http-server-module-list{display:flex;flex-flow:column}#http-server-module-list div{display:flex}#http-server-module-list dt{min-width:10%}#http-server-module-list p{margin-top:0}.toc ul,#index{list-style-type:none;margin:0;padding:0}#index code{background:transparent}#index h3{border-bottom:1px solid #ddd}#index ul{padding:0}#index h4{margin-top:.6em;font-weight:bold}@media (min-width:200ex){#index .two-column{column-count:2}}@media (min-width:300ex){#index .two-column{column-count:3}}dl{margin-bottom:2em}dl dl:last-child{margin-bottom:4em}dd{margin:0 0 1em 3em}#header-classes + dl > dd{margin-bottom:3em}dd dd{margin-left:2em}dd p{margin:10px 0}.name{background:#eee;font-size:.85em;padding:5px 10px;display:inline-block;min-width:40%}.name:hover{background:#e0e0e0}dt:target 
.name{background:var(--highlight-color)}.name > span:first-child{white-space:nowrap}.name.class > span:nth-child(2){margin-left:.4em}.inherited{color:#999;border-left:5px solid #eee;padding-left:1em}.inheritance em{font-style:normal;font-weight:bold}.desc h2{font-weight:400;font-size:1.25em}.desc h3{font-size:1em}.desc dt code{background:inherit}.source summary,.git-link-div{color:#666;text-align:right;font-weight:400;font-size:.8em;text-transform:uppercase}.source summary > *{white-space:nowrap;cursor:pointer}.git-link{color:inherit;margin-left:1em}.source pre{max-height:500px;overflow:auto;margin:0}.source pre code{font-size:12px;overflow:visible}.hlist{list-style:none}.hlist li{display:inline}.hlist li:after{content:',\2002'}.hlist li:last-child:after{content:none}.hlist .hlist{display:inline;padding-left:1em}img{max-width:100%}td{padding:0 .5em}.admonition{padding:.1em 1em;margin-bottom:1em}.admonition-title{font-weight:bold}.admonition.note,.admonition.info,.admonition.important{background:#aef}.admonition.todo,.admonition.versionadded,.admonition.tip,.admonition.hint{background:#dfd}.admonition.warning,.admonition.versionchanged,.admonition.deprecated{background:#fd4}.admonition.error,.admonition.danger,.admonition.caution{background:lightpink}</style>
+<style>:root{--highlight-color:#fe9}.flex{display:flex !important}body{line-height:1.5em}#content{padding:20px}#sidebar{padding:1.5em;overflow:hidden}#sidebar > *:last-child{margin-bottom:2cm}.http-server-breadcrumbs{font-size:130%;margin:0 0 15px 0}#footer{font-size:.75em;padding:5px 30px;border-top:1px solid #ddd;text-align:right}#footer p{margin:0 0 0 1em;display:inline-block}#footer p:last-child{margin-right:30px}h1,h2,h3,h4,h5{font-weight:300}h1{font-size:2.5em;line-height:1.1em}h2{font-size:1.75em;margin:2em 0 .50em 0}h3{font-size:1.4em;margin:1.6em 0 .7em 0}h4{margin:0;font-size:105%}h1:target,h2:target,h3:target,h4:target,h5:target,h6:target{background:var(--highlight-color);padding:.2em 0}a{color:#058;text-decoration:none;transition:color .2s ease-in-out}a:visited{color:#503}a:hover{color:#b62}.title code{font-weight:bold}h2[id^="header-"]{margin-top:2em}.ident{color:#900;font-weight:bold}pre code{font-size:.8em;line-height:1.4em;padding:1em;display:block}code{background:#f3f3f3;font-family:"DejaVu Sans Mono",monospace;padding:1px 4px;overflow-wrap:break-word}h1 code{background:transparent}pre{border-top:1px solid #ccc;border-bottom:1px solid #ccc;margin:1em 0}#http-server-module-list{display:flex;flex-flow:column}#http-server-module-list div{display:flex}#http-server-module-list dt{min-width:10%}#http-server-module-list p{margin-top:0}.toc ul,#index{list-style-type:none;margin:0;padding:0}#index code{background:transparent}#index h3{border-bottom:1px solid #ddd}#index ul{padding:0}#index h4{margin-top:.6em;font-weight:bold}@media (min-width:200ex){#index .two-column{column-count:2}}@media (min-width:300ex){#index .two-column{column-count:3}}dl{margin-bottom:2em}dl dl:last-child{margin-bottom:4em}dd{margin:0 0 1em 3em}#header-classes + dl > dd{margin-bottom:3em}dd dd{margin-left:2em}dd p{margin:10px 0}.name{background:#eee;font-size:.85em;padding:5px 10px;display:inline-block;min-width:40%}.name:hover{background:#e0e0e0}dt:target 
.name{background:var(--highlight-color)}.name > span:first-child{white-space:nowrap}.name.class > span:nth-child(2){margin-left:.4em}.inherited{color:#999;border-left:5px solid #eee;padding-left:1em}.inheritance em{font-style:normal;font-weight:bold}.desc h2{font-weight:400;font-size:1.25em}.desc h3{font-size:1em}.desc dt code{background:inherit}.source > summary,.git-link-div{color:#666;text-align:right;font-weight:400;font-size:.8em;text-transform:uppercase}.source summary > *{white-space:nowrap;cursor:pointer}.git-link{color:inherit;margin-left:1em}.source pre{max-height:500px;overflow:auto;margin:0}.source pre code{font-size:12px;overflow:visible;min-width:max-content}.hlist{list-style:none}.hlist li{display:inline}.hlist li:after{content:',\2002'}.hlist li:last-child:after{content:none}.hlist .hlist{display:inline;padding-left:1em}img{max-width:100%}td{padding:0 .5em}.admonition{padding:.1em 1em;margin:1em 0}.admonition-title{font-weight:bold}.admonition.note,.admonition.info,.admonition.important{background:#aef}.admonition.todo,.admonition.versionadded,.admonition.tip,.admonition.hint{background:#dfd}.admonition.warning,.admonition.versionchanged,.admonition.deprecated{background:#fd4}.admonition.error,.admonition.danger,.admonition.caution{background:lightpink}</style>
<style media="screen and (min-width: 700px)">@media screen and (min-width:700px){#sidebar{width:30%;height:100vh;overflow:auto;position:sticky;top:0}#content{width:70%;max-width:100ch;padding:3em 4em;border-left:1px solid #ddd}pre code{font-size:1em}.name{font-size:1em}main{display:flex;flex-direction:row-reverse;justify-content:flex-end}.toc ul ul,#index ul ul{padding-left:1em}.toc > ul > li{margin-top:.5em}}</style>
<style media="print">@media print{#sidebar h1{page-break-before:always}.source{display:none}}@media print{*{background:transparent !important;color:#000 !important;box-shadow:none !important;text-shadow:none !important}a[href]:after{content:" (" attr(href) ")";font-size:90%}a[href][title]:after{content:none}abbr[title]:after{content:" (" attr(title) ")"}.ir a:after,a[href^="javascript:"]:after,a[href^="#"]:after{content:""}pre,blockquote{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}tr,img{page-break-inside:avoid}img{max-width:100% !important}@page{margin:0.5cm}p,h2,h3{orphans:3;widows:3}h1,h2,h3,h4,h5,h6{page-break-after:avoid}}</style>
<script defer src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/highlight.min.js" integrity="sha512-D9gUyxqja7hBtkWpPWGt9wfbfaMGVt9gnyCvYa+jojwwPHLCzUm5i8rpk7vD7wNee9bA35eYIjobYPaQuKS1MQ==" crossorigin></script>
<script>window.addEventListener('DOMContentLoaded', () => {
hljs.configure({languages: ['bash', 'css', 'diff', 'graphql', 'ini', 'javascript', 'json', 'plaintext', 'python', 'python-repl', 'rust', 'shell', 'sql', 'typescript', 'xml', 'yaml']});
hljs.highlightAll();
+/* Collapse source docstrings */
+setTimeout(() => {
+[...document.querySelectorAll('.hljs.language-python > .hljs-string')]
+.filter(el => el.innerHTML.length > 200 && ['"""', "'''"].includes(el.innerHTML.substring(0, 3)))
+.forEach(el => {
+let d = document.createElement('details');
+d.classList.add('hljs-string');
+d.innerHTML = '<summary>"""</summary>' + el.innerHTML.substring(3);
+el.replaceWith(d);
+});
+}, 100);
})</script>
</head>
<body>
@@ -26,7 +37,7 @@ hljs.highlightAll();
<h1 class="title">Package <code>netCDF4</code></h1>
</header>
<section id="section-intro">
-<h2 id="version-172">Version 1.7.2</h2>
+<h2 id="version-174">Version 1.7.4</h2>
<h1 id="introduction">Introduction</h1>
<p>netcdf4-python is a Python interface to the netCDF C library.</p>
<p><a href="http://www.unidata.ucar.edu/software/netcdf/">netCDF</a> version 4 has many features
@@ -1003,6 +1014,11 @@ to a variable in an existing file, you must use <code><a title="netCDF4.Variable
to write to it.</li>
<li>You cannot use variable-length (VLEN) data types.</li>
</ul>
+<p><strong><em>Important warning regarding threads:</em></strong>
+The underlying netcdf-c library is not thread-safe, so netcdf4-python cannot perform parallel
+IO in a multi-threaded environment.
+Users should expect segfaults if a netcdf file is opened on multiple threads - care should
+be taken to restrict netcdf4-python usage to a single thread, even when using free-threaded python.</p>
<h2 id="dealing-with-strings">Dealing with strings</h2>
<p>The most flexible way to store arrays of strings is with the
<a href="#variable-length-vlen-data-type">Variable-length (vlen) string data type</a>. However, this requires
@@ -1018,7 +1034,7 @@ If the <code>_Encoding</code> special attribute is set for a character array
(dtype <code>S1</code>) variable, the <code><a title="netCDF4.chartostring" href="#netCDF4.chartostring">chartostring()</a></code> utility function is used to convert the array of
characters to an array of strings with one less dimension (the last dimension is
interpreted as the length of each string) when reading the data. The character
-set (usually ascii) is specified by the <code>_Encoding</code> attribute. If <code>_Encoding</code>
+set is specified by the <code>_Encoding</code> attribute. If <code>_Encoding</code>
is 'none' or 'bytes', then the character array is converted to a numpy
fixed-width byte string array (dtype <code>S#</code>), otherwise a numpy unicode (dtype
<code>U#</code>) array is created.
@@ -1196,7 +1212,7 @@ formats.</li>
<p>Support for complex numbers is handled via the
<a href="https://github.com/PlasmaFAIR/nc-complex"><code>nc-complex</code></a> library. See there for
further details.</p>
-<p><strong>contact</strong>: Jeffrey Whitaker <a href="mailto:jeffrey.s.whitaker@noaa.gov">jeffrey.s.whitaker@noaa.gov</a></p>
+<p><strong>contact</strong>: Jeffrey Whitaker <a href="mailto:whitaker.jeffrey@gmail.com">whitaker.jeffrey@gmail.com</a></p>
<p><strong>copyright</strong>: 2008 by Jeffrey Whitaker.</p>
<p><strong>license</strong>: Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:</p>
<p>The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.</p>
@@ -1229,7 +1245,7 @@ returned.</p>
<span>def <span class="ident">date2index</span></span>(<span>dates, nctime, calendar=None, select='exact', has_year_zero=None)</span>
</code></dt>
<dd>
-<div class="desc"><p>date2index(dates, nctime, calendar=None, select=u'exact', has_year_zero=None)</p>
+<div class="desc"><p>date2index(dates, nctime, calendar=None, select='exact', has_year_zero=None)</p>
<p>Return indices of a netCDF time variable corresponding to the given dates.</p>
<p><strong>dates</strong>: A datetime object or a sequence of datetime objects.
The datetime objects should not include a time-zone offset.</p>
@@ -1347,10 +1363,10 @@ details. Values can be reset with <code><a title="netCDF4.set_chunk_cache" href=
used to build the module, and when it was built.</p></div>
</dd>
<dt id="netCDF4.num2date"><code class="name flex">
-<span>def <span class="ident">num2date</span></span>(<span>times, units, calendar='standard', only_use_cftime_datetimes=True, only_use_python_datetimes=False, has_year_zero=None)</span>
+<span>def <span class="ident">num2date</span></span>(<span>times,<br>units,<br>calendar='standard',<br>only_use_cftime_datetimes=True,<br>only_use_python_datetimes=False,<br>has_year_zero=None)</span>
</code></dt>
<dd>
-<div class="desc"><p>num2date(times, units, calendar=u'standard', only_use_cftime_datetimes=True, only_use_python_datetimes=False, has_year_zero=None)</p>
+<div class="desc"><p>num2date(times, units, calendar='standard', only_use_cftime_datetimes=True, only_use_python_datetimes=False, has_year_zero=None)</p>
<p>Return datetime objects given numeric time values. The units
of the numeric time values are described by the <strong>units</strong> argument
and the <strong>calendar</strong> keyword. The returned datetime objects represent
@@ -1460,10 +1476,10 @@ unicode array (dtype = <code>'U1'</code>) will be returned.</p>
(default) or <code>'U1'</code> (if dtype=<code>'U'</code>)</p></div>
</dd>
<dt id="netCDF4.stringtochar"><code class="name flex">
-<span>def <span class="ident">stringtochar</span></span>(<span>a, encoding='utf-8')</span>
+<span>def <span class="ident">stringtochar</span></span>(<span>a, encoding='utf-8', n_strlen=None)</span>
</code></dt>
<dd>
-<div class="desc"><p><strong><code>stringtochar(a,encoding='utf-8')</code></strong></p>
+<div class="desc"><p><strong><code>stringtochar(a,encoding='utf-8',n_strlen=None)</code></strong></p>
<p>convert a string array to a character array with one extra dimension</p>
<p><strong><code>a</code></strong>:
Input numpy string array with numpy datatype <code>'SN'</code> or <code>'UN'</code>, where N
@@ -1473,6 +1489,10 @@ an array of characters (datatype <code>'S1'</code> or <code>'U1'</code>) of shap
<p>optional kwarg <code>encoding</code> can be used to specify character encoding (default
<code>utf-8</code>). If <code>encoding</code> is 'none' or 'bytes', the input array
is treated as raw byte strings (<code>numpy.string_</code>).</p>
+<p>optional kwarg <code>n_strlen</code> is the number of characters in each string.
+Default
+is None, which means <code>n_strlen</code> will be set to a.itemsize (the number of bytes
+used to represent each string in the input array).</p>
<p>returns a numpy character array with datatype <code>'S1'</code> or <code>'U1'</code>
and shape <code>a.shape + (N,)</code>, where N is the length of each string in a.</p></div>
</dd>
@@ -1834,7 +1854,7 @@ dtype object <code>datatype</code>.</p>
datatype.</p></div>
</dd>
<dt id="netCDF4.Dataset.createVariable"><code class="name flex">
-<span>def <span class="ident">createVariable</span></span>(<span>self, varname, datatype, dimensions=(), compression=None, zlib=False, complevel=4, shuffle=True, szip_coding='nn', szip_pixels_per_block=8, blosc_shuffle=1, fletcher32=False, contiguous=False, chunksizes=None, endian='native', least_significant_digit=None, significant_digits=None, quantize_mode='BitGroom', fill_value=None, chunk_cache=None)</span>
+<span>def <span class="ident">createVariable</span></span>(<span>self,<br>varname,<br>datatype,<br>dimensions=(),<br>compression=None,<br>zlib=False,<br>complevel=4,<br>shuffle=True,<br>szip_coding='nn',<br>szip_pixels_per_block=8,<br>blosc_shuffle=1,<br>fletcher32=False,<br>contiguous=False,<br>chunksizes=None,<br>endian='native',<br>least_significant_digit=None,<br>significant_digits=None,<br>quantize_mode='BitGroom',<br>fill_value=None,<br>chunk_cache=None)</span>
</code></dt>
<dd>
<div class="desc"><p><strong><code>createVariable(self, varname, datatype, dimensions=(), compression=None, zlib=False,
@@ -3144,7 +3164,7 @@ slower than multiple calls to the unstrided read routine <code>nc_get_vara</code
<nav id="sidebar">
<div class="toc">
<ul>
-<li><a href="#version-172">Version 1.7.2</a></li>
+<li><a href="#version-174">Version 1.7.4</a></li>
<li><a href="#introduction">Introduction</a><ul>
<li><a href="#quick-install">Quick Install</a></li>
<li><a href="#developer-install">Developer Install</a></li>
@@ -3343,7 +3363,7 @@ slower than multiple calls to the unstrided read routine <code>nc_get_vara</code
</nav>
</main>
<footer id="footer">
-<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.1</a>.</p>
+<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
</footer>
</body>
</html>
=====================================
include/netcdf-compat.h
=====================================
@@ -60,7 +60,7 @@ static inline int nc_get_alignment(int* thresholdp, int* alignmentp) {
#else
#define HAS_NCRCSET 0
static inline int nc_rc_set(const char* key, const char* value) { return NC_EINVAL; }
-static inline const char *nc_rc_get(const char* key) { return NC_EINVAL; }
+static inline const char *nc_rc_get(const char* key) { return NULL; }
#endif
#if NC_VERSION_GE(4, 4, 0)
=====================================
pyproject.toml
=====================================
@@ -36,7 +36,8 @@ classifiers = [
dependencies = [
"cftime",
"certifi",
- "numpy",
+ "numpy>=2.3.0; platform_system == 'Windows' and platform_machine == 'ARM64'",
+ "numpy>=1.21.2; platform_system != 'Windows' or platform_machine != 'ARM64'",
]
dynamic = ["version"]
@@ -75,7 +76,7 @@ Repository = "https://github.com/Unidata/netcdf4-python"
where = ["src"]
[tool.setuptools.package-data]
-"netCDF4.plugins" = ["lib__nc*"]
+"netCDF4.plugins" = ["*__nc*"]
[tool.setuptools_scm]
@@ -84,6 +85,7 @@ pythonpath = ["test"]
filterwarnings = [
"error",
"ignore::UserWarning",
+ "ignore::RuntimeWarning",
]
[tool.mypy]
@@ -109,7 +111,6 @@ build-verbosity = 1
build-frontend = "build"
skip = [
"*-musllinux*",
- "cp314t-*",
]
test-extras = "tests"
test-sources = [
@@ -125,26 +126,43 @@ manylinux-aarch64-image = "ghcr.io/ocefpaf/manylinux_2_28_aarch64-netcdf"
environment = {NETCDF4_LIMITED_API="1"}
[tool.cibuildwheel.macos]
-before-build = "brew install hdf5 netcdf"
+# https://cibuildwheel.pypa.io/en/stable/faq/#macos-passing-dyld_library_path-to-delocate
+repair-wheel-command = """\
+DYLD_FALLBACK_LIBRARY_PATH=/Users/runner/micromamba/envs/build/lib \
+delocate-wheel --require-archs {delocate_archs} -w {dest_dir} -v {wheel} \
+"""
+
+[tool.cibuildwheel.windows]
+before-build = "python -m pip install delvewheel"
+repair-wheel-command = [
+ "delvewheel show --include blosc.dll;zstd.dll;lz4.dll {wheel}",
+ "delvewheel repair --include blosc.dll;zstd.dll;lz4.dll -w {dest_dir} {wheel}",
+]
+
+[[tool.cibuildwheel.overrides]]
+select = "*linux*"
+environment = {NETCDF_PLUGIN_DIR="/usr/local/hdf5/lib/plugin/"}
[[tool.cibuildwheel.overrides]]
select = "*-macosx_x86_64"
inherit.environment = "append"
-environment = {MACOSX_DEPLOYMENT_TARGET="13.0"}
+environment = {MACOSX_DEPLOYMENT_TARGET="13.0",HDF5_DIR="/Users/runner/micromamba/envs/build",netCDF4_DIR="/Users/runner/micromamba/envs/build",PATH="${PATH}:/Users/runner/micromamba/envs/build/bin",NETCDF_PLUGIN_DIR="/Users/runner/micromamba/envs/build/hdf5/lib/plugin"}
[[tool.cibuildwheel.overrides]]
select = "*-macosx_arm64"
inherit.environment = "append"
-environment = {MACOSX_DEPLOYMENT_TARGET="14.0"}
+environment = {MACOSX_DEPLOYMENT_TARGET="14.0",HDF5_DIR="/Users/runner/micromamba/envs/build",netCDF4_DIR="/Users/runner/micromamba/envs/build",PATH="${PATH}:/Users/runner/micromamba/envs/build/bin",NETCDF_PLUGIN_DIR="/Users/runner/micromamba/envs/build/hdf5/lib/plugin"}
-[tool.cibuildwheel.windows]
-before-build = "python -m pip install delvewheel"
+[[tool.cibuildwheel.overrides]]
+select = "*-win_*"
+inherit.environment = "append"
+environment = {HDF5_DIR='C:\\\\Users\\runneradmin\\micromamba\\envs\\build\\Library',netCDF4_DIR='C:\\\\Users\\runneradmin\\micromamba\\envs\\build\\Library',PATH='C:\\\\Users\\runneradmin\\micromamba\\envs\\build\\Library\\bin;${PATH}',NETCDF_PLUGIN_DIR='C:\\\\Users\\runneradmin\\micromamba\\envs\\build\\Library\\hdf5\\lib\\plugin'}
+
+[[tool.cibuildwheel.overrides]]
+select = "*-win_arm64"
+inherit.environment = "append"
+environment = { HDF5_DIR = 'C:\\\\vcpkg\\\\installed\\\\arm64-windows', netCDF4_DIR = 'C:\\\\vcpkg\\\\installed\\\\arm64-windows', PATH = 'C:\\\\vcpkg\\\\installed\\\\arm64-windows\\\\bin;${PATH}', NO_CDL = '1' }
repair-wheel-command = [
"delvewheel show {wheel}",
"delvewheel repair -w {dest_dir} {wheel}",
]
-
-[[tool.cibuildwheel.overrides]]
-select = "*-win_*"
-inherit.environment = "append"
-environment = {HDF5_DIR='C:\\\\Users\\runneradmin\\micromamba\\envs\\build\\Library', netCDF4_DIR='C:\\\\Users\\runneradmin\\micromamba\\envs\\build\\Library', PATH='C:\\\\Users\\runneradmin\\micromamba\\envs\\build\\Library\\bin;${PATH}' }
=====================================
setup.py
=====================================
@@ -474,9 +474,11 @@ else:
copied_plugins=False
if os.environ.get("NETCDF_PLUGIN_DIR"):
plugin_dir = os.environ.get("NETCDF_PLUGIN_DIR")
- plugins = glob.glob(os.path.join(plugin_dir, "lib__nc*"))
+ plugins = glob.glob(os.path.join(plugin_dir, "*__nc*"))
if not plugins:
- print('no plugin files in NETCDF_PLUGIN_DIR, not installing...')
+ print('no plugin files in %s, not installing...' % plugin_dir)
+ if not os.path.exists(plugin_dir):
+ print('directory %s does not exist!' % plugin_dir)
data_files = []
else:
data_files = plugins
=====================================
src/netCDF4/__init__.py
=====================================
@@ -1,4 +1,12 @@
# init for netCDF4. package
+# if HDF5_PLUGIN_PATH not set, point to package path if plugins live there
+import os
+pluginpath = os.path.join(__path__[0],'plugins')
+if 'HDF5_PLUGIN_PATH' not in os.environ and\
+ (os.path.exists(os.path.join(pluginpath,'lib__nczhdf5filters.so')) or\
+ os.path.exists(os.path.join(pluginpath,'__nczhdf5filters.dll')) or\
+ os.path.exists(os.path.join(pluginpath,'lib__nczhdf5filters.dylib'))):
+ os.environ['HDF5_PLUGIN_PATH']=pluginpath
# Docstring comes from extension module _netCDF4.
from ._netCDF4 import *
# Need explicit imports for names beginning with underscores
@@ -11,7 +19,6 @@ from ._netCDF4 import (__version__, __netcdf4libversion__, __hdf5libversion__,
__has_quantization_support__, __has_zstandard_support__,
__has_bzip2_support__, __has_blosc_support__, __has_szip_support__,
__has_set_alignment__, __has_parallel_support__, __has_ncfilter__, __has_nc_rc_set__)
-import os
__all__ = [
'Dataset', 'Variable', 'Dimension', 'Group', 'MFDataset', 'MFTime', 'CompoundType',
'VLType', 'date2num', 'num2date', 'date2index', 'stringtochar', 'chartostring',
@@ -19,9 +26,3 @@ __all__ = [
'set_alignment', 'get_alignment', 'rc_get', 'rc_set',
]
__pdoc__ = {'utils': False}
-# if HDF5_PLUGIN_PATH not set, point to package path if plugins live there
-pluginpath = os.path.join(__path__[0],'plugins')
-if 'HDF5_PLUGIN_PATH' not in os.environ and\
- (os.path.exists(os.path.join(pluginpath,'lib__nczhdf5filters.so')) or\
- os.path.exists(os.path.join(pluginpath,'lib__nczhdf5filters.dylib'))):
- os.environ['HDF5_PLUGIN_PATH']=pluginpath
=====================================
src/netCDF4/__init__.pyi
=====================================
@@ -699,6 +699,7 @@ def stringtoarr(
def stringtochar(
a: npt.NDArray[np.character],
encoding: Literal["none", "None", "bytes"],
+ n_strlen: int | None = None,
) -> npt.NDArray[np.bytes_]: ...
@overload
def stringtochar(
=====================================
src/netCDF4/_netCDF4.pyx
=====================================
@@ -1,4 +1,4 @@
-"""Version 1.7.3
+"""Version 1.7.4
-------------
# Introduction
@@ -1050,6 +1050,10 @@ are collective. There are a couple of important limitations of parallel IO:
to write to it.
- You cannot use variable-length (VLEN) data types.
+***Important warning regarding threads:*** The underlying netcdf-c library is not thread-safe, so netcdf4-python cannot perform parallel
+IO in a multi-threaded environment. Users should expect segfaults if a netcdf file is opened on multiple threads - care should
+be taken to restrict netcdf4-python usage to a single thread, even when using free-threaded python.
+
## Dealing with strings
The most flexible way to store arrays of strings is with the
@@ -1066,7 +1070,7 @@ If the `_Encoding` special attribute is set for a character array
(dtype `S1`) variable, the `chartostring` utility function is used to convert the array of
characters to an array of strings with one less dimension (the last dimension is
interpreted as the length of each string) when reading the data. The character
-set (usually ascii) is specified by the `_Encoding` attribute. If `_Encoding`
+set is specified by the `_Encoding` attribute. If `_Encoding`
is 'none' or 'bytes', then the character array is converted to a numpy
fixed-width byte string array (dtype `S#`), otherwise a numpy unicode (dtype
`U#`) array is created. When writing the data,
@@ -1279,7 +1283,7 @@ import sys
import functools
from typing import Union
-__version__ = "1.7.3"
+__version__ = "1.7.4"
# Initialize numpy
import posixpath
@@ -3558,8 +3562,11 @@ to be installed and in `$PATH`.
"""**`has_blosc_filter(self)`**
returns True if blosc compression filter is available
"""
- if __has_blosc_support__:
- return False
+
+ #if __has_blosc_support__:
+ # return True
+ #else:
+ # return False
cdef int ierr
with nogil:
@@ -3571,8 +3578,10 @@ to be installed and in `$PATH`.
returns True if zstd compression filter is available
"""
- if __has_zstandard_support__:
- return False
+ #if __has_zstandard_support__:
+ # return True
+ #else:
+ # return False
cdef int ierr
with nogil:
@@ -3584,8 +3593,10 @@ to be installed and in `$PATH`.
returns True if bzip2 compression filter is available
"""
- if __has_bzip2_support__:
- return False
+ #if __has_bzip2_support__:
+ # return True
+ #else:
+ # return False
cdef int ierr
with nogil:
@@ -3597,11 +3608,13 @@ to be installed and in `$PATH`.
returns True if szip compression filter is available
"""
- if not __has_ncfilter__:
- return __has_szip_support__
+ #if not __has_ncfilter__:
+ # return __has_szip_support__
- if not __has_szip_support__:
- return False
+ #if __has_szip_support__:
+ # return True
+ #else:
+ # return False
cdef int ierr
with nogil:
@@ -5525,11 +5538,15 @@ cannot be safely cast to variable data type""" % attname
# if data is a string or a bytes object, convert to a numpy string array
# whose length is equal to the rightmost dimension of the
# variable.
- if type(data) in [str,bytes]: data = numpy.asarray(data,dtype='S'+repr(self.shape[-1]))
+ if type(data) in [str,bytes]:
+ if encoding == 'ascii':
+ data = numpy.asarray(data,dtype='S'+repr(self.shape[-1]))
+ else:
+ data = numpy.asarray(data,dtype='U'+repr(self.shape[-1]))
if data.dtype.kind in ['S','U'] and data.dtype.itemsize > 1:
# if data is a numpy string array, convert it to an array
# of characters with one more dimension.
- data = stringtochar(data, encoding=encoding)
+ data = stringtochar(data, encoding=encoding,n_strlen=self.shape[-1])
# if structured data has strings (and _Encoding att set), create view as char arrays
# (issue #773)
@@ -6771,9 +6788,9 @@ returns a rank 1 numpy character array of length NUMCHARS with datatype `'S1'`
arr[0:len(string)] = tuple(string)
return arr
-def stringtochar(a,encoding='utf-8'):
+def stringtochar(a,encoding='utf-8',n_strlen=None):
"""
-**`stringtochar(a,encoding='utf-8')`**
+**`stringtochar(a,encoding='utf-8',n_strlen=None)`**
convert a string array to a character array with one extra dimension
@@ -6785,16 +6802,29 @@ optional kwarg `encoding` can be used to specify character encoding (default
`utf-8`). If `encoding` is 'none' or 'bytes', the input array
is treated as raw byte strings (`numpy.string_`).
+optional kwarg `n_strlen` is the number of characters in each string. Default
+is None, which means `n_strlen` will be set to a.itemsize (the number of bytes
+used to represent each string in the input array).
+
returns a numpy character array with datatype `'S1'` or `'U1'`
and shape `a.shape + (N,)`, where N is the length of each string in a."""
dtype = a.dtype.kind
+ if n_strlen is None:
+ n_strlen = a.dtype.itemsize
if dtype not in ["S","U"]:
raise ValueError("type must string or unicode ('S' or 'U')")
if encoding in ['none','None','bytes']:
b = numpy.array(tuple(a.tobytes()),'S1')
- else:
+ elif encoding == 'ascii':
b = numpy.array(tuple(a.tobytes().decode(encoding)),dtype+'1')
- b.shape = a.shape + (a.itemsize,)
+ b.shape = a.shape + (n_strlen,)
+ else:
+ if not a.ndim:
+ a = numpy.array([a])
+ bbytes = [text.encode(encoding) for text in a]
+ pad = b'\0' * n_strlen
+ bbytes = [(x + pad)[:n_strlen] for x in bbytes]
+ b = numpy.array([[bb[i:i+1] for i in range(n_strlen)] for bb in bbytes])
return b
def chartostring(b,encoding='utf-8'):
@@ -6816,15 +6846,12 @@ returns a numpy string array with datatype `'UN'` (or `'SN'`) and shape
dtype = b.dtype.kind
if dtype not in ["S","U"]:
raise ValueError("type must be string or unicode ('S' or 'U')")
- if encoding in ['none','None','bytes']:
- bs = b.tobytes()
- else:
- bs = b.tobytes().decode(encoding)
+ bs = b.tobytes()
slen = int(b.shape[-1])
if encoding in ['none','None','bytes']:
a = numpy.array([bs[n1:n1+slen] for n1 in range(0,len(bs),slen)],'S'+repr(slen))
else:
- a = numpy.array([bs[n1:n1+slen] for n1 in range(0,len(bs),slen)],'U'+repr(slen))
+ a = numpy.array([bs[n1:n1+slen].decode(encoding) for n1 in range(0,len(bs),slen)],'U'+repr(slen))
a.shape = b.shape[:-1]
return a
=====================================
test/run_all.py
=====================================
@@ -15,7 +15,6 @@ for f in test_files:
m = __import__(os.path.splitext(f)[0])
testsuite.addTests(unittest.TestLoader().loadTestsFromModule(m))
-
if __name__ == '__main__':
import numpy, cython
sys.stdout.write('\n')
=====================================
test/test_compression_blosc.py
=====================================
@@ -2,7 +2,7 @@ from typing import TYPE_CHECKING, Any, Literal
from numpy.random.mtrand import uniform
from netCDF4 import Dataset
from numpy.testing import assert_almost_equal
-import os, tempfile, unittest, sys
+import os, tempfile, unittest, sys, pytest
from filter_availability import no_plugins, has_blosc_filter
if TYPE_CHECKING:
from netCDF4 import CompressionLevel
@@ -40,6 +40,8 @@ def write_netcdf(filename, dtype='f8', blosc_shuffle: Literal[0, 1, 2] = 1, comp
@unittest.skipIf(no_plugins or not has_blosc_filter, "blosc filter not available")
+# allow failures for this test for now (it fails in Windows wheel workflow)
+ at pytest.mark.xfail
class CompressionTestCase(unittest.TestCase):
def setUp(self):
self.filename = filename
=====================================
test/test_masked2.py
=====================================
@@ -64,8 +64,7 @@ class PrimitiveTypesTestCase(unittest.TestCase):
v = f.createVariable('v',np.float32,'x',zlib=True,least_significant_digit=1)
# assign masked array to that variable with one missing value.
data =\
- ma.array([1.5678,99.99,3.75145,4.127654],mask=np.array([False,True,False,False],np.bool_))
- data.mask[1]=True
+ ma.MaskedArray([1.5678,99.99,3.75145,4.127654],mask=np.array([False,True,False,False],np.bool_))
v[:] = data
f.close()
=====================================
test/test_masked3.py
=====================================
@@ -19,7 +19,7 @@ class SetAutoMaskTestBase(unittest.TestCase):
self.fillval = default_fillvals["i2"]
self.v = np.array([self.fillval, 5, 4, -9999], dtype = "i2")
- self.v_ma = ma.array([self.fillval, 5, 4, -9999], dtype = "i2", mask = [True, False, False, True])
+ self.v_ma = ma.MaskedArray([self.fillval, 5, 4, -9999], dtype = "i2", mask = [True, False, False, True])
self.scale_factor = 10.
self.add_offset = 5.
=====================================
test/test_masked4.py
=====================================
@@ -20,7 +20,7 @@ class SetValidMinMax(unittest.TestCase):
self.valid_max = 32765
self.valid_range = [self.valid_min,self.valid_max]
self.v = np.array([self.valid_min-1, 5, 4, self.valid_max+1], dtype = "i2")
- self.v_ma = ma.array([self.valid_min-1, 5, 4, self.valid_max+1], dtype = "i2", mask = [True, False, False, True])
+ self.v_ma = ma.MaskedArray([self.valid_min-1, 5, 4, self.valid_max+1], dtype = "i2", mask = [True, False, False, True])
self.scale_factor = 10.
self.add_offset = 5.
=====================================
test/test_masked5.py
=====================================
@@ -17,7 +17,7 @@ class VectorMissingValues(unittest.TestCase):
self.missing_values = [-999,999,0]
self.v = np.array([-999,0,1,2,3,999], dtype = "i2")
- self.v_ma = ma.array([-1,0,1,2,3,4], dtype = "i2", \
+ self.v_ma = ma.MaskedArray([-1,0,1,2,3,4], dtype = "i2", \
mask = [True, True, False, False, False, True])
f = Dataset(self.testfile, 'w')
=====================================
test/test_masked6.py
=====================================
@@ -18,7 +18,7 @@ class SetAlwaysMaskTestBase(unittest.TestCase):
self.testfile = tempfile.NamedTemporaryFile(suffix='.nc', delete=False).name
self.v = np.array([4, 3, 2, 1], dtype="i2")
- self.w = np.ma.array([-1, -2, -3, -4], mask=[False, True, False, False], dtype="i2")
+ self.w = np.ma.MaskedArray([-1, -2, -3, -4], mask=[False, True, False, False], dtype="i2")
f = Dataset(self.testfile, 'w')
_ = f.createDimension('x', None)
=====================================
test/test_scaled.py
=====================================
@@ -22,7 +22,7 @@ class SetAutoScaleTestBase(unittest.TestCase):
self.missing_value = -9999
self.v = np.array([0, 5, 4, self.missing_value], dtype = "i2")
- self.v_ma = ma.array([0, 5, 4, self.missing_value], dtype = "i2",
+ self.v_ma = ma.MaskedArray([0, 5, 4, self.missing_value], dtype = "i2",
mask = [True, False, False, True], fill_value = self.fillval)
self.scale_factor = 10.
=====================================
test/test_stringarr.py
=====================================
@@ -3,6 +3,7 @@ import random, numpy, string
import unittest
import os
from numpy.testing import assert_array_equal, assert_array_almost_equal
+import numpy as np
def generateString(length, alphabet=string.ascii_letters + string.digits + string.punctuation):
return(''.join([random.choice(alphabet) for i in range(length)]))
@@ -20,6 +21,11 @@ for nrec in range(nrecs):
datau = data.astype('U')
datac = stringtochar(data, encoding='ascii')
+nx, n_strlen = 3, 12
+unicode_strings = np.array(['Münster', 'Liége', '東京'],dtype='U'+str(n_strlen))
+unicode_strings2 = np.array(['Münster', 'Москва', '東京'],dtype='U'+str(n_strlen))
+unicode_strings2_bytes = [b'M', b'\xc3', b'\xbc', b'n', b's', b't', b'e', b'r', b'\xd0', b'\x9c', b'\xd0', b'\xbe', b'\xd1', b'\x81', b'\xd0', b'\xba', b'\xd0', b'\xb2', b'\xd0', b'\xb0', b'\xe6', b'\x9d', b'\xb1', b'\xe4', b'\xba', b'\xac']
+
class StringArrayTestCase(unittest.TestCase):
def setUp(self):
@@ -28,6 +34,8 @@ class StringArrayTestCase(unittest.TestCase):
nc.createDimension('n1',None)
nc.createDimension('n2',n2)
nc.createDimension('nchar',nchar)
+ nc.createDimension("x", nx)
+ nc.createDimension("nstr", n_strlen)
v = nc.createVariable('strings','S1',('n1','n2','nchar'))
v2 = nc.createVariable('strings2','S1',('n1','n2','nchar'))
# if _Encoding set, string array should automatically be converted
@@ -44,6 +52,11 @@ class StringArrayTestCase(unittest.TestCase):
v2[-1,-1] = data[-1,-1].tobytes() # write single python string
# _Encoding should be ignored if an array of characters is specified
v3[:] = stringtochar(data, encoding='ascii')
+ # test unicode strings (issue #1440)
+ v4 = nc.createVariable("strings4", "S1", dimensions=("x", "nstr",))
+ v4._Encoding = "UTF-8"
+ v4[:] = unicode_strings
+ v4[1] = "Москва"
nc.close()
def tearDown(self):
@@ -57,6 +70,10 @@ class StringArrayTestCase(unittest.TestCase):
v = nc.variables['strings']
v2 = nc.variables['strings2']
v3 = nc.variables['strings3']
+ v4 = nc.variables['strings4']
+ assert np.all(v4[:]==unicode_strings2)
+ v4.set_auto_chartostring(False)
+ assert (v4[:].compressed().tolist() == unicode_strings2_bytes)
assert v.dtype.str[1:] in ['S1','U1']
assert v.shape == (nrecs,n2,nchar)
for nrec in range(nrecs):
=====================================
test/test_types.py
=====================================
@@ -22,7 +22,7 @@ ranarr = 100.*uniform(size=(n1dim,n2dim))
zlib=False; complevel=0; shuffle=False; least_significant_digit=None
datatypes = ['f8','f4','i1','i2','i4','i8','u1','u2','u4','u8','S1']
FillValue = 1.0
-issue273_data = np.ma.array(['z']*10,dtype='S1',\
+issue273_data = np.ma.MaskedArray(['z']*10,dtype='S1',\
mask=[False,False,False,False,False,True,False,False,False,False])
class PrimitiveTypesTestCase(unittest.TestCase):
View it on GitLab: https://salsa.debian.org/debian-gis-team/netcdf4-python/-/compare/fcd8e5c9f2903eaabc28f4bd2b4a802a6598f090...f11d3e88c4e0bc0d85094cfbbe522f5575f7dd72
--
View it on GitLab: https://salsa.debian.org/debian-gis-team/netcdf4-python/-/compare/fcd8e5c9f2903eaabc28f4bd2b4a802a6598f090...f11d3e88c4e0bc0d85094cfbbe522f5575f7dd72
You're receiving this email because of your account on salsa.debian.org.
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/pkg-grass-devel/attachments/20260105/422f1589/attachment-0001.htm>
More information about the Pkg-grass-devel
mailing list