[Git][debian-gis-team/python-hdf4][upstream] New upstream version 0.11.6
Antonio Valentino (@antonio.valentino)
gitlab@salsa.debian.org
Sun Jan 5 10:39:11 GMT 2025
Antonio Valentino pushed to branch upstream at Debian GIS Project / python-hdf4
Commits:
fb0e450c by Antonio Valentino at 2025-01-05T10:24:01+00:00
New upstream version 0.11.6
- - - - -
11 changed files:
- .github/workflows/package_and_publish.yml
- .github/workflows/tests.yml
- AUTHORS
- LICENSE
- README.md
- doc/install.rst
- pyhdf/SD.py
- pyhdf/VS.py
- pyhdf/hdfext.i
- pyhdf/hdfext_wrap.c
- pyproject.toml
Changes:
=====================================
.github/workflows/package_and_publish.yml
=====================================
@@ -20,7 +20,7 @@ jobs:
- uses: actions/setup-python@v4
name: Install Python
with:
- python-version: '3.10'
+ python-version: '3.13'
- name: Setup conda (windows-latest)
if: matrix.os == 'windows-latest'
@@ -32,9 +32,20 @@ jobs:
echo "LIBRARY_DIRS=C:\Miniconda\Library\lib;C:\Miniconda\Library\bin" >> $env:GITHUB_ENV
echo "INCLUDE_DIRS=C:\Miniconda\Library\include" >> $env:GITHUB_ENV
+ - name: Setup libjpeg paths (macos-latest)
+ if: matrix.os == 'macos-latest'
+ run: |
+ echo 'LIBRARY_DIRS=/opt/homebrew/opt/jpeg/lib' >> $GITHUB_ENV
+ echo 'INCLUDE_DIRS=/opt/homebrew/opt/jpeg/include' >> $GITHUB_ENV
+
+ # See https://github.com/pypa/cibuildwheel/issues/563#issuecomment-2257729524
+ - name: Set macOS deployment target
+ if: matrix.os == 'macos-latest'
+ run: echo "MACOSX_DEPLOYMENT_TARGET=$(sw_vers -productVersion | cut -d '.' -f 1-2)" >> $GITHUB_ENV
+
- name: Install cibuildwheel
run: |
- python -m pip install cibuildwheel==2.13.1
+ python -m pip install cibuildwheel==2.22.0
- name: Build wheels
run: |
python -m cibuildwheel --output-dir dist
@@ -46,12 +57,16 @@ jobs:
CIBW_ARCHS_WINDOWS: 'AMD64' # restrict to 64bit builds
# (mac-os) Install hdf4 from sources
CIBW_BEFORE_ALL_MACOS: >
- brew install ninja &&
+ brew install ninja jpeg &&
+ export PATH="/opt/homebrew/opt/jpeg/bin:$PATH" &&
+ export LDFLAGS="-L/opt/homebrew/opt/jpeg/lib" &&
+ export CPPFLAGS="-I/opt/homebrew/opt/jpeg/include" &&
+ export PKG_CONFIG_PATH="/opt/homebrew/opt/jpeg/lib/pkgconfig" &&
cd /tmp &&
- git clone --depth 1 --branch hdf-4_2_16 https://github.com/HDFGroup/hdf4.git &&
+ git clone --depth 1 --branch hdf4.3.0 https://github.com/HDFGroup/hdf4.git &&
mkdir build && cd build &&
../hdf4/configure --enable-hdf4-xdr --enable-shared --disable-static --disable-fortran --disable-netcdf --enable-production --with-zlib --prefix=/usr/local &&
- make install
+ sudo make install
CIBW_BEFORE_ALL_WINDOWS: >
conda config --set always_yes yes --set changeps1 no --set auto_update_conda no --set safety_checks disabled &&
conda install -q hdf4
@@ -59,9 +74,9 @@ jobs:
run: |
mkdir wheelhouse
cp dist/*.whl wheelhouse
- - uses: actions/upload-artifact@v2
+ - uses: actions/upload-artifact@v4
with:
- name: wheelhouse
+ name: wheelhouse-${{ matrix.os }}
path: wheelhouse
publish:
@@ -82,9 +97,9 @@ jobs:
python -m pip install build
python -m build --sdist -o wheelhouse
- - uses: actions/upload-artifact@v2
+ - uses: actions/upload-artifact@v4
with:
- name: wheelhouse
+ name: wheelhouse-sdist
path: wheelhouse
- name: Publish SDIST to PyPI # there are some problems if sdist is not pushed first
@@ -96,10 +111,11 @@ jobs:
packages_dir: wheelhouse/
- name: Download all the wheels
- uses: actions/download-artifact@v2
+ uses: actions/download-artifact@v4
with:
- name: wheelhouse
path: ./wheelhouse/
+ pattern: wheelhouse-*
+ merge-multiple: true
- name: Publish a Python distribution to Test PyPI
uses: pypa/gh-action-pypi-publish@release/v1
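[Note on the artifact changes above: upload-artifact@v4 makes artifacts immutable, so every matrix job must upload under a unique name (hence wheelhouse-${{ matrix.os }} and wheelhouse-sdist), and download-artifact@v4 reassembles them with the new pattern and merge-multiple inputs. As a rough local illustration of that merge step, a sketch only, assuming per-OS wheelhouse-* directories in the working tree:

    # Rough local equivalent of download-artifact@v4 with
    # pattern: wheelhouse-* and merge-multiple: true.
    import shutil
    from pathlib import Path

    merged = Path("wheelhouse")
    merged.mkdir(exist_ok=True)
    for artifact_dir in Path(".").glob("wheelhouse-*"):
        if artifact_dir.is_dir():
            for wheel in artifact_dir.glob("*.whl"):
                shutil.copy2(wheel, merged / wheel.name)
]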
=====================================
.github/workflows/tests.yml
=====================================
@@ -15,7 +15,7 @@ jobs:
fail-fast: true
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
- python: ["3.8", "3.9", "3.10", "3.x"]
+ python: ["3.9", "3.10", "3.11", "3.12", "3.13"]
steps:
- uses: actions/checkout@v3
@@ -28,12 +28,19 @@ jobs:
- name: Install libhdf4-dev (macos-latest)
if: matrix.os == 'macos-latest'
run: |
- brew install ninja &&
+ brew install ninja jpeg
+ export PATH="/opt/homebrew/opt/jpeg/bin:$PATH"
+ export LDFLAGS="-L/opt/homebrew/opt/jpeg/lib"
+ export CPPFLAGS="-I/opt/homebrew/opt/jpeg/include"
+ export PKG_CONFIG_PATH="/opt/homebrew/opt/jpeg/lib/pkgconfig"
+ echo 'LIBRARY_DIRS=/opt/homebrew/opt/jpeg/lib' >> $GITHUB_ENV
+ echo 'INCLUDE_DIRS=/opt/homebrew/opt/jpeg/include' >> $GITHUB_ENV
+
cd /tmp &&
- git clone --depth 1 --branch hdf-4_2_16 https://github.com/HDFGroup/hdf4.git &&
+ git clone --depth 1 --branch hdf4.3.0 https://github.com/HDFGroup/hdf4.git &&
mkdir build && cd build &&
../hdf4/configure --enable-hdf4-xdr --enable-shared --disable-static --disable-fortran --disable-netcdf --enable-java --enable-production --with-zlib --prefix=/usr/local &&
- make install
+ sudo make install
- name: Install libhdf4-dev (ubuntu-latest)
if: matrix.os == 'ubuntu-latest'
@@ -53,6 +60,8 @@ jobs:
- name: Install requirements
run: |
+ echo LIBRARY_DIRS is $LIBRARY_DIRS
+ echo INCLUDE_DIRS is $INCLUDE_DIRS
python -m pip install -U pip
python -m pip install numpy pytest
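[Context for the LIBRARY_DIRS / INCLUDE_DIRS echoes added above: pyhdf's build reads these environment variables to locate the HDF4 and jpeg headers and libraries. A minimal sketch of that lookup; the separator handling here is an assumption for illustration:

    # Sketch: resolve the HDF4/jpeg search paths from the environment.
    # LIBRARY_DIRS / INCLUDE_DIRS use ':' on Unix and ';' on Windows,
    # which os.pathsep covers on the matching platform.
    import os

    library_dirs = [d for d in os.environ.get("LIBRARY_DIRS", "").split(os.pathsep) if d]
    include_dirs = [d for d in os.environ.get("INCLUDE_DIRS", "").split(os.pathsep) if d]
    print("library dirs:", library_dirs or ["<defaults>"])
    print("include dirs:", include_dirs or ["<defaults>"])
]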
=====================================
AUTHORS
=====================================
@@ -2,6 +2,6 @@ Andre Gosselin <Andre.Gosselin@dfo-mpo.gc.ca>
@bmagill1250
@dmarth
Fazlul Shahriar <fshahriar@gmail.com>
-HDF-EOS Tools Informatoin Center <eoshelp@hdfgroup.org>
+HDF-EOS Tools Information Center <eoshelp@hdfgroup.org>
H. Joe Lee <hyoklee@hdfgroup.org>
Travis E. Oliphant <teoliphant@gmail.com>
=====================================
LICENSE
=====================================
@@ -22,7 +22,7 @@ THE SOFTWARE.
Built distributions of pyhdf also include:
-Libary | License
-- hdf | BSD-3
-- jpeg | Custom BSD-like
-- zlib | zlib
+Library | License
+- hdf | BSD-3
+- jpeg | Custom BSD-like
+- zlib | zlib
=====================================
README.md
=====================================
@@ -1,5 +1,5 @@
[](https://github.com/fhs/pyhdf/actions/workflows/tests.yml)
-[](https://github.com/fhs/pyhdf/actions/workflows/package.yml)
+[](https://github.com/fhs/pyhdf/actions/workflows/package_and_publish.yml)
[](https://anaconda.org/conda-forge/pyhdf)
# pyhdf
=====================================
doc/install.rst
=====================================
@@ -17,7 +17,7 @@ Once you're in the conda environment, install `pyhdf from conda-forge
If you don't want to use conda, the instructions below describes how you
can compile pyhdf from source. Version 0.10.3 also includes static linked wheels for
-linux with cpython 3.6-3.9. If compatible, `pip install pyhdf` will include the neccessary
+linux with cpython 3.6-3.9. If compatible, `pip install pyhdf` will include the necessary
libraries for you. If you don't want to use the built manylinux distribution, follow instructions
below to build from source downloading from pypi with `pip install pyhdf --no-binary :all:`.
=====================================
pyhdf/SD.py
=====================================
@@ -3087,7 +3087,7 @@ class SDim(object):
buf = _C.array_float64(n_values)
else:
- raise HDF4Error("setscale: illegal or usupported data_type")
+ raise HDF4Error("setscale: illegal or unsupported data_type")
if n_values == 1:
buf[0] = scale
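[For reference, SDim.setscale() is the method whose error message is corrected above; it attaches coordinate values of a given type to a dimension. A minimal usage sketch with a supported data type; file and dataset names are made up:

    from pyhdf.SD import SD, SDC

    sd = SD("example.hdf", SDC.WRITE | SDC.CREATE)
    sds = sd.create("temperature", SDC.FLOAT64, (3,))
    sds.dim(0).setscale(SDC.FLOAT64, [0.0, 0.5, 1.0])  # supported type
    sds[:] = [21.5, 22.0, 22.4]
    sds.endaccess()
    sd.end()
]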
=====================================
pyhdf/VS.py
=====================================
@@ -643,7 +643,7 @@ vdata attribute. We want to be able update this attribute (see
following examples). However, the VS API prohibits changing an attribute
type when updating its value. Since the length (order) of an attribute
is part of its type, we make sure of setting the attribute to a length
-long enough to accommodate the longest possible string we migh want to
+long enough to accommodate the longest possible string we might want to
assign to the attribute.
Appending records to a vdata
@@ -2053,7 +2053,7 @@ class VD(object):
# - tuple of the start indices along the vdata dimensions
# - tuple of the count values along the vdata dimensions
# a count of -1 indicates that an index, not a slice
- # was applied on the correcponding dimension.
+ # was applied on the corresponding dimension.
# Make sure the indexing expression does not exceed the
# vdata number of dimensions (2).
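[Context for the docstring fixed above: the VS API refuses to change an attribute's type when updating it, and the length (order) is part of the type, so string attributes are padded to a fixed width up front. A minimal sketch with hypothetical vdata and attribute names:

    from pyhdf.HDF import HDF, HC

    f = HDF("inventory.hdf", HC.WRITE | HC.CREATE)
    vs = f.vstart()
    vd = vs.create("INVENTORY", (("partid", HC.CHAR8, 5),))
    vd.write([["PART1"]])  # one record: partid is a 5-char field
    # Pad to 10 chars so later updates keep the same type/order.
    vd.attr("status").set(HC.CHAR8, "open".ljust(10))
    vd.detach()
    vs.end()
    f.close()
]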
=====================================
pyhdf/hdfext.i
=====================================
@@ -203,7 +203,7 @@ extern void _HEprint(void);
%{
-#include "hdfi.h" /* declares int32, float32, etc */
+#include "hdf.h" /* declares int32, float32, etc */
#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION
#include "numpy/ndarraytypes.h"
=====================================
pyhdf/hdfext_wrap.c
=====================================
@@ -3842,7 +3842,7 @@ void _HEprint(void) {
}
-#include "hdfi.h" /* declares int32, float32, etc */
+#include "hdf.h" /* declares int32, float32, etc */
#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION
#include "numpy/ndarraytypes.h"
=====================================
pyproject.toml
=====================================
@@ -1,13 +1,14 @@
[build-system]
requires = [
"setuptools",
+ "setuptools-scm",
"numpy",
]
build-backend = "setuptools.build_meta"
[project]
name = "pyhdf"
-version = "0.11.4"
+dynamic = ["version"]
description = "Python interface to the NCSA HDF4 library"
readme = "README.md"
keywords = ['hdf4', 'netcdf', 'numpy', 'python', 'pyhdf']
@@ -40,3 +41,5 @@ dependencies = [
[project.urls]
Homepage = 'https://github.com/fhs/pyhdf'
+
+[tool.setuptools_scm]
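[With the move to dynamic versioning above, setuptools-scm derives the package version from git tags at build time instead of the hard-coded version field. One way to inspect the result of an installed build, using only the standard library:

    # The installed distribution carries the version that
    # setuptools-scm computed from the git tag at build time.
    from importlib.metadata import version

    print(version("pyhdf"))  # e.g. "0.11.6"
]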
View it on GitLab: https://salsa.debian.org/debian-gis-team/python-hdf4/-/commit/fb0e450cc180f2466902e2b1603c13a420529239