[Git][debian-gis-team/python-hdf4][master] 7 commits: New upstream version 0.11.6
Antonio Valentino (@antonio.valentino)
gitlab at salsa.debian.org
Sun Jan 5 10:38:42 GMT 2025
Antonio Valentino pushed to branch master at Debian GIS Project / python-hdf4
Commits:
fb0e450c by Antonio Valentino at 2025-01-05T10:24:01+00:00
New upstream version 0.11.6
- - - - -
156fa2c4 by Antonio Valentino at 2025-01-05T10:24:02+00:00
Update upstream source from tag 'upstream/0.11.6'
Update to upstream version '0.11.6'
with Debian dir 435824a86cb7832381cfffe2530f879bade47e6b
- - - - -
ebdfb465 by Antonio Valentino at 2025-01-05T10:24:42+00:00
New upstream release
- - - - -
8e5e7f52 by Antonio Valentino at 2025-01-05T10:29:31+00:00
Update dependencies
- - - - -
5f41b106 by Antonio Valentino at 2025-01-05T10:30:31+00:00
Update dates in d/copyright
- - - - -
8d5aee64 by Antonio Valentino at 2025-01-05T10:33:10+00:00
Refresh patches
- - - - -
e1f46087 by Antonio Valentino at 2025-01-05T10:35:30+00:00
Set distribution to unstable
- - - - -
17 changed files:
- .github/workflows/package_and_publish.yml
- .github/workflows/tests.yml
- AUTHORS
- LICENSE
- README.md
- debian/changelog
- debian/control
- debian/copyright
- debian/patches/gcc-14.patch → debian/patches/0001-gcc-14.patch
- − debian/patches/hdf-4.3.0.patch
- debian/patches/series
- doc/install.rst
- pyhdf/SD.py
- pyhdf/VS.py
- pyhdf/hdfext.i
- pyhdf/hdfext_wrap.c
- pyproject.toml
Changes:
=====================================
.github/workflows/package_and_publish.yml
=====================================
@@ -20,7 +20,7 @@ jobs:
- uses: actions/setup-python@v4
name: Install Python
with:
- python-version: '3.10'
+ python-version: '3.13'
- name: Setup conda (windows-latest)
if: matrix.os == 'windows-latest'
@@ -32,9 +32,20 @@ jobs:
echo "LIBRARY_DIRS=C:\Miniconda\Library\lib;C:\Miniconda\Library\bin" >> $env:GITHUB_ENV
echo "INCLUDE_DIRS=C:\Miniconda\Library\include" >> $env:GITHUB_ENV
+ - name: Setup libjpeg paths (macos-latest)
+ if: matrix.os == 'macos-latest'
+ run: |
+ echo 'LIBRARY_DIRS=/opt/homebrew/opt/jpeg/lib' >> $GITHUB_ENV
+ echo 'INCLUDE_DIRS=/opt/homebrew/opt/jpeg/include' >> $GITHUB_ENV
+
+ # See https://github.com/pypa/cibuildwheel/issues/563#issuecomment-2257729524
+ - name: Set macOS deployment target
+ if: matrix.os == 'macos-latest'
+ run: echo "MACOSX_DEPLOYMENT_TARGET=$(sw_vers -productVersion | cut -d '.' -f 1-2)" >> $GITHUB_ENV
+
- name: Install cibuildwheel
run: |
- python -m pip install cibuildwheel==2.13.1
+ python -m pip install cibuildwheel==2.22.0
- name: Build wheels
run: |
python -m cibuildwheel --output-dir dist
@@ -46,12 +57,16 @@ jobs:
CIBW_ARCHS_WINDOWS: 'AMD64' # restrict to 64bit builds
# (mac-os) Install hdf4 from sources
CIBW_BEFORE_ALL_MACOS: >
- brew install ninja &&
+ brew install ninja jpeg &&
+ export PATH="/opt/homebrew/opt/jpeg/bin:$PATH" &&
+ export LDFLAGS="-L/opt/homebrew/opt/jpeg/lib" &&
+ export CPPFLAGS="-I/opt/homebrew/opt/jpeg/include" &&
+ export PKG_CONFIG_PATH="/opt/homebrew/opt/jpeg/lib/pkgconfig" &&
cd /tmp &&
- git clone --depth 1 --branch hdf-4_2_16 https://github.com/HDFGroup/hdf4.git &&
+ git clone --depth 1 --branch hdf4.3.0 https://github.com/HDFGroup/hdf4.git &&
mkdir build && cd build &&
../hdf4/configure --enable-hdf4-xdr --enable-shared --disable-static --disable-fortran --disable-netcdf --enable-production --with-zlib --prefix=/usr/local &&
- make install
+ sudo make install
CIBW_BEFORE_ALL_WINDOWS: >
conda config --set always_yes yes --set changeps1 no --set auto_update_conda no --set safety_checks disabled &&
conda install -q hdf4
@@ -59,9 +74,9 @@ jobs:
run: |
mkdir wheelhouse
cp dist/*.whl wheelhouse
- - uses: actions/upload-artifact@v2
+ - uses: actions/upload-artifact@v4
with:
- name: wheelhouse
+ name: wheelhouse-${{ matrix.os }}
path: wheelhouse
publish:
@@ -82,9 +97,9 @@ jobs:
python -m pip install build
python -m build --sdist -o wheelhouse
- - uses: actions/upload-artifact@v2
+ - uses: actions/upload-artifact@v4
with:
- name: wheelhouse
+ name: wheelhouse-sdist
path: wheelhouse
- name: Publish SDIST to PyPI # there are some problems if sdist is not pushed first
@@ -96,10 +111,11 @@ jobs:
packages_dir: wheelhouse/
- name: Download all the wheels
- uses: actions/download-artifact@v2
+ uses: actions/download-artifact@v4
with:
- name: wheelhouse
path: ./wheelhouse/
+ pattern: wheelhouse-*
+ merge-multiple: true
- name: Publish a Python distribution to Test PyPI
uses: pypa/gh-action-pypi-publish@release/v1
=====================================
.github/workflows/tests.yml
=====================================
@@ -15,7 +15,7 @@ jobs:
fail-fast: true
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
- python: ["3.8", "3.9", "3.10", "3.x"]
+ python: ["3.9", "3.10", "3.11", "3.12", "3.13"]
steps:
- uses: actions/checkout@v3
@@ -28,12 +28,19 @@ jobs:
- name: Install libhdf4-dev (macos-latest)
if: matrix.os == 'macos-latest'
run: |
- brew install ninja &&
+ brew install ninja jpeg
+ export PATH="/opt/homebrew/opt/jpeg/bin:$PATH"
+ export LDFLAGS="-L/opt/homebrew/opt/jpeg/lib"
+ export CPPFLAGS="-I/opt/homebrew/opt/jpeg/include"
+ export PKG_CONFIG_PATH="/opt/homebrew/opt/jpeg/lib/pkgconfig"
+ echo 'LIBRARY_DIRS=/opt/homebrew/opt/jpeg/lib' >> $GITHUB_ENV
+ echo 'INCLUDE_DIRS=/opt/homebrew/opt/jpeg/include' >> $GITHUB_ENV
+
cd /tmp &&
- git clone --depth 1 --branch hdf-4_2_16 https://github.com/HDFGroup/hdf4.git &&
+ git clone --depth 1 --branch hdf4.3.0 https://github.com/HDFGroup/hdf4.git &&
mkdir build && cd build &&
../hdf4/configure --enable-hdf4-xdr --enable-shared --disable-static --disable-fortran --disable-netcdf --enable-java --enable-production --with-zlib --prefix=/usr/local &&
- make install
+ sudo make install
- name: Install libhdf4-dev (ubuntu-latest)
if: matrix.os == 'ubuntu-latest'
@@ -53,6 +60,8 @@ jobs:
- name: Install requirements
run: |
+ echo LIBRARY_DIRS is $LIBRARY_DIRS
+ echo INCLUDE_DIRS is $INCLUDE_DIRS
python -m pip install -U pip
python -m pip install numpy pytest
=====================================
AUTHORS
=====================================
@@ -2,6 +2,6 @@ Andre Gosselin <Andre.Gosselin at dfo-mpo.gc.ca>
@bmagill1250
@dmarth
Fazlul Shahriar <fshahriar at gmail.com>
-HDF-EOS Tools Informatoin Center <eoshelp at hdfgroup.org>
+HDF-EOS Tools Information Center <eoshelp at hdfgroup.org>
H. Joe Lee <hyoklee at hdfgroup.org>
Travis E. Oliphant <teoliphant at gmail.com>
=====================================
LICENSE
=====================================
@@ -22,7 +22,7 @@ THE SOFTWARE.
Built distributions of pyhdf also include:
-Libary | License
-- hdf | BSD-3
-- jpeg | Custom BSD-like
-- zlib | zlib
+Library | License
+- hdf | BSD-3
+- jpeg | Custom BSD-like
+- zlib | zlib
=====================================
README.md
=====================================
@@ -1,5 +1,5 @@
[](https://github.com/fhs/pyhdf/actions/workflows/tests.yml)
-[](https://github.com/fhs/pyhdf/actions/workflows/package.yml)
+[](https://github.com/fhs/pyhdf/actions/workflows/package_and_publish.yml)
[](https://anaconda.org/conda-forge/pyhdf)
# pyhdf
=====================================
debian/changelog
=====================================
@@ -1,9 +1,18 @@
-python-hdf4 (0.11.4-4) UNRELEASED; urgency=medium
+python-hdf4 (0.11.6-1) unstable; urgency=medium
- * Team upload.
+ [ Bas Couwenberg ]
* Bump Standards-Version to 4.7.0, no changes.
- -- Bas Couwenberg <sebastic at debian.org> Sun, 28 Jul 2024 19:58:45 +0200
+ [ Antonio Valentino ]
+ * New upstream release.
+ * debian/control:
+ - Add dependency on setuptools-scm.
+ * Update dates in d/copyright.
+ * debian/patches:
+ - Drop hdf-4.3.0.patch, applied upstream.
+ - Refresh and renumber remaining patches.
+
+ -- Antonio Valentino <antonio.valentino at tiscali.it> Sun, 05 Jan 2025 10:35:22 +0000
python-hdf4 (0.11.4-3) unstable; urgency=medium
=====================================
debian/control
=====================================
@@ -16,6 +16,7 @@ Build-Depends: debhelper-compat (= 13),
python3-pytest <!nocheck>,
python3-numpy,
python3-setuptools,
+ python3-setuptools-scm,
zlib1g-dev
Standards-Version: 4.7.0
Vcs-Browser: https://salsa.debian.org/debian-gis-team/python-hdf4
=====================================
debian/copyright
=====================================
@@ -12,7 +12,7 @@ Copyright: 2010-2013, Benjamin Peterson
License: Expat
Files: debian/*
-Copyright: 2018-2024, Antonio Valentino <antonio.valentino at tiscali.it>
+Copyright: 2018-2025, Antonio Valentino <antonio.valentino at tiscali.it>
License: Expat
License: Expat
=====================================
debian/patches/gcc-14.patch → debian/patches/0001-gcc-14.patch
=====================================
@@ -1,10 +1,17 @@
-Description: Fix FTBFS with GCC 14 (-Wincompatible-pointer-types).
-Author: Bas Couwenberg <sebastic at debian.org>
+From: Bas Couwenberg <sebastic at debian.org>
+Date: Sun, 5 Jan 2025 10:31:21 +0000
+Subject: Fix FTBFS with GCC 14 (-Wincompatible-pointer-types).
+
Forwarded: https://github.com/fhs/pyhdf/pull/75
+---
+ pyhdf/hdfext_wrap.c | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+diff --git a/pyhdf/hdfext_wrap.c b/pyhdf/hdfext_wrap.c
+index 80b3410..7982db1 100644
--- a/pyhdf/hdfext_wrap.c
+++ b/pyhdf/hdfext_wrap.c
-@@ -4107,7 +4107,7 @@ static PyObject * _SDwritedata_0(int32 s
+@@ -4107,7 +4107,7 @@ static PyObject * _SDwritedata_0(int32 sds_id, int32 data_type,
#include "hcomp.h"
@@ -13,7 +20,7 @@ Forwarded: https://github.com/fhs/pyhdf/pull/75
int32 *v2, int32 *v3, int32 *v4, int32 *v5) {
comp_info c_info;
-@@ -7319,7 +7319,7 @@ SWIGINTERN PyObject *_wrap__SDgetcompres
+@@ -7319,7 +7319,7 @@ SWIGINTERN PyObject *_wrap__SDgetcompress(PyObject *self, PyObject *args) {
SWIG_exception_fail(SWIG_ArgError(ecode1), "in method '" "_SDgetcompress" "', argument " "1"" of type '" "int32""'");
}
arg1 = (int32)(val1);
=====================================
debian/patches/hdf-4.3.0.patch deleted
=====================================
@@ -1,28 +0,0 @@
-Description: Fix FTBFS with HDF 4.3.0.
-Author: Bas Couwenberg <sebastic at debian.org>
-Bug: https://bugs.debian.org/1068384
-Forwarded: https://github.com/fhs/pyhdf/pull/72
-Applied-Upstream: https://github.com/fhs/pyhdf/commit/7746b030c3e5b3d7ba05b4e94b747a92f1f773a5
-
---- a/pyhdf/hdfext.i
-+++ b/pyhdf/hdfext.i
-@@ -203,7 +203,7 @@ extern void _HEprint(void);
-
-
- %{
--#include "hdfi.h" /* declares int32, float32, etc */
-+#include "hdf.h" /* declares int32, float32, etc */
-
- #define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION
- #include "numpy/ndarraytypes.h"
---- a/pyhdf/hdfext_wrap.c
-+++ b/pyhdf/hdfext_wrap.c
-@@ -3842,7 +3842,7 @@ void _HEprint(void) {
- }
-
-
--#include "hdfi.h" /* declares int32, float32, etc */
-+#include "hdf.h" /* declares int32, float32, etc */
-
- #define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION
- #include "numpy/ndarraytypes.h"
=====================================
debian/patches/series
=====================================
@@ -1,2 +1 @@
-hdf-4.3.0.patch
-gcc-14.patch
+0001-gcc-14.patch
=====================================
doc/install.rst
=====================================
@@ -17,7 +17,7 @@ Once you're in the conda environment, install `pyhdf from conda-forge
If you don't want to use conda, the instructions below describes how you
can compile pyhdf from source. Version 0.10.3 also includes static linked wheels for
-linux with cpython 3.6-3.9. If compatible, `pip install pyhdf` will include the neccessary
+linux with cpython 3.6-3.9. If compatible, `pip install pyhdf` will include the necessary
libraries for you. If you don't want to use the built manylinux distribution, follow instructions
below to build from source downloading from pypi with `pip install pyhdf --no-binary :all:`.
=====================================
pyhdf/SD.py
=====================================
@@ -3087,7 +3087,7 @@ class SDim(object):
buf = _C.array_float64(n_values)
else:
- raise HDF4Error("setscale: illegal or usupported data_type")
+ raise HDF4Error("setscale: illegal or unsupported data_type")
if n_values == 1:
buf[0] = scale
=====================================
pyhdf/VS.py
=====================================
@@ -643,7 +643,7 @@ vdata attribute. We want to be able update this attribute (see
following examples). However, the VS API prohibits changing an attribute
type when updating its value. Since the length (order) of an attribute
is part of its type, we make sure of setting the attribute to a length
-long enough to accommodate the longest possible string we migh want to
+long enough to accommodate the longest possible string we might want to
assign to the attribute.
Appending records to a vdata
@@ -2053,7 +2053,7 @@ class VD(object):
# - tuple of the start indices along the vdata dimensions
# - tuple of the count values along the vdata dimensions
# a count of -1 indicates that an index, not a slice
- # was applied on the correcponding dimension.
+ # was applied on the corresponding dimension.
# Make sure the indexing expression does not exceed the
# vdata number of dimensions (2).
=====================================
pyhdf/hdfext.i
=====================================
@@ -203,7 +203,7 @@ extern void _HEprint(void);
%{
-#include "hdfi.h" /* declares int32, float32, etc */
+#include "hdf.h" /* declares int32, float32, etc */
#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION
#include "numpy/ndarraytypes.h"
=====================================
pyhdf/hdfext_wrap.c
=====================================
@@ -3842,7 +3842,7 @@ void _HEprint(void) {
}
-#include "hdfi.h" /* declares int32, float32, etc */
+#include "hdf.h" /* declares int32, float32, etc */
#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION
#include "numpy/ndarraytypes.h"
=====================================
pyproject.toml
=====================================
@@ -1,13 +1,14 @@
[build-system]
requires = [
"setuptools",
+ "setuptools-scm",
"numpy",
]
build-backend = "setuptools.build_meta"
[project]
name = "pyhdf"
-version = "0.11.4"
+dynamic = ["version"]
description = "Python interface to the NCSA HDF4 library"
readme = "README.md"
keywords = ['hdf4', 'netcdf', 'numpy', 'python', 'pyhdf']
@@ -40,3 +41,5 @@ dependencies = [
[project.urls]
Homepage = 'https://github.com/fhs/pyhdf'
+
+[tool.setuptools_scm]
View it on GitLab: https://salsa.debian.org/debian-gis-team/python-hdf4/-/compare/917b4867e53d2f10e0c51c696bf18f661267a331...e1f4608794045f88e830af3e1976b76062ada294
--
View it on GitLab: https://salsa.debian.org/debian-gis-team/python-hdf4/-/compare/917b4867e53d2f10e0c51c696bf18f661267a331...e1f4608794045f88e830af3e1976b76062ada294
You're receiving this email because of your account on salsa.debian.org.
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/pkg-grass-devel/attachments/20250105/e6c884a3/attachment-0001.htm>
More information about the Pkg-grass-devel
mailing list