[Git][debian-gis-team/python-hdf4][upstream] New upstream version 0.10.5
Antonio Valentino (@antonio.valentino)
gitlab@salsa.debian.org
Fri May 13 07:02:52 BST 2022
Antonio Valentino pushed to branch upstream at Debian GIS Project / python-hdf4
Commits:
f03b0003 by Antonio Valentino at 2022-05-13T05:44:15+00:00
New upstream version 0.10.5
- - - - -
9 changed files:
- − .appveyor.yml
- .github/workflows/package.yml
- + .github/workflows/tests.yml
- − .travis.yml
- − .travis/install_osx_miniconda.bash
- README.md
- doc/conf.py
- pyhdf/test_SD.py
- setup.py
Changes:
=====================================
.appveyor.yml deleted
=====================================
@@ -1,46 +0,0 @@
-# Based on https://packaging.python.org/guides/supporting-windows-using-appveyor/
-# and https://github.com/Anaconda-Platform/anaconda-project/blob/master/appveyor.yml
-
-environment:
- matrix:
- # For Miniconda versions available on Appveyor, see
- # https://www.appveyor.com/docs/windows-images-software/#miniconda
- - MINICONDA: C:\Miniconda
- - MINICONDA: C:\Miniconda-x64
- - MINICONDA: C:\Miniconda3
- - MINICONDA: C:\Miniconda3-x64
-
-install:
- # conda 4.5.11 seems to expect that this directory exists already
- - mkdir C:\Users\appveyor\.conda
- - call %MINICONDA%\Scripts\activate.bat
- # The safety checks are simply intended to ensure that there is enough disk space
- # and the user has the necessary permissions to make environment changes. In a CI
- # environment these are not necessary and slow things down noticeably on Windows.
- - conda config --set always_yes yes --set changeps1 no --set auto_update_conda no --set safety_checks disabled
- - conda install -q conda numpy hdf4 nose
- - conda info -a
- # We need wheel installed to build wheels
- - pip install wheel
-
-build: off
-
-test_script:
- # Put your test command here.
- - set LIBRARY_DIRS=%MINICONDA%\Library\bin;%MINICONDA%\Library\lib
- - set INCLUDE_DIRS=%MINICONDA%\Library\include
- - python setup.py nosetests -v
- - python examples\runall.py
-
-after_test:
- # This step builds your wheels.
- - python setup.py bdist_wheel
-
-artifacts:
- # bdist_wheel puts your built wheel in the dist directory
- - path: dist\*
-
-#on_success:
-# You can use this step to upload your artifacts to a public website.
-# See Appveyor's documentation for more details. Or you can simply
-# access your wheels from the Appveyor "artifacts" tab for your build.
=====================================
.github/workflows/package.yml
=====================================
@@ -1,4 +1,4 @@
-name: package into static linked wheel
+name: Pypi build
on:
push:
@@ -6,10 +6,16 @@ on:
- 'v*' #
jobs:
- package:
- name: package up into a nice wheel
- runs-on: ubuntu-latest
-
+ packages:
+ name: Wheels on ${{ matrix.os }} (${{ matrix.cibw_archs }})
+ runs-on: ${{ matrix.os }}
+ strategy:
+ fail-fast: true
+ matrix:
+ os: [windows-latest, ubuntu-latest, macos-latest]
+ cibw_archs: ["auto"]
+ env:
+ CIBW_SKIP: "*-musllinux_*"
steps:
- uses: actions/checkout@v2
@@ -18,49 +24,97 @@ jobs:
with:
python-version: '3.7'
- - name: Install cibuildwheel
+ - name: Setup conda (windows-latest)
+ if: matrix.os == 'windows-latest'
+ uses: s-weigand/setup-conda@v1
+
+ - name: Setup conda paths (windows-latest)
+ if: matrix.os == 'windows-latest'
run: |
- python -m pip install cibuildwheel==1.9.0
+ echo "LIBRARY_DIRS=C:\Miniconda\Library\lib;C:\Miniconda\Library\bin" >> $env:GITHUB_ENV
+ echo "INCLUDE_DIRS=C:\Miniconda\Library\include" >> $env:GITHUB_ENV
- - name: Build wheels for linux python versions
+ - name: Install cibuildwheel
+ run: |
+ python -m pip install cibuildwheel==2.3.1
+ - name: Build wheels
run: |
python -m cibuildwheel --output-dir dist
env:
- CIBW_BUILD: '{cp,pp}3*-*'
+ CIBW_BUILD: '{cp,pp}3*'
CIBW_MANYLINUX_X86_64_IMAGE: manylinux2014
CIBW_BEFORE_ALL_LINUX: yum -y install epel-release hdf hdf-devel && ln -s /usr/lib64/hdf/lib* /usr/lib64/
- CIBW_ARCHS_LINUX: 'x86_64'
-
- - name: purge old _linux_wheels
+ CIBW_ARCHS_LINUX: 'x86_64' # restrict to 64bit builds
+ CIBW_ARCHS_WINDOWS: 'AMD64' # restrict to 64bit builds
+ # (mac-os) Install hdf-4.2.15 from sources
+ CIBW_BEFORE_ALL_MACOS: >
+ brew install ninja &&
+ cd /tmp &&
+ git clone https://github.com/HDFGroup/hdf4.git &&
+ mkdir build && cd build &&
+ ../hdf4/configure --enable-shared --disable-static --disable-fortran --disable-netcdf --enable-production --with-zlib --prefix=/usr/local &&
+ make install
+ CIBW_BEFORE_ALL_WINDOWS: >
+ conda config --set always_yes yes --set changeps1 no --set auto_update_conda no --set safety_checks disabled &&
+ conda install -q hdf4
+ - name: Copy wheels into wheelhouse
run: |
mkdir wheelhouse
- cp dist/*manylinux* wheelhouse/
-
- - name: create source distribution archive for pypi
- run: |
- python -m pip install numpy
- python setup.py sdist -d wheelhouse
-
+ cp dist/*.whl wheelhouse
- uses: actions/upload-artifact@v2
with:
name: wheelhouse
path: wheelhouse
- - name: Publish a Python distribution to Test PyPI
- uses: pypa/gh-action-pypi-publish@release/v1
- with:
- user: __token__
- password: ${{ secrets.PYPI_TEST_TOKEN }}
- repository_url: https://test.pypi.org/legacy/
- packages_dir: wheelhouse/
- verbose: true
+ publish:
+ name: Publish to PyPI
+ needs: [packages]
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
- - name: Publish a Python distribution to PyPI
- if: github.event.base_ref == 'refs/heads/master'
- uses: pypa/gh-action-pypi-publish@release/v1
- with:
- user: __token__
- password: ${{ secrets.PYPI_API_TOKEN }}
- packages_dir: wheelhouse/
+ - name: Switch to using Python 3.x
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.x
+
+ - name: Create source distribution archive
+ run: |
+ python -m pip install numpy
+ python setup.py sdist -d wheelhouse
+
+ - uses: actions/upload-artifact@v2
+ with:
+ name: wheelhouse
+ path: wheelhouse
+
+ - name: Publish SDIST to PyPI # there are some problems if sdist is not pushed first
+ if: github.event.base_ref == 'refs/heads/master'
+ uses: pypa/gh-action-pypi-publish@release/v1
+ with:
+ user: __token__
+ password: ${{ secrets.PYPI_API_TOKEN }}
+ packages_dir: wheelhouse/
+
+ - name: Download all the wheels
+ uses: actions/download-artifact@v2
+ with:
+ name: wheelhouse
+ path: ./wheelhouse/
+ - name: Publish a Python distribution to Test PyPI
+ uses: pypa/gh-action-pypi-publish@release/v1
+ with:
+ user: __token__
+ password: ${{ secrets.PYPI_TEST_TOKEN }}
+ repository_url: https://test.pypi.org/legacy/
+ packages_dir: wheelhouse/
+ verbose: true
+ - name: Publish a Python distribution to PyPI
+ if: github.event.base_ref == 'refs/heads/master'
+ uses: pypa/gh-action-pypi-publish@release/v1
+ with:
+ user: __token__
+ password: ${{ secrets.PYPI_API_TOKEN }}
+ packages_dir: wheelhouse/
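The Windows steps above export LIBRARY_DIRS and INCLUDE_DIRS so the build
can locate the conda-provided HDF4 libraries and headers. A minimal sketch
of how a setup script might consume such path-list variables; the helper
name is hypothetical, and pyhdf's actual setup.py may parse them differently:

    import os

    def dirs_from_env(name):
        # Split an os.pathsep-separated environment variable into a
        # list of directories, dropping empty entries.
        value = os.environ.get(name, "")
        return [d for d in value.split(os.pathsep) if d]

    library_dirs = dirs_from_env("LIBRARY_DIRS")  # e.g. C:\Miniconda\Library\lib
    include_dirs = dirs_from_env("INCLUDE_DIRS")  # e.g. C:\Miniconda\Library\include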
=====================================
.github/workflows/tests.yml
=====================================
@@ -0,0 +1,63 @@
+name: Tests
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.event.number }}-${{ github.event.type }}
+ cancel-in-progress: true
+
+on:
+ push:
+
+jobs:
+ packages:
+ name: Test on ${{ matrix.os }} (${{ matrix.python }})
+ runs-on: ${{ matrix.os }}
+ strategy:
+ fail-fast: true
+ matrix:
+ os: [ubuntu-latest, macos-latest, windows-latest]
+ python: ["3.7", "3.8", "3.9", "3.x"]
+
+ steps:
+ - uses: actions/checkout@v2
+
+ - name: Install Python
+ uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python }}
+
+ - name: Install libhdf4-dev (macos-latest)
+ if: matrix.os == 'macos-latest'
+ run: |
+ brew install ninja &&
+ cd /tmp &&
+ git clone https://github.com/HDFGroup/hdf4.git &&
+ mkdir build && cd build &&
+ ../hdf4/configure --enable-shared --disable-static --disable-fortran --disable-netcdf --enable-java --enable-production --with-zlib --prefix=/usr/local &&
+ make install
+
+ - name: Install libhdf4-dev (ubuntu-latest)
+ if: matrix.os == 'ubuntu-latest'
+ run: sudo apt-get install libhdf4-dev
+
+ - name: Setup conda (windows-latest)
+ if: matrix.os == 'windows-latest'
+ uses: s-weigand/setup-conda@v1
+
+ - name: Install libhdf4-dev (windows-latest)
+ if: matrix.os == 'windows-latest'
+ run: |
+ conda config --set always_yes yes --set changeps1 no --set auto_update_conda no --set safety_checks disabled
+ conda install -q hdf4
+ echo "LIBRARY_DIRS=C:\Miniconda\Library\lib;C:\Miniconda\Library\bin" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
+ echo "INCLUDE_DIRS=C:\Miniconda\Library\include" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
+
+ - name: Install requirements
+ run: |
+ python -m pip install -U pip
+ python -m pip install numpy pytest
+
+ - name: Run tests
+ run: |
+ python setup.py develop
+ pytest
+ python examples/runall.py
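For context, the job above runs the suite with pytest after an in-place
build. A hypothetical smoke test in the same style (file name and values
are illustrative; the real tests live in pyhdf/test_SD.py):

    import os
    import tempfile

    import numpy as np
    from pyhdf.SD import SD, SDC

    def test_roundtrip_smoke():
        # Write a small INT8 dataset and read it back.
        with tempfile.TemporaryDirectory() as tmp:
            path = os.path.join(tmp, "smoke.hdf")
            sd = SD(path, SDC.WRITE | SDC.CREATE)
            sds = sd.create("smoke", SDC.INT8, (4, 4))
            sds[:] = np.arange(16, dtype=np.int8).reshape(4, 4)
            data = sds[:]
            sds.endaccess()
            sd.end()
            assert data.shape == (4, 4)
            assert data[3, 3] == 15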
=====================================
.travis.yml deleted
=====================================
@@ -1,38 +0,0 @@
-language: python
-
-matrix:
- include:
- - os: linux
- python: "2.7"
- - os: linux
- python: "3.4"
- - os: linux
- python: "3.5"
- - os: linux
- python: "3.6"
- - os: osx
- language: generic
- env: PYHDF_PYTHON_VERSION="2.7"
- - os: osx
- language: generic
- env: PYHDF_PYTHON_VERSION="3.7"
-
-addons:
- apt:
- packages:
- - libhdf4-dev
-
-before_install:
- - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then source ./.travis/install_osx_miniconda.bash; fi
-
-# command to run tests
-script:
- - python setup.py nosetests -v
- - python examples/runall.py
-
-notifications:
- email:
- recipients:
- - fshahriar@gmail.com
- on_success: never
- on_failure: change
=====================================
.travis/install_osx_miniconda.bash deleted
=====================================
@@ -1,22 +0,0 @@
-#!/bin/bash
-
-# We do this conditionally because it saves us some downloading if the
-# version is the same.
-if [[ "$PYHDF_PYTHON_VERSION" == "2.7" ]]; then
- curl https://repo.anaconda.com/miniconda/Miniconda2-latest-MacOSX-x86_64.sh > miniconda.sh;
-else
- curl https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh > miniconda.sh;
-fi
-bash miniconda.sh -b -p $HOME/miniconda
-export PATH="$HOME/miniconda/bin:$PATH"
-hash -r
-conda config --set always_yes yes --set changeps1 no
-conda update -q conda
-# Useful for debugging any issues with conda
-conda info -a
-
-conda create -q -n test-environment python=$PYHDF_PYTHON_VERSION numpy hdf4 nose
-source activate test-environment
-
-export LIBRARY_DIRS=$CONDA_PREFIX/lib
-export INCLUDE_DIRS=$CONDA_PREFIX/include
=====================================
README.md
=====================================
@@ -1,5 +1,5 @@
-[](https://travis-ci.org/fhs/pyhdf)
-[](https://ci.appveyor.com/project/fhs/pyhdf/branch/master)
+[](https://github.com/fhs/pyhdf/actions/workflows/tests.yml)
+[](https://github.com/fhs/pyhdf/actions/workflows/package.yml)
[](https://anaconda.org/conda-forge/pyhdf)
# pyhdf
=====================================
doc/conf.py
=====================================
@@ -56,7 +56,7 @@ copyright = u'2019, pyhdf authors'
# The short X.Y version.
version = '0.10'
# The full version, including alpha/beta/rc tags.
-release = '0.10.2'
+release = '0.10.5'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
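Before this bump, the release string here had drifted from setup.py
(0.10.2 here vs. 0.10.3 there). One hypothetical way to keep the two in
sync, not something this commit does, is to derive the Sphinx release
from the installed package metadata:

    from importlib.metadata import version as pkg_version  # Python 3.8+

    release = pkg_version("pyhdf")              # full version, e.g. "0.10.5"
    version = ".".join(release.split(".")[:2])  # short X.Y, e.g. "0.10"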
=====================================
pyhdf/test_SD.py
=====================================
@@ -5,7 +5,6 @@ import os
import pyhdf.SD
import shutil
import tempfile
-from nose.tools import eq_
from numpy.testing import assert_array_equal
from pyhdf.SD import SDC
@@ -29,7 +28,7 @@ def test_long_varname():
name, _, _, _, _ = sds.info()
sds.endaccess()
sd.end()
- eq_(sds_name, name)
+ assert sds_name == name
finally:
shutil.rmtree(temp)
@@ -42,16 +41,16 @@ def test_negative_int8():
data = np.zeros(shape=(20,20), dtype=np.int8)
sds = sd.create("testsds", SDC.INT8, data.shape)
sds.setfillvalue(-1)
- eq_(sds.getfillvalue(), -1)
+ assert sds.getfillvalue() == -1
sds.setrange(-50, -30)
min, max = sds.getrange()
- eq_(min, -50)
- eq_(max, -30)
+ assert min == -50
+ assert max == -30
attr = sds.attr("testattr")
attr.set(SDC.INT8, -1)
- eq_(attr.get(), -1)
+ assert attr.get() == -1
dim = sds.dim(0)
scale = [-1]*20
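The change above drops the unmaintained nose dependency: nose.tools.eq_
calls become plain assert statements, and pytest's assertion rewriting
still reports both operands on failure. A rough sketch of the equivalence
(the eq_ shim is illustrative, not nose's exact source):

    def eq_(a, b, msg=None):
        # Approximately what nose.tools.eq_ did.
        assert a == b, msg or "%r != %r" % (a, b)

    def test_styles_agree():
        eq_(2 + 2, 4)         # old nose style
        assert 2 + 2 == 4     # new pytest style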
=====================================
setup.py
=====================================
@@ -153,7 +153,7 @@ setup(name = 'pyhdf',
license = 'MIT',
long_description = "\n".join(DOCLINES[2:]),
url = 'https://github.com/fhs/pyhdf',
- version = '0.10.3',
+ version = '0.10.5',
packages = ['pyhdf'],
ext_modules = [_hdfext],
data_files = data_files,
View it on GitLab: https://salsa.debian.org/debian-gis-team/python-hdf4/-/commit/f03b000374f0d236e64b9d6a1779c3a6e7b6222e