[Git][debian-gis-team/pint-xarray][upstream] New upstream version 0.6.1

Antonio Valentino (@antonio.valentino) gitlab at salsa.debian.org
Sat Mar 28 17:02:15 GMT 2026



Antonio Valentino pushed to branch upstream at Debian GIS Project / pint-xarray


Commits:
306bc372 by Antonio Valentino at 2026-03-28T16:30:16+00:00
New upstream version 0.6.1
- - - - -


16 changed files:

- .github/workflows/ci-additional.yml
- .github/workflows/ci.yml
- .github/workflows/nightly.yml
- − .github/workflows/parse_logs.py
- .github/workflows/pypi.yaml
- .pre-commit-config.yaml
- docs/whats-new.rst
- pint_xarray/_expects.py
- pint_xarray/conversion.py
- pint_xarray/index.py
- pint_xarray/tests/test_accessors.py
- pint_xarray/tests/test_conversion.py
- pint_xarray/tests/test_index.py
- − pixi.lock
- + pixi.toml
- pyproject.toml


Changes:

=====================================
.github/workflows/ci-additional.yml
=====================================
@@ -23,20 +23,20 @@ jobs:
 
     steps:
       - name: checkout the repository
-        uses: actions/checkout at v5
+        uses: actions/checkout at v6
         with:
           # need to fetch all tags to get a correct version
           fetch-depth: 0 # fetch all branches and tags
 
       - name: setup environment
-        uses: prefix-dev/setup-pixi at fef5c9568ca6c4ff7707bf840ab0692ba3f08293 # 0.9.0
+        uses: prefix-dev/setup-pixi at a0af7a228712d6121d37aba47adf55c1332c9c2e # 0.9.4
         with:
-          environments: "doctests"
+          environments: "ci-py313"
 
       - name: import pint-xarray
         run: |
-          pixi run -e doctests python -c 'import pint_xarray'
+          pixi run -e ci-py313 python -c 'import pint_xarray'
 
       - name: run doctests
         run: |
-          pixi run -e doctests doctests
+          pixi run -e ci-py313 doctests


=====================================
.github/workflows/ci.yml
=====================================
@@ -16,11 +16,11 @@ jobs:
   detect-skip-ci-trigger:
     name: "Detect CI Trigger: [skip-ci]"
     if: github.event_name == 'push' || github.event_name == 'pull_request'
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-slim
     outputs:
       triggered: ${{ steps.detect-trigger.outputs.trigger-found }}
     steps:
-      - uses: actions/checkout at v5
+      - uses: actions/checkout at v6
         with:
           fetch-depth: 2
       - uses: xarray-contrib/ci-trigger at v1
@@ -28,10 +28,39 @@ jobs:
         with:
           keyword: "[skip-ci]"
 
+  cache-pixi-lock:
+    name: "Cache pixi lock"
+    needs: detect-skip-ci-trigger
+    runs-on: ubuntu-slim
+    if: |
+      always()
+      && github.repository == 'xarray-contrib/pint-xarray'
+      && (
+        github.event_name == 'workflow_dispatch' || github.event_name == 'push'
+        || (
+          github.event_name == 'pull_request'
+          && (
+            needs.detect-skip-ci-trigger.outputs.triggered == 'false'
+            && !contains(github.event.pull_request.labels.*.name, 'skip-ci')
+          )
+        )
+      )
+
+    outputs:
+      cache-key: ${{ steps.pixi-lock.outputs.cache-key }}
+      pixi-version: ${{ steps.pixi-lock.outputs.pixi-version }}
+    steps:
+      - uses: actions/checkout at de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+        with:
+          persist-credentials: false
+
+      - uses: Parcels-code/pixi-lock/create-and-cache at 5cbd7a155fa24aa0711ec0a9aad9ede0819e84a1 # v0.1.0
+        id: pixi-lock
+
   ci:
     name: ${{ matrix.os }} ${{ matrix.env }}
     runs-on: ${{ matrix.os }}
-    needs: detect-skip-ci-trigger
+    needs: cache-pixi-lock
     defaults:
       run:
         shell: bash -l {0}
@@ -39,30 +68,29 @@ jobs:
     env:
       FORCE_COLOR: 3
 
-    if: |
-      always()
-      && github.repository == 'xarray-contrib/pint-xarray'
-      && (
-        github.event_name == 'workflow_dispatch'
-        || needs.detect-skip-ci-trigger.outputs.triggered == 'false'
-      )
-
     strategy:
       fail-fast: false
       matrix:
-        env: ["tests-py311", "tests-py312", "tests-py313"]
+        env: ["ci-py311", "ci-py313", "ci-py314"]
         os: ["ubuntu-latest", "macos-latest", "windows-latest"]
 
     steps:
       - name: checkout the repository
-        uses: actions/checkout at v5
+        uses: actions/checkout at v6
         with:
           # need to fetch all tags to get a correct version
           fetch-depth: 0 # fetch all branches and tags
 
+      - uses: Parcels-code/pixi-lock/restore at 5cbd7a155fa24aa0711ec0a9aad9ede0819e84a1 # v0.1.0
+        with:
+          cache-key: ${{ needs.cache-pixi-lock.outputs.cache-key }}
+
       - name: setup environment
-        uses: prefix-dev/setup-pixi at fef5c9568ca6c4ff7707bf840ab0692ba3f08293 # 0.9.0
+        uses: prefix-dev/setup-pixi at a0af7a228712d6121d37aba47adf55c1332c9c2e # 0.9.4
         with:
+          pixi-version: "${{ needs.cache-pixi-lock.outputs.pixi-version }}"
+          frozen: true
+          cache: true
           environments: "${{ matrix.env }}"
 
       - name: investigate env variables
@@ -81,7 +109,7 @@ jobs:
           pixi run -e ${{ matrix.env }} tests --cov-report=xml
 
       - name: Upload code coverage to Codecov
-        uses: codecov/codecov-action at v5.5.0
+        uses: codecov/codecov-action at v5.5.3
         with:
           token: "${{ secrets.CODECOV_TOKEN }}"
           files: ./coverage.xml


=====================================
.github/workflows/nightly.yml
=====================================
@@ -20,11 +20,11 @@ jobs:
     if: |
       github.repository_owner == 'xarray-contrib'
       && (github.event_name == 'push' || github.event_name == 'pull_request')
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-slim
     outputs:
       triggered: ${{ steps.detect-trigger.outputs.trigger-found }}
     steps:
-      - uses: actions/checkout at v5
+      - uses: actions/checkout at v6
         with:
           fetch-depth: 2
       - uses: xarray-contrib/ci-trigger at v1.2
@@ -57,17 +57,13 @@ jobs:
 
     steps:
       - name: checkout the repository
-        uses: actions/checkout at v5
+        uses: actions/checkout at v6
         with:
           # need to fetch all tags to get a correct version
           fetch-depth: 0 # fetch all branches and tags
 
-      - name: remove lockfile
-        run: |
-          rm pixi.lock
-
       - name: setup environment
-        uses: prefix-dev/setup-pixi at fef5c9568ca6c4ff7707bf840ab0692ba3f08293 # 0.9.0
+        uses: prefix-dev/setup-pixi at a0af7a228712d6121d37aba47adf55c1332c9c2e # 0.9.4
         with:
           environments: "nightly"
           locked: false
@@ -87,8 +83,8 @@ jobs:
       - name: report failures
         if: |
           failure()
-          && steps.tests.outcome == 'failure'
+          && steps.status.outcome == 'failure'
           && github.event_name == 'schedule'
-        uses: xarray-contrib/issue-from-pytest-log at v1
+        uses: scientific-python/issue-from-pytest-log at v1
         with:
           log-path: pytest-log.jsonl


=====================================
.github/workflows/parse_logs.py deleted
=====================================
@@ -1,102 +0,0 @@
-# type: ignore
-import argparse
-import functools
-import json
-import pathlib
-import textwrap
-from dataclasses import dataclass
-
-from pytest import CollectReport, TestReport
-
-
- at dataclass
-class SessionStart:
-    pytest_version: str
-    outcome: str = "status"
-
-    @classmethod
-    def _from_json(cls, json):
-        json_ = json.copy()
-        json_.pop("$report_type")
-        return cls(**json_)
-
-
- at dataclass
-class SessionFinish:
-    exitstatus: str
-    outcome: str = "status"
-
-    @classmethod
-    def _from_json(cls, json):
-        json_ = json.copy()
-        json_.pop("$report_type")
-        return cls(**json_)
-
-
-def parse_record(record):
-    report_types = {
-        "TestReport": TestReport,
-        "CollectReport": CollectReport,
-        "SessionStart": SessionStart,
-        "SessionFinish": SessionFinish,
-    }
-    cls = report_types.get(record["$report_type"])
-    if cls is None:
-        raise ValueError(f"unknown report type: {record['$report_type']}")
-
-    return cls._from_json(record)
-
-
- at functools.singledispatch
-def format_summary(report):
-    return f"{report.nodeid}: {report}"
-
-
- at format_summary.register
-def _(report: TestReport):
-    message = report.longrepr.chain[0][1].message
-    return f"{report.nodeid}: {message}"
-
-
- at format_summary.register
-def _(report: CollectReport):
-    message = report.longrepr.split("\n")[-1].removeprefix("E").lstrip()
-    return f"{report.nodeid}: {message}"
-
-
-def format_report(reports, py_version):
-    newline = "\n"
-    summaries = newline.join(format_summary(r) for r in reports)
-    message = textwrap.dedent(
-        """\
-        <details><summary>Python {py_version} Test Summary</summary>
-
-        ```
-        {summaries}
-        ```
-
-        </details>
-        """
-    ).format(summaries=summaries, py_version=py_version)
-    return message
-
-
-if __name__ == "__main__":
-    parser = argparse.ArgumentParser()
-    parser.add_argument("filepath", type=pathlib.Path)
-    args = parser.parse_args()
-
-    py_version = args.filepath.stem.split("-")[1]
-
-    print("Parsing logs ...")
-
-    lines = args.filepath.read_text().splitlines()
-    reports = [parse_record(json.loads(line)) for line in lines]
-
-    failed = [report for report in reports if report.outcome == "failed"]
-
-    message = format_report(failed, py_version=py_version)
-
-    output_file = pathlib.Path("pytest-logs.txt")
-    print(f"Writing output file to: {output_file.absolute()}")
-    output_file.write_text(message)


=====================================
.github/workflows/pypi.yaml
=====================================
@@ -10,11 +10,11 @@ jobs:
     runs-on: ubuntu-latest
     if: github.repository == 'xarray-contrib/pint-xarray'
     steps:
-      - uses: actions/checkout at v5
+      - uses: actions/checkout at v6
         with:
           fetch-depth: 0
 
-      - uses: actions/setup-python at v5
+      - uses: actions/setup-python at v6
         name: Install Python
         with:
           python-version: "3.x"
@@ -40,7 +40,7 @@ jobs:
           else
             echo "✅ Looks good"
           fi
-      - uses: actions/upload-artifact at v4
+      - uses: actions/upload-artifact at v7
         with:
           name: releases
           path: dist
@@ -57,11 +57,11 @@ jobs:
       id-token: write
 
     steps:
-      - uses: actions/download-artifact at v5
+      - uses: actions/download-artifact at v8
         with:
           name: releases
           path: dist
       - name: Publish package to PyPI
-        uses: pypa/gh-action-pypi-publish at 76f52bc884231f62b9a034ebfe128415bbaabdfc
+        uses: pypa/gh-action-pypi-publish at ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e
         with:
           verbose: true


=====================================
.pre-commit-config.yaml
=====================================
@@ -1,7 +1,6 @@
 ci:
   autoupdate_schedule: weekly
 
-# https://pre-commit.com/
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: v6.0.0
@@ -10,7 +9,7 @@ repos:
       - id: end-of-file-fixer
       - id: check-docstring-first
   - repo: https://github.com/rbubley/mirrors-prettier
-    rev: v3.6.2
+    rev: v3.8.1
     hooks:
       - id: prettier
         args: ["--cache-location=.prettier_cache/cache"]
@@ -18,31 +17,31 @@ repos:
     rev: v0.9.3
     hooks:
       - id: taplo-format
-        args: [--option, array_auto_collapse=false]
+        args: ["--option", "array_auto_collapse=false"]
       - id: taplo-lint
-        args: [--no-schema]
+        args: ["--no-schema"]
   - repo: https://github.com/abravalheri/validate-pyproject
-    rev: v0.24.1
+    rev: v0.25
     hooks:
       - id: validate-pyproject
-  # https://github.com/python/black#version-control-integration
   - repo: https://github.com/psf/black-pre-commit-mirror
-    rev: 25.1.0
+    rev: 26.3.1
     hooks:
       - id: black-jupyter
   - repo: https://github.com/keewis/blackdoc
-    rev: v0.4.1
+    rev: v0.4.6
     hooks:
       - id: blackdoc
-        additional_dependencies: ["black==25.1.0"]
+        additional_dependencies: ["black==26.3.1"]
       - id: blackdoc-autoupdate-black
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.12.10
+    rev: v0.15.7
     hooks:
       - id: ruff
-        args: [--fix]
+        args: ["--fix"]
   - repo: https://github.com/kynan/nbstripout
-    rev: 0.8.1
+    rev: 0.9.1
     hooks:
       - id: nbstripout
-        args: [--extra-keys=metadata.kernelspec metadata.language_info.version]
+        args:
+          - "--extra-keys='metadata.kernelspec metadata.language_info.version'"


=====================================
docs/whats-new.rst
=====================================
@@ -2,6 +2,15 @@
 
 What's new
 ==========
+0.6.1 (23 Mar 2026)
+-------------------
+- Properly rename the coordinate units (:pull:`351`).
+  By `Justus Magin <https://github.com/keewis>`_.
+- Don't rely on ``xarray``'s default behavior regarding ``attrs`` in :py:func:`pint_xarray.expects` (:issue:`360`, :pull:`367`).
+  By `Justus Magin <https://github.com/keewis>`_.
+- Fix quantifying an existing index and adjust the index conversion tests (:pull:`368`).
+  By `Justus Magin <https://github.com/keewis>`_.
+
 0.6.0 (31 Aug 2025)
 -------------------
 - Bump dependency versions (:pull:`313`):


=====================================
pint_xarray/_expects.py
=====================================
@@ -8,7 +8,7 @@ import pint.testing
 import xarray as xr
 
 from pint_xarray.accessors import get_registry
-from pint_xarray.conversion import extract_units
+from pint_xarray.conversion import extract_units, strip_units
 from pint_xarray.itertools import zip_mappings
 
 variable_parameters = (Parameter.VAR_POSITIONAL, Parameter.VAR_KEYWORD)
@@ -185,7 +185,7 @@ def expects(*args_units, return_value=None, **kwargs_units):
                     if isinstance(value, pint.Quantity):
                         params.arguments[name] = value.m_as(units)
                     elif isinstance(value, (xr.DataArray, xr.Dataset)):
-                        params.arguments[name] = value.pint.to(units).pint.dequantify()
+                        params.arguments[name] = strip_units(value.pint.to(units))
                     else:
                         raise TypeError(
                             f"Attempting to convert non-quantity {value} to {units}."


=====================================
pint_xarray/conversion.py
=====================================
@@ -137,10 +137,13 @@ def attach_units_index(index, index_vars, units):
         # skip non-quantity indexed variables
         return index
 
-    if isinstance(index, PintIndex) and index.units != units:
-        raise ValueError(
-            f"cannot attach units to quantified index: {index.units} != {units}"
-        )
+    if isinstance(index, PintIndex):
+        if index.units != units:
+            raise ValueError(
+                f"cannot attach units to quantified index: {index.units} != {units}"
+            )
+        else:
+            return index
 
     return PintIndex(index=index, units=units)
 
@@ -296,7 +299,7 @@ def convert_units_dataset(obj, units):
         try:
             converted_index = convert_units_index(idx, idx_vars, idx_units)
             indexes.update({k: converted_index for k in idx_vars})
-            index_vars.update(converted_index.create_variables())
+            index_vars.update(converted_index.create_variables(idx_vars))
         except (ValueError, pint.errors.PintTypeError) as e:
             names = tuple(idx_vars)
             failed[names] = e


=====================================
pint_xarray/index.py
=====================================
@@ -112,7 +112,10 @@ class PintIndex(Index):
         return self._replace(self.index.roll(shifts))
 
     def rename(self, name_dict, dims_dict):
-        return self._replace(self.index.rename(name_dict, dims_dict))
+        new_units = {new: self.units[old] for old, new in name_dict.items()}
+        return self.__class__(
+            index=self.index.rename(name_dict, dims_dict), units=new_units
+        )
 
     def __getitem__(self, indexer):
         return self._replace(self.index[indexer])


=====================================
pint_xarray/tests/test_accessors.py
=====================================
@@ -672,6 +672,39 @@ def test_to(obj, units, expected, error):
             KeyError,
             id="Dataset-incompatible units",
         ),
+        pytest.param(
+            xr.Dataset(
+                {
+                    "x": (
+                        "x",
+                        [10, 20, 30],
+                        {"units": unit_registry.Unit("dm"), "long_name": "length"},
+                    ),
+                    "y": (
+                        "y",
+                        [60, 120],
+                        {"units": unit_registry.Unit("s"), "long_name": "time"},
+                    ),
+                }
+            ),
+            {"x": Quantity([10, 30], "dm"), "y": Quantity([60], "s")},
+            xr.Dataset(
+                {
+                    "x": (
+                        "x",
+                        [10, 30],
+                        {"units": unit_registry.Unit("dm"), "long_name": "length"},
+                    ),
+                    "y": (
+                        "y",
+                        [60],
+                        {"units": unit_registry.Unit("s"), "long_name": "time"},
+                    ),
+                }
+            ),
+            None,
+            id="Dataset-coords with attrs",
+        ),
         pytest.param(
             xr.DataArray(
                 [[0, 1], [2, 3], [4, 5]],


=====================================
pint_xarray/tests/test_conversion.py
=====================================
@@ -418,11 +418,12 @@ class TestXarrayFunctions:
         expected_a = convert_quantity(q_a, units.get("a", original_units.get("a")))
         expected_b = convert_quantity(q_b, units.get("b", original_units.get("b")))
         expected_u = convert_quantity(q_u, units.get("u", original_units.get("u")))
-        expected_x = convert_quantity(q_x, units.get("x"))
+        expected_x = convert_quantity(q_x, units.get("x", original_units.get("x")))
         expected_index = PandasIndex(pd.Index(strip_quantity(expected_x)), "x")
-        if units.get("x") is not None:
+        expected_index_units = units.get("x", original_units.get("x"))
+        if expected_index_units is not None:
             expected_index = PintIndex(
-                index=expected_index, units={"x": units.get("x")}
+                index=expected_index, units={"x": expected_index_units}
             )
 
         expected = Dataset(


=====================================
pint_xarray/tests/test_index.py
=====================================
@@ -260,11 +260,12 @@ def test_roll(shifts, expected_index):
 @pytest.mark.parametrize("name_dict", ({"y2": "y3"}, {"y2": "y1"}))
 def test_rename(name_dict, dims_dict):
     wrapped_index = PandasIndex(pd.Index([1, 2], name="y2"), dim="y")
-    index = PintIndex(index=wrapped_index, units={"y": ureg.Unit("m")})
+    index = PintIndex(index=wrapped_index, units={"y2": ureg.Unit("m")})
 
     actual = index.rename(name_dict, dims_dict)
     expected = PintIndex(
-        index=wrapped_index.rename(name_dict, dims_dict), units=index.units
+        index=wrapped_index.rename(name_dict, dims_dict),
+        units={name_dict["y2"]: ureg.Unit("m")},
     )
 
     assert actual.equals(expected)


=====================================
pixi.lock deleted
=====================================
The diff for this file was not included because it is too large.

=====================================
pixi.toml
=====================================
@@ -0,0 +1,90 @@
+[workspace]
+channels = ["conda-forge"]
+platforms = ["linux-64", "osx-arm64", "win-64"]
+
+[dependencies]
+numpy = "*"
+pint = "*"
+xarray = "*"
+
+[pypi-dependencies]
+pint-xarray = { path = ".", editable = true }
+
+[feature.optional-deps.dependencies]
+dask = "*"
+scipy = "*"
+bottleneck = "*"
+
+[feature.tests.dependencies]
+pytest = ">=8"
+pytest-cov = "*"
+pytest-xdist = "*"
+cytoolz = "*"
+
+[feature.tests.tasks]
+tests = { cmd = "pytest -n auto --cov=pint_xarray", cwd = ".", default-environment = "dev" }
+doctests = { cmd = "pytest --doctest-modules pint_xarray --ignore pint_xarray/tests", cwd = ".", default-environment = "dev" }
+
+[feature.py311.dependencies]
+python = "3.11.*"
+
+[feature.py312.dependencies]
+python = "3.12.*"
+
+[feature.py313.dependencies]
+python = "3.13.*"
+
+[feature.py314.dependencies]
+python = "3.14.*"
+
+[feature.nightly.pypi-options]
+extra-index-urls = [
+  "https://pypi.anaconda.org/scientific-python-nightly-wheels/simple",
+]
+
+[feature.nightly.pypi-dependencies]
+pint-xarray = { path = ".", editable = true }
+pint = { git = "git+https://github.com/hgrecco/pint.git" }
+xarray = "*"
+numpy = "*"
+scipy = "*"
+
+[feature.nightly.dependencies]
+pytest-reportlog = ">=0.1.2"
+python = "3.13.*"
+
+[feature.docs.dependencies]
+sphinx = "*"
+sphinx-rtd-theme = ">=1.0"
+sphinx-autosummary-accessors = "*"
+nbsphinx = "*"
+cf-xarray = ">=0.10"
+pooch = "*"
+netcdf4 = "*"
+ipython = "*"
+ipykernel = "*"
+jupyter_client = "*"
+matplotlib-base = "*"
+sphinx-autobuild = "*"
+python = "3.13.*"
+
+[feature.docs.tasks]
+build-docs = { cmd = "rm -rf generated/; python -m sphinx -b html -w warnings.log -W -Tn -j auto . _build", cwd = "docs" }
+autobuild-docs = { cmd = "sphinx-autobuild -b html -w warnings.log -W -Tn -j auto . _build", cwd = "docs" }
+build-docs-rtd = { cmd = "python -m sphinx -b html -W -T -j auto . $READTHEDOCS_OUTPUT/html", cwd = "docs" }
+
+[feature.dev.dependencies]
+ipython = "*"
+ipdb = "*"
+pooch = ">=1.8.2,<2"
+netcdf4 = ">=1.7.2,<2"
+
+[environments]
+tests = ["optional-deps", "tests"]
+nightly = { features = ["tests", "nightly"], no-default-feature = true }
+docs = ["docs"]
+ci-py311 = ["optional-deps", "tests", "py311"]
+ci-py312 = ["optional-deps", "tests", "py312"]
+ci-py313 = ["optional-deps", "tests", "py313"]
+ci-py314 = ["optional-deps", "tests", "py314"]
+dev = ["optional-deps", "tests", "py313", "dev"]


=====================================
pyproject.toml
=====================================
@@ -15,6 +15,7 @@ classifiers = [
   "Programming Language :: Python :: 3.11",
   "Programming Language :: Python :: 3.12",
   "Programming Language :: Python :: 3.13",
+  "Programming Language :: Python :: 3.14",
   "Topic :: Scientific/Engineering",
 ]
 requires-python = ">=3.11"
@@ -98,101 +99,3 @@ omit = ["pint_xarray/tests/*"]
 [tool.coverage.report]
 show_missing = true
 exclude_lines = ["pragma: no cover", "if TYPE_CHECKING"]
-
-[tool.pixi.workspace]
-channels = ["conda-forge"]
-platforms = ["linux-64", "osx-arm64", "win-64"]
-
-[tool.pixi.dependencies]
-numpy = "*"
-pint = "*"
-xarray = "*"
-
-[tool.pixi.pypi-dependencies]
-pint-xarray = { path = ".", editable = true }
-
-[tool.pixi.feature.optional-deps.dependencies]
-dask = "*"
-scipy = "*"
-bottleneck = "*"
-
-[tool.pixi.feature.tests.dependencies]
-pytest = ">=8"
-pytest-cov = "*"
-pytest-xdist = "*"
-cytoolz = "*"
-
-[tool.pixi.feature.tests-py311.dependencies]
-python = "3.11.*"
-
-[tool.pixi.feature.tests-py311.tasks]
-tests = "pytest -n auto --cov=pint_xarray"
-
-[tool.pixi.feature.tests-py312.dependencies]
-python = "3.12.*"
-
-[tool.pixi.feature.tests-py312.tasks]
-tests = "pytest -n auto --cov=pint_xarray"
-
-[tool.pixi.feature.tests-py313.dependencies]
-python = "3.13.*"
-
-[tool.pixi.feature.tests-py313.tasks]
-doctests = "pytest --doctest-modules pint_xarray --ignore pint_xarray/tests"
-tests = "pytest -n auto --cov=pint_xarray"
-
-[tool.pixi.feature.nightly.pypi-options]
-extra-index-urls = [
-  "https://pypi.anaconda.org/scientific-python-nightly-wheels/simple",
-]
-
-[tool.pixi.feature.nightly.pypi-dependencies]
-pint-xarray = { path = ".", editable = true }
-pint = { git = "git+https://github.com/hgrecco/pint.git" }
-xarray = "*"
-numpy = "*"
-scipy = "*"
-
-[tool.pixi.feature.nightly.dependencies]
-pytest-reportlog = ">=0.1.2"
-python = "3.13.*"
-
-[tool.pixi.feature.nightly.tasks]
-tests = "pytest -n auto --cov=pint_xarray --report-log=tests.jsonl"
-
-[tool.pixi.feature.docs.dependencies]
-sphinx = "*"
-sphinx-rtd-theme = ">=1.0"
-sphinx-autosummary-accessors = "*"
-nbsphinx = "*"
-cf-xarray = ">=0.10"
-pooch = "*"
-netcdf4 = "*"
-ipython = "*"
-ipykernel = "*"
-jupyter_client = "*"
-matplotlib-base = "*"
-sphinx-autobuild = "*"
-python = "3.13.*"
-
-[tool.pixi.feature.docs.tasks]
-build-docs = { cmd = "rm -rf generated/; python -m sphinx -b html -w warnings.log -W -Tn -j auto . _build", cwd = "docs" }
-autobuild-docs = { cmd = "sphinx-autobuild -b html -w warnings.log -W -Tn -j auto . _build", cwd = "docs" }
-build-docs-rtd = { cmd = "python -m sphinx -b html -W -T -j auto . $READTHEDOCS_OUTPUT/html", cwd = "docs" }
-
-[tool.pixi.feature.dev.dependencies]
-ipython = "*"
-ipdb = "*"
-python = "3.13.*"
-pooch = ">=1.8.2,<2"
-netcdf4 = ">=1.7.2,<2"
-
-[tool.pixi.environments]
-tests = ["optional-deps", "tests"]
-nightly = { features = ["tests", "nightly"], no-default-feature = true }
-docs = ["docs"]
-tests-py311 = ["optional-deps", "tests", "tests-py311"]
-tests-py312 = ["optional-deps", "tests", "tests-py312"]
-tests-py313 = ["optional-deps", "tests", "tests-py313"]
-doctests = ["optional-deps", "tests", "tests-py313"]
-dev = ["optional-deps", "tests", "dev"]



View it on GitLab: https://salsa.debian.org/debian-gis-team/pint-xarray/-/commit/306bc37201ec369d883706ce0aabf341deade780

-- 
View it on GitLab: https://salsa.debian.org/debian-gis-team/pint-xarray/-/commit/306bc37201ec369d883706ce0aabf341deade780
You're receiving this email because of your account on salsa.debian.org.


-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/pkg-grass-devel/attachments/20260328/93f685ad/attachment-0001.htm>


More information about the Pkg-grass-devel mailing list