[Git][debian-gis-team/python-rtree][upstream] New upstream version 1.4.1
Bas Couwenberg (@sebastic)
gitlab@salsa.debian.org
Thu Aug 14 04:37:30 BST 2025
Bas Couwenberg pushed to branch upstream at Debian GIS Project / python-rtree
Commits:
3bb36c49 by Bas Couwenberg at 2025-08-14T05:26:24+02:00
New upstream version 1.4.1
- - - - -
16 changed files:
- − .github/workflows/deploy.yml
- .github/workflows/test.yml
- .pre-commit-config.yaml
- CHANGES.rst
- README.md
- docs/source/install.rst
- docs/source/performance.rst
- docs/source/tutorial.rst
- pyproject.toml
- rtree/__init__.py
- rtree/finder.py
- rtree/index.py
- + tests/common.py
- tests/conftest.py
- tests/test_index.py
- tox.ini
Changes:
=====================================
.github/workflows/deploy.yml deleted
=====================================
@@ -1,74 +0,0 @@
-name: Build and upload to PyPI
-
-on:
- workflow_dispatch:
- pull_request:
- push:
- branches:
- - master
- paths:
- - '.github/workflows/deploy.yml'
- release:
- types:
- - published
-
-jobs:
- build_wheels:
- name: Build wheel on ${{ matrix.os }}
- runs-on: ${{ matrix.os }}
- strategy:
- matrix:
- os:
- - windows-latest
- - ubuntu-latest
- - ubuntu-24.04-arm
- - macos-latest
-
- steps:
- - uses: actions/checkout@v4
-
- - uses: actions/setup-python@v5
- name: Install Python
- with:
- python-version: '3.11'
-
- - uses: ilammy/msvc-dev-cmd@v1
- if: startsWith(matrix.os, 'windows')
-
- - name: Build wheels
- uses: pypa/cibuildwheel@v2.23.0
-
- - uses: actions/upload-artifact@v4
- with:
- name: cibw-wheels-${{ matrix.os }}
- path: ./wheelhouse/*.whl
-
- build_sdist:
- name: Build source distribution
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
-
- - name: Build sdist
- run: pipx run build --sdist
-
- - uses: actions/upload-artifact@v4
- with:
- name: cibw-sdist
- path: dist/*.tar.gz
-
- upload_pypi:
- needs: [build_wheels, build_sdist]
- runs-on: ubuntu-latest
- environment: pypi
- permissions:
- id-token: write
- if: github.event_name == 'release' && github.event.action == 'published'
- steps:
- - uses: actions/download-artifact@v4
- with:
- pattern: cibw-*
- path: dist
- merge-multiple: true
-
- - uses: pypa/gh-action-pypi-publish@release/v1
=====================================
.github/workflows/test.yml
=====================================
@@ -2,16 +2,20 @@ name: Test
on:
push:
- branches:
- - master
pull_request:
workflow_dispatch:
schedule:
- cron: '0 6 * * 1'
+ release:
+ types: [published, prereleased, released]
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
+ cancel-in-progress: true
jobs:
conda:
- name: Conda ${{ matrix.python-version }} - ${{ matrix.os }}
+ name: Conda Python ${{ matrix.python-version }}, SIDX-${{ matrix.sidx-version }}, ${{ matrix.os }}
defaults:
run:
shell: bash -l {0}
@@ -25,7 +29,7 @@ jobs:
sidx-version: ['1.8.5', '2.1.0']
exclude:
- os: 'macos-latest'
- - sidx-version: '1.8.5'
+ sidx-version: '1.8.5'
steps:
- uses: actions/checkout@v4
@@ -33,16 +37,17 @@ jobs:
with:
channels: conda-forge
auto-update-conda: true
+ conda-remove-defaults: true
python-version: ${{ matrix.python-version }}
- name: Setup
- run: conda install -c conda-forge numpy pytest libspatialindex=${{ matrix.sidx-version }} -y
+ run: conda install -c conda-forge pip numpy pytest libspatialindex=${{ matrix.sidx-version }} -y
- name: Install
run: pip install -e .
- name: Test with pytest
- run: pytest --import-mode=importlib -Werror -v --doctest-modules rtree tests
+ run: pytest -Werror -v --doctest-modules rtree tests
ubuntu:
name: Ubuntu Python ${{ matrix.python-version }}
@@ -73,4 +78,109 @@ jobs:
run: pip install --user .
- name: Test with pytest
- run: pytest --import-mode=importlib -Werror -v --doctest-modules rtree tests
+ run: pytest -Werror -v --doctest-modules rtree tests
+
+ - name: Run doctests
+ run: pytest -Werror -v --doctest-modules docs/source/*.rst
+
+ build_wheels:
+ name: Wheel ${{ matrix.os }}
+ runs-on: ${{ matrix.os }}
+ needs: [ build_sdist]
+ strategy:
+ matrix:
+ include:
+ - os: "windows-latest"
+ msvc_arch: "x64"
+ - os: "windows-11-arm"
+ msvc_arch: "ARM64"
+ - os: "ubuntu-latest"
+ - os: "ubuntu-24.04-arm"
+ - os: "macos-latest"
+
+ steps:
+ - uses: actions/checkout@v4
+
+ - uses: actions/setup-python@v5
+ name: Install Python
+ with:
+ python-version: '3.13'
+
+ - uses: ilammy/msvc-dev-cmd@v1.13.0
+ with:
+ arch: ${{ matrix.msvc_arch }}
+ if: startsWith(matrix.os, 'windows')
+
+ - name: Build wheels
+ uses: pypa/cibuildwheel@v3.1.3
+ env:
+ CIBW_BUILD: ${{ runner.os == 'Windows' && runner.arch == 'ARM64' && 'cp311-*' || 'cp39-*' }}
+
+ - uses: actions/upload-artifact@v4
+ with:
+ name: cibw-wheels-${{ matrix.os }}
+ path: ./wheelhouse/*.whl
+
+ build_sdist:
+ name: Source Distribution
+ runs-on: ubuntu-latest
+ needs: [ conda, ubuntu ]
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Build sdist
+ run: pipx run build --sdist
+
+ - uses: actions/upload-artifact@v4
+ with:
+ name: cibw-sdist
+ path: dist/*.tar.gz
+
+ attest:
+ name: "Attest Artifacts"
+ runs-on: ubuntu-latest
+ needs: [build_wheels]
+ permissions:
+ id-token: write
+ attestations: write
+ steps:
+ - uses: actions/download-artifact@v5
+ with:
+ pattern: cibw-*
+ path: dist
+ merge-multiple: true
+ - uses: actions/attest-build-provenance@v2
+ with:
+ subject-path: 'dist/**/*'
+
+ publish:
+ name: Gather ${{ github.ref_name }} release
+ runs-on: ubuntu-latest
+ needs: [attest]
+ environment: pypi
+ permissions:
+ contents: write
+ id-token: write
+ steps:
+ - uses: actions/download-artifact@v5
+ name: Download release artifacts
+ with:
+ pattern: cibw-*
+ path: dist
+ merge-multiple: true
+
+ - uses: softprops/action-gh-release@v2
+ if: startsWith(github.ref, 'refs/tags/')
+ name: Publish release as draft
+ with:
+ make_latest: false
+ fail_on_unmatched_files: true
+ prerelease: true
+ generate_release_notes: true
+ draft: true
+ files: |
+ dist/*
+
+ - uses: pypa/gh-action-pypi-publish@release/v1
+ if: github.event_name == 'release' && github.event.action == 'released'
+ name: "Publish to PyPI"
=====================================
.pre-commit-config.yaml
=====================================
@@ -8,12 +8,12 @@ repos:
- id: end-of-file-fixer
- id: trailing-whitespace
- repo: https://github.com/python-jsonschema/check-jsonschema
- rev: 0.31.2
+ rev: 0.33.2
hooks:
- id: check-github-workflows
args: ["--verbose"]
- repo: https://github.com/astral-sh/ruff-pre-commit
- rev: v0.9.9
+ rev: v0.12.2
hooks:
# Run the linter
- id: ruff
@@ -21,7 +21,7 @@ repos:
# Run the formatter
- id: ruff-format
- repo: https://github.com/pre-commit/mirrors-mypy
- rev: v1.15.0
+ rev: v1.16.1
hooks:
- id: mypy
exclude: 'docs/.'
=====================================
CHANGES.rst
=====================================
@@ -1,3 +1,17 @@
+1.4.1: 2025-08-13
+=================
+
+- Rename main branch references by :user:`mwtoews` in :PR:`356`
+- Fixing an incorrect reassignment in nearest_v and intersection_v by :user:`Atilleusz` in :PR:`358`
+- Add spatialindex version to tests, add common pytest configuration by :user:`mwtoews` in :PR:`360`
+- Refactor array-loading methods, add tests by :user:`mwtoews` in :PR:`361`
+- Minor refactor of code blocks in docs by :user:`mwtoews` in :PR:`362`
+- Resolve some issues in the batch API by :user:`FreddieWitherden` in :PR:`367`
+- fix #369 (load libspatialindex without changing cwd) by :user:`remicres` in :PR:`370`
+- arm64 wheels on windows by @w8sl in :PR:`378` and :PR:`371`
+
+`Full Changelog <https://github.com/Toblerity/rtree/compare/1.4.0...1.4.1>`__
+
1.4.0: 2025-03-06
=================
=====================================
README.md
=====================================
@@ -1,6 +1,6 @@
# Rtree: Spatial indexing for Python
-
+[](https://github.com/Toblerity/rtree/actions/workflows/test.yml)
[](https://badge.fury.io/py/rtree)
=====================================
docs/source/install.rst
=====================================
@@ -10,7 +10,9 @@ First, download and install version 1.8.5+ of the `libspatialindex`_ library fro
https://libspatialindex.org
-The library supports CMake builds, so it is a matter of::
+The library supports CMake builds, so it is a matter of:
+
+.. code-block:: console
$ mkdir build && cd build
$ cmake ..
@@ -20,15 +22,21 @@ The library supports CMake builds, so it is a matter of::
You may need to run the ``ldconfig`` command after installing the library to
ensure that applications can find it at startup time.
-Rtree can be easily installed via pip::
+Rtree can be easily installed via pip:
+
+.. code-block:: console
$ pip install rtree
-or by running in a local source directory::
+or by running in a local source directory:
+
+.. code-block:: console
$ pip install -e .
-You can build and test in place like::
+You can build and test in place like:
+
+.. code-block:: console
$ pytest
@@ -37,9 +45,11 @@ Windows
The Windows DLLs of `libspatialindex`_ are pre-compiled in
windows installers that are available from `PyPI`_. Installation on Windows
-is as easy as::
+is as easy as:
+
+.. code-block:: console
- pip install rtree
+ $ pip install rtree
.. _`PyPI`: https://pypi.org/project/rtree/
=====================================
docs/source/performance.rst
=====================================
@@ -6,7 +6,7 @@ Performance
See the `benchmarks.py`_ file for a comparison of various query methods
and how much acceleration can be obtained from using Rtree.
-.. _benchmarks.py: https://github.com/Toblerity/rtree/blob/master/benchmarks/benchmarks.py
+.. _benchmarks.py: https://github.com/Toblerity/rtree/blob/main/benchmarks/benchmarks.py
There are a few simple things that will improve performance.
@@ -17,12 +17,14 @@ This will substantially (orders of magnitude in many cases) improve
performance over :py:meth:`~rtree.index.Index.insert` by allowing the data to
be pre-sorted
-::
+.. code-block:: pycon
+ >>> from rtree import index
>>> def generator_function(somedata):
- ... for i, obj in enumerate(somedata):
- ... yield (i, (obj.xmin, obj.ymin, obj.xmax, obj.ymax), obj)
- >>> r = index.Index(generator_function(somedata))
+ ... for i, obj in enumerate(somedata):
+ ... yield (i, (obj.xmin, obj.ymin, obj.xmax, obj.ymax), obj)
+ ...
+ >>> r = index.Index(generator_function(somedata)) # doctest: +SKIP
After bulk loading the index, you can then insert additional records into
the index using :py:meth:`~rtree.index.Index.insert`
@@ -30,12 +32,14 @@ the index using :py:meth:`~rtree.index.Index.insert`
Override :py:data:`~rtree.index.Index.dumps` to use the highest pickle protocol
...............................................................................
-::
+.. code-block:: pycon
- >>> import cPickle, rtree
+ >>> import pickle
+ >>> import rtree
>>> class FastRtree(rtree.Rtree):
... def dumps(self, obj):
- ... return cPickle.dumps(obj, -1)
+ ... return pickle.dumps(obj, -1)
+ ...
>>> r = FastRtree()
.. topic:: Update from January 2024
@@ -45,13 +49,16 @@ Override :py:data:`~rtree.index.Index.dumps` to use the highest pickle protocol
.. _pull request on GitHub: https://github.com/Toblerity/rtree/pull/197
-Use objects='raw'
+Use objects="raw"
...............................................................................
In any :py:meth:`~rtree.index.Index.intersection` or
-:py:meth:`~rtree.index.Index.nearest` or query, use objects='raw' keyword
-argument ::
+:py:meth:`~rtree.index.Index.nearest` or query, use ``objects="raw"`` keyword
+argument:
+.. code-block:: pycon
+
+ >>> xmin, ymin, xmax, ymax = 0.0, 0.0, 1.0, 1.0
>>> objs = r.intersection((xmin, ymin, xmax, ymax), objects="raw")
=====================================
docs/source/tutorial.rst
=====================================
@@ -18,7 +18,9 @@ Import
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
After :ref:`installing <installation>` :ref:`Rtree <home>`, you should be able to
-open up a Python prompt and issue the following::
+open up a Python prompt and issue the following:
+
+.. code-block:: pycon
>>> from rtree import index
@@ -31,7 +33,9 @@ Construct an instance
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
After importing the index module, construct an index with the default
-construction::
+construction:
+
+.. code-block:: pycon
>>> idx = index.Index()
@@ -45,7 +49,9 @@ Create a bounding box
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
After instantiating the index, create a bounding box that we can
-insert into the index::
+insert into the index:
+
+.. code-block:: pycon
>>> left, bottom, right, top = (0.0, 0.0, 1.0, 1.0)
@@ -61,7 +67,9 @@ insert into the index::
Insert records into the index
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Insert an entry into the index::
+Insert an entry into the index:
+
+.. code-block:: pycon
>>> idx.insert(0, (left, bottom, right, top))
@@ -90,13 +98,17 @@ There are three primary methods for querying the index.
Intersection
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
-Given a query window, return ids that are contained within the window::
+Given a query window, return ids that are contained within the window:
+
+.. code-block:: pycon
>>> list(idx.intersection((1.0, 1.0, 2.0, 2.0)))
[0]
Given a query window that is beyond the bounds of data we have in the
-index::
+index:
+
+.. code-block:: pycon
>>> list(idx.intersection((1.0000001, 1.0000001, 2.0, 2.0)))
[]
@@ -105,7 +117,9 @@ Nearest Neighbors
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
The following finds the 1 nearest item to the given bounds. If multiple items
-are of equal distance to the bounds, both are returned::
+are of equal distance to the bounds, both are returned:
+
+.. code-block:: pycon
>>> idx.insert(1, (left, bottom, right, top))
>>> list(idx.nearest((1.0000001, 1.0000001, 2.0, 2.0), 1))
@@ -119,12 +133,16 @@ Using Rtree as a cheapo spatial database
Rtree also supports inserting any object you can pickle into the index (called
a clustered index in `libspatialindex`_ parlance). The following inserts the
-picklable object ``42`` into the index with the given id::
+picklable object ``42`` into the index with the given id ``2``:
+
+.. code-block:: pycon
- >>> idx.insert(id=id, coordinates=(left, bottom, right, top), obj=42)
+ >>> idx.insert(id=2, coordinates=(left, bottom, right, top), obj=42)
You can then return a list of objects by giving the ``objects=True`` flag
-to intersection::
+to intersection:
+
+.. code-block:: pycon
>>> [n.object for n in idx.intersection((left, bottom, right, top), objects=True)]
[None, None, 42]
@@ -140,17 +158,28 @@ Serializing your index to a file
One of :ref:`Rtree <home>`'s most useful properties is the ability to
serialize Rtree indexes to disk. These include the clustered indexes
-described :ref:`here <clustered>`::
+described :ref:`here <clustered>`:
- >>> file_idx = index.Rtree('rtree')
+.. code-block:: pycon
+
+ >>> import os
+ >>> from tempfile import TemporaryDirectory
+ >>> prev_dir = os.getcwd()
+ >>> temp_dir = TemporaryDirectory()
+ >>> os.chdir(temp_dir.name)
+ >>> file_idx = index.Rtree("myidx")
>>> file_idx.insert(1, (left, bottom, right, top))
>>> file_idx.insert(2, (left - 1.0, bottom - 1.0, right + 1.0, top + 1.0))
>>> [n for n in file_idx.intersection((left, bottom, right, top))]
[1, 2]
+ >>> sorted(os.listdir())
+ ['myidx.dat', 'myidx.idx']
+ >>> os.chdir(prev_dir)
+ >>> temp_dir.cleanup()
.. note::
- By default, if an index file with the given name `rtree` in the example
+ By default, if an index file with the given name ``myidx`` in the example
above already exists on the file system, it will be opened in append mode
and not be re-created. You can control this behavior with the
:py:attr:`rtree.index.Property.overwrite` property of the index property
@@ -170,12 +199,12 @@ that are created when serializing index data to disk. These file extensions
are controllable using the :py:attr:`rtree.index.Property.dat_extension` and
:py:attr:`rtree.index.Property.idx_extension` index properties.
-::
+.. code-block:: pycon
- >>> p = rtree.index.Property()
- >>> p.dat_extension = 'data'
- >>> p.idx_extension = 'index'
- >>> file_idx = index.Index('rtree', properties = p)
+ >>> p = index.Property()
+ >>> p.dat_extension = "data"
+ >>> p.idx_extension = "index"
+ >>> file_idx = index.Index("rtree", properties=p) # doctest: +SKIP
3D indexes
..............................................................................
@@ -185,17 +214,23 @@ following is a 3D index that is to be stored on disk. Persisted indexes are
stored on disk using two files -- an index file (.idx) and a data (.dat) file.
You can modify the extensions these files use by altering the properties of
the index at instantiation time. The following creates a 3D index that is
-stored on disk as the files ``3d_index.data`` and ``3d_index.index``::
+stored on disk as the files ``3d_index.data`` and ``3d_index.index``:
+
+.. code-block:: pycon
>>> from rtree import index
+ >>> temp_dir = TemporaryDirectory()
+ >>> os.chdir(temp_dir.name)
>>> p = index.Property()
>>> p.dimension = 3
- >>> p.dat_extension = 'data'
- >>> p.idx_extension = 'index'
- >>> idx3d = index.Index('3d_index',properties=p)
+ >>> p.dat_extension = "data"
+ >>> p.idx_extension = "index"
+ >>> idx3d = index.Index("3d_index", properties=p)
>>> idx3d.insert(1, (0, 60, 23.0, 0, 60, 42.0))
- >>> idx3d.intersection( (-1, 62, 22, -1, 62, 43))
- [1L]
+ >>> list(idx3d.intersection((-1, 60, 22, 1, 62, 43)))
+ [1]
+ >>> os.chdir(prev_dir)
+ >>> temp_dir.cleanup()
ZODB and Custom Storages
..............................................................................
=====================================
pyproject.toml
=====================================
@@ -15,12 +15,11 @@ description = "R-Tree spatial index for Python GIS"
readme = "README.md"
requires-python = ">=3.9"
keywords = ["gis", "spatial", "index", "r-tree"]
-license = {text = "MIT"}
+license = "MIT"
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
- "License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.9",
@@ -49,7 +48,6 @@ version = {attr = "rtree.__version__"}
rtree = ["py.typed"]
[tool.cibuildwheel]
-build = "cp39-*"
build-verbosity = 3
before-all = "pip install wheel"
repair-wheel-command = "python scripts/repair_wheel.py -w {dest_dir} {wheel}"
@@ -82,7 +80,7 @@ before-build = [
]
[tool.cibuildwheel.windows]
-archs = ["AMD64"]
+archs = ["auto64"]
before-build = [
"call {project}\\scripts\\install_libspatialindex.bat",
]
@@ -95,6 +93,11 @@ exclude_lines = [
"@overload",
]
+[tool.pytest.ini_options]
+minversion = "6.0"
+addopts = "--import-mode=importlib"
+testpaths = ["tests"]
+
[tool.ruff.lint]
select = [
"E", "W", # pycodestyle
=====================================
rtree/__init__.py
=====================================
@@ -7,6 +7,6 @@ hyperrectangular intersection queries.
from __future__ import annotations
-__version__ = "1.4.0"
+__version__ = "1.4.1"
from .index import Index, Rtree # noqa
=====================================
rtree/finder.py
=====================================
@@ -86,8 +86,6 @@ def load() -> ctypes.CDLL:
except importlib.metadata.PackageNotFoundError:
pass
- # get the starting working directory
- cwd = os.getcwd()
for cand in _candidates:
if cand.is_dir():
# if our candidate is a directory use best guess
@@ -104,9 +102,9 @@ def load() -> ctypes.CDLL:
continue
try:
- # move to the location we're checking
- os.chdir(path)
# try loading the target file candidate
+ # These should be fully specified paths to
+ # files
rt = ctypes.cdll.LoadLibrary(str(target))
if rt is not None:
return rt
@@ -115,8 +113,6 @@ def load() -> ctypes.CDLL:
f"rtree.finder ({target}) unexpected error: {err!s}",
file=sys.stderr,
)
- finally:
- os.chdir(cwd)
try:
# try loading library using LD path search
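For context, the rtree/finder.py hunk above drops the os.chdir() steps around library loading (fix for #369 via :PR:`370`): libspatialindex is now loaded from a fully specified path, so the caller's working directory is never changed. A minimal sketch of that pattern with ctypes, not the upstream finder code; the candidate list and path below are illustrative only:

    import ctypes
    from pathlib import Path

    def load_spatialindex(candidates):
        """Return the first libspatialindex CDLL loadable from a full path."""
        for cand in candidates:
            target = Path(cand)
            if not target.is_file():
                continue
            try:
                # Passing an absolute path means no os.chdir() is needed,
                # unlike the previous approach.
                return ctypes.cdll.LoadLibrary(str(target))
            except OSError:
                continue
        raise OSError("libspatialindex not found in candidate paths")

    # Hypothetical usage:
    # rt = load_spatialindex(["/usr/lib/x86_64-linux-gnu/libspatialindex_c.so"])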
=====================================
rtree/index.py
=====================================
@@ -1061,12 +1061,7 @@ class Index:
"""
import numpy as np
- assert mins.shape == maxs.shape
- assert mins.strides == maxs.strides
-
- # Cast
- mins = mins.astype(np.float64)
- maxs = maxs.astype(np.float64)
+ mins, maxs = self._prepare_v_arrays(mins, maxs)
# Extract counts
n, d = mins.shape
@@ -1085,7 +1080,7 @@ class Index:
self.handle,
n - offn,
d,
- len(ids),
+ len(ids) - offi,
d_i_stri,
d_j_stri,
mins[offn:].ctypes.data,
@@ -1103,12 +1098,13 @@ class Index:
offi += counts[offn : offn + nr.value].sum()
offn += nr.value
- ids = ids.resize(2 * len(ids), refcheck=False)
+ ids.resize(2 * len(ids) + counts[offn], refcheck=False)
def nearest_v(
self,
mins,
maxs,
+ *,
num_results=1,
max_dists=None,
strict=False,
@@ -1144,12 +1140,7 @@ class Index:
"""
import numpy as np
- assert mins.shape == maxs.shape
- assert mins.strides == maxs.strides
-
- # Cast
- mins = mins.astype(np.float64)
- maxs = maxs.astype(np.float64)
+ mins, maxs = self._prepare_v_arrays(mins, maxs)
# Extract counts
n, d = mins.shape
@@ -1164,9 +1155,11 @@ class Index:
offn, offi = 0, 0
if max_dists is not None:
- assert len(max_dists) == n
-
- dists = max_dists.astype(np.float64).copy()
+ dists = np.ascontiguousarray(np.atleast_1d(max_dists), dtype=np.float64)
+ if dists.ndim != 1:
+ raise ValueError("max_dists must have 1 dimension")
+ if len(dists) != n:
+ raise ValueError(f"max_dists must have length {n}")
elif return_max_dists:
dists = np.zeros(n)
else:
@@ -1178,7 +1171,7 @@ class Index:
num_results if not strict else -num_results,
n - offn,
d,
- len(ids),
+ len(ids) - offi,
d_i_stri,
d_j_stri,
mins[offn:].ctypes.data,
@@ -1189,7 +1182,7 @@ class Index:
ctypes.byref(nr),
)
- # If we got the expected nuber of results then return
+ # If we got the expected number of results then return
if nr.value == n - offn:
if return_max_dists:
return ids[: counts.sum()], counts, dists
@@ -1200,7 +1193,34 @@ class Index:
offi += counts[offn : offn + nr.value].sum()
offn += nr.value
- ids = ids.resize(2 * len(ids), refcheck=False)
+ ids.resize(2 * len(ids) + counts[offn], refcheck=False)
+
+ def _prepare_v_arrays(self, mins, maxs):
+ import numpy as np
+
+ # Ensure inputs are 2D float64 arrays
+ if mins is maxs:
+ mins = maxs = np.atleast_2d(mins).astype(np.float64)
+ else:
+ mins = np.atleast_2d(mins).astype(np.float64)
+ maxs = np.atleast_2d(maxs).astype(np.float64)
+
+ if mins.ndim != 2 or maxs.ndim != 2:
+ raise ValueError("mins/maxs must have 2 dimensions: (n, d)")
+ if mins.shape != maxs.shape:
+ raise ValueError("mins and maxs shapes not equal")
+ if mins.strides != maxs.strides:
+ raise ValueError("mins and maxs strides not equal")
+
+ # Handle invalid strides
+ if any(s % mins.itemsize for s in mins.strides):
+ if mins is maxs:
+ mins = maxs = mins.copy()
+ else:
+ mins = mins.copy()
+ maxs = maxs.copy()
+
+ return mins, maxs
def _nearestTP(self, coordinates, velocities, times, num_results=1, objects=False):
p_mins, p_maxs = self.get_coordinate_pointers(coordinates)
@@ -1427,22 +1447,26 @@ class Index:
return IndexStreamHandle(self.properties.handle, stream)
def _create_idx_from_array(self, ibuf, minbuf, maxbuf):
- assert len(ibuf) == len(minbuf)
- assert len(ibuf) == len(maxbuf)
- assert minbuf.strides == maxbuf.strides
+ import numpy as np
+
+ # Prepare the arrays
+ ibuf = ibuf.astype(np.int64)
+ minbuf, maxbuf = self._prepare_v_arrays(minbuf, maxbuf)
+
+ if len(ibuf) != len(minbuf):
+ raise ValueError("index and point counts different")
- # Cast
- ibuf = ibuf.astype(int)
- minbuf = minbuf.astype(float)
- maxbuf = maxbuf.astype(float)
+ # Handle misaligned data
+ if ibuf.strides[0] % ibuf.itemsize:
+ ibuf = ibuf.copy()
# Extract counts
n, d = minbuf.shape
# Compute strides
- i_stri = ibuf.strides[0] // 8
- d_i_stri = minbuf.strides[0] // 8
- d_j_stri = minbuf.strides[1] // 8
+ i_stri = ibuf.strides[0] // ibuf.itemsize
+ d_i_stri = minbuf.strides[0] // minbuf.itemsize
+ d_j_stri = minbuf.strides[1] // minbuf.itemsize
return IndexArrayHandle(
self.properties.handle,
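For context, the rtree/index.py hunks above fix the remaining-capacity bookkeeping in the batch queries (``len(ids) - offi`` and the ``ids.resize`` call) and centralise input validation in the new ``_prepare_v_arrays`` helper. A minimal usage sketch of the batch intersection API, assuming libspatialindex >= 2.1.0; the boxes and query windows are illustrative only:

    import numpy as np

    from rtree import index

    idx = index.Index()  # default 2D, interleaved index
    idx.insert(0, (0.0, 0.0, 1.0, 1.0))
    idx.insert(1, (2.0, 2.0, 3.0, 3.0))

    # One row per query window: (n, d) arrays of lower and upper corners.
    mins = np.array([[0.0, 0.0], [2.0, 2.0]])
    maxs = np.array([[1.5, 1.5], [3.5, 3.5]])

    ids, counts = idx.intersection_v(mins, maxs)
    # ids concatenates the hits of all windows; counts[i] is the number of
    # hits belonging to window i.
    print(ids.tolist(), counts.tolist())  # expected: [0, 1] [1, 1]

``nearest_v`` follows the same calling convention, with ``num_results``, ``max_dists``, ``strict`` and ``return_max_dists`` becoming keyword-only in this release.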
=====================================
tests/common.py
=====================================
@@ -0,0 +1,10 @@
+"""Common test functions."""
+
+import pytest
+
+from rtree.core import rt
+
+sidx_version_string = rt.SIDX_Version().decode()
+sidx_version = tuple(map(int, sidx_version_string.split(".", maxsplit=3)[:3]))
+
+skip_sidx_lt_210 = pytest.mark.skipif(sidx_version < (2, 1, 0), reason="SIDX < 2.1.0")
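For context, ``tests/common.py`` is new in this release: it exposes the detected libspatialindex version and a shared skip marker used to gate the vectorised-query tests. A minimal sketch of the gating pattern (a hypothetical test, shown only to illustrate how the marker is applied in test_index.py below):

    from .common import sidx_version, skip_sidx_lt_210


    @skip_sidx_lt_210
    def test_requires_sidx_210() -> None:
        # Runs only when libspatialindex >= 2.1.0 is installed.
        assert sidx_version >= (2, 1, 0)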
=====================================
tests/conftest.py
=====================================
@@ -8,7 +8,7 @@ import numpy
import py
import pytest
-import rtree
+from .common import sidx_version_string
data_files = ["boxes_15x15.data"]
@@ -25,7 +25,7 @@ def temporary_working_directory(tmpdir: py.path.local) -> Iterator[None]:
def pytest_report_header(config):
"""Header for pytest."""
vers = [
- f"SIDX version: {rtree.core.rt.SIDX_Version().decode()}",
+ f"SIDX version: {sidx_version_string}",
f"NumPy version: {numpy.__version__}",
]
return "\n".join(vers)
=====================================
tests/test_index.py
=====================================
@@ -14,6 +14,8 @@ import rtree
from rtree import core, index
from rtree.exceptions import RTreeError
+from .common import skip_sidx_lt_210
+
class IndexTestCase(unittest.TestCase):
def setUp(self) -> None:
@@ -268,6 +270,26 @@ class IndexIntersection(IndexTestCase):
self.assertEqual([1, 1], list(idx.intersection((0, 0, 5, 5))))
+ @skip_sidx_lt_210
+ def test_intersection_v(self) -> None:
+ mins = np.array([[0, 1]] * 2).T
+ maxs = np.array([[60, 50]] * 2).T
+ ret = self.idx.intersection_v(mins, maxs)
+ assert type(ret) is tuple
+ ids, counts = ret
+ assert ids.dtype == np.int64
+ ids0 = [0, 4, 16, 27, 35, 40, 47, 50, 76, 80]
+ ids1 = [0, 16, 27, 35, 47, 76]
+ assert ids.tolist() == ids0 + ids1
+ assert counts.dtype == np.uint64
+ assert counts.tolist() == [len(ids0), len(ids1)]
+
+ # errors
+ with pytest.raises(ValueError, match="must have 2 dimensions"):
+ self.idx.intersection_v(np.ones((2, 3, 4)), 4)
+ with pytest.raises(ValueError, match="shapes not equal"):
+ self.idx.intersection_v([0], [10, 12])
+
class TestIndexIntersectionUnion:
@pytest.fixture(scope="class")
@@ -314,6 +336,17 @@ class TestIndexIntersectionUnion:
else:
assert False
+ @skip_sidx_lt_210
+ def test_intersection_v_interleaved(
+ self, index_a_interleaved: index.Index, index_b_interleaved: index.Index
+ ) -> None:
+ index_c_interleaved = index_a_interleaved & index_b_interleaved
+ mins = index_c_interleaved.bounds[0:2]
+ maxs = index_c_interleaved.bounds[2:4]
+ idxs, counts = index_c_interleaved.intersection_v(mins, maxs)
+ assert idxs.tolist() == [0, 1]
+ assert counts.tolist() == [2]
+
def test_intersection_uninterleaved(
self, index_a_uninterleaved: index.Index, index_b_uninterleaved: index.Index
) -> None:
@@ -330,6 +363,17 @@ class TestIndexIntersectionUnion:
else:
assert False
+ @skip_sidx_lt_210
+ def test_intersection_v_uninterleaved(
+ self, index_a_uninterleaved: index.Index, index_b_uninterleaved: index.Index
+ ) -> None:
+ index_c_uninterleaved = index_a_uninterleaved & index_b_uninterleaved
+ mins = index_c_uninterleaved.bounds[0::2]
+ maxs = index_c_uninterleaved.bounds[1::2]
+ idxs, counts = index_c_uninterleaved.intersection_v(mins, maxs)
+ assert idxs.tolist() == [0, 1]
+ assert counts.tolist() == [2]
+
def test_intersection_mismatch(
self, index_a_interleaved: index.Index, index_b_uninterleaved: index.Index
) -> None:
@@ -617,6 +661,46 @@ class IndexNearest(IndexTestCase):
hits = sorted(idx.nearest((13, 0, 20, 2), 3))
self.assertEqual(hits, [3, 4, 5])
+ @skip_sidx_lt_210
+ def test_nearest_v_basic(self) -> None:
+ mins = np.array([[0, 5]] * 2).T
+ maxs = np.array([[10, 15]] * 2).T
+ ret = self.idx.nearest_v(mins, maxs, num_results=3)
+ assert type(ret) is tuple
+ ids, counts = ret
+ assert ids.dtype == np.int64
+ ids0 = [76, 48, 19]
+ ids1 = [76, 47, 48]
+ assert ids.tolist() == ids0 + ids1
+ assert counts.dtype == np.uint64
+ assert counts.tolist() == [3, 3]
+
+ ret = self.idx.nearest_v(mins, maxs, num_results=3, return_max_dists=True)
+ assert type(ret) is tuple
+ ids, counts, max_dists = ret
+ assert ids.tolist() == ids0 + ids1
+ assert counts.tolist() == [3, 3]
+ assert max_dists.dtype == np.float64
+ np.testing.assert_allclose(max_dists, [7.54938045, 11.05686397])
+
+ ret = self.idx.nearest_v(
+ mins, maxs, num_results=3, max_dists=[10, 10], return_max_dists=True
+ )
+ ids, counts, max_dists = ret
+ assert ids.tolist() == ids0 + ids1[:2]
+ assert counts.tolist() == [3, 2]
+ np.testing.assert_allclose(max_dists, [7.54938045, 3.92672575])
+
+ # errors
+ with pytest.raises(ValueError, match="must have 2 dimensions"):
+ self.idx.nearest_v(np.ones((2, 3, 4)), 4)
+ with pytest.raises(ValueError, match="shapes not equal"):
+ self.idx.nearest_v([0], [10, 12])
+ with pytest.raises(ValueError, match="max_dists must have 1 dimension"):
+ self.idx.nearest_v(maxs, mins, max_dists=[[10]])
+ with pytest.raises(ValueError, match="max_dists must have length 2"):
+ self.idx.nearest_v(maxs, mins, max_dists=[10])
+
def test_nearest_equidistant(self) -> None:
"""Test that if records are equidistant, both are returned."""
point = (0, 0)
@@ -677,25 +761,47 @@ class IndexDelete(IndexTestCase):
self.assertEqual(hits, [])
-class IndexMoreDimensions(IndexTestCase):
- def test_3d(self) -> None:
- """Test we make and query a 3D index"""
+class Index3d(IndexTestCase):
+ """Test we make and query a 3D index"""
+
+ def setUp(self) -> None:
p = index.Property()
p.dimension = 3
- idx = index.Index(properties=p, interleaved=False)
- idx.insert(1, (0, 0, 60, 60, 22, 22.0))
- hits = idx.intersection((-1, 1, 58, 62, 22, 24))
+ self.idx = index.Index(properties=p, interleaved=False)
+ self.idx.insert(1, (0, 0, 60, 60, 22, 22.0))
+ self.coords = (-1, 1, 58, 62, 22, 24)
+
+ def test_intersection(self) -> None:
+ hits = self.idx.intersection(self.coords)
self.assertEqual(list(hits), [1])
- def test_4d(self) -> None:
- """Test we make and query a 4D index"""
+ @skip_sidx_lt_210
+ def test_intersection_v(self) -> None:
+ idxs, counts = self.idx.intersection_v(self.coords[0::2], self.coords[1::2])
+ assert idxs.tolist() == [1]
+ assert counts.tolist() == [1]
+
+
+class Index4d(IndexTestCase):
+ """Test we make and query a 4D index"""
+
+ def setUp(self) -> None:
p = index.Property()
p.dimension = 4
- idx = index.Index(properties=p, interleaved=False)
- idx.insert(1, (0, 0, 60, 60, 22, 22.0, 128, 142))
- hits = idx.intersection((-1, 1, 58, 62, 22, 24, 120, 150))
+ self.idx = index.Index(properties=p, interleaved=False)
+ self.idx.insert(1, (0, 0, 60, 60, 22, 22.0, 128, 142))
+ self.coords = (-1, 1, 58, 62, 22, 24, 120, 150)
+
+ def test_intersection(self) -> None:
+ hits = self.idx.intersection(self.coords)
self.assertEqual(list(hits), [1])
+ @skip_sidx_lt_210
+ def test_intersection_v(self) -> None:
+ idxs, counts = self.idx.intersection_v(self.coords[0::2], self.coords[1::2])
+ assert idxs.tolist() == [1]
+ assert counts.tolist() == [1]
+
class IndexStream(IndexTestCase):
def test_stream_input(self) -> None:
=====================================
tox.ini
=====================================
@@ -12,5 +12,4 @@ install_command =
python -I -m pip install --only-binary=:all: {opts} {packages}
ignore_errors = True
ignore_outcome = True
-commands =
- pytest --import-mode=importlib {posargs:tests}
+commands = pytest
View it on GitLab: https://salsa.debian.org/debian-gis-team/python-rtree/-/commit/3bb36c4993607bf9660a0cd8f4a9c6d71f0c124c