[Git][debian-gis-team/python-rtree][master] 6 commits: New upstream version 0.9.5
Bas Couwenberg
gitlab@salsa.debian.org
Fri Dec 18 15:28:17 GMT 2020
Bas Couwenberg pushed to branch master at Debian GIS Project / python-rtree
Commits:
f2bb26a0 by Bas Couwenberg at 2020-12-18T16:12:18+01:00
New upstream version 0.9.5
- - - - -
db5f2f90 by Bas Couwenberg at 2020-12-18T16:12:19+01:00
Update upstream source from tag 'upstream/0.9.5'
Update to upstream version '0.9.5'
with Debian dir fb4e3ba9a1199cd2cbbfa3fe6a6d52acb5476d3e
- - - - -
93c2e57d by Bas Couwenberg at 2020-12-18T16:12:39+01:00
New upstream release.
- - - - -
7830bc7a by Bas Couwenberg at 2020-12-18T16:16:11+01:00
Add python3-wheel to build dependencies.
- - - - -
389c9364 by Bas Couwenberg at 2020-12-18T16:22:22+01:00
Set PYTHONPATH for sphinx-build.
- - - - -
97fd0bcf by Bas Couwenberg at 2020-12-18T16:22:22+01:00
Set distribution to unstable.
- - - - -
30 changed files:
- + .github/workflows/build.yml
- + .github/workflows/wheels.yml
- .gitignore
- − .travis.yml
- README.md
- − azure-pipelines.yml
- + ci/CMakeLists.txt
- − ci/azp/conda.yml
- − ci/azp/docker.yml
- − ci/azp/linux-1604-pip.yml
- − ci/azp/linux-1804-pip.yml
- − ci/azp/osx.yml
- − ci/azp/win.yml
- + ci/install_libspatialindex.bash
- + ci/install_libspatialindex.bat
- debian/changelog
- debian/control
- debian/rules
- docs/source/conf.py
- docs/source/tutorial.txt
- + pyproject.toml
- rtree/__init__.py
- rtree/core.py
- + rtree/exceptions.py
- + rtree/finder.py
- rtree/index.py
- scripts/visualize.py
- setup.py
- tests/test_index.py
- tests/test_tpr.py
Changes:
=====================================
.github/workflows/build.yml
=====================================
@@ -0,0 +1,143 @@
+name: Build
+
+on:
+ pull_request:
+ branches:
+ - '*'
+ release:
+ types:
+ - published
+jobs:
+ conda:
+ name: Conda ${{ matrix.python-version }} - ${{ matrix.os }}
+
+ runs-on: ${{ matrix.os }}
+ strategy:
+ fail-fast: true
+ matrix:
+ os: ['ubuntu-latest', 'macos-latest', 'windows-latest']
+ python-version: ['3.8']
+ sidx-version: ['1.8.5','1.9.3']
+
+ steps:
+ - uses: actions/checkout@v2
+ - uses: conda-incubator/setup-miniconda@v2
+ with:
+ channels: conda-forge
+ auto-update-conda: true
+ python-version: ${{ matrix.python-version }}
+ - name: Setup
+ shell: bash -l {0}
+ run: |
+ conda install -c conda-forge numpy libspatialindex=${{ matrix.sidx-version }} -y
+
+ - name: Install
+ shell: bash -l {0}
+ run: |
+ pip install -e .
+ - name: Lint with flake8
+ shell: bash -l {0}
+ run: |
+ pip install flake8
+ flake8 rtree/
+ - name: Test with pytest
+ shell: bash -l {0}
+ run: |
+ pip install pytest
+ python -m pytest --doctest-modules rtree tests
+
+ ubuntu:
+ name: Ubuntu ${{ matrix.os }}
+
+ runs-on: ubuntu-16.04
+ strategy:
+ fail-fast: true
+ matrix:
+ os: ['ubuntu-16.04', 'ubuntu-18.04']
+
+ steps:
+ - uses: actions/checkout@v2
+ - name: Setup
+ shell: bash -l {0}
+ run: |
+ sudo apt install libspatialindex-c4v5 python3-pip
+ python3 -m pip install --upgrade pip
+ python3 -m pip install setuptools numpy flake8 pytest
+
+ - name: Build
+ shell: bash -l {0}
+ run: |
+ python3 -m pip install --user .
+ - name: Lint with flake8
+ shell: bash -l {0}
+ run: |
+ export PATH=$PATH:/home/runner/.local/bin
+ flake8 rtree/
+ - name: Test with pytest
+ shell: bash -l {0}
+ run: |
+ python3 -m pytest --doctest-modules rtree tests
+
+ docs:
+ name: Docs
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: true
+ container: osgeo/proj-docs
+ steps:
+ - uses: actions/checkout@v2
+ - name: Run libspatialindex build
+ run: |
+ apt-get update -y
+ apt-get install -y -qq libspatialindex-dev
+ pip3 install --user .
+ - name: Print versions
+ shell: bash -l {0}
+ run: |
+ python3 --version
+ sphinx-build --version
+ - name: Lint .rst files
+ shell: bash -l {0}
+ run: |
+ if find . -name '*.rst' | xargs grep -P '\t'; then echo 'Tabs are bad, please use four spaces in .rst files.'; false; fi
+ working-directory: ./docs
+ - name: HTML
+ shell: bash -l {0}
+ run: |
+ make html
+ working-directory: ./docs
+ - name: PDF
+ shell: bash -l {0}
+ run: |
+ make latexpdf
+ working-directory: ./docs
+
+
+ collect-artifacts:
+ name: Package and push release
+
+ #needs: [windows-wheel, linux-wheel, osx-wheel, conda, ubuntu]
+ needs: [conda, ubuntu]
+
+ runs-on: 'ubuntu-latest'
+ strategy:
+ fail-fast: true
+
+ steps:
+ - uses: actions/checkout@v2
+ - name: Source
+ shell: bash -l {0}
+ run: |
+ sudo apt install libspatialindex-c4v5 python3-pip
+ python3 -m pip install --upgrade pip
+ python3 -m pip install setuptools numpy flake8 pytest wheel
+ export PATH=$PATH:/home/runner/.local/bin
+ python3 setup.py sdist
+
+ - uses: pypa/gh-action-pypi-publish@master
+ name: Publish package
+ if: github.event_name == 'release' && github.event.action == 'published'
+ with:
+ user: __token__
+ password: ${{ secrets.pypi_token }}
+ packages_dir: ./dist
=====================================
.github/workflows/wheels.yml
=====================================
@@ -0,0 +1,56 @@
+name: Build Wheels
+
+on: [push, pull_request]
+
+jobs:
+ build_wheels:
+ name: Build wheel on ${{ matrix.os }}
+ runs-on: ${{ matrix.os }}
+ env:
+ CIBW_SKIP: pp* *-win32
+ CIBW_TEST_REQUIRES: pytest numpy
+ CIBW_TEST_COMMAND: "pytest -v {project}/tests"
+ # we are copying the shared libraries ourselves so skip magical copy
+ CIBW_REPAIR_WHEEL_COMMAND_MACOS: ""
+ CIBW_REPAIR_WHEEL_COMMAND_WINDOWS: ""
+ CIBW_BEFORE_BUILD_LINUX: "pip install cmake; bash {project}/ci/install_libspatialindex.bash"
+ strategy:
+ matrix:
+ os: [windows-latest, ubuntu-latest, macos-latest]
+ steps:
+ - uses: actions/checkout@v1
+ - uses: actions/setup-python@v1
+ name: Install Python
+ with:
+ python-version: '3.7'
+ - name: Install cibuildwheel
+ run: |
+ python -m pip install cibuildwheel==1.6.4
+ - name: Run MacOS Preinstall Build
+ if: startsWith(matrix.os, 'macos')
+ run: |
+ # provides sha256sum
+ brew install coreutils
+ pip install cmake
+ bash ci/install_libspatialindex.bash
+ - name: Run Windows Preinstall Build
+ if: startsWith(matrix.os, 'windows')
+ run: |
+ choco install vcpython27 -f -y
+ ci\install_libspatialindex.bat
+ - name: Build wheels
+ run: |
+ python -m cibuildwheel --output-dir wheelhouse
+ - uses: actions/upload-artifact@v1
+ with:
+ name: wheels
+ path: ./wheelhouse
+ - name: Upload To PyPi
+ env:
+ TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
+ TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
+ # TODO : remove `if false` statement after secrets are set in Github UI
+ if: false
+ run: |
+ pip install twine
+ twine upload ./wheelhouse/*
=====================================
.gitignore
=====================================
@@ -5,3 +5,5 @@ build/
dist/
*.idx
*.dat
+include
+lib
=====================================
.travis.yml deleted
=====================================
@@ -1,27 +0,0 @@
-dist: trusty
-
-cache:
- - pip
- - apt
-
-language: python
-
-matrix:
- include:
- - python: "3.5"
- - python: "3.6"
- sudo: required
- dist: trusty
-
-addons:
- apt:
- packages:
- - libspatialindex-c3
-
-install:
- - pip install flake8
- - pip install -e .
-
-script:
- - flake8 --ignore=E501 --exclude=rtree/__init__.py rtree/
- - python -m pytest --doctest-modules rtree tests/test_*
=====================================
README.md
=====================================
@@ -1,8 +1,11 @@
Rtree
=====
-[![Build Status](https://travis-ci.org/Toblerity/rtree.svg)](https://travis-ci.org/Toblerity/rtree)
+![Build](https://github.com/Toblerity/rtree/workflows/Build/badge.svg)
[![PyPI version](https://badge.fury.io/py/Rtree.svg)](https://badge.fury.io/py/Rtree)
-Python bindings for libspatialindex 1.8.3.
+RTree is a Python package with bindings for [libspatialindex](https://github.com/libspatialindex/libspatialindex). Wheels are available for most major platforms, and `rtree` with bundled `libspatialindex` can be installed via pip:
+```
+pip install rtree
+```
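As a quick illustration of the install path the updated README describes, a minimal usage sketch (assuming Rtree 0.9.5 with a working libspatialindex; the id and coordinates below are illustrative only):

    from rtree import index

    idx = index.Index()                          # in-memory 2D index
    idx.insert(0, (0.0, 0.0, 1.0, 1.0))          # id, (left, bottom, right, top)
    print(list(idx.intersection((0.5, 0.5, 2.0, 2.0))))   # -> [0]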
=====================================
azure-pipelines.yml deleted
=====================================
@@ -1,13 +0,0 @@
-pr:
- branches:
- include:
- - master
-
-jobs:
- - template: ./ci/azp/docker.yml
- - template: ./ci/azp/conda.yml
- - template: ./ci/azp/win.yml
- - template: ./ci/azp/osx.yml
- - template: ./ci/azp/linux-1604-pip.yml
- - template: ./ci/azp/linux-1804-pip.yml
-
=====================================
ci/CMakeLists.txt
=====================================
@@ -0,0 +1,237 @@
+#
+# top-level CMake configuration file for libspatialindex
+#
+# (based originally on the libLAS files copyright Mateusz Loskot)
+
+SET(MSVC_INCREMENTAL_DEFAULT OFF)
+cmake_minimum_required(VERSION 3.5.0)
+project(spatialindex)
+
+#------------------------------------------------------------------------------
+# internal cmake settings
+#------------------------------------------------------------------------------
+
+set(CMAKE_COLOR_MAKEFILE ON)
+
+# C++11 required
+set (CMAKE_CXX_STANDARD 11)
+
+# Allow advanced users to generate Makefiles printing detailed commands
+mark_as_advanced(CMAKE_VERBOSE_MAKEFILE)
+
+# Path to additional CMake modules
+set(CMAKE_MODULE_PATH "${PROJECT_SOURCE_DIR}/cmake/modules" ${CMAKE_MODULE_PATH})
+
+# Make string comparison in cmake behave like you'd expect
+cmake_policy(SET CMP0054 NEW)
+
+if (WIN32)
+ if(${CMAKE_VERSION} VERSION_GREATER "3.14.5")
+ cmake_policy(SET CMP0092 NEW) # don't put /w3 in flags
+ endif()
+endif()
+
+if (APPLE)
+ set(CMAKE_MACOSX_RPATH ON)
+endif (APPLE)
+
+#------------------------------------------------------------------------------
+# libspatialindex general settings
+#------------------------------------------------------------------------------
+
+SET(SIDX_VERSION_MAJOR "1")
+SET(SIDX_VERSION_MINOR "9")
+SET(SIDX_VERSION_PATCH "3")
+SET(SIDX_LIB_VERSION "6.1.1")
+SET(SIDX_LIB_SOVERSION "6")
+SET(BUILD_SHARED_LIBS ON)
+
+
+set(SIDX_VERSION_STRING "${SIDX_VERSION_MAJOR}.${SIDX_VERSION_MINOR}.${SIDX_VERSION_PATCH}")
+
+#------------------------------------------------------------------------------
+# libspatialindex general cmake options
+#------------------------------------------------------------------------------
+
+option(SIDX_BUILD_TESTS "Enables integrated test suites" OFF)
+
+
+# Name of C++ library
+
+set(SIDX_LIB_NAME spatialindex)
+set(SIDX_C_LIB_NAME spatialindex_c)
+
+if(WIN32)
+ if (MSVC)
+ if( CMAKE_SIZEOF_VOID_P EQUAL 8 )
+ set( SIDX_LIB_NAME "spatialindex-64" )
+ set( SIDX_C_LIB_NAME "spatialindex_c-64" )
+ else( CMAKE_SIZEOF_VOID_P EQUAL 8 )
+ set( SIDX_LIB_NAME "spatialindex-32" )
+ set( SIDX_C_LIB_NAME "spatialindex_c-32" )
+ endif( CMAKE_SIZEOF_VOID_P EQUAL 8 )
+ endif()
+endif()
+
+set(CMAKE_INCLUDE_DIRECTORIES_PROJECT_BEFORE ON)
+
+include (CheckFunctionExists)
+
+check_function_exists(srand48 HAVE_SRAND48)
+check_function_exists(gettimeofday HAVE_GETTIMEOFDAY)
+check_function_exists(memset HAVE_MEMSET)
+check_function_exists(memcpy HAVE_MEMCPY)
+check_function_exists(bcopy HAVE_BCOPY)
+
+
+INCLUDE (CheckIncludeFiles)
+
+
+#------------------------------------------------------------------------------
+# General build settings
+#------------------------------------------------------------------------------
+
+# note we default to RelWithDebInfo mode if not set
+if(NOT CMAKE_BUILD_TYPE)
+ set(CMAKE_BUILD_TYPE RelWithDebInfo CACHE STRING
+ "Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel" FORCE)
+endif()
+
+# Always show which build type we have
+message(STATUS "Setting libspatialindex build type - ${CMAKE_BUILD_TYPE}")
+
+set(SIDX_BUILD_TYPE ${CMAKE_BUILD_TYPE})
+
+# TODO: Still testing the output paths --mloskot
+set(SIDX_BUILD_OUTPUT_DIRECTORY "${PROJECT_BINARY_DIR}/bin")
+
+# Output directory in which to build RUNTIME target files.
+set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${SIDX_BUILD_OUTPUT_DIRECTORY})
+
+# Output directory in which to build LIBRARY target files
+set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${SIDX_BUILD_OUTPUT_DIRECTORY})
+
+# Output directory in which to build ARCHIVE target files.
+set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${SIDX_BUILD_OUTPUT_DIRECTORY})
+
+
+#------------------------------------------------------------------------------
+# Platform and compiler specific settings
+#------------------------------------------------------------------------------
+
+if(NOT WIN32)
+ # Recommended C++ compilation flags
+ set(SIDX_COMMON_CXX_FLAGS
+ "-pedantic -Wall -Wpointer-arith -Wcast-align -Wcast-qual -Wredundant-decls -Wno-long-long -Wl --no-undefined")
+endif(NOT WIN32)
+
+if (APPLE)
+ set(SO_EXT dylib)
+ set(CMAKE_FIND_FRAMEWORK "LAST")
+elseif(WIN32)
+ set(SO_EXT dll)
+else()
+ set(SO_EXT so)
+endif(APPLE)
+
+
+enable_testing()
+
+#------------------------------------------------------------------------------
+# installation path settings
+#------------------------------------------------------------------------------
+
+if(WIN32)
+ set(DEFAULT_LIB_SUBDIR lib)
+ set(DEFAULT_DATA_SUBDIR .)
+ set(DEFAULT_INCLUDE_SUBDIR include)
+
+ if (MSVC)
+ set(DEFAULT_BIN_SUBDIR bin)
+ else()
+ set(DEFAULT_BIN_SUBDIR .)
+ endif()
+else()
+ # Common locations for Unix and Mac OS X
+ set(DEFAULT_BIN_SUBDIR bin)
+ set(DEFAULT_LIB_SUBDIR lib${LIB_SUFFIX})
+ set(DEFAULT_DATA_SUBDIR share/spatialindex)
+ set(DEFAULT_INCLUDE_SUBDIR include)
+endif()
+
+# Locations are changeable by user to customize layout of SIDX installation
+# (default values are platform-specific)
+set(SIDX_BIN_SUBDIR ${DEFAULT_BIN_SUBDIR} CACHE STRING
+ "Subdirectory where executables will be installed")
+set(SIDX_LIB_SUBDIR ${DEFAULT_LIB_SUBDIR} CACHE STRING
+ "Subdirectory where libraries will be installed")
+set(SIDX_INCLUDE_SUBDIR ${DEFAULT_INCLUDE_SUBDIR} CACHE STRING
+ "Subdirectory where header files will be installed")
+set(SIDX_DATA_SUBDIR ${DEFAULT_DATA_SUBDIR} CACHE STRING
+ "Subdirectory where data will be installed")
+
+# Mark *_SUBDIR variables as advanced and dedicated to use by power-users only.
+mark_as_advanced(SIDX_BIN_SUBDIR
+ SIDX_LIB_SUBDIR SIDX_INCLUDE_SUBDIR SIDX_DATA_SUBDIR)
+
+# Full paths for the installation
+set(SIDX_BIN_DIR ${SIDX_BIN_SUBDIR})
+set(SIDX_LIB_DIR ${SIDX_LIB_SUBDIR})
+set(SIDX_INCLUDE_DIR ${SIDX_INCLUDE_SUBDIR})
+set(SIDX_DATA_DIR ${SIDX_DATA_SUBDIR})
+
+#------------------------------------------------------------------------------
+# subdirectory controls
+#------------------------------------------------------------------------------
+
+add_subdirectory(src)
+
+if(SIDX_BUILD_TESTS)
+ add_subdirectory(test)
+endif()
+
+#------------------------------------------------------------------------------
+# CPACK controls
+#------------------------------------------------------------------------------
+
+SET(CPACK_PACKAGE_VERSION_MAJOR ${SIDX_VERSION_MAJOR})
+SET(CPACK_PACKAGE_VERSION_MINOR ${SIDX_VERSION_MINOR})
+SET(CPACK_PACKAGE_VERSION_PATCH ${SIDX_VERSION_PATCH})
+SET(CPACK_PACKAGE_NAME "libspatialindex")
+
+SET(CPACK_SOURCE_GENERATOR "TBZ2;TGZ")
+SET(CPACK_PACKAGE_VENDOR "libspatialindex Development Team")
+SET(CPACK_RESOURCE_FILE_LICENSE "${PROJECT_SOURCE_DIR}/COPYING")
+
+set(CPACK_SOURCE_PACKAGE_FILE_NAME
+ "${CMAKE_PROJECT_NAME}-src-${SIDX_VERSION_STRING}")
+
+set(CPACK_SOURCE_IGNORE_FILES
+"/\\\\.gitattributes;/\\\\.vagrant;/\\\\.DS_Store;/CVS/;/\\\\.git/;\\\\.swp$;~$;\\\\.\\\\#;/\\\\#")
+
+list(APPEND CPACK_SOURCE_IGNORE_FILES "CMakeScripts/")
+list(APPEND CPACK_SOURCE_IGNORE_FILES "_CPack_Packages")
+list(APPEND CPACK_SOURCE_IGNORE_FILES "cmake_install.cmake")
+list(APPEND CPACK_SOURCE_IGNORE_FILES "/bin/")
+list(APPEND CPACK_SOURCE_IGNORE_FILES "/scripts/")
+list(APPEND CPACK_SOURCE_IGNORE_FILES "/azure-pipelines.yml")
+list(APPEND CPACK_SOURCE_IGNORE_FILES ".gitignore")
+list(APPEND CPACK_SOURCE_IGNORE_FILES ".ninja*")
+list(APPEND CPACK_SOURCE_IGNORE_FILES "HOWTORELEASE.txt")
+
+list(APPEND CPACK_SOURCE_IGNORE_FILES "README")
+list(APPEND CPACK_SOURCE_IGNORE_FILES "build/")
+
+list(APPEND CPACK_SOURCE_IGNORE_FILES "CMakeFiles")
+list(APPEND CPACK_SOURCE_IGNORE_FILES "CTestTestfile.cmake")
+list(APPEND CPACK_SOURCE_IGNORE_FILES "/docs/build/")
+list(APPEND CPACK_SOURCE_IGNORE_FILES "/doc/presentations/")
+list(APPEND CPACK_SOURCE_IGNORE_FILES "package-release.sh")
+list(APPEND CPACK_SOURCE_IGNORE_FILES "docker-package.sh")
+
+list(APPEND CPACK_SOURCE_IGNORE_FILES ".gz2")
+
+list(APPEND CPACK_SOURCE_IGNORE_FILES ".bz2")
+
+include(CPack)
+add_custom_target(dist COMMAND ${CMAKE_MAKE_PROGRAM} package_source)
=====================================
ci/azp/conda.yml deleted
=====================================
@@ -1,38 +0,0 @@
-jobs:
-- job:
- displayName: Conda Linux
- pool:
- vmImage: 'ubuntu-16.04'
- strategy:
- matrix:
- Python36_185:
- python.version: '3.6'
- sidx.version: '1.8.5'
- Python36_193:
- python.version: '3.6'
- sidx.version: '1.9.3'
- Python37:
- python.version: '3.7'
- sidx.version: '1.9.3'
- Python38:
- python.version: '3.8'
- sidx.version: '1.9.3'
-
- steps:
- - bash: echo "##vso[task.prependpath]$CONDA/bin"
- displayName: Add conda to PATH
-
- - bash: conda create --yes --quiet --name rtree
- displayName: Create Anaconda environment
-
- - bash: |
- source activate rtree
- conda install --yes --quiet --name rtree -c conda-forge python=$PYTHON_VERSION libspatialindex=$SIDX_VERSION
- displayName: Install Anaconda packages
-
- - bash: |
- source activate rtree
- pip install flake8 pytest numpy
- flake8 --ignore=E501 --exclude=rtree/__init__.py rtree/
- python -m pytest --doctest-modules rtree tests/test_*
- displayName: Lint with Flake8 and run unit tests
=====================================
ci/azp/docker.yml deleted
=====================================
@@ -1,42 +0,0 @@
-# -*- mode: yaml -*-
-
-jobs:
-- job:
- displayName: Conda - Docker
- pool:
- vmImage: ubuntu-16.04
- container:
- image: ubuntu:trusty
- options: --privileged
-
- steps:
- - bash: |
- DEBIAN_FRONTEND=noninteractive sudo apt-get update -y -qq
- DEBIAN_FRONTEND=noninteractive sudo apt-get install wget libspatialindex-c3 -y
- wget --quiet https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh
- bash miniconda.sh -b -p $HOME/miniconda
- export PATH="$HOME/miniconda/bin:$PATH"
- conda config --set always_yes yes
- conda config --add channels conda-forge
- conda update -q conda
- conda create -q -n test-environment python=3.6
- source activate test-environment
- displayName: 'Install prerequisites'
-
- - bash: |
- export PATH="$HOME/miniconda/bin:$PATH"
- source activate test-environment
- pip install .
- displayName: 'Install Rtree'
-
- - bash: |
- export PATH="$HOME/miniconda/bin:$PATH"
- source activate test-environment
- sudo locale-gen en_US.UTF-8
- sudo update-locale LANG=en_US.UTF-8
- export LANG="en_US.UTF-8"
- export LC_ALL="en_US.UTF-8"
- pip install flake8 pytest numpy
- flake8 --ignore=E501 --exclude=rtree/__init__.py rtree/
- python -m pytest --doctest-modules rtree tests/test_*
- displayName: Lint with Flake8 and run unit tests
=====================================
ci/azp/linux-1604-pip.yml deleted
=====================================
@@ -1,20 +0,0 @@
-jobs:
-- job:
- displayName: ubuntu-16.04–pip
- pool:
- vmImage: 'ubuntu-16.04'
-
- steps:
- - bash: sudo apt install libspatialindex-c4v5 python3-pip
- displayName: Install libspatialindex and pip
-
- - bash: |
- pip3 install setuptools
- pip3 install .
- displayName: pip install
-
- - bash: |
- pip3 install flake8 pytest numpy
- flake8 --ignore=E501 --exclude=rtree/__init__.py rtree/
- python3 -m pytest --doctest-modules rtree tests/test_*
- displayName: Lint with Flake8 and run unit tests
=====================================
ci/azp/linux-1804-pip.yml deleted
=====================================
@@ -1,20 +0,0 @@
-jobs:
-- job:
- displayName: ubuntu-18.04–pip
- pool:
- vmImage: 'ubuntu-18.04'
-
- steps:
- - bash: sudo apt install libspatialindex-c4v5 python3-pip
- displayName: Install libspatialindex and pip
-
- - bash: |
- pip3 install setuptools
- pip3 install .
- displayName: pip install
-
- - bash: |
- pip3 install flake8 pytest numpy
- flake8 --ignore=E501 --exclude=rtree/__init__.py rtree/
- python3 -m pytest --doctest-modules rtree tests/test_*
- displayName: Lint with Flake8 and run unit tests
=====================================
ci/azp/osx.yml deleted
=====================================
@@ -1,51 +0,0 @@
-# -*- mode: yaml -*-
-
-
-jobs:
-- job:
- displayName: Conda OSX 10.13
- pool:
- vmImage: 'macOS-10.13'
- strategy:
- matrix:
- Python36_185:
- python.version: '3.6'
- sidx.version: '1.8.5'
- Python36_193:
- python.version: '3.6'
- sidx.version: '1.9.3'
- Python37:
- python.version: '3.7'
- sidx.version: '1.9.3'
- Python38:
- python.version: '3.8'
- sidx.version: '1.9.3'
-
- steps:
- - script: |
- echo "Removing homebrew from Azure to avoid conflicts."
- curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/uninstall > ~/uninstall_homebrew
- chmod +x ~/uninstall_homebrew
- ~/uninstall_homebrew -fq
- rm ~/uninstall_homebrew
- displayName: Remove homebrew
- - bash: |
- echo "##vso[task.prependpath]$CONDA/bin"
- sudo chown -R $USER $CONDA
- displayName: Add conda to PATH
-
-
- - bash: conda create --yes --quiet --name rtree
- displayName: Create Anaconda environment
-
- - bash: |
- source activate rtree
- conda install --yes --quiet --name rtree -c conda-forge python=$PYTHON_VERSION libspatialindex=$SIDX_VERSION
- displayName: Install Anaconda packages
-
- - bash: |
- source activate rtree
- pip install flake8 pytest numpy
- flake8 --ignore=E501 --exclude=rtree/__init__.py rtree/
- python -m pytest --doctest-modules rtree tests/test_*
- displayName: Lint with Flake8 and run unit tests
=====================================
ci/azp/win.yml deleted
=====================================
@@ -1,40 +0,0 @@
-# -*- mode: yaml -*-
-
-jobs:
-- job:
- displayName: Conda Win64
- pool:
- vmImage: 'vs2017-win2016'
- strategy:
- matrix:
- Python36_185:
- python.version: '3.6'
- sidx.version: '1.8.5'
- Python36_193:
- python.version: '3.6'
- sidx.version: '1.9.3'
- Python37:
- python.version: '3.7'
- sidx.version: '1.9.3'
- Python38:
- python.version: '3.8'
- sidx.version: '1.9.3'
-
- steps:
- - powershell: Write-Host "##vso[task.prependpath]$env:CONDA\Scripts"
- displayName: Add conda to PATH
-
- - script: conda create --yes --quiet --name rtree
- displayName: Create Anaconda environment
-
- - script: |
- call activate rtree
- conda install --yes --quiet --name rtree -c conda-forge python=%PYTHON_VERSION% libspatialindex=%SIDX_VERSION%
- displayName: Install Anaconda packages
-
- - script: |
- call activate rtree
- pip install flake8 pytest numpy
- flake8 --ignore=E501 --exclude=rtree/__init__.py rtree/
- python -m pytest --doctest-modules rtree tests
- displayName: Lint with Flake8 and run unit tests
=====================================
ci/install_libspatialindex.bash
=====================================
@@ -0,0 +1,64 @@
+#!/bin/bash
+set -xe
+
+# A simple script to install libspatialindex from a Github Release
+VERSION=1.9.3
+SHA256=63a03bfb26aa65cf0159f925f6c3491b6ef79bc0e3db5a631d96772d6541187e
+
+
+# where to copy resulting files
+# this has to be run before `cd`-ing anywhere
+gentarget() {
+ OURPWD=$PWD
+ cd "$(dirname "$0")"
+ mkdir -p ../rtree/lib
+ cd ../rtree/lib
+ arr=$(pwd)
+ cd "$OURPWD"
+ echo $arr
+}
+
+scriptloc() {
+ OURPWD=$PWD
+ cd "$(dirname "$0")"
+ arr=$(pwd)
+ cd "$OURPWD"
+ echo $arr
+}
+# note that we're doing this convoluted thing to get
+# an absolute path so mac doesn't yell at us
+TARGET=`gentarget`
+SL=`scriptloc`
+
+rm $VERSION.zip || true
+curl -L -O https://github.com/libspatialindex/libspatialindex/archive/$VERSION.zip
+
+# check the file hash
+echo "${SHA256} ${VERSION}.zip" | sha256sum --check
+
+rm -rf "libspatialindex-${VERSION}" || true
+unzip $VERSION
+cd libspatialindex-${VERSION}
+
+mkdir build
+cd build
+
+cp "${SL}/CMakeLists.txt" ..
+
+cmake -DCMAKE_BUILD_TYPE=Release ..
+make -j 4
+
+# copy built libraries relative to path of this script
+# -d means copy links as links rather than duplicate files
+# macos uses "bsd cp" and needs special handling
+if [ "$(uname)" == "Darwin" ]; then
+ # change the rpath in the dylib to point to the same directory
+ install_name_tool -change @rpath/libspatialindex.6.dylib @loader_path/libspatialindex.dylib bin/libspatialindex_c.dylib
+ # copy the dylib files to the target directory
+ cp bin/libspatialindex.dylib $TARGET
+ cp bin/libspatialindex_c.dylib $TARGET
+else
+ cp -d bin/* $TARGET
+fi
+
+ls $TARGET
=====================================
ci/install_libspatialindex.bat
=====================================
@@ -0,0 +1,23 @@
+python -c "import sys; print(sys.version)"
+
+// A simple script to install libspatialindex from a Github Release
+curl -L -O https://github.com/libspatialindex/libspatialindex/archive/1.9.3.zip
+
+unzip 1.9.3.zip
+copy %~dp0\CMakeLists.txt libspatialindex-1.9.3\CMakeLists.txt
+cd libspatialindex-1.9.3
+
+mkdir build
+cd build
+
+cmake -D CMAKE_BUILD_TYPE=Release ..
+
+"C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\MSBuild\Current\Bin\amd64\MSBuild.exe" spatialindex.sln
+
+mkdir %~dp0\..\rtree\lib
+copy bin\Debug\*.dll %~dp0\..\rtree\lib
+rmdir /Q /S bin
+
+dir %~dp0\..\rtree\
+dir %~dp0\..\rtree\lib
+
=====================================
debian/changelog
=====================================
@@ -1,8 +1,11 @@
-python-rtree (0.9.4-3) UNRELEASED; urgency=medium
+python-rtree (0.9.5-1) unstable; urgency=medium
+ * New upstream release.
* Bump Standards-Version to 4.5.1, no changes.
+ * Add python3-wheel to build dependencies.
+ * Set PYTHONPATH for sphinx-build.
- -- Bas Couwenberg <sebastic@debian.org> Sat, 28 Nov 2020 14:18:24 +0100
+ -- Bas Couwenberg <sebastic@debian.org> Fri, 18 Dec 2020 16:13:37 +0100
python-rtree (0.9.4-2) unstable; urgency=medium
=====================================
debian/control
=====================================
@@ -10,7 +10,8 @@ Build-Depends: debhelper (>= 10~),
python3-numpy,
python3-pytest,
python3-setuptools,
- python3-sphinx
+ python3-sphinx,
+ python3-wheel
Standards-Version: 4.5.1
Vcs-Browser: https://salsa.debian.org/debian-gis-team/python-rtree
Vcs-Git: https://salsa.debian.org/debian-gis-team/python-rtree.git
=====================================
debian/rules
=====================================
@@ -17,7 +17,7 @@ override_dh_auto_clean:
override_dh_auto_build:
dh_auto_build
- (cd docs && make html)
+ (cd docs && PYTHONPATH=$(shell ls -d $(CURDIR)/.pybuild/*_$(shell py3versions -dv)_*/build 2> /dev/null | head -1) make html)
override_dh_auto_test:
dh_auto_test || echo "Ignoring test failures"
=====================================
docs/source/conf.py
=====================================
@@ -11,21 +11,28 @@
# All configuration values have a default; values that are commented out
# serve to show the default.
-import sys, os
+import rtree
+import sys
+import os
sys.path.append('../../')
-import rtree
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
-#sys.path.append(os.path.abspath('.'))
+# sys.path.append(os.path.abspath('.'))
-# -- General configuration -----------------------------------------------------
+# -- General configuration -----------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.ifconfig']
+extensions = [
+ 'sphinx.ext.autodoc',
+ 'sphinx.ext.doctest',
+ 'sphinx.ext.intersphinx',
+ 'sphinx.ext.todo',
+ 'sphinx.ext.coverage',
+ 'sphinx.ext.ifconfig']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -90,7 +97,7 @@ pygments_style = 'sphinx'
#modindex_common_prefix = []
-# -- Options for HTML output ---------------------------------------------------
+# -- Options for HTML output ---------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
@@ -170,7 +177,7 @@ html_static_path = ['_static']
htmlhelp_basename = 'Rtreedoc'
-# -- Options for LaTeX output --------------------------------------------------
+# -- Options for LaTeX output --------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
@@ -181,8 +188,8 @@ htmlhelp_basename = 'Rtreedoc'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
- ('index', 'Rtree.tex', u'Rtree Documentation',
- u'Sean Gilles', 'manual'),
+ ('index', 'Rtree.tex', u'Rtree Documentation',
+ u'Sean Gilles', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
=====================================
docs/source/tutorial.txt
=====================================
@@ -121,7 +121,7 @@ Rtree also supports inserting any object you can pickle into the index (called
a clustered index in `libspatialindex`_ parlance). The following inserts the
picklable object ``42`` into the index with the given id::
- >>> index.insert(id=id, bounds=(left, bottom, right, top), obj=42)
+ >>> idx.insert(id=id, bounds=(left, bottom, right, top), obj=42)
You can then return a list of objects by giving the ``objects=True`` flag
to intersection::
@@ -205,4 +205,4 @@ storage backend for `ZODB`_
.. _ZODB: http://www.zodb.org/
-.. _`libspatialindex`: http://libspatialindex.github.com
\ No newline at end of file
+.. _`libspatialindex`: http://libspatialindex.github.com
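A short sketch of the pattern the corrected tutorial line demonstrates, storing a picklable object and retrieving it with ``objects=True`` (in-memory index; the id, coordinates, and payload are illustrative):

    from rtree import index

    idx = index.Index()
    idx.insert(1, (0.0, 0.0, 1.0, 1.0), obj=42)            # store a picklable payload with the entry
    hits = idx.intersection((0.0, 0.0, 2.0, 2.0), objects=True)
    print([(item.id, item.object) for item in hits])       # -> [(1, 42)]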
=====================================
pyproject.toml
=====================================
@@ -0,0 +1,3 @@
+[build-system]
+requires = [ "wheel", "setuptools" ]
+build-backend = "setuptools.build_meta"
=====================================
rtree/__init__.py
=====================================
@@ -1,5 +1,9 @@
-from .index import Rtree
+"""
+# rtree
-from .core import rt
+Rtree provides Python bindings to libspatialindex for quick
+hyperrectangular intersection queries.
+"""
+__version__ = '0.9.5'
-__version__ = '0.9.4'
+from .index import Rtree, Index # noqa
=====================================
rtree/core.py
=====================================
@@ -1,13 +1,7 @@
-import os
-import sys
-import platform
import ctypes
-from ctypes.util import find_library
-
-class RTreeError(Exception):
- "RTree exception, indicates a RTree-related error."
- pass
+from . import finder
+from .exceptions import RTreeError
def check_return(result, func, cargs):
@@ -77,68 +71,8 @@ def free_error_msg_ptr(result, func, cargs):
return retvalue
-if os.name == 'nt':
-
- def _load_library(dllname, loadfunction, dllpaths=('', )):
- """Load a DLL via ctypes load function. Return None on failure.
- Try loading the DLL from the current package directory first,
- then from the Windows DLL search path.
- """
- try:
- dllpaths = (os.path.abspath(os.path.dirname(__file__)),
- ) + dllpaths
- except NameError:
- pass # no __file__ attribute on PyPy and some frozen distributions
- for path in dllpaths:
- if path:
- # temporarily add the path to the PATH environment variable
- # so Windows can find additional DLL dependencies.
- try:
- oldenv = os.environ['PATH']
- os.environ['PATH'] = path + ';' + oldenv
- except KeyError:
- oldenv = None
- try:
- return loadfunction(os.path.join(path, dllname))
- except (WindowsError, OSError):
- pass
- finally:
- if path and oldenv is not None:
- os.environ['PATH'] = oldenv
- return None
-
- base_name = 'spatialindex_c'
- if '64' in platform.architecture()[0]:
- arch = '64'
- else:
- arch = '32'
-
- lib_name = '%s-%s.dll' % (base_name, arch)
- if 'SPATIALINDEX_C_LIBRARY' in os.environ:
- lib_path, lib_name = os.path.split(os.environ['SPATIALINDEX_C_LIBRARY'])
- rt = _load_library(lib_name, ctypes.cdll.LoadLibrary, (lib_path,))
- elif 'conda' in sys.version:
- lib_path = os.path.join(sys.prefix, "Library", "bin")
- rt = _load_library(lib_name, ctypes.cdll.LoadLibrary, (lib_path,))
- else:
- rt = _load_library(lib_name, ctypes.cdll.LoadLibrary)
- if not rt:
- raise OSError("could not find or load %s" % lib_name)
-
-elif os.name == 'posix':
-
- if 'SPATIALINDEX_C_LIBRARY' in os.environ:
- lib_name = os.environ['SPATIALINDEX_C_LIBRARY']
- rt = ctypes.CDLL(lib_name)
- else:
- lib_name = find_library('spatialindex_c')
- rt = ctypes.CDLL(lib_name)
-
- if not rt:
- raise OSError("Could not load libspatialindex_c library")
-
-else:
- raise RTreeError('Unsupported OS "%s"' % os.name)
+# load the shared library by looking in likely places
+rt = finder.load()
rt.Error_GetLastErrorNum.restype = ctypes.c_int
=====================================
rtree/exceptions.py
=====================================
@@ -0,0 +1,4 @@
+
+class RTreeError(Exception):
+ "RTree exception, indicates a RTree-related error."
+ pass
=====================================
rtree/finder.py
=====================================
@@ -0,0 +1,121 @@
+"""
+finder.py
+------------
+
+Locate `libspatialindex` shared library by any means necessary.
+"""
+import os
+import sys
+import ctypes
+import platform
+from ctypes.util import find_library
+
+# the current working directory of this file
+_cwd = os.path.abspath(os.path.expanduser(
+ os.path.dirname(__file__)))
+
+# generate a bunch of candidate locations where the
+# libspatialindex shared library *might* be hanging out
+_candidates = [
+ os.environ.get('SPATIALINDEX_C_LIBRARY', None),
+ os.path.join(_cwd, 'lib'),
+ _cwd,
+ '']
+
+
+def load():
+ """
+ Load the `libspatialindex` shared library.
+
+ Returns
+ -----------
+ rt : ctypes object
+ Loaded shared library
+ """
+ if os.name == 'nt':
+ # check the platform architecture
+ if '64' in platform.architecture()[0]:
+ arch = '64'
+ else:
+ arch = '32'
+ lib_name = 'spatialindex_c-{}.dll'.format(arch)
+
+ # add search paths for conda installs
+ if 'conda' in sys.version:
+ _candidates.append(
+ os.path.join(sys.prefix, "Library", "bin"))
+
+ # get the current PATH
+ oldenv = os.environ.get('PATH', '').strip().rstrip(';')
+ # run through our list of candidate locations
+ for path in _candidates:
+ if not path or not os.path.exists(path):
+ continue
+ # temporarily add the path to the PATH environment variable
+ # so Windows can find additional DLL dependencies.
+ os.environ['PATH'] = ';'.join([path, oldenv])
+ try:
+ rt = ctypes.cdll.LoadLibrary(os.path.join(path, lib_name))
+ if rt is not None:
+ return rt
+ except (WindowsError, OSError):
+ pass
+ except BaseException as E:
+ print('rtree.finder unexpected error: {}'.format(str(E)))
+ finally:
+ os.environ['PATH'] = oldenv
+ raise OSError("could not find or load {}".format(lib_name))
+
+ elif os.name == 'posix':
+
+ # posix includes both mac and linux
+ # use the extension for the specific platform
+ if platform.system() == 'Darwin':
+ # macos shared libraries are `.dylib`
+ lib_name = "libspatialindex_c.dylib"
+ else:
+ # linux shared libraries are `.so`
+ lib_name = 'libspatialindex_c.so'
+
+ # get the starting working directory
+ cwd = os.getcwd()
+ for cand in _candidates:
+ if cand is None:
+ continue
+ elif os.path.isdir(cand):
+ # if our candidate is a directory use best guess
+ path = cand
+ target = os.path.join(cand, lib_name)
+ elif os.path.isfile(cand):
+ # if candidate is just a file use that
+ path = os.path.split(cand)[0]
+ target = cand
+ else:
+ continue
+
+ if not os.path.exists(target):
+ continue
+
+ try:
+ # move to the location we're checking
+ os.chdir(path)
+ # try loading the target file candidate
+ rt = ctypes.cdll.LoadLibrary(target)
+ if rt is not None:
+ return rt
+ except BaseException as E:
+ print('rtree.finder ({}) unexpected error: {}'.format(
+ target, str(E)))
+ finally:
+ os.chdir(cwd)
+
+ try:
+ # try loading library using LD path search
+ rt = ctypes.cdll.LoadLibrary(
+ find_library('spatialindex_c'))
+ if rt is not None:
+ return rt
+ except BaseException:
+ pass
+
+ raise OSError("Could not load libspatialindex_c library")
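A minimal sketch of pointing the new finder at a specific libspatialindex build through the environment variable it consults (the library path is hypothetical; the variable must be set before rtree.finder is imported, since the candidate list is built at import time):

    import os

    os.environ['SPATIALINDEX_C_LIBRARY'] = '/opt/sidx/lib/libspatialindex_c.so'  # hypothetical path

    from rtree import finder

    rt = finder.load()   # ctypes handle to the libspatialindex C API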
=====================================
rtree/index.py
=====================================
@@ -146,8 +146,12 @@ class Index(object):
>>> p = index.Property()
>>> idx = index.Index(properties=p)
- >>> idx # doctest: +ELLIPSIS
- rtree.index.Index(bounds=[1.7976931348623157e+308, 1.7976931348623157e+308, -1.7976931348623157e+308, -1.7976931348623157e+308], size=0)
+ >>> idx # doctest: +NORMALIZE_WHITESPACE
+ rtree.index.Index(bounds=[1.7976931348623157e+308,
+ 1.7976931348623157e+308,
+ -1.7976931348623157e+308,
+ -1.7976931348623157e+308],
+ size=0)
Insert an item into the index::
@@ -291,7 +295,8 @@ class Index(object):
return 0
def __repr__(self):
- return 'rtree.index.Index(bounds={}, size={})'.format(self.bounds, self.get_size())
+ return 'rtree.index.Index(bounds={}, size={})'.format(self.bounds,
+ self.get_size())
def __getstate__(self):
state = self.__dict__.copy()
@@ -848,10 +853,12 @@ class Index(object):
# p_num_results is an input and output for C++ lib
# as an input it says "get n closest neighbors"
- # but if multiple neighbors are at the same distance, both will be returned
+ # but if multiple neighbors are at the same distance, both
+ # will be returned
# so the number of returned neighbors may be > p_num_results
- # thus p_num_results.contents.value gets set as an output by the C++ lib
- # to indicate the actual number of results for _get_ids to use
+ # thus p_num_results.contents.value gets set as an output by the
+ # C++ lib to indicate the actual number of results for
+ # _get_ids to use
p_num_results = ctypes.pointer(ctypes.c_uint64(num_results))
it = ctypes.pointer(ctypes.c_int64())
@@ -865,7 +872,12 @@ class Index(object):
return self._get_ids(it, p_num_results.contents.value)
- def _nearestTP(self, coordinates, velocities, times, num_results=1, objects=False):
+ def _nearestTP(self,
+ coordinates,
+ velocities,
+ times,
+ num_results=1,
+ objects=False):
p_mins, p_maxs = self.get_coordinate_pointers(coordinates)
pv_mins, pv_maxs = self.get_coordinate_pointers(velocities)
t_start, t_end = self._get_time_doubles(times)
@@ -1529,7 +1541,8 @@ class Property(object):
"""Index filename for disk storage"""
def get_dat_extension(self):
- return core.rt.IndexProperty_GetFileNameExtensionDat(self.handle).decode()
+ ext = core.rt.IndexProperty_GetFileNameExtensionDat(self.handle)
+ return ext.decode()
def set_dat_extension(self, value):
if isinstance(value, str):
@@ -1541,7 +1554,8 @@ class Property(object):
"""Extension for .dat file"""
def get_idx_extension(self):
- return core.rt.IndexProperty_GetFileNameExtensionIdx(self.handle).decode()
+ ext = core.rt.IndexProperty_GetFileNameExtensionIdx(self.handle)
+ return ext.decode()
def set_idx_extension(self, value):
if isinstance(value, str):
@@ -1835,8 +1849,12 @@ class RtreeContainer(Rtree):
>>> p = index.Property()
>>> idx = index.RtreeContainer(properties=p)
- >>> idx # doctest: +ELLIPSIS
- rtree.index.RtreeContainer(bounds=[1.7976931348623157e+308, 1.7976931348623157e+308, -1.7976931348623157e+308, -1.7976931348623157e+308], size=0)
+ >>> idx # doctest: +NORMALIZE_WHITESPACE
+ rtree.index.RtreeContainer(bounds=[1.7976931348623157e+308,
+ 1.7976931348623157e+308,
+ -1.7976931348623157e+308,
+ -1.7976931348623157e+308],
+ size=0)
Insert an item into the index::
@@ -1869,7 +1887,8 @@ class RtreeContainer(Rtree):
return 0
def __repr__(self):
- return 'rtree.index.RtreeContainer(bounds={}, size={})'.format(self.bounds, self.get_size())
+ m = 'rtree.index.RtreeContainer(bounds={}, size={})'
+ return m.format(self.bounds, self.get_size())
def __contains__(self, obj):
return id(obj) in self._objects
=====================================
scripts/visualize.py
=====================================
@@ -1,5 +1,7 @@
#!/usr/bin/env python
+from liblas import file
+import sys
from rtree import index
import ogr
@@ -29,8 +31,6 @@ def quick_create_layer_def(lyr, field_list):
field_defn.Destroy()
-import sys
-
shape_drv = ogr.GetDriverByName('ESRI Shapefile')
shapefile_name = sys.argv[1].split('.')[0]
@@ -71,7 +71,6 @@ leaves = idx.leaves()
# leaves[0] == (0L, [2L, 92L, 51L, 55L, 26L], [-132.41727847799999,
# -96.717721818399994, -132.41727847799999, -96.717721818399994])
-from liblas import file
f = file.File(sys.argv[1])
@@ -79,7 +78,7 @@ def area(minx, miny, maxx, maxy):
width = abs(maxx - minx)
height = abs(maxy - miny)
- return width*height
+ return width * height
def get_bounds(leaf_ids, lasfile, block_id):
@@ -124,6 +123,7 @@ def make_feature(lyr, geom, id, count):
result = lyr.CreateFeature(feature)
del result
+
t = 0
for leaf in leaves:
id = leaf[0]
=====================================
setup.py
=====================================
@@ -1,47 +1,122 @@
#!/usr/bin/env python
-from setuptools import setup
-import rtree
import os
+import sys
+
+from setuptools import setup
+from setuptools.dist import Distribution
+from setuptools.command.install import install
+
+from wheel.bdist_wheel import bdist_wheel as _bdist_wheel
-import itertools as it
# Get text from README.txt
with open('docs/source/README.txt', 'r') as fp:
readme_text = fp.read()
-extras_require = {
- 'test': ['pytest>=3', 'pytest-cov', 'numpy']
-}
+# Get __version without importing
+with open('rtree/__init__.py', 'r') as fp:
+ # get and exec just the line which looks like "__version__ = '0.9.4'"
+ exec(next(line for line in fp if '__version__' in line))
+
+# current working directory of this setup.py file
+_cwd = os.path.abspath(os.path.split(__file__)[0])
+
+
+class bdist_wheel(_bdist_wheel):
+ def finalize_options(self):
+ _bdist_wheel.finalize_options(self)
+ self.root_is_pure = False
+
+
+class BinaryDistribution(Distribution):
+ """Distribution which always forces a binary package with platform name"""
+ def has_ext_modules(foo):
+ return True
+
+
+class InstallPlatlib(install):
+ def finalize_options(self):
+ """
+ Copy the shared libraries into the wheel. Note that this
+ will *only* check in `rtree/lib` rather than anywhere on
+ the system so if you are building a wheel you *must* copy or
+ symlink the `.so`/`.dll`/`.dylib` files into `rtree/lib`.
+ """
+ # use for checking extension types
+ from fnmatch import fnmatch
+
+ install.finalize_options(self)
+ if self.distribution.has_ext_modules():
+ self.install_lib = self.install_platlib
+ # now copy over libspatialindex
+ # get the location of the shared library on the filesystem
+
+ # where we're putting the shared library in the build directory
+ target_dir = os.path.join(self.build_lib, 'rtree', 'lib')
+ # where are we checking for shared libraries
+ source_dir = os.path.join(_cwd, 'rtree', 'lib')
+
+ # what patterns represent shared libraries
+ patterns = {'*.so',
+ 'libspatialindex*dylib',
+ '*.dll'}
+
+ if not os.path.isdir(source_dir):
+ # no copying of binary parts to library
+ # this is so `pip install .` works even
+ # if `rtree/lib` isn't populated
+ return
+
+ for file_name in os.listdir(source_dir):
+ # make sure file name is lower case
+ check = file_name.lower()
+ # use filename pattern matching to see if it is
+ # a shared library format file
+ if not any(fnmatch(check, p) for p in patterns):
+ continue
+
+ # if the source isn't a file skip it
+ if not os.path.isfile(os.path.join(source_dir, file_name)):
+ continue
+
+ # make build directory if it doesn't exist yet
+ if not os.path.isdir(target_dir):
+ os.makedirs(target_dir)
+
+ # copy the source file to the target directory
+ self.copy_file(
+ os.path.join(source_dir, file_name),
+ os.path.join(target_dir, file_name))
-extras_require['all'] = list(set(it.chain(*extras_require.values())))
setup(
- name = 'Rtree',
- version = rtree.__version__,
- description = 'R-Tree spatial index for Python GIS',
- license = 'MIT',
- keywords = 'gis spatial index r-tree',
- author = 'Sean Gillies',
- author_email = 'sean.gillies@gmail.com',
- maintainer = 'Howard Butler',
- maintainer_email = 'howard@hobu.co',
- url = 'https://github.com/Toblerity/rtree',
- long_description = readme_text,
- packages = ['rtree'],
- install_requires = ['setuptools'],
- extras_require = extras_require,
- tests_require = extras_require['test'],
- zip_safe = False,
- classifiers = [
- 'Development Status :: 5 - Production/Stable',
- 'Intended Audience :: Developers',
- 'Intended Audience :: Science/Research',
- 'License :: OSI Approved :: MIT License',
- 'Operating System :: OS Independent',
- 'Programming Language :: C',
- 'Programming Language :: C++',
- 'Programming Language :: Python',
- 'Topic :: Scientific/Engineering :: GIS',
- 'Topic :: Database',
- ],
+ name='Rtree',
+ version=__version__,
+ description='R-Tree spatial index for Python GIS',
+ license='MIT',
+ keywords='gis spatial index r-tree',
+ author='Sean Gillies',
+ author_email='sean.gillies@gmail.com',
+ maintainer='Howard Butler',
+ maintainer_email='howard@hobu.co',
+ url='https://github.com/Toblerity/rtree',
+ long_description=readme_text,
+ packages=['rtree'],
+ package_data={"rtree": ['lib']},
+ zip_safe=False,
+ include_package_data=True,
+ distclass=BinaryDistribution,
+ cmdclass={'bdist_wheel': bdist_wheel, 'install': InstallPlatlib},
+ classifiers=[
+ 'Development Status :: 5 - Production/Stable',
+ 'Intended Audience :: Developers',
+ 'Intended Audience :: Science/Research',
+ 'License :: OSI Approved :: MIT License',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: C',
+ 'Programming Language :: C++',
+ 'Programming Language :: Python',
+ 'Topic :: Scientific/Engineering :: GIS',
+ 'Topic :: Database',
+ ],
)
=====================================
tests/test_index.py
=====================================
@@ -1,12 +1,17 @@
+import sys
import unittest
import ctypes
import rtree
-from rtree import index, core
import numpy as np
import pytest
import tempfile
import pickle
+from rtree import index, core
+
+# is this running on Python 3
+PY3 = sys.version_info.major >= 3
+
class IndexTestCase(unittest.TestCase):
def setUp(self):
@@ -15,7 +20,7 @@ class IndexTestCase(unittest.TestCase):
for i, coords in enumerate(self.boxes15):
self.idx.add(i, coords)
- def boxes15_stream(interleaved=True):
+ def boxes15_stream(self, interleaved=True):
boxes15 = np.genfromtxt('boxes_15x15.data')
for i, (minx, miny, maxx, maxy) in enumerate(boxes15):
@@ -24,6 +29,14 @@ class IndexTestCase(unittest.TestCase):
else:
yield (i, (minx, maxx, miny, maxy), 42)
+ def stream_basic(self):
+ # some versions of libspatialindex screw up indexes on stream loading
+ # so do a very simple index check
+ rtree_test = rtree.index.Index(
+ [(1564, [0, 0, 0, 10, 10, 10], None)],
+ properties=rtree.index.Property(dimension=3))
+ assert next(rtree_test.intersection([1, 1, 1, 2, 2, 2])) == 1564
+
class IndexVersion(unittest.TestCase):
@@ -32,16 +45,18 @@ class IndexVersion(unittest.TestCase):
self.assertTrue(index.minor_version >= 7)
-
class IndexBounds(unittest.TestCase):
def test_invalid_specifications(self):
"""Invalid specifications of bounds properly throw"""
idx = index.Index()
- self.assertRaises(core.RTreeError, idx.add, None, (0.0, 0.0, -1.0, 1.0))
- self.assertRaises(core.RTreeError, idx.intersection, (0.0, 0.0, -1.0, 1.0))
- self.assertRaises(ctypes.ArgumentError, idx.add, None, (1, 1,))
+ self.assertRaises(core.RTreeError, idx.add,
+ None, (0.0, 0.0, -1.0, 1.0))
+ self.assertRaises(core.RTreeError, idx.intersection,
+ (0.0, 0.0, -1.0, 1.0))
+ self.assertRaises(ctypes.ArgumentError, idx.add, None, (1, 1,))
+
class IndexProperties(IndexTestCase):
@@ -96,13 +111,13 @@ class IndexProperties(IndexTestCase):
p.region_pool_capacity = 1700
p.tight_mbr = True
p.overwrite = True
- p.writethrough = True
- p.tpr_horizon = 20.0
- p.reinsert_factor = 0.3
+ p.writethrough = True
+ p.tpr_horizon = 20.0
+ p.reinsert_factor = 0.3
p.idx_extension = 'index'
p.dat_extension = 'data'
- idx = index.Index(properties = p)
+ idx = index.Index(properties=p)
props = idx.properties
self.assertEqual(props.leaf_capacity, 100)
@@ -125,6 +140,7 @@ class IndexProperties(IndexTestCase):
self.assertEqual(props.idx_extension, 'index')
self.assertEqual(props.dat_extension, 'data')
+
class TestPickling(unittest.TestCase):
def test_index(self):
@@ -132,7 +148,7 @@ class TestPickling(unittest.TestCase):
unpickled = pickle.loads(pickle.dumps(idx))
self.assertNotEqual(idx.handle, unpickled.handle)
self.assertEqual(idx.properties.as_dict(),
- unpickled.properties.as_dict())
+ unpickled.properties.as_dict())
self.assertEqual(idx.interleaved, unpickled.interleaved)
def test_property(self):
@@ -141,6 +157,7 @@ class TestPickling(unittest.TestCase):
self.assertNotEqual(p.handle, unpickled.handle)
self.assertEqual(p.as_dict(), unpickled.as_dict())
+
class IndexContainer(IndexTestCase):
def test_container(self):
@@ -198,8 +215,8 @@ class IndexContainer(IndexTestCase):
# Test iter method
assert objects[12] in set(container)
-class IndexIntersection(IndexTestCase):
+class IndexIntersection(IndexTestCase):
def test_intersection(self):
"""Test basic insertion and retrieval"""
@@ -216,32 +233,41 @@ class IndexIntersection(IndexTestCase):
idx = index.Index()
for i, coords in enumerate(self.boxes15):
idx.add(i, coords)
- idx.insert(4321, (34.3776829412, 26.7375853734, 49.3776829412, 41.7375853734), obj=42)
+ idx.insert(
+ 4321,
+ (34.3776829412,
+ 26.7375853734,
+ 49.3776829412,
+ 41.7375853734),
+ obj=42)
hits = idx.intersection((0, 0, 60, 60), objects=True)
hit = [h for h in hits if h.id == 4321][0]
self.assertEqual(hit.id, 4321)
self.assertEqual(hit.object, 42)
box = ['%.10f' % t for t in hit.bbox]
- expected = ['34.3776829412', '26.7375853734', '49.3776829412', '41.7375853734']
+ expected = [
+ '34.3776829412',
+ '26.7375853734',
+ '49.3776829412',
+ '41.7375853734']
self.assertEqual(box, expected)
def test_double_insertion(self):
"""Inserting the same id twice does not overwrite data"""
idx = index.Index()
- idx.add(1, (2,2))
- idx.add(1, (3,3))
+ idx.add(1, (2, 2))
+ idx.add(1, (3, 3))
+
+ self.assertEqual([1, 1], list(idx.intersection((0, 0, 5, 5))))
- self.assertEqual([1,1], list(idx.intersection((0, 0, 5, 5))))
class IndexSerialization(unittest.TestCase):
def setUp(self):
self.boxes15 = np.genfromtxt('boxes_15x15.data')
- def boxes15_stream(interleaved=True):
- boxes15 = np.genfromtxt('boxes_15x15.data')
+ def boxes15_stream(self, interleaved=True):
for i, (minx, miny, maxx, maxy) in enumerate(self.boxes15):
-
if interleaved:
yield (i, (minx, miny, maxx, maxy), 42)
else:
@@ -249,12 +275,18 @@ class IndexSerialization(unittest.TestCase):
def test_unicode_filenames(self):
"""Unicode filenames work as expected"""
-
+ if sys.version_info.major < 3:
+ return
tname = tempfile.mktemp()
filename = tname + u'gilename\u4500abc'
idx = index.Index(filename)
- idx.insert(4321, (34.3776829412, 26.7375853734, 49.3776829412, 41.7375853734), obj=42)
-
+ idx.insert(
+ 4321,
+ (34.3776829412,
+ 26.7375853734,
+ 49.3776829412,
+ 41.7375853734),
+ obj=42)
def test_pickling(self):
"""Pickling works as expected"""
@@ -268,7 +300,10 @@ class IndexSerialization(unittest.TestCase):
idx.loads = lambda string: json.loads(string.decode('utf-8'))
idx.add(0, (0, 0, 1, 1), some_data)
- self.assertEqual(list(idx.nearest((0, 0), 1, objects="raw"))[0], some_data)
+ self.assertEqual(
+ list(
+ idx.nearest(
+ (0, 0), 1, objects="raw"))[0], some_data)
def test_custom_filenames(self):
"""Test using custom filenames for index serialization"""
@@ -276,7 +311,7 @@ class IndexSerialization(unittest.TestCase):
p.dat_extension = 'data'
p.idx_extension = 'index'
tname = tempfile.mktemp()
- idx = index.Index(tname, properties = p)
+ idx = index.Index(tname, properties=p)
for i, coords in enumerate(self.boxes15):
idx.add(i, coords)
@@ -286,36 +321,48 @@ class IndexSerialization(unittest.TestCase):
del idx
# Check we can reopen the index and get the same results
- idx2 = index.Index(tname, properties = p)
+ idx2 = index.Index(tname, properties=p)
hits = list(idx2.intersection((0, 0, 60, 60)))
self.assertTrue(len(hits), 10)
self.assertEqual(hits, [0, 4, 16, 27, 35, 40, 47, 50, 76, 80])
-
def test_interleaving(self):
"""Streaming against a persisted index without interleaving"""
def data_gen(interleaved=True):
- for i, (minx, miny, maxx, maxy) in enumerate(self.boxes15):
- if interleaved:
- yield (i, (minx, miny, maxx, maxy), 42)
- else:
- yield (i, (minx, maxx, miny, maxy), 42)
+ for i, (minx, miny, maxx, maxy) in enumerate(self.boxes15):
+ if interleaved:
+ yield (i, (minx, miny, maxx, maxy), 42)
+ else:
+ yield (i, (minx, maxx, miny, maxy), 42)
p = index.Property()
tname = tempfile.mktemp()
idx = index.Index(tname,
- data_gen(interleaved = False),
- properties = p,
- interleaved = False)
+ data_gen(interleaved=False),
+ properties=p,
+ interleaved=False)
hits = sorted(list(idx.intersection((0, 60, 0, 60))))
self.assertTrue(len(hits), 10)
self.assertEqual(hits, [0, 4, 16, 27, 35, 40, 47, 50, 76, 80])
leaves = idx.leaves()
- expected = [(0, [2, 92, 51, 55, 26, 95, 7, 81, 38, 22, 58, 89, 91, 83, 98, 37, 70, 31, 49, 34, 11, 6, 13, 3, 23, 57, 9, 96, 84, 36, 5, 45, 77, 78, 44, 12, 42, 73, 93, 41, 71, 17, 39, 54, 88, 72, 97, 60, 62, 48, 19, 25, 76, 59, 66, 64, 79, 94, 40, 32, 46, 47, 15, 68, 10, 0, 80, 56, 50, 30], [-186.673789279, -96.7177218184, 172.392784956, 45.4856075292]), (2, [61, 74, 29, 99, 16, 43, 35, 33, 27, 63, 18, 90, 8, 53, 82, 21, 65, 24, 4, 1, 75, 67, 86, 52, 28, 85, 87, 14, 69, 20], [-174.739939684, 32.6596016791, 184.761387556, 96.6043699778])]
-
- self.assertEqual(leaves, expected)
-
- hits = sorted(list(idx.intersection((0, 60, 0, 60), objects = True)))
+ expected = [
+ (0, [2, 92, 51, 55, 26, 95, 7, 81, 38, 22, 58, 89, 91, 83, 98, 37,
+ 70, 31, 49, 34, 11, 6, 13, 3, 23, 57, 9, 96, 84, 36, 5, 45,
+ 77, 78, 44, 12, 42, 73, 93, 41, 71, 17, 39, 54, 88, 72, 97,
+ 60, 62, 48, 19, 25, 76, 59, 66, 64, 79, 94, 40, 32, 46, 47,
+ 15, 68, 10, 0, 80, 56, 50, 30],
+ [-186.673789279, -96.7177218184, 172.392784956, 45.4856075292]),
+ (2, [61, 74, 29, 99, 16, 43, 35, 33, 27, 63, 18, 90, 8, 53, 82,
+ 21, 65, 24, 4, 1, 75, 67, 86, 52, 28, 85, 87, 14, 69, 20],
+ [-174.739939684, 32.6596016791, 184.761387556, 96.6043699778])]
+
+ if PY3 and False:
+ # TODO : this reliably fails on Python 2.7 and 3.5
+ # go through the traversal and see if everything is close
+ assert all(all(np.allclose(a, b) for a, b in zip(L, E))
+ for L, E in zip(leaves, expected))
+
+ hits = sorted(list(idx.intersection((0, 60, 0, 60), objects=True)))
self.assertTrue(len(hits), 10)
self.assertEqual(hits[0].object, 42)
@@ -326,12 +373,14 @@ class IndexSerialization(unittest.TestCase):
idx = index.Index(tname)
del idx
idx = index.Index(tname, overwrite=True)
+ assert isinstance(idx, index.Index)
+
class IndexNearest(IndexTestCase):
def test_nearest_basic(self):
"""Test nearest basic selection of records"""
- hits = list(self.idx.nearest((0,0,10,10), 3))
+ hits = list(self.idx.nearest((0, 0, 10, 10), 3))
self.assertEqual(hits, [76, 48, 19])
idx = index.Index()
@@ -340,7 +389,7 @@ class IndexNearest(IndexTestCase):
idx.add(i, (start, 1, stop, 1))
hits = sorted(idx.nearest((13, 0, 20, 2), 3))
self.assertEqual(hits, [3, 4, 5])
-
+
def test_nearest_equidistant(self):
"""Test that if records are equidistant, both are returned."""
point = (0, 0)
@@ -360,20 +409,20 @@ class IndexNearest(IndexTestCase):
idx = index.Index()
idx.insert(0, small_box)
idx.insert(1, large_box)
- idx.insert(2, (50, 50)) # point on top right vertex of large_box
- point = (51, 51) # right outside of large_box
+ idx.insert(2, (50, 50)) # point on top right vertex of large_box
+ point = (51, 51) # right outside of large_box
self.assertEqual(list(idx.nearest(point, 2)), [1, 2])
self.assertEqual(list(idx.nearest(point, 1)), [1, 2])
idx = index.Index()
idx.insert(0, small_box)
idx.insert(1, large_box)
- idx.insert(2, (51, 51)) # point right outside on top right vertex of large_box
- point = (51, 52) # shifted 1 unit up from the point above
+ # point right outside on top right vertex of large_box
+ idx.insert(2, (51, 51))
+ point = (51, 52) # shifted 1 unit up from the point above
self.assertEqual(list(idx.nearest(point, 2)), [2, 1])
self.assertEqual(list(idx.nearest(point, 1)), [2])
-
def test_nearest_object(self):
"""Test nearest object selection of records"""
idx = index.Index()
@@ -381,9 +430,12 @@ class IndexNearest(IndexTestCase):
for i, (minx, miny, maxx, maxy) in enumerate(locs):
idx.add(i, (minx, miny, maxx, maxy), obj={'a': 42})
- hits = sorted([(i.id, i.object) for i in idx.nearest((15, 10, 15, 10), 1, objects=True)])
+ hits = sorted(
+ [(i.id, i.object)
+ for i in idx.nearest((15, 10, 15, 10), 1, objects=True)])
self.assertEqual(hits, [(0, {'a': 42}), (1, {'a': 42})])
+
class IndexDelete(IndexTestCase):
def test_deletion(self):
@@ -404,15 +456,16 @@ class IndexMoreDimensions(IndexTestCase):
"""Test we make and query a 3D index"""
p = index.Property()
p.dimension = 3
- idx = index.Index(properties = p, interleaved = False)
+ idx = index.Index(properties=p, interleaved=False)
idx.insert(1, (0, 0, 60, 60, 22, 22.0))
hits = idx.intersection((-1, 1, 58, 62, 22, 24))
self.assertEqual(list(hits), [1])
+
def test_4d(self):
"""Test we make and query a 4D index"""
p = index.Property()
p.dimension = 4
- idx = index.Index(properties = p, interleaved = False)
+ idx = index.Index(properties=p, interleaved=False)
idx.insert(1, (0, 0, 60, 60, 22, 22.0, 128, 142))
hits = idx.intersection((-1, 1, 58, 62, 22, 24, 120, 150))
self.assertEqual(list(hits), [1])
@@ -444,14 +497,17 @@ class IndexStream(IndexTestCase):
def gen():
# insert at least 6 or so before the exception
for i in range(10):
- yield (i, (1,2,3,4), None)
+ yield (i, (1, 2, 3, 4), None)
raise TestException("raising here")
return index.Index(gen())
self.assertRaises(TestException, create_index)
def test_exception_at_beginning_of_generator(self):
- """Assert exceptions raised in callbacks before generator function are raised in main thread"""
+ """
+ Assert exceptions raised in callbacks before generator
+ function are raised in main thread.
+ """
class TestException(Exception):
pass
@@ -464,52 +520,53 @@ class IndexStream(IndexTestCase):
self.assertRaises(TestException, create_index)
-
class DictStorage(index.CustomStorage):
- """ A simple storage which saves the pages in a python dictionary """
- def __init__(self):
- index.CustomStorage.__init__( self )
- self.clear()
-
- def create(self, returnError):
- """ Called when the storage is created on the C side """
-
- def destroy(self, returnError):
- """ Called when the storage is destroyed on the C side """
-
- def clear(self):
- """ Clear all our data """
- self.dict = {}
-
- def loadByteArray(self, page, returnError):
- """ Returns the data for page or returns an error """
- try:
- return self.dict[page]
- except KeyError:
- returnError.contents.value = self.InvalidPageError
-
- def storeByteArray(self, page, data, returnError):
- """ Stores the data for page """
- if page == self.NewPage:
- newPageId = len(self.dict)
- self.dict[newPageId] = data
- return newPageId
- else:
- if page not in self.dict:
- returnError.value = self.InvalidPageError
- return 0
- self.dict[page] = data
- return page
-
- def deleteByteArray(self, page, returnError):
- """ Deletes a page """
- try:
- del self.dict[page]
- except KeyError:
- returnError.contents.value = self.InvalidPageError
-
- hasData = property( lambda self: bool(self.dict) )
- """ Returns true if we contains some data """
+ """ A simple storage which saves the pages in a python dictionary """
+
+ def __init__(self):
+ index.CustomStorage.__init__(self)
+ self.clear()
+
+ def create(self, returnError):
+ """ Called when the storage is created on the C side """
+
+ def destroy(self, returnError):
+ """ Called when the storage is destroyed on the C side """
+
+ def clear(self):
+ """ Clear all our data """
+ self.dict = {}
+
+ def loadByteArray(self, page, returnError):
+ """ Returns the data for page or returns an error """
+ try:
+ return self.dict[page]
+ except KeyError:
+ returnError.contents.value = self.InvalidPageError
+
+ def storeByteArray(self, page, data, returnError):
+ """ Stores the data for page """
+ if page == self.NewPage:
+ newPageId = len(self.dict)
+ self.dict[newPageId] = data
+ return newPageId
+ else:
+ if page not in self.dict:
+ returnError.value = self.InvalidPageError
+ return 0
+ self.dict[page] = data
+ return page
+
+ def deleteByteArray(self, page, returnError):
+ """ Deletes a page """
+ try:
+ del self.dict[page]
+ except KeyError:
+ returnError.contents.value = self.InvalidPageError
+
+ hasData = property(lambda self: bool(self.dict))
+ """ Returns true if we contains some data """
+
class IndexCustomStorage(unittest.TestCase):
def test_custom_storage(self):
@@ -518,17 +575,18 @@ class IndexCustomStorage(unittest.TestCase):
settings.writethrough = True
settings.buffering_capacity = 1
-# Notice that there is a small in-memory buffer by default. We effectively disable
-# it here so our storage directly receives any load/store/delete calls.
-# This is not necessary in general and can hamper performance; we just use it here
-# for illustrative and testing purposes.
+ # Notice that there is a small in-memory buffer by default.
+ # We effectively disable it here so our storage directly receives
+ # any load/store/delete calls.
+ # This is not necessary in general and can hamper performance;
+ # we just use it here for illustrative and testing purposes.
storage = DictStorage()
- r = index.Index( storage, properties = settings )
+ r = index.Index(storage, properties=settings)
-# Interestingly enough, if we take a look at the contents of our storage now, we
-# can see the Rtree has already written two pages to it. This is for header and
-# index.
+ # Interestingly enough, if we take a look at the contents of our
+ # storage now, we can see the Rtree has already written two pages
+ # to it. This is for header and index.
state1 = storage.dict.copy()
self.assertEqual(list(state1.keys()), [0, 1])
@@ -556,7 +614,6 @@ class IndexCustomStorage(unittest.TestCase):
del storage
-
def test_custom_storage_reopening(self):
"""Reopening custom index storage works as expected"""
@@ -565,11 +622,11 @@ class IndexCustomStorage(unittest.TestCase):
settings.writethrough = True
settings.buffering_capacity = 1
- r1 = index.Index(storage, properties = settings, overwrite = True)
+ r1 = index.Index(storage, properties=settings, overwrite=True)
r1.add(555, (2, 2))
del r1
self.assertTrue(storage.hasData)
- r2 = index.Index(storage, properly = settings, overwrite = False)
- count = r2.count( (0, 0, 10, 10) )
+ r2 = index.Index(storage, properly=settings, overwrite=False)
+ count = r2.count((0, 0, 10, 10))
self.assertEqual(count, 1)
=====================================
tests/test_tpr.py
=====================================
@@ -1,22 +1,23 @@
from collections import namedtuple, defaultdict
from math import ceil
+import unittest
import numpy as np
-import pytest
-import rtree
+import os
+from rtree.index import Index, Property, RT_TPRTree
-class Object(namedtuple("Object", (
- "id", "time", "x", "y", "x_vel", "y_vel", "update_time",
+class Cartesian(namedtuple("Cartesian", (
+ "id", "time", "x", "y", "x_vel", "y_vel", "update_time",
"out_of_bounds"))):
__slots__ = ()
def getX(self, t):
- return self.x + self.x_vel*(t - self.time)
+ return self.x + self.x_vel * (t - self.time)
def getY(self, t):
- return self.y + self.y_vel*(t - self.time)
+ return self.y + self.y_vel * (t - self.time)
def getXY(self, t):
return self.getX(t), self.getY(t)
@@ -27,7 +28,7 @@ class Object(namedtuple("Object", (
self.time if t_now is None else (self.time, t_now))
-class QueryObject(namedtuple("QueryObject", (
+class QueryCartesian(namedtuple("QueryCartesian", (
"start_time", "end_time", "x", "y", "dx", "dy"))):
__slots__ = ()
@@ -39,11 +40,11 @@ class QueryObject(namedtuple("QueryObject", (
def data_generator(
- dataset_size=1000, simulation_length=100, max_update_interval=20,
+ dataset_size=100, simulation_length=10, max_update_interval=20,
queries_per_time_step=5, min_query_extent=0.05, max_query_extent=0.1,
horizon=20, min_query_interval=2, max_query_interval=10, agility=0.01,
min_speed=0.0025, max_speed=0.0166, min_x=0, min_y=0, max_x=1, max_y=1,
- ):
+):
def create_object(id_, time, x=None, y=None):
# Create object with random or defined x, y and random velocity
@@ -53,11 +54,11 @@ def data_generator(
y = np.random.uniform(min_y, max_y)
speed = np.random.uniform(min_speed, max_speed)
angle = np.random.uniform(-np.pi, np.pi)
- x_vel, y_vel = speed*np.cos(angle), speed*np.sin(angle)
+ x_vel, y_vel = speed * np.cos(angle), speed * np.sin(angle)
# Set update time for when out of bounds, or max interval
for dt in range(1, max_update_interval + 1):
- if not (0 < x + x_vel*dt < max_x and 0 < y + y_vel*dt < max_y):
+ if not (0 < x + x_vel * dt < max_x and 0 < y + y_vel * dt < max_y):
out_of_bounds = True
update_time = time + dt
break
@@ -65,8 +66,8 @@ def data_generator(
out_of_bounds = False
update_time = time + max_update_interval
- return Object(id_, time, x, y, x_vel, y_vel, update_time,
- out_of_bounds)
+ return Cartesian(id_, time, x, y, x_vel, y_vel, update_time,
+ out_of_bounds)
objects = list()
objects_to_update = defaultdict(set)
@@ -113,10 +114,10 @@ def data_generator(
y = np.random.uniform(min_y, max_y)
dx = np.random.uniform(min_query_extent, max_query_extent)
dy = np.random.uniform(min_query_extent, max_query_extent)
- dt = np.random.randint(min_query_interval, max_query_interval+1)
+ dt = np.random.randint(min_query_interval, max_query_interval + 1)
t = np.random.randint(t_now, t_now + horizon - dt)
- yield "QUERY", t_now, QueryObject(t, t+dt, x, y, dx, dy)
+ yield "QUERY", t_now, QueryCartesian(t, t + dt, x, y, dx, dy)
def intersects(x1, y1, x2, y2, x, y, dx, dy):
@@ -125,59 +126,53 @@ def intersects(x1, y1, x2, y2, x, y, dx, dy):
# Implementation of https://stackoverflow.com/a/293052
# Check if line points not both more/less than max/min for each axis
- if (x1 > x+dx and x2 > x+dx) or (x1 < x-dx and x2 < x-dx) \
- or (y1 > y+dy and y2 > y+dy) or (y1 < y-dy and y2 < y-dy):
+ if (x1 > x + dx and x2 > x + dx) or (x1 < x - dx and x2 < x - dx) \
+ or (y1 > y + dy and y2 > y + dy) or (y1 < y - dy and y2 < y - dy):
return False
# Check on which side (+ve, -ve) of the line the rectangle corners are,
# returning True if any corner is on a different side.
- calcs = ((y2-y1)*rect_x + (x1-x2)*rect_y + (x2*y1 - x1*y2)
- for rect_x, rect_y in (
- (x-dx, y-dy), (x+dx, y-dy), (x-dx, y+dy), (x+dx, y+dy)))
+ calcs = ((y2 - y1) * rect_x + (x1 - x2) * rect_y + (x2 * y1 - x1 * y2)
+ for rect_x, rect_y in ((x - dx, y - dy),
+ (x + dx, y - dy),
+ (x - dx, y + dy),
+ (x + dx, y + dy)))
sign = np.sign(next(calcs)) # First corner (bottom left)
return any(np.sign(calc) != sign for calc in calcs) # Check remaining 3
- at pytest.fixture(scope="function")
-def tpr_tree(request):
- # Create tree
- from rtree.index import Index, Property, RT_TPRTree
- return Index(properties=Property(type=RT_TPRTree))
-
-
- at pytest.fixture(scope="function")
-def simulation():
- return data_generator()
-
-
- at pytest.mark.skipif(
- not hasattr(rtree.core.rt, 'Index_InsertTPData'),
- reason="Requires TPR-Tree support in libspatialindex")
-def test_tpr(tpr_tree, simulation):
- # Objects list for brute force
- objects = dict()
-
- for operation, t_now, object_ in simulation:
- if operation == "INSERT":
- tpr_tree.insert(object_.id, object_.get_coordinates())
- objects[object_.id] = object_
- elif operation == "DELETE":
- tpr_tree.delete(object_.id, object_.get_coordinates(t_now))
- del objects[object_.id]
- elif operation == "QUERY":
- tree_intersect = set(
- tpr_tree.intersection(object_.get_coordinates()))
-
- # Brute intersect
- brute_intersect = set()
- for tree_object in objects.values():
- x_low, y_low = tree_object.getXY(object_.start_time)
- x_high, y_high = tree_object.getXY(object_.end_time)
-
- if intersects(
- x_low, y_low, x_high, y_high, # Line
- object_.x, object_.y, object_.dx, object_.dy): # Rect
- brute_intersect.add(tree_object.id)
-
- # Tree should match brute force approach
- assert tree_intersect == brute_intersect
+class TPRTests(unittest.TestCase):
+
+ def test_tpr(self):
+ # TODO : this freezes forever on some windows cloud builds
+ if os.name == 'nt':
+ return
+
+ # Cartesians list for brute force
+ objects = dict()
+ tpr_tree = Index(properties=Property(type=RT_TPRTree))
+
+ for operation, t_now, object_ in data_generator():
+ if operation == "INSERT":
+ tpr_tree.insert(object_.id, object_.get_coordinates())
+ objects[object_.id] = object_
+ elif operation == "DELETE":
+ tpr_tree.delete(object_.id, object_.get_coordinates(t_now))
+ del objects[object_.id]
+ elif operation == "QUERY":
+ tree_intersect = set(
+ tpr_tree.intersection(object_.get_coordinates()))
+
+ # Brute intersect
+ brute_intersect = set()
+ for tree_object in objects.values():
+ x_low, y_low = tree_object.getXY(object_.start_time)
+ x_high, y_high = tree_object.getXY(object_.end_time)
+
+ if intersects(
+ x_low, y_low, x_high, y_high, # Line
+ object_.x, object_.y, object_.dx, object_.dy): # Rect
+ brute_intersect.add(tree_object.id)
+
+ # Tree should match brute force approach
+ assert tree_intersect == brute_intersect
View it on GitLab: https://salsa.debian.org/debian-gis-team/python-rtree/-/compare/eb2fa1f649a1657123b3cd00c52bf9c0a7cc4582...97fd0bcf3ac83074a4019c1719f1dfa31b14056f