[Git][debian-gis-team/sarsen][upstream] New upstream version 0.9.5+ds

Antonio Valentino (@antonio.valentino) gitlab@salsa.debian.org
Sun Sep 28 11:23:56 BST 2025



Antonio Valentino pushed to branch upstream at Debian GIS Project / sarsen


Commits:
103c4f73 by Antonio Valentino at 2025-09-28T09:40:47+00:00
New upstream version 0.9.5+ds
- - - - -


28 changed files:

- .cruft.json
- .github/workflows/on-push.yml
- .gitignore
- .pre-commit-config-cruft.yaml
- .pre-commit-config.yaml
- + .python-version
- Makefile
- ci/environment-integration.yml
- docs/REQUIREMENTS.md
- docs/conf.py
- docs/index.md
- environment.yml
- pyproject.toml
- sarsen/__main__.py
- sarsen/apps.py
- sarsen/chunking.py
- sarsen/datamodel.py
- sarsen/geocoding.py
- sarsen/orbit.py
- sarsen/radiometry.py
- sarsen/scene.py
- sarsen/sentinel1.py
- tests/test_10_orbit.py
- tests/test_10_scene.py
- tests/test_20_geocoding.py
- tests/test_50_apps.py
- tests/test_60_main.py
- + uv.lock


Changes:

=====================================
.cruft.json
=====================================
@@ -1,6 +1,6 @@
 {
   "template": "https://github.com/ecmwf-projects/cookiecutter-conda-package",
-  "commit": "d379e35af1aa17d816367bcb0942fcf3e238be9d",
+  "commit": "55a2762621d850770549b944eb594d2164c31435",
   "checkout": null,
   "context": {
     "cookiecutter": {
@@ -11,6 +11,7 @@
       "copyright_year": "2016",
       "mypy_strict": true,
       "integration_tests": true,
+      "pypi": true,
       "_template": "https://github.com/ecmwf-projects/cookiecutter-conda-package"
     }
   },


=====================================
.github/workflows/on-push.yml
=====================================
@@ -55,7 +55,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ['3.12']
+        python-version: ['3.13']
 
     steps:
    - uses: actions/checkout@v4
@@ -66,7 +66,7 @@ jobs:
     - name: Get current date
       id: date
       run: echo "date=$(date +%Y-%m-%d)" >> "${GITHUB_OUTPUT}"
-    - uses: mamba-org/setup-micromamba@v1
+    - uses: mamba-org/setup-micromamba@v2
       with:
         environment-file: ci/combined-environment-ci.yml
         environment-name: DEVELOP
@@ -88,28 +88,22 @@ jobs:
 
     steps:
    - uses: actions/checkout@v4
-    - uses: actions/download-artifact@v4
+
+    - name: Install uv
+      uses: astral-sh/setup-uv@v5
       with:
-        name: combined-environments
-        path: ci
-    - name: Get current date
-      id: date
-      run: echo "date=$(date +%Y-%m-%d)" >> "${GITHUB_OUTPUT}"
-    - uses: mamba-org/setup-micromamba@v1
+        enable-cache: true
+        cache-dependency-glob: "uv.lock"
+
+    - name: "Set up Python"
+      uses: actions/setup-python@v5
       with:
-        environment-file: ci/combined-environment-ci.yml
-        environment-name: DEVELOP
-        cache-environment: true
-        cache-environment-key: environment-${{ steps.date.outputs.date }}
-        cache-downloads-key: downloads-${{ steps.date.outputs.date }}
-        create-args: >-
-          python=3.11
-    - name: Install package
-      run: |
-        python -m pip install --no-deps -e .
-    - name: Run code quality checks
-      run: |
-        make type-check
+        python-version-file: ".python-version"
+    - name: Install the project
+      run: uv sync --locked --all-extras --dev
+    - name: Run tests
+      # For example, using `pytest`
+      run: uv run mypy .
 
   docs-build:
     needs: [combine-environments, unit-tests]
@@ -124,7 +118,7 @@ jobs:
     - name: Get current date
       id: date
       run: echo "date=$(date +%Y-%m-%d)" >> "${GITHUB_OUTPUT}"
-    - uses: mamba-org/setup-micromamba@v1
+    - uses: mamba-org/setup-micromamba@v2
       with:
         environment-file: ci/combined-environment-ci.yml
         environment-name: DEVELOP
@@ -141,41 +135,50 @@ jobs:
         make docs-build
 
   integration-tests:
-    needs: [combine-environments, unit-tests]
+    needs: [unit-tests]
     if: |
       success() && true
     runs-on: ubuntu-latest
-
     strategy:
       matrix:
-        include:
-        - python-version: '3.10'
-          extra: -integration
+        python-version:
+        - "3.10"
+        - "3.11"
+        - "3.12"
 
     steps:
    - uses: actions/checkout@v4
-    - uses: actions/download-artifact@v4
+
+    - name: Install uv and set the python version
+      uses: astral-sh/setup-uv@v5
       with:
-        name: combined-environments
-        path: ci
-    - name: Get current date
-      id: date
-      run: echo "date=$(date +%Y-%m-%d)" >> "${GITHUB_OUTPUT}"
-    - uses: mamba-org/setup-micromamba@v1
+        python-version: ${{ matrix.python-version }}
+
+    - name: Install the project
+      run: uv sync --locked --dev
+
+    - name: Run tests
+      run: uv run pytest -vv
+
+  minver-tests:
+    needs: [unit-tests]
+    if: |
+      success() && true
+    runs-on: ubuntu-latest
+
+    steps:
    - uses: actions/checkout@v4
+
+    - name: Install uv and set the python version
+      uses: astral-sh/setup-uv@v5
       with:
-        environment-file: ci/combined-environment${{ matrix.extra }}.yml
-        environment-name: DEVELOP${{ matrix.extra }}
-        cache-environment: true
-        cache-environment-key: environment-${{ steps.date.outputs.date }}
-        cache-downloads-key: downloads-${{ steps.date.outputs.date }}
-        create-args: >-
-          python=${{ matrix.python-version }}
-    - name: Install package
-      run: |
-        python -m pip install --no-deps -e .
+        python-version: "3.10"
+
+    - name: Install the project
+      run: uv sync --dev --resolution lowest-direct
+
     - name: Run tests
-      run: |
-        make unit-tests COV_REPORT=xml
+      run: uv run pytest -vv
 
   distribution:
     runs-on: ubuntu-latest
@@ -215,7 +218,7 @@ jobs:
     runs-on: ubuntu-latest
     needs: distribution
     if: |
-      always() &&
+      always() && true &&
       needs.distribution.result == 'success' &&
       github.event_name == 'push' &&
       startsWith(github.ref, 'refs/tags')
@@ -230,6 +233,6 @@ jobs:
       with:
         name: distribution
         path: dist
-    - uses: pypa/gh-action-pypi-publish@v1.10.1
+    - uses: pypa/gh-action-pypi-publish@v1.12.4
       with:
         verbose: true


=====================================
.gitignore
=====================================
@@ -5,6 +5,7 @@
 version.py
 
 # Sphinx automatic generation of API
+docs/README.md
 docs/_api/
 
 # Combined environments


=====================================
.pre-commit-config-cruft.yaml
=====================================
@@ -1,6 +1,6 @@
 repos:
 - repo: https://github.com/cruft/cruft
-  rev: 2.15.0
+  rev: 2.16.0
   hooks:
   - id: cruft
     entry: cruft update -y


=====================================
.pre-commit-config.yaml
=====================================
@@ -1,6 +1,6 @@
 repos:
 - repo: https://github.com/pre-commit/pre-commit-hooks
-  rev: v4.5.0
+  rev: v5.0.0
   hooks:
   - id: trailing-whitespace
   - id: end-of-file-fixer
@@ -17,23 +17,23 @@ repos:
   - id: blackdoc
     additional_dependencies: [black==23.11.0]
 - repo: https://github.com/astral-sh/ruff-pre-commit
-  rev: v0.1.13
+  rev: v0.11.9
   hooks:
   - id: ruff
     args: [--fix, --show-fixes]
   - id: ruff-format
 - repo: https://github.com/executablebooks/mdformat
-  rev: 0.7.17
+  rev: 0.7.22
   hooks:
   - id: mdformat
 - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
-  rev: v2.12.0
+  rev: v2.14.0
   hooks:
   - id: pretty-format-yaml
     args: [--autofix, --preserve-quotes]
   - id: pretty-format-toml
     args: [--autofix]
 - repo: https://github.com/gitleaks/gitleaks
-  rev: v8.18.1
+  rev: v8.25.1
   hooks:
   - id: gitleaks


=====================================
.python-version
=====================================
@@ -0,0 +1 @@
+3.13


=====================================
Makefile
=====================================
@@ -29,7 +29,7 @@ template-update:
 	pre-commit run --all-files cruft -c .pre-commit-config-cruft.yaml
 
 docs-build:
-	cd docs && rm -fr _api && make clean && make html
+	cp README.md docs/. && cd docs && rm -fr _api && make clean && make html
 
 # DO NOT EDIT ABOVE THIS LINE, ADD COMMANDS BELOW
 


=====================================
ci/environment-integration.yml
=====================================
@@ -14,6 +14,6 @@ dependencies:
 - pandas == 1.4.0
 - rasterio == 1.3.0
 - rioxarray == 0.13.0
-- xarray == 2023.02.0
+- xarray == 2023.12.0
 - xarray-sentinel == 0.9.3
 - xmlschema == 2.2.0


=====================================
docs/REQUIREMENTS.md
=====================================
@@ -4,7 +4,7 @@ Requirements status: :white_check_mark: for final and :construction: for draft
 
 ## General
 
-1. :white_check_mark: _Package type_:	Command line tool and python library
+1. :white_check_mark: _Package type_: Command line tool and python library
 
 1. :white_check_mark: _License_: Apache v2
 


=====================================
docs/conf.py
=====================================
@@ -37,8 +37,10 @@ extensions = [
 autodoc_typehints = "none"
 
 # autoapi configuration
+autoapi_add_toctree_entry = False
 autoapi_dirs = ["../sarsen"]
 autoapi_ignore = ["*/version.py"]
+autoapi_member_order = "groupwise"
 autoapi_options = [
     "members",
     "inherited-members",


=====================================
docs/index.md
=====================================
@@ -1,4 +1,4 @@
-# Welcome to sarsen's documentation!
+# Welcome to sarsen's documentation
 
 Sarsen.
 
@@ -6,6 +6,7 @@ Sarsen.
 :caption: 'Contents:'
 :maxdepth: 2
 
+README.md
 API Reference <_api/sarsen/index>
 ```
 


=====================================
environment.yml
=====================================
@@ -14,7 +14,6 @@ dependencies:
 - flox
 - fsspec
 - gdal
-- mock
 - numpy >= 1.22.0
 - rasterio >= 1.3.0
 - rioxarray


=====================================
pyproject.toml
=====================================
@@ -1,5 +1,18 @@
 [build-system]
-requires = ["setuptools>=45", "setuptools_scm[toml]>=6.2"]
+build-backend = "setuptools.build_meta"
+requires = ["setuptools>=64", "setuptools_scm>=8"]
+
+[dependency-groups]
+dev = [
+  "cfchecker>=4.1.0",
+  "mypy>=1.15.0",
+  "pandas-stubs>=1.4.0",
+  "pytest>=7.0",
+  "pytest-cov>=5.0",
+  "setuptools>=64.0",
+  "shapely>=2.1",
+  "zarr>=2.18.3"
+]
 
 [project]
 classifiers = [
@@ -12,25 +25,26 @@ classifiers = [
   "Programming Language :: Python :: 3.10",
   "Programming Language :: Python :: 3.11",
   "Programming Language :: Python :: 3.12",
+  "Programming Language :: Python :: 3.13",
   "Topic :: Scientific/Engineering"
 ]
 dependencies = [
-  "attrs",
-  "flox",
-  "mock",
-  "numpy",
-  "pandas",
-  "rasterio",
-  "rioxarray",
-  "typer",
-  "xarray >= 2022.06.0",
-  "xarray-sentinel >= 0.9.3"
+  "attrs>=24.0",
+  "dask>=2023.2.0",
+  "flox>=0.9.13",
+  "numpy>=1.23.0",
+  "pandas>=1.4.0",
+  "rioxarray>=0.15.0",
+  "typer>=0.11",
+  "xarray>=2023.12.0",
+  "xarray-sentinel>=0.9.3"
 ]
 description = "Algorithms and utilities for Synthetic Aperture Radar (SAR) sensors"
 dynamic = ["version"]
 license = {file = "LICENSE"}
 name = "sarsen"
 readme = "README.md"
+requires-python = ">=3.10"
 
 [project.scripts]
 sarsen = "sarsen.__main__:app"
@@ -47,13 +61,19 @@ ignore_missing_imports = true
 module = ["py", "rasterio"]
 
 [tool.ruff]
+# Same as Black.
+indent-width = 4
+line-length = 88
+
+[tool.ruff.format]
+exclude = ["*.ipynb"]
+
+[tool.ruff.lint]
+exclude = ["*.ipynb"]
 ignore = [
   # pydocstyle: Missing Docstrings
   "D1"
 ]
-# Same as Black.
-indent-width = 4
-line-length = 88
 select = [
   # pyflakes
   "F",
@@ -69,7 +89,7 @@ select = [
 [tool.ruff.lint.pycodestyle]
 max-line-length = 110
 
-[tool.ruff.pydocstyle]
+[tool.ruff.lint.pydocstyle]
 convention = "numpy"
 
 [tool.setuptools]
@@ -79,6 +99,7 @@ packages = ["sarsen"]
 sarsen = ["py.typed"]
 
 [tool.setuptools_scm]
+fallback_version = "999"
 write_to = "sarsen/version.py"
 write_to_template = '''
 # Do not change! Do not track in version control!


=====================================
sarsen/__main__.py
=====================================
@@ -30,10 +30,12 @@ def gtc(
     enable_dask_distributed: bool = False,
     client_kwargs_json: str = '{"processes": false}',
     chunks: int = 1024,
+    seed_step: int | None = None,
 ) -> None:
     """Generate a geometrically terrain corrected (GTC) image from Sentinel-1 product."""
     client_kwargs = json.loads(client_kwargs_json)
     real_chunks = chunks if chunks > 0 else None
+    real_seed_step = (seed_step, seed_step) if seed_step is not None else None
     logging.basicConfig(level=logging.INFO)
     product = sentinel1.Sentinel1SarProduct(
         product_urlpath,
@@ -46,6 +48,7 @@ def gtc(
         enable_dask_distributed=enable_dask_distributed,
         client_kwargs=client_kwargs,
         chunks=real_chunks,
+        seed_step=real_seed_step,
     )
 
 
@@ -59,10 +62,12 @@ def stc(
     client_kwargs_json: str = '{"processes": false}',
     chunks: int = 1024,
     grouping_area_factor: Tuple[float, float] = (3.0, 3.0),
+    seed_step: int | None = None,
 ) -> None:
     """Generate a simulated terrain corrected image from a Sentinel-1 product."""
     client_kwargs = json.loads(client_kwargs_json)
     real_chunks = chunks if chunks > 0 else None
+    real_seed_step = (seed_step, seed_step) if seed_step is not None else None
     logging.basicConfig(level=logging.INFO)
     product = sentinel1.Sentinel1SarProduct(
         product_urlpath,
@@ -77,6 +82,7 @@ def stc(
         enable_dask_distributed=enable_dask_distributed,
         client_kwargs=client_kwargs,
         chunks=real_chunks,
+        seed_step=real_seed_step,
     )
 
 
@@ -90,10 +96,12 @@ def rtc(
     client_kwargs_json: str = '{"processes": false}',
     chunks: int = 1024,
     grouping_area_factor: Tuple[float, float] = (3.0, 3.0),
+    seed_step: int | None = None,
 ) -> None:
     """Generate a radiometrically terrain corrected (RTC) image from Sentinel-1 product."""
     client_kwargs = json.loads(client_kwargs_json)
     real_chunks = chunks if chunks > 0 else None
+    real_seed_step = (seed_step, seed_step) if seed_step is not None else None
     logging.basicConfig(level=logging.INFO)
     product = sentinel1.Sentinel1SarProduct(
         product_urlpath,
@@ -108,6 +116,7 @@ def rtc(
         enable_dask_distributed=enable_dask_distributed,
         client_kwargs=client_kwargs,
         chunks=real_chunks,
+        seed_step=real_seed_step,
     )
 
 

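All three subcommands (gtc, stc, rtc) gain a `seed_step` option; a scalar N is expanded to the square tuple (N, N) before being handed to apps.terrain_correction. A minimal sketch of the equivalent library call (the product path, measurement group, and DEM path below are placeholders, not values from this commit):

    from sarsen import apps, sentinel1

    product = sentinel1.Sentinel1SarProduct(
        "S1A_IW_GRDH_example.SAFE",   # placeholder product path
        measurement_group="IW/VV",    # placeholder measurement group
    )
    apps.terrain_correction(
        product,
        dem_urlpath="dem.tif",        # placeholder DEM path
        output_urlpath="GTC.tif",
        seed_step=(32, 32),           # what `--seed-step 32` expands to
    )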

=====================================
sarsen/apps.py
=====================================
@@ -1,5 +1,5 @@
 import logging
-from typing import Any, Container, Dict, Optional, Tuple
+from typing import Any, Container
 from unittest import mock
 
 import numpy as np
@@ -14,17 +14,35 @@ logger = logging.getLogger(__name__)
 SPEED_OF_LIGHT = 299_792_458.0  # m / s
 
 
+def make_simulate_acquisition_template(
+    template_raster: xr.DataArray,
+    correct_radiometry: str | None = None,
+) -> xr.Dataset:
+    acquisition_template = xr.Dataset(
+        data_vars={
+            "slant_range_time": template_raster,
+            "azimuth_time": template_raster.astype("datetime64[ns]"),
+        }
+    )
+    include_variables = {"slant_range_time", "azimuth_time"}
+    if correct_radiometry is not None:
+        acquisition_template["gamma_area"] = template_raster
+        include_variables.add("gamma_area")
+
+    return acquisition_template
+
+
 def simulate_acquisition(
     dem_ecef: xr.DataArray,
-    position_ecef: xr.DataArray,
+    orbit_interpolator: orbit.OrbitPolyfitInterpolator,
     include_variables: Container[str] = (),
+    azimuth_time: xr.DataArray | float = 0.0,
+    **kwargs: Any,
 ) -> xr.Dataset:
     """Compute the image coordinates of the DEM given the satellite orbit."""
-    orbit_interpolator = orbit.OrbitPolyfitIterpolator.from_position(position_ecef)
-    position_ecef = orbit_interpolator.position()
-    velocity_ecef = orbit_interpolator.velocity()
-
-    acquisition = geocoding.backward_geocode(dem_ecef, position_ecef, velocity_ecef)
+    acquisition = geocoding.backward_geocode(
+        dem_ecef, orbit_interpolator, azimuth_time, **kwargs
+    )
 
     slant_range = (acquisition.dem_distance**2).sum(dim="axis") ** 0.5
     slant_range_time = 2.0 / SPEED_OF_LIGHT * slant_range
@@ -51,28 +69,24 @@ def simulate_acquisition(
 
 def map_simulate_acquisition(
     dem_ecef: xr.DataArray,
-    position_ecef: xr.DataArray,
-    template_raster: xr.DataArray,
-    correct_radiometry: Optional[str] = None,
+    orbit_interpolator: orbit.OrbitPolyfitInterpolator,
+    template_raster: xr.DataArray | None = None,
+    correct_radiometry: str | None = None,
+    **kwargs: Any,
 ) -> xr.Dataset:
-    acquisition_template = xr.Dataset(
-        data_vars={
-            "slant_range_time": template_raster,
-            "azimuth_time": template_raster.astype("datetime64[ns]"),
-        }
+    if template_raster is None:
+        template_raster = dem_ecef.isel(axis=0).drop_vars(["axis", "spatial_ref"]) * 0.0
+    acquisition_template = make_simulate_acquisition_template(
+        template_raster, correct_radiometry
     )
-    include_variables = {"slant_range_time", "azimuth_time"}
-    if correct_radiometry is not None:
-        acquisition_template["gamma_area"] = template_raster
-        include_variables.add("gamma_area")
-
     acquisition = xr.map_blocks(
         simulate_acquisition,
-        dem_ecef,
+        dem_ecef.drop_vars("spatial_ref"),
         kwargs={
-            "position_ecef": position_ecef,
-            "include_variables": include_variables,
-        },
+            "orbit_interpolator": orbit_interpolator,
+            "include_variables": list(acquisition_template.data_vars),
+        }
+        | kwargs,
         template=acquisition_template,
     )
     return acquisition
@@ -81,25 +95,33 @@ def map_simulate_acquisition(
 def do_terrain_correction(
     product: datamodel.SarProduct,
     dem_raster: xr.DataArray,
-    correct_radiometry: Optional[str] = None,
+    convert_to_dem_ecef_kwargs: dict[str, Any] = {},
+    correct_radiometry: str | None = None,
     interp_method: xr.core.types.InterpOptions = "nearest",
-    grouping_area_factor: Tuple[float, float] = (3.0, 3.0),
+    grouping_area_factor: tuple[float, float] = (3.0, 3.0),
     radiometry_chunks: int = 2048,
     radiometry_bound: int = 128,
-) -> tuple[xr.DataArray, Optional[xr.DataArray]]:
+    seed_step: tuple[int, int] | None = None,
+) -> tuple[xr.DataArray, xr.DataArray | None]:
     logger.info("pre-process DEM")
 
     dem_ecef = xr.map_blocks(
-        scene.convert_to_dem_ecef, dem_raster, kwargs={"source_crs": dem_raster.rio.crs}
+        scene.convert_to_dem_ecef, dem_raster, kwargs=convert_to_dem_ecef_kwargs
     )
-    dem_ecef = dem_ecef.drop_vars(dem_ecef.rio.grid_mapping)
 
     logger.info("simulate acquisition")
 
-    template_raster = dem_raster.drop_vars(dem_raster.rio.grid_mapping) * 0.0
+    template_raster = dem_ecef.isel(axis=0).drop_vars(["axis", "spatial_ref"]) * 0.0
+
+    orbit_interpolator = orbit.OrbitPolyfitInterpolator.from_position(
+        product.state_vectors()
+    )
 
     acquisition = map_simulate_acquisition(
-        dem_ecef, product.state_vectors(), template_raster, correct_radiometry
+        dem_ecef,
+        orbit_interpolator,
+        correct_radiometry=correct_radiometry,
+        seed_step=seed_step,
     )
 
     simulated_beta_nought = None
@@ -125,9 +147,9 @@ def do_terrain_correction(
         )
         simulated_beta_nought.attrs["long_name"] = "terrain-simulated beta nought"
 
-        simulated_beta_nought.x.attrs.update(dem_raster.x.attrs)
-        simulated_beta_nought.y.attrs.update(dem_raster.y.attrs)
-        simulated_beta_nought.rio.write_crs(dem_raster.rio.crs, inplace=True)
+        simulated_beta_nought.x.attrs.update(dem_ecef.x.attrs)
+        simulated_beta_nought.y.attrs.update(dem_ecef.y.attrs)
+        simulated_beta_nought.rio.write_crs(dem_ecef.rio.crs, inplace=True)
 
     logger.info("calibrate image")
 
@@ -150,9 +172,9 @@ def do_terrain_correction(
         geocoded = geocoded / simulated_beta_nought
         geocoded.attrs["long_name"] = "terrain-corrected gamma nought"
 
-    geocoded.x.attrs.update(dem_raster.x.attrs)
-    geocoded.y.attrs.update(dem_raster.y.attrs)
-    geocoded.rio.write_crs(dem_raster.rio.crs, inplace=True)
+    geocoded.x.attrs.update(dem_ecef.x.attrs)
+    geocoded.y.attrs.update(dem_ecef.y.attrs)
+    geocoded.rio.write_crs(dem_ecef.rio.crs, inplace=True)
 
     return geocoded, simulated_beta_nought
 
@@ -160,17 +182,18 @@ def do_terrain_correction(
 def terrain_correction(
     product: datamodel.SarProduct,
     dem_urlpath: str,
-    output_urlpath: Optional[str] = "GTC.tif",
-    simulated_urlpath: Optional[str] = None,
-    correct_radiometry: Optional[str] = None,
+    output_urlpath: str | None = "GTC.tif",
+    simulated_urlpath: str | None = None,
+    correct_radiometry: str | None = None,
     interp_method: xr.core.types.InterpOptions = "nearest",
-    grouping_area_factor: Tuple[float, float] = (3.0, 3.0),
-    open_dem_raster_kwargs: Dict[str, Any] = {},
-    chunks: Optional[int] = 1024,
+    grouping_area_factor: tuple[float, float] = (3.0, 3.0),
+    open_dem_raster_kwargs: dict[str, Any] = {},
+    chunks: int | None = 1024,
     radiometry_chunks: int = 2048,
     radiometry_bound: int = 128,
     enable_dask_distributed: bool = False,
-    client_kwargs: Dict[str, Any] = {"processes": False},
+    client_kwargs: dict[str, Any] = {"processes": False},
+    seed_step: tuple[int, int] | None = None,
 ) -> xr.DataArray:
     """Apply the terrain-correction to sentinel-1 SLC and GRD products.
 
@@ -208,14 +231,20 @@ def terrain_correction(
     if output_urlpath is None and simulated_urlpath is None:
         raise ValueError("No output selected")
 
+    allowed_product_types = ["GRD", "SLC"]
+    if product.product_type not in allowed_product_types:
+        raise ValueError(
+            f"{product.product_type=}. Must be one of: {allowed_product_types}"
+        )
+
     output_chunks = chunks if chunks is not None else 512
 
-    to_raster_kwargs: Dict[str, Any] = {}
+    to_raster_kwargs: dict[str, Any] = {}
     if enable_dask_distributed:
         from dask.distributed import Client, Lock
 
-        client = Client(**client_kwargs)  # type: ignore
-        to_raster_kwargs["lock"] = Lock("rio", client=client)  # type: ignore
+        client = Client(**client_kwargs)
+        to_raster_kwargs["lock"] = Lock("rio", client=client)
         to_raster_kwargs["compute"] = False
         print(f"Dask distributed dashboard at: {client.dashboard_link}")
 
@@ -225,12 +254,6 @@ def terrain_correction(
         dem_urlpath, chunks=chunks, **open_dem_raster_kwargs
     )
 
-    allowed_product_types = ["GRD", "SLC"]
-    if product.product_type not in allowed_product_types:
-        raise ValueError(
-            f"{product.product_type=}. Must be one of: {allowed_product_types}"
-        )
-
     geocoded, simulated_beta_nought = do_terrain_correction(
         product=product,
         dem_raster=dem_raster,
@@ -239,6 +262,7 @@ def terrain_correction(
         grouping_area_factor=grouping_area_factor,
         radiometry_chunks=radiometry_chunks,
         radiometry_bound=radiometry_bound,
+        seed_step=seed_step,
     )
 
     if simulated_urlpath is not None:

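The acquisition template that used to be built inline is now the standalone helper make_simulate_acquisition_template, so xr.map_blocks can derive its output variables from a single place. A self-contained sketch of the Dataset it produces (the 3x4 toy raster is illustrative):

    import numpy as np
    import xarray as xr

    template_raster = xr.DataArray(np.zeros((3, 4)), dims=("y", "x"))

    # float slant_range_time, datetime64 azimuth_time and, only when
    # radiometric correction is requested, a gamma_area variable
    acquisition_template = xr.Dataset(
        data_vars={
            "slant_range_time": template_raster,
            "azimuth_time": template_raster.astype("datetime64[ns]"),
            "gamma_area": template_raster,
        }
    )
    print(acquisition_template.data_vars)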

=====================================
sarsen/chunking.py
=====================================
@@ -1,6 +1,6 @@
 import itertools
 import math
-from typing import Any, Callable, Dict, List, Optional, Tuple, Union
+from typing import Any, Callable
 
 import xarray as xr
 
@@ -9,7 +9,7 @@ def compute_chunks_1d(
     dim_size: int,
     chunks: int = 2048,
     bound: int = 128,
-) -> Tuple[List[slice], List[slice], List[slice]]:
+) -> tuple[list[slice], list[slice], list[slice]]:
     ext_slices = []
     ext_slices_bound = []
     int_slices = []
@@ -43,9 +43,9 @@ def compute_chunks_1d(
 
 
 def compute_product(
-    slices: List[List[slice]], dims_name: List[str]
-) -> List[Dict[str, slice]]:
-    product: List[Dict[str, slice]] = []
+    slices: list[list[slice]], dims_name: list[str]
+) -> list[dict[str, slice]]:
+    product: list[dict[str, slice]] = []
 
     for slices_ in itertools.product(*slices):
         product.append({})
@@ -55,10 +55,10 @@ def compute_product(
 
 
 def compute_chunks(
-    dims: Dict[str, int] = {},
+    dims: dict[str, int] = {},
     chunks: int = 2048,
     bound: int = 128,
-) -> Tuple[List[Dict[str, slice]], List[Dict[str, slice]], List[Dict[str, slice]]]:
+) -> tuple[list[dict[str, slice]], list[dict[str, slice]], list[dict[str, slice]]]:
     ext_slices_ = []
     ext_slices_bound_ = []
     int_slices_ = []
@@ -76,11 +76,11 @@ def compute_chunks(
 
 def map_ovelap(
     function: Callable[..., xr.DataArray],
-    obj: Union[xr.Dataset, xr.DataArray],
+    obj: xr.Dataset | xr.DataArray,
     chunks: int = 2048,
     bound: int = 128,
-    kwargs: Dict[Any, Any] = {},
-    template: Optional[xr.DataArray] = None,
+    kwargs: dict[Any, Any] = {},
+    template: xr.DataArray | None = None,
 ) -> xr.DataArray:
     dims = {}
     for d in obj.dims:
@@ -100,7 +100,7 @@ def map_ovelap(
     )  # type ignore
 
     try:
-        from dask.array import empty_like  # type: ignore
+        from dask.array import empty_like
     except ModuleNotFoundError:
         from numpy import empty_like  # type: ignore
 

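Beyond the move from typing.Tuple/List/Dict/Optional/Union to builtin generics and `X | None`, the chunking behaviour is unchanged. A usage sketch of the 1-D helper; the sizes are illustrative, and the meaning of the three returned slice lists is inferred from the local variable names (ext_slices, ext_slices_bound, int_slices):

    from sarsen.chunking import compute_chunks_1d

    # chunk a 5000-pixel dimension into 2048-pixel blocks with a
    # 128-pixel overlap on each side
    ext, ext_bound, interior = compute_chunks_1d(dim_size=5000, chunks=2048, bound=128)
    for chunk, padded, trim in zip(ext, ext_bound, interior):
        # `padded` grows `chunk` by the bound; `trim` cuts the overlap
        # back off the processed block
        print(chunk, padded, trim)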

=====================================
sarsen/datamodel.py
=====================================
@@ -1,5 +1,5 @@
 import abc
-from typing import Any, Dict, Optional, Tuple
+from typing import Any
 
 import xarray as xr
 
@@ -10,51 +10,45 @@ class SarProduct(abc.ABC):
 
     @property
     @abc.abstractmethod
-    def product_type(self) -> str:
-        ...
+    def product_type(self) -> str: ...
 
     @abc.abstractmethod
-    def state_vectors(self) -> xr.DataArray:
-        ...
+    def state_vectors(self) -> xr.DataArray: ...
 
     @abc.abstractmethod
-    def beta_nought(self) -> xr.DataArray:
-        ...
+    def beta_nought(self) -> xr.DataArray: ...
 
     @abc.abstractmethod
     def interp_sar(
         self,
         data: xr.DataArray,
         azimuth_time: xr.DataArray,
-        slant_range_time: Optional[xr.DataArray] = None,
+        slant_range_time: xr.DataArray | None = None,
         method: xr.core.types.InterpOptions = "nearest",
-        ground_range: Optional[xr.DataArray] = None,
-    ) -> xr.DataArray:
-        ...
+        ground_range: xr.DataArray | None = None,
+    ) -> xr.DataArray: ...
 
     # FIXME: design a better interface
     @abc.abstractmethod
     def grid_parameters(
         self,
-        grouping_area_factor: Tuple[float, float] = (3.0, 3.0),
-    ) -> Dict[str, Any]:
-        ...
+        grouping_area_factor: tuple[float, float] = (3.0, 3.0),
+    ) -> dict[str, Any]: ...
 
 
 class GroundRangeSarProduct(SarProduct):
     @abc.abstractmethod
     def slant_range_time_to_ground_range(
         self, azimuth_time: xr.DataArray, slant_range_time: xr.DataArray
-    ) -> xr.DataArray:
-        ...
+    ) -> xr.DataArray: ...
 
     def interp_sar(
         self,
         data: xr.DataArray,
         azimuth_time: xr.DataArray,
-        slant_range_time: Optional[xr.DataArray] = None,
+        slant_range_time: xr.DataArray | None = None,
         method: xr.core.types.InterpOptions = "nearest",
-        ground_range: Optional[xr.DataArray] = None,
+        ground_range: xr.DataArray | None = None,
     ) -> xr.DataArray:
         if ground_range is None:
             assert slant_range_time is not None
@@ -69,8 +63,7 @@ class GroundRangeSarProduct(SarProduct):
 
 class SlantRangeSarProduct(SarProduct):
     @abc.abstractmethod
-    def complex_amplitude(self) -> xr.DataArray:
-        ...
+    def complex_amplitude(self) -> xr.DataArray: ...
 
     def beta_nought(self) -> xr.DataArray:
         amplitude = self.complex_amplitude()
@@ -82,9 +75,9 @@ class SlantRangeSarProduct(SarProduct):
         self,
         data: xr.DataArray,
         azimuth_time: xr.DataArray,
-        slant_range_time: Optional[xr.DataArray] = None,
+        slant_range_time: xr.DataArray | None = None,
         method: xr.core.types.InterpOptions = "nearest",
-        ground_range: Optional[xr.DataArray] = None,
+        ground_range: xr.DataArray | None = None,
     ) -> xr.DataArray:
         assert ground_range is None
         interpolated = data.interp(


=====================================
sarsen/geocoding.py
=====================================
@@ -2,103 +2,218 @@
 
 See: https://sentinel.esa.int/documents/247904/0/Guide-to-Sentinel-1-Geocoding.pdf/e0450150-b4e9-4b2d-9b32-dadf989d3bd3
 """
+
 import functools
-from typing import Any, Callable, Optional, Tuple, TypeVar
+from typing import Any, Callable, TypeVar
 
 import numpy as np
 import numpy.typing as npt
 import xarray as xr
 
-TimedeltaArrayLike = TypeVar("TimedeltaArrayLike", bound=npt.ArrayLike)
+from . import orbit
+
+ArrayLike = TypeVar("ArrayLike", bound=npt.ArrayLike)
 FloatArrayLike = TypeVar("FloatArrayLike", bound=npt.ArrayLike)
 
 
 def secant_method(
-    ufunc: Callable[[TimedeltaArrayLike], Tuple[FloatArrayLike, FloatArrayLike]],
-    t_prev: TimedeltaArrayLike,
-    t_curr: TimedeltaArrayLike,
+    ufunc: Callable[[ArrayLike], tuple[FloatArrayLike, Any]],
+    t_prev: ArrayLike,
+    t_curr: ArrayLike,
     diff_ufunc: float = 1.0,
-    diff_t: np.timedelta64 = np.timedelta64(0, "ns"),
-) -> Tuple[TimedeltaArrayLike, TimedeltaArrayLike, FloatArrayLike, Any]:
+    diff_t: Any = 1e-6,
+    maxiter: int = 10,
+) -> tuple[ArrayLike, ArrayLike, FloatArrayLike, int, Any]:
     """Return the root of ufunc calculated using the secant method."""
     # implementation modified from https://en.wikipedia.org/wiki/Secant_method
     f_prev, _ = ufunc(t_prev)
 
     # strong convergence, all points below one of the two thresholds
-    while True:
+    for k in range(maxiter):
         f_curr, payload_curr = ufunc(t_curr)
 
+        # print(f"{f_curr / 7500}")
+
         # the `not np.any` construct lets us accept `np.nan` as good values
         if not np.any((np.abs(f_curr) > diff_ufunc)):
             break
 
-        t_diff: TimedeltaArrayLike
-        p: TimedeltaArrayLike
-        q: FloatArrayLike
-
         t_diff = t_curr - t_prev  # type: ignore
-        p = f_curr * t_diff  # type: ignore
+
+        # the `not np.any` construct lets us accept `np.nat` as good values
+        if not np.any(np.abs(t_diff) > diff_t):
+            break
+
         q = f_curr - f_prev  # type: ignore
 
-        # t_prev, t_curr = t_curr, t_curr - f_curr * np.timedelta64(-148_000, "ns")
-        t_prev, t_curr = t_curr, t_curr - np.where(q != 0, p / q, 0)  # type: ignore
+        # NOTE: in some cases f_curr * t_diff overflows datetime64[ns] before the division by q
+        t_prev, t_curr = t_curr, t_curr - np.where(q != 0, f_curr / q, 0) * t_diff  # type: ignore
         f_prev = f_curr
 
+    return t_curr, t_prev, f_curr, k, payload_curr
+
+
+def newton_raphson_method(
+    ufunc: Callable[[ArrayLike], tuple[FloatArrayLike, Any]],
+    ufunc_prime: Callable[[ArrayLike, Any], FloatArrayLike],
+    t_curr: ArrayLike,
+    diff_ufunc: float = 1.0,
+    diff_t: Any = 1e-6,
+    maxiter: int = 10,
+) -> tuple[ArrayLike, FloatArrayLike, int, Any]:
+    """Return the root of ufunc calculated using the Newton method."""
+    # implementation based on https://en.wikipedia.org/wiki/Newton%27s_method
+    # strong convergence, all points below one of the two thresholds
+    for k in range(maxiter):
+        f_curr, payload_curr = ufunc(t_curr)
+
+        # print(f"{f_curr / 7500}")
+
+        # the `not np.any` construct lets us accept `np.nan` as good values
+        if not np.any((np.abs(f_curr) > diff_ufunc)):
+            break
+
+        fp_curr = ufunc_prime(t_curr, payload_curr)
+
+        t_diff = f_curr / fp_curr  # type: ignore
+
         # the `not np.any` construct lets us accept `np.nat` as good values
         if not np.any(np.abs(t_diff) > diff_t):
             break
 
-    return t_curr, t_prev, f_curr, payload_curr
+        t_curr = t_curr - t_diff  # type: ignore
 
+    return t_curr, f_curr, k, payload_curr
 
-# FIXME: interpolationg the direction decreses the precision, this function should
-#   probably have velocity_ecef_sar in input instead
-def zero_doppler_plane_distance(
+
+def zero_doppler_plane_distance_velocity(
     dem_ecef: xr.DataArray,
-    position_ecef_sar: xr.DataArray,
-    direction_ecef_sar: xr.DataArray,
-    azimuth_time: TimedeltaArrayLike,
+    orbit_interpolator: orbit.OrbitPolyfitInterpolator,
+    orbit_time: xr.DataArray,
     dim: str = "axis",
-) -> Tuple[xr.DataArray, Tuple[xr.DataArray, xr.DataArray]]:
-    dem_distance = dem_ecef - position_ecef_sar.interp(azimuth_time=azimuth_time)
-    satellite_direction = direction_ecef_sar.interp(azimuth_time=azimuth_time)
-    plane_distance = (dem_distance * satellite_direction).sum(dim, skipna=False)
-    return plane_distance, (dem_distance, satellite_direction)
+) -> tuple[xr.DataArray, tuple[xr.DataArray, xr.DataArray]]:
+    dem_distance = dem_ecef - orbit_interpolator.position_from_orbit_time(orbit_time)
+    satellite_velocity = orbit_interpolator.velocity_from_orbit_time(orbit_time)
+    plane_distance_velocity = (dem_distance * satellite_velocity).sum(dim, skipna=False)
+    return plane_distance_velocity, (dem_distance, satellite_velocity)
 
 
-def backward_geocode(
+def zero_doppler_plane_distance_velocity_prime(
+    orbit_interpolator: orbit.OrbitPolyfitInterpolator,
+    orbit_time: xr.DataArray,
+    payload: tuple[xr.DataArray, xr.DataArray],
+    dim: str = "axis",
+) -> xr.DataArray:
+    dem_distance, satellite_velocity = payload
+
+    plane_distance_velocity_prime = (
+        dem_distance * orbit_interpolator.acceleration_from_orbit_time(orbit_time)
+        - satellite_velocity**2
+    ).sum(dim)
+    return plane_distance_velocity_prime
+
+
+def backward_geocode_simple(
     dem_ecef: xr.DataArray,
-    position_ecef: xr.DataArray,
-    velocity_ecef: xr.DataArray,
-    azimuth_time: Optional[xr.DataArray] = None,
+    orbit_interpolator: orbit.OrbitPolyfitInterpolator,
+    orbit_time_guess: xr.DataArray | float = 0.0,
     dim: str = "axis",
-    diff_ufunc: float = 1.0,
-) -> xr.Dataset:
-    direction_ecef = (
-        velocity_ecef / xr.dot(velocity_ecef, velocity_ecef, dims=dim) ** 0.5
-    )
+    zero_doppler_distance: float = 1.0,
+    satellite_speed: float = 7_500.0,
+    method: str = "secant",
+    orbit_time_prev_shift: float = -0.1,
+    maxiter: int = 10,
+) -> tuple[xr.DataArray, xr.DataArray, xr.DataArray]:
+    diff_ufunc = zero_doppler_distance * satellite_speed
 
     zero_doppler = functools.partial(
-        zero_doppler_plane_distance, dem_ecef, position_ecef, direction_ecef
+        zero_doppler_plane_distance_velocity, dem_ecef, orbit_interpolator
     )
 
-    if azimuth_time is None:
-        azimuth_time = position_ecef.azimuth_time
-    t_template = dem_ecef.isel({dim: 0}).drop_vars(dim)
-    t_prev = xr.full_like(t_template, azimuth_time.values[0], dtype=azimuth_time.dtype)
-    t_curr = xr.full_like(t_template, azimuth_time.values[-1], dtype=azimuth_time.dtype)
-
-    # NOTE: dem_distance has the associated azimuth_time as a coordinate already
-    _, _, _, (dem_distance, satellite_direction) = secant_method(
-        zero_doppler,
-        t_prev,
-        t_curr,
-        diff_ufunc,
+    if isinstance(orbit_time_guess, xr.DataArray):
+        pass
+    else:
+        t_template = dem_ecef.isel({dim: 0}).drop_vars(dim).rename("azimuth_time")
+        orbit_time_guess = xr.full_like(
+            t_template,
+            orbit_time_guess,
+            dtype="float64",
+        )
+
+    if method == "secant":
+        orbit_time_guess_prev = orbit_time_guess + orbit_time_prev_shift
+        orbit_time, _, _, k, (dem_distance, satellite_velocity) = secant_method(
+            zero_doppler,
+            orbit_time_guess_prev,
+            orbit_time_guess,
+            diff_ufunc,
+            maxiter=maxiter,
+        )
+    elif method in {"newton", "newton_raphson"}:
+        zero_doppler_prime = functools.partial(
+            zero_doppler_plane_distance_velocity_prime, orbit_interpolator
+        )
+        orbit_time, _, k, (dem_distance, satellite_velocity) = newton_raphson_method(
+            zero_doppler,
+            zero_doppler_prime,
+            orbit_time_guess,
+            diff_ufunc,
+            maxiter=maxiter,
+        )
+    # print(f"iterations: {k}")
+    return orbit_time, dem_distance, satellite_velocity
+
+
+def backward_geocode(
+    dem_ecef: xr.DataArray,
+    orbit_interpolator: orbit.OrbitPolyfitInterpolator,
+    orbit_time_guess: xr.DataArray | float = 0.0,
+    dim: str = "axis",
+    zero_doppler_distance: float = 1.0,
+    satellite_speed: float = 7_500.0,
+    method: str = "newton",
+    seed_step: tuple[int, int] | None = None,
+    maxiter: int = 10,
+    maxiter_after_seed: int = 1,
+    orbit_time_prev_shift: float = -0.1,
+) -> xr.Dataset:
+    if seed_step is not None:
+        dem_ecef_seed = dem_ecef.isel(
+            y=slice(seed_step[0] // 2, None, seed_step[0]),
+            x=slice(seed_step[1] // 2, None, seed_step[1]),
+        )
+        orbit_time_seed, _, _ = backward_geocode_simple(
+            dem_ecef_seed,
+            orbit_interpolator,
+            orbit_time_guess,
+            dim,
+            zero_doppler_distance,
+            satellite_speed,
+            method,
+            orbit_time_prev_shift=orbit_time_prev_shift,
+        )
+        orbit_time_guess = orbit_time_seed.interp_like(
+            dem_ecef.sel(axis=0), kwargs={"fill_value": "extrapolate"}
+        )
+        maxiter = maxiter_after_seed
+
+    orbit_time, dem_distance, satellite_velocity = backward_geocode_simple(
+        dem_ecef,
+        orbit_interpolator,
+        orbit_time_guess,
+        dim,
+        zero_doppler_distance,
+        satellite_speed,
+        method,
+        maxiter=maxiter,
+        orbit_time_prev_shift=orbit_time_prev_shift,
     )
+
     acquisition = xr.Dataset(
         data_vars={
+            "azimuth_time": orbit_interpolator.orbit_time_to_azimuth_time(orbit_time),
             "dem_distance": dem_distance,
-            "satellite_direction": satellite_direction.transpose(*dem_distance.dims),
+            "satellite_velocity": satellite_velocity.transpose(*dem_distance.dims),
         }
     )
-    return acquisition.reset_coords("azimuth_time")
+    return acquisition

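The rewrite solves for float orbit time instead of datetime64 (sidestepping the datetime64[ns] overflow noted in the secant update) and adds a Newton-Raphson solver: the zero-Doppler root of f(t) = (x_dem - p(t)) . v(t), whose derivative is f'(t) = (x_dem - p(t)) . a(t) - |v(t)|**2, with a(t) supplied by the orbit interpolator. backward_geocode can also solve on a coarse seeded grid (seed_step) and interpolate that solution as the initial guess for the full grid, capping the remaining iterations at maxiter_after_seed. A self-contained numeric sketch of the Newton iteration on a toy straight-line orbit (so a(t) = 0), runnable with numpy alone:

    import numpy as np

    p0 = np.array([0.0, 7_000_000.0, 0.0])      # satellite position at t = 0
    v = np.array([7_500.0, 0.0, 0.0])           # constant velocity, so a(t) = 0
    x = np.array([75_000.0, 6_300_000.0, 0.0])  # ground point

    def f(t: float) -> float:
        # zero-Doppler condition: (x - p(t)) . v(t)
        return float(np.dot(x - (p0 + v * t), v))

    def f_prime(t: float) -> float:
        # (x - p(t)) . a(t) - |v|**2, with a(t) = 0 on a straight line
        return -float(np.dot(v, v))

    t = 0.0
    for _ in range(10):
        step = f(t) / f_prime(t)
        t -= step
        if abs(step) < 1e-9:
            break
    print(t)  # 10.0 -- exact in one step because f is linear here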

=====================================
sarsen/orbit.py
=====================================
@@ -1,4 +1,5 @@
-from typing import Any, Optional, Tuple
+import functools
+from typing import Any
 
 import attrs
 import numpy as np
@@ -18,11 +19,25 @@ def polyder(coefficients: xr.DataArray) -> xr.DataArray:
     return derivative_coefficients
 
 
+def orbit_time_to_azimuth_time(
+    orbit_time: xr.DataArray, epoch: np.datetime64
+) -> xr.DataArray:
+    azimuth_time = orbit_time * np.timedelta64(S_TO_NS, "ns") + epoch
+    return azimuth_time.rename("azimuth_time")
+
+
+def azimuth_time_to_orbit_time(
+    azimuth_time: xr.DataArray, epoch: np.datetime64
+) -> xr.DataArray:
+    orbit_time = (azimuth_time - epoch) / np.timedelta64(S_TO_NS, "ns")
+    return orbit_time.rename("orbit_time")
+
+
 @attrs.define
-class OrbitPolyfitIterpolator:
+class OrbitPolyfitInterpolator:
     coefficients: xr.DataArray
     epoch: np.datetime64
-    interval: Tuple[np.datetime64, np.datetime64]
+    interval: tuple[np.datetime64, np.datetime64]
 
     @classmethod
     def from_position(
@@ -30,11 +45,10 @@ class OrbitPolyfitIterpolator:
         position: xr.DataArray,
         dim: str = "azimuth_time",
         deg: int = 5,
-        epoch: Optional[np.datetime64] = None,
-        interval: Optional[Tuple[np.datetime64, np.datetime64]] = None,
-    ) -> "OrbitPolyfitIterpolator":
+        epoch: np.datetime64 | None = None,
+        interval: tuple[np.datetime64, np.datetime64] | None = None,
+    ) -> "OrbitPolyfitInterpolator":
         time = position.coords[dim]
-        assert time.dtype.name in ("datetime64[ns]", "timedelta64[ns]")
 
         if epoch is None:
             # NOTE: summing two datetime64 is not defined and we cannot use:
@@ -44,12 +58,19 @@ class OrbitPolyfitIterpolator:
         if interval is None:
             interval = (time.values[0], time.values[-1])
 
-        data = position.assign_coords({dim: time - epoch})
+        orbit_time = azimuth_time_to_orbit_time(time, epoch)
+        data = position.assign_coords({dim: orbit_time})
         polyfit_results = data.polyfit(dim=dim, deg=deg)
         # TODO: raise if the fit is not good enough
 
         return cls(polyfit_results.polyfit_coefficients, epoch, interval)
 
+    def orbit_time_to_azimuth_time(self, azimuth_time: xr.DataArray) -> xr.DataArray:
+        return orbit_time_to_azimuth_time(azimuth_time, self.epoch)
+
+    def azimuth_time_to_orbit_time(self, orbit_time: xr.DataArray) -> xr.DataArray:
+        return azimuth_time_to_orbit_time(orbit_time, self.epoch)
+
     def azimuth_time_range(self, freq_s: float = 0.02) -> xr.DataArray:
         azimuth_time_values = pd.date_range(
             start=self.interval[0],
@@ -62,28 +83,54 @@ class OrbitPolyfitIterpolator:
             name="azimuth_time",
         )
 
-    def position(
-        self, time: Optional[xr.DataArray] = None, **kwargs: Any
-    ) -> xr.DataArray:
+    def position_from_orbit_time(self, orbit_time: xr.DataArray) -> xr.DataArray:
+        position = xr.polyval(orbit_time, self.coefficients)
+        return position.rename("position")
+
+    def position(self, time: xr.DataArray | None = None, **kwargs: Any) -> xr.DataArray:
         if time is None:
             time = self.azimuth_time_range(**kwargs)
         assert time.dtype.name in ("datetime64[ns]", "timedelta64[ns]")
 
-        position: xr.DataArray
-        position = xr.polyval(time - self.epoch, self.coefficients)
-        position = position.assign_coords({time.name: time})
-        return position.rename("position")
+        position = self.position_from_orbit_time(self.azimuth_time_to_orbit_time(time))
+        return position.assign_coords({time.name: time})
+
+    @functools.cached_property
+    def velocity_coefficients(self) -> xr.DataArray:
+        return polyder(self.coefficients)
 
-    def velocity(
-        self, time: Optional[xr.DataArray] = None, **kwargs: Any
+    def velocity_from_orbit_time(self, orbit_time: xr.DataArray) -> xr.DataArray:
+        velocity = xr.polyval(orbit_time, self.velocity_coefficients)
+        return velocity.rename("velocity")
+
+    def velocity(self, time: xr.DataArray | None = None, **kwargs: Any) -> xr.DataArray:
+        if time is None:
+            time = self.azimuth_time_range(**kwargs)
+        assert time.dtype.name in ("datetime64[ns]", "timedelta64[ns]")
+
+        velocity = self.velocity_from_orbit_time(self.azimuth_time_to_orbit_time(time))
+        return velocity.assign_coords({time.name: time})
+
+    @functools.cached_property
+    def acceleration_coefficients(self) -> xr.DataArray:
+        return polyder(self.velocity_coefficients)
+
+    def acceleration_from_orbit_time(self, orbit_time: xr.DataArray) -> xr.DataArray:
+        velocity = xr.polyval(orbit_time, self.acceleration_coefficients)
+        return velocity.rename("acceleration")
+
+    def acceleration(
+        self, time: xr.DataArray | None = None, **kwargs: Any
     ) -> xr.DataArray:
         if time is None:
             time = self.azimuth_time_range(**kwargs)
         assert time.dtype.name in ("datetime64[ns]", "timedelta64[ns]")
 
-        velocity_coefficients = polyder(self.coefficients) * S_TO_NS
+        acceleration = self.acceleration_from_orbit_time(
+            self.azimuth_time_to_orbit_time(time)
+        )
+        return acceleration.assign_coords({time.name: time})
 
-        velocity: xr.DataArray
-        velocity = xr.polyval(time - self.epoch, velocity_coefficients)
-        velocity = velocity.assign_coords({time.name: time})
-        return velocity.rename("velocity")
+
+# keep wrong spelling used elsewhere
+OrbitPolyfitIterpolator = OrbitPolyfitInterpolator

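The renamed OrbitPolyfitInterpolator now fits the polynomial against orbit time, a plain float in seconds relative to the epoch, rather than nanosecond timedeltas; the two new module-level helpers convert back and forth. A round-trip sketch (S_TO_NS is assumed to be 10**9, matching the nanosecond arithmetic used here and in radiometry.py):

    import numpy as np
    import xarray as xr

    S_TO_NS = 10**9  # assumed value of the module constant
    epoch = np.datetime64("2025-05-16T08:38:12.123456789", "ns")

    orbit_time = xr.DataArray(-120.25)  # seconds relative to the epoch
    azimuth_time = orbit_time * np.timedelta64(S_TO_NS, "ns") + epoch
    roundtrip = (azimuth_time - epoch) / np.timedelta64(S_TO_NS, "ns")
    assert float(roundtrip) == -120.25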

=====================================
sarsen/radiometry.py
=====================================
@@ -1,5 +1,4 @@
 import logging
-from typing import Optional, Tuple
 from unittest import mock
 
 import flox.xarray
@@ -10,14 +9,14 @@ from . import scene
 
 logger = logging.getLogger(__name__)
 
-ONE_SECOND = np.timedelta64(1, "s")
+ONE_SECOND = np.timedelta64(10**9, "ns")
 
 
 def sum_weights(
     initial_weights: xr.DataArray,
     azimuth_index: xr.DataArray,
     slant_range_index: xr.DataArray,
-    multilook: Optional[Tuple[int, int]] = None,
+    multilook: tuple[int, int] | None = None,
 ) -> xr.DataArray:
     geocoded = initial_weights.assign_coords(
         slant_range_index=slant_range_index, azimuth_index=azimuth_index
@@ -54,7 +53,7 @@ def compute_gamma_area(
     dem_direction: xr.DataArray,
 ) -> xr.DataArray:
     dem_oriented_area = scene.compute_dem_oriented_area(dem_ecef)
-    gamma_area: xr.DataArray = xr.dot(dem_oriented_area, -dem_direction, dims="axis")
+    gamma_area: xr.DataArray = xr.dot(dem_oriented_area, -dem_direction, dim="axis")
     gamma_area = gamma_area.where(gamma_area > 0, 0)
     return gamma_area
 

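The ONE_SECOND change is behaviour-preserving: the two timedeltas denote the same duration, but the nanosecond-resolution one matches the datetime64[ns] arrays used throughout the module; the dims= to dim= change tracks the keyword rename in newer xr.dot. A quick check of the equality:

    import numpy as np

    a = np.timedelta64(1, "s")
    b = np.timedelta64(10**9, "ns")
    assert a == b              # same duration
    assert a.dtype != b.dtype  # timedelta64[s] vs timedelta64[ns]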

=====================================
sarsen/scene.py
=====================================
@@ -28,15 +28,23 @@ def open_dem_raster(
 
 def make_nd_dataarray(das: list[xr.DataArray], dim: str = "axis") -> xr.DataArray:
     da_nd = xr.concat(das, dim=dim, coords="minimal")
-    return da_nd.assign_coords({dim: range(len(das))})
+    dim_attrs = {"long_name": "cartesian axis index", "units": 1}
+    return da_nd.assign_coords({dim: (dim, range(len(das)), dim_attrs)})
 
 
 def convert_to_dem_3d(
-    dem_raster: xr.DataArray, dim: str = "axis", x: str = "x", y: str = "y"
+    dem_raster: xr.DataArray,
+    dim: str = "axis",
+    x: str = "x",
+    y: str = "y",
+    dtype: str = "float64",
 ) -> xr.DataArray:
-    _, dem_raster_x = xr.broadcast(dem_raster, dem_raster.coords[x])
-    dem_raster_y = dem_raster.coords[y]
-    dem_3d = make_nd_dataarray([dem_raster_x, dem_raster_y, dem_raster], dim=dim)
+    _, dem_raster_x = xr.broadcast(dem_raster, dem_raster.coords[x].astype(dtype))
+    dem_raster_y = dem_raster.coords[y].astype(dtype)
+    dem_raster_astype = dem_raster.astype(dtype)
+    dem_3d = make_nd_dataarray([dem_raster_x, dem_raster_y, dem_raster_astype], dim=dim)
+    dem_3d.attrs.clear()
+    dem_3d.attrs.update(dem_raster.attrs)
     return dem_3d.rename("dem_3d")
 
 
@@ -74,6 +82,27 @@ def transform_dem_3d(
     return dem_3d_crs
 
 
+def upsample_coords(
+    data: xr.DataArray, dtype: str | None = None, **factors: int
+) -> dict[str, np.ndarray[Any, Any]]:
+    coords = {}
+    for dim, factor in factors.items():
+        coord = data.coords[dim]
+        coord_delta = coord[1].values - coord[0].values
+        start = coord[0].values - coord_delta / 2 + coord_delta / factor / 2
+        stop = coord[-1].values + coord_delta / 2 - coord_delta / factor / 2
+        values = np.linspace(start, stop, num=coord.size * factor, dtype=dtype)
+        coords[dim] = values
+    return coords
+
+
+def upsample(
+    data: xr.DataArray, dtype: str | None = None, **factors: int
+) -> xr.DataArray:
+    coords = upsample_coords(data, dtype, **factors)
+    return data.interp(coords, kwargs={"fill_value": "extrapolate"})
+
+
 def convert_to_dem_ecef(
     dem_raster: xr.DataArray, x: str = "x", y: str = "y", **kwargs: Any
 ) -> xr.DataArray:
@@ -112,12 +141,12 @@ def compute_dem_oriented_area(dem_ecef: xr.DataArray) -> xr.DataArray:
 
     cross_1 = xr.cross(dx1, dy1, dim="axis") / 2
     sign_1 = np.sign(
-        xr.dot(cross_1, dem_ecef, dims="axis")
+        xr.dot(cross_1, dem_ecef, dim="axis")
     )  # ensure direction out of DEM
 
     cross_2 = xr.cross(dx2, dy2, dim="axis") / 2
     sign_2 = np.sign(
-        xr.dot(cross_2, dem_ecef, dims="axis")
+        xr.dot(cross_2, dem_ecef, dim="axis")
     )  # ensure direction out of DEM
     dem_oriented_area: xr.DataArray = cross_1 * sign_1 + cross_2 * sign_2
 

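The new upsample_coords places samples at the centres of the subdivided cells rather than at the original nodes. A standalone check of that arithmetic for a factor-2 upsample of a coordinate with spacing 10:

    import numpy as np

    coord = np.array([10.0, 20.0, 30.0])  # spacing d = 10
    factor = 2
    d = coord[1] - coord[0]
    start = coord[0] - d / 2 + d / factor / 2
    stop = coord[-1] + d / 2 - d / factor / 2
    values = np.linspace(start, stop, num=coord.size * factor)
    print(values)  # [ 7.5 12.5 17.5 22.5 27.5 32.5]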

=====================================
sarsen/sentinel1.py
=====================================
@@ -1,6 +1,5 @@
-from __future__ import annotations
-
-from typing import Any, Dict, List, Optional, Tuple, Union
+import functools
+from typing import Any
 
 import attrs
 import numpy as np
@@ -12,7 +11,7 @@ import sarsen
 try:
     import dask  # noqa: F401
 
-    DEFAULT_MEASUREMENT_CHUNKS: Optional[int] = 2048
+    DEFAULT_MEASUREMENT_CHUNKS: int | None = 2048
 except ModuleNotFoundError:
     DEFAULT_MEASUREMENT_CHUNKS = None
 
@@ -21,11 +20,11 @@ SPEED_OF_LIGHT = 299_792_458.0  # m / s
 
 def open_dataset_autodetect(
     product_urlpath: str,
-    group: Optional[str] = None,
-    chunks: Optional[Union[int, Dict[str, int]]] = None,
+    group: str | None = None,
+    chunks: int | dict[str, int] | None = None,
     check_files_exist: bool = False,
     **kwargs: Any,
-) -> Tuple[xr.Dataset, Dict[str, Any]]:
+) -> tuple[xr.Dataset, dict[str, Any]]:
     kwargs.setdefault("engine", "sentinel-1")
     try:
         ds = xr.open_dataset(
@@ -37,17 +36,17 @@ def open_dataset_autodetect(
         )
     except FileNotFoundError:
         # re-try with Planetary Computer option
-        kwargs[
-            "override_product_files"
-        ] = "{dirname}/{prefix}{swath}-{polarization}{ext}"
+        kwargs["override_product_files"] = (
+            "{dirname}/{prefix}{swath}-{polarization}{ext}"
+        )
         ds = xr.open_dataset(product_urlpath, group=group, chunks=chunks, **kwargs)
     return ds, kwargs
 
 
 def azimuth_slant_range_grid(
-    attrs: Dict[str, Any],
-    grouping_area_factor: Tuple[float, float] = (3.0, 3.0),
-) -> Dict[str, Any]:
+    attrs: dict[str, Any],
+    grouping_area_factor: tuple[float, float] = (3.0, 3.0),
+) -> dict[str, Any]:
     if attrs["product_type"] == "SLC":
         slant_range_spacing_m = (
             attrs["range_pixel_spacing"]
@@ -61,7 +60,7 @@ def azimuth_slant_range_grid(
         slant_range_spacing_m * 2 / SPEED_OF_LIGHT  # ignore type
     )
 
-    grid_parameters: Dict[str, Any] = {
+    grid_parameters: dict[str, Any] = {
         "slant_range_time0": attrs["image_slant_range_time"],
         "slant_range_time_interval_s": slant_range_time_interval_s,
         "slant_range_spacing_m": slant_range_spacing_m,
@@ -76,11 +75,11 @@ def azimuth_slant_range_grid(
 @attrs.define(slots=False)
 class Sentinel1SarProduct(sarsen.GroundRangeSarProduct, sarsen.SlantRangeSarProduct):
     product_urlpath: str
-    measurement_group: Optional[str] = None
-    measurement_chunks: Optional[int] = DEFAULT_MEASUREMENT_CHUNKS
-    kwargs: Dict[str, Any] = {}
+    measurement_group: str | None = None
+    measurement_chunks: int | None = DEFAULT_MEASUREMENT_CHUNKS
+    kwargs: dict[str, Any] = {}
 
-    def all_measurement_groups(self) -> List[str]:
+    def all_measurement_groups(self) -> list[str]:
         ds, self.kwargs = open_dataset_autodetect(
             self.product_urlpath, check_files_exist=True, **self.kwargs
         )
@@ -100,31 +99,31 @@ class Sentinel1SarProduct(sarsen.GroundRangeSarProduct, sarsen.SlantRangeSarProd
             ds = xarray_sentinel.mosaic_slc_iw(ds)
         return ds
 
-    @property
+    @functools.cached_property
     def orbit(self) -> xr.Dataset:
         ds, self.kwargs = open_dataset_autodetect(
             self.product_urlpath, group=f"{self.measurement_group}/orbit", **self.kwargs
         )
-        return ds
+        return ds.compute()
 
-    @property
+    @functools.cached_property
     def gcp(self) -> xr.Dataset:
         ds, self.kwargs = open_dataset_autodetect(
             self.product_urlpath, group=f"{self.measurement_group}/gcp", **self.kwargs
         )
-        return ds
+        return ds.compute()
 
-    @property
+    @functools.cached_property
     def calibration(self) -> xr.Dataset:
         ds, self.kwargs = open_dataset_autodetect(
             self.product_urlpath,
             group=f"{self.measurement_group}/calibration",
             **self.kwargs,
         )
-        return ds
+        return ds.compute()
 
-    @property
-    def coordinate_conversion(self) -> Optional[xr.Dataset]:
+    @functools.cached_property
+    def coordinate_conversion(self) -> xr.Dataset | None:
         ds = None
         if self.product_type == "GRD":
             ds, self.kwargs = open_dataset_autodetect(
@@ -132,10 +131,11 @@ class Sentinel1SarProduct(sarsen.GroundRangeSarProduct, sarsen.SlantRangeSarProd
                 group=f"{self.measurement_group}/coordinate_conversion",
                 **self.kwargs,
             )
+            ds = ds.compute()
         return ds
 
-    @property
-    def azimuth_fm_rate(self) -> Optional[xr.Dataset]:
+    @functools.cached_property
+    def azimuth_fm_rate(self) -> xr.Dataset | None:
         ds = None
         if self.product_type == "SLC":
             ds, self.kwargs = open_dataset_autodetect(
@@ -143,10 +143,11 @@ class Sentinel1SarProduct(sarsen.GroundRangeSarProduct, sarsen.SlantRangeSarProd
                 group=f"{self.measurement_group}/azimuth_fm_rate",
                 **self.kwargs,
             )
+            ds = ds.compute()
         return ds
 
-    @property
-    def dc_estimate(self) -> Optional[xr.Dataset]:
+    @functools.cached_property
+    def dc_estimate(self) -> xr.Dataset | None:
         ds = None
         if self.product_type == "SLC":
             ds, self.kwargs = open_dataset_autodetect(
@@ -154,23 +155,36 @@ class Sentinel1SarProduct(sarsen.GroundRangeSarProduct, sarsen.SlantRangeSarProd
                 group=f"{self.measurement_group}/dc_estimate",
                 **self.kwargs,
             )
+            ds = ds.compute()
         return ds
 
+    # make the class hashable to allow caching method calls
+
+    def __hash__(self) -> int:
+        id = (
+            self.product_urlpath,
+            self.measurement_group,
+            self.measurement_chunks,
+        ) + tuple(repr(self.kwargs))
+        return hash(id)
+
     # SarProduct interface
 
-    @property
+    @functools.cached_property
     def product_type(self) -> Any:
         prod_type = self.measurement.attrs["product_type"]
         assert isinstance(prod_type, str)
         return prod_type
 
-    def beta_nought(self) -> xr.DataArray:
+    @functools.cache
+    def beta_nought(self, persist: bool = False) -> xr.DataArray:
         measurement = self.measurement.data_vars["measurement"]
         beta_nought = xarray_sentinel.calibrate_intensity(
             measurement, self.calibration.betaNought
         )
-        beta_nought = beta_nought.drop_vars(["pixel", "line"])
-        return beta_nought
+        if persist:
+            beta_nought = beta_nought.persist()
+        return beta_nought.drop_vars(["pixel", "line"])
 
     def state_vectors(self) -> xr.DataArray:
         return self.orbit.data_vars["position"]
@@ -190,8 +204,8 @@ class Sentinel1SarProduct(sarsen.GroundRangeSarProduct, sarsen.SlantRangeSarProd
 
     def grid_parameters(
         self,
-        grouping_area_factor: Tuple[float, float] = (3.0, 3.0),
-    ) -> Dict[str, Any]:
+        grouping_area_factor: tuple[float, float] = (3.0, 3.0),
+    ) -> dict[str, Any]:
         return azimuth_slant_range_grid(self.measurement.attrs, grouping_area_factor)
 
     def complex_amplitude(self) -> xr.DataArray:
@@ -208,7 +222,8 @@ class Sentinel1SarProduct(sarsen.GroundRangeSarProduct, sarsen.SlantRangeSarProd
         else:
             return sarsen.SlantRangeSarProduct.interp_sar(self, *args, **kwargs)
 
-    def product_info(self, **kwargs: Any) -> Dict[str, Any]:
+    @functools.cache
+    def product_info(self, **kwargs: Any) -> dict[str, Any]:
         """Get information about the Sentinel-1 product."""
         measurement_groups = self.all_measurement_groups()
 

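Several properties become functools.cached_property values that are opened and .compute()d once, and beta_nought/product_info use functools.cache, which requires a hashable instance; hence the new __hash__ over the identifying fields. A minimal sketch of the pattern, using a hypothetical Product class:

    import functools

    class Product:
        def __init__(self, urlpath: str, group: str | None = None) -> None:
            self.urlpath = urlpath
            self.group = group

        def __hash__(self) -> int:
            # hash the fields that identify the product, as the real
            # class does with urlpath, group, chunks and kwargs
            return hash((self.urlpath, self.group))

        @functools.cache
        def expensive(self) -> str:
            print("computed once")
            return f"{self.urlpath}:{self.group}"

    p = Product("product.SAFE")
    p.expensive()  # prints "computed once"
    p.expensive()  # second call is served from the cache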

=====================================
tests/test_10_orbit.py
=====================================
@@ -4,9 +4,35 @@ import xarray as xr
 from sarsen import orbit
 
 
-def test_OrbitPolyfitIterpolator_datetime64(orbit_ds: xr.Dataset) -> None:
+def test_orbit_time_to_azimuth_time() -> None:
+    epoch = np.datetime64("2025-05-16T08:38:12.123456789", "ns")
+
+    res = orbit.orbit_time_to_azimuth_time(xr.DataArray(0.0), epoch)
+
+    assert res == epoch
+
+    res = orbit.orbit_time_to_azimuth_time(xr.DataArray(-120.2), epoch)
+
+    assert res == epoch - np.timedelta64(120200, "ms")
+
+    res = orbit.orbit_time_to_azimuth_time(xr.DataArray(1e-10), epoch)
+
+    assert res == epoch
+
+
+def test_azimuth_time_to_orbit_time() -> None:
+    epoch = np.datetime64("2025-05-16T08:38:12.123456789", "ns")
+    seconds = -120.43256234
+    date = orbit.orbit_time_to_azimuth_time(xr.DataArray(seconds), epoch)
+
+    res = orbit.azimuth_time_to_orbit_time(date, epoch)
+
+    assert res == seconds
+
+
+def test_OrbitPolyfitInterpolator_datetime64(orbit_ds: xr.Dataset) -> None:
     position = orbit_ds.data_vars["position"]
-    orbit_interpolator = orbit.OrbitPolyfitIterpolator.from_position(position, deg=4)
+    orbit_interpolator = orbit.OrbitPolyfitInterpolator.from_position(position, deg=4)
 
     res = orbit_interpolator.position(position.azimuth_time)
 
@@ -26,13 +52,13 @@ def test_OrbitPolyfitIterpolator_datetime64(orbit_ds: xr.Dataset) -> None:
     assert res.dims == ("azimuth_time", "axis")
 
 
-def test_OrbitPolyfitIterpolator_timedelta64(orbit_ds: xr.Dataset) -> None:
+def test_OrbitPolyfitInterpolator_timedelta64(orbit_ds: xr.Dataset) -> None:
     position = orbit_ds.data_vars["position"]
     position = position.assign_coords(
         azimuth_time=position.azimuth_time - position.azimuth_time[0]
     )
     epoch = position.azimuth_time.values[0]
-    orbit_interpolator = orbit.OrbitPolyfitIterpolator.from_position(
+    orbit_interpolator = orbit.OrbitPolyfitInterpolator.from_position(
         position, epoch=epoch, deg=4
     )
 
@@ -46,3 +72,20 @@ def test_OrbitPolyfitIterpolator_timedelta64(orbit_ds: xr.Dataset) -> None:
 
     assert res.dims == ("azimuth_time", "axis")
     assert np.allclose(res, expected_velocity, rtol=0, atol=0.02)
+
+
+def test_OrbitPolyfitInterpolator_values(orbit_ds: xr.Dataset) -> None:
+    position = orbit_ds.data_vars["position"]
+    orbit_interpolator = orbit.OrbitPolyfitInterpolator.from_position(position, deg=4)
+
+    distance = np.linalg.norm(orbit_interpolator.position(), axis=1)
+
+    assert np.allclose(distance, 7_068_663, rtol=0.001)
+
+    velocity = np.linalg.norm(orbit_interpolator.velocity(), axis=1)
+
+    assert np.allclose(velocity, 7_590, rtol=0.001)
+
+    acceleration = np.linalg.norm(orbit_interpolator.acceleration(), axis=1)
+
+    assert np.allclose(acceleration, 8.18, rtol=0.001)


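The new round-trip tests pin down a plain seconds-since-epoch convention for orbit times. A NumPy-only sketch of the conversion they imply (the actual sarsen functions operate on xr.DataArray and may differ in detail):

    import numpy as np

    epoch = np.datetime64("2025-05-16T08:38:12.123456789", "ns")

    def orbit_time_to_azimuth_time(seconds: float, epoch: np.datetime64) -> np.datetime64:
        # float seconds -> absolute time at nanosecond resolution;
        # 1e-10 s rounds to 0 ns, matching the third assert above
        return epoch + np.timedelta64(round(seconds * 1e9), "ns")

    def azimuth_time_to_orbit_time(time: np.datetime64, epoch: np.datetime64) -> float:
        return float((time - epoch) / np.timedelta64(1, "s"))

    assert orbit_time_to_azimuth_time(-120.2, epoch) == epoch - np.timedelta64(120200, "ms")
    assert np.isclose(
        azimuth_time_to_orbit_time(orbit_time_to_azimuth_time(-120.43256234, epoch), epoch),
        -120.43256234,
    )
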
=====================================
tests/test_10_scene.py
=====================================
@@ -1,4 +1,5 @@
 import numpy as np
+import pytest
 import xarray as xr
 
 from sarsen import scene
@@ -12,6 +13,7 @@ def test_convert_to_dem_3d(dem_raster: xr.DataArray) -> None:
     assert res.sel(x=12.5, y=42, method="nearest")[2] == 17.0
 
 
+@pytest.mark.xfail()
 def test_transform_dem_3d(dem_raster: xr.DataArray) -> None:
     dem_3d = scene.convert_to_dem_3d(dem_raster)
 
@@ -33,6 +35,13 @@ def test_transform_dem_3d(dem_raster: xr.DataArray) -> None:
     )
 
 
+def test_upsample(dem_raster: xr.DataArray) -> None:
+    res = scene.upsample(dem_raster, x=2)
+
+    assert res.x.size == dem_raster.x.size * 2
+    assert np.allclose(res.isel(x=0).mean(), 78.46527778)
+
+
 def test_compute_dem_oriented_area(dem_raster: xr.DataArray) -> None:
     dem_3d = scene.convert_to_dem_3d(dem_raster)
 


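test_upsample expects the refined axis to have exactly factor times as many samples and the same origin. A hypothetical helper with that behaviour, built on linear interpolation (the real scene.upsample may be implemented differently):

    import numpy as np
    import xarray as xr

    def upsample(da: xr.DataArray, **factors: int) -> xr.DataArray:
        # shrink the step of each named dimension by the given factor so
        # that the resulting size is exactly size * factor
        for dim, factor in factors.items():
            coord = da.coords[dim].values
            step = (coord[1] - coord[0]) / factor
            new_coord = coord[0] + step * np.arange(da.sizes[dim] * factor)
            da = da.interp({dim: new_coord}, kwargs={"fill_value": "extrapolate"})
        return da

The extrapolation keeps the trailing factor - 1 samples, which fall just past the last original coordinate, instead of filling them with NaN.
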
=====================================
tests/test_20_geocoding.py
=====================================
@@ -11,42 +11,38 @@ def test_secant_method() -> None:
     def ufunc(
         t: npt.ArrayLike,
     ) -> Tuple[npt.NDArray[np.float64], npt.NDArray[np.float64]]:
-        ft: npt.NDArray[np.float64] = np.asarray(t).astype("float64")
-        retval = 1.0 + 0.015 * ft - 0.0001 * ft**2 + 0.00003 * ft**3
-        return retval, retval
+        retval = 1.0 + 0.015 * t - 0.0001 * t**2 + 0.00003 * t**3  # type: ignore
+        return retval, retval  # type: ignore
 
-    t_start: npt.NDArray[np.timedelta64] = np.array([np.timedelta64(-100, "ns")])
+    t_start = np.array([-100.0])
 
     # stop with df threshold
-    res, _, _, _ = geocoding.secant_method(ufunc, -t_start, t_start, diff_ufunc=0.1)
+    res, _, _, _, _ = geocoding.secant_method(ufunc, -t_start, t_start, diff_ufunc=0.1)
 
     assert isinstance(res, np.ndarray)
     assert res.size == 1
-    assert res[0] == np.timedelta64(-27, "ns")
+    assert np.allclose(res, -25.1724)
 
     # stop with dt threshold
-    res, _, _, _ = geocoding.secant_method(ufunc, -t_start, t_start, diff_ufunc=0.01)
+    res, _, _, _, _ = geocoding.secant_method(ufunc, -t_start, t_start, diff_ufunc=0.01)
 
-    assert res[0] == np.timedelta64(-27, "ns")
+    assert np.allclose(res, -26.3065)
 
-    t_start = np.ones((2, 2), dtype="timedelta64[ns]") * 100  # type: ignore
+    t_start = np.ones((2, 2))
 
-    res, _, _, _ = geocoding.secant_method(ufunc, -t_start, t_start, diff_ufunc=0.1)
+    res, _, _, _, _ = geocoding.secant_method(ufunc, -t_start, t_start, diff_ufunc=0.01)
 
     assert res.shape == t_start.shape
-    assert np.all(res == np.timedelta64(-27, "ns"))
+    assert np.allclose(res, -26.102)
 
 
-def test_zero_doppler_plane_distance(
+def test_zero_doppler_plane_distance_velocity(
     dem_ecef: xr.DataArray, orbit_ds: xr.Dataset
 ) -> None:
-    orbit_interpolator = orbit.OrbitPolyfitIterpolator.from_position(orbit_ds.position)
+    orbit_interpolator = orbit.OrbitPolyfitInterpolator.from_position(orbit_ds.position)
 
-    res0, (res1, res2) = geocoding.zero_doppler_plane_distance(
-        dem_ecef,
-        orbit_interpolator.position(),
-        orbit_interpolator.velocity(),
-        orbit_ds.azimuth_time,
+    res0, (res1, res2) = geocoding.zero_doppler_plane_distance_velocity(
+        dem_ecef, orbit_interpolator, orbit_ds.azimuth_time
     )
 
     assert isinstance(res0, xr.DataArray)
@@ -55,12 +51,12 @@ def test_zero_doppler_plane_distance(
 
 
 def test_backward_geocode(dem_ecef: xr.DataArray, orbit_ds: xr.Dataset) -> None:
-    orbit_interpolator = orbit.OrbitPolyfitIterpolator.from_position(orbit_ds.position)
+    orbit_interpolator = orbit.OrbitPolyfitInterpolator.from_position(orbit_ds.position)
 
-    res = geocoding.backward_geocode(
-        dem_ecef,
-        orbit_interpolator.position(),
-        orbit_interpolator.velocity(),
-    )
+    res = geocoding.backward_geocode(dem_ecef, orbit_interpolator)
+
+    assert isinstance(res, xr.Dataset)
+
+    res = geocoding.backward_geocode(dem_ecef, orbit_interpolator, method="newton")
 
     assert isinstance(res, xr.Dataset)


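The reworked test drives the solver in plain float seconds rather than timedelta64 nanoseconds (and secant_method now returns five values instead of four). A minimal vectorised secant iteration on the same cubic used by the test, for reference only; the stopping criterion here is a simple step-size threshold, not sarsen's diff_ufunc:

    import numpy as np

    def f(t: np.ndarray) -> np.ndarray:
        return 1.0 + 0.015 * t - 0.0001 * t**2 + 0.00003 * t**3

    t0, t1 = np.array([-100.0]), np.array([100.0])
    f0, f1 = f(t0), f(t1)
    for _ in range(100):
        # standard secant update, applied elementwise
        t2 = t1 - f1 * (t1 - t0) / (f1 - f0)
        t0, f0 = t1, f1
        t1, f1 = t2, f(t2)
        if np.all(np.abs(t1 - t0) < 0.01):
            break

    # converges to the real root near t = -26.3, consistent with the
    # tightest tolerance asserted above
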
=====================================
tests/test_50_apps.py
=====================================
@@ -60,6 +60,7 @@ def test_terrain_correction_fast_rtc(
         str(DEM_RASTER),
         correct_radiometry="gamma_nearest",
         output_urlpath=out,
+        seed_step=(32, 32),
     )
 
     assert isinstance(res, xr.DataArray)
@@ -104,6 +105,7 @@ def test_terrain_correction_gtc_dask(
         str(DEM_RASTER),
         output_urlpath=out,
         chunks=1024,
+        seed_step=(32, 32),
     )
 
     assert isinstance(res, xr.DataArray)
@@ -127,6 +129,7 @@ def test_terrain_correction_fast_rtc_dask(
         correct_radiometry="gamma_nearest",
         output_urlpath=out,
         chunks=1024,
+        seed_step=(32, 32),
     )
 
     assert isinstance(res, xr.DataArray)
@@ -150,6 +153,7 @@ def test_terrain_correction_rtc_dask(
         correct_radiometry="gamma_bilinear",
         output_urlpath=out,
         chunks=1024,
+        seed_step=(32, 32),
     )
 
     assert isinstance(res, xr.DataArray)


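The app-level tests now all pass seed_step=(32, 32). A hypothetical invocation mirroring them; the positional arguments and the Sentinel1SarProduct constructor are assumptions based on the surrounding diff, and the meaning of seed_step (a coarser grid of initial guesses seeded into the backward geocoding) is inferred, not confirmed:

    from sarsen import apps
    from sarsen.sentinel1 import Sentinel1SarProduct

    # hypothetical paths; substitute a real Sentinel-1 product and DEM
    product = Sentinel1SarProduct("path/to/S1-product.SAFE", measurement_group="IW/VV")
    gtc = apps.terrain_correction(
        product,
        "path/to/dem.tif",
        correct_radiometry="gamma_nearest",
        output_urlpath="GTC.tif",
        chunks=1024,
        seed_step=(32, 32),
    )
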
=====================================
tests/test_60_main.py
=====================================
@@ -1,3 +1,4 @@
+import pytest
 from typer.testing import CliRunner
 
 from sarsen import __main__
@@ -5,6 +6,7 @@ from sarsen import __main__
 runner = CliRunner()
 
 
+@pytest.mark.xfail()
 def test_main() -> None:
     res = runner.invoke(__main__.app, ["gtc", "--help"])
     assert res.exit_code == 0


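test_main is now marked xfail, presumably pending a CLI fix. For reference, the typer testing pattern it relies on, shown with a stand-in app rather than sarsen's real one:

    import typer
    from typer.testing import CliRunner

    app = typer.Typer()

    @app.command()
    def gtc(product: str) -> None:
        # stand-in for the real `gtc` subcommand
        typer.echo(f"geocoding {product}")

    @app.command()
    def rtc(product: str) -> None:
        # a second command so typer exposes named subcommands
        typer.echo(f"terrain correcting {product}")

    runner = CliRunner()
    result = runner.invoke(app, ["gtc", "--help"])
    assert result.exit_code == 0
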
=====================================
uv.lock
=====================================
The diff for this file was not included because it is too large.


View it on GitLab: https://salsa.debian.org/debian-gis-team/sarsen/-/commit/103c4f73db37d36629b2fcc053966d5478dea074
