[Git][debian-gis-team/xarray-safe-s1][upstream] New upstream version 2024.11.28

Antonio Valentino (@antonio.valentino) gitlab@salsa.debian.org
Wed Dec 4 06:41:26 GMT 2024



Antonio Valentino pushed to branch upstream at Debian GIS Project / xarray-safe-s1


Commits:
001034a9 by Antonio Valentino at 2024-12-04T06:33:11+00:00
New upstream version 2024.11.28
- - - - -


16 changed files:

- + .github/workflows/ci.yaml
- .github/workflows/publish.yml
- + .github/workflows/upstream-dev.yaml
- + ci/install-upstream-dev.sh
- ci/requirements/environment.yaml
- docs/api.rst
- docs/conf.py
- docs/index.rst
- docs/installing.rst
- pyproject.toml
- safe_s1/__init__.py
- safe_s1/config.yml
- safe_s1/getconfig.py
- safe_s1/reader.py
- safe_s1/sentinel1_xml_mappings.py
- safe_s1/xml_parser.py


Changes:

=====================================
.github/workflows/ci.yaml
=====================================
@@ -0,0 +1,84 @@
+name: CI
+
+on:
+  push:
+    branches: [main]
+  pull_request:
+    branches: [main]
+  workflow_dispatch:
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  detect-skip-ci-trigger:
+    name: "Detect CI Trigger: [skip-ci]"
+    if: |
+      github.repository == 'umr-lops/xarray-safe-s1'
+      && (
+          github.event_name == 'push' || github.event_name == 'pull_request'
+      )
+    runs-on: ubuntu-latest
+    outputs:
+      triggered: ${{ steps.detect-trigger.outputs.trigger-found }}
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 2
+      - uses: xarray-contrib/ci-trigger@v1
+        id: detect-trigger
+        with:
+          keyword: "[skip-ci]"
+
+  ci:
+    name: ${{ matrix.os }} py${{ matrix.python-version }}
+    runs-on: ${{ matrix.os }}
+    needs: detect-skip-ci-trigger
+
+    if: needs.detect-skip-ci-trigger.outputs.triggered == 'false'
+
+    defaults:
+      run:
+        shell: bash -l {0}
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.10", "3.11", "3.12"]
+        os: ["ubuntu-latest", "macos-latest", "windows-latest"]
+
+    steps:
+      - name: Checkout the repository
+        uses: actions/checkout@v4
+        with:
+          # need to fetch all tags to get a correct version
+          fetch-depth: 0 # fetch all branches and tags
+
+      - name: Setup environment variables
+        run: |
+          echo "TODAY=$(date +'%Y-%m-%d')" >> $GITHUB_ENV
+
+          echo "CONDA_ENV_FILE=ci/requirements/environment.yaml" >> $GITHUB_ENV
+
+      - name: Setup micromamba
+        uses: mamba-org/setup-micromamba@v2
+        with:
+          environment-file: ${{ env.CONDA_ENV_FILE }}
+          environment-name: xarray-safe-s1-tests
+          cache-environment: true
+          cache-environment-key: "${{runner.os}}-${{runner.arch}}-py${{matrix.python-version}}-${{env.TODAY}}-${{hashFiles(env.CONDA_ENV_FILE)}}"
+          create-args: >-
+            python=${{matrix.python-version}}
+
+      - name: Install xarray-safe-s1
+        run: |
+          python -m pip install --no-deps -e .
+
+      - name: Import xarray-safe-s1
+        run: |
+          python -c "import safe_s1"
+
+      - name: Run tests
+        run: |
+          python -m pytest --cov=safe_s1


=====================================
.github/workflows/publish.yml
=====================================
@@ -9,14 +9,14 @@ jobs:
     name: Publish to PyPI
     runs-on: ubuntu-latest
     permissions:
-      contents: 'read'
-      id-token: 'write'
+      contents: "read"
+      id-token: "write"
     steps:
       - uses: actions/checkout@v4
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: '3.x'
+          python-version: "3.x"
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip build twine
@@ -32,4 +32,4 @@ jobs:
         with:
           password: ${{ secrets.pypi_token }}
           repository_url: https://upload.pypi.org/legacy/
-          verify_metadata: true
\ No newline at end of file
+          verify_metadata: true


=====================================
.github/workflows/upstream-dev.yaml
=====================================
@@ -0,0 +1,98 @@
+name: upstream-dev CI
+
+on:
+  push:
+    branches: [main]
+  pull_request:
+    branches: [main]
+  schedule:
+    - cron: "0 18 * * 0" # Weekly "On Sundays at 18:00" UTC
+  workflow_dispatch:
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  detect-test-upstream-trigger:
+    name: "Detect CI Trigger: [test-upstream]"
+    if: github.event_name == 'push' || github.event_name == 'pull_request'
+    runs-on: ubuntu-latest
+    outputs:
+      triggered: ${{ steps.detect-trigger.outputs.trigger-found }}
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 2
+      - uses: xarray-contrib/ci-trigger@v1.2
+        id: detect-trigger
+        with:
+          keyword: "[test-upstream]"
+
+  upstream-dev:
+    name: upstream-dev
+    runs-on: ubuntu-latest
+    needs: detect-test-upstream-trigger
+
+    if: |
+      always()
+      && github.repository == 'umr-lops/xarray-safe-s1'
+      && (
+        github.event_name == 'schedule'
+        || github.event_name == 'workflow_dispatch'
+        || needs.detect-test-upstream-trigger.outputs.triggered == 'true'
+        || contains(github.event.pull_request.labels.*.name, 'run-upstream')
+      )
+
+    defaults:
+      run:
+        shell: bash -l {0}
+
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.12"]
+
+    steps:
+      - name: checkout the repository
+        uses: actions/checkout@v4
+        with:
+          # need to fetch all tags to get a correct version
+          fetch-depth: 0 # fetch all branches and tags
+
+      - name: set up conda environment
+        uses: mamba-org/setup-micromamba@v1
+        with:
+          environment-file: ci/requirements/environment.yaml
+          environment-name: tests
+          create-args: >-
+            python=${{ matrix.python-version }}
+            pytest-reportlog
+
+      - name: install upstream-dev dependencies
+        run: bash ci/install-upstream-dev.sh
+
+      - name: install the package
+        run: python -m pip install --no-deps -e .
+
+      - name: show versions
+        run: python -m pip list
+
+      - name: import
+        run: |
+          python -c 'import safe_s1'
+
+      - name: run tests
+        if: success()
+        id: status
+        run: |
+          python -m pytest -rf --report-log=pytest-log.jsonl
+
+      - name: report failures
+        if: |
+          failure()
+          && steps.status.outcome == 'failure'
+          && github.event_name == 'schedule'
+        uses: xarray-contrib/issue-from-pytest-log@v1
+        with:
+          log-path: pytest-log.jsonl


=====================================
ci/install-upstream-dev.sh
=====================================
@@ -0,0 +1,13 @@
+#!/usr/bin/env bash
+
+conda remove -y --force cytoolz numpy xarray toolz python-dateutil
+python -m pip install \
+  -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple \
+  --no-deps \
+  --pre \
+  --upgrade \
+  numpy \
+  xarray
+python -m pip install --upgrade \
+  git+https://github.com/pytoolz/toolz \
+  git+https://github.com/dateutil/dateutil
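
After this script runs, numpy and xarray come from the scientific-python
nightly wheels and toolz/dateutil from their git heads. A quick sanity
check (the version strings are illustrative, not exact):

    import numpy
    import xarray

    # nightly wheels carry ".dev" pre-release tags, e.g. "2.x.0.dev0+..."
    print(numpy.__version__)
    print(xarray.__version__)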


=====================================
ci/requirements/environment.yaml
=====================================
@@ -2,7 +2,7 @@ name: xarray-safe-s1-tests
 channels:
   - conda-forge
 dependencies:
-  - python=3.10
+  - python
   # development
   - ipython
   - pre-commit
@@ -14,6 +14,7 @@ dependencies:
   # testing
   - pytest
   - pytest-reportlog
+  - pytest-cov
   - hypothesis
   - coverage
   # I/O
@@ -29,12 +30,9 @@ dependencies:
   - numpy
   - pandas
   - shapely
-  - datetime
   - geopandas
   - affine
   - pyproj
   # processing
-  - os
-  - io
   - lxml
   - jmespath
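
The dropped entries (datetime, os, io) are Python standard-library modules,
not conda packages, so listing them as dependencies either fails to solve
or pulls in unrelated packages of the same name. A quick check, assuming
Python >= 3.10:

    import sys

    # sys.stdlib_module_names is available since Python 3.10
    for mod in ("os", "io", "datetime"):
        print(mod, mod in sys.stdlib_module_names)  # all True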


=====================================
docs/api.rst
=====================================
@@ -8,4 +8,3 @@ API reference
 
     .. autoclass:: Sentinel1Reader
         :members:
-


=====================================
docs/conf.py
=====================================
@@ -16,16 +16,16 @@ root_doc = "index"
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
 # ones.
 extensions = [
-    'sphinx.ext.autosummary',
-    'sphinx.ext.autodoc',
+    "sphinx.ext.autosummary",
+    "sphinx.ext.autodoc",
     "myst_parser",
     "sphinx.ext.extlinks",
     "sphinx.ext.intersphinx",
     "IPython.sphinxext.ipython_directive",
     "IPython.sphinxext.ipython_console_highlighting",
-    'nbsphinx',
-    'jupyter_sphinx',
-    'sphinx.ext.napoleon'
+    "nbsphinx",
+    "jupyter_sphinx",
+    "sphinx.ext.napoleon",
 ]
 
 extlinks = {
@@ -36,9 +36,9 @@ extlinks = {
 # Add any paths that contain templates here, relative to this directory.
 templates_path = ["_templates"]
 
-html_static_path = ['_static']
+html_static_path = ["_static"]
 
-html_style = 'css/xsar.css'
+html_style = "css/xsar.css"
 
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
@@ -84,12 +84,12 @@ intersphinx_mapping = {
     "dask": ("https://docs.dask.org/en/latest", None),
     "xarray": ("https://docs.xarray.dev/en/latest/", None),
     "rasterio": ("https://rasterio.readthedocs.io/en/latest/", None),
-    "datatree": ("https://xarray-datatree.readthedocs.io/en/latest/", None)
+    "datatree": ("https://xarray-datatree.readthedocs.io/en/latest/", None),
 }
 
 html_theme_options = {
-    'navigation_depth': 4,  # FIXME: doesn't work as expeted: should expand side menu
-    'collapse_navigation': False # FIXME: same as above
+    "navigation_depth": 4,  # FIXME: doesn't work as expeted: should expand side menu
+    "collapse_navigation": False,  # FIXME: same as above
 }
 
 # If true, links to the reST sources are added to the pages.
@@ -97,8 +97,8 @@ html_show_sourcelink = False
 
 nbsphinx_allow_errors = False
 
-nbsphinx_execute = 'always'
+nbsphinx_execute = "always"
 
 nbsphinx_timeout = 300
 
-today_fmt = '%b %d %Y at %H:%M'
+today_fmt = "%b %d %Y at %H:%M"


=====================================
docs/index.rst
=====================================
@@ -79,4 +79,4 @@ Last documentation build: |today|
 .. _xarray.Dataset: http://xarray.pydata.org/en/stable/generated/xarray.Dataset.html
 .. _`recommended installation`: installing.rst#recommended-packages
 .. _SAFE format: https://sentinel.esa.int/web/sentinel/user-guides/sentinel-1-sar/data-formats
-.. _jupyter notebook: https://jupyter.readthedocs.io/en/latest/running.html#running
\ No newline at end of file
+.. _jupyter notebook: https://jupyter.readthedocs.io/en/latest/running.html#running


=====================================
docs/installing.rst
=====================================
@@ -45,4 +45,4 @@ Pytest configuration
 Pytest uses a default configuration file (`config.yml`) in which we can find the product paths to test.
 This configuration can be superseded by adding a local config file in the home directory:
 (`~/xarray-safe-s1/localconfig.yml`).
-In this file, testing files can be listed in the var `product_paths`.
\ No newline at end of file
+In this file, testing files can be listed in the var `product_paths`.


=====================================
pyproject.toml
=====================================
@@ -37,3 +37,36 @@ skip_gitignore = true
 float_to_top = true
 default_section = "THIRDPARTY"
 known_first_party = "safe_s1"
+
+[tool.coverage.report]
+show_missing = true
+exclude_lines = ["pragma: no cover", "if TYPE_CHECKING"]
+
+[tool.ruff.lint]
+ignore = [
+  "E402",  # module level import not at top of file
+  "E501",  # line too long - let black worry about that
+  "E731",  # do not assign a lambda expression, use a def
+  "UP038", # type union instead of tuple for isinstance etc
+]
+select = [
+  "F",   # Pyflakes
+  "E",   # Pycodestyle
+  "I",   # isort
+  "UP",  # Pyupgrade
+  "TID", # flake8-tidy-imports
+  "W",
+]
+extend-safe-fixes = [
+  "TID252", # absolute imports
+  "UP031",  # percent string interpolation
+]
+fixable = ["I", "TID252", "UP"]
+
+[tool.ruff.lint.isort]
+known-first-party = ["safe_s1"]
+known-third-party = ["xarray", "toolz", "construct"]
+
+[tool.ruff.lint.flake8-tidy-imports]
+# Disallow all relative imports.
+ban-relative-imports = "all"
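
With ban-relative-imports = "all", ruff's TID252 rule flags every relative
import inside the package, and since TID252 is listed in extend-safe-fixes
it can be rewritten automatically to the absolute form. Illustration (the
commented-out line is what gets flagged):

    # from .reader import Sentinel1Reader    # flagged by TID252
    from safe_s1.reader import Sentinel1Reader  # absolute import, as required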


=====================================
safe_s1/__init__.py
=====================================
@@ -1,12 +1,12 @@
 import traceback
-#import safe_s1
 from safe_s1.reader import Sentinel1Reader
+
 try:
     from importlib import metadata
-except ImportError: # for Python<3.8
+except ImportError:  # for Python<3.8
     import importlib_metadata as metadata
-try: 
+try:
     __version__ = metadata.version("xarray-safe-s1")
 except Exception:
-    print('trace',traceback.format_exc())
+    print("trace", traceback.format_exc())
     __version__ = "999"


=====================================
safe_s1/config.yml
=====================================
@@ -1,3 +1,3 @@
 # default data paths for tests
 product_paths:
-  - 'S1A_IW_GRDH_1SDV_20170907T103020_20170907T103045_018268_01EB76_Z010.SAFE'
\ No newline at end of file
+  - "S1A_IW_GRDH_1SDV_20170907T103020_20170907T103045_018268_01EB76_Z010.SAFE"


=====================================
safe_s1/getconfig.py
=====================================
@@ -1,23 +1,30 @@
-import yaml
-import os
 import logging
-import safe_s1
+import os
 from pathlib import Path
+
+import yaml
+
+import safe_s1
+
+
 # determine the config file we will use (config.yml by default, and a local config if one is present) and retrieve
 # the products names
 def get_config():
-    local_config_pontential_path = os.path.join(os.path.dirname(safe_s1.__file__), 'localconfig.yml')
-    logging.info('potential local config: %s',local_config_pontential_path)
-    #local_config_pontential_path = Path(os.path.join('~', 'xarray-safe-s1', 'localconfig.yml')).expanduser()
+    local_config_pontential_path = os.path.join(
+        os.path.dirname(safe_s1.__file__), "localconfig.yml"
+    )
+    logging.info("potential local config: %s", local_config_pontential_path)
+    # local_config_pontential_path = Path(os.path.join('~', 'xarray-safe-s1', 'localconfig.yml')).expanduser()
     if os.path.exists(local_config_pontential_path):
-        logging.info('localconfig used')
+        logging.info("localconfig used")
         config_path = local_config_pontential_path
         with open(config_path) as config_content:
             conf = yaml.load(config_content, Loader=yaml.SafeLoader)
     else:
-        logging.info('default config')
-        config_path = Path(os.path.join(os.path.dirname(safe_s1.__file__), 'config.yml'))
+        logging.info("default config")
+        config_path = Path(
+            os.path.join(os.path.dirname(safe_s1.__file__), "config.yml")
+        )
         with open(config_path) as config_content:
             conf = yaml.load(config_content, Loader=yaml.SafeLoader)
     return conf
-
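
Usage sketch: get_config() returns the parsed YAML mapping, so with the
default config.yml above the test products come back under the
product_paths key:

    from safe_s1.getconfig import get_config

    conf = get_config()
    # one entry with the default config.yml shipped in the package
    print(conf["product_paths"])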


=====================================
safe_s1/reader.py
=====================================
@@ -1,50 +1,49 @@
+import logging
 import os
 import re
-import pdb
+
 import dask
 import fsspec
 import numpy as np
+import pandas as pd
 import rasterio
+import xarray as xr
 import yaml
 from affine import Affine
 from rioxarray import rioxarray
-import logging
+
 from safe_s1 import sentinel1_xml_mappings
 from safe_s1.xml_parser import XmlParser
-import xarray as xr
-import pandas as pd
-import warnings
 
 
 class Sentinel1Reader:
-
     def __init__(self, name, backend_kwargs=None):
-        logging.debug('input name: %s',name)
+        logging.debug("input name: %s", name)
         if not isinstance(name, (str, os.PathLike)):
-           raise ValueError(f"cannot deal with object of type {type(name)}: {name}")
+            raise ValueError(f"cannot deal with object of type {type(name)}: {name}")
         # gdal dataset name
-        if not name.startswith('SENTINEL1_DS:'):
-            name = 'SENTINEL1_DS:%s:' % name
+        if not name.startswith("SENTINEL1_DS:"):
+            name = "SENTINEL1_DS:%s:" % name
         self.name = name
         """Gdal dataset name"""
-        name_parts = self.name.split(':')
+        name_parts = self.name.split(":")
         if len(name_parts) > 3:
-            logging.debug('windows case')
+            logging.debug("windows case")
             # windows might have a colon in the path ('c:\...')
-            name_parts[1] = ':'.join(name_parts[1:-1])
+            name_parts[1] = ":".join(name_parts[1:-1])
             del name_parts[2:-1]
         name_parts[1] = os.path.basename(name_parts[1])
-        self.short_name = ':'.join(name_parts)
-        logging.debug('short_name : %s',self.short_name)
+        self.short_name = ":".join(name_parts)
+        logging.debug("short_name : %s", self.short_name)
         """Like name, but without path"""
         if len(name_parts) == 2:
-            self.path = self.name.split(':')[1]
+            self.path = self.name.split(":")[1]
         else:
-            self.path = ':'.join(self.name.split(':')[1:-1])
-        logging.debug('path: %s',self.path)
+            self.path = ":".join(self.name.split(":")[1:-1])
+        logging.debug("path: %s", self.path)
         # remove trailing slash in the safe path
-        if self.path[-1]=='/':
-            self.path = self.path.rstrip('/')
+        if self.path[-1] == "/":
+            self.path = self.path.rstrip("/")
         """Dataset path"""
         self.safe = os.path.basename(self.path)
 
@@ -60,30 +59,34 @@ class Sentinel1Reader:
             xpath_mappings=sentinel1_xml_mappings.xpath_mappings,
             compounds_vars=sentinel1_xml_mappings.compounds_vars,
             namespaces=sentinel1_xml_mappings.namespaces,
-            mapper=mapper
+            mapper=mapper,
         )
 
-        self.manifest = 'manifest.safe'
-        if 'SLC' in self.path or 'GRD' in self.path:
-            self.manifest_attrs = self.xml_parser.get_compound_var(self.manifest, 'safe_attributes_slcgrd')
-        elif 'SL2' in self.path:
-            self.manifest_attrs = self.xml_parser.get_compound_var(self.manifest, 'safe_attributes_sl2')
+        self.manifest = "manifest.safe"
+        if "SLC" in self.path or "GRD" in self.path:
+            self.manifest_attrs = self.xml_parser.get_compound_var(
+                self.manifest, "safe_attributes_slcgrd"
+            )
+        elif "SL2" in self.path:
+            self.manifest_attrs = self.xml_parser.get_compound_var(
+                self.manifest, "safe_attributes_sl2"
+            )
         else:
-            raise Exception('case not handled')
+            raise Exception("case not handled")
 
         self._safe_files = None
         self._multidataset = False
         """True if multi dataset"""
-        self._datasets_names = list(self.safe_files['dsid'].sort_index().unique())
+        self._datasets_names = list(self.safe_files["dsid"].sort_index().unique())
         self.xsd_definitions = self.get_annotation_definitions()
-        if self.name.endswith(':') and len(self._datasets_names) == 1:
+        if self.name.endswith(":") and len(self._datasets_names) == 1:
             self.name = self._datasets_names[0]
-        self.dsid = self.name.split(':')[-1]
+        self.dsid = self.name.split(":")[-1]
         """Dataset identifier (like 'WV_001', 'IW1', 'IW'), or empty string for multidataset"""
 
         try:
-            self.product = os.path.basename(self.path).split('_')[2]
-        except:
+            self.product = os.path.basename(self.path).split("_")[2]
+        except IndexError:
             print("path: %s" % self.path)
             self.product = "XXX"
         """Product type, like 'GRDH', 'SLC', etc .."""
@@ -91,36 +94,38 @@ class Sentinel1Reader:
         # submeta is a list of submeta objects if multidataset and TOPS
         # this list will remain empty for _WV__SLC because it will be time-consuming to process them
         # self._submeta = []
-        if self.short_name.endswith(':'):
+        if self.short_name.endswith(":"):
             self.short_name = self.short_name + self.dsid
         if self.files.empty:
             self._multidataset = True
 
         self.dt = None
         self._dict = {
-            'geolocationGrid': None,
+            "geolocationGrid": None,
         }
         if not self.multidataset:
             self._dict = {
-                'geolocationGrid': self.geoloc,
-                'orbit': self.orbit,
-                'image': self.image,
-                'azimuth_fmrate': self.azimuth_fmrate,
-                'doppler_estimate': self.doppler_estimate,
-                'bursts': self.bursts,
-                'calibration_luts': self.get_calibration_luts,
-                'noise_azimuth_raw': self.get_noise_azi_raw,
-                'noise_range_raw': self.get_noise_range_raw,
-                'antenna_pattern':self.antenna_pattern,
-                'swath_merging': self.swath_merging
+                "geolocationGrid": self.geoloc,
+                "orbit": self.orbit,
+                "image": self.image,
+                "azimuth_fmrate": self.azimuth_fmrate,
+                "doppler_estimate": self.doppler_estimate,
+                "bursts": self.bursts,
+                "calibration_luts": self.get_calibration_luts,
+                "noise_azimuth_raw": self.get_noise_azi_raw,
+                "noise_range_raw": self.get_noise_range_raw,
+                "antenna_pattern": self.antenna_pattern,
+                "swath_merging": self.swath_merging,
             }
             self.dt = xr.DataTree.from_dict(self._dict)
-            assert self.dt==self.datatree
+            assert self.dt == self.datatree
         else:
-            print('multidataset')
+            print("multidataset")
             # there is no error raised here, because we want to let the user access the metadata for multidatasets
 
-    def load_digital_number(self, resolution=None, chunks=None, resampling=rasterio.enums.Resampling.rms):
+    def load_digital_number(
+        self, resolution=None, chunks=None, resampling=rasterio.enums.Resampling.rms
+    ):
         """
         load digital_number from self.sar_meta.files['measurement'], as an `xarray.Dataset`.
 
@@ -138,63 +143,67 @@ class Sentinel1Reader:
         def get_glob(strlist):
             # from list of str, replace diff by '?'
             def _get_glob(st):
-                stglob = ''.join(
+                stglob = "".join(
                     [
-                        '?' if len(charlist) > 1 else charlist[0]
+                        "?" if len(charlist) > 1 else charlist[0]
                         for charlist in [list(set(charset)) for charset in zip(*st)]
                     ]
                 )
-                return re.sub(r'\?+', '*', stglob)
+                return re.sub(r"\?+", "*", stglob)
 
             strglob = _get_glob(strlist)
-            if strglob.endswith('*'):
+            if strglob.endswith("*"):
                 strglob += _get_glob(s[::-1] for s in strlist)[::-1]
-                strglob = strglob.replace('**', '*')
+                strglob = strglob.replace("**", "*")
 
             return strglob
 
-        map_dims = {
-            'pol': 'band',
-            'line': 'y',
-            'sample': 'x'
-        }
+        map_dims = {"pol": "band", "line": "y", "sample": "x"}
 
         _dtypes = {
-            'latitude': 'f4',
-            'longitude': 'f4',
-            'incidence': 'f4',
-            'elevation': 'f4',
-            'altitude': 'f4',
-            'ground_heading': 'f4',
-            'nesz': None,
-            'negz': None,
-            'sigma0_raw': None,
-            'gamma0_raw': None,
-            'noise_lut': 'f4',
-            'noise_lut_range': 'f4',
-            'noise_lut_azi': 'f4',
-            'sigma0_lut': 'f8',
-            'gamma0_lut': 'f8',
-            'azimuth_time': np.datetime64,
-            'slant_range_time': None
+            "latitude": "f4",
+            "longitude": "f4",
+            "incidence": "f4",
+            "elevation": "f4",
+            "altitude": "f4",
+            "ground_heading": "f4",
+            "nesz": None,
+            "negz": None,
+            "sigma0_raw": None,
+            "gamma0_raw": None,
+            "noise_lut": "f4",
+            "noise_lut_range": "f4",
+            "noise_lut_azi": "f4",
+            "sigma0_lut": "f8",
+            "gamma0_lut": "f8",
+            "azimuth_time": np.datetime64,
+            "slant_range_time": None,
         }
 
         if resolution is not None:
             comment = 'resampled at "%s" with %s.%s.%s' % (
-                resolution, resampling.__module__, resampling.__class__.__name__, resampling.name)
+                resolution,
+                resampling.__module__,
+                resampling.__class__.__name__,
+                resampling.name,
+            )
         else:
-            comment = 'read at full resolution'
+            comment = "read at full resolution"
 
         # Add root to path
-        files_measurement = self.files['measurement'].copy()
+        files_measurement = self.files["measurement"].copy()
         files_measurement = [os.path.join(self.path, f) for f in files_measurement]
 
         # arbitrary rio object, to get shape, etc ... (will not be used to read data)
         rio = rasterio.open(files_measurement[0])
 
-        chunks['pol'] = 1
+        chunks["pol"] = 1
         # sort chunks keys like map_dims
-        chunks = dict(sorted(chunks.items(), key=lambda pair: list(map_dims.keys()).index(pair[0])))
+        chunks = dict(
+            sorted(
+                chunks.items(), key=lambda pair: list(map_dims.keys()).index(pair[0])
+            )
+        )
         chunks_rio = {map_dims[d]: chunks[d] for d in map_dims.keys()}
         res = None
         if resolution is None:
@@ -205,39 +214,49 @@ class Sentinel1Reader:
                 [
                     rioxarray.open_rasterio(
                         f, chunks=chunks_rio, parse_coordinates=False
-                    ) for f in files_measurement
-                ], 'band'
-            ).assign_coords(band=np.arange(len(self.manifest_attrs['polarizations'])) + 1)
+                    )
+                    for f in files_measurement
+                ],
+                "band",
+            ).assign_coords(
+                band=np.arange(len(self.manifest_attrs["polarizations"])) + 1
+            )
 
             # set dimensions names
             dn = dn.rename(dict(zip(map_dims.values(), map_dims.keys())))
 
             # create coordinates from dimension index (because of parse_coordinates=False)
-            dn = dn.assign_coords({'line': dn.line, 'sample': dn.sample})
-            dn = dn.drop_vars('spatial_ref', errors='ignore')
+            dn = dn.assign_coords({"line": dn.line, "sample": dn.sample})
+            dn = dn.drop_vars("spatial_ref", errors="ignore")
         else:
             if not isinstance(resolution, dict):
-                if isinstance(resolution, str) and resolution.endswith('m'):
+                if isinstance(resolution, str) and resolution.endswith("m"):
                     resolution = float(resolution[:-1])
                     res = resolution
-                resolution = dict(line=resolution / self.pixel_line_m,
-                                  sample=resolution / self.pixel_sample_m)
+                resolution = dict(
+                    line=resolution / self.pixel_line_m,
+                    sample=resolution / self.pixel_sample_m,
+                )
                 # resolution = dict(line=resolution / self.dataset['sampleSpacing'].values,
                 #                   sample=resolution / self.dataset['lineSpacing'].values)
 
             # resample the DN at gdal level, before feeding it to the dataset
             out_shape = (
-                int(rio.height / resolution['line']),
-                int(rio.width / resolution['sample'])
+                int(rio.height / resolution["line"]),
+                int(rio.width / resolution["sample"]),
             )
             out_shape_pol = (1,) + out_shape
             # read resampled array in one chunk, and rechunk
             # this doesn't optimize memory, but total size remain quite small
 
-            if isinstance(resolution['line'], int):
+            if isinstance(resolution["line"], int):
+                # legacy behaviour: winsize is the maximum full image size that can be divided by resolution (int)
-                winsize = (0, 0, rio.width // resolution['sample'] * resolution['sample'],
-                           rio.height // resolution['line'] * resolution['line'])
+                winsize = (
+                    0,
+                    0,
+                    rio.width // resolution["sample"] * resolution["sample"],
+                    rio.height // resolution["line"] * resolution["line"],
+                )
                 window = rasterio.windows.Window(*winsize)
             else:
                 window = None
@@ -249,43 +268,50 @@ class Sentinel1Reader:
                             rasterio.open(f).read(
                                 out_shape=out_shape_pol,
                                 resampling=resampling,
-                                window=window
+                                window=window,
                             ),
-                            chunks=chunks_rio
+                            chunks=chunks_rio,
                         ),
-                        dims=tuple(map_dims.keys()), coords={'pol': [pol]}
-                    ) for f, pol in
-                    zip(files_measurement, self.manifest_attrs['polarizations'])
+                        dims=tuple(map_dims.keys()),
+                        coords={"pol": [pol]},
+                    )
+                    for f, pol in zip(
+                        files_measurement, self.manifest_attrs["polarizations"]
+                    )
                 ],
-                'pol'
+                "pol",
             ).chunk(chunks)
 
             # create coordinates at box center
-            translate = Affine.translation((resolution['sample'] - 1) / 2, (resolution['line'] - 1) / 2)
+            translate = Affine.translation(
+                (resolution["sample"] - 1) / 2, (resolution["line"] - 1) / 2
+            )
             scale = Affine.scale(
-                rio.width // resolution['sample'] * resolution['sample'] / out_shape[1],
-                rio.height // resolution['line'] * resolution['line'] / out_shape[0])
+                rio.width // resolution["sample"] * resolution["sample"] / out_shape[1],
+                rio.height // resolution["line"] * resolution["line"] / out_shape[0],
+            )
             sample, _ = translate * scale * (dn.sample, 0)
             _, line = translate * scale * (0, dn.line)
-            dn = dn.assign_coords({'line': line, 'sample': sample})
+            dn = dn.assign_coords({"line": line, "sample": sample})
 
         # for GTiff driver, pols are already ordered. just rename them
-        dn = dn.assign_coords(pol=self.manifest_attrs['polarizations'])
+        dn = dn.assign_coords(pol=self.manifest_attrs["polarizations"])
 
         if not all(self.denoised.values()):
-            descr = 'denoised'
+            descr = "denoised"
         else:
-            descr = 'not denoised'
-        var_name = 'digital_number'
+            descr = "not denoised"
+        var_name = "digital_number"
 
         dn.attrs = {
-            'comment': '%s digital number, %s' % (descr, comment),
-            'history': yaml.safe_dump(
+            "comment": "%s digital number, %s" % (descr, comment),
+            "history": yaml.safe_dump(
                 {
                     var_name: get_glob(
-                        [p.replace(self.path + '/', '') for p in files_measurement])
+                        [p.replace(self.path + "/", "") for p in files_measurement]
+                    )
                 }
-            )
+            ),
         }
         ds = dn.to_dataset(name=var_name)
         astype = _dtypes.get(var_name)
@@ -307,7 +333,7 @@ class Sentinel1Reader:
         if self.multidataset:
             res = None  # not defined for multidataset
         else:
-            res = self.image['azimuthPixelSpacing']
+            res = self.image["azimuthPixelSpacing"]
         return res
 
     @property
@@ -323,7 +349,7 @@ class Sentinel1Reader:
         if self.multidataset:
             res = None  # not defined for multidataset
         else:
-            res = self.image['groundRangePixelSpacing']
+            res = self.image["groundRangePixelSpacing"]
         return res
 
     @property
@@ -364,18 +390,25 @@ class Sentinel1Reader:
             Geolocation Grid
         """
         if self.multidataset:
-            raise TypeError('geolocation_grid not available for multidataset')
-        if self._dict['geolocationGrid'] is None:
-            xml_annotation = self.files['annotation'].iloc[0]
+            raise TypeError("geolocation_grid not available for multidataset")
+        if self._dict["geolocationGrid"] is None:
+            xml_annotation = self.files["annotation"].iloc[0]
             da_var_list = []
-            for var_name in ['longitude', 'latitude', 'height', 'azimuthTime', 'slantRangeTime', 'incidenceAngle',
-                             'elevationAngle']:
+            for var_name in [
+                "longitude",
+                "latitude",
+                "height",
+                "azimuthTime",
+                "slantRangeTime",
+                "incidenceAngle",
+                "elevationAngle",
+            ]:
                 # TODO: we should use dask.array.from_delayed so xml files are read on demand
                 da_var = self.xml_parser.get_compound_var(xml_annotation, var_name)
                 da_var.name = var_name
-                da_var.attrs['history'] = self.xml_parser.get_compound_var(self.files['annotation'].iloc[0],
-                                                                           var_name,
-                                                                           describe=True)
+                da_var.attrs["history"] = self.xml_parser.get_compound_var(
+                    self.files["annotation"].iloc[0], var_name, describe=True
+                )
                 da_var_list.append(da_var)
 
             return xr.merge(da_var_list)
@@ -399,12 +432,15 @@ class Sentinel1Reader:
         """
         if self.multidataset:
             return None  # not defined for multidataset
-        gdf_orbit = self.xml_parser.get_compound_var(self.files['annotation'].iloc[0], 'orbit')
+        gdf_orbit = self.xml_parser.get_compound_var(
+            self.files["annotation"].iloc[0], "orbit"
+        )
         for vv in gdf_orbit:
             if vv in self.xsd_definitions:
-                gdf_orbit[vv].attrs['definition'] = self.xsd_definitions[vv]
-        gdf_orbit.attrs['history'] = self.xml_parser.get_compound_var(self.files['annotation'].iloc[0], 'orbit',
-                                                                      describe=True)
+                gdf_orbit[vv].attrs["definition"] = self.xsd_definitions[vv]
+        gdf_orbit.attrs["history"] = self.xml_parser.get_compound_var(
+            self.files["annotation"].iloc[0], "orbit", describe=True
+        )
         return gdf_orbit
 
     @property
@@ -421,7 +457,11 @@ class Sentinel1Reader:
             return None  # not defined for multidataset
         else:
             return dict(
-                [self.xml_parser.get_compound_var(f, 'denoised') for f in self.files['annotation']])
+                [
+                    self.xml_parser.get_compound_var(f, "denoised")
+                    for f in self.files["annotation"]
+                ]
+            )
 
     @property
     def time_range(self):
@@ -433,7 +473,9 @@ class Sentinel1Reader:
 
         """
         if not self.multidataset:
-            return self.xml_parser.get_var(self.files['annotation'].iloc[0], 'annotation.line_time_range')
+            return self.xml_parser.get_var(
+                self.files["annotation"].iloc[0], "annotation.line_time_range"
+            )
 
     @property
     def image(self):
@@ -447,11 +489,15 @@ class Sentinel1Reader:
         """
         if self.multidataset:
             return None
-        img_dict = self.xml_parser.get_compound_var(self.files['annotation'].iloc[0], 'image')
-        img_dict['history'] = self.xml_parser.get_compound_var(self.files['annotation'].iloc[0], 'image', describe=True)
+        img_dict = self.xml_parser.get_compound_var(
+            self.files["annotation"].iloc[0], "image"
+        )
+        img_dict["history"] = self.xml_parser.get_compound_var(
+            self.files["annotation"].iloc[0], "image", describe=True
+        )
         for vv in img_dict:
             if vv in self.xsd_definitions:
-                img_dict[vv].attrs['definition'] = self.xsd_definitions[vv]
+                img_dict[vv].attrs["definition"] = self.xsd_definitions[vv]
         return img_dict
 
     @property
@@ -463,12 +509,15 @@ class Sentinel1Reader:
         xarray.Dataset
             Frequency Modulation rate annotations such as t0 (azimuth time reference) and polynomial coefficients: Azimuth FM rate = c0 + c1(tSR - t0) + c2(tSR - t0)^2
         """
-        fmrates = self.xml_parser.get_compound_var(self.files['annotation'].iloc[0], 'azimuth_fmrate')
-        fmrates.attrs['history'] = self.xml_parser.get_compound_var(self.files['annotation'].iloc[0], 'azimuth_fmrate',
-                                                                    describe=True)
+        fmrates = self.xml_parser.get_compound_var(
+            self.files["annotation"].iloc[0], "azimuth_fmrate"
+        )
+        fmrates.attrs["history"] = self.xml_parser.get_compound_var(
+            self.files["annotation"].iloc[0], "azimuth_fmrate", describe=True
+        )
         for vv in fmrates:
             if vv in self.xsd_definitions:
-                fmrates[vv].attrs['definition'] = self.xsd_definitions[vv]
+                fmrates[vv].attrs["definition"] = self.xsd_definitions[vv]
         return fmrates
 
     @property
@@ -480,12 +529,15 @@ class Sentinel1Reader:
         xarray.Dataset
             with Doppler Centroid Estimates from annotations such as geo_polynom, data_polynom or frequency
         """
-        dce = self.xml_parser.get_compound_var(self.files['annotation'].iloc[0], 'doppler_estimate')
+        dce = self.xml_parser.get_compound_var(
+            self.files["annotation"].iloc[0], "doppler_estimate"
+        )
         for vv in dce:
             if vv in self.xsd_definitions:
-                dce[vv].attrs['definition'] = self.xsd_definitions[vv]
-        dce.attrs['history'] = self.xml_parser.get_compound_var(self.files['annotation'].iloc[0], 'doppler_estimate',
-                                                                describe=True)
+                dce[vv].attrs["definition"] = self.xsd_definitions[vv]
+        dce.attrs["history"] = self.xml_parser.get_compound_var(
+            self.files["annotation"].iloc[0], "doppler_estimate", describe=True
+        )
         return dce
 
     @property
@@ -498,35 +550,51 @@ class Sentinel1Reader:
         xarray.Dataset
             Bursts information dataArrays
         """
-        if self.xml_parser.get_var(self.files['annotation'].iloc[0], 'annotation.number_of_bursts') > 0:
-            bursts = self.xml_parser.get_compound_var(self.files['annotation'].iloc[0], 'bursts')
+        if (
+            self.xml_parser.get_var(
+                self.files["annotation"].iloc[0], "annotation.number_of_bursts"
+            )
+            > 0
+        ):
+            bursts = self.xml_parser.get_compound_var(
+                self.files["annotation"].iloc[0], "bursts"
+            )
             for vv in bursts:
                 if vv in self.xsd_definitions:
-                    bursts[vv].attrs['definition'] = self.xsd_definitions[vv]
-            bursts.attrs['history'] = self.xml_parser.get_compound_var(self.files['annotation'].iloc[0], 'bursts',
-                                                                       describe=True)
+                    bursts[vv].attrs["definition"] = self.xsd_definitions[vv]
+            bursts.attrs["history"] = self.xml_parser.get_compound_var(
+                self.files["annotation"].iloc[0], "bursts", describe=True
+            )
             return bursts
         else:
-            bursts = self.xml_parser.get_compound_var(self.files['annotation'].iloc[0], 'bursts_grd')
-            bursts.attrs['history'] = self.xml_parser.get_compound_var(self.files['annotation'].iloc[0], 'bursts_grd',
-                                                                       describe=True)
+            bursts = self.xml_parser.get_compound_var(
+                self.files["annotation"].iloc[0], "bursts_grd"
+            )
+            bursts.attrs["history"] = self.xml_parser.get_compound_var(
+                self.files["annotation"].iloc[0], "bursts_grd", describe=True
+            )
             return bursts
 
     @property
     def antenna_pattern(self):
-        ds =  self.xml_parser.get_compound_var(self.files['annotation'].iloc[0], 'antenna_pattern')
-        ds.attrs['history'] = self.xml_parser.get_compound_var(self.files['annotation'].iloc[0], 'antenna_pattern',
-                                                               describe=True)
+        ds = self.xml_parser.get_compound_var(
+            self.files["annotation"].iloc[0], "antenna_pattern"
+        )
+        ds.attrs["history"] = self.xml_parser.get_compound_var(
+            self.files["annotation"].iloc[0], "antenna_pattern", describe=True
+        )
         return ds
 
     @property
     def swath_merging(self):
-        if 'GRD' in self.product:
-            
-            ds =  self.xml_parser.get_compound_var(self.files['annotation'].iloc[0], 'swath_merging')
-            ds.attrs['history'] = self.xml_parser.get_compound_var(self.files['annotation'].iloc[0], 'swath_merging',
-                                                               describe=True)
-        else :
+        if "GRD" in self.product:
+            ds = self.xml_parser.get_compound_var(
+                self.files["annotation"].iloc[0], "swath_merging"
+            )
+            ds.attrs["history"] = self.xml_parser.get_compound_var(
+                self.files["annotation"].iloc[0], "swath_merging", describe=True
+            )
+        else:
             ds = xr.Dataset()
         return ds
 
@@ -551,14 +619,16 @@ class Sentinel1Reader:
             annotations definitions
         """
         final_dict = {}
-        ds_path_xsd = self.xml_parser.get_compound_var(self.manifest, 'xsd_files')
-        path_xsd = ds_path_xsd['xsd_product'].values[0]
+        ds_path_xsd = self.xml_parser.get_compound_var(self.manifest, "xsd_files")
+        path_xsd = ds_path_xsd["xsd_product"].values[0]
         full_path_xsd = os.path.join(self.path, path_xsd)
         if os.path.exists(full_path_xsd):
             rootxsd = self.xml_parser.getroot(path_xsd)
-            mypath = '/xsd:schema/xsd:complexType/xsd:sequence/xsd:element'
+            mypath = "/xsd:schema/xsd:complexType/xsd:sequence/xsd:element"
 
-            for lulu, uu in enumerate(rootxsd.xpath(mypath, namespaces=sentinel1_xml_mappings.namespaces)):
+            for lulu, uu in enumerate(
+                rootxsd.xpath(mypath, namespaces=sentinel1_xml_mappings.namespaces)
+            ):
                 mykey = uu.values()[0]
                 if uu.getchildren() != []:
                     myvalue = uu.getchildren()[0].getchildren()[0]
@@ -578,19 +648,21 @@ class Sentinel1Reader:
         xarray.Dataset
             Original sigma0 and gamma0 calibration Look Up Tables
         """
-        #sigma0_lut = self.xml_parser.get_var(self.files['calibration'].iloc[0], 'calibration.sigma0_lut',describe=True)
+        # sigma0_lut = self.xml_parser.get_var(self.files['calibration'].iloc[0], 'calibration.sigma0_lut',describe=True)
         pols = []
         tmp = []
-        for pol_code, xml_file in self.files['calibration'].items():
-            luts_ds = self.xml_parser.get_compound_var(xml_file, 'luts_raw')
+        for pol_code, xml_file in self.files["calibration"].items():
+            luts_ds = self.xml_parser.get_compound_var(xml_file, "luts_raw")
             # add history to attributes
-            minifile = re.sub('.*SAFE/', '', xml_file)
-            minifile = re.sub(r'-.*\.xml', '.xml', minifile)
+            minifile = re.sub(".*SAFE/", "", xml_file)
+            minifile = re.sub(r"-.*\.xml", ".xml", minifile)
             for da in luts_ds:
-                histo = self.xml_parser.get_var(xml_file, f"calibration.{da}", describe=True)
-                luts_ds[da].attrs['history'] = yaml.safe_dump({da: {minifile: histo}})
+                histo = self.xml_parser.get_var(
+                    xml_file, f"calibration.{da}", describe=True
+                )
+                luts_ds[da].attrs["history"] = yaml.safe_dump({da: {minifile: histo}})
 
-            pol = os.path.basename(xml_file).split('-')[4].upper()
+            pol = os.path.basename(xml_file).split("-")[4].upper()
             pols.append(pol)
             tmp.append(luts_ds)
         ds = xr.concat(tmp, pd.Index(pols, name="pol"))
@@ -611,33 +683,51 @@ class Sentinel1Reader:
         tmp = []
         pols = []
         history = []
-        for pol_code, xml_file in self.files['noise'].items():
-            pol = os.path.basename(xml_file).split('-')[4].upper()
+        for pol_code, xml_file in self.files["noise"].items():
+            pol = os.path.basename(xml_file).split("-")[4].upper()
             pols.append(pol)
-            if self.product == 'SLC' or self.product == 'SL2':
-                noise_lut_azi_raw_ds = self.xml_parser.get_compound_var(xml_file, 'noise_lut_azi_raw_slc')
-                history.append(self.xml_parser.get_compound_var(xml_file, 'noise_lut_azi_raw_slc', describe=True))
+            if self.product == "SLC" or self.product == "SL2":
+                noise_lut_azi_raw_ds = self.xml_parser.get_compound_var(
+                    xml_file, "noise_lut_azi_raw_slc"
+                )
+                history.append(
+                    self.xml_parser.get_compound_var(
+                        xml_file, "noise_lut_azi_raw_slc", describe=True
+                    )
+                )
             else:
-                noise_lut_azi_raw_ds = self.xml_parser.get_compound_var(xml_file, 'noise_lut_azi_raw_grd')
-                #noise_lut_azi_raw_ds.attrs[f'raw_azi_lut_{pol}'] = \
+                noise_lut_azi_raw_ds = self.xml_parser.get_compound_var(
+                    xml_file, "noise_lut_azi_raw_grd"
+                )
+                # noise_lut_azi_raw_ds.attrs[f'raw_azi_lut_{pol}'] = \
                 #    self.xml_parser.get_var(xml_file, 'noise.azi.noiseLut')
-                history.append(self.xml_parser.get_compound_var(xml_file, 'noise_lut_azi_raw_grd', describe=True))
+                history.append(
+                    self.xml_parser.get_compound_var(
+                        xml_file, "noise_lut_azi_raw_grd", describe=True
+                    )
+                )
             for vari in noise_lut_azi_raw_ds:
-                if 'noise_lut' in vari:
-                    varitmp = 'noiseLut'
-                    hihi = self.xml_parser.get_var(self.files['noise'].iloc[0], 'noise.azi.%s' % varitmp,
-                                                   describe=True)
-                elif vari == 'noise_lut' and self.product=='WV': #WV case
-                    hihi = 'dummy variable, noise is not defined in azimuth for WV acquisitions'
+                if "noise_lut" in vari:
+                    varitmp = "noiseLut"
+                    hihi = self.xml_parser.get_var(
+                        self.files["noise"].iloc[0],
+                        "noise.azi.%s" % varitmp,
+                        describe=True,
+                    )
+                elif vari == "noise_lut" and self.product == "WV":  # WV case
+                    hihi = "dummy variable, noise is not defined in azimuth for WV acquisitions"
                 else:
                     varitmp = vari
-                    hihi = self.xml_parser.get_var(self.files['noise'].iloc[0], 'noise.azi.%s' % varitmp,
-                                                   describe=True)
+                    hihi = self.xml_parser.get_var(
+                        self.files["noise"].iloc[0],
+                        "noise.azi.%s" % varitmp,
+                        describe=True,
+                    )
 
-                noise_lut_azi_raw_ds[vari].attrs['description'] = hihi
+                noise_lut_azi_raw_ds[vari].attrs["description"] = hihi
             tmp.append(noise_lut_azi_raw_ds)
         ds = xr.concat(tmp, pd.Index(pols, name="pol"))
-        ds.attrs['history'] = '\n'.join(history)
+        ds.attrs["history"] = "\n".join(history)
         return ds
 
     @property
@@ -653,21 +743,30 @@ class Sentinel1Reader:
         tmp = []
         pols = []
         history = []
-        for pol_code, xml_file in self.files['noise'].items():
-            #pol = self.files['polarization'].cat.categories[pol_code - 1]
-            pol = os.path.basename(xml_file).split('-')[4].upper()
+        for pol_code, xml_file in self.files["noise"].items():
+            # pol = self.files['polarization'].cat.categories[pol_code - 1]
+            pol = os.path.basename(xml_file).split("-")[4].upper()
             pols.append(pol)
-            noise_lut_range_raw_ds = self.xml_parser.get_compound_var(xml_file, 'noise_lut_range_raw')
+            noise_lut_range_raw_ds = self.xml_parser.get_compound_var(
+                xml_file, "noise_lut_range_raw"
+            )
             for vari in noise_lut_range_raw_ds:
-                if 'noise_lut' in vari:
-                    varitmp = 'noiseLut'
-                hihi = self.xml_parser.get_var(self.files['noise'].iloc[0], 'noise.range.%s' % varitmp,
-                                               describe=True)
-                noise_lut_range_raw_ds[vari].attrs['description'] = hihi
-                history.append(self.xml_parser.get_compound_var(xml_file, 'noise_lut_range_raw', describe=True))
+                if "noise_lut" in vari:
+                    varitmp = "noiseLut"
+                hihi = self.xml_parser.get_var(
+                    self.files["noise"].iloc[0],
+                    "noise.range.%s" % varitmp,
+                    describe=True,
+                )
+                noise_lut_range_raw_ds[vari].attrs["description"] = hihi
+                history.append(
+                    self.xml_parser.get_compound_var(
+                        xml_file, "noise_lut_range_raw", describe=True
+                    )
+                )
             tmp.append(noise_lut_range_raw_ds)
         ds = xr.concat(tmp, pd.Index(pols, name="pol"))
-        ds.attrs['history'] = '\n'.join(history)
+        ds.attrs["history"] = "\n".join(history)
         return ds
 
     def get_noise_azi_initial_parameters(self, pol):
@@ -685,15 +784,17 @@ class Sentinel1Reader:
             Tuple that contains the swaths, noise azimuth lines, line_start, line_stop, sample_start, sample_stop and
             noise azimuth lut for the pol selected.
         """
-        for pol_code, xml_file in self.files['noise'].items():
+        for pol_code, xml_file in self.files["noise"].items():
             if pol in os.path.basename(xml_file).upper():
-                return self.xml_parser.get_var(xml_file, 'noise.azi.swath'),\
-                    self.xml_parser.get_var(xml_file, 'noise.azi.line'),\
-                    self.xml_parser.get_var(xml_file, 'noise.azi.line_start'),\
-                    self.xml_parser.get_var(xml_file, 'noise.azi.line_stop'),\
-                    self.xml_parser.get_var(xml_file, 'noise.azi.sample_start'),\
-                    self.xml_parser.get_var(xml_file, 'noise.azi.sample_stop'),\
-                    self.xml_parser.get_var(xml_file, 'noise.azi.noiseLut')
+                return (
+                    self.xml_parser.get_var(xml_file, "noise.azi.swath"),
+                    self.xml_parser.get_var(xml_file, "noise.azi.line"),
+                    self.xml_parser.get_var(xml_file, "noise.azi.line_start"),
+                    self.xml_parser.get_var(xml_file, "noise.azi.line_stop"),
+                    self.xml_parser.get_var(xml_file, "noise.azi.sample_start"),
+                    self.xml_parser.get_var(xml_file, "noise.azi.sample_stop"),
+                    self.xml_parser.get_var(xml_file, "noise.azi.noiseLut"),
+                )
 
     @property
     def safe_files(self):
@@ -720,7 +821,7 @@ class Sentinel1Reader:
 
         """
         if self._safe_files is None:
-            files = self.xml_parser.get_compound_var(self.manifest, 'files')
+            files = self.xml_parser.get_compound_var(self.manifest, "files")
 
             """
             # add path
@@ -730,11 +831,14 @@ class Sentinel1Reader:
 
             # set "polarization" as a category, so sorting dataframe on polarization
             # will return the dataframe in same order as self._safe_attributes['polarizations']
-            files["polarization"] = files.polarization.astype('category').cat.reorder_categories(
-                self.manifest_attrs['polarizations'], ordered=True)
+            files["polarization"] = files.polarization.astype(
+                "category"
+            ).cat.reorder_categories(self.manifest_attrs["polarizations"], ordered=True)
             # replace 'dsid' with full path, compatible with gdal sentinel1 driver
-            files['dsid'] = files['dsid'].map(lambda dsid: "SENTINEL1_DS:%s:%s" % (self.path, dsid))
-            files.sort_values('polarization', inplace=True)
+            files["dsid"] = files["dsid"].map(
+                lambda dsid: "SENTINEL1_DS:%s:%s" % (self.path, dsid)
+            )
+            files.sort_values("polarization", inplace=True)
             self._safe_files = files
         return self._safe_files
 
@@ -747,7 +851,7 @@ class Sentinel1Reader:
         --------
         Sentinel1Reader.safe_files
         """
-        return self.safe_files[self.safe_files['dsid'] == self.name]
+        return self.safe_files[self.safe_files["dsid"] == self.name]
 
     def __repr__(self):
         if self.multidataset:
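
A minimal usage sketch of the reformatted reader (the product path is
hypothetical; any unpacked GRD/SLC SAFE directory works, and chunks must be
a dict keyed on the logical dimensions):

    from safe_s1 import Sentinel1Reader

    reader = Sentinel1Reader(
        "/data/S1A_IW_GRDH_1SDV_20170907T103020_20170907T103045_018268_01EB76_Z010.SAFE"
    )
    print(reader.datatree)  # geolocationGrid, orbit, image, bursts, ...

    # digital numbers resampled to ~100 m, read lazily through dask
    ds = reader.load_digital_number(
        resolution="100m", chunks={"line": 5000, "sample": 5000}
    )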


=====================================
safe_s1/sentinel1_xml_mappings.py
=====================================
@@ -1,21 +1,21 @@
 """
 xpath mapping from xml file, with conversion functions
 """
+import os.path
+import warnings
 import zipfile
+from datetime import datetime
 
 import aiohttp
 import fsspec
-import xarray
-from datetime import datetime
+import geopandas as gpd
 import numpy as np
 import pandas as pd
+import pyproj
+import xarray
 import xarray as xr
 from numpy.polynomial import Polynomial
-import warnings
-import geopandas as gpd
-from shapely.geometry import Polygon, Point
-import os.path
-import pyproj
+from shapely.geometry import Point, Polygon
 
 namespaces = {
     "xfdu": "urn:ccsds:schema:xfdu:1",
@@ -23,19 +23,27 @@ namespaces = {
     "s1sar": "http://www.esa.int/safe/sentinel-1.0/sentinel-1/sar",
     "s1": "http://www.esa.int/safe/sentinel-1.0/sentinel-1",
     "safe": "http://www.esa.int/safe/sentinel-1.0",
-    "gml": "http://www.opengis.net/gml"
+    "gml": "http://www.opengis.net/gml",
 }
 # xpath conversion functions: they take a single arg (the list returned by xpath)
 scalar = lambda x: x[0]
 scalar_int = lambda x: int(x[0])
 scalar_float = lambda x: float(x[0])
-date_converter = lambda x: datetime.strptime(x[0], '%Y-%m-%dT%H:%M:%S.%f')
-datetime64_array = lambda x: np.array([np.datetime64(date_converter([sx])).astype("datetime64[ns]") for sx in x])
-int_1Darray_from_string = lambda x: np.fromstring(x[0], dtype=int, sep=' ')
-float_2Darray_from_string_list = lambda x: np.vstack([np.fromstring(e, dtype=float, sep=' ') for e in x])
-list_of_float_1D_array_from_string = lambda x: [np.fromstring(e, dtype=float, sep=' ') for e in x]
-int_1Darray_from_join_strings = lambda x: np.fromstring(" ".join(x), dtype=int, sep=' ')
-float_1Darray_from_join_strings = lambda x: np.fromstring(" ".join(x), dtype=float, sep=' ')
+date_converter = lambda x: datetime.strptime(x[0], "%Y-%m-%dT%H:%M:%S.%f")
+datetime64_array = lambda x: np.array(
+    [np.datetime64(date_converter([sx])).astype("datetime64[ns]") for sx in x]
+)
+int_1Darray_from_string = lambda x: np.fromstring(x[0], dtype=int, sep=" ")
+float_2Darray_from_string_list = lambda x: np.vstack(
+    [np.fromstring(e, dtype=float, sep=" ") for e in x]
+)
+list_of_float_1D_array_from_string = lambda x: [
+    np.fromstring(e, dtype=float, sep=" ") for e in x
+]
+int_1Darray_from_join_strings = lambda x: np.fromstring(" ".join(x), dtype=int, sep=" ")
+float_1Darray_from_join_strings = lambda x: np.fromstring(
+    " ".join(x), dtype=float, sep=" "
+)
 int_array = lambda x: np.array(x, dtype=int)
 bool_array = lambda x: np.array(x, dtype=bool)
 float_array = lambda x: np.array(x, dtype=float)
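
Aside: each converter above takes the raw list returned by lxml's xpath() and coerces it. A minimal sketch of three of them on hand-written input (the sample strings below are made up, not taken from a real product; the lambdas are copied verbatim from the module):

    import numpy as np

    # np.fromstring with sep is deprecated but still functional, as used upstream
    scalar_float = lambda x: float(x[0])
    int_1Darray_from_string = lambda x: np.fromstring(x[0], dtype=int, sep=" ")
    float_2Darray_from_string_list = lambda x: np.vstack(
        [np.fromstring(e, dtype=float, sep=" ") for e in x]
    )

    print(scalar_float(["3.52"]))                          # 3.52
    print(int_1Darray_from_string(["0 10 20"]))            # [ 0 10 20]
    print(float_2Darray_from_string_list(["1 2", "3 4"]))  # [[1. 2.] [3. 4.]]
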
@@ -60,9 +68,9 @@ def get_test_file(fname):
         path to file, relative to `config['data_dir']`
 
     """
-    config = {'data_dir': '/tmp'}
+    config = {"data_dir": "/tmp"}
 
-    def url_get(url, cache_dir=os.path.join(config['data_dir'], 'fsspec_cache')):
+    def url_get(url, cache_dir=os.path.join(config["data_dir"], "fsspec_cache")):
         """
         Get file from url, using caching.
 
@@ -86,11 +94,15 @@ def get_test_file(fname):
         Due to fsspec, the returned filename won't match the remote one.
         """
 
-        if '://' in url:
+        if "://" in url:
             with fsspec.open(
-                    'filecache::%s' % url,
-                    https={'client_kwargs': {'timeout': aiohttp.ClientTimeout(total=3600)}},
-                    filecache={'cache_storage': os.path.join(os.path.join(config['data_dir'], 'fsspec_cache'))}
+                "filecache::%s" % url,
+                https={"client_kwargs": {"timeout": aiohttp.ClientTimeout(total=3600)}},
+                filecache={
+                    "cache_storage": os.path.join(
+                        os.path.join(config["data_dir"], "fsspec_cache")
+                    )
+                },
             ) as f:
                 fname = f.name
         else:
@@ -98,33 +110,37 @@ def get_test_file(fname):
 
         return fname
 
-    res_path = config['data_dir']
-    base_url = 'https://cyclobs.ifremer.fr/static/sarwing_datarmor/xsardata'
-    file_url = '%s/%s.zip' % (base_url, fname)
+    res_path = config["data_dir"]
+    base_url = "https://cyclobs.ifremer.fr/static/sarwing_datarmor/xsardata"
+    file_url = "%s/%s.zip" % (base_url, fname)
     if not os.path.exists(os.path.join(res_path, fname)):
         warnings.warn("Downloading %s" % file_url)
         local_file = url_get(file_url)
         warnings.warn("Unzipping %s" % os.path.join(res_path, fname))
-        with zipfile.ZipFile(local_file, 'r') as zip_ref:
+        with zipfile.ZipFile(local_file, "r") as zip_ref:
             zip_ref.extractall(res_path)
     return os.path.join(res_path, fname)
 
 
 def or_ipf28(xpath):
     """change xpath to match ipf <2.8 or >2.9 (for noise range)"""
-    xpath28 = xpath.replace('noiseRange', 'noise').replace('noiseAzimuth', 'noise')
+    xpath28 = xpath.replace("noiseRange", "noise").replace("noiseAzimuth", "noise")
     if xpath28 != xpath:
         xpath += " | %s" % xpath28
     return xpath
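
Aside: a quick illustration of the rewrite, assuming the package is importable; the xpath union lets one mapping serve both annotation layouts:

    from safe_s1.sentinel1_xml_mappings import or_ipf28

    xp = "/noise/noiseRangeVectorList/noiseRangeVector/line"
    # 'noiseRange' -> 'noise' produces the pre-2.8 variant; both are kept,
    # joined with the xpath union operator '|'
    print(or_ipf28(xp))
    # /noise/noiseRangeVectorList/noiseRangeVector/line | /noise/noiseVectorList/noiseVector/line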
 
 
-
 def list_poly_from_list_string_coords(str_coords_list):
     footprints = []
     for gmlpoly in str_coords_list:
-        footprints.append(Polygon(
-            [(float(lon), float(lat)) for lat, lon in [latlon.split(",")
-                                                       for latlon in gmlpoly.split(" ")]]))
+        footprints.append(
+            Polygon(
+                [
+                    (float(lon), float(lat))
+                    for lat, lon in [latlon.split(",") for latlon in gmlpoly.split(" ")]
+                ]
+            )
+        )
     return footprints
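
Aside: a made-up gml:coordinates payload (space-separated "lat,lon" pairs); note the helper swaps each pair to (lon, lat) for shapely:

    from safe_s1.sentinel1_xml_mappings import list_poly_from_list_string_coords

    coords = "1.0,10.0 1.0,11.0 2.0,11.0 2.0,10.0"
    (poly,) = list_poly_from_list_string_coords([coords])
    print(poly.wkt)  # POLYGON ((10 1, 11 1, 11 2, 10 2, 10 1))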
 
 
@@ -137,219 +153,496 @@ def list_poly_from_list_string_coords(str_coords_list):
 #  - dict is a nested dict, to create more hierarchy levels.
 xpath_mappings = {
     "manifest": {
-        'ipf_version': (scalar_float, '//xmlData/safe:processing/safe:facility/safe:software/@version'),
-        'swath_type': (scalar, '//s1sarl1:instrumentMode/s1sarl1:mode'),
+        "ipf_version": (
+            scalar_float,
+            "//xmlData/safe:processing/safe:facility/safe:software/@version",
+        ),
+        "swath_type": (scalar, "//s1sarl1:instrumentMode/s1sarl1:mode"),
         # 'product': (scalar, '/xfdu:XFDU/informationPackageMap/xfdu:contentUnit/@textInfo'),
-        'polarizations': (
-            ordered_category, '//s1sarl1:standAloneProductInformation/s1sarl1:transmitterReceiverPolarisation'),
-        'footprints': (list_poly_from_list_string_coords, '//safe:frame/safe:footPrint/gml:coordinates'),
-        'product_type': (scalar, '//s1sarl1:standAloneProductInformation/s1sarl1:productType'),
-        'mission': (scalar, '//safe:platform/safe:familyName'),
-        'satellite': (scalar, '//safe:platform/safe:number'),
-        'start_date': (date_converter, '//safe:acquisitionPeriod/safe:startTime'),
-        'stop_date': (date_converter, '//safe:acquisitionPeriod/safe:stopTime'),
-        
-        'aux_cal': (scalar, '//metadataSection/metadataObject/metadataWrap/xmlData/safe:processing/safe:resource/safe:processing/safe:resource[@role="AUX_CAL"]/@name'),
-        'aux_pp1': (scalar, '//metadataSection/metadataObject/metadataWrap/xmlData/safe:processing/safe:resource/safe:processing/safe:resource[@role="AUX_PP1"]/@name'),
-        'aux_ins': (scalar, '//metadataSection/metadataObject/metadataWrap/xmlData/safe:processing/safe:resource/safe:processing/safe:resource[@role="AUX_INS"]/@name'),
-
-        'aux_cal_sl2': (scalar,'//metadataSection/metadataObject/metadataWrap/xmlData/safe:processing/safe:resource[@role="AUX_CAL"]/@name'),
-        'annotation_files': (
-            normpath, '/xfdu:XFDU/dataObjectSection/*[@repID="s1Level1ProductSchema"]/byteStream/fileLocation/@href'),
-        'measurement_files': (
+        "polarizations": (
+            ordered_category,
+            "//s1sarl1:standAloneProductInformation/s1sarl1:transmitterReceiverPolarisation",
+        ),
+        "footprints": (
+            list_poly_from_list_string_coords,
+            "//safe:frame/safe:footPrint/gml:coordinates",
+        ),
+        "product_type": (
+            scalar,
+            "//s1sarl1:standAloneProductInformation/s1sarl1:productType",
+        ),
+        "mission": (scalar, "//safe:platform/safe:familyName"),
+        "satellite": (scalar, "//safe:platform/safe:number"),
+        "start_date": (date_converter, "//safe:acquisitionPeriod/safe:startTime"),
+        "stop_date": (date_converter, "//safe:acquisitionPeriod/safe:stopTime"),
+        "aux_cal": (
+            scalar,
+            '//metadataSection/metadataObject/metadataWrap/xmlData/safe:processing/safe:resource/safe:processing/safe:resource[@role="AUX_CAL"]/@name',
+        ),
+        "aux_pp1": (
+            scalar,
+            '//metadataSection/metadataObject/metadataWrap/xmlData/safe:processing/safe:resource/safe:processing/safe:resource[@role="AUX_PP1"]/@name',
+        ),
+        "aux_ins": (
+            scalar,
+            '//metadataSection/metadataObject/metadataWrap/xmlData/safe:processing/safe:resource/safe:processing/safe:resource[@role="AUX_INS"]/@name',
+        ),
+        "aux_cal_sl2": (
+            scalar,
+            '//metadataSection/metadataObject/metadataWrap/xmlData/safe:processing/safe:resource[@role="AUX_CAL"]/@name',
+        ),
+        "annotation_files": (
             normpath,
-            '/xfdu:XFDU/dataObjectSection/*[@repID="s1Level1MeasurementSchema"]/byteStream/fileLocation/@href'),
-        'noise_files': (
-            normpath, '/xfdu:XFDU/dataObjectSection/*[@repID="s1Level1NoiseSchema"]/byteStream/fileLocation/@href'),
-        'calibration_files': (
+            '/xfdu:XFDU/dataObjectSection/*[@repID="s1Level1ProductSchema"]/byteStream/fileLocation/@href',
+        ),
+        "measurement_files": (
             normpath,
-            '/xfdu:XFDU/dataObjectSection/*[@repID="s1Level1CalibrationSchema"]/byteStream/fileLocation/@href'),
-        'xsd_product_file': (
-            normpath, '/xfdu:XFDU/metadataSection/metadataObject[@ID="s1Level1ProductSchema"]/metadataReference/@href'),
-        'xsd_Noise_file': (
-            normpath, '/xfdu:XFDU/metadataSection/metadataObject[@ID="s1Level1NoiseSchema"]/metadataReference/@href'),
-        'xsd_RFI_file': (
-            normpath, '/xfdu:XFDU/metadataSection/metadataObject[@ID="s1Level1RfiSchema"]/metadataReference/@href'),
-        'xsd_calibration_file': (
+            '/xfdu:XFDU/dataObjectSection/*[@repID="s1Level1MeasurementSchema"]/byteStream/fileLocation/@href',
+        ),
+        "noise_files": (
             normpath,
-            '/xfdu:XFDU/metadataSection/metadataObject[@ID="s1Level1CalibrationSchema"]/metadataReference/@href'),
-        'xsd_objecttype_file': (
-            normpath, '/xfdu:XFDU/metadataSection/metadataObject[@ID="s1ObjectTypesSchema"]/metadataReference/@href'),
-        'xsd_measurement_file': (
+            '/xfdu:XFDU/dataObjectSection/*[@repID="s1Level1NoiseSchema"]/byteStream/fileLocation/@href',
+        ),
+        "calibration_files": (
             normpath,
-            '/xfdu:XFDU/metadataSection/metadataObject[@ID="s1Level1MeasurementSchema"]/metadataReference/@href'),
-        'xsd_level1product_file': (normpath,
-                                   '/xfdu:XFDU/metadataSection/metadataObject[@ID="s1Level1ProductPreviewSchema"]/metadataReference/@href'),
-        'xsd_overlay_file': (
+            '/xfdu:XFDU/dataObjectSection/*[@repID="s1Level1CalibrationSchema"]/byteStream/fileLocation/@href',
+        ),
+        "xsd_product_file": (
             normpath,
-            '/xfdu:XFDU/metadataSection/metadataObject[@ID="s1Level1MapOverlaySchema"]/metadataReference/@href'),
-        'instrument_configuration_id': (scalar, 
-            '//s1sarl1:standAloneProductInformation/s1sarl1:instrumentConfigurationID/text()', 
-            )
+            '/xfdu:XFDU/metadataSection/metadataObject[@ID="s1Level1ProductSchema"]/metadataReference/@href',
+        ),
+        "xsd_Noise_file": (
+            normpath,
+            '/xfdu:XFDU/metadataSection/metadataObject[@ID="s1Level1NoiseSchema"]/metadataReference/@href',
+        ),
+        "xsd_RFI_file": (
+            normpath,
+            '/xfdu:XFDU/metadataSection/metadataObject[@ID="s1Level1RfiSchema"]/metadataReference/@href',
+        ),
+        "xsd_calibration_file": (
+            normpath,
+            '/xfdu:XFDU/metadataSection/metadataObject[@ID="s1Level1CalibrationSchema"]/metadataReference/@href',
+        ),
+        "xsd_objecttype_file": (
+            normpath,
+            '/xfdu:XFDU/metadataSection/metadataObject[@ID="s1ObjectTypesSchema"]/metadataReference/@href',
+        ),
+        "xsd_measurement_file": (
+            normpath,
+            '/xfdu:XFDU/metadataSection/metadataObject[@ID="s1Level1MeasurementSchema"]/metadataReference/@href',
+        ),
+        "xsd_level1product_file": (
+            normpath,
+            '/xfdu:XFDU/metadataSection/metadataObject[@ID="s1Level1ProductPreviewSchema"]/metadataReference/@href',
+        ),
+        "xsd_overlay_file": (
+            normpath,
+            '/xfdu:XFDU/metadataSection/metadataObject[@ID="s1Level1MapOverlaySchema"]/metadataReference/@href',
+        ),
+        "instrument_configuration_id": (
+            scalar,
+            "//s1sarl1:standAloneProductInformation/s1sarl1:instrumentConfigurationID/text()",
+        ),
     },
-    'calibration': {
-        'polarization': (scalar, '/calibration/adsHeader/polarisation'),
+    "calibration": {
+        "polarization": (scalar, "/calibration/adsHeader/polarisation"),
         # 'number_of_vector': '//calibration/calibrationVectorList/@count',
-        'line': (np.array, '//calibration/calibrationVectorList/calibrationVector/line'),
-        'sample': (int_1Darray_from_string, '//calibration/calibrationVectorList/calibrationVector[1]/pixel'),
-        'sigma0_lut': (
-            float_2Darray_from_string_list, '//calibration/calibrationVectorList/calibrationVector/sigmaNought'),
-        'gamma0_lut': (float_2Darray_from_string_list, '//calibration/calibrationVectorList/calibrationVector/gamma'),
-        'azimuthTime': (datetime64_array, '/calibration/calibrationVectorList/calibrationVector/azimuthTime')
+        "line": (
+            np.array,
+            "//calibration/calibrationVectorList/calibrationVector/line",
+        ),
+        "sample": (
+            int_1Darray_from_string,
+            "//calibration/calibrationVectorList/calibrationVector[1]/pixel",
+        ),
+        "sigma0_lut": (
+            float_2Darray_from_string_list,
+            "//calibration/calibrationVectorList/calibrationVector/sigmaNought",
+        ),
+        "gamma0_lut": (
+            float_2Darray_from_string_list,
+            "//calibration/calibrationVectorList/calibrationVector/gamma",
+        ),
+        "azimuthTime": (
+            datetime64_array,
+            "/calibration/calibrationVectorList/calibrationVector/azimuthTime",
+        ),
     },
-    'noise': {
-        'mode': (scalar, '/noise/adsHeader/mode'),
-        'polarization': (scalar, '/noise/adsHeader/polarisation'),
-        'range': {
-            'line': (int_array, or_ipf28('/noise/noiseRangeVectorList/noiseRangeVector/line')),
-            'sample': (lambda x: [np.fromstring(s, dtype=int, sep=' ') for s in x],
-                       or_ipf28('/noise/noiseRangeVectorList/noiseRangeVector/pixel')),
-            'noiseLut': (
-                lambda x: [np.fromstring(s, dtype=float, sep=' ') for s in x],
-                or_ipf28('/noise/noiseRangeVectorList/noiseRangeVector/noiseRangeLut')),
-            'azimuthTime': (datetime64_array, '/noise/noiseRangeVectorList/noiseRangeVector/azimuthTime')
+    "noise": {
+        "mode": (scalar, "/noise/adsHeader/mode"),
+        "polarization": (scalar, "/noise/adsHeader/polarisation"),
+        "range": {
+            "line": (
+                int_array,
+                or_ipf28("/noise/noiseRangeVectorList/noiseRangeVector/line"),
+            ),
+            "sample": (
+                lambda x: [np.fromstring(s, dtype=int, sep=" ") for s in x],
+                or_ipf28("/noise/noiseRangeVectorList/noiseRangeVector/pixel"),
+            ),
+            "noiseLut": (
+                lambda x: [np.fromstring(s, dtype=float, sep=" ") for s in x],
+                or_ipf28("/noise/noiseRangeVectorList/noiseRangeVector/noiseRangeLut"),
+            ),
+            "azimuthTime": (
+                datetime64_array,
+                "/noise/noiseRangeVectorList/noiseRangeVector/azimuthTime",
+            ),
+        },
+        "azi": {
+            "swath": "/noise/noiseAzimuthVectorList/noiseAzimuthVector/swath",
+            "line": (
+                lambda x: [np.fromstring(str(s), dtype=int, sep=" ") for s in x],
+                "/noise/noiseAzimuthVectorList/noiseAzimuthVector/line",
+            ),
+            "line_start": (
+                int_array,
+                "/noise/noiseAzimuthVectorList/noiseAzimuthVector/firstAzimuthLine",
+            ),
+            "line_stop": (
+                int_array,
+                "/noise/noiseAzimuthVectorList/noiseAzimuthVector/lastAzimuthLine",
+            ),
+            "sample_start": (
+                int_array,
+                "/noise/noiseAzimuthVectorList/noiseAzimuthVector/firstRangeSample",
+            ),
+            "sample_stop": (
+                int_array,
+                "/noise/noiseAzimuthVectorList/noiseAzimuthVector/lastRangeSample",
+            ),
+            "noiseLut": (
+                lambda x: [np.fromstring(str(s), dtype=float, sep=" ") for s in x],
+                "/noise/noiseAzimuthVectorList/noiseAzimuthVector/noiseAzimuthLut",
+            ),
         },
-        'azi': {
-            'swath': '/noise/noiseAzimuthVectorList/noiseAzimuthVector/swath',
-            'line': (lambda x: [np.fromstring(str(s), dtype=int, sep=' ') for s in x],
-                     '/noise/noiseAzimuthVectorList/noiseAzimuthVector/line'),
-            'line_start': (int_array, '/noise/noiseAzimuthVectorList/noiseAzimuthVector/firstAzimuthLine'),
-            'line_stop': (int_array, '/noise/noiseAzimuthVectorList/noiseAzimuthVector/lastAzimuthLine'),
-            'sample_start': (int_array, '/noise/noiseAzimuthVectorList/noiseAzimuthVector/firstRangeSample'),
-            'sample_stop': (int_array, '/noise/noiseAzimuthVectorList/noiseAzimuthVector/lastRangeSample'),
-            'noiseLut': (
-                lambda x: [np.fromstring(str(s), dtype=float, sep=' ') for s in x],
-                '/noise/noiseAzimuthVectorList/noiseAzimuthVector/noiseAzimuthLut'),
-        }
     },
-    'annotation': {
-        'product_type': (scalar, '/product/adsHeader/productType'),
-        'swath_subswath': (scalar, '/product/adsHeader/swath'),
-        'line': (uniq_sorted, '/product/geolocationGrid/geolocationGridPointList/geolocationGridPoint/line'),
-        'sample': (uniq_sorted, '/product/geolocationGrid/geolocationGridPointList/geolocationGridPoint/pixel'),
-        'incidenceAngle': (
-            float_array, '/product/geolocationGrid/geolocationGridPointList/geolocationGridPoint/incidenceAngle'),
-        'elevationAngle': (
-            float_array, '/product/geolocationGrid/geolocationGridPointList/geolocationGridPoint/elevationAngle'),
-        'height': (float_array, '/product/geolocationGrid/geolocationGridPointList/geolocationGridPoint/height'),
-        'azimuthTime': (
-            datetime64_array, '/product/geolocationGrid/geolocationGridPointList/geolocationGridPoint/azimuthTime'),
-        'slantRangeTime': (
-            float_array, '/product/geolocationGrid/geolocationGridPointList/geolocationGridPoint/slantRangeTime'),
-        'longitude': (float_array, '/product/geolocationGrid/geolocationGridPointList/geolocationGridPoint/longitude'),
-        'latitude': (float_array, '/product/geolocationGrid/geolocationGridPointList/geolocationGridPoint/latitude'),
-        'polarization': (scalar, '/product/adsHeader/polarisation'),
-        'line_time_range': (
-            datetime64_array, '/product/imageAnnotation/imageInformation/*[contains(name(),"LineUtcTime")]'),
-        'line_size': (scalar, '/product/imageAnnotation/imageInformation/numberOfLines'),
-        'sample_size': (scalar, '/product/imageAnnotation/imageInformation/numberOfSamples'),
-        'incidence_angle_mid_swath': (scalar_float, '/product/imageAnnotation/imageInformation/incidenceAngleMidSwath'),
-        'azimuth_time_interval': (scalar_float, '/product/imageAnnotation/imageInformation/azimuthTimeInterval'),
-        'slant_range_time_image': (scalar_float, '/product/imageAnnotation/imageInformation/slantRangeTime'),
-        'rangePixelSpacing': (scalar_float, '/product/imageAnnotation/imageInformation/rangePixelSpacing'),
-        'azimuthPixelSpacing': (scalar_float, '/product/imageAnnotation/imageInformation/azimuthPixelSpacing'),
-        'denoised': (scalar, '/product/imageAnnotation/processingInformation/thermalNoiseCorrectionPerformed'),
-        'pol': (scalar, '/product/adsHeader/polarisation'),
-        'pass': (scalar, '/product/generalAnnotation/productInformation/pass'),
-        'platform_heading': (scalar_float, '/product/generalAnnotation/productInformation/platformHeading'),
-        'radar_frequency': (scalar_float, '/product/generalAnnotation/productInformation/radarFrequency'),
-        'range_sampling_rate': (scalar_float, '/product/generalAnnotation/productInformation/rangeSamplingRate'),
-        'azimuth_steering_rate': (scalar_float, '/product/generalAnnotation/productInformation/azimuthSteeringRate'),
-        'orbit_time': (datetime64_array, '//product/generalAnnotation/orbitList/orbit/time'),
-        'orbit_frame': (np.array, '//product/generalAnnotation/orbitList/orbit/frame'),
-        'orbit_pos_x': (float_array, '//product/generalAnnotation/orbitList/orbit/position/x'),
-        'orbit_pos_y': (float_array, '//product/generalAnnotation/orbitList/orbit/position/y'),
-        'orbit_pos_z': (float_array, '//product/generalAnnotation/orbitList/orbit/position/z'),
-        'orbit_vel_x': (float_array, '//product/generalAnnotation/orbitList/orbit/velocity/x'),
-        'orbit_vel_y': (float_array, '//product/generalAnnotation/orbitList/orbit/velocity/y'),
-        'orbit_vel_z': (float_array, '//product/generalAnnotation/orbitList/orbit/velocity/z'),
-        'number_of_bursts': (scalar_int, '/product/swathTiming/burstList/@count'),
-        'linesPerBurst': (scalar, '/product/swathTiming/linesPerBurst'),
-        'samplesPerBurst': (scalar, '/product/swathTiming/samplesPerBurst'),
-        'all_bursts': (np.array, '//product/swathTiming/burstList/burst'),
-        'burst_azimuthTime': (datetime64_array, '//product/swathTiming/burstList/burst/azimuthTime'),
-        'burst_azimuthAnxTime': (float_array, '//product/swathTiming/burstList/burst/azimuthAnxTime'),
-        'burst_sensingTime': (datetime64_array, '//product/swathTiming/burstList/burst/sensingTime'),
-        'burst_byteOffset': (np.array, '//product/swathTiming/burstList/burst/byteOffset'),
-        'burst_firstValidSample': (
-            float_2Darray_from_string_list, '//product/swathTiming/burstList/burst/firstValidSample'),
-        'burst_lastValidSample': (
-            float_2Darray_from_string_list, '//product/swathTiming/burstList/burst/lastValidSample'),
-        'nb_dcestimate': (scalar_int, '/product/dopplerCentroid/dcEstimateList/@count'),
-        'nb_geoDcPoly': (
-            scalar_int, '/product/dopplerCentroid/dcEstimateList/dcEstimate[1]/geometryDcPolynomial/@count'),
-        'nb_dataDcPoly': (scalar_int, '/product/dopplerCentroid/dcEstimateList/dcEstimate[1]/dataDcPolynomial/@count'),
-        'nb_fineDce': (scalar_int, '/product/dopplerCentroid/dcEstimateList/dcEstimate[1]/fineDceList/@count'),
-        'dc_azimuth_time': (datetime64_array, '//product/dopplerCentroid/dcEstimateList/dcEstimate/azimuthTime'),
-        'dc_t0': (np.array, '//product/dopplerCentroid/dcEstimateList/dcEstimate/t0'),
-        'dc_geoDcPoly': (
+    "annotation": {
+        "product_type": (scalar, "/product/adsHeader/productType"),
+        "swath_subswath": (scalar, "/product/adsHeader/swath"),
+        "line": (
+            uniq_sorted,
+            "/product/geolocationGrid/geolocationGridPointList/geolocationGridPoint/line",
+        ),
+        "sample": (
+            uniq_sorted,
+            "/product/geolocationGrid/geolocationGridPointList/geolocationGridPoint/pixel",
+        ),
+        "incidenceAngle": (
+            float_array,
+            "/product/geolocationGrid/geolocationGridPointList/geolocationGridPoint/incidenceAngle",
+        ),
+        "elevationAngle": (
+            float_array,
+            "/product/geolocationGrid/geolocationGridPointList/geolocationGridPoint/elevationAngle",
+        ),
+        "height": (
+            float_array,
+            "/product/geolocationGrid/geolocationGridPointList/geolocationGridPoint/height",
+        ),
+        "azimuthTime": (
+            datetime64_array,
+            "/product/geolocationGrid/geolocationGridPointList/geolocationGridPoint/azimuthTime",
+        ),
+        "slantRangeTime": (
+            float_array,
+            "/product/geolocationGrid/geolocationGridPointList/geolocationGridPoint/slantRangeTime",
+        ),
+        "longitude": (
+            float_array,
+            "/product/geolocationGrid/geolocationGridPointList/geolocationGridPoint/longitude",
+        ),
+        "latitude": (
+            float_array,
+            "/product/geolocationGrid/geolocationGridPointList/geolocationGridPoint/latitude",
+        ),
+        "polarization": (scalar, "/product/adsHeader/polarisation"),
+        "line_time_range": (
+            datetime64_array,
+            '/product/imageAnnotation/imageInformation/*[contains(name(),"LineUtcTime")]',
+        ),
+        "line_size": (
+            scalar,
+            "/product/imageAnnotation/imageInformation/numberOfLines",
+        ),
+        "sample_size": (
+            scalar,
+            "/product/imageAnnotation/imageInformation/numberOfSamples",
+        ),
+        "incidence_angle_mid_swath": (
+            scalar_float,
+            "/product/imageAnnotation/imageInformation/incidenceAngleMidSwath",
+        ),
+        "azimuth_time_interval": (
+            scalar_float,
+            "/product/imageAnnotation/imageInformation/azimuthTimeInterval",
+        ),
+        "slant_range_time_image": (
+            scalar_float,
+            "/product/imageAnnotation/imageInformation/slantRangeTime",
+        ),
+        "rangePixelSpacing": (
+            scalar_float,
+            "/product/imageAnnotation/imageInformation/rangePixelSpacing",
+        ),
+        "azimuthPixelSpacing": (
+            scalar_float,
+            "/product/imageAnnotation/imageInformation/azimuthPixelSpacing",
+        ),
+        "denoised": (
+            scalar,
+            "/product/imageAnnotation/processingInformation/thermalNoiseCorrectionPerformed",
+        ),
+        "pol": (scalar, "/product/adsHeader/polarisation"),
+        "pass": (scalar, "/product/generalAnnotation/productInformation/pass"),
+        "platform_heading": (
+            scalar_float,
+            "/product/generalAnnotation/productInformation/platformHeading",
+        ),
+        "radar_frequency": (
+            scalar_float,
+            "/product/generalAnnotation/productInformation/radarFrequency",
+        ),
+        "range_sampling_rate": (
+            scalar_float,
+            "/product/generalAnnotation/productInformation/rangeSamplingRate",
+        ),
+        "azimuth_steering_rate": (
+            scalar_float,
+            "/product/generalAnnotation/productInformation/azimuthSteeringRate",
+        ),
+        "orbit_time": (
+            datetime64_array,
+            "//product/generalAnnotation/orbitList/orbit/time",
+        ),
+        "orbit_frame": (np.array, "//product/generalAnnotation/orbitList/orbit/frame"),
+        "orbit_pos_x": (
+            float_array,
+            "//product/generalAnnotation/orbitList/orbit/position/x",
+        ),
+        "orbit_pos_y": (
+            float_array,
+            "//product/generalAnnotation/orbitList/orbit/position/y",
+        ),
+        "orbit_pos_z": (
+            float_array,
+            "//product/generalAnnotation/orbitList/orbit/position/z",
+        ),
+        "orbit_vel_x": (
+            float_array,
+            "//product/generalAnnotation/orbitList/orbit/velocity/x",
+        ),
+        "orbit_vel_y": (
+            float_array,
+            "//product/generalAnnotation/orbitList/orbit/velocity/y",
+        ),
+        "orbit_vel_z": (
+            float_array,
+            "//product/generalAnnotation/orbitList/orbit/velocity/z",
+        ),
+        "number_of_bursts": (scalar_int, "/product/swathTiming/burstList/@count"),
+        "linesPerBurst": (scalar, "/product/swathTiming/linesPerBurst"),
+        "samplesPerBurst": (scalar, "/product/swathTiming/samplesPerBurst"),
+        "all_bursts": (np.array, "//product/swathTiming/burstList/burst"),
+        "burst_azimuthTime": (
+            datetime64_array,
+            "//product/swathTiming/burstList/burst/azimuthTime",
+        ),
+        "burst_azimuthAnxTime": (
+            float_array,
+            "//product/swathTiming/burstList/burst/azimuthAnxTime",
+        ),
+        "burst_sensingTime": (
+            datetime64_array,
+            "//product/swathTiming/burstList/burst/sensingTime",
+        ),
+        "burst_byteOffset": (
+            np.array,
+            "//product/swathTiming/burstList/burst/byteOffset",
+        ),
+        "burst_firstValidSample": (
+            float_2Darray_from_string_list,
+            "//product/swathTiming/burstList/burst/firstValidSample",
+        ),
+        "burst_lastValidSample": (
+            float_2Darray_from_string_list,
+            "//product/swathTiming/burstList/burst/lastValidSample",
+        ),
+        "nb_dcestimate": (scalar_int, "/product/dopplerCentroid/dcEstimateList/@count"),
+        "nb_geoDcPoly": (
+            scalar_int,
+            "/product/dopplerCentroid/dcEstimateList/dcEstimate[1]/geometryDcPolynomial/@count",
+        ),
+        "nb_dataDcPoly": (
+            scalar_int,
+            "/product/dopplerCentroid/dcEstimateList/dcEstimate[1]/dataDcPolynomial/@count",
+        ),
+        "nb_fineDce": (
+            scalar_int,
+            "/product/dopplerCentroid/dcEstimateList/dcEstimate[1]/fineDceList/@count",
+        ),
+        "dc_azimuth_time": (
+            datetime64_array,
+            "//product/dopplerCentroid/dcEstimateList/dcEstimate/azimuthTime",
+        ),
+        "dc_t0": (np.array, "//product/dopplerCentroid/dcEstimateList/dcEstimate/t0"),
+        "dc_geoDcPoly": (
             list_of_float_1D_array_from_string,
-            '//product/dopplerCentroid/dcEstimateList/dcEstimate/geometryDcPolynomial'),
-        'dc_dataDcPoly': (
-            list_of_float_1D_array_from_string, '//product/dopplerCentroid/dcEstimateList/dcEstimate/dataDcPolynomial'),
-        'dc_rmserr': (np.array, '//product/dopplerCentroid/dcEstimateList/dcEstimate/dataDcRmsError'),
-        'dc_rmserrAboveThres': (
-            bool_array, '//product/dopplerCentroid/dcEstimateList/dcEstimate/dataDcRmsErrorAboveThreshold'),
-        'dc_azstarttime': (
-            datetime64_array, '//product/dopplerCentroid/dcEstimateList/dcEstimate/fineDceAzimuthStartTime'),
-        'dc_azstoptime': (
-            datetime64_array, '//product/dopplerCentroid/dcEstimateList/dcEstimate/fineDceAzimuthStopTime'),
-        'dc_slantRangeTime': (
-            float_array, '///product/dopplerCentroid/dcEstimateList/dcEstimate/fineDceList/fineDce/slantRangeTime'),
-        'dc_frequency': (
-            float_array, '///product/dopplerCentroid/dcEstimateList/dcEstimate/fineDceList/fineDce/frequency'),
-        'nb_fmrate': (scalar_int, '/product/generalAnnotation/azimuthFmRateList/@count'),
-        'fmrate_azimuthtime': (
-            datetime64_array, '//product/generalAnnotation/azimuthFmRateList/azimuthFmRate/azimuthTime'),
-        'fmrate_t0': (float_array, '//product/generalAnnotation/azimuthFmRateList/azimuthFmRate/t0'),
-        'fmrate_c0': (float_array, '//product/generalAnnotation/azimuthFmRateList/azimuthFmRate/c0'),
-        'fmrate_c1': (float_array, '//product/generalAnnotation/azimuthFmRateList/azimuthFmRate/c1'),
-        'fmrate_c2': (float_array, '//product/generalAnnotation/azimuthFmRateList/azimuthFmRate/c2'),
-        'fmrate_azimuthFmRatePolynomial': (
+            "//product/dopplerCentroid/dcEstimateList/dcEstimate/geometryDcPolynomial",
+        ),
+        "dc_dataDcPoly": (
             list_of_float_1D_array_from_string,
-            '//product/generalAnnotation/azimuthFmRateList/azimuthFmRate/azimuthFmRatePolynomial'),
-        
-        'ap_azimuthTime': (
-            datetime64_array, '/product/antennaPattern/antennaPatternList/antennaPattern/azimuthTime'),        
-        'ap_roll' : (float_array, '/product/antennaPattern/antennaPatternList/antennaPattern/roll'),
-        'ap_swath' : (lambda x: np.array(x), '/product/antennaPattern/antennaPatternList/antennaPattern/swath'),
-        
-        'ap_elevationAngle': (
-            list_of_float_1D_array_from_string, '/product/antennaPattern/antennaPatternList/antennaPattern/elevationAngle'), 
-        'ap_incidenceAngle': (
-            list_of_float_1D_array_from_string, '/product/antennaPattern/antennaPatternList/antennaPattern/incidenceAngle'), 
-        'ap_slantRangeTime': (
-            list_of_float_1D_array_from_string, '/product/antennaPattern/antennaPatternList/antennaPattern/slantRangeTime'),        
-        'ap_terrainHeight': (
-            float_array, '/product/antennaPattern/antennaPatternList/antennaPattern/terrainHeight'),        
-        'ap_elevationPattern' : (
-            list_of_float_1D_array_from_string, '/product/antennaPattern/antennaPatternList/antennaPattern/elevationPattern'),        
-        
-        'sm_nbPerSwat': (int_array, '/product/swathMerging/swathMergeList/swathMerge/swathBoundsList/@count'),
-        'sm_swath' : (lambda x: np.array(x),     '/product/swathMerging/swathMergeList/swathMerge/swath'),
-        'sm_azimuthTime' : (datetime64_array, '/product/swathMerging/swathMergeList/swathMerge/swathBoundsList/swathBounds/azimuthTime'),      
-        'sm_firstAzimuthLine' : (int_array, '/product/swathMerging/swathMergeList/swathMerge/swathBoundsList/swathBounds/firstAzimuthLine'),      
-        'sm_lastAzimuthLine' : (int_array, '/product/swathMerging/swathMergeList/swathMerge/swathBoundsList/swathBounds/lastAzimuthLine'),      
-        'sm_firstRangeSample' : (int_array, '/product/swathMerging/swathMergeList/swathMerge/swathBoundsList/swathBounds/firstRangeSample'),      
-        'sm_lastRangeSample' : (int_array, '/product/swathMerging/swathMergeList/swathMerge/swathBoundsList/swathBounds/lastRangeSample'),      
-
-    },   
-    'xsd': {'all': (str, '/xsd:schema/xsd:complexType/xsd:sequence/xsd:element/xsd:annotation/xsd:documentation'),
-            'names': (str, '/xsd:schema/xsd:complexType/xsd:sequence/xsd:element/@name'),
-            'sensingtime': (str, '/xsd:schema/xsd:complexType/xsd:sequence/xsd:element/sensingTime')
-            }
-
+            "//product/dopplerCentroid/dcEstimateList/dcEstimate/dataDcPolynomial",
+        ),
+        "dc_rmserr": (
+            np.array,
+            "//product/dopplerCentroid/dcEstimateList/dcEstimate/dataDcRmsError",
+        ),
+        "dc_rmserrAboveThres": (
+            bool_array,
+            "//product/dopplerCentroid/dcEstimateList/dcEstimate/dataDcRmsErrorAboveThreshold",
+        ),
+        "dc_azstarttime": (
+            datetime64_array,
+            "//product/dopplerCentroid/dcEstimateList/dcEstimate/fineDceAzimuthStartTime",
+        ),
+        "dc_azstoptime": (
+            datetime64_array,
+            "//product/dopplerCentroid/dcEstimateList/dcEstimate/fineDceAzimuthStopTime",
+        ),
+        "dc_slantRangeTime": (
+            float_array,
+            "///product/dopplerCentroid/dcEstimateList/dcEstimate/fineDceList/fineDce/slantRangeTime",
+        ),
+        "dc_frequency": (
+            float_array,
+            "///product/dopplerCentroid/dcEstimateList/dcEstimate/fineDceList/fineDce/frequency",
+        ),
+        "nb_fmrate": (
+            scalar_int,
+            "/product/generalAnnotation/azimuthFmRateList/@count",
+        ),
+        "fmrate_azimuthtime": (
+            datetime64_array,
+            "//product/generalAnnotation/azimuthFmRateList/azimuthFmRate/azimuthTime",
+        ),
+        "fmrate_t0": (
+            float_array,
+            "//product/generalAnnotation/azimuthFmRateList/azimuthFmRate/t0",
+        ),
+        "fmrate_c0": (
+            float_array,
+            "//product/generalAnnotation/azimuthFmRateList/azimuthFmRate/c0",
+        ),
+        "fmrate_c1": (
+            float_array,
+            "//product/generalAnnotation/azimuthFmRateList/azimuthFmRate/c1",
+        ),
+        "fmrate_c2": (
+            float_array,
+            "//product/generalAnnotation/azimuthFmRateList/azimuthFmRate/c2",
+        ),
+        "fmrate_azimuthFmRatePolynomial": (
+            list_of_float_1D_array_from_string,
+            "//product/generalAnnotation/azimuthFmRateList/azimuthFmRate/azimuthFmRatePolynomial",
+        ),
+        "ap_azimuthTime": (
+            datetime64_array,
+            "/product/antennaPattern/antennaPatternList/antennaPattern/azimuthTime",
+        ),
+        "ap_roll": (
+            float_array,
+            "/product/antennaPattern/antennaPatternList/antennaPattern/roll",
+        ),
+        "ap_swath": (
+            lambda x: np.array(x),
+            "/product/antennaPattern/antennaPatternList/antennaPattern/swath",
+        ),
+        "ap_elevationAngle": (
+            list_of_float_1D_array_from_string,
+            "/product/antennaPattern/antennaPatternList/antennaPattern/elevationAngle",
+        ),
+        "ap_incidenceAngle": (
+            list_of_float_1D_array_from_string,
+            "/product/antennaPattern/antennaPatternList/antennaPattern/incidenceAngle",
+        ),
+        "ap_slantRangeTime": (
+            list_of_float_1D_array_from_string,
+            "/product/antennaPattern/antennaPatternList/antennaPattern/slantRangeTime",
+        ),
+        "ap_terrainHeight": (
+            float_array,
+            "/product/antennaPattern/antennaPatternList/antennaPattern/terrainHeight",
+        ),
+        "ap_elevationPattern": (
+            list_of_float_1D_array_from_string,
+            "/product/antennaPattern/antennaPatternList/antennaPattern/elevationPattern",
+        ),
+        "sm_nbPerSwat": (
+            int_array,
+            "/product/swathMerging/swathMergeList/swathMerge/swathBoundsList/@count",
+        ),
+        "sm_swath": (
+            lambda x: np.array(x),
+            "/product/swathMerging/swathMergeList/swathMerge/swath",
+        ),
+        "sm_azimuthTime": (
+            datetime64_array,
+            "/product/swathMerging/swathMergeList/swathMerge/swathBoundsList/swathBounds/azimuthTime",
+        ),
+        "sm_firstAzimuthLine": (
+            int_array,
+            "/product/swathMerging/swathMergeList/swathMerge/swathBoundsList/swathBounds/firstAzimuthLine",
+        ),
+        "sm_lastAzimuthLine": (
+            int_array,
+            "/product/swathMerging/swathMergeList/swathMerge/swathBoundsList/swathBounds/lastAzimuthLine",
+        ),
+        "sm_firstRangeSample": (
+            int_array,
+            "/product/swathMerging/swathMergeList/swathMerge/swathBoundsList/swathBounds/firstRangeSample",
+        ),
+        "sm_lastRangeSample": (
+            int_array,
+            "/product/swathMerging/swathMergeList/swathMerge/swathBoundsList/swathBounds/lastRangeSample",
+        ),
+    },
+    "xsd": {
+        "all": (
+            str,
+            "/xsd:schema/xsd:complexType/xsd:sequence/xsd:element/xsd:annotation/xsd:documentation",
+        ),
+        "names": (str, "/xsd:schema/xsd:complexType/xsd:sequence/xsd:element/@name"),
+        "sensingtime": (
+            str,
+            "/xsd:schema/xsd:complexType/xsd:sequence/xsd:element/sensingTime",
+        ),
+    },
 }
 
 
-def signal_lut_raw(line, sample, lut_sigma0, lut_gamma0,azimuth_times):
+def signal_lut_raw(line, sample, lut_sigma0, lut_gamma0, azimuth_times):
     ds = xr.Dataset()
-    ds['sigma0_lut'] = xr.DataArray(lut_sigma0, dims=['line', 'sample'], coords={'line': line, 'sample': sample},
-                                    name='sigma0', attrs={'description': 'look up table sigma0'})
-    ds['gamma0_lut'] = xr.DataArray(lut_gamma0, dims=['line', 'sample'], coords={'line': line, 'sample': sample},
-                                    name='gamma0', attrs={'description': 'look up table gamma0'})
-    ds['azimuthTime'] = xr.DataArray(azimuth_times, dims=['line'],coords={'line': line},
-                                     attrs={'description': 'azimuth times associated to the signal look up table'})
+    ds["sigma0_lut"] = xr.DataArray(
+        lut_sigma0,
+        dims=["line", "sample"],
+        coords={"line": line, "sample": sample},
+        name="sigma0",
+        attrs={"description": "look up table sigma0"},
+    )
+    ds["gamma0_lut"] = xr.DataArray(
+        lut_gamma0,
+        dims=["line", "sample"],
+        coords={"line": line, "sample": sample},
+        name="gamma0",
+        attrs={"description": "look up table gamma0"},
+    )
+    ds["azimuthTime"] = xr.DataArray(
+        azimuth_times,
+        dims=["line"],
+        coords={"line": line},
+        attrs={"description": "azimuth times associated to the signal look up table"},
+    )
 
     return ds
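
Aside: a minimal sketch of the dataset this builds, with made-up toy values (a 2x3 LUT on a tiny line/sample grid; the azimuth times are placeholders):

    import numpy as np
    import pandas as pd
    from safe_s1.sentinel1_xml_mappings import signal_lut_raw

    line = np.array([0, 100])
    sample = np.array([0, 50, 100])
    lut = np.ones((2, 3))
    times = pd.date_range("2020-01-01", periods=2).values  # placeholder azimuth times

    ds = signal_lut_raw(line, sample, lut, 2.0 * lut, times)
    print(ds["gamma0_lut"].sel(line=100, sample=50).item())  # 2.0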
 
@@ -357,19 +650,19 @@ def signal_lut_raw(line, sample, lut_sigma0, lut_gamma0,azimuth_times):
 def noise_lut_range_raw(lines, samples, noiseLuts, azimuthTimes):
     """
 
-        Parameters
-        ----------
-        lines: np.ndarray
-            1D array of lines. lut is defined at each line
-        samples: list of np.ndarray
-            arrays of samples. list length is same as samples. each array define samples where lut is defined
-        noiseLuts: list of np.ndarray
-            arrays of luts. Same structure as samples.
-        azimuthTimes: np.ndarray
-            1D array of azimuth dates associated to each lines of the noise range grid
+    Parameters
+    ----------
+    lines: np.ndarray
+        1D array of lines. lut is defined at each line
+    samples: list of np.ndarray
+        arrays of samples. list length is the same as lines. each array defines the samples where the lut is defined
+    noiseLuts: list of np.ndarray
+        arrays of luts. Same structure as samples.
+    azimuthTimes: np.ndarray
+        1D array of azimuth dates associated with each line of the noise range grid
 
-        Returns
-        -------
+    Returns
+    -------
     """
 
     ds = xr.Dataset()
@@ -385,50 +678,86 @@ def noise_lut_range_raw(lines, samples, noiseLuts, azimuthTimes):
         normalized_noise_luts.append(noiseLuts[uu][0:minimum_pts])
         normalized_samples.append(samples[uu][0:minimum_pts])
     tmp_noise = np.stack(normalized_noise_luts)
-    ds['noise_lut'] = xr.DataArray(tmp_noise,
-                                   coords={'line': lines, 'sample': samples[0][0:minimum_pts]},
-                                   dims=['line', 'sample'])
+    ds["noise_lut"] = xr.DataArray(
+        tmp_noise,
+        coords={"line": lines, "sample": samples[0][0:minimum_pts]},
+        dims=["line", "sample"],
+    )
     try:
-        ds['azimuthTime'] = xr.DataArray(azimuthTimes,coords={'line': lines},dims=['line'])
-    except: #for IPF2.72 for instance there is no azimuthTimes associated to the noise range LUT
-        ds['azimuthTime'] = xr.DataArray(np.ones(len(lines))*np.nan, coords={'line': lines}, dims=['line'])
+        ds["azimuthTime"] = xr.DataArray(
+            azimuthTimes, coords={"line": lines}, dims=["line"]
+        )
+    except (
+        ValueError
+    ):  # for IPF 2.72, for instance, there are no azimuthTimes associated with the noise range LUT
+        ds["azimuthTime"] = xr.DataArray(
+            np.ones(len(lines)) * np.nan, coords={"line": lines}, dims=["line"]
+        )
     # ds['sample'] = xr.DataArray(np.stack(normalized_samples), coords={'lines': lines, 'sample_index': np.arange(minimum_pts)},
     #                             dims=['lines', 'sample_index'])
 
     return ds
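
Aside: a toy call, assuming minimum_pts (computed just above this hunk) is the length of the shortest vector, so ragged per-line vectors are truncated to a common length before stacking. The numbers are made up:

    import numpy as np
    from safe_s1.sentinel1_xml_mappings import noise_lut_range_raw

    lines = np.array([0, 1])
    samples = [np.array([0, 10, 20]), np.array([0, 10])]   # ragged on purpose
    luts = [np.array([1.0, 2.0, 3.0]), np.array([4.0, 5.0])]
    times = np.array(["2020-01-01T00:00:00", "2020-01-01T00:00:01"],
                     dtype="datetime64[ns]")

    ds = noise_lut_range_raw(lines, samples, luts, times)
    print(ds["noise_lut"].shape)  # (2, 2): truncated to the shortest vector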
 
 
-def noise_lut_azi_raw_grd(line_azi, line_azi_start, line_azi_stop,
-                          sample_azi_start, sample_azi_stop, noise_azi_lut, swath):
+def noise_lut_azi_raw_grd(
+    line_azi,
+    line_azi_start,
+    line_azi_stop,
+    sample_azi_start,
+    sample_azi_stop,
+    noise_azi_lut,
+    swath,
+):
     ds = xr.Dataset()
-    for ii, swathi in enumerate(swath):  # with 2018 data the noise vector are not the same size -> stacking impossible
-        ds['noise_lut_%s' % swathi] = xr.DataArray(noise_azi_lut[ii], coords={'line': line_azi[ii]}, dims=['line'])
-    ds['line_start'] = xr.DataArray(line_azi_start, coords={'swath': swath}, dims=['swath'])
-    ds['line_stop'] = xr.DataArray(line_azi_stop, coords={'swath': swath}, dims=['swath'])
-    ds['sample_start'] = xr.DataArray(sample_azi_start, coords={'swath': swath}, dims=['swath'])
-    ds['sample_stop'] = xr.DataArray(sample_azi_stop, coords={'swath': swath}, dims=['swath'])
+    for ii, swathi in enumerate(
+        swath
+    ):  # with 2018 data the noise vectors are not the same size -> stacking impossible
+        ds["noise_lut_%s" % swathi] = xr.DataArray(
+            noise_azi_lut[ii], coords={"line": line_azi[ii]}, dims=["line"]
+        )
+    ds["line_start"] = xr.DataArray(
+        line_azi_start, coords={"swath": swath}, dims=["swath"]
+    )
+    ds["line_stop"] = xr.DataArray(
+        line_azi_stop, coords={"swath": swath}, dims=["swath"]
+    )
+    ds["sample_start"] = xr.DataArray(
+        sample_azi_start, coords={"swath": swath}, dims=["swath"]
+    )
+    ds["sample_stop"] = xr.DataArray(
+        sample_azi_stop, coords={"swath": swath}, dims=["swath"]
+    )
 
     return ds
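
Aside: a sketch of the resulting layout with two made-up sub-swaths (same-length vectors here for simplicity; real 2018-era products may differ, hence the per-swath variables):

    import numpy as np
    from safe_s1.sentinel1_xml_mappings import noise_lut_azi_raw_grd

    line_azi = [np.array([0, 10, 20]), np.array([0, 10, 20])]
    lut = [np.array([1.0, 1.1, 1.2]), np.array([0.9, 1.0, 1.1])]
    ds = noise_lut_azi_raw_grd(
        line_azi,
        np.array([0, 0]), np.array([20, 20]),      # first/last azimuth line
        np.array([0, 100]), np.array([99, 199]),   # first/last range sample
        lut, ["IW1", "IW2"],
    )
    print(sorted(ds.data_vars))
    # ['line_start', 'line_stop', 'noise_lut_IW1', 'noise_lut_IW2', ...]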
 
 
-def noise_lut_azi_raw_slc(line_azi, line_azi_start, line_azi_stop,
-                          sample_azi_start, sample_azi_stop, noise_azi_lut, swath):
+def noise_lut_azi_raw_slc(
+    line_azi,
+    line_azi_start,
+    line_azi_stop,
+    sample_azi_start,
+    sample_azi_stop,
+    noise_azi_lut,
+    swath,
+):
     ds = xr.Dataset()
     # if 'WV' in mode: # there is no noise in azimuth for WV acquisitions
     if swath == []:  # WV SLC case
-        ds['noise_lut'] = xr.DataArray(
-            1.)  # set noise_azimuth to one to make post steps like noise_azi*noise_range always possible
-        ds['line_start'] = xr.DataArray(line_azi_start, attrs={'swath': swath})
-        ds['line_stop'] = xr.DataArray(line_azi_stop, attrs={'swath': swath})
-        ds['sample_start'] = xr.DataArray(sample_azi_start, attrs={'swath': swath})
-        ds['sample_stop'] = xr.DataArray(sample_azi_stop, attrs={'swath': swath})
+        ds["noise_lut"] = xr.DataArray(
+            1.0
+        )  # set noise_azimuth to one to make post steps like noise_azi*noise_range always possible
+        ds["line_start"] = xr.DataArray(line_azi_start, attrs={"swath": swath})
+        ds["line_stop"] = xr.DataArray(line_azi_stop, attrs={"swath": swath})
+        ds["sample_start"] = xr.DataArray(sample_azi_start, attrs={"swath": swath})
+        ds["sample_stop"] = xr.DataArray(sample_azi_stop, attrs={"swath": swath})
     else:
-        ds['noise_lut'] = xr.DataArray(noise_azi_lut[0], coords={'line': line_azi[0]},
-                                       dims=['line'])  # only on subswath opened
-        ds['line_start'] = xr.DataArray(line_azi_start[0], attrs={'swath': swath})
-        ds['line_stop'] = xr.DataArray(line_azi_stop[0], attrs={'swath': swath})
-        ds['sample_start'] = xr.DataArray(sample_azi_start[0], attrs={'swath': swath})
-        ds['sample_stop'] = xr.DataArray(sample_azi_stop[0], attrs={'swath': swath})
+        ds["noise_lut"] = xr.DataArray(
+            noise_azi_lut[0], coords={"line": line_azi[0]}, dims=["line"]
+        )  # only one subswath opened
+        ds["line_start"] = xr.DataArray(line_azi_start[0], attrs={"swath": swath})
+        ds["line_stop"] = xr.DataArray(line_azi_stop[0], attrs={"swath": swath})
+        ds["sample_start"] = xr.DataArray(sample_azi_start[0], attrs={"swath": swath})
+        ds["sample_stop"] = xr.DataArray(sample_azi_stop[0], attrs={"swath": swath})
     # ds['noise_lut'] = xr.DataArray(np.stack(noise_azi_lut).T, coords={'line_index': np.arange(len(line_azi[0])), 'swath': swath},
     #                               dims=['line_index', 'swath'])
     # ds['line'] = xr.DataArray(np.stack(line_azi).T, coords={'line_index': np.arange(len(line_azi[0])), 'swath': swath},
@@ -444,9 +773,12 @@ def datetime64_array(dates):
 
 def df_files(annotation_files, measurement_files, noise_files, calibration_files):
     # get polarizations and file number from filename
-    pols = [os.path.basename(f).split('-')[3].upper() for f in annotation_files]
-    num = [int(os.path.splitext(os.path.basename(f))[0].split('-')[8]) for f in annotation_files]
-    dsid = [os.path.basename(f).split('-')[1].upper() for f in annotation_files]
+    pols = [os.path.basename(f).split("-")[3].upper() for f in annotation_files]
+    num = [
+        int(os.path.splitext(os.path.basename(f))[0].split("-")[8])
+        for f in annotation_files
+    ]
+    dsid = [os.path.basename(f).split("-")[1].upper() for f in annotation_files]
 
     # check that dsid are spatially unique (i.e. there is only one dsid per geographic position)
     # in some SAFEs, like WV, dsid are not unique ('WV1' and 'WV2')
@@ -457,18 +789,20 @@ def df_files(annotation_files, measurement_files, noise_files, calibration_files
     if dsid_count != subds_count:
         dsid_rad = dsid[0][:-1]  # WV
         dsid = ["%s_%03d" % (dsid_rad, n) for n in num]
-        assert len(set(dsid)) == subds_count  # probably an unknown mode we need to handle
+        assert (
+            len(set(dsid)) == subds_count
+        )  # probably an unknown mode we need to handle
 
     df = pd.DataFrame(
         {
-            'polarization': pols,
-            'dsid': dsid,
-            'annotation': annotation_files,
-            'measurement': measurement_files,
-            'noise': noise_files,
-            'calibration': calibration_files,
+            "polarization": pols,
+            "dsid": dsid,
+            "annotation": annotation_files,
+            "measurement": measurement_files,
+            "noise": noise_files,
+            "calibration": calibration_files,
         },
-        index=num
+        index=num,
     )
     return df
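
Aside: two hypothetical GRD annotation names in the Sentinel-1 dash-separated convention (polarisation is field 3, sub-swath field 1, file number field 8, all 0-based); the same list is reused for the other columns just to keep the sketch short, and the spatial-uniqueness check above this hunk should pass here (one sub-swath, two polarisations):

    from safe_s1.sentinel1_xml_mappings import df_files

    ann = [
        "s1a-iw-grd-vv-20200101t000000-20200101t000025-030000-038b5e-001.xml",
        "s1a-iw-grd-vh-20200101t000000-20200101t000025-030000-038b5e-002.xml",
    ]
    df = df_files(ann, ann, ann, ann)
    print(df[["polarization", "dsid"]])
    #   polarization dsid
    # 1           VV   IW
    # 2           VH   IW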
 
@@ -481,11 +815,23 @@ def xsd_files_func(xsd_product_file):
     """
     ds = xr.Dataset()
 
-    ds['xsd_product'] = xarray.DataArray(xsd_product_file)
+    ds["xsd_product"] = xarray.DataArray(xsd_product_file)
     return ds
 
 
-def orbit(time, frame, pos_x, pos_y, pos_z, vel_x, vel_y, vel_z, orbit_pass, platform_heading, return_xarray=True):
+def orbit(
+    time,
+    frame,
+    pos_x,
+    pos_y,
+    pos_z,
+    vel_x,
+    vel_y,
+    vel_z,
+    orbit_pass,
+    platform_heading,
+    return_xarray=True,
+):
     """
     Parameters
     ----------
@@ -496,31 +842,29 @@ def orbit(time, frame, pos_x, pos_y, pos_z, vel_x, vel_y, vel_z, orbit_pass, pla
         with 'geometry' as position, 'time' as index, 'velocity' as velocity, and 'geocent' as crs.
     """
 
-    if (frame[0] != 'Earth Fixed') or (np.unique(frame).size != 1):
+    if (frame[0] != "Earth Fixed") or (np.unique(frame).size != 1):
         raise NotImplementedError('All orbit frames must be of type "Earth Fixed"')
     if return_xarray is False:
-        crs = pyproj.crs.CRS(proj='geocent', ellps='WGS84', datum='WGS84')
+        crs = pyproj.crs.CRS(proj="geocent", ellps="WGS84", datum="WGS84")
 
         res = gpd.GeoDataFrame(
-            {
-                'velocity': list(map(Point, zip(vel_x, vel_y, vel_z)))
-            },
+            {"velocity": list(map(Point, zip(vel_x, vel_y, vel_z)))},
             geometry=list(map(Point, zip(pos_x, pos_y, pos_z))),
             crs=crs,
-            index=time
+            index=time,
         )
     else:
         res = xr.Dataset()
-        res['velocity_x'] = xr.DataArray(vel_x, dims=['time'], coords={'time': time})
-        res['velocity_y'] = xr.DataArray(vel_y, dims=['time'], coords={'time': time})
-        res['velocity_z'] = xr.DataArray(vel_z, dims=['time'], coords={'time': time})
-        res['position_x'] = xr.DataArray(pos_x, dims=['time'], coords={'time': time})
-        res['position_y'] = xr.DataArray(pos_y, dims=['time'], coords={'time': time})
-        res['position_z'] = xr.DataArray(pos_z, dims=['time'], coords={'time': time})
+        res["velocity_x"] = xr.DataArray(vel_x, dims=["time"], coords={"time": time})
+        res["velocity_y"] = xr.DataArray(vel_y, dims=["time"], coords={"time": time})
+        res["velocity_z"] = xr.DataArray(vel_z, dims=["time"], coords={"time": time})
+        res["position_x"] = xr.DataArray(pos_x, dims=["time"], coords={"time": time})
+        res["position_y"] = xr.DataArray(pos_y, dims=["time"], coords={"time": time})
+        res["position_z"] = xr.DataArray(pos_z, dims=["time"], coords={"time": time})
     res.attrs = {
-        'orbit_pass': orbit_pass,
-        'platform_heading': platform_heading,
-        'frame': frame[0]
+        "orbit_pass": orbit_pass,
+        "platform_heading": platform_heading,
+        "frame": frame[0],
     }
     return res
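
Aside: a minimal call with placeholder values (three epochs, zero state vectors, made-up pass and heading), showing the default xarray return path:

    import numpy as np
    import pandas as pd
    from safe_s1.sentinel1_xml_mappings import orbit

    t = pd.date_range("2020-01-01", periods=3).values
    frame = np.array(["Earth Fixed"] * 3)   # anything else raises NotImplementedError
    z = np.zeros(3)

    ds = orbit(t, frame, z, z, z, z, z, z, "ASCENDING", -12.0)
    print(ds.attrs["orbit_pass"], ds["position_x"].dims)  # ASCENDING ('time',)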
 
@@ -546,19 +890,38 @@ def azimuth_fmrate(azimuthtime, t0, c0, c1, c2, polynomial):
         # old IPF annotation
         polynomial = np.stack([c0, c1, c2], axis=1)
     res = xr.Dataset()
-    res['t0'] = xr.DataArray(t0, dims=['azimuthTime'], coords={'azimuthTime': azimuthtime},
-                             attrs={'source': xpath_mappings['annotation']['fmrate_t0'][1]})
-    res['azimuthFmRatePolynomial'] = xr.DataArray([Polynomial(p) for p in polynomial],
-                                                  dims=['azimuthTime'],
-                                                  coords={'azimuthTime': azimuthtime},
-                                                  attrs={'source': xpath_mappings['annotation'][
-                                                      'fmrate_azimuthFmRatePolynomial'][1]})
+    res["t0"] = xr.DataArray(
+        t0,
+        dims=["azimuthTime"],
+        coords={"azimuthTime": azimuthtime},
+        attrs={"source": xpath_mappings["annotation"]["fmrate_t0"][1]},
+    )
+    res["azimuthFmRatePolynomial"] = xr.DataArray(
+        [Polynomial(p) for p in polynomial],
+        dims=["azimuthTime"],
+        coords={"azimuthTime": azimuthtime},
+        attrs={
+            "source": xpath_mappings["annotation"]["fmrate_azimuthFmRatePolynomial"][1]
+        },
+    )
     return res
 
 
-def image(product_type, line_time_range, line_size, sample_size, incidence_angle_mid_swath, azimuth_time_interval,
-          slant_range_time_image, azimuthPixelSpacing, rangePixelSpacing, swath_subswath, radar_frequency,
-          range_sampling_rate, azimuth_steering_rate):
+def image(
+    product_type,
+    line_time_range,
+    line_size,
+    sample_size,
+    incidence_angle_mid_swath,
+    azimuth_time_interval,
+    slant_range_time_image,
+    azimuthPixelSpacing,
+    rangePixelSpacing,
+    swath_subswath,
+    radar_frequency,
+    range_sampling_rate,
+    azimuth_steering_rate,
+):
     """
     Decode attributes describing the SAR image
     Parameters
@@ -580,94 +943,124 @@ def image(product_type, line_time_range, line_size, sample_size, incidence_angle
     -------
     xarray.Dataset
     """
-    if product_type == 'SLC' or product_type == 'SL2':
-        pixel_sample_m = rangePixelSpacing / np.sin(np.radians(incidence_angle_mid_swath))
+    if product_type == "SLC" or product_type == "SL2":
+        pixel_sample_m = rangePixelSpacing / np.sin(
+            np.radians(incidence_angle_mid_swath)
+        )
     else:
         pixel_sample_m = rangePixelSpacing
     tmp = {
-        'LineUtcTime': (line_time_range, 'line_time_range'),
-        'numberOfLines': (line_size, 'line_size'),
-        'numberOfSamples': (sample_size, 'sample_size'),
-        'azimuthPixelSpacing': (azimuthPixelSpacing, 'azimuthPixelSpacing'),
-        'slantRangePixelSpacing': (rangePixelSpacing, 'rangePixelSpacing'),
-        'groundRangePixelSpacing': (pixel_sample_m, 'rangePixelSpacing'),
-        'incidenceAngleMidSwath': (incidence_angle_mid_swath, 'incidence_angle_mid_swath'),
-        'azimuthTimeInterval': (azimuth_time_interval, 'azimuth_time_interval'),
-        'slantRangeTime': (slant_range_time_image, 'slant_range_time_image'),
-        'swath_subswath': (swath_subswath, 'swath_subswath'),
-        'radarFrequency': (radar_frequency, 'radar_frequency'),
-        'rangeSamplingRate': (range_sampling_rate, 'range_sampling_rate'),
-        'azimuthSteeringRate': (azimuth_steering_rate, 'azimuth_steering_rate'),
+        "LineUtcTime": (line_time_range, "line_time_range"),
+        "numberOfLines": (line_size, "line_size"),
+        "numberOfSamples": (sample_size, "sample_size"),
+        "azimuthPixelSpacing": (azimuthPixelSpacing, "azimuthPixelSpacing"),
+        "slantRangePixelSpacing": (rangePixelSpacing, "rangePixelSpacing"),
+        "groundRangePixelSpacing": (pixel_sample_m, "rangePixelSpacing"),
+        "incidenceAngleMidSwath": (
+            incidence_angle_mid_swath,
+            "incidence_angle_mid_swath",
+        ),
+        "azimuthTimeInterval": (azimuth_time_interval, "azimuth_time_interval"),
+        "slantRangeTime": (slant_range_time_image, "slant_range_time_image"),
+        "swath_subswath": (swath_subswath, "swath_subswath"),
+        "radarFrequency": (radar_frequency, "radar_frequency"),
+        "rangeSamplingRate": (range_sampling_rate, "range_sampling_rate"),
+        "azimuthSteeringRate": (azimuth_steering_rate, "azimuth_steering_rate"),
     }
     ds = xr.Dataset()
     for ke in tmp:
-        ds[ke] = xr.DataArray(tmp[ke][0], attrs={'source': xpath_mappings['annotation'][tmp[ke][1]][1]})
+        ds[ke] = xr.DataArray(
+            tmp[ke][0], attrs={"source": xpath_mappings["annotation"][tmp[ke][1]][1]}
+        )
     return ds
 
 
-def bursts(line_per_burst, sample_per_burst, burst_azimuthTime, burst_azimuthAnxTime, burst_sensingTime,
-           burst_byteOffset, burst_firstValidSample, burst_lastValidSample):
+def bursts(
+    line_per_burst,
+    sample_per_burst,
+    burst_azimuthTime,
+    burst_azimuthAnxTime,
+    burst_sensingTime,
+    burst_byteOffset,
+    burst_firstValidSample,
+    burst_lastValidSample,
+):
     """return burst as an xarray dataset"""
     da = xr.Dataset()
     if (line_per_burst == 0) and (sample_per_burst == 0):
         pass
     else:
-
         # convert to float, so we can use NaN as missing value, instead of -1
         burst_firstValidSample = burst_firstValidSample.astype(float)
         burst_lastValidSample = burst_lastValidSample.astype(float)
         burst_firstValidSample[burst_firstValidSample == -1] = np.nan
         burst_lastValidSample[burst_lastValidSample == -1] = np.nan
-        nbursts = len(burst_azimuthTime)
-        # valid_locations = np.empty((nbursts, 4), dtype='int32')
-        # for ibur in range(nbursts):
-        #     fvs = burst_firstValidSample[ibur, :]
-        #     lvs = burst_lastValidSample[ibur, :]
-        #     # valind = np.where((fvs != -1) | (lvs != -1))[0]
-        #     valind = np.where(np.isfinite(fvs) | np.isfinite(lvs))[0]
-        #     valloc = [ibur * line_per_burst + valind.min(), fvs[valind].min(),
-        #               ibur * line_per_burst + valind.max(), lvs[valind].max()]
-        #     valid_locations[ibur, :] = valloc
         da = xr.Dataset(
             {
-                'azimuthTime': ('burst', burst_azimuthTime),
-                'azimuthAnxTime': ('burst', burst_azimuthAnxTime),
-                'sensingTime': ('burst', burst_sensingTime),
-                'byteOffset': ('burst', burst_byteOffset),
-                'firstValidSample': (['burst', 'line'], burst_firstValidSample),
-                'lastValidSample': (['burst', 'line'], burst_lastValidSample),
+                "azimuthTime": ("burst", burst_azimuthTime),
+                "azimuthAnxTime": ("burst", burst_azimuthAnxTime),
+                "sensingTime": ("burst", burst_sensingTime),
+                "byteOffset": ("burst", burst_byteOffset),
+                "firstValidSample": (["burst", "line"], burst_firstValidSample),
+                "lastValidSample": (["burst", "line"], burst_lastValidSample),
                 # 'valid_location': xr.DataArray(dims=['burst', 'limits'], data=valid_locations,
                 #                                attrs={
                 #                                    'description': 'start line index, start sample index, stop line index, stop sample index'}),
             }
         )
-        da['azimuthTime'].attrs = {'source': xpath_mappings['annotation']['burst_azimuthTime'][1]}
-        da['azimuthAnxTime'].attrs = {'source': xpath_mappings['annotation']['burst_azimuthAnxTime'][1]}
-        da['sensingTime'].attrs = {'source': xpath_mappings['annotation']['burst_sensingTime'][1]}
-        da['byteOffset'].attrs = {'source': xpath_mappings['annotation']['burst_byteOffset'][1]}
-        da['firstValidSample'].attrs = {'source': xpath_mappings['annotation']['burst_firstValidSample'][1]}
-        da['lastValidSample'].attrs = {'source': xpath_mappings['annotation']['burst_lastValidSample'][1]}
+        da["azimuthTime"].attrs = {
+            "source": xpath_mappings["annotation"]["burst_azimuthTime"][1]
+        }
+        da["azimuthAnxTime"].attrs = {
+            "source": xpath_mappings["annotation"]["burst_azimuthAnxTime"][1]
+        }
+        da["sensingTime"].attrs = {
+            "source": xpath_mappings["annotation"]["burst_sensingTime"][1]
+        }
+        da["byteOffset"].attrs = {
+            "source": xpath_mappings["annotation"]["burst_byteOffset"][1]
+        }
+        da["firstValidSample"].attrs = {
+            "source": xpath_mappings["annotation"]["burst_firstValidSample"][1]
+        }
+        da["lastValidSample"].attrs = {
+            "source": xpath_mappings["annotation"]["burst_lastValidSample"][1]
+        }
         # da['valid_location'].attrs = {'source': xpath_mappings['annotation']['burst_firstValidSample'][1]+'\n'+xpath_mappings['annotation']['burst_lastValidSample'][1]}
-    da['linesPerBurst'] = xr.DataArray(line_per_burst,
-                                       attrs={'source': xpath_mappings['annotation']['linesPerBurst'][1]})
-    da['samplesPerBurst'] = xr.DataArray(sample_per_burst,
-                                         attrs={'source': xpath_mappings['annotation']['samplesPerBurst'][1]})
+    da["linesPerBurst"] = xr.DataArray(
+        line_per_burst,
+        attrs={"source": xpath_mappings["annotation"]["linesPerBurst"][1]},
+    )
+    da["samplesPerBurst"] = xr.DataArray(
+        sample_per_burst,
+        attrs={"source": xpath_mappings["annotation"]["samplesPerBurst"][1]},
+    )
     return da
 
 
 def bursts_grd(line_per_burst, sample_per_burst):
     """return burst as an xarray dataset"""
-    da = xr.Dataset({'azimuthTime': ('burst', [])})
+    da = xr.Dataset({"azimuthTime": ("burst", [])})
 
-    da['linesPerBurst'] = xr.DataArray(line_per_burst)
-    da['samplesPerBurst'] = xr.DataArray(sample_per_burst)
+    da["linesPerBurst"] = xr.DataArray(line_per_burst)
+    da["samplesPerBurst"] = xr.DataArray(sample_per_burst)
     return da
 
 
-def doppler_centroid_estimates(nb_dcestimate,
-                               nb_fineDce, dc_azimuth_time, dc_t0, dc_geoDcPoly,
-                               dc_dataDcPoly, dc_rmserr, dc_rmserrAboveThres, dc_azstarttime,
-                               dc_azstoptime, dc_slantRangeTime, dc_frequency):
+def doppler_centroid_estimates(
+    nb_dcestimate,
+    nb_fineDce,
+    dc_azimuth_time,
+    dc_t0,
+    dc_geoDcPoly,
+    dc_dataDcPoly,
+    dc_rmserr,
+    dc_rmserrAboveThres,
+    dc_azstarttime,
+    dc_azstoptime,
+    dc_slantRangeTime,
+    dc_frequency,
+):
     """
     Decode Doppler centroid estimate information from XML annotation files
     Parameters
@@ -692,39 +1085,66 @@ def doppler_centroid_estimates(nb_dcestimate,
 
     """
     ds = xr.Dataset()
-    ds['t0'] = xr.DataArray(dc_t0.astype(float), dims=['azimuthTime'],
-                            attrs={'source': xpath_mappings['annotation']['dc_t0'][1]},
-                            coords={'azimuthTime': dc_azimuth_time})
-    ds['geometryDcPolynomial'] = xr.DataArray([Polynomial(p) for p in dc_geoDcPoly], dims=['azimuthTime'],
-                                              attrs={'source': xpath_mappings['annotation']['dc_geoDcPoly'][1]},
-                                              coords={'azimuthTime': dc_azimuth_time})
-    ds['dataDcPolynomial'] = xr.DataArray([Polynomial(p) for p in dc_dataDcPoly], dims=['azimuthTime'],
-                                          attrs={'source': xpath_mappings['annotation']['dc_dataDcPoly'][1]},
-                                          coords={'azimuthTime': dc_azimuth_time})
+    ds["t0"] = xr.DataArray(
+        dc_t0.astype(float),
+        dims=["azimuthTime"],
+        attrs={"source": xpath_mappings["annotation"]["dc_t0"][1]},
+        coords={"azimuthTime": dc_azimuth_time},
+    )
+    ds["geometryDcPolynomial"] = xr.DataArray(
+        [Polynomial(p) for p in dc_geoDcPoly],
+        dims=["azimuthTime"],
+        attrs={"source": xpath_mappings["annotation"]["dc_geoDcPoly"][1]},
+        coords={"azimuthTime": dc_azimuth_time},
+    )
+    ds["dataDcPolynomial"] = xr.DataArray(
+        [Polynomial(p) for p in dc_dataDcPoly],
+        dims=["azimuthTime"],
+        attrs={"source": xpath_mappings["annotation"]["dc_dataDcPoly"][1]},
+        coords={"azimuthTime": dc_azimuth_time},
+    )
     dims = (nb_dcestimate, nb_fineDce)
 
-    ds['azimuthTime'].attrs = {'source': xpath_mappings['annotation']['dc_azimuth_time'][1]}
-    ds['fineDceAzimuthStartTime'] = xr.DataArray(dc_azstarttime, dims=['azimuthTime'],
-                                                 attrs={'source': xpath_mappings['annotation']['dc_azstarttime'][1]},
-                                                 coords={'azimuthTime': dc_azimuth_time})
-    ds['fineDceAzimuthStopTime'] = xr.DataArray(dc_azstoptime, dims=['azimuthTime'],
-                                                attrs={'source': xpath_mappings['annotation']['dc_azstoptime'][1]},
-                                                coords={'azimuthTime': dc_azimuth_time})
-    ds['dataDcRmsError'] = xr.DataArray(dc_rmserr.astype(float), dims=['azimuthTime'],
-                                        attrs={'source': xpath_mappings['annotation']['dc_rmserr'][1]},
-                                        coords={'azimuthTime': dc_azimuth_time})
-    ds['slantRangeTime'] = xr.DataArray(dc_slantRangeTime.reshape(dims), dims=['azimuthTime', 'nb_fine_dce'],
-                                        attrs={'source': xpath_mappings['annotation']['dc_slantRangeTime'][1]},
-                                        coords={'azimuthTime': dc_azimuth_time, 'nb_fine_dce': np.arange(nb_fineDce)})
-    ds['frequency'] = xr.DataArray(dc_frequency.reshape(dims), dims=['azimuthTime', 'nb_fine_dce'],
-                                   attrs={'source': xpath_mappings['annotation']['dc_frequency'][1]},
-                                   coords={'azimuthTime': dc_azimuth_time, 'nb_fine_dce': np.arange(nb_fineDce)})
-    ds['dataDcRmsErrorAboveThreshold'] = xr.DataArray(dc_rmserrAboveThres, dims=['azimuthTime'],
-                                                      attrs={
-                                                          'source': xpath_mappings['annotation']['dc_rmserrAboveThres'][
-                                                              1]},
-                                                      coords={'azimuthTime': dc_azimuth_time})
-    
+    ds["azimuthTime"].attrs = {
+        "source": xpath_mappings["annotation"]["dc_azimuth_time"][1]
+    }
+    ds["fineDceAzimuthStartTime"] = xr.DataArray(
+        dc_azstarttime,
+        dims=["azimuthTime"],
+        attrs={"source": xpath_mappings["annotation"]["dc_azstarttime"][1]},
+        coords={"azimuthTime": dc_azimuth_time},
+    )
+    ds["fineDceAzimuthStopTime"] = xr.DataArray(
+        dc_azstoptime,
+        dims=["azimuthTime"],
+        attrs={"source": xpath_mappings["annotation"]["dc_azstoptime"][1]},
+        coords={"azimuthTime": dc_azimuth_time},
+    )
+    ds["dataDcRmsError"] = xr.DataArray(
+        dc_rmserr.astype(float),
+        dims=["azimuthTime"],
+        attrs={"source": xpath_mappings["annotation"]["dc_rmserr"][1]},
+        coords={"azimuthTime": dc_azimuth_time},
+    )
+    ds["slantRangeTime"] = xr.DataArray(
+        dc_slantRangeTime.reshape(dims),
+        dims=["azimuthTime", "nb_fine_dce"],
+        attrs={"source": xpath_mappings["annotation"]["dc_slantRangeTime"][1]},
+        coords={"azimuthTime": dc_azimuth_time, "nb_fine_dce": np.arange(nb_fineDce)},
+    )
+    ds["frequency"] = xr.DataArray(
+        dc_frequency.reshape(dims),
+        dims=["azimuthTime", "nb_fine_dce"],
+        attrs={"source": xpath_mappings["annotation"]["dc_frequency"][1]},
+        coords={"azimuthTime": dc_azimuth_time, "nb_fine_dce": np.arange(nb_fineDce)},
+    )
+    ds["dataDcRmsErrorAboveThreshold"] = xr.DataArray(
+        dc_rmserrAboveThres,
+        dims=["azimuthTime"],
+        attrs={"source": xpath_mappings["annotation"]["dc_rmserrAboveThres"][1]},
+        coords={"azimuthTime": dc_azimuth_time},
+    )
+
     return ds
 
 
@@ -745,9 +1165,21 @@ def geolocation_grid(line, sample, values):
     """
     shape = (line.size, sample.size)
     values = np.reshape(values, shape)
-    return xr.DataArray(values, dims=['line', 'sample'], coords={'line': line, 'sample': sample})
+    return xr.DataArray(
+        values, dims=["line", "sample"], coords={"line": line, "sample": sample}
+    )
+
 
-def antenna_pattern(ap_swath,ap_roll,ap_azimuthTime,ap_terrainHeight,ap_elevationAngle,ap_elevationPattern,ap_incidenceAngle,ap_slantRangeTime):
+def antenna_pattern(
+    ap_swath,
+    ap_roll,
+    ap_azimuthTime,
+    ap_terrainHeight,
+    ap_elevationAngle,
+    ap_elevationPattern,
+    ap_incidenceAngle,
+    ap_slantRangeTime,
+):
     """
 
     Parameters
@@ -760,14 +1192,16 @@ def antenna_pattern(ap_swath,ap_roll,ap_azimuthTime,ap_terrainHeight,ap_elevatio
     ap_elevationPattern
     ap_incidenceAngle
     ap_slantRangeTime
-    
+
     Returns
     -------
     xarray.Dataset
-    """   
+    """
+
     # Function to convert a swath string like 'EW1' or 'IW3' to an int
     def convert_to_int(swath):
         return int(swath[-1])
+
     vectorized_convert = np.vectorize(convert_to_int)
     swathNumber = vectorized_convert(ap_swath)
 
@@ -776,79 +1210,112 @@ def antenna_pattern(ap_swath,ap_roll,ap_azimuthTime,ap_terrainHeight,ap_elevatio
 
     include_roll = len(ap_roll) != 0
 
-    # Create 2Ds arrays 
-    elevAngle2d = np.full((len(ap_elevationAngle), dim_slantRangeTime), np.nan)  
-    gain2d = np.full((len(ap_elevationPattern), dim_slantRangeTime), np.nan)  
+    # Create 2D arrays
+    elevAngle2d = np.full((len(ap_elevationAngle), dim_slantRangeTime), np.nan)
+    gain2d = np.full((len(ap_elevationPattern), dim_slantRangeTime), np.nan)
     slantRangeTime2d = np.full((len(ap_slantRangeTime), dim_slantRangeTime), np.nan)
     incAngle2d = np.full((len(ap_incidenceAngle), dim_slantRangeTime), np.nan)
 
-    
     for i in range(len(ap_elevationAngle)):
-        elevAngle2d[i, :ap_elevationAngle[i].shape[0]] = ap_elevationAngle[i]
+        elevAngle2d[i, : ap_elevationAngle[i].shape[0]] = ap_elevationAngle[i]
 
-        if ap_elevationAngle[i].shape[0] != ap_elevationPattern[i].shape[0] :
-            gain2d[i, :ap_elevationAngle[i].shape[0]] = np.sqrt(ap_elevationPattern[i][::2]**2+ap_elevationPattern[i][1::2]**2)
+        if ap_elevationAngle[i].shape[0] != ap_elevationPattern[i].shape[0]:
+            gain2d[i, : ap_elevationAngle[i].shape[0]] = np.sqrt(
+                ap_elevationPattern[i][::2] ** 2 + ap_elevationPattern[i][1::2] ** 2
+            )
         else:
-            #logging.warn("antenna pattern is not given in complex values. You probably use an old file\n" + e) 
-            gain2d[i, :ap_elevationAngle[i].shape[0]] = ap_elevationPattern[i]
-
-        slantRangeTime2d[i, :ap_slantRangeTime[i].shape[0]] = ap_slantRangeTime[i]
-        incAngle2d[i, :ap_incidenceAngle[i].shape[0]] = ap_incidenceAngle[i]
+            # logging.warn("antenna pattern is not given in complex values. You are probably using an old file\n" + e)
+            gain2d[i, : ap_elevationAngle[i].shape[0]] = ap_elevationPattern[i]
 
+        slantRangeTime2d[i, : ap_slantRangeTime[i].shape[0]] = ap_slantRangeTime[i]
+        incAngle2d[i, : ap_incidenceAngle[i].shape[0]] = ap_incidenceAngle[i]
 
     swath_number_2d = np.full((len(np.unique(swathNumber)), dim_azimuthTime), np.nan)
     roll_angle_2d = np.full((len(np.unique(swathNumber)), dim_azimuthTime), np.nan)
     azimuthTime_2d = np.full((len(np.unique(swathNumber)), dim_azimuthTime), np.nan)
     terrainHeight_2d = np.full((len(np.unique(swathNumber)), dim_azimuthTime), np.nan)
 
-    slantRangeTime_2d = np.full((len(np.unique(swathNumber)), dim_slantRangeTime), np.nan)
+    slantRangeTime_2d = np.full(
+        (len(np.unique(swathNumber)), dim_slantRangeTime), np.nan
+    )
 
-    elevationAngle_3d = np.full((len(np.unique(swathNumber)), dim_azimuthTime, dim_slantRangeTime), np.nan)
-    incidenceAngle_3d = np.full((len(np.unique(swathNumber)), dim_azimuthTime, dim_slantRangeTime), np.nan)
-    gain3d = np.full((len(np.unique(swathNumber)), dim_azimuthTime, dim_slantRangeTime), np.nan)
+    elevationAngle_3d = np.full(
+        (len(np.unique(swathNumber)), dim_azimuthTime, dim_slantRangeTime), np.nan
+    )
+    incidenceAngle_3d = np.full(
+        (len(np.unique(swathNumber)), dim_azimuthTime, dim_slantRangeTime), np.nan
+    )
+    gain3d = np.full(
+        (len(np.unique(swathNumber)), dim_azimuthTime, dim_slantRangeTime), np.nan
+    )
 
-    
     for i, swath_number in enumerate(np.unique(swathNumber)):
         length_dim0 = len(ap_azimuthTime[swathNumber == swath_number])
         swath_number_2d[i, :length_dim0] = swathNumber[swathNumber == swath_number]
         azimuthTime_2d[i, :length_dim0] = ap_azimuthTime[swathNumber == swath_number]
-        terrainHeight_2d[i, :length_dim0] = ap_terrainHeight[swathNumber == swath_number]
+        terrainHeight_2d[i, :length_dim0] = ap_terrainHeight[
+            swathNumber == swath_number
+        ]
         slantRangeTime_2d[i, :] = slantRangeTime2d[i, :]
 
         if include_roll:
-            roll_angle_2d[i, :length_dim0] = ap_roll[swathNumber == swath_number]        
+            roll_angle_2d[i, :length_dim0] = ap_roll[swathNumber == swath_number]
 
         for j in range(0, dim_slantRangeTime):
-            elevationAngle_3d[i,:length_dim0,j]=elevAngle2d[swathNumber == swath_number,j]
-            incidenceAngle_3d[i,:length_dim0,j]=incAngle2d[swathNumber == swath_number,j]
-            gain3d[i,:length_dim0,j]=gain2d[swathNumber == swath_number,j]
-        
-    azimuthTime_2d = azimuthTime_2d.astype('datetime64[ns]')
-
-    # return a Dataset
-    ds = xr.Dataset({
-        'slantRangeTime' : (['swath_nb', 'dim_slantRangeTime'], slantRangeTime_2d),
-        'swath' : (['swath_nb', 'dim_azimuthTime'], swath_number_2d),
-        'roll' : (['swath_nb', 'dim_azimuthTime'], roll_angle_2d),
-        'azimuthTime' : (['swath_nb', 'dim_azimuthTime'], azimuthTime_2d),
-        'terrainHeight' : (['swath_nb', 'dim_azimuthTime'], terrainHeight_2d),
-        'elevationAngle' : (['swath_nb', 'dim_azimuthTime','dim_slantRangeTime'],elevationAngle_3d),
-        'incidenceAngle' : (['swath_nb', 'dim_azimuthTime','dim_slantRangeTime'],incidenceAngle_3d),
-        'gain' : (['swath_nb', 'dim_azimuthTime','dim_slantRangeTime'],gain3d),
-        },    
-        coords={'swath_nb': np.unique(swathNumber)}
+            elevationAngle_3d[i, :length_dim0, j] = elevAngle2d[
+                swathNumber == swath_number, j
+            ]
+            incidenceAngle_3d[i, :length_dim0, j] = incAngle2d[
+                swathNumber == swath_number, j
+            ]
+            gain3d[i, :length_dim0, j] = gain2d[swathNumber == swath_number, j]
+
+    azimuthTime_2d = azimuthTime_2d.astype("datetime64[ns]")
+
+    # return a Dataset
+    ds = xr.Dataset(
+        {
+            "slantRangeTime": (["swath_nb", "dim_slantRangeTime"], slantRangeTime_2d),
+            "swath": (["swath_nb", "dim_azimuthTime"], swath_number_2d),
+            "roll": (["swath_nb", "dim_azimuthTime"], roll_angle_2d),
+            "azimuthTime": (["swath_nb", "dim_azimuthTime"], azimuthTime_2d),
+            "terrainHeight": (["swath_nb", "dim_azimuthTime"], terrainHeight_2d),
+            "elevationAngle": (
+                ["swath_nb", "dim_azimuthTime", "dim_slantRangeTime"],
+                elevationAngle_3d,
+            ),
+            "incidenceAngle": (
+                ["swath_nb", "dim_azimuthTime", "dim_slantRangeTime"],
+                incidenceAngle_3d,
+            ),
+            "gain": (["swath_nb", "dim_azimuthTime", "dim_slantRangeTime"], gain3d),
+        },
+        coords={"swath_nb": np.unique(swathNumber)},
     )
     ds.attrs["dim_azimuthTime"] = "max dimension of azimuthTime for a swath"
     ds.attrs["dim_slantRangeTime"] = "max dimension of slantRangeTime for a swath"
-    ds.attrs["comment"] = "The antenna pattern data set record contains a list of vectors of the \
+    ds.attrs[
+        "comment"
+    ] = "The antenna pattern data set record contains a list of vectors of the \
                            antenna elevation pattern values that have been updated along track\
                            and used to correct the radiometry during image processing."
-    ds.attrs["example"] = "for example, if swath Y is smaller than swath X, user has to remove nan to get the dims of the swath"
+    ds.attrs[
+        "example"
+    ] = "for example, if swath Y is smaller than swath X, user has to remove nan to get the dims of the swath"
     ds.attrs["source"] = "Sentinel-1 Product Specification"
 
-    return ds 
+    return ds
+
 
-def swath_merging(sm_swath,sm_nbPerSwat,sm_azimuthTime,sm_firstAzimuthLine,sm_lastAzimuthLine,sm_firstRangeSample,sm_lastRangeSample):
+def swath_merging(
+    sm_swath,
+    sm_nbPerSwat,
+    sm_azimuthTime,
+    sm_firstAzimuthLine,
+    sm_lastAzimuthLine,
+    sm_firstRangeSample,
+    sm_lastRangeSample,
+):
     """
 
     Parameters
@@ -860,28 +1327,33 @@ def swath_merging(sm_swath,sm_nbPerSwat,sm_azimuthTime,sm_firstAzimuthLine,sm_la
     sm_lastAzimuthLine
     sm_firstRangeSample
     sm_lastRangeSample
-    
+
     Returns
     -------
     xarray.Dataset
-    """   
+    """
+
     # Function to convert a swath string like 'EW1' or 'IW3' to an int
     def convert_to_int(swath):
         return int(swath[-1])
+
     vectorized_convert = np.vectorize(convert_to_int)
     repeated_swaths = np.repeat(sm_swath, sm_nbPerSwat)
     swathNumber = vectorized_convert(repeated_swaths)
-    
-    ds = xr.Dataset({
-        'swaths' : (['dim_azimuthTime'], swathNumber),
-        'azimuthTime' : (['dim_azimuthTime'], sm_azimuthTime),
-        'firstAzimuthLine' : (['dim_azimuthTime'], sm_firstAzimuthLine),
-        'lastAzimuthLine' : (['dim_azimuthTime'], sm_lastAzimuthLine),
-        'firstRangeSample' : (['dim_azimuthTime'], sm_firstRangeSample),
-        'lastRangeSample' : (['dim_azimuthTime'], sm_lastRangeSample),
-        },    
+
+    ds = xr.Dataset(
+        {
+            "swaths": (["dim_azimuthTime"], swathNumber),
+            "azimuthTime": (["dim_azimuthTime"], sm_azimuthTime),
+            "firstAzimuthLine": (["dim_azimuthTime"], sm_firstAzimuthLine),
+            "lastAzimuthLine": (["dim_azimuthTime"], sm_lastAzimuthLine),
+            "firstRangeSample": (["dim_azimuthTime"], sm_firstRangeSample),
+            "lastRangeSample": (["dim_azimuthTime"], sm_lastRangeSample),
+        },
     )
-    ds.attrs["comment"] = "The swath merging data set record contains information about how \
+    ds.attrs[
+        "comment"
+    ] = "The swath merging data set record contains information about how \
                            multiple swaths were stitched together to form one large contiguous \
                            swath. This data set record only applies to IW and EW GRD \
                            products"
@@ -889,159 +1361,225 @@ def swath_merging(sm_swath,sm_nbPerSwat,sm_azimuthTime,sm_firstAzimuthLine,sm_la
 
     return ds
 
+
 # dict of compound variables.
 # compound variables are variables composed of several other variables.
 # the key is the variable name, and the value is a python structure,
 # where leaves are jmespath keys in xpath_mappings
 compounds_vars = {
-    'safe_attributes_slcgrd': {
-        'ipf_version': 'manifest.ipf_version',
-        'swath_type': 'manifest.swath_type',
-        'polarizations': 'manifest.polarizations',
-        'product_type': 'manifest.product_type',
-        'mission': 'manifest.mission',
-        'satellite': 'manifest.satellite',
-        'start_date': 'manifest.start_date',
-        'stop_date': 'manifest.stop_date',
-        'footprints': 'manifest.footprints',
-        'aux_cal': 'manifest.aux_cal',
-        'aux_pp1': 'manifest.aux_pp1',
-        'aux_ins': 'manifest.aux_ins',
-        'icid' : 'manifest.instrument_configuration_id'
-    },
-    'safe_attributes_sl2': {
-        'ipf_version': 'manifest.ipf_version',
-        'swath_type': 'manifest.swath_type',
-        'polarizations': 'manifest.polarizations',
-        'product_type': 'manifest.product_type',
-        'mission': 'manifest.mission',
-        'satellite': 'manifest.satellite',
-        'start_date': 'manifest.start_date',
-        'stop_date': 'manifest.stop_date',
-        'footprints': 'manifest.footprints',
-        'aux_cal_sl2': 'manifest.aux_cal_sl2'
+    "safe_attributes_slcgrd": {
+        "ipf_version": "manifest.ipf_version",
+        "swath_type": "manifest.swath_type",
+        "polarizations": "manifest.polarizations",
+        "product_type": "manifest.product_type",
+        "mission": "manifest.mission",
+        "satellite": "manifest.satellite",
+        "start_date": "manifest.start_date",
+        "stop_date": "manifest.stop_date",
+        "footprints": "manifest.footprints",
+        "aux_cal": "manifest.aux_cal",
+        "aux_pp1": "manifest.aux_pp1",
+        "aux_ins": "manifest.aux_ins",
+        "icid": "manifest.instrument_configuration_id",
     },
-    'files': {
-        'func': df_files,
-        'args': (
-            'manifest.annotation_files', 'manifest.measurement_files', 'manifest.noise_files',
-            'manifest.calibration_files')
+    "safe_attributes_sl2": {
+        "ipf_version": "manifest.ipf_version",
+        "swath_type": "manifest.swath_type",
+        "polarizations": "manifest.polarizations",
+        "product_type": "manifest.product_type",
+        "mission": "manifest.mission",
+        "satellite": "manifest.satellite",
+        "start_date": "manifest.start_date",
+        "stop_date": "manifest.stop_date",
+        "footprints": "manifest.footprints",
+        "aux_cal_sl2": "manifest.aux_cal_sl2",
     },
-    'xsd_files': {
-        'func': xsd_files_func,
-        'args': (
-            'manifest.xsd_product_file',
-        )
+    "files": {
+        "func": df_files,
+        "args": (
+            "manifest.annotation_files",
+            "manifest.measurement_files",
+            "manifest.noise_files",
+            "manifest.calibration_files",
+        ),
     },
-    'luts_raw': {
-        'func': signal_lut_raw,
-        'args': ('calibration.line', 'calibration.sample', 'calibration.sigma0_lut', 'calibration.gamma0_lut',
-                 'calibration.azimuthTime')
+    "xsd_files": {"func": xsd_files_func, "args": ("manifest.xsd_product_file",)},
+    "luts_raw": {
+        "func": signal_lut_raw,
+        "args": (
+            "calibration.line",
+            "calibration.sample",
+            "calibration.sigma0_lut",
+            "calibration.gamma0_lut",
+            "calibration.azimuthTime",
+        ),
     },
-    'noise_lut_range_raw': {
-        'func': noise_lut_range_raw,
-        'args': ('noise.range.line', 'noise.range.sample', 'noise.range.noiseLut', 'noise.range.azimuthTime')
+    "noise_lut_range_raw": {
+        "func": noise_lut_range_raw,
+        "args": (
+            "noise.range.line",
+            "noise.range.sample",
+            "noise.range.noiseLut",
+            "noise.range.azimuthTime",
+        ),
     },
-    'noise_lut_azi_raw_grd': {
-        'func': noise_lut_azi_raw_grd,
-        'args': (
-            'noise.azi.line', 'noise.azi.line_start', 'noise.azi.line_stop',
-            'noise.azi.sample_start',
-            'noise.azi.sample_stop', 'noise.azi.noiseLut',
-            'noise.azi.swath')
+    "noise_lut_azi_raw_grd": {
+        "func": noise_lut_azi_raw_grd,
+        "args": (
+            "noise.azi.line",
+            "noise.azi.line_start",
+            "noise.azi.line_stop",
+            "noise.azi.sample_start",
+            "noise.azi.sample_stop",
+            "noise.azi.noiseLut",
+            "noise.azi.swath",
+        ),
     },
-    'noise_lut_azi_raw_slc': {
-        'func': noise_lut_azi_raw_slc,
-        'args': (
-            'noise.azi.line', 'noise.azi.line_start', 'noise.azi.line_stop',
-            'noise.azi.sample_start',
-            'noise.azi.sample_stop', 'noise.azi.noiseLut',
-            'noise.azi.swath')
+    "noise_lut_azi_raw_slc": {
+        "func": noise_lut_azi_raw_slc,
+        "args": (
+            "noise.azi.line",
+            "noise.azi.line_start",
+            "noise.azi.line_stop",
+            "noise.azi.sample_start",
+            "noise.azi.sample_stop",
+            "noise.azi.noiseLut",
+            "noise.azi.swath",
+        ),
     },
-    'denoised': ('annotation.pol', 'annotation.denoised'),
-    'incidenceAngle': {
-        'func': geolocation_grid,
-        'args': ('annotation.line', 'annotation.sample', 'annotation.incidenceAngle')
+    "denoised": ("annotation.pol", "annotation.denoised"),
+    "incidenceAngle": {
+        "func": geolocation_grid,
+        "args": ("annotation.line", "annotation.sample", "annotation.incidenceAngle"),
     },
-    'elevationAngle': {
-        'func': geolocation_grid,
-        'args': ('annotation.line', 'annotation.sample', 'annotation.elevationAngle')
+    "elevationAngle": {
+        "func": geolocation_grid,
+        "args": ("annotation.line", "annotation.sample", "annotation.elevationAngle"),
     },
-    'longitude': {
-        'func': geolocation_grid,
-        'args': ('annotation.line', 'annotation.sample', 'annotation.longitude')
+    "longitude": {
+        "func": geolocation_grid,
+        "args": ("annotation.line", "annotation.sample", "annotation.longitude"),
     },
-    'latitude': {
-        'func': geolocation_grid,
-        'args': ('annotation.line', 'annotation.sample', 'annotation.latitude')
+    "latitude": {
+        "func": geolocation_grid,
+        "args": ("annotation.line", "annotation.sample", "annotation.latitude"),
     },
-    'height': {
-        'func': geolocation_grid,
-        'args': ('annotation.line', 'annotation.sample', 'annotation.height')
+    "height": {
+        "func": geolocation_grid,
+        "args": ("annotation.line", "annotation.sample", "annotation.height"),
     },
-    'azimuthTime': {
-        'func': geolocation_grid,
-        'args': ('annotation.line', 'annotation.sample', 'annotation.azimuthTime')
+    "azimuthTime": {
+        "func": geolocation_grid,
+        "args": ("annotation.line", "annotation.sample", "annotation.azimuthTime"),
     },
-    'slantRangeTime': {
-        'func': geolocation_grid,
-        'args': ('annotation.line', 'annotation.sample', 'annotation.slantRangeTime')
+    "slantRangeTime": {
+        "func": geolocation_grid,
+        "args": ("annotation.line", "annotation.sample", "annotation.slantRangeTime"),
     },
-    'bursts': {
-        'func': bursts,
-        'args': ('annotation.linesPerBurst', 'annotation.samplesPerBurst', 'annotation.burst_azimuthTime',
-                 'annotation.burst_azimuthAnxTime', 'annotation.burst_sensingTime', 'annotation.burst_byteOffset',
-                 'annotation.burst_firstValidSample', 'annotation.burst_lastValidSample')
+    "bursts": {
+        "func": bursts,
+        "args": (
+            "annotation.linesPerBurst",
+            "annotation.samplesPerBurst",
+            "annotation.burst_azimuthTime",
+            "annotation.burst_azimuthAnxTime",
+            "annotation.burst_sensingTime",
+            "annotation.burst_byteOffset",
+            "annotation.burst_firstValidSample",
+            "annotation.burst_lastValidSample",
+        ),
     },
-    'bursts_grd': {
-        'func': bursts_grd,
-        'args': ('annotation.linesPerBurst', 'annotation.samplesPerBurst',)
+    "bursts_grd": {
+        "func": bursts_grd,
+        "args": (
+            "annotation.linesPerBurst",
+            "annotation.samplesPerBurst",
+        ),
     },
-
-    'orbit': {
-        'func': orbit,
-        'args': ('annotation.orbit_time', 'annotation.orbit_frame',
-                 'annotation.orbit_pos_x', 'annotation.orbit_pos_y', 'annotation.orbit_pos_z',
-                 'annotation.orbit_vel_x', 'annotation.orbit_vel_y', 'annotation.orbit_vel_z',
-                 'annotation.pass', 'annotation.platform_heading')
+    "orbit": {
+        "func": orbit,
+        "args": (
+            "annotation.orbit_time",
+            "annotation.orbit_frame",
+            "annotation.orbit_pos_x",
+            "annotation.orbit_pos_y",
+            "annotation.orbit_pos_z",
+            "annotation.orbit_vel_x",
+            "annotation.orbit_vel_y",
+            "annotation.orbit_vel_z",
+            "annotation.pass",
+            "annotation.platform_heading",
+        ),
     },
-    'image': {
-        'func': image,
-        'args': (
-            'annotation.product_type', 'annotation.line_time_range', 'annotation.line_size', 'annotation.sample_size',
-            'annotation.incidence_angle_mid_swath', 'annotation.azimuth_time_interval',
-            'annotation.slant_range_time_image', 'annotation.azimuthPixelSpacing', 'annotation.rangePixelSpacing',
-            'annotation.swath_subswath', 'annotation.radar_frequency', 'annotation.range_sampling_rate',
-            'annotation.azimuth_steering_rate')
+    "image": {
+        "func": image,
+        "args": (
+            "annotation.product_type",
+            "annotation.line_time_range",
+            "annotation.line_size",
+            "annotation.sample_size",
+            "annotation.incidence_angle_mid_swath",
+            "annotation.azimuth_time_interval",
+            "annotation.slant_range_time_image",
+            "annotation.azimuthPixelSpacing",
+            "annotation.rangePixelSpacing",
+            "annotation.swath_subswath",
+            "annotation.radar_frequency",
+            "annotation.range_sampling_rate",
+            "annotation.azimuth_steering_rate",
+        ),
     },
-    'azimuth_fmrate': {
-        'func': azimuth_fmrate,
-        'args': (
-            'annotation.fmrate_azimuthtime', 'annotation.fmrate_t0',
-            'annotation.fmrate_c0', 'annotation.fmrate_c1', 'annotation.fmrate_c2',
-            'annotation.fmrate_azimuthFmRatePolynomial')
+    "azimuth_fmrate": {
+        "func": azimuth_fmrate,
+        "args": (
+            "annotation.fmrate_azimuthtime",
+            "annotation.fmrate_t0",
+            "annotation.fmrate_c0",
+            "annotation.fmrate_c1",
+            "annotation.fmrate_c2",
+            "annotation.fmrate_azimuthFmRatePolynomial",
+        ),
     },
-    'doppler_estimate': {
-        'func': doppler_centroid_estimates,
-        'args': ('annotation.nb_dcestimate',
-                 'annotation.nb_fineDce', 'annotation.dc_azimuth_time', 'annotation.dc_t0', 'annotation.dc_geoDcPoly',
-                 'annotation.dc_dataDcPoly', 'annotation.dc_rmserr', 'annotation.dc_rmserrAboveThres',
-                 'annotation.dc_azstarttime',
-                 'annotation.dc_azstoptime', 'annotation.dc_slantRangeTime', 'annotation.dc_frequency'
-
-                 ),
+    "doppler_estimate": {
+        "func": doppler_centroid_estimates,
+        "args": (
+            "annotation.nb_dcestimate",
+            "annotation.nb_fineDce",
+            "annotation.dc_azimuth_time",
+            "annotation.dc_t0",
+            "annotation.dc_geoDcPoly",
+            "annotation.dc_dataDcPoly",
+            "annotation.dc_rmserr",
+            "annotation.dc_rmserrAboveThres",
+            "annotation.dc_azstarttime",
+            "annotation.dc_azstoptime",
+            "annotation.dc_slantRangeTime",
+            "annotation.dc_frequency",
+        ),
     },
-    'antenna_pattern': {
-        'func': antenna_pattern,
-        'args': ('annotation.ap_swath','annotation.ap_roll','annotation.ap_azimuthTime','annotation.ap_terrainHeight',
-                 'annotation.ap_elevationAngle','annotation.ap_elevationPattern','annotation.ap_incidenceAngle',
-                 'annotation.ap_slantRangeTime'
-        )
+    "antenna_pattern": {
+        "func": antenna_pattern,
+        "args": (
+            "annotation.ap_swath",
+            "annotation.ap_roll",
+            "annotation.ap_azimuthTime",
+            "annotation.ap_terrainHeight",
+            "annotation.ap_elevationAngle",
+            "annotation.ap_elevationPattern",
+            "annotation.ap_incidenceAngle",
+            "annotation.ap_slantRangeTime",
+        ),
     },
-    'swath_merging': {
-        'func': swath_merging,
-        'args': ('annotation.sm_swath','annotation.sm_nbPerSwat','annotation.sm_azimuthTime','annotation.sm_firstAzimuthLine',
-                 'annotation.sm_lastAzimuthLine','annotation.sm_firstRangeSample','annotation.sm_lastRangeSample'
-        )
+    "swath_merging": {
+        "func": swath_merging,
+        "args": (
+            "annotation.sm_swath",
+            "annotation.sm_nbPerSwat",
+            "annotation.sm_azimuthTime",
+            "annotation.sm_firstAzimuthLine",
+            "annotation.sm_lastAzimuthLine",
+            "annotation.sm_firstRangeSample",
+            "annotation.sm_lastRangeSample",
+        ),
     },
 }
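
Aside for readers of this restructuring: each compounds_vars entry is either a
plain structure of dotted keys, or a dict carrying a callable "func" plus a
tuple of "args". A minimal sketch of that dispatch, where resolve_compound and
get_var are illustrative stand-ins, not the library's API:

    # Sketch of how a compounds_vars entry is resolved; it mirrors the
    # func/args check in xml_parser.py further down, it is not a drop-in.
    def resolve_compound(var_object, get_var):
        if (
            isinstance(var_object, dict)
            and "func" in var_object
            and callable(var_object["func"])
        ):
            if not isinstance(var_object["args"], tuple):
                raise ValueError("args must be a tuple when func is called")
            return var_object["func"](*[get_var(a) for a in var_object["args"]])
        return var_object  # plain leaves resolve one jmespath key at a time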

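The geolocation_grid helper reindented in the hunk above is easy to exercise on
its own; a runnable sketch on synthetic data (grid sizes and values arbitrary):

    # Flat values parsed from the XML become a (line, sample) DataArray,
    # the same reshape geolocation_grid performs.
    import numpy as np
    import xarray as xr

    line = np.array([0, 100, 200])
    sample = np.array([0, 50, 100, 150])
    values = np.arange(line.size * sample.size, dtype=float)  # flat input

    da = xr.DataArray(
        np.reshape(values, (line.size, sample.size)),
        dims=["line", "sample"],
        coords={"line": line, "sample": sample},
    )
    print(da.sel(line=100, sample=50).item())  # 5.0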

=====================================
safe_s1/xml_parser.py
=====================================
@@ -1,12 +1,13 @@
-from lxml import objectify
-import jmespath
 import logging
-from collections.abc import Iterable
 import re
-import yaml
+from collections.abc import Iterable
 from io import BytesIO
 
-logger = logging.getLogger('xsar.xml_parser')
+import jmespath
+import yaml
+from lxml import objectify
+
+logger = logging.getLogger("xsar.xml_parser")
 logger.addHandler(logging.NullHandler())
 
 
@@ -41,7 +42,7 @@ class XmlParser:
         self._mapper = mapper
 
     def __del__(self):
-        logger.debug('__del__ XmlParser')
+        logger.debug("__del__ XmlParser")
 
     def getroot(self, xml_file):
         """return xml root object from xml_file. (also update self._namespaces with fetched ones)"""
@@ -57,7 +58,10 @@ class XmlParser:
         """
 
         xml_root = self.getroot(xml_file)
-        result = [getattr(e, 'pyval', e) for e in xml_root.xpath(path, namespaces=self._namespaces)]
+        result = [
+            getattr(e, "pyval", e)
+            for e in xml_root.xpath(path, namespaces=self._namespaces)
+        ]
         return result
 
     def get_var(self, xml_file, jpath, describe=False):
@@ -91,7 +95,9 @@ class XmlParser:
             return xpath
 
         if not isinstance(xpath, str):
-            raise NotImplementedError('Non leaf xpath of type "%s" instead of str' % type(xpath).__name__)
+            raise NotImplementedError(
+                'Non leaf xpath of type "%s" instead of str' % type(xpath).__name__
+            )
 
         result = self.xpath(xml_file, xpath)
         if func is not None:
@@ -127,18 +133,22 @@ class XmlParser:
         if describe:
             # keep only informative parts in filename
             # sub SAFE path
-            minifile = re.sub('.*SAFE/', '', xml_file)
-            minifile = re.sub(r'-.*\.xml', '.xml', minifile)
+            minifile = re.sub(".*SAFE/", "", xml_file)
+            minifile = re.sub(r"-.*\.xml", ".xml", minifile)
 
         var_object = self._compounds_vars[var_name]
 
         func = None
-        if isinstance(var_object, dict) and 'func' in var_object and callable(var_object['func']):
-            func = var_object['func']
-            if isinstance(var_object['args'], tuple):
-                args = var_object['args']
+        if (
+            isinstance(var_object, dict)
+            and "func" in var_object
+            and callable(var_object["func"])
+        ):
+            func = var_object["func"]
+            if isinstance(var_object["args"], tuple):
+                args = var_object["args"]
             else:
-                raise ValueError('args must be a tuple when func is called')
+                raise ValueError("args must be a tuple when func is called")
         else:
             args = var_object
 
@@ -164,6 +174,3 @@ class XmlParser:
             return description
         else:
             return result
-
-    def __del__(self):
-        logger.debug('__del__ XmlParser')
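
The describe branch reformatted above shortens an annotation path before using
it as a provenance string; a quick runnable check with a made-up SAFE path:

    # The two substitutions from get_compound_var's describe mode,
    # applied to a placeholder file name.
    import re

    xml_file = "S1A_IW_GRDH.SAFE/annotation/s1a-iw-grd-vv-20241128.xml"
    minifile = re.sub(".*SAFE/", "", xml_file)        # strip up to the SAFE root
    minifile = re.sub(r"-.*\.xml", ".xml", minifile)  # drop the verbose suffix
    print(minifile)  # annotation/s1a.xml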



View it on GitLab: https://salsa.debian.org/debian-gis-team/xarray-safe-s1/-/commit/001034a91a7f82d296446f713fb4d6e0851867e5
