[Git][debian-gis-team/xsar][upstream] New upstream version 2025.03.07

Antonio Valentino (@antonio.valentino) gitlab at salsa.debian.org
Sat Mar 15 10:44:47 GMT 2025



Antonio Valentino pushed to branch upstream at Debian GIS Project / xsar


Commits:
149b9eee by Antonio Valentino at 2025-03-15T10:33:31+00:00
New upstream version 2025.03.07
- - - - -


5 changed files:

- .git_archival.txt
- .github/workflows/publish.yml
- src/xsar/sentinel1_dataset.py
- src/xsar/sentinel1_meta.py
- src/xsar/utils.py


Changes:

=====================================
.git_archival.txt
=====================================
@@ -1 +1 @@
-ref-names: HEAD -> develop, tag: 2024.12.03
\ No newline at end of file
+ref-names: HEAD -> develop, tag: v1.1.9, tag: v1.1.10, tag: 2025.03.07
\ No newline at end of file
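
For context: .git_archival.txt is the metadata file that setuptools-scm's git-archive support reads when the package is built from a source archive instead of a git checkout; the version is recovered from the "tag:" entries on the ref-names line. A minimal sketch of such a parser (illustrative only, not setuptools-scm's actual implementation):

    # Illustrative: extract release tags from a ref-names value like the
    # one above (hypothetical parser, not setuptools-scm's code).
    def extract_tags(ref_names: str) -> list[str]:
        parts = [p.strip() for p in ref_names.split(",")]
        return [p[len("tag: "):] for p in parts if p.startswith("tag: ")]

    line = "HEAD -> develop, tag: v1.1.9, tag: v1.1.10, tag: 2025.03.07"
    print(extract_tags(line))  # ['v1.1.9', 'v1.1.10', '2025.03.07']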


=====================================
.github/workflows/publish.yml
=====================================
@@ -51,4 +51,4 @@ jobs:
           path: dist/
 
       - name: Publish to PyPI
-        uses: pypa/gh-action-pypi-publish@15c56dba361d8335944d31a2ecd17d700fc7bcbc
+        uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc
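
The only change here is the commit SHA that pypa/gh-action-pypi-publish is pinned to. Pinning a third-party action to a full-length commit SHA rather than a mutable tag is GitHub's recommended hardening practice, since a tag can be moved to different code after review.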


=====================================
src/xsar/sentinel1_dataset.py
=====================================
@@ -18,6 +18,11 @@ from .utils import (
     merge_yaml,
     to_lon180,
     config,
+
+    get_path_aux_cal,
+    get_path_aux_pp1,
+    get_geap_gains,
+    get_gproc_gains,
 )
 from .sentinel1_meta import Sentinel1Meta
 from .ipython_backends import repr_mimebundle
@@ -25,6 +30,8 @@ from .base_dataset import BaseDataset
 import pandas as pd
 import geopandas as gpd
 
+import os
+
 
 logger = logging.getLogger("xsar.sentinel1_dataset")
 logger.addHandler(logging.NullHandler())
@@ -623,6 +630,17 @@ class Sentinel1Dataset(BaseDataset):
 
             if "GRD" in str(self.datatree.attrs["product"]):
                 self.add_swath_number()
+                path_aux_cal_old = get_path_aux_cal(
+                    self.sar_meta.manifest_attrs["aux_cal"]
+                )
+
+                path_aux_pp1_old = get_path_aux_pp1(
+                    self.sar_meta.manifest_attrs["aux_pp1"]
+                )
+
+                if self.apply_recalibration == False:
+                    new_cal = "None"
+                    new_pp1 = "None"
 
                 if self.apply_recalibration:
                     path_dataframe_aux = config["path_dataframe_aux"]
@@ -647,7 +665,7 @@ class Sentinel1Dataset(BaseDataset):
                     sel_cal = sel_cal.sort_values(
                         by=["validation_date", "generation_date"], ascending=False)
 
-                    path_new_cal = sel_cal.iloc[0].aux_path
+                    new_cal = sel_cal.iloc[0].aux_path
 
                     sel_pp1 = dataframe_aux.loc[(dataframe_aux.sat_name == self.sar_meta.manifest_attrs['satellite']) &
                                                 (dataframe_aux.aux_type == "PP1") &
@@ -668,9 +686,20 @@ class Sentinel1Dataset(BaseDataset):
                     sel_pp1 = sel_pp1.sort_values(
                         by=["validation_date", "generation_date"], ascending=False
                     )
-                    path_new_pp1 = sel_pp1.iloc[0].aux_path
+                    new_pp1 = sel_pp1.iloc[0].aux_path
+
+                    path_aux_cal_new = get_path_aux_cal(
+                        os.path.basename(new_cal))
+                    path_aux_pp1_new = get_path_aux_pp1(
+                        os.path.basename(new_pp1))
+
+                    self.add_gains(path_aux_cal_new, path_aux_pp1_new,
+                                   path_aux_cal_old, path_aux_pp1_old)
 
-                    self.add_gains(path_new_cal, path_new_pp1)
+                self.datatree["recalibration"].attrs["aux_cal_new"] = os.path.basename(
+                    new_cal)
+                self.datatree["recalibration"].attrs["aux_pp1_new"] = os.path.basename(
+                    new_pp1)
 
             rasters = self._load_rasters_vars()
             if rasters is not None:
@@ -778,28 +807,12 @@ class Sentinel1Dataset(BaseDataset):
             self._dataset_recalibration
         )
 
-    def add_gains(self, new_aux_cal_name, new_aux_pp1_name):
-        from .utils import (
-            get_path_aux_cal,
-            get_path_aux_pp1,
-            get_geap_gains,
-            get_gproc_gains,
-        )
-        import os
+    def add_gains(self, path_aux_cal_new, path_aux_pp1_new, path_aux_cal_old, path_aux_pp1_old):
+
         from scipy.interpolate import interp1d
 
         logger.debug(
-            f"doing recalibration with AUX_CAL = {new_aux_cal_name} & AUX_PP1 = {new_aux_pp1_name}"
-        )
-
-        path_aux_cal_new = get_path_aux_cal(os.path.basename(new_aux_cal_name))
-        path_aux_cal_old = get_path_aux_cal(
-            os.path.basename(self.sar_meta.manifest_attrs["aux_cal"])
-        )
-
-        path_aux_pp1_new = get_path_aux_pp1(os.path.basename(new_aux_pp1_name))
-        path_aux_pp1_old = get_path_aux_pp1(
-            os.path.basename(self.sar_meta.manifest_attrs["aux_pp1"])
+            f"doing recalibration with AUX_CAL = {path_aux_cal_new} & AUX_PP1 = {path_aux_pp1_new}"
         )
 
         #  1 - compute offboresight angle
@@ -986,10 +999,6 @@ class Sentinel1Dataset(BaseDataset):
             self._dataset_recalibration
         )
 
-        self.datatree["recalibration"].attrs["path_aux_cal_new"] = path_aux_cal_new
-        self.datatree["recalibration"].attrs["path_aux_pp1_new"] = path_aux_pp1_new
-        self.datatree["recalibration"].attrs["path_aux_cal_old"] = path_aux_cal_old
-        self.datatree["recalibration"].attrs["path_aux_pp1_old"] = path_aux_pp1_old
         # return self._dataset
 
     def apply_calibration_and_denoising(self):
@@ -1040,7 +1049,7 @@ class Sentinel1Dataset(BaseDataset):
             )
 
         self._dataset = self._add_denoised(self._dataset)
-        
+
         for var_name, lut_name in self._map_var_lut.items():
             var_name_raw = var_name + "_raw"
             if var_name_raw in self._dataset:
@@ -1051,8 +1060,7 @@ class Sentinel1Dataset(BaseDataset):
                     "Skipping variable '%s' ('%s' lut is missing)"
                     % (var_name, lut_name)
                 )
-                
-                
+
         self.datatree["measurement"] = self.datatree["measurement"].assign(
             self._dataset
         )
@@ -1121,7 +1129,7 @@ class Sentinel1Dataset(BaseDataset):
         if self.sar_meta.swath == "WV":
             if (
                 lut.name in ["noise_lut_azi", "noise_lut"]
-                and self.sar_meta.ipf in [2.9, 2.91]
+                and self.sar_meta.ipf_version in [2.9, 2.91]
                 and self.sar_meta.platform in ["SENTINEL-1A", "SENTINEL-1B"]
             ):
                 noise_calibration_cst_pp1 = {
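
The recalibration changes above move the AUX-path resolution out of add_gains() and into the caller, rename path_new_cal/path_new_pp1 to new_cal/new_pp1, and record the selected basenames as datatree attributes (aux_cal_new / aux_pp1_new) even when recalibration is disabled. The selection step itself is unchanged; a simplified sketch of its core, assuming a pandas dataframe with the column names used in the hunks (sat_name, aux_type, validation_date, generation_date, aux_path):

    import pandas as pd

    # Simplified sketch of the AUX selection (the real code also filters
    # on validation/generation dates against the product start date; only
    # the sort-and-pick step is shown here).
    def select_newest_aux(df: pd.DataFrame, sat: str, aux_type: str) -> str:
        sel = df.loc[(df.sat_name == sat) & (df.aux_type == aux_type)]
        sel = sel.sort_values(by=["validation_date", "generation_date"],
                              ascending=False)
        return sel.iloc[0].aux_path  # newest valid entry wins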


=====================================
src/xsar/sentinel1_meta.py
=====================================
@@ -79,6 +79,13 @@ class Sentinel1Meta(BaseMeta):
 
         self.manifest_attrs = self.reader.manifest_attrs
 
+        for attr in ['aux_cal', 'aux_pp1', 'aux_ins']:
+            if attr not in self.manifest_attrs:
+                self.manifest_attrs[attr] = None
+            else:
+                self.manifest_attrs[attr] = os.path.basename(
+                    self.manifest_attrs[attr])
+
         self.multidataset = False
         """True if multi dataset"""
         self.subdatasets = gpd.GeoDataFrame(geometry=[], index=[])
@@ -101,8 +108,10 @@ class Sentinel1Meta(BaseMeta):
                 )
             except ValueError:
                 # not as many footprints than subdatasets count. (probably TOPS product)
-                self._submeta = [Sentinel1Meta(subds) for subds in datasets_names]
-                sub_footprints = [submeta.footprint for submeta in self._submeta]
+                self._submeta = [Sentinel1Meta(subds)
+                                 for subds in datasets_names]
+                sub_footprints = [
+                    submeta.footprint for submeta in self._submeta]
                 self.subdatasets = gpd.GeoDataFrame(
                     geometry=sub_footprints, index=datasets_names
                 )
@@ -151,7 +160,8 @@ class Sentinel1Meta(BaseMeta):
 
     def to_dict(self, keys="minimal"):
 
-        info_keys = {"minimal": ["ipf", "platform", "swath", "product", "pols"]}
+        info_keys = {"minimal": [
+            "ipf_version", "platform", "swath", "product", "pols"]}
         info_keys["all"] = info_keys["minimal"] + [
             "name",
             "start_date",
@@ -160,6 +170,11 @@ class Sentinel1Meta(BaseMeta):
             "coverage",
             "orbit_pass",
             "platform_heading",
+            "icid",
+            "aux_cal",
+            "aux_pp1",
+            "aux_ins",
+
         ]  # 'pixel_line_m', 'pixel_sample_m',
 
         if isinstance(keys, str):
@@ -172,7 +187,9 @@ class Sentinel1Meta(BaseMeta):
             elif k in self.manifest_attrs.keys():
                 res_dict[k] = self.manifest_attrs[k]
             else:
-                raise KeyError('Unable to find key/attr "%s" in Sentinel1Meta' % k)
+                raise KeyError(
+                    'Unable to find key/attr "%s" in Sentinel1Meta' % k)
+
         return res_dict
 
     def annotation_angle(self, line, sample, angle):
@@ -240,7 +257,8 @@ class Sentinel1Meta(BaseMeta):
                     self._geoloc[ll].isel(line=a, sample=x).values
                     for a, x in [(0, 0), (0, -1), (-1, -1), (-1, 0)]
                 ]
-            corners = list(zip(footprint_dict["longitude"], footprint_dict["latitude"]))
+            corners = list(
+                zip(footprint_dict["longitude"], footprint_dict["latitude"]))
             p = Polygon(corners)
             self._geoloc.attrs["footprint"] = p
 
@@ -272,10 +290,12 @@ class Sentinel1Meta(BaseMeta):
                 for sample in self._geoloc.sample
             ]
             # approx transform, from all gcps (inaccurate)
-            self._geoloc.attrs["approx_transform"] = rasterio.transform.from_gcps(gcps)
+            self._geoloc.attrs["approx_transform"] = rasterio.transform.from_gcps(
+                gcps)
             for vv in self._geoloc:
                 if vv in self.xsd_definitions:
-                    self._geoloc[vv].attrs["definition"] = str(self.xsd_definitions[vv])
+                    self._geoloc[vv].attrs["definition"] = str(
+                        self.xsd_definitions[vv])
 
         return self._geoloc
 
@@ -315,7 +335,7 @@ class Sentinel1Meta(BaseMeta):
         return self.reader.denoised
 
     @property
-    def ipf(self):
+    def ipf_version(self):
         """ipf version"""
         return self.manifest_attrs["ipf_version"]
 


=====================================
src/xsar/utils.py
=====================================
@@ -98,7 +98,8 @@ def timing(f):
             endrss = process.memory_info().rss
             mem_str = "mem: %+.1fMb" % ((endrss - startrss) / (1024**2))
         logger.debug(
-            "timing %s : %.2fs. %s" % (f.__name__, endtime - starttime, mem_str)
+            "timing %s : %.2fs. %s" % (
+                f.__name__, endtime - starttime, mem_str)
         )
         return result
 
@@ -152,12 +153,14 @@ def haversine(lon1, lat1, lon2, lat2):
     # haversine formula
     dlon = lon2 - lon1
     dlat = lat2 - lat1
-    a = np.sin(dlat / 2) ** 2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon / 2) ** 2
+    a = np.sin(dlat / 2) ** 2 + np.cos(lat1) * \
+        np.cos(lat2) * np.sin(dlon / 2) ** 2
     c = 2 * np.arcsin(np.sqrt(a))
     r = 6371000  # Radius of earth in meters.
     bearing = np.arctan2(
         np.sin(lon2 - lon1) * np.cos(lat2),
-        np.cos(lat1) * np.sin(lat2) - np.sin(lat1) * np.cos(lat2) * np.cos(lon2 - lon1),
+        np.cos(lat1) * np.sin(lat2) - np.sin(lat1) *
+        np.cos(lat2) * np.cos(lon2 - lon1),
     )
     return c * r, np.rad2deg(bearing)
 
@@ -251,7 +254,8 @@ def map_blocks_coords(da, func, func_kwargs={}, **kwargs):
             loc = tuple(zip((0,) * len(block.shape), block.shape))
 
         # use loc to get corresponding coordinates
-        coords_sel = tuple(c[loc[i][0] : loc[i][1]] for i, c in enumerate(coords))
+        coords_sel = tuple(c[loc[i][0]: loc[i][1]]
+                           for i, c in enumerate(coords))
 
         result = f(*coords_sel, **func_kwargs)
 
@@ -267,7 +271,8 @@ def map_blocks_coords(da, func, func_kwargs={}, **kwargs):
     meta = da.data
     dtype = meta.dtype
 
-    from_coords = bind(_evaluate_from_coords, ..., ..., coords.values(), dtype=dtype)
+    from_coords = bind(_evaluate_from_coords, ..., ...,
+                       coords.values(), dtype=dtype)
 
     daskarr = meta.map_blocks(from_coords, func, meta=meta, **kwargs)
     dataarr = xr.DataArray(daskarr, dims=da.dims, coords=coords)
@@ -329,7 +334,8 @@ def compress_safe(
         pass
     os.mkdir(safe_path_out_tmp)
     if "S1" in product:
-        shutil.copytree(safe_path_in + "/annotation", safe_path_out_tmp + "/annotation")
+        shutil.copytree(safe_path_in + "/annotation",
+                        safe_path_out_tmp + "/annotation")
         shutil.copyfile(
             safe_path_in + "/manifest.safe", safe_path_out_tmp + "/manifest.safe"
         )
@@ -373,14 +379,17 @@ def compress_safe(
                 band = src.read(1)
 
             with rasterio.open(
-                safe_path_out_tmp + "/measurement/" + os.path.basename(tiff_file),
+                safe_path_out_tmp + "/measurement/" +
+                    os.path.basename(tiff_file),
                 "w",
                 **open_kwargs,
             ) as dst:
                 dst.write(band, 1)
     elif "RCM" in product:
-        shutil.copytree(safe_path_in + "/metadata", safe_path_out_tmp + "/metadata")
-        shutil.copytree(safe_path_in + "/support", safe_path_out_tmp + "/support")
+        shutil.copytree(safe_path_in + "/metadata",
+                        safe_path_out_tmp + "/metadata")
+        shutil.copytree(safe_path_in + "/support",
+                        safe_path_out_tmp + "/support")
         shutil.copyfile(
             safe_path_in + "/manifest.safe", safe_path_out_tmp + "/manifest.safe"
         )
@@ -431,10 +440,12 @@ def compress_safe(
                 dst.write(band, 1)
 
     elif "RS2" in product:
-        shutil.copytree(safe_path_in + "/schemas", safe_path_out_tmp + "/schemas")
+        shutil.copytree(safe_path_in + "/schemas",
+                        safe_path_out_tmp + "/schemas")
         for xml_file in glob.glob(os.path.join(safe_path_in, "*.xml")):
             shutil.copyfile(
-                xml_file, os.path.join(safe_path_out_tmp, os.path.basename(xml_file))
+                xml_file, os.path.join(
+                    safe_path_out_tmp, os.path.basename(xml_file))
             )
         for tiff_file in glob.glob(os.path.join(safe_path_in, "*.tif")):
             src = rasterio.open(tiff_file)
@@ -705,7 +716,8 @@ def url_get(url, cache_dir=os.path.join(config["data_dir"], "fsspec_cache")):
     if "://" in url:
         with fsspec.open(
             "filecache::%s" % url,
-            https={"client_kwargs": {"timeout": aiohttp.ClientTimeout(total=3600)}},
+            https={"client_kwargs": {
+                "timeout": aiohttp.ClientTimeout(total=3600)}},
             filecache={
                 "cache_storage": os.path.join(
                     os.path.join(config["data_dir"], "fsspec_cache")
@@ -822,6 +834,19 @@ def get_gproc_gains(path_aux_pp1, mode, product):
 
 
 def get_path_aux_cal(aux_cal_name):
+    """
+    Get full path to AUX_CAL file.
+
+    Parameters
+    ----------
+    aux_cal_name: str
+        name of the AUX_CAL file
+
+    Returns
+    -------
+    str
+        full path to the AUX_CAL file
+    """
     path = os.path.join(
         config["auxiliary_dir"],
         aux_cal_name[0:3] + "_AUX_CAL",
@@ -835,6 +860,19 @@ def get_path_aux_cal(aux_cal_name):
 
 
 def get_path_aux_pp1(aux_pp1_name):
+    """
+    Get full path to AUX_PP1 file.
+
+    Parameters
+    ----------
+    aux_pp1_name: str
+        name of the AUX_PP1 file
+
+    Returns
+    -------
+    str
+        full path to the AUX_PP1 file
+    """
     path = os.path.join(
         config["auxiliary_dir"],
         aux_pp1_name[0:3] + "_AUX_PP1",
@@ -845,3 +883,29 @@ def get_path_aux_pp1(aux_pp1_name):
     if not os.path.isfile(path):
         raise FileNotFoundError(f"File doesn't exist: {path}")
     return path
+
+
+def get_path_aux_ins(aux_ins_name):
+    """
+    Get full path to AUX_INS file.
+
+    Parameters
+    ----------
+    aux_ins_name: str
+        name of the AUX_INS file
+
+    Returns
+    -------
+    str
+        full path to the AUX_INS file
+    """
+    path = os.path.join(
+        config["auxiliary_dir"],
+        aux_ins_name[0:3] + "_AUX_INS",
+        aux_ins_name,
+        "data",
+        aux_ins_name[0:3].lower() + "-aux-ins.xml",
+    )
+    if not os.path.isfile(path):
+        raise FileNotFoundError(f"File doesn't exist: {path}")
+    return path
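
The new get_path_aux_ins mirrors get_path_aux_cal and get_path_aux_pp1: all three encode the same on-disk layout, <auxiliary_dir>/<S1X>_AUX_<TYPE>/<aux_name>/data/<s1x>-aux-<type>.xml. A generic form the three are instances of (hypothetical helper, not part of the commit):

    import os

    # Hypothetical generic helper showing the shared layout of the
    # get_path_aux_* functions used in this version.
    def get_path_aux(aux_name: str, aux_type: str, auxiliary_dir: str) -> str:
        sat = aux_name[0:3]  # e.g. "S1A"
        path = os.path.join(
            auxiliary_dir,
            f"{sat}_AUX_{aux_type.upper()}",
            aux_name,
            "data",
            f"{sat.lower()}-aux-{aux_type.lower()}.xml",
        )
        if not os.path.isfile(path):
            raise FileNotFoundError(f"File doesn't exist: {path}")
        return path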



View it on GitLab: https://salsa.debian.org/debian-gis-team/xsar/-/commit/149b9eeed9400a41352c331e232b910459b593ff
