[Git][debian-gis-team/xsar][upstream] New upstream version 2024.08.08

Antonio Valentino (@antonio.valentino) gitlab at salsa.debian.org
Mon Aug 12 07:06:47 BST 2024



Antonio Valentino pushed to branch upstream at Debian GIS Project / xsar


Commits:
15879c9b by Antonio Valentino at 2024-08-12T05:49:50+00:00
New upstream version 2024.08.08
- - - - -


6 changed files:

- .git_archival.txt
- src/xsar/base_dataset.py
- src/xsar/radarsat2_dataset.py
- src/xsar/rcm_dataset.py
- src/xsar/sentinel1_dataset.py
- src/xsar/xsar.py


Changes:

=====================================
.git_archival.txt
=====================================
@@ -1 +1 @@
-ref-names: HEAD -> develop, tag: 2024.07.30
\ No newline at end of file
+ref-names: HEAD -> develop, tag: 2024.08.08
\ No newline at end of file


=====================================
src/xsar/base_dataset.py
=====================================
@@ -61,6 +61,7 @@ class BaseDataset(ABC):
         'elevation': 'f4',
         'altitude': 'f4',
         'ground_heading': 'f4',
+        'offboresight': 'f4',
         'nesz': None,
         'negz': None,
         'sigma0_raw': None,
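
Note: 'offboresight' is registered here with dtype 'f4' (float32) alongside
the other geolocation-derived variables. These declared dtypes are consumed
when casting the coordinate-only template array before lazy evaluation (see
the `self._da_tmpl.astype(self._dtypes[...])` calls in the hunks below). A
minimal sketch of that pattern, with illustrative names rather than the xsar
internals:

    import numpy as np
    import xarray as xr

    dtypes = {'offboresight': 'f4', 'ground_heading': 'f4'}

    # coordinate-only template: only the shape and coords matter, not values
    tmpl = xr.DataArray(np.empty((3, 4)), dims=['line', 'sample'])
    typed = tmpl.astype(dtypes['offboresight'])
    assert typed.dtype == np.float32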


=====================================
src/xsar/radarsat2_dataset.py
=====================================
@@ -17,7 +17,8 @@ logger = logging.getLogger('xsar.radarsat2_dataset')
 logger.addHandler(logging.NullHandler())
 
 # we know the tiff has no geotransform: ignore warning
-warnings.filterwarnings("ignore", category=rasterio.errors.NotGeoreferencedWarning)
+warnings.filterwarnings(
+    "ignore", category=rasterio.errors.NotGeoreferencedWarning)
 
 # allow nan without warnings
 # some dask warnings are still not filtered: https://github.com/dask/dask/issues/3245
@@ -62,6 +63,7 @@ class RadarSat2Dataset(BaseDataset):
             activate or not the lazy loading of the high resolution fields
 
     """
+
     def __init__(self, dataset_id, resolution=None,
                  resampling=rasterio.enums.Resampling.rms,
                  chunks={'line': 5000, 'sample': 5000},
@@ -86,7 +88,8 @@ class RadarSat2Dataset(BaseDataset):
             # assert isinstance(sar_meta.coords2ll(100, 100),tuple)
         else:
             # we want self.sar_meta to be a dask actor on a worker
-            self.sar_meta = BlockingActorProxy(RadarSat2Meta.from_dict, dataset_id.dict)
+            self.sar_meta = BlockingActorProxy(
+                RadarSat2Meta.from_dict, dataset_id.dict)
         del dataset_id
         if self.sar_meta.multidataset:
             raise IndexError(
@@ -97,37 +100,37 @@ class RadarSat2Dataset(BaseDataset):
         # build datatree
         DN_tmp = load_digital_number(self.sar_meta.dt, resolution=resolution,
                                      resampling=resampling, chunks=chunks)['digital_numbers'].ds
-        ### In order to respect xsar convention, lines and samples have been flipped in the metadata when necessary.
-        ### `load_digital_number` uses these metadata but rio creates new coords without keeping the flipping done.
-        ### So we have to flip again a time digital numbers to respect xsar convention
+        # In order to respect the xsar convention, lines and samples have been flipped in the metadata when necessary.
+        # `load_digital_number` uses these metadata, but rio creates new coords without preserving the flip.
+        # So we have to flip the digital numbers again to respect the xsar convention.
         DN_tmp = self.flip_sample_da(DN_tmp)
         DN_tmp = self.flip_line_da(DN_tmp)
 
-        ### geoloc
+        # geoloc
         geoloc = self.sar_meta.geoloc
         geoloc.attrs['history'] = 'annotations'
 
-        ### orbitAndAttitude
+        # orbitAndAttitude
         orbit_and_attitude = self.sar_meta.orbit_and_attitude
         orbit_and_attitude.attrs['history'] = 'annotations'
 
-        ### dopplerCentroid
+        # dopplerCentroid
         doppler_centroid = self.sar_meta.doppler_centroid
         doppler_centroid.attrs['history'] = 'annotations'
 
-        ### dopplerRateValues
+        # dopplerRateValues
         doppler_rate_values = self.sar_meta.doppler_rate_values
         doppler_rate_values.attrs['history'] = 'annotations'
 
-        ### chirp
+        # chirp
         chirp = self.sar_meta.chirp
         chirp.attrs['history'] = 'annotations'
 
-        ### radarParameters
+        # radarParameters
         radar_parameters = self.sar_meta.radar_parameters
         radar_parameters.attrs['history'] = 'annotations'
 
-        ### lookUpTables
+        # lookUpTables
         lut = self.sar_meta.lut
         lut.attrs['history'] = 'annotations'
 
@@ -149,7 +152,8 @@ class RadarSat2Dataset(BaseDataset):
             'gamma0': 'lutGamma',
             'beta0': 'lutBeta',
         }
-        geoloc_vars = ['latitude', 'longitude', 'altitude', 'incidence', 'elevation']
+        geoloc_vars = ['latitude', 'longitude', 'altitude',
+                       'incidence', 'elevation']
         for vv in skip_variables:
             if vv in geoloc_vars:
                 geoloc_vars.remove(vv)
@@ -161,25 +165,28 @@ class RadarSat2Dataset(BaseDataset):
                 self._dataset.attrs[att] = self.sar_meta.__getattr__(att)
 
         value_res_line = self.sar_meta.geoloc.line.attrs['rasterAttributes_sampledLineSpacing_value']
-        value_res_sample = self.sar_meta.geoloc.pixel.attrs['rasterAttributes_sampledPixelSpacing_value']
+        value_res_sample = self.sar_meta.geoloc.pixel.attrs[
+            'rasterAttributes_sampledPixelSpacing_value']
         # self._load_incidence_from_lut()
         refe_spacing = 'slant'
         if resolution is not None:
-            refe_spacing = 'ground'  # if the data sampling changed it means that the quantities are projected on ground
+            # if the data sampling changed, the quantities are projected onto the ground
+            refe_spacing = 'ground'
             if isinstance(resolution, str):
                 value_res_sample = float(resolution.replace('m', ''))
                 value_res_line = value_res_sample
             elif isinstance(resolution, dict):
                 value_res_sample = self.sar_meta.geoloc.pixel.attrs['rasterAttributes_sampledPixelSpacing_value'] \
-                                   * resolution['sample']
+                    * resolution['sample']
                 value_res_line = self.sar_meta.geoloc.line.attrs['rasterAttributes_sampledLineSpacing_value'] \
-                                 * resolution['line']
+                    * resolution['line']
             else:
                logger.warning('resolution type not handled (%s); should be str or dict -> sampleSpacing'
                                ' and lineSpacing are not correct', type(resolution))
         self._dataset['sampleSpacing'] = xr.DataArray(value_res_sample,
                                                       attrs={'units': 'm', 'referential': refe_spacing})
-        self._dataset['lineSpacing'] = xr.DataArray(value_res_line, attrs={'units': 'm'})
+        self._dataset['lineSpacing'] = xr.DataArray(
+            value_res_line, attrs={'units': 'm'})
 
         # dataset no-pol template for function evaluation on coordinates (*no* values used)
         # what's matter here is the shape of the image, not the values.
@@ -197,30 +204,36 @@ class RadarSat2Dataset(BaseDataset):
         self._dataset = xr.merge([
             xr.DataArray(data=self.sar_meta.samples_flipped,
                          attrs={'meaning':
-                                    'xsar convention : increasing incidence values along samples axis'}
+                                'xsar convention : increasing incidence values along samples axis'}
                          ).to_dataset(name='samples_flipped'),
             self._dataset
         ])
         self._dataset = xr.merge([
             xr.DataArray(data=self.sar_meta.lines_flipped,
                          attrs={'meaning':
-                                    'xsar convention : increasing time along line axis '
-                                    '(whatever ascending or descending pass direction)'}
+                                'xsar convention : increasing time along line axis '
+                                '(whatever ascending or descending pass direction)'}
                          ).to_dataset(name='lines_flipped'),
             self._dataset
         ])
 
         self._luts = self.lazy_load_luts()
         self.apply_calibration_and_denoising()
-        self._dataset = xr.merge([self.load_from_geoloc(geoloc_vars, lazy_loading=lazyloading), self._dataset])
+        self._dataset = xr.merge([self.load_from_geoloc(
+            geoloc_vars, lazy_loading=lazyloading), self._dataset])
+
+        # compute offboresight in self._dataset
+        self._get_offboresight_from_elevation()
         rasters = self._load_rasters_vars()
         if rasters is not None:
             self._dataset = xr.merge([self._dataset, rasters])
         self._dataset = xr.merge([self.interpolate_times, self._dataset])
         if 'ground_heading' not in skip_variables:
-            self._dataset = xr.merge([self.load_ground_heading(), self._dataset])
+            self._dataset = xr.merge(
+                [self.load_ground_heading(), self._dataset])
         if 'velocity' not in skip_variables:
-            self._dataset = xr.merge([self.get_sensor_velocity(), self._dataset])
+            self._dataset = xr.merge(
+                [self.get_sensor_velocity(), self._dataset])
         self._rasterized_masks = self.load_rasterized_masks()
         self._dataset = xr.merge([self._rasterized_masks, self._dataset])
         """a = self._dataset.copy()
@@ -228,7 +241,8 @@ class RadarSat2Dataset(BaseDataset):
         self.datatree['measurement'] = self.datatree['measurement'].assign(self._dataset)
         a = self._dataset.copy()
         self._dataset = self.flip_line_da(a)"""
-        self.datatree['measurement'] = self.datatree['measurement'].assign(self._dataset)
+        self.datatree['measurement'] = self.datatree['measurement'].assign(
+            self._dataset)
         """self.datatree = datatree.DataTree.from_dict(
             {'measurement': self.datatree['measurement'],
              'geolocation_annotation': self.datatree['geolocation_annotation'],
@@ -252,13 +266,33 @@ class RadarSat2Dataset(BaseDataset):
         merge_list = []
         for lut_name in luts_ds:
             lut_f_delayed = dask.delayed()(luts_ds[lut_name])
-            ar = dask.array.from_delayed(lut_f_delayed.data, (luts_ds[lut_name].data.size,), luts_ds[lut_name].dtype)
+            ar = dask.array.from_delayed(
+                lut_f_delayed.data, (luts_ds[lut_name].data.size,), luts_ds[lut_name].dtype)
             da = xr.DataArray(data=ar, dims=['sample'], coords={'sample': luts_ds[lut_name].sample},
                               attrs=luts_ds[lut_name].attrs)
             ds_lut_f_delayed = da.to_dataset(name=lut_name)
             merge_list.append(ds_lut_f_delayed)
         return xr.combine_by_coords(merge_list)
 
+    @timing
+    def _get_offboresight_from_elevation(self):
+        """
+        Compute offboresight angle.
+
+        Returns
+        -------
+
+        """
+        self._dataset["offboresight"] = self._dataset.elevation - \
+            (30.1833947 * self._dataset.latitude ** 0 +
+             0.0082998714 * self._dataset.latitude ** 1 -
+             0.00031181534 * self._dataset.latitude ** 2 -
+             0.0943533e-07 * self._dataset.latitude ** 3 +
+             3.0191435e-08 * self._dataset.latitude ** 4 +
+             4.968415428e-12 * self._dataset.latitude ** 5 -
+             9.315371305e-13 * self._dataset.latitude ** 6) + 29.45
+        self._dataset["offboresight"].attrs['comment'] = 'built from elevation angle and latitude'
+
     @timing
     def load_from_geoloc(self, varnames, lazy_loading=True):
         """
@@ -314,7 +348,8 @@ class RadarSat2Dataset(BaseDataset):
                         interp_func
                     )
                 else:
-                    da_val = interp_func(self._dataset.digital_number.line, self._dataset.digital_number.sample)
+                    da_val = interp_func(
+                        self._dataset.digital_number.line, self._dataset.digital_number.sample)
                     da_var = xr.DataArray(data=da_val, dims=['line', 'sample'],
                                           coords={'line': self._dataset.digital_number.line,
                                                   'sample': self._dataset.digital_number.sample})
@@ -362,7 +397,8 @@ class RadarSat2Dataset(BaseDataset):
         lines = self.sar_meta.geoloc.line
         samples = np.arange(lut.shape[0])
         lut_values_2d = dask.delayed(np.tile)(lut, (lines.shape[0], 1))
-        interp_func = dask.delayed(RectBivariateSpline)(x=lines, y=samples, z=lut_values_2d, kx=1, ky=1)
+        interp_func = dask.delayed(RectBivariateSpline)(
+            x=lines, y=samples, z=lut_values_2d, kx=1, ky=1)
         """var = inter_func(self._dataset.digital_number.line, self._dataset.digital_number.sample)
         da_var = xr.DataArray(data=var, dims=['line', 'sample'],
                               coords={'line': self._dataset.digital_number.line,
@@ -409,11 +445,13 @@ class RadarSat2Dataset(BaseDataset):
         try:
             lut_name = self._map_var_lut_noise[var_name]
         except KeyError:
-            raise ValueError("can't find noise lut name for var '%s'" % var_name)
+            raise ValueError(
+                "can't find noise lut name for var '%s'" % var_name)
         try:
             lut = self.sar_meta.dt['radarParameters'][lut_name]
         except KeyError:
-            raise ValueError("can't find noise lut from name '%s' for variable '%s'" % (lut_name, var_name))
+            raise ValueError(
+                "can't find noise lut from name '%s' for variable '%s'" % (lut_name, var_name))
         return lut
 
     @timing
@@ -442,8 +480,10 @@ class RadarSat2Dataset(BaseDataset):
         noise_values = (10 ** (initial_lut / 10))
         lines = np.arange(self.sar_meta.geoloc.line[-1] + 1)
         noise_values_2d = np.tile(noise_values, (lines.shape[0], 1))
-        indexes = [first_pix + step * i for i in range(0, noise_values.shape[0])]
-        interp_func = dask.delayed(RectBivariateSpline)(x=lines, y=indexes, z=noise_values_2d, kx=1, ky=1)
+        indexes = [first_pix + step *
+                   i for i in range(0, noise_values.shape[0])]
+        interp_func = dask.delayed(RectBivariateSpline)(
+            x=lines, y=indexes, z=noise_values_2d, kx=1, ky=1)
         da_var = map_blocks_coords(
             self._da_tmpl.astype(self._dtypes['noise_lut']),
             interp_func
@@ -469,7 +509,8 @@ class RadarSat2Dataset(BaseDataset):
         concat_list = []
         # add pol dim
         for pol in self._dataset.pol:
-            lut_noise = self._interpolate_for_noise_lut(var_name).assign_coords(pol=pol).expand_dims('pol')
+            lut_noise = self._interpolate_for_noise_lut(
+                var_name).assign_coords(pol=pol).expand_dims('pol')
             concat_list.append(lut_noise)
         return xr.concat(concat_list, dim='pol').to_dataset(name=name)
 
@@ -484,13 +525,16 @@ class RadarSat2Dataset(BaseDataset):
         for var_name, lut_name in self._map_var_lut.items():
             if lut_name in self._luts:
                 # merge var_name into dataset (not denoised)
-                self._dataset = self._dataset.merge(self._apply_calibration_lut(var_name))
+                self._dataset = self._dataset.merge(
+                    self._apply_calibration_lut(var_name))
                 # merge noise equivalent for var_name (named 'ne%sz' % var_name[0])
                 self._dataset = self._dataset.merge(self._get_noise(var_name))
             else:
-                logger.debug("Skipping variable '%s' ('%s' lut is missing)" % (var_name, lut_name))
+                logger.debug("Skipping variable '%s' ('%s' lut is missing)" % (
+                    var_name, lut_name))
         self._dataset = self._add_denoised(self._dataset)
-        self.datatree['measurement'] = self.datatree['measurement'].assign(self._dataset)
+        self.datatree['measurement'] = self.datatree['measurement'].assign(
+            self._dataset)
         # self._dataset = self.datatree[
         #     'measurement'].to_dataset()  # test oct 22 to see if then I can modify variables of the dt
         return
@@ -575,7 +619,8 @@ class RadarSat2Dataset(BaseDataset):
         pass_direction = self.sar_meta.dt.attrs['passDirection']
         flipped_cases = [('Left', 'Ascending'), ('Right', 'Descending')]
         if (antenna_pointing, pass_direction) in flipped_cases:
-            new_ds = ds.copy().isel(sample=slice(None, None, -1)).assign_coords(sample=ds.sample)
+            new_ds = ds.copy().isel(sample=slice(None, None, -1)
+                                    ).assign_coords(sample=ds.sample)
         else:
             new_ds = ds
         return new_ds
@@ -620,7 +665,8 @@ class RadarSat2Dataset(BaseDataset):
         lines = self.sar_meta.geoloc.line
         samples = self.sar_meta.geoloc.pixel
         time_values_2d = np.tile(times, (samples.shape[0], 1)).transpose()
-        interp_func = RectBivariateSpline(x=lines, y=samples, z=time_values_2d.astype(float), kx=1, ky=1)
+        interp_func = RectBivariateSpline(
+            x=lines, y=samples, z=time_values_2d.astype(float), kx=1, ky=1)
         da_var = map_blocks_coords(
             self._da_tmpl.astype('datetime64[ns]'),
             interp_func
@@ -652,7 +698,8 @@ class RadarSat2Dataset(BaseDataset):
         vels = np.sqrt(np.sum(velos, axis=0))
         interp_f = interp1d(azimuth_times.astype(float), vels)
         _vels = interp_f(interp_times.astype(float))
-        res = xr.DataArray(_vels, dims=['line'], coords={'line': self.dataset.line})
+        res = xr.DataArray(_vels, dims=['line'], coords={
+                           'line': self.dataset.line})
         return xr.Dataset({'velocity': res})
 
     def _reconfigure_reader_datatree(self):
@@ -667,7 +714,7 @@ class RadarSat2Dataset(BaseDataset):
         """
 
         dic = {'measurement': self.datatree['measurement'],
-                'geolocation_annotation': self.datatree['geolocation_annotation'],
+               'geolocation_annotation': self.datatree['geolocation_annotation'],
                }
 
         def del_items_in_dt(dt, list_items):
@@ -706,14 +753,17 @@ class RadarSat2Dataset(BaseDataset):
         new_dt['lut'] = dt['lut'].rename(rename_lut)
 
         # extract noise_lut, rename and put these in a dataset
-        new_dt['noise_lut'] = dt['radarParameters'].rename(rename_radarParameters)
+        new_dt['noise_lut'] = dt['radarParameters'].rename(
+            rename_radarParameters)
         new_dt['noise_lut'].attrs = {}  # reset attributes
-        delete_list = get_list_keys_delete(new_dt['noise_lut'], rename_radarParameters.values(), inside=False)
+        delete_list = get_list_keys_delete(
+            new_dt['noise_lut'], rename_radarParameters.values(), inside=False)
         del_items_in_dt(new_dt['noise_lut'], delete_list)
 
         # Create a dataset for radar parameters without the noise luts
         new_dt['radarParameters'] = dt['radarParameters']
-        delete_list = get_list_keys_delete(new_dt['radarParameters'], rename_radarParameters.keys())
+        delete_list = get_list_keys_delete(
+            new_dt['radarParameters'], rename_radarParameters.keys())
         del_items_in_dt(new_dt['radarParameters'], delete_list)
 
         # extract other datasets
@@ -721,7 +771,8 @@ class RadarSat2Dataset(BaseDataset):
         for key in dt.copy():
             if key not in exclude:
                 if key == 'imageGenerationParameters':
-                    new_dt[key] = datatree.DataTree(parent=None, children=copy_dt[key])
+                    new_dt[key] = datatree.DataTree(
+                        parent=None, children=copy_dt[key])
                 else:
                     new_dt[key] = copy_dt[key]
         self.datatree = new_dt
@@ -767,5 +818,3 @@ class RadarSat2Dataset(BaseDataset):
     def rs2meta(self):
         logger.warning('Please use `sar_meta` to call the sar meta object')
         return self.sar_meta
-
-
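
Note: the new `_get_offboresight_from_elevation` subtracts a degree-6
polynomial in latitude from the elevation angle and adds a constant offset of
29.45 degrees; the same polynomial already existed for Sentinel-1 (see
sentinel1_dataset.py below) and is now shared by RadarSat-2 and RCM. A
standalone sketch of the computation, assuming plain numpy arrays
(coefficients copied from the hunk above; input values are illustrative):

    import numpy as np
    from numpy.polynomial import polynomial as P

    # c0..c6, lowest degree first, as in the diff
    coeffs = [30.1833947, 0.0082998714, -0.00031181534, -0.0943533e-07,
              3.0191435e-08, 4.968415428e-12, -9.315371305e-13]

    lat = np.array([-45.0, 0.0, 45.0])    # latitude [deg], illustrative
    elev = np.array([35.0, 36.0, 37.0])   # elevation angle [deg], illustrative

    offboresight = elev - P.polyval(lat, coeffs) + 29.45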


=====================================
src/xsar/rcm_dataset.py
=====================================
@@ -22,7 +22,8 @@ logger = logging.getLogger('xsar.radarsat2_dataset')
 logger.addHandler(logging.NullHandler())
 
 # we know the tiff has no geotransform: ignore warning
-warnings.filterwarnings("ignore", category=rasterio.errors.NotGeoreferencedWarning)
+warnings.filterwarnings(
+    "ignore", category=rasterio.errors.NotGeoreferencedWarning)
 
 # allow nan without warnings
 # some dask warnings are still not filtered: https://github.com/dask/dask/issues/3245
@@ -94,7 +95,8 @@ class RcmDataset(BaseDataset):
             # assert isinstance(rs2meta.coords2ll(100, 100),tuple)
         else:
             # we want self.rs2meta to be a dask actor on a worker
-            self.sar_meta = BlockingActorProxy(RcmMeta.from_dict, dataset_id.dict)
+            self.sar_meta = BlockingActorProxy(
+                RcmMeta.from_dict, dataset_id.dict)
         del dataset_id
 
         if self.sar_meta.multidataset:
@@ -103,15 +105,16 @@ class RcmDataset(BaseDataset):
             )
 
         # build datatree
-        DN_tmp = self.load_digital_number(resolution=resolution, resampling=resampling, chunks=chunks)
+        DN_tmp = self.load_digital_number(
+            resolution=resolution, resampling=resampling, chunks=chunks)
         DN_tmp = self.flip_sample_da(DN_tmp)
         DN_tmp = self.flip_line_da(DN_tmp)
 
-        ### geoloc
+        # geoloc
         geoloc = self.sar_meta.geoloc
         geoloc.attrs['history'] = 'annotations'
 
-        ### orbitInformation
+        # orbitInformation
         orbit = self.sar_meta.orbit
         orbit.attrs['history'] = 'annotations'
 
@@ -120,6 +123,10 @@ class RcmDataset(BaseDataset):
 
         self._dataset = self.datatree['measurement'].to_dataset()
 
+        # merge the datatree with the reader
+        for group in self.sar_meta.dt:
+            self.datatree[group] = self.sar_meta.dt[group]
+
         # dict mapping for calibration type in the reader
         self._map_calibration_type = {
             'sigma0': 'Sigma Nought',
@@ -134,8 +141,7 @@ class RcmDataset(BaseDataset):
         }
 
         geoloc_vars = ['latitude', 'longitude', 'altitude',
-                       'incidence', 'elevation'
-                       ]
+                       'incidence', 'elevation']
         for vv in skip_variables:
             if vv in geoloc_vars:
                 geoloc_vars.remove(vv)
@@ -151,26 +157,29 @@ class RcmDataset(BaseDataset):
         # self._load_incidence_from_lut()
         refe_spacing = 'slant'
         if resolution is not None:
-            refe_spacing = 'ground'  # if the data sampling changed it means that the quantities are projected on ground
+            # if the data sampling changed, the quantities are projected onto the ground
+            refe_spacing = 'ground'
             if isinstance(resolution, str):
                 value_res_sample = float(resolution.replace('m', ''))
                 value_res_line = value_res_sample
             elif isinstance(resolution, dict):
-                value_res_sample = self.sar_meta.pixel_sample_m * resolution['sample']
-                value_res_line = self.sar_meta.pixel_line_m * resolution['line']
+                value_res_sample = self.sar_meta.pixel_sample_m * \
+                    resolution['sample']
+                value_res_line = self.sar_meta.pixel_line_m * \
+                    resolution['line']
             else:
                logger.warning('resolution type not handled (%s); should be str or dict -> sampleSpacing'
                                ' and lineSpacing are not correct', type(resolution))
         self._dataset['sampleSpacing'] = \
             xr.DataArray(value_res_sample,
                          attrs={'referential': refe_spacing} |
-                               self.sar_meta
-                                        .dt['imageReferenceAttributes/rasterAttributes']['sampledPixelSpacing'].attrs
+                         self.sar_meta
+                         .dt['imageReferenceAttributes/rasterAttributes']['sampledPixelSpacing'].attrs
                          )
         self._dataset['lineSpacing'] = \
             xr.DataArray(value_res_line,
                          attrs=self.sar_meta
-                                        .dt['imageReferenceAttributes/rasterAttributes']['sampledLineSpacing'].attrs
+                         .dt['imageReferenceAttributes/rasterAttributes']['sampledLineSpacing'].attrs
                          )
 
         # dataset no-pol template for function evaluation on coordinates (*no* values used)
@@ -190,35 +199,44 @@ class RcmDataset(BaseDataset):
             self._dataset = xr.merge([
                 xr.DataArray(data=self.sar_meta.samples_flipped,
                              attrs={'meaning':
-                                        'xsar convention : increasing incidence values along samples axis'}
+                                    'xsar convention : increasing incidence values along samples axis'}
                              ).to_dataset(name='samples_flipped'),
                 self._dataset
             ])
             self._dataset = xr.merge([
                 xr.DataArray(data=self.sar_meta.lines_flipped,
                              attrs={'meaning':
-                                        'xsar convention : increasing time along line axis '
-                                        '(whatever ascending or descending pass direction)'}
+                                    'xsar convention : increasing time along line axis '
+                                    '(whatever ascending or descending pass direction)'}
                              ).to_dataset(name='lines_flipped'),
                 self._dataset
             ])
 
         self._luts = self.lazy_load_luts()
         self._noise_luts = self.lazy_load_noise_luts()
-        self._noise_luts = self._noise_luts.drop(['pixelFirstNoiseValue', 'stepSize'])
+        self._noise_luts = self._noise_luts.drop(
+            ['pixelFirstNoiseValue', 'stepSize'])
         self.apply_calibration_and_denoising()
-        self._dataset = xr.merge([self.load_from_geoloc(geoloc_vars, lazy_loading=lazyloading), self._dataset])
+        self._dataset = xr.merge([self.load_from_geoloc(
+            geoloc_vars, lazy_loading=lazyloading), self._dataset])
+        # compute offboresight in self._dataset
+        self._get_offboresight_from_elevation()
+
         rasters = self._load_rasters_vars()
         if rasters is not None:
             self._dataset = xr.merge([self._dataset, rasters])
         if 'ground_heading' not in skip_variables:
-            self._dataset = xr.merge([self.load_ground_heading(), self._dataset])
+            self._dataset = xr.merge(
+                [self.load_ground_heading(), self._dataset])
         if 'velocity' not in skip_variables:
-            self._dataset = xr.merge([self.get_sensor_velocity(), self._dataset])
+            self._dataset = xr.merge(
+                [self.get_sensor_velocity(), self._dataset])
         self._rasterized_masks = self.load_rasterized_masks()
         self._dataset = xr.merge([self._rasterized_masks, self._dataset])
-        self.datatree['measurement'] = self.datatree['measurement'].assign(self._dataset)
+        self.datatree['measurement'] = self.datatree['measurement'].assign(
+            self._dataset)
         # merge the datatree with the reader
+
         self.reconfigure_reader_datatree()
         self._dataset.attrs.update(self.sar_meta.to_dict("all"))
         self.datatree.attrs.update(self.sar_meta.to_dict("all"))
@@ -239,14 +257,16 @@ class RcmDataset(BaseDataset):
                 lut = self.sar_meta.lut.lookup_tables.sel(sarCalibrationType=value, pole=pola)\
                     .rename({
                         'pixel': 'sample',
-                        }
-                    )
+                    }
+                )
                 values_nb = lut.attrs['numberOfValues']
                 lut_f_delayed = dask.delayed()(lut)
-                ar = dask.array.from_delayed(lut_f_delayed.data, (values_nb,), lut.dtype)
+                ar = dask.array.from_delayed(
+                    lut_f_delayed.data, (values_nb,), lut.dtype)
                 da = xr.DataArray(data=ar, dims=['sample'], coords={'sample': lut.sample},
                                   attrs=lut.attrs)
-                da = self._interpolate_var(da).assign_coords(pole=pola).rename({'pole': 'pol'})
+                da = self._interpolate_var(da).assign_coords(
+                    pole=pola).rename({'pole': 'pol'})
                 list_da.append(da)
             full_da = xr.concat(list_da, dim='pol')
             full_da.attrs = lut.attrs
@@ -271,10 +291,11 @@ class RcmDataset(BaseDataset):
                 lut = self.sar_meta.noise_lut.noiseLevelValues.sel(sarCalibrationType=value, pole=pola)\
                     .rename({
                         'pixel': 'sample',
-                        }
-                    )
+                    }
+                )
                 lut_f_delayed = dask.delayed()(lut)
-                ar = dask.array.from_delayed(lut_f_delayed.data, (values_nb,), lut.dtype)
+                ar = dask.array.from_delayed(
+                    lut_f_delayed.data, (values_nb,), lut.dtype)
                 da = xr.DataArray(data=ar, dims=['sample'], coords={'sample': lut.sample},
                                   attrs=lut.attrs)
                 da = self._interpolate_var(da, type='noise').assign_coords(pole=pola)\
@@ -308,7 +329,8 @@ class RcmDataset(BaseDataset):
         """
         accepted_types = ["lut", "noise", "incidence"]
         if type not in accepted_types:
-            raise ValueError("Please enter a type accepted ('lut', 'noise', 'incidence')")
+            raise ValueError(
+                "Please enter a type accepted ('lut', 'noise', 'incidence')")
         lines = self.sar_meta.geoloc.line
         samples = var.sample
         var_type = None
@@ -321,7 +343,8 @@ class RcmDataset(BaseDataset):
         elif type == 'incidence':
             var_type = self._dtypes[type]
         var_2d = np.tile(var, (lines.shape[0], 1))
-        interp_func = dask.delayed(RectBivariateSpline)(x=lines, y=samples, z=var_2d, kx=1, ky=1)
+        interp_func = dask.delayed(RectBivariateSpline)(
+            x=lines, y=samples, z=var_2d, kx=1, ky=1)
         da_var = map_blocks_coords(
             self._da_tmpl.astype(var_type),
             interp_func
@@ -373,7 +396,8 @@ class RcmDataset(BaseDataset):
         try:
             lut = self._noise_luts[lut_name]
         except KeyError:
-            raise ValueError("can't find noise lut from name '%s' for variable '%s' " % (lut_name, var_name))
+            raise ValueError(
+                "can't find noise lut from name '%s' for variable '%s' " % (lut_name, var_name))
         return lut.to_dataset(name=name)
 
     def apply_calibration_and_denoising(self):
@@ -387,13 +411,16 @@ class RcmDataset(BaseDataset):
         for var_name, lut_name in self._map_var_lut.items():
             if lut_name in self._luts:
                 # merge var_name into dataset (not denoised)
-                self._dataset = self._dataset.merge(self._apply_calibration_lut(var_name))
+                self._dataset = self._dataset.merge(
+                    self._apply_calibration_lut(var_name))
                 # merge noise equivalent for var_name (named 'ne%sz' % var_name[0])
                 self._dataset = self._dataset.merge(self._get_noise(var_name))
             else:
-                logger.debug("Skipping variable '%s' ('%s' lut is missing)" % (var_name, lut_name))
+                logger.debug("Skipping variable '%s' ('%s' lut is missing)" % (
+                    var_name, lut_name))
         self._dataset = self._add_denoised(self._dataset)
-        self.datatree['measurement'] = self.datatree['measurement'].assign(self._dataset)
+        self.datatree['measurement'] = self.datatree['measurement'].assign(
+            self._dataset)
         # self._dataset = self.datatree[
         #     'measurement'].to_dataset()  # test oct 22 to see if then I can modify variables of the dt
         return
@@ -415,22 +442,43 @@ class RcmDataset(BaseDataset):
         xarray.DataSet
             dataset with denoised vars
         """
+        already_denoised = self.datatree['imageGenerationParameters'][
+            'sarProcessingInformation'].attrs['noiseSubtractionPerformed']
+
         if vars is None:
             vars = ['sigma0', 'beta0', 'gamma0']
         for varname in vars:
+
             varname_raw = varname + '_raw'
             noise = 'ne%sz' % varname[0]
             if varname_raw not in ds:
                 continue
             else:
-                denoised = ds[varname_raw] - ds[noise]
 
-                if clip:
-                    denoised = denoised.clip(min=0)
-                    denoised.attrs['comment'] = 'clipped, no values <0'
+                if not already_denoised:
+                    denoised = ds[varname_raw] - ds[noise]
+                    if clip:
+                        denoised = denoised.clip(min=0)
+                        denoised.attrs['comment'] = 'clipped, no values <0'
+                    else:
+                        denoised.attrs['comment'] = 'not clipped, some values can be <0'
+                    ds[varname] = denoised
+
                 else:
-                    denoised.attrs['comment'] = 'not clipped, some values can be <0'
-                ds[varname] = denoised
+                    logger.debug(
+                        "product was already denoised (noiseSubtractionPerformed = True), noise added back")
+                    denoised = ds[varname_raw]
+                    denoised.attrs['denoising information'] = 'already denoised by Canadian Agency'
+                    if clip:
+                        denoised = denoised.clip(min=0)
+                        denoised.attrs['comment'] = 'clipped, no values <0'
+                    else:
+                        denoised.attrs['comment'] = 'not clipped, some values can be <0'
+                    ds[varname] = denoised
+
+                    ds[varname_raw] = denoised + ds[noise]
+                    ds[varname_raw].attrs['denoising information'] = 'product was already denoised, noise added back'
+
         return ds
 
     def _load_incidence_from_lut(self):
@@ -446,7 +494,8 @@ class RcmDataset(BaseDataset):
         angles = incidence.angles
         values_nb = incidence.attrs['numberOfValues']
         lut_f_delayed = dask.delayed()(angles)
-        ar = dask.array.from_delayed(lut_f_delayed.data, (values_nb,), self._dtypes['incidence'])
+        ar = dask.array.from_delayed(
+            lut_f_delayed.data, (values_nb,), self._dtypes['incidence'])
         da = xr.DataArray(data=ar, dims=['sample'], coords={'sample': angles.sample},
                           attrs=angles.attrs)
         da = self._interpolate_var(da, type='incidence')
@@ -473,6 +522,25 @@ class RcmDataset(BaseDataset):
         inside = angle_rad * earth_radius / (earth_radius + satellite_height)
         return np.degrees(np.arcsin(inside))
 
+    @timing
+    def _get_offboresight_from_elevation(self):
+        """
+        Compute offboresight angle.
+
+        Returns
+        -------
+
+        """
+        self._dataset["offboresight"] = self._dataset.elevation - \
+            (30.1833947 * self._dataset.latitude ** 0 +
+             0.0082998714 * self._dataset.latitude ** 1 -
+             0.00031181534 * self._dataset.latitude ** 2 -
+             0.0943533e-07 * self._dataset.latitude ** 3 +
+             3.0191435e-08 * self._dataset.latitude ** 4 +
+             4.968415428e-12 * self._dataset.latitude ** 5 -
+             9.315371305e-13 * self._dataset.latitude ** 6) + 29.45
+        self._dataset["offboresight"].attrs['comment'] = 'built from elevation angle and latitude'
+
     @timing
     def load_from_geoloc(self, varnames, lazy_loading=True):
         """
@@ -508,6 +576,7 @@ class RcmDataset(BaseDataset):
                 da = self._load_elevation_from_lut()
                 da.name = varname
                 da_list.append(da)
+
             else:
                 if varname == 'longitude':
                     z_values = self.sar_meta.geoloc[varname]
@@ -529,7 +598,8 @@ class RcmDataset(BaseDataset):
                         interp_func
                     )
                 else:
-                    da_val = interp_func(self._dataset.digital_number.line, self._dataset.digital_number.sample)
+                    da_val = interp_func(
+                        self._dataset.digital_number.line, self._dataset.digital_number.sample)
                     da_var = xr.DataArray(data=da_val, dims=['line', 'sample'],
                                           coords={'line': self._dataset.digital_number.line,
                                                   'sample': self._dataset.digital_number.sample})
@@ -563,7 +633,8 @@ class RcmDataset(BaseDataset):
         lines = self.sar_meta.geoloc.line
         samples = self.sar_meta.geoloc.pixel
         time_values_2d = np.tile(times, (samples.shape[0], 1)).transpose()
-        interp_func = RectBivariateSpline(x=lines, y=samples, z=time_values_2d.astype(float), kx=1, ky=1)
+        interp_func = RectBivariateSpline(
+            x=lines, y=samples, z=time_values_2d.astype(float), kx=1, ky=1)
         da_var = map_blocks_coords(
             self._da_tmpl.astype('datetime64[ns]'),
             interp_func
@@ -585,7 +656,8 @@ class RcmDataset(BaseDataset):
         vels = np.sqrt(np.sum(velos, axis=0))
         interp_f = interp1d(azimuth_times.astype(float), vels)
         _vels = interp_f(self.interpolate_times['time'].astype(float))
-        res = xr.DataArray(_vels, dims=['line'], coords={'line': self.dataset.line})
+        res = xr.DataArray(_vels, dims=['line'], coords={
+                           'line': self.dataset.line})
         return xr.Dataset({'velocity': res})
 
     @timing
@@ -662,7 +734,8 @@ class RcmDataset(BaseDataset):
 
         chunks['pol'] = 1
         # sort chunks keys like map_dims
-        chunks = dict(sorted(chunks.items(), key=lambda pair: list(map_dims.keys()).index(pair[0])))
+        chunks = dict(sorted(chunks.items(), key=lambda pair: list(
+            map_dims.keys()).index(pair[0])))
         chunks_rio = {map_dims[d]: chunks[d] for d in map_dims.keys()}
         self.resolution = None
         if resolution is None:
@@ -729,9 +802,11 @@ class RcmDataset(BaseDataset):
             ).chunk(chunks)
 
             # create coordinates at box center
-            translate = Affine.translation((resolution['sample'] - 1) / 2, (resolution['line'] - 1) / 2)
+            translate = Affine.translation(
+                (resolution['sample'] - 1) / 2, (resolution['line'] - 1) / 2)
             scale = Affine.scale(
-                rio.width // resolution['sample'] * resolution['sample'] / out_shape[1],
+                rio.width // resolution['sample'] *
+                resolution['sample'] / out_shape[1],
                 rio.height // resolution['line'] * resolution['line'] / out_shape[0])
             sample, _ = translate * scale * (dn.sample, 0)
             _, line = translate * scale * (0, dn.line)
@@ -787,7 +862,8 @@ class RcmDataset(BaseDataset):
         pass_direction = self.sar_meta.dt['sourceAttributes/orbitAndAttitude/orbitInformation'].attrs['passDirection']
         flipped_cases = [('Left', 'Ascending'), ('Right', 'Descending')]
         if (antenna_pointing, pass_direction) in flipped_cases:
-            new_ds = ds.copy().isel(sample=slice(None, None, -1)).assign_coords(sample=ds.sample)
+            new_ds = ds.copy().isel(sample=slice(None, None, -1)
+                                    ).assign_coords(sample=ds.sample)
         else:
             new_ds = ds
         return new_ds
@@ -818,11 +894,8 @@ class RcmDataset(BaseDataset):
 
     def reconfigure_reader_datatree(self):
         """
-        Merge self.datatree with the reader's one.
         Merge attributes of the reader's datatree in the attributes of self.datatree
         """
-        for group in self.sar_meta.dt:
-            self.datatree[group] = self.sar_meta.dt[group]
         self.datatree.attrs |= self.sar_meta.dt.attrs
         return
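
Note: `_add_denoised` now honours the `noiseSubtractionPerformed` flag from
`sarProcessingInformation`: when the product was already denoised upstream,
the `*_raw` variable is taken as the denoised one and a raw estimate is
reconstructed by adding the noise LUT back. A minimal sketch of that
branching on a toy xarray Dataset (variable names follow the hunk; the
values are illustrative):

    import numpy as np
    import xarray as xr

    ds = xr.Dataset({
        'sigma0_raw': ('sample', np.array([0.10, -0.02, 0.30])),
        'nesz': ('sample', np.array([0.05, 0.05, 0.05])),
    })
    already_denoised = True  # from sarProcessingInformation attrs

    if not already_denoised:
        ds['sigma0'] = (ds['sigma0_raw'] - ds['nesz']).clip(min=0)
    else:
        # values are already noise-free: keep them, rebuild a raw estimate
        ds['sigma0'] = ds['sigma0_raw'].clip(min=0)
        ds['sigma0_raw'] = ds['sigma0'] + ds['nesz']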
 


=====================================
src/xsar/sentinel1_dataset.py
=====================================
@@ -139,16 +139,18 @@ class Sentinel1Dataset(BaseDataset):
         # geoloc
         geoloc = self.sar_meta.geoloc
         geoloc.attrs['history'] = 'annotations'
+
+        #  offboresight angle
         geoloc["offboresightAngle"] = geoloc.elevationAngle - \
-            (30.1833947 * geoloc.latitude ** 0 + \
-            0.0082998714 * geoloc.latitude ** 1 - \
-            0.00031181534 * geoloc.latitude ** 2 - \
-            0.0943533e-07 * geoloc.latitude ** 3 + \
-            3.0191435e-08 * geoloc.latitude ** 4 + \
-            4.968415428e-12 *geoloc.latitude ** 5 - \
-            9.315371305e-13 * geoloc.latitude ** 6) + 29.45
-        geoloc["offboresightAngle"].attrs['comment']='built from elevation angle and latitude'
-        
+            (30.1833947 * geoloc.latitude ** 0 +
+             0.0082998714 * geoloc.latitude ** 1 -
+             0.00031181534 * geoloc.latitude ** 2 -
+             0.0943533e-07 * geoloc.latitude ** 3 +
+             3.0191435e-08 * geoloc.latitude ** 4 +
+             4.968415428e-12 * geoloc.latitude ** 5 -
+             9.315371305e-13 * geoloc.latitude ** 6) + 29.45
+        geoloc["offboresightAngle"].attrs['comment'] = 'built from elevation angle and latitude'
+
         # bursts
         bu = self.sar_meta._bursts
         bu.attrs['history'] = 'annotations'
@@ -292,10 +294,9 @@ class Sentinel1Dataset(BaseDataset):
 
         # load land_mask by default for GRD products
 
-
         if 'GRD' in str(self.datatree.attrs['product']):
             self.add_high_resolution_variables(
-                                   patch_variable=patch_variable, luts=luts, lazy_loading=lazyloading)
+                patch_variable=patch_variable, luts=luts, lazy_loading=lazyloading)
             if self.apply_recalibration:
                 self.select_gains()
             self.apply_calibration_and_denoising()
@@ -304,7 +305,8 @@ class Sentinel1Dataset(BaseDataset):
         self.datatree['measurement'].attrs = self.datatree.attrs
         if self.sar_meta.product == 'SLC' and 'WV' not in self.sar_meta.swath:  # TOPS cases
             tmp = self.corrected_range_noise_lut(self.datatree)
-            self.datatree['noise_range'].ds = tmp # the corrcted noise_range dataset shold now contain an attrs 'corrected_range_noise_lut'
+            # the corrected noise_range dataset should now contain the attribute 'corrected_range_noise_lut'
+            self.datatree['noise_range'].ds = tmp
         self.sliced = False
         """True if dataset is a slice of original L1 dataset"""
 
@@ -314,7 +316,7 @@ class Sentinel1Dataset(BaseDataset):
         # save original bbox
         self._bbox_coords_ori = self._bbox_coords
 
-    def corrected_range_noise_lut(self,dt):
+    def corrected_range_noise_lut(self, dt):
         """
         Patch F.Nouguier see https://jira-projects.cls.fr/browse/MPCS-3581 and https://github.com/umr-lops/xsar_slc/issues/175
         Return range noise lut with corrected line numbering. This function should be used only on the full SLC dataset dt
@@ -326,21 +328,27 @@ class Sentinel1Dataset(BaseDataset):
         # Detection of azimuthTime jumps (linked to burst changes). Burst sensingTime should NOT be used since they have erroneous values too!
         line_shift = 0
         tt = dt['measurement']['time']
-        i_jump = np.ravel(np.argwhere(np.diff(tt)<np.timedelta64(0))+1) # index of jumps
-        line_jump_meas = dt['measurement']['line'][i_jump] # line number of jumps
+        i_jump = np.ravel(np.argwhere(
+            np.diff(tt) < np.timedelta64(0))+1)  # index of jumps
+        # line number of jumps
+        line_jump_meas = dt['measurement']['line'][i_jump]
         # line_jump_noise = np.ravel(dt['noise_range']['line'][1:-1].data) # annotated line number of burst beginning; this one is corrupted for some S1 TOPS products
-        line_jump_noise = np.ravel(dt['noise_range']['line'][1:1+len(line_jump_meas)].data) # annoted line number of burst begining
+        # annotated line number of burst beginning
+        line_jump_noise = np.ravel(
+            dt['noise_range']['line'][1:1+len(line_jump_meas)].data)
         burst_first_lineshift = line_jump_meas-line_jump_noise
-        if len(np.unique(burst_first_lineshift))==1:
+        if len(np.unique(burst_first_lineshift)) == 1:
             line_shift = int(np.unique(burst_first_lineshift)[0])
-            logging.debug('line_shift: %s',line_shift)
+            logging.debug('line_shift: %s', line_shift)
         else:
-            raise ValueError('Inconsistency in line shifting : {}'.format(burst_first_lineshift))
-        res = dt['noise_range'].ds.assign_coords({'line':dt['noise_range']['line']+line_shift})
-        if line_shift==0:
+            raise ValueError(
+                'Inconsistency in line shifting : {}'.format(burst_first_lineshift))
+        res = dt['noise_range'].ds.assign_coords(
+            {'line': dt['noise_range']['line']+line_shift})
+        if line_shift == 0:
             res.attrs['corrected_range_noise_lut'] = 'no change'
         else:
-            res.attrs['corrected_range_noise_lut'] = 'shift : %i lines'%line_shift
+            res.attrs['corrected_range_noise_lut'] = 'shift : %i lines' % line_shift
         return res
 
     def select_gains(self):
@@ -508,24 +516,28 @@ class Sentinel1Dataset(BaseDataset):
 
             self._dataset = self._dataset.merge(
                 self._load_from_geoloc(geoloc_vars, lazy_loading=lazy_loading))
-                         
+
             if 'GRD' in str(self.datatree.attrs['product']):
                 self.add_swath_number()
-                
+
                 if self.apply_recalibration:
                     path_dataframe_aux = config["path_dataframe_aux"]
                     dataframe_aux = pd.read_csv(path_dataframe_aux)
 
-                    sel_cal = dataframe_aux.loc[(dataframe_aux.sat_name == self.sar_meta.manifest_attrs['satellite']) & 
+                    sel_cal = dataframe_aux.loc[(dataframe_aux.sat_name == self.sar_meta.manifest_attrs['satellite']) &
                                                 (dataframe_aux.aux_type == "CAL") &
+                                                (dataframe_aux.icid == int(self.sar_meta.manifest_attrs['icid'])) &
                                                 (dataframe_aux.validation_date <= self.sar_meta.start_date)]
-                    sel_cal = sel_cal.sort_values(by = ["validation_date","generation_date"],ascending=False)
+                    sel_cal = sel_cal.sort_values(
+                        by=["validation_date", "generation_date"], ascending=False)
                     path_new_cal = sel_cal.iloc[0].aux_path
 
-                    sel_pp1 = dataframe_aux.loc[(dataframe_aux.sat_name == self.sar_meta.manifest_attrs['satellite']) & 
-                                (dataframe_aux.aux_type == "PP1") 
-                                & (dataframe_aux.validation_date <= self.sar_meta.start_date)]
-                    sel_pp1 = sel_pp1.sort_values(by = ["validation_date","generation_date"],ascending=False)
+                    sel_pp1 = dataframe_aux.loc[(dataframe_aux.sat_name == self.sar_meta.manifest_attrs['satellite']) &
+                                                (dataframe_aux.aux_type == "PP1") &
+                                                (dataframe_aux.icid == int(self.sar_meta.manifest_attrs['icid'])) &
+                                                (dataframe_aux.validation_date <= self.sar_meta.start_date)]
+                    sel_pp1 = sel_pp1.sort_values(
+                        by=["validation_date", "generation_date"], ascending=False)
                     path_new_pp1 = sel_pp1.iloc[0].aux_path
 
                     self.add_gains(path_new_cal, path_new_pp1)
@@ -734,7 +746,7 @@ class Sentinel1Dataset(BaseDataset):
 
         self.datatree['recalibration'] = self.datatree['recalibration'].assign(
             self._dataset_recalibration)
-        
+
         self.datatree['recalibration'].attrs["path_aux_cal_new"] = path_aux_cal_new
         self.datatree['recalibration'].attrs["path_aux_pp1_new"] = path_aux_pp1_new
         self.datatree['recalibration'].attrs["path_aux_cal_old"] = path_aux_cal_old
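
Note: `corrected_range_noise_lut` infers the per-burst line shift by locating
backward jumps in the measurement azimuth time instead of trusting the
annotated noise-LUT lines, which are corrupted in some S1 TOPS products. A
minimal sketch of the jump detection on a synthetic datetime64 axis (values
are illustrative):

    import numpy as np

    # azimuth time restarts at each burst: a negative diff marks a boundary
    tt = np.array(['2024-01-01T00:00:00.000', '2024-01-01T00:00:01.000',
                   '2024-01-01T00:00:00.500', '2024-01-01T00:00:01.500'],
                  dtype='datetime64[ns]')
    i_jump = np.ravel(np.argwhere(np.diff(tt) < np.timedelta64(0)) + 1)
    print(i_jump)  # [2]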


=====================================
src/xsar/xsar.py
=====================================
@@ -189,9 +189,9 @@ def product_info(path, columns='minimal', include_multi=False):
     return df
 
 
-def get_test_file(fname):
+def get_test_file(fname, base_url='https://cyclobs.ifremer.fr/static/sarwing_datarmor/xsardata'):
     """
-    get test file from  https://cyclobs.ifremer.fr/static/sarwing_datarmor/xsardata/
+    get test file from `base_url` (default: https://cyclobs.ifremer.fr/static/sarwing_datarmor/xsardata)
     file is unzipped and extracted to `config['data_dir']`
 
     Parameters
@@ -206,7 +206,6 @@ def get_test_file(fname):
 
     """
     res_path = config['data_dir']
-    base_url = 'https://cyclobs.ifremer.fr/static/sarwing_datarmor/xsardata'
     file_url = '%s/%s.zip' % (base_url, fname)
     if not os.path.exists(os.path.join(res_path, fname)):
         warnings.warn("Downloading %s" % file_url)
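
Note: `get_test_file` now exposes the download mirror as a `base_url`
keyword instead of hard-coding it, so test data can be fetched from any host
serving the same `<fname>.zip` archives. A sketch of both call styles,
assuming the function is exported at package level (the product name and
mirror URL are hypothetical):

    import xsar

    # default Ifremer mirror
    path = xsar.get_test_file('SOME_TEST_PRODUCT')

    # same archive layout on another host
    path = xsar.get_test_file(
        'SOME_TEST_PRODUCT',
        base_url='https://mirror.example.org/xsardata')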



View it on GitLab: https://salsa.debian.org/debian-gis-team/xsar/-/commit/15879c9b6c1ce758cf89ccce527a228cdab993a2
