[Git][debian-gis-team/xsar][upstream] New upstream version 2024.10.21
Antonio Valentino (@antonio.valentino)
gitlab@salsa.debian.org
Thu Oct 24 07:05:48 BST 2024
Antonio Valentino pushed to branch upstream at Debian GIS Project / xsar
Commits:
86938d76 by Antonio Valentino at 2024-10-24T05:56:27+00:00
New upstream version 2024.10.21
- - - - -
7 changed files:
- .git_archival.txt
- .github/workflows/publish.yml
- src/xsar/base_meta.py
- src/xsar/radarsat2_meta.py
- src/xsar/raster_readers.py
- src/xsar/rcm_meta.py
- src/xsar/sentinel1_meta.py
Changes:
=====================================
.git_archival.txt
=====================================
@@ -1 +1 @@
-ref-names: HEAD -> develop, tag: 2024.08.08
\ No newline at end of file
+ref-names: HEAD -> develop, tag: 2024.10.21
\ No newline at end of file
=====================================
.github/workflows/publish.yml
=====================================
@@ -51,4 +51,4 @@ jobs:
path: dist/
- name: Publish to PyPI
- uses: pypa/gh-action-pypi-publish@ec4db0b4ddc65acdf4bff5fa45ac92d78b56bdf0
+ uses: pypa/gh-action-pypi-publish@f7600683efdcb7656dec5b29656edb7bc586e597
=====================================
src/xsar/base_meta.py
=====================================
@@ -22,7 +22,8 @@ logger = logging.getLogger('xsar.base_meta')
logger.addHandler(logging.NullHandler())
# we know tiff as no geotransform : ignore warning
-warnings.filterwarnings("ignore", category=rasterio.errors.NotGeoreferencedWarning)
+warnings.filterwarnings(
+ "ignore", category=rasterio.errors.NotGeoreferencedWarning)
# allow nan without warnings
# some dask warnings are still non filtered: https://github.com/dask/dask/issues/3245
@@ -41,8 +42,6 @@ class BaseMeta(BaseDataset):
'land': cartopy.feature.NaturalEarthFeature('physical', 'land', '10m')
}
- rasters = available_rasters.iloc[0:0].copy()
-
_mask_features = {}
_mask_intersecting_geometries = {}
_mask_geometry = {}
@@ -61,6 +60,9 @@ class BaseMeta(BaseDataset):
safe = None
geoloc = None
+ def __init__(self):
+ self.rasters = available_rasters.iloc[0:0].copy()
+
def _get_mask_feature(self, name):
# internal method that returns a cartopy feature from a mask name
if self._mask_features[name] is None:
@@ -80,14 +82,16 @@ class BaseMeta(BaseDataset):
except KeyError:
crs_in = fshp.crs
crs_in = pyproj.CRS(crs_in)
- proj_transform = pyproj.Transformer.from_crs(pyproj.CRS('EPSG:4326'), crs_in, always_xy=True).transform
+ proj_transform = pyproj.Transformer.from_crs(
+ pyproj.CRS('EPSG:4326'), crs_in, always_xy=True).transform
footprint_crs = transform(proj_transform, self.footprint)
with warnings.catch_warnings():
# ignore "RuntimeWarning: Sequential read of iterator was interrupted. Resetting iterator."
warnings.simplefilter("ignore", RuntimeWarning)
feature = cartopy.feature.ShapelyFeature(
- gpd.read_file(feature, mask=footprint_crs).to_crs(epsg=4326).geometry,
+ gpd.read_file(feature, mask=footprint_crs).to_crs(
+ epsg=4326).geometry,
cartopy.crs.PlateCarree()
)
if not isinstance(feature, cartopy.feature.Feature):
@@ -160,7 +164,8 @@ class BaseMeta(BaseDataset):
descr = self._mask_features_raw[name]
try:
# nice repr for a class (like 'cartopy.feature.NaturalEarthFeature land')
- descr = '%s.%s %s' % (descr.__module__, descr.__class__.__name__, descr.name)
+ descr = '%s.%s %s' % (
+ descr.__module__, descr.__class__.__name__, descr.name)
except AttributeError:
pass
return descr
@@ -343,11 +348,14 @@ class BaseMeta(BaseDataset):
lon, lat = args
# approximation with global inaccurate transform
- line_approx, sample_approx = ~self.approx_transform * (np.asarray(lon), np.asarray(lat))
+ line_approx, sample_approx = ~self.approx_transform * \
+ (np.asarray(lon), np.asarray(lat))
# Theoretical identity. It should be the same, but the difference show the error.
- lon_identity, lat_identity = self.coords2ll(line_approx, sample_approx, to_grid=False)
- line_identity, sample_identity = ~self.approx_transform * (lon_identity, lat_identity)
+ lon_identity, lat_identity = self.coords2ll(
+ line_approx, sample_approx, to_grid=False)
+ line_identity, sample_identity = ~self.approx_transform * \
+ (lon_identity, lat_identity)
# we are now able to compute the error, and make a correction
line_error = line_identity - line_approx
@@ -381,8 +389,10 @@ class BaseMeta(BaseDataset):
"""
- lon1, lat1 = self.coords2ll(lines - 1, samples, to_grid=to_grid, approx=approx)
- lon2, lat2 = self.coords2ll(lines + 1, samples, to_grid=to_grid, approx=approx)
+ lon1, lat1 = self.coords2ll(
+ lines - 1, samples, to_grid=to_grid, approx=approx)
+ lon2, lat2 = self.coords2ll(
+ lines + 1, samples, to_grid=to_grid, approx=approx)
_, heading = haversine(lon1, lat1, lon2, lat2)
return heading
@@ -424,9 +434,12 @@ class BaseMeta(BaseDataset):
default = available_rasters.loc[name:name]
# set from params, or from default
- self_or_cls.rasters.loc[name, 'resource'] = resource or default.loc[name, 'resource']
- self_or_cls.rasters.loc[name, 'read_function'] = read_function or default.loc[name, 'read_function']
- self_or_cls.rasters.loc[name, 'get_function'] = get_function or default.loc[name, 'get_function']
+ self_or_cls.rasters.loc[name,
+ 'resource'] = resource or default.loc[name, 'resource']
+ self_or_cls.rasters.loc[name,
+ 'read_function'] = read_function or default.loc[name, 'read_function']
+ self_or_cls.rasters.loc[name,
+ 'get_function'] = get_function or default.loc[name, 'get_function']
return
@@ -459,4 +472,4 @@ class BaseMeta(BaseDataset):
minidict = copy.copy(minidict)
new = cls(minidict['name'])
new.__dict__.update(minidict)
- return new
\ No newline at end of file
+ return new
=====================================
src/xsar/radarsat2_meta.py
=====================================
@@ -30,6 +30,8 @@ class RadarSat2Meta(BaseMeta):
@timing
def __init__(self, name):
+ super().__init__()
+
from xradarsat2 import rs2_reader
if ':' in name:
self.dt = rs2_reader(name.split(':')[1])
@@ -95,7 +97,8 @@ class RadarSat2Meta(BaseMeta):
footprint_dict[ll] = [
self.geoloc[ll].isel(line=a, pixel=x).values for a, x in [(0, 0), (0, -1), (-1, -1), (-1, 0)]
]
- corners = list(zip(footprint_dict['longitude'], footprint_dict['latitude']))
+ corners = list(
+ zip(footprint_dict['longitude'], footprint_dict['latitude']))
p = Polygon(corners)
self.geoloc.attrs['footprint'] = p
dic["footprints"] = p
@@ -158,7 +161,7 @@ class RadarSat2Meta(BaseMeta):
info_keys = {
'minimal': [
- #'platform',
+ # 'platform',
'swath', 'product', 'pols']
}
info_keys['all'] = info_keys['minimal'] + ['name', 'start_date', 'stop_date',
@@ -167,8 +170,8 @@ class RadarSat2Meta(BaseMeta):
'pixel_line_m', 'pixel_sample_m',
'approx_transform',
- #'orbit_pass',
- #'platform_heading'
+ # 'orbit_pass',
+ # 'platform_heading'
]
if isinstance(keys, str):
@@ -181,7 +184,8 @@ class RadarSat2Meta(BaseMeta):
elif k in self.manifest_attrs.keys():
res_dict[k] = self.manifest_attrs[k]
else:
- raise KeyError('Unable to find key/attr "%s" in RadarSat2Meta' % k)
+ raise KeyError(
+ 'Unable to find key/attr "%s" in RadarSat2Meta' % k)
return res_dict
@property
@@ -214,7 +218,8 @@ class RadarSat2Meta(BaseMeta):
def _get_time_range(self):
if self.multidataset:
- time_range = [self.manifest_attrs['start_date'], self.manifest_attrs['stop_date']]
+ time_range = [self.manifest_attrs['start_date'],
+ self.manifest_attrs['stop_date']]
else:
time_range = self.orbit_and_attitude.timeStamp
return pd.Interval(left=pd.Timestamp(time_range.values[0]), right=pd.Timestamp(time_range.values[-1]), closed='both')
@@ -275,7 +280,8 @@ class RadarSat2Meta(BaseMeta):
idx_line = np.array(geoloc.line)
for ll in ['longitude', 'latitude']:
- resdict[ll] = RectBivariateSpline(idx_line, idx_sample, np.asarray(geoloc[ll]), kx=1, ky=1)
+ resdict[ll] = RectBivariateSpline(
+ idx_line, idx_sample, np.asarray(geoloc[ll]), kx=1, ky=1)
return resdict
@@ -291,7 +297,8 @@ class RadarSat2Meta(BaseMeta):
if (antenna_pointing, pass_direction) in flipped_cases:
for ds_name in samples_depending_ds:
if 'radar' in ds_name:
- self.dt[ds_name] = self.dt[ds_name].rename({'NbOfNoiseLevelValues': 'pixel'})
+ self.dt[ds_name] = self.dt[ds_name].rename(
+ {'NbOfNoiseLevelValues': 'pixel'})
self.dt[ds_name] = self.dt[ds_name].copy().isel(pixel=slice(None, None, -1))\
.assign_coords(pixel=self.dt[ds_name].ds.pixel)
self.samples_flipped = True
@@ -313,6 +320,3 @@ class RadarSat2Meta(BaseMeta):
.assign_coords(line=self.dt[ds_name].ds.line)
self.lines_flipped = True
return
-
-
-
=====================================
src/xsar/raster_readers.py
=====================================
@@ -193,7 +193,7 @@ def era5_0250_1h(fname, **kwargs):
ds_era5 = xr.open_dataset(fname, chunks={'time': 1})
ds_era5 = ds_era5[['u10', 'v10', 'latitude025', 'longitude025']]
- ds_era5 = ds_era5.sel(time=str(kwargs['date']))
+ ds_era5 = ds_era5.sel(time=str(kwargs['date']), method="nearest")
ds_era5 = ds_era5.drop('time')
ds_era5 = ds_era5.rename(
=====================================
src/xsar/rcm_meta.py
=====================================
@@ -27,6 +27,8 @@ class RcmMeta(BaseMeta):
@timing
def __init__(self, name):
+ super().__init__()
+
from safe_rcm import api
if ':' in name:
self.dt = api.open_rcm(name.split(':')[1])
@@ -68,7 +70,8 @@ class RcmMeta(BaseMeta):
# replace `coefficients` dim by `pixel` in the datatree
var_with_coeff = ['noise_lut', 'lut', 'incidence']
for var in var_with_coeff:
- self.dt[self._xpath[var]] = self.dt[self._xpath[var]].rename({'coefficients': 'pixel'})
+ self.dt[self._xpath[var]] = self.dt[self._xpath[var]].rename(
+ {'coefficients': 'pixel'})
# build pixels coords
self.assign_index(var_with_coeff)
@@ -103,7 +106,7 @@ class RcmMeta(BaseMeta):
antenna_pointing = self.dt['sourceAttributes/radarParameters'].attrs['antennaPointing']
pass_direction = self.dt['sourceAttributes/orbitAndAttitude/orbitInformation'].attrs['passDirection']
flipped_cases = [('Left', 'Ascending'), ('Right', 'Descending')]
- samples_depending_ds = ['geolocationGrid', 'lut', 'noise_lut']
+ samples_depending_ds = ['geolocationGrid']
if (antenna_pointing, pass_direction) in flipped_cases:
for ds_name in samples_depending_ds:
self.dt[self._xpath[ds_name]] = self.dt[self._xpath[ds_name]].copy().isel(pixel=slice(None, None, -1))\
@@ -182,7 +185,8 @@ class RcmMeta(BaseMeta):
return
if not all(item in ['lut', 'noise_lut', 'incidence'] for item in ds_list):
- raise ValueError("Please enter accepted dataset names ('lut', 'noise_lut', 'incidence')")
+ raise ValueError(
+ "Please enter accepted dataset names ('lut', 'noise_lut', 'incidence')")
if 'lut' in ds_list:
_assign_index_lut()
if 'noise_lut' in ds_list:
@@ -205,7 +209,8 @@ class RcmMeta(BaseMeta):
footprint_dict[ll] = [
self.geoloc[ll].isel(line=a, pixel=x).values for a, x in [(0, 0), (0, -1), (-1, -1), (-1, 0)]
]
- corners = list(zip(footprint_dict['longitude'], footprint_dict['latitude']))
+ corners = list(
+ zip(footprint_dict['longitude'], footprint_dict['latitude']))
p = Polygon(corners)
self.geoloc.attrs['footprint'] = p
dic["footprints"] = p
@@ -319,7 +324,8 @@ class RcmMeta(BaseMeta):
def _get_time_range(self):
if self.multidataset:
- time_range = [self.manifest_attrs['start_date'], self.manifest_attrs['stop_date']]
+ time_range = [self.manifest_attrs['start_date'],
+ self.manifest_attrs['stop_date']]
else:
time_range = self.orbit.timeStamp
return pd.Interval(left=pd.Timestamp(time_range.values[0]), right=pd.Timestamp(time_range.values[-1]), closed='both')
@@ -349,7 +355,8 @@ class RcmMeta(BaseMeta):
idx_line = np.array(geoloc.line)
for ll in ['longitude', 'latitude']:
- resdict[ll] = RectBivariateSpline(idx_line, idx_sample, np.asarray(geoloc[ll]), kx=1, ky=1)
+ resdict[ll] = RectBivariateSpline(
+ idx_line, idx_sample, np.asarray(geoloc[ll]), kx=1, ky=1)
return resdict
@@ -357,7 +364,7 @@ class RcmMeta(BaseMeta):
info_keys = {
'minimal': [
- #'platform',
+ # 'platform',
'swath', 'product', 'pols']
}
info_keys['all'] = info_keys['minimal'] + ['name', 'start_date', 'stop_date',
@@ -366,8 +373,8 @@ class RcmMeta(BaseMeta):
'pixel_line_m', 'pixel_sample_m',
'approx_transform',
- #'orbit_pass',
- #'platform_heading'
+ # 'orbit_pass',
+ # 'platform_heading'
]
if isinstance(keys, str):
@@ -381,4 +388,4 @@ class RcmMeta(BaseMeta):
res_dict[k] = self.manifest_attrs[k]
else:
raise KeyError('Unable to find key/attr "%s" in RcmMeta' % k)
- return res_dict
\ No newline at end of file
+ return res_dict
=====================================
src/xsar/sentinel1_meta.py
=====================================
@@ -41,6 +41,8 @@ class Sentinel1Meta(BaseMeta):
@timing
def __init__(self, name):
+ super().__init__()
+
try:
from safe_s1.metadata import Sentinel1Reader
except:
@@ -48,10 +50,10 @@ class Sentinel1Meta(BaseMeta):
self.reader = Sentinel1Reader(name)
if not name.startswith('SENTINEL1_DS:'):
- name = name.rstrip('/') # remove trailing space
+ name = name.rstrip('/') # remove trailing space
name = 'SENTINEL1_DS:%s:' % name
else:
- name = name.replace('/:',':')
+ name = name.replace('/:', ':')
self.name = name
"""Gdal dataset name"""
name_parts = self.name.split(':')
@@ -95,15 +97,20 @@ class Sentinel1Meta(BaseMeta):
self.short_name = self.short_name + self.dsid
if self.reader.files.empty:
try:
- self.subdatasets = gpd.GeoDataFrame(geometry=self.manifest_attrs['footprints'], index=datasets_names)
+ self.subdatasets = gpd.GeoDataFrame(
+ geometry=self.manifest_attrs['footprints'], index=datasets_names)
except ValueError:
# not as many footprints than subdatasets count. (probably TOPS product)
- self._submeta = [ Sentinel1Meta(subds) for subds in datasets_names ]
- sub_footprints = [ submeta.footprint for submeta in self._submeta ]
- self.subdatasets = gpd.GeoDataFrame(geometry=sub_footprints, index=datasets_names)
+ self._submeta = [Sentinel1Meta(subds)
+ for subds in datasets_names]
+ sub_footprints = [
+ submeta.footprint for submeta in self._submeta]
+ self.subdatasets = gpd.GeoDataFrame(
+ geometry=sub_footprints, index=datasets_names)
self.multidataset = True
- self.platform = self.manifest_attrs['mission'] + self.manifest_attrs['satellite']
+ self.platform = self.manifest_attrs['mission'] + \
+ self.manifest_attrs['satellite']
"""Mission platform"""
self._time_range = None
for name, feature in self.__class__._mask_features_raw.items():
@@ -130,7 +137,8 @@ class Sentinel1Meta(BaseMeta):
def _get_time_range(self):
if self.multidataset:
- time_range = [self.manifest_attrs['start_date'], self.manifest_attrs['stop_date']]
+ time_range = [self.manifest_attrs['start_date'],
+ self.manifest_attrs['stop_date']]
else:
time_range = self.reader.time_range
return pd.Interval(left=pd.Timestamp(time_range[0]), right=pd.Timestamp(time_range[-1]), closed='both')
@@ -141,7 +149,7 @@ class Sentinel1Meta(BaseMeta):
'minimal': ['ipf', 'platform', 'swath', 'product', 'pols']
}
info_keys['all'] = info_keys['minimal'] + ['name', 'start_date', 'stop_date', 'footprint', 'coverage',
- 'orbit_pass', 'platform_heading'] # 'pixel_line_m', 'pixel_sample_m',
+ 'orbit_pass', 'platform_heading'] # 'pixel_line_m', 'pixel_sample_m',
if isinstance(keys, str):
keys = info_keys[keys]
@@ -153,7 +161,8 @@ class Sentinel1Meta(BaseMeta):
elif k in self.manifest_attrs.keys():
res_dict[k] = self.manifest_attrs[k]
else:
- raise KeyError('Unable to find key/attr "%s" in Sentinel1Meta' % k)
+ raise KeyError(
+ 'Unable to find key/attr "%s" in Sentinel1Meta' % k)
return res_dict
def annotation_angle(self, line, sample, angle):
@@ -220,7 +229,8 @@ class Sentinel1Meta(BaseMeta):
footprint_dict[ll] = [
self._geoloc[ll].isel(line=a, sample=x).values for a, x in [(0, 0), (0, -1), (-1, -1), (-1, 0)]
]
- corners = list(zip(footprint_dict['longitude'], footprint_dict['latitude']))
+ corners = list(
+ zip(footprint_dict['longitude'], footprint_dict['latitude']))
p = Polygon(corners)
self._geoloc.attrs['footprint'] = p
@@ -231,7 +241,7 @@ class Sentinel1Meta(BaseMeta):
acq_line_meters, _ = haversine(*corners[1], *corners[2])
self._geoloc.attrs['coverage'] = "%dkm * %dkm (line * sample )" % (
acq_line_meters / 1000, acq_sample_meters / 1000)
-
+
# compute self._geoloc.attrs['approx_transform'], from gcps
# we need to convert self._geoloc to a list of GroundControlPoint
def _to_rio_gcp(pt_geoloc):
@@ -249,10 +259,12 @@ class Sentinel1Meta(BaseMeta):
for line in self._geoloc.line for sample in self._geoloc.sample
]
# approx transform, from all gcps (inaccurate)
- self._geoloc.attrs['approx_transform'] = rasterio.transform.from_gcps(gcps)
+ self._geoloc.attrs['approx_transform'] = rasterio.transform.from_gcps(
+ gcps)
for vv in self._geoloc:
if vv in self.xsd_definitions:
- self._geoloc[vv].attrs['definition'] = str(self.xsd_definitions[vv])
+ self._geoloc[vv].attrs['definition'] = str(
+ self.xsd_definitions[vv])
return self._geoloc
@@ -358,7 +370,8 @@ class Sentinel1Meta(BaseMeta):
idx_line = np.array(geoloc.line)
for ll in ['longitude', 'latitude']:
- resdict[ll] = RectBivariateSpline(idx_line, idx_sample, np.asarray(geoloc[ll]), kx=1, ky=1)
+ resdict[ll] = RectBivariateSpline(
+ idx_line, idx_sample, np.asarray(geoloc[ll]), kx=1, ky=1)
return resdict
@@ -434,12 +447,11 @@ class Sentinel1Meta(BaseMeta):
@property
def get_noise_range_raw(self):
return self.dt['noise_range_raw'].to_dataset()
-
+
@property
def get_antenna_pattern(self):
return self.dt['antenna_pattern'].to_dataset()
-
@property
def get_swath_merging(self):
return self.dt['swath_merging'].to_dataset()
View it on GitLab: https://salsa.debian.org/debian-gis-team/xsar/-/commit/86938d76aeb84359e56ac71cf0f8718b855c70c6
--
View it on GitLab: https://salsa.debian.org/debian-gis-team/xsar/-/commit/86938d76aeb84359e56ac71cf0f8718b855c70c6
You're receiving this email because of your account on salsa.debian.org.
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/pkg-grass-devel/attachments/20241024/99076bd6/attachment-0001.htm>
More information about the Pkg-grass-devel
mailing list