[Git][debian-gis-team/xsar][upstream] New upstream version 2023.09.0
Antonio Valentino (@antonio.valentino)
gitlab at salsa.debian.org
Sat Sep 16 13:47:19 BST 2023
Antonio Valentino pushed to branch upstream at Debian GIS Project / xsar
Commits:
f4fc503d by Antonio Valentino at 2023-09-16T12:40:57+00:00
New upstream version 2023.09.0
- - - - -
8 changed files:
- .git_archival.txt
- .github/workflows/conda-feedstock-check.yml
- .github/workflows/install-test.yml
- .github/workflows/publish.yml
- test/test_xsar.py → highlevel-checks/check_s1_xsar_opendataset.py
- src/xsar/sentinel1_dataset.py
- src/xsar/sentinel1_meta.py
- + test/test_raster_readers.py
Changes:
=====================================
.git_archival.txt
=====================================
@@ -1 +1 @@
-ref-names: HEAD -> develop, tag: 2023.08.0.post1
\ No newline at end of file
+ref-names: HEAD -> develop, tag: 2023.09.0
\ No newline at end of file
=====================================
.github/workflows/conda-feedstock-check.yml
=====================================
@@ -14,7 +14,7 @@ jobs:
# from https://michaelheap.com/dynamic-matrix-generation-github-actions/
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- id: get-matrix
run: |
echo "get matrix for event ${{ github.event_name }}"
@@ -37,7 +37,7 @@ jobs:
shell: bash -l {0}
name: python ${{ matrix.python-version }} on ${{ matrix.os }}
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Strip python version
run: cat environment.yml | egrep -vw python > environment-nopython.yml
- uses: conda-incubator/setup-miniconda@v2
=====================================
.github/workflows/install-test.yml
=====================================
@@ -24,7 +24,7 @@ jobs:
# from https://michaelheap.com/dynamic-matrix-generation-github-actions/
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- id: get-matrix
run: |
echo "get matrix for event ${{ github.event_name }}"
@@ -47,7 +47,7 @@ jobs:
shell: bash -l {0}
name: python ${{ matrix.python-version }} on ${{ matrix.os }}
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
# cache conda from https://dev.to/epassaro/caching-anaconda-environments-in-github-actions-5hde
# and https://github.com/conda-incubator/setup-miniconda#caching
=====================================
.github/workflows/publish.yml
=====================================
@@ -11,7 +11,7 @@ jobs:
if: github.repository == 'umr-lops/xsar'
steps:
- name: Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
with:
=====================================
test/test_xsar.py → highlevel-checks/check_s1_xsar_opendataset.py
=====================================
@@ -2,7 +2,6 @@ import xsar
import rasterio
import os
import logging
-import pytest
import dill
import pickle
=====================================
src/xsar/sentinel1_dataset.py
=====================================
@@ -1,4 +1,6 @@
# -*- coding: utf-8 -*-
+import pdb
+
import logging
import warnings
import numpy as np
@@ -117,9 +119,11 @@ class Sentinel1Dataset(BaseDataset):
"""Can't open an multi-dataset. Use `xsar.Sentinel1Meta('%s').subdatasets` to show availables ones""" % self.sar_meta.path
)
# security to prevent using resolution argument with SLC
- if self.sar_meta.product == 'SLC' and 'WV' not in self.sar_meta.swath: # TOPS cases
- resolution = None
- logger.warning('xsar is not handling resolution change for SLC TOPS products. resolution set to `None`')
+ if self.sar_meta.product == 'SLC' and resolution is not None and self.sar_meta.swath in ['IW','EW']:
+ # we tolerate resampling for WV since image width is only 20 km
+ logger.error('xsar is not handling resolution change for SLC TOPS products.')
+ raise Exception('xsar is not handling resolution change for SLC TOPS products.')
+
# build datatree
self.resolution, DN_tmp = self.sar_meta.reader.load_digital_number(resolution=resolution,
resampling=resampling,
@@ -148,7 +152,10 @@ class Sentinel1Dataset(BaseDataset):
ds_noise_range.attrs['history'] = 'noise'
ds_noise_azi = self.sar_meta.get_noise_azi_raw
if self.sar_meta.swath == 'WV':
- ds_noise_azi['noise_lut'] = self._patch_lut(ds_noise_azi[
+ # since WV noise is not defined on azimuth we apply the patch on range noise
+ # ds_noise_azi['noise_lut'] = self._patch_lut(ds_noise_azi[
+ # 'noise_lut']) # patch applied here is distinct to same patch applied on interpolated noise LUT
+ ds_noise_range['noise_lut'] = self._patch_lut(ds_noise_range[
'noise_lut']) # patch applied here is distinct to same patch applied on interpolated noise LUT
ds_noise_azi.attrs['history'] = 'noise'
=====================================
src/xsar/sentinel1_meta.py
=====================================
@@ -45,7 +45,10 @@ class Sentinel1Meta(BaseMeta):
self.reader = Sentinel1Reader(name)
if not name.startswith('SENTINEL1_DS:'):
+ name = name.rstrip('/') # remove trailing slash
name = 'SENTINEL1_DS:%s:' % name
+ else:
+ name = name.replace('/:',':')
self.name = name
"""Gdal dataset name"""
name_parts = self.name.split(':')
=====================================
test/test_raster_readers.py
=====================================
@@ -0,0 +1,44 @@
+from xsar.raster_readers import _to_lon180
+import pytest
+import xarray as xr
+import copy
+import numpy as np
+
+val_lon_180s = [[120.,121.,122.,123.],[120.6,121.7,122.8,123.9]]
+lat = xr.DataArray(np.array([[40.,41.,42.,43.],[40.5,41.5,42.5,43.5]]), dims=('y','x'),
+ coords={'x':[1,2,3,4], 'y':[5,6]})
+lon = xr.DataArray(np.array(val_lon_180s), dims=('y','x'),
+ coords={'x':[1,2,3,4], 'y':[5,6]})
+lon_acheval = xr.DataArray(np.array([[179.,179.2,179.5,179.9],[-179.4,-179.7,-178.8,-179.1]]), dims=('y','x'),
+ coords={'x':[1,2,3,4], 'y':[5,6]})
+lon_0_360 = xr.DataArray(np.array(val_lon_180s)+200., dims=('y','x'),
+ coords={'x':[1,2,3,4], 'y':[5,6]})
+lon_0_360_treated = xr.DataArray(np.array(val_lon_180s)+200.-360., dims=('y','x'),
+ coords={'x':[1,2,3,4], 'y':[5,6]})
+# latwithNan = copy.copy(lat)
+ds = xr.Dataset()
+ds['lon'] = lon
+ds['lat'] = lat
+
+ds_on_antimeridian = xr.Dataset()
+ds_on_antimeridian['lon'] = lon_acheval
+ds_on_antimeridian['lat'] = lat
+
+ds_0_360 = xr.Dataset()
+ds_0_360['lon'] = lon_0_360
+ds_0_360['lat'] = lat
+
+ds_0_360_expected = xr.Dataset()
+ds_0_360_expected['lon'] = lon_0_360_treated
+ds_0_360_expected['lat'] = lat
+@pytest.mark.parametrize(
+ ["ds", "expected"],
+ (
+ pytest.param(ds, ds, id="180_180"),
+ pytest.param(ds_0_360, ds_on_antimeridian, id="0_360"),
+ pytest.param(ds_on_antimeridian, ds_0_360_expected, id="180_180_a_cheval"),
+ ),
+)
+def test_to_lon180(ds, expected):
+ actual_ds = _to_lon180(ds)
+ assert actual_ds == expected
\ No newline at end of file
View it on GitLab: https://salsa.debian.org/debian-gis-team/xsar/-/commit/f4fc503d95ab6102eaf75f66bb63026097ea4bc6
--
View it on GitLab: https://salsa.debian.org/debian-gis-team/xsar/-/commit/f4fc503d95ab6102eaf75f66bb63026097ea4bc6
You're receiving this email because of your account on salsa.debian.org.
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/pkg-grass-devel/attachments/20230916/6f2e26b2/attachment-0001.htm>
More information about the Pkg-grass-devel
mailing list