[Git][debian-gis-team/asf-search][master] 4 commits: New upstream version 7.0.6
Antonio Valentino (@antonio.valentino)
gitlab at salsa.debian.org
Thu Mar 7 07:36:44 GMT 2024
Antonio Valentino pushed to branch master at Debian GIS Project / asf-search
Commits:
4a35ad7c by Antonio Valentino at 2024-03-07T07:30:14+00:00
New upstream version 7.0.6
- - - - -
f11ace0a by Antonio Valentino at 2024-03-07T07:30:18+00:00
Update upstream source from tag 'upstream/7.0.6'
Update to upstream version '7.0.6'
with Debian dir 2e88c476a659379c6d4eacb1003a4b195f1d54ce
- - - - -
d2bb9881 by Antonio Valentino at 2024-03-07T07:30:50+00:00
New upstream release
- - - - -
3f22e360 by Antonio Valentino at 2024-03-07T07:31:34+00:00
Set distribution to unstable
- - - - -
18 changed files:
- CHANGELOG.md
- asf_search/ASFProduct.py
- asf_search/ASFSearchOptions/validators.py
- asf_search/CMR/datasets.py
- asf_search/CMR/translate.py
- + asf_search/Products/NISARProduct.py
- asf_search/Products/OPERAS1Product.py
- asf_search/Products/S1BurstProduct.py
- asf_search/Products/S1Product.py
- asf_search/Products/__init__.py
- asf_search/constants/DATASET.py
- asf_search/constants/PLATFORM.py
- asf_search/search/search_generator.py
- debian/changelog
- tests/ASFSearchResults/test_ASFSearchResults.py
- tests/Search/test_search.py
- tests/yml_tests/test_ASFSearchOptions.yml
- tests/yml_tests/test_search.yml
Changes:
=====================================
CHANGELOG.md
=====================================
@@ -25,6 +25,17 @@ and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
-
-->
+------
+## [v7.0.6](https://github.com/asfadmin/Discovery-asf_search/compare/v7.0.5...v7.0.6)
+### Changed
+- timestamps while building queries and reading results from CMR now use UTC if no timezone is provided
+- Changed what collections the `NISAR` dataset and platform collections lists are pointed at.
+
+------
+## [v7.0.5](https://github.com/asfadmin/Discovery-asf_search/compare/v7.0.4...v7.0.5)
+### Added
+- Adds basic NISAR dataset search and product functionality for test data
+
------
## [v7.0.4](https://github.com/asfadmin/Discovery-asf_search/compare/v7.0.3...v7.0.4)
### Changed
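As a quick illustration of the UTC default mentioned in the v7.0.6 entry, a minimal standalone sketch (standard library only; the real normalization lives in the parse_date/try_parse_date changes further down in this diff):

    from datetime import datetime, timezone

    def to_utc_iso(value: datetime) -> str:
        # Naive timestamps are treated as already being UTC, mirroring the new behaviour.
        if value.tzinfo is None:
            value = value.replace(tzinfo=timezone.utc)
        return value.strftime('%Y-%m-%dT%H:%M:%SZ')

    print(to_utc_iso(datetime(2022, 1, 1)))  # 2022-01-01T00:00:00Z
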
=====================================
asf_search/ASFProduct.py
=====================================
@@ -10,6 +10,7 @@ from asf_search import ASFSession, ASFSearchResults
from asf_search.ASFSearchOptions import ASFSearchOptions
from asf_search.download import download_url
from asf_search.download.file_download_type import FileDownloadType
+from asf_search.CMR.translate import try_parse_date
from asf_search.CMR.translate import try_parse_float, try_parse_int, try_round_float
@@ -44,7 +45,7 @@ class ASFProduct:
# min viable product
'centerLat': {'path': ['AdditionalAttributes', ('Name', 'CENTER_LAT'), 'Values', 0], 'cast': try_parse_float},
'centerLon': {'path': ['AdditionalAttributes', ('Name', 'CENTER_LON'), 'Values', 0], 'cast': try_parse_float},
- 'stopTime': {'path': ['TemporalExtent', 'RangeDateTime', 'EndingDateTime']}, # primary search results sort key
+ 'stopTime': {'path': ['TemporalExtent', 'RangeDateTime', 'EndingDateTime'], 'cast': try_parse_date}, # primary search results sort key
'fileID': {'path': ['GranuleUR']}, # secondary search results sort key
'flightDirection': {'path': [ 'AdditionalAttributes', ('Name', 'ASCENDING_DESCENDING'), 'Values', 0]},
'pathNumber': {'path': ['AdditionalAttributes', ('Name', 'PATH_NUMBER'), 'Values', 0], 'cast': try_parse_int},
@@ -52,7 +53,7 @@ class ASFProduct:
# commonly used
'url': {'path': [ 'RelatedUrls', ('Type', 'GET DATA'), 'URL']},
- 'startTime': {'path': [ 'TemporalExtent', 'RangeDateTime', 'BeginningDateTime']},
+ 'startTime': {'path': [ 'TemporalExtent', 'RangeDateTime', 'BeginningDateTime'], 'cast': try_parse_date},
'sceneName': {'path': [ 'DataGranule', 'Identifiers', ('IdentifierType', 'ProducerGranuleId'), 'Identifier']},
'browse': {'path': ['RelatedUrls', ('Type', [('GET RELATED VISUALIZATION', 'URL')])]},
'platform': {'path': [ 'AdditionalAttributes', ('Name', 'ASF_PLATFORM'), 'Values', 0]},
@@ -62,7 +63,7 @@ class ASFProduct:
'granuleType': {'path': [ 'AdditionalAttributes', ('Name', 'GRANULE_TYPE'), 'Values', 0]},
'orbit': {'path': [ 'OrbitCalculatedSpatialDomains', 0, 'OrbitNumber'], 'cast': try_parse_int},
'polarization': {'path': [ 'AdditionalAttributes', ('Name', 'POLARIZATION'), 'Values', 0]},
- 'processingDate': {'path': [ 'DataGranule', 'ProductionDateTime'], },
+ 'processingDate': {'path': [ 'DataGranule', 'ProductionDateTime'], 'cast': try_parse_date},
'sensor': {'path': [ 'Platforms', 0, 'Instruments', 0, 'ShortName'], },
}
"""
=====================================
asf_search/ASFSearchOptions/validators.py
=====================================
@@ -1,5 +1,5 @@
import dateparser
-import datetime
+from datetime import datetime, timezone
import requests
from typing import Union, Tuple, TypeVar, Callable, List, Type, Sequence
@@ -7,6 +7,7 @@ from typing import Union, Tuple, TypeVar, Callable, List, Type, Sequence
import math
from shapely import wkt, errors
+
number = TypeVar('number', int, float)
def parse_string(value: str) -> str:
@@ -40,21 +41,27 @@ def parse_float(value: float) -> float:
return value
-def parse_date(value: Union[str, datetime.datetime]) -> str:
+def parse_date(value: Union[str, datetime]) -> Union[datetime, str]:
"""
Base date validator
:param value: String or datetime object to be validated
:return: String passed in, if it can successfully convert to Datetime.
(Need to keep strings like "today" w/out converting them, but throw on "asdf")
"""
- if isinstance(value, datetime.datetime):
- return value
+ if isinstance(value, datetime):
+ return _to_utc(value)
+
date = dateparser.parse(str(value))
if date is None:
raise ValueError(f"Invalid date: '{value}'.")
- return str(value)
-
+
+ return _to_utc(date).strftime('%Y-%m-%dT%H:%M:%SZ')
+def _to_utc(date: datetime):
+ if date.tzinfo is None:
+ date = date.replace(tzinfo=timezone.utc)
+ return date
+
def parse_range(value: Tuple[number, number], h: Callable[[number], number]) -> Tuple[number, number]:
"""
Base range validator. For our purposes, a range is a tuple with exactly two numeric elements (a, b), requiring a <= b.
@@ -79,7 +86,7 @@ def parse_range(value: Tuple[number, number], h: Callable[[number], number]) ->
# Parse and validate a date range: "1991-10-01T00:00:00Z,1991-10-02T00:00:00Z"
-def parse_date_range(value: Tuple[Union[str, datetime.datetime], Union[str, datetime.datetime]]) -> Tuple[datetime.datetime, datetime.datetime]:
+def parse_date_range(value: Tuple[Union[str, datetime], Union[str, datetime]]) -> Tuple[datetime, datetime]:
return parse_range(value, parse_date)
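A short usage sketch of the validator after this change, assuming the 7.0.6 behaviour shown in the hunks above (strings are normalized to a 'Z'-suffixed ISO string, naive datetime objects gain a UTC tzinfo):

    from datetime import datetime
    from asf_search.ASFSearchOptions.validators import parse_date

    print(parse_date('2022-01-01'))                      # 2022-01-01T00:00:00Z
    print(parse_date(datetime(2022, 1, 1)).isoformat())  # 2022-01-01T00:00:00+00:00

This matches the updated expectations in tests/yml_tests/test_ASFSearchOptions.yml near the end of this diff.
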
=====================================
asf_search/CMR/datasets.py
=====================================
@@ -2,6 +2,208 @@ from typing import List
dataset_collections = {
+ "NISAR": {
+ "NISAR_NEN_RRST_BETA_V1": [
+ "C1261815181-ASFDEV",
+ "C1261815288-ASF",
+ "C2850220296-ASF"
+ ],
+ "NISAR_NEN_RRST_PROVISIONAL_V1": [
+ "C1261832381-ASFDEV",
+ "C1261832657-ASF",
+ "C2853068083-ASF"
+ ],
+ "NISAR_NEN_RRST_V1": [
+ "C1256533420-ASFDEV",
+ "C1257349121-ASF",
+ "C2727902012-ASF"
+ ],
+ "NISAR_L0A_RRST_BETA_V1": [
+ "C1261813453-ASFDEV",
+ "C1261815147-ASF",
+ "C2850223384-ASF"
+ ],
+ "NISAR_L0A_RRST_PROVISIONAL_V1": [
+ "C1261832466-ASFDEV",
+ "C1261832658-ASF",
+ "C2853086824-ASF"
+ ],
+ "NISAR_L0A_RRST_V1": [
+ "C1256524081-ASFDEV",
+ "C1257349120-ASF",
+ "C2727901263-ASF"
+ ],
+ "NISAR_L0B_RRSD_BETA_V1": [
+ "C1261815274-ASFDEV",
+ "C1261815289-ASF",
+ "C2850224301-ASF"
+ ],
+ "NISAR_L0B_RRSD_PROVISIONAL_V1": [
+ "C1261832497-ASFDEV",
+ "C1261832659-ASF",
+ "C2853089814-ASF"
+ ],
+ "NISAR_L0B_RRSD_V1": [
+ "C1256358262-ASFDEV",
+ "C1257349115-ASF",
+ "C2727901639-ASF"
+ ],
+ "NISAR_L0B_CRSD_BETA_V1": [
+ "C1261815276-ASFDEV",
+ "C1261815301-ASF",
+ "C2850225137-ASF"
+ ],
+ "NISAR_L0B_CRSD_PROVISIONAL_V1": [
+ "C1261832632-ASFDEV",
+ "C1261832671-ASF",
+ "C2853091612-ASF"
+ ],
+ "NISAR_L0B_CRSD_V1": [
+ "C1256358463-ASFDEV",
+ "C1257349114-ASF",
+ "C2727901523-ASF"
+ ],
+ "NISAR_L1_RSLC_BETA_V1": [
+ "C1261813489-ASFDEV",
+ "C1261815148-ASF",
+ "C2850225585-ASF"
+ ],
+ "NISAR_L1_RSLC_PROVISIONAL_V1": [
+ "C1261832868-ASFDEV",
+ "C1261833052-ASF",
+ "C2853145197-ASF"
+ ],
+ "NISAR_L1_RSLC_V1": [
+ "C1256363301-ASFDEV",
+ "C1257349109-ASF",
+ "C2727900439-ASF"
+ ],
+ "NISAR_L1_RIFG_BETA_V1": [
+ "C1261819086-ASFDEV",
+ "C1261819120-ASF",
+ "C2850234202-ASF"
+ ],
+ "NISAR_L1_RIFG_PROVISIONAL_V1": [
+ "C1261832940-ASFDEV",
+ "C1261833063-ASF",
+ "C2853147928-ASF"
+ ],
+ "NISAR_L1_RIFG_V1": [
+ "C1256381769-ASFDEV",
+ "C1257349108-ASF",
+ "C2723110181-ASF"
+ ],
+ "NISAR_L1_RUNW_BETA_V1": [
+ "C1261819098-ASFDEV",
+ "C1261819121-ASF",
+ "C2850235455-ASF"
+ ],
+ "NISAR_L1_RUNW_PROVISIONAL_V1": [
+ "C1261832990-ASFDEV",
+ "C1261833064-ASF",
+ "C2853153429-ASF"
+ ],
+ "NISAR_L1_RUNW_V1": [
+ "C1256420738-ASFDEV",
+ "C1257349107-ASF",
+ "C2727900827-ASF"
+ ],
+ "NISAR_L1_ROFF_BETA_V1": [
+ "C1261819110-ASFDEV",
+ "C1261819145-ASF",
+ "C2850237619-ASF"
+ ],
+ "NISAR_L1_ROFF_PROVISIONAL_V1": [
+ "C1261832993-ASFDEV",
+ "C1261833076-ASF",
+ "C2853156054-ASF"
+ ],
+ "NISAR_L1_ROFF_V1": [
+ "C1256411631-ASFDEV",
+ "C1257349103-ASF",
+ "C2727900080-ASF"
+ ],
+ "NISAR_L2_GSLC_BETA_V1": [
+ "C1261819167-ASFDEV",
+ "C1261819258-ASF",
+ "C2850259510-ASF"
+ ],
+ "NISAR_L2_GSLC_PROVISIONAL_V1": [
+ "C1261833024-ASFDEV",
+ "C1261833127-ASF",
+ "C2854332392-ASF"
+ ],
+ "NISAR_L2_GSLC_V1": [
+ "C1256413628-ASFDEV",
+ "C1257349102-ASF",
+ "C2727896667-ASF"
+ ],
+ "NISAR_L2_GUNW_BETA_V1": [
+ "C1261819168-ASFDEV",
+ "C1261819270-ASF",
+ "C2850261892-ASF"
+ ],
+ "NISAR_L2_GUNW_PROVISIONAL_V1": [
+ "C1261833025-ASFDEV",
+ "C1261846741-ASF",
+ "C2854335566-ASF"
+ ],
+ "NISAR_L2_GUNW_V1": [
+ "C1256432264-ASFDEV",
+ "C1257349096-ASF",
+ "C2727897718-ASF"
+ ],
+ "NISAR_L2_GCOV_BETA_V1": [
+ "C1261819211-ASFDEV",
+ "C1261819275-ASF",
+ "C2850262927-ASF"
+ ],
+ "NISAR_L2_GCOV_PROVISIONAL_V1": [
+ "C1261833026-ASFDEV",
+ "C1261846880-ASF",
+ "C2854338529-ASF"
+ ],
+ "NISAR_L2_GCOV_V1": [
+ "C1256477304-ASFDEV",
+ "C1257349095-ASF",
+ "C2727896018-ASF"
+ ],
+ "NISAR_L2_GOFF_BETA_V1": [
+ "C1261819233-ASFDEV",
+ "C1261819281-ASF",
+ "C2850263910-ASF"
+ ],
+ "NISAR_L2_GOFF_PROVISIONAL_V1": [
+ "C1261833027-ASFDEV",
+ "C1261846994-ASF",
+ "C2854341702-ASF"
+ ],
+ "NISAR_L2_GOFF_V1": [
+ "C1256479237-ASFDEV",
+ "C1257349094-ASF",
+ "C2727896460-ASF"
+ ],
+ "NISAR_L3_SME2_BETA_V1": [
+ "C1261819245-ASFDEV",
+ "C1261819282-ASF",
+ "C2850265000-ASF"
+ ],
+ "NISAR_L3_SME2_PROVISIONAL_V1": [
+ "C1261833050-ASFDEV",
+ "C1261847095-ASF",
+ "C2854344945-ASF"
+ ],
+ "NISAR_L3_SME2_V1": [
+ "C1256568692-ASFDEV",
+ "C1257349093-ASF",
+ "C2727894546-ASF"
+ ],
+ "NISAR_CUSTOM_PROVISIONAL_V1": [
+ "C1262134528-ASFDEV",
+ "C1262135006-ASF",
+ "C2874824964-ASF"
+ ],
+ },
"SENTINEL-1": {
"SENTINEL-1A_SLC": ["C1214470488-ASF", "C1205428742-ASF", "C1234413245-ASFDEV"],
"SENTINEL-1B_SLC": ["C1327985661-ASF", "C1216244348-ASF", "C1234413263-ASFDEV"],
@@ -230,7 +432,7 @@ dataset_collections = {
"SENTINEL-1_INTERFEROGRAMS_UNWRAPPED_PHASE": [
"C1595765183-ASF",
"C1225776659-ASF",
- ]
+ ],
},
"SMAP": {
"SPL1A_RO_METADATA_003": ["C1243122884-ASF", "C1233103964-ASF"],
@@ -735,6 +937,131 @@ collections_per_platform = {
"C1210599503-ASF",
"C1210599673-ASF",
],
+ "NISAR": [
+ # UAT ASFDEV
+ "C1261815181-ASFDEV",
+ "C1261832381-ASFDEV",
+ "C1256533420-ASFDEV",
+ "C1261813453-ASFDEV",
+ "C1261832466-ASFDEV",
+ "C1256524081-ASFDEV",
+ "C1261815274-ASFDEV",
+ "C1261832497-ASFDEV",
+ "C1256358262-ASFDEV",
+ "C1261815276-ASFDEV",
+ "C1261832632-ASFDEV",
+ "C1256358463-ASFDEV",
+ "C1261813489-ASFDEV",
+ "C1261832868-ASFDEV",
+ "C1256363301-ASFDEV",
+ "C1261819086-ASFDEV",
+ "C1261832940-ASFDEV",
+ "C1256381769-ASFDEV",
+ "C1261819098-ASFDEV",
+ "C1261832990-ASFDEV",
+ "C1256420738-ASFDEV",
+ "C1261819110-ASFDEV",
+ "C1261832993-ASFDEV",
+ "C1256411631-ASFDEV",
+ "C1261819167-ASFDEV",
+ "C1261833024-ASFDEV",
+ "C1256413628-ASFDEV",
+ "C1261819168-ASFDEV",
+ "C1261833025-ASFDEV",
+ "C1256432264-ASFDEV",
+ "C1261819211-ASFDEV",
+ "C1261833026-ASFDEV",
+ "C1256477304-ASFDEV",
+ "C1261819233-ASFDEV",
+ "C1261833027-ASFDEV",
+ "C1256479237-ASFDEV",
+ "C1261819245-ASFDEV",
+ "C1261833050-ASFDEV",
+ "C1256568692-ASFDEV",
+ "C1262134528-ASFDEV",
+ # UAT
+ "C1261815288-ASF",
+ "C1261832657-ASF",
+ "C1257349121-ASF",
+ "C1261815147-ASF",
+ "C1261832658-ASF",
+ "C1257349120-ASF",
+ "C1261815289-ASF",
+ "C1261832659-ASF",
+ "C1257349115-ASF",
+ "C1261815301-ASF",
+ "C1261832671-ASF",
+ "C1257349114-ASF",
+ "C1261815148-ASF",
+ "C1261833052-ASF",
+ "C1257349109-ASF",
+ "C1261819120-ASF",
+ "C1261833063-ASF",
+ "C1257349108-ASF",
+ "C1261819121-ASF",
+ "C1261833064-ASF",
+ "C1257349107-ASF",
+ "C1261819145-ASF",
+ "C1261833076-ASF",
+ "C1257349103-ASF",
+ "C1261819258-ASF",
+ "C1261833127-ASF",
+ "C1257349102-ASF",
+ "C1261819270-ASF",
+ "C1261846741-ASF",
+ "C1257349096-ASF",
+ "C1261819275-ASF",
+ "C1261846880-ASF",
+ "C1257349095-ASF",
+ "C1261819281-ASF",
+ "C1261846994-ASF",
+ "C1257349094-ASF",
+ "C1261819282-ASF",
+ "C1261847095-ASF",
+ "C1257349093-ASF",
+ "C1262135006-ASF",
+ # PROD
+ "C2850220296-ASF",
+ "C2853068083-ASF",
+ "C2727902012-ASF",
+ "C2850223384-ASF",
+ "C2853086824-ASF",
+ "C2727901263-ASF",
+ "C2850224301-ASF",
+ "C2853089814-ASF",
+ "C2727901639-ASF",
+ "C2850225137-ASF",
+ "C2853091612-ASF",
+ "C2727901523-ASF",
+ "C2850225585-ASF",
+ "C2853145197-ASF",
+ "C2727900439-ASF",
+ "C2850234202-ASF",
+ "C2853147928-ASF",
+ "C2723110181-ASF",
+ "C2850235455-ASF",
+ "C2853153429-ASF",
+ "C2727900827-ASF",
+ "C2850237619-ASF",
+ "C2853156054-ASF",
+ "C2727900080-ASF",
+ "C2850259510-ASF",
+ "C2854332392-ASF",
+ "C2727896667-ASF",
+ "C2850261892-ASF",
+ "C2854335566-ASF",
+ "C2727897718-ASF",
+ "C2850262927-ASF",
+ "C2854338529-ASF",
+ "C2727896018-ASF",
+ "C2850263910-ASF",
+ "C2854341702-ASF",
+ "C2727896460-ASF",
+ "C2850265000-ASF",
+ "C2854344945-ASF",
+ "C2727894546-ASF",
+ "C2874824964-ASF"
+ ],
}
@@ -896,14 +1223,8 @@ collections_by_processing_level = {
"C1595765183-ASF",
"C1225776659-ASF",
],
- "CSLC-STATIC": [
- "C1259982010-ASF",
- "C2795135668-ASF"
- ],
- "RTC-STATIC": [
- "C1259981910-ASF",
- "C2795135174-ASF"
- ],
+ "CSLC-STATIC": ["C1259982010-ASF", "C2795135668-ASF"],
+ "RTC-STATIC": ["C1259981910-ASF", "C2795135174-ASF"],
"GRD": [
"C1661710583-ASF",
"C1661710586-ASF",
@@ -1077,13 +1398,14 @@ collections_by_processing_level = {
#################### Helper Methods ####################
+
def get_concept_id_alias(param_list: List[str], collections_dict: dict) -> List[str]:
"""
param: param_list (List[str]): list of search values to alias
param: collections_dict (dict): The search value to concept-id dictionary to read from
returns List[str]: Returns a list of concept-ids that correspond to the given list of search values
- If any of the search values are not keys in the collections_dict, this will instead returns an empty list.
+ If any of the search values are not keys in the collections_dict, this will instead returns an empty list.
"""
concept_id_aliases = []
for param in param_list:
@@ -1091,9 +1413,10 @@ def get_concept_id_alias(param_list: List[str], collections_dict: dict) -> List[
concept_id_aliases.extend(alias)
else:
return []
-
+
return concept_id_aliases
+
def get_dataset_concept_ids(datasets: List[str]) -> List[str]:
"""
Returns concept-ids for provided dataset(s)
@@ -1108,6 +1431,8 @@ def get_dataset_concept_ids(datasets: List[str]) -> List[str]:
for concept_ids in collections_by_short_name.values():
output.extend(concept_ids)
else:
- raise ValueError(f'Could not find dataset named "{dataset}" provided for dataset keyword.')
-
- return output
\ No newline at end of file
+ raise ValueError(
+ f'Could not find dataset named "{dataset}" provided for dataset keyword.'
+ )
+
+ return output
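A brief usage sketch of the two helpers, based on the bodies shown above (the keys come from the dataset_collections table earlier in this file):

    from asf_search.CMR.datasets import (
        dataset_collections,
        get_concept_id_alias,
        get_dataset_concept_ids,
    )

    # All concept-ids registered under the new 'NISAR' dataset, across every short name.
    nisar_ids = get_dataset_concept_ids(['NISAR'])

    # Alias short names to concept-ids; any unknown key makes the helper return [].
    slc_ids = get_concept_id_alias(['SENTINEL-1A_SLC'], dataset_collections['SENTINEL-1'])
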
=====================================
asf_search/CMR/translate.py
=====================================
@@ -1,4 +1,4 @@
-from datetime import datetime
+from datetime import datetime, timezone
from typing import Any, Dict, List, Optional
from asf_search.ASFSearchOptions import ASFSearchOptions
from asf_search.CMR.datasets import get_concept_id_alias
@@ -8,8 +8,8 @@ from shapely import wkt
from shapely.geometry import Polygon
from shapely.geometry.base import BaseGeometry
from .field_map import field_map
-from .datasets import dataset_collections, collections_per_platform
-
+from .datasets import collections_per_platform
+import dateparser
import logging
@@ -147,6 +147,21 @@ def try_parse_float(value: str) -> Optional[float]:
return float(value)
+def try_parse_date(value: str) -> Optional[str]:
+ if value is None:
+ return None
+
+ date = dateparser.parse(value)
+
+ if date is None:
+ return value
+
+ if date.tzinfo is None:
+ date = date.replace(tzinfo=timezone.utc)
+ # Turn all inputs into a consistant format:
+
+ return date.strftime('%Y-%m-%dT%H:%M:%SZ')
+
def fix_date(fixed_params: Dict[str, Any]):
if 'start' in fixed_params or 'end' in fixed_params or 'season' in fixed_params:
fixed_params["start"] = fixed_params["start"] if "start" in fixed_params else "1978-01-01T00:00:00Z"
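For reference, a few example inputs and outputs for the new try_parse_date helper, assuming dateparser's default settings:

    from asf_search.CMR.translate import try_parse_date

    try_parse_date('2023-12-01 00:00:00')        # -> '2023-12-01T00:00:00Z' (naive, assumed UTC)
    try_parse_date('2023-12-01T00:00:00+00:00')  # -> '2023-12-01T00:00:00Z'
    try_parse_date(None)                         # -> None
    try_parse_date('not a date')                 # -> 'not a date' (unparseable, returned as-is)
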
=====================================
asf_search/Products/NISARProduct.py
=====================================
@@ -0,0 +1,69 @@
+from typing import Dict, Union
+from asf_search import ASFSearchOptions, ASFSession, ASFStackableProduct
+from asf_search.CMR.translate import try_parse_float, try_parse_int, try_round_float
+from asf_search.constants import PRODUCT_TYPE
+
+
+class NISARProduct(ASFStackableProduct):
+ """
+ Used for NISAR dataset products
+
+ ASF Dataset Documentation Page: https://asf.alaska.edu/nisar/
+ """
+ _base_properties = {
+ 'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion']}
+ }
+
+ def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
+ super().__init__(args, session)
+
+ accessUrls = []
+
+ if related_data_urls := self.umm_get(self.umm, 'RelatedUrls', ('Type', [('GET DATA', 'URL')]), 0):
+ accessUrls.extend(related_data_urls)
+ if related_metadata_urls := self.umm_get(self.umm, 'RelatedUrls', ('Type', [('EXTENDED METADATA', 'URL')]), 0):
+ accessUrls.extend(related_metadata_urls)
+
+ self.properties['additionalUrls'] = sorted([
+ url for url in list(set(accessUrls)) if not url.endswith('.md5')
+ and not url.startswith('s3://')
+ and 's3credentials' not in url
+ and not url.endswith('.png')
+ and url != self.properties['url']
+ ])
+
+ if self.properties.get('groupID') is None:
+ self.properties['groupID'] = self.properties['sceneName']
+
+ @staticmethod
+ def get_default_baseline_product_type() -> Union[str, None]:
+ """
+ Returns the product type to search for when building a baseline stack.
+ """
+ return None
+
+ def is_valid_reference(self):
+ return False
+
+ def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions:
+ """
+ Build search options that can be used to find an insar stack for this product
+
+ :return: ASFSearchOptions describing appropriate options for building a stack from this product
+ """
+ return None
+
+ @staticmethod
+ def get_property_paths() -> Dict:
+ return {
+ **ASFStackableProduct.get_property_paths(),
+ **NISARProduct._base_properties
+ }
+
+ def get_sort_keys(self):
+ keys = super().get_sort_keys()
+
+ if keys[0] is None:
+ return (self.properties.get('processingDate', ''), keys[1])
+
+ return keys
=====================================
asf_search/Products/OPERAS1Product.py
=====================================
@@ -1,5 +1,6 @@
-from typing import Dict, Optional
+from typing import Dict
from asf_search import ASFSearchOptions, ASFSession
+from asf_search.CMR.translate import try_parse_date
from asf_search.Products import S1Product
@@ -12,7 +13,7 @@ class OPERAS1Product(S1Product):
'centerLon': {'path': []},
'frameNumber': {'path': []},
'operaBurstID': {'path': ['AdditionalAttributes', ('Name', 'OPERA_BURST_ID'), 'Values', 0]},
- 'validityStartDate': {'path': ['TemporalExtent', 'SingleDateTime']},
+ 'validityStartDate': {'path': ['TemporalExtent', 'SingleDateTime'], 'cast': try_parse_date},
'bytes': {'path': ['DataGranule', 'ArchiveAndDistributionInformation']},
'subswath': {'path': ['AdditionalAttributes', ('Name', 'SUBSWATH_NAME'), 'Values', 0]},
'polarization': {'path': ['AdditionalAttributes', ('Name', 'POLARIZATION'), 'Values']} # dual polarization is in list rather than a 'VV+VH' style format
=====================================
asf_search/Products/S1BurstProduct.py
=====================================
@@ -2,6 +2,7 @@ import copy
from typing import Dict, Union
from asf_search import ASFSearchOptions, ASFSession
from asf_search.Products import S1Product
+from asf_search.CMR.translate import try_parse_date
from asf_search.CMR.translate import try_parse_int
from asf_search.constants import PRODUCT_TYPE
@@ -24,7 +25,7 @@ class S1BurstProduct(S1Product):
'burstIndex': {'path': ['AdditionalAttributes', ('Name', 'BURST_INDEX'), 'Values', 0], 'cast': try_parse_int},
'samplesPerBurst': {'path': ['AdditionalAttributes', ('Name', 'SAMPLES_PER_BURST'), 'Values', 0], 'cast': try_parse_int},
'subswath': {'path': ['AdditionalAttributes', ('Name', 'SUBSWATH_NAME'), 'Values', 0]},
- 'azimuthTime': {'path': ['AdditionalAttributes', ('Name', 'AZIMUTH_TIME'), 'Values', 0]},
+ 'azimuthTime': {'path': ['AdditionalAttributes', ('Name', 'AZIMUTH_TIME'), 'Values', 0], 'cast': try_parse_date},
'azimuthAnxTime': {'path': ['AdditionalAttributes', ('Name', 'AZIMUTH_ANX_TIME'), 'Values', 0]},
}
=====================================
asf_search/Products/S1Product.py
=====================================
@@ -1,6 +1,7 @@
import copy
from typing import Dict, List, Optional, Tuple
from asf_search import ASFSearchOptions, ASFSession, ASFStackableProduct
+from asf_search.CMR.translate import try_parse_date
from asf_search.CMR.translate import try_parse_int
from asf_search.constants import PLATFORM
from asf_search.constants import PRODUCT_TYPE
@@ -82,7 +83,7 @@ class S1Product(ASFStackableProduct):
if timestamp is None:
return None
- return timestamp if timestamp.endswith('Z') else f'{timestamp}Z'
+ return try_parse_date(timestamp)
def _parse_state_vector(self, state_vector: str) -> Tuple[Optional[List], Optional[str]]:
if state_vector is None:
=====================================
asf_search/Products/__init__.py
=====================================
@@ -11,3 +11,4 @@ from .SMAPProduct import SMAPProduct
from .S1BurstProduct import S1BurstProduct
from .OPERAS1Product import OPERAS1Product
from .ARIAS1GUNWProduct import ARIAS1GUNWProduct
+from .NISARProduct import NISARProduct
\ No newline at end of file
=====================================
asf_search/constants/DATASET.py
=====================================
@@ -12,4 +12,5 @@ RADARSAT_1 = 'RADARSAT-1'
ERS = 'ERS'
JERS_1 = 'JERS-1'
AIRSAR = 'AIRSAR'
-SEASAT = 'SEASAT'
\ No newline at end of file
+SEASAT = 'SEASAT'
+NISAR = 'NISAR'
=====================================
asf_search/constants/PLATFORM.py
=====================================
@@ -12,3 +12,4 @@ AIRSAR = 'AIRSAR'
SEASAT = 'SEASAT 1'
SMAP = 'SMAP'
UAVSAR = 'UAVSAR'
+NISAR = 'NISAR'
=====================================
asf_search/search/search_generator.py
=====================================
@@ -315,5 +315,7 @@ dataset_to_product_types = {
'DC-8': ASFProductType.AIRSARProduct,
'SEASAT': ASFProductType.SEASATProduct,
- 'SEASAT 1': ASFProductType.SEASATProduct
+ 'SEASAT 1': ASFProductType.SEASATProduct,
+
+ 'NISAR': ASFProductType.NISARProduct
}
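Putting the new constants and mapping together, a hedged end-to-end sketch of a NISAR search (the dataset keyword routes through get_dataset_concept_ids and the mapping above; 'NISAR' is the value of the new DATASET.NISAR constant, and the results depend on whatever test data is currently ingested):

    import asf_search as asf

    results = asf.search(dataset='NISAR', maxResults=5)

    for product in results:
        # Each hit is built as an asf_search.Products.NISARProduct.
        print(product.properties['sceneName'], product.properties.get('additionalUrls', []))
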
=====================================
debian/changelog
=====================================
@@ -1,3 +1,9 @@
+asf-search (7.0.6-1) unstable; urgency=medium
+
+ * New upstream release.
+
+ -- Antonio Valentino <antonio.valentino at tiscali.it> Thu, 07 Mar 2024 07:31:21 +0000
+
asf-search (7.0.4-1) unstable; urgency=medium
* New upstream release.
=====================================
tests/ASFSearchResults/test_ASFSearchResults.py
=====================================
@@ -1,4 +1,6 @@
from typing import Dict, List
+
+import dateparser
import asf_search as asf
from asf_search import ASFSearchResults
import defusedxml.ElementTree as DefusedETree
@@ -13,7 +15,9 @@ from shapely.wkt import loads
from shapely.ops import transform
from shapely.geometry import shape
from shapely.geometry.base import BaseGeometry
+from asf_search.CMR.translate import try_parse_date
from asf_search.constants import PLATFORM
+import re
# when this replaces SearchAPI change values to cached
API_URL = 'https://api.daac.asf.alaska.edu/services/search/param?'
@@ -77,6 +81,18 @@ def check_kml(results: ASFSearchResults, expected_str: str):
actual_canon = ETree.canonicalize( DefusedETree.tostring(actual_root), strip_text=True)
expected_canon = ETree.canonicalize( DefusedETree.tostring(expected_root), strip_text=True)
+ date_pattern = r"\>(?P<variable>[\w ]*time|Time): *(?P<datestring>[^\<]*)\<"
+
+ actual_dates = re.findall(date_pattern, actual_canon, re.MULTILINE)
+ expected_date = re.findall(date_pattern, expected_canon, re.MULTILINE)
+
+ for idx, match in enumerate(actual_dates):
+ date_str, date_value = match
+ assert expected_date[idx][0] == date_str
+ assert try_parse_date(expected_date[idx][1]) == try_parse_date(date_value)
+
+ actual_canon = re.sub(date_pattern, '', actual_canon)
+ expected_canon = re.sub(date_pattern, '', expected_canon)
assert actual_canon == expected_canon
@@ -115,10 +131,16 @@ def check_csv(results: ASFSearchResults, expected_str: str):
expected_value = float(expected_dict[key])
actual_value = float(actual_dict[key])
assert expected_value == actual_value, \
- f"expected \'{expected_dict[key]}\' for key \'{key}\', got \'{actual_dict[key]}\'"
+ f"expected '{expected_dict[key]}' for key '{key}', got '{actual_dict[key]}'"
except ValueError:
- assert expected_dict[key] == actual_dict[key], \
- f"expected \'{expected_dict[key]}\' for key \'{key}\', got \'{actual_dict[key]}\'"
+ try:
+ expected_date = try_parse_date(expected_dict[key])
+ actual_date = try_parse_date(actual_dict[key])
+ assert expected_date == actual_date, \
+ f"Expected date '{expected_date}' for key '{key}', got '{actual_date}'"
+ except ValueError:
+ assert expected_dict[key] == actual_dict[key], \
+ f"expected '{expected_dict[key]}' for key '{key}', got '{actual_dict[key]}'"
def check_jsonLite(results: ASFSearchResults, expected_str: str, output_type: str):
@@ -126,12 +148,19 @@ def check_jsonLite(results: ASFSearchResults, expected_str: str, output_type: st
expected = json.loads(expected_str)['results']
+
+
if jsonlite2:
wkt_key = 'w'
wkt_unwrapped_key = 'wu'
+ start_time_key = 'st'
+ stop_time_key = 'stp'
else:
wkt_key = 'wkt'
wkt_unwrapped_key = 'wkt_unwrapped'
+ start_time_key = 'startTime'
+ stop_time_key = 'stopTime'
+
actual = json.loads(''.join(results.jsonlite2() if jsonlite2 else results.jsonlite()))['results']
@@ -139,11 +168,16 @@ def check_jsonLite(results: ASFSearchResults, expected_str: str, output_type: st
wkt = expected_product.pop(wkt_key)
wkt_unwrapped = expected_product.pop(wkt_unwrapped_key)
+ startTime = expected_product.pop(start_time_key)
+ stopTime = expected_product.pop(stop_time_key)
+
for key in expected_product.keys():
assert actual[idx][key] == expected_product[key]
assert WKT.loads(actual[idx][wkt_key]).equals(WKT.loads(wkt))
assert WKT.loads(actual[idx][wkt_unwrapped_key]).equals(WKT.loads(wkt_unwrapped))
+ assert actual[idx][start_time_key] == try_parse_date(startTime)
+ assert actual[idx][stop_time_key] == try_parse_date(stopTime)
def check_geojson(results: ASFSearchResults):
expected = results.geojson()
=====================================
tests/Search/test_search.py
=====================================
@@ -6,6 +6,7 @@ from asf_search import ASFSession
from tenacity import retry, retry_if_exception_type, stop_after_attempt
from asf_search import ASF_LOGGER, ASFSearchOptions
from asf_search.CMR.subquery import build_subqueries
+from asf_search.CMR.translate import try_parse_date
from asf_search.constants import INTERNAL
from asf_search.exceptions import ASFSearchError
from asf_search.search import search
@@ -39,6 +40,8 @@ def run_test_ASFSearchResults(search_resp):
assert search_resp[idx]['properties']['frameNumber'] == item
elif 'esaFrame' in feature.geojson()['properties'].keys() and key == 'frameNumber':
continue
+ elif key in ['stopTime', 'startTime', 'processingDate']:
+ assert try_parse_date(item) == try_parse_date(search_resp[idx]['properties'][key])
elif search_resp[idx]['properties'].get(key) is not None and item is not None:
assert item == search_resp[idx]['properties'][key]
=====================================
tests/yml_tests/test_ASFSearchOptions.yml
=====================================
@@ -143,8 +143,8 @@ tests:
start: "2022-01-01"
end: "2022-02-02"
expect_output:
- start: "2022-01-01"
- end: "2022-02-02"
+ start: "2022-01-01T00:00:00Z"
+ end: "2022-02-02T00:00:00Z"
- test-ASFSearchOptions - test with defaults NOT null:
exception: Null
=====================================
tests/yml_tests/test_search.yml
=====================================
@@ -586,21 +586,29 @@ tests:
- test-aliasing-search-against-api SLC:
params:
processingLevel: SLC
+ start: '2023-12-01T00:00:00Z'
+ end: '2023-12-31T00:00:00Z'
- test-aliasing-search-against-api S1A SLC:
params:
platform: SENTINEL-1A
processingLevel: SLC
+ start: '2023-12-01T00:00:00Z'
+ end: '2023-12-31T00:00:00Z'
- test-aliasing-search-against-api S1A RTC:
params:
platform: SENTINEL-1A
processingLevel: RTC
+ start: '2023-12-01T00:00:00Z'
+ end: '2023-12-31T00:00:00Z'
- test-aliasing-search-against-api S1A CSLC:
params:
platform: SENTINEL-1A
processingLevel: CSLC
+ start: '2023-12-01T00:00:00Z'
+ end: '2023-12-31T00:00:00Z'
- test-aliasing-search-against-api RTC-STATIC:
params:
View it on GitLab: https://salsa.debian.org/debian-gis-team/asf-search/-/compare/03ba499c77dbb461f100597543750d6622b0d878...3f22e360b7bf21a57abdc76240dc4898976ba586