[Git][debian-gis-team/asf-search][master] 4 commits: New upstream version 7.0.8

Antonio Valentino (@antonio.valentino) gitlab at salsa.debian.org
Wed Mar 27 07:08:57 GMT 2024



Antonio Valentino pushed to branch master at Debian GIS Project / asf-search


Commits:
6585de57 by Antonio Valentino at 2024-03-27T07:01:04+00:00
New upstream version 7.0.8
- - - - -
59714201 by Antonio Valentino at 2024-03-27T07:01:08+00:00
Update upstream source from tag 'upstream/7.0.8'

Update to upstream version '7.0.8'
with Debian dir 048a6d7ce78e503e0870a50940eb3565a8e12677
- - - - -
d1d0e333 by Antonio Valentino at 2024-03-27T07:01:41+00:00
New upstream release

- - - - -
49366b3b by Antonio Valentino at 2024-03-27T07:02:01+00:00
Set distribution to unstable

- - - - -


18 changed files:

- CHANGELOG.md
- asf_search/ASFProduct.py
- asf_search/ASFSearchOptions/validator_map.py
- asf_search/ASFSearchOptions/validators.py
- asf_search/CMR/field_map.py
- asf_search/CMR/subquery.py
- asf_search/CMR/translate.py
- asf_search/Products/NISARProduct.py
- asf_search/Products/OPERAS1Product.py
- asf_search/Products/S1Product.py
- asf_search/export/kml.py
- asf_search/search/geo_search.py
- asf_search/search/search.py
- asf_search/search/search_count.py
- asf_search/search/search_generator.py
- debian/changelog
- tests/Search/test_search.py
- tests/yml_tests/test_search.yml


Changes:

=====================================
CHANGELOG.md
=====================================
@@ -25,6 +25,19 @@ and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 -
 
 -->
+------
+## [v7.0.8](https://github.com/asfadmin/Discovery-asf_search/compare/v7.0.7...v7.0.8)
+### Added
+- `s3Urls` property added to `S1Product`, `OPERAS1Product`, and `NISARProduct` types, exposing direct access S3 links
+
+------
+## [v7.0.7](https://github.com/asfadmin/Discovery-asf_search/compare/v7.0.6...v7.0.7)
+### Added
+- Adds `cmr_keywords` search keyword, enables passing CMR format strings in search directly
+- Adds `shortName` keyword, for use with lists of collection short names
+### Changed
+- Allows using `dataset` and `platform` in same search
+
 ------
 ## [v7.0.6](https://github.com/asfadmin/Discovery-asf_search/compare/v7.0.5...v7.0.6)
 ### Changed
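
For orientation, here is a minimal usage sketch of the features listed in the two changelog entries above. The short name, the attribute string, and the result handling are illustrative assumptions, not values taken from this release; only asf_search.search, the `shortName` and `cmr_keywords` keywords, and the `s3Urls` property come from the changes themselves.

    import asf_search as asf

    # shortName (v7.0.7): search by collection short name(s); the name below is illustrative.
    # cmr_keywords (v7.0.7): pass raw CMR key/value pairs straight through to the query.
    results = asf.search(
        shortName=['SENTINEL-1A_SLC'],
        cmr_keywords=[('attribute[]', 'int,PATH_NUMBER,25')],
        maxResults=5,
    )

    # s3Urls (v7.0.8): direct-access S3 links on S1, OPERA-S1 and NISAR products.
    for product in results:
        print(product.properties.get('s3Urls'))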


=====================================
asf_search/ASFProduct.py
=====================================
@@ -181,6 +181,30 @@ class ASFProduct:
         """
         return None
 
+    def _get_access_urls(self, url_types: List[str] = ['GET DATA', 'EXTENDED METADATA']) -> List[str]:
+        accessUrls = []
+
+        for url_type in url_types:
+            if urls := self.umm_get(self.umm, 'RelatedUrls', ('Type', [(url_type, 'URL')]), 0):
+                accessUrls.extend(urls)
+
+        return sorted(list(set(accessUrls)))
+    
+    def _get_additional_urls(self) -> List[str]:
+        accessUrls = self._get_access_urls(['GET DATA', 'EXTENDED METADATA'])
+        return [
+            url for url in accessUrls if not url.endswith('.md5')
+            and not url.startswith('s3://')
+            and 's3credentials' not in url
+            and not url.endswith('.png')
+            and url != self.properties['url']
+        ]
+    
+    def _get_s3_urls(self) -> List[str]:
+        s3_urls = self._get_access_urls(['GET DATA', 'EXTENDED METADATA', 'GET DATA VIA DIRECT ACCESS'])
+        return [url for url in s3_urls if url.startswith('s3://')]
+
+
     def centroid(self) -> Point:
         """
         Finds the centroid of a product
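
The three helpers added above centralize URL handling that NISARProduct and OPERAS1Product previously duplicated (see their diffs below). As a rough standalone sketch of the filtering they implement, with an invented URL list and the umm_get lookup abstracted away:

    from typing import List, Tuple

    def split_access_urls(access_urls: List[str], primary_url: str) -> Tuple[List[str], List[str]]:
        # Mirrors _get_additional_urls/_get_s3_urls: HTTPS extras exclude checksums,
        # thumbnails, s3credentials endpoints, raw s3:// links and the primary URL,
        # while s3:// links are collected separately.
        deduped = sorted(set(access_urls))
        additional = [
            url for url in deduped
            if not url.endswith('.md5')
            and not url.endswith('.png')
            and not url.startswith('s3://')
            and 's3credentials' not in url
            and url != primary_url
        ]
        s3_urls = [url for url in deduped if url.startswith('s3://')]
        return additional, s3_urls

    # Invented example data
    urls = [
        'https://example.com/GRANULE.iso.xml',
        'https://example.com/GRANULE.zip',
        'https://example.com/GRANULE.zip.md5',
        's3://example-bucket/GRANULE.zip',
    ]
    print(split_access_urls(urls, 'https://example.com/GRANULE.zip'))
    # -> (['https://example.com/GRANULE.iso.xml'], ['s3://example-bucket/GRANULE.zip'])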


=====================================
asf_search/ASFSearchOptions/validator_map.py
=====================================
@@ -3,7 +3,7 @@ from asf_search import ASF_LOGGER
 from .validators import (
     parse_string, parse_float, parse_wkt, parse_date,
     parse_string_list, parse_int_list, parse_int_or_range_list,
-    parse_float_or_range_list,
+    parse_float_or_range_list, parse_cmr_keywords_list,
     parse_session
 )
 
@@ -56,12 +56,14 @@ validator_map = {
     'insarStackId':           parse_string,
     'instrument':             parse_string,
     'collections':            parse_string_list,
+    'shortName':              parse_string_list,
     'temporalBaselineDays':   parse_string_list,
     'operaBurstID':           parse_string_list,
     'absoluteBurstID':        parse_int_list,
     'relativeBurstID':        parse_int_list,
     'fullBurstID':            parse_string_list,
     'dataset':                parse_string_list,
+    'cmr_keywords':           parse_cmr_keywords_list,
 
     # Config parameters       Parser
     'session':                parse_session,


=====================================
asf_search/ASFSearchOptions/validators.py
=====================================
@@ -2,7 +2,7 @@ import dateparser
 from datetime import datetime, timezone
 
 import requests
-from typing import Union, Tuple, TypeVar, Callable, List, Type, Sequence
+from typing import Dict, Union, Tuple, TypeVar, Callable, List, Type, Sequence
 
 import math
 from shapely import wkt, errors
@@ -109,6 +109,22 @@ def parse_list(value: Sequence, h) -> List:
     except ValueError as exc:
         raise ValueError(f'Invalid {h.__name__} list: {exc}') from exc
 
+def parse_cmr_keywords_list(value: Sequence[Union[Dict, Sequence]]):
+    if not isinstance(value, Sequence) or (len(value) == 2 and isinstance(value[0], str)): # in case we're passed single key value pair as sequence
+        value = [value]
+    
+    for idx, item in enumerate(value):
+        if not isinstance(item, tuple) and not isinstance(item, Sequence):
+            raise ValueError(f"Expected item in cmr_keywords list index {idx} to be tuple pair, got value {item} of type {type(item)}")
+        if len(item) != 2:
+            raise ValueError(f"Expected item in cmr_keywords list index {idx} to be of length 2, got value {item} of length {len(item)}")
+        
+        search_key, search_value = item
+        if not isinstance(search_key, str) or not isinstance(search_value, str):
+            raise ValueError(f"Expected tuple pair of types: \"{type(str)}, {type(str)}\" in cmr_keywords at index {idx}, got value \"{str(item)}\" of types: \"{type(search_key)}, {type(search_value)}\"")
+
+    return value
+
 # Parse and validate an iterable of strings: "foo,bar,baz"
 def parse_string_list(value: Sequence[str]) -> List[str]:
     return parse_list(value, str)
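
Based on the hunk above, `parse_cmr_keywords_list` accepts either a single key/value pair or a sequence of pairs, and rejects anything that is not a pair of strings. A few illustrative calls (assuming the module path shown in this diff):

    from asf_search.ASFSearchOptions.validators import parse_cmr_keywords_list

    # A single pair is wrapped into a one-element list.
    parse_cmr_keywords_list(('provider', 'ASF'))
    # -> [('provider', 'ASF')]

    # A sequence of pairs is validated and returned as-is.
    parse_cmr_keywords_list([('provider', 'ASF'), ('attribute[]', 'int,PATH_NUMBER,25')])

    # Non-string members raise ValueError.
    try:
        parse_cmr_keywords_list([('page_size', 250)])
    except ValueError as err:
        print(err)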


=====================================
asf_search/CMR/field_map.py
=====================================
@@ -35,6 +35,7 @@ field_map = {
     'relativeOrbit':        {'key': 'attribute[]',             'fmt': 'int,PATH_NUMBER,{0}'},
     'temporal':             {'key': 'temporal',                'fmt': '{0}'},
     'collections':          {'key': 'echo_collection_id[]',    'fmt': '{0}'},
+    'shortName':            {'key': 'shortName',               'fmt': '{0}'},
     'temporalBaselineDays': {'key': 'attribute[]',             'fmt': 'int,TEMPORAL_BASELINE_DAYS,{0}'},
     
     # SLC BURST fields


=====================================
asf_search/CMR/subquery.py
=====================================
@@ -22,7 +22,7 @@ def build_subqueries(opts: ASFSearchOptions) -> List[ASFSearchOptions]:
         if params.get(chunked_key) is not None:
             params[chunked_key] = chunk_list(params[chunked_key], CMR_PAGE_SIZE)
 
-    list_param_names = ['platform', 'season', 'collections', 'dataset']  # these parameters will dodge the subquery system
+    list_param_names = ['platform', 'season', 'collections', 'dataset', 'cmr_keywords', 'shortName']  # these parameters will dodge the subquery system
     skip_param_names = ['maxResults']# these params exist in opts, but shouldn't be passed on to subqueries at ALL
     
     collections, aliased_keywords = get_keyword_concept_ids(params, opts.collectionAlias)


=====================================
asf_search/CMR/translate.py
=====================================
@@ -51,6 +51,10 @@ def translate_opts(opts: ASFSearchOptions) -> List:
     
     # convert the above parameters to a list of key/value tuples
     cmr_opts = []
+
+    # user provided umm fields
+    custom_cmr_keywords = dict_opts.pop('cmr_keywords', [])
+
     for (key, val) in dict_opts.items():
         # If it's "session" or something else CMR doesn't accept, don't send it:
         if key not in field_map:
@@ -74,6 +78,8 @@ def translate_opts(opts: ASFSearchOptions) -> List:
     if should_use_asf_frame(cmr_opts):
             cmr_opts = use_asf_frame(cmr_opts)
 
+    cmr_opts.extend(custom_cmr_keywords)
+
     additional_keys = [
         ('page_size', CMR_PAGE_SIZE),
         ('options[temporal][and]', 'true'), 
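
Net effect of the hunk above: pairs supplied via `cmr_keywords` skip the field_map translation entirely and are appended verbatim to the list of CMR query parameters. A reduced sketch of that flow, with invented option values and the field_map step stubbed out:

    dict_opts = {
        'platform': ['SENTINEL-1A'],  # illustrative, normally formatted via field_map
        'cmr_keywords': [('options[platform][ignore_case]', 'true')],  # raw CMR pair
    }

    custom_cmr_keywords = dict_opts.pop('cmr_keywords', [])  # user-provided CMR/UMM fields

    cmr_opts = []
    for key, val in dict_opts.items():
        cmr_opts.append((key, val))  # stand-in for the field_map formatting in translate_opts

    cmr_opts.extend(custom_cmr_keywords)  # appended untouched, after frame handling
    print(cmr_opts)
    # -> [('platform', ['SENTINEL-1A']), ('options[platform][ignore_case]', 'true')]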


=====================================
asf_search/Products/NISARProduct.py
=====================================
@@ -17,20 +17,8 @@ class NISARProduct(ASFStackableProduct):
     def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
         super().__init__(args, session)
 
-        accessUrls = []
-
-        if related_data_urls := self.umm_get(self.umm, 'RelatedUrls', ('Type', [('GET DATA', 'URL')]), 0):
-            accessUrls.extend(related_data_urls)
-        if related_metadata_urls := self.umm_get(self.umm, 'RelatedUrls', ('Type', [('EXTENDED METADATA', 'URL')]), 0):
-            accessUrls.extend(related_metadata_urls)
-
-        self.properties['additionalUrls'] = sorted([
-            url for url in list(set(accessUrls)) if not url.endswith('.md5')
-            and not url.startswith('s3://')
-            and 's3credentials' not in url
-            and not url.endswith('.png')
-            and url != self.properties['url']
-        ])
+        self.properties['additionalUrls'] = self._get_additional_urls()
+        self.properties['s3Urls'] = self._get_s3_urls()
 
         if self.properties.get('groupID') is None:
             self.properties['groupID'] = self.properties['sceneName']


=====================================
asf_search/Products/OPERAS1Product.py
=====================================
@@ -26,20 +26,7 @@ class OPERAS1Product(S1Product):
 
         self.properties['beamMode'] = self.umm_get(self.umm, 'AdditionalAttributes', ('Name', 'BEAM_MODE'), 'Values', 0)
 
-        accessUrls = []
-
-        if related_data_urls := self.umm_get(self.umm, 'RelatedUrls', ('Type', [('GET DATA', 'URL')]), 0):
-            accessUrls.extend(related_data_urls)
-        if related_metadata_urls := self.umm_get(self.umm, 'RelatedUrls', ('Type', [('EXTENDED METADATA', 'URL')]), 0):
-            accessUrls.extend(related_metadata_urls)
-
-        self.properties['additionalUrls'] = sorted([
-            url for url in list(set(accessUrls)) if not url.endswith('.md5')
-            and not url.startswith('s3://')
-            and 's3credentials' not in url
-            and not url.endswith('.png')
-            and url != self.properties['url']
-        ])
+        self.properties['additionalUrls'] = self._get_additional_urls()
 
         self.properties['operaBurstID'] = self.umm_get(self.umm, 'AdditionalAttributes', ('Name', 'OPERA_BURST_ID'), 'Values', 0)
         self.properties['bytes'] = {entry['Name']: {'bytes': entry['SizeInBytes'], 'format': entry['Format']} for entry in self.properties['bytes']}


=====================================
asf_search/Products/S1Product.py
=====================================
@@ -31,6 +31,8 @@ class S1Product(ASFStackableProduct):
     def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
         super().__init__(args, session)
 
+        self.properties['s3Urls'] = self._get_s3_urls()
+        
         if self._has_baseline():
             self.baseline = self.get_baseline_calc_properties()
 


=====================================
asf_search/export/kml.py
=====================================
@@ -126,7 +126,9 @@ class KMLStreamArray(MetalinkStreamArray):
         outerBondaryIs.append(linearRing)
         
         coordinates = ETree.Element('coordinates')
-        coordinates.text = '\n' + (14 * ' ') + ('\n' + (14 * ' ')).join([f"{c['Longitude']},{c['Latitude']},2000" for c in p['shape']]) + '\n' + (14 * ' ')
+        
+        if p.get('shape') is not None:
+            coordinates.text = '\n' + (14 * ' ') + ('\n' + (14 * ' ')).join([f"{c['Longitude']},{c['Latitude']},2000" for c in p.get('shape')]) + '\n' + (14 * ' ')
         linearRing.append(coordinates)
 
         self.indent(placemark, 3)
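
The change above only writes the coordinates text when the product actually carries a shape, instead of assuming it is always present. A minimal reproduction of that defensive pattern, with an invented point list:

    import xml.etree.ElementTree as ETree

    p = {'shape': [{'Longitude': -151.21, 'Latitude': 61.16},
                   {'Longitude': -150.89, 'Latitude': 61.02}]}

    coordinates = ETree.Element('coordinates')
    if p.get('shape') is not None:
        # Same guard as above; products without geometry simply get an empty element.
        coordinates.text = ' '.join(f"{c['Longitude']},{c['Latitude']},2000" for c in p['shape'])

    print(ETree.tostring(coordinates).decode())
    # -> <coordinates>-151.21,61.16,2000 -150.89,61.02,2000</coordinates>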


=====================================
asf_search/search/geo_search.py
=====================================
@@ -43,6 +43,8 @@ def geo_search(
         temporalBaselineDays: Union[str, Sequence[str]] = None,
         operaBurstID: Union[str, Sequence[str]] = None,
         dataset: Union[str, Sequence[str]] = None,
+        shortName: Union[str, Sequence[str]] = None,
+        cmr_keywords: Union[Tuple[str, str], Sequence[Tuple[str, str]]] = None,
         maxResults: int = None,
         opts: ASFSearchOptions = None,
 ) -> ASFSearchResults:


=====================================
asf_search/search/search.py
=====================================
@@ -42,6 +42,8 @@ def search(
         temporalBaselineDays: Union[str, Sequence[str]] = None,
         operaBurstID: Union[str, Sequence[str]] = None,
         dataset: Union[str, Sequence[str]] = None,
+        shortName: Union[str, Sequence[str]] = None,
+        cmr_keywords: Union[Tuple[str, str], Sequence[Tuple[str, str]]] = None,
         maxResults: int = None,
         opts: ASFSearchOptions = None,
 ) -> ASFSearchResults:


=====================================
asf_search/search/search_count.py
=====================================
@@ -44,6 +44,8 @@ def search_count(
         temporalBaselineDays: Union[str, Sequence[str]] = None,
         operaBurstID: Union[str, Sequence[str]] = None,
         dataset: Union[str, Sequence[str]] = None,
+        shortName: Union[str, Sequence[str]] = None,
+        cmr_keywords: Union[Tuple[str, str], Sequence[Tuple[str, str]]] = None,
         maxResults: int = None,
         opts: ASFSearchOptions = None,
 ) -> int:


=====================================
asf_search/search/search_generator.py
=====================================
@@ -60,6 +60,8 @@ def search_generator(
         temporalBaselineDays: Union[str, Sequence[str]] = None,
         operaBurstID: Union[str, Sequence[str]] = None,
         dataset: Union[str, Sequence[str]] = None,
+        shortName: Union[str, Sequence[str]] = None,
+        cmr_keywords: Union[Tuple[str, str], Sequence[Tuple[str, str]]] = None,
         maxResults: int = None,
         opts: ASFSearchOptions = None,
         ) -> Generator[ASFSearchResults, None, None]:
@@ -80,9 +82,6 @@ def search_generator(
         (getattr(opts, 'granule_list', False) or getattr(opts, 'product_list', False)):
             raise ValueError("Cannot use maxResults along with product_list/granule_list.")
     
-    if opts.dataset is not None and opts.platform is not None:
-        raise ValueError("Cannot use dataset along with platform keyword in search.")
-    
     preprocess_opts(opts)
 
     url = '/'.join(s.strip('/') for s in [f'https://{opts.host}', f'{INTERNAL.CMR_GRANULE_PATH}'])
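
Two things change above: the new `shortName` and `cmr_keywords` parameters are threaded through search_generator (and, per the earlier diffs, search, geo_search and search_count), and the guard that forbade combining `dataset` with `platform` is dropped, matching the v7.0.7 changelog entry. A hedged sketch relying on the relaxed check, with illustrative keyword values:

    from asf_search import ASFSearchOptions
    from asf_search.search.search_generator import search_generator

    # dataset and platform may now be supplied together; previously this raised
    # "Cannot use dataset along with platform keyword in search."
    opts = ASFSearchOptions(
        dataset='SLC-BURST',      # illustrative dataset name
        platform='SENTINEL-1A',   # illustrative platform name
        maxResults=50,
    )

    for page in search_generator(opts=opts):
        print(f'got {len(page)} results in this page')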


=====================================
debian/changelog
=====================================
@@ -1,3 +1,9 @@
+asf-search (7.0.8-1) unstable; urgency=medium
+
+  * New upstream release.
+
+ -- Antonio Valentino <antonio.valentino at tiscali.it>  Wed, 27 Mar 2024 07:01:47 +0000
+
 asf-search (7.0.6-1) unstable; urgency=medium
 
   * New upstream release.


=====================================
tests/Search/test_search.py
=====================================
@@ -117,12 +117,16 @@ def run_test_build_subqueries(params: ASFSearchOptions, expected: List):
         for key, actual_val in a:
             expected_val = getattr(b, key)
             if isinstance(actual_val, list):
-                if len(actual_val) > 0: # ASFSearchOptions leaves empty lists as None
-                    expected_set = set(expected_val)
-                    actual_set = set(actual_val)
-
-                    difference = expected_set.symmetric_difference(actual_set)
-                    assert len(difference) == 0, f"Found {len(difference)} missing entries for subquery generated keyword: \"{key}\"\n{list(difference)}"
+                if key == 'cmr_keywords':
+                    for idx, key_value_pair in enumerate(actual_val):
+                        assert key_value_pair == expected_val[idx]
+                else:
+                    if len(actual_val) > 0: # ASFSearchOptions leaves empty lists as None
+                        expected_set = set(expected_val)
+                        actual_set = set(actual_val)
+
+                        difference = expected_set.symmetric_difference(actual_set)
+                        assert len(difference) == 0, f"Found {len(difference)} missing entries for subquery generated keyword: \"{key}\"\n{list(difference)}"
             else:
                 assert actual_val == expected_val
 


=====================================
tests/yml_tests/test_search.yml
=====================================
@@ -570,6 +570,30 @@ tests:
             "C1244598379-ASFDEV",
           ]
         }]
+
+  - test-search-build_subquery shortName:
+      params:
+        shortName: 'newShortName'
+      expected:
+        - shortName: ['newShortName']
+
+  - test-search-build_subquery shortName multiple:
+      params:
+        shortName: ['newShortName', 'oldShortName']
+      expected:
+        - shortName: ['newShortName', 'oldShortName']
+
+  - test-search-build_subquery cmr_keywords:
+      params:
+        cmr_keywords: ['attribute[]', 'unique_value']
+      expected:
+        - cmr_keywords: [['attribute[]', 'unique_value']]
+  - test-search-build_subquery multiple cmr_keywords:
+      params:
+        cmr_keywords: [['attribute[]', 'unique_value'],['special_key', 'special_value']]
+      expected:
+        - cmr_keywords: [['attribute[]', 'unique_value'],['special_key', 'special_value']]
+
   - test-search-build_subquery configuration params:
       params:
         host: 'cmr.uat.earthdata.nasa.gov'



View it on GitLab: https://salsa.debian.org/debian-gis-team/asf-search/-/compare/3f22e360b7bf21a57abdc76240dc4898976ba586...49366b3bb3220233b7555d228510f6795ba534dd
