[Git][debian-gis-team/asf-search][upstream] New upstream version 12.1.0

Antonio Valentino (@antonio.valentino) gitlab at salsa.debian.org
Wed Apr 29 21:04:29 BST 2026



Antonio Valentino pushed to branch upstream at Debian GIS Project / asf-search


Commits:
c24fb901 by Antonio Valentino at 2026-04-29T19:59:52+00:00
New upstream version 12.1.0
- - - - -


8 changed files:

- CHANGELOG.md
- asf_search/CMR/datasets.py
- + asf_search/Stack.py
- asf_search/__init__.py
- + asf_search/warnings.py
- + examples/Stack.ipynb
- + tests/Stack/test_Stack.py
- tests/yml_tests/test_search.yml


Changes:

=====================================
CHANGELOG.md
=====================================
@@ -25,6 +25,14 @@ and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 -
 
 -->
+------
+## [v12.1.0](https://github.com/asfadmin/Discovery-asf_search/compare/v12.0.7...v12.1.0)
+### Added
+- Add `Stack` class, used to create stacks of `Pair` objects. This is a foundational class, which will be used by a near-future `SBASNetwork` class to automate the creation of connected, multi-annual seasonal SBAS stacks of interferograms.
+    - For usage examples, see `examples/Stack.ipynb`
+- `OPERA_L3_DIST-ALERT-S1_V1` shortname/collections added
+- Add `NISAR_EA` shortname/collections to NISAR dataset (available to authorized users)
+
 ------
 ## [v12.0.7](https://github.com/asfadmin/Discovery-asf_search/compare/v12.0.6...v12.0.7)
 ### Added


=====================================
asf_search/CMR/datasets.py
=====================================
@@ -304,45 +304,70 @@ dataset_collections = {
             'C3653529349-ASF',
             'C1274178507-ASF',
         ],
-    # "NISAR_DEM": [
-    #     "C3803703055-ASF",
-    #     "C1274665933-ASF",
-    #     "C1276777683-ASFDEV"
-    # ],
-    # "NISAR_VWC": [
-    #     "C4066106685-ASF",
-    #     "C1280516250-ASF",
-    #     "C1280516237-ASFDEV"
-    # ],
-    # "NISAR_LIA": [
-    #     "C1274178361-ASF",
-    #     "C1274178360-ASFDEV"
-    # ],
-    # "NISAR_WATERMASK": [
-    #     "C3807638962-ASF",
-    #     "C1274177987-ASF",
-    #     "C1276991363-ASFDEV"
-    # ],
-    # "NISAR_L0B_CRSD_BETA_V1": [
-    #     "C2850225137-ASF",
-    #     "C1273831262-ASF",
-    #     "C1261815276-ASFDEV",
-    # ],
-    # "NISAR_L0B_CRSD_PROVISIONAL_V1": [
-    #     "C2853091612-ASF",
-    #     "C1261832671-ASF",
-    #     "C1261832632-ASFDEV",
-    # ],
-    # "NISAR_L0B_CRSD_V1": [
-    #     "C3622254588-ASF",
-    #     "C1257349114-ASF",
-    #     "C1256358463-ASFDEV",
-    # ],
-    # "NISAR_L2_STATIC_LAYERS": [
-    #     "C3852453107-ASF",
-    #     "C1274178365-ASF",
-    #     "C1274178363-ASFDEV"
-    # ],
+        # "NISAR_DEM": [
+        #     "C3803703055-ASF",
+        #     "C1274665933-ASF",
+        #     "C1276777683-ASFDEV"
+        # ],
+        # "NISAR_VWC": [
+        #     "C4066106685-ASF",
+        #     "C1280516250-ASF",
+        #     "C1280516237-ASFDEV"
+        # ],
+        # "NISAR_LIA": [
+        #     "C1274178361-ASF",
+        #     "C1274178360-ASFDEV"
+        # ],
+        # "NISAR_WATERMASK": [
+        #     "C3807638962-ASF",
+        #     "C1274177987-ASF",
+        #     "C1276991363-ASFDEV"
+        # ],
+        # "NISAR_L0B_CRSD_BETA_V1": [
+        #     "C2850225137-ASF",
+        #     "C1273831262-ASF",
+        #     "C1261815276-ASFDEV",
+        # ],
+        # "NISAR_L0B_CRSD_PROVISIONAL_V1": [
+        #     "C2853091612-ASF",
+        #     "C1261832671-ASF",
+        #     "C1261832632-ASFDEV",
+        # ],
+        # "NISAR_L0B_CRSD_V1": [
+        #     "C3622254588-ASF",
+        #     "C1257349114-ASF",
+        #     "C1256358463-ASFDEV",
+        # ],
+        # "NISAR_L2_STATIC_LAYERS": [
+        #     "C3852453107-ASF",
+        #     "C1274178365-ASF",
+        #     "C1274178363-ASFDEV"
+        # ],
+        "NISAR_EA_L3": [
+            "C1280063122-ASFDEV",
+            "C1280063178-ASF",
+            "C4052499802-ASF",
+        ],
+        "NISAR_EA_L2": [
+            "C1280063121-ASFDEV",
+            "C1280063176-ASF",
+            "C4052499921-ASF",
+        ],
+        "NISAR_EA_L1": [
+            "C1280062841-ASFDEV",
+            "C1280063175-ASF",
+            "C4052500045-ASF",
+        ],
+        "NISAR_EA_L0B_CRSD": [
+            "C1280064293-ASFDEV",
+            "C1280064295-ASF",
+            "C4052499976-ASF",
+        ],
+        "NISAR_EA_L0B_RRSD": [
+            "C1280064294-ASFDEV",
+            "C1280064296-ASF",
+            "C4052499850-ASF",
+        ],
     },
     'SENTINEL-1': {
         'SENTINEL-1A_SLC': ['C1214470488-ASF', 'C1205428742-ASF', 'C1234413245-ASFDEV'],
@@ -526,7 +551,8 @@ dataset_collections = {
         'OPERA_L2_RTC-S1-STATIC_V1': ['C1259981910-ASF', 'C2795135174-ASF'],
         'OPERA_L2_RTC-S1_PROVISIONAL_V0': ['C1257995186-ASF'],
         'OPERA_L3_DISP-S1_V1': ['C3294057315-ASF', 'C1271830354-ASF'],
-        'OPERA_L3_DIST-ALERT': ['C1275699124-ASF', 'C4090131664-ASF'],
+        'OPERA_L3_DIST-ALERT': ['C1275699124-ASF'],
+        'OPERA_L3_DIST-ALERT-S1_V1': ['C1275699127-ASF', 'C4090131664-ASF'],
         'OPERA_L3_DISP-S1-STATIC_V1': ['C3959290248-ASF', 'C1273910948-ASF', 'C1273460752-ASFDEV'],
     },
     'TROPO': {
@@ -1490,7 +1516,7 @@ collections_by_processing_level = {
         'C1271830354-ASF'
     ],
     'DISP-S1-STATIC': ['C3959290248-ASF', 'C1273910948-ASF', 'C1273460752-ASFDEV'],
-    'DIST-ALERT-S1': ['C1275699124-ASF', 'C4090131664-ASF'],
+    'DIST-ALERT-S1': ['C1275699124-ASF', 'C4090131664-ASF', 'C1275699127-ASF'],
     'TROPO-ZENITH': ['C3717139408-ASF', 'C1273910987-ASF', 'C1273615785-ASFDEV'],
     'ECMWF_TROPO': [
         'C3653531162-ASF'


=====================================
asf_search/Stack.py
=====================================
@@ -0,0 +1,221 @@
+from collections import defaultdict, deque
+from copy import copy
+from typing import Optional, List, Tuple
+import warnings
+
+from .ASFProduct import ASFProduct
+from .Pair import Pair
+from .ASFSearchOptions import ASFSearchOptions
+from .ASFSearchResults import ASFSearchResults
+from .warnings import PairNotInFullStackWarning
+
+class Stack:
+    """
+    A Stack object contains 4+ lists of Pair objects. Each Pair contains a pair of asf_search.ASFProduct objects.
+    
+    Stack member variables holding lists of Pairs:
+    - Stack.full_stack: Every possible pair based on the provided geo_reference scene 
+      and ASFSearchOptions. This forms a complete network of all represented asf_search.ASFProducts.
+    - Stack.remove_list: The list of Pairs to remove from Stack.full_stack, used to create Stack.subset_stack
+    - Stack.subset_stack: The resulting list after removing Stack.remove_list from Stack.full_stack. This creates 
+      a possibly disconnected network of asf_search.ASFProducts
+    - Stack.connected_substacks: This is a list of lists of Pairs. It contains each disconnected component of a Stack.subset_stack's
+      asf_search.ASFProduct network. A length of 1 indicates that Stack.subset_stack is a connected network of asf_search.ASFProducts.
+
+    Public Stack methods:
+    - Stack.remove_pairs(): Adds Pairs to Stack.remove_list and removes them from Stack.subset_stack.
+    - Stack.add_pairs(): Adds Pairs to Stack.subset_stack. This either removes them from Stack.remove_list, or if not yet present in the
+      Stack, adds them to self.full_stack.
+    - Stack.get_scene_ids(): A convenience method that returns a given list of Pairs as a list of tuples of asf_search.ASFProduct
+      product IDs, which is useful when ordering on-demand processing via ASF's HyP3 or HyP3+ services.
+    """
+    def __init__(
+        self,
+        geo_reference: ASFProduct,
+        opts: Optional[ASFSearchOptions] = None
+    ):
+        """
+        Constructor that builds a Stack from a geo-reference ASFProduct
+
+        geo_reference: An ASFProduct that serves as a geo-reference scene for the Stack
+        opts: (Optional) ASFSearchOptions to apply to the geo_reference.stack() search when creating Stack.full_stack
+        """
+        self.geo_reference = geo_reference
+        if opts is None:
+            opts = ASFSearchOptions()
+        self.opts = opts
+        self.full_stack = self._build_full_stack()
+        self._remove_list = []
+        self.subset_stack = self._get_subset_stack()
+        self.connected_substacks = self._find_connected_substacks()
+
+    @classmethod
+    def from_search_results(
+        cls,
+        stack_search_results: ASFSearchResults,
+    ):
+        """
+        Alternate class method constructor using ASFSearchResults instead of a single geo_reference.
+        """
+        obj = cls.__new__(cls)
+
+        obj.full_stack = obj._build_full_stack(stack_search_results)
+        obj._remove_list = []
+        obj.subset_stack = obj._get_subset_stack()
+        obj.connected_substacks = obj._find_connected_substacks()
+
+        return obj
+
+    @property
+    def remove_list(self) -> List[Pair]:
+        """
+        Returns a copy of self._remove_list so client changes 
+        do not alter self._remove_list without initiating a stack update
+
+        Disallow: 
+          - my_stack.remove_list.append(my_pair)
+          - my_stack.remove_list.remove(my_pair)
+
+        Support:
+          - my_stack.remove_pairs([pair_1, pair_2, ...])
+          - my_stack.add_pairs([pair_1, pair_2, ...])
+        """
+        return copy(self._remove_list)
+
+    @remove_list.setter
+    def remove_list(self, pairs: List[Pair]):
+        """
+        pairs: A list of Pairs to remove from self.subset_stack
+        """
+        # remove duplicates
+        self._remove_list = list(set(pairs))
+        self._update_stack()
+
+    def remove_pairs(self, pairs: List[Pair]):
+        """
+        Remove pairs from self.subset_stack, 
+        i.e., add them to self._remove_list
+
+        pairs: A list of Pairs to remove from self.subset_stack
+        """
+        for pair in pairs:
+            if pair not in self._remove_list:
+                if pair in self.full_stack:
+                    self._remove_list.append(pair)
+                else:
+                    msg = f"warning: {pair} is not in full_stack"
+                    warnings.warn(PairNotInFullStackWarning(msg))
+        self._update_stack()
+
+    def add_pairs(self, pairs: List[Pair]):
+        """
+        Add pairs to self.subset_stack and, if necessary, to self.full_stack 
+        i.e., remove them from self._remove_list if present or else add them to self.full_stack 
+
+        This allows for the addition of custom pairs that were not originally present
+        in self.full_stack
+
+        pairs: A list of Pairs to add to self.subset_stack
+        """
+        for pair in pairs:
+            if pair in self._remove_list:
+                self._remove_list.remove(pair)
+            else:
+                self.full_stack.append(pair)
+        self._update_stack()
+
+    def _build_full_stack(self, stack_search_results: Optional[ASFSearchResults]=None) -> List[Pair]:
+        """
+        Create self._full_stack, which involves performing a stack search
+        of the georeference scene and creating a list of every possible Pair.
+
+        stack_search_results: (Optional) ASFSearchResults from an ASFProduct.stack search
+        """
+        if stack_search_results is None: 
+            stack_search_results = self.geo_reference.stack(opts=self.opts)
+
+        return [
+            Pair(p1, p2)
+            for i, p1 in enumerate(stack_search_results)
+            for p2 in stack_search_results[i+1:]
+        ]
+
+    def _get_subset_stack(self) -> List[Pair]:
+        """
+        Create a subset_stack by removing every pair in
+        self.remove_list from self.full_stack
+        """
+        return [pair for pair in self.full_stack if pair not in self.remove_list]
+
+    def _update_stack(self):
+        """
+        Recalculate self.subset_stack and find its connected substacks.
+        These two things should always happen together.
+        """
+        self.subset_stack = self._get_subset_stack()
+        self.connected_substacks = self._find_connected_substacks()
+
+    def _find_connected_substacks(self) -> List[List[Pair]]:
+        """
+        Perform a breadth-first search to find all connected components of self.subset_stack
+        """
+
+        graph = defaultdict(list)
+        for pair in self.subset_stack:
+            graph[pair.ref].append(pair.sec)
+            graph[pair.sec].append(pair.ref)
+
+        visited_nodes = set()
+        visited_pairs = set()
+        components = []
+
+        for node in graph:
+            if node not in visited_nodes:
+                component_nodes = set()
+                component_pairs = {}
+
+                queue = deque([node])
+                visited_nodes.add(node)
+
+                while queue:
+                    current = queue.popleft()
+                    component_nodes.add(current)
+
+                    for neighbor in graph[current]:
+                        if (current, neighbor) not in visited_pairs and (neighbor, current) not in visited_pairs:
+                            for pair in self.subset_stack:
+                                if (pair.ref == current and pair.sec == neighbor) or \
+                                    (pair.sec == current and pair.ref == neighbor):
+                                    component_pairs[Pair(pair.ref, pair.sec)] = pair
+                                    break
+                            visited_pairs.add((current, neighbor))
+                            visited_pairs.add((neighbor, current))
+
+                        if neighbor not in visited_nodes:
+                            visited_nodes.add(neighbor)
+                            queue.append(neighbor)
+                component_pairs = [v for v in component_pairs.values()]
+                components.append(component_pairs)
+
+        return components
+
+    def get_scene_ids(self, pair_list: Optional[List[Pair]] = None) -> List[Tuple[str, str]]:
+        """
+        Provides scene names for all asf_search.ASFProducts in a list of Pairs.
+        Useful when ordering pair-based products from ASF HyP3 On-Demand Processing.
+
+        If no pair_list is passed, defaults to the largest connected substack
+
+        pair_list: (Optional) A list of `Pair`s for which to retrieve scene IDs.
+        
+        Returns:
+            A list of tuples containing the reference and secondary scene names for each `Pair` in a `Pair` list
+        """
+        if not pair_list:
+            pair_list = max(self.connected_substacks, key=len)
+
+        return [
+            (pair.ref.properties["sceneName"], pair.sec.properties["sceneName"])
+            for pair in pair_list
+            ]
+    
\ No newline at end of file


=====================================
asf_search/__init__.py
=====================================
@@ -55,6 +55,7 @@ from .baseline import *  # noqa: F403 F401 E402
 from .WKT import validate_wkt  # noqa: F401 E402
 from .export import *  # noqa: F403 F401 E402
 from .Pair import Pair  # noqa:  F401, E402
+from .Stack import Stack  # noqa:  F401, E402
 from . import utils # noqa: F401, E402
 
 REPORT_ERRORS = True


=====================================
asf_search/warnings.py
=====================================
@@ -0,0 +1,10 @@
+class ASFWarning(Warning):
+    """
+    Base ASF Warning, not intended for direct use
+
+    Tip: Silence me to silence all child ASFWarnings
+    """
+
+
+class PairNotInFullStackWarning(ASFWarning):
+    """Warn when attempting to do something with a Pair that is not in Stack.full_stack"""
\ No newline at end of file


=====================================
examples/Stack.ipynb
=====================================
@@ -0,0 +1,339 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "24b6bd28",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# This is useful if you are working with a dev install of asf_search and experimenting with changes to the codebase\n",
+    "%load_ext autoreload\n",
+    "%autoreload 2"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "f5086cae",
+   "metadata": {},
+   "source": [
+    "### Search for a geographic reference product from which to create an `asf_search.Stack`\n",
+    "\n",
+    "This notebook uses a full Sentinel-1 scene as the reference product, but Sentinel-1 burst products are also supported."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "e55f74a9",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import asf_search as asf\n",
+    "\n",
+    "results = asf.product_search('S1A_IW_SLC__1SDV_20220215T225119_20220215T225146_041930_04FE2E_9252-SLC')\n",
+    "reference = results[0]\n",
+    "reference"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "f006b7a5",
+   "metadata": {},
+   "source": [
+    "### Create an `asf_search.Stack` object from the reference scene `asf_search.Products.S1Product.S1Product` object\n",
+    "\n",
+    "Stack accepts ASFSearchOptions, which you can use to limit its temporal range.\n",
+    "\n",
+    "This creates a `asf_search.Stack` based on the results for a given `asf.Stack` search.\n",
+    "\n",
+    "`stack.full_stack` contains a complete graph, in which every scene is connected to every other scene."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "4d317dcd",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "args = asf.ASFSearchOptions(\n",
+    "    **{\"start\": '2022-01-01', \"end\": '2022-04-02'}\n",
+    ")\n",
+    "\n",
+    "stack = asf.Stack(reference, opts=args)\n",
+    "stack.full_stack"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "bc90ffb8",
+   "metadata": {},
+   "source": [
+    "### Alternatively, create an `asf_search.Stack` object from the results of an `ASFProduct.stack()` search.\n",
+    "\n",
+    "This allows you to alter the results of the stack search on your georeference scene, and then generate a `Stack` object from it."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "e54b8d87",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "stack_search_results = reference.stack(opts=args)\n",
+    "altered_stack_search_results = stack_search_results[1:-1] # remove some products from the search results\n",
+    "stack_from_search_results = asf.Stack.from_search_results(altered_stack_search_results)\n",
+    "stack_from_search_results.full_stack"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "423c14bb",
+   "metadata": {},
+   "source": [
+    "### At this point, `stack.full_stack` == `stack.subset_stack` because we haven't yet removed any pairs\n",
+    "\n",
+    "Also note that `stack.remove_list` is empty, and `stack.subset_stack` is connected, which can be seen by the fact that `stack.connected_substacks` contains only a single item."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "5947b309",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "print(f'len(stack.full_stack): {len(stack.full_stack)}')\n",
+    "print(f'len(stack.subset_stack): {len(stack.subset_stack)}')\n",
+    "print(f'len(stack.remove_list): {len(stack.remove_list)}')\n",
+    "print(f'len(stack.connected_substacks): {len(stack.connected_substacks)}')\n",
+    "print(f'stack.subset_stack == stack.full_stack: {stack.subset_stack == stack.full_stack}')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "4623a88a",
+   "metadata": {},
+   "source": [
+    "### Remove some Pairs from the stack\n",
+    "\n",
+    "The `Pair`s have been added to `stack.remove_list` and removed from `stack.subset_stack`"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "3c4f82a0",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "stack.remove_pairs(stack.full_stack[1:11])\n",
+    "\n",
+    "print(f'len(stack.full_stack): {len(stack.full_stack)}')\n",
+    "print(f'len(stack.subset_stack): {len(stack.subset_stack)}')\n",
+    "print(f'len(stack.remove_list): {len(stack.remove_list)}')\n",
+    "print(f'stack.subset_stack == stack.full_stack: {stack.subset_stack == stack.full_stack}')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "f182fa95",
+   "metadata": {},
+   "source": [
+    "### Try removing a `Pair` that is not part of `stack.full_stack`\n",
+    "\n",
+    "This will raise a `PairNotInFullStackWarning` indicating that the Pair cannot be removed since it is not part of the Stack"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "2750f377",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Build a Pair from scenes that are not included in the existing Stack\n",
+    "results_1 = asf.product_search('S1A_IW_SLC__1SDV_20250903T225120_20250903T225147_060830_0792D3_868A-SLC')\n",
+    "results_2 = asf.product_search('S1A_IW_SLC__1SDV_20250822T225120_20250822T225147_060655_078BEA_2065-SLC')\n",
+    "my_pair = asf.Pair(results_1[0], results_2[0])\n",
+    "\n",
+    "stack.remove_pairs([my_pair])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "a303ff16",
+   "metadata": {},
+   "source": [
+    "### Examine `stack.connected_substacks`\n",
+    "\n",
+    "For illustrative purposes, the set of previously removed `Pair`s was intentionally selected to disconnect the `stack.subset_stack` into two connected `Stack` networks.\n",
+    "\n",
+    "We can view these connected substacks by examining `stack.connected_substacks`, which contains a list of all connected substacks within `stack.subset_stack`"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "3d481fc5",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "print(f'len(stack.connected_substacks): {len(stack.connected_substacks)}')\n",
+    "\n",
+    "for s in stack.connected_substacks:\n",
+    "    print(s)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "28fddf79",
+   "metadata": {},
+   "source": [
+    "### Add one of the removed `Pair`s back to `stack.subset_stack`\n",
+    "\n",
+    "Adding this `Pair` back once again fully-connects `stack.subset_stack`"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "b95d8fc7",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "stack.add_pairs([stack.remove_list[0]])\n",
+    "print(f'len(stack.connected_substacks): {len(stack.connected_substacks)}')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "b416e228",
+   "metadata": {},
+   "source": [
+    "### You can also add `Pairs` that are not part of the original `stack.full_stack`\n",
+    "\n",
+    "This is useful if you need to add custom `Pairs` that were not included during automated `Stack.full_stack` creation. We created such a `Pair` earlier in the notebook (`my_pair`).\n",
+    "\n",
+    "Note that adding `my_pair` once again splits `stack.subset_stack` into 2 connected substacks."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "1da9b8f4",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "stack.add_pairs([my_pair])\n",
+    "print(f'len(stack.connected_substacks): {len(stack.connected_substacks)}')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "c240a25f",
+   "metadata": {},
+   "source": [
+    "### View `stack.subset_stack`'s scene ID's \n",
+    "\n",
+    "When ordering data from ASF [HyP3](https://hyp3-docs.asf.alaska.edu/hyp3-docs/about/) or [HyP3+](https://hyp3-docs.asf.alaska.edu/hyp3-docs/about/hyp3_plus/) On-Demand Processing, it is helpful to provide a `Stack` as a list of scene IDs.\n",
+    "\n",
+    "The `Stack.get_scene_ids()` method offers this functionality."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "e04d7bf8",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "possibly_disconnected_subset_stack_ids = stack.get_scene_ids(stack.subset_stack)\n",
+    "possibly_disconnected_subset_stack_ids"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "87798431",
+   "metadata": {},
+   "source": [
+    "### View the largest connected substack in `stack.connected_substacks`\n",
+    "\n",
+    "`stack.get_scene_ids()` takes the optional argument `pair_list`. \n",
+    "\n",
+    "If you do not provide a pair_list, the method defaults to using the largest connected substack in `stack.connected_substacks`."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "14711e1f",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "largest_fully_connected_subset_stack_ids = stack.get_scene_ids()\n",
+    "largest_fully_connected_subset_stack_ids\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "c2d4c0c7",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "len(largest_fully_connected_subset_stack_ids)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "183233d2",
+   "metadata": {},
+   "source": [
+    "### You can view any stack_list contained in a `Stack` object, including the complete `stack.full_stack`, `stack.remove_list`, the possibly disconnected `stack.subset_stack`, or any of the stack_lists in `stack.connected_substacks`"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "7fbc5b28",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "remove_list_scene_ids = stack.get_scene_ids(stack.remove_list)\n",
+    "remove_list_scene_ids"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "60ef46d0",
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "asf_search_312",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.12.0"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}


=====================================
tests/Stack/test_Stack.py
=====================================
@@ -0,0 +1,62 @@
+from datetime import datetime, timedelta, timezone
+from asf_search.ASFSearchOptions import ASFSearchOptions
+from asf_search.search import product_search
+from asf_search import Pair, Stack
+from asf_search.warnings import PairNotInFullStackWarning
+import numpy as np
+import pytest
+
+def test_make_s1_stack():
+
+    args = ASFSearchOptions(
+        **{"start": '2022-01-01', "end": '2022-04-02'}
+    )
+
+    # Create a Stack and confirm expected size of its full_stack and subset_stack
+    results = product_search('S1A_IW_SLC__1SDV_20220215T225119_20220215T225146_041930_04FE2E_9252-SLC')
+    reference = results[0]
+    stack = Stack(reference, opts=args)
+    assert len(stack.full_stack) == 21
+    assert stack.subset_stack == stack.full_stack
+
+    # Create a Stack from ASFProduct.stack search results with the Stack.from_search_results alternate class method constructor
+    stack_search_results = reference.stack(opts=args)
+    stack_from_search_results = Stack.from_search_results(stack_search_results)
+    assert len(stack_from_search_results.full_stack) == 21
+    assert stack_from_search_results.subset_stack == stack_from_search_results.full_stack
+
+    # Remove Pairs from the Stack, confirm expected Pair list lengths
+    stack.remove_pairs(stack.full_stack[1:11])
+    assert len(stack.subset_stack) == 11
+    assert len(stack.remove_list) == 10
+    assert len(stack.connected_substacks) == 2
+
+    # Add one Pair back and confirm expected Pair list lengths
+    stack.add_pairs([stack.remove_list[0]])
+    assert len(stack.subset_stack) == 12
+    assert len(stack.remove_list) == 9
+    assert len(stack.connected_substacks) == 1
+
+    # Create a Pair not present in the stack and confirm that it cannot be removed
+    results_1 = product_search('S1A_IW_SLC__1SDV_20250903T225120_20250903T225147_060830_0792D3_868A-SLC')
+    results_2 = product_search('S1A_IW_SLC__1SDV_20250822T225120_20250822T225147_060655_078BEA_2065-SLC')
+    my_pair = Pair(results_1[0], results_2[0])
+    with pytest.warns(PairNotInFullStackWarning):
+        stack.remove_pairs([my_pair])
+
+    # Add the new Pair to the stack and confirm expected Pair list lengths
+    stack.add_pairs([my_pair])
+    assert len(stack.full_stack) == 22
+    assert len(stack.subset_stack) == 13
+    assert len(stack.connected_substacks) == 2
+
+    # Test Stack.get_scene_ids()
+    largest_fully_connected_subset_stack_ids = stack.get_scene_ids()
+    remove_list_scene_ids = stack.get_scene_ids(stack.remove_list)
+    full_stack_scene_ids = stack.get_scene_ids(stack.full_stack)
+    subset_stack_scene_ids = stack.get_scene_ids(stack.subset_stack)
+    assert len(largest_fully_connected_subset_stack_ids) == 12
+    assert len(remove_list_scene_ids) == 9
+    assert len(full_stack_scene_ids) == 22
+    assert len(subset_stack_scene_ids) == 13
+


=====================================
tests/yml_tests/test_search.yml
=====================================
@@ -235,6 +235,11 @@ nisar_collections: &nisar_collections [
     # 'C1261832632-ASFDEV',
     # 'C1256358463-ASFDEV',
     # 'C1274178363-ASFDEV',
+    "C1280063122-ASFDEV",
+    "C1280063121-ASFDEV",
+    "C1280062841-ASFDEV",
+    "C1280064293-ASFDEV",
+    "C1280064294-ASFDEV",
     # UAT
     "C1261815288-ASF",
     "C1261832657-ASF",
@@ -295,6 +300,11 @@ nisar_collections: &nisar_collections [
     # 'C1261832671-ASF',
     # 'C1257349114-ASF',
     # 'C1274178365-ASF',
+    "C1280063178-ASF",
+    "C1280063176-ASF",
+    "C1280063175-ASF",
+    "C1280064295-ASF",
+    "C1280064296-ASF",
     # PROD
     "C2850220296-ASF",
     "C2853068083-ASF",
@@ -354,6 +364,11 @@ nisar_collections: &nisar_collections [
     # 'C2853091612-ASF',
     # 'C3622254588-ASF',
     # 'C3852453107-ASF',
+    "C4052499802-ASF",
+    "C4052499921-ASF",
+    "C4052500045-ASF",
+    "C4052499976-ASF",
+    "C4052499850-ASF",
   ]
 
 tests:



View it on GitLab: https://salsa.debian.org/debian-gis-team/asf-search/-/commit/c24fb9017067fe87cb07f5a4c87b433c412fa4a7

-- 
View it on GitLab: https://salsa.debian.org/debian-gis-team/asf-search/-/commit/c24fb9017067fe87cb07f5a4c87b433c412fa4a7
You're receiving this email because of your account on salsa.debian.org. Manage all notifications: https://salsa.debian.org/-/profile/notifications | Help: https://salsa.debian.org/help


-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/pkg-grass-devel/attachments/20260429/4e1dc398/attachment-0001.htm>


More information about the Pkg-grass-devel mailing list