[Git][debian-gis-team/mintpy][upstream] New upstream version 1.4.1

Antonio Valentino (@antonio.valentino) gitlab@salsa.debian.org
Tue Aug 16 08:40:31 BST 2022



Antonio Valentino pushed to branch upstream at Debian GIS Project / mintpy


Commits:
2aede911 by Antonio Valentino at 2022-08-16T07:26:02+00:00
New upstream version 1.4.1
- - - - -


20 changed files:

- .github/workflows/build-docker.yml
- + .github/workflows/publish-to-test-pypi.yml
- docs/environment.yml
- mintpy/__init__.py
- mintpy/__main__.py
- mintpy/asc_desc2horz_vert.py
- mintpy/geocode.py
- mintpy/load_data.py
- mintpy/objects/insar_vs_gps.py
- mintpy/reference_point.py
- mintpy/smallbaselineApp.py
- mintpy/timeseries2velocity.py
- mintpy/timeseries_rms.py
- mintpy/unwrap_error_phase_closure.py
- mintpy/utils/plot.py
- mintpy/utils/readfile.py
- mintpy/version.py
- mintpy/view.py
- requirements.txt
- setup.py


Changes:

=====================================
.github/workflows/build-docker.yml
=====================================
@@ -12,6 +12,7 @@ on:
 
 jobs:
   dockerize:
+    name: Build Docker image and push to GitHub Container Registry
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
@@ -31,7 +32,7 @@ jobs:
       - name: Set environment variables for docker build
         run: |
           # Lowercase repo for Github Container Registry
-          echo "REPO=${GITHUB_REPOSITORY,,}" >>${GITHUB_ENV}
+          echo "REPO=${GITHUB_REPOSITORY,,}" >> ${GITHUB_ENV}
           # Ensure tags are checked out
           git fetch origin +refs/tags/*:refs/tags/*
           # Version number from tag


=====================================
.github/workflows/publish-to-test-pypi.yml
=====================================
@@ -0,0 +1,58 @@
+# link: https://packaging.python.org/en/latest/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/
+name: publish distributions 📦 to PyPI and TestPyPI
+
+on:
+  push:
+    branches:
+      - main
+    tags:
+      - v*
+
+jobs:
+  build-n-publish:
+    if: github.repository_owner == 'insarlab'
+
+    name: Build and publish Python 🐍 distributions 📦 to PyPI and TestPyPI
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@v3
+      with:
+        fetch-depth: 0
+
+    - name: Set up Python 3.10
+      uses: actions/setup-python@v3
+      with:
+        python-version: "3.10"
+
+    - name: Install pypa/build
+      run: >-
+        python -m
+        pip install
+        build
+        --user
+
+    - name: Build a binary wheel and a source tarball
+      run: >-
+        python -m
+        build
+        --sdist
+        --wheel
+        --outdir dist/
+        .
+
+    - name: Publish developed version 📦 to Test PyPI
+      if: github.ref == 'refs/heads/main'
+      uses: pypa/gh-action-pypi-publish@release/v1
+      with:
+        password: ${{ secrets.TEST_PYPI_API_TOKEN }}
+        repository_url: https://test.pypi.org/legacy/
+        skip_existing: false
+        verbose: true
+
+    - name: Publish released version 📦 to PyPI
+      if: startsWith(github.ref, 'refs/tags/v')
+      uses: pypa/gh-action-pypi-publish@release/v1
+      with:
+        password: ${{ secrets.PYPI_API_TOKEN }}
+        verbose: true
+


=====================================
docs/environment.yml
=====================================
@@ -23,11 +23,9 @@ dependencies:
   - pyaps3>=0.3
   - pykml>=0.2
   - pyproj
+  - pyresample
   - pysolid
   - scikit-image
   - scipy
-  # for ARIA, FRInGE, HyP3, GMTSAR
+  # for ISCE, ARIA, FRInGE, HyP3, GMTSAR
   - gdal>=3
-  # for pyresample
-  - pyresample
-  - openmp


=====================================
mintpy/__init__.py
=====================================
@@ -1,5 +1,5 @@
 # get version info
 from mintpy.version import (
-    version_num as __version__,
+    version as __version__,
     logo as __logo__,
 )


=====================================
mintpy/__main__.py
=====================================
@@ -513,7 +513,7 @@ def get_view_parser(subparsers=None):
 def get_parser():
     """Instantiate the command line argument parser."""
     parser = argparse.ArgumentParser(prog=PROG, description=__doc__)
-    parser.add_argument("--version", action="version", version=f"%(prog)s {__version__}")
+    parser.add_argument("-v","--version", action="version", version=f"{__version__}")
 
     # Sub-command management
     sp = parser.add_subparsers(title="sub-commands", dest='func', required=True, metavar='')


=====================================
mintpy/asc_desc2horz_vert.py
=====================================
@@ -116,7 +116,7 @@ def cmd_line_parse(iargs=None):
     if any(ref_diff > inps.max_ref_yx_diff for ref_diff in [ref_y_diff, ref_x_diff]):
         msg = 'REF_LAT/LON difference between input files > {} pixels!\n'.format(inps.max_ref_yx_diff)
         for fname, ref_lat, ref_lon in zip(inps.file, [ref_lat1, ref_lat2], [ref_lon1, ref_lon2]):
-            msg += 'file1: {}\n'.format(fname)
+            msg += 'file: {}\n'.format(fname)
             msg += '\tREF_LAT/LON: [{:.8f}, {:.8f}]\n'.format(ref_lat, ref_lon)
         raise ValueError(msg)
 


=====================================
mintpy/geocode.py
=====================================
@@ -219,8 +219,11 @@ def read_template2inps(template_file, inps):
 def check_num_processor(nprocs):
     """Check number of processors
     Note by Yunjun, 2019-05-02:
-    1. conda install pyresample will install pykdtree and openmp, but it seems not working
+    1. conda install pyresample will install pykdtree and openmp, but it seems not working:
         geocode.py is getting slower with more processors
+            Test on a TS HDF5 file in size of (241, 2267, 2390)
+            Memory: up to 10GB
+            Run time: 2.5 mins for nproc=1, 3 mins for nproc=4
     2. macports seems to have minor speedup when more processors
     Thus, default number of processors is set to 1; although the capability of using multiple
     processors is written here.


=====================================
mintpy/load_data.py
=====================================
@@ -766,7 +766,7 @@ def prepare_metadata(iDict):
         geom_names = ['dem', 'lookupY', 'lookupX', 'incAngle', 'azAngle', 'shadowMask', 'waterMask']
         geom_keys = ['mintpy.load.{}File'.format(i) for i in geom_names]
         geom_files = [os.path.basename(iDict[key]) for key in geom_keys
-                      if (iDict.get(key, 'auto') != 'auto')]
+                      if iDict.get(key, 'auto') not in ['auto', 'None', 'no',  None, False]]
 
         # compose list of input arguments
         iargs = ['-m', meta_file, '-g', geom_dir]
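
For context, the stricter filter above now also skips geometry entries set to 'None', 'no', None or False, instead of only the literal 'auto'. An illustrative sketch with a hypothetical template dict (not part of this commit):

    import os

    iDict = {
        'mintpy.load.demFile'       : '../geometry/hgt.rdr',    # set by the user
        'mintpy.load.waterMaskFile' : 'auto',                    # left at its default
        'mintpy.load.shadowMaskFile': 'no',                      # explicitly disabled
    }
    skip_values = ['auto', 'None', 'no', None, False]
    geom_files = [os.path.basename(v) for v in iDict.values() if v not in skip_values]
    # geom_files -> ['hgt.rdr']: both the 'auto' and the 'no' entries are excluded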


=====================================
mintpy/objects/insar_vs_gps.py
=====================================
@@ -13,144 +13,14 @@ from scipy import stats
 from scipy.interpolate import griddata
 from datetime import datetime as dt
 from dateutil.relativedelta import relativedelta
-from matplotlib import pyplot as plt
 
 from mintpy.objects import timeseries, giantTimeseries
-from mintpy.utils import ptime, readfile, plot as pp, utils as ut
+from mintpy.utils import readfile, plot as pp, utils as ut
 from mintpy.objects.gps import GPS
 from mintpy.defaults.plot import *
 
 
 
-############################## utilities functions ##########################################
-
-def plot_insar_vs_gps_scatter(vel_file, csv_file='gps_enu2los.csv', msk_file=None, ref_gps_site=None,
-                              xname='InSAR', vlim=None, ex_gps_sites=[], display=True):
-    """Scatter plot to compare the velocities between SAR/InSAR and GPS.
-
-    Parameters: vel_file     - str, path of InSAR LOS velocity HDF5 file..
-                ref_gps_site - str, reference GNSS site name
-                csv_file     - str, path of GNSS CSV file, generated after running view.py --gps-comp
-                msk_file     - str, path of InSAR mask file.
-                xname        - str, xaxis label
-                vlim         - list of 2 float, display value range in the unit of cm/yr
-                               Default is None to grab from data
-                               If set, the range will be used to prune the SAR and GPS observations
-                ex_gps_sites - list of str, exclude GNSS sites for analysis and plotting.
-    Example:
-        from mintpy.objects.insar_vs_gps import plot_insar_vs_gps_scatter
-        csv_file = os.path.join(work_dir, 'geo/gps_enu2los.csv')
-        vel_file = os.path.join(work_dir, 'geo/geo_velocity.h5')
-        msk_file = os.path.join(work_dir, 'geo/geo_maskTempCoh.h5')
-        plot_insar_vs_gps_scatter(vel_file, ref_gps_site='CACT', csv_file=csv_file, msk_file=msk_file, vlim=[-2.5, 2])
-    """
-
-    disp_unit = 'cm/yr'
-    unit_fac = 100.
-
-    # read GPS velocity from CSV file (generated by gps.get_gps_los_obs())
-    col_names = ['Site', 'Lon', 'Lat', 'Displacement', 'Velocity']
-    num_col = len(col_names)
-    col_types = ['U10'] + ['f8'] * (num_col - 1)
-
-    print('read GPS velocity from file: {}'.format(csv_file))
-    fc = np.genfromtxt(csv_file, dtype=col_types, delimiter=',', names=True)
-    sites = fc['Site']
-    lats = fc['Lat']
-    lons = fc['Lon']
-    gps_obs = fc[col_names[-1]] * unit_fac
-
-    if ex_gps_sites:
-        ex_flag = np.array([x in ex_gps_sites for x in sites], dtype=np.bool_)
-        if np.sum(ex_flag) > 0:
-            sites = sites[~ex_flag]
-            lats = lats[~ex_flag]
-            lons = lons[~ex_flag]
-            gps_obs = gps_obs[~ex_flag]
-
-    # read InSAR velocity
-    print('read InSAR velocity from file: {}'.format(vel_file))
-    atr = readfile.read_attribute(vel_file)
-    length, width = int(atr['LENGTH']), int(atr['WIDTH'])
-    coord = ut.coordinate(atr)
-    ys, xs = coord.geo2radar(lats, lons)[:2]
-
-    msk = readfile.read(msk_file)[0] if msk_file else np.ones((length, width), dtype=np.bool_)
-
-    num_site = sites.size
-    insar_obs = np.zeros(num_site, dtype=np.float32) * np.nan
-    prog_bar = ptime.progressBar(maxValue=num_site)
-    for i in range(num_site):
-        x, y = xs[i], ys[i]
-        if (0 <= x < width) and (0 <= y < length) and msk[y, x]:
-            box = (x, y, x+1, y+1)
-            insar_obs[i] = readfile.read(vel_file, datasetName='velocity', box=box)[0] * unit_fac
-        prog_bar.update(i+1, suffix='{}/{} {}'.format(i+1, num_site, sites[i]))
-    prog_bar.close()
-
-    off_med = np.nanmedian(insar_obs - gps_obs)
-    print(f'median offset between InSAR and GPS [before common referencing]: {off_med:.2f} cm/year')
-
-    # reference site
-    if ref_gps_site:
-        print(f'referencing both InSAR and GPS data to site: {ref_gps_site}')
-        ref_ind = sites.tolist().index(ref_gps_site)
-        gps_obs -= gps_obs[ref_ind]
-        insar_obs -= insar_obs[ref_ind]
-
-    # remove NaN value
-    print('removing sites with NaN values in GPS or {}'.format(xname))
-    flag = np.multiply(~np.isnan(insar_obs), ~np.isnan(gps_obs))
-    if vlim is not None:
-        print('pruning sites with value range: {} {}'.format(vlim, disp_unit))
-        flag *= gps_obs >= vlim[0]
-        flag *= gps_obs <= vlim[1]
-        flag *= insar_obs >= vlim[0]
-        flag *= insar_obs <= vlim[1]
-
-    gps_obs = gps_obs[flag]
-    insar_obs = insar_obs[flag]
-    sites = sites[flag]
-
-    # stats
-    print('GPS   min/max: {:.2f} / {:.2f}'.format(np.nanmin(gps_obs), np.nanmax(gps_obs)))
-    print('InSAR min/max: {:.2f} / {:.2f}'.format(np.nanmin(insar_obs), np.nanmax(insar_obs)))
-
-    rmse = np.sqrt(np.sum((insar_obs - gps_obs)**2) / (gps_obs.size - 1))
-    r2 = stats.linregress(insar_obs, gps_obs)[2]
-    print('RMSE = {:.1f} cm'.format(rmse))
-    print('R^2 = {:.2f}'.format(r2))
-
-    # plot
-    if display:
-        plt.rcParams.update({'font.size': 12})
-        if vlim is None:
-            vlim = [np.min(insar_obs), np.max(insar_obs)]
-            buffer = (vlim[1] - vlim[0]) * 0.1
-            vlim = [vlim[0] - buffer, vlim[1] + buffer]
-
-        fig, ax = plt.subplots(figsize=[4, 4])
-        ax.plot((vlim[0], vlim[1]), (vlim[0], vlim[1]), 'k--')
-        ax.plot(insar_obs, gps_obs, '.', ms=15)
-
-        # axis format
-        ax.set_xlim(vlim)
-        ax.set_ylim(vlim)
-        ax.set_xlabel(f'{xname} [{disp_unit}]')
-        ax.set_ylabel(f'GNSS [{disp_unit}]')
-        ax.set_aspect('equal', 'box')
-        fig.tight_layout()
-
-        # output
-        out_fig = '{}_vs_gps_scatter.pdf'.format(xname.lower())
-        plt.savefig(out_fig, bbox_inches='tight', transparent=True, dpi=300)
-        print('save figure to file', out_fig)
-        plt.show()
-
-    return sites, insar_obs, gps_obs
-
-
-
 ############################## beginning of insar_vs_gps class ##############################
 class insar_vs_gps:
     """ Comparing InSAR time-series with GPS time-series in LOS direction


=====================================
mintpy/reference_point.py
=====================================
@@ -464,7 +464,9 @@ def read_reference_input(inps):
         # Do not use ref_y/x in masked out area
         if inps.maskFile and os.path.isfile(inps.maskFile):
             print('mask: '+inps.maskFile)
-            mask = readfile.read(inps.maskFile, datasetName='mask')[0]
+            ds_names = readfile.get_dataset_list(inps.maskFile)
+            ds_name = [x for x in ds_names if x in ['mask', 'waterMask']][0]
+            mask = readfile.read(inps.maskFile, datasetName=ds_name)[0]
             if mask[inps.ref_y, inps.ref_x] == 0:
                 inps.ref_y, inps.ref_x = None, None
                 msg = 'input reference point is in masked OUT area defined by {}!'.format(inps.maskFile)


=====================================
mintpy/smallbaselineApp.py
=====================================
@@ -49,7 +49,7 @@ EXAMPLE = """example:
   smallbaselineApp.py -H                      #print    default template options
   smallbaselineApp.py -g                      #generate default template if it does not exist
   smallbaselineApp.py -g <custom_template>    #generate/update default template based on custom template
-  smallbaselineApp.py --plot                  #plot results without run
+  smallbaselineApp.py --plot                  #plot results w/o run [to populate the 'pic' folder after failed runs]
 
   # Run with --start/stop/dostep options
   smallbaselineApp.py GalapagosSenDT128.template --dostep velocity  #run at step 'velocity' only
@@ -617,6 +617,12 @@ class TimeSeriesAnalysis:
             msg += "Try the following:\n"
             msg += "1) Check the reference pixel and make sure it's not in areas with unwrapping errors\n"
             msg += "2) Check the network and make sure it's fully connected without subsets"
+            print(f'ERROR: {msg}')
+
+            # populate the pic folder to facilate the trouble shooting
+            self.plot_result(print_aux=False)
+
+            # terminate the program
             raise RuntimeError(msg)
         return
 


=====================================
mintpy/timeseries2velocity.py
=====================================
@@ -487,7 +487,7 @@ def run_timeseries2time_func(inps):
             # Bootstrapping is a resampling method which can be used to estimate properties
             # of an estimator. The method relies on independently sampling the data set with
             # replacement.
-            print('estimating time function STD with bootstrap resampling ({} times) ...'.format(
+            print('estimating time functions STD with bootstrap resampling ({} times) ...'.format(
                 inps.bootstrapCount))
 
             # calc model of all bootstrap sampling
@@ -559,7 +559,7 @@ def run_timeseries2time_func(inps):
                 # TO DO: save the full covariance matrix of the time function parameters
                 # only the STD is saved right now
                 covar_flag = True if len(ts_cov.shape) == 3 else False
-                msg = 'estimating time function STD from time-serries '
+                msg = 'estimating time functions STD from time-serries '
                 msg += 'covariance pixel-by-pixel ...' if covar_flag else 'variance pixel-by-pixel ...'
                 print(msg)
 
@@ -583,7 +583,7 @@ def run_timeseries2time_func(inps):
 
             elif inps.uncertaintyQuantification == 'residue':
                 # option 2.3 - assume obs errors following normal dist. in time
-                print('estimating time function STD from time-series fitting residual ...')
+                print('estimating time functions STD from time-series fitting residual ...')
                 G_inv = linalg.inv(np.dot(G.T, G))
                 m_var = e2.reshape(1, -1) / (num_date - num_param)
                 m_std[:, mask] = np.sqrt(np.dot(np.diag(G_inv).reshape(-1, 1), m_var))
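
The comment above summarizes the bootstrap approach used for the STD estimate. A small self-contained sketch of the idea on synthetic data (illustrative only, not MintPy code): resample the acquisitions with replacement, refit the model each time, and take the spread of the fitted parameters as their uncertainty.

    import numpy as np

    rng = np.random.default_rng(0)
    num_date, bootstrap_count = 50, 400
    t  = np.linspace(0, 5, num_date)                         # acquisition times [yr]
    ts = 0.8 * t + rng.normal(0, 0.3, num_date)              # synthetic displacement [cm]

    vel_boot = np.zeros(bootstrap_count)
    for i in range(bootstrap_count):
        idx = rng.choice(num_date, num_date, replace=True)       # sample with replacement
        vel_boot[i] = np.polyfit(t[idx], ts[idx], deg=1)[0]      # slope of the resampled fit
    vel_std = np.std(vel_boot, ddof=1)                        # bootstrap STD of the velocity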


=====================================
mintpy/timeseries_rms.py
=====================================
@@ -9,10 +9,9 @@
 import os
 import sys
 import numpy as np
-import matplotlib.pyplot as plt
-from mpl_toolkits.axes_grid1 import make_axes_locatable
+
 from mintpy.defaults.template import get_template_content
-from mintpy.utils import readfile, ptime, utils as ut, plot as pp
+from mintpy.utils import readfile, utils as ut, plot as pp
 from mintpy.utils.arg_utils import create_argument_parser
 
 
@@ -103,15 +102,14 @@ def analyze_rms(date_list, rms_list, inps):
         print('save date to file: '+ref_date_file)
 
     # exclude date(s) - outliers
-    try:
-        rms_threshold = ut.median_abs_deviation_threshold(rms_list, center=0., cutoff=inps.cutoff)
-    except:
-        # equivalent calculation using numpy assuming Gaussian distribution
-        rms_threshold = np.median(rms_list) / .6745 * inps.cutoff
+    # equivalent calculation using numpy assuming Gaussian distribution as:
+    # rms_threshold = np.median(rms_list) / .6745 * inps.cutoff
+    rms_threshold = ut.median_abs_deviation_threshold(rms_list, center=0., cutoff=inps.cutoff)
 
     ex_idx = [rms_list.index(i) for i in rms_list if i > rms_threshold]
-    print(('-'*50+'\ndate(s) with RMS > {} * median RMS'
-           ' ({:.4f})'.format(inps.cutoff, rms_threshold)))
+    print('-'*50)
+    print(f'date(s) with RMS > {inps.cutoff} * median RMS ({rms_threshold:.4f})')
+
     ex_date_file = 'exclude_date.txt'
     if ex_idx:
         # print
@@ -127,110 +125,37 @@ def analyze_rms(date_list, rms_list, inps):
         if os.path.isfile(ex_date_file):
             os.remove(ex_date_file)
 
-    # plot bar figure and save
-    fig_file = os.path.splitext(inps.rms_file)[0]+'.pdf'
-    fig, ax = plt.subplots(figsize=inps.fig_size)
-    print('create figure in size:', inps.fig_size)
-    ax = plot_rms_bar(ax, date_list, np.array(rms_list)*1000., cutoff=inps.cutoff)
-    fig.savefig(fig_file, bbox_inches='tight', transparent=True)
-    print('save figure to file: '+fig_file)
     return inps
 
 
-def plot_rms_bar(ax, date_list, rms, cutoff=3., font_size=12,
-                 tick_year_num=1, legend_loc='best',
-                 disp_legend=True, disp_side_plot=True, disp_thres_text=False,
-                 ylabel='Residual phase RMS [mm]'):
-    """ Bar plot Phase Residual RMS
-    Parameters: ax : Axes object
-                date_list : list of string in YYYYMMDD format
-                rms    : 1D np.array of float for RMS value in mm
-                cutoff : cutoff value of MAD outlier detection
-                tick_year_num : int, number of years per major tick
-                legend_loc : 'upper right' or (0.5, 0.5)
-    Returns:    ax : Axes object
-    """
-    dates, datevector = ptime.date_list2vector(date_list)
-    dates = np.array(dates)
-    try:
-        bar_width = min(ut.most_common(np.diff(dates).tolist(), k=2))*3/4
-    except:
-        bar_width = np.min(np.diff(dates).tolist())*3/4
-    rms = np.array(rms)
-
-    # Plot all dates
-    ax.bar(dates, rms, bar_width.days, color=pp.mplColors[0])
-
-    # Plot reference date
-    ref_idx = np.argmin(rms)
-    ax.bar(dates[ref_idx], rms[ref_idx], bar_width.days, color=pp.mplColors[1], label='Reference date')
-
-    # Plot exclude dates
-    rms_threshold = ut.median_abs_deviation_threshold(rms, center=0., cutoff=cutoff)
-    ex_idx = rms > rms_threshold
-    if not np.all(ex_idx==False):
-        ax.bar(dates[ex_idx], rms[ex_idx], bar_width.days, color='darkgray', label='Exclude date')
-
-    # Plot rms_threshold line
-    (ax, xmin, xmax) = pp.auto_adjust_xaxis_date(ax, datevector, font_size, every_year=tick_year_num)
-    ax.plot(np.array([xmin, xmax]), np.array([rms_threshold, rms_threshold]), '--k',
-            label='Median Abs Dev * {}'.format(cutoff))
-
-    # axis format
-    ax = pp.auto_adjust_yaxis(ax, np.append(rms, rms_threshold), font_size, ymin=0.0)
-    #ax.set_xlabel('Time [years]', fontsize=font_size)
-    ax.set_ylabel(ylabel, fontsize=font_size)
-    ax.tick_params(which='both', direction='in', labelsize=font_size,
-                   bottom=True, top=True, left=True, right=True)
-
-    # 2nd axes for circles
-    if disp_side_plot:
-        divider = make_axes_locatable(ax)
-        ax2 = divider.append_axes("right", "10%", pad="2%")
-        ax2.plot(np.ones(rms.shape, np.float32) * 0.5, rms, 'o', mfc='none', color=pp.mplColors[0])
-        ax2.plot(np.ones(rms.shape, np.float32)[ref_idx] * 0.5, rms[ref_idx], 'o', mfc='none', color=pp.mplColors[1])
-        if not np.all(ex_idx==False):
-            ax2.plot(np.ones(rms.shape, np.float32)[ex_idx] * 0.5, rms[ex_idx], 'o', mfc='none', color='darkgray')
-        ax2.plot(np.array([0, 1]), np.array([rms_threshold, rms_threshold]), '--k')
-
-        ax2.set_ylim(ax.get_ylim())
-        ax2.set_xlim([0, 1])
-        ax2.tick_params(which='both', direction='in', labelsize=font_size,
-                        bottom=True, top=True, left=True, right=True)
-        ax2.get_xaxis().set_ticks([])
-        ax2.get_yaxis().set_ticklabels([])
-
-    if disp_legend:
-        ax.legend(loc=legend_loc, frameon=False, fontsize=font_size)
-
-    # rms_threshold text
-    if disp_thres_text:
-        ymin, ymax = ax.get_ylim()
-        yoff = (ymax - ymin) * 0.1
-        if (rms_threshold - ymin) > 0.5 * (ymax - ymin):
-            yoff *= -1.
-        ax.annotate('Median Abs Dev * {}'.format(cutoff),
-                    xy=(xmin + (xmax-xmin)*0.05, rms_threshold + yoff ),
-                    color='k', xycoords='data', fontsize=font_size)
-    return ax
-
-
 ######################################################################################################
 def main(iargs=None):
-    plt.switch_backend('Agg')  # Backend setting
 
+    # read inputs
     inps = cmd_line_parse(iargs)
     if inps.template_file:
         inps = read_template2inps(inps.template_file, inps)
 
     # calculate timeseries of residual Root Mean Square
-    (inps.rms_list,
-     inps.date_list,
-     inps.rms_file) = ut.get_residual_rms(inps.timeseries_file,
-                                          mask_file=inps.maskFile,
-                                          ramp_type=inps.deramp)
+    inps.rms_list, inps.date_list, inps.rms_file = ut.get_residual_rms(
+        inps.timeseries_file,
+        mask_file=inps.maskFile,
+        ramp_type=inps.deramp,
+    )
 
+    # analyze RMS: generate reference/exclude_date.txt files
     analyze_rms(inps.date_list, inps.rms_list, inps)
+
+    # plot RMS
+    pp.plot_timeseries_rms(
+        rms_file=inps.rms_file,
+        cutoff=inps.cutoff,
+        out_fig=os.path.splitext(inps.rms_file)[0]+'.pdf',
+        disp_fig=False,
+        fig_size=inps.fig_size,
+        tick_year_num=inps.tick_year_num,
+    )
+
     return
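
The restored ut.median_abs_deviation_threshold() call is, under a Gaussian assumption, equivalent to the numpy expression kept in the comment above. A tiny worked example with made-up RMS values (illustrative only):

    import numpy as np

    rms_list = np.array([0.010, 0.012, 0.011, 0.013, 0.080])   # hypothetical residual RMS [m]
    cutoff = 3.
    rms_threshold = np.median(rms_list) / 0.6745 * cutoff      # 0.012 / 0.6745 * 3 ~= 0.053
    exclude = rms_list > rms_threshold                          # only the 0.080 epoch exceeds it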
 
 


=====================================
mintpy/unwrap_error_phase_closure.py
=====================================
@@ -11,7 +11,7 @@ import sys
 import time
 import h5py
 import numpy as np
-from matplotlib import pyplot as plt, ticker
+from matplotlib import pyplot as plt
 
 try:
     from cvxopt import matrix
@@ -306,69 +306,11 @@ def calc_num_triplet_with_nonzero_integer_ambiguity(ifgram_file, mask_file=None,
     writefile.write(num_nonzero_closure, out_file, meta)
 
     # plot
-    plot_num_triplet_with_nonzero_integer_ambiguity(out_file)
+    pp.plot_num_triplet_with_nonzero_integer_ambiguity(out_file)
 
     return out_file
 
 
-def plot_num_triplet_with_nonzero_integer_ambiguity(fname, display=False, font_size=12, fig_size=[9,3]):
-    """Plot the histogram for the number of triplets with non-zero integer ambiguity
-
-    Fig. 3d-e in Yunjun et al. (2019, CAGEO).
-    """
-
-    # read data
-    data, atr = readfile.read(fname)
-    vmax = int(np.nanmax(data))
-
-    # plot
-    fig, axs = plt.subplots(nrows=1, ncols=2, figsize=fig_size)
-
-    # subplot 1 - map
-    ax = axs[0]
-    im = ax.imshow(data, cmap='RdBu_r', interpolation='nearest')
-
-    # reference point
-    if all(key in atr.keys() for key in ['REF_Y','REF_X']):
-        ax.plot(int(atr['REF_X']), int(atr['REF_Y']), 's', color='white', ms=3)
-
-    # format
-    pp.auto_flip_direction(atr, ax=ax, print_msg=False)
-    fig.colorbar(im, ax=ax)
-    ax.set_title(r'$T_{int}$', fontsize=font_size)
-
-    # subplot 2 - histogram
-    ax = axs[1]
-    ax.hist(data[~np.isnan(data)].flatten(), range=(0, vmax), log=True, bins=vmax)
-
-    # axis format
-    ax.set_xlabel(r'# of triplets w non-zero int ambiguity $T_{int}$', fontsize=font_size)
-    ax.set_ylabel('# of pixels', fontsize=font_size)
-    ax.xaxis.set_minor_locator(ticker.AutoMinorLocator())
-    ax.yaxis.set_major_locator(ticker.LogLocator(base=10.0, numticks=15))
-    ax.yaxis.set_minor_locator(ticker.LogLocator(base=10.0, numticks=15,
-                                                 subs=(0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9)))
-    ax.yaxis.set_minor_formatter(ticker.NullFormatter())
-
-    for ax in axs:
-        ax.tick_params(which='both', direction='in', labelsize=font_size,
-                       bottom=True, top=True, left=True, right=True)
-
-    fig.tight_layout()
-
-    # output
-    out_fig = '{}.png'.format(os.path.splitext(fname)[0])
-    print('plot and save figure to file', out_fig)
-    fig.savefig(out_fig, bbox_inches='tight', transparent=True, dpi=300)
-
-    if display:
-        plt.show()
-    else:
-        plt.close(fig)
-
-    return
-
-
 ##########################################################################################
 def get_common_region_int_ambiguity(ifgram_file, cc_mask_file, water_mask_file=None, num_sample=100,
                                     dsNameIn='unwrapPhase', cc_min_area=2.5e3):
@@ -584,7 +526,7 @@ def main(iargs=None):
         # for debug
         debug_mode = False
         if debug_mode:
-            plot_num_triplet_with_nonzero_integer_ambiguity(out_file)
+            pp.plot_num_triplet_with_nonzero_integer_ambiguity(out_file)
 
     m, s = divmod(time.time()-start_time, 60)
     print('time used: {:02.0f} mins {:02.1f} secs\nDone.'.format(m, s))


=====================================
mintpy/utils/plot.py
=====================================
@@ -17,6 +17,7 @@ import numpy as np
 import matplotlib as mpl
 from matplotlib import pyplot as plt, ticker, dates as mdates
 from mpl_toolkits.axes_grid1 import make_axes_locatable
+from scipy import stats
 
 from mintpy.objects.coord import coordinate
 from mintpy.objects.colors import ColormapExt
@@ -788,6 +789,7 @@ def plot_perp_baseline_hist(ax, dateList, pbaseList, p_dict={}, dateList_drop=[]
 
     return ax
 
+
 def plot_rotate_diag_coherence_matrix(ax, coh_list, date12_list, date12_list_drop=[],
                                       rotate_deg=-45., cmap='RdBu', disp_half=False, disp_min=0.2):
     """Plot Rotated Coherence Matrix, suitable for Sentinel-1 data with sequential network"""
@@ -927,9 +929,173 @@ def plot_coherence_matrix(ax, date12List, cohList, date12List_drop=[], p_dict={}
     return ax, coh_mat, im
 
 
+def plot_num_triplet_with_nonzero_integer_ambiguity(fname, display=False, font_size=12, fig_size=[9,3]):
+    """Plot the histogram for the number of triplets with non-zero integer ambiguity.
+
+    Fig. 3d-e in Yunjun et al. (2019, CAGEO).
+
+    Parameters: fname - str, path to the numTriNonzeroIntAmbiguity.h5 file.
+    """
+
+    # read data
+    data, atr = readfile.read(fname)
+    vmax = int(np.nanmax(data))
+
+    # plot
+    fig, axs = plt.subplots(nrows=1, ncols=2, figsize=fig_size)
+
+    # subplot 1 - map
+    ax = axs[0]
+    im = ax.imshow(data, cmap='RdBu_r', interpolation='nearest')
+
+    # reference point
+    if all(key in atr.keys() for key in ['REF_Y','REF_X']):
+        ax.plot(int(atr['REF_X']), int(atr['REF_Y']), 's', color='white', ms=3)
+
+    # format
+    auto_flip_direction(atr, ax=ax, print_msg=False)
+    fig.colorbar(im, ax=ax)
+    ax.set_title(r'$T_{int}$', fontsize=font_size)
+
+    # subplot 2 - histogram
+    ax = axs[1]
+    ax.hist(data[~np.isnan(data)].flatten(), range=(0, vmax), log=True, bins=vmax)
+
+    # axis format
+    ax.set_xlabel(r'# of triplets w non-zero int ambiguity $T_{int}$', fontsize=font_size)
+    ax.set_ylabel('# of pixels', fontsize=font_size)
+    ax.xaxis.set_minor_locator(ticker.AutoMinorLocator())
+    ax.yaxis.set_major_locator(ticker.LogLocator(base=10.0, numticks=15))
+    ax.yaxis.set_minor_locator(ticker.LogLocator(base=10.0, numticks=15,
+                                                 subs=(0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9)))
+    ax.yaxis.set_minor_formatter(ticker.NullFormatter())
+
+    for ax in axs:
+        ax.tick_params(which='both', direction='in', labelsize=font_size,
+                       bottom=True, top=True, left=True, right=True)
+
+    fig.tight_layout()
+
+    # output
+    out_fig = '{}.png'.format(os.path.splitext(fname)[0])
+    print('plot and save figure to file', out_fig)
+    fig.savefig(out_fig, bbox_inches='tight', transparent=True, dpi=300)
+
+    if display:
+        plt.show()
+    else:
+        plt.close(fig)
+
+    return
+
+
+def plot_timeseries_rms(rms_file, cutoff=3, out_fig=None, disp_fig=True,
+                        fig_size=[5, 3], font_size=12, tick_year_num=1, legend_loc='best',
+                        disp_legend=True, disp_side_plot=True, disp_thres_text=False,
+                        ylabel='Residual phase RMS [mm]'):
+    """ Bar plot for the phase residual RMS time series.
+
+    Parameters: rms_file - str, path to the time series RMS text file
+                           Generated by utils1.get_residual_rms().
+                cutoff   - float, cutoff value of MAD outlier detection
+                fig_size - list of 2 float, figure size in inch
+    """
+
+    # read date / RMS info from text file
+    fc = np.loadtxt(rms_file, dtype=bytes).astype(str)
+    rms_list = fc[:, 1].astype(np.float32) * 1000.
+    date_list = list(fc[:, 0])
+
+    dates, datevector = ptime.date_list2vector(date_list)
+    dates = np.array(dates)
+    try:
+        bar_width = min(ut0.most_common(np.diff(dates).tolist(), k=2))*3/4
+    except:
+        bar_width = np.min(np.diff(dates).tolist())*3/4
+    rms = np.array(rms_list)
+
+    # Plot all dates
+    fig, ax = plt.subplots(figsize=fig_size)
+    ax.bar(dates, rms, bar_width.days, color='C0')
+
+    # Plot reference date
+    ref_idx = np.argmin(rms)
+    ax.bar(dates[ref_idx], rms[ref_idx], bar_width.days, color='C1', label='Reference date')
+
+    # Plot exclude dates
+    rms_threshold = ut0.median_abs_deviation_threshold(rms, center=0., cutoff=cutoff)
+    ex_idx = rms > rms_threshold
+    if np.any(ex_idx==True):
+        ax.bar(dates[ex_idx], rms[ex_idx], bar_width.days, color='darkgray', label='Exclude date')
+
+    # Plot rms_threshold line
+    (ax, xmin, xmax) = auto_adjust_xaxis_date(ax, datevector, font_size, every_year=tick_year_num)
+    ax.plot(np.array([xmin, xmax]), np.array([rms_threshold, rms_threshold]), '--k',
+            label='Median Abs Dev * {}'.format(cutoff))
+
+    # axis format
+    ax = auto_adjust_yaxis(ax, np.append(rms, rms_threshold), font_size, ymin=0.0)
+    #ax.set_xlabel('Time [years]', fontsize=font_size)
+    ax.set_ylabel(ylabel, fontsize=font_size)
+    ax.tick_params(which='both', direction='in', labelsize=font_size,
+                   bottom=True, top=True, left=True, right=True)
+
+    # 2nd axes for circles
+    if disp_side_plot:
+        divider = make_axes_locatable(ax)
+        ax2 = divider.append_axes("right", "10%", pad="2%")
+        ax2.plot(np.ones(rms.shape, np.float32) * 0.5, rms, 'o', mfc='none', color='C0')
+        ax2.plot(np.ones(rms.shape, np.float32)[ref_idx] * 0.5, rms[ref_idx], 'o', mfc='none', color='C1')
+        if np.any(ex_idx==True):
+            ax2.plot(np.ones(rms.shape, np.float32)[ex_idx] * 0.5, rms[ex_idx], 'o', mfc='none', color='darkgray')
+        ax2.plot(np.array([0, 1]), np.array([rms_threshold, rms_threshold]), '--k')
+
+        ax2.set_ylim(ax.get_ylim())
+        ax2.set_xlim([0, 1])
+        ax2.tick_params(which='both', direction='in', labelsize=font_size,
+                        bottom=True, top=True, left=True, right=True)
+        ax2.get_xaxis().set_ticks([])
+        ax2.get_yaxis().set_ticklabels([])
+
+    if disp_legend:
+        ax.legend(loc=legend_loc, frameon=False, fontsize=font_size)
+
+    # rms_threshold text
+    if disp_thres_text:
+        ymin, ymax = ax.get_ylim()
+        yoff = (ymax - ymin) * 0.1
+        if (rms_threshold - ymin) > 0.5 * (ymax - ymin):
+            yoff *= -1.
+        ax.annotate('Median Abs Dev * {}'.format(cutoff),
+                    xy=(xmin + (xmax-xmin)*0.05, rms_threshold + yoff ),
+                    color='k', xycoords='data', fontsize=font_size)
+
+    # figure output
+    if out_fig:
+        print('save figure to file:', out_fig)
+        fig.savefig(out_fig, bbox_inches='tight', transparent=True)
+
+    if disp_fig:
+        plt.show()
+    else:
+        plt.close()
+
+    return
+
 
 
+###############################################  GNSS  ###############################################
+
 def plot_gps(ax, SNWE, inps, metadata=dict(), print_msg=True):
+    """Plot GNSS as scatters on top of the input matplotlib.axes.
+
+    Parameters: ax       - matplotlib.axes object
+                SNWE     - tuple of 4 float, for south, north, west and east
+                inps     - Namespace object, from view.py
+                metadata - dict, mintpy metadata
+    Returns:    ax       - matplotlib.axes object
+    """
+
     from mintpy.objects import gps
     vprint = print if print_msg else lambda *args, **kwargs: None
 
@@ -1047,6 +1213,143 @@ def plot_gps(ax, SNWE, inps, metadata=dict(), print_msg=True):
     return ax
 
 
+def plot_insar_vs_gps_scatter(vel_file, csv_file='gps_enu2los.csv', msk_file=None, ref_gps_site=None, cutoff=5,
+                              fig_size=[4, 4], xname='InSAR', vlim=None, ex_gps_sites=[], display=True):
+    """Scatter plot to compare the velocities between SAR/InSAR and GPS.
+
+    Parameters: vel_file     - str, path of InSAR LOS velocity HDF5 file..
+                csv_file     - str, path of GNSS CSV file, generated after running view.py --gps-comp
+                msk_file     - str, path of InSAR mask file.
+                ref_gps_site - str, reference GNSS site name
+                cutoff       - float, threshold in terms of med abs dev (MAD) for outlier detection
+                xname        - str, xaxis label
+                vlim         - list of 2 float, display value range in the unit of cm/yr
+                               Default is None to grab from data
+                               If set, the range will be used to prune the SAR and GPS observations
+                ex_gps_sites - list of str, exclude GNSS sites for analysis and plotting.
+    Returns:    sites        - list of str, GNSS site names used for comparison
+                insar_obs    - 1D np.ndarray in float32, InSAR velocity in cm/yr
+                gps_obs      - 1D np.ndarray in float32, GNSS  velocity in cm/yr
+    Example:
+        from mintpy.utils import plot as pp
+        csv_file = os.path.join(work_dir, 'geo/gps_enu2los.csv')
+        vel_file = os.path.join(work_dir, 'geo/geo_velocity.h5')
+        msk_file = os.path.join(work_dir, 'geo/geo_maskTempCoh.h5')
+        pp.plot_insar_vs_gps_scatter(vel_file, ref_gps_site='CACT', csv_file=csv_file, msk_file=msk_file, vlim=[-2.5, 2])
+    """
+
+    disp_unit = 'cm/yr'
+    unit_fac = 100.
+
+    # read GPS velocity from CSV file (generated by gps.get_gps_los_obs())
+    col_names = ['Site', 'Lon', 'Lat', 'Displacement', 'Velocity']
+    num_col = len(col_names)
+    col_types = ['U10'] + ['f8'] * (num_col - 1)
+
+    print('read GPS velocity from file: {}'.format(csv_file))
+    fc = np.genfromtxt(csv_file, dtype=col_types, delimiter=',', names=True)
+    sites = fc['Site']
+    lats = fc['Lat']
+    lons = fc['Lon']
+    gps_obs = fc[col_names[-1]] * unit_fac
+
+    if ex_gps_sites:
+        ex_flag = np.array([x in ex_gps_sites for x in sites], dtype=np.bool_)
+        if np.sum(ex_flag) > 0:
+            sites = sites[~ex_flag]
+            lats = lats[~ex_flag]
+            lons = lons[~ex_flag]
+            gps_obs = gps_obs[~ex_flag]
+
+    # read InSAR velocity
+    print('read InSAR velocity from file: {}'.format(vel_file))
+    atr = readfile.read_attribute(vel_file)
+    length, width = int(atr['LENGTH']), int(atr['WIDTH'])
+    ys, xs = coordinate(atr).geo2radar(lats, lons)[:2]
+
+    msk = readfile.read(msk_file)[0] if msk_file else np.ones((length, width), dtype=np.bool_)
+
+    num_site = sites.size
+    insar_obs = np.zeros(num_site, dtype=np.float32) * np.nan
+    prog_bar = ptime.progressBar(maxValue=num_site)
+    for i in range(num_site):
+        x, y = xs[i], ys[i]
+        if (0 <= x < width) and (0 <= y < length) and msk[y, x]:
+            box = (x, y, x+1, y+1)
+            insar_obs[i] = readfile.read(vel_file, datasetName='velocity', box=box)[0] * unit_fac
+        prog_bar.update(i+1, suffix='{}/{} {}'.format(i+1, num_site, sites[i]))
+    prog_bar.close()
+
+    off_med = np.nanmedian(insar_obs - gps_obs)
+    print(f'median offset between InSAR and GPS [before common referencing]: {off_med:.2f} cm/year')
+
+    # reference site
+    if ref_gps_site:
+        print(f'referencing both InSAR and GPS data to site: {ref_gps_site}')
+        ref_ind = sites.tolist().index(ref_gps_site)
+        gps_obs -= gps_obs[ref_ind]
+        insar_obs -= insar_obs[ref_ind]
+
+    # remove NaN value
+    print('removing sites with NaN values in GPS or {}'.format(xname))
+    flag = np.multiply(~np.isnan(insar_obs), ~np.isnan(gps_obs))
+    if vlim is not None:
+        print('pruning sites with value range: {} {}'.format(vlim, disp_unit))
+        flag *= gps_obs >= vlim[0]
+        flag *= gps_obs <= vlim[1]
+        flag *= insar_obs >= vlim[0]
+        flag *= insar_obs <= vlim[1]
+
+    gps_obs = gps_obs[flag]
+    insar_obs = insar_obs[flag]
+    sites = sites[flag]
+
+    # stats
+    print('GPS   min/max: {:.2f} / {:.2f}'.format(np.nanmin(gps_obs), np.nanmax(gps_obs)))
+    print('InSAR min/max: {:.2f} / {:.2f}'.format(np.nanmin(insar_obs), np.nanmax(insar_obs)))
+
+    rmse = np.sqrt(np.sum((insar_obs - gps_obs)**2) / (gps_obs.size - 1))
+    r2 = stats.linregress(insar_obs, gps_obs)[2]
+    print('RMSE = {:.2f} {}'.format(rmse, disp_unit))
+    print('R^2 = {:.2f}'.format(r2))
+
+    # preliminary outlier detection
+    diff_mad = ut0.median_abs_deviation(abs(insar_obs - gps_obs), center=0)
+    print(f'Preliminary outliers detection: abs(InSAR - GNSS) > med abs dev ({diff_mad:.2f}) * {cutoff}')
+    print('Site:  InSAR  GNSS')
+    for site_name, insar_val, gps_val in zip(sites, insar_obs, gps_obs):
+        if abs(insar_val - gps_val) > diff_mad * cutoff:
+            print(f'{site_name:s}: {insar_val:5.1f}, {gps_val:5.1f}  {disp_unit}')
+
+    # plot
+    if display:
+        plt.rcParams.update({'font.size': 12})
+        if vlim is None:
+            vlim = [np.min(insar_obs), np.max(insar_obs)]
+            vbuffer = (vlim[1] - vlim[0]) * 0.2
+            vlim = [vlim[0] - vbuffer, vlim[1] + vbuffer]
+
+        fig, ax = plt.subplots(figsize=fig_size)
+        ax.plot((vlim[0], vlim[1]), (vlim[0], vlim[1]), 'k--')
+        ax.plot(insar_obs, gps_obs, '.', ms=15)
+
+        # axis format
+        ax.set_xlim(vlim)
+        ax.set_ylim(vlim)
+        ax.set_xlabel(f'{xname} [{disp_unit}]')
+        ax.set_ylabel(f'GNSS [{disp_unit}]')
+        ax.set_aspect('equal', 'box')
+        fig.tight_layout()
+
+        # output
+        out_fig = '{}_vs_gps_scatter.pdf'.format(xname.lower())
+        plt.savefig(out_fig, bbox_inches='tight', transparent=True, dpi=300)
+        print('save figure to file', out_fig)
+        plt.show()
+
+    return sites, insar_obs, gps_obs
+
+
 def plot_colorbar(inps, im, cax):
     # extend
     if not inps.cbar_ext:
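
Hypothetical usage of the relocated pp.plot_timeseries_rms() utility added above (the RMS text file name is illustrative only):

    from mintpy.utils import plot as pp

    pp.plot_timeseries_rms(
        rms_file='rms_timeseriesResidual_ramp.txt',   # text file from ut.get_residual_rms()
        cutoff=3,
        out_fig='rms_timeseriesResidual_ramp.pdf',
        disp_fig=False,
    )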


=====================================
mintpy/utils/readfile.py
=====================================
@@ -395,26 +395,35 @@ def read_hdf5_file(fname, datasetName=None, box=None, xstep=1, ystep=1, print_ms
                     slice_flag[date_list.index(d)] = True
 
             # read data
+            num_slice = np.sum(slice_flag)
+            inds = np.where(slice_flag)[0].tolist()
+
             if xstep * ystep == 1:
-                data = ds[:,
-                          box[1]:box[3],
-                          box[0]:box[2]][slice_flag]
+                if num_slice / slice_flag.size < 0.05:
+                    # single indexing if only a small fraction is read
+                    data = np.zeros((num_slice, ysize, xsize), dtype=ds.dtype)
+                    for i, ind in enumerate(inds):
+                        data[i] = ds[ind,
+                                     box[1]:box[3],
+                                     box[0]:box[2]]
+                else:
+                    data = ds[:,
+                              box[1]:box[3],
+                              box[0]:box[2]][slice_flag]
 
             else:
                 # sampling / nearest interplation in y/xstep
                 # use for loop to save memory
-                num_slice = np.sum(slice_flag)
                 data = np.zeros((num_slice, ysize, xsize), ds.dtype)
 
-                inds = np.where(slice_flag)[0]
-                for i in range(num_slice):
+                for i, ind in enumerate(inds):
                     # print out msg
                     if print_msg:
                         sys.stdout.write('\r' + f'reading 2D slices {i+1}/{num_slice}...')
                         sys.stdout.flush()
 
                     # read and index
-                    d2 = ds[inds[i],
+                    d2 = ds[ind,
                             box[1]:box[3],
                             box[0]:box[2]]
                     d2 = d2[int(ystep/2)::ystep,


=====================================
mintpy/version.py
=====================================
@@ -10,6 +10,7 @@ import collections
 ###########################################################################
 Tag = collections.namedtuple('Tag', 'version date')
 release_history = (
+    Tag('1.4.1', '2022-08-15'),
     Tag('1.4.0', '2022-08-04'),
     Tag('1.3.3', '2022-04-14'),
     Tag('1.3.2', '2021-11-21'),
@@ -34,19 +35,19 @@ release_history = (
 release_version = release_history[0].version
 release_date = release_history[0].date
 
-def get_version_info(version='v{}'.format(release_version), date=release_date):
+def get_version_info():
     """Grab version and date of the latest commit from a git repository"""
     # go to the repository directory
     dir_orig = os.getcwd()
     os.chdir(os.path.dirname(os.path.dirname(__file__)))
 
-    # grab git info into string
     try:
+        # grab from git cmd
         cmd = "git describe --tags"
         version = subprocess.check_output(cmd.split(), stderr=subprocess.DEVNULL)
-        version = version.decode('utf-8').strip()
+        version = version.decode('utf-8').strip()[1:]
 
-        #if there are new commits after the latest release
+        # if there are new commits after the latest release
         if '-' in version:
             version, num_commit = version.split('-')[:2]
             version += '-{}'.format(num_commit)
@@ -54,8 +55,11 @@ def get_version_info(version='v{}'.format(release_version), date=release_date):
         cmd = "git log -1 --date=short --format=%cd"
         date = subprocess.check_output(cmd.split(), stderr=subprocess.DEVNULL)
         date = date.decode('utf-8').strip()
+
     except:
-        pass
+        # use the latest release version/date
+        version = release_version
+        date = release_date
 
     # go back to the original directory
     os.chdir(dir_orig)
@@ -64,9 +68,9 @@ def get_version_info(version='v{}'.format(release_version), date=release_date):
 
 ###########################################################################
 
-version_num, version_date = get_version_info()
+version, version_date = get_version_info()
 version_description = """MintPy version {v}, date {d}""".format(
-    v=version_num,
+    v=version,
     d=version_date,
 )
 
@@ -87,7 +91,7 @@ ___________________________________________________________
    Miami InSAR Time-series software in Python    \______/ 
           MintPy {v}, {d}
 ___________________________________________________________
-""".format(v=version_num, d=version_date)
+""".format(v=version, d=version_date)
 
 website = 'https://github.com/insarlab/MintPy'
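
For reference, a stripped-down sketch of what the revised get_version_info() does with the "git describe --tags" output (the fallback value here is hypothetical):

    import subprocess

    def describe_version(fallback='1.4.1'):
        try:
            out = subprocess.check_output(['git', 'describe', '--tags'],
                                          stderr=subprocess.DEVNULL)
            out = out.decode('utf-8').strip()[1:]    # drop the leading 'v', e.g. 'v1.4.1-5-g2aede91'
            if '-' in out:                           # commits exist after the latest tag
                tag, num_commit = out.split('-')[:2]
                out = f'{tag}-{num_commit}'          # -> '1.4.1-5'
            return out
        except Exception:
            return fallback                          # fall back to the hard-coded release version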
 


=====================================
mintpy/view.py
=====================================
@@ -407,11 +407,8 @@ def update_data_with_plot_inps(data, metadata, inps):
                         inps.ref_yx[1] + 1, inps.ref_yx[0] + 1]
 
         # update ref_y/x to subset
-        try:
-            ref_y = inps.ref_yx[0] - inps.pix_box[1]
-            ref_x = inps.ref_yx[1] - inps.pix_box[0]
-        except:
-            pass
+        ref_y = inps.ref_yx[0] - inps.pix_box[1]
+        ref_x = inps.ref_yx[1] - inps.pix_box[0]
 
         # update ref_y/x for multilooking
         if inps.multilook_num > 1:
@@ -567,8 +564,8 @@ def plot_slice(ax, data, metadata, inps=None):
             y, x = coord.geo2radar(ref_site_lalo[0], ref_site_lalo[1])[0:2]
             ref_data = data[y - inps.pix_box[1], x - inps.pix_box[0]]
             data -= ref_data
-            vprint(('referencing InSAR data to the pixel nearest to '
-                    f'GPS station: {inps.ref_gps_site} at {ref_site_lalo} '
+            vprint(('referencing InSAR data to the pixel nearest to GNSS station: '
+                    f'{inps.ref_gps_site} at [{ref_site_lalo[0]:.6f}, {ref_site_lalo[1]:.6f}] '
                     f'by substrating {ref_data:.3f} {inps.disp_unit}'))
             # do not show the original InSAR reference point
             inps.disp_ref_pixel = False
@@ -1107,6 +1104,10 @@ def read_data4figure(i_start, i_end, inps, metadata):
                               xstep=inps.multilook_num,
                               ystep=inps.multilook_num,
                               print_msg=False)[0]
+            # reference pixel info in unwrapPhase
+            if inps.dset[i].startswith('unwrapPhase') and inps.file_ref_yx:
+                ref_y, ref_x = inps.file_ref_yx
+                d[d!=0] -= d[ref_y, ref_x]
             data[i - i_start, :, :] = d
             prog_bar.update(i - i_start + 1, suffix=inps.dset[i].split('/')[-1])
         prog_bar.close()
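
The new unwrapPhase re-referencing above subtracts the reference-pixel value from non-zero pixels only, so the zero (no-data) pixels stay zero. A tiny numpy illustration (values are made up):

    import numpy as np

    d = np.array([[0., 2.],
                  [4., 6.]], dtype=np.float32)   # 0 marks no-data pixels
    ref_y, ref_x = 1, 0
    d[d != 0] -= d[ref_y, ref_x]                 # re-reference, leaving the zeros untouched
    # d is now [[0., -2.], [0., 2.]]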


=====================================
requirements.txt
=====================================
@@ -18,11 +18,9 @@ numpy
 pyaps3>=0.3
 pykml>=0.2
 pyproj
+pyresample
 pysolid
 scikit-image
 scipy
-# for ARIA, FRInGE, HyP3, GMTSAR
+# for ISCE, ARIA, FRInGE, HyP3, GMTSAR
 # gdal>=3
-# for pyresample
-pyresample
-openmp


=====================================
setup.py
=====================================
@@ -5,26 +5,20 @@
 # Author: Zhang Yunjun, Nov 2020                           #
 ############################################################
 
-
+import os
+import sys
 # Always prefer setuptools over distutils
 from setuptools import setup, find_packages
 
+# Grab version and description from version.py
+# link: https://stackoverflow.com/questions/53648900
+sys.path.append(os.path.dirname(__file__))
+from mintpy.version import version, description
 
-# Grab from README file: long_description
+# Grab long_description from README.md
 with open("docs/README.md", "r") as f:
     long_description = f.read()
 
-# Grab from version.py file: version and description
-with open("mintpy/version.py", "r") as f:
-    lines = f.readlines()
-    # version
-    line = [line for line in lines if line.strip().startswith("Tag(")][0].strip()
-    version = line.replace("'",'"').split('"')[1]
-    # description
-    line = [line for line in lines if line.startswith("description")][0].strip()
-    description = line.replace("'",'"').split('"')[1]
-
-
 setup(
     name="mintpy",
     version=version,
@@ -70,11 +64,10 @@ setup(
         "pyaps3>=0.3",
         "pykml>=0.2",
         "pyproj",
+        "pyresample",  # pip installed version does not work
         "setuptools",
         "scikit-image",
         "scipy",
-        "pyresample",
-        # "openmp",
     ],
     extras_require={
         "cli": ["argcomplete"],



View it on GitLab: https://salsa.debian.org/debian-gis-team/mintpy/-/commit/2aede91191d1b5c6837d2d9eb14aa86c9b83853a


