diff -pruN 1.3.3-2/.circleci/config.yml 1.4.0-1/.circleci/config.yml
--- 1.3.3-2/.circleci/config.yml	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/.circleci/config.yml	2022-08-04 20:01:49.000000000 +0000
@@ -38,7 +38,7 @@ jobs:
             export PATH=${CONDA_PREFIX}/bin:${PATH}
             # install dependencies and source code
             source activate root
-            mamba install --verbose --yes gdal">=3" --file ${MINTPY_HOME}/docs/requirements.txt
+            mamba install --verbose --yes gdal">=3" --file ${MINTPY_HOME}/requirements.txt
             python -m pip install ${MINTPY_HOME}
             # test installation
             smallbaselineApp.py -v
@@ -49,35 +49,36 @@ jobs:
           name: Testing MintPy Python Modules
           command: |
             export PATH=${CONDA_PREFIX}/bin:${PATH}
-            ${MINTPY_HOME}/tests/test_asc_desc2horz_vert.py
+            ${MINTPY_HOME}/tests/asc_desc2horz_vert.py
+            ${MINTPY_HOME}/tests/objects/ionex.py
 
       - run:
           name: Testing MintPy on Example Dataset 1/4 - FernandinaSenDT128 (ISCE/topsStack)
           command: |
             export PATH=${CONDA_PREFIX}/bin:${PATH}
             mkdir -p ${HOME}/data
-            ${MINTPY_HOME}/tests/test_smallbaselineApp.py --dir ${HOME}/data --dset FernandinaSenDT128
+            ${MINTPY_HOME}/tests/smallbaselineApp.py --dir ${HOME}/data --dset FernandinaSenDT128
 
       - run:
           name: Testing MintPy on Example Dataset 2/4 - SanFranSenDT42 (ARIA)
           command: |
             export PATH=${CONDA_PREFIX}/bin:${PATH}
             mkdir -p ${HOME}/data
-            ${MINTPY_HOME}/tests/test_smallbaselineApp.py --dir ${HOME}/data --dset SanFranSenDT42
+            ${MINTPY_HOME}/tests/smallbaselineApp.py --dir ${HOME}/data --dset SanFranSenDT42
 
       - run:
           name: Testing MintPy on Example Dataset 3/4 - WellsEnvD2T399 (Gamma)
           command: |
             export PATH=${CONDA_PREFIX}/bin:${PATH}
             mkdir -p ${HOME}/data
-            ${MINTPY_HOME}/tests/test_smallbaselineApp.py --dir ${HOME}/data --dset WellsEnvD2T399
+            ${MINTPY_HOME}/tests/smallbaselineApp.py --dir ${HOME}/data --dset WellsEnvD2T399
 
       - run:
           name: Testing MintPy on Example Dataset 4/4 - WCapeSenAT29 (SNAP)
           command: |
             export PATH=${CONDA_PREFIX}/bin:${PATH}
             mkdir -p ${HOME}/data
-            ${MINTPY_HOME}/tests/test_smallbaselineApp.py --dir ${HOME}/data --dset WCapeSenAT29
+            ${MINTPY_HOME}/tests/smallbaselineApp.py --dir ${HOME}/data --dset WCapeSenAT29
 
 workflows:
   version: 2
diff -pruN 1.3.3-2/debian/changelog 1.4.0-1/debian/changelog
--- 1.3.3-2/debian/changelog	2022-07-10 16:07:59.000000000 +0000
+++ 1.4.0-1/debian/changelog	2022-08-05 14:44:15.000000000 +0000
@@ -1,3 +1,19 @@
+mintpy (1.4.0-1) unstable; urgency=medium
+
+  * New upstream release.
+  * Update d/copyright.
+  * debian/control:
+    - update dependencies.
+  * debian/patches:
+    - drop 0001-fix-intepreter.patch and 0002-Spelling.patch,
+      applied upstream
+    - refresh and renumber remaining patches.
+  * Update manpages.
+  * Update scripts installation.
+  * Update the test program name.
+
+ -- Antonio Valentino <antonio.valentino@tiscali.it>  Fri, 05 Aug 2022 14:44:15 +0000
+
 mintpy (1.3.3-2) unstable; urgency=medium
 
   * Team upload.
diff -pruN 1.3.3-2/debian/control 1.4.0-1/debian/control
--- 1.3.3-2/debian/control	2022-07-01 07:14:46.000000000 +0000
+++ 1.4.0-1/debian/control	2022-08-05 14:44:15.000000000 +0000
@@ -10,6 +10,7 @@ Build-Depends: debhelper-compat (= 12),
                mkdocs,
                pybuild-plugin-pyproject,
                python3-all,
+               python3-argcomplete,
                python3-cartopy,
                python3-cvxopt,
                python3-dask,
@@ -57,6 +58,7 @@ Section: utils
 Depends: python3-mintpy (= ${source:Version}),
          ${python3:Depends},
          ${misc:Depends}
+Recommends: python3-argcomplete
 Description: Miami INsar Time-series software in PYthon -- scripts
  The Miami INsar Time-series software in PYthon (MintPy as /mInt paI/)
  is an open-source package for Interferometric Synthetic Aperture Radar
diff -pruN 1.3.3-2/debian/copyright 1.4.0-1/debian/copyright
--- 1.3.3-2/debian/copyright	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/copyright	2022-08-05 14:44:15.000000000 +0000
@@ -5,11 +5,11 @@ Source: https://github.com/insarlab/Mint
 
 Files: *
 Copyright: 2013,2016, Zhang Yunjun and Heresh Fattahi
-License: GPL-3
+License: GPL-3+
 
 Files: debian/*
 Copyright: 2022, Antonio Valentino <antonio.valentino@tiscali.it>
-License: GPL-3
+License: GPL-3+
 
 Files: mintpy/data/dygraph-combined.js
 Copyright: 2011, Dan Vanderkam <danvdk@gmail.com>
@@ -27,7 +27,11 @@ Files: mintpy/data/colormaps/wiki-scotla
 Copyright: 2009, Eric Gaba
 License: CC-BY-SA-3.0
 
-License: GPL-3
+Files: scripts/jupyter_notebook_config.py
+Copyright: Jupyter Development Team
+License: BSD-3-clause
+
+License: GPL-3+
  This program is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation, either version 3 of the License, or
@@ -452,3 +456,30 @@ License: CC-BY-SA-3.0
  does not form part of the License.
  .
  Creative Commons may be contacted at https://creativecommons.org/.
+
+License: BSD-3-clause
+ All rights reserved.
+ .
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+ 1. Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+ 2. Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in the
+    documentation and/or other materials provided with the distribution.
+ 3. Neither the name of the University nor the names of its contributors
+    may be used to endorse or promote products derived from this software
+    without specific prior written permission.
+ .
+ THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
+ ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ ARE DISCLAIMED.  IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ SUCH DAMAGE.
diff -pruN 1.3.3-2/debian/man/mintpy.1 1.4.0-1/debian/man/mintpy.1
--- 1.3.3-2/debian/man/mintpy.1	1970-01-01 00:00:00.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy.1	2022-08-05 14:44:15.000000000 +0000
@@ -0,0 +1,214 @@
+.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.1.
+.TH MINTPY "1" "August 2022" "mintpy v1.4.0" "User Commands"
+.SH NAME
+mintpy \- The Miami INsar Time\-series software in PYthon
+.SH DESCRIPTION
+usage: mintpy [\-h] [\-\-version]  ...
+.PP
+Command line interface for MintPy. The Miami INsar Time\-series software in
+PYthon (MintPy as \fI\,/mint\/\fP pai/) is an open\-source package for Interferometric
+Synthetic Aperture Radar (InSAR) time series analysis. It reads the stack of
+interferograms (coregistered and unwrapped) in ISCE, ARIA, FRInGE, HyP3,
+GMTSAR, SNAP, GAMMA or ROI_PAC format, and produces three dimensional (2D in
+space and 1D in time) ground surface displacement in line\-of\-sight direction.
+It includes a routine time series analysis (`smallbaselineApp.py`) and some
+independent toolboxes. This is research code provided to you "as is" with NO
+WARRANTIES OF CORRECTNESS. Use at your own risk.
+.SS "options:"
+.TP
+\fB\-h\fR, \fB\-\-help\fR
+show this help message and exit
+.TP
+\fB\-\-version\fR
+show program's version number and exit
+.PP
+.SS "sub\-commands:"
+.TP
+smallbaselineApp
+Routine Time Series Analysis for Small Baseline InSAR Stack
+.TP
+asc_desc2horz_vert
+Project Asc and Desc LOS displacement to Horizontal and Vertical direction
+.TP
+geocode
+Resample radar\-coded files into geo\-coordinates or vice versa
+.TP
+ifgram_inversion
+Invert network of interferograms into time\-series.
+.TP
+mask
+Mask file
+.TP
+modify_network
+Modify the network of interferograms
+.TP
+multilook
+Multilook the input file
+.TP
+reference_date
+Change reference date of timeseries.
+.TP
+reference_point
+Reference to the same pixel in space.
+.TP
+spatial_average
+Calculate average in space
+.TP
+spatial_filter
+Spatial filtering of 2D image.
+.TP
+temporal_average
+Calculate temporal average (stacking) of multitemporal datasets
+.TP
+temporal_derivative
+Calculate the temporal derivative of time\-series.
+.TP
+temporal_filter
+Smoothing timeseries in time domain with a moving filter
+.TP
+timeseries_rms
+Calculate Root Mean Square (RMS) of deramped residual phase time\-series.
+.TP
+timeseries2velocity
+Estimate velocity / time functions from time\-series.
+.TP
+add
+Generate the sum of multiple input files.
+.TP
+diff
+Generate the difference of two input files.
+.TP
+image_math
+Basic Mathematical Operation of file
+.TP
+image_stitch
+Stitch/mosaic multiple geocoded datasets into one.
+.TP
+subset
+Generate a subset from file/dataset
+.TP
+closure_phase_bias
+Phase non\-closure related biases correction
+.TP
+dem_error
+DEM Error (Topographic Residual) Correction
+.TP
+iono_tec
+Calculate ionospheric ramps using Global Iono Maps from GNSS\-based TEC products.
+.TP
+local_oscilator_drift
+Local Oscillator Drift (LOD) correction of Envisat
+.TP
+remove_ramp
+Remove 2D ramp(s) from the input file.
+.TP
+s1ab_range_bias
+Sentinel\-1 A/B range bias correction
+.TP
+solid_earth_tides
+Solid Earth tides (SET) correction via PySolid
+.TP
+tropo_gacos
+Tropospheric correction using GACOS (http://www.gacos.net) delays
+.TP
+tropo_phase_elevation
+Correct Topo\-correlated Stratified tropospheric delay
+.TP
+tropo_pyaps3
+Tropospheric correction using weather models via PyAPS
+.TP
+unwrap_error_bridging
+Unwrapping Error Correction with Bridging
+.TP
+unwrap_error_phase_closure
+Unwrapping Error Correction based on Phase Closure
+.TP
+dem_gsi
+Prepare DEM from GSI (Japan) DEHM grib files.
+.TP
+generate_mask
+Generate mask file from input file
+.TP
+lookup_geo2radar
+Convert lookup table from geo\-coord (GAMMA, ROI_PAC) into radar\-coord (ISCE)
+.TP
+prep_aria
+Prepare ARIA processed products for MintPy.
+.TP
+prep_cosicorr
+Prepare attributes file for COSI\-Corr pixel offset product.
+.TP
+prep_fringe
+Prepare FRInGE products for MintPy
+.TP
+prep_gamma
+Prepare attributes file for Gamma product.
+.TP
+prep_gmtsar
+Prepare GMTSAR metadata files.
+.TP
+prep_hyp3
+Prepare attributes file for HyP3 InSAR product.
+.TP
+prep_isce
+Prepare ISCE metadata files.
+.TP
+prep_roipac
+Prepare attributes file for ROI_PAC products.
+.TP
+prep_snap
+Prepare attributes file for SNAP products.
+.TP
+load_data
+Load stacks of interferograms to HDF5 files
+.TP
+load_gbis
+Load GBIS inversion result to HDF5 format.
+.TP
+remove_hdf5_dataset
+Remove an existing dataset from HDF5 file
+.TP
+save_gbis
+Convert MintPy product to GBIS .mat format.
+.TP
+save_gdal
+Generate GDAL raster from MintPy h5 file.
+.TP
+save_gmt
+Export geocoded file to GMT grd file
+.TP
+save_hdfeos5
+Convert MintPy timeseries product into HDF\-EOS5 format
+.TP
+save_kite
+Generate KITE (https://github.com/pyrocko/kite) npz and yaml from MintPy HDF5 file.
+.TP
+save_kmz_timeseries
+Generate Google Earth KMZ file for time\-series file.
+.TP
+save_kmz
+Generate Google Earth KMZ file (overlay / placemarks for files in geo / radar coordinates).
+.TP
+save_qgis
+Convert to QGIS compatible ps time\-series
+.TP
+save_roipac
+Convert MintPy HDF5 file to ROI_PAC format.
+.TP
+info
+Display Metadata / Structure information of ANY File
+.TP
+plot_coherence_matrix
+Plot the coherence matrix of one pixel (interactive)
+.TP
+plot_network
+Display Network of Interferograms
+.TP
+plot_transection
+Generate transect/profile along a line
+.TP
+tsview
+Interactive time\-series viewer
+.TP
+view
+Plot InSAR Product in 2D
diff -pruN 1.3.3-2/debian/man/mintpy-add.1 1.4.0-1/debian/man/mintpy-add.1
--- 1.3.3-2/debian/man/mintpy-add.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-add.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,35 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-ADD "1" "May 2022" "mintpy-add v1.3.3" "User Commands"
-.SH NAME
-mintpy-add \- Generate sum of multiple input files.
-.SH DESCRIPTION
-usage: add.py [\-h] [\-o OUTFILE] [\-\-force] file [file ...]
-.PP
-Generate sum of multiple input files.
-.SS "positional arguments:"
-.TP
-file
-files (2 or more) to be added
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-output\fR OUTFILE
-output file name
-.TP
-\fB\-\-force\fR
-Enforce the adding for the shared dates only for time\-series files
-.SS "example:"
-.TP
-add.py mask_1.h5 mask_2.h5 mask_3.h5
-\fB\-o\fR mask_all.h5
-.TP
-add.py 081008_100220.unw
-100220_110417.unw \fB\-o\fR 081008_110417.unw
-.TP
-add.py timeseries_ERA5.h5 inputs/ERA5.h5
-\fB\-o\fR timeseries.h5
-.TP
-add.py timeseriesRg.h5
-inputs/TECsub.h5  \fB\-o\fR timeseriesRg_TECsub.h5 \fB\-\-force\fR
diff -pruN 1.3.3-2/debian/man/mintpy-add_attribute.1 1.4.0-1/debian/man/mintpy-add_attribute.1
--- 1.3.3-2/debian/man/mintpy-add_attribute.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-add_attribute.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,20 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-ADD_ATTRIBUTE "1" "May 2022" "mintpy-add_attribute v1.3.3" "User Commands"
-.SH NAME
-mintpy-add_attribute \- Add/Update attributes to file.
-.SH DESCRIPTION
-usage: add_attribute.py file metadata_file
-.IP
-add_attribute.py file key1=value1 [key2=value2 [...]]
-.PP
-Add/Update attributes to file.
-.SS "Example:"
-.TP
-add_attribute.py timeseries.h5
-unavco_attribute.txt
-.TP
-add_attribute.py timeseries.h5
-track=422   frame=650
-.TP
-add_attribute.py ifgramStack.h5 ref_y=None
-ref_x=None  #Use None value to delete attribute
diff -pruN 1.3.3-2/debian/man/mintpy-asc_desc2horz_vert.1 1.4.0-1/debian/man/mintpy-asc_desc2horz_vert.1
--- 1.3.3-2/debian/man/mintpy-asc_desc2horz_vert.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-asc_desc2horz_vert.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,89 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-ASC_DESC2HORZ_VERT "1" "May 2022" "mintpy-asc_desc2horz_vert v1.3.3" "User Commands"
-.SH NAME
-mintpy-asc_desc2horz_vert \- Project Asc and Desc LOS displacement to Horizontal and Vertical direction
-.SH DESCRIPTION
-usage: asc_desc2horz_vert.py [\-h] [\-d DS_NAME] [\-g GEOM_FILE GEOM_FILE]
-.TP
-[\-\-max\-ref\-yx\-diff MAX_REF_YX_DIFF]
-[\-\-az HORZ_AZ_ANGLE] [\-o HZ_FILE UP_FILE]
-[\-\-oo ONE_OUTFILE]
-file file
-.PP
-Project Asc and Desc LOS displacement to Horizontal and Vertical direction
-.SS "positional arguments:"
-.TP
-file
-Ascending and descending files
-Both files need to be geocoded in the same spatial resolution.
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-d\fR DS_NAME, \fB\-\-dset\fR DS_NAME
-dataset to use, default: 1st dataset
-.TP
-\fB\-g\fR GEOM_FILE GEOM_FILE, \fB\-\-geom\-file\fR GEOM_FILE GEOM_FILE
-Geometry files for the input data files.
-.TP
-\fB\-\-max\-ref\-yx\-diff\fR MAX_REF_YX_DIFF
-Maximum difference between REF_Y/X (derived from REF_LAT/LON) of input files (default: 3).
-.TP
-\fB\-\-az\fR HORZ_AZ_ANGLE, \fB\-\-horz\-az\-angle\fR HORZ_AZ_ANGLE
-Azimuth angle in degrees of the interested horizontal direction (default: \fB\-90\fR.0).
-Measured from the north with positive for anti\-clockwise direction.
-E.g.: \fB\-90\fR for East direction
-.TP
-0
-for North direction
-.TP
-Set to the azimuth angle of the strike\-slip fault to measure the fault\-parallel displacement.
-Note:
-a. This assumes no deformation in its perpendicular direction
-b. Near north direction can not be well resolved due to the lack of
-.TP
-diversity in viewing geometry. Check exact dilution of precision for
-each component in Wright et al. (2004, GRL)
-.TP
-\fB\-o\fR HZ_FILE UP_FILE, \fB\-\-output\fR HZ_FILE UP_FILE
-output file name for vertical and horizontal components
-.TP
-\fB\-\-oo\fR ONE_OUTFILE, \fB\-\-one\-output\fR ONE_OUTFILE
-Stack the input/output files into one HDF5 file.
-This will disable the HZ/UP_FILE output option.
-.SS "reference:"
-.IP
-Fialko, Y., Simons, M., & Agnew, D. (2001). The complete (3\-D) surface displacement
-.IP
-field in the epicentral area of the 1999 MW7.1 Hector Mine Earthquake, California,
-from space geodetic observations. Geophysical Research Letters, 28(16), 3063\-3066.
-doi:10.1029/2001GL013174
-.IP
-Wright, T. J., B. E. Parsons, and Z. Lu (2004), Toward mapping surface deformation
-.IP
-in three dimensions using InSAR, Geophysical Research Letters, 31(1), L01607,
-doi:10.1029/2003GL018827.
-.SS "example:"
-.IP
-# for data with different spatial resolution and coverage
-# use geocode.py \fB\-x\fR/y \fB\-\-bbox\fR option to make them consistent
-cd AlosAT424/mintpy
-mask.py velocity.h5 \fB\-m\fR maskTempCoh.h5
-geocode.py velocity_msk.h5 \fB\-l\fR inputs/geometryRadar.h5 \fB\-x\fR 0.00027778 \fB\-y\fR \fB\-0\fR.00027778 \fB\-\-bbox\fR 32.0 32.5 130.1 130.5
-cd AlosDT73/mintpy
-mask.py velocity.h5 \fB\-m\fR maskTempCoh.h5
-geocode.py velocity_msk.h5 \fB\-l\fR inputs/geometryRadar.h5 \fB\-x\fR 0.00027778 \fB\-y\fR \fB\-0\fR.00027778 \fB\-\-bbox\fR 32.0 32.5 130.1 130.5
-asc_desc2horz_vert.py AlosAT424/mintpy/geo_velocity_msk.h5 AlosDT73/mintpy/geo_velocity_msk.h5
-.IP
-# write horz/vert to two files
-asc_desc2horz_vert.py AlosAT424/mintpy/velocity_msk.h5 AlosDT73/mintpy/velocity_msk.h5
-asc_desc2horz_vert.py AlosAT424/mintpy/velocity_msk.h5 AlosDT73/mintpy/velocity_msk.h5  \fB\-\-azimuth\fR 16
-asc_desc2horz_vert.py AlosAT424/mintpy/velocity_msk.h5 AlosDT73/mintpy/velocity_msk.h5  \fB\-\-dset\fR step20200107
-.IP
-# write all asc/desc/horz/vert datasets into one file
-asc_desc2horz_vert.py Alos2AT131/mintpy/20171219_20190702.unw Alos2DT23/mintpy/20171211_20190819.unw \fB\-\-oo\fR Kirishima2017post.h5
-view.py Kirishima2017post.h5 \fB\-u\fR cm \fB\-\-wrap\fR \fB\-\-wrap\-range\fR \fB\-5\fR 5  #check deformation signal with multiple viewing geometries.
-.IP
-# pixel\-wise decomposition [for large area analysis]
-asc_desc2horz_vert.py asc_velocity.h5 desc_velocity.h5 \fB\-g\fR asc_geometry.h5 desc_geometry.h5
diff -pruN 1.3.3-2/debian/man/mintpy-closure_phase_bias.1 1.4.0-1/debian/man/mintpy-closure_phase_bias.1
--- 1.3.3-2/debian/man/mintpy-closure_phase_bias.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-closure_phase_bias.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,38 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-CLOSURE_PHASE_BIAS "1" "May 2022" "mintpy-closure_phase_bias v1.3.3" "User Commands"
-.SH NAME
-mintpy-tropo_gacos \- Create an indication map for closure phase bias.
-.SH DESCRIPTION
-usage: closure_phase_bias.py [\-h] [\-i IFGRAM_STACK] [\-\-nl NL]
-.TP
-[\-\-numsigma NUMSIGMA] [\-\-epi EPISILON]
-[\-\-maxMemory MAX_MEMORY] [\-o OUTDIR]
-.PP
-Create an indication map for closure phase bias.
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-i\fR IFGRAM_STACK, \fB\-\-ifgramstack\fR IFGRAM_STACK
-interferogram stack file that contains the unwrapped
-phases
-.TP
-\fB\-\-nl\fR NL
-connection level that we are correcting to (or
-consider as no bias)
-.TP
-\fB\-\-numsigma\fR NUMSIGMA
-Threshold for phase (number of sigmas,0\-infty),
-default to be 3 sigma of a Gaussian distribution
-(assumed distribution for the cumulative closure
-phase) with sigma = pi/sqrt(3*num_cp)
-.TP
-\fB\-\-epi\fR EPISILON
-Threshold for amplitude (0\-1), default 0.3
-.TP
-\fB\-\-maxMemory\fR MAX_MEMORY
-max memory to use in GB
-.TP
-\fB\-o\fR OUTDIR
-output file directory
diff -pruN 1.3.3-2/debian/man/mintpy-dem_error.1 1.4.0-1/debian/man/mintpy-dem_error.1
--- 1.3.3-2/debian/man/mintpy-dem_error.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-dem_error.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,105 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-DEM_ERROR "1" "May 2022" "mintpy-dem_error v1.3.3" "User Commands"
-.SH NAME
-mintpy-dem_error \- DEM Error (Topographic Residual) Correction
-.SH DESCRIPTION
-usage: dem_error.py [\-h] [\-g GEOM_FILE] [\-o OUTFILE] [\-t TEMPLATE_FILE]
-.TP
-[\-\-ex [EXCLUDEDATE ...]] [\-p POLYORDER]
-[\-s [STEPFUNCDATE ...]]
-[\-\-periodic PERIODIC [PERIODIC ...]] [\-\-phase\-velocity]
-[\-\-update] [\-\-ram MAXMEMORY] [\-c {lsf,pbs,slurm,local}]
-[\-\-num\-worker NUMWORKER] [\-\-config CONFIG]
-timeseries_file
-.PP
-DEM Error (Topographic Residual) Correction
-.SS "positional arguments:"
-.TP
-timeseries_file
-Timeseries file to be corrrected
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-g\fR GEOM_FILE, \fB\-\-geometry\fR GEOM_FILE
-geometry file including datasets:
-incidence angle
-slant range distance
-and/or 3D perpendicular baseline
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-outfile\fR OUTFILE
-Output file name for corrected time\-series
-.TP
-\fB\-\-phase\-velocity\fR
-Use phase velocity instead of phase for inversion constrain.
-.TP
-\fB\-\-update\fR
-Enable update mode, and skip inversion if:
-1) output timeseries file already exists, readable and newer than input interferograms file
-2) all configuration parameters are the same.
-.TP
-\fB\-\-ram\fR MAXMEMORY, \fB\-\-memory\fR MAXMEMORY
-Max amount of memory in GB to use (default: 4.0).
-Adjust according to your computer memory.
-.SS "temporal deformation model:"
-.TP
-\fB\-t\fR TEMPLATE_FILE, \fB\-\-template\fR TEMPLATE_FILE
-template file with the options
-.TP
-\fB\-\-ex\fR [EXCLUDEDATE ...], \fB\-\-exclude\fR [EXCLUDEDATE ...]
-Exclude date(s) for DEM error estimation.
-All dates will be corrected for DEM residual phase still.
-.TP
-\fB\-p\fR POLYORDER, \fB\-\-poly\-order\fR POLYORDER
-polynomial order number of temporal deformation model (default: 2).
-.TP
-\fB\-s\fR [STEPFUNCDATE ...], \fB\-\-step\-date\fR [STEPFUNCDATE ...]
-Date of step jump for temporal deformation model (default: []). i.e. date of earthquake/volcanic eruption
-.TP
-\fB\-\-periodic\fR PERIODIC [PERIODIC ...], \fB\-\-period\fR PERIODIC [PERIODIC ...], \fB\-\-peri\fR PERIODIC [PERIODIC ...]
-periodic functinos of temporal deformation model (default: []).
-.SS "parallel:"
-.IP
-parallel processing using dask
-.TP
-\fB\-c\fR {lsf,pbs,slurm,local}, \fB\-\-cluster\fR {lsf,pbs,slurm,local}, \fB\-\-cluster\-type\fR {lsf,pbs,slurm,local}
-Cluster to use for parallel computing (default: None to turn OFF).
-.TP
-\fB\-\-num\-worker\fR NUMWORKER
-Number of workers to use (default: 4).
-.TP
-\fB\-\-config\fR CONFIG, \fB\-\-config\-name\fR CONFIG
-Configuration name to use in dask.yaml (default: None).
-.SS "reference:"
-.IP
-Fattahi, H., and F. Amelung (2013), DEM Error Correction in InSAR Time Series,
-IEEE TGRS, 51(7), 4249\-4259, doi:10.1109/TGRS.2012.2227761.
-.SS "template options:"
-.IP
-## Topographic residual (DEM error) correction
-## reference: Fattahi and Amelung (2013, IEEE\-TGRS)
-## stepFuncDate      \- specify stepFuncDate option if you know there are sudden displacement jump in your area,
-##                     e.g. volcanic eruption, or earthquake
-## excludeDate       \- dates excluded for the error estimation
-## pixelwiseGeometry \- use pixel\-wise geometry (incidence angle & slant range distance)
-##                     yes \- use pixel\-wise geometry if they are available [slow; used by default]
-##                     no  \- use the mean   geometry [fast]
-mintpy.topographicResidual                   = auto  #[yes / no], auto for yes
-mintpy.topographicResidual.polyOrder         = auto  #[1\-inf], auto for 2, poly order of temporal deformation model
-mintpy.topographicResidual.phaseVelocity     = auto  #[yes / no], auto for no \- phase, use phase velocity for minimization
-mintpy.topographicResidual.stepFuncDate      = auto  #[20080529,20190704T1733 / no], auto for no, date of step jump
-mintpy.topographicResidual.excludeDate       = auto  #[20070321 / txtFile / no], auto for exclude_date.txt
-mintpy.topographicResidual.pixelwiseGeometry = auto  #[yes / no], auto for yes, use pixel\-wise geometry info
-.SS "example:"
-.IP
-# correct DEM error with pixel\-wise geometry parameters [slow]
-dem_error.py  timeseries_ERA5_ramp.h5 \fB\-g\fR inputs/geometryRadar.h5 \fB\-t\fR smallbaselineApp.cfg
-.IP
-# correct DEM error with mean geometry parameters [fast]
-dem_error.py  timeseries_ERA5_ramp.h5 \fB\-t\fR smallbaselineApp.cfg
-.IP
-# get updated/corrected DEM
-save_roipac.py inputs/geometryGeo.h5 \fB\-o\fR dem.h5   #for dataset in geo coordinates
-mask.py demErr.h5 \fB\-m\fR maskTempCoh.h5 \fB\-o\fR demErr_msk.h5
-add.py demErr_msk.h5 dem.h5 \fB\-o\fR demNew.h5
diff -pruN 1.3.3-2/debian/man/mintpy-dem_gsi.1 1.4.0-1/debian/man/mintpy-dem_gsi.1
--- 1.3.3-2/debian/man/mintpy-dem_gsi.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-dem_gsi.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,26 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-DEM_GSI "1" "May 2022" "mintpy-dem_gsi v1.3.3" "User Commands"
-.SH NAME
-mintpy-dem_gsi \- Prepare DEM from GSI (Japan) DEHM grib files.
-.SH DESCRIPTION
-usage: dem_gsi.py [\-h] \fB\-b\fR S N W E [\-o OUTFILE] [\-g GRID_DIR]
-.PP
-Prepare DEM from GSI (Japan) DEHM grib files.
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-b\fR S N W E, \fB\-\-bbox\fR S N W E
-Bounding box in latitude [\-90, 90] and longitude [\-180, 180].
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-output\fR OUTFILE
-output file name (default: gsi10m.dem.wgs84).
-.TP
-\fB\-g\fR GRID_DIR, \fB\-\-grid\-dir\fR GRID_DIR
-Directory of DEHM grib files (default: \fI\,$DEMDB/GSI_DEHM10m\/\fP).
-.SS "example:"
-.IP
-cd \fI\,$KIRISHIMA/KirishimaAlosAT424/DEM\/\fP
-dem_gsi.py \fB\-b\fR 31.1 32.8 130.1 131.9
-dem_gsi.py \fB\-b\fR 31.1 32.8 130.1 131.9 \fB\-\-grid\-dir\fR \fI\,~/data/DEM/GSI_DEHM10m\/\fP
diff -pruN 1.3.3-2/debian/man/mintpy-diff.1 1.4.0-1/debian/man/mintpy-diff.1
--- 1.3.3-2/debian/man/mintpy-diff.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-diff.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,44 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-DIFF "1" "May 2022" "mintpy-diff v1.3.3" "User Commands"
-.SH NAME
-mintpy-tropo_gacos \- Generates the difference of two input files.
-.SH DESCRIPTION
-usage: diff.py [\-h] [\-o OUTFILE] [\-\-force] file1 file2 [file2 ...]
-.PP
-Generates the difference of two input files.
-.SS "positional arguments:"
-.TP
-file1
-file to be subtracted
-.TP
-file2
-file used to subtract
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-output\fR OUTFILE
-output file name, default is file1_diff_file2.h5
-.TP
-\fB\-\-force\fR
-Enforce the differencing for the shared dates only for time\-series files
-.SS "example:"
-.TP
-diff.py
-velocity.h5    velocity_demErr.h5
-.TP
-diff.py
-timeseries.h5  inputs/ERA5.h5  \fB\-o\fR timeseries_ERA5.h5
-.TP
-diff.py
-timeseries.h5  inputs/ERA5.h5  \fB\-o\fR timeseries_ERA5.h5  \fB\-\-force\fR
-.TP
-diff.py
-timeseries_ERA5_ramp_demErr.h5  ../GIANT/Stack/LS\-PARAMS.h5 \fB\-o\fR mintpy_giant.h5
-.TP
-diff.py
-reconUnwrapIfgram.h5  ./inputs/ifgramStack.h5  \fB\-o\fR diffUnwrapIfgram.h5
-.IP
-# multiple files
-diff.py  waterMask.h5  maskSantiago.h5  maskFernandina.h5  \fB\-o\fR maskIsabela.h5
diff -pruN 1.3.3-2/debian/man/mintpy-generate_mask.1 1.4.0-1/debian/man/mintpy-generate_mask.1
--- 1.3.3-2/debian/man/mintpy-generate_mask.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-generate_mask.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,143 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-GENERATE_MASK "1" "May 2022" "mintpy-generate_mask v1.3.3" "User Commands"
-.SH NAME
-mintpy-generate_mask \- Generate mask file from input file
-.SH DESCRIPTION
-usage: generate_mask.py [\-h] [\-o OUTFILE] [\-\-keep\-nan] [\-\-revert] [\-m VMIN]
-.TP
-[\-M VMAX] [\-p MINPIXELS] [\-\-vx XMIN XMAX]
-[\-\-vy YMIN YMAX] [\-\-vroipoly] [\-\-vstd]
-[\-\-vstd\-num VSTD_NUM] [\-x XMIN XMAX] [\-y YMIN YMAX]
-[\-\-ex\-circle X Y RADIUS] [\-\-in\-circle X Y RADIUS]
-[\-\-base BASE_FILE] [\-\-base\-dset BASE_DATASET]
-[\-\-base\-value BASE_VALUE] [\-\-roipoly]
-[\-\-view\-cmd VIEW_CMD] [\-\-nonzero] [\-\-update]
-file [dset]
-.PP
-Generate mask file from input file
-.SS "positional arguments:"
-.TP
-file
-input file
-.TP
-dset
-date of timeseries, or date12 of interferograms to be converted
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-output\fR OUTFILE
-output file name.
-.TP
-\fB\-\-keep\-nan\fR
-Do not exclude pixels with NaN value
-.TP
-\fB\-\-revert\fR
-revert 0 and 1 value of output mask file
-.TP
-\fB\-m\fR VMIN, \fB\-\-min\fR VMIN
-minimum value for selected pixels
-.TP
-\fB\-M\fR VMAX, \fB\-\-max\fR VMAX
-maximum value for selected pixels
-.TP
-\fB\-p\fR MINPIXELS, \fB\-\-mp\fR MINPIXELS, \fB\-\-minpixels\fR MINPIXELS
-minimum cluster size in pixels, to remove small pixel clusters.
-.TP
-\fB\-\-vstd\fR
-mask according to the formula: |velocity| > a * velocityStd
-.TP
-\fB\-\-vstd\-num\fR VSTD_NUM
-multiple of velocityStd (a) to use for cutoff
-.TP
-\fB\-\-nonzero\fR
-Select all non\-zero pixels.
-i.e. maskConnComp.h5 from ifgramStack.h5
-.TP
-\fB\-\-update\fR
-Enable update checking for \fB\-\-nonzero\fR option.
-.SS "AOI for threshold:"
-.IP
-Define the AOI for thresholding operations.
-.TP
-\fB\-\-vx\fR XMIN XMAX
-AOI range in X for threshold operation (and keep the rest untouched.)
-.TP
-\fB\-\-vy\fR YMIN YMAX
-AOI range in Y for threshold operation (and keep the rest untouched.)
-.TP
-\fB\-\-vroipoly\fR
-AOI via interactive polygonal region of interest (ROI) selection.
-.SS "AOI:"
-.IP
-define secondary area of interest
-.TP
-\fB\-x\fR XMIN XMAX, \fB\-\-sub\-x\fR XMIN XMAX
-selection range in x/cross\-track/range direction
-.TP
-\fB\-y\fR YMIN YMAX, \fB\-\-sub\-y\fR YMIN YMAX
-selection range in y/along\-track/azimuth direction
-.TP
-\fB\-\-ex\-circle\fR X Y RADIUS
-exclude area defined by an circle (x, y, radius) in pixel number
-.TP
-\fB\-\-in\-circle\fR X Y RADIUS
-include area defined by an circle (x, y, radius) in pixel number
-.TP
-\fB\-\-base\fR BASE_FILE
-exclude pixels == base_value
-output_mask[base_data == base_value] = 0
-.TP
-\fB\-\-base\-dset\fR BASE_DATASET, \fB\-\-base\-dataset\fR BASE_DATASET
-dataset in base_file to be used, for file with multiple datasets.
-i.e.: \fB\-\-base\fR inputs/geometryRadar.h5 \fB\-\-base\-dset\fR shadow \fB\-\-base\-value\fR 1
-.TP
-\fB\-\-base\-value\fR BASE_VALUE
-value of pixels in base_file to be excluded.
-Default: 0
-.TP
-\fB\-\-roipoly\fR
-Interactive polygonal region of interest (ROI) selection.
-.TP
-\fB\-\-view\-cmd\fR VIEW_CMD
-view.py command to facilitate the AOI selection.E.g. "\-v \fB\-0\fR.1 0.1"
-.SS "example:"
-.TP
-generate_mask.py
-temporalCoherence.h5 \fB\-m\fR 0.7 \fB\-o\fR maskTempCoh.h5
-.TP
-generate_mask.py
-temporalCoherence.h5 \fB\-m\fR 0.7 \fB\-o\fR maskTempCoh.h5 \fB\-\-base\fR inputs/geometryRadar.h5 \fB\-\-base\-dset\fR shadow \fB\-\-base\-value\fR 1
-.TP
-generate_mask.py
-avgSpatialCoh.h5     \fB\-m\fR 0.7 \fB\-\-base\fR waterMask.h5 \fB\-o\fR maskSpatialCoh.h5
-.IP
-# exclude area by min/max value and/or subset in row/col direction
-generate_mask.py  081018_090118.unw \fB\-m\fR 3 \fB\-M\fR 8 \fB\-y\fR 100 700 \fB\-x\fR 200 800 \fB\-o\fR mask_1.h5
-.IP
-# exclude pixel cluster based on minimum number of pixels
-generate_mask.py  maskTempCoh.h5 \fB\-p\fR 10 mask_1.h5
-.IP
-# exclude pixels with large velocity STD: |velocity| > cutoff (2 by default) * velocityStd
-generate_mask.py  velocity.h5 \fB\-\-vstd\fR
-generate_mask.py  velocity.h5 \fB\-\-vstd\fR \fB\-\-vstd\-num\fR 3
-.IP
-# exclude / include an circular area
-generate_mask.py  maskTempCoh.h5 \fB\-m\fR 0.5 \fB\-\-ex\-circle\fR 230 283 100 \fB\-o\fR maskTempCoh_nonDef.h5
-generate_mask.py  maskTempCoh.h5 \fB\-m\fR 0.5 \fB\-\-in\-circle\fR 230 283 100 \fB\-o\fR maskTempCoh_Def.h5
-# maskout an area within a circle AND with height smaller than a threshold
-generate_mask.py  inputs/geometryGeo.h5 height \fB\-\-in\-circle\fR 339 370 21 \fB\-M\fR 1400 \fB\-\-revert\fR \fB\-o\fR maskCrater.h5
-.IP
-# use an specific dataset from multiple dataset file
-generate_mask.py  geometryRadar.dem height \fB\-m\fR 0.5 \fB\-o\fR waterMask.h5
-generate_mask.py  ifgramStack.h5 unwrapPhase\-20101120_20110220 \fB\-m\fR 4
-.IP
-# common mask file of pixels in all connected components / with non\-zero unwrapped phase
-generate_mask.py  ifgramStack.h5  \fB\-\-nonzero\fR  \fB\-o\fR maskConnComp.h5  \fB\-\-update\fR
-.IP
-# interative polygon selection of region of interest
-# useful for custom mask generation in unwrap error correction with bridging
-generate_mask.py  waterMask.h5 \fB\-m\fR 0.5 \fB\-\-roipoly\fR
-generate_mask.py  azOff.h5 \fB\-\-roipoly\fR \fB\-\-view\-cmd\fR "\-v \fB\-0\fR.1 0.1"
-generate_mask.py  velocity.h5 \fB\-\-roipoly\fR \fB\-\-view\-cmd\fR "\-\-dem ./inputs/geometryGeo.h5 \fB\-\-contour\-step\fR 100 \fB\-\-contour\-smooth\fR 0.0"
diff -pruN 1.3.3-2/debian/man/mintpy-geocode.1 1.4.0-1/debian/man/mintpy-geocode.1
--- 1.3.3-2/debian/man/mintpy-geocode.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-geocode.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,115 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-GEOCODE "1" "May 2022" "mintpy-geocode v1.3.3" "User Commands"
-.SH NAME
-mintpy-geocode \- Resample radar coded files into geo coordinates, or reverse
-.SH DESCRIPTION
-usage: geocode.py [\-h] [\-d DSET] [\-l LOOKUPFILE] [\-\-lat\-file LATFILE]
-.IP
-[\-\-lon\-file LONFILE] [\-\-geo2radar] [\-t TEMPLATEFILE]
-[\-b S N W E] [\-\-lalo LAT_STEP LON_STEP]
-[\-i {nearest,linear}] [\-\-fill FILLVALUE] [\-n NPROCS]
-[\-\-software {pyresample,scipy}] [\-\-update] [\-o OUTFILE]
-[\-\-outdir OUT_DIR] [\-\-ram MAXMEMORY]
-file [file ...]
-.PP
-Resample radar coded files into geo coordinates, or reverse
-.SS "positional arguments:"
-.TP
-file
-File(s) to be geocoded
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-d\fR DSET, \fB\-\-dset\fR DSET
-dataset to be geocoded, for example:
-height                        for geometryRadar.h5
-unwrapPhase\-20100114_20101017 for ifgramStack.h5
-.TP
-\fB\-l\fR LOOKUPFILE, \fB\-\-lookup\fR LOOKUPFILE
-Lookup table file generated by InSAR processors.
-.TP
-\fB\-\-lat\-file\fR LATFILE
-lookup table file for latitude.
-.TP
-\fB\-\-lon\-file\fR LONFILE
-lookup table file for longitude.
-.TP
-\fB\-\-geo2radar\fR, \fB\-\-geo2rdr\fR
-resample geocoded files into radar coordinates.
-ONLY for lookup table in radar\-coord (ISCE, Doris).
-.TP
-\fB\-t\fR TEMPLATEFILE, \fB\-\-template\fR TEMPLATEFILE
-Template file with geocoding options.
-.TP
-\fB\-\-update\fR
-skip resampling if output file exists and newer than input file
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-output\fR OUTFILE
-output file name. Default: add prefix 'geo_'
-.TP
-\fB\-\-outdir\fR OUT_DIR, \fB\-\-output\-dir\fR OUT_DIR
-output directory.
-.TP
-\fB\-\-ram\fR MAXMEMORY, \fB\-\-memory\fR MAXMEMORY
-Max amount of memory in GB to use (default: 4.0).
-Adjust according to your computer memory.
-.SS "grid in geo-coordinates:"
-.TP
-\fB\-b\fR S N W E, \fB\-\-bbox\fR S N W E
-Bounding box for the area of interest.
-using coordinates of the uppler left corner of the first pixel
-.TP
-and the lower right corner of the last pixel
-for radar2geo, it's the output spatial extent
-for geo2radar, it's the input  spatial extent
-.TP
-\fB\-\-lalo\fR LAT_STEP LON_STEP, \fB\-\-lalo\-step\fR LAT_STEP LON_STEP
-output pixel size in degree in latitude / longitude.
-degrees     \fB\-\-\fR> meters on equator
-0.000925926 \fB\-\-\fR> 100
-0.000833334 \fB\-\-\fR> 90
-0.000555556 \fB\-\-\fR> 60
-0.000462963 \fB\-\-\fR> 50
-0.000277778 \fB\-\-\fR> 30
-0.000185185 \fB\-\-\fR> 20
-0.000092593 \fB\-\-\fR> 10
-.SS "interpolation:"
-.TP
-\fB\-i\fR {nearest,linear}, \fB\-\-interp\fR {nearest,linear}
-interpolation/resampling method (default: nearest).
-.TP
-\fB\-\-fill\fR FILLVALUE
-Fill value for extrapolation (default: nan).
-.TP
-\fB\-n\fR NPROCS, \fB\-\-nprocs\fR NPROCS
-number of processors to be used for calculation (default: 1).
-Note: Do not use more processes than available processor cores.
-.TP
-\fB\-\-software\fR {pyresample,scipy}
-software/module used for interpolation (default: pyresample)
-Note: \fB\-\-bbox\fR is not supported for \fB\-p\fR scipy
-.SS "template options:"
-.IP
-# for input dataset in radar coordinates only
-# commonly used resolution in meters and in degrees (on equator)
-# 100,         60,          50,          30,          20,          10
-# 0.000925926, 0.000555556, 0.000462963, 0.000277778, 0.000185185, 0.000092593
-mintpy.geocode              = auto  #[yes / no], auto for yes
-mintpy.geocode.SNWE         = auto  #[\-1.2,0.5,\-92,\-91 / none ], auto for none, output extent in degree
-mintpy.geocode.laloStep     = auto  #[\-0.000555556,0.000555556 / None], auto for None, output resolution in degree
-mintpy.geocode.interpMethod = auto  #[nearest], auto for nearest, interpolation method
-mintpy.geocode.fillValue    = auto  #[np.nan, 0, ...], auto for np.nan, fill value for outliers.
-.SS "example:"
-.IP
-geocode.py velocity.h5
-geocode.py velocity.h5 \fB\-b\fR \fB\-0\fR.5 \fB\-0\fR.25 \fB\-91\fR.3 \fB\-91\fR.1
-geocode.py velocity.h5 timeseries.h5 \fB\-t\fR smallbaselineApp.cfg \fB\-\-outdir\fR ./geo \fB\-\-update\fR
-.IP
-# geocode file using ISCE\-2 lat/lon.rdr file
-geocode.py filt_fine.int \fB\-\-lat\-file\fR ../../geom_reference/lat.rdr \fB\-\-lon\-file\fR ../../geom_reference/lon.rdr
-.IP
-# radar\-code file in geo coordinates
-geocode.py swbdLat_S02_N01_Lon_W092_W090.wbd \fB\-l\fR geometryRadar.h5 \fB\-o\fR waterMask.rdr \fB\-\-geo2radar\fR
-geocode.py geo_velocity.h5 \fB\-\-geo2radar\fR
diff -pruN 1.3.3-2/debian/man/mintpy-ifgram_inversion.1 1.4.0-1/debian/man/mintpy-ifgram_inversion.1
--- 1.3.3-2/debian/man/mintpy-ifgram_inversion.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-ifgram_inversion.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,162 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-IFGRAM_INVERSION "1" "May 2022" "mintpy-ifgram_inversion v1.3.3" "User Commands"
-.SH NAME
-mintpy-ifgram_inversion \- Invert network of interferograms into time\-series.
-.SH DESCRIPTION
-usage: ifgram_inversion.py [\-h] [\-t TEMPLATEFILE] [\-i OBSDATASETNAME]
-.TP
-[\-m WATERMASKFILE]
-[\-o TS_FILE TCOH_FILE NUM_INV_FILE]
-[\-\-ref\-date REF_DATE] [\-\-skip\-reference]
-[\-w {fim,coh,var,no}] [\-\-min\-norm\-phase]
-[\-\-norm {L1,L2}] [\-\-calc\-cov]
-[\-\-mask\-dset MASKDATASET] [\-\-mask\-thres NUM]
-[\-\-min\-redun NUM] [\-\-ram MAXMEMORY]
-[\-c {lsf,pbs,slurm,local}] [\-\-num\-worker NUMWORKER]
-[\-\-config CONFIG] [\-\-update]
-ifgramStackFile
-.PP
-Invert network of interferograms into time\-series.
-.SS "positional arguments:"
-.TP
-ifgramStackFile
-interferograms stack file to be inverted
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-t\fR TEMPLATEFILE, \fB\-\-template\fR TEMPLATEFILE
-template text file with options
-.TP
-\fB\-i\fR OBSDATASETNAME, \fB\-d\fR OBSDATASETNAME, \fB\-\-dset\fR OBSDATASETNAME
-dataset name of unwrap phase / offset to be used for inversion
-e.g.: unwrapPhase, unwrapPhase_bridging, ...
-.TP
-\fB\-m\fR WATERMASKFILE, \fB\-\-water\-mask\fR WATERMASKFILE
-Skip inversion on the masked out region, i.e. water.
-.TP
-\fB\-o\fR TS_FILE TCOH_FILE NUM_INV_FILE, \fB\-\-output\fR TS_FILE TCOH_FILE NUM_INV_FILE
-Output file name. (default: None).
-.TP
-\fB\-\-ref\-date\fR REF_DATE
-Reference date, first date by default.
-.TP
-\fB\-\-skip\-reference\fR, \fB\-\-skip\-ref\fR
-[for offset and testing] do not apply spatial referencing.
-.TP
-\fB\-\-calc\-cov\fR
-Calculate time\-series STD via linear propagation from the network of interferograms or offset pairs.
-.TP
-\fB\-\-ram\fR MAXMEMORY, \fB\-\-memory\fR MAXMEMORY
-Max amount of memory in GB to use (default: 4.0).
-Adjust according to your computer memory.
-.TP
-\fB\-\-update\fR
-Enable update mode, and skip inversion if output timeseries file already exists,
-readable and newer than input interferograms file
-.SS "solver:"
-.IP
-solver for the network inversion problem
-.TP
-\fB\-w\fR {fim,coh,var,no}, \fB\-\-weight\-func\fR {fim,coh,var,no}
-function used to convert coherence to weight for inversion:
-var \- inverse of phase variance due to temporal decorrelation (default)
-fim \- Fisher Information Matrix as weightcoh \- spatial coherence
-no  \- no/uniform weight
-.TP
-\fB\-\-min\-norm\-phase\fR
-Enable inversion with minimum\-norm deformation phase, instead of the default minimum\-norm deformation velocity.
-.TP
-\fB\-\-norm\fR {L1,L2}
-Optimization mehtod, L1 or L2 norm. (default: L2).
-.SS "mask:"
-.IP
-mask observation data before inversion
-.TP
-\fB\-\-mask\-dset\fR MASKDATASET, \fB\-\-mask\-dataset\fR MASKDATASET, \fB\-\-md\fR MASKDATASET
-dataset used to mask unwrapPhase, e.g. coherence, connectComponent
-.TP
-\fB\-\-mask\-thres\fR NUM, \fB\-\-mask\-threshold\fR NUM, \fB\-\-mt\fR NUM
-threshold to generate mask when mask is coherence (default: 0.4).
-.TP
-\fB\-\-min\-redun\fR NUM, \fB\-\-min\-redundancy\fR NUM, \fB\-\-mr\fR NUM
-minimum redundancy of interferograms for every SAR acquisition. (default: 1.0).
-.SS "parallel:"
-.IP
-parallel processing using dask
-.TP
-\fB\-c\fR {lsf,pbs,slurm,local}, \fB\-\-cluster\fR {lsf,pbs,slurm,local}, \fB\-\-cluster\-type\fR {lsf,pbs,slurm,local}
-Cluster to use for parallel computing (default: None to turn OFF).
-.TP
-\fB\-\-num\-worker\fR NUMWORKER
-Number of workers to use (default: 4).
-.TP
-\fB\-\-config\fR CONFIG, \fB\-\-config\-name\fR CONFIG
-Configuration name to use in dask.yaml (default: None).
-.SS "references:"
-.IP
-Berardino, P., Fornaro, G., Lanari, R., & Sansosti, E. (2002). A new algorithm for surface
-.IP
-deformation monitoring based on small baseline differential SAR interferograms. IEEE TGRS,
-40(11), 2375\-2383. doi:10.1109/TGRS.2002.803792
-.IP
-Pepe, A., and Lanari, R. (2006), On the extension of the minimum cost flow algorithm for phase unwrapping
-.IP
-of multitemporal differential SAR interferograms, IEEE\-TGRS, 44(9), 2374\-2383.
-.IP
-Perissin, D., and Wang, T. (2012), Repeat\-pass SAR interferometry with partially coherent targets, IEEE TGRS,
-.IP
-50(1), 271\-280, doi:10.1109/tgrs.2011.2160644.
-.IP
-Samiei\-Esfahany, S., Martins, J. E., Van Leijen, F., and Hanssen, R. F. (2016), Phase Estimation for Distributed
-.IP
-Scatterers in InSAR Stacks Using Integer Least Squares Estimation, IEEE TGRS, 54(10), 5671\-5687.
-.IP
-Seymour, M. S., and Cumming, I. G. (1994), Maximum likelihood estimation for SAR interferometry, 1994.
-.IP
-IGARSS '94., 8\-12 Aug 1994.
-.IP
-Yunjun, Z., Fattahi, H., and Amelung, F. (2019), Small baseline InSAR time series analysis: Unwrapping error
-.IP
-correction and noise reduction, Computers & Geosciences, 133, 104331, doi:10.1016/j.cageo.2019.104331.
-.IP
-Yunjun, Z., Fattahi, H., Brancato, V., Rosen, P., Simons, M. (2021), Oral: Tectonic displacement mapping from SAR
-.IP
-offset time series: noise reduction and uncertainty quantification, ID 590, FRINGE 2021, 31 May ??? 4 Jun, 2021, Virtual.
-.SS "template options:"
-.IP
-## Invert network of interferograms into time\-series using weighted least sqaure (WLS) estimator.
-## weighting options for least square inversion [fast option available but not best]:
-## a. var \- use inverse of covariance as weight (Tough et al., 1995; Guarnieri & Tebaldini, 2008) [recommended]
-## b. fim \- use Fisher Information Matrix as weight (Seymour & Cumming, 1994; Samiei\-Esfahany et al., 2016).
-## c. coh \- use coherence as weight (Perissin & Wang, 2012)
-## d. no  \- uniform weight (Berardino et al., 2002) [fast]
-## SBAS (Berardino et al., 2002) = minNormVelocity (yes) + weightFunc (no)
-mintpy.networkInversion.weightFunc      = auto #[var / fim / coh / no], auto for var
-mintpy.networkInversion.waterMaskFile   = auto #[filename / no], auto for waterMask.h5 or no [if not found]
-mintpy.networkInversion.minNormVelocity = auto #[yes / no], auto for yes, min\-norm deformation velocity / phase
-mintpy.networkInversion.residualNorm    = auto #[L2 ], auto for L2, norm minimization solution
-.IP
-## mask options for unwrapPhase of each interferogram before inversion (recommend if weightFunct=no):
-## a. coherence              \- mask out pixels with spatial coherence < maskThreshold
-## b. connectComponent       \- mask out pixels with False/0 value
-## c. no                     \- no masking [recommended].
-## d. range/azimuthOffsetStd \- mask out pixels with offset std. dev. > maskThreshold [for offset]
-mintpy.networkInversion.maskDataset   = auto #[coherence / connectComponent / rangeOffsetStd / azimuthOffsetStd / no], auto for no
-mintpy.networkInversion.maskThreshold = auto #[0\-inf], auto for 0.4
-mintpy.networkInversion.minRedundancy = auto #[1\-inf], auto for 1.0, min num_ifgram for every SAR acquisition
-.IP
-## Temporal coherence is calculated and used to generate the mask as the reliability measure
-## reference: Pepe & Lanari (2006, IEEE\-TGRS)
-mintpy.networkInversion.minTempCoh  = auto #[0.0\-1.0], auto for 0.7, min temporal coherence for mask
-mintpy.networkInversion.minNumPixel = auto #[int > 1], auto for 100, min number of pixels in mask above
-mintpy.networkInversion.shadowMask  = auto #[yes / no], auto for yes [if shadowMask is in geometry file] or no.
-.SS "example:"
-.IP
-ifgram_inversion.py inputs/ifgramStack.h5 \fB\-t\fR smallbaselineApp.cfg \fB\-\-update\fR
-ifgram_inversion.py inputs/ifgramStack.h5 \fB\-w\fR no  # turn off weight for fast processing
-ifgram_inversion.py inputs/ifgramStack.h5 \fB\-c\fR no  # turn off parallel processing
-# offset
-ifgram_inversion.py inputs/ifgramStack.h5 \fB\-i\fR rangeOffset   \fB\-w\fR no \fB\-m\fR waterMask.h5 \fB\-\-md\fR offsetSNR \fB\-\-mt\fR 5
-ifgram_inversion.py inputs/ifgramStack.h5 \fB\-i\fR azimuthOffset \fB\-w\fR no \fB\-m\fR waterMask.h5 \fB\-\-md\fR offsetSNR \fB\-\-mt\fR 5
diff -pruN 1.3.3-2/debian/man/mintpy-ifgram_reconstruction.1 1.4.0-1/debian/man/mintpy-ifgram_reconstruction.1
--- 1.3.3-2/debian/man/mintpy-ifgram_reconstruction.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-ifgram_reconstruction.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,28 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-IFGRAM_RECONSTRUCTION "1" "May 2022" "mintpy-ifgram_reconstruction v1.3.3" "User Commands"
-.SH NAME
-mintpy-ifgram_reconstruction \- Reconstruct network of interferograms from time\-series
-.SH DESCRIPTION
-usage: ifgram_reconstruction.py [\-h] [\-r IFGRAM_FILE] [\-o OUT_FILE]
-.IP
-timeseries_file
-.PP
-Reconstruct network of interferograms from time\-series
-.SS "positional arguments:"
-.TP
-timeseries_file
-time\-series file.
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-r\fR IFGRAM_FILE
-reference interferograms stack file
-.TP
-\fB\-o\fR OUT_FILE, \fB\-\-output\fR OUT_FILE
-output filename for the reconstructed interferograms.
-.SS "example:"
-.IP
-ifgram_reconstruction.py timeseries_ERA5_ramp_demErr.h5
-ifgram_reconstruction.py timeseries_ERA5_ramp_demErr.h5 \fB\-r\fR inputs/ifgramStack.h5 \fB\-o\fR ifgramStackRecon.h5
diff -pruN 1.3.3-2/debian/man/mintpy-image_math.1 1.4.0-1/debian/man/mintpy-image_math.1
--- 1.3.3-2/debian/man/mintpy-image_math.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-image_math.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,41 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-IMAGE_MATH "1" "May 2022" "mintpy-image_math v1.3.3" "User Commands"
-.SH NAME
-mintpy-image_math \- Basic Mathmatic Operation of file
-.SH DESCRIPTION
-usage: image_math.py [\-h] [\-o OUTFILE] file {+,\-,*,/,^} VALUE
-.PP
-Basic Mathmatic Operation of file
-.SS "positional arguments:"
-.TP
-file
-input file
-.TP
-{+,\-,*,/,^}
-mathmatical operator
-.TP
-VALUE
-value to be operated with input file
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-output\fR OUTFILE
-output file name.
-.SS "example:"
-.TP
-image_math.py
-velocity.h5            '+'  0.5
-.TP
-image_math.py
-geo_080212_101120.cor  '\-'  0.2
-.TP
-image_math.py
-timeseries.h5          '*'  1.5
-.TP
-image_math.py
-velocity.h5            '/'  2.0
-.TP
-image_math.py
-velocity.h5            '^'  2.0
diff -pruN 1.3.3-2/debian/man/mintpy-image_stitch.1 1.4.0-1/debian/man/mintpy-image_stitch.1
--- 1.3.3-2/debian/man/mintpy-image_stitch.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-image_stitch.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,39 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-IMAGE_STITCH "1" "May 2022" "mintpy-image_stitch v1.3.3" "User Commands"
-.SH NAME
-mintpy-image_stitch \- Stitch >=2 geocoded datasets sharing common area into one.
-.SH DESCRIPTION
-usage: image_stitch.py [\-h] \fB\-o\fR OUTFILE [\-\-no\-offset] [\-\-nodisplay]
-.IP
-file1 file2 [file2 ...]
-.PP
-Stitch >=2 geocoded datasets sharing common area into one.
-.IP
-Function automatically finds the common area and calculates
-the average offset between the two velocity.
-.SS "positional arguments:"
-.TP
-file1
-file to stitch
-.TP
-file2
-file(s) to stitch
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-output\fR OUTFILE
-output file name
-.TP
-\fB\-\-no\-offset\fR, \fB\-\-no\-off\fR
-Do not apply offset if data sets are merely to be stitched and no adjustment of values needs to be made (i.e., for two coherence maps), use this flag
-.TP
-\fB\-\-nodisplay\fR
-do not display the result plotting.
-.SS "example:"
-.TP
-image_stitch.py
-vel_AlosAT422.h5  vel_AlosAT423.h5  vel_AlosAT424.h5  vel_AlosAT425.h5 \fB\-o\fR  vel_AlosA.h5
-.IP
-image_stitch.py geom_AlosAT422.h5 geom_AlosAT423.h5 geom_AlosAT424.h5 geom_AlosAT425.h5 \fB\-o\fR geom_AlosA.h5 \fB\-\-no\-offset\fR
diff -pruN 1.3.3-2/debian/man/mintpy-info.1 1.4.0-1/debian/man/mintpy-info.1
--- 1.3.3-2/debian/man/mintpy-info.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-info.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,66 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-INFO "1" "May 2022" "mintpy-info v1.3.3" "User Commands"
-.SH NAME
-mintpy-info \- Display Metadata / Structure information of ANY File
-.SH DESCRIPTION
-usage: info.py [\-h] [\-\-compact] [\-\-dset DSET] [\-\-date] [\-\-num] [\-\-slice]
-.IP
-[\-\-show {dropped,kept,all}]
-file
-.PP
-Display Metadata / Structure information of ANY File
-.SS "positional arguments:"
-.TP
-file
-File to check
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-\-compact\fR
-show compact info by displaying only the top 20 metadata
-.TP
-\fB\-\-dset\fR DSET
-Show dataset
-.SS "List:"
-.IP
-list date/slice info
-.TP
-\fB\-\-date\fR
-Show date/date12 info of input file
-.TP
-\fB\-\-num\fR
-Show date/date12 info with numbers
-.TP
-\fB\-\-slice\fR
-Show slice list of the file
-.TP
-\fB\-\-show\fR {dropped,kept,all}, \fB\-\-show\-ifgram\fR {dropped,kept,all}
-Show all / kept / dropped interferograms only. Default: all.
-.SS "example:"
-.IP
-info.py timeseries.h5
-info.py velocity.h5
-info.py ifgramStack.h5
-.IP
-# Display dataset
-info.py timeseries.py \fB\-\-dset\fR date
-info.py timeseries.py \fB\-\-dset\fR bperp
-.IP
-# Time / Date Info
-info.py ifgramStack.h5 \fB\-\-date\fR                 #print date1_date2 info for all  interferograms
-info.py timeseries.h5  \fB\-\-num\fR                  #print date list of timeseries with its number
-info.py ifgramStack.h5 \fB\-\-date\fR \fB\-\-show\fR kept     #print date1_date2 info for kept interferograms
-info.py ifgramStack.h5 \fB\-\-date\fR \fB\-\-show\fR dropped  #print date1_date2 info for dropped/excluded interferograms
-info.py LS\-PARAMS.h5   \fB\-\-date\fR > date_list.txt #print date list of timeseries and save it to txt file.
-info.py S1_IW12_128_0593_0597_20141213_20180619.h5 \fB\-\-date\fR
-.IP
-# save date1_date2 info of interferograms to a text file
-info.py ifgramStack.h5 \fB\-\-date\fR \fB\-\-show\fR kept > date12_list.txt
-.IP
-# Slice / Dataset Info
-info.py timeseries.h5                              \fB\-\-slice\fR
-info.py inputs/ifgramStack.h5                      \fB\-\-slice\fR
-info.py S1_IW12_128_0593_0597_20141213_20180619.h5 \fB\-\-slice\fR
-info.py LS\-PARAMS.h5                               \fB\-\-slice\fR
diff -pruN 1.3.3-2/debian/man/mintpy-iono_tec.1 1.4.0-1/debian/man/mintpy-iono_tec.1
--- 1.3.3-2/debian/man/mintpy-iono_tec.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-iono_tec.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,78 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-IONO_TEC "1" "May 2022" "mintpy-iono_tec v1.3.3" "User Commands"
-.SH NAME
-mintpy-iono_tec \- Calculate ionospheric ramps using Global Iono Maps (GIM) from GNSS\-based TEC products.
-.SH DESCRIPTION
-usage: iono_tec.py [\-h] \fB\-g\fR GEOM_FILE [\-s TEC_SOL] [\-\-tec\-dir TEC_DIR]
-.IP
-[\-\-update] [\-\-iono\-file IONO_FILE]
-[\-i {nearest,linear3d,linear2d}] [\-\-norotate]
-[\-\-ratio SUB_TEC_RATIO]
-dis_file
-.PP
-Calculate ionospheric ramps using Global Iono Maps (GIM) from GNSS\-based TEC products.
-.SS "positional arguments:"
-.TP
-dis_file
-displacement time\-series HDF5 file, i.e. timeseries.h5
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-g\fR GEOM_FILE, \fB\-\-geomtry\fR GEOM_FILE
-geometry file including incidence/azimuthAngle.
-.TP
-\fB\-s\fR TEC_SOL, \fB\-\-sol\fR TEC_SOL, \fB\-\-tec\-sol\fR TEC_SOL
-TEC solution center (default: jpl).
-.TP
-jpl \- JPL (Final)
-igs \- IGS (Final)
-cod \- CODE (Final)
-.TP
-Check more at:
-https://cddis.nasa.gov/Data_and_Derived_Products/GNSS/atmospheric_products.html
-.TP
-\fB\-\-tec\-dir\fR TEC_DIR
-directory of downloaded GNSS TEC data (default: ${WEATHER_DIR}/GIM_IGS).
-.TP
-\fB\-\-update\fR
-Enable update mode.
-.TP
-\fB\-\-iono\-file\fR IONO_FILE
-calculated LOS iono ramp time\-series file.
-.SS "GIM extraction:"
-.IP
-Parameters to extract TEC at point of interest from GIM (mainly for impact demonstration).
-.TP
-\fB\-i\fR {nearest,linear3d,linear2d}, \fB\-\-interp\fR {nearest,linear3d,linear2d}
-Interpolation method to grab the GIM value at the point of interest (default: linear3d).
-.TP
-\fB\-\-norotate\fR
-Rotate TEC maps along the longitude direction to compensate the correlation between
-the ionosphere and the Sun's position, as suggested by Schaer et al. (1998).
-For 'interp_method == linear3d' ONLY. (default: True).
-.TP
-\fB\-\-ratio\fR SUB_TEC_RATIO
-Ratio to calculate the sub\-orbital TEC from the total TEC.
-Set to "adaptive" for seasonally adaptive scaling.
-.TP
-Based on equation (14) from Yunjun et al. (2022).
-Set to "a value" within (0,1] for a fixed scaling
-E.g. 0.75 for TerraSAR\-X (Gisinger et al., 2021)
-.TP
-0.90 for Sentinel\-1 (Gisinger et al., 2021)
-0.69 for Sentinel\-1 (Yunjun et al., 2022)
-.SS "references:"
-.IP
-Yunjun, Z., Fattahi, H., Pi, X., Rosen, P., Simons, M., Agram, P., & Aoki, Y. (2022). Range Geolocation Accuracy
-.IP
-of C/L\-band SAR and its Implications for Operational Stack Coregistration. IEEE Trans. Geosci. Remote Sens.
-.IP
-Schaer, S., Gurtner, W., & Feltens, J. (1998). IONEX: The ionosphere map exchange format version 1.1.
-.IP
-Paper presented at the Proceedings of the IGS AC workshop, Darmstadt, Germany, Darmstadt, Germany.
-.SS "example:"
-.IP
-iono_tec.py timeseriesRg.h5 \fB\-g\fR inputs/geometryRadar.h5
-iono_tec.py timeseriesRg.h5 \fB\-g\fR inputs/geometryRadar.h5 \fB\-s\fR cod
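
For reference, the ramp that iono_tec.py removes derives from the standard single-layer
ionosphere model: the one-way group delay is 40.31 * TEC / f^2 meters (TEC in electrons/m^2),
mapped from vertical to slant using the incidence angle at the piercing point. A minimal
numpy sketch of that conversion, with illustrative names rather than MintPy's actual API:

    import numpy as np

    K = 40.31  # standard ionospheric refraction constant [m^3 s^-2]

    def vtec_to_slant_delay(vtec_tecu, freq_hz, inc_angle_deg):
        """One-way slant range delay [m] from vertical TEC [TECU]."""
        vtec = vtec_tecu * 1e16                   # 1 TECU = 1e16 electrons/m^2
        delay_zenith = K * vtec / freq_hz**2      # vertical group delay [m]
        return delay_zenith / np.cos(np.deg2rad(inc_angle_deg))

    # 20 TECU at Sentinel-1 C-band (5.405 GHz), 35 deg incidence -> ~0.34 m
    print(vtec_to_slant_delay(20, 5.405e9, 35))
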
diff -pruN 1.3.3-2/debian/man/mintpy-load_data.1 1.4.0-1/debian/man/mintpy-load_data.1
--- 1.3.3-2/debian/man/mintpy-load_data.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-load_data.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,103 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-LOAD_DATA "1" "May 2022" "mintpy-load_data v1.3.3" "User Commands"
-.SH NAME
-mintpy-load_data \- Saving a stack of Interferograms to an HDF5 file
-.SH DESCRIPTION
-usage: load_data.py [\-h] [\-H] [\-t TEMPLATE_FILE [TEMPLATE_FILE ...]]
-.TP
-[\-\-project PROJECT_NAME]
-[\-\-processor {isce,aria,hyp3,gmtsar,snap,gamma,roipac,cosicorr}]
-[\-\-enforce] [\-\-compression {None,gzip,lzf}]
-[\-o OUTFILE OUTFILE OUTFILE]
-.PP
-Saving a stack of Interferograms to an HDF5 file
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-H\fR
-Print/Show the example template file for loading.
-.TP
-\fB\-t\fR TEMPLATE_FILE [TEMPLATE_FILE ...], \fB\-\-template\fR TEMPLATE_FILE [TEMPLATE_FILE ...]
-template file with path info.
-.TP
-\fB\-\-project\fR PROJECT_NAME
-project name of dataset for INSARMAPS Web Viewer
-.TP
-\fB\-\-processor\fR {isce,aria,hyp3,gmtsar,snap,gamma,roipac,cosicorr}
-InSAR processor/software of the file
-.TP
-\fB\-\-enforce\fR, \fB\-f\fR
-Disable the update mode, i.e. do not skip the datasets that are already loaded.
-.TP
-\fB\-\-compression\fR {None,gzip,lzf}
-compress loaded geometry while writing HDF5 file, default: None.
-.TP
-\fB\-o\fR OUTFILE OUTFILE OUTFILE, \fB\-\-output\fR OUTFILE OUTFILE OUTFILE
-output HDF5 file
-.SS "template options:"
-.IP
-##\-\-\-\-\-\-\-\-\-add attributes manually
-## MintPy requires attributes listed at: https://mintpy.readthedocs.io/en/latest/api/attributes/
-## Missing attributes can be added below manually (uncomment #), e.g.
-# ORBIT_DIRECTION = ascending
-# PLATFORM = CSK
-# ...
-## a. autoPath \- automatic path pattern defined in mintpy.defaults.auto_path.AUTO_PATH_*
-## b. load_data.py \fB\-H\fR to check more details and example inputs.
-## c. compression to save disk usage for ifgramStack.h5 file:
-## no   \- save   0% disk usage, fast [default]
-## lzf  \- save ~57% disk usage, relatively slow
-## gzip \- save ~62% disk usage, very slow [not recommended]
-mintpy.load.processor      = auto  #[isce, aria, hyp3, gmtsar, snap, gamma, roipac], auto for isce
-mintpy.load.autoPath       = auto  #[yes / no], auto for no, use pre\-defined auto path
-mintpy.load.updateMode     = auto  #[yes / no], auto for yes, skip re\-loading if HDF5 files are complete
-mintpy.load.compression    = auto  #[gzip / lzf / no], auto for no.
-##\-\-\-\-\-\-\-\-\-for ISCE only:
-mintpy.load.metaFile       = auto  #[path of common metadata file for the stack], i.e.: ./reference/IW1.xml, ./referenceShelve/data.dat
-mintpy.load.baselineDir    = auto  #[path of the baseline dir], i.e.: ./baselines
-##\-\-\-\-\-\-\-\-\-interferogram datasets:
-mintpy.load.unwFile        = auto  #[path pattern of unwrapped interferogram files]
-mintpy.load.corFile        = auto  #[path pattern of spatial coherence       files]
-mintpy.load.connCompFile   = auto  #[path pattern of connected components    files], optional but recommended
-mintpy.load.intFile        = auto  #[path pattern of wrapped interferogram   files], optional
-mintpy.load.ionoFile       = auto  #[path pattern of ionospheric delay       files], optional
-mintpy.load.magFile        = auto  #[path pattern of interferogram magnitude files], optional
-##\-\-\-\-\-\-\-\-\-offset datasets (optional):
-mintpy.load.azOffFile      = auto  #[path pattern of azimuth offset file], optional
-mintpy.load.rgOffFile      = auto  #[path pattern of range   offset file], optional
-mintpy.load.azOffStdFile   = auto  #[path pattern of azimuth offset variance file], optional
-mintpy.load.rgOffStdFile   = auto  #[path pattern of range   offset variance file], optional
-mintpy.load.offSnrFile     = auto  #[path pattern of offset signal\-to\-noise ratio file], optional
-##\-\-\-\-\-\-\-\-\-geometry datasets:
-mintpy.load.demFile        = auto  #[path of DEM file]
-mintpy.load.lookupYFile    = auto  #[path of latitude/row/y coordinate file], not required for geocoded data
-mintpy.load.lookupXFile    = auto  #[path of longitude/column/x coordinate file], not required for geocoded data
-mintpy.load.incAngleFile   = auto  #[path of incidence angle file], optional but recommended
-mintpy.load.azAngleFile    = auto  #[path of azimuth   angle file], optional
-mintpy.load.shadowMaskFile = auto  #[path of shadow mask file], optional but recommended
-mintpy.load.waterMaskFile  = auto  #[path of water  mask file], optional but recommended
-mintpy.load.bperpFile      = auto  #[path pattern of 2D perpendicular baseline file], optional
-##\-\-\-\-\-\-\-\-\-multilook (optional):
-## multilook while loading data with nearest interpolation, to reduce dataset size
-mintpy.load.ystep          = auto    #[int >= 1], auto for 1 \- no multilooking
-mintpy.load.xstep          = auto    #[int >= 1], auto for 1 \- no multilooking
-##\-\-\-\-\-\-\-\-\-subset (optional):
-## if both yx and lalo are specified, use lalo option unless a) no lookup file AND b) dataset is in radar coord
-mintpy.subset.yx           = auto    #[y0:y1,x0:x1 / no], auto for no
-mintpy.subset.lalo         = auto    #[S:N,W:E / no], auto for no
-.SS "NOTE:"
-.IP
-For interferograms, unwrapPhase is required; the other datasets are optional, including coherence, connectComponent, wrapPhase, etc.
-The unwrapPhase metadata file requires the DATE12 attribute in YYMMDD\-YYMMDD format.
-All data file paths must contain the reference and secondary dates, either in the file name or in the folder name.
-.SS "example:"
-.IP
-load_data.py \fB\-t\fR GalapagosSenDT128.template
-load_data.py \fB\-t\fR smallbaselineApp.cfg
-load_data.py \fB\-t\fR smallbaselineApp.cfg GalapagosSenDT128.template \fB\-\-project\fR GalapagosSenDT128
-load_data.py \fB\-H\fR #Show example input template for ISCE/ROI_PAC/GAMMA products
-.IP
-# load geometry only
-# fill metaFile, baselineDir and geometry datasets in the template and run load_data.py
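
The HDF5 stack written by load_data.py can be sanity-checked with plain h5py before
further processing; a short sketch (dataset names such as unwrapPhase mirror the template
keys above, but treat them as assumptions for your own file):

    import h5py

    with h5py.File('inputs/ifgramStack.h5', 'r') as f:
        print('datasets:', list(f.keys()))   # e.g. ['unwrapPhase', 'coherence', ...]
        if 'unwrapPhase' in f:
            print('stack shape (num_pair, length, width):', f['unwrapPhase'].shape)
        # file-level metadata attributes (ROI_PAC style keys)
        for key in ('WIDTH', 'LENGTH', 'ORBIT_DIRECTION'):
            if key in f.attrs:
                print(key, '=', f.attrs[key])
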
diff -pruN 1.3.3-2/debian/man/mintpy-load_gbis.1 1.4.0-1/debian/man/mintpy-load_gbis.1
--- 1.3.3-2/debian/man/mintpy-load_gbis.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-load_gbis.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,26 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-LOAD_GBIS "1" "May 2022" "mintpy-load_gbis v1.3.3" "User Commands"
-.SH NAME
-mintpy-load_gbis \- Load GBIS inversion result to HDF5 format.
-.SH DESCRIPTION
-usage: load_gbis.py [\-h] [\-o OUTFILE] [\-\-nodisplay] file
-.PP
-Load GBIS inversion result to HDF5 format.
-.SS "positional arguments:"
-.TP
-file
-GBIS inversion mat file.
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-output\fR OUTFILE
-output file name.
-.TP
-\fB\-\-nodisplay\fR
-do not display the figure
-.SS "example:"
-.IP
-load_gbis.py invert_1_2_C.mat
-load_gbis.py invert_1_2_C.mat \fB\-\-nodisplay\fR
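
A GBIS result is an ordinary MATLAB .mat file, so its contents can be listed with scipy
before conversion; a generic sketch (the variable names inside invert_1_2_C.mat depend on
the GBIS run and are deliberately not assumed here):

    import scipy.io

    mat = scipy.io.loadmat('invert_1_2_C.mat', squeeze_me=True, struct_as_record=False)
    for name, value in mat.items():
        if not name.startswith('__'):   # skip MATLAB header entries
            print(name, type(value))
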
diff -pruN 1.3.3-2/debian/man/mintpy-local_oscilator_drift.1 1.4.0-1/debian/man/mintpy-local_oscilator_drift.1
--- 1.3.3-2/debian/man/mintpy-local_oscilator_drift.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-local_oscilator_drift.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,41 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-LOCAL_OSCILATOR_DRIFT "1" "May 2022" "mintpy-local_oscilator_drift v1.3.3" "User Commands"
-.SH NAME
-mintpy-local_oscilator_drift \- Local Oscillator Drift (LOD) correction of Envisat
-.SH DESCRIPTION
-usage: local_oscilator_drift.py [\-h] [\-o OUTFILE] file range_dist_file
-.PP
-Local Oscillator Drift (LOD) correction of Envisat
-.SS "positional arguments:"
-.TP
-file
-timeseries / interferograms file, i.e. timeseries.h5
-.TP
-range_dist_file
-Slant range distance file, i.e. inputs/geometryRadar.h5, inputs/geometryGeo.h5
-or use range_distance.py to generate it.
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-output\fR OUTFILE
-Output file name for corrected file.
-.SS "reference:"
-.IP
-Marinkovic, P., and Y. Larsen (2013), Consequences of long\-term ASAR local oscillator
-frequency decay \- An empirical study of 10 years of data, in Living Planet Symposium,
-Edinburgh, U.K.
-.SS "template options:"
-.IP
-## Local Oscillator Drift (LOD) correction (for Envisat only)
-## reference: Marinkovic and Larsen (2013, Proc. LPS)
-## automatically applied to Envisat data (identified via PLATFORM attribute)
-## and skipped for all the other satellites.
-.SS "example:"
-.TP
-local_oscilator_drift.py
-timeseries.h5                 inputs/geometryRadar.h5
-.TP
-local_oscilator_drift.py
-filt_101020_110220_4rlks.unw  inputs/geometryRadar.h5
diff -pruN 1.3.3-2/debian/man/mintpy-lookup_geo2radar.1 1.4.0-1/debian/man/mintpy-lookup_geo2radar.1
--- 1.3.3-2/debian/man/mintpy-lookup_geo2radar.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-lookup_geo2radar.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,27 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-LOOKUP_GEO2RADAR "1" "May 2022" "mintpy-lookup_geo2radar v1.3.3" "User Commands"
-.SH NAME
-mintpy-lookup_geo2radar \- Convert lookup table from geo\-coord (GAMMA, ROI_PAC) into radar\-coord (ISCE)
-.SH DESCRIPTION
-usage: lookup_geo2radar.py [\-h] [\-w FILE] [\-\-parallel NUM] geometryGeo
-.PP
-Convert lookup table from geo\-coord (GAMMA, ROI_PAC) into radar\-coord (ISCE)
-.SS "positional arguments:"
-.TP
-geometryGeo
-geometryGeo file which includes geo\-coordinates based lookup\-table
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-w\fR FILE, \fB\-\-write\fR FILE
-update geometryRadar.h5 file by adding the radar\-coordinates based lookup\-table.
-.TP
-\fB\-\-parallel\fR NUM
-Enable parallel processing and specify the number of processors to use. [default: 1]
-.SS "examples:"
-.IP
-lookup_geo2radar.py geometryGeo.h5
-lookup_geo2radar.py geometryGeo.h5 \fB\-w\fR geometryRadar.h5
-lookup_geo2radar.py geometryGeo.h5 \fB\-w\fR geometryRadar.h5 \fB\-\-parallel\fR 4
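
The conversion amounts to scattered-data interpolation: each geo-grid cell of the lookup
table stores the radar (range, azimuth) coordinate it maps to, and inverting that mapping
resamples the geo-grid indices onto the radar grid. A sketch of the idea with
scipy.interpolate.griddata (illustrative only; the script's own implementation differs):

    import numpy as np
    from scipy.interpolate import griddata

    def geo2radar_lut(rg, az, length_rdr, width_rdr):
        """rg/az: 2D geo-grid arrays holding radar range/azimuth coordinates
        (the GAMMA-style lookup table). Returns geo-grid row/col indices
        resampled onto the radar grid by nearest-neighbor inversion."""
        gy, gx = np.nonzero(np.isfinite(rg) & np.isfinite(az))
        points = np.column_stack([az[gy, gx], rg[gy, gx]])  # radar coords of samples
        ay, ax = np.mgrid[0:length_rdr, 0:width_rdr]        # every radar pixel
        lut_y = griddata(points, gy, (ay, ax), method='nearest')
        lut_x = griddata(points, gx, (ay, ax), method='nearest')
        return lut_y, lut_x
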
diff -pruN 1.3.3-2/debian/man/mintpy-mask.1 1.4.0-1/debian/man/mintpy-mask.1
--- 1.3.3-2/debian/man/mintpy-mask.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-mask.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,57 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-MASK "1" "May 2022" "mintpy-mask v1.3.3" "User Commands"
-.SH NAME
-mintpy-mask \- Mask file.
-.SH DESCRIPTION
-usage: mask.py [\-h] \fB\-m\fR MASK_FILE [\-o OUTFILE] [\-t THRESHOLD]
-.IP
-[\-\-fill FILL_VALUE] [\-x SUBSET_X SUBSET_X]
-[\-y SUBSET_Y SUBSET_Y]
-file
-.PP
-Mask file
-.SS "positional arguments:"
-.TP
-file
-File to be masked
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-m\fR MASK_FILE, \fB\-\-mask\fR MASK_FILE
-mask file used to mask the input file
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-outfile\fR OUTFILE
-Output file name.
-.TP
-\fB\-t\fR THRESHOLD
-threshold value used for masking.
-If not specified, only pixels with a mask value equal to zero are masked out.
-.TP
-\fB\-\-fill\fR FILL_VALUE
-fill the masked\-out area with the input value, e.g.
-np.nan (default), 0, 1000, ...
-If np.nan and the input data matrix is not float/complex, the matrix data type is converted to np.float32.
-.TP
-\fB\-x\fR SUBSET_X SUBSET_X
-subset range in x/cross\-track/column direction
-.TP
-\fB\-y\fR SUBSET_Y SUBSET_Y
-subset range in y/along\-track/row direction
-.SS "example:"
-.TP
-mask.py
-velocity.h5     \fB\-m\fR Mask.h5
-.TP
-mask.py
-timeseries.h5   \fB\-m\fR temporalCoherence.h5  \fB\-t\fR 0.7
-.TP
-mask.py
-ifgramStack.h5  \fB\-m\fR 100102_101120.cor     \fB\-t\fR 0.9  \fB\-y\fR  200 300  \fB\-x\fR 300 400
-.TP
-mask.py
-filt_20060924_20090214.int \fB\-m\fR waterMask.h5 \fB\-o\fR filt_20060924_20090214_msk.int
-.TP
-mask.py
-filt_20060924_20090214.cor \fB\-m\fR waterMask.h5 \fB\-o\fR filt_20060924_20090214_msk.cor
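
The masking rule above (zero or below-threshold mask pixels are filled, and integer input
is promoted to float32 when the fill value is NaN) is a few lines of numpy; a minimal
sketch, not mask.py itself:

    import numpy as np

    def mask_matrix(data, mask, threshold=None, fill_value=np.nan):
        """Fill pixels where mask == 0, or mask < threshold if one is given."""
        out = np.array(data)
        if np.isnan(fill_value) and not np.issubdtype(out.dtype, np.floating) \
                and not np.issubdtype(out.dtype, np.complexfloating):
            out = out.astype(np.float32)   # NaN needs a float/complex dtype
        invalid = (mask < threshold) if threshold is not None else (mask == 0)
        out[invalid] = fill_value
        return out
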
diff -pruN 1.3.3-2/debian/man/mintpy-modify_network.1 1.4.0-1/debian/man/mintpy-modify_network.1
--- 1.3.3-2/debian/man/mintpy-modify_network.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-modify_network.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,154 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-MODIFY_NETWORK "1" "May 2022" "mintpy-modify_network v1.3.3" "User Commands"
-.SH NAME
-mintpy-modify_network \- Modify the network of interferograms
-.SH DESCRIPTION
-usage: modify_network.py [\-h] [\-t TEMPLATE_FILE] [\-\-reset] [\-\-noaux]
-.TP
-[\-\-max\-tbase TEMPBASEMAX] [\-\-max\-pbase PERPBASEMAX]
-[\-\-max\-conn\-num CONNNUMMAX] [\-r REFERENCEFILE]
-[\-\-exclude\-ifg\-index [EXCLUDEIFGINDEX ...]]
-[\-\-exclude\-date [EXCLUDEDATE ...]]
-[\-\-start\-date STARTDATE] [\-\-end\-date ENDDATE]
-[\-\-coherence\-based] [\-\-min\-coherence MINCOHERENCE]
-[\-\-area\-ratio\-based] [\-\-min\-area\-ratio MINAREARATIO]
-[\-\-no\-mst] [\-\-mask MASKFILE] [\-\-aoi\-yx X0 Y0 X1 Y1]
-[\-\-aoi\-lalo W S E N] [\-\-lookup LOOKUPFILE] [\-\-manual]
-file
-.PP
-Modify the network of interferograms
-.SS "positional arguments:"
-.TP
-file
-Files to modify/drop network, e.g. inputs/ifgramStack.h5.
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-t\fR TEMPLATE_FILE, \fB\-\-template\fR TEMPLATE_FILE
-Template file with input options
-.TP
-\fB\-\-reset\fR
-restore all interferograms in the file by marking dropIfgram=True for all of them
-.TP
-\fB\-\-noaux\fR
-Do not update auxiliary files, e.g.
-maskConnComp.h5 or avgSpatialCoh.h5 from ifgramStack.h5
-.TP
-\fB\-\-max\-tbase\fR TEMPBASEMAX
-max temporal baseline in days
-.TP
-\fB\-\-max\-pbase\fR PERPBASEMAX
-max perpendicular baseline in meters
-.TP
-\fB\-\-max\-conn\-num\fR CONNNUMMAX
-max number of connections/neighbors per acquisition
-.TP
-\fB\-r\fR REFERENCEFILE, \fB\-\-reference\fR REFERENCEFILE
-Reference hdf5 / list file with network information.
-i.e. ifgramStack.h5, date12_list.txt
-.TP
-\fB\-\-exclude\-ifg\-index\fR [EXCLUDEIFGINDEX ...]
-index of interferograms to remove/drop.
-1 as the first
-.TP
-\fB\-\-exclude\-date\fR [EXCLUDEDATE ...]
-date(s) to remove/drop; all interferograms including the date(s) will be removed
-.TP
-\fB\-\-start\-date\fR STARTDATE, \fB\-\-min\-date\fR STARTDATE
-remove/drop interferograms with date earlier than start\-date in YYMMDD or YYYYMMDD format
-.TP
-\fB\-\-end\-date\fR ENDDATE, \fB\-\-max\-date\fR ENDDATE
-remove/drop interferograms with date later than end\-date in YYMMDD or YYYYMMDD format
-.SS "Data-driven network modification:"
-.IP
-Drop/modify network based on data
-.TP
-\fB\-\-coherence\-based\fR
-Enable coherence\-based network modification (default: False).
-.TP
-\fB\-\-min\-coherence\fR MINCOHERENCE
-Minimum coherence value (default: 0.7).
-.TP
-\fB\-\-area\-ratio\-based\fR
-Enable area ratio\-based network modification (default: False).
-.TP
-\fB\-\-min\-area\-ratio\fR MINAREARATIO
-Minimum area ratio value (default: 0.75).
-.TP
-\fB\-\-no\-mst\fR
-Do not keep interferograms in the Min Span Tree network based on the inverse of the mean coherence
-.TP
-\fB\-\-mask\fR MASKFILE
-Mask file used to calculate the spatial coherence (default: waterMask.h5 or None)
-.TP
-\fB\-\-aoi\-yx\fR X0 Y0 X1 Y1
-AOI in row/column range for coherence calculation (default: None).
-.TP
-\fB\-\-aoi\-lalo\fR W S E N
-AOI in lat/lon range for coherence calculation (default: None).
-.TP
-\fB\-\-lookup\fR LOOKUPFILE
-Lookup table/mapping transformation file for geo/radar coordinate conversion.
-Needed for mask AOI in lalo
-.SS "Manual Network:"
-.IP
-Manually select/drop/modify network
-.TP
-\fB\-\-manual\fR
-display network to manually choose line/interferogram to remove
-.SS "reference:"
-.IP
-Yunjun, Z., Fattahi, H. and Amelung, F. (2019), Small baseline InSAR time series analysis:
-Unwrapping error correction and noise reduction, Computers & Geosciences, 133, 104331,
-doi:10.1016/j.cageo.2019.104331.
-.IP
-Chaussard, E., Bürgmann, R., Fattahi, H., Nadeau, R. M., Taira, T., Johnson, C. W. and Johanson, I.
-(2015), Potential for larger earthquakes in the East San Francisco Bay Area due to the direct
-connection between the Hayward and Calaveras Faults, Geophysical Research Letters, 42(8),
-2734\-2741, doi:10.1002/2015GL063575.
-.IP
-Kang, Y., Lu, Z., Zhao, C., Xu, Y., Kim, J. W., & Gallegos, A. J. (2021).InSAR monitoring
-of creeping landslides in mountainous regions: A case study in Eldorado National Forest,
-California. Remote Sensing of Environment, 258, 112400. doi:10.1016/j.rse.2021.112400
-.SS "template options:"
-.IP
-## 1) Network modification based on temporal/perpendicular baselines, date, num of connections etc.
-mintpy.network.tempBaseMax     = auto  #[1\-inf, no], auto for no, max temporal baseline in days
-mintpy.network.perpBaseMax     = auto  #[1\-inf, no], auto for no, max perpendicular spatial baseline in meter
-mintpy.network.connNumMax      = auto  #[1\-inf, no], auto for no, max number of neighbors for each acquisition
-mintpy.network.startDate       = auto  #[20090101 / no], auto for no
-mintpy.network.endDate         = auto  #[20110101 / no], auto for no
-mintpy.network.excludeDate     = auto  #[20080520,20090817 / no], auto for no
-mintpy.network.excludeIfgIndex = auto  #[1:5,25 / no], auto for no, list of ifg index (start from 0)
-mintpy.network.referenceFile   = auto  #[date12_list.txt / ifgramStack.h5 / no], auto for no
-.IP
-## 2) Data\-driven network modification
-## a \- Coherence\-based network modification = (threshold + MST) by default
-## reference: Yunjun et al. (2019, section 4.2 and 5.3.1); Chaussard et al. (2015, GRL)
-## It calculates an average coherence for each interferogram using spatial coherence based on the input mask (with AOI)
-## Then it finds a minimum spanning tree (MST) network with inverse of average coherence as weight (keepMinSpanTree)
-## Next it excludes interferograms if a) the average coherence < minCoherence AND b) not in the MST network.
-mintpy.network.coherenceBased  = auto  #[yes / no], auto for no, exclude interferograms with coherence < minCoherence
-mintpy.network.minCoherence    = auto  #[0.0\-1.0], auto for 0.7
-.IP
-## b \- Effective Coherence Ratio network modification = (threshold + MST) by default
-## reference: Kang et al. (2021, RSE)
-## It calculates the area ratio of each interferogram that is above a spatial coherence threshold.
-## This threshold is defined as the spatial coherence of the interferograms within the input mask.
-## It then finds a minimum spanning tree (MST) network with inverse of the area ratio as weight (keepMinSpanTree)
-## Next it excludes interferograms if a) the area ratio < minAreaRatio AND b) not in the MST network.
-mintpy.network.areaRatioBased  = auto  #[yes / no], auto for no, exclude interferograms with area ratio < minAreaRatio
-mintpy.network.minAreaRatio    = auto  #[0.0\-1.0], auto for 0.75
-.IP
-## Additional common parameters for the 2) data\-driven network modification
-mintpy.network.keepMinSpanTree = auto  #[yes / no], auto for yes, keep interferograms in Min Span Tree network
-mintpy.network.maskFile        = auto  #[file name, no], auto for waterMask.h5 or no [if no waterMask.h5 found]
-mintpy.network.aoiYX           = auto  #[y0:y1,x0:x1 / no], auto for no, area of interest for coherence calculation
-mintpy.network.aoiLALO         = auto  #[S:N,W:E / no], auto for no \- use the whole area
-.SS "example:"
-.IP
-modify_network.py inputs/ifgramStack.h5 \fB\-t\fR smallbaselineApp.cfg
-modify_network.py inputs/ifgramStack.h5 \fB\-\-reset\fR
-modify_network.py inputs/ifgramStack.h5 \fB\-\-manual\fR
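
The "(threshold + MST)" rule described in the template options keeps a pair if its mean
coherence passes the threshold OR it belongs to the minimum spanning tree weighted by the
inverse coherence; a sketch with scipy (only the weighting follows the description above,
everything else is illustrative):

    import numpy as np
    from scipy.sparse import csr_matrix
    from scipy.sparse.csgraph import minimum_spanning_tree

    def keep_pairs(date12_idx, mean_coh, min_coherence=0.7, num_date=None):
        """date12_idx: (N, 2) array of (ref, sec) date indices per pair.
        mean_coh: (N,) mean spatial coherence per pair. Returns a keep mask."""
        date12_idx = np.asarray(date12_idx)
        mean_coh = np.asarray(mean_coh)
        num_date = num_date or date12_idx.max() + 1
        # graph weighted by inverse coherence (low coherence = heavy edge)
        weight = csr_matrix((1.0 / np.maximum(mean_coh, 1e-3),
                             (date12_idx[:, 0], date12_idx[:, 1])),
                            shape=(num_date, num_date))
        mst = minimum_spanning_tree(weight).toarray()
        in_mst = np.array([mst[i, j] > 0 or mst[j, i] > 0 for i, j in date12_idx])
        return (mean_coh >= min_coherence) | in_mst
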
diff -pruN 1.3.3-2/debian/man/mintpy-multilook.1 1.4.0-1/debian/man/mintpy-multilook.1
--- 1.3.3-2/debian/man/mintpy-multilook.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-multilook.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,45 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-MULTILOOK "1" "May 2022" "mintpy-multilook v1.3.3" "User Commands"
-.SH NAME
-mintpy-multilook \- Multilook. 
-.SH DESCRIPTION
-usage: multilook.py [\-h] [\-r LKS_X] [\-a LKS_Y] [\-o OUTFILE]
-.TP
-[\-m {average,nearest}] [\-\-margin TOP BOTTOM LEFT RIGHT]
-file [file ...]
-.PP
-Multilook.
-.SS "positional arguments:"
-.TP
-file
-File(s) to multilook
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-r\fR LKS_X, \fB\-\-range\fR LKS_X, \fB\-x\fR LKS_X
-number of multilooking in range  \fI\,/x\/\fP direction (default: 1).
-.TP
-\fB\-a\fR LKS_Y, \fB\-\-azimuth\fR LKS_Y, \fB\-y\fR LKS_Y
-number of multilooking in azimuth/y direction (default: 1).
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-outfile\fR OUTFILE
-Output file name. Disabled when there is more than 1 input file.
-.TP
-\fB\-m\fR {average,nearest}, \fB\-\-method\fR {average,nearest}
-downsampling method (default: average)
-e.g. nearest for geometry, average for observations
-.TP
-\fB\-\-margin\fR TOP BOTTOM LEFT RIGHT
-number of pixels on the margin to skip, (default: [0, 0, 0, 0]).
-.SS "example:"
-.TP
-multilook.py
-velocity.h5  \fB\-r\fR 15 \fB\-a\fR 15
-.TP
-multilook.py
-srtm30m.dem  \fB\-r\fR 10 \fB\-a\fR 10  \fB\-o\fR srtm30m_300m.dem
-.IP
-# Ignore / skip marginal pixels
-multilook.py ../../geom_reference/hgt.rdr.full \fB\-r\fR 300 \fB\-a\fR 100 \fB\-\-margin\fR 58 58 58 58 \fB\-o\fR hgt.rdr
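
Multilooking with the default 'average' method is a reshape-and-mean over non-overlapping
windows; a minimal numpy sketch (partial windows at the edges are cropped):

    import numpy as np

    def multilook_average(data, lks_y, lks_x):
        """Average data over lks_y x lks_x windows, cropping the partial margin."""
        length, width = data.shape
        ny, nx = length // lks_y, width // lks_x
        d = data[:ny * lks_y, :nx * lks_x]
        return d.reshape(ny, lks_y, nx, lks_x).mean(axis=(1, 3))

    # e.g. the 15x15 looks of the first example: multilook_average(vel, 15, 15)
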
diff -pruN 1.3.3-2/debian/man/mintpy-multi_transect.1 1.4.0-1/debian/man/mintpy-multi_transect.1
--- 1.3.3-2/debian/man/mintpy-multi_transect.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-multi_transect.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,56 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-MULTI_TRANSECT "1" "May 2022" "mintpy-multi_transect v1.3.3" "User Commands"
-.SH NAME
-mintpy-multi_transect \- Generating multiple profiles perpendicular to a Fault.
-.SH DESCRIPTION
-.IP
-Generating multiple profiles (each profile includes several transects [specified by \fB\-n\fR])
-perpendicular to a fault. The fault is a path specified by lat and lon coordinates.
-.IP
-Usage:
-.HP
-\fB\-n\fR number of transects used to generate one profile
-.HP
-\fB\-d\fR distance [in pixel] between individual transects to generate one profile
-.HP
-\fB\-F\fR a txt file including the fault coordinates (first column: lon, second column: lat)
-.HP
-\fB\-p\fR flip profile left\-right (yes or no) [default: no]
-.HP
-\fB\-u\fR flip up\-down [default: no]
-.HP
-\fB\-g\fR gps_file (if exists)
-.HP
-\fB\-S\fR source of GPS velocities (usgs,cmm4,mintpy)
-.TP
-\fB\-G\fR gps stations to compare with InSAR
-(all,insar,profile)
-.IP
-"all": all gps stations is projected to the profile
-"insar": same as all but limited to the area covered by insar
-"profile": only those gps stations which are in the profile area]
-.HP
-\fB\-x\fR lower bound to display in x direction
-.HP
-\fB\-X\fR higher bound to display in x direction
-.HP
-\fB\-l\fR lower bound to display in y direction
-.HP
-\fB\-h\fR higher bound to display in y direction
-.HP
-\fB\-I\fR display InSAR velocity [on] or off
-.HP
-\fB\-A\fR display Average InSAR velocity [on] or off
-.HP
-\fB\-U\fR display Standard deviation of the InSAR velocity [on] or off
-.HP
-\fB\-E\fR Export the generated transect to a matlab file [off] or on
-.HP
-\fB\-W\fR Length of a profile
-.HP
-\fB\-D\fR Distance between two consecutive average profiles
-.IP
-Example:
-.IP
-multi_transect.py \fB\-f\fR geo_velocity_masked.h5 \fB\-n\fR 50 \fB\-d\fR 1 \fB\-W\fR 10 \fB\-D\fR 2 \fB\-F\fR Chaman_fault.txt
-
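
The geometric core of the script is the profile direction: at each segment of the fault
polyline, rotating the along-strike vector by 90 degrees gives the perpendicular along
which transects are sampled. A small numpy sketch of that step (planar approximation,
illustrative names):

    import numpy as np

    def segment_normals(lon, lat):
        """Unit normals of each segment of a fault trace (planar approximation)."""
        lon, lat = np.asarray(lon), np.asarray(lat)
        dx, dy = np.diff(lon), np.diff(lat)
        length = np.hypot(dx, dy)
        # rotate (dx, dy) by +90 degrees: (-dy, dx), then normalize
        return np.column_stack([-dy / length, dx / length])
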
diff -pruN 1.3.3-2/debian/man/mintpy-plot_coherence_matrix.1 1.4.0-1/debian/man/mintpy-plot_coherence_matrix.1
--- 1.3.3-2/debian/man/mintpy-plot_coherence_matrix.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-plot_coherence_matrix.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,79 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-PLOT_COHERENCE_MATRIX "1" "May 2022" "mintpy-plot_coherence_matrix v1.3.3" "User Commands"
-.SH NAME
-mintpy-plot_coherence_matrix \- Plot the coherence matrix of one pixel (interactive)
-.SH DESCRIPTION
-usage: plot_coherence_matrix.py [\-h] [\-\-yx Y X] [\-\-lalo LAT LON]
-.TP
-[\-\-lookup LOOKUP_FILE] [\-c CMAP_NAME]
-[\-\-cmap\-vlist CMAP_VLIST CMAP_VLIST CMAP_VLIST]
-[\-\-figsize WID LEN] [\-\-img\-file IMG_FILE]
-[\-\-view\-cmd VIEW_CMD] [\-\-tcoh TCOH_FILE]
-[\-t TEMPLATE_FILE] [\-\-save] [\-\-nodisplay]
-[\-\-noverbose]
-ifgram_file
-.PP
-Plot the coherence matrix of one pixel (interactive)
-.SS "positional arguments:"
-.TP
-ifgram_file
-interferogram stack file
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-\-yx\fR Y X
-Point of interest in y(row)/x(col)
-.TP
-\fB\-\-lalo\fR LAT LON
-Point of interest in lat/lon
-.TP
-\fB\-\-lookup\fR LOOKUP_FILE, \fB\-\-lut\fR LOOKUP_FILE
-Lookup file to convert lat/lon into y/x
-.TP
-\fB\-c\fR CMAP_NAME, \fB\-\-cmap\fR CMAP_NAME
-Colormap for coherence matrix.
-Default: RdBu_truncate
-.TP
-\fB\-\-cmap\-vlist\fR CMAP_VLIST CMAP_VLIST CMAP_VLIST
-start/jump/end fraction for truncated colormap. Default: 0.0 0.7 1.0
-.TP
-\fB\-\-figsize\fR WID LEN, \fB\-\-fs\fR WID LEN
-figure size in inches. Default: [8, 4]
-.TP
-\fB\-\-img\-file\fR IMG_FILE
-dataset to show in map to facilitate point selection. Default: velocity.h5
-.TP
-\fB\-\-view\-cmd\fR VIEW_CMD
-view.py command to plot the input map file
-Default: view.py img_file \fB\-\-wrap\fR \fB\-\-noverbose\fR
-.TP
-\fB\-\-tcoh\fR TCOH_FILE
-temporal coherence file.
-.TP
-\fB\-t\fR TEMPLATE_FILE, \fB\-\-template\fR TEMPLATE_FILE
-template file.
-.TP
-\fB\-\-save\fR
-save the figure
-.TP
-\fB\-\-nodisplay\fR
-save and do not display the figure
-.TP
-\fB\-\-noverbose\fR
-Disable the verbose message printing.
-.SS "example:"
-.IP
-plot_coherence_matrix.py inputs/ifgramStack.h5
-plot_coherence_matrix.py inputs/ifgramStack.h5 \fB\-\-yx\fR 277 1069
-plot_coherence_matrix.py inputs/ifgramStack.h5 \fB\-\-lalo\fR \fB\-0\fR.8493 \fB\-91\fR.1510 \fB\-c\fR RdBu
-.IP
-# left: map view
-plot_coherence_matrix.py inputs/ifgramStack.h5 \fB\-\-view\-cmd\fR "view.py {} \fB\-\-dem\fR inputs/gsi10m.dem.wgs84"
-plot_coherence_matrix.py inputs/ifgramStack.h5 \fB\-\-view\-cmd\fR 'view.py {} \fB\-\-wrap\fR \fB\-\-wrap\-range\fR \fB\-3\fR 3'
-plot_coherence_matrix.py inputs/ifgramStack.h5 \fB\-\-view\-cmd\fR 'view.py {} \fB\-\-sub\-x\fR 900 1400 \fB\-\-sub\-y\fR 0 500'
-.IP
-# right: matrix view
-# show color jump same as the coherence threshold in network inversion with pixel\-wised masking
-plot_coherence_matrix.py inputs/ifgramStack.h5 \fB\-\-cmap\-vlist\fR 0 0.4 1
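
The matrix itself is assembled by scattering each pair's coherence at the selected pixel
into a num_date x num_date grid; a minimal sketch (date12 strings in the
'YYYYMMDD_YYYYMMDD' form; names are illustrative):

    import numpy as np

    def coherence_matrix(date12_list, coherence):
        """date12_list: e.g. ['20141213_20141225', ...]; coherence: one value
        per pair. Returns the date list and a symmetric matrix with NaN gaps."""
        dates = sorted({d for d12 in date12_list for d in d12.split('_')})
        idx = {d: i for i, d in enumerate(dates)}
        mat = np.full((len(dates), len(dates)), np.nan)
        for d12, coh in zip(date12_list, coherence):
            a, b = (idx[d] for d in d12.split('_'))
            mat[a, b] = mat[b, a] = coh
        return dates, mat
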
diff -pruN 1.3.3-2/debian/man/mintpy-plot_network.1 1.4.0-1/debian/man/mintpy-plot_network.1
--- 1.3.3-2/debian/man/mintpy-plot_network.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-plot_network.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,109 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-PLOT_NETWORK "1" "May 2022" "mintpy-plot_network v1.3.3" "User Commands"
-.SH NAME
-mintpy-plot_network \- Display Network of Interferograms
-.SH DESCRIPTION
-usage: plot_network.py [\-h] [\-\-show\-kept]
-.TP
-[\-d {pbase,offsetSNR,coherence,tbase}] [\-v VLIM VLIM]
-[\-t TEMPLATE_FILE] [\-\-mask MASKFILE] [\-c CMAP_NAME]
-[\-\-cmap\-vlist CMAP_VLIST CMAP_VLIST CMAP_VLIST]
-[\-\-fs FS] [\-\-lw LINEWIDTH] [\-\-mc MARKERCOLOR]
-[\-\-ms MARKERSIZE] [\-\-every\-year EVERY_YEAR]
-[\-\-dpi FIG_DPI] [\-\-figsize FIG_SIZE FIG_SIZE]
-[\-\-notitle] [\-\-number NUMBER] [\-\-nosplit\-cmap] [\-\-save]
-[\-\-nodisplay]
-file
-.PP
-Display Network of Interferograms
-.SS "positional arguments:"
-.TP
-file
-file with network information, ifgramStack.h5 or coherenceSpatialAvg.txt
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-\-show\-kept\fR
-display kept interferograms only, without dropped interferograms
-.SS "Color-code network/matrix plot:"
-.IP
-color\-code phase/offset pairs with coherence/baseline in network/matrix plot
-.TP
-\fB\-d\fR {pbase,offsetSNR,coherence,tbase}, \fB\-\-dset\fR {pbase,offsetSNR,coherence,tbase}
-dataset used to calculate the mean. (default: coherence)
-.TP
-\fB\-v\fR VLIM VLIM, \fB\-\-vlim\fR VLIM VLIM
-display range
-.TP
-\fB\-t\fR TEMPLATE_FILE, \fB\-\-template\fR TEMPLATE_FILE
-template file with options below:
-.TP
-mintpy.network.maskFile
-= auto  #[file name, no], auto for waterMask.h5 or no for all pixels
-.TP
-mintpy.network.aoiYX
-= auto  #[y0:y1,x0:x1 / no], auto for no, area of interest for coherence calculation
-.TP
-mintpy.network.aoiLALO
-= auto  #[lat0:lat1,lon0:lon1 / no], auto for no \- use the whole area
-.TP
-\fB\-\-mask\fR MASKFILE
-mask file used to calculate the coherence. Default: waterMask.h5 or None.
-.TP
-\fB\-c\fR CMAP_NAME, \fB\-\-colormap\fR CMAP_NAME
-colormap name for the network display. Default: RdBu_truncate
-.TP
-\fB\-\-cmap\-vlist\fR CMAP_VLIST CMAP_VLIST CMAP_VLIST
-normalized start/jump/end value for truncated colormap. Default: 0.2 0.4 1.0
-.SS "Figure:"
-.IP
-Figure settings for display
-.TP
-\fB\-\-fs\fR FS, \fB\-\-fontsize\fR FS
-font size in points
-.TP
-\fB\-\-lw\fR LINEWIDTH, \fB\-\-linewidth\fR LINEWIDTH
-line width in points
-.TP
-\fB\-\-mc\fR MARKERCOLOR, \fB\-\-markercolor\fR MARKERCOLOR
-marker color
-.TP
-\fB\-\-ms\fR MARKERSIZE, \fB\-\-markersize\fR MARKERSIZE
-marker size in points
-.TP
-\fB\-\-every\-year\fR EVERY_YEAR
-number of years per major tick on x\-axis
-.TP
-\fB\-\-dpi\fR FIG_DPI
-DPI \- dot per inch \- for display/write
-.TP
-\fB\-\-figsize\fR FIG_SIZE FIG_SIZE
-figure size in inches \- width and length
-.TP
-\fB\-\-notitle\fR
-Do not display figure title.
-.TP
-\fB\-\-number\fR NUMBER
-number mark to be plotted at the corner of the figure.
-.TP
-\fB\-\-nosplit\-cmap\fR
-do not split colormap for coherence color
-.TP
-\fB\-\-save\fR
-save the figure
-.TP
-\fB\-\-nodisplay\fR
-save and do not display the figure
-.SS "example:"
-.IP
-plot_network.py inputs/ifgramStack.h5
-plot_network.py inputs/ifgramStack.h5 \fB\-t\fR smallbaselineApp.cfg \fB\-\-nodisplay\fR   #Save figures to files without display
-plot_network.py inputs/ifgramStack.h5 \fB\-t\fR smallbaselineApp.cfg \fB\-\-show\-kept\fR   #Do not plot dropped ifgrams
-plot_network.py inputs/ifgramStack.h5 \fB\-d\fR tbase \fB\-v\fR 0 365.25 \fB\-c\fR RdYlBu_r      #Color\-code lines by temporal      baseline
-plot_network.py inputs/ifgramStack.h5 \fB\-d\fR pbase \fB\-v\fR 0 180    \fB\-c\fR RdYlBu_r      #Color\-code lines by perpendicular baseline
-plot_network.py coherenceSpatialAvg.txt
-.IP
-# offsetSNR
-plot_network.py inputs/ifgramStack.h5 \fB\-d\fR offsetSNR \fB\-v\fR 0 20 \fB\-\-cmap\-vlist\fR 0 0.2 1
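
The truncated colormap behind --cmap-vlist can be reproduced in matplotlib by sampling a
slice of a base colormap; a sketch of the start/end part of that option (how MintPy
handles the middle "jump" fraction is not reproduced here):

    import numpy as np
    import matplotlib.pyplot as plt
    from matplotlib.colors import LinearSegmentedColormap

    def truncate_cmap(name, vmin=0.2, vmax=1.0, n=256):
        """Build a new colormap from the [vmin, vmax] slice of a named one."""
        base = plt.get_cmap(name)
        colors = base(np.linspace(vmin, vmax, n))
        return LinearSegmentedColormap.from_list(name + '_trunc', colors, N=n)

    # e.g. the default 0.2/1.0 range above: truncate_cmap('RdBu', 0.2, 1.0)
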
diff -pruN 1.3.3-2/debian/man/mintpy-plot_transection.1 1.4.0-1/debian/man/mintpy-plot_transection.1
--- 1.3.3-2/debian/man/mintpy-plot_transection.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-plot_transection.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,202 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-PLOT_TRANSECTION "1" "May 2022" "mintpy-plot_transection v1.3.3" "User Commands"
-.SH NAME
-mintpy-plot_transection \- Generate transect/profile along a line
-.SH DESCRIPTION
-usage: plot_transection.py [\-h] [\-\-dset DSET] [\-v VMIN VMAX]
-.TP
-[\-\-offset OFFSET [OFFSET ...]] [\-\-noverbose]
-[\-\-start\-yx Y0 X0] [\-\-end\-yx Y1 X1]
-[\-\-start\-lalo LAT0 LON0] [\-\-end\-lalo LAT1 LON1]
-[\-\-line\-file LOLA_FILE]
-[\-\-interpolation {nearest,bilinear,cubic}]
-[\-\-ms MARKER_SIZE] [\-\-fontsize FONT_SIZE]
-[\-\-fontcolor FONT_COLOR] [\-\-nowhitespace]
-[\-\-noaxis] [\-\-notick] [\-c COLORMAP] [\-\-cm\-lut NUM]
-[\-\-cm\-vlist CMAP_VLIST CMAP_VLIST CMAP_VLIST]
-[\-\-nocbar] [\-\-cbar\-nbins NUM]
-[\-\-cbar\-ext {None,max,neither,both,min}]
-[\-\-cbar\-label CBAR_LABEL] [\-\-cbar\-loc CBAR_LOC]
-[\-\-cbar\-size CBAR_SIZE] [\-\-notitle] [\-\-title\-in]
-[\-\-figtitle FIG_TITLE] [\-\-title4sen]
-[\-\-figsize WID LEN] [\-\-dpi DPI]
-[\-\-figext {.emf,.eps,.pdf,.png,.ps,.raw,.rgba,.svg,.svgz}]
-[\-\-fignum NUM] [\-\-nrows NUM] [\-\-ncols NUM]
-[\-\-wspace FIG_WID_SPACE] [\-\-hspace FIG_HEI_SPACE]
-[\-\-no\-tight\-layout] [\-\-coord {radar,geo}]
-[\-\-animation] [\-o [OUTFILE ...]] [\-\-save]
-[\-\-nodisplay] [\-\-update]
-file [file ...]
-.PP
-Generate transect/profile along a line
-.SS "positional arguments:"
-.TP
-file
-input file to show transection
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-\-dset\fR DSET
-Dataset name to read
-.TP
-\fB\-v\fR VMIN VMAX, \fB\-\-vlim\fR VMIN VMAX
-Display limits for matrix plotting.
-.TP
-\fB\-\-offset\fR OFFSET [OFFSET ...], \fB\-\-off\fR OFFSET [OFFSET ...]
-offset between transects [for multiple files only; default: [0.05] m].
-number of input offsets should be:
-.TP
-1 \- same (sequential) offset between adjacent transects OR
-num_file \- different (cumulative) offset for each file, starting from 0.
-.TP
-\fB\-\-noverbose\fR
-Disable the verbose message printing.
-.SS "Profile location:"
-.IP
-Start/end points of profile
-.TP
-\fB\-\-start\-yx\fR Y0 X0, \fB\-\-yx0\fR Y0 X0
-start point of the profile in pixel number [y, x]
-.TP
-\fB\-\-end\-yx\fR Y1 X1, \fB\-\-yx1\fR Y1 X1
-end   point of the profile in pixel number [y, x]
-.TP
-\fB\-\-start\-lalo\fR LAT0 LON0, \fB\-\-lalo0\fR LAT0 LON0
-start point of the profile in [lat, lon]
-.TP
-\fB\-\-end\-lalo\fR LAT1 LON1, \fB\-\-lalo1\fR LAT1 LON1
-end   point of the profile in [lat, lon]
-.TP
-\fB\-\-line\-file\fR LOLA_FILE
-file with start and end point info in lon lat, same as GMT format.
-GMT xy file, i.e. transect_lonlat.xy:
->
-131.1663    33.1157
-131.2621    33.0860
-.TP
-\fB\-\-interpolation\fR {nearest,bilinear,cubic}
-interpolation method while extracting the profile along the line. Default: nearest.
-.TP
-\fB\-\-ms\fR MARKER_SIZE, \fB\-\-markersize\fR MARKER_SIZE
-Point marker size. Default: 2.0
-.SS "Figure:"
-.IP
-Figure settings for display
-.TP
-\fB\-\-fontsize\fR FONT_SIZE
-font size
-.TP
-\fB\-\-fontcolor\fR FONT_COLOR
-font color (default: k).
-.TP
-\fB\-\-nowhitespace\fR
-do not display white space
-.TP
-\fB\-\-noaxis\fR
-do not display axis
-.TP
-\fB\-\-notick\fR
-do not display tick in x/y axis
-.TP
-\fB\-c\fR COLORMAP, \fB\-\-colormap\fR COLORMAP
-colormap used for display, i.e. jet, cmy, RdBu, hsv, jet_r, temperature, viridis, etc.
-More at https://mintpy.readthedocs.io/en/latest/api/colormaps/
-.TP
-\fB\-\-cm\-lut\fR NUM, \fB\-\-cmap\-lut\fR NUM
-number of increment of colormap lookup table (default: 256).
-.TP
-\fB\-\-cm\-vlist\fR CMAP_VLIST CMAP_VLIST CMAP_VLIST, \fB\-\-cmap\-vlist\fR CMAP_VLIST CMAP_VLIST CMAP_VLIST
-list of 3 float numbers, for truncated colormap only (default: [0.0, 0.7, 1.0]).
-.TP
-\fB\-\-nocbar\fR, \fB\-\-nocolorbar\fR
-do not display colorbar
-.TP
-\fB\-\-cbar\-nbins\fR NUM
-number of bins for colorbar.
-.TP
-\fB\-\-cbar\-ext\fR {None,max,neither,both,min}
-Extend setting of colorbar; based on data stat by default.
-.TP
-\fB\-\-cbar\-label\fR CBAR_LABEL
-colorbar label
-.TP
-\fB\-\-cbar\-loc\fR CBAR_LOC
-colorbar location for single plot (default: right).
-.TP
-\fB\-\-cbar\-size\fR CBAR_SIZE
-colorbar size and pad (default: 2%).
-.TP
-\fB\-\-notitle\fR
-do not display title
-.TP
-\fB\-\-title\-in\fR
-draw title in/out of axes
-.TP
-\fB\-\-figtitle\fR FIG_TITLE
-Title shown in the figure.
-.TP
-\fB\-\-title4sen\fR, \fB\-\-title4sentinel1\fR
-display Sentinel\-1 A/B and IPF info in title.
-.TP
-\fB\-\-figsize\fR WID LEN
-figure size in inches \- width and length
-.TP
-\fB\-\-dpi\fR DPI
-DPI \- dot per inch \- for display/write (default: 300).
-.TP
-\fB\-\-figext\fR {.emf,.eps,.pdf,.png,.ps,.raw,.rgba,.svg,.svgz}
-File extension for figure output file (default: .png).
-.TP
-\fB\-\-fignum\fR NUM
-number of figure windows
-.TP
-\fB\-\-nrows\fR NUM
-subplot number in row
-.TP
-\fB\-\-ncols\fR NUM
-subplot number in column
-.TP
-\fB\-\-wspace\fR FIG_WID_SPACE
-width space between subplots in inches
-.TP
-\fB\-\-hspace\fR FIG_HEI_SPACE
-height space between subplots in inches
-.TP
-\fB\-\-no\-tight\-layout\fR
-disable automatic tight layout for multiple subplots
-.TP
-\fB\-\-coord\fR {radar,geo}
-Display in radar/geo coordinate system (for geocoded files only; default: geo).
-.TP
-\fB\-\-animation\fR
-enable animation mode
-.SS "Save/Output:"
-.IP
-Save figure and write to file(s)
-.TP
-\fB\-o\fR [OUTFILE ...], \fB\-\-outfile\fR [OUTFILE ...]
-save the figure with assigned filename.
-By default, it's calculated based on the input file name.
-.TP
-\fB\-\-save\fR
-save the figure
-.TP
-\fB\-\-nodisplay\fR
-save and do not display the figure
-.TP
-\fB\-\-update\fR
-enable update mode for saving the figure: skip running if
-1) output file already exists AND
-2) output file is newer than input file.
-.SS "example:"
-.IP
-plot_transection.py velocity.h5 \fB\-\-start\-yx\fR 5290 5579 \fB\-\-end\-yx\fR 12177 482
-plot_transection.py velocity.h5 \fB\-\-start\-lalo\fR 30.125 129.988 \fB\-\-end\-lalo\fR 30.250 130.116
-plot_transection.py velocity.h5 \fB\-\-line\-file\fR  transect_lonlat.xy \fB\-\-dem\fR gsi10m.dem
-.IP
-# Multiple files
-plot_transection.py AlosA*/velocity.h5 AlosD*/velocity.h5 \fB\-\-off\fR 2
-plot_transection.py Kirishima2017*.h5 Kirishima2008*.h5 \fB\-\-off\fR 0 0 10 10
-plot_transection.py Kirishima2017*.h5 Kirishima2008*.h5 \fB\-\-off\fR 0 0 10 10 \fB\-\-lalo0\fR 31.947 130.843 \fB\-\-lalo1\fR 31.947 130.860
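
Extracting the profile reduces to interpolating the 2D field at evenly spaced points
between the two endpoints; a sketch with scipy.ndimage.map_coordinates, where order
0/1/3 corresponds to the nearest/bilinear/cubic choices above (names illustrative):

    import numpy as np
    from scipy.ndimage import map_coordinates

    def transect(data, y0, x0, y1, x1, num=200, order=1):
        """Sample data along the line (y0, x0) -> (y1, x1)."""
        ys = np.linspace(y0, y1, num)
        xs = np.linspace(x0, x1, num)
        values = map_coordinates(data, np.vstack([ys, xs]), order=order)
        dist = np.hypot(ys - y0, xs - x0)   # distance along the profile [pixel]
        return dist, values
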
diff -pruN 1.3.3-2/debian/man/mintpy-prep_aria.1 1.4.0-1/debian/man/mintpy-prep_aria.1
--- 1.3.3-2/debian/man/mintpy-prep_aria.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-prep_aria.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,102 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-PREP_ARIA "1" "May 2022" "mintpy-prep_aria v1.3.3" "User Commands"
-.SH NAME
-mintpy-prep_aria \- Prepare ARIA processed products for MintPy.
-.SH DESCRIPTION
-usage: prep_aria.py [\-h] [\-t TEMPLATE_FILE] [\-o OUTFILE OUTFILE] [\-\-update]
-.TP
-[\-\-compression {gzip,None,lzf}] [\-s STACKDIR] [\-u UNWFILE]
-[\-c CORFILE] [\-l CONNCOMPFILE] [\-\-amp\-stack\-name MAGFILE]
-[\-d DEMFILE] [\-i INCANGLEFILE] [\-a AZANGLEFILE]
-[\-w WATERMASKFILE]
-.PP
-Prepare ARIA processed products for MintPy.
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-t\fR TEMPLATE_FILE, \fB\-\-template\fR TEMPLATE_FILE
-template file with the options
-.TP
-\fB\-o\fR OUTFILE OUTFILE, \fB\-\-output\fR OUTFILE OUTFILE
-output HDF5 file
-.TP
-\fB\-\-update\fR
-Enable the update mode: check for datasets that are already loaded and skip them.
-.TP
-\fB\-\-compression\fR {gzip,None,lzf}
-HDF5 file compression, default: None
-.SS "interferogram stack:"
-.TP
-\fB\-s\fR STACKDIR, \fB\-\-stack\-dir\fR STACKDIR
-The directory which contains stack VRT files.
-.TP
-\fB\-u\fR UNWFILE, \fB\-\-unwrap\-stack\-name\fR UNWFILE
-Name of the stack VRT file of unwrapped data.
-default: unwrapStack.vrt
-.TP
-\fB\-c\fR CORFILE, \fB\-\-coherence\-stack\-name\fR CORFILE
-Name of the stack VRT file of coherence data.
-default: cohStack.vrt
-.TP
-\fB\-l\fR CONNCOMPFILE, \fB\-\-conn\-comp\-stack\-name\fR CONNCOMPFILE
-Name of the stack VRT file of connected component data.
-default: connCompStack.vrt
-.TP
-\fB\-\-amp\-stack\-name\fR MAGFILE, \fB\-\-amplitude\-stack\-name\fR MAGFILE
-Name of the stack VRT file of interferogram amplitude data (optional).
-default: ampStack.vrt
-.SS "geometry:"
-.TP
-\fB\-d\fR DEMFILE, \fB\-\-dem\fR DEMFILE
-Name of the DEM file
-.TP
-\fB\-i\fR INCANGLEFILE, \fB\-\-incidence\-angle\fR INCANGLEFILE
-Name of the incidence angle file
-.TP
-\fB\-a\fR AZANGLEFILE, \fB\-\-az\-angle\fR AZANGLEFILE, \fB\-\-azimuth\-angle\fR AZANGLEFILE
-Name of the azimuth angle file.
-.TP
-\fB\-w\fR WATERMASKFILE, \fB\-\-water\-mask\fR WATERMASKFILE
-Name of the water mask file
-.SS "template options:"
-.IP
-########## 1. load_data
-## no   \- save   0% disk usage, fast [default]
-## lzf  \- save ~57% disk usage, relatively slow
-## gzip \- save ~62% disk usage, very slow [not recommended]
-mintpy.load.processor      = aria  #[isce, aria, snap, gamma, roipac], auto for isce
-mintpy.load.updateMode     = auto  #[yes / no], auto for yes, skip re\-loading if HDF5 files are complete
-mintpy.load.compression    = auto  #[gzip / lzf / no], auto for no.
-##\-\-\-\-\-\-\-\-\-interferogram datasets:
-mintpy.load.unwFile        = ../stack/unwrapStack.vrt
-mintpy.load.corFile        = ../stack/cohStack.vrt
-mintpy.load.connCompFile   = ../stack/connCompStack.vrt
-mintpy.load.magFile        = ../stack/ampStack.vrt        # optional
-##\-\-\-\-\-\-\-\-\-geometry datasets:
-mintpy.load.demFile        = ../DEM/SRTM_3arcsec.dem
-mintpy.load.incAngleFile   = ../incidenceAngle/*.vrt
-mintpy.load.azAngleFile    = ../azimuthAngle/*.vrt
-mintpy.load.waterMaskFile  = ../mask/watermask.msk
-##\-\-\-\-\-\-\-\-\-multilook (optional):
-## multilook while loading data with nearest interpolation, to reduce dataset size
-mintpy.load.ystep          = auto    #[int >= 1], auto for 1 \- no multilooking
-mintpy.load.xstep          = auto    #[int >= 1], auto for 1 \- no multilooking
-##\-\-\-\-\-\-\-\-\-subset (optional):
-## if both yx and lalo are specified, use lalo option
-mintpy.subset.yx           = auto    #[y0:y1,x0:x1 / no], auto for no
-mintpy.subset.lalo         = auto    #[lat0:lat1,lon0:lon1 / no], auto for no
-.SS "example:"
-.TP
-prep_aria.py \fB\-t\fR smallbaselineApp.cfg
-# recommended
-.IP
-prep_aria.py \fB\-t\fR SanFranSenDT42.txt
-prep_aria.py \fB\-s\fR ../stack/ \fB\-d\fR ../DEM/SRTM_3arcsec.dem \fB\-i\fR ../incidenceAngle/*.vrt
-prep_aria.py \fB\-s\fR ../stack/ \fB\-d\fR ../DEM/SRTM_3arcsec.dem \fB\-i\fR ../incidenceAngle/*.vrt \fB\-a\fR ../azimuthAngle/*.vrt \fB\-w\fR ../mask/watermask.msk
-.IP
-# download / extract / prepare the interferogram stack from ARIA using ARIA\-tools:
-# reference: https://github.com/aria\-tools/ARIA\-tools
-ariaDownload.py \fB\-b\fR '37.25 38.1 \fB\-122\fR.6 \fB\-121\fR.75' \fB\-\-track\fR 42
-ariaTSsetup.py \fB\-f\fR 'products/*.nc' \fB\-b\fR '37.25 38.1 \fB\-122\fR.6 \fB\-121\fR.75' \fB\-\-mask\fR Download \fB\-\-num_threads\fR 4 \fB\-\-verbose\fR
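
The stack VRT files referenced above are ordinary GDAL datasets with one band per pair,
so they can be inspected directly; a minimal GDAL sketch (band metadata keys vary with
the ARIA-tools version, hence they are printed rather than assumed):

    from osgeo import gdal

    ds = gdal.Open('../stack/unwrapStack.vrt')
    print('width, length, num_pairs:',
          ds.RasterXSize, ds.RasterYSize, ds.RasterCount)
    band = ds.GetRasterBand(1)
    print('band 1 metadata:', band.GetMetadata())   # pair dates etc., if present
    print('band 1 shape:', band.ReadAsArray().shape)
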
diff -pruN 1.3.3-2/debian/man/mintpy-prep_cosicorr.1 1.4.0-1/debian/man/mintpy-prep_cosicorr.1
--- 1.3.3-2/debian/man/mintpy-prep_cosicorr.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-prep_cosicorr.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,30 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-PREP_COSICORR "1" "May 2022" "mintpy-prep_cosicorr v1.3.3" "User Commands"
-.SH NAME
-mintpy-prep_cosicorr \- Prepare attributes file for COSI\-Corr pixel offset product. 
-.SH DESCRIPTION
-usage: prep_cosicorr.py [\-h] [\-m META_FILE] file [file ...]
-.PP
-Prepare attributes file for COSI\-Corr pixel offset product.
-.SS "positional arguments:"
-.TP
-file
-cosicorr file(s)
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-m\fR META_FILE, \fB\-\-metadata\fR META_FILE
-metadata file with date info. E.g.:
-offset1NS.tif  20160206 20161122
-offset1EW.tif  20160206 20161122
-offset1SNR.tif 20160206 20161122
-offset2NS.tif  20160206 20170225
-offset2EW.tif  20160206 20170225
-offset2SNR.tif 20160206 20170225
-\&...            ...   ...
-.SS "example:"
-.IP
-prep_cosicorr.py offsets/*offset.tif \fB\-m\fR metadata.txt
-prep_cosicorr.py snr/*snr.tif        \fB\-m\fR metadata.txt
diff -pruN 1.3.3-2/debian/man/mintpy-prep_fringe.1 1.4.0-1/debian/man/mintpy-prep_fringe.1
--- 1.3.3-2/debian/man/mintpy-prep_fringe.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-prep_fringe.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,78 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-PREP_FRINGE "1" "May 2022" "mintpy-prep_fringe v1.3.3" "User Commands"
-.SH NAME
-mintpy-prep_fringe \- Prepare FRInGE products for MintPy
-.SH DESCRIPTION
-usage: prep_fringe.py [\-h] [\-u UNWFILE] [\-c COHFILE] [\-\-ps\-mask PSMASKFILE]
-.TP
-[\-g GEOMDIR] [\-m METAFILE] [\-b BASELINEDIR] [\-o OUTDIR]
-[\-r LKS_X] [\-a LKS_Y] [\-\-geom\-only] [\-\-sub\-x XMIN XMAX]
-[\-\-sub\-y YMIN YMAX]
-.PP
-Prepare FRInGE products for MintPy
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-u\fR UNWFILE, \fB\-\-unw\-file\fR UNWFILE
-path pattern of unwrapped interferograms (default: ./PS_DS/unwrap/*.unw).
-.TP
-\fB\-c\fR COHFILE, \fB\-\-coh\-file\fR COHFILE
-temporal coherence file (default: ./PS_DS/tcorr_ds_ps.bin).
-.TP
-\fB\-\-ps\-mask\fR PSMASKFILE
-PS pixels file (default: ./ampDispersion/ps_pixels).
-.TP
-\fB\-g\fR GEOMDIR, \fB\-\-geom\-dir\fR GEOMDIR
-FRInGE geometry directory (default: ./geometry).
-This is used to grab 1) the bounding box
-.IP
-AND 2) the geometry source directory where the binary files are.
-.TP
-\fB\-m\fR METAFILE, \fB\-\-meta\-file\fR METAFILE
-metadata file (default: ../reference/IW*.xml).
-e.g.: ./reference/IW1.xml        for ISCE/topsStack OR
-.IP
-\&./referenceShelve/data.dat for ISCE/stripmapStack
-.TP
-\fB\-b\fR BASELINEDIR, \fB\-\-baseline\-dir\fR BASELINEDIR
-baseline directory (default: ../baselines).
-.TP
-\fB\-o\fR OUTDIR, \fB\-\-out\-dir\fR OUTDIR
-output directory (default: ./mintpy).
-.TP
-\fB\-r\fR LKS_X, \fB\-\-range\fR LKS_X
-number of looks in range direction, for multilooking applied after fringe processing.
-Only impacts metadata. (default: 1).
-.TP
-\fB\-a\fR LKS_Y, \fB\-\-azimuth\fR LKS_Y
-number of looks in azimuth direction, for multilooking applied after fringe processing.
-Only impacts metadata. (default: 1).
-.TP
-\fB\-\-geom\-only\fR
-Only create the geometry file (useful for geocoding a watermask).
-.SS "Subset:"
-.IP
-Display dataset in subset range
-.TP
-\fB\-\-sub\-x\fR XMIN XMAX, \fB\-\-subx\fR XMIN XMAX, \fB\-\-subset\-x\fR XMIN XMAX
-subset display in x/cross\-track/range direction
-.TP
-\fB\-\-sub\-y\fR YMIN YMAX, \fB\-\-suby\fR YMIN YMAX, \fB\-\-subset\-y\fR YMIN YMAX
-subset display in y/along\-track/azimuth direction
-.SS "example:"
-.IP
-prep_fringe.py \fB\-u\fR './PS_DS/unwrap/*.unw' \fB\-c\fR ./PS_DS/tcorr_ds_ps.bin \fB\-g\fR ./geometry \fB\-m\fR '../reference/IW*.xml' \fB\-b\fR ../baselines \fB\-o\fR ./mintpy
-.IP
-cd \fI\,~/data/SanAndreasSenDT42/fringe\/\fP
-prep_fringe.py
-.IP
-## example commands after prep_fringe.py
-reference_point.py timeseries.h5 \fB\-y\fR 500 \fB\-x\fR 1150
-generate_mask.py temporalCoherence.h5 \fB\-m\fR 0.7 \fB\-o\fR maskTempCoh.h5
-tropo_pyaps3.py \fB\-f\fR timeseries.h5 \fB\-g\fR inputs/geometryRadar.h5
-remove_ramp.py timeseries_ERA5.h5 \fB\-m\fR maskTempCoh.h5 \fB\-s\fR linear
-dem_error.py timeseries_ERA5_ramp.h5 \fB\-g\fR inputs/geometryRadar.h5
-timeseries2velocity.py timeseries_ERA5_ramp_demErr.h5
-geocode.py velocity.h5 \fB\-l\fR inputs/geometryRadar.h5
diff -pruN 1.3.3-2/debian/man/mintpy-prep_gamma.1 1.4.0-1/debian/man/mintpy-prep_gamma.1
--- 1.3.3-2/debian/man/mintpy-prep_gamma.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-prep_gamma.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,122 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-PREP_GAMMA "1" "May 2022" "mintpy-prep_gamma v1.3.3" "User Commands"
-.SH NAME
-mintpy-prep_gamma \- Prepare attributes file for Gamma product.
-.SH DESCRIPTION
-usage: prep_gamma.py [\-h]
-.TP
-[\-\-sensor {alos,alos2,csk,env,ers,gfen3,jers,ksat5,ni,rs1,rs2,rcm,sen,tsx,uav}]
-file [file ...]
-.PP
-Prepare attributes file for Gamma product.
-.IP
-For each interferogram, including unwrapped/wrapped interferograms and coherence, 3 metadata files are required:
-1) reference .par file, e.g. 130118_4rlks.amp.par
-2) secondary .par file, e.g. 130129_4rlks.amp.par
-3) interferogram .off file, e.g. 130118\-130129_4rlks.off
-.IP
-Other metadata files are recommended and can be generated from the above 3 if they do not exist, more specifically:
-4) baseline files, e.g. 130118\-130129_4rlks.baseline and 130118\-130129_4rlks.base_perp
-.IP
-(They can be generated from files 1\-3 with the Gamma commands base_orbit and base_perp.)
-.IP
-5) corner files, e.g. 130118_4rlks.amp.corner_full and 130118_4rlks.amp.corner
-.IP
-(They can be generated from file 1 with the Gamma command SLC_corners.)
-.IP
-This script will read all these files (generating 4 and 5 if they do not exist), merge them into one, convert their names from
-Gamma style to ROI_PAC style, and write a metadata file with the same name as the input binary data file plus the suffix .rsc,
-e.g. diff_filt_HDR_130118\-130129_4rlks.unw.rsc
-.IP
-For DEM file in radar/geo coordinates (.hgt_sim or .rdc.dem / .utm.dem) and
-.IP
-lookup table file for geocoding (.UTM_TO_RDC), 2 metadata files are required:
-.IP
-1) .par file, for DEM in geo coordinates and the lookup table, e.g.: sim_150911_4rlks.utm.dem.par
-.IP
-2) .diff_par file, for DEM in radar coordinates, e.g. sim_150911_4rlks.diff_par
-.IP
-Here is an example of how your Gamma files should look:
-Before loading:
-.IP
-For each interferogram, 5 files are needed:
-.IP
-130118\-130129_4rlks.off
-130118_4rlks.amp.par
-130129_4rlks.amp.par
-filt_130118\-130129_4rlks.cor
-diff_130118\-130129_4rlks.unw
-.IP
-For each dataset, only one sim* folder with 5 files is needed:
-.IP
-sim_150911_4rlks.UTM_TO_RDC
-sim_150911_4rlks.diff_par
-sim_150911_4rlks.hgt_sim or sim_150911.rdc.dem
-sim_150911_4rlks.utm.dem
-sim_150911_4rlks.utm.dem.par
-.IP
-After running prep_gamma.py:
-.IP
-For each interferogram:
-.IP
-130118\-130129_4rlks.base_perp
-130118\-130129_4rlks.baseline
-130118\-130129_4rlks.off
-130118_4rlks.amp.corner
-130118_4rlks.amp.corner_full
-130118_4rlks.amp.par
-130129_4rlks.amp.par
-filt_130118\-130129_4rlks.cor
-filt_130118\-130129_4rlks.cor.rsc
-diff_130118\-130129_4rlks.unw
-diff_130118\-130129_4rlks.unw.rsc
-.IP
-For the geometry files in each dataset:
-.IP
-sim_150911_4rlks.UTM_TO_RDC
-sim_150911_4rlks.UTM_TO_RDC.rsc
-sim_150911_4rlks.diff_par
-sim_150911_4rlks.rdc.dem      or sim_150911_4rlks.hgt_sim
-sim_150911_4rlks.rdc.dem.rsc  or sim_150911_4rlks.hgt_sim.rsc
-sim_150911_4rlks.utm.dem
-sim_150911_4rlks.utm.dem.par
-sim_150911_4rlks.utm.dem.rsc
-.IP
-Notes: both \- and _ are supported;
-.IP
-both YYMMDD and YYYYMMDD naming are also supported;
-if no multilooking is applied, do not add "_4rlks" to your file names.
-.SS "positional arguments:"
-.TP
-file
-Gamma file(s)
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-\-sensor\fR {alos,alos2,csk,env,ers,gfen3,jers,ksat5,ni,rs1,rs2,rcm,sen,tsx,uav}
-SAR sensor
-.SS "example:"
-.TP
-prep_gamma.py
-diff_filt_HDR_20130118_20130129_4rlks.unw
-.TP
-prep_gamma.py
-interferograms/*/diff_*rlks.unw \fB\-\-sensor\fR sen
-.TP
-prep_gamma.py
-interferograms/*/filt_*rlks.cor
-.TP
-prep_gamma.py
-interferograms/*/diff_*rlks.int
-.TP
-prep_gamma.py
-sim_20150911_20150922.hgt_sim
-.TP
-prep_gamma.py
-sim_20150911_20150922.utm.dem
-.TP
-prep_gamma.py
-sim_20150911_20150922.UTM_TO_RDC
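
Both sides of the conversion are simple text formats: Gamma .par/.off files are
"key: value" lines, and the ROI_PAC .rsc files written here are "KEY value" lines.
A simplified sketch of the two steps (the parsing rules are an assumption: one split at
the first ':', units left attached to the value):

    def read_gamma_par(fname):
        """Parse a Gamma .par/.off/.dem.par file into a dict (simplified)."""
        meta = {}
        with open(fname) as f:
            for line in f:
                if ':' in line:
                    key, value = line.split(':', 1)
                    meta[key.strip()] = value.strip()
        return meta

    def write_roipac_rsc(meta, fname):
        """Write a dict as a ROI_PAC style .rsc metadata file."""
        with open(fname, 'w') as f:
            for key, value in meta.items():
                f.write('{:<40}{}\n'.format(key, value))
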
diff -pruN 1.3.3-2/debian/man/mintpy-prep_giant.1 1.4.0-1/debian/man/mintpy-prep_giant.1
--- 1.3.3-2/debian/man/mintpy-prep_giant.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-prep_giant.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,41 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-PREP_GIANT "1" "May 2022" "mintpy-prep_giant v1.3.3" "User Commands"
-.SH NAME
-mintpy-prep_giant \- Prepare attributes for GIAnT timeseries file.
-.SH DESCRIPTION
-usage: prep_giant.py [\-h] [\-x XML_FILE [XML_FILE ...]] file
-.PP
-Prepare attributes for GIAnT timeseries file.
-.SS "positional arguments:"
-.TP
-file
-GIAnT timeseries file
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-x\fR XML_FILE [XML_FILE ...], \fB\-\-xml\fR XML_FILE [XML_FILE ...]
-XML file with data setting info.
-.SS "example:"
-.TP
-prep_giant.py
-LS\-PARAMS.h5
-.TP
-prep_giant.py
-TS\-PARAMS.h5
-.TP
-prep_giant.py
-NSBAS\-PARAMS.h5
-.TP
-prep_giant.py
-RAW\-STACK.h5
-.TP
-prep_giant.py
-PROC\-STACK.h5
-.TP
-prep_giant.py
-LS\-PARAMS.h5 \fB\-x\fR ../data.xml ../sbas.xml ../mints.xml
-.TP
-prep_giant.py
-LS\-PARAMS.h5 \fB\-x\fR ../data.xml ../sbas.xml ../mints.xml ../filt_fine.unw.rsc
diff -pruN 1.3.3-2/debian/man/mintpy-prep_gmtsar.1 1.4.0-1/debian/man/mintpy-prep_gmtsar.1
--- 1.3.3-2/debian/man/mintpy-prep_gmtsar.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-prep_gmtsar.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,25 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-PREP_GMTSAR "1" "May 2022" "mintpy-prep_gmtsar v1.3.3" "User Commands"
-.SH NAME
-mintpy-prep_gmtsar \- Prepare GMTSAR metadata files.
-.SH DESCRIPTION
-usage: prep_gmtsar.py [\-h] [\-\-mintpy\-dir MINTPY_DIR] [\-\-force] template_file
-.PP
-Prepare GMTSAR metadata files.
-.SS "positional arguments:"
-.TP
-template_file
-MintPy template file for GMTSAR products.
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-\-mintpy\-dir\fR MINTPY_DIR
-MintPy directory (default: ./).
-.TP
-\fB\-\-force\fR
-Force to overwrite all .rsc metadata files.
-.SS "example:"
-.IP
-prep_gmtsar.py StHelensEnvDT156.txt
diff -pruN 1.3.3-2/debian/man/mintpy-prep_hyp3.1 1.4.0-1/debian/man/mintpy-prep_hyp3.1
--- 1.3.3-2/debian/man/mintpy-prep_hyp3.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-prep_hyp3.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,98 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY\-PREP_HYP3 "1" "May 2022" "mintpy\-prep_hyp3 v1.3.3" "User Commands"
-.SH NAME
-mintpy\-prep_hyp3 \- Prepare attributes file for HyP3 InSAR product.
-.SH DESCRIPTION
-usage: prep_hyp3.py [\-h] file [file ...]
-.PP
-Prepare attributes file for HyP3 InSAR product.
-.IP
-For each interferogram, the unwrapped interferogram, coherence, and metadata file names are required, e.g.:
-.EX
-1) S1AA_20161223T070700_20170116T070658_VVP024_INT80_G_ueF_74C2_unw_phase.tif
-2) S1AA_20161223T070700_20170116T070658_VVP024_INT80_G_ueF_74C2_corr.tif
-3) S1AA_20161223T070700_20170116T070658_VVP024_INT80_G_ueF_74C2.txt
-.EE
-.TP
-A DEM filename is needed and an incidence angle filename is recommended
-e.g.:
-.IP
-.EX
-1) S1AA_20161223T070700_20170116T070658_VVP024_INT80_G_ueF_74C2_dem.tif
-2) S1AA_20161223T070700_20170116T070658_VVP024_INT80_G_ueF_74C2_lv_theta.tif
-.EE
-.IP
-This script will read these files, read the geospatial metadata from GDAL,
-find the corresponding HyP3 metadata file (for interferograms and coherence),
-and write to a ROI_PAC .rsc metadata file with the same name as the input file with suffix .rsc,
-e.g. S1AA_20161223T070700_20170116T070658_VVP024_INT80_G_ueF_74C2_unw_phase.tif.rsc
-.IP
-Here is an example of how your HyP3 files should look:
-.IP
-Before loading:
-.IP
-For each interferogram, 3 files are needed:
-.IP
-.EX
-S1AA_20161223T070700_20170116T070658_VVP024_INT80_G_ueF_74C2_unw_phase_clip.tif
-S1AA_20161223T070700_20170116T070658_VVP024_INT80_G_ueF_74C2_corr_clip.tif
-S1AA_20161223T070700_20170116T070658_VVP024_INT80_G_ueF_74C2.txt
-.EE
-.IP
-For the geometry, 2 files are recommended:
-.TP
-S1AA_20161223T070700_20170116T070658_VVP024_INT80_G_ueF_74C2_dem_clip.tif
-(required)
-.IP
-S1AA_20161223T070700_20170116T070658_VVP024_INT80_G_ueF_74C2_lv_theta_clip.tif (optional but recommended)
-.IP
-After running prep_hyp3.py:
-.IP
-For each interferogram:
-.IP
-.EX
-S1AA_20161223T070700_20170116T070658_VVP024_INT80_G_ueF_74C2_unw_phase_clip.tif
-S1AA_20161223T070700_20170116T070658_VVP024_INT80_G_ueF_74C2_unw_phase_clip.tif.rsc
-S1AA_20161223T070700_20170116T070658_VVP024_INT80_G_ueF_74C2_corr_clip.tif
-S1AA_20161223T070700_20170116T070658_VVP024_INT80_G_ueF_74C2_corr_clip.tif.rsc
-S1AA_20161223T070700_20170116T070658_VVP024_INT80_G_ueF_74C2.txt
-.EE
-.IP
-For the input geometry files:
-.IP
-.EX
-S1AA_20161223T070700_20170116T070658_VVP024_INT80_G_ueF_74C2_dem_clip.tif
-S1AA_20161223T070700_20170116T070658_VVP024_INT80_G_ueF_74C2_dem_clip.tif.rsc
-S1AA_20161223T070700_20170116T070658_VVP024_INT80_G_ueF_74C2_lv_theta_clip.tif
-S1AA_20161223T070700_20170116T070658_VVP024_INT80_G_ueF_74C2_lv_theta_clip.tif.rsc
-.EE
-.IP
-Notes:
-.IP
-HyP3 currently only supports generation of Sentinel\-1 interferograms, so
-some Sentinel\-1 metadata is hard\-coded. If HyP3 adds processing of interferograms
-from other satellites, changes will be needed.
-.SS "positional arguments:"
-.TP
-file
-HyP3 file(s)
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.SS "example:"
-.TP
-prep_hyp3.py
-interferograms/*/*unw_phase_clip.tif
-.TP
-prep_hyp3.py
-interferograms/*/*corr_clip.tif
-.TP
-prep_hyp3.py
-interferograms/*/*dem_clip.tif
-.TP
-prep_hyp3.py
-interferograms/*/*lv_theta_clip.tif
-.TP
-prep_hyp3.py
-interferograms/*/*clip.tif
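As a rough illustration of the description above (and not the actual prep_hyp3.py code), the sketch below reads the geotransform of a GeoTIFF with GDAL and writes a minimal ROI_PAC-style .rsc file next to it; the exact set of keys written is a simplified assumption:

    # Hedged sketch: read geospatial metadata with GDAL and write a minimal
    # ROI_PAC-style .rsc file; the key set below is illustrative only.
    from osgeo import gdal

    def write_rsc(tif_file):
        ds = gdal.Open(tif_file)
        x0, x_step, _, y0, _, y_step = ds.GetGeoTransform()
        meta = {
            'WIDTH'      : ds.RasterXSize,
            'FILE_LENGTH': ds.RasterYSize,
            'X_FIRST'    : x0,
            'Y_FIRST'    : y0,
            'X_STEP'     : x_step,
            'Y_STEP'     : y_step,
        }
        with open(tif_file + '.rsc', 'w') as f:
            for key, value in meta.items():
                f.write(f'{key:<15} {value}\n')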
diff -pruN 1.3.3-2/debian/man/mintpy-prep_isce.1 1.4.0-1/debian/man/mintpy-prep_isce.1
--- 1.3.3-2/debian/man/mintpy-prep_isce.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-prep_isce.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,58 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-PREP_ISCE "1" "May 2022" "mintpy-prep_isce v1.3.3" "User Commands"
-.SH NAME
-mintpy-prep_isce \- Prepare ISCE metadata files.
-.SH DESCRIPTION
-usage: prep_isce.py [\-h] \fB\-d\fR DSETDIR [\-f DSETFILES [DSETFILES ...]] \fB\-m\fR METAFILE
-.TP
-[\-b BASELINEDIR] \fB\-g\fR GEOMETRYDIR
-[\-\-geom\-files [GEOMETRYFILES ...]] [\-\-force]
-.PP
-Prepare ISCE metadata files.
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-d\fR DSETDIR, \fB\-\-ds\-dir\fR DSETDIR, \fB\-\-dset\-dir\fR DSETDIR
-The directory which contains all pairs
-e.g.: \fI\,$PROJECT_DIR/merged/interferograms\/\fP OR
-.TP
-$PROJECT_DIR/pairs/*\-*/insar OR
-\fI\,$PROJECT_DIR/merged/offsets\/\fP
-.TP
-\fB\-f\fR DSETFILES [DSETFILES ...], \fB\-\-file\-pattern\fR DSETFILES [DSETFILES ...]
-List of observation file basenames, e.g.: filt_fine.unw OR filtAz*.off
-.TP
-\fB\-m\fR METAFILE, \fB\-\-meta\-file\fR METAFILE
-Metadata file to extract common metadata for the stack:
-e.g.: for ISCE/topsStack    : reference/IW3.xml;
-.TP
-for ISCE/stripmapStack: referenceShelve/data.dat;
-for ISCE/alosStack    : pairs/150408\-150701/150408.track.xml
-.IP
-where 150408 is the reference date of stack processing
-.TP
-\fB\-b\fR BASELINEDIR, \fB\-\-baseline\-dir\fR BASELINEDIR
-Directory with baselines. Set "random" to generate baselines with random values from [\-10,10]. Set "random\-100" to generate baselines with random values from [\-100,100].
-.TP
-\fB\-g\fR GEOMETRYDIR, \fB\-\-geometry\-dir\fR GEOMETRYDIR
-Directory with geometry files
-.TP
-\fB\-\-geom\-files\fR [GEOMETRYFILES ...]
-List of geometry file basenames. Default: ['hgt.rdr', 'lat.rdr', 'lon.rdr', 'los.rdr', 'shadowMask.rdr', 'waterMask.rdr', 'incLocal.rdr'].
-All geometry files need to be in the same directory.
-.TP
-\fB\-\-force\fR
-Force to overwrite all .rsc metadata files.
-.SS "example:"
-.IP
-# interferogram stack
-prep_isce.py \fB\-d\fR ./merged/interferograms \fB\-m\fR ./reference/IW1.xml \fB\-b\fR ./baselines \fB\-g\fR ./merged/geom_reference      #for topsStack
-prep_isce.py \fB\-d\fR ./Igrams \fB\-m\fR ./referenceShelve/data.dat \fB\-b\fR ./baselines \fB\-g\fR ./geom_reference                     #for stripmapStack
-prep_isce.py \fB\-m\fR 20120507_slc_crop.xml \fB\-g\fR ./geometry                                                           #for stripmapApp
-prep_isce.py \fB\-d\fR "pairs/*\-*/insar" \fB\-m\fR "pairs/*\-*/150408.track.xml" \fB\-b\fR baseline \fB\-g\fR dates_resampled/150408/insar #for alosStack with 150408 as ref date
-.IP
-# offset stack
-prep_isce.py \fB\-d\fR ./offsets \fB\-f\fR *Off*.bip \fB\-m\fR ./../reference/IW1.xml \fB\-b\fR ./../baselines \fB\-g\fR ./offsets/geom_reference  #for topsStack
-prep_isce.py \fB\-d\fR ./offsets \fB\-f\fR *Off*.bip \fB\-m\fR ./SLC/*/data.dat       \fB\-b\fR random         \fB\-g\fR ./geometry                #for UAVSAR coregStack
diff -pruN 1.3.3-2/debian/man/mintpy-prep_roipac.1 1.4.0-1/debian/man/mintpy-prep_roipac.1
--- 1.3.3-2/debian/man/mintpy-prep_roipac.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-prep_roipac.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,43 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-PREP_ROIPAC "1" "May 2022" "mintpy-prep_roipac v1.3.3" "User Commands"
-.SH NAME
-mintpy-prep_roipac \- Prepare attributes file for ROI_PAC products.
-.SH DESCRIPTION
-usage: prep_roipac.py [\-h] [\-\-no\-parallel] file [file ...]
-.PP
-Prepare attributes file for ROI_PAC products.
-.IP
-For each binary file (unwrapped/wrapped interferogram, spatial coherence file), there are 2 .rsc files:
-1) basic metadata file and 2) baseline parameter file. This script finds those two .rsc files based on
-the input binary file name, and merges the two metadata files into one.
-.IP
-For example, if input binary file is filt_100901\-110117\-sim_HDR_4rlks_c10.unw, this script will find
-1) filt_100901\-110117\-sim_HDR_4rlks_c10.unw.rsc and 2) 100901\-110117_baseline.rsc and merge 1) and 2) into
-one file: filt_100901\-110117\-sim_HDR_4rlks_c10.unw.rsc
-.SS "positional arguments:"
-.TP
-file
-ROI_PAC file(s)
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-\-no\-parallel\fR
-Disable parallel processing. Disabled automatically for a single input file.
-.SS "example:"
-.TP
-prep_roipac.py
-filt_100901\-110117\-sim_HDR_4rlks_c10.unw
-.TP
-prep_roipac.py
-\&./interferograms/*/filt_*.unw
-.TP
-prep_roipac.py
-\&./interferograms/*/filt_*rlks.cor
-.TP
-prep_roipac.py
-\&./interferograms/*/filt_*rlks.int
-.TP
-prep_roipac.py
-\&./interferograms/*/filt_*_snap_connect.byt
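The merge described above is simple enough to sketch; the snippet below is a simplified stand-in for prep_roipac.py, assuming whitespace-separated key/value .rsc files:

    # Hedged sketch of the .rsc merge: combine the per-file metadata .rsc with
    # the pair's baseline .rsc into one file, overwriting the former.
    def read_rsc(path):
        meta = {}
        with open(path) as f:
            for line in f:
                parts = line.split()
                if len(parts) >= 2:
                    meta[parts[0]] = parts[1]
        return meta

    unw_file = 'filt_100901-110117-sim_HDR_4rlks_c10.unw'
    meta = read_rsc(unw_file + '.rsc')
    meta.update(read_rsc('100901-110117_baseline.rsc'))
    with open(unw_file + '.rsc', 'w') as f:
        for key, value in meta.items():
            f.write(f'{key:<15} {value}\n')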
diff -pruN 1.3.3-2/debian/man/mintpy-prep_snap.1 1.4.0-1/debian/man/mintpy-prep_snap.1
--- 1.3.3-2/debian/man/mintpy-prep_snap.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-prep_snap.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,38 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-PREP_SNAP "1" "May 2022" "mintpy-prep_snap v1.3.3" "User Commands"
-.SH NAME
-mintpy-prep_snap \- Prepare attributes file for SNAP products.
-.SH DESCRIPTION
-usage: prep_snap.py [\-h] file [file ...]
-.PP
-Prepare attributes file for SNAP products.
-.IP
-For each interferogram, coherence, or unwrapped .dim product, this script will prepare .rsc
-metadata files for MintPy based on the .dim metadata file.
-.IP
-The SNAP .dim file should contain all the required sensor / baseline metadata needed.
-The baseline metadata gets written during SNAP back\-geocoding (co\-registration).
-prep_snap is run separately for unw/ifg/cor files, so it needs separate .dim/.data products
-with only the relevant band in each product. Use Band Subset > save BEAM\-DIMAP file.
-.IP
-The file name should be yyyymmdd_yyyymmdd_type_tc.dim where type can be filt/unw/coh.
-.IP
-The DEM should be prepared by adding an elevation file to a coregistered product,
-then extracting the elevation band only. Use Band Subset > save BEAM\-DIMAP file.
-.IP
-Currently only works for geocoded (terrain correction step in SNAP) interferograms.
-.SS "positional arguments:"
-.TP
-file
-SNAP data file(s) in *.img format.
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.SS "example:"
-.TP
-prep_snap.py
-\&../interferograms/*/*/Unw_*.img
-.TP
-prep_snap.py
-\&../dem_tc.data/dem*.img
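Since a BEAM-DIMAP .dim file is plain XML, the metadata extraction can be pictured with the standard library alone; the tag and attribute names queried below are illustrative assumptions, not necessarily what prep_snap.py reads:

    # Hedged sketch: pull metadata attributes out of a BEAM-DIMAP .dim XML file.
    import xml.etree.ElementTree as ET

    root = ET.parse('20190101_20190113_unw_tc.dim').getroot()
    for elem in root.iter('MDATTR'):   # metadata attribute nodes (assumed tag)
        if elem.get('name') in ('radar_frequency', 'incidence_near'):
            print(elem.get('name'), elem.text)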
diff -pruN 1.3.3-2/debian/man/mintpy-reference_date.1 1.4.0-1/debian/man/mintpy-reference_date.1
--- 1.3.3-2/debian/man/mintpy-reference_date.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-reference_date.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,49 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-REFERENCE_DATE "1" "May 2022" "mintpy-reference_date v1.3.3" "User Commands"
-.SH NAME
-mintpy-reference_date \- Change reference date of timeseries.
-.SH DESCRIPTION
-usage: reference_date.py [\-h] [\-r REFDATE] [\-t TEMPLATE_FILE] [\-o OUTFILE]
-.TP
-[\-\-force] [\-\-ram MAXMEMORY]
-timeseries_file [timeseries_file ...]
-.PP
-Change reference date of timeseries.
-.SS "positional arguments:"
-.TP
-timeseries_file
-timeseries file(s)
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-r\fR REFDATE, \fB\-\-ref\-date\fR REFDATE
-reference date or method, default: auto. e.g.:
-20101120           \- date in YYYYMMDD format
-<file>.h5          \- time\-series HDF5 file with REF_DATE in its attributes
-reference_date.txt \- text file with date in YYYYMMDD format in it
-minRMS             \- choose date with min residual standard deviation
-.TP
-\fB\-t\fR TEMPLATE_FILE, \fB\-\-template\fR TEMPLATE_FILE
-template file with options
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-outfile\fR OUTFILE
-Output file name.
-.TP
-\fB\-\-force\fR
-Force updating the data matrix.
-.TP
-\fB\-\-ram\fR MAXMEMORY, \fB\-\-memory\fR MAXMEMORY
-Max amount of memory in GB to use (default: 4.0).
-Adjust according to your computer memory.
-.SS "template options:"
-.IP
-## Reference all time\-series to one date in time
-## reference: Yunjun et al. (2019, section 4.9)
-## no     \- do not change the default reference date (1st date)
-mintpy.reference.date = auto   #[reference_date.txt / 20090214 / no], auto for reference_date.txt
-.SS "example:"
-.IP
-reference_date.py timeseries.h5 timeseries_ERA5.h5 timeseries_ERA5_demErr.h5 \fB\-\-template\fR smallbaselineApp.cfg
-reference_date.py timeseries_ERA5_demErr.h5 \fB\-\-ref\-date\fR 20050107
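Conceptually, changing the reference date is a single subtraction; a minimal numpy sketch with toy data (not the script's actual I/O):

    import numpy as np

    ts = np.random.rand(5, 20, 30)      # toy (num_date, length, width) stack
    dates = ['20200101', '20200113', '20200125', '20200206', '20200218']
    ref_idx = dates.index('20200125')   # new reference date
    ts -= ts[ref_idx]                   # that epoch now reads zero everywhere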
diff -pruN 1.3.3-2/debian/man/mintpy-reference_point.1 1.4.0-1/debian/man/mintpy-reference_point.1
--- 1.3.3-2/debian/man/mintpy-reference_point.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-reference_point.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,126 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-REFERENCE_POINT "1" "May 2022" "mintpy-reference_point v1.3.3" "User Commands"
-.SH NAME
-mintpy-reference_point \- Reference to the same pixel in space.
-.SH DESCRIPTION
-usage: reference_point.py [\-h] [\-t TEMPLATE_FILE] [\-m MASKFILE] [\-o OUTFILE]
-.TP
-[\-\-write\-data] [\-\-reset] [\-\-force] [\-y REF_Y]
-[\-x REF_X] [\-l REF_LAT] [\-L REF_LON]
-[\-r REFERENCE_FILE] [\-\-lookup LOOKUP_FILE]
-[\-c COHERENCEFILE] [\-\-min\-coherence MINCOHERENCE]
-[\-\-method {maxCoherence,manual,random}]
-file
-.PP
-Reference to the same pixel in space.
-.SS "positional arguments:"
-.TP
-file
-file to be referenced.
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-t\fR TEMPLATE_FILE, \fB\-\-template\fR TEMPLATE_FILE
-template with reference info
-.TP
-\fB\-m\fR MASKFILE, \fB\-\-mask\fR MASKFILE
-mask file
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-outfile\fR OUTFILE
-output file name (default: None). This option is disabled for ifgramStack file.
-None (default) to update the data value in place without writing to a new file.
-.TP
-\fB\-\-write\-data\fR
-(option for ifgramStack file only) update the data value, in addition to updating the metadata.
-.TP
-\fB\-\-reset\fR
-remove reference pixel information from attributes in the file
-.TP
-\fB\-\-force\fR
-Enforce the re\-selection of reference point.
-.TP
-\fB\-c\fR COHERENCEFILE, \fB\-\-coherence\fR COHERENCEFILE
-use input coherence file to find the pixel with max coherence for reference pixel.
-.TP
-\fB\-\-min\-coherence\fR MINCOHERENCE
-minimum coherence of reference pixel for max\-coherence method.
-.TP
-\fB\-\-method\fR {maxCoherence,manual,random}
-methods to select reference pixel if not given in specific y/x or lat/lon:
-maxCoherence : select pixel with highest coherence value as reference point
-.TP
-enabled when the \fB\-\-coherence\fR option is given
-manual       : display stack of input file and manually select reference point
-random       : randomly select a pixel as reference point
-.SS "input coordinates:"
-.TP
-\fB\-y\fR REF_Y, \fB\-\-row\fR REF_Y
-row/azimuth  number of reference pixel
-.TP
-\fB\-x\fR REF_X, \fB\-\-col\fR REF_X
-column/range number of reference pixel
-.TP
-\fB\-l\fR REF_LAT, \fB\-\-lat\fR REF_LAT
-latitude  of reference pixel
-.TP
-\fB\-L\fR REF_LON, \fB\-\-lon\fR REF_LON
-longitude of reference pixel
-.TP
-\fB\-r\fR REFERENCE_FILE, \fB\-\-reference\fR REFERENCE_FILE
-use reference/seed info of this file
-.TP
-\fB\-\-lookup\fR LOOKUP_FILE, \fB\-\-lookup\-file\fR LOOKUP_FILE
-Lookup table file from SAR to DEM, i.e. geomap_4rlks.trans
-Needed for radar coord input file with \fB\-\-lat\fR/lon seeding option.
-.SS "note: Reference value cannot be nan, thus, all selected reference point must be:"
-.IP
-a. non zero in mask, if mask is given
-b. non nan  in data (stack)
-.IP
-Priority:
-.IP
-input reference_lat/lon
-input reference_y/x
-input selection_method
-existing REF_Y/X attributes (can be ignored by \fB\-\-force\fR option)
-default selection methods:
-.IP
-maxCoherence
-random
-.IP
-The recommended reference pixel should meet the following criteria:
-1) not in deforming areas
-2) not in areas affected by strong atmospheric turbulence, such as ionospheric streaks
-3) close to but outside of the deforming area of interest, with similar elevation, to minimize
-.IP
-the spatial correlation effect of atmospheric delay, especially for short\-wavelength
-deformation (Chaussard et al., 2013; Morales\-Rivera et al., 2016)
-.IP
-4) in a highly coherent area to minimize the decorrelation effect
-.SS "template options:"
-.IP
-## Reference all interferograms to one common point in space
-## auto \- randomly select a pixel with coherence > minCoherence
-## however, manual specification using prior knowledge of the study area is highly recommended
-##   with the following guideline (section 4.3 in Yunjun et al., 2019):
-## 1) located in a coherent area, to minimize the decorrelation effect.
-## 2) not affected by strong atmospheric turbulence, i.e. ionospheric streaks
-## 3) close to and with similar elevation as the AOI, to minimize the impact of spatially correlated atmospheric delay
-mintpy.reference.yx            = auto   #[257,151 / auto]
-mintpy.reference.lalo          = auto   #[31.8,130.8 / auto]
-mintpy.reference.maskFile      = auto   #[filename / no], auto for maskConnComp.h5
-mintpy.reference.coherenceFile = auto   #[filename], auto for avgSpatialCoh.h5
-mintpy.reference.minCoherence  = auto   #[0.0\-1.0], auto for 0.85, minimum coherence for auto method
-.SS "example:"
-.IP
-# for ifgramStack file, update metadata only
-# add \fB\-\-write\-data\fR to update data matrix value
-reference_point.py  inputs/ifgramStack.h5  \fB\-t\fR smallbaselineApp.cfg  \fB\-c\fR avgSpatialCoh.h5
-reference_point.py  inputs/ifgramStack.h5 \fB\-\-method\fR manual
-reference_point.py  inputs/ifgramStack.h5 \fB\-\-method\fR random
-.IP
-# for all the other files, update both metadata and data matrix value
-reference_point.py  091120_100407.unw \fB\-y\fR 257    \fB\-x\fR 151      \fB\-m\fR Mask.h5
-reference_point.py  geo_velocity.h5   \fB\-l\fR 34.45  \fB\-L\fR \fB\-116\fR.23  \fB\-m\fR Mask.h5
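Spatial referencing itself is the same subtraction applied in the space dimension; a minimal numpy sketch with toy data (the real script also updates the REF_Y/X attributes):

    import numpy as np

    stack = np.random.rand(10, 100, 150)   # toy (num_date, length, width) data
    ref_y, ref_x = 57, 81                  # chosen reference pixel (row, col)
    assert not np.isnan(stack[:, ref_y, ref_x]).any()  # reference must not be nan
    stack -= stack[:, ref_y:ref_y + 1, ref_x:ref_x + 1]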
diff -pruN 1.3.3-2/debian/man/mintpy-remove_hdf5_dataset.1 1.4.0-1/debian/man/mintpy-remove_hdf5_dataset.1
--- 1.3.3-2/debian/man/mintpy-remove_hdf5_dataset.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-remove_hdf5_dataset.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,29 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-REMOVE_HDF5_DATASET "1" "May 2022" "mintpy-remove_hdf5_dataset v1.3.3" "User Commands"
-.SH NAME
-mintpy-remove_hdf5_dataset \- Remove an existing dataset from HDF5 file
-.SH DESCRIPTION
-usage: remove_hdf5_dataset.py [\-h] file dset [dset ...]
-.PP
-Remove an existing dataset from HDF5 file
-.SS "positional arguments:"
-.TP
-file
-HDF5 file of interest
-.TP
-dset
-dataset to be removed.
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.SS "Example:"
-.TP
-remove_hdf5_dataset.py
-ifgramStack.h5  unwrapPhase_phaseClosure
-.TP
-remove_hdf5_dataset.py
-ifgramStack.h5  unwrapPhase_phaseClosure  unwrapPhase_bridging
-.TP
-remove_hdf5_dataset.py
-velocity.h5     velocityStd
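In h5py terms the operation is a one-liner, sketched below; note that deleting a dataset only unlinks it, so the file does not shrink on disk (repacking, e.g. with h5repack, is needed for that):

    import h5py

    with h5py.File('ifgramStack.h5', 'a') as f:
        if 'unwrapPhase_phaseClosure' in f:
            del f['unwrapPhase_phaseClosure']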
diff -pruN 1.3.3-2/debian/man/mintpy-remove_ramp.1 1.4.0-1/debian/man/mintpy-remove_ramp.1
--- 1.3.3-2/debian/man/mintpy-remove_ramp.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-remove_ramp.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,55 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-REMOVE_RAMP "1" "May 2022" "mintpy-remove_ramp v1.3.3" "User Commands"
-.SH NAME
-mintpy-remove_ramp \- Remove phase ramp
-.SH DESCRIPTION
-usage: remove_ramp.py [\-h] [\-m MASK_FILE]
-.TP
-[\-s {linear,linear_range,linear_azimuth,quadratic,quadratic_range,quadratic_azimuth}]
-[\-d DSET] [\-o OUTFILE] [\-\-save\-ramp\-coeff] [\-\-update]
-file
-.PP
-Remove phase ramp
-.SS "positional arguments:"
-.TP
-file
-File for ramp removal
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-m\fR MASK_FILE, \fB\-\-mask\fR MASK_FILE
-mask for pixels used in ramp estimation
-default \- maskTempCoh.h5
-no \- use the whole area
-.TP
-\fB\-s\fR {linear,linear_range,linear_azimuth,quadratic,quadratic_range,quadratic_azimuth}
-type of surface/ramp to remove, linear by default
-.TP
-\fB\-d\fR DSET, \fB\-\-dset\fR DSET
-dataset name to be deramped in the ifgramStack file
-e.g.: unwrapPhase
-.IP
-unwrapPhase_bridging
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-outfile\fR OUTFILE
-Output file name.
-.TP
-\fB\-\-save\-ramp\-coeff\fR
-Save the estimated ramp coefficients into text file.
-.TP
-\fB\-\-update\fR
-Enable update mode, and skip inversion if:
-1) the output file already exists, is readable, and is newer than the input file;
-2) all configuration parameters are the same.
-.SS "example:"
-.TP
-remove_ramp.py
-timeseries.h5      \fB\-m\fR maskTempCoh.h5
-.TP
-remove_ramp.py
-ifgramStack.h5     \fB\-m\fR maskTempCoh.h5  \fB\-d\fR unwrapPhase_bridging
-.TP
-remove_ramp.py
-090214_101120.unw  \fB\-m\fR maskTempCoh.h5  \fB\-s\fR quadratic
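A linear ramp fit is a small least-squares problem; the sketch below (toy data, simplified relative to the script) estimates d = a*x + b*y + c on masked pixels and subtracts the fitted plane:

    import numpy as np

    data = np.random.rand(100, 150)           # toy 2D displacement image
    mask = np.ones(data.shape, dtype=bool)    # e.g. read from maskTempCoh.h5
    y, x = np.mgrid[0:data.shape[0], 0:data.shape[1]]
    G = np.column_stack([x[mask], y[mask], np.ones(mask.sum())])
    coeffs, *_ = np.linalg.lstsq(G, data[mask], rcond=None)
    ramp = coeffs[0] * x + coeffs[1] * y + coeffs[2]
    deramped = data - ramp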
diff -pruN 1.3.3-2/debian/man/mintpy-save_gbis.1 1.4.0-1/debian/man/mintpy-save_gbis.1
--- 1.3.3-2/debian/man/mintpy-save_gbis.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-save_gbis.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,59 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-SAVE_GBIS "1" "May 2022" "mintpy-save_gbis v1.3.3" "User Commands"
-.SH NAME
-mintpy-save_gbis \- Convert MintPy product to GBIS .mat format.
-.SH DESCRIPTION
-usage: save_gbis.py [\-h] \fB\-g\fR GEOM_FILE [\-m MASK_FILE]
-.TP
-[\-\-ref\-lalo REF_LALO REF_LALO] [\-\-nodisplay] [\-o OUTFILE]
-[\-\-out\-dir OUTDIR] [\-\-ellipsoid2geoid]
-file [dset]
-.PP
-Convert MintPy product to GBIS .mat format.
-.SS "positional arguments:"
-.TP
-file
-deformation file.
-.TP
-dset
-date/date12 of timeseries, or date12 of interferograms to be converted
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-g\fR GEOM_FILE, \fB\-\-geometry\fR GEOM_FILE
-geometry file
-.TP
-\fB\-m\fR MASK_FILE, \fB\-\-mask\fR MASK_FILE
-mask file.
-.TP
-\fB\-\-ref\-lalo\fR REF_LALO REF_LALO
-custom reference pixel in lat/lon
-.TP
-\fB\-\-nodisplay\fR
-do not display the figure
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-output\fR OUTFILE
-output file name.
-.TP
-\fB\-\-out\-dir\fR OUTDIR
-custom output directory, ONLY IF \fB\-\-output\fR is not specified.
-.TP
-\fB\-\-ellipsoid2geoid\fR
-Convert height above the ellipsoid to height above the geoid using the "geoidheight" module
-Download & install geoidheight as below:
-https://github.com/geodesymiami/2021_Kirishima
-.SS "references:"
-.IP
-Bagnardi, M., and A. Hooper (2018), Inversion of Surface Deformation Data for Rapid Estimates of Source
-Parameters and Uncertainties: A Bayesian Approach, Geochemistry, Geophysics, Geosystems, 19,
-doi:10.1029/2018GC007585.
-.IP
-Yunjun, Z., Amelung, F., & Aoki, Y. (2021), Imaging the hydrothermal system of Kirishima volcanic complex
-with L\-band InSAR time series, Geophysical Research Letters, 48(11), e2021GL092879. doi:10.1029/2021GL092879
-.SS "example:"
-.IP
-save_gbis.py velocity.h5 \fB\-g\fR inputs/geometryGeo.h5 \fB\-o\fR AlosDT73_20081012_20100302.mat
-save_gbis.py 20150223_20161031_msk.unw \fB\-g\fR inputs/geometryGeo.h5 \fB\-o\fR Alos2DT23_20150223_20161031.mat
-save_gbis.py 20150223_20161031.unw \fB\-g\fR inputs/geometryGeo.h5 \fB\-\-out\-dir\fR ../Model/data \fB\-\-ellipsoid2geoid\fR
diff -pruN 1.3.3-2/debian/man/mintpy-save_gdal.1 1.4.0-1/debian/man/mintpy-save_gdal.1
--- 1.3.3-2/debian/man/mintpy-save_gdal.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-save_gdal.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,32 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-SAVE_GDAL "1" "May 2022" "mintpy-save_gdal v1.3.3" "User Commands"
-.SH NAME
-mintpy-save_gdal \- Generate GDAL raster from MintPy h5 file.
-.SH DESCRIPTION
-usage: save_gdal.py [\-h] [\-d DSET] [\-o OUTFILE] [\-\-of OUT_FORMAT] file
-.PP
-Generate GDAL raster from MintPy h5 file.
-.SS "positional arguments:"
-.TP
-file
-file to be converted, in geo coordinate.
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-d\fR DSET, \fB\-\-dset\fR DSET, \fB\-\-dataset\fR DSET
-date of timeseries, or date12 of interferograms to be converted
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-output\fR OUTFILE
-output file base name. Extension is fixed by GDAL driver
-.TP
-\fB\-\-of\fR OUT_FORMAT, \fB\-\-out\-format\fR OUT_FORMAT, \fB\-\-output\-format\fR OUT_FORMAT
-file format as defined by GDAL driver name, e.g. GTiff, ENVI, default: GTiff
-GDAL driver names can be found at https://gdal.org/drivers/raster/index.html
-.SS "example:"
-.IP
-save_gdal.py geo/geo_velocity.h5
-save_gdal.py geo/geo_timeseries_ERA5_demErr.h5 \fB\-d\fR 20200505_20200517 \fB\-\-of\fR ENVI
-save_gdal.py geo/geo_ifgramStack.h5 \fB\-d\fR unwrapPhase\-20101120_20110220 \fB\-\-of\fR ISCE
-save_gdal.py geo/geo_ifgramStack.h5 \fB\-d\fR coherence\-20101120_20110220 \fB\-\-of\fR ISCE
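The conversion can be pictured as an h5py read plus a GDAL write; a hedged sketch, assuming (as MintPy does) that the geo metadata are stored as string attributes named X_FIRST/X_STEP/Y_FIRST/Y_STEP:

    import h5py
    import numpy as np
    from osgeo import gdal, osr

    with h5py.File('geo/geo_velocity.h5') as f:
        data = f['velocity'][:].astype(np.float32)
        meta = {k: (v.decode() if isinstance(v, bytes) else str(v))
                for k, v in f.attrs.items()}

    drv = gdal.GetDriverByName('GTiff')
    ds = drv.Create('geo_velocity.tif', data.shape[1], data.shape[0],
                    1, gdal.GDT_Float32)
    ds.SetGeoTransform((float(meta['X_FIRST']), float(meta['X_STEP']), 0,
                        float(meta['Y_FIRST']), 0, float(meta['Y_STEP'])))
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(4326)                 # assuming lat/lon on WGS84
    ds.SetProjection(srs.ExportToWkt())
    ds.GetRasterBand(1).WriteArray(data)
    ds = None                                # flush to disk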
diff -pruN 1.3.3-2/debian/man/mintpy-save_gmt.1 1.4.0-1/debian/man/mintpy-save_gmt.1
--- 1.3.3-2/debian/man/mintpy-save_gmt.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-save_gmt.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,38 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-SAVE_GMT "1" "May 2022" "mintpy-save_gmt v1.3.3" "User Commands"
-.SH NAME
-mintpy-save_gmt \- Export geocoded file to GMT grd file
-.SH DESCRIPTION
-usage: save_gmt.py [\-h] [\-o OUTFILE] file [dset]
-.PP
-Export geocoded file to GMT grd file
-.SS "positional arguments:"
-.TP
-file
-file to be converted, in geo coordinate.
-.TP
-dset
-date of timeseries, or date12 of interferograms to be converted
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-output\fR OUTFILE
-output file base name. Extension is fixed to .grd
-.SS "example:"
-.TP
-save_gmt.py
-geo_velocity.h5
-.TP
-save_gmt.py
-geo_timeseries.h5     20071031
-.TP
-save_gmt.py
-geo_timeseries.h5
-.TP
-save_gmt.py
-geo_filt_100608\-101024\-sim_HDR_16rlks_c10.unw
-.TP
-save_gmt.py
-gsi10m.dem
diff -pruN 1.3.3-2/debian/man/mintpy-save_hdfeos5.1 1.4.0-1/debian/man/mintpy-save_hdfeos5.1
--- 1.3.3-2/debian/man/mintpy-save_hdfeos5.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-save_hdfeos5.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,61 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-SAVE_HDFEOS5 "1" "May 2022" "mintpy-save_hdfeos5 v1.3.3" "User Commands"
-.SH NAME
-mintpy-save_hdfeos5 \- Convert MintPy timeseries product into HDF\-EOS5 format
-.SH DESCRIPTION
-usage: save_hdfeos5.py [\-h] [\-t TEMPLATE_FILE] [\-\-tc TCOH_FILE]
-.TP
-[\-\-asc SCOH_FILE] [\-m MASK_FILE] [\-g GEOM_FILE]
-[\-\-update] [\-\-subset]
-ts_file
-.PP
-Convert MintPy timeseries product into HDF\-EOS5 format
-.IP
-https://earthdata.nasa.gov/esdis/eso/standards\-and\-references/hdf\-eos5
-https://mintpy.readthedocs.io/en/latest/hdfeos5/
-.SS "positional arguments:"
-.TP
-ts_file
-Timeseries file
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-t\fR TEMPLATE_FILE, \fB\-\-template\fR TEMPLATE_FILE
-Template file for 1) arguments/options and 2) missing
-metadata
-.TP
-\fB\-\-tc\fR TCOH_FILE, \fB\-\-temp\-coh\fR TCOH_FILE
-Coherence/correlation file, i.e. temporalCoherence.h5
-.TP
-\fB\-\-asc\fR SCOH_FILE, \fB\-\-avg\-spatial\-coh\fR SCOH_FILE
-Average spatial coherence file, i.e. avgSpatialCoh.h5
-.TP
-\fB\-m\fR MASK_FILE, \fB\-\-mask\fR MASK_FILE
-Mask file
-.TP
-\fB\-g\fR GEOM_FILE, \fB\-\-geometry\fR GEOM_FILE
-geometry file
-.TP
-\fB\-\-update\fR
-Enable update mode, a.k.a. put XXXXXXXX as endDate in
-filename if endDate < 1 year
-.TP
-\fB\-\-subset\fR
-Enable subset mode, a.k.a. put suffix
-_N31700_N32100_E130500_E131100
-.SS "template options:"
-.TP
-mintpy.save.hdfEos5
-= auto   #[yes / no], auto for no, save time\-series to HDF\-EOS5 format
-.TP
-mintpy.save.hdfEos5.update
-= auto   #[yes / no], auto for no, put XXXXXXXX as endDate in output filename
-.TP
-mintpy.save.hdfEos5.subset
-= auto   #[yes / no], auto for no, put subset range info   in output filename
-.SS "example:"
-.IP
-save_hdfeos5.py geo/geo_timeseries_ERA5_ramp_demErr.h5
-save_hdfeos5.py timeseries_ERA5_ramp_demErr.h5 \fB\-\-tc\fR temporalCoherence.h5 \fB\-\-asc\fR avgSpatialCoh.h5 \fB\-m\fR maskTempCoh.h5 \fB\-g\fR inputs/geometryGeo.h5
diff -pruN 1.3.3-2/debian/man/mintpy-save_kite.1 1.4.0-1/debian/man/mintpy-save_kite.1
--- 1.3.3-2/debian/man/mintpy-save_kite.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-save_kite.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,67 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-SAVE_KITE "1" "May 2022" "mintpy-save_kite v1.3.3" "User Commands"
-.SH NAME
-mintpy-save_kite \- Generate KITE (https://github.com/pyrocko/kite) npz and yaml from MintPy HDF5 file.
-.SH DESCRIPTION
-usage: save_kite.py [\-h] \fB\-d\fR DSET [\-g GEOM_FILE] [\-m MASK_FILE] [\-o OUTFILE]
-.TP
-[\-\-sub\-x XMIN XMAX] [\-\-sub\-y YMIN YMAX]
-[\-\-sub\-lat LATMIN LATMAX] [\-\-sub\-lon LONMIN LONMAX]
-file
-.PP
-Generate KITE (https://github.com/pyrocko/kite) npz and yaml from MintPy HDF5 file.
-.SS "positional arguments:"
-.TP
-file
-file to be converted, in geo coordinate.
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-d\fR DSET, \fB\-\-dset\fR DSET, \fB\-\-dataset\fR DSET
-dataset of interest to be converted.
-e.g.: velocity / stepYYYYMMDD for velocity HDF5 file,
-.TP
-date12 in YYYYMMDD_YYYYMMDD for time\-series HDF5 file,
-date12 in unwrapPhase\-YYYYMMDD_YYYYMMDD for ifgramStack HDF5 file.
-.TP
-\fB\-g\fR GEOM_FILE, \fB\-\-geom\fR GEOM_FILE
-geometry file for incidence \fI\,/azimuth\/\fP angle and height.
-.TP
-\fB\-m\fR MASK_FILE, \fB\-\-mask\fR MASK_FILE
-mask file, or run mask.py to mask the input file beforehand.
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-output\fR OUTFILE
-output filename
-.SS "Subset:"
-.IP
-Display dataset in subset range
-.TP
-\fB\-\-sub\-x\fR XMIN XMAX, \fB\-\-subx\fR XMIN XMAX, \fB\-\-subset\-x\fR XMIN XMAX
-subset display in x/cross\-track/range direction
-.TP
-\fB\-\-sub\-y\fR YMIN YMAX, \fB\-\-suby\fR YMIN YMAX, \fB\-\-subset\-y\fR YMIN YMAX
-subset display in y/along\-track/azimuth direction
-.TP
-\fB\-\-sub\-lat\fR LATMIN LATMAX, \fB\-\-sublat\fR LATMIN LATMAX, \fB\-\-subset\-lat\fR LATMIN LATMAX
-subset display in latitude
-.TP
-\fB\-\-sub\-lon\fR LONMIN LONMAX, \fB\-\-sublon\fR LONMIN LONMAX, \fB\-\-subset\-lon\fR LONMIN LONMAX
-subset display in longitude
-.SS "example:"
-.IP
-## displacement [event\-type inversion]
-# option 1: use velocity file with step estimation from timeseries2velocity.py for co\-seismic displacement
-save_kite.py geo/geo_velocity.h5 \fB\-d\fR step20210104 \fB\-g\fR geo/geo_geometry.h5 \fB\-m\fR geo/geo_maskTempCoh.h5 \fB\-o\fR dsc
-.IP
-# option 2: use time\-series / ifgramStack file with date1_date2 for the transient displacement:
-save_kite.py geo/geo_timeseries_ERA5_ramp_demErr.h5 \fB\-d\fR 20101120_20110220 \fB\-g\fR geo/geo_geometry.h5 \fB\-m\fR geo/geo_maskTempCoh.h5 \fB\-o\fR dsc
-save_kite.py geo/geo_ifgramStack.h5     \fB\-d\fR unwrapPhase\-20101120_20110220 \fB\-g\fR geo/geo_geometry.h5 \fB\-m\fR geo/geo_maskTempCoh.h5 \fB\-o\fR dsc
-.IP
-## velocity [interseismic or tensile dislocation inversion]
-# https://pyrocko.org/beat/docs/current/examples/Rectangular_tensile.html
-save_kite.py geo/geo_velocity.h5 \fB\-d\fR velocity \fB\-g\fR geo/geo_geometry.h5 \fB\-m\fR geo/geo_maskTempCoh.h5 \fB\-o\fR dsc
-.IP
-## import to kite
-spool outfile_name    # do quadtree, covariance/aps, then File > Save Scene; it is then ready for GROND or BEAT
diff -pruN 1.3.3-2/debian/man/mintpy-save_kmz.1 1.4.0-1/debian/man/mintpy-save_kmz.1
--- 1.3.3-2/debian/man/mintpy-save_kmz.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-save_kmz.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,123 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-SAVE_KMZ "1" "May 2022" "mintpy-save_kmz v1.3.3" "User Commands"
-.SH NAME
-mintpy-save_kmz \- Generate Google Earth KMZ file (overlay / placemarks for files in geo / radar coordinates).
-.SH DESCRIPTION
-usage: save_kmz.py [\-h] [\-m FILE] [\-\-zero\-mask] [\-o OUTFILE] [\-\-kk] [\-g FILE]
-.IP
-[\-\-step STEP] [\-v MIN MAX] [\-u UNIT] [\-c COLORMAP] [\-\-wrap]
-[\-\-wrap\-range MIN MAX] [\-\-dpi NUM] [\-\-figsize WID LEN]
-[\-\-cbar\-loc {lower left,lower right,upper left,upper right}]
-[\-\-cbar\-label LABEL] [\-\-cbar\-bin\-num NUM] [\-\-noreference]
-[\-\-ref\-color COLOR] [\-\-ref\-size NUM] [\-\-ref\-marker SYMBOL]
-[\-\-sub\-x XMIN XMAX] [\-\-sub\-y YMIN YMAX]
-[\-\-sub\-lat LATMIN LATMAX] [\-\-sub\-lon LONMIN LONMAX]
-file [dset]
-.PP
-Generate Google Earth KMZ file (overlay / placemarks for files in geo / radar coordinates).
-.SS "positional arguments:"
-.TP
-file
-file to be converted, in geo or radar coordinate.
-Note: for files in radar\-coordinate, the corresponding lookup table
-in radar\-coordinate (as provided by ISCE) is required.
-.TP
-dset
-date of timeseries, or date12 of interferograms to be converted
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-m\fR FILE, \fB\-\-mask\fR FILE
-mask file for display
-.TP
-\fB\-\-zero\-mask\fR
-Mask pixels with zero value.
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-output\fR OUTFILE
-output file base name. Extension is fixed to .kmz
-.TP
-\fB\-\-kk\fR, \fB\-\-keep\-kml\fR, \fB\-\-keep\-kml\-file\fR
-Do not remove KML and data/resource files after compressing into KMZ file.
-.TP
-\fB\-g\fR FILE, \fB\-\-geom\fR FILE
-geometry file with lat/lon. [required for file in radar coordinates]
-.TP
-\fB\-\-step\fR STEP
-output one point per {step} pixels, to reduce file size (default: 5).
-For file in radar\-coordinate ONLY.
-.TP
-\fB\-v\fR MIN MAX, \fB\-\-vlim\fR MIN MAX
-Y/value limits for plotting.
-.TP
-\fB\-u\fR UNIT
-unit for display.
-.TP
-\fB\-c\fR COLORMAP, \fB\-\-cm\fR COLORMAP, \fB\-\-colormap\fR COLORMAP
-Colormap for plotting. Default: jet
-.TP
-\fB\-\-wrap\fR
-re\-wrap data to display data in fringes.
-.TP
-\fB\-\-wrap\-range\fR MIN MAX
-range of one cycle after wrapping, default: [\-pi, pi]
-.SS "Figure:"
-.TP
-\fB\-\-dpi\fR NUM
-Figure DPI (dots per inch). Default: 600
-.TP
-\fB\-\-figsize\fR WID LEN
-Figure size in inches \- width and length
-.TP
-\fB\-\-cbar\-loc\fR {lower left,lower right,upper left,upper right}
-Location of colorbar in the screen. Default: lower left.
-.TP
-\fB\-\-cbar\-label\fR LABEL
-Colorbar label. Default: Mean LOS velocity
-.TP
-\fB\-\-cbar\-bin\-num\fR NUM
-Colorbar bin number (default: None).
-.SS "Reference Pixel:"
-.TP
-\fB\-\-noreference\fR
-do not show reference point
-.TP
-\fB\-\-ref\-color\fR COLOR
-marker color of reference point
-.TP
-\fB\-\-ref\-size\fR NUM
-marker size of reference point (default: 5).
-.TP
-\fB\-\-ref\-marker\fR SYMBOL
-marker symbol of reference point
-.SS "Subset:"
-.IP
-Display dataset in subset range
-.TP
-\fB\-\-sub\-x\fR XMIN XMAX, \fB\-\-subx\fR XMIN XMAX, \fB\-\-subset\-x\fR XMIN XMAX
-subset display in x/cross\-track/range direction
-.TP
-\fB\-\-sub\-y\fR YMIN YMAX, \fB\-\-suby\fR YMIN YMAX, \fB\-\-subset\-y\fR YMIN YMAX
-subset display in y/along\-track/azimuth direction
-.TP
-\fB\-\-sub\-lat\fR LATMIN LATMAX, \fB\-\-sublat\fR LATMIN LATMAX, \fB\-\-subset\-lat\fR LATMIN LATMAX
-subset display in latitude
-.TP
-\fB\-\-sub\-lon\fR LONMIN LONMAX, \fB\-\-sublon\fR LONMIN LONMAX, \fB\-\-subset\-lon\fR LONMIN LONMAX
-subset display in longitude
-.SS "example:"
-.IP
-save_kmz.py geo/geo_velocity.h5
-save_kmz.py geo/geo_velocity.h5 \fB\-u\fR cm \fB\-\-wrap\fR \fB\-\-wrap\-range\fR \fB\-3\fR 7
-.IP
-save_kmz.py geo/geo_timeseries_ERA5_ramp_demErr.h5 20101120
-save_kmz.py geo/geo_timeseries_ERA5_demErr.h5 20200505_20200517
-.IP
-save_kmz.py geo/geo_ifgramStack.h5 20101120_20110220
-save_kmz.py geo/geo_geometryRadar.h5 height \fB\-\-cbar\-label\fR Elevation
-.IP
-# to generate placemarks for the file in radar coordinates, the corresponding
-# geometry file with latitude & longitude in radar coordinates is required,
-# such as provided by ISCE + MintPy workflow
-save_kmz.py velocity.h5 \fB\-\-sub\-x\fR 300 800 \fB\-\-sub\-y\fR 1000 1500 \fB\-\-step\fR 1
diff -pruN 1.3.3-2/debian/man/mintpy-save_kmz_timeseries.1 1.4.0-1/debian/man/mintpy-save_kmz_timeseries.1
--- 1.3.3-2/debian/man/mintpy-save_kmz_timeseries.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-save_kmz_timeseries.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,77 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-SAVE_KMZ_TIMESERIES "1" "May 2022" "mintpy-save_kmz_timeseries v1.3.3" "User Commands"
-.SH NAME
-mintpy-save_kmz_timeseries \- Generare Google Earth KMZ file for time\-series file.
-.SH DESCRIPTION
-usage: save_kmz_timeseries.py [\-h] [\-\-vel FILE] [\-\-tcoh FILE] [\-\-mask FILE]
-.TP
-[\-o OUTFILE] [\-\-steps STEPS STEPS STEPS]
-[\-\-level\-of\-details LODS LODS LODS LODS]
-[\-\-vlim VMIN VMAX] [\-\-wrap]
-[\-\-colormap COLORMAP] [\-\-cutoff CUTOFF]
-[\-\-min\-percentage MIN_PERCENTAGE] [\-\-kk]
-timeseries_file
-.PP
-Generate Google Earth KMZ file for time\-series file.
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-\-kk\fR, \fB\-\-keep\-kml\fR, \fB\-\-keep\-kml\-file\fR
-Do not remove KML and data/resource files after compressing into KMZ file.
-.SS "Input files:"
-.IP
-File/Dataset to display
-.TP
-timeseries_file
-Timeseries file to generate KML for
-.TP
-\fB\-\-vel\fR FILE
-Velocity file, used for the color of the dots
-.TP
-\fB\-\-tcoh\fR FILE
-temporal coherence file, used for stat info
-.TP
-\fB\-\-mask\fR FILE
-Mask file
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-output\fR OUTFILE
-Output KMZ file name.
-.SS "Display options:"
-.IP
-configurations for the display
-.TP
-\fB\-\-steps\fR STEPS STEPS STEPS
-list of steps for output pixel (default: [20, 5, 2]).
-Set to [20, 5, 0] to skip the 3rd high\-resolution level to reduce file size.
-.TP
-\fB\-\-level\-of\-details\fR LODS LODS LODS LODS, \fB\-\-lods\fR LODS LODS LODS LODS
-list of levels of detail to determine the visible range while browsing. Default: 0, 1500, 4000, \fB\-1\fR.
-Ref: https://developers.google.com/kml/documentation/kml_21tutorial
-.TP
-\fB\-\-vlim\fR VMIN VMAX, \fB\-v\fR VMIN VMAX
-min/max range in cm/yr for color coding.
-.TP
-\fB\-\-wrap\fR
-re\-wrap data to [VMIN, VMAX) for color coding.
-.TP
-\fB\-\-colormap\fR COLORMAP, \fB\-c\fR COLORMAP
-colormap used for display, i.e. jet, RdBu, hsv, jet_r, temperature, viridis,  etc.
-colormaps in Matplotlib \- http://matplotlib.org/users/colormaps.html
-colormaps in GMT \- http://soliton.vm.bytemark.co.uk/pub/cpt\-city/
-.SS "HD for deforming areas:"
-.IP
-High resolution output for deforming areas
-.TP
-\fB\-\-cutoff\fR CUTOFF
-choose points with velocity >= cutoff * MAD. Default: 3.
-.TP
-\fB\-\-min\-percentage\fR MIN_PERCENTAGE, \fB\-\-min\-perc\fR MIN_PERCENTAGE
-choose boxes in which >= the given percentage of pixels are deforming. Default: 0.2.
-.SS "example:"
-.IP
-cd \fI\,$PROJECT_NAME/mintpy/geo\/\fP
-save_kmz_timeseries.py geo_timeseries_ERA5_ramp_demErr.h5
-save_kmz_timeseries.py geo_timeseries_ERA5_ramp_demErr.h5 \fB\-v\fR \fB\-5\fR 5 \fB\-\-wrap\fR
-save_kmz_timeseries.py timeseries_ERA5_demErr.h5 \fB\-\-vel\fR velocity.h5 \fB\-\-tcoh\fR temporalCoherence.h5 \fB\-\-mask\fR maskTempCoh.h5
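The "HD for deforming areas" rule can be sketched with a median-absolute-deviation threshold; whether the script uses exactly this MAD definition is an assumption:

    import numpy as np

    vel = np.random.randn(200, 300) * 0.01      # toy velocity field [m/yr]
    cutoff = 3                                  # same role as --cutoff
    med = np.nanmedian(vel)
    mad = np.nanmedian(np.abs(vel - med))
    deforming = np.abs(vel - med) >= cutoff * mad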
diff -pruN 1.3.3-2/debian/man/mintpy-save_qgis.1 1.4.0-1/debian/man/mintpy-save_qgis.1
--- 1.3.3-2/debian/man/mintpy-save_qgis.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-save_qgis.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,37 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-SAVE_QGIS "1" "May 2022" "mintpy-save_qgis v1.3.3" "User Commands"
-.SH NAME
-mintpy-save_qgis \- Convert to QGIS compatible ps time\-series
-.SH DESCRIPTION
-usage: save_qgis.py [\-h] \fB\-g\fR GEOM_FILE [\-o SHP_FILE] [\-b Y0 Y1 X0 X1]
-.TP
-[\-B S N W E]
-ts_file
-.PP
-Convert to QGIS\-compatible PS time\-series
-.SS "positional arguments:"
-.TP
-ts_file
-time\-series HDF5 file
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-g\fR GEOM_FILE, \fB\-\-geom\fR GEOM_FILE
-geometry HDF5 file
-.TP
-\fB\-o\fR SHP_FILE, \fB\-\-outshp\fR SHP_FILE
-Output shape file.
-.TP
-\fB\-b\fR Y0 Y1 X0 X1, \fB\-\-bbox\fR Y0 Y1 X0 X1
-bounding box : minLine maxLine minPixel maxPixel
-.TP
-\fB\-B\fR S N W E, \fB\-\-geo\-bbox\fR S N W E
-bounding box in lat lon: South North West East
-.SS "example:"
-.IP
-save_qgis.py timeseries_ERA5_ramp_demErr.h5 \fB\-g\fR inputs/geometrygeo.h5
-save_qgis.py timeseries_ERA5_ramp_demErr.h5 \fB\-g\fR inputs/geometryRadar.h5
-save_qgis.py geo/geo_timeseries_ERA5_ramp_demErr.h5 \fB\-g\fR geo/geo_geometryRadar.h5
-save_qgis.py timeseries_ERA5_ramp_demErr.h5 \fB\-g\fR inputs/geometryRadar.h5 \fB\-b\fR 200 150 400 350
diff -pruN 1.3.3-2/debian/man/mintpy-save_roipac.1 1.4.0-1/debian/man/mintpy-save_roipac.1
--- 1.3.3-2/debian/man/mintpy-save_roipac.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-save_roipac.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,67 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-SAVE_ROIPAC "1" "May 2022" "mintpy-save_roipac v1.3.3" "User Commands"
-.SH NAME
-mintpy-save_roipac \- Convert MintPy HDF5 file to ROI_PAC format.
-.SH DESCRIPTION
-usage: save_roipac.py [\-h] [\-o OUTFILE] [\-m MASK_FILE [MASK_FILE ...]]
-.TP
-[\-\-ref\-yx REF_YX REF_YX] [\-\-ref\-lalo REF_LALO REF_LALO]
-[\-\-keep\-all\-metadata]
-file [dset]
-.PP
-Convert MintPy HDF5 file to ROI_PAC format.
-.SS "positional arguments:"
-.TP
-file
-HDF5 file to be converted.
-.TP
-dset
-date/date12 of timeseries, or date12 of interferograms to be converted
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-output\fR OUTFILE
-output file name.
-.TP
-\fB\-m\fR MASK_FILE [MASK_FILE ...], \fB\-\-mask\fR MASK_FILE [MASK_FILE ...]
-mask file
-.TP
-\fB\-\-ref\-yx\fR REF_YX REF_YX
-custom reference pixel in y/x
-.TP
-\fB\-\-ref\-lalo\fR REF_LALO REF_LALO
-custom reference pixel in lat/lon
-.TP
-\fB\-\-keep\-all\-metadata\fR
-Do not clean the metadata to match the ROI_PAC format
-.SS "example:"
-.IP
-#\-\-\-\-\- unwrapped phase
-#for velocity: output an interferogram with temporal baseline in DATE12 metadata
-save_roipac.py  velocity.h5
-save_roipac.py  velocity.h5 \fB\-m\fR maskTempCoh.h5 maskAoiShinmoe.h5
-.IP
-#for time\-series: specify (date1_)date2
-save_roipac.py  timeseries_ERA5_ramp_demErr.h5  #use the last date
-save_roipac.py  timeseries_ERA5_ramp_demErr.h5  20050601
-save_roipac.py  timeseries_ERA5_ramp_demErr.h5  20040728_20050601
-.IP
-#for HDF\-EOS5: specify displacement\-date1_date2
-save_roipac.py  S1_IW12_128_0593_0597_20141213_20180619.he5  displacement\-20170904_20170916
-save_roipac.py  S1_IW12_128_0593_0597_20141213_20180619.he5  displacement\-20170916
-.IP
-#for ifgramStack: specify date1_date2
-save_roipac.py  inputs/ifgramStack.h5  unwrapPhase\-20091225_20100723
-save_roipac.py  inputs/ifgramStack.h5  unwrapPhase\-20091225_20100723  \fB\-\-ref\-yx\fR 640 810
-.IP
-#\-\-\-\-\- coherence
-save_roipac.py  inputs/ifgramStack.h5  coherence\-20091225_20100723
-save_roipac.py  temporalCoherence.h5
-save_roipac.py  S1_IW12_128_0593_0597_20141213_20180619.he5 temporalCoherence \fB\-o\fR 20170904_20170916.cor
-.IP
-#\-\-\-\-\- DEM
-save_roipac.py  geo_geometryRadar.h5  height \fB\-o\fR srtm1.dem
-save_roipac.py  geo_geometryRadar.h5  height \fB\-o\fR srtm1.hgt
-save_roipac.py  S1_IW12_128_0593_0597_20141213_20180619.he5 height \fB\-o\fR srtm1.dem
diff -pruN 1.3.3-2/debian/man/mintpy-smallbaselineApp.1 1.4.0-1/debian/man/mintpy-smallbaselineApp.1
--- 1.3.3-2/debian/man/mintpy-smallbaselineApp.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-smallbaselineApp.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,86 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-SMALLBASELINEAPP "1" "May 2022" "mintpy-smallbaselineApp v1.3.3" "User Commands"
-.SH NAME
-mintpy-smallbaselineApp \- Routine Time Series Analysis for Small Baseline InSAR Stack
-.SH DESCRIPTION
-usage: smallbaselineApp.py [\-h] [\-\-dir WORKDIR] [\-g] [\-H] [\-v] [\-\-plot]
-.TP
-[\-\-start STEP] [\-\-end STEP] [\-\-dostep STEP]
-[customTemplateFile]
-.PP
-Routine Time Series Analysis for Small Baseline InSAR Stack
-.SS "positional arguments:"
-.TP
-customTemplateFile
-custom template with option settings.
-ignored if the default smallbaselineApp.cfg is input.
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-\-dir\fR WORKDIR, \fB\-\-work\-dir\fR WORKDIR
-work directory, (default: ./).
-.TP
-\fB\-g\fR
-generate default template (if it does not exist) and exit.
-.TP
-\fB\-H\fR
-print the default template file and exit.
-.TP
-\fB\-v\fR, \fB\-\-version\fR
-print software version and exit
-.TP
-\fB\-\-plot\fR
-plot results [only] without running smallbaselineApp.
-.SS "steps processing (start/end/dostep):"
-.IP
-Command line options for steps processing with names are chosen from the following list:
-.IP
-['load_data', 'modify_network', 'reference_point', 'quick_overview', 'correct_unwrap_error']
-['invert_network', 'correct_LOD', 'correct_SET', 'correct_troposphere', 'deramp', 'correct_topography']
-['residual_RMS', 'reference_date', 'velocity', 'geocode', 'google_earth', 'hdfeos5']
-.IP
-In order to use either \fB\-\-start\fR or \fB\-\-dostep\fR, it is necessary that a
-previous run was done using one of the steps options to process at least
-through the step immediately preceding the starting step of the current run.
-.TP
-\fB\-\-start\fR STEP
-start processing at the named step (default: load_data).
-.TP
-\fB\-\-end\fR STEP, \fB\-\-stop\fR STEP
-end processing at the named step (default: hdfeos5)
-.TP
-\fB\-\-dostep\fR STEP
-run processing at the named step only
-.SS "reference:"
-.IP
-Yunjun, Z., H. Fattahi, and F. Amelung (2019), Small baseline InSAR time series analysis:
-Unwrapping error correction and noise reduction, Computers & Geosciences, 133, 104331,
-doi:10.1016/j.cageo.2019.104331.
-.SS "example:"
-.TP
-smallbaselineApp.py
-#run with default template 'smallbaselineApp.cfg'
-.TP
-smallbaselineApp.py <custom_template>
-#run with default and custom templates
-.TP
-smallbaselineApp.py \fB\-h\fR / \fB\-\-help\fR
-#help
-.TP
-smallbaselineApp.py \fB\-H\fR
-#print    default template options
-.TP
-smallbaselineApp.py \fB\-g\fR
-#generate default template if it does not exist
-.TP
-smallbaselineApp.py \fB\-g\fR <custom_template>
-#generate/update default template based on custom template
-.TP
-smallbaselineApp.py \fB\-\-plot\fR
-#plot results without run
-.IP
-# Run with \fB\-\-start\fR/stop/dostep options
-smallbaselineApp.py GalapagosSenDT128.template \fB\-\-dostep\fR velocity  #run at step 'velocity' only
-smallbaselineApp.py GalapagosSenDT128.template \fB\-\-end\fR load_data    #end after step 'load_data'
diff -pruN 1.3.3-2/debian/man/mintpy-solid_earth_tides.1 1.4.0-1/debian/man/mintpy-solid_earth_tides.1
--- 1.3.3-2/debian/man/mintpy-solid_earth_tides.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-solid_earth_tides.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,56 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-SOLID_EARTH_TIDES "1" "May 2022" "mintpy-solid_earth_tides v1.3.3" "User Commands"
-.SH NAME
-mintpy-solid_earth_tides \- Solid Earth tides (SET) correction
-.SH DESCRIPTION
-usage: solid_earth_tides.py [\-h] \fB\-g\fR GEOM_FILE [\-\-date\-wise\-acq\-time]
-.TP
-[\-\-verbose] [\-\-update] [\-\-set\-file SET_FILE]
-[\-o COR_DIS_FILE]
-dis_file
-.PP
-Solid Earth tides (SET) correction
-.SS "positional arguments:"
-.TP
-dis_file
-timeseries HDF5 file, i.e. timeseries.h5
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-g\fR GEOM_FILE, \fB\-\-geometry\fR GEOM_FILE
-geometry file including incidence/azimuthAngle.
-.TP
-\fB\-\-date\-wise\-acq\-time\fR
-Use the exact date\-wise acquisition time instead of the common one for tides calculation.
-For ISCE\-2/topsStack products only; requires the ../reference and ../secondarys folders.
-There is a <1 min difference between S1A/B \-> negligible impact for InSAR.
-.TP
-\fB\-\-verbose\fR
-Verbose message.
-.TP
-\fB\-\-update\fR
-Enable update mode.
-.TP
-\fB\-\-set\-file\fR SET_FILE
-line\-of\-sight solid earth tide file name
-.TP
-\fB\-o\fR COR_DIS_FILE
-Output file name for the corrected timeseries.
-.SS "reference:"
-.IP
-Milbert, D., Solid Earth Tide, http://geodesyworld.github.io/SOFTS/solid.htm, Accessed 2020 September 6.
-Fattahi, H., Z. Yunjun, X. Pi, P. S. Agram, P. Rosen, and Y. Aoki (2020), Absolute geolocation of SAR
-.IP
-Big\-Data: The first step for operational InSAR time\-series analysis, AGU Fall Meeting 2020, 1\-17 Dec 2020.
-.SS "template options:"
-.IP
-## Solid Earth tides (SET) correction [need to install insarlab/PySolid]
-## reference: Milbert (2018); Fattahi et al. (2020, AGU)
-mintpy.solidEarthTides = auto #[yes / no], auto for no
-.SS "example:"
-.IP
-solid_earth_tides.py timeseries.h5 \fB\-g\fR inputs/geometryRadar.h5
-solid_earth_tides.py timeseries.h5 \fB\-g\fR inputs/geometryGeo.h5
-solid_earth_tides.py geo/geo_timeseries_ERA5_demErr.h5 \fB\-g\fR geo/geo_geometryRadar.h5
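Projecting the 3-component (E, N, U) tide prediction into the radar line of sight is a dot product with the LOS unit vector; the sign convention of the azimuth angle below is an assumption and must match the geometry file convention:

    import numpy as np

    inc = np.deg2rad(34.0)           # incidence angle from vertical
    az  = np.deg2rad(-102.0)         # LOS azimuth angle (convention-dependent)
    de, dn, du = 0.02, -0.01, 0.05   # toy E/N/U tide components [m]

    d_los = (de * np.sin(inc) * np.sin(az) * -1
             + dn * np.sin(inc) * np.cos(az)
             + du * np.cos(inc))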
diff -pruN 1.3.3-2/debian/man/mintpy-spatial_average.1 1.4.0-1/debian/man/mintpy-spatial_average.1
--- 1.3.3-2/debian/man/mintpy-spatial_average.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-spatial_average.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,33 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-SPATIAL_AVERAGE "1" "May 2022" "mintpy-spatial_average v1.3.3" "User Commands"
-.SH NAME
-mintpy-spatial_average \- Calculate average in space
-.SH DESCRIPTION
-usage: spatial_average.py [\-h] [\-d DATASETNAME] [\-m MASK_FILE] [\-\-nodisplay]
-.IP
-file
-.PP
-Calculate average in space
-.SS "positional arguments:"
-.TP
-file
-File to calculate spatial average
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-d\fR DATASETNAME, \fB\-\-dset\fR DATASETNAME, \fB\-\-dataset\fR DATASETNAME
-dataset used to calculate, for ifgramStack file only.
-.TP
-\fB\-m\fR MASK_FILE, \fB\-\-mask\fR MASK_FILE
-Mask file for the calculation
-.TP
-\fB\-\-nodisplay\fR
-save and do not display the figure
-.SS "example:"
-.TP
-spatial_average.py inputs/ifgramStack.h5
-\fB\-d\fR coherence \fB\-m\fR maskConnComp.h5
-.IP
-spatial_average.py timeseries_ERA5_demErr.h5 \fB\-m\fR maskTempCoh.h5
diff -pruN 1.3.3-2/debian/man/mintpy-spatial_filter.1 1.4.0-1/debian/man/mintpy-spatial_filter.1
--- 1.3.3-2/debian/man/mintpy-spatial_filter.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-spatial_filter.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,74 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-SPATIAL_FILTER "1" "May 2022" "mintpy-spatial_filter v1.3.3" "User Commands"
-.SH NAME
-mintpy-spatial_filter \- Spatial filtering of 2D image.
-.SH DESCRIPTION
-usage: spatial_filter.py [\-h]
-.TP
-[\-f {lowpass_gaussian,highpass_gaussian,lowpass_avg,highpass_avg,sobel,roberts,canny,double_difference}]
-[\-p [FILTER_PAR ...]] [\-o OUTFILE]
-file [dset ...]
-.PP
-Spatial filtering of 2D image.
-.SS "positional arguments:"
-.TP
-file
-File to be filtered
-.TP
-dset
-optional \- dataset(s) to filter (default: []).
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-f\fR {lowpass_gaussian,highpass_gaussian,lowpass_avg,highpass_avg,sobel,roberts,canny,double_difference}
-Filter type (default: lowpass_gaussian).
-Check Bekaert et al. (2020) for double_difference;
-Check scikit\-image as below for the other filters:
-.IP
-http://scikit\-image.org/docs/dev/api/skimage.filters.html
-.TP
-\fB\-p\fR [FILTER_PAR ...], \fB\-\-filter_par\fR [FILTER_PAR ...]
-Filter parameters for each filter type. Defaults:
-.TP
-Sigma
-for low/high\-pass Gaussian filter, default: 3.0
-.TP
-Kernel Size
-for low/high\-pass average filter, default: 5
-.IP
-Kernel Radius for double difference local and regional filters, default: 1 10
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-outfile\fR OUTFILE
-Output file name.
-.SS "references:"
-.IP
-Bekaert, David PS, et al. "InSAR\-based detection method for mapping and monitoring slow\-moving
-landslides in remote regions with steep and mountainous terrain: An application to Nepal."
-Remote Sensing of Environment 249 (2020), doi:10.1016/j.rse.2020.111983.
-.SS "example:"
-.TP
-spatial_filter.py
-velocity.h5
-.TP
-spatial_filter.py
-timeseries.h5 \fB\-f\fR lowpass_avg       \fB\-p\fR 5
-.TP
-spatial_filter.py
-velocity.h5   \fB\-f\fR lowpass_avg       \fB\-p\fR 5
-.TP
-spatial_filter.py
-velocity.h5   \fB\-f\fR highpass_gaussian \fB\-p\fR 3
-.TP
-spatial_filter.py
-velocity.h5   \fB\-f\fR sobel
-.TP
-spatial_filter.py
-ifgramStack.h5 unwrapPhase
-.TP
-spatial_filter.py
-ifgramStack.h5 unwrapPhase \fB\-f\fR lowpass_avg \fB\-p\fR 5
-.TP
-spatial_filter.py
-ifgramStack.h5 unwrapPhase \fB\-f\fR double_difference \fB\-p\fR 1 10
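For the two Gaussian filters the relationship is simple: the highpass result is the image minus its lowpass version. A minimal sketch using scipy (the script itself points to scikit-image, so this is an analogy rather than the exact call):

    import numpy as np
    from scipy.ndimage import gaussian_filter

    data = np.random.rand(200, 300)        # toy 2D image
    sigma = 3.0                            # plays the role of -p for gaussian
    lowpass = gaussian_filter(data, sigma)
    highpass = data - lowpass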
diff -pruN 1.3.3-2/debian/man/mintpy-subset.1 1.4.0-1/debian/man/mintpy-subset.1
--- 1.3.3-2/debian/man/mintpy-subset.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-subset.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,96 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-SUBSET "1" "May 2022" "mintpy-subset v1.3.3" "User Commands"
-.SH NAME
-mintpy-subset \- Generate a subset from file/dataset
-.SH DESCRIPTION
-usage: subset.py [\-h] [\-x SUBSET_X SUBSET_X] [\-y SUBSET_Y SUBSET_Y]
-.IP
-[\-l SUBSET_LAT SUBSET_LAT] [\-L SUBSET_LON SUBSET_LON]
-[\-t TEMPLATE_FILE] [\-r REFERENCE] [\-\-tight]
-[\-\-outfill FILL_VALUE] [\-\-no\-parallel] [\-o OUTFILE]
-[\-\-lookup LOOKUP_FILE]
-file [file ...]
-.PP
-Generate a subset from file/dataset
-.SS "positional arguments:"
-.TP
-file
-File(s) to subset/crop
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-x\fR SUBSET_X SUBSET_X, \fB\-\-sub\-x\fR SUBSET_X SUBSET_X, \fB\-\-subset\-x\fR SUBSET_X SUBSET_X
-subset range in x/cross\-track/column direction
-.TP
-\fB\-y\fR SUBSET_Y SUBSET_Y, \fB\-\-sub\-y\fR SUBSET_Y SUBSET_Y, \fB\-\-subset\-y\fR SUBSET_Y SUBSET_Y
-subset range in y/along\-track/row direction
-.TP
-\fB\-l\fR SUBSET_LAT SUBSET_LAT, \fB\-\-lat\fR SUBSET_LAT SUBSET_LAT, \fB\-\-sub\-lat\fR SUBSET_LAT SUBSET_LAT, \fB\-\-subset\-lat\fR SUBSET_LAT SUBSET_LAT
-subset range in latitude
-.TP
-\fB\-L\fR SUBSET_LON SUBSET_LON, \fB\-\-lon\fR SUBSET_LON SUBSET_LON, \fB\-\-sub\-lon\fR SUBSET_LON SUBSET_LON, \fB\-\-subset\-lon\fR SUBSET_LON SUBSET_LON
-subset range in longitude
-.TP
-\fB\-t\fR TEMPLATE_FILE, \fB\-\-template\fR TEMPLATE_FILE
-template file with subset setting.  i.e.
-mintpy.subset.yx    = 300:800,1000:3500
-mintpy.subset.lalo  = 30.2:30.5,130.1:131.3
-.TP
-\fB\-r\fR REFERENCE, \fB\-\-reference\fR REFERENCE
-reference file, subset to the same lalo as reference file
-.TP
-\fB\-\-tight\fR
-subset geomap_*.trans file based on non\-zero values.
-For geocoded file(s) only. A convenient way to get rid of extra wide space due to a "too large" DEM.
-.TP
-\fB\-\-outfill\fR FILL_VALUE
-fill subset area out of data coverage with input value. i.e.
-np.nan, 0, 1000, ...
-By default, it's None for no\-outfill.
-.TP
-\fB\-\-no\-parallel\fR
-Disable parallel processing. Disabled automatically for a single input file.
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-output\fR OUTFILE
-output file name
-add prefix "sub_" if input/output files are in the same directory;
-same filename otherwise.
-.SS "Datasets:"
-.IP
-Create a subset of entire dataset in radar using y/x or lat/lon option
-Including *.trans and *.dem in geo coord.
-.TP
-\fB\-\-lookup\fR LOOKUP_FILE
-calculate bounding box in geo/radar coord from input radar/geo subset range
-using transformation file, i.e. geomap_4rlks.trans
-All input radar coord files should be the same size/coverage; same for all geo coord files.
-.SS "template options:"
-.IP
-## if both yx and lalo are specified, use lalo option unless a) no lookup file AND b) dataset is in radar coord
-mintpy.subset.yx       = auto    #[1800:2000,700:800 / no], auto for no
-mintpy.subset.lalo     = auto    #[31.5:32.5,130.5:131.0 / no], auto for no
-.SS "example:"
-.TP
-subset.py inputs/ifgramStack.h5 \fB\-y\fR 400
-1500 \fB\-x\fR 200   600
-.TP
-subset.py geo_velocity.h5
-\fB\-l\fR 30.5 30.8 \fB\-L\fR 130.3 130.9
-.TP
-subset.py 030405_090801.unw
-\fB\-t\fR SinabungT495F50AlosA.template
-.IP
-# subset to the same coverage as the reference file
-subset.py geo_incidence.h5 \fB\-r\fR subset_geo_velocity.h5
-.IP
-# multiple files input
-subset.py *velocity*.h5 timeseries*.h5  \fB\-y\fR 400 1500  \fB\-x\fR 200 600
-.IP
-# crop to larger area with custom fill value
-subset.py geo_velocity.h5 \fB\-l\fR 32.2 33.5  \fB\-\-outfill\fR np.nan
-subset.py Mask.h5 \fB\-x\fR 500 3500 \fB\-\-outfill\fR 0
-.IP
-# "tight" subset for geocoded lookup table larger than data file
-subset.py geomap_4rlks.trans \fB\-\-tight\fR
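For a geocoded file, the lat/lon-to-row/column conversion behind the --lat/--lon options is pure arithmetic on the Y_FIRST/Y_STEP/X_FIRST/X_STEP attributes; a sketch with toy values:

    y_first, y_step = 32.9, -0.000833    # toy values from the file attributes
    x_first, x_step = 130.0, 0.000833

    lat0, lat1 = 30.5, 30.8              # requested latitude range
    lon0, lon1 = 130.3, 130.9            # requested longitude range

    row0 = int((lat1 - y_first) / y_step)   # y_step < 0: max latitude first
    row1 = int((lat0 - y_first) / y_step)
    col0 = int((lon0 - x_first) / x_step)
    col1 = int((lon1 - x_first) / x_step)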
diff -pruN 1.3.3-2/debian/man/mintpy-temporal_average.1 1.4.0-1/debian/man/mintpy-temporal_average.1
--- 1.3.3-2/debian/man/mintpy-temporal_average.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-temporal_average.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,31 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-TEMPORAL_AVERAGE "1" "May 2022" "mintpy-temporal_average v1.3.3" "User Commands"
-.SH NAME
-mintpy-temporal_average \- Calculate temporal average (stacking) of multi\-temporal datasets
-.SH DESCRIPTION
-usage: temporal_average.py [\-h] [\-d DATASETNAME] [\-o OUTFILE] [\-\-update] file
-.PP
-Calculate temporal average (stacking) of multi\-temporal datasets
-.SS "positional arguments:"
-.TP
-file
-input file with multi\-temporal datasets
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-d\fR DATASETNAME, \fB\-\-ds\fR DATASETNAME, \fB\-\-dataset\fR DATASETNAME
-dataset name to be averaged, for files with multiple dataset families,
-e.g. ifgramStack.h5
-default: coherence
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-outfile\fR OUTFILE
-output file name
-.TP
-\fB\-\-update\fR
-Enable update checking for \fB\-\-nonzero\fR option.
-.SS "example:"
-.IP
-temporal_average.py ./inputs/ifgramStack.h5 \fB\-d\fR unwrapPhase \fB\-o\fR avgPhaseVelocity.h5
-temporal_average.py ./inputs/ifgramStack.h5 \fB\-d\fR coherence   \fB\-o\fR avgSpatialCoh.h5
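The stacking itself is just a mean over the time/pair axis; a hedged h5py/numpy sketch (real-size data may need chunked reading to respect memory limits):

    import h5py
    import numpy as np

    with h5py.File('inputs/ifgramStack.h5') as f:
        avg = np.nanmean(f['coherence'][:], axis=0)   # (length, width) mean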
diff -pruN 1.3.3-2/debian/man/mintpy-temporal_derivative.1 1.4.0-1/debian/man/mintpy-temporal_derivative.1
--- 1.3.3-2/debian/man/mintpy-temporal_derivative.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-temporal_derivative.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,23 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-TEMPORAL_DERIVATIVE "1" "May 2022" "mintpy-temporal_derivative v1.3.3" "User Commands"
-.SH NAME
-mintpy-temporal_derivative \- Calculate the temporal derivative of time\-series.
-.SH DESCRIPTION
-usage: temporal_derivative.py [\-h] [\-o OUTFILE] file
-.PP
-Calculate the temporal derivative of time\-series.
-.SS "positional arguments:"
-.TP
-file
-time\-series displacement file.
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-output\fR OUTFILE
-output derivative time\-series file.
-.SS "example:"
-.TP
-temporal_derivative.py
-timeseries.h5
diff -pruN 1.3.3-2/debian/man/mintpy-temporal_filter.1 1.4.0-1/debian/man/mintpy-temporal_filter.1
--- 1.3.3-2/debian/man/mintpy-temporal_filter.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-temporal_filter.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,29 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-TEMPORAL_FILTER "1" "May 2022" "mintpy-temporal_filter v1.3.3" "User Commands"
-.SH NAME
-mintpy-temporal_filter \- Smoothing timeseries in time domain with a moving Gaussian window
-.SH DESCRIPTION
-usage: temporal_filter.py [\-h] [\-t TIME_WIN] [\-o OUTFILE] timeseries_file
-.PP
-Smoothing timeseries in time domain with a moving Gaussian window
-.SS "positional arguments:"
-.TP
-timeseries_file
-timeseries file to be smoothed.
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-t\fR TIME_WIN, \fB\-\-time\-win\fR TIME_WIN
-time window in years, default: 0.1 (sigma of the assumed Gaussian distribution).
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-outfile\fR OUTFILE
-Output file name.
-.SS "reference:"
-.IP
-Wikipedia: https://en.wikipedia.org/wiki/Gaussian_blur
-.SS "example:"
-.IP
-temporal_filter.py timeseries_ERA5_demErr.h5
-temporal_filter.py timeseries_ERA5_demErr.h5 \fB\-t\fR 0.1
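The \-\-time\-win value acts as the sigma, in years, of the Gaussian weighting. A minimal sketch of such a Gaussian-weighted moving average over an irregularly sampled series (illustrative only; the script's implementation may differ in detail):

    import numpy as np

    def gaussian_smooth(t, y, sigma=0.1):
        # Gaussian-weighted moving average for an irregular series;
        # t: acquisition times in decimal years, sigma: --time-win
        y_smooth = np.zeros_like(y, dtype=float)
        for i, ti in enumerate(t):
            w = np.exp(-0.5 * ((t - ti) / sigma) ** 2)  # weights
            y_smooth[i] = np.sum(w * y) / np.sum(w)
        return y_smooth

    t = np.linspace(2015.0, 2017.0, 25)
    y = np.sin(2 * np.pi * t) + 0.1 * np.random.randn(t.size)
    print(gaussian_smooth(t, y, sigma=0.1)[:3])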
diff -pruN 1.3.3-2/debian/man/mintpy-timeseries2velocity.1 1.4.0-1/debian/man/mintpy-timeseries2velocity.1
--- 1.3.3-2/debian/man/mintpy-timeseries2velocity.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-timeseries2velocity.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,177 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-TIMESERIES2VELOCITY "1" "May 2022" "mintpy-timeseries2velocity v1.3.3" "User Commands"
-.SH NAME
-mintpy-timeseries2velocity \- Estimate velocity / time functions from time\-series.
-.SH DESCRIPTION
-usage: timeseries2velocity.py [\-h] [\-\-template TEMPLATE_FILE]
-.TP
-[\-\-ts\-cov\-file TS_COV_FILE] [\-o OUTFILE]
-[\-\-update] [\-\-ref\-lalo LAT LON] [\-\-ref\-yx Y X]
-[\-\-ref\-date DATE] [\-\-start\-date STARTDATE]
-[\-\-end\-date ENDDATE]
-[\-\-exclude EXCLUDEDATE [EXCLUDEDATE ...]]
-[\-\-bootstrap] [\-\-bc BOOTSTRAPCOUNT]
-[\-\-poly POLYNOMIAL]
-[\-\-periodic PERIODIC [PERIODIC ...]]
-[\-\-step STEP [STEP ...]] [\-\-exp EXP [EXP ...]]
-[\-\-log LOG [LOG ...]] [\-\-save\-res]
-[\-\-res\-file RES_FILE] [\-\-ram MAXMEMORY]
-timeseries_file
-.PP
-Estimate velocity / time functions from time\-series.
-.SS "positional arguments:"
-.TP
-timeseries_file
-Time series file for velocity inversion.
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-\-template\fR TEMPLATE_FILE, \fB\-t\fR TEMPLATE_FILE
-template file with options
-.TP
-\fB\-\-ts\-cov\-file\fR TS_COV_FILE
-Time\-series (co)variance file for velocity STD calculation
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-output\fR OUTFILE
-output file name
-.TP
-\fB\-\-update\fR
-Enable update mode, and skip estimation if:
-1) output velocity file already exists, readable and newer than input file
-2) all configuration parameters are the same.
-.TP
-\fB\-\-ref\-lalo\fR LAT LON
-Change reference point LAT LON for estimation.
-.TP
-\fB\-\-ref\-yx\fR Y X
-Change reference point Y X for estimation.
-.TP
-\fB\-\-ref\-date\fR DATE
-Change reference date for estimation.
-.TP
-\fB\-\-ram\fR MAXMEMORY, \fB\-\-memory\fR MAXMEMORY
-Max amount of memory in GB to use (default: 4.0).
-Adjust according to your computer memory.
-.SS "dates of interest:"
-.TP
-\fB\-\-start\-date\fR STARTDATE, \fB\-s\fR STARTDATE
-start date for velocity estimation
-.TP
-\fB\-\-end\-date\fR ENDDATE, \fB\-e\fR ENDDATE
-end date for velocity estimation
-.TP
-\fB\-\-exclude\fR EXCLUDEDATE [EXCLUDEDATE ...], \fB\-\-ex\fR EXCLUDEDATE [EXCLUDEDATE ...]
-date(s) not included in velocity estimation, i.e.:
-\fB\-\-exclude\fR 20040502 20060708 20090103
-\fB\-\-exclude\fR exclude_date.txt
-exclude_date.txt:
-20040502
-20060708
-20090103
-.SS "bootstrapping:"
-.IP
-estimating the mean / STD of the velocity estimator
-.TP
-\fB\-\-bootstrap\fR, \fB\-\-bootstrapping\fR
-Enable bootstrapping to estimate the mean and STD of the velocity estimator.
-.TP
-\fB\-\-bc\fR BOOTSTRAPCOUNT, \fB\-\-bootstrap\-count\fR BOOTSTRAPCOUNT
-number of iterations for bootstrapping (default: 400).
-.SS "Deformation Model:"
-.IP
-A suite of time functions
-.TP
-\fB\-\-poly\fR POLYNOMIAL, \fB\-\-polynomial\fR POLYNOMIAL, \fB\-\-poly\-order\fR POLYNOMIAL
-a polynomial function with the input degree (default: 1). E.g.:
-\fB\-\-poly\fR 1                                  # linear
-\fB\-\-poly\fR 2                                  # quadratic
-\fB\-\-poly\fR 3                                  # cubic
-.TP
-\fB\-\-periodic\fR PERIODIC [PERIODIC ...], \fB\-\-period\fR PERIODIC [PERIODIC ...], \fB\-\-peri\fR PERIODIC [PERIODIC ...]
-periodic function(s) with period in decimal years (default: []). E.g.:
-\fB\-\-periodic\fR 1.0                            # an annual cycle
-\fB\-\-periodic\fR 1.0 0.5                        # an annual cycle plus a semi\-annual cycle
-.TP
-\fB\-\-step\fR STEP [STEP ...]
-step function(s) at YYYYMMDD (default: []). E.g.:
-\fB\-\-step\fR 20061014                           # coseismic step  at 2006\-10\-14T00:00
-\fB\-\-step\fR 20110311 20120928T1733             # coseismic steps at 2011\-03\-11T00:00 and 2012\-09\-28T17:33
-.TP
-\fB\-\-exp\fR EXP [EXP ...], \fB\-\-exponential\fR EXP [EXP ...]
-exponential function(s) at YYYYMMDD with characteristic time(s) tau in decimal days (default: []). E.g.:
-\fB\-\-exp\fR  20181026 60                        # exp onset at 2018\-10\-26T00:00 with tau=60 days
-\fB\-\-exp\fR  20181026T1355 60 120               # exp onset at 2018\-10\-26T13:55 with tau=60 days overlaid by one with tau=120 days
-\fB\-\-exp\fR  20161231 80.5 \fB\-\-exp\fR 20190125 100   # 1st exp onset at 2016\-12\-31 with tau=80.5 days and
-.IP
-# 2nd exp onset at 2019\-01\-25 with tau=100 days
-.TP
-\fB\-\-log\fR LOG [LOG ...], \fB\-\-logarithmic\fR LOG [LOG ...]
-logarithmic function(s) at YYYYMMDD with characteristic time(s) tau in decimal days (default: []). E.g.:
-\fB\-\-log\fR  20181016 90.4                      # log onset at 2018\-10\-16T00:00 with tau=90.4 days
-\fB\-\-log\fR  20181016T1733 90.4 240             # log onset at 2018\-10\-16T17:33 with tau=90.4 days overlaid by one with tau=240 days
-\fB\-\-log\fR  20161231 60 \fB\-\-log\fR 20190125 180.2   # 1st log onset at 2016\-12\-31 with tau=60 days and
-.IP
-# 2nd log onset at 2019\-01\-25 with tau=180.2 days
-.SS "Residual file:"
-.IP
-Save residual displacement time\-series to HDF5 file.
-.TP
-\fB\-\-save\-res\fR, \fB\-\-save_residual\fR
-Save the residual displacement time\-series to HDF5 file.
-.TP
-\fB\-\-res\-file\fR RES_FILE, \fB\-\-residual\-file\fR RES_FILE
-Output file name for the residual time\-series file (default: timeseriesResidual.h5).
-.SS "template options:"
-.IP
-## Estimate linear velocity and its standard deviation from time\-series
-## and from tropospheric delay file if exists.
-## reference: Fattahi and Amelung (2015, JGR)
-mintpy.velocity.excludeDate    = auto   #[exclude_date.txt / 20080520,20090817 / no], auto for exclude_date.txt
-mintpy.velocity.startDate      = auto   #[20070101 / no], auto for no
-mintpy.velocity.endDate        = auto   #[20101230 / no], auto for no
-.IP
-## Bootstrapping
-## reference: Efron and Tibshirani (1986, Stat. Sci.)
-mintpy.velocity.bootstrap      = auto   #[yes / no], auto for no, use bootstrap
-mintpy.velocity.bootstrapCount = auto   #[int>1], auto for 400, number of iterations for bootstrapping
-.SS "references:"
-.IP
-Fattahi, H., and F. Amelung (2015), InSAR bias and uncertainty due to the systematic and stochastic
-tropospheric delay, Journal of Geophysical Research: Solid Earth, 120(12), 8758\-8773, doi:10.1002/2015JB012419.
-.IP
-Efron, B., and R. Tibshirani (1986), Bootstrap methods for standard errors, confidence intervals,
-and other measures of statistical accuracy, Statistical science, 54\-75, doi:10.1214/ss/1177013815.
-.SS "example:"
-.IP
-timeseries2velocity.py timeseries_ERA5_demErr.h5
-timeseries2velocity.py timeseries_ERA5_demErr_ramp.h5  \fB\-t\fR KyushuT73F2980_2990AlosD.template
-timeseries2velocity.py timeseries.h5  \fB\-\-start\-date\fR 20080201  \fB\-\-end\-date\fR 20100508
-timeseries2velocity.py timeseries.h5  \fB\-\-exclude\fR exclude_date.txt
-timeseries2velocity.py LS\-PARAMS.h5
-timeseries2velocity.py NSBAS\-PARAMS.h5
-timeseries2velocity.py TS\-PARAMS.h5
-# bootstrapping for STD calculation
-timeseries2velocity.py timeseries_ERA5_demErr.h5 \fB\-\-bootstrap\fR
-.IP
-# complex time functions
-timeseries2velocity.py timeseries_ERA5_ramp_demErr.h5 \fB\-\-poly\fR 3 \fB\-\-period\fR 1 0.5 \fB\-\-step\fR 20170910
-timeseries2velocity.py timeseries_ERA5_demErr.h5      \fB\-\-poly\fR 1 \fB\-\-exp\fR 20170910 90
-timeseries2velocity.py timeseries_ERA5_demErr.h5      \fB\-\-poly\fR 1 \fB\-\-log\fR 20170910 60.4
-timeseries2velocity.py timeseries_ERA5_demErr.h5      \fB\-\-poly\fR 1 \fB\-\-log\fR 20170910 60.4 200 \fB\-\-log\fR 20171026 200.7
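The "Deformation Model" options above amount to assembling a design matrix of time functions and fitting it to each pixel by least squares. A minimal sketch of that idea for \-\-poly 1 \-\-periodic 1.0 \-\-step 20170910, with made-up dates and amplitudes (not MintPy's internal code):

    import numpy as np

    # decimal-year times and synthetic displacement for one pixel:
    # 2 cm/yr rate + annual cycle + a 1 cm step near 2017-09-10 (~2017.7)
    t = np.linspace(2015.0, 2020.0, 60)
    dis = (0.02 * (t - t[0]) + 0.005 * np.sin(2 * np.pi * t)
           + 0.01 * (t >= 2017.7) + 0.001 * np.random.randn(t.size))

    # design matrix for --poly 1 --periodic 1.0 --step 20170910
    G = np.column_stack([
        np.ones_like(t),              # offset
        t - t[0],                     # linear velocity
        np.cos(2 * np.pi * t),        # annual cosine
        np.sin(2 * np.pi * t),        # annual sine
        (t >= 2017.7).astype(float),  # coseismic step
    ])
    m = np.linalg.lstsq(G, dis, rcond=None)[0]
    print('velocity [m/yr]:', m[1])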
diff -pruN 1.3.3-2/debian/man/mintpy-timeseries_rms.1 1.4.0-1/debian/man/mintpy-timeseries_rms.1
--- 1.3.3-2/debian/man/mintpy-timeseries_rms.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-timeseries_rms.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,60 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-TIMESERIES_RMS "1" "May 2022" "mintpy-timeseries_rms v1.3.3" "User Commands"
-.SH NAME
-mintpy-timeseries_rms \- Calculate Root Mean Square (RMS) of deramped residual phase time\-series.
-.SH DESCRIPTION
-usage: timeseries_rms.py [\-h] [\-t TEMPLATE_FILE] [\-m MASKFILE] [\-r DERAMP]
-.TP
-[\-\-cutoff CUTOFF] [\-\-figsize WID LEN]
-[\-\-tick\-year\-num TICK_YEAR_NUM]
-timeseries_file
-.PP
-Calculate Root Mean Square (RMS) of deramped residual phase time\-series.
-.SS "positional arguments:"
-.TP
-timeseries_file
-Timeseries file
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-t\fR TEMPLATE_FILE, \fB\-\-template\fR TEMPLATE_FILE
-template file with options
-.TP
-\fB\-m\fR MASKFILE, \fB\-\-mask\fR MASKFILE
-mask file for estimation
-.TP
-\fB\-r\fR DERAMP, \fB\-\-ramp\fR DERAMP, \fB\-\-deramp\fR DERAMP
-ramp type to be removed for RMS calculation.
-Default \- quadratic; no \- do not remove ramp
-.TP
-\fB\-\-cutoff\fR CUTOFF
-M\-score used for outlier detection based on standardised residuals.
-Recommended range: [3, 4]; default is 3.
-.TP
-\fB\-\-figsize\fR WID LEN
-figure size in inches \- width and length
-.TP
-\fB\-\-tick\-year\-num\fR TICK_YEAR_NUM
-Year number per major tick
-.SS "template options:"
-.IP
-## Calculate the Root Mean Square (RMS) of residual phase time\-series for each acquisition
-## reference: Yunjun et al. (2019, section 4.9 and 5.4)
-## To get rid of long wavelength component in space, a ramp is removed for each acquisition
-## Set optimal reference date to date with min RMS
-## Set exclude dates (outliers) to dates with RMS > cutoff * median RMS (Median Absolute Deviation)
-mintpy.residualRMS.maskFile = auto  #[file name / no], auto for maskTempCoh.h5, mask for ramp estimation
-mintpy.residualRMS.deramp   = auto  #[quadratic / linear / no], auto for quadratic
-mintpy.residualRMS.cutoff   = auto  #[0.0\-inf], auto for 3
-.SS "example:"
-.IP
-timeseries_rms.py timeseriesResidual.h5
-timeseries_rms.py timeseriesResidual.h5  \fB\-\-template\fR smallbaselineApp.cfg
-timeseries_rms.py timeseriesResidual.h5  \fB\-m\fR maskTempCoh.h5  \fB\-\-cutoff\fR 3
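The template comment above describes picking the reference date as the minimum-RMS acquisition and excluding dates whose RMS exceeds a cutoff times a median-based spread. A sketch of that selection logic with synthetic numbers (the exact statistic in timeseries_rms.py may differ):

    import numpy as np

    # per-acquisition RMS of the deramped residual phase time-series
    rms = np.array([0.40, 0.50, 0.45, 2.10, 0.48, 0.52])
    cutoff = 3.0                       # --cutoff

    # reference date: acquisition with the minimum RMS
    ref_idx = int(np.argmin(rms))

    # outliers: RMS beyond cutoff times the median absolute deviation
    median = np.median(rms)
    mad = np.median(np.abs(rms - median))
    outliers = np.where(rms > median + cutoff * mad)[0]
    print(ref_idx, outliers)           # 0, [3]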
diff -pruN 1.3.3-2/debian/man/mintpy-tropo_gacos.1 1.4.0-1/debian/man/mintpy-tropo_gacos.1
--- 1.3.3-2/debian/man/mintpy-tropo_gacos.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-tropo_gacos.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,51 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-TROPO_GACOS "1" "May 2022" "mintpy-tropo_gacos v1.3.3" "User Commands"
-.SH NAME
-mintpy-tropo_gacos \- Tropospheric correction using GACOS (http://www.gacos.net) delays
-.SH DESCRIPTION
-usage: tropo_gacos.py [\-h] \fB\-f\fR DIS_FILE \fB\-g\fR GEOM_FILE [\-\-dir GACOS_DIR]
-.IP
-[\-o COR_DIS_FILE]
-.PP
-Tropospheric correction using GACOS (http://www.gacos.net) delays
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-f\fR DIS_FILE, \fB\-\-file\fR DIS_FILE
-timeseries HDF5 file, i.e. timeseries.h5
-.TP
-\fB\-g\fR GEOM_FILE, \fB\-\-geom\fR GEOM_FILE
-geometry file.
-.TP
-\fB\-\-dir\fR GACOS_DIR, \fB\-\-GACOS\-dir\fR GACOS_DIR
-directory to downloaded GACOS delays data (default: ./GACOS).
-.TP
-\fB\-o\fR COR_DIS_FILE
-Output file name for the troposphere\-corrected timeseries.
-.SS "references:"
-.IP
-Yu, C., Li, Z., Penna, N. T., & Crippa, P. (2018). Generic atmospheric correction model for Interferometric
-Synthetic Aperture Radar observations. Journal of Geophysical Research: Solid Earth, 123(10), 9202\-9222.
-.IP
-Yu, C., Li, Z., & Penna, N. T. (2018). Interferometric synthetic aperture radar atmospheric correction
-using a GPS\-based iterative tropospheric decomposition model. Remote Sensing of Environment, 204, 109\-121.
-.PP
-\fB\-\-dir\fR ./GACOS
-.IP
-20060624.ztd
-20060624.ztd.rsc
-20061225.ztd
-20061225.ztd.rsc
-\&...
-OR
-20060624.ztd.tif
-20061225.ztd.tif
-\&...
-.SS "example:"
-.IP
-tropo_gacos.py \fB\-f\fR timeseries.h5 \fB\-g\fR inputs/geometryRadar.h5 \fB\-\-dir\fR ./GACOS
-tropo_gacos.py \fB\-f\fR geo/geo_timeseries.h5 \fB\-g\fR geo/geo_geometryRadar.h5 \fB\-\-dir\fR ./GACOS
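Conceptually, the correction projects the GACOS zenith total delay (ZTD) into the radar line of sight and removes it from the displacement, with both terms tied to the same reference pixel. A minimal per-epoch sketch, with made-up array names and values:

    import numpy as np

    # GACOS zenith total delay interpolated to the SAR grid [m], and
    # the incidence angle from the geometry file [deg]
    ztd = np.full((100, 200), 2.4, dtype=np.float32)
    inc_angle = np.full((100, 200), 34.0, dtype=np.float32)

    # project the zenith delay into the radar line of sight (LOS)
    los_delay = ztd / np.cos(np.deg2rad(inc_angle))

    # remove the LOS delay from one displacement epoch, referencing
    # both terms to the same reference pixel (ry, rx)
    dis = np.random.rand(100, 200).astype(np.float32)
    ry, rx = 50, 100
    dis_cor = (dis - dis[ry, rx]) - (los_delay - los_delay[ry, rx])
    print(dis_cor.shape)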
diff -pruN 1.3.3-2/debian/man/mintpy-tropo_phase_elevation.1 1.4.0-1/debian/man/mintpy-tropo_phase_elevation.1
--- 1.3.3-2/debian/man/mintpy-tropo_phase_elevation.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-tropo_phase_elevation.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,51 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-TROPO_PHASE_ELEVATION "1" "May 2022" "mintpy-tropo_phase_elevation v1.3.3" "User Commands"
-.SH NAME
-mintpy-tropo_phase_elevation \- Correct Topo\-correlated Stratified tropospheric delay
-.SH DESCRIPTION
-usage: tropo_phase_elevation.py [\-h] \fB\-g\fR GEOM_FILE \fB\-m\fR MASK_FILE [\-t THRESHOLD]
-.TP
-[\-l NUM_MULTILOOK] [\-\-poly\-order {1,2,3}]
-[\-o OUTFILE]
-timeseries_file
-.PP
-Correct Topo\-correlated Stratified tropospheric delay
-.SS "positional arguments:"
-.TP
-timeseries_file
-time\-series file to be corrected
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-g\fR GEOM_FILE, \fB\-\-geometry\fR GEOM_FILE
-DEM file used for correlation calculation.
-.TP
-\fB\-m\fR MASK_FILE, \fB\-\-mask\fR MASK_FILE
-mask file for pixels used for correlation calculation
-.TP
-\fB\-t\fR THRESHOLD, \fB\-\-threshold\fR THRESHOLD
-correlation threshold to apply phase correction.
-if not set, all dates will be corrected.
-.TP
-\fB\-l\fR NUM_MULTILOOK, \fB\-\-looks\fR NUM_MULTILOOK
-number of looks applied to data for empirical estimation (default: 8).
-.TP
-\fB\-\-poly\-order\fR {1,2,3}, \fB\-p\fR {1,2,3}
-polynomial order of phase\-height correlation (default: 1).
-.TP
-\fB\-o\fR OUTFILE, \fB\-\-outfile\fR OUTFILE
-output corrected timeseries file name
-.SS "reference:"
-.IP
-Doin, M. P., C. Lasserre, G. Peltzer, O. Cavalie, and C. Doubre (2009), Corrections of stratified
-tropospheric delays in SAR interferometry: Validation with global atmospheric models, J App. Geophy.,
-69(1), 35\-50, doi:http://dx.doi.org/10.1016/j.jappgeo.2009.03.010.
-.SS "example:"
-.IP
-tropo_phase_elevation.py timeseries_demErr.h5      \fB\-g\fR inputs/geometryRadar.h5  \fB\-m\fR maskTempCoh.h5
-tropo_phase_elevation.py geo_timeseries_demErr.h5  \fB\-g\fR geo_geometryRadar.h5     \fB\-m\fR geo_maskTempCoh.h5
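The underlying idea is an empirical fit of phase versus height over the masked reliable pixels, whose prediction is then removed as the stratified delay. A sketch for \-\-poly\-order 1 with synthetic numbers:

    import numpy as np

    # heights from the DEM and unwrapped phase of one acquisition over
    # the masked reliable pixels (synthetic: stratified delay + noise)
    hgt = np.random.uniform(0, 2000, 5000)
    phs = 1e-4 * hgt + 0.05 * np.random.randn(hgt.size)

    # fit phase = f(height) with a degree-1 polynomial (--poly-order 1)
    coeffs = np.polyfit(hgt, phs, deg=1)

    # remove the predicted stratified component
    phs_cor = phs - np.polyval(coeffs, hgt)
    print(np.corrcoef(hgt, phs)[0, 1], np.corrcoef(hgt, phs_cor)[0, 1])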
diff -pruN 1.3.3-2/debian/man/mintpy-tropo_pyaps3.1 1.4.0-1/debian/man/mintpy-tropo_pyaps3.1
--- 1.3.3-2/debian/man/mintpy-tropo_pyaps3.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-tropo_pyaps3.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,125 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-TROPO_PYAPS3 "1" "May 2022" "mintpy-tropo_pyaps3 v1.3.3" "User Commands"
-.SH NAME
-mintpy-tropo_pyaps3 \- Tropospheric correction using weather models
-.SH DESCRIPTION
-usage: tropo_pyaps3.py [\-h] [\-f DIS_FILE] [\-d [DATE_LIST ...]] [\-\-hour HOUR]
-.TP
-[\-o COR_DIS_FILE] [\-m {ERA5}] [\-\-delay {wet,dry,comb}]
-[\-w WEATHER_DIR] [\-g GEOM_FILE]
-[\-\-custom\-height CUSTOM_HEIGHT]
-[\-\-tropo\-file TROPO_FILE] [\-\-verbose]
-.PP
-Tropospheric correction using weather models
-.IP
-PyAPS is used to download and calculate the delay for each acquisition.
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-f\fR DIS_FILE, \fB\-\-file\fR DIS_FILE
-timeseries HDF5 file, i.e. timeseries.h5
-.TP
-\fB\-d\fR [DATE_LIST ...], \fB\-\-date\-list\fR [DATE_LIST ...]
-List of dates in YYYYMMDD or YYMMDD format. It can be:
-a) list of strings in YYYYMMDD or YYMMDD format OR
-b) a text file whose first column is a list of dates in YYYYMMDD or YYMMDD format OR
-c) a text file with Sentinel\-1 SAFE filenames
-e.g.: SAFE_files.txt:
-.TP
-\fI\,/data/SanAndreasSenDT42/SLC/S1B_IW_SLC__1SDV_20191117T140737_20191117T140804_018968_023C8C_82DC.zip\/\fP
-\fI\,/data/SanAndreasSenDT42/SLC/S1A_IW_SLC__1SDV_20191111T140819_20191111T140846_029864_036803_69CA.zip\/\fP
-\&...
-.TP
-\fB\-\-hour\fR HOUR
-time of data in HH, e.g. 12, 06
-.TP
-\fB\-o\fR COR_DIS_FILE
-Output file name for the troposphere\-corrected timeseries.
-.SS "delay calculation:"
-.TP
-\fB\-m\fR {ERA5}, \fB\-\-model\fR {ERA5}, \fB\-s\fR {ERA5}
-source of the atmospheric model (default: ERA5).
-.TP
-\fB\-\-delay\fR {wet,dry,comb}
-Delay type to calculate, comb contains both wet and dry delays (default: comb).
-.TP
-\fB\-w\fR WEATHER_DIR, \fB\-\-dir\fR WEATHER_DIR, \fB\-\-weather\-dir\fR WEATHER_DIR
-parent directory of downloaded weather data file (default: ${WEATHER_DIR}).
-e.g.: \fB\-\-weather\-dir\fR \fI\,~/data/aux\/\fP
-.IP
-atmosphere/
-    ERA5/
-        ERA5_N20_N40_E120_E140_20060624_14.grb
-        ERA5_N20_N40_E120_E140_20060924_14.grb
-        \&...
-    MERRA/
-        merra\-20110126\-06.nc4
-        merra\-20110313\-06.nc4
-        \&...
-.TP
-\fB\-g\fR GEOM_FILE, \fB\-\-geomtry\fR GEOM_FILE
-geometry file including height, incidenceAngle and/or latitude and longitude
-.TP
-\fB\-\-custom\-height\fR CUSTOM_HEIGHT
-[for testing] specify a custom height value for delay calculation.
-.TP
-\fB\-\-tropo\-file\fR TROPO_FILE
-tropospheric delay file name
-.TP
-\fB\-\-verbose\fR
-Verbose message.
-.SS "reference:"
-.IP
-Jolivet, R., R. Grandin, C. Lasserre, M.\-P. Doin and G. Peltzer (2011), Systematic InSAR tropospheric
-phase delay corrections from global meteorological reanalysis data, Geophys. Res. Lett., 38, L17311,
-doi:10.1029/2011GL048757
-.IP
-Jolivet, R., P. S. Agram, N. Y. Lin, M. Simons, M. P. Doin, G. Peltzer, and Z. Li (2014), Improving
-InSAR geodesy using global atmospheric models, Journal of Geophysical Research: Solid Earth, 119(3),
-2324\-2341, doi:10.1002/2013JB010588.
-.IP
-# ERA5
-Hersbach, H., Bell, B., Berrisford, P., Hirahara, S., Horányi, A., Muñoz\-Sabater, J., et al. (2020).
-The ERA5 global reanalysis. Quarterly Journal of the Royal Meteorological Society, 146(730), 1999\-2049.
-https://doi.org/10.1002/qj.3803
-.SS "Global Atmospheric Models:"
-.IP
-re\-analysis_dataset       coverage   temp_resolution   spatial_resolution   latency             assimilation
-ERA5(T)  (ECMWF)          global     hourly            0.25 deg (~31 km)    3 months (5 days)   4D\-Var
-ERA\-Int  (ECMWF)          global     6\-hourly          0.75 deg (~79 km)    2 months            4D\-Var
-MERRA(2) (NASA Goddard)   global     6\-hourly          0.5*0.625 (~50 km)   2\-3 weeks           3D\-Var
-NARR     (NOAA)           working from Jan 1979 to Oct 2014
-.SS "Notes for data access:"
-.IP
-For MERRA2, you need an Earthdata account, and pre\-authorize the "NASA GESDISC DATA ARCHIVE" application
-.IP
-following https://disc.gsfc.nasa.gov/earthdata\-login.
-.IP
-For ERA5 from CDS, you need to agree to the Terms of Use of each dataset that you intend to download.
-.SS "example:"
-.IP
-# download datasets, calculate tropospheric delays and correct time\-series file.
-tropo_pyaps3.py \fB\-f\fR timeseries.h5 \fB\-g\fR inputs/geometryRadar.h5
-tropo_pyaps3.py \fB\-f\fR filt_fine.unw \fB\-g\fR ../../../mintpy/inputs/geometryRadar.h5
-.IP
-# download datasets, calculate tropospheric delays
-tropo_pyaps3.py \fB\-d\fR date.list         \fB\-\-hour\fR 12 \fB\-m\fR ERA5  \fB\-g\fR inputs/geometryGeo.h5
-tropo_pyaps3.py \fB\-d\fR 20151002 20151003 \fB\-\-hour\fR 12 \fB\-m\fR MERRA \fB\-g\fR inputs/geometryRadar.h5
-.IP
-# download datasets (covering the whole world)
-tropo_pyaps3.py \fB\-d\fR date.list \fB\-\-hour\fR 12
-tropo_pyaps3.py \fB\-d\fR SAFE_files.txt
-# download datasets (covering the area of interest)
-tropo_pyaps3.py \fB\-d\fR SAFE_files.txt \fB\-g\fR inputs/geometryRadar.h5
diff -pruN 1.3.3-2/debian/man/mintpy-tsview.1 1.4.0-1/debian/man/mintpy-tsview.1
--- 1.3.3-2/debian/man/mintpy-tsview.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-tsview.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,508 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-TSVIEW "1" "May 2022" "mintpy-tsview v1.3.3" "User Commands"
-.SH NAME
-mintpy-tsview \- Interactive time\-series viewer
-.SH DESCRIPTION
-usage: tsview.py [\-h] [\-\-label [FILE_LABEL ...]] [\-\-ylim YMIN YMAX]
-.IP
-[\-\-tick\-right] [\-l LOOKUP_FILE] [\-n NUM] [\-\-error ERROR_FILE]
-[\-\-start\-date START_DATE] [\-\-end\-date END_DATE]
-[\-\-exclude [EX_DATE_LIST ...]] [\-\-zf] [\-\-off OFFSET]
-[\-\-noverbose] [\-\-nomodel] [\-\-plot\-model\-conf\-int]
-[\-\-poly POLYNOMIAL] [\-\-periodic PERIODIC [PERIODIC ...]]
-[\-\-step STEP [STEP ...]] [\-\-exp EXP [EXP ...]]
-[\-\-log LOG [LOG ...]] [\-\-yx Y X] [\-\-lalo LAT LON]
-[\-\-marker MARKER] [\-\-ms MARKER_SIZE] [\-\-lw LINEWIDTH]
-[\-\-ew EDGE_WIDTH] [\-v VMIN VMAX] [\-u UNIT]
-[\-\-nd NO_DATA_VALUE] [\-\-wrap] [\-\-wrap\-range MIN MAX]
-[\-\-flip\-lr] [\-\-flip\-ud] [\-\-noflip] [\-\-nmli NUM]
-[\-\-nomultilook] [\-\-alpha TRANSPARENCY] [\-d DEM_FILE]
-[\-\-mask\-dem] [\-\-dem\-noshade] [\-\-dem\-nocontour]
-[\-\-contour\-smooth DEM_CONTOUR_SMOOTH] [\-\-contour\-step NUM]
-[\-\-contour\-linewidth NUM] [\-\-shade\-az DEG] [\-\-shade\-alt DEG]
-[\-\-shade\-min MIN] [\-\-shade\-max MAX] [\-\-shade\-exag SHADE_EXAG]
-[\-\-fontsize FONT_SIZE] [\-\-fontcolor FONT_COLOR]
-[\-\-nowhitespace] [\-\-noaxis] [\-\-notick] [\-c COLORMAP]
-[\-\-cm\-lut NUM] [\-\-cm\-vlist CMAP_VLIST CMAP_VLIST CMAP_VLIST]
-[\-\-nocbar] [\-\-cbar\-nbins NUM]
-[\-\-cbar\-ext {None,both,neither,max,min}]
-[\-\-cbar\-label CBAR_LABEL] [\-\-cbar\-loc CBAR_LOC]
-[\-\-cbar\-size CBAR_SIZE] [\-\-notitle] [\-\-title\-in]
-[\-\-figtitle FIG_TITLE] [\-\-title4sen] [\-\-figsize WID LEN]
-[\-\-dpi DPI]
-[\-\-figext {.emf,.eps,.pdf,.png,.ps,.raw,.rgba,.svg,.svgz}]
-[\-\-fignum NUM] [\-\-nrows NUM] [\-\-ncols NUM]
-[\-\-wspace FIG_WID_SPACE] [\-\-hspace FIG_HEI_SPACE]
-[\-\-no\-tight\-layout] [\-\-coord {radar,geo}] [\-\-animation]
-[\-\-show\-gps] [\-\-mask\-gps] [\-\-gps\-label]
-[\-\-gps\-ms GPS_MARKER_SIZE]
-[\-\-gps\-comp {up2los,vert,enu2los,hz2los,horz}] [\-\-gps\-redo]
-[\-\-ref\-gps REF_GPS_SITE] [\-\-ex\-gps [EX_GPS_SITES ...]]
-[\-\-gps\-start\-date YYYYMMDD] [\-\-gps\-end\-date YYYYMMDD]
-[\-\-horz\-az HORZ_AZ_ANGLE] [\-m FILE] [\-\-mask\-vmin MASK_VMIN]
-[\-\-mask\-vmax MASK_VMAX] [\-\-zm] [\-\-coastline {10m,110m,50m}]
-[\-\-lalo\-label] [\-\-lalo\-step DEG] [\-\-lalo\-max\-num NUM]
-[\-\-lalo\-loc left right top bottom] [\-\-scalebar LEN X Y]
-[\-\-noscalebar] [\-\-scalebar\-pad SCALEBAR_PAD]
-[\-\-ram MAXMEMORY] [\-\-ref\-date DATE] [\-\-ref\-lalo LAT LON]
-[\-\-ref\-yx Y X] [\-\-noreference] [\-\-ref\-marker REF_MARKER]
-[\-\-ref\-size NUM] [\-o [OUTFILE ...]] [\-\-save] [\-\-nodisplay]
-[\-\-update] [\-\-sub\-x XMIN XMAX] [\-\-sub\-y YMIN YMAX]
-[\-\-sub\-lat LATMIN LATMAX] [\-\-sub\-lon LONMIN LONMAX]
-file [file ...]
-.PP
-Interactive time\-series viewer
-.SS "positional arguments:"
-.TP
-file
-time\-series file to display
-i.e.: timeseries_ERA5_ramp_demErr.h5 (MintPy)
-LS\-PARAMS.h5 (GIAnT)
-S1_IW12_128_0593_0597_20141213_20180619.he5 (HDF\-EOS5)
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-\-label\fR [FILE_LABEL ...]
-labels to display for multiple input files
-.TP
-\fB\-\-ylim\fR YMIN YMAX
-Y limits for point plotting.
-.TP
-\fB\-\-tick\-right\fR
-set tick and tick label to the right
-.TP
-\fB\-l\fR LOOKUP_FILE, \fB\-\-lookup\fR LOOKUP_FILE
-lookup table file
-.TP
-\fB\-n\fR NUM
-Epoch/slice number for initial display.
-.TP
-\fB\-\-error\fR ERROR_FILE
-txt file with error for each date.
-.TP
-\fB\-\-start\-date\fR START_DATE
-start date of displacement to display
-.TP
-\fB\-\-end\-date\fR END_DATE
-end date of displacement to display
-.TP
-\fB\-\-exclude\fR [EX_DATE_LIST ...], \fB\-\-ex\fR [EX_DATE_LIST ...]
-Exclude date shown as gray.
-.TP
-\fB\-\-zf\fR, \fB\-\-zero\-first\fR
-Set displacement at first acquisition to zero.
-.TP
-\fB\-\-off\fR OFFSET, \fB\-\-offset\fR OFFSET
-Offset for each timeseries file.
-.TP
-\fB\-\-noverbose\fR
-Disable the verbose message printing.
-.TP
-\fB\-\-nomodel\fR, \fB\-\-nofit\fR
-Do not plot the prediction of the time function (deformation model) fitting.
-.TP
-\fB\-\-plot\-model\-conf\-int\fR, \fB\-\-plot\-fit\-conf\-int\fR
-Plot the time function prediction confidence intervals.
-[!\-\- Preliminary feature alert! \fB\-\-\fR!]
-[!\-\- This feature is NOT thoroughly checked. Read the code before use. Interpret at your own risk! \fB\-\-\fR!]
-.TP
-\fB\-\-ram\fR MAXMEMORY, \fB\-\-memory\fR MAXMEMORY
-Max amount of memory in GB to use (default: 4.0).
-Adjust according to your computer memory.
-.SS "Deformation Model:"
-.IP
-A suite of time functions
-.TP
-\fB\-\-poly\fR POLYNOMIAL, \fB\-\-polynomial\fR POLYNOMIAL, \fB\-\-poly\-order\fR POLYNOMIAL
-a polynomial function with the input degree (default: 1). E.g.:
-\fB\-\-poly\fR 1                                  # linear
-\fB\-\-poly\fR 2                                  # quadratic
-\fB\-\-poly\fR 3                                  # cubic
-.TP
-\fB\-\-periodic\fR PERIODIC [PERIODIC ...], \fB\-\-period\fR PERIODIC [PERIODIC ...], \fB\-\-peri\fR PERIODIC [PERIODIC ...]
-periodic function(s) with period in decimal years (default: []). E.g.:
-\fB\-\-periodic\fR 1.0                            # an annual cycle
-\fB\-\-periodic\fR 1.0 0.5                        # an annual cycle plus a semi\-annual cycle
-.TP
-\fB\-\-step\fR STEP [STEP ...]
-step function(s) at YYYYMMDD (default: []). E.g.:
-\fB\-\-step\fR 20061014                           # coseismic step  at 2006\-10\-14T00:00
-\fB\-\-step\fR 20110311 20120928T1733             # coseismic steps at 2011\-03\-11T00:00 and 2012\-09\-28T17:33
-.TP
-\fB\-\-exp\fR EXP [EXP ...], \fB\-\-exponential\fR EXP [EXP ...]
-exponential function(s) at YYYYMMDD with characteristic time(s) tau in decimal days (default: []). E.g.:
-\fB\-\-exp\fR  20181026 60                        # exp onset at 2018\-10\-26T00:00 with tau=60 days
-\fB\-\-exp\fR  20181026T1355 60 120               # exp onset at 2018\-10\-26T13:55 with tau=60 days overlaid by one with tau=120 days
-\fB\-\-exp\fR  20161231 80.5 \fB\-\-exp\fR 20190125 100   # 1st exp onset at 2016\-12\-31 with tau=80.5 days and
-.IP
-# 2nd exp onset at 2019\-01\-25 with tau=100 days
-.TP
-\fB\-\-log\fR LOG [LOG ...], \fB\-\-logarithmic\fR LOG [LOG ...]
-logarithmic function(s) at YYYYMMDD with characteristic time(s) tau in decimal days (default: []). E.g.:
-\fB\-\-log\fR  20181016 90.4                      # log onset at 2018\-10\-16T00:00 with tau=90.4 days
-\fB\-\-log\fR  20181016T1733 90.4 240             # log onset at 2018\-10\-16T17:33 with tau=90.4 days overlaid by one with tau=240 days
-\fB\-\-log\fR  20161231 60 \fB\-\-log\fR 20190125 180.2   # 1st log onset at 2016\-12\-31 with tau=60 days and
-.IP
-# 2nd log onset at 2019\-01\-25 with tau=180.2 days
-.SS "Pixel Input:"
-.TP
-\fB\-\-yx\fR Y X
-initial pixel to plot in Y/X coord
-.TP
-\fB\-\-lalo\fR LAT LON
-initial pixel to plot in lat/lon coord
-.TP
-\fB\-\-marker\fR MARKER
-marker style (default: o).
-.TP
-\fB\-\-ms\fR MARKER_SIZE, \fB\-\-markersize\fR MARKER_SIZE
-marker size (default: 6.0).
-.TP
-\fB\-\-lw\fR LINEWIDTH, \fB\-\-linewidth\fR LINEWIDTH
-line width (default: 0).
-.TP
-\fB\-\-ew\fR EDGE_WIDTH, \fB\-\-edgewidth\fR EDGE_WIDTH
-Edge width for the error bar (default: 1.0)
-.SS "Data Display Options:"
-.IP
-Options to adjust the dataset display
-.TP
-\fB\-v\fR VMIN VMAX, \fB\-\-vlim\fR VMIN VMAX
-Display limits for matrix plotting.
-.TP
-\fB\-u\fR UNIT, \fB\-\-unit\fR UNIT
-unit for display; takes priority over \fB\-\-wrap\fR.
-.TP
-\fB\-\-nd\fR NO_DATA_VALUE, \fB\-\-no\-data\-val\fR NO_DATA_VALUE, \fB\-\-no\-data\-value\fR NO_DATA_VALUE
-Specify the no\-data\-value to be ignored and masked.
-.TP
-\fB\-\-wrap\fR
-re\-wrap data to display data in fringes.
-.TP
-\fB\-\-wrap\-range\fR MIN MAX
-range of one cycle after wrapping (default: [\-3.141592653589793, 3.141592653589793]).
-.TP
-\fB\-\-flip\-lr\fR
-flip left\-right
-.TP
-\fB\-\-flip\-ud\fR
-flip up\-down
-.TP
-\fB\-\-noflip\fR
-turn off auto flip for radar coordinate file
-.TP
-\fB\-\-nmli\fR NUM, \fB\-\-num\-multilook\fR NUM, \fB\-\-multilook\-num\fR NUM
-multilook data in X and Y direction with a factor for display (default: 1).
-.TP
-\fB\-\-nomultilook\fR, \fB\-\-no\-multilook\fR
-do not multilook, for high quality display.
-If multilook is True and multilook_num=1, multilook_num will be estimated automatically.
-Useful when displaying big datasets.
-.TP
-\fB\-\-alpha\fR TRANSPARENCY
-Data transparency.
-0.0 \- fully transparent, 1.0 \- no transparency.
-.SS "DEM:"
-.IP
-display topography in the background
-.TP
-\fB\-d\fR DEM_FILE, \fB\-\-dem\fR DEM_FILE
-DEM file to show topography as background
-.TP
-\fB\-\-mask\-dem\fR
-Mask out DEM pixels not coincident with valid data pixels
-.TP
-\fB\-\-dem\-noshade\fR
-do not show DEM shaded relief
-.TP
-\fB\-\-dem\-nocontour\fR
-do not show DEM contour lines
-.TP
-\fB\-\-contour\-smooth\fR DEM_CONTOUR_SMOOTH
-Background topography contour smooth factor \- sigma of Gaussian filter.
-Set to 0.0 for no smoothing; (default: 3.0).
-.TP
-\fB\-\-contour\-step\fR NUM
-Background topography contour step in meters (default: 200.0).
-.TP
-\fB\-\-contour\-linewidth\fR NUM
-Background topography contour linewidth (default: 0.5).
-.TP
-\fB\-\-shade\-az\fR DEG
-The azimuth (0\-360, degrees clockwise from North) of the light source (default: 315.0).
-.TP
-\fB\-\-shade\-alt\fR DEG
-The altitude (0\-90, degrees up from horizontal) of the light source (default: 45.0).
-.TP
-\fB\-\-shade\-min\fR MIN
-The min height in m of colormap of shaded relief topography (default: \fB\-4000\fR.0).
-.TP
-\fB\-\-shade\-max\fR MAX
-The max height of colormap of shaded relief topography (default: max(DEM)+2000).
-.TP
-\fB\-\-shade\-exag\fR SHADE_EXAG
-Vertical exaggeration ratio (default: 0.5).
-.SS "Figure:"
-.IP
-Figure settings for display
-.TP
-\fB\-\-fontsize\fR FONT_SIZE
-font size
-.TP
-\fB\-\-fontcolor\fR FONT_COLOR
-font color (default: k).
-.TP
-\fB\-\-nowhitespace\fR
-do not display white space
-.TP
-\fB\-\-noaxis\fR
-do not display axis
-.TP
-\fB\-\-notick\fR
-do not display tick in x/y axis
-.TP
-\fB\-c\fR COLORMAP, \fB\-\-colormap\fR COLORMAP
-colormap used for display, i.e. jet, cmy, RdBu, hsv, jet_r, temperature, viridis, etc.
-More at https://mintpy.readthedocs.io/en/latest/api/colormaps/
-.TP
-\fB\-\-cm\-lut\fR NUM, \fB\-\-cmap\-lut\fR NUM
-number of increment of colormap lookup table (default: 256).
-.TP
-\fB\-\-cm\-vlist\fR CMAP_VLIST CMAP_VLIST CMAP_VLIST, \fB\-\-cmap\-vlist\fR CMAP_VLIST CMAP_VLIST CMAP_VLIST
-list of 3 float numbers, for truncated colormap only (default: [0.0, 0.7, 1.0]).
-.TP
-\fB\-\-nocbar\fR, \fB\-\-nocolorbar\fR
-do not display colorbar
-.TP
-\fB\-\-cbar\-nbins\fR NUM
-number of bins for colorbar.
-.TP
-\fB\-\-cbar\-ext\fR {None,both,neither,max,min}
-Extend setting of colorbar; based on data stat by default.
-.TP
-\fB\-\-cbar\-label\fR CBAR_LABEL
-colorbar label
-.TP
-\fB\-\-cbar\-loc\fR CBAR_LOC
-colorbar location for single plot (default: right).
-.TP
-\fB\-\-cbar\-size\fR CBAR_SIZE
-colorbar size and pad (default: 2%).
-.TP
-\fB\-\-notitle\fR
-do not display title
-.TP
-\fB\-\-title\-in\fR
-draw title in/out of axes
-.TP
-\fB\-\-figtitle\fR FIG_TITLE
-Title shown in the figure.
-.TP
-\fB\-\-title4sen\fR, \fB\-\-title4sentinel1\fR
-display Sentinel\-1 A/B and IPF info in title.
-.TP
-\fB\-\-figsize\fR WID LEN
-figure size in inches \- width and length
-.TP
-\fB\-\-dpi\fR DPI
-DPI \- dot per inch \- for display/write (default: 300).
-.TP
-\fB\-\-figext\fR {.emf,.eps,.pdf,.png,.ps,.raw,.rgba,.svg,.svgz}
-File extension for figure output file (default: .png).
-.TP
-\fB\-\-fignum\fR NUM
-number of figure windows
-.TP
-\fB\-\-nrows\fR NUM
-subplot number in row
-.TP
-\fB\-\-ncols\fR NUM
-subplot number in column
-.TP
-\fB\-\-wspace\fR FIG_WID_SPACE
-width space between subplots in inches
-.TP
-\fB\-\-hspace\fR FIG_HEI_SPACE
-height space between subplots in inches
-.TP
-\fB\-\-no\-tight\-layout\fR
-disable automatic tight layout for multiple subplots
-.TP
-\fB\-\-coord\fR {radar,geo}
-Display in radar/geo coordination system (for geocoded file only; default: geo).
-.TP
-\fB\-\-animation\fR
-enable animation mode
-.SS "GPS:"
-.IP
-GPS data to display
-.TP
-\fB\-\-show\-gps\fR
-Show UNR GPS location within the coverage.
-.TP
-\fB\-\-mask\-gps\fR
-Mask out GPS stations not coincident with valid data pixels
-.TP
-\fB\-\-gps\-label\fR
-Show GPS site name
-.TP
-\fB\-\-gps\-ms\fR GPS_MARKER_SIZE
-Plot GPS value as scatter in size of ms**2 (default: 6).
-.TP
-\fB\-\-gps\-comp\fR {up2los,vert,enu2los,hz2los,horz}
-Plot GPS in color indicating deformation velocity direction
-.TP
-\fB\-\-gps\-redo\fR
-Re\-calculate GPS observations in LOS direction, instead of reading from the existing CSV file.
-.TP
-\fB\-\-ref\-gps\fR REF_GPS_SITE
-Reference GPS site
-.TP
-\fB\-\-ex\-gps\fR [EX_GPS_SITES ...]
-Exclude GPS sites, require \fB\-\-gps\-comp\fR.
-.TP
-\fB\-\-gps\-start\-date\fR YYYYMMDD
-start date of GPS data, default is date of the 1st SAR acquisition
-.TP
-\fB\-\-gps\-end\-date\fR YYYYMMDD
-end date of GPS data, default is date of the last SAR acquisition
-.TP
-\fB\-\-horz\-az\fR HORZ_AZ_ANGLE, \fB\-\-hz\-az\fR HORZ_AZ_ANGLE
-Azimuth angle (anti\-clockwise from the north) of the horizontal movement in degrees
-E.g.: \fB\-90\fR. for east direction [default]
-       0.  for north direction
-.IP
-Set to the azimuth angle of the strike\-slip fault to show the fault\-parallel displacement.
-.SS "Mask:"
-.IP
-Mask file/options
-.TP
-\fB\-m\fR FILE, \fB\-\-mask\fR FILE
-mask file for display. "no" to turn OFF masking.
-.TP
-\fB\-\-mask\-vmin\fR MASK_VMIN
-hide pixels with mask value < vmin (default: None).
-.TP
-\fB\-\-mask\-vmax\fR MASK_VMAX
-hide pixels with mask value > vmax (default: None).
-.TP
-\fB\-\-zm\fR, \fB\-\-zero\-mask\fR
-mask pixels with zero value.
-.SS "Map:"
-.IP
-for one subplot in geo\-coordinates only
-.TP
-\fB\-\-coastline\fR {10m,110m,50m}
-Draw coastline with specified resolution (default: None).
-This will enable \fB\-\-lalo\-label\fR option.
-Link: https://scitools.org.uk/cartopy/docs/latest/matplotlib/geoaxes.html#cartopy.mpl.geoaxes.GeoAxes.coastlines
-.TP
-\fB\-\-lalo\-label\fR
-Show N, S, E, W tick label for plot in geo\-coordinate.
-Useful for final figure output.
-.TP
-\fB\-\-lalo\-step\fR DEG
-Lat/lon step for lalo\-label option.
-.TP
-\fB\-\-lalo\-max\-num\fR NUM
-Maximum number of lalo tick label (default: 3).
-.TP
-\fB\-\-lalo\-loc\fR left right top bottom
-Draw lalo label in [left, right, top, bottom] (default: [1, 0, 0, 1]).
-.TP
-\fB\-\-scalebar\fR LEN X Y
-scale bar distance and location in ratio (default: [0.2, 0.2, 0.1]).
-distance in ratio of total width
-location in X/Y in ratio with respect to the lower left corner
-.TP
-\fB\-\-scalebar\fR 0.2 0.2 0.1
-#for lower left  corner
-.TP
-\fB\-\-scalebar\fR 0.2 0.2 0.8
-#for upper left  corner
-.TP
-\fB\-\-scalebar\fR 0.2 0.8 0.1
-#for lower right corner
-.TP
-\fB\-\-scalebar\fR 0.2 0.8 0.8
-#for upper right corner
-.TP
-\fB\-\-noscalebar\fR, \fB\-\-nosbar\fR
-do not display scale bar.
-.TP
-\fB\-\-scalebar\-pad\fR SCALEBAR_PAD, \fB\-\-sbar\-pad\fR SCALEBAR_PAD
-scale bar label pad in ratio of scalebar width (default: 0.05).
-.SS "Reference:"
-.IP
-Show / Modify reference in time and space for display
-.TP
-\fB\-\-ref\-date\fR DATE
-Change reference date for display
-.TP
-\fB\-\-ref\-lalo\fR LAT LON
-Change reference point LAT LON for display
-.TP
-\fB\-\-ref\-yx\fR Y X
-Change reference point Y X for display
-.TP
-\fB\-\-noreference\fR
-do not show reference point
-.TP
-\fB\-\-ref\-marker\fR REF_MARKER
-marker of reference pixel (default: ks).
-.TP
-\fB\-\-ref\-size\fR NUM
-marker size of reference point (default: 6).
-.SS "Save/Output:"
-.IP
-Save figure and write to file(s)
-.TP
-\fB\-o\fR [OUTFILE ...], \fB\-\-outfile\fR [OUTFILE ...]
-save the figure with assigned filename.
-By default, it's calculated based on the input file name.
-.TP
-\fB\-\-save\fR
-save the figure
-.TP
-\fB\-\-nodisplay\fR
-save and do not display the figure
-.TP
-\fB\-\-update\fR
-enable update mode for save figure: skip running if
-1) output file already exists AND
-2) output file is newer than input file.
-.SS "Subset:"
-.IP
-Display dataset in subset range
-.TP
-\fB\-\-sub\-x\fR XMIN XMAX, \fB\-\-subx\fR XMIN XMAX, \fB\-\-subset\-x\fR XMIN XMAX
-subset display in x/cross\-track/range direction
-.TP
-\fB\-\-sub\-y\fR YMIN YMAX, \fB\-\-suby\fR YMIN YMAX, \fB\-\-subset\-y\fR YMIN YMAX
-subset display in y/along\-track/azimuth direction
-.TP
-\fB\-\-sub\-lat\fR LATMIN LATMAX, \fB\-\-sublat\fR LATMIN LATMAX, \fB\-\-subset\-lat\fR LATMIN LATMAX
-subset display in latitude
-.TP
-\fB\-\-sub\-lon\fR LONMIN LONMAX, \fB\-\-sublon\fR LONMIN LONMAX, \fB\-\-subset\-lon\fR LONMIN LONMAX
-subset display in longitude
-.SS "example:"
-.IP
-tsview.py timeseries.h5
-tsview.py timeseries.h5  \fB\-\-wrap\fR
-tsview.py timeseries.h5  \fB\-\-yx\fR 300 400 \fB\-\-zero\-first\fR  \fB\-\-nodisplay\fR
-tsview.py geo_timeseries.h5  \fB\-\-lalo\fR 33.250 131.665  \fB\-\-nodisplay\fR
-tsview.py slcStack.h5 \fB\-u\fR dB \fB\-v\fR 20 60 \fB\-c\fR gray
-.IP
-# press left / right key to slide images
-.IP
-# multiple time\-series files
-tsview.py timeseries_ERA5_ramp_demErr.h5 timeseries_ERA5_ramp.h5 timeseries_ERA5.h5 timeseries.h5 \fB\-\-off\fR 5
-tsview.py timeseries_ERA5_ramp_demErr.h5 ../GIANT/Stack/LS\-PARAMS.h5 \fB\-\-off\fR 5 \fB\-\-label\fR mintpy giant
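The \-\-wrap/\-\-wrap\-range options fold the data back into one phase cycle for fringe display. A sketch of that folding as a generic modulo re-wrap, assuming the default [\-pi, pi) range (not tsview's internal code):

    import numpy as np

    def rewrap(data, wrap_range=(-np.pi, np.pi)):
        # fold values into [vmin, vmax), the behavior suggested by
        # --wrap with the default --wrap-range
        vmin, vmax = wrap_range
        return (data - vmin) % (vmax - vmin) + vmin

    x = np.array([-10.0, -2.0, 0.0, 4.0, 10.0])
    print(rewrap(x))   # every value folded into [-pi, pi)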
diff -pruN 1.3.3-2/debian/man/mintpy-unwrap_error_bridging.1 1.4.0-1/debian/man/mintpy-unwrap_error_bridging.1
--- 1.3.3-2/debian/man/mintpy-unwrap_error_bridging.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-unwrap_error_bridging.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,93 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-UNWRAP_ERROR_BRIDGING "1" "May 2022" "mintpy-unwrap_error_bridging v1.3.3" "User Commands"
-.SH NAME
-mintpy-unwrap_error_bridging \- Unwrapping Error Correction with Bridging
-.SH DESCRIPTION
-usage: unwrap_error_bridging.py [\-h] [\-r BRIDGEPTSRADIUS]
-.TP
-[\-\-ramp {linear,quadratic}]
-[\-\-water\-mask WATERMASKFILE]
-[\-m CONNCOMPMINAREA] [\-t TEMPLATE_FILE]
-[\-i DATASETNAMEIN] [\-o DATASETNAMEOUT]
-[\-\-update]
-ifgram_file
-.PP
-Unwrapping Error Correction with Bridging
-.IP
-by connecting reliable regions with MST bridges. This method assumes the phase differences
-between neighboring regions are less than pi rad in magnitude.
-.SS "positional arguments:"
-.TP
-ifgram_file
-interferograms file to be corrected
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-r\fR BRIDGEPTSRADIUS, \fB\-\-radius\fR BRIDGEPTSRADIUS
-radius of the window around each bridge end point, used to compute the
-median representative phase value (default: 50).
-.TP
-\fB\-\-ramp\fR {linear,quadratic}
-type of phase ramp to be removed before correction.
-.TP
-\fB\-\-water\-mask\fR WATERMASKFILE, \fB\-\-wm\fR WATERMASKFILE
-path of water mask file.
-.TP
-\fB\-m\fR CONNCOMPMINAREA, \fB\-\-min\-area\fR CONNCOMPMINAREA
-minimum region/area size of a single connComponent.
-.TP
-\fB\-t\fR TEMPLATE_FILE, \fB\-\-template\fR TEMPLATE_FILE
-template file with bonding point info, e.g.
-mintpy.unwrapError.yx = 283,1177,305,1247;350,2100,390,2200
-.TP
-\fB\-i\fR DATASETNAMEIN, \fB\-\-in\-dataset\fR DATASETNAMEIN
-name of dataset to be corrected, default: unwrapPhase
-.TP
-\fB\-o\fR DATASETNAMEOUT, \fB\-\-out\-dataset\fR DATASETNAMEOUT
-name of dataset to be written after correction, default: {}_bridging
-.TP
-\fB\-\-update\fR
-Enable update mode: if unwrapPhase_unwCor dataset exists, skip the correction.
-.SS "reference:"
-.IP
-Yunjun, Z., H. Fattahi, and F. Amelung (2019), Small baseline InSAR time series analysis:
-Unwrapping error correction and noise reduction, Computers & Geosciences, 133, 104331,
-doi:10.1016/j.cageo.2019.104331.
-.SS "template options:"
-.IP
-## connected components (mintpy.load.connCompFile) are required for this step.
-## SNAPHU (Chen & Zebker, 2001) is currently the only unwrapper that provides connected components as far as we know.
-## reference: Yunjun et al. (2019, section 3)
-## supported methods:
-## a. phase_closure          \- suitable for highly redundant network
-## b. bridging               \- suitable for regions separated by narrow decorrelated features, e.g. rivers, narrow water bodies
-## c. bridging+phase_closure \- recommended when there is a small percentage of errors left after bridging
-mintpy.unwrapError.method          = auto  #[bridging / phase_closure / bridging+phase_closure / no], auto for no
-mintpy.unwrapError.waterMaskFile   = auto  #[waterMask.h5 / no], auto for waterMask.h5 or no [if not found]
-mintpy.unwrapError.connCompMinArea = auto  #[1\-inf], auto for 2.5e3, discard regions smaller than the min size in pixels
-.IP
-## phase_closure options:
-## numSample \- a region\-based strategy is implemented to speed up the L1\-norm regularized least squares inversion.
-##     Instead of inverting every pixel for the integer ambiguity, a common connected component mask is generated;
-##     for each common conn. comp., numSample pixels are randomly selected for inversion, and the median value of the results
-##     is used for all pixels within this common conn. comp.
-mintpy.unwrapError.numSample       = auto  #[int>1], auto for 100, number of samples to invert for common conn. comp.
-.IP
-## bridging options:
-## ramp \- a phase ramp could be estimated based on the largest reliable region, removed from the entire interferogram
-##     before estimating the phase difference between reliable regions and added back after the correction.
-## bridgePtsRadius \- half size of the window used to calculate the median value of phase difference
-mintpy.unwrapError.ramp            = auto  #[linear / quadratic], auto for no; recommend linear for L\-band data
-mintpy.unwrapError.bridgePtsRadius = auto  #[1\-inf], auto for 50, half size of the window around end points
-.SS "Example:"
-.TP
-unwrap_error_bridging.py
-\&./inputs/ifgramStack.h5  \fB\-t\fR GalapagosSenDT128.template \fB\-\-update\fR
-.TP
-unwrap_error_bridging.py
-\&./inputs/ifgramStack.h5  \fB\-\-water\-mask\fR waterMask.h5
-.TP
-unwrap_error_bridging.py
-20180502_20180619.unw    \fB\-\-water\-mask\fR waterMask.h5
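Under the bridging assumption stated above (true phase differences between neighboring regions smaller than pi), the 2*pi-integer offset of a region can be recovered by rounding. A minimal sketch for a single bridge, with made-up values:

    import numpy as np

    # median unwrapped phase around the two end points of one bridge,
    # taken from a window of radius --radius pixels
    ref_value = 0.2     # reference (largest reliable) region
    tgt_value = 6.6     # neighboring region to correct

    # round the difference to the nearest integer number of cycles
    diff = tgt_value - ref_value
    num_cycle = np.rint(diff / (2 * np.pi))
    tgt_corrected = tgt_value - 2 * np.pi * num_cycle
    print(num_cycle, tgt_corrected)   # 1.0, ~0.32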
diff -pruN 1.3.3-2/debian/man/mintpy-unwrap_error_phase_closure.1 1.4.0-1/debian/man/mintpy-unwrap_error_phase_closure.1
--- 1.3.3-2/debian/man/mintpy-unwrap_error_phase_closure.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-unwrap_error_phase_closure.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,114 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-UNWRAP_ERROR_PHASE_CLOSURE "1" "May 2022" "mintpy-unwrap_error_phase_closure v1.3.3" "User Commands"
-.SH NAME
-mintpy-unwrap_error_phase_closure \- Unwrapping Error Correction based on Phase Closure
-.SH DESCRIPTION
-usage: unwrap_error_phase_closure.py [\-h] [\-c CC_MASK_FILE] [\-n NUMSAMPLE]
-.TP
-[\-m CONNCOMPMINAREA]
-[\-a {calculate,correct}]
-[\-i DATASETNAMEIN] [\-o DATASETNAMEOUT]
-[\-\-water\-mask WATERMASKFILE]
-[\-t TEMPLATE_FILE] [\-\-update]
-ifgram_file
-.PP
-Unwrapping Error Correction based on Phase Closure
-.IP
-by exploiting the conservativeness of the integer ambiguity of interferogram triplets.
-This method assumes:
-.IP
-a. network abundance: for an interferogram with unwrapping errors, there is
-at least one triangular connection to form a closed circle; more closed
-circles give a better constraint.
-.IP
-b. majority correctness: most interferograms have to be correct (no unwrapping
-error) to correct the wrong minority; if most interferograms have
-unwrapping errors, then the few correct ones will be turned wrong instead.
-.SS "positional arguments:"
-.TP
-ifgram_file
-interferograms file to be corrected
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-c\fR CC_MASK_FILE, \fB\-\-cc\-mask\fR CC_MASK_FILE
-common connected components file, required for \fB\-\-action\fR correct
-.TP
-\fB\-n\fR NUMSAMPLE, \fB\-\-num\-sample\fR NUMSAMPLE
-Number of randomly selected samples/pixels for each common connected component.
-.TP
-\fB\-m\fR CONNCOMPMINAREA, \fB\-\-min\-area\fR CONNCOMPMINAREA
-minimum region/area size of a single connComponent.
-.TP
-\fB\-a\fR {calculate,correct}, \fB\-\-action\fR {calculate,correct}
-action to take (default: correct):
-correct   \- correct phase unwrapping error
-calculate \- calculate the number of non\-zero closure phase
-.TP
-\fB\-i\fR DATASETNAMEIN, \fB\-\-in\-dataset\fR DATASETNAMEIN
-name of dataset to be corrected, default: unwrapPhase
-.TP
-\fB\-o\fR DATASETNAMEOUT, \fB\-\-out\-dataset\fR DATASETNAMEOUT
-name of dataset to be written after correction, default: {}_phaseClosure
-.TP
-\fB\-\-update\fR
-Enable update mode: if unwrapPhase_phaseClosure dataset exists, skip the correction.
-.SS "mask:"
-.TP
-\fB\-\-water\-mask\fR WATERMASKFILE, \fB\-\-wm\fR WATERMASKFILE
-path of water mask file.
-.TP
-\fB\-t\fR TEMPLATE_FILE, \fB\-\-template\fR TEMPLATE_FILE
-template file with options for setting.
-.SS "reference:"
-.IP
-Yunjun, Z., H. Fattahi, and F. Amelung (2019), Small baseline InSAR time series analysis:
-Unwrapping error correction and noise reduction, Computers & Geosciences, 133, 104331,
-doi:10.1016/j.cageo.2019.104331.
-.SS "template options:"
-.IP
-## A quick assessment of:
-##    possible ground deformation
-##    using the velocity from the traditional interferogram stacking
-##    reference: Zebker et al. (1997, JGR)
-## 2) distribution of phase unwrapping error
-##    from the number of interferogram triplets with non\-zero integer ambiguity of the closure phase
-##    reference: T_int in Yunjun et al. (2019, CAGEO). Related to section 3.2, equation (8\-9) and Fig. 3d\-e.
-.SS "template options:"
-.IP
-## connected components (mintpy.load.connCompFile) are required for this step.
-## SNAPHU (Chen & Zebker, 2001) is currently the only unwrapper that provides connected components as far as we know.
-## reference: Yunjun et al. (2019, section 3)
-## supported methods:
-## a. phase_closure          \- suitable for highly redundant network
-## b. bridging               \- suitable for regions separated by narrow decorrelated features, e.g. rivers, narrow water bodies
-## c. bridging+phase_closure \- recommended when there is a small percentage of errors left after bridging
-mintpy.unwrapError.method          = auto  #[bridging / phase_closure / bridging+phase_closure / no], auto for no
-mintpy.unwrapError.waterMaskFile   = auto  #[waterMask.h5 / no], auto for waterMask.h5 or no [if not found]
-mintpy.unwrapError.connCompMinArea = auto  #[1\-inf], auto for 2.5e3, discard regions smaller than the min size in pixels
-.IP
-## phase_closure options:
-## numSample \- a region\-based strategy is implemented to speed up the L1\-norm regularized least squares inversion.
-##     Instead of inverting every pixel for the integer ambiguity, a common connected component mask is generated;
-##     for each common conn. comp., numSample pixels are randomly selected for inversion, and the median value of the results
-##     is used for all pixels within this common conn. comp.
-mintpy.unwrapError.numSample       = auto  #[int>1], auto for 100, number of samples to invert for common conn. comp.
-.IP
-## bridging options:
-## ramp \- a phase ramp could be estimated based on the largest reliable region, removed from the entire interferogram
-##     before estimating the phase difference between reliable regions and added back after the correction.
-## bridgePtsRadius \- half size of the window used to calculate the median value of phase difference
-mintpy.unwrapError.ramp            = auto  #[linear / quadratic], auto for no; recommend linear for L\-band data
-mintpy.unwrapError.bridgePtsRadius = auto  #[1\-inf], auto for 50, half size of the window around end points
-.SS "example:"
-.IP
-# correct phase unwrapping error with phase closure
-unwrap_error_phase_closure.py  ./inputs/ifgramStack.h5  \fB\-\-cc\-mask\fR maskConnComp.h5  \fB\-t\fR smallbaselineApp.cfg   \fB\-\-update\fR
-unwrap_error_phase_closure.py  ./inputs/ifgramStack.h5  \fB\-\-cc\-mask\fR maskConnComp.h5  \fB\-\-water\-mask\fR waterMask.h5 \fB\-\-update\fR
-.IP
-# calculate the number of non\-zero closure phase
-unwrap_error_phase_closure.py  ./inputs/ifgramStack.h5  \fB\-\-action\fR calculate
-unwrap_error_phase_closure.py  ./inputs/ifgramStack.h5  \fB\-\-action\fR calculate  \fB\-\-water\-mask\fR waterMask.h5
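For a triplet of interferograms 1-2, 2-3 and 1-3, the closure phase of error-free unwrapped pixels is an integer multiple of 2*pi (zero here), so its rounded integer ambiguity flags unwrapping errors. A minimal sketch of the quantity behind \-\-action calculate, with synthetic arrays:

    import numpy as np

    # unwrapped phase of one interferogram triplet, per pixel
    unw12 = np.random.randn(100, 200)
    unw23 = np.random.randn(100, 200)
    unw13 = unw12 + unw23              # consistent triplet
    unw13[:10, :10] += 2 * np.pi       # inject an unwrapping error

    # closure phase and its integer ambiguity in cycles
    closure = unw12 + unw23 - unw13
    num_ambiguity = np.rint(closure / (2 * np.pi)).astype(int)
    print(np.count_nonzero(num_ambiguity))   # 100 flagged pixels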
diff -pruN 1.3.3-2/debian/man/mintpy-view.1 1.4.0-1/debian/man/mintpy-view.1
--- 1.3.3-2/debian/man/mintpy-view.1	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/man/mintpy-view.1	1970-01-01 00:00:00.000000000 +0000
@@ -1,501 +0,0 @@
-.\" DO NOT MODIFY THIS FILE!  It was generated by help2man 1.49.2.
-.TH MINTPY-VIEW "1" "May 2022" "mintpy-view v1.3.3" "User Commands"
-.SH NAME
-mintpy-view \- Plot InSAR Product in 2D
-.SH DESCRIPTION
-usage: view.py [\-h] [\-n [NUM ...]] [\-\-nosearch] [\-\-ex [Dset ...]]
-.IP
-[\-\-show\-kept] [\-\-plot\-setting DISP_SETTING_FILE] [\-\-noverbose]
-[\-\-math {square,deg2rad,rad2deg,inverse,sqrt,reverse}]
-[\-v VMIN VMAX] [\-u UNIT] [\-\-nd NO_DATA_VALUE] [\-\-wrap]
-[\-\-wrap\-range MIN MAX] [\-\-flip\-lr] [\-\-flip\-ud] [\-\-noflip]
-[\-\-nmli NUM] [\-\-nomultilook] [\-\-alpha TRANSPARENCY]
-[\-d DEM_FILE] [\-\-mask\-dem] [\-\-dem\-noshade] [\-\-dem\-nocontour]
-[\-\-contour\-smooth DEM_CONTOUR_SMOOTH] [\-\-contour\-step NUM]
-[\-\-contour\-linewidth NUM] [\-\-shade\-az DEG] [\-\-shade\-alt DEG]
-[\-\-shade\-min MIN] [\-\-shade\-max MAX] [\-\-shade\-exag SHADE_EXAG]
-[\-\-fontsize FONT_SIZE] [\-\-fontcolor FONT_COLOR]
-[\-\-nowhitespace] [\-\-noaxis] [\-\-notick] [\-c COLORMAP]
-[\-\-cm\-lut NUM] [\-\-cm\-vlist CMAP_VLIST CMAP_VLIST CMAP_VLIST]
-[\-\-nocbar] [\-\-cbar\-nbins NUM]
-[\-\-cbar\-ext {min,max,None,both,neither}]
-[\-\-cbar\-label CBAR_LABEL] [\-\-cbar\-loc CBAR_LOC]
-[\-\-cbar\-size CBAR_SIZE] [\-\-notitle] [\-\-title\-in]
-[\-\-figtitle FIG_TITLE] [\-\-title4sen] [\-\-figsize WID LEN]
-[\-\-dpi DPI]
-[\-\-figext {.emf,.eps,.pdf,.png,.ps,.raw,.rgba,.svg,.svgz}]
-[\-\-fignum NUM] [\-\-nrows NUM] [\-\-ncols NUM]
-[\-\-wspace FIG_WID_SPACE] [\-\-hspace FIG_HEI_SPACE]
-[\-\-no\-tight\-layout] [\-\-coord {radar,geo}] [\-\-animation]
-[\-\-show\-gps] [\-\-mask\-gps] [\-\-gps\-label]
-[\-\-gps\-ms GPS_MARKER_SIZE]
-[\-\-gps\-comp {horz,up2los,hz2los,enu2los,vert}] [\-\-gps\-redo]
-[\-\-ref\-gps REF_GPS_SITE] [\-\-ex\-gps [EX_GPS_SITES ...]]
-[\-\-gps\-start\-date YYYYMMDD] [\-\-gps\-end\-date YYYYMMDD]
-[\-\-horz\-az HORZ_AZ_ANGLE] [\-m FILE] [\-\-mask\-vmin MASK_VMIN]
-[\-\-mask\-vmax MASK_VMAX] [\-\-zm] [\-\-coastline {50m,10m,110m}]
-[\-\-lalo\-label] [\-\-lalo\-step DEG] [\-\-lalo\-max\-num NUM]
-[\-\-lalo\-loc left right top bottom] [\-\-scalebar LEN X Y]
-[\-\-noscalebar] [\-\-scalebar\-pad SCALEBAR_PAD] [\-\-ram MAXMEMORY]
-[\-\-pts\-marker PTS_MARKER] [\-\-pts\-ms PTS_MARKER_SIZE]
-[\-\-pts\-yx Y X | \fB\-\-pts\-lalo\fR LAT LON | \fB\-\-pts\-file\fR PTS_FILE]
-[\-\-ref\-date DATE] [\-\-ref\-lalo LAT LON] [\-\-ref\-yx Y X]
-[\-\-noreference] [\-\-ref\-marker REF_MARKER] [\-\-ref\-size NUM]
-[\-o [OUTFILE ...]] [\-\-save] [\-\-nodisplay] [\-\-update]
-[\-\-sub\-x XMIN XMAX] [\-\-sub\-y YMIN YMAX]
-[\-\-sub\-lat LATMIN LATMAX] [\-\-sub\-lon LONMIN LONMAX]
-file [dset ...]
-.PP
-Plot InSAR Product in 2D
-.SS "options:"
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-show this help message and exit
-.TP
-\fB\-\-show\-kept\fR, \fB\-\-show\-kept\-ifgram\fR
-display kept interferograms only, without dropped interferograms
-.TP
-\fB\-\-plot\-setting\fR DISP_SETTING_FILE
-Template file with plot setting.
-Plot Setting:
-.IP
-plot.name        = 'Yunjun et al., 2016, AGU, Fig 4f'
-plot.type        = LOS_VELOCITY
-plot.startDate   =
-plot.endDate     =
-plot.displayUnit = cm/yr
-plot.displayMin  = \-2
-plot.displayMax  = 2
-plot.colormap    = jet
-plot.subset.lalo = 33.05:33.15, 131.15:131.27
-plot.seed.lalo   = 33.0651, 131.2076
-.TP
-\fB\-\-noverbose\fR
-Disable the verbose message printing (default: True).
-.TP
-\fB\-\-math\fR {square,deg2rad,rad2deg,inverse,sqrt,reverse}
-Apply the math operation before displaying [for single subplot ONLY].
-E.g. plot the std. dev. of the variance file.
-.IP
-square  = x^2
-sqrt    = x^1/2
-reverse = x * \-1
-inverse = 1 / x
-.TP
-\fB\-\-ram\fR MAXMEMORY, \fB\-\-memory\fR MAXMEMORY
-Max amount of memory in GB to use (default: 4.0).
-Adjust according to your computer memory.
-.SS "Input File:"
-.IP
-File/Dataset to display
-.TP
-file
-file for display
-.TP
-dset
-optional \- dataset(s) to display (default: []).
-.TP
-\fB\-n\fR [NUM ...], \fB\-\-dset\-num\fR [NUM ...]
-optional \- order number of date/dataset(s) to display (default: []).
-.TP
-\fB\-\-nosearch\fR
-Disable glob search for input dset.
-.TP
-\fB\-\-ex\fR [Dset ...], \fB\-\-exclude\fR [Dset ...]
-dates will not be displayed (default: []).
-.SS "Data Display Options:"
-.IP
-Options to adjust the dataset display
-.TP
-\fB\-v\fR VMIN VMAX, \fB\-\-vlim\fR VMIN VMAX
-Display limits for matrix plotting.
-.TP
-\fB\-u\fR UNIT, \fB\-\-unit\fR UNIT
-unit for display; takes priority over \fB\-\-wrap\fR.
-.TP
-\fB\-\-nd\fR NO_DATA_VALUE, \fB\-\-no\-data\-val\fR NO_DATA_VALUE, \fB\-\-no\-data\-value\fR NO_DATA_VALUE
-Specify the no\-data\-value to be ignored and masked.
-.TP
-\fB\-\-wrap\fR
-re\-wrap data to display data in fringes.
-.TP
-\fB\-\-wrap\-range\fR MIN MAX
-range of one cycle after wrapping (default: [\-3.141592653589793, 3.141592653589793]).
-.TP
-\fB\-\-flip\-lr\fR
-flip left\-right
-.TP
-\fB\-\-flip\-ud\fR
-flip up\-down
-.TP
-\fB\-\-noflip\fR
-turn off auto flip for radar coordinate file
-.TP
-\fB\-\-nmli\fR NUM, \fB\-\-num\-multilook\fR NUM, \fB\-\-multilook\-num\fR NUM
-multilook data in X and Y direction with a factor for display (default: 1).
-.TP
-\fB\-\-nomultilook\fR, \fB\-\-no\-multilook\fR
-do not multilook, for high quality display.
-If multilook is True and multilook_num=1, multilook_num will be estimated automatically.
-Useful when displaying big datasets.
-.TP
-\fB\-\-alpha\fR TRANSPARENCY
-Data transparency.
-0.0 \- fully transparent, 1.0 \- no transparency.
-.SS "DEM:"
-.IP
-display topography in the background
-.TP
-\fB\-d\fR DEM_FILE, \fB\-\-dem\fR DEM_FILE
-DEM file to show topography as background
-.TP
-\fB\-\-mask\-dem\fR
-Mask out DEM pixels not coincident with valid data pixels
-.TP
-\fB\-\-dem\-noshade\fR
-do not show DEM shaded relief
-.TP
-\fB\-\-dem\-nocontour\fR
-do not show DEM contour lines
-.TP
-\fB\-\-contour\-smooth\fR DEM_CONTOUR_SMOOTH
-Background topography contour smooth factor \- sigma of Gaussian filter.
-Set to 0.0 for no smoothing; (default: 3.0).
-.TP
-\fB\-\-contour\-step\fR NUM
-Background topography contour step in meters (default: 200.0).
-.TP
-\fB\-\-contour\-linewidth\fR NUM
-Background topography contour linewidth (default: 0.5).
-.TP
-\fB\-\-shade\-az\fR DEG
-The azimuth (0\-360, degrees clockwise from North) of the light source (default: 315.0).
-.TP
-\fB\-\-shade\-alt\fR DEG
-The altitude (0\-90, degrees up from horizontal) of the light source (default: 45.0).
-.TP
-\fB\-\-shade\-min\fR MIN
-The min height in m of colormap of shaded relief topography (default: \fB\-4000\fR.0).
-.TP
-\fB\-\-shade\-max\fR MAX
-The max height of colormap of shaded relief topography (default: max(DEM)+2000).
-.TP
-\fB\-\-shade\-exag\fR SHADE_EXAG
-Vertical exaggeration ratio (default: 0.5).
-.SS "Figure:"
-.IP
-Figure settings for display
-.TP
-\fB\-\-fontsize\fR FONT_SIZE
-font size
-.TP
-\fB\-\-fontcolor\fR FONT_COLOR
-font color (default: k).
-.TP
-\fB\-\-nowhitespace\fR
-do not display white space
-.TP
-\fB\-\-noaxis\fR
-do not display axis
-.TP
-\fB\-\-notick\fR
-do not display tick in x/y axis
-.TP
-\fB\-c\fR COLORMAP, \fB\-\-colormap\fR COLORMAP
-colormap used for display, e.g. jet, cmy, RdBu, hsv, jet_r, temperature, viridis, etc.
-More at https://mintpy.readthedocs.io/en/latest/api/colormaps/
-.TP
-\fB\-\-cm\-lut\fR NUM, \fB\-\-cmap\-lut\fR NUM
-number of increments in the colormap lookup table (default: 256).
-.TP
-\fB\-\-cm\-vlist\fR CMAP_VLIST CMAP_VLIST CMAP_VLIST, \fB\-\-cmap\-vlist\fR CMAP_VLIST CMAP_VLIST CMAP_VLIST
-list of 3 float numbers, for truncated colormap only (default: [0.0, 0.7, 1.0]).
-.TP
-\fB\-\-nocbar\fR, \fB\-\-nocolorbar\fR
-do not display colorbar
-.TP
-\fB\-\-cbar\-nbins\fR NUM
-number of bins for colorbar.
-.TP
-\fB\-\-cbar\-ext\fR {min,max,None,both,neither}
-Extend setting of the colorbar; determined from the data statistics by default.
-.TP
-\fB\-\-cbar\-label\fR CBAR_LABEL
-colorbar label
-.TP
-\fB\-\-cbar\-loc\fR CBAR_LOC
-colorbar location for single plot (default: right).
-.TP
-\fB\-\-cbar\-size\fR CBAR_SIZE
-colorbar size and pad (default: 2%).
-.TP
-\fB\-\-notitle\fR
-do not display title
-.TP
-\fB\-\-title\-in\fR
-draw title in/out of axes
-.TP
-\fB\-\-figtitle\fR FIG_TITLE
-Title shown in the figure.
-.TP
-\fB\-\-title4sen\fR, \fB\-\-title4sentinel1\fR
-display Sentinel\-1 A/B and IPF info in title.
-.TP
-\fB\-\-figsize\fR WID LEN
-figure size in inches \- width and length
-.TP
-\fB\-\-dpi\fR DPI
-DPI \- dots per inch \- for display/write (default: 300).
-.TP
-\fB\-\-figext\fR {.emf,.eps,.pdf,.png,.ps,.raw,.rgba,.svg,.svgz}
-File extension for figure output file (default: .png).
-.TP
-\fB\-\-fignum\fR NUM
-number of figure windows
-.TP
-\fB\-\-nrows\fR NUM
-number of rows of subplots
-.TP
-\fB\-\-ncols\fR NUM
-number of columns of subplots
-.TP
-\fB\-\-wspace\fR FIG_WID_SPACE
-width space between subplots in inches
-.TP
-\fB\-\-hspace\fR FIG_HEI_SPACE
-height space between subplots in inches
-.TP
-\fB\-\-no\-tight\-layout\fR
-disable automatic tight layout for multiple subplots
-.TP
-\fB\-\-coord\fR {radar,geo}
-Display in radar/geo coordinate system (for geocoded files only; default: geo).
-.TP
-\fB\-\-animation\fR
-enable animation mode
-.SS "GPS:"
-.IP
-GPS data to display
-.TP
-\fB\-\-show\-gps\fR
-Show UNR GPS locations within the coverage.
-.TP
-\fB\-\-mask\-gps\fR
-Mask out GPS stations not coincident with valid data pixels
-.TP
-\fB\-\-gps\-label\fR
-Show GPS site name
-.TP
-\fB\-\-gps\-ms\fR GPS_MARKER_SIZE
-Plot GPS values as scatter points with a marker size of ms**2 (default: 6).
-.TP
-\fB\-\-gps\-comp\fR {horz,up2los,hz2los,enu2los,vert}
-Plot GPS in color indicating deformation velocity direction
-.TP
-\fB\-\-gps\-redo\fR
-Re\-calculate GPS observations in the LOS direction, instead of reading them from the existing CSV file.
-.TP
-\fB\-\-ref\-gps\fR REF_GPS_SITE
-Reference GPS site
-.TP
-\fB\-\-ex\-gps\fR [EX_GPS_SITES ...]
-Exclude GPS sites; requires \fB\-\-gps\-comp\fR.
-.TP
-\fB\-\-gps\-start\-date\fR YYYYMMDD
-start date of GPS data, default is date of the 1st SAR acquisition
-.TP
-\fB\-\-gps\-end\-date\fR YYYYMMDD
-end date of GPS data, default is date of the last SAR acquisition
-.TP
-\fB\-\-horz\-az\fR HORZ_AZ_ANGLE, \fB\-\-hz\-az\fR HORZ_AZ_ANGLE
-Azimuth angle (anti\-clockwise from the north) of the horizontal movement in degrees.
-E.g.: \fB\-90\fR for the east direction [default];
-0 for the north direction.
-.IP
-Set to the azimuth angle of the strike\-slip fault to show the fault\-parallel displacement.
-.SS "Mask:"
-.IP
-Mask file/options
-.TP
-\fB\-m\fR FILE, \fB\-\-mask\fR FILE
-mask file for display. "no" to turn OFF masking.
-.TP
-\fB\-\-mask\-vmin\fR MASK_VMIN
-hide pixels with mask value < vmin (default: None).
-.TP
-\fB\-\-mask\-vmax\fR MASK_VMAX
-hide pixels with mask value > vmax (default: None).
-.TP
-\fB\-\-zm\fR, \fB\-\-zero\-mask\fR
-mask pixels with zero value.
-.SS "Map:"
-.IP
-for one subplot in geo\-coordinates only
-.TP
-\fB\-\-coastline\fR {50m,10m,110m}
-Draw coastline with specified resolution (default: None).
-This will enable \fB\-\-lalo\-label\fR option.
-Link: https://scitools.org.uk/cartopy/docs/latest/matplotlib/geoaxes.html#cartopy.mpl.geoaxes.GeoAxes.coastlines
-.TP
-\fB\-\-lalo\-label\fR
-Show N, S, E, W tick label for plot in geo\-coordinate.
-Useful for final figure output.
-.TP
-\fB\-\-lalo\-step\fR DEG
-Lat/lon step for lalo\-label option.
-.TP
-\fB\-\-lalo\-max\-num\fR NUM
-Maximum number of lalo tick labels (default: 3).
-.TP
-\fB\-\-lalo\-loc\fR left right top bottom
-Draw lalo label in [left, right, top, bottom] (default: [1, 0, 0, 1]).
-.TP
-\fB\-\-scalebar\fR LEN X Y
-scale bar distance and location in ratio (default: [0.2, 0.2, 0.1]).
-distance in ratio of total width
-location in X/Y in ratio with respect to the lower left corner
-.TP
-\fB\-\-scalebar\fR 0.2 0.2 0.1
-#for lower left  corner
-.TP
-\fB\-\-scalebar\fR 0.2 0.2 0.8
-#for upper left  corner
-.TP
-\fB\-\-scalebar\fR 0.2 0.8 0.1
-#for lower right corner
-.TP
-\fB\-\-scalebar\fR 0.2 0.8 0.8
-#for upper right corner
-.TP
-\fB\-\-noscalebar\fR, \fB\-\-nosbar\fR
-do not display scale bar.
-.TP
-\fB\-\-scalebar\-pad\fR SCALEBAR_PAD, \fB\-\-sbar\-pad\fR SCALEBAR_PAD
-scale bar label pad in ratio of scalebar width (default: 0.05).
-.SS "Point:"
-.IP
-Plot points defined by y/x or lat/lon
-.TP
-\fB\-\-pts\-marker\fR PTS_MARKER
-Marker of points of interest (default: k^).
-.TP
-\fB\-\-pts\-ms\fR PTS_MARKER_SIZE
-Marker size for points of interest (default: 6.0).
-.TP
-\fB\-\-pts\-yx\fR Y X
-Point in Y/X
-.TP
-\fB\-\-pts\-lalo\fR LAT LON
-Point in Lat/Lon
-.TP
-\fB\-\-pts\-file\fR PTS_FILE
-Text file for point(s) in lat/lon column
-.SS "Reference:"
-.IP
-Show / Modify reference in time and space for display
-.TP
-\fB\-\-ref\-date\fR DATE
-Change reference date for display
-.TP
-\fB\-\-ref\-lalo\fR LAT LON
-Change reference point LAT LON for display
-.TP
-\fB\-\-ref\-yx\fR Y X
-Change reference point Y X for display
-.TP
-\fB\-\-noreference\fR
-do not show reference point
-.TP
-\fB\-\-ref\-marker\fR REF_MARKER
-marker of reference pixel (default: ks).
-.TP
-\fB\-\-ref\-size\fR NUM
-marker size of reference point (default: 6).
-.SS "Save/Output:"
-.IP
-Save figure and write to file(s)
-.TP
-\fB\-o\fR [OUTFILE ...], \fB\-\-outfile\fR [OUTFILE ...]
-save the figure with assigned filename.
-By default, it's calculated based on the input file name.
-.TP
-\fB\-\-save\fR
-save the figure
-.TP
-\fB\-\-nodisplay\fR
-save and do not display the figure
-.TP
-\fB\-\-update\fR
-enable update mode for save figure: skip running if
-1) output file already exists AND
-2) output file is newer than input file.
-.SS "Subset:"
-.IP
-Display dataset in subset range
-.TP
-\fB\-\-sub\-x\fR XMIN XMAX, \fB\-\-subx\fR XMIN XMAX, \fB\-\-subset\-x\fR XMIN XMAX
-subset display in x/cross\-track/range direction
-.TP
-\fB\-\-sub\-y\fR YMIN YMAX, \fB\-\-suby\fR YMIN YMAX, \fB\-\-subset\-y\fR YMIN YMAX
-subset display in y/along\-track/azimuth direction
-.TP
-\fB\-\-sub\-lat\fR LATMIN LATMAX, \fB\-\-sublat\fR LATMIN LATMAX, \fB\-\-subset\-lat\fR LATMIN LATMAX
-subset display in latitude
-.TP
-\fB\-\-sub\-lon\fR LONMIN LONMAX, \fB\-\-sublon\fR LONMIN LONMAX, \fB\-\-subset\-lon\fR LONMIN LONMAX
-subset display in longitude
-.SS "example:"
-.IP
-view.py velocity.h5
-view.py velocity.h5 velocity \fB\-\-wrap\fR \fB\-\-wrap\-range\fR \fB\-2\fR 2 \fB\-c\fR cmy \fB\-\-lalo\-label\fR
-view.py velocity.h5 \fB\-\-ref\-yx\fR  210 566                              #change reference pixel for display
-view.py velocity.h5 \fB\-\-sub\-lat\fR 31.05 31.10 \fB\-\-sub\-lon\fR 130.05 130.10  #subset in lalo / yx
-.IP
-view.py timeseries.h5
-view.py timeseries.h5 \fB\-m\fR no                   #do not use auto mask
-view.py timeseries.h5 \fB\-\-ref\-date\fR 20101120     #change reference date
-view.py timeseries.h5 \fB\-\-ex\fR drop_date.txt      #exclude dates to plot
-view.py timeseries.h5 '*2017*' '*2018*'       #all acquisitions in 2017 and 2018
-view.py timeseries.h5 20200616_20200908       #reconstruct interferogram on the fly
-.IP
-view.py ifgramStack.h5 coherence
-view.py ifgramStack.h5 unwrapPhase\-           #unwrapPhase only in the presence of unwrapPhase_bridging
-view.py ifgramStack.h5 \fB\-n\fR 6                   #the 6th slice
-view.py ifgramStack.h5 20171010_20171115      #all data      related with 20171010_20171115
-view.py ifgramStack.h5 'coherence*20171010*'  #all coherence related with 20171010
-view.py ifgramStack.h5 unwrapPhase\-20070927_20100217 \fB\-\-zero\-mask\fR \fB\-\-wrap\fR     #wrapped phase
-view.py ifgramStack.h5 unwrapPhase\-20070927_20100217 \fB\-\-mask\fR ifgramStack.h5  #mask using connected components
-.IP
-# GPS (for one subplot in geo\-coordinates only)
-view.py geo_velocity_msk.h5 velocity \fB\-\-show\-gps\fR \fB\-\-gps\-label\fR   #show locations of available GPS
-view.py geo_velocity_msk.h5 velocity \fB\-\-show\-gps\fR \fB\-\-gps\-comp\fR enu2los \fB\-\-ref\-gps\fR GV01
-view.py geo_timeseries_ERA5_ramp_demErr.h5 20180619 \fB\-\-ref\-date\fR 20141213 \fB\-\-show\-gps\fR \fB\-\-gps\-comp\fR enu2los \fB\-\-ref\-gps\fR GV01
-.IP
-# Save and Output
-view.py velocity.h5 \fB\-\-save\fR
-view.py velocity.h5 \fB\-\-nodisplay\fR
-view.py geo_velocity.h5 velocity \fB\-\-nowhitespace\fR
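The removed man page above documents view.py exhaustively; for orientation, a typical invocation combining the options it describes might look like the following (file names are illustrative only):

    # wrapped phase over a DEM background, subset to an AOI, written to PNG
    view.py velocity.h5 velocity --wrap --wrap-range -2 2 -c cmy \
        --dem inputs/geometryRadar.h5 --sub-lat 31.05 31.10 --sub-lon 130.05 130.10 \
        --nodisplay -o velocity_aoi.png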
diff -pruN 1.3.3-2/debian/mintpy.install 1.4.0-1/debian/mintpy.install
--- 1.3.3-2/debian/mintpy.install	2022-06-28 03:11:52.000000000 +0000
+++ 1.4.0-1/debian/mintpy.install	2022-08-05 14:44:15.000000000 +0000
@@ -1,2 +1,2 @@
 usr/bin/*.py usr/share/mintpy
-mintpy/sh/*.sh usr/share/mintpy
+scripts/*.sh usr/share/mintpy
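The install file change tracks the upstream 1.4.0 reorganization, in which the helper shell scripts moved from mintpy/sh/ to a top-level scripts/ directory; both globs still land in /usr/share/mintpy. A quick way to verify the installed layout (a sketch; output not shown):

    dpkg -L mintpy | grep '^/usr/share/mintpy/' | sort | head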
diff -pruN 1.3.3-2/debian/mintpy.links 1.4.0-1/debian/mintpy.links
--- 1.3.3-2/debian/mintpy.links	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/mintpy.links	2022-08-05 14:44:15.000000000 +0000
@@ -1,66 +1 @@
-/usr/share/mintpy/add.py /usr/bin/mintpy-add
-/usr/share/mintpy/add_attribute.py /usr/bin/mintpy-add_attribute
-/usr/share/mintpy/asc_desc2horz_vert.py /usr/bin/mintpy-asc_desc2horz_vert
-/usr/share/mintpy/closure_phase_bias.py /usr/bin/mintpy-closure_phase_bias
-/usr/share/mintpy/dem_error.py /usr/bin/mintpy-dem_error
-/usr/share/mintpy/dem_gsi.py /usr/bin/mintpy-dem_gsi
-/usr/share/mintpy/diff.py /usr/bin/mintpy-diff
-/usr/share/mintpy/generate_mask.py /usr/bin/mintpy-generate_mask
-/usr/share/mintpy/geocode.py /usr/bin/mintpy-geocode
-/usr/share/mintpy/ifgram_inversion.py /usr/bin/mintpy-ifgram_inversion
-/usr/share/mintpy/ifgram_reconstruction.py /usr/bin/mintpy-ifgram_reconstruction
-/usr/share/mintpy/image_math.py /usr/bin/mintpy-image_math
-/usr/share/mintpy/image_stitch.py /usr/bin/mintpy-image_stitch
-/usr/share/mintpy/info.py /usr/bin/mintpy-info
-/usr/share/mintpy/iono_tec.py /usr/bin/mintpy-iono_tec
-/usr/share/mintpy/load_data.py /usr/bin/mintpy-load_data
-/usr/share/mintpy/load_gbis.py /usr/bin/mintpy-load_gbis
-/usr/share/mintpy/local_oscilator_drift.py /usr/bin/mintpy-local_oscilator_drift
-/usr/share/mintpy/lookup_geo2radar.py /usr/bin/mintpy-lookup_geo2radar
-/usr/share/mintpy/mask.py /usr/bin/mintpy-mask
-/usr/share/mintpy/modify_network.py /usr/bin/mintpy-modify_network
-/usr/share/mintpy/multi_transect.py /usr/bin/mintpy-multi_transect
-/usr/share/mintpy/multilook.py /usr/bin/mintpy-multilook
-/usr/share/mintpy/plot_coherence_matrix.py /usr/bin/mintpy-plot_coherence_matrix
-/usr/share/mintpy/plot_network.py /usr/bin/mintpy-plot_network
-/usr/share/mintpy/plot_transection.py /usr/bin/mintpy-plot_transection
-/usr/share/mintpy/prep_aria.py /usr/bin/mintpy-prep_aria
-/usr/share/mintpy/prep_cosicorr.py /usr/bin/mintpy-prep_cosicorr
-/usr/share/mintpy/prep_fringe.py /usr/bin/mintpy-prep_fringe
-/usr/share/mintpy/prep_gamma.py /usr/bin/mintpy-prep_gamma
-/usr/share/mintpy/prep_giant.py /usr/bin/mintpy-prep_giant
-/usr/share/mintpy/prep_gmtsar.py /usr/bin/mintpy-prep_gmtsar
-/usr/share/mintpy/prep_hyp3.py /usr/bin/mintpy-prep_hyp3
-/usr/share/mintpy/prep_isce.py /usr/bin/mintpy-prep_isce
-/usr/share/mintpy/prep_roipac.py /usr/bin/mintpy-prep_roipac
-/usr/share/mintpy/prep_snap.py /usr/bin/mintpy-prep_snap
-/usr/share/mintpy/reference_date.py /usr/bin/mintpy-reference_date
-/usr/share/mintpy/reference_point.py /usr/bin/mintpy-reference_point
-/usr/share/mintpy/remove_hdf5_dataset.py /usr/bin/mintpy-remove_hdf5_dataset
-/usr/share/mintpy/remove_ramp.py /usr/bin/mintpy-remove_ramp
-/usr/share/mintpy/save_gbis.py /usr/bin/mintpy-save_gbis
-/usr/share/mintpy/save_gdal.py /usr/bin/mintpy-save_gdal
-/usr/share/mintpy/save_gmt.py /usr/bin/mintpy-save_gmt
-/usr/share/mintpy/save_hdfeos5.py /usr/bin/mintpy-save_hdfeos5
-/usr/share/mintpy/save_kite.py /usr/bin/mintpy-save_kite
-/usr/share/mintpy/save_kmz.py /usr/bin/mintpy-save_kmz
-/usr/share/mintpy/save_kmz_timeseries.py /usr/bin/mintpy-save_kmz_timeseries
-/usr/share/mintpy/save_qgis.py /usr/bin/mintpy-save_qgis
-/usr/share/mintpy/save_roipac.py /usr/bin/mintpy-save_roipac
-/usr/share/mintpy/smallbaselineApp.py /usr/bin/mintpy-smallbaselineApp
-/usr/share/mintpy/solid_earth_tides.py /usr/bin/mintpy-solid_earth_tides
-/usr/share/mintpy/spatial_average.py /usr/bin/mintpy-spatial_average
-/usr/share/mintpy/spatial_filter.py /usr/bin/mintpy-spatial_filter
-/usr/share/mintpy/subset.py /usr/bin/mintpy-subset
-/usr/share/mintpy/temporal_average.py /usr/bin/mintpy-temporal_average
-/usr/share/mintpy/temporal_derivative.py /usr/bin/mintpy-temporal_derivative
-/usr/share/mintpy/temporal_filter.py /usr/bin/mintpy-temporal_filter
-/usr/share/mintpy/timeseries2velocity.py /usr/bin/mintpy-timeseries2velocity
-/usr/share/mintpy/timeseries_rms.py /usr/bin/mintpy-timeseries_rms
-/usr/share/mintpy/tropo_gacos.py /usr/bin/mintpy-tropo_gacos
-/usr/share/mintpy/tropo_phase_elevation.py /usr/bin/mintpy-tropo_phase_elevation
-/usr/share/mintpy/tropo_pyaps3.py /usr/bin/mintpy-tropo_pyaps3
-/usr/share/mintpy/tsview.py /usr/bin/mintpy-tsview
-/usr/share/mintpy/unwrap_error_bridging.py /usr/bin/mintpy-unwrap_error_bridging
-/usr/share/mintpy/unwrap_error_phase_closure.py /usr/bin/mintpy-unwrap_error_phase_closure
-/usr/share/mintpy/view.py /usr/bin/mintpy-view
+/usr/share/mintpy/mintpy /usr/bin/mintpy
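The 66 per-script mintpy-* links collapse into a single link to the new upstream mintpy wrapper. Assuming that wrapper dispatches to the individual tools as subcommands (an assumption inferred from the link name, not verified here), command-line usage shifts roughly like this:

    mintpy-view velocity.h5     # 1.3.x Debian packaging, per-script links
    mintpy view velocity.h5     # 1.4.x, single /usr/bin/mintpy entry point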
diff -pruN 1.3.3-2/debian/mintpy.manpages 1.4.0-1/debian/mintpy.manpages
--- 1.3.3-2/debian/mintpy.manpages	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/mintpy.manpages	2022-08-05 14:44:15.000000000 +0000
@@ -1 +1 @@
-debian/man/mintpy-*.1
+debian/man/*.1
diff -pruN 1.3.3-2/debian/patches/0001-fix-intepreter.patch 1.4.0-1/debian/patches/0001-fix-intepreter.patch
--- 1.3.3-2/debian/patches/0001-fix-intepreter.patch	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/patches/0001-fix-intepreter.patch	1970-01-01 00:00:00.000000000 +0000
@@ -1,21 +0,0 @@
-From: Debian GIS Project <pkg-grass-devel@lists.alioth.debian.org>
-Date: Sun, 15 May 2022 15:09:20 +0000
-Subject: fix-intepreter
-
-Fix interpreter
-
-Forwarded: https://github.com/insarlab/MintPy/pull/774
----
- mintpy/sh/load_data_aoi.sh | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/mintpy/sh/load_data_aoi.sh b/mintpy/sh/load_data_aoi.sh
-index 157849e..8ac02a0 100755
---- a/mintpy/sh/load_data_aoi.sh
-+++ b/mintpy/sh/load_data_aoi.sh
-@@ -1,4 +1,4 @@
--#! /bin/sh
-+#! /bin/bash
- # Extract input datasets in mintpy/inputs for an area of interest (AOI) from the one of the whole frame
- # Parameters: SNWE : string for the AOI
- #             step : string/number, output resolution in degree
diff -pruN 1.3.3-2/debian/patches/0001-Fix-privacy-breachs.patch 1.4.0-1/debian/patches/0001-Fix-privacy-breachs.patch
--- 1.3.3-2/debian/patches/0001-Fix-privacy-breachs.patch	1970-01-01 00:00:00.000000000 +0000
+++ 1.4.0-1/debian/patches/0001-Fix-privacy-breachs.patch	2022-08-05 14:44:15.000000000 +0000
@@ -0,0 +1,248 @@
+From: Antonio Valentino <antonio.valentino@tiscali.it>
+Date: Sun, 12 Jun 2022 19:36:34 +0000
+Subject: Fix privacy breachs
+
+Forwarded: not-needed
+---
+ docs/QGIS.md                       |  8 ++++----
+ docs/README.md                     | 10 +---------
+ docs/_layouts/default_tactile.html |  6 ------
+ docs/api/colormaps.md              | 17 ++++++++++-------
+ docs/api/coord.md                  |  2 +-
+ docs/dask.md                       |  2 +-
+ docs/demo_dataset.md               | 16 ++++++++--------
+ docs/google_earth.md               | 12 ++++++------
+ docs/hdfeos5.md                    |  2 +-
+ 9 files changed, 32 insertions(+), 43 deletions(-)
+
+diff --git a/docs/QGIS.md b/docs/QGIS.md
+index 7e897c7..9b29cfe 100644
+--- a/docs/QGIS.md
++++ b/docs/QGIS.md
+@@ -9,12 +9,12 @@ Displacement time-series can be exported as QGIS PS Time Series Viewer plugin co
+ ramp_color('RdBu', scale_linear(VEL, -20, 20, 0, 1))
+ ```
+ 
+-<p align="left">
+-  <img width="1000" src="https://yunjunzhang.files.wordpress.com/2019/11/ps-time-series-viewer-1.png">
++<p align="left"><a href="https://yunjunzhang.files.wordpress.com/2019/11/ps-time-series-viewer-1.png">
++  https://yunjunzhang.files.wordpress.com/2019/11/ps-time-series-viewer-1.png</a>
+ </p>
+ 
+ 5. Select the [PS Time Series Viewer logo](https://gitlab.com/faunalia/ps-speed/blob/master/icons/logo.png) to activate the tool, and click/play on the map to display the time-series!
+ 
+-<p align="left">
+-  <img width="800" src="https://yunjunzhang.files.wordpress.com/2019/11/ps-time-series-viewer-2.png">
++<p align="left"><a href="https://yunjunzhang.files.wordpress.com/2019/11/ps-time-series-viewer-2.png">
++  https://yunjunzhang.files.wordpress.com/2019/11/ps-time-series-viewer-2.png</a>
+ </p>
+diff --git a/docs/README.md b/docs/README.md
+index 39b4a78..ec469bf 100644
+--- a/docs/README.md
++++ b/docs/README.md
+@@ -1,11 +1,3 @@
+-[![Language](https://img.shields.io/badge/python-3.6%2B-blue.svg)](https://www.python.org/)
+-[![CircleCI](https://img.shields.io/circleci/build/github/insarlab/MintPy.svg?logo=circleci&label=CI)](https://circleci.com/gh/insarlab/MintPy)
+-[![Docs Status](https://readthedocs.org/projects/mintpy/badge/?color=green&version=latest)](https://mintpy.readthedocs.io/?badge=latest)
+-[![Version](https://img.shields.io/github/v/release/insarlab/MintPy?color=green&label=version)](https://github.com/insarlab/MintPy/releases)
+-[![License](https://img.shields.io/badge/license-GPLv3+-yellow.svg)](https://github.com/insarlab/MintPy/blob/main/LICENSE)
+-[![Forum](https://img.shields.io/badge/forum-Google%20Groups-orange.svg)](https://groups.google.com/g/mintpy)
+-[![Citation](https://img.shields.io/badge/doi-10.1016%2Fj.cageo.2019.104331-blue)](https://doi.org/10.1016/j.cageo.2019.104331)
+-
+ ## MintPy ##
+ 
+ The Miami INsar Time-series software in PYthon (MintPy as /mɪnt paɪ/) is an open-source package for Interferometric Synthetic Aperture Radar (InSAR) time series analysis. It reads the stack of interferograms (coregistered and unwrapped) in [ISCE](https://github.com/isce-framework/isce2), [ARIA](https://github.com/aria-tools/ARIA-tools), [FRInGE](https://github.com/isce-framework/fringe), [HyP3](https://hyp3-docs.asf.alaska.edu/), [GMTSAR](https://github.com/gmtsar/gmtsar), [SNAP](http://step.esa.int/), [GAMMA](https://www.gamma-rs.ch/no_cache/software.html) or ROI_PAC format, and produces three dimensional (2D in space and 1D in time) ground surface displacement in line-of-sight direction. It includes a routine time series analysis (`smallbaselineApp.py`) and some independent toolbox.
+@@ -49,7 +41,7 @@ smallbaselineApp.py ${MINTPY_HOME}/mintpy/data/input_files/FernandinaSenDT128.tx
+ ```
+ 
+ <p align="left">
+-  <img width="600" src="https://yunjunzhang.files.wordpress.com/2019/06/fernandinasendt128_poi.jpg">
++  <a href="https://yunjunzhang.files.wordpress.com/2019/06/fernandinasendt128_poi.jpg">https://yunjunzhang.files.wordpress.com/2019/06/fernandinasendt128_poi.jpg</a>
+ </p>
+ 
+ Results are plotted in **./pic** folder. To explore more data information and visualization, try the following scripts:
+diff --git a/docs/_layouts/default_tactile.html b/docs/_layouts/default_tactile.html
+index dcc0755..0540c31 100644
+--- a/docs/_layouts/default_tactile.html
++++ b/docs/_layouts/default_tactile.html
+@@ -6,9 +6,6 @@
+     <link href='https://fonts.googleapis.com/css?family=Chivo:900' rel='stylesheet' type='text/css'>
+     <link rel="stylesheet" href="{{ '/assets/css/style.css?v=' | append: site.github.build_revision | relative_url }}">
+     <link rel="stylesheet" type="text/css" href="{{ '/assets/css/print.css' | relative_url }}" media="print">
+-    <!--[if lt IE 9]>
+-    <script src="//html5shiv.googlecode.com/svn/trunk/html5.js"></script>
+-    <![endif]-->
+     <title>{{ site.title | default: site.github.repository_name }} by {{ site.github.owner_name }}</title>
+   </head>
+ 
+@@ -17,9 +14,6 @@
+       <div class="inner">
+ 
+         <header>
+-          <p align="center">
+-            <img src="https://yunjunzhang.files.wordpress.com/2017/08/logo_githubpage.pdf">
+-          </p>
+         </header>
+         <section id="downloads" class="clearfix">
+           {% if site.github.is_project_page %}
+diff --git a/docs/api/colormaps.md b/docs/api/colormaps.md
+index 1c86fa5..5588441 100644
+--- a/docs/api/colormaps.md
++++ b/docs/api/colormaps.md
+@@ -8,8 +8,8 @@ MintPy support the following colormaps:
+ 
+ We recommend to use cyclic colormap `cmy` for wrapped phase/displacement measurement.
+ 
+-<p align="left">
+-  <img width="280" src="https://yunjunzhang.files.wordpress.com/2020/01/cmap_cmy-1.png">
++<p align="left"><a href="https://yunjunzhang.files.wordpress.com/2020/01/cmap_cmy-1.png">
++  https://yunjunzhang.files.wordpress.com/2020/01/cmap_cmy-1.png</a>
+ </p>
+ 
+ To use colormap `cmy` in view.py:
+@@ -29,9 +29,12 @@ cmap = ColormapExt('cmy').colromap
+ 
+ All GMT cpt files, e.g. the 20 built-in colormaps shown below, can be recognized by setting the variable `GMT_CPT_DIR` in `$MINTPY_HOME/mintpy/objects/colors.py`. The default hardwired value is `/opt/local/share/gmt/cpt` for macOS users with GMT installed using [MacPorts](https://www.macports.org).
+ 
+-<p align="left">
+-  <img width="600" src="https://docs.generic-mapping-tools.org/5.4/_images/GMT_App_M_1a.png">
+-  <img width="600" src="https://docs.generic-mapping-tools.org/5.4/_images/GMT_App_M_1b.png">
++<p align="left"><a href="https://docs.generic-mapping-tools.org/5.4/_images/GMT_App_M_1a.png">
++  https://docs.generic-mapping-tools.org/5.4/_images/GMT_App_M_1a.png</a>
++</p>
++
++<p align="left"><a href="https://docs.generic-mapping-tools.org/5.4/_images/GMT_App_M_1b.png">
++  https://docs.generic-mapping-tools.org/5.4/_images/GMT_App_M_1b.png</a>
+ </p>
+ 
+ ### Colormaps from [cpt-city](http://soliton.vm.bytemark.co.uk/pub/cpt-city/views/totp-cpt.html) ###
+@@ -63,8 +66,8 @@ The following colormaps is included by default:
+ + vikO (cyclic diverging)
+ + More at [Scientific Color-Maps](http://www.fabiocrameri.ch/colourmaps.php) ([Crameri, 2018](https://doi.org/10.5194/gmd-11-2541-2018))
+ 
+-<p align="left">
+-  <img src="https://yunjunzhang.files.wordpress.com/2021/01/scientificcolourmaps_fabiocrameri.png">
++<p align="left"><a href="https://yunjunzhang.files.wordpress.com/2021/01/scientificcolourmaps_fabiocrameri.png">
++  https://yunjunzhang.files.wordpress.com/2021/01/scientificcolourmaps_fabiocrameri.png</a>
+ </p>
+ 
+ ### Interactive [web tool](https://jdherman.github.io/colormap/) to generate custom colormaps by Jon Herman ###
+diff --git a/docs/api/coord.md b/docs/api/coord.md
+index e1a7964..7ef06d6 100644
+--- a/docs/api/coord.md
++++ b/docs/api/coord.md
+@@ -11,4 +11,4 @@ Y_UNIT     degrees
+ 
+ X/Y_FIRST are the longitude/latitude value of the first (upper left corner) pixel’s upper left corner, as shown below:   
+ 
+-<img src="https://yunjunzhang.files.wordpress.com/2019/06/coord_index.png" width="400">
+\ No newline at end of file
++<p><a href="https://yunjunzhang.files.wordpress.com/2019/06/coord_index.png">https://yunjunzhang.files.wordpress.com/2019/06/coord_index.png</a></p>
+diff --git a/docs/dask.md b/docs/dask.md
+index e8d5b4a..0edf1ee 100644
+--- a/docs/dask.md
++++ b/docs/dask.md
+@@ -54,7 +54,7 @@ A typical run time without local cluster is 30 secs and with 8 workers 11.4 secs
+ 
+ To show the run time improvement, we test three datasets (South Isabela, Fernandina, and Kilauea) with different number of cores and same amount of allocated memory (4 GB) on a compute node in the [Stampede2 cluster's skx-normal queue](https://portal.tacc.utexas.edu/user-guides/stampede2#overview-skxcomputenodes). Results are as below:
+ 
+-![Dask LocalCluster Performance](https://yunjunzhang.files.wordpress.com/2020/08/dask_local_cluster_performance.png)
++[Dask LocalCluster Performance](https://yunjunzhang.files.wordpress.com/2020/08/dask_local_cluster_performance.png)
+ 
+ #### 1.5 Known problems ####
+ 
+diff --git a/docs/demo_dataset.md b/docs/demo_dataset.md
+index 8af4a4c..474be19 100644
+--- a/docs/demo_dataset.md
++++ b/docs/demo_dataset.md
+@@ -13,8 +13,8 @@ cd FernandinaSenDT128/mintpy
+ smallbaselineApp.py ${MINTPY_HOME}/mintpy/data/input_files/FernandinaSenDT128.txt     
+ ```
+ 
+-<p align="left">
+-  <img width="650" src="https://yunjunzhang.files.wordpress.com/2019/06/fernandinasendt128_poi.jpg">
++<p align="left"><a href="https://yunjunzhang.files.wordpress.com/2019/06/fernandinasendt128_poi.jpg">
++  https://yunjunzhang.files.wordpress.com/2019/06/fernandinasendt128_poi.jpg</a>
+ </p>
+ 
+ Relevant literature:
+@@ -34,8 +34,8 @@ cd SanFranSenDT42/mintpy
+ smallbaselineApp.py ${MINTPY_HOME}/mintpy/data/input_files/SanFranSenDT42.txt     
+ ```
+ 
+-<p align="left">
+-  <img width="650" src="https://yunjunzhang.files.wordpress.com/2020/11/sanfransendt42_transect.jpg">
++<p align="left"><a href="https://yunjunzhang.files.wordpress.com/2020/11/sanfransendt42_transect.jpg">
++  https://yunjunzhang.files.wordpress.com/2020/11/sanfransendt42_transect.jpg</a>
+ </p>
+ 
+ Relevant literature:
+@@ -55,8 +55,8 @@ cd WellsEnvD2T399/mintpy
+ smallbaselineApp.py ${MINTPY_HOME}/mintpy/data/input_files/WellsEnvD2T399.txt
+ ```
+ 
+-<p align="left">
+-  <img width="650" src="https://yunjunzhang.files.wordpress.com/2019/06/wellsenvd2t399_co_poi.jpg">
++<p align="left"><a href="https://yunjunzhang.files.wordpress.com/2019/06/wellsenvd2t399_co_poi.jpg">
++  https://yunjunzhang.files.wordpress.com/2019/06/wellsenvd2t399_co_poi.jpg</a>
+ </p>
+ 
+ Relevant literature:
+@@ -102,8 +102,8 @@ cd KujuAlosAT422F650/mintpy
+ smallbaselineApp.py ${MINTPY_HOME}/mintpy/data/input_files/KujuAlosAT422F650.txt     
+ ```
+ 
+-<p align="left">
+-  <img width="650" src="https://yunjunzhang.files.wordpress.com/2019/06/kujualosat422f650_vel.jpg">
++<p align="left"><a href="https://yunjunzhang.files.wordpress.com/2019/06/kujualosat422f650_vel.jpg">
++  https://yunjunzhang.files.wordpress.com/2019/06/kujualosat422f650_vel.jpg</a>
+ </p>
+ 
+ Relevant literature:
+diff --git a/docs/google_earth.md b/docs/google_earth.md
+index eada9ef..c9dc193 100644
+--- a/docs/google_earth.md
++++ b/docs/google_earth.md
+@@ -4,8 +4,8 @@ MintPy use [pyKML](https://pythonhosted.org/pykml/) to generate KMZ (Keyhole Mar
+ 
+ `save_kmz_timeseries.py` takes 3D displacement time-series file and outputs a KMZ file with interactive time-seires plot.
+ 
+-<p align="center">
+-  <img src="https://yunjunzhang.files.wordpress.com/2019/02/fernandinasendt128_ge-1.png">
++<p align="center"><a href="https://yunjunzhang.files.wordpress.com/2019/02/fernandinasendt128_ge-1.png">
++  https://yunjunzhang.files.wordpress.com/2019/02/fernandinasendt128_ge-1.png</a>
+ </p>
+ 
+ [Download KMZ file](https://miami.box.com/v/FernandinaSenDT128TS)
+@@ -14,8 +14,8 @@ MintPy use [pyKML](https://pythonhosted.org/pykml/) to generate KMZ (Keyhole Mar
+ 
+ `save_kmz.py` takes any 2D matrix and outputs a KMZ file with a overlay image.
+ 
+-<p align="center">
+-  <img src="https://yunjunzhang.files.wordpress.com/2019/02/vel_fernandinasendt128_ge.png">
++<p align="center"><a href="https://yunjunzhang.files.wordpress.com/2019/02/vel_fernandinasendt128_ge.png">
++  https://yunjunzhang.files.wordpress.com/2019/02/vel_fernandinasendt128_ge.png</a>
+ </p>
+ 
+ [Download KMZ file](https://miami.box.com/v/FernandinaSenDT128VEL)
+@@ -34,8 +34,8 @@ The script samples the input 3D dataset at 3 levels of details by default (`--st
+ 
+ The low- and moderate-resolution LODs cover the entire region, while the high-resolution LOD covers only the actively deforming regions. These regions (red boxes below) are currently identified as boxes having >20% pixels with velocity magnitude > the global velocity median absolute deviation [[source code](https://github.com/insarlab/MintPy/blob/main/mintpy/save_kmz_timeseries.py#L160)].
+ 
+-<p align="center">
+-  <img src="https://yunjunzhang.files.wordpress.com/2020/03/defo_area.png">
++<p align="center"><a href="https://yunjunzhang.files.wordpress.com/2020/03/defo_area.png">
++  https://yunjunzhang.files.wordpress.com/2020/03/defo_area.png</a>
+ </p>
+ 
+ 2. Region-based Network Links
+diff --git a/docs/hdfeos5.md b/docs/hdfeos5.md
+index 0db8243..4b53498 100644
+--- a/docs/hdfeos5.md
++++ b/docs/hdfeos5.md
+@@ -90,4 +90,4 @@ HDF-EOS5 file format is used as the input of the University of Miami's web viewe
+ 
+ <p align="center"><b>http://insarmaps.miami.edu</b><br></p>
+ 
+-[![InSAR Web Viewer](https://yunjunzhang.files.wordpress.com/2019/06/web_viewer_kujualosat422.png)](http://insarmaps.miami.edu/)
++[InSAR Web Viewer](https://yunjunzhang.files.wordpress.com/2019/06/web_viewer_kujualosat422.png)
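The pattern throughout this patch is mechanical: remote <img> embeds, which make a reader's browser contact third-party hosts, become plain <a href> links that merely show the URL. A rough sed equivalent of the core rewrite (a sketch only; the actual patch was edited by hand and also reflows the surrounding <p> markup):

    sed -E 's|<img [^>]*src="(https?://[^"]+)"[^>]*>|<a href="\1">\1</a>|g' docs/QGIS.md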
diff -pruN 1.3.3-2/debian/patches/0002-Spelling.patch 1.4.0-1/debian/patches/0002-Spelling.patch
--- 1.3.3-2/debian/patches/0002-Spelling.patch	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/patches/0002-Spelling.patch	1970-01-01 00:00:00.000000000 +0000
@@ -1,259 +0,0 @@
-From: Antonio Valentino <antonio.valentino@tiscali.it>
-Date: Sun, 15 May 2022 15:11:25 +0000
-Subject: Spelling
-
-Forwarded: https://github.com/insarlab/MintPy/pull/775
----
- mintpy/closure_phase_bias.py         | 4 ++--
- mintpy/defaults/smallbaselineApp.cfg | 4 ++--
- mintpy/diff.py                       | 4 ++--
- mintpy/generate_mask.py              | 4 ++--
- mintpy/image_stitch.py               | 2 +-
- mintpy/modify_network.py             | 2 +-
- mintpy/multi_transect.py             | 2 +-
- mintpy/prep_roipac.py                | 2 +-
- mintpy/prep_snap.py                  | 2 +-
- mintpy/reference_point.py            | 4 ++--
- mintpy/subset.py                     | 2 +-
- mintpy/timeseries2velocity.py        | 4 ++--
- mintpy/utils/arg_group.py            | 4 ++--
- mintpy/utils/plot.py                 | 4 ++--
- mintpy/utils/utils0.py               | 2 +-
- 15 files changed, 23 insertions(+), 23 deletions(-)
-
-diff --git a/mintpy/closure_phase_bias.py b/mintpy/closure_phase_bias.py
-index 2275c8d..83b420d 100755
---- a/mintpy/closure_phase_bias.py
-+++ b/mintpy/closure_phase_bias.py
-@@ -28,8 +28,8 @@ def create_parser():
-     parser = argparse.ArgumentParser(description = 'Create an indication map for closure phase bias.')
-     parser.add_argument('-i','--ifgramstack',type = str, dest = 'ifgram_stack',help = 'interferogram stack file that contains the unwrapped phases')
-     parser.add_argument('--nl', dest = 'nl', type = int, default = 20, help = 'connection level that we are correcting to (or consider as no bias)')
--    parser.add_argument('--numsigma',dest = 'numsigma', type = float, default = 3, help = 'Threashold for phase (number of sigmas,0-infty), default to be 3 sigma of a Gaussian distribution (assumed distribution for the cumulative closure phase) with sigma = pi/sqrt(3*num_cp)')
--    parser.add_argument('--epi',dest = 'episilon', type = float, default = 0.3, help = 'Threashold for amplitude (0-1), default 0.3')
-+    parser.add_argument('--numsigma',dest = 'numsigma', type = float, default = 3, help = 'Threshold for phase (number of sigmas,0-infty), default to be 3 sigma of a Gaussian distribution (assumed distribution for the cumulative closure phase) with sigma = pi/sqrt(3*num_cp)')
-+    parser.add_argument('--epi',dest = 'episilon', type = float, default = 0.3, help = 'Threshold for amplitude (0-1), default 0.3')
-     parser.add_argument('--maxMemory', dest = 'max_memory', type = float, default = 8, help = 'max memory to use in GB')
-     parser.add_argument('-o', dest = 'outdir', type = str, default = '.', help = 'output file directory')
-     return parser
-diff --git a/mintpy/defaults/smallbaselineApp.cfg b/mintpy/defaults/smallbaselineApp.cfg
-index f08c7f8..f693e84 100644
---- a/mintpy/defaults/smallbaselineApp.cfg
-+++ b/mintpy/defaults/smallbaselineApp.cfg
-@@ -166,7 +166,7 @@ mintpy.networkInversion.waterMaskFile   = auto #[filename / no], auto for waterM
- mintpy.networkInversion.minNormVelocity = auto #[yes / no], auto for yes, min-norm deformation velocity / phase
- mintpy.networkInversion.residualNorm    = auto #[L2 ], auto for L2, norm minimization solution
- 
--## mask options for unwrapPhase of each interferogram before inversion (recommed if weightFunct=no):
-+## mask options for unwrapPhase of each interferogram before inversion (recommend if weightFunct=no):
- ## a. coherence              - mask out pixels with spatial coherence < maskThreshold
- ## b. connectComponent       - mask out pixels with False/0 value
- ## c. no                     - no masking [recommended].
-@@ -281,7 +281,7 @@ mintpy.velocity.startDate      = auto   #[20070101 / no], auto for no
- mintpy.velocity.endDate        = auto   #[20101230 / no], auto for no
- 
- ## Bootstrapping
--## refernce: Efron and Tibshirani (1986, Stat. Sci.)
-+## reference: Efron and Tibshirani (1986, Stat. Sci.)
- mintpy.velocity.bootstrap      = auto   #[yes / no], auto for no, use bootstrap
- mintpy.velocity.bootstrapCount = auto   #[int>1], auto for 400, number of iterations for bootstrapping
- 
-diff --git a/mintpy/diff.py b/mintpy/diff.py
-index cfbb023..b196d4d 100755
---- a/mintpy/diff.py
-+++ b/mintpy/diff.py
-@@ -40,8 +40,8 @@ def create_parser():
-                                      formatter_class=argparse.RawTextHelpFormatter,
-                                      epilog=EXAMPLE)
- 
--    parser.add_argument('file1', help='file to be substracted.')
--    parser.add_argument('file2', nargs='+', help='file used to substract')
-+    parser.add_argument('file1', help='file to be subtracted.')
-+    parser.add_argument('file2', nargs='+', help='file used to subtract')
-     parser.add_argument('-o', '--output', dest='outfile',
-                         help='output file name, default is file1_diff_file2.h5')
-     parser.add_argument('--force', action='store_true',
-diff --git a/mintpy/generate_mask.py b/mintpy/generate_mask.py
-index 97363c5..d276903 100755
---- a/mintpy/generate_mask.py
-+++ b/mintpy/generate_mask.py
-@@ -22,10 +22,10 @@ EXAMPLE = """example:
-   generate_mask.py  temporalCoherence.h5 -m 0.7 -o maskTempCoh.h5 --base inputs/geometryRadar.h5 --base-dset shadow --base-value 1
-   generate_mask.py  avgSpatialCoh.h5     -m 0.7 --base waterMask.h5 -o maskSpatialCoh.h5
- 
--  # exlcude area by min/max value and/or subset in row/col direction
-+  # exclude area by min/max value and/or subset in row/col direction
-   generate_mask.py  081018_090118.unw -m 3 -M 8 -y 100 700 -x 200 800 -o mask_1.h5
- 
--  # exlcude pixel cluster based on minimum number of pixels
-+  # exclude pixel cluster based on minimum number of pixels
-   generate_mask.py  maskTempCoh.h5 -p 10 mask_1.h5
- 
-   # exclude pixels with large velocity STD: |velocity| > cutoff (2 by default) * velocityStd
-diff --git a/mintpy/image_stitch.py b/mintpy/image_stitch.py
-index c2d7291..d1b3e8e 100755
---- a/mintpy/image_stitch.py
-+++ b/mintpy/image_stitch.py
-@@ -46,7 +46,7 @@ def create_parser():
- 
-     # plot options
-     parser.add_argument('--nodisplay', dest='disp_fig', action='store_false',
--                        help='do not display the result ploting.')
-+                        help='do not display the result plotting.')
-     return parser
- 
- 
-diff --git a/mintpy/modify_network.py b/mintpy/modify_network.py
-index 4c29f11..4b862ed 100755
---- a/mintpy/modify_network.py
-+++ b/mintpy/modify_network.py
-@@ -59,7 +59,7 @@ def create_parser():
-     parser.add_argument('--reset', action='store_true',
-                         help='restore all interferograms in the file, by marking all dropIfgram=True')
-     parser.add_argument('--noaux', dest='update_aux', action='store_false',
--                        help='Do not update auxilary files, e.g.\n' +
-+                        help='Do not update auxiliary files, e.g.\n' +
-                              'maskConnComp.h5 or avgSpatialCoh.h5 from ifgramStack.h5')
- 
-     # 1. temp/perp baseline, num of conn., dates, pair index, etc.
-diff --git a/mintpy/multi_transect.py b/mintpy/multi_transect.py
-index c298618..5b796f1 100755
---- a/mintpy/multi_transect.py
-+++ b/mintpy/multi_transect.py
-@@ -720,7 +720,7 @@ def main(argv=None):
-                     print(""" 
- %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-       
--      WARNING: nan value for InSAR data at the refernce pixel!
-+      WARNING: nan value for InSAR data at the reference pixel!
-                reference station should be a pixel with valid value in InSAR data.
-                                
-                please select another GPS station as the reference station.
-diff --git a/mintpy/prep_roipac.py b/mintpy/prep_roipac.py
-index 27a8b14..47b89b7 100755
---- a/mintpy/prep_roipac.py
-+++ b/mintpy/prep_roipac.py
-@@ -41,7 +41,7 @@ def create_parser():
- 
-     parser.add_argument('file', nargs='+', help='Gamma file(s)')
-     parser.add_argument('--no-parallel', dest='parallel', action='store_false', default=True,
--                        help='Disable parallel processing. Diabled auto for 1 input file.')
-+                        help='Disable parallel processing. Disabled auto for 1 input file.')
-     return parser
- 
- 
-diff --git a/mintpy/prep_snap.py b/mintpy/prep_snap.py
-index 8c4c145..a0de9cb 100755
---- a/mintpy/prep_snap.py
-+++ b/mintpy/prep_snap.py
-@@ -23,7 +23,7 @@ DESCRIPTION = """
- 
-   The SNAP .dim file should contain all the required sensor / baseline metadata needed.
-   The baseline metadata gets written during snap back-geocoding (co-registration).
--  prep_snap is run seperately for unw/ifg/cor files so neeeds seperate .dim/.data products
-+  prep_snap is run separately for unw/ifg/cor files so needs separate .dim/.data products
-   with only the relevant band in each product. Use Band Subset > save BEAM-DIMAP file.
- 
-   The file name should be yyyymmdd_yyyymmdd_type_tc.dim where type can be filt/unw/coh.
-diff --git a/mintpy/reference_point.py b/mintpy/reference_point.py
-index 62f3593..7465633 100755
---- a/mintpy/reference_point.py
-+++ b/mintpy/reference_point.py
-@@ -66,7 +66,7 @@ def create_parser():
-     parser.add_argument('-m', '--mask', dest='maskFile', help='mask file')
- 
-     parser.add_argument('-o', '--outfile', type=str, default=None,
--                        help='output file name (default: %(default)s). This option is diabled for ifgramStack file.\n'
-+                        help='output file name (default: %(default)s). This option is disabled for ifgramStack file.\n'
-                              'None (default) for update data value directly without writing to a new file.\n')
- 
-     parser.add_argument('--write-data', dest='write_data', action='store_true',
-@@ -321,7 +321,7 @@ def manual_select_reference_yx(data, inps, mask=None):
-     """
-     from matplotlib import pyplot as plt
-     print('\nManual select reference point ...')
--    print('Click on a pixel that you want to choose as the refernce ')
-+    print('Click on a pixel that you want to choose as the reference ')
-     print('    pixel in the time-series analysis;')
-     print('Then close the displayed window to continue.\n')
-     if mask is not None:
-diff --git a/mintpy/subset.py b/mintpy/subset.py
-index 74f397d..e817d67 100755
---- a/mintpy/subset.py
-+++ b/mintpy/subset.py
-@@ -78,7 +78,7 @@ def create_parser():
-                              "np.nan, 0, 1000, ... \n"
-                              "By default, it's None for no-outfill.")
-     parser.add_argument('--no-parallel', dest='parallel', action='store_false', default=True,
--                        help='Disable parallel processing. Diabled auto for 1 input file.\n\n')
-+                        help='Disable parallel processing. Disabled auto for 1 input file.\n\n')
- 
-     parser.add_argument('-o', '--output', dest='outfile',
-                         help='output file name\n' +
-diff --git a/mintpy/timeseries2velocity.py b/mintpy/timeseries2velocity.py
-index b002b4b..5f1266c 100755
---- a/mintpy/timeseries2velocity.py
-+++ b/mintpy/timeseries2velocity.py
-@@ -94,9 +94,9 @@ def create_parser():
-     # reference in time and space
-     # for input file without reference info, e.g. ERA5.h5
-     parser.add_argument('--ref-lalo', dest='ref_lalo', metavar=('LAT', 'LON'), type=float, nargs=2,
--                        help='Change referene point LAT LON for estimation.')
-+                        help='Change reference point LAT LON for estimation.')
-     parser.add_argument('--ref-yx', dest='ref_yx', metavar=('Y', 'X'), type=int, nargs=2,
--                        help='Change referene point Y X for estimation.')
-+                        help='Change reference point Y X for estimation.')
-     parser.add_argument('--ref-date', dest='ref_date', metavar='DATE',
-                         help='Change reference date for estimation.')
- 
-diff --git a/mintpy/utils/arg_group.py b/mintpy/utils/arg_group.py
-index e0e6b15..b47f758 100644
---- a/mintpy/utils/arg_group.py
-+++ b/mintpy/utils/arg_group.py
-@@ -301,9 +301,9 @@ def add_reference_argument(parser):
- 
-     # reference pixel
-     ref.add_argument('--ref-lalo', dest='ref_lalo', metavar=('LAT', 'LON'), type=float, nargs=2,
--                     help='Change referene point LAT LON for display')
-+                     help='Change reference point LAT LON for display')
-     ref.add_argument('--ref-yx', dest='ref_yx', metavar=('Y', 'X'), type=int, nargs=2,
--                     help='Change referene point Y X for display')
-+                     help='Change reference point Y X for display')
- 
-     # reference pixel style
-     ref.add_argument('--noreference', dest='disp_ref_pixel',
-diff --git a/mintpy/utils/plot.py b/mintpy/utils/plot.py
-index d81e770..118de6d 100644
---- a/mintpy/utils/plot.py
-+++ b/mintpy/utils/plot.py
-@@ -58,7 +58,7 @@ mplColors = ['#1f77b4',
- 
- ########################################### Parser utilities ##############################################
- def cmd_line_parse(iargs=''):
--    parser = argparse.ArgumentParser(description='Ploting Parser')
-+    parser = argparse.ArgumentParser(description='Plotting Parser')
-     parser = arg_group.add_data_disp_argument(parser)
-     parser = arg_group.add_dem_argument(parser)
-     parser = arg_group.add_figure_argument(parser)
-@@ -649,7 +649,7 @@ def plot_network(ax, date12List, dateList, pbaseList, p_dict={}, date12List_drop
-     idx_date_keep = [dateList.index(i) for i in dateList_keep]
-     idx_date_drop = [dateList.index(i) for i in dateList_drop]
- 
--    # Ploting
-+    # Plotting
-     if cohList is not None:
-         data_min = min(cohList)
-         data_max = max(cohList)
-diff --git a/mintpy/utils/utils0.py b/mintpy/utils/utils0.py
-index 71efa6b..60afe41 100644
---- a/mintpy/utils/utils0.py
-+++ b/mintpy/utils/utils0.py
-@@ -770,7 +770,7 @@ def check_parallel(file_num=1, print_msg=True, maxParallelNum=8):
-     if file_num <= 1:
-         enable_parallel = False
-         if print_msg:
--            print('parallel processing is diabled for one input file')
-+            print('parallel processing is disabled for one input file')
-         return 1, enable_parallel, None, None
- 
-     # Check required python module
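These spelling fixes were merged upstream (see the Forwarded URL above), which is why the patch is dropped here. Regressions of this kind can be caught with codespell, e.g. run from the source tree:

    codespell mintpy/ docs/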
diff -pruN 1.3.3-2/debian/patches/0003-Fix-privacy-breachs.patch 1.4.0-1/debian/patches/0003-Fix-privacy-breachs.patch
--- 1.3.3-2/debian/patches/0003-Fix-privacy-breachs.patch	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/patches/0003-Fix-privacy-breachs.patch	1970-01-01 00:00:00.000000000 +0000
@@ -1,248 +0,0 @@
-From: Antonio Valentino <antonio.valentino@tiscali.it>
-Date: Sun, 12 Jun 2022 19:36:34 +0000
-Subject: Fix privacy breachs
-
-Forwarded: not-needed
----
- docs/QGIS.md                       |  8 ++++----
- docs/README.md                     | 10 +---------
- docs/_layouts/default_tactile.html |  6 ------
- docs/api/colormaps.md              | 17 ++++++++++-------
- docs/api/coord.md                  |  2 +-
- docs/dask.md                       |  2 +-
- docs/demo_dataset.md               | 16 ++++++++--------
- docs/google_earth.md               | 12 ++++++------
- docs/hdfeos5.md                    |  2 +-
- 9 files changed, 32 insertions(+), 43 deletions(-)
-
-diff --git a/docs/QGIS.md b/docs/QGIS.md
-index 7e897c7..9b29cfe 100644
---- a/docs/QGIS.md
-+++ b/docs/QGIS.md
-@@ -9,12 +9,12 @@ Displacement time-series can be exported as QGIS PS Time Series Viewer plugin co
- ramp_color('RdBu', scale_linear(VEL, -20, 20, 0, 1))
- ```
- 
--<p align="left">
--  <img width="1000" src="https://yunjunzhang.files.wordpress.com/2019/11/ps-time-series-viewer-1.png">
-+<p align="left"><a href="https://yunjunzhang.files.wordpress.com/2019/11/ps-time-series-viewer-1.png">
-+  https://yunjunzhang.files.wordpress.com/2019/11/ps-time-series-viewer-1.png</a>
- </p>
- 
- 5. Select the [PS Time Series Viewer logo](https://gitlab.com/faunalia/ps-speed/blob/master/icons/logo.png) to activate the tool, and click/play on the map to display the time-series!
- 
--<p align="left">
--  <img width="800" src="https://yunjunzhang.files.wordpress.com/2019/11/ps-time-series-viewer-2.png">
-+<p align="left"><a href="https://yunjunzhang.files.wordpress.com/2019/11/ps-time-series-viewer-2.png">
-+  https://yunjunzhang.files.wordpress.com/2019/11/ps-time-series-viewer-2.png</a>
- </p>
-diff --git a/docs/README.md b/docs/README.md
-index 5ab6e23..958b9c2 100644
---- a/docs/README.md
-+++ b/docs/README.md
-@@ -1,11 +1,3 @@
--[![Language](https://img.shields.io/badge/python-3.6%2B-blue.svg)](https://www.python.org/)
--[![CircleCI](https://img.shields.io/circleci/build/github/insarlab/MintPy.svg?logo=circleci&label=test)](https://circleci.com/gh/insarlab/MintPy)
--[![Docs Status](https://readthedocs.org/projects/mintpy/badge/?color=green&version=latest)](https://mintpy.readthedocs.io/?badge=latest)
--[![Version](https://img.shields.io/github/v/release/insarlab/MintPy?color=green)](https://github.com/insarlab/MintPy/releases)
--[![License](https://img.shields.io/badge/license-GPLv3-yellow.svg)](https://github.com/insarlab/MintPy/blob/main/LICENSE)
--[![Forum](https://img.shields.io/badge/forum-Google%20Groups-orange.svg)](https://groups.google.com/g/mintpy)
--[![Citation](https://img.shields.io/badge/doi-10.1016%2Fj.cageo.2019.104331-blue)](https://doi.org/10.1016/j.cageo.2019.104331)
--
- ## MintPy ##
- 
- The Miami INsar Time-series software in PYthon (MintPy as /mɪnt paɪ/) is an open-source package for Interferometric Synthetic Aperture Radar (InSAR) time series analysis. It reads the stack of interferograms (coregistered and unwrapped) in [ISCE](https://github.com/isce-framework/isce2), [ARIA](https://github.com/aria-tools/ARIA-tools), [FRInGE](https://github.com/isce-framework/fringe), [HyP3](https://hyp3-docs.asf.alaska.edu/), [GMTSAR](https://github.com/gmtsar/gmtsar), [SNAP](http://step.esa.int/), [GAMMA](https://www.gamma-rs.ch/no_cache/software.html) or ROI_PAC format, and produces three dimensional (2D in space and 1D in time) ground surface displacement in line-of-sight direction. It includes a routine time series analysis (`smallbaselineApp.py`) and some independent toolbox.
-@@ -49,7 +41,7 @@ smallbaselineApp.py ${MINTPY_HOME}/mintpy/data/input_files/FernandinaSenDT128.tx
- ```
- 
- <p align="left">
--  <img width="600" src="https://yunjunzhang.files.wordpress.com/2019/06/fernandinasendt128_poi.jpg">
-+  <a href="https://yunjunzhang.files.wordpress.com/2019/06/fernandinasendt128_poi.jpg">https://yunjunzhang.files.wordpress.com/2019/06/fernandinasendt128_poi.jpg</a>
- </p>
- 
- Results are plotted in **./pic** folder. To explore more data information and visualization, try the following scripts:
-diff --git a/docs/_layouts/default_tactile.html b/docs/_layouts/default_tactile.html
-index dcc0755..0540c31 100644
---- a/docs/_layouts/default_tactile.html
-+++ b/docs/_layouts/default_tactile.html
-@@ -6,9 +6,6 @@
-     <link href='https://fonts.googleapis.com/css?family=Chivo:900' rel='stylesheet' type='text/css'>
-     <link rel="stylesheet" href="{{ '/assets/css/style.css?v=' | append: site.github.build_revision | relative_url }}">
-     <link rel="stylesheet" type="text/css" href="{{ '/assets/css/print.css' | relative_url }}" media="print">
--    <!--[if lt IE 9]>
--    <script src="//html5shiv.googlecode.com/svn/trunk/html5.js"></script>
--    <![endif]-->
-     <title>{{ site.title | default: site.github.repository_name }} by {{ site.github.owner_name }}</title>
-   </head>
- 
-@@ -17,9 +14,6 @@
-       <div class="inner">
- 
-         <header>
--          <p align="center">
--            <img src="https://yunjunzhang.files.wordpress.com/2017/08/logo_githubpage.pdf">
--          </p>
-         </header>
-         <section id="downloads" class="clearfix">
-           {% if site.github.is_project_page %}
-diff --git a/docs/api/colormaps.md b/docs/api/colormaps.md
-index 1c86fa5..5588441 100644
---- a/docs/api/colormaps.md
-+++ b/docs/api/colormaps.md
-@@ -8,8 +8,8 @@ MintPy support the following colormaps:
- 
- We recommend to use cyclic colormap `cmy` for wrapped phase/displacement measurement.
- 
--<p align="left">
--  <img width="280" src="https://yunjunzhang.files.wordpress.com/2020/01/cmap_cmy-1.png">
-+<p align="left"><a href="https://yunjunzhang.files.wordpress.com/2020/01/cmap_cmy-1.png">
-+  https://yunjunzhang.files.wordpress.com/2020/01/cmap_cmy-1.png</a>
- </p>
- 
- To use colormap `cmy` in view.py:
-@@ -29,9 +29,12 @@ cmap = ColormapExt('cmy').colromap
- 
- All GMT cpt files, e.g. the 20 built-in colormaps shown below, can be recognized by setting the variable `GMT_CPT_DIR` in `$MINTPY_HOME/mintpy/objects/colors.py`. The default hardwired value is `/opt/local/share/gmt/cpt` for macOS users with GMT installed using [MacPorts](https://www.macports.org).
- 
--<p align="left">
--  <img width="600" src="https://docs.generic-mapping-tools.org/5.4/_images/GMT_App_M_1a.png">
--  <img width="600" src="https://docs.generic-mapping-tools.org/5.4/_images/GMT_App_M_1b.png">
-+<p align="left"><a href="https://docs.generic-mapping-tools.org/5.4/_images/GMT_App_M_1a.png">
-+  https://docs.generic-mapping-tools.org/5.4/_images/GMT_App_M_1a.png</a>
-+</p>
-+
-+<p align="left"><a href="https://docs.generic-mapping-tools.org/5.4/_images/GMT_App_M_1b.png">
-+  https://docs.generic-mapping-tools.org/5.4/_images/GMT_App_M_1b.png</a>
- </p>
- 
- ### Colormaps from [cpt-city](http://soliton.vm.bytemark.co.uk/pub/cpt-city/views/totp-cpt.html) ###
-@@ -63,8 +66,8 @@ The following colormaps is included by default:
- + vikO (cyclic diverging)
- + More at [Scientific Color-Maps](http://www.fabiocrameri.ch/colourmaps.php) ([Crameri, 2018](https://doi.org/10.5194/gmd-11-2541-2018))
- 
--<p align="left">
--  <img src="https://yunjunzhang.files.wordpress.com/2021/01/scientificcolourmaps_fabiocrameri.png">
-+<p align="left"><a href="https://yunjunzhang.files.wordpress.com/2021/01/scientificcolourmaps_fabiocrameri.png">
-+  https://yunjunzhang.files.wordpress.com/2021/01/scientificcolourmaps_fabiocrameri.png</a>
- </p>
- 
- ### Interactive [web tool](https://jdherman.github.io/colormap/) to generate custom colormaps by Jon Herman ###
-diff --git a/docs/api/coord.md b/docs/api/coord.md
-index e1a7964..7ef06d6 100644
---- a/docs/api/coord.md
-+++ b/docs/api/coord.md
-@@ -11,4 +11,4 @@ Y_UNIT     degrees
- 
- X/Y_FIRST are the longitude/latitude value of the first (upper left corner) pixel’s upper left corner, as shown below:   
- 
--<img src="https://yunjunzhang.files.wordpress.com/2019/06/coord_index.png" width="400">
-\ No newline at end of file
-+<p><a href="https://yunjunzhang.files.wordpress.com/2019/06/coord_index.png">https://yunjunzhang.files.wordpress.com/2019/06/coord_index.png</a></p>
-diff --git a/docs/dask.md b/docs/dask.md
-index e8d5b4a..0edf1ee 100644
---- a/docs/dask.md
-+++ b/docs/dask.md
-@@ -54,7 +54,7 @@ A typical run time without local cluster is 30 secs and with 8 workers 11.4 secs
- 
- To show the run time improvement, we test three datasets (South Isabela, Fernandina, and Kilauea) with different number of cores and same amount of allocated memory (4 GB) on a compute node in the [Stampede2 cluster's skx-normal queue](https://portal.tacc.utexas.edu/user-guides/stampede2#overview-skxcomputenodes). Results are as below:
- 
--![Dask LocalCluster Performance](https://yunjunzhang.files.wordpress.com/2020/08/dask_local_cluster_performance.png)
-+[Dask LocalCluster Performance](https://yunjunzhang.files.wordpress.com/2020/08/dask_local_cluster_performance.png)
- 
- #### 1.5 Known problems ####
- 
-diff --git a/docs/demo_dataset.md b/docs/demo_dataset.md
-index 8af4a4c..474be19 100644
---- a/docs/demo_dataset.md
-+++ b/docs/demo_dataset.md
-@@ -13,8 +13,8 @@ cd FernandinaSenDT128/mintpy
- smallbaselineApp.py ${MINTPY_HOME}/mintpy/data/input_files/FernandinaSenDT128.txt     
- ```
- 
--<p align="left">
--  <img width="650" src="https://yunjunzhang.files.wordpress.com/2019/06/fernandinasendt128_poi.jpg">
-+<p align="left"><a href="https://yunjunzhang.files.wordpress.com/2019/06/fernandinasendt128_poi.jpg">
-+  https://yunjunzhang.files.wordpress.com/2019/06/fernandinasendt128_poi.jpg</a>
- </p>
- 
- Relevant literature:
-@@ -34,8 +34,8 @@ cd SanFranSenDT42/mintpy
- smallbaselineApp.py ${MINTPY_HOME}/mintpy/data/input_files/SanFranSenDT42.txt     
- ```
- 
--<p align="left">
--  <img width="650" src="https://yunjunzhang.files.wordpress.com/2020/11/sanfransendt42_transect.jpg">
-+<p align="left"><a href="https://yunjunzhang.files.wordpress.com/2020/11/sanfransendt42_transect.jpg">
-+  https://yunjunzhang.files.wordpress.com/2020/11/sanfransendt42_transect.jpg</a>
- </p>
- 
- Relevant literature:
-@@ -55,8 +55,8 @@ cd WellsEnvD2T399/mintpy
- smallbaselineApp.py ${MINTPY_HOME}/mintpy/data/input_files/WellsEnvD2T399.txt
- ```
- 
--<p align="left">
--  <img width="650" src="https://yunjunzhang.files.wordpress.com/2019/06/wellsenvd2t399_co_poi.jpg">
-+<p align="left"><a href="https://yunjunzhang.files.wordpress.com/2019/06/wellsenvd2t399_co_poi.jpg">
-+  https://yunjunzhang.files.wordpress.com/2019/06/wellsenvd2t399_co_poi.jpg</a>
- </p>
- 
- Relevant literature:
-@@ -102,8 +102,8 @@ cd KujuAlosAT422F650/mintpy
- smallbaselineApp.py ${MINTPY_HOME}/mintpy/data/input_files/KujuAlosAT422F650.txt     
- ```
- 
--<p align="left">
--  <img width="650" src="https://yunjunzhang.files.wordpress.com/2019/06/kujualosat422f650_vel.jpg">
-+<p align="left"><a href="https://yunjunzhang.files.wordpress.com/2019/06/kujualosat422f650_vel.jpg">
-+  https://yunjunzhang.files.wordpress.com/2019/06/kujualosat422f650_vel.jpg</a>
- </p>
- 
- Relevant literature:
-diff --git a/docs/google_earth.md b/docs/google_earth.md
-index eada9ef..c9dc193 100644
---- a/docs/google_earth.md
-+++ b/docs/google_earth.md
-@@ -4,8 +4,8 @@ MintPy use [pyKML](https://pythonhosted.org/pykml/) to generate KMZ (Keyhole Mar
- 
- `save_kmz_timeseries.py` takes 3D displacement time-series file and outputs a KMZ file with interactive time-seires plot.
- 
--<p align="center">
--  <img src="https://yunjunzhang.files.wordpress.com/2019/02/fernandinasendt128_ge-1.png">
-+<p align="center"><a href="https://yunjunzhang.files.wordpress.com/2019/02/fernandinasendt128_ge-1.png">
-+  https://yunjunzhang.files.wordpress.com/2019/02/fernandinasendt128_ge-1.png</a>
- </p>
- 
- [Download KMZ file](https://miami.box.com/v/FernandinaSenDT128TS)
-@@ -14,8 +14,8 @@ MintPy use [pyKML](https://pythonhosted.org/pykml/) to generate KMZ (Keyhole Mar
- 
- `save_kmz.py` takes any 2D matrix and outputs a KMZ file with a overlay image.
- 
--<p align="center">
--  <img src="https://yunjunzhang.files.wordpress.com/2019/02/vel_fernandinasendt128_ge.png">
-+<p align="center"><a href="https://yunjunzhang.files.wordpress.com/2019/02/vel_fernandinasendt128_ge.png">
-+  https://yunjunzhang.files.wordpress.com/2019/02/vel_fernandinasendt128_ge.png</a>
- </p>
- 
- [Download KMZ file](https://miami.box.com/v/FernandinaSenDT128VEL)
-@@ -34,8 +34,8 @@ The script samples the input 3D dataset at 3 levels of details by default (`--st
- 
- The low- and moderate-resolution LODs cover the entire region, while the high-resolution LOD covers only the actively deforming regions. These regions (red boxes below) are currently identified as boxes having >20% pixels with velocity magnitude > the global velocity median absolute deviation [[source code](https://github.com/insarlab/MintPy/blob/main/mintpy/save_kmz_timeseries.py#L160)].
- 
--<p align="center">
--  <img src="https://yunjunzhang.files.wordpress.com/2020/03/defo_area.png">
-+<p align="center"><a href="https://yunjunzhang.files.wordpress.com/2020/03/defo_area.png">
-+  https://yunjunzhang.files.wordpress.com/2020/03/defo_area.png</a>
- </p>
- 
- 2. Region-based Network Links
-diff --git a/docs/hdfeos5.md b/docs/hdfeos5.md
-index 0db8243..4b53498 100644
---- a/docs/hdfeos5.md
-+++ b/docs/hdfeos5.md
-@@ -90,4 +90,4 @@ HDF-EOS5 file format is used as the input of the University of Miami's web viewe
- 
- <p align="center"><b>http://insarmaps.miami.edu</b><br></p>
- 
--[![InSAR Web Viewer](https://yunjunzhang.files.wordpress.com/2019/06/web_viewer_kujualosat422.png)](http://insarmaps.miami.edu/)
-+[InSAR Web Viewer](https://yunjunzhang.files.wordpress.com/2019/06/web_viewer_kujualosat422.png)
diff -pruN 1.3.3-2/debian/patches/series 1.4.0-1/debian/patches/series
--- 1.3.3-2/debian/patches/series	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/patches/series	2022-08-05 14:44:15.000000000 +0000
@@ -1,3 +1 @@
-0001-fix-intepreter.patch
-0002-Spelling.patch
-0003-Fix-privacy-breachs.patch
+0001-Fix-privacy-breachs.patch
diff -pruN 1.3.3-2/debian/README.Debian 1.4.0-1/debian/README.Debian
--- 1.3.3-2/debian/README.Debian	2022-06-13 06:16:24.000000000 +0000
+++ 1.4.0-1/debian/README.Debian	2022-08-05 14:44:15.000000000 +0000
@@ -1,16 +1,16 @@
 MintPy for Debian
 -----------------
 
-On Debian systems MintPy command line scripts are installed, with their
-original names, in the /usr/share/mintpy folder, hence not included in
-the stahdad search PATH for executable programs.
+On Debian systems MintPy command line scripts are installed in the
+/usr/share/mintpy folder, hence not included in the standard search PATH
+for executable programs.
 
-In addition links with a "mintpy-" prefix are generated in the /usr/bin
-folder. E.g. the "/usr/share/mintpy/prep_snap.py" program is linked into
-"/usr/bin/" with the "mintpy-prep_snap" name.
+In addition, a link for the "mintpy" program is generated in the
+/usr/bin folder, i.e. the "/usr/share/mintpy/mintpy" program is linked
+into "/usr/bin/".
 
-In order to use the programs the user shall use the corresponding name
-with the "mintpy-" prefix or add the "/usr/share/mintpy/" folder to the
-executable programs search PATH.
+With the only exception of the "mintpy" program, the user shall add
+the "/usr/share/mintpy/" folder to the executable programs search PATH
+in order to use the MintPy command line programs.
 
- -- Antonio Valentino <antonio.valentino@tiscali.it>  Sat, 14 May 2022 16:38:03 +0000
+ -- Antonio Valentino <antonio.valentino@tiscali.it>  Fri, 05 Aug 2022 10:47:33 +0000
diff -pruN 1.3.3-2/debian/rules 1.4.0-1/debian/rules
--- 1.3.3-2/debian/rules	2022-06-28 03:11:52.000000000 +0000
+++ 1.4.0-1/debian/rules	2022-08-05 14:44:15.000000000 +0000
@@ -11,7 +11,7 @@ override_dh_auto_clean:
 
 override_dh_auto_test:
 	PYBUILD_SYSTEM=custom \
-        PYBUILD_TEST_ARGS="env PYTHONPATH={build_dir} {interpreter} $(CURDIR)/tests/test_asc_desc2horz_vert.py" \
+        PYBUILD_TEST_ARGS="env PYTHONPATH={build_dir} {interpreter} $(CURDIR)/tests/asc_desc2horz_vert.py" \
 	dh_auto_test
 
 override_dh_auto_build: export http_proxy=127.0.0.1:9
diff -pruN 1.3.3-2/debian/tests/python3 1.4.0-1/debian/tests/python3
--- 1.3.3-2/debian/tests/python3	2022-06-29 03:57:31.000000000 +0000
+++ 1.4.0-1/debian/tests/python3	2022-08-05 14:44:15.000000000 +0000
@@ -9,5 +9,5 @@ cd "$AUTOPKGTEST_TMP"
 
 for py in $PYS; do
     echo "=== $py ==="
-    $py ${TESTDIR}/test_asc_desc2horz_vert.py 2>&1
+    $py ${TESTDIR}/asc_desc2horz_vert.py 2>&1
 done
diff -pruN 1.3.3-2/docker/Dockerfile 1.4.0-1/docker/Dockerfile
--- 1.3.3-2/docker/Dockerfile	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/docker/Dockerfile	1970-01-01 00:00:00.000000000 +0000
@@ -1,44 +0,0 @@
-# Use mambaforge as the base image
-# Builds in ~ 4.25 min and is ~ 2.6 GB on a windows laptop
-FROM mambaorg/micromamba:0.15.3
-
-# Label image
-LABEL \
-    "Description"="Container for open source time series InSAR processing with Mintpy" \
-    "Github Source"="https://github.com/insarlab/MintPy/" \
-    "Installation"="https://github.com/insarlab/MintPy/blob/main/docs/installation.md" \
-    "Dockerfile Author"="Forrest Williams" \
-    "Email"="forrestfwilliams@icloud.com"
-
-# Install command line tools: git, vim and wget
-USER root
-RUN apt-get update && \
-    apt-get install -y --no-install-recommends git vim wget && \
-    apt-get clean && \
-    rm -rf /var/lib/apt/lists/*
-
-# Setup path / environment for MintPy
-USER micromamba
-WORKDIR /home/micromamba
-
-ARG MINTPY_HOME=/home/micromamba/tools/MintPy
-ARG PYAPS_HOME=/home/micromamba/tools/PyAPS
-
-ENV PATH=${MINTPY_HOME}/mintpy:/opt/conda/bin:${PATH}
-ENV PYTHONPATH=${MINTPY_HOME}:${PYAPS_HOME}
-
-# Download source code
-RUN mkdir -p ${MINTPY_HOME} ${PYAPS_HOME} && \
-    git clone https://github.com/insarlab/MintPy.git ${MINTPY_HOME} && \
-    git clone https://github.com/yunjunz/PyAPS.git ${PYAPS_HOME}
-
-# Install dependencies
-# # # Optionally add Jupyter Lab to environment file
-# # RUN echo "  - jupyterlab\n  - ipympl" >> ${MINTPY_HOME}/docs/environment.yml
-
-# ADD mintpy.yml /tmp
-RUN micromamba install -y -n base -f ${MINTPY_HOME}/docs/environment.yml python=3.6 && \
-    micromamba clean --all --yes
-
-# # Have the container start with a Jupyter Lab instance
-# CMD ["jupyter", "lab", "--port=8888", "--no-browser", "--ip=0.0.0.0","--NotebookApp.token=mintpy"]
diff -pruN 1.3.3-2/Dockerfile 1.4.0-1/Dockerfile
--- 1.3.3-2/Dockerfile	1970-01-01 00:00:00.000000000 +0000
+++ 1.4.0-1/Dockerfile	2022-08-04 20:01:49.000000000 +0000
@@ -0,0 +1,44 @@
+# Builds in ~ 5 min and is ~ 3 GB on a linux laptop
+FROM mambaorg/micromamba:0.24.0
+
+# Label image following opencontainers image-spec annotations recommendation:
+# https://github.com/opencontainers/image-spec/blob/main/annotations.md
+LABEL org.opencontainers.image.description="Container for InSAR time series analysis with MintPy"
+LABEL org.opencontainers.image.authors="Forrest Williams <forrestfwilliams@icloud.com>, Joseph H Kennedy <me@jhkennedy.org>, Andre Theron <andretheronsa@gmail.com>"
+LABEL org.opencontainers.image.url="https://github.com/insarlab/MintPy"
+LABEL org.opencontainers.image.source="https://github.com/insarlab/MintPy"
+LABEL org.opencontainers.image.documentation="https://mintpy.readthedocs.io/en/latest/"
+LABEL org.opencontainers.image.licenses="GPL-3.0-or-later"
+
+# Dynamic labels to define at build time via `docker build --label`
+# LABEL org.opencontainers.image.created=""
+# LABEL org.opencontainers.image.version=""
+# LABEL org.opencontainers.image.revision=""
+
+USER root
+
+ARG DEBIAN_FRONTEND=noninteractive
+ENV PYTHONDONTWRITEBYTECODE=true
+
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends git vim wget && \
+    apt-get clean && \
+    rm -rf /var/lib/apt/lists/*
+
+USER mambauser
+WORKDIR /home/mambauser
+
+ENV PATH=/opt/conda/bin:${PATH}
+
+ARG MINTPY_HOME=/home/mambauser/tools/MintPy
+COPY --chown=mambauser:mambauser . ${MINTPY_HOME}/
+
+ARG PYTHON_VERSION="3.9"
+RUN micromamba install -y -n base -c conda-forge python=${PYTHON_VERSION}  \
+      jupyterlab ipympl gdal">=3" isce2 -f ${MINTPY_HOME}/requirements.txt && \
+    python -m pip install --no-cache-dir ${MINTPY_HOME} && \
+    micromamba clean --all --yes
+
+# Jupyter setup
+COPY --chown=mambauser:mambauser scripts/jupyter_notebook_config.py /home/mambauser/.jupyter/
+EXPOSE 8888
diff -pruN 1.3.3-2/docs/api/data_structure.md 1.4.0-1/docs/api/data_structure.md
--- 1.3.3-2/docs/api/data_structure.md	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/docs/api/data_structure.md	2022-08-04 20:01:49.000000000 +0000
@@ -75,7 +75,6 @@ Attributes         Dictionary for metada
 /unwrapPhase       3D array of float32 in size of (m, l, w) in radian.
 /coherence         3D array of float32 in size of (m, l, w).
 /connectComponent  3D array of int16   in size of (m, l, w).
-/ionoPhase         3D array of float32 in size of (m, l, w) in radian.
 /wrapPhase         3D array of float32 in size of (m, l, w) in radian.
 /azimuthOffset     3D array of float32 in size of (m, l, w) in pixel number.
 /azimuthOffsetStd  3D array of float32 in size of (m, l, w) in pixel number.
diff -pruN 1.3.3-2/docs/api/module_hierarchy.md 1.4.0-1/docs/api/module_hierarchy.md
--- 1.3.3-2/docs/api/module_hierarchy.md	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/docs/api/module_hierarchy.md	2022-08-04 20:01:49.000000000 +0000
@@ -9,6 +9,7 @@ Hierarchy of sub-modules within MintPy.
     /objects
         cluster
         colors
+        constants
         giant
         ramp
         sensor
@@ -17,15 +18,16 @@ Hierarchy of sub-modules within MintPy.
             l1
             l1regls
             lstl1
-        arg_group
-        constants
-        ptime
+        arg_utils
+        map
+        ptime (including progress)
         utils0
     /simulation
         fractal
 ------------------ level 1 --------------------
     /objects
         conncomp      (objects/ramp)
+        ionex         (objects/ptime, utils/map)
         stack         (utils/ptime)
     /utils
         time_func     (utils/ptime)
@@ -35,32 +37,32 @@ Hierarchy of sub-modules within MintPy.
         variance      (utils/ptime)
 ------------------ level 2 --------------------
     /utils
-        readfile      (utils/{utils0}, objects/{stack, giant, sensor})
-        s1_utils      (utils/{ptime, time_func})
+        readfile      (objects/{stack, giant, sensor}, utils/{ptime, utils0})
+        s1_utils      (objects/{stack}, utils/{ptime, time_func})
 ------------------ level 3 --------------------
     /objects
-        resample      (utils/{utils0, ptime,  readfile})
+        resample      (utils/{utils0, ptime, readfile})
         coord         (utils/{utils0, utils1, readfile})
     /simulation
-        iono          (utils/{utils0, ptime,  readfile})
+        iono          (utils/{utils0, readfile})
     /utils
         writefile     (utils/{readfile})
         network       (objects/{stack, sensor}, utils/{readfile})
 ------------------ level 4 --------------------
     /objects
-        gps           (objects/{stack, coord},  utils/{ptime, utils1, readfile})
-        stackDict     (objects/{stack},         utils/{ptime, utils0, readfile})
+        gps           (objects/{stack, coord}, utils/{ptime, utils1, readfile})
+        stackDict     (objects/{stack}, utils/{ptime, utils0, readfile})
     /simulation
-        simulation    (objects/{stack},         utils/{ptime, network}, simulation/{fractal, decorrelation, defo_model})
+        simulation    (objects/{stack}, utils/{ptime, network}, simulation/{fractal, decorrelation, defo_model})
     /utils
-        attribute     (objects/{coord},         utils/{readfile})
-        utils1        (objects/{stack, ramp},   utils/{ptime, utils0, readfile, writefile})
+        attribute     (objects/{coord}, utils/{readfile})
+        utils1        (objects/{stack, ramp}, utils/{ptime, utils0, readfile, writefile})
 ------------------ level 5 --------------------
     /utils
-        plot          (objects/{stack, coord, colors},  utils/{ptime, utils0, readfile, network})
-        utils         (objects/{stack, coord},          utils/{ptime, utils1, readfile})
-        isce_utils    (utils/{ptime, readfile, writefile, utils1})
+        plot          (objects/{stack, coord, colors}, utils/{ptime, utils0, readfile, network, map})
+        utils         (objects/{stack, coord, resample}, utils/{ptime, attribute, utils1, readfile})
+        isce_utils    (objects/{constants}, utils/{ptime, readfile, writefile, attribute, utils1})
 ------------------ level 6 --------------------
     /objects
-        insar_vs_gps  (objects/{stack, giant},          utils/{readfile, gps, plot, utils})
+        insar_vs_gps  (objects/{stack, giant}, utils/{readfile, gps, plot, utils})
 ```
diff -pruN 1.3.3-2/docs/CONTRIBUTING.md 1.4.0-1/docs/CONTRIBUTING.md
--- 1.3.3-2/docs/CONTRIBUTING.md	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/docs/CONTRIBUTING.md	2022-08-04 20:01:49.000000000 +0000
@@ -92,10 +92,10 @@ Some things that will increase the chanc
 
 ## Testing ##
 
-It's a good idea to test any changes or bugs you have fixed, in the feature branch locally, before issuing/submitting the pull request. We realize that we don't have a complete testing system in place yet (maybe you can contribute this!), except for an overall testing script `test_smallbaselineApp.py`:
+It's a good idea to test any changes or bug fixes locally in the feature branch before issuing/submitting the pull request. We realize that we don't have a complete testing system in place yet (maybe you can contribute this!), except for an overall testing script for `smallbaselineApp.py`:
 
 ```
-${MINTPY_HOME}/tests/test_smallbaselineApp.py
+${MINTPY_HOME}/tests/smallbaselineApp.py
 ```
 
 It takes about 15 mins to finish.
diff -pruN 1.3.3-2/docs/docker.md 1.4.0-1/docs/docker.md
--- 1.3.3-2/docs/docker.md	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/docs/docker.md	2022-08-04 20:01:49.000000000 +0000
@@ -1,35 +1,67 @@
-## Running as Docker container
+## The MintPy Docker container ##
 
-Thanks to Andre Theron for putting together an [MintPy container on DockerHub](https://hub.docker.com/r/forrestwilliams/mintpy). [Docker](https://docs.docker.com/get-started/) allows you to run MintPy in a dedicated container (essentially an efficient virtual machine). [Here](https://docs.docker.com/install/) is the instruction to install docker.
+[Docker](https://docs.docker.com/get-started/) allows you to run MintPy in a dedicated container, which is essentially an efficient virtual machine. Check [here](https://docs.docker.com/install/) for the installation instructions.
 
-To pull the MintPy container from Dockerhub to your local machine: 
+### 1. Pulling the mintpy Docker image ###
 
-```
-docker pull forrestwilliams/mintpy:latest
+We publish the mintpy Docker images in the [GitHub Container Registry](https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry) at [ghcr.io/insarlab/mintpy](https://github.com/insarlab/MintPy/pkgs/container/mintpy). 
+
+The latest stable released version can be pulled to your local machine via the `latest` tag as:
+
+```shell
+docker pull ghcr.io/insarlab/mintpy:latest
 ```
 
-To start an interactive shell session in the container from the terminal, with bash for example: 
+The latest development version (the current HEAD of the main branch) can be pulled via the `develop` tag as:
 
+```shell
+docker pull ghcr.io/insarlab/mintpy:develop
 ```
-docker run -it forrestwilliams/mintpy:latest bash
+
+Note that both `latest` and `develop` are *rolling* tags, meaning they change as MintPy evolves. Thus, in a production system, one may want to use a specific version for reproducibility. This is available (since version 1.3.3) via the version tag as:
+
+```shell
+docker pull ghcr.io/insarlab/mintpy:v1.3.3
 ```
 
-To map data on the host (local) machine to the container use [volumes](https://docs.docker.com/storage/volumes/):
+### 2. Running the mintpy Docker container ###
+
+Run the following to start an interactive shell session in the container with a host path to the data directory using [volumes](https://docs.docker.com/storage/volumes/):
 
+```shell
+docker run -it -v </path/to/data/dir>:/home/mambauser/data ghcr.io/insarlab/mintpy:latest
+# use "docker run --name" option to name the container, e.g. "--name mintpy"
+# then enter the running container as "docker exec -it mintpy bash"
+
+# now inside the container
+cd data/FernandinaSenDT128/mintpy
+smallbaselineApp.py FernandinaSenDT128.txt
 ```
-docker run -it -v /path/to/data/dir:/home/work/ forrestwilliams/mintpy:latest bash
+
+Or run mintpy executables directly as:
+
+```shell
+docker run -it -v </path/to/data/dir>:/home/mambauser/data ghcr.io/insarlab/mintpy:latest smallbaselineApp.py --help
+docker run -it -v </path/to/data/dir>:/home/mambauser/data ghcr.io/insarlab/mintpy:latest smallbaselineApp.py /home/mambauser/data/FernandinaSenDT128/mintpy/FernandinaSenDT128.txt
 ```
 
-Background processing is possible using something like:  
+Or run the following to launch the Jupyter Lab server, then copy and paste the printed `http://localhost:8888/lab?token=` URL into a browser.
 
+```shell
+# to launch a Jupyter Notebook frontend, replace "lab" with "notebook" in the command below
+docker run -p 8888:8888 -it ghcr.io/insarlab/mintpy:latest jupyter lab
 ```
-docker run -it -v /path/to/data/dir:/home/work/ forrestwilliams/mintpy:latest python /home/python/MintPy/mintpy/smallbaselineApp.py /home/work/smallbaselineApp.cfg
+
+Or launch the Jupyter server with custom startup options as:
+
+```shell
+docker run -p 8888:8888 -it ghcr.io/insarlab/mintpy:latest jupyter {lab,notebook} [JUPYTER_OPTIONS]
+# to see all the custom startup options:
+docker run -p 8888:8888 -it ghcr.io/insarlab/mintpy:latest jupyter {lab,notebook} --help-all
 ```
 
 ### Notes ###
 
-+ The container may have strong permissions for directories you map to it.   
-
-+ Container was built on `insarlab/main` - should be updated with new releases.  
++ The container image is built using the [mambaorg/micromamba](https://hub.docker.com/r/mambaorg/micromamba) as a base. To manage conda environments inside the container use the `micromamba` command. For more information on micromamba, see: https://github.com/mamba-org/mamba#micromamba
 
-+ Needs further testing and improvement - can be made smaller (use Alpine instead of Debian...)  
++ Docker tightly maps user/group ids (uid/gid) inside and outside the container. By default, a `mambauser` with `uid=1000` and `gid=1000` will run inside the container and write files as that user. If you mount in a volume, files written to that volume will be owned by the *user on your local machine* with `uid=1000` and `gid=1000`. On Linux and macOS these are the default uid/gid values, but on a shared or managed system, they may not be *your* uid/gid values. You can override the user running inside the container with the `--user` argument to `docker run`, see: https://docs.docker.com/engine/reference/run/#user
diff -pruN 1.3.3-2/docs/environment.yml 1.4.0-1/docs/environment.yml
--- 1.3.3-2/docs/environment.yml	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/docs/environment.yml	2022-08-04 20:01:49.000000000 +0000
@@ -20,7 +20,7 @@ dependencies:
   - lxml
   - matplotlib
   - numpy
-  - pyaps3
+  - pyaps3>=0.3
   - pykml>=0.2
   - pyproj
   - pysolid
diff -pruN 1.3.3-2/docs/installation.md 1.4.0-1/docs/installation.md
--- 1.3.3-2/docs/installation.md	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/docs/installation.md	2022-08-04 20:01:49.000000000 +0000
@@ -10,19 +10,32 @@ conda install -c conda-forge mintpy
 
 #### b. via docker ####
 
-Docker allows one to run MintPy in a dedicated container (essentially an efficient virtual machine) and to be independent of platform OS. After installing [docker](https://docs.docker.com/install/), run the following to pull the [MintPy container from DockerHub](https://hub.docker.com/r/forrestwilliams/mintpy) to your local machine, check more details at [here](docker.md).
+Docker allows one to run MintPy in a dedicated container, which is essentially an efficient virtual machine, and to be independent of the platform OS. First, install [docker](https://docs.docker.com/install/) if you have not already done so. Then run the following to pull the latest stable released container image version from the [MintPy GitHub Container Registry](https://github.com/insarlab/MintPy/pkgs/container/mintpy) to your local machine:
 
 ```bash
-docker pull forrestwilliams/mintpy:1.3.1
+docker pull ghcr.io/insarlab/mintpy:latest
 ```
 
-Then complete the [post-installation setup](#3-post-installation-setup).
+Check [here](docker.md) for more details on Docker container image usage, e.g. pulling the development version and running in a shell or as a Jupyter server.
+
+#### c. via apt (Linux Debian) ####
+
+MintPy is available in the main archive of the [Debian](https://tracker.debian.org/pkg/mintpy) GNU/Linux OS. It can be installed by using your favourite package manager or running the following command:
+
+```bash
+apt install mintpy
+```
+
+The same procedure, in principle, can be used in [Ubuntu](https://ubuntu.com) and all the other [Debian derivatives](https://wiki.debian.org/Derivatives/Census). Check [here](https://salsa.debian.org/debian-gis-team/mintpy/-/blob/master/debian/README.Debian) for more detailed usage.
+
+
+#### d. [Post-Installation Setup](#3-post-installation-setup) ####
 
 ## 2. Install the development version ##
 
-Note: The installation note below is tested on Linux and macOS, and is still experimental on Windows (may has bugs).
+Note: The installation note below is tested on Linux and macOS, and is still experimental on Windows (may have bugs).
 
-MintPy is written in Python 3 and relies on several Python modules, check the [requirements.txt](https://github.com/insarlab/MintPy/blob/main/docs/requirements.txt) file for details. We recommend using [conda](https://docs.conda.io/en/latest/miniconda.html) or [macports](https://www.macports.org/install.php) to install the python environment and the prerequisite packages, because of the convenient management and default [performance setting with numpy/scipy](http://markus-beuckelmann.de/blog/boosting-numpy-blas.html) and [pyresample](https://pyresample.readthedocs.io/en/latest/installation.html#using-pykdtree).
+MintPy is written in Python 3 and relies on several Python modules, check the [requirements.txt](https://github.com/insarlab/MintPy/blob/main/requirements.txt) file for details. We recommend using [conda](https://docs.conda.io/en/latest/miniconda.html) or [macports](https://www.macports.org/install.php) to install the python environment and the prerequisite packages, because of the convenient management and default [performance setting with numpy/scipy](http://markus-beuckelmann.de/blog/boosting-numpy-blas.html) and [pyresample](https://pyresample.readthedocs.io/en/latest/installation.html#using-pykdtree).
 
 Quick links:
 
@@ -43,10 +56,9 @@ git clone https://github.com/insarlab/Mi
 
 #### b. Install dependencies via conda ####
 
-Install [miniconda](https://docs.conda.io/en/latest/miniconda.html) if you have not already done so. You may need to close and restart the shell for changes to take effect. 
+Install [miniconda](https://docs.conda.io/en/latest/miniconda.html) if you have not already done so. You may need to close and restart the shell for changes to take effect.
 
 ```bash
-# download and install miniconda
 # use wget or curl to download in command line or click from the web browser
 # for macOS, use Miniconda3-latest-MacOSX-x86_64.sh instead.
 wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh
@@ -54,15 +66,15 @@ bash Miniconda3-latest-Linux-x86_64.sh -
 ~/tools/miniconda3/bin/conda init bash
 ```
 
-Install the dependencies into an custom existing environment [recommended] by running:
+Install the dependencies into a custom existing environment [recommended] by running:
 
 ```bash
 # To create a new custom environment, e.g. named "insar", run "conda create --name insar; conda activate insar"
-# To speedup conda install, try "conda install mamba", then use "mamba install" to replace "conda install"
+# To speedup, try "conda install mamba", then use "mamba install" to replace "conda install" below
 
 # Add "gdal'>=3'" below to install extra dependencies if you use ARIA, FRInGE, HyP3 or GMTSAR
 # Add "isce2"     below to install extra dependencies if you use ISCE-2
-conda install -c conda-forge --file ~/tools/MintPy/docs/requirements.txt
+conda install -c conda-forge --file ~/tools/MintPy/requirements.txt
 ```
 
 Or install the dependencies to a new environment named "mintpy" by running:
@@ -86,7 +98,7 @@ Or install MintPy with pip in developmen
 python -m pip install -e MintPy
 ```
 
-Or simply setup the environment variables as below in your source file (e.g. `~/.bash_profile` for _bash_ users or `~/.cshrc` for _csh/tcsh_ users), because MintPy is written in pure Python:
+Or simply set up the environment variables as below in your source file, e.g. `~/.bash_profile` for _bash_ users or `~/.cshrc` for _csh/tcsh_ users:
 
 ```bash
 if [ -z ${PYTHONPATH+x} ]; then export PYTHONPATH=""; fi
@@ -118,7 +130,7 @@ Same as the [instruction for Linux](#21-
 
 #### b. Install MintPy via MacPorts ####
 
-Same as the [instruction for Linux](#21-install-on-linux), except for the dependencies installation, which is as below.
+Same as the [instruction for Linux](#21-install-on-linux), except for the dependencies' installation, which is as below.
 
 Install [macports](https://www.macports.org/install.php) if you have not done so. Add the following at the bottom of your `~/.bash_profile` file:
 
@@ -159,9 +171,9 @@ Same as the [instruction for Linux](#21-
 
 #### a. ERA5 for tropospheric correction ####
 
-Setup an account for ERA5 to download weather re-analysis datasets for tropospheric delay correction as described in [insarlab/PyAPS](https://github.com/insarlab/pyaps#2-account-setup-for-era5).
+Set up an account for ERA5 to download weather re-analysis datasets for tropospheric delay correction as described in [insarlab/PyAPS](https://github.com/insarlab/pyaps#2-account-setup-for-era5).
 
-`WEATHER_DIR`: Optionally, if you defined an environment variable named `WEATHER_DIR` to contain the path to a directory, MintPy applications will download the GAM files into the indicated directory. Also, MintPy application will look for the GAM files in the directory before downloading a new one to prevent downloading multiple copies if you work with different dataset that cover the same date/time.
+`WEATHER_DIR`: Optionally, if you defined an environment variable named `WEATHER_DIR` to contain the path to a directory, MintPy will download the GAM files into the indicated directory. Also, MintPy will look for the GAM files in the directory before downloading a new one to prevent downloading multiple copies if you work with different datasets that cover the same date/time.
 
 #### b. Dask for parallel processing ####
 
@@ -170,6 +182,6 @@ We recommend setting the `temporary-dire
 ```yaml
 temporary-directory: /tmp  # Directory for local disk like /tmp, /scratch, or /local
 
-## If you are sharing the same machine with others, use the following instead to avoid permission issues with others.
-# temporary-directory: /tmp/{replace_this_with_your_user_name}  # Directory for local disk like /tmp, /scratch, or /local
+# If you are sharing the same machine with others, use the following instead to avoid permission issues with others.
+# temporary-directory: /tmp/{replace_this_with_your_user_name}
 ```
diff -pruN 1.3.3-2/docs/README.md 1.4.0-1/docs/README.md
--- 1.3.3-2/docs/README.md	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/docs/README.md	2022-08-04 20:01:49.000000000 +0000
@@ -1,8 +1,8 @@
 [![Language](https://img.shields.io/badge/python-3.6%2B-blue.svg)](https://www.python.org/)
-[![CircleCI](https://img.shields.io/circleci/build/github/insarlab/MintPy.svg?logo=circleci&label=test)](https://circleci.com/gh/insarlab/MintPy)
+[![CircleCI](https://img.shields.io/circleci/build/github/insarlab/MintPy.svg?logo=circleci&label=CI)](https://circleci.com/gh/insarlab/MintPy)
 [![Docs Status](https://readthedocs.org/projects/mintpy/badge/?color=green&version=latest)](https://mintpy.readthedocs.io/?badge=latest)
-[![Version](https://img.shields.io/github/v/release/insarlab/MintPy?color=green)](https://github.com/insarlab/MintPy/releases)
-[![License](https://img.shields.io/badge/license-GPLv3-yellow.svg)](https://github.com/insarlab/MintPy/blob/main/LICENSE)
+[![Version](https://img.shields.io/github/v/release/insarlab/MintPy?color=green&label=version)](https://github.com/insarlab/MintPy/releases)
+[![License](https://img.shields.io/badge/license-GPLv3+-yellow.svg)](https://github.com/insarlab/MintPy/blob/main/LICENSE)
 [![Forum](https://img.shields.io/badge/forum-Google%20Groups-orange.svg)](https://groups.google.com/g/mintpy)
 [![Citation](https://img.shields.io/badge/doi-10.1016%2Fj.cageo.2019.104331-blue)](https://doi.org/10.1016/j.cageo.2019.104331)
 
@@ -61,13 +61,13 @@ tsview.py                  #1D point tim
 plot_coherence_matrix.py   #plot coherence matrix for one pixel (interactive)
 plot_network.py            #plot network configuration of the dataset    
 plot_transection.py        #plot 1D profile along a line of a 2D matrix (interactive)
-save_kmz.py                #generate Google Earth KMZ file in raster image
-save_kmz_timeseries.py     #generate Goodle Earth KMZ file in points for time-series (interactive)
+save_kmz.py                #generate Google Earth KMZ file in points or raster image
+save_kmz_timeseries.py     #generate Google Earth KMZ file in points for time-series (interactive)
 ```
 
 #### 2.2 Customized processing recipe ####
 
-MintPy is a toolbox with individual utility scripts. Simply run the script with `-h / --help` to see its usage, you could build your own customized processing recipe! [Here](../mintpy/sh/compare_velocity_with_diff_tropo.sh) is an example to compare the velocities estimated from displacement time-series with different tropospheric delay corrections.
+MintPy is a toolbox with individual utility scripts. Simply run the script with `-h / --help` to see its usage; you could build your own customized processing recipe! [Here](../scripts/compare_velocity_with_diff_tropo.sh) is an example to compare the velocities estimated from displacement time-series with different tropospheric delay corrections.
 
 #### 2.3 Build on top of `mintpy` module ####
 
diff -pruN 1.3.3-2/docs/requirements.txt 1.4.0-1/docs/requirements.txt
--- 1.3.3-2/docs/requirements.txt	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/docs/requirements.txt	1970-01-01 00:00:00.000000000 +0000
@@ -1,28 +0,0 @@
-# requirements4rtd.txt for readthedocs, which uses pip with limited memory usage
-# requirements.txt     for dependency installation via conda
-# environment.yml      for dependency installation via conda and create a new environment
-# ports.txt            for dependency installation via mac-ports
-# setup.py             for mintpy     installation via pip after the dependency installation above
-python>=3.6
-pip
-cartopy
-cvxopt
-dask>=1.0
-dask-jobqueue>=0.3
-defusedxml
-h5py
-joblib
-lxml
-matplotlib
-numpy
-pyaps3
-pykml>=0.2
-pyproj
-pysolid
-scikit-image
-scipy
-# for ARIA, FRInGE, HyP3, GMTSAR
-# gdal>=3
-# for pyresample
-pyresample
-openmp
diff -pruN 1.3.3-2/.github/workflows/build-docker.yml 1.4.0-1/.github/workflows/build-docker.yml
--- 1.3.3-2/.github/workflows/build-docker.yml	1970-01-01 00:00:00.000000000 +0000
+++ 1.4.0-1/.github/workflows/build-docker.yml	2022-08-04 20:01:49.000000000 +0000
@@ -0,0 +1,64 @@
+name: build docker image
+
+on:
+  push:
+    branches:
+      - main
+    tags:
+      - v*
+  pull_request:
+    branches:
+      - main
+
+jobs:
+  dockerize:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+        with:
+          fetch-depth: 0
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v1
+
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v1
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Set environment variables for docker build
+        run: |
+          # Lowercase repo for Github Container Registry
+          echo "REPO=${GITHUB_REPOSITORY,,}" >>${GITHUB_ENV}
+          # Ensure tags are checked out
+          git fetch origin +refs/tags/*:refs/tags/*
+          # Version number from tag
+          echo "MINTPY_VERSION=$(git describe --tags)" >> $GITHUB_ENV
+
+      - name: Build, tag, and push image to Github Container Registry
+        uses: docker/build-push-action@v2
+        with:
+          context: .
+          push: ${{ github.event_name != 'pull_request' }}
+          tags: |
+            ghcr.io/${{ env.REPO }}:${{ env.MINTPY_VERSION }}
+          labels: |
+            org.opencontainers.image.created=${{ env.CI_JOB_TIMESTAMP }}
+            org.opencontainers.image.version=${{ env.MINTPY_VERSION }}
+            org.opencontainers.image.revision=${{ github.sha }}
+
+      - name: Add develop tag
+        if: github.ref == 'refs/heads/main'
+        uses: akhilerm/tag-push-action@v1.0.0
+        with:
+          src: ghcr.io/${{ env.REPO }}:${{ env.MINTPY_VERSION }}
+          dst: ghcr.io/${{ env.REPO }}:develop
+
+      - name: Add latest tag
+        if: startsWith(github.ref, 'refs/tags/v')
+        uses: akhilerm/tag-push-action@v1.0.0
+        with:
+          src: ghcr.io/${{ env.REPO }}:${{ env.MINTPY_VERSION }}
+          dst: ghcr.io/${{ env.REPO }}:latest
diff -pruN 1.3.3-2/.gitignore 1.4.0-1/.gitignore
--- 1.3.3-2/.gitignore	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/.gitignore	2022-08-04 20:01:49.000000000 +0000
@@ -1,15 +1,134 @@
 *.DS_Store
 *.pyc
 *.idea/*
-*ipynb_checkpoints*
-*.egg*
+isce.log
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
 build/
+develop-eggs/
 dist/
-docs/api_docs/
-docs/resources/*logo*
-docs/resources/*.ai
-docs/resources/*.pdf
-docs/deps.svg
-isce.log
-mintpy/modis*
-site/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+#  Usually these files are written by a python script from a template
+#  before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+#   However, in case of collaboration, if having platform-specific dependencies or dependencies
+#   having no cross-platform support, pipenv may install dependencies that don't work, or not
+#   install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
\ No newline at end of file
diff -pruN 1.3.3-2/mintpy/add_attribute.py 1.4.0-1/mintpy/add_attribute.py
--- 1.3.3-2/mintpy/add_attribute.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/add_attribute.py	2022-08-04 20:01:49.000000000 +0000
@@ -91,7 +91,7 @@ def main(argv=None):
     # add attributes to file
     update_file_attribute(fname=infile, atr_new=atr_new)
 
-    return infile
+    return
 
 
 ################################################################################
diff -pruN 1.3.3-2/mintpy/add.py 1.4.0-1/mintpy/add.py
--- 1.3.3-2/mintpy/add.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/add.py	2022-08-04 20:01:49.000000000 +0000
@@ -8,10 +8,10 @@
 
 import os
 import sys
-import argparse
 import numpy as np
 from mintpy.objects import timeseries
 from mintpy.utils import readfile, writefile
+from mintpy.utils.arg_utils import create_argument_parser
 from mintpy.diff import check_reference
 
 
@@ -24,11 +24,13 @@ EXAMPLE = """example:
 """
 
 
-def create_parser():
+def create_parser(subparsers=None):
     """ Command line parser """
-    parser = argparse.ArgumentParser(description='Generate sum of multiple input files.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+    synopsis = 'Generate the sum of multiple input files.'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', nargs='+', help='files (2 or more) to be added')
     parser.add_argument('-o', '--output', dest='outfile', help='output file name')
@@ -170,10 +172,10 @@ def main(iargs=None):
     inps = cmd_line_parse(iargs)
     print('input files to be added: ({})\n{}'.format(len(inps.file), inps.file))
 
-    inps.outfile = add_file(inps.file, inps.outfile, force=inps.force)
+    add_file(inps.file, inps.outfile, force=inps.force)
 
     print('Done.')
-    return inps.outfile
+    return
 
 
 ################################################################################
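The hunks above replace each script's standalone `argparse.ArgumentParser` with `mintpy.utils.arg_utils.create_argument_parser`, which lets the same `create_parser()` serve both the standalone `add.py` invocation and a sub-command of a single top-level entry point. A minimal sketch of that pattern, assuming a simplified helper with the same signature (the real implementation lives in `mintpy/utils/arg_utils.py` and may differ in detail):

```python
import argparse

def create_argument_parser(name, synopsis=None, description=None,
                           epilog=None, subparsers=None):
    """Return a standalone parser, or register a sub-command when an
    argparse sub-parsers object is passed in (sketch, not MintPy's code)."""
    kwargs = dict(description=description, epilog=epilog,
                  formatter_class=argparse.RawTextHelpFormatter)
    if subparsers is None:
        return argparse.ArgumentParser(prog=name, **kwargs)
    return subparsers.add_parser(name, help=synopsis, **kwargs)

# the same create_parser(subparsers=...) then serves both invocation styles
top = argparse.ArgumentParser(prog='mintpy')
sub = top.add_subparsers(dest='command')
parser = create_argument_parser('add', synopsis='Generate the sum of multiple input files.',
                                description='Generate the sum of multiple input files.',
                                subparsers=sub)
parser.add_argument('file', nargs='+', help='files (2 or more) to be added')
print(top.parse_args(['add', 'a.h5', 'b.h5']).file)   # ['a.h5', 'b.h5']
```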
diff -pruN 1.3.3-2/mintpy/asc_desc2horz_vert.py 1.4.0-1/mintpy/asc_desc2horz_vert.py
--- 1.3.3-2/mintpy/asc_desc2horz_vert.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/asc_desc2horz_vert.py	2022-08-04 20:01:49.000000000 +0000
@@ -7,10 +7,10 @@
 
 
 import sys
-import argparse
 import numpy as np
 from mintpy.objects import sensor
 from mintpy.utils import ptime, readfile, writefile, utils as ut
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 ################################################################################
@@ -49,10 +49,13 @@ EXAMPLE = """example:
 """
 
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Project Asc and Desc LOS displacement to Horizontal and Vertical direction',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=REFERENCE+'\n'+EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Project Asc and Desc LOS displacement to Horizontal and Vertical direction'
+    epilog = REFERENCE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
+ 
     # input files
     parser.add_argument('file', nargs=2,
                         help='Ascending and descending files\n'
@@ -111,10 +114,10 @@ def cmd_line_parse(iargs=None):
     ref_y_diff = abs((ref_lat1 - ref_lat2) / float(atr1['Y_STEP']))
     ref_x_diff = abs((ref_lon1 - ref_lon2) / float(atr1['X_STEP']))
     if any(ref_diff > inps.max_ref_yx_diff for ref_diff in [ref_y_diff, ref_x_diff]):
-        msg = 'REF_Y/X difference between input files > {}!\n'.format(inps.max_ref_yx_diff)
-        for fname, ref_lalo, ref_yx in zip(inps.file, [ref_lalo1, ref_lalo2], [ref_yx1, ref_yx2]):
+        msg = 'REF_LAT/LON difference between input files > {} pixels!\n'.format(inps.max_ref_yx_diff)
+        for fname, ref_lat, ref_lon in zip(inps.file, [ref_lat1, ref_lat2], [ref_lon1, ref_lon2]):
             msg += 'file1: {}\n'.format(fname)
-            msg += '\tREF_LAT/LON: {}\n'.format(ref_lalo)
+            msg += '\tREF_LAT/LON: [{:.8f}, {:.8f}]\n'.format(ref_lat, ref_lon)
         raise ValueError(msg)
 
     return inps
@@ -153,7 +156,7 @@ def get_design_matrix4east_north_up(los_
     for i, (inc_angle, az_angle, obs_dir) in enumerate(zip(los_inc_angle, los_az_angle, obs_direction)):
         # calculate the unit vector
         if obs_dir == 'range':
-            # for range offset / InSAR phase [positive value for motion toward from satellite]
+            # for range offset / InSAR phase [with positive value for motion toward the satellite]
             ve = np.sin(np.deg2rad(inc_angle)) * np.sin(np.deg2rad(az_angle)) * -1
             vn = np.sin(np.deg2rad(inc_angle)) * np.cos(np.deg2rad(az_angle))
             vu = np.cos(np.deg2rad(inc_angle))
@@ -341,18 +344,18 @@ def run_asc_desc2horz_vert(inps):
     if inps.one_outfile:
         print('write asc/desc/horz/vert datasets into {}'.format(inps.one_outfile))
         dsDict = {}
-        for i, atr in enumerate(atr_list):
+        for i, atr_i in enumerate(atr_list):
             # dataset name for LOS data
-            track_num = atr.get('trackNumber', None)
-            proj_name = atr.get('PROJECT_NAME', None)
+            track_num = atr_i.get('trackNumber', None)
+            proj_name = atr_i.get('PROJECT_NAME', None)
             if proj_name in ['none', 'None', None]:
-                proj_name = atr.get('FILE_PATH', None)
+                proj_name = atr_i.get('FILE_PATH', None)
             proj_name = sensor.project_name2sensor_name(proj_name)[0]
 
             ds_name = proj_name if proj_name else ''
-            ds_name += 'A' if atr['ORBIT_DIRECTION'].lower().startswith('asc') else 'D'
+            ds_name += 'A' if atr_i['ORBIT_DIRECTION'].lower().startswith('asc') else 'D'
             ds_name += f'T{track_num}' if track_num else ''
-            ds_name += '_{}'.format(atr['DATE12'])
+            ds_name += '_{}'.format(atr_i['DATE12'])
 
             # assign dataset value
             dsDict[ds_name] = dlos[i]
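The unit vectors constructed in `get_design_matrix4east_north_up` above encode the projection v_los = -ve sin(inc) sin(az) + vn sin(inc) cos(az) + vu cos(inc). A self-contained sketch of how an ascending/descending pair is then inverted for the east and up components (the angles and displacements below are made-up illustration values; the real script handles the general multi-file case):

```python
import numpy as np

def unit_vector(inc_angle, az_angle):
    """ENU unit vector of a range (LOS) observation, positive toward the satellite."""
    inc, az = np.deg2rad(inc_angle), np.deg2rad(az_angle)
    return np.array([-np.sin(inc) * np.sin(az),   # east
                      np.sin(inc) * np.cos(az),   # north
                      np.cos(inc)])               # up

# hypothetical Sentinel-1-like geometry: one ascending + one descending track
G = np.vstack([unit_vector(34.0, -102.0),    # ascending
               unit_vector(39.0, -258.0)])   # descending
d_los = np.array([0.010, -0.015])            # LOS displacements in meters

# two LOS observations cannot resolve all three components; drop the
# weakly constrained north term and solve for [east, up]
ve, vu = np.linalg.solve(G[:, [0, 2]], d_los)
print(f've = {ve:.4f} m, vu = {vu:.4f} m')
```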
diff -pruN 1.3.3-2/mintpy/bulk_plate_motion.py 1.4.0-1/mintpy/bulk_plate_motion.py
--- 1.3.3-2/mintpy/bulk_plate_motion.py	1970-01-01 00:00:00.000000000 +0000
+++ 1.4.0-1/mintpy/bulk_plate_motion.py	2022-08-04 20:01:49.000000000 +0000
@@ -0,0 +1,369 @@
+#!/usr/bin/env python3
+############################################################
+# Program is part of MintPy                                #
+# Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi         #
+# Author: Yuan-Kai Liu, Zhang Yunjun, May 2022             #
+############################################################
+#
+# Extra dependency:
+#   + platemotion: https://github.com/lcx366/PlateTectonic
+#   + astropy
+#   How to install both:
+#      option (1) pip install platemotion
+#      option (2) git clone git@github.com:lcx366/PlateTectonic.git $TOOL_DIR/PlateTectonic
+#                 echo 'export PYTHONPATH=$PYTHONPATH:$TOOL_DIR/PlateTectonic' >> ~/.bashrc
+#                 then install the other dependencies listed in setup.py using your conda
+#
+# To-Do List (updated 2022.5.30 Yuan-Kai Liu):
+#   + Potentially, we can make built-in PMM tables/dictionaries for easier user string input of the plate name
+#   + Calculating the Euler rotation to multi-point ENU motion is slow (called by pmm2enu_at() here)
+#       In the `platemotion` package, use array operations rather than for loops
+#           https://github.com/lcx366/PlateTectonic/blob/main/platemotion/classes/plate.py#L153
+#   + Replace the platemotion package by equations of the Euler transformation to drop this dependency entirely?
+
+
+import os
+import sys
+import collections
+import numpy as np
+from skimage.transform import resize
+
+from mintpy.objects.resample import resample
+from mintpy.utils import readfile, writefile, utils as ut
+from mintpy.utils.arg_utils import create_argument_parser
+from mintpy.diff import diff_file
+
+# https://docs.astropy.org/en/stable/units/index.html
+try:
+    from platemotion import Plate
+    from astropy import units as u
+except ImportError:
+    msg = 'Can NOT import platemotion!'
+    msg += '\nCheck more details at https://github.com/lcx366/PlateTectonic.'
+    raise ImportError(msg)
+
+
+# ITRF2014-PMM defined in Altamimi et al. (2017)
+# units:
+#   omega_x/y/z in mas/yr (milli-second of arc per year)
+#   omega       in deg/Ma (degree per megayear or one-million-year)
+#   wrms_e/n    in mm/yr  (milli-meter per year), WRMS: weighted root mean scatter
+Tag = collections.namedtuple('Tag', 'name num_site omega_x omega_y omega_z omega wrms_e wrms_n')
+ITRF2014_PMM = {
+    'ANTA' : Tag('Antarctica' ,   7,  -0.248,  -0.324,   0.675,  0.219,  0.20,  0.16),
+    'ARAB' : Tag('Arabia'     ,   5,   1.154,  -0.136,   1.444,  0.515,  0.36,  0.43),
+    'AUST' : Tag('Australia'  ,  36,   1.510,   1.182,   1.215,  0.631,  0.24,  0.20),
+    'EURA' : Tag('Eurasia'    ,  97,  -0.085,  -0.531,   0.770,  0.261,  0.23,  0.19),
+    'INDI' : Tag('India'      ,   3,   1.154,  -0.005,   1.454,  0.516,  0.21,  0.21),
+    'NAZC' : Tag('Nazca'      ,   2,  -0.333,  -1.544,   1.623,  0.629,  0.13,  0.19),
+    'NOAM' : Tag('N. America' ,  72,   0.024,  -0.694,  -0.063,  0.194,  0.23,  0.28),
+    'NUBI' : Tag('Nubia'      ,  24,   0.099,  -0.614,   0.733,  0.267,  0.28,  0.36),
+    'PCFC' : Tag('Pacific'    ,  18,  -0.409,   1.047,  -2.169,  0.679,  0.36,  0.31),
+    'SOAM' : Tag('S. America' ,  30,  -0.270,  -0.301,  -0.140,  0.119,  0.34,  0.35),
+    'SOMA' : Tag('Somalia'    ,   3,  -0.121,  -0.794,   0.884,  0.332,  0.32,  0.30),
+}
+
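As a sanity check on the units above: a rate in mas/yr converts to deg/Ma by a factor of 1/3.6 (1 mas = 1e-3/3600 deg, 1 Ma = 1e6 yr), so the `omega` column should equal the magnitude of `(omega_x, omega_y, omega_z)` divided by 3.6. Verifying the Arabia row:

```python
import numpy as np

wx, wy, wz = 1.154, -0.136, 1.444              # ARAB omega_x/y/z in mas/yr
omega_mas_yr = np.sqrt(wx**2 + wy**2 + wz**2)  # ~1.8535 mas/yr
print(f'{omega_mas_yr / 3.6:.3f} deg/Ma')      # 0.515, matching the omega column
```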
+
+
+#########################################  Usage  ##############################################
+REFERENCE = """reference:
+  Stephenson, O. L., Liu, Y. K., Yunjun, Z., Simons, M., Rosen, P. and Xu, X., (2022), The Impact of
+    Plate Motions on Long-Wavelength InSAR-Derived Velocity Fields, Geophys. Res. Lett. (under review)
+    doi:10.1002/essoar.10511538.2
+  Peter, H., Fernández, M., Aguilar, J., & Fernández, J. (2021). Copernicus POD Product Handbook:
+    Copernicus Sentinel-1, -2 and -3 Precise Orbit Determination Service (CPOD) (GMV-CPOD-TN-0009).
+    https://sentinels.copernicus.eu/documents/247904/3372484/Sentinels-POD-Product-Handbook-1.19.pdf
+
+  # list of no-net-rotation (NNR) plate motion models (PMMs):
+  # ONLY ITRF14 should be used, as Sentinel-1's orbit is in ITRF2014 reference frame.
+  # Other values, e.g. MORVEL56, should be converted into ITRF2014 before use.
+  ITRF14 - Table 1 of Altamimi et al. (2017) - 11 plates
+    Altamimi, Z., Métivier, L., Rebischung, P., Rouby, H., & Collilieux, X. (2017).
+    ITRF2014 plate motion model. Geophysical Journal International, 209(3), 1906-1912.
+    doi:10.1093/gji/ggx136
+  MORVEL56 - Table 1 of Argus et al. (2011) - 56 plates
+    Argus, D. F., Gordon, R. G., & DeMets, C. (2011). Geologically current motion of 56
+    plates relative to the no-net-rotation reference frame. Geochemistry, Geophysics, Geosystems, 12(11).
+    doi:10.1029/2011GC003751
+"""
+
+EXAMPLE = """example:
+  # Cartesian form of Euler pole rotation in [wx, wy, wz] in unit of mas/year [milli arc second per year]
+  # e.g., Arabia plate in ITRF14-PMM (Table 1 in Altamimi et al., 2017)
+  bulk_plate_motion.py -g inputs/geometryGeo.h5   --om-cart 1.154 -0.136  1.444 -v velocity.h5
+  bulk_plate_motion.py -g inputs/geometryRadar.h5 --om-cart 1.154 -0.136  1.444
+
+  # Simple constant local ENU translation (based on one GNSS vector) in [ve, vn, vu] in unit of m/year
+  #   E.g., https://www.unavco.org/software/visualization/GPS-Velocity-Viewer/GPS-Velocity-Viewer.html
+  #   -> select 'GNSS Data source' as 'World, IGS08/NNR, GEM GSRM' (referenced to ITRF2008, NNR PMM)
+  #   -> check box `Station labels and data download` and click `Draw Map`
+  #   -> navigate to the region of interest,
+  #   -> click on a representative station,
+  #   -> get the "Speed components" in mm/yr.
+  bulk_plate_motion.py -g inputs/geometryGeo.h5 --enu 25.0 30.5 0.0 -v velocity.h5
+"""
+
+NOTE = """
+  Removing the effect of bulk translation and rotation based on a given plate motion model (PMM).
+  For Sentinel-1, its orbit is measured with respect to ITRF2014 (Table 3-2 of Peter et al., 2021), which is an
+  Earth-centered, Earth-fixed (ECEF) reference frame in which there is no net rotation (NNR) of the Earth surface.
+"""
+
+def create_parser(subparsers=None):
+    synopsis = 'Bulk Plate Motion Correction.'
+    epilog = REFERENCE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis+NOTE, epilog=epilog, subparsers=subparsers)
+
+    # input files
+    parser.add_argument('-g', '--geom', dest='geom_file', type=str, required=True,
+                        help='Input geometry file in geo-coordinates, e.g., geometryGeo.h5')
+    parser.add_argument('-v', '--velo', dest='vel_file', type=str,
+                        help='Input velocity file to be corrected.')
+    parser.add_argument('-o', '--output', dest='cor_vel_file', type=str,
+                        help='Output velocity file after the correction, default: add "_ITRF14" suffix.')
+
+    # plate motion configurations
+    pmm = parser.add_mutually_exclusive_group(required=True)
+    pmm.add_argument('--om-cart', dest='omega_cart', type=float, nargs=3, metavar=('WX', 'WY', 'WZ'), default=None,
+                     help='Cartesian form of Euler Pole rotation (unit: mas/yr) (default: %(default)s).')
+    pmm.add_argument('--om-sph', dest='omega_sph', type=float, nargs=3, metavar=('LAT', 'LON', 'W'), default=None,
+                     help='Spherical form of Euler Pole rotation (unit: deg, deg, deg/Ma) (default: %(default)s).')
+    pmm.add_argument('--enu', dest='const_vel_enu', type=float, nargs=3, metavar=('VE', 'VN', 'VU'), default=None,
+                     help='Constant local ground translation (unit: m/year) (default: %(default)s).')
+
+    parser.add_argument('--step','--pmm-step', dest='pmm_step', type=float, default=10.,
+                        help='Ground step/resolution in km for computing PMM to ENU velocity (default: %(default)s).')
+
+    return parser
+
+
+def cmd_line_parse(iargs=None):
+    parser = create_parser()
+    inps = parser.parse_args(args=iargs)
+
+    # default output filenames
+    geom_dir = os.path.dirname(inps.geom_file)
+    inps.pmm_enu_file = os.path.join(geom_dir, 'ITRF14ENU.h5')
+    inps.pmm_los_file = os.path.join(geom_dir, 'ITRF14.h5')
+
+    if inps.vel_file and not inps.cor_vel_file:
+        vbase = os.path.splitext(inps.vel_file)[0]
+        inps.cor_vel_file = os.path.abspath(f'{vbase}_ITRF14.h5')
+
+    return inps
+
+
+########################################## Sub Functions #############################################
+
+def build_plate_motion_model(omega_cart=None, omega_sph=None):
+    """Build a plate motion model based on the given Euler roation vector
+    Parameters: omega_sph  - list or np.array, Spherical representation [lat, lon, w] (deg, deg, deg/Ma)
+                omega_cart - list or np.array, Cartesian representation [wx, wy, wz] (mas/yr)
+    Returns:    plate      - platemotion.Plate object
+    """
+    # Check input variables
+    if (omega_cart is None) and (omega_sph is None):
+        raise ValueError('Neither omega_cart (wxyz) nor omega_sph (Euler Pole) is given! At least one is required.')
+
+    # Set a NaN moment of inertia (to get class Plate running)
+    iner_null = np.zeros([3,3]) * np.nan * (u.kg * u.km**2)
+
+    # Create an instance of class Plate from the `platemotion` pkg
+    plate = Plate(info={'inertia_tensor': iner_null})
+    if omega_cart is not None:
+        print('input omega_cartesian in [wx, wy, wz] (mas/yr)')
+        omega = np.array(omega_cart) * u.mas/u.yr
+        plate.set_omega(omega, 'cartesian')
+    else:
+        print('input omega_spherical in [lat, lon, w] (deg, deg, deg/Ma)')
+        omega = [omega_sph[0]*u.deg,
+                 omega_sph[1]*u.deg,
+                 omega_sph[2]*u.deg/u.Ma]
+        plate.set_omega(omega, 'spherical')
+
+    print('\n--------------------------------------')
+    print('Euler Pole and Rotation Vector')
+    print('in spherical coordinates:')
+    print('  Pole Latitude : {:10.4f} deg'.format(plate.omega_spherical[0].degree))
+    print('  Pole Longitude: {:10.4f} deg'.format(plate.omega_spherical[1].degree))
+    print('  Rotation rate : {:10.4f}  \n'.format(plate.omega_spherical[2].to(u.deg/u.Ma)))
+    print('in Cartesian coordinates:')
+    print('  wx: {:10.4f}'.format(plate.omega_cartesian[0]))
+    print('  wy: {:10.4f}'.format(plate.omega_cartesian[1]))
+    print('  wz: {:10.4f}'.format(plate.omega_cartesian[2]))
+    print('--------------------------------------\n')
+    return plate
+
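For reference, the Cartesian and spherical forms accepted by `build_plate_motion_model` above are related by standard Euler-pole trigonometry; a small sketch of the conversion, independent of the `platemotion` package:

```python
import numpy as np

def cart2sph_euler(wx, wy, wz):
    """Convert a rotation vector in mas/yr to an Euler pole
    [lat (deg), lon (deg), w (deg/Ma)]."""
    lat = np.degrees(np.arctan2(wz, np.hypot(wx, wy)))
    lon = np.degrees(np.arctan2(wy, wx))
    w = np.sqrt(wx**2 + wy**2 + wz**2) / 3.6   # mas/yr -> deg/Ma
    return lat, lon, w

# Arabia (ITRF14-PMM): pole near (51.2N, 6.7W) rotating at ~0.515 deg/Ma
print(cart2sph_euler(1.154, -0.136, 1.444))
```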
+
+def pmm2enu_at(pmm_obj, lats, lons):
+    """Evaluate the PMM at given lats/lons for the motion in ENU.
+
+    Parameters: pmm_obj - plate motion model instance
+                lats    - 0/1/2D array in float32, latitudes
+                lons    - 0/1/2D array in float32, longitudes
+    Returns:    ve/n/u  - 0/1/2D array in float32, plate motion in east / north / up
+                          in meter/year.
+    """
+    if isinstance(lats, float) or isinstance(lats, int):
+        loc = np.array([lats, lons, 0])
+        v = pmm_obj.velocity_at(loc,'geodetic')
+        ve = v.en[0]
+        vn = v.en[1]
+        vu = 0
+
+    elif lats.ndim in [1, 2]:
+        # prepare locations as array in size of (3, num_pts)
+        elev = np.zeros_like(lats)
+        locs = np.vstack((
+            lats.flatten(),
+            lons.flatten(),
+            elev.flatten(),
+        ))
+        # run PMM
+        v = pmm_obj.velocity_at(locs, 'geodetic')
+        ve = v.en[:, 0].reshape(lats.shape)
+        vn = v.en[:, 1].reshape(lats.shape)
+        vu = np.zeros(lats.shape, dtype=np.float32)
+
+    else:
+        raise ValueError(f'Un-recognized lat/lon grid dimension: {lats.ndim}!')
+
+    # convert from mm/year to meter/year
+    #     and from astropy.units.quantity.Quantity to np.ndarray
+    ve = np.array(ve, dtype=np.float32) * 1e-3
+    vn = np.array(vn, dtype=np.float32) * 1e-3
+    vu = np.array(vu, dtype=np.float32) * 1e-3
+
+    return ve, vn, vu
+
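Under the hood, a rigid-plate model evaluates v = omega x r, with omega the rotation vector in rad/yr and r the ground point's Earth-centered position; the result is then rotated into local ENU, which is what `v.en` above exposes. A minimal sketch of the first step under a spherical-Earth assumption (the 6371 km radius and the test coordinates are simplifications for illustration; `platemotion` uses a proper geodetic model):

```python
import numpy as np

MAS2RAD = np.pi / (180 * 3600 * 1000)   # milli-arcsecond -> radian
R_EARTH = 6.371e6                       # mean Earth radius in meters

def plate_velocity_ecef(omega_mas_yr, lat, lon):
    """Rigid-plate surface velocity v = omega x r in ECEF, in m/yr."""
    omega = np.asarray(omega_mas_yr) * MAS2RAD            # rad/yr
    lat, lon = np.deg2rad(lat), np.deg2rad(lon)
    r = R_EARTH * np.array([np.cos(lat) * np.cos(lon),
                            np.cos(lat) * np.sin(lon),
                            np.sin(lat)])                 # meters
    return np.cross(omega, r)

# Arabia plate (ITRF14-PMM) evaluated near 24N, 45E
v = plate_velocity_ecef([1.154, -0.136, 1.444], 24.0, 45.0)
print(np.linalg.norm(v))   # ~0.04 m/yr, i.e. a few cm/yr, as expected
```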
+
+####################################### Higher-level Sub Functions ##########################################
+
+def calc_bulk_plate_motion(geom_file, omega_cart=None, omega_sph=None, const_vel_enu=None,
+                           pmm_enu_file=None, pmm_los_file=None, pmm_step=10.):
+    """Estimate LOS motion due to pure bulk tranlation or due to plate rotation
+    Parameters: geom_file     - str, path to the input geometry file
+                omega_cart    - list or 1D array, Cartesian representation of plate rotation
+                                in [wx, wy, wz]  (mas/yr)
+                omega_sph     - list or 1D array, Spherical representation of plate rotation
+                                in [lat, lon, w] (deg, deg, deg/Ma)
+                const_vel_enu - list or 1D array, a single-vector [ve, vn, vu] (meter/year)
+                                simulating the bulk translation of the ground (e.g., from GNSS)
+                pmm_enu_file  - str, path to the output bulk plate motion in east, north, up direction
+                pmm_los_file  - str, path to the output bulk plate motion in LOS direction
+                pmm_step      - float, ground step/resolution for computing the plate rotation to ENU velocity (km)
+    Returns:    ve/vn/vu/vlos - 2D np.ndarray, bulk plate motion in east / north / up / LOS direction
+    """
+
+    # Get LOS geometry
+    atr_geo = ut.prepare_geo_los_geometry(geom_file, unit='deg')[2]
+    shape_geo = [int(atr_geo['LENGTH']), int(atr_geo['WIDTH'])]
+
+    ## Bulk motion model in the region
+    print('-'*50)
+    if omega_cart or omega_sph:
+        print('compute the bulk plate motion using a plate motion model (PMM; translation & rotation)')
+        if omega_cart is not None:
+            pmm_obj = build_plate_motion_model(omega_cart=omega_cart)
+        else:
+            pmm_obj = build_plate_motion_model(omega_sph=omega_sph)
+
+        # prepare the coarse grid
+        latc = float(atr_geo['Y_FIRST']) + float(atr_geo['Y_STEP']) * shape_geo[0] / 2
+        ystep = abs(int(pmm_step * 1000 / (float(atr_geo['Y_STEP']) * 108e3)))
+        xstep = abs(int(pmm_step * 1000 / (float(atr_geo['X_STEP']) * 108e3 * np.cos(np.deg2rad(latc)))))
+        ystep, xstep = max(ystep, 5), max(xstep, 5)
+        lats, lons = ut.get_lat_lon(atr_geo, dimension=2, ystep=ystep, xstep=xstep)
+
+        # transform PMM to ENU velocity on a coarse grid
+        # TODO: multi-pixel rotation is slow; vectorize the `platemotion` code with array operations instead of for-loops
+        print(f'compute PMM via platemotion.Plate: grid_size = {pmm_step} km, grid_shape = {lats.shape} ...')
+        ve_low, vn_low, vu_low = pmm2enu_at(pmm_obj, lats, lons)
+
+        # interpolate back to the initial grid
+        print(f'interpolate coarse PMM to the full resolution: {lats.shape} -> {shape_geo}'
+              ' via skimage.transform.resize ...')
+        kwargs = dict(order=1, mode='edge', anti_aliasing=True, preserve_range=True)
+        ve = resize(ve_low, shape_geo, **kwargs)
+        vn = resize(vn_low, shape_geo, **kwargs)
+        vu = resize(vu_low, shape_geo, **kwargs)
+
+    elif const_vel_enu:
+        print(f'compute the bulk plate motion using a single vector (translation): {const_vel_enu}')
+        ve = const_vel_enu[0] * np.ones(shape_geo, dtype=np.float32)
+        vn = const_vel_enu[1] * np.ones(shape_geo, dtype=np.float32)
+        vu = const_vel_enu[2] * np.ones(shape_geo, dtype=np.float32)
+
+
+    # radar-code PMM if input geometry is in radar coordinates
+    atr = readfile.read_attribute(geom_file)
+    if 'Y_FIRST' not in atr.keys():
+        print('radar-coding the bulk plate motion in ENU ...')
+        res_obj = resample(lut_file=geom_file)
+        res_obj.open()
+        res_obj.src_meta = atr_geo
+        res_obj.prepare()
+
+        # resample data
+        box = res_obj.src_box_list[0]
+        ve = res_obj.run_resample(src_data=ve[box[1]:box[3], box[0]:box[2]])
+        vn = res_obj.run_resample(src_data=vn[box[1]:box[3], box[0]:box[2]])
+        vu = res_obj.run_resample(src_data=vu[box[1]:box[3], box[0]:box[2]])
+
+
+    # Project model to LOS velocity
+    print('project the bulk plate motion from ENU onto LOS direction')
+    inc_angle = readfile.read(geom_file, datasetName='incidenceAngle')[0]
+    az_angle = readfile.read(geom_file, datasetName='azimuthAngle')[0]
+    vlos = ut.enu2los(ve, vn, vu, inc_angle=inc_angle, az_angle=az_angle)
+
+    # Save the bulk motion model velocity into HDF5 files
+    atr['FILE_TYPE'] = 'velocity'
+    atr['DATA_TYPE'] = 'float32'
+    atr['UNIT'] = 'm/year'
+    dsDict = {
+        'east'  : ve,
+        'north' : vn,
+        'up'    : vu,
+    }
+    writefile.write(dsDict, out_file=pmm_enu_file, metadata=atr)
+    writefile.write(vlos,   out_file=pmm_los_file, metadata=atr)
+
+    return ve, vn, vu, vlos
+
+
+def correct_bulk_plate_motion(vel_file, mfile, ofile):
+    """Apply the bulk motion correction from files.
+    """
+    file1 = vel_file       # input uncorrected LOS velocity file
+    file2 = [mfile]        # PMM LOS velocity file
+    diff_file(file1, file2, ofile)
+    return
+
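+# Note: diff_file() is expected to compute file1 minus file2, so `ofile` holds
+# the plate-motion-corrected LOS velocity. A usage sketch (hypothetical names):
+#   correct_bulk_plate_motion('velocity.h5', 'ITRF14.h5', 'velocity_ITRF14.h5')
+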
+
+#######################################  Main Function  ########################################
+def main(iargs=None):
+    inps = cmd_line_parse(iargs)
+
+    calc_bulk_plate_motion(
+        geom_file=inps.geom_file,
+        omega_cart=inps.omega_cart,
+        omega_sph=inps.omega_sph,
+        const_vel_enu=inps.const_vel_enu,
+        pmm_enu_file=inps.pmm_enu_file,
+        pmm_los_file=inps.pmm_los_file,
+        pmm_step=inps.pmm_step,
+    )
+
+    if inps.vel_file and inps.pmm_los_file and inps.cor_vel_file:
+        print('-'*50)
+        print('Correct input velocity for the bulk plate motion')
+        correct_bulk_plate_motion(inps.vel_file, inps.pmm_los_file, inps.cor_vel_file)
+
+    return
+
+################################################################################################
+if __name__ == '__main__':
+    main(sys.argv[1:])
diff -pruN 1.3.3-2/mintpy/closure_phase_bias.py 1.4.0-1/mintpy/closure_phase_bias.py
--- 1.3.3-2/mintpy/closure_phase_bias.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/closure_phase_bias.py	2022-08-04 20:01:49.000000000 +0000
@@ -2,164 +2,1174 @@
 ############################################################
 # Program is part of MintPy                                #
 # Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi         #
-# Author: Yujie Zheng, Feb 2022                            #
+# Author: Yujie Zheng, Zhang Yunjun, Feb 2022              #
 ############################################################
-# Compute average con-nl closure phase and output mask identifying areas suseptible to closure phase errors.
+# Recommend import:
+#   from mintpy import closure_phase_bias as cpbias
+
 
 import os
 import sys
-import argparse
+import time
 import numpy as np
+import glob
+from datetime import datetime as dt
 
-from mintpy.objects import ifgramStack
-from mintpy.utils import readfile, writefile
-from mintpy import ifgram_inversion as ifginv
+from mintpy.objects import ifgramStack, cluster
+from mintpy.utils import (
+    arg_utils,
+    ptime,
+    readfile,
+    writefile,
+    isce_utils,
+    utils as ut,
+)
+from mintpy.ifgram_inversion import estimate_timeseries
 
 
 ################################################################################
 REFERENCE = """reference:
-  Zheng, Y., et al., (2022) On Closure Phase and Systematic Bias in Multilooked SAR Interferometry, IEEE TGRS, under review (minor revision)
+  Y. Zheng, H. Fattahi, P. Agram, M. Simons and P. Rosen, (2022). On Closure Phase
+    and Systematic Bias in Multi-looked SAR Interferometry, in IEEE Trans. Geosci.
+    Remote Sens., doi:10.1109/TGRS.2022.3167648.
 """
+
 EXAMPLE = """example:
-  closure_phase_bias.py -i inputs/ifgramStack.h5 --nl 20 --numsigma 2.5
+  # Note: ONLY sequential network is supported in this implementation.
+
+  # create mask for areas susceptible to biases
+  closure_phase_bias.py -i inputs/ifgramStack.h5 --nl 5  -a mask
+  closure_phase_bias.py -i inputs/ifgramStack.h5 --nl 20 -a mask --num-sigma 2.5
+
+  # estimate non-closure phase bias time-series [quick and approximate solution]
+  closure_phase_bias.py -i inputs/ifgramStack.h5 --nl 5  --bw 3  -a quick_estimate --num-worker 6
+
+  # estimate non-closure phase bias time-series
+  closure_phase_bias.py -i inputs/ifgramStack.h5 --nl 5  --bw 3  -a estimate --num-worker 6 -c local
+  closure_phase_bias.py -i inputs/ifgramStack.h5 --nl 20 --bw 10 -a estimate --num-worker 6 -c local
 """
 
-def create_parser():
-    parser = argparse.ArgumentParser(description = 'Create an indication map for closure phase bias.')
-    parser.add_argument('-i','--ifgramstack',type = str, dest = 'ifgram_stack',help = 'interferogram stack file that contains the unwrapped phases')
-    parser.add_argument('--nl', dest = 'nl', type = int, default = 20, help = 'connection level that we are correcting to (or consider as no bias)')
-    parser.add_argument('--numsigma',dest = 'numsigma', type = float, default = 3, help = 'Threashold for phase (number of sigmas,0-infty), default to be 3 sigma of a Gaussian distribution (assumed distribution for the cumulative closure phase) with sigma = pi/sqrt(3*num_cp)')
-    parser.add_argument('--epi',dest = 'episilon', type = float, default = 0.3, help = 'Threashold for amplitude (0-1), default 0.3')
-    parser.add_argument('--maxMemory', dest = 'max_memory', type = float, default = 8, help = 'max memory to use in GB')
-    parser.add_argument('-o', dest = 'outdir', type = str, default = '.', help = 'output file directory')
+def create_parser(subparsers=None):
+    synopsis = 'Phase non-closure related biases correction'
+    epilog = REFERENCE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = arg_utils.create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
+
+    parser.add_argument('-i','--ifgramstack', type=str, dest='stack_file',
+                        help='interferogram stack file that contains the unwrapped phases')
+    parser.add_argument('--wm','--water-mask', dest='water_mask_file',
+                        help='Water mask to skip pixels on water body.\n'
+                             'Default: waterMask.h5 if exists, otherwise None.')
+
+    # bandwidth and bias free connection level
+    parser.add_argument('--nl','--conn-level', dest='nl', type=int, default=20,
+                        help='connection level that we are correcting to (or consider as no bias)\n'
+                             '(default: %(default)s)')
+    parser.add_argument('--bw', dest='bw', type=int, default=10,
+                        help='bandwidth of time-series analysis that you want to correct')
+
+    parser.add_argument('-a','--action', dest='action', type=str, default='mask',
+                        choices={'mask', 'quick_estimate', 'estimate'},
+                        help='action to take (default: %(default)s):\n'
+                             'mask           - create a mask of areas susceptible to closure phase errors\n'
+                             'quick_estimate - quick and approximate estimation on how bias decays with time\n'
+                             '                 output sequential closure phase files\n'
+                             'estimate       - estimate how bias decays with time\n'
+                             '                 processed on a pixel-by-pixel basis [slow]')
+
+    # mask configuration
+    mask = parser.add_argument_group('Mask', 'Configuration for closure phase bias mask')
+    mask.add_argument('--num-sigma', dest='num_sigma', type=float, default=3,
+                      help='Threshold for phase, in number of sigmas (default: %(default)s).\n'
+                           'Assuming a Gaussian distribution for the cumulative closure phase'
+                           ' with sigma = pi / sqrt(3*num_cp)')
+    mask.add_argument('--eps','--epsilon', dest='epsilon', type=float, default=0.3,
+                      help='Threshold for the normalized amplitude in [0-1] (default: %(default)s).')
+
+    # compute
+    parser = arg_utils.add_parallel_argument(parser)
+    parser = arg_utils.add_memory_argument(parser)
+
+    # output
+    parser.add_argument('-o', dest='outdir', type=str, default='./', help='output file directory')
+
     return parser
 
+
 def cmd_line_parse(iargs=None):
     parser = create_parser()
-    inps = parser.parse_args(args = iargs)
+    inps = parser.parse_args(args=iargs)
+
+    # --water-mask option
+    if not inps.water_mask_file and os.path.isfile('./waterMask.h5'):
+        inps.water_mask_file = os.path.abspath('./waterMask.h5')
+
     return inps
 
-# Obtain sum of consecutive complex sequential closure phase of connection n
-def cum_seq_closurePhase(SLC_list, date12_list_all, ifgram_stack, ref_phase, n, box):
+
+#################################  Mask  #######################################
+def calc_closure_phase_mask(stack_file, bias_free_conn, num_sigma=3, threshold_amp=0.3,
+                            outdir='./', max_memory=4.0):
+    """Calculate a mask for areas suseptible to biases, based on the average closure phase tau.
+
+    Equation: tau = 1 / K * Sigma_{k=1}^K exp(j * Phi_k^{nl})
+      where K is the number of closure phases for connection nl, Phi_k^{nl} is the k-th sequential
+      closure phase for connection nl, as defined in equation (21).
+    Reference: Section VI in Zheng et al. (2022, TGRS).
+
+    Parameters: stack_file        - str, path for ifgramStack.h5 file
+                bias_free_conn    - int, connection level that we assume to be bias-free
+                num_sigma         - float, number of sigmas for computing phase threshold
+                threshold_amp     - float, threshold of amplitude of the cumulative sequential closure phase
+                outdir            - str, directory of output files
+                max_memory        - float, maximum memory in GB for each patch processed
+    Returns:    mask              - 2D np.ndarray of size (length, width) in boolean, 0 for areas susceptible to biases.
+                                    Saved to file: maskClosurePhase.h5
+                avg_cp            - 2D np.ndarray of size (length, width) in complex64, average cum. seq. closure phase
+                                    Saved to file: avgCpxClosurePhase.h5
     """
-    Input parameters:
-        SLC_list : list of SLC dates
-        date12_list_all: date12 of all the interferograms stored in the ifgramstack file
-        ifgram_stack: stack file
-        refphase : reference phase
-        n        : connection level of the closure phase
-        box      : bounding box for the patch
+
+    # basic info
+    stack_obj = ifgramStack(stack_file)
+    stack_obj.open(print_msg=False)
+    meta = dict(stack_obj.metadata)
+    length, width = stack_obj.length, stack_obj.width
+    date_list = stack_obj.get_date_list(dropIfgram=True)
+    num_cp = stack_obj.get_closure_phase_index(bias_free_conn).shape[0]
+
+    ## What is a good threshold?
+    # Assume that it's pure noise so that the phase is uniformly distributed from -pi to pi.
+    # The standard deviation of phase in each loop is:
+    #     pi/sqrt(3)
+    # (technically it should be smaller, because forming loops should reduce the phase variance)
+    # The standard deviation of phase in the cumulative wrapped closure phase is:
+    #     pi/sqrt(3)/sqrt(num_cp) -- again a simplification assuming no correlation.
+    # We use 3*sigma as the threshold -- 99.7% confidence
+    threshold_pha = np.pi / np.sqrt(3 * num_cp) * num_sigma
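+    # e.g., num_cp = 100 and num_sigma = 3 give
+    # threshold_pha = pi / sqrt(300) * 3 ~= 0.54 rad (illustrative numbers)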
+
+    # key info
+    print('\n'+'-'*80)
+    print('calculating the mask to flag areas susceptible to non-closure-phase related biases (as zero) ...')
+    print(f'number of valid acquisitions: {len(date_list)} ({date_list[0]} - {date_list[-1]})')
+    print(f'average complex closure phase threshold in amplitude/correlation: {threshold_amp}')
+    print(f'average complex closure phase threshold in phase: {num_sigma} sigma ({threshold_pha:.1f} rad)')
+
+    # calculate the average complex closure phase
+    # process block-by-block to save memory
+    print('\ncalculating the average complex closure phase')
+    print(f'length / width: {length} / {width}')
+    box_list, num_box = stack_obj.split2boxes(max_memory=max_memory, dim0_size=stack_obj.numIfgram+num_cp*2)
+
+    avg_cp = np.zeros([length,width], dtype=np.complex64)
+    for i, box in enumerate(box_list):
+        if num_box > 1:
+            print('\n------- processing patch {} out of {} --------------'.format(i+1, num_box))
+            print(f'box: {box}')
+            print('box width:  {}'.format(box[2] - box[0]))
+            print('box length: {}'.format(box[3] - box[1]))
+
+        avg_cp[box[1]:box[3], box[0]:box[2]], num_cp = stack_obj.get_sequential_closure_phase(
+            box=box,
+            conn=bias_free_conn,
+            post_proc='mean',
+        )[1:]
+
+    # mask out no-data pixels
+    geom_file = 'geometryGeo.h5' if 'Y_FIRST' in meta.keys() else 'geometryRadar.h5'
+    geom_file = os.path.join(os.path.dirname(stack_file), geom_file)
+    if os.path.isfile(geom_file):
+        geom_ds_names = readfile.get_dataset_list(geom_file)
+        ds_names = [x for x in ['incidenceAngle', 'waterMask'] if x in geom_ds_names]
+        if len(ds_names) > 0:
+            print(f'mask out pixels with no-data-value (zero {ds_names[0]} from file: {os.path.basename(geom_file)})')
+            no_data_mask = readfile.read(geom_file, datasetName=ds_names[0])[0] == 0
+            avg_cp[no_data_mask] = np.nan
+
+    # create mask
+    print('\ncreate mask for areas susceptible to non-closure phase biases')
+    mask = np.ones([length,width], dtype=bool)
+
+    # mask areas with potential bias
+    print(f'set pixels with average complex closure phase angle > {num_sigma} sigma ({threshold_pha:.1f} rad) to 0.')
+    mask[np.abs(np.angle(avg_cp)) > threshold_pha] = 0
+
+    # unmask areas with low correlation
+    # where it's hard to know whether there is bias or not
+    print(f'set pixels with average complex closure phase amplitude (correlation) < {threshold_amp} to 1.')
+    mask[np.abs(avg_cp) < threshold_amp] = 1
+
+    # write file 1 - mask
+    mask_file = os.path.join(outdir, 'maskClosurePhase.h5')
+    meta['FILE_TYPE'] = 'mask'
+    meta['DATA_TYPE'] = 'bool'
+    writefile.write(mask, out_file=mask_file, metadata=meta)
+
+    # write file 2 - average closure phase
+    avg_cp_file = os.path.join(outdir, 'avgCpxClosurePhase.h5')
+    meta['FILE_TYPE'] = 'mask'
+    meta['DATA_TYPE'] = 'float32'
+    ds_dict = {
+        'amplitude' : np.abs(avg_cp),
+        'phase'     : np.angle(avg_cp),
+    }
+    writefile.write(ds_dict, out_file=avg_cp_file, metadata=meta)
+
+    return mask, avg_cp
+
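+# Minimal numpy sketch of the average closure phase tau above (illustrative only,
+# assuming `seq_cp` is a (K, length, width) array of sequential closure phases):
+#   tau = np.mean(np.exp(1j * seq_cp), axis=0)
+#   bias_flag = np.abs(np.angle(tau)) > threshold_pha   # pixels flagged as biased
+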
+
+################################################################################
+def unwrap_closure_phase(int_file, cor_file, unw_file):
+    """Unwrap the input wrapped sequential closure phase interferogram.
     """
-    cp_idx = []
-    NSLC = len(SLC_list)
-    for i in range(NSLC-n):
-        ifgram = []
-        flag = True
-        for j in range(n):
-            ifgram.append('{}_{}'.format(SLC_list[i+j],SLC_list[i+j+1]))
-        ifgram.append('{}_{}'.format(SLC_list[i],SLC_list[i+n]))
-        for ifgram_name in ifgram:
-            if ifgram_name not in date12_list_all:
-                flag = False # if missing an interferogram, we won't make the corresponding closure phase
-        if flag:
-            cp_idx.append([date12_list_all.index(ifgram[j]) for j in range(n+1)])
-
-    cp_idx = np.array(cp_idx, np.int16)
-    cp_idx = np.unique(cp_idx, axis = 0)
-
-    num_cp = len(cp_idx)
-    print('Number of closure measurements expected, ', len(SLC_list)-n)
-    print('Number of closure measurements found, ', num_cp)
-
-    if num_cp <1:
-        print('No closure phase measurements found, abort')
-        raise Exception("No triplets found!")
-
-    box_width  = box[2] - box[0]
-    box_length = box[3] - box[1]
-    phase = readfile.read(ifgram_stack, box=box,print_msg=False)[0]
-    cum_cp = np.zeros((box_length, box_width), np.complex64)
-    for i in range(num_cp):
-        cp0_w = np.zeros ((box_length, box_width), np.float32)
-        for j in range(n):
-                    idx = cp_idx[i,j]
-                    cp0_w = cp0_w + phase[idx,:,:] - ref_phase[idx]
-        idx = cp_idx[i,n]
-        cp0_w = cp0_w - (phase[idx,:,:]-ref_phase[idx])
-        cum_cp = cum_cp + (np.exp(1j*cp0_w))
 
-    # cum_cp = np.angle(cum_cp)
-    return cum_cp, num_cp
+    if os.path.isfile(cor_file) and os.path.isfile(unw_file):
+        print(f'unwrapped interferogram file exists: {unw_file}, skip re-unwrapping.')
+    else:
+        isce_utils.estimate_coherence(int_file, cor_file)
+        isce_utils.unwrap_snaphu(int_file, cor_file, unw_file)
+
+    return unw_file
 
-def main(iargs = None):
-    inps = cmd_line_parse(iargs)
-    stack_obj = ifgramStack(inps.ifgram_stack)
+
+def cum_seq_unw_closure_phase_timeseries(conn, conn_dir, date_list, meta):
+    '''Output cumulative conn-n sequential closure phase in time-series format,
+    which is the weighted phase history of the temporally inconsistent process (f^n / n_l).
+
+    Reference: Equation (25) and (28) in Zheng et al. (2022, TGRS).
+
+    Parameters: conn        - int, connection level of closure phases
+                conn_dir    - str, path of the sequential closure phase directory for connection-n
+                date_list   - list of str, SLC dates
+                meta        - dict, metadata of ifgramStack.h5
+    Returns:    bias_ts     - 3D np.ndarray in size of (num_ifgram, length, width) in float32,
+                              cumulative sequential closure phase time series,
+                              saved to file: cumSeqClosurePhase.h5
+                common_mask - 2D np.ndarray in size of (length, width) in bool,
+                              mask based on common connected components,
+                              saved to file: maskConnComp.h5
+    '''
+
+    # output file
+    cum_cp_file = os.path.join(conn_dir, 'cumSeqClosurePhase.h5')
+    mask_file = os.path.join(conn_dir, 'maskConnComp.h5')
+
+    # update mode checking
+    if os.path.isfile(cum_cp_file) and os.path.isfile(mask_file):
+        msg = 'cumulative seq closure phase time series and mask files exist, skip re-generating.'
+        msg += f'\n{cum_cp_file}\n{mask_file}'
+        print(msg)
+
+        # read data
+        bias_ts = readfile.read(cum_cp_file)[0]
+        common_mask = readfile.read(mask_file)[0]
+
+        return bias_ts, common_mask
+
+    # basic info
+    length, width = int(meta['LENGTH']), int(meta['WIDTH'])
+    ref_y, ref_x = int(meta['REF_Y']), int(meta['REF_X'])
+
+    unw_files = sorted(glob.glob(os.path.join(conn_dir, '*.unw')))
+    num_file = len(unw_files)
+
+    print('calculate the cumulative seq closure phase time series ...')
+    cp_phase = np.zeros((num_file, length, width), dtype=np.float32)
+    mask = np.zeros((num_file, length, width), dtype=np.float32)
+
+    prog_bar = ptime.progressBar(maxValue=num_file)
+    for i, unw_file in enumerate(unw_files):
+        prog_bar.update(i+1, suffix=f'{i+1}/{num_file} {os.path.basename(unw_file)}')
+
+        unw = readfile.read(unw_file, datasetName='phase')[0]
+        unw -= unw[ref_y, ref_x]
+        cp_phase[i] = unw
+
+        conn_comp_file = unw_file + '.conncomp'
+        conn_comp = readfile.read(conn_comp_file)[0]
+        mask[i] = np.where(conn_comp >= 1, 1, np.nan)
+
+    prog_bar.close()
+
+    # compute cumulative sequential closure phase - f^n
+    # equation (25) in Zheng et al. (2022, TGRS)
+    num_date = len(date_list)
+    bias_ts = np.zeros((num_date, length, width), dtype=np.float32)
+    bias_ts[1:num_date-conn+1, :, :] = np.cumsum(cp_phase, 0)
+    for i in range(num_date-conn+1, num_date):
+        bias_ts[i] = (i - num_date + conn) * cp_phase[-1] + bias_ts[num_date - conn]
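+    # e.g., for num_date = 10 and conn = 3: bias_ts[1:8] holds the cumulative sum
+    # of the 7 sequential closure phases, while the last conn-1 epochs are linearly
+    # extrapolated from the last one, e.g. bias_ts[8] = 1*cp_phase[-1] + bias_ts[7]
+    # and bias_ts[9] = 2*cp_phase[-1] + bias_ts[7] (worked example, not in source).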
+
+    # equation (28)
+    bias_ts /= conn
+
+    # write bias time series to HDF5 file
+    ds_dict = {
+        'timeseries' : [np.float32,     (num_date, length, width), bias_ts],
+        'date'       : [np.dtype('S8'), (num_date,), np.array(date_list, np.string_)],
+    }
+    meta['FILE_TYPE'] = 'timeseries'
+    writefile.layout_hdf5(cum_cp_file, ds_dict, metadata=meta)
+
+    # write mask to HDF5 file
+    common_mask = np.where(np.isnan(np.sum(mask,0)), False, True)
+    meta['FILE_TYPE'] = 'mask'
+    writefile.write(common_mask, out_file=mask_file, metadata=meta)
+
+    return bias_ts, common_mask
+
+
+def compute_unwrap_closure_phase(stack_file, conn, num_worker=1, outdir='./', max_memory=4.0):
+    '''Compute the following phase stack & time-series of connection-conn:
+
+    +   wrapped seq closure phase stack
+    + unwrapped seq closure phase stack
+    + cumulative unwrapped seq closure phase time-series
+    at directory: outdir/closurePhase/conn{conn}
+
+    Parameters: stack_file  - str, path for ifgramStack.h5
+                conn        - int, connection level
+                num_worker  - int, number of parallel workers for unwrapping
+                outdir      - str, path for output files
+                max_memory  - float, maximum memory in GB for each patch processed
+    '''
+    # output directory
+    conn_dir = os.path.join(outdir, f'closurePhase/conn{conn}')
+    os.makedirs(conn_dir, exist_ok=True)
+
+    # update mode checking
+    cum_cp_file = os.path.join(conn_dir, 'cumSeqClosurePhase.h5')
+    if os.path.isfile(cum_cp_file):
+        print(f'cumulative unwrapped seq closure phase time-series exists at: {cum_cp_file}, skip re-generating.')
+        return
+
+    print('-'*60)
+    print('step 1/3: calculate and filter the wrapped sequential closure phase stack ...')
+
+    # basic info
+    stack_obj = ifgramStack(stack_file)
     stack_obj.open()
     length, width = stack_obj.length, stack_obj.width
-    date12_list = stack_obj.get_date12_list(dropIfgram=True)
-    date12_list_all = stack_obj.get_date12_list(dropIfgram=False)
-    print('scene length, width', length, width)
-    ref_phase = stack_obj.get_reference_phase(unwDatasetName = 'unwrapPhase')
-    inps.length = length
-    inps.width = width
-    # retrieve the list of SLC dates from ifgramStack.h5
-    ifgram0 = date12_list[0]
-    date1, date2 = ifgram0.split('_')
-    SLC_list = [date1, date2]
-    for ifgram in date12_list:
-        date1, date2 = ifgram.split('_')
-        if date1 not in SLC_list:
-            SLC_list.append(date1)
-        if date2 not in SLC_list:
-            SLC_list.append(date2)
-    SLC_list.sort()
-    print('number of SLC found : ', len(SLC_list))
-    print('first SLC: ', SLC_list[0])
-    print('last  SLC: ', SLC_list[-1])
+    meta = dict(stack_obj.metadata)
+    print(f'scene size: {length} x {width}')
 
+    date_list = stack_obj.get_date_list(dropIfgram=True)
+    num_date = len(date_list)
+    print(f'number of acquisitions found: {num_date}')
+    print(f'start / end date: {date_list[0]} / {date_list[-1]}')
+    # number of expected closure phase
+    num_cp = num_date - conn
+    num_digit = len(str(num_cp))
+
+    ## default output binary filenames
+    fbases = [os.path.join(conn_dir, f'filt_{x+1:0{num_digit}}') for x in range(num_cp)]
+    int_files = [f'{x}.int' for x in fbases]
+    cor_files = [f'{x}.cor' for x in fbases]
+    unw_files = [f'{x}.unw' for x in fbases]
 
-    # split igram_file into blocks to save memory
-    box_list, num_box = ifginv.split2boxes(inps.ifgram_stack,inps.max_memory)
-    closurephase =  np.zeros([length,width],np.complex64)
-    #process block-by-block
-    for i, box in enumerate(box_list):
-            box_width  = box[2] - box[0]
-            box_length = box[3] - box[1]
+    if all(os.path.isfile(x) for x in int_files):
+        print('ALL the filtered closure phase files exist, skip re-generation.')
+
+    else:
+        # process block-by-block
+        # split igram_file into blocks to save memory
+        box_list, num_box = stack_obj.split2boxes(max_memory=max_memory,
+                                                  dim0_size=stack_obj.numIfgram+num_cp*2)
+        closure_phase = np.zeros([num_cp, length, width],np.float32)
+        for i, box in enumerate(box_list):
             print(box)
             if num_box > 1:
                 print('\n------- processing patch {} out of {} --------------'.format(i+1, num_box))
-                print('box width:  {}'.format(box_width))
-                print('box length: {}'.format(box_length))
+                print('box length: {}'.format(box[3] - box[1]))
+                print('box width : {}'.format(box[2] - box[0]))
 
-            closurephase[box[1]:box[3],box[0]:box[2]], numcp = cum_seq_closurePhase(SLC_list, date12_list_all, inps.ifgram_stack, ref_phase,inps.nl,box)
+            closure_phase[:, box[1]:box[3], box[0]:box[2]] = stack_obj.get_sequential_closure_phase(
+                box=box,
+                conn=conn,
+            )[0]
+
+        ## filter the closure phase and re-unwrap
+        print('-'*80)
+        print('filter the wrapped closure phase stack with a Gaussian kernel of 5 x 5 ...')
+        print(f'number of wrapped closure phase: {num_cp}')
+
+        kernel = isce_utils.gaussian_kernel(5, 5, 1, 1)
+        for i, int_file in enumerate(int_files):
+            if not os.path.isfile(int_file):
+                # filter the closure phase interferogram
+                closure_phase_filt = isce_utils.convolve(
+                    data=np.exp(1j*closure_phase[i]),
+                    kernel=kernel,
+                ).astype(np.complex64)
+
+                # write to binary file in isce2 format
+                print(f'write file: {int_file}')
+                with open(int_file, mode='wb') as fid:
+                    closure_phase_filt.tofile(fid)
+
+                # write metadata in isce2/roipac format
+                meta['FILE_TYPE'] = '.int'
+                meta['INTERLEAVE'] = 'BIP'
+                meta['DATA_TYPE'] = 'complex64'
+                meta['BANDS'] = 1
+                writefile.write_isce_xml(meta, int_file)
+                writefile.write_roipac_rsc(meta, int_file+'.rsc')
+        del closure_phase
+
+    print('-'*60)
+    print('step 2/3: unwrap the filtered wrapped closure phase stack ...')
+    print(f'number of closure phase: {num_cp}')
+    if all(os.path.isfile(x) for x in unw_files):
+        print('ALL the unwrapped closure phase files exist, skip re-generation.')
 
+    else:
+        num_core, run_parallel, Parallel, delayed = ut.check_parallel(
+            num_cp,
+            print_msg=False,
+            maxParallelNum=num_worker)
+
+        if run_parallel and num_core > 1:
+            print(f'parallel processing using {num_core} cores')
+            Parallel(n_jobs=num_core)(delayed(unwrap_closure_phase)(x, y, z)
+                                      for x, y, z in zip(int_files, cor_files, unw_files))
+
+        else:
+            for x, y, z in zip(int_files, cor_files, unw_files):
+                unwrap_closure_phase(x, y, z)
+
+    ## calc the cumulative unwrapped closure phase time-series
+    print('-'*60)
+    print('step 3/3: calculate the unwrapped cumulative sequential closure phase time-series ...')
+    cum_seq_unw_closure_phase_timeseries(conn, conn_dir, date_list, meta)
 
+    return
 
-    # What is a good thredshold?
-    # Assume that it's pure noise so that the phase is uniform distributed from -pi to pi.
-    # The standard deviation of phase in each loop is pi/sqrt(3) (technically should be smaller because when forming loops there should be a reduction in phase variance)
-    # The standard deviation of phase in cumulative wrapped closure phase is pi/sqrt(3)/sqrt(num_cp) -- again another simplification assuming no correlation.
-    # We use 3\delta as default threshold -- 99.7% confidence
 
-    if inps.numsigma:
-        threshold_cp = np.pi/np.sqrt(3)/np.sqrt(numcp)*inps.numsigma
+################################################################################
+def read_cum_seq_closure_phase4conn(conn, outdir='./', box=None, print_msg=False):
+    '''Read cumulative sequential closure phase from individual closure phase directory.
+
+    Reference: Eq. (25) in Zheng et al. (2022).
+
+    Parameters: conn    - integer, connection level of sequential closure phases
+                outdir  - string, directory of conn{n}_cumSeqClosurePhase.h5
+                box     - list in size of (4,) in integer, coordinates of bounding box
+    Returns:    bias_ts - 3D array in size of (num_date, box_len, box_wid) in float,
+                          cumulative sequential closure phases
+    '''
+    cum_cp_file = os.path.join(outdir, f'closurePhase/conn{conn}/cumSeqClosurePhase.h5')
+    if print_msg:
+        print(f'read timeseries from file: {cum_cp_file}')
+
+    bias_ts = readfile.read(cum_cp_file, box=box, print_msg=False)[0]
+    return bias_ts
+
+
+def estimate_wratio(tbase, conn, bias_free_conn, wvl, box, outdir='./', mask=False):
+    '''Estimate W_r & velocity bias for the given connection level.
+
+    W_r is the M x M diagonal matrix with W_r(ii) = w(i*delta_t) / w(delta_t), i = 1,2,...,M.
+    This is defined in Equation (20), to be used for bias estimation; and can be calculated from
+    the weighted phase history (cumulative seq closure phase) based on Equation (29).
+
+    Parameters: tbase          - list(float) or array, in size of (num_date), time in accumulated years
+                conn           - integer, connection-level
+                bias_free_conn - integer, minimum connection-level that we think is bias-free
+                wvl            - float, wavelength
+                box            - list in size of (4,) in integer, coordinates of bounding box
+                outdir         - str, the working directory
+                mask           - bool, whether to mask out areas with average bias velocity less than 1 mm/year
+    Returns:    wratio_connN   - 2D np.ndarray of size (box_len, box_wid) in float32, W_r of the input connection level
+                vel_bias_connN - 2D np.ndarray of size (box_len, box_wid) in float32, velocity bias of the input connection level
+    '''
+    delta_t = tbase[-1] - tbase[0]
+    phase2range = -1 * wvl / (4 * np.pi)
+
+    # cum/vel_bias at bias free connection level
+    cum_bias_connF = read_cum_seq_closure_phase4conn(bias_free_conn, outdir, box)[-1,:,:]
+    vel_bias_connF = cum_bias_connF / delta_t * phase2range
+
+    # calc wratio at input connection level
+    box_wid = box[2] - box[0]
+    box_len = box[3] - box[1]
+    wratio_connN = np.ones([box_len, box_wid], dtype=np.float32)
+
+    if conn > 1:
+        cum_bias_connN = read_cum_seq_closure_phase4conn(conn, outdir, box)[-1,:,:]
+        # skip invalid pixels
+        flag = np.multiply(~np.isnan(cum_bias_connF), cum_bias_connF != 0)
+        # Equation (29)
+        wratio_connN[flag] = 1 - cum_bias_connN[flag] / cum_bias_connF[flag]
+
+        # bound within [0, 1]
+        wratio_connN[wratio_connN > 1] = 1
+        wratio_connN[wratio_connN < 0] = 0
+
     else:
-        threshold_cp = np.pi/np.sqrt(3)/np.sqrt(numcp)*3 # 3/sigma, 99.7% confidence
+        cum_bias_connN = None
 
-    mask = np.ones([length,width],np.float32)
-    mask[np.abs(np.angle(closurephase))>threshold_cp] = 0 # this masks areas with potential bias
-    mask[np.abs(np.abs(closurephase)/numcp < inps.episilon)] = 1 # this unmasks areas with low correlation (where it's hard to know wheter there is bias either)
+    # vel_bias at input connection level
+    vel_bias_connN = np.multiply(wratio_connN, vel_bias_connF)
+    if mask:
+        # if the average velocity is smaller than 1 mm/year (hardcoded), mask it out for better visualization
+        # this option is only turned on while outputting the wratio.h5 file.
+        wratio_connN[abs(vel_bias_connF) < 0.001] = np.nan
+
+    # debug mode
+    debug_mode = False
+    if debug_mode:
+        from matplotlib import pyplot as plt
+
+        data_list = [wratio_connN, vel_bias_connN, cum_bias_connN,
+                     None,         vel_bias_connF, cum_bias_connF]
+        titles = ['w_ratio', f'bias_vel_{conn}', f'bias_{conn}',
+                  None,       'bias_vel_F',       'bias_F']
+
+        fig, axs = plt.subplots(nrows=2, ncols=3, figsize=[12, 6])
+        for ax, data, title in zip(axs.flatten(), data_list, titles):
+            if data is not None:
+                im = ax.imshow(data, cmap='jet', interpolation='nearest')
+                ax.set_title(title)
+                fig.colorbar(im, ax=ax)
+            else:
+                ax.axis('off')
+        fig.tight_layout()
+        plt.show()
+
+    return wratio_connN, vel_bias_connN
+
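+# Sanity check on Eq. (29) above (reasoning note, not from the source): a pixel with
+# cum_bias_connN == cum_bias_connF gives wratio = 0 (bias fully decayed at conn-N),
+# while cum_bias_connN == 0 gives wratio = 1 (bias as strong as at the shortest pair).
+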
+
+def estimate_wratio_all(bw, bias_free_conn, outdir, box):
+    '''Estimate the diagonal matrix W_r for all connection levels within the given bandwidth.
+
+    Parameters: bw             - integer, bandwidth of the given time-series analysis
+                bias_free_conn - integer, minimum connection-level that we think is bias-free
+                outdir         - string, the working directory
+                box            - list in size of (4,) in integer, coordinates of bounding box
+    Returns:    wratio         - 3D array in size of (bw+1, box_len, box_wid) in float32,
+                                 the first slice (wratio[0,:,:]) is padding
+                                 to ensure that wratio[n,:,:] = w(n * delta_t) / w(delta_t).
+    '''
+    box_wid = box[2] - box[0]
+    box_len = box[3] - box[1]
+    cum_bias_connF = read_cum_seq_closure_phase4conn(bias_free_conn, outdir, box)[-1,:,:]
+    # skip invalid pixels
+    flag = np.multiply(~np.isnan(cum_bias_connF), cum_bias_connF != 0)
+
+    wratio = np.ones([bw+1, box_len, box_wid], dtype=np.float32)
+    for n in np.arange(2, bw+1):
+        cum_bias_connN = read_cum_seq_closure_phase4conn(n, outdir, box)[-1,:,:]
+        wratio[n,flag] = 1 - cum_bias_connN[flag] / cum_bias_connF[flag]
+
+    # bound into [0, 1]
+    wratio[wratio > 1] = 1
+    wratio[wratio < 0] = 0
+
+    return wratio
+
+
+def get_avg_time_span_within_bandwidth(date_ordinal, bw):
+    '''Compute the average temporal span (days) for all interferograms within the given bandwidth
+
+    Parameters: date_ordinal - list of size (num_date,) in integer, time in days
+                bw           - integer, bandwidth of time-series analysis
+    Return:     avg_time     - float, average time-span in days
+    '''
+    avg_time = 0
+    num_ifgram = 0
+    for level in range(1, bw+1):
+        slc_date_firstN = date_ordinal[0:level]
+        slc_date_lastN  = date_ordinal[-level:]
+        for i in range(level):
+            avg_time += slc_date_lastN[i] - slc_date_firstN[i]
+        num_ifgram += len(date_ordinal) - level
+
+    avg_time /= num_ifgram
+
+    return avg_time
+
+
+def get_avg_time_span4conn(date_ordinal, conn):
+    '''Compute the average temporal span (days) for connection-n interferograms
+
+    Parameters: date_ordinal - list of size (num_date,) in integer, time in days
+                conn         - int, connection level of interferograms
+    Return:     avg_time     - float, average time-span in days
+    '''
+    slc_date_firstN = date_ordinal[0:conn]
+    slc_date_lastN = date_ordinal[-conn:]
+    avg_time = 0
+    for i in range(conn):
+        avg_time += slc_date_lastN[i] - slc_date_firstN[i]
+
+    num_ifgram = len(date_ordinal) - conn
+    avg_time /= num_ifgram
+
+    return avg_time
+
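+# Worked example (not from the source): for 5 acquisitions 12 days apart,
+# date_ordinal = [0, 12, 24, 36, 48] and conn = 2 give
+# avg_time = ((36 - 0) + (48 - 12)) / (5 - 2) = 24 days, i.e. conn * 12 days.
+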
+
+def estimate_bias_timeseries_approx_patch(bias_free_conn, bw, tbase, date_ordinal, wvl, box, outdir):
+    '''Quick and approximate estimate of the bias time-series of a certain bandwidth (bw) for a bounding box
+
+    Note: This estimate is not exact, but often close enough.
+      It is good for a quick estimate to see how big the biases are.
+
+    Parameters: bias_free_conn - integer, connection level that we assume bias-free
+                bw             - integer, bandwidth of the given time-series analysis
+                tbase          - 1D np.ndarray in size of (num_date,) in float, time in accumulated years
+                date_ordinal   - list of size (num_date,) in integer, time in days
+                wvl            - float, wavelength of the SAR system
+                box            - list in size of (4,) in integer, coordinates of bounding box
+                outdir         - string, directory for output files
+    Returns:    bias_ts        - 3D array in size of (num_date, box_len, box_wid) in float, bias timeseries
+    '''
+    print('\n'+'-'*60)
+    print(f'quick and approximate estimation of bias time series for bandwidth = {bw}')
+    # basic info
+    phase2range = -1 * wvl / (4 * np.pi)
+    num_date = tbase.size
+
+    # average temporal span for ifgrams of connection-1 to connection-bw
+    deltat_n = np.asarray([get_avg_time_span4conn(date_ordinal, n) for n in range(1, bw+1)])
+    avg_time_span = get_avg_time_span_within_bandwidth(date_ordinal, bw)
+
+    # the bias in a bandwidth-bw analysis is similar to bias in connection-p interferograms
+    p = (np.abs(deltat_n - avg_time_span)).argmin() + 1
+    print(f'average connection level within the bandwidth = {p}')
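+    # e.g., with 10 acquisitions at regular 12-day sampling and bw = 3,
+    # deltat_n = [12, 24, 36] days and avg_time_span ~= 23 days, so p = 2
+    # (illustrative numbers, not from the source)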
+
+    # get wratio
+    kwargs1 = dict(
+        bias_free_conn=bias_free_conn,
+        wvl=wvl,
+        box=box,
+        outdir=outdir,
+    )
+    wratio_p = estimate_wratio(tbase, conn=p, **kwargs1)[0]
+    wratio_2 = estimate_wratio(tbase, conn=2, **kwargs1)[0]
+    wratio_p[np.isnan(wratio_p)] = 0
+    wratio_2[np.isnan(wratio_2)] = 0
+
+    wratio_2[abs(wratio_2 - 1) < 0.1] = np.nan
+    ratio_p2 = wratio_p / (1 - wratio_2)
+
+    # get bias_ts
+    kwargs2 = dict(outdir=outdir, box=box, print_msg=True)
+    bias_ts = read_cum_seq_closure_phase4conn(2, **kwargs2) * phase2range
+    bias_ts_bf = read_cum_seq_closure_phase4conn(bias_free_conn, **kwargs2) * phase2range
+    for i in range(num_date):
+        bias_ts[i,:,:] *= ratio_p2
+        bias_ts_bf[i,:,:] *= wratio_p
+
+    flag = np.isnan(bias_ts)
+    bias_ts[flag] = bias_ts_bf[flag]
+
+    return bias_ts
+
+
+def estimate_bias_timeseries_approx(stack_file, bias_free_conn, bw, water_mask_file=None, outdir='./', max_memory=4.0):
+    '''Quick & approximate estimation of the bias time series and Wr.
+
+    Reference: Eq. (20) in Zheng et al. (2022, TGRS).
+
+    Parameters: stack_file     - string, path for ifgramStack.h5
+                bias_free_conn - integer, connection level that we assume to be bias-free
+                bw             - integer, bandwidth of the given time-series
+                water_mask_file - str, path to the water mask file
+                outdir         - str, directory for output files
+                max_memory     - float, maximum memory in GB for each patch processed
+    Returns:    bias_ts_file   - str, path to the HDF5 file for the approximate bias time series in (num_date, length, width)
+                wratio_file    - str, path to the HDF5 file for the wratio and bias velocity in (bw, length, width)
+                                 Shows how fast the bias-inducing signal decays with temporal baseline.
+    '''
+    print('\n'+'-'*80)
+    print(f'quick estimation of the non-closure phase bias time-series for bandwidth={bw} (Zheng et al., 2022) ...')
 
-    # save mask
+    stack_obj = ifgramStack(stack_file)
+    stack_obj.open()
+    length, width = stack_obj.length, stack_obj.width
     meta = dict(stack_obj.metadata)
-    meta['FILE_TYPE'] = 'mask'
-    ds_name_dict = {'cpmask': [np.float32, (length, width), mask],}
-    writefile.layout_hdf5(os.path.join(inps.outdir,'cpmask.h5'), ds_name_dict, meta)
+    wvl = float(meta['WAVELENGTH'])
+
+    # time info
+    date_list = stack_obj.get_date_list(dropIfgram=True)
+    num_date = len(date_list)
+
+    tbase = np.array(ptime.date_list2tbase(date_list)[0], dtype=np.float32) / 365.25
+    date_str_fmt = ptime.get_date_str_format(date_list[0])
+    date_ordinal = [dt.strptime(x, date_str_fmt).toordinal() for x in date_list]
+
+    # split igram_file into blocks to save memory
+    box_list, num_box = stack_obj.split2boxes(max_memory=max_memory, dim0_size=num_date)
+
+    # initiate output files
+    # 1 - wratio file
+    wratio_file = os.path.join(outdir, 'wratio.h5')
+    meta['FILE_TYPE'] = 'wratio'
+    meta['DATA_TYPE'] = 'float32'
+    meta['UNIT'] = '1'
+    ds_name_dict = {
+        'wratio'       : [np.float32,     (bw, length, width), None],
+        'velocityBias' : [np.float32,     (bw, length, width), None],
+    }
+    ds_unit_dict = {
+        'wratio'       : '1',
+        'velocityBias' : 'm/year',
+    }
+    writefile.layout_hdf5(wratio_file, ds_name_dict, metadata=meta, ds_unit_dict=ds_unit_dict)
+
+    # 2 - time series file
+    bias_ts_file = os.path.join(outdir, 'timeseriesBiasApprox.h5')
+    meta['FILE_TYPE'] = 'timeseries'
+    meta['UNIT'] = 'm'
+    ds_name_dict = {
+        'timeseries' : [np.float32,     (num_date, length, width), None],
+        'date'       : [np.dtype('S8'), (num_date,),  np.array(date_list, np.string_)],
+    }
+    writefile.layout_hdf5(bias_ts_file, ds_name_dict, meta)
+
+    # process block-by-block
+    for i, box in enumerate(box_list):
+        box_wid = box[2] - box[0]
+        box_len = box[3] - box[1]
+        print(box)
+        if num_box > 1:
+            print('\n------- processing patch {} out of {} --------------'.format(i+1, num_box))
+            print('box width:  {}'.format(box_wid))
+            print('box length: {}'.format(box_len))
+
+        # read water mask
+        if water_mask_file:
+            print(f'skip pixels on water (zero value from file: {os.path.basename(water_mask_file)})')
+            water_mask = readfile.read(water_mask_file, box=box)[0]
+        else:
+            water_mask_file = None
+
+        # 1 - estimate the wratio(_velocity)
+        wratio = np.zeros([bw, box_len, box_wid], dtype=np.float32)
+        bias_vel = np.zeros([bw, box_len, box_wid], dtype=np.float32)
+        for j in range(bw):
+            print(f'estimating W_ratio for connection level: {j+1}')
+            wratio[j, :, :], bias_vel[j, :, :] = estimate_wratio(
+                tbase,
+                conn=j+1,
+                bias_free_conn=bias_free_conn,
+                wvl=wvl,
+                box=box,
+                outdir=outdir,
+                mask=True)
+
+        if water_mask_file:
+            wratio[:, water_mask==0] = np.nan
+            bias_vel[:, water_mask==0] = np.nan
+
+        # write the block to disk
+        block = [0, bw, box[1], box[3], box[0], box[2]]
+
+        writefile.write_hdf5_block(
+            wratio_file,
+            data=wratio,
+            datasetName='wratio',
+            block=block)
+
+        writefile.write_hdf5_block(
+            wratio_file,
+            data=bias_vel,
+            datasetName='velocityBias',
+            block=block)
+
+        # 2 - estimate the bias time series
+        bias_ts = estimate_bias_timeseries_approx_patch(
+            bias_free_conn=bias_free_conn,
+            bw=bw,
+            tbase=tbase,
+            date_ordinal=date_ordinal,
+            wvl=wvl,
+            box=box,
+            outdir=outdir)
+
+        if water_mask_file:
+            bias_ts[:, water_mask==0] = np.nan
+
+        # write the block to disk
+        block = [0, len(date_list), box[1], box[3], box[0], box[2]]
+        writefile.write_hdf5_block(
+            bias_ts_file,
+            data=bias_ts,
+            datasetName='timeseries',
+            block=block)
+
+    return bias_ts_file, wratio_file
+
+
+
+################################################################################
+def bandwidth2num_ifgram(bw, num_date):
+    '''Get the number of interferograms for the network with the given bandwidth.
+
+    Reference: Equation (15) in Zheng et al. (2022)
+
+    Parameters: bw         - int, bandwidth
+                num_date   - int, number of acquisitions
+    Returns:    num_ifgram - int, number of interferograms
+    '''
+    return int(bw * (num_date * 2 - bw - 1) / 2)
+
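+# Worked example (not from the source): bw = 3 and num_date = 10 give
+# int(3 * (10 * 2 - 3 - 1) / 2) = 24 interferograms,
+# i.e. 9 conn-1 + 8 conn-2 + 7 conn-3 pairs.
+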
+
+def get_design_matrix_Wr(date12_list, bw, box, bias_free_conn, outdir='./'):
+    '''Computes the matrix W_r for a given bounding box, following Equation (20).
+
+    Parameters: date12_list    - list(str), interferogram pairs list in YYYYMMDD_YYYYMMDD
+                bw             - integer, bandwidth of time-series analysis
+                bias_free_conn - integer, minimum connection-level that we think is bias-free
+                box            - list in size of (4,) in integer, coordinates of bounding box
+                outdir         - string, the working directory
+    Returns:    Wr             - 2D np.ndarray in size of (num_ifgram, num_pix) in float32,
+                                 each column stores the diagonal of W (Eq. 16 in Zheng et al., 2022) for one pixel.
+                A              - 2D np.ndarray in size of (num_ifgram, num_date) in float32
+    '''
+    # get design matrix A
+    A = ifgramStack.get_design_matrix4timeseries(date12_list=date12_list, refDate='no')[0]
+    num_ifgram = A.shape[0]
+
+    # get w(delta_t) * phi^x - section VI-A
+    wratio_all = estimate_wratio_all(bw, bias_free_conn, outdir, box)
+
+    # initial output value
+    num_pix = (box[2] - box[0]) * (box[3] - box[1])
+    Wr = np.zeros((num_ifgram, num_pix), dtype=np.float32)
+    for i in range(num_ifgram):
+        # get the connection level
+        Aline = list(A[i,:])
+        idx1 = Aline.index(-1)
+        idx2 = Aline.index(1)
+        conn = idx2 - idx1
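+        # e.g., a row [0, -1, 0, 1, 0] pairs the 2nd and 4th dates: idx1 = 1,
+        # idx2 = 3, conn = 2 (illustrative row, not from the source)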
+        if conn > bw:
+            print('Existing max-conn-level > the input bandwidth, '
+                  'use modify_network.py inputs/ifgramStack.h5 to adjust the max-conn-level.')
+
+        # assign to Wr matrix
+        Wr[i, :] = wratio_all[conn, :, :].reshape(-1)
+
+    return Wr, A
+
+
+def estimate_bias_timeseries_patch(stack_file, bias_free_conn, bw, wvl, box, water_mask_file=None, outdir='./'):
+    '''Estimate the bias time-series of a certain bandwidth (bw) for a bounding box.
+
+    Reference: Zheng et al. (2022, TGRS).
+
+    Parameters: stack_file     - string, path for ifgramStack.h5
+                bias_free_conn - integer, connection level that we assume to be bias-free
+                bw             - integer, bandwidth of the given time-series
+                wvl            - float, wavelength of the SAR system
+                box            - list in size of (4,) in integer, coordinates of bounding box
+                water_mask_file - str, path to the water mask file
+                outdir         - string, directory for output files
+    Returns:    bias_ts        - 3D array of size (num_date, box_len, box_wid) in float32, estimated bias time-series
+                box            - list in size of (4,) in integer, coordinates of bounding box, output for parallel computing
+    '''
+    phase2range = -1 * wvl / (4 * np.pi)
+    box_wid, box_len = box[2] - box[0], box[3] - box[1]
+    num_pix = box_wid * box_len
+
+    stack_obj = ifgramStack(stack_file)
+    stack_obj.open(print_msg=False)
+    date12_list = stack_obj.get_date12_list(dropIfgram=True)
+    num_ifgram = len(date12_list)
 
-    # also save the average closure phase
-    ds_name_dict2 = {'phase': [np.float32, (length, width), np.angle(closurephase)],
-                    'amplitude':[np.float32,(length,width),np.abs(closurephase)/numcp],}
-    writefile.layout_hdf5(os.path.join(inps.outdir,'avgwcp.h5'), ds_name_dict2, meta)
+    # time info
+    date_list = stack_obj.get_date_list(dropIfgram=True)
+    num_date = len(date_list)
+    # tbase in the unit of years
+    tbase = np.array(ptime.date_list2tbase(date_list)[0], dtype=np.float32) / 365.25
+    tbase_diff = np.diff(tbase).reshape(-1,1)
+
+
+    ## mask of pixels to invert
+    mask = np.ones(num_pix, dtype=np.bool_)
+
+    # water mask
+    if water_mask_file and os.path.isfile(water_mask_file):
+        print(f'skip pixels (on the water) with zero value in file: {os.path.basename(water_mask_file)}')
+        water_mask = readfile.read(water_mask_file, box=box)[0].flatten()
+        mask *= np.array(water_mask, dtype=np.bool_)
+        del water_mask
+    else:
+        water_mask_file = None
+
+    # invert pixels on mask(s)
+    num_pix2inv = int(np.sum(mask))
+    idx_pix2inv = np.where(mask)[0]
+    print('number of pixels to invert: {} out of {} ({:.1f}%)'.format(
+        num_pix2inv, num_pix, num_pix2inv/num_pix*100))
+
+
+    ## 1. get bias time-series for bw-1 analysis
+    kwargs = dict(outdir=outdir, box=box, print_msg=True)
+    bias_ts_bw1_rough = read_cum_seq_closure_phase4conn(bias_free_conn, **kwargs).reshape(num_date, -1)
+    bias_ts_bw1_fine  = read_cum_seq_closure_phase4conn(2, **kwargs).reshape(num_date, -1)
+
+    bias_vel_bw1 = bias_ts_bw1_fine[-1,:] * phase2range / (tbase[-1] - tbase[0])
+    flag = np.where(np.abs(bias_vel_bw1) < 0.001, 0, 1).astype(np.bool_)
+    num_pix_less, num_pix_more = np.sum(~flag[mask]), np.sum(flag[mask])
+    digit = len(str(num_pix))
+    msg = 'number of pixels with bandwidth=1 velocity bias '
+    msg += f'< | >= 1 mm/yr: {num_pix_less:{digit}d} | {num_pix_more:{digit}d} out of {num_pix} '
+    msg += f'({num_pix_less/num_pix*100:.0f}% | {num_pix_more/num_pix*100:.0f}%)'
+    print(msg)
+
+    # scale bias_ts_bw1_fine based on bias_ts_bw1_rough
+    r2f_flag = np.multiply(~np.isnan(bias_ts_bw1_fine[-1,:]), bias_ts_bw1_fine[-1,:] != 0)
+    r2f_scale = np.ones((num_pix), dtype=np.float32)
+    r2f_scale[r2f_flag] = bias_ts_bw1_rough[-1,r2f_flag] / bias_ts_bw1_fine[-1,r2f_flag]
+    for i in range(num_date):
+        bias_ts_bw1_fine[i,:] *= r2f_scale
+    del r2f_flag, r2f_scale
+
+
+    # 2. construct bias_stack = W * A * Phi^X = Wr * A * w(delta_t) * Phi^X
+    # Equation (20) in Zheng et al. (2022, TGRS)
+    # Wr is a (num_ifgram, num_pix) matrix; each column stores the diagonal of the Wr matrix for one pixel
+    print('estimating bias_stack = Wr * A * w(delta_t) * Phi^X (Zheng et al., 2022, TGRS) ...')
+    Wr, A = get_design_matrix_Wr(date12_list, bw, box, bias_free_conn, outdir)
+    wPhi_x = np.array(bias_ts_bw1_rough, dtype=np.float32)
+    wPhi_x[:, flag] = bias_ts_bw1_fine[:, flag]
+
+    bias_stack = np.zeros((num_ifgram, num_pix), dtype=np.float32)
+    prog_bar = ptime.progressBar(maxValue=num_pix2inv)
+    for i in range(num_pix2inv):
+        idx = idx_pix2inv[i]
+
+        # calculate the bias_stack = W * A * phi^x = W^r * A * w(delta_t) * phi^x
+        bias_stack[:, idx] = np.linalg.multi_dot([np.diag(Wr[:, idx]), A, wPhi_x[:, idx]]).flatten()
+
+        prog_bar.update(i+1, every=3000, suffix='{}/{} pixels'.format(i+1, num_pix2inv))
+    prog_bar.close()
+    del bias_ts_bw1_rough, bias_ts_bw1_fine, wPhi_x, Wr
+
+
+    # 3. estimate bias time-series from bias stack: bias_ts = A+ * bias_stack
+    # Equation (20) in Zheng et al. (2022, TGRS)
+    # perform phase velocity inversion as per the original SBAS paper rather than doing direct phase inversion.
+    print('estimating bias time-series from bias stack via the SBAS approach ...')
+    A1, B1 = stack_obj.get_design_matrix4timeseries(date12_list=date12_list)
+    kwargs = {
+        'A'                 : A1,
+        'B'                 : B1,
+        'tbase_diff'        : tbase_diff,
+        'weight_sqrt'       : None,
+        'min_norm_velocity' : True,
+        'inv_quality_name'  : 'no',
+    }
+
+    # a. split mask into mask_all/par_net
+    # mask for valid (~NaN) observations in ALL ifgrams (share one B in sbas inversion)
+    mask_all_net = np.all(~np.isnan(bias_stack), axis=0) * np.all(bias_stack != 0, axis=0)
+    mask_all_net *= mask
+    mask_par_net = mask ^ mask_all_net
+    msg = 'estimating time-series for pixels with valid stack values'
+
+    # b. invert once for all pixels with obs in ALL ifgrams
+    bias_ts = np.zeros((num_date, num_pix), dtype=np.float32)
+    if np.sum(mask_all_net) > 0:
+        num_pix_all = int(np.sum(mask_all_net))
+        print(f'{msg} in all  ifgrams ({num_pix_all} pixels; {num_pix_all/num_pix2inv*100:.0f}%) ...')
+
+        # invert
+        bias_ts[:, mask_all_net] = estimate_timeseries(y=bias_stack[:, mask_all_net], **kwargs)[0]
+
+    # c. invert pixel-by-pixel for pixels with obs NOT in all ifgrams
+    if np.sum(mask_par_net) > 0:
+        num_pix_par = int(np.sum(mask_par_net))
+        idx_pix_par = np.where(mask_par_net)[0]
+        print(f'{msg} in some ifgrams ({num_pix_par} pixels; {num_pix_par/num_pix2inv*100:.0f}%) ...')
+
+        prog_bar = ptime.progressBar(maxValue=num_pix_par)
+        for i in range(num_pix_par):
+            idx = idx_pix_par[i]
+            # invert
+            bias_ts[:, idx] = estimate_timeseries(y=bias_stack[:, idx], **kwargs)[0].flatten()
+
+            prog_bar.update(i+1, every=200, suffix='{}/{} pixels'.format(i+1, num_pix_par))
+        prog_bar.close()
+    del bias_stack
+
+    bias_ts = bias_ts.reshape(num_date, box_len, box_wid) * phase2range
+
+    return bias_ts, box
+
+
+def estimate_bias_timeseries(stack_file, bias_free_conn, bw, cluster_kwargs, water_mask_file=None, outdir='./', max_memory=4.0):
+    '''Run the bias time-series estimation.
+
+    Parameters: stack_file     - string, path for ifgramStack.h5
+                bias_free_conn - integer, connection level that we assume to be bias-free
+                bw             - integer, bandwidth of the given time-series
+                cluster_kwargs - dictionary with the settings for parallel computing; to turn off, set cluster_kwargs['cluster_type'] = ''
+                water_mask_file - str, path to the water mask file
+                outdir         - string, directory for output files
+                max_memory     - float, maximum memory in GB for each patch processed
+    Returns:    bias_ts_file   - str, path to the bias time series file: timeseriesBias.h5
+    '''
+    print('\n'+'-'*80)
+    print(f'estimating the non-closure phase bias time-series for bandwidth={bw} (Zheng et al., 2022) ...')
+
+    stack_obj = ifgramStack(stack_file)
+    stack_obj.open(print_msg=False)
+    length, width = stack_obj.length, stack_obj.width
+    meta = dict(stack_obj.metadata)
+    wvl = float(meta['WAVELENGTH'])
+
+    date12_list = stack_obj.get_date12_list(dropIfgram=True)
+    date_list = stack_obj.get_date_list(dropIfgram=True)
+    num_ifgram = len(date12_list)
+    num_date = len(date_list)
+
+    # check the bandwidth of ifgramStack
+    num_ifgram_exp = bandwidth2num_ifgram(bw, num_date)
+    print(f'number of interferograms expected for bandwidth={bw}: {num_ifgram_exp}')
+    print(f'number of interferograms kept in ifgramStack.h5  : {num_ifgram}')
+    if num_ifgram != num_ifgram_exp:
+        msg = f'number of the kept interferograms ({num_ifgram}) is NOT the same as expected ({num_ifgram_exp})!'
+        msg += f'\n  This indicates that the bandwidth of ifgramStack.h5 and the user input ({bw}) are NOT consistent!'
+        msg +=  '\n  Modify the network of interferograms to be consistent via modify_network.py by:'
+        msg += f'\n  1) set mintpy.network.connNumMax={bw} and 2) re-run modify_network.py -t smallbaselineApp.cfg'
+        raise Exception(msg)
+
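+    # The expected count follows from simple combinatorics: a fully-connected
+    # sequential network of num_date acquisitions has (num_date - c) pairs at
+    # connection level c. A minimal sketch of the relation that
+    # bandwidth2num_ifgram is assumed to implement:
+    #
+    #   def bandwidth2num_ifgram(bw, num_date):
+    #       return sum(num_date - c for c in range(1, bw + 1))
+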
+    # estimate the bias time-series
+    bias_ts_file = os.path.join(outdir, 'timeseriesBias.h5')
+    ds_name_dict = {
+        'timeseries' : [np.float32,     (num_date, length, width), None],
+        'date'       : [np.dtype('S8'), (num_date,),  np.array(date_list, np.string_)],
+    }
+    meta['FILE_TYPE'] = 'timeseries'
+    meta['DATA_TYPE'] = 'float32'
+    meta['REF_DATE'] = date_list[0]
+    meta['UNIT'] = 'm'
+    meta['BANDS'] = '1'
+    meta['DATE12'] = f'{date_list[0][2:]}-{date_list[-1][2:]}'
+    writefile.layout_hdf5(bias_ts_file, ds_name_dict, meta)
+
+    data_kwargs = {
+        "stack_file"      : stack_file,
+        "bias_free_conn"  : bias_free_conn,
+        "bw"              : bw,
+        "wvl"             : wvl,
+        "water_mask_file" : water_mask_file,
+        "outdir"          : outdir,
+    }
+
+    # split the stack file into blocks to save memory
+    box_list, num_box = stack_obj.split2boxes(max_memory=max_memory, dim0_size=num_ifgram*2+num_date*3)
+    num_threads_dict = cluster.set_num_threads("1")
+
+    for i, box in enumerate(box_list):
+        box_wid, box_len = box[2] - box[0], box[3] - box[1]
+        print(box)
+        if num_box > 1:
+            print('\n------- processing patch {} out of {} --------------'.format(i+1, num_box))
+            print('box width : {}'.format(box_wid))
+            print('box length: {}'.format(box_len))
+
+        # update box argument in the input data
+        data_kwargs['box'] = box
+
+        if not cluster_kwargs['cluster_type']:
+            # non-parallel
+            bias_ts = estimate_bias_timeseries_patch(**data_kwargs)[0]
+
+        else:
+            # parallel
+            print('\n\n------- start parallel processing using Dask -------')
+
+            # initiate the output data
+            bias_ts = np.zeros((num_date, box_len, box_wid), dtype=np.float32)
+
+            # initiate dask cluster and client
+            cluster_obj = cluster.DaskCluster(**cluster_kwargs)
+            cluster_obj.open()
+
+            # run dask
+            bias_ts = cluster_obj.run(
+                func=estimate_bias_timeseries_patch,
+                func_data=data_kwargs,
+                results=[bias_ts],
+            )
+
+            # close dask cluster and client
+            cluster_obj.close()
+            print('------- finished parallel processing -------\n\n')
+
+        writefile.write_hdf5_block(
+            bias_ts_file,
+            data=bias_ts,
+            datasetName='timeseries',
+            block=[0, num_date, box[1], box[3], box[0], box[2]],
+        )
 
+    # roll back to the original number of threads
+    cluster.roll_back_num_threads(num_threads_dict)
+
+    return bias_ts_file
+
+
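+# Example usage (illustration only; the paths and levels below are hypothetical):
+#
+#   bias_ts_file = estimate_bias_timeseries(
+#       stack_file='inputs/ifgramStack.h5', bias_free_conn=5, bw=3,
+#       cluster_kwargs={'cluster_type': '', 'num_worker': '4', 'config_name': None},
+#       outdir='./')
+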
+################################################################################
+def main(iargs=None):
+    inps = cmd_line_parse(iargs)
+    start_time = time.time()
+
+    # common inputs
+    kwargs = dict(outdir=inps.outdir, max_memory=inps.maxMemory)
+
+    if inps.action == 'mask':
+        calc_closure_phase_mask(
+            stack_file=inps.stack_file,
+            bias_free_conn=inps.nl,
+            num_sigma=inps.num_sigma,
+            threshold_amp=inps.epsilon,
+            **kwargs)
+
+    elif inps.action.endswith('estimate'):
+        # compute the unwrapped closure phase bias time-series
+        # and re-unwrap to mitigate the impact of phase unwrapping errors
+        # which can dominate the true non-closure phase.
+        # max(2, inps.bw) is used to ensure we have conn-2 closure phase processed
+        conn_list = np.arange(2, max(2, inps.bw) + 1).tolist() + [inps.nl]
+        conn_list = sorted(list(set(conn_list)))
+        for conn in conn_list:
+            print('\n'+'-'*80)
+            print('calculating the unwrapped closure phase for '
+                  f'connection level = {conn} out of {conn_list} ...')
+            compute_unwrap_closure_phase(
+                stack_file=inps.stack_file,
+                conn=conn,
+                num_worker=int(inps.numWorker),
+                **kwargs)
+
+        if inps.action == 'quick_estimate':
+            estimate_bias_timeseries_approx(
+                stack_file=inps.stack_file,
+                bias_free_conn=inps.nl,
+                bw=inps.bw,
+                water_mask_file=inps.water_mask_file,
+                **kwargs)
+
+        elif inps.action == 'estimate':
+            cluster_kwargs = {
+                "cluster_type" : inps.cluster,
+                "num_worker"   : inps.numWorker,
+                "config_name"  : inps.config}
+            estimate_bias_timeseries(
+                stack_file=inps.stack_file,
+                bias_free_conn=inps.nl,
+                bw=inps.bw,
+                cluster_kwargs=cluster_kwargs,
+                water_mask_file=inps.water_mask_file,
+                **kwargs)
+
+    # used time
+    m, s = divmod(time.time() - start_time, 60)
+    print('time used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s))
+
+    return
+
+
+################################################################################
 if __name__ == '__main__':
     main(sys.argv[1:])
diff -pruN 1.3.3-2/mintpy/defaults/auto_path.py 1.4.0-1/mintpy/defaults/auto_path.py
--- 1.3.3-2/mintpy/defaults/auto_path.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/defaults/auto_path.py	2022-08-04 20:01:49.000000000 +0000
@@ -16,90 +16,92 @@ import numpy as np
 
 # Default path of data files from different InSAR processors to be loaded into MintPy
 AUTO_PATH_ISCE_TOPS = '''##----------Default file path of ISCE/topsStack products
-mintpy.load.processor      = isce
-mintpy.load.metaFile       = ../reference/IW*.xml
-mintpy.load.baselineDir    = ../baselines
-
-mintpy.load.unwFile        = ../merged/interferograms/*/filt*.unw
-mintpy.load.corFile        = ../merged/interferograms/*/filt*.cor
-mintpy.load.connCompFile   = ../merged/interferograms/*/filt*.unw.conncomp
-mintpy.load.ionoFile       = None
-mintpy.load.intFile        = None
-
-mintpy.load.demFile        = ../merged/geom_reference/hgt.rdr
-mintpy.load.lookupYFile    = ../merged/geom_reference/lat.rdr
-mintpy.load.lookupXFile    = ../merged/geom_reference/lon.rdr
-mintpy.load.incAngleFile   = ../merged/geom_reference/los.rdr
-mintpy.load.azAngleFile    = ../merged/geom_reference/los.rdr
-mintpy.load.shadowMaskFile = ../merged/geom_reference/shadowMask.rdr
-mintpy.load.waterMaskFile  = ../merged/geom_reference/waterMask.rdr
-mintpy.load.bperpFile      = None
+mintpy.load.processor       = isce
+mintpy.load.metaFile        = ../reference/IW*.xml
+mintpy.load.baselineDir     = ../baselines
+
+mintpy.load.unwFile         = ../merged/interferograms/*/filt*.unw
+mintpy.load.corFile         = ../merged/interferograms/*/filt*.cor
+mintpy.load.connCompFile    = ../merged/interferograms/*/filt*.unw.conncomp
+mintpy.load.intFile         = None
+
+mintpy.load.ionUnwFile      = ../ion/*/ion_cal/filt.ion
+mintpy.load.ionCorFile      = ../ion/*/ion_cal/raw_no_projection.cor
+mintpy.load.ionConnCompFile = None
+
+mintpy.load.demFile         = ../merged/geom_reference/hgt.rdr
+mintpy.load.lookupYFile     = ../merged/geom_reference/lat.rdr
+mintpy.load.lookupXFile     = ../merged/geom_reference/lon.rdr
+mintpy.load.incAngleFile    = ../merged/geom_reference/los.rdr
+mintpy.load.azAngleFile     = ../merged/geom_reference/los.rdr
+mintpy.load.shadowMaskFile  = ../merged/geom_reference/shadowMask.rdr
+mintpy.load.waterMaskFile   = ../merged/geom_reference/waterMask.rdr
+mintpy.load.bperpFile       = None
 '''
 
 AUTO_PATH_ISCE_STRIPMAP = '''##----------Default file path of ISCE/stripmapStack products
-mintpy.load.processor      = isce
-mintpy.load.metaFile       = ${m_shelve}/data.dat
-mintpy.load.baselineDir    = ../baselines
-
-mintpy.load.unwFile        = ../Igrams/*/filt*.unw
-mintpy.load.corFile        = ../Igrams/*/filt*.cor
-mintpy.load.connCompFile   = ../Igrams/*/filt*.unw.conncomp
-mintpy.load.ionoFile       = None
-mintpy.load.intFile        = None
-
-mintpy.load.demFile        = ../geom_reference/hgt.rdr
-mintpy.load.lookupYFile    = ../geom_reference/lat.rdr
-mintpy.load.lookupXFile    = ../geom_reference/lon.rdr
-mintpy.load.incAngleFile   = ../geom_reference/los.rdr
-mintpy.load.azAngleFile    = ../geom_reference/los.rdr
-mintpy.load.shadowMaskFile = ../geom_reference/shadowMask.rdr
-mintpy.load.waterMaskFile  = ../geom_reference/waterMask.rdr
-mintpy.load.bperpFile      = None
+mintpy.load.processor       = isce
+mintpy.load.metaFile        = ${m_shelve}/data.dat
+mintpy.load.baselineDir     = ../baselines
+
+mintpy.load.unwFile         = ../Igrams/*/filt*.unw
+mintpy.load.corFile         = ../Igrams/*/filt*.cor
+mintpy.load.connCompFile    = ../Igrams/*/filt*.unw.conncomp
+mintpy.load.intFile         = None
+
+mintpy.load.demFile         = ../geom_reference/hgt.rdr
+mintpy.load.lookupYFile     = ../geom_reference/lat.rdr
+mintpy.load.lookupXFile     = ../geom_reference/lon.rdr
+mintpy.load.incAngleFile    = ../geom_reference/los.rdr
+mintpy.load.azAngleFile     = ../geom_reference/los.rdr
+mintpy.load.shadowMaskFile  = ../geom_reference/shadowMask.rdr
+mintpy.load.waterMaskFile   = ../geom_reference/waterMask.rdr
+mintpy.load.bperpFile       = None
 '''
 
 AUTO_PATH_ROIPAC = '''##----------Default file path of ROI_PAC products
-mintpy.load.processor      = roipac
-mintpy.load.unwFile        = ../PROCESS/DONE/IFG*/filt*.unw
-mintpy.load.corFile        = ../PROCESS/DONE/IFG*/filt*.cor
-mintpy.load.connCompFile   = ../PROCESS/DONE/IFG*/filt*snap_connect.byt
-
-mintpy.load.demFile        = ../PROCESS/DONE/*${m_date12}*/radar_*rlks.hgt
-mintpy.load.lookupYFile    = ../PROCESS/GEO/geo_${m_date12}/geomap_*rlks.trans
-mintpy.load.lookupXFile    = ../PROCESS/GEO/geo_${m_date12}/geomap_*rlks.trans
-mintpy.load.incAngleFile   = None
-mintpy.load.azAngleFile    = None
-mintpy.load.shadowMaskFile = None
-mintpy.load.bperpFile      = None
+mintpy.load.processor       = roipac
+mintpy.load.unwFile         = ../PROCESS/DONE/IFG*/filt*.unw
+mintpy.load.corFile         = ../PROCESS/DONE/IFG*/filt*.cor
+mintpy.load.connCompFile    = ../PROCESS/DONE/IFG*/filt*snap_connect.byt
+
+mintpy.load.demFile         = ../PROCESS/DONE/*${m_date12}*/radar_*rlks.hgt
+mintpy.load.lookupYFile     = ../PROCESS/GEO/geo_${m_date12}/geomap_*rlks.trans
+mintpy.load.lookupXFile     = ../PROCESS/GEO/geo_${m_date12}/geomap_*rlks.trans
+mintpy.load.incAngleFile    = None
+mintpy.load.azAngleFile     = None
+mintpy.load.shadowMaskFile  = None
+mintpy.load.bperpFile       = None
 '''
 
 AUTO_PATH_GAMMA = '''##----------Default file path of GAMMA products
-mintpy.load.processor      = gamma
-mintpy.load.unwFile        = ../PROCESS/DONE/IFG*/diff*rlks.unw
-mintpy.load.corFile        = ../PROCESS/DONE/IFG*/*filt*rlks.cor
-mintpy.load.connCompFile   = None
-
-mintpy.load.demFile        = ../PROCESS/SIM/sim_${m_date12}/sim*.hgt_sim
-mintpy.load.lookupYFile    = ../PROCESS/SIM/sim_${m_date12}/sim*.UTM_TO_RDC
-mintpy.load.lookupXFile    = ../PROCESS/SIM/sim_${m_date12}/sim*.UTM_TO_RDC
-mintpy.load.incAngleFile   = None
-mintpy.load.azAngleFile    = None
-mintpy.load.shadowMaskFile = None
-mintpy.load.bperpFile      = ../merged/baselines/*/*.base_perp
+mintpy.load.processor       = gamma
+mintpy.load.unwFile         = ../PROCESS/DONE/IFG*/diff*rlks.unw
+mintpy.load.corFile         = ../PROCESS/DONE/IFG*/*filt*rlks.cor
+mintpy.load.connCompFile    = None
+
+mintpy.load.demFile         = ../PROCESS/SIM/sim_${m_date12}/sim*.hgt_sim
+mintpy.load.lookupYFile     = ../PROCESS/SIM/sim_${m_date12}/sim*.UTM_TO_RDC
+mintpy.load.lookupXFile     = ../PROCESS/SIM/sim_${m_date12}/sim*.UTM_TO_RDC
+mintpy.load.incAngleFile    = None
+mintpy.load.azAngleFile     = None
+mintpy.load.shadowMaskFile  = None
+mintpy.load.bperpFile       = ../merged/baselines/*/*.base_perp
 '''
 
 AUTO_PATH_ARIA = '''##----------Default file path of ARIA products
-mintpy.load.processor      = aria
-mintpy.load.unwFile        = ../stack/unwrapStack.vrt
-mintpy.load.corFile        = ../stack/cohStack.vrt
-mintpy.load.connCompFile   = ../stack/connCompStack.vrt
-
-mintpy.load.demFile        = ../DEM/*.dem
-mintpy.load.lookupYFile    = None
-mintpy.load.lookupXFile    = None
-mintpy.load.incAngleFile   = ../incidenceAngle/*.vrt
-mintpy.load.azAngleFile    = ../azimuthAngle/*.vrt
-mintpy.load.shadowMaskFile = None
-mintpy.load.waterMaskFile  = ../mask/watermask.msk
+mintpy.load.processor       = aria
+mintpy.load.unwFile         = ../stack/unwrapStack.vrt
+mintpy.load.corFile         = ../stack/cohStack.vrt
+mintpy.load.connCompFile    = ../stack/connCompStack.vrt
+
+mintpy.load.demFile         = ../DEM/*.dem
+mintpy.load.lookupYFile     = None
+mintpy.load.lookupXFile     = None
+mintpy.load.incAngleFile    = ../incidenceAngle/*.vrt
+mintpy.load.azAngleFile     = ../azimuthAngle/*.vrt
+mintpy.load.shadowMaskFile  = None
+mintpy.load.waterMaskFile   = ../mask/watermask.msk
 '''
 
 
diff -pruN 1.3.3-2/mintpy/defaults/smallbaselineApp_auto.cfg 1.4.0-1/mintpy/defaults/smallbaselineApp_auto.cfg
--- 1.3.3-2/mintpy/defaults/smallbaselineApp_auto.cfg	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/defaults/smallbaselineApp_auto.cfg	2022-08-04 20:01:49.000000000 +0000
@@ -11,12 +11,13 @@ mintpy.load.processor    = isce
 mintpy.load.autoPath     = no
 mintpy.load.updateMode   = yes
 mintpy.load.compression  = no
-##---------multilook (optional):
-mintpy.load.ystep        = 1
-mintpy.load.xstep        = 1
 ##-------subset (optional)
 mintpy.subset.yx         = no
 mintpy.subset.lalo       = no
+##---------multilook (optional):
+mintpy.multilook.method  = nearest
+mintpy.multilook.ystep   = 1
+mintpy.multilook.xstep   = 1
 
 
 ########## modify_network
@@ -125,13 +126,21 @@ mintpy.reference.date    = reference_dat
 
 
 ########## velocity
-mintpy.velocity.excludeDate    = exclude_date.txt
-mintpy.velocity.startDate      = no
-mintpy.velocity.endDate        = no
-
-## bootstrap
-mintpy.velocity.bootstrap      = no
-mintpy.velocity.bootstrapCount = 400
+mintpy.timeFunc.startDate    = no
+mintpy.timeFunc.endDate      = no
+mintpy.timeFunc.excludeDate  = exclude_date.txt
+
+## time functions
+mintpy.timeFunc.polynomial   = 1
+mintpy.timeFunc.periodic     = no
+mintpy.timeFunc.stepDate     = no
+mintpy.timeFunc.exp          = no
+mintpy.timeFunc.log          = no
+
+## uncertainty quantification
+mintpy.timeFunc.uncertaintyQuantification  = residue
+mintpy.timeFunc.timeSeriesCovFile          = no
+mintpy.timeFunc.bootstrapCount             = 400
 
 
 ########## geocode
diff -pruN 1.3.3-2/mintpy/defaults/smallbaselineApp.cfg 1.4.0-1/mintpy/defaults/smallbaselineApp.cfg
--- 1.3.3-2/mintpy/defaults/smallbaselineApp.cfg	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/defaults/smallbaselineApp.cfg	2022-08-04 20:01:49.000000000 +0000
@@ -26,43 +26,52 @@ mintpy.compute.config    = auto #[none /
 ## no   - save   0% disk usage, fast [default]
 ## lzf  - save ~57% disk usage, relatively slow
 ## gzip - save ~62% disk usage, very slow [not recommended]
-mintpy.load.processor      = auto  #[isce, aria, hyp3, gmtsar, snap, gamma, roipac], auto for isce
-mintpy.load.autoPath       = auto  #[yes / no], auto for no, use pre-defined auto path
-mintpy.load.updateMode     = auto  #[yes / no], auto for yes, skip re-loading if HDF5 files are complete
-mintpy.load.compression    = auto  #[gzip / lzf / no], auto for no.
+mintpy.load.processor       = auto  #[isce, aria, hyp3, gmtsar, snap, gamma, roipac], auto for isce
+mintpy.load.autoPath        = auto  #[yes / no], auto for no, use pre-defined auto path
+mintpy.load.updateMode      = auto  #[yes / no], auto for yes, skip re-loading if HDF5 files are complete
+mintpy.load.compression     = auto  #[gzip / lzf / no], auto for no.
 ##---------for ISCE only:
-mintpy.load.metaFile       = auto  #[path of common metadata file for the stack], i.e.: ./reference/IW1.xml, ./referenceShelve/data.dat
-mintpy.load.baselineDir    = auto  #[path of the baseline dir], i.e.: ./baselines
-##---------interferogram datasets:
-mintpy.load.unwFile        = auto  #[path pattern of unwrapped interferogram files]
-mintpy.load.corFile        = auto  #[path pattern of spatial coherence       files]
-mintpy.load.connCompFile   = auto  #[path pattern of connected components    files], optional but recommended
-mintpy.load.intFile        = auto  #[path pattern of wrapped interferogram   files], optional
-mintpy.load.ionoFile       = auto  #[path pattern of ionospheric delay       files], optional
-mintpy.load.magFile        = auto  #[path pattern of interferogram magnitude files], optional
-##---------offset datasets (optional):
-mintpy.load.azOffFile      = auto  #[path pattern of azimuth offset file], optional
-mintpy.load.rgOffFile      = auto  #[path pattern of range   offset file], optional
-mintpy.load.azOffStdFile   = auto  #[path pattern of azimuth offset variance file], optional
-mintpy.load.rgOffStdFile   = auto  #[path pattern of range   offset variance file], optional
-mintpy.load.offSnrFile     = auto  #[path pattern of offset signal-to-noise ratio file], optional
-##---------geometry datasets:
-mintpy.load.demFile        = auto  #[path of DEM file]
-mintpy.load.lookupYFile    = auto  #[path of latitude /row   /y coordinate file], not required for geocoded data
-mintpy.load.lookupXFile    = auto  #[path of longitude/column/x coordinate file], not required for geocoded data
-mintpy.load.incAngleFile   = auto  #[path of incidence angle file], optional but recommended
-mintpy.load.azAngleFile    = auto  #[path of azimuth   angle file], optional
-mintpy.load.shadowMaskFile = auto  #[path of shadow mask file], optional but recommended
-mintpy.load.waterMaskFile  = auto  #[path of water  mask file], optional but recommended
-mintpy.load.bperpFile      = auto  #[path pattern of 2D perpendicular baseline file], optional
-##---------multilook (optional):
-## multilook while loading data with nearest interpolation, to reduce dataset size
-mintpy.load.ystep          = auto    #[int >= 1], auto for 1 - no multilooking
-mintpy.load.xstep          = auto    #[int >= 1], auto for 1 - no multilooking
+mintpy.load.metaFile        = auto  #[path of common metadata file for the stack], i.e.: ./reference/IW1.xml, ./referenceShelve/data.dat
+mintpy.load.baselineDir     = auto  #[path of the baseline dir], i.e.: ./baselines
+##---------interferogram stack:
+mintpy.load.unwFile         = auto  #[path pattern of unwrapped interferogram files]
+mintpy.load.corFile         = auto  #[path pattern of spatial coherence       files]
+mintpy.load.connCompFile    = auto  #[path pattern of connected components    files], optional but recommended
+mintpy.load.intFile         = auto  #[path pattern of wrapped interferogram   files], optional
+mintpy.load.magFile         = auto  #[path pattern of interferogram magnitude files], optional
+##---------ionosphere stack (optional):
+mintpy.load.ionUnwFile      = auto  #[path pattern of unwrapped interferogram files]
+mintpy.load.ionCorFile      = auto  #[path pattern of spatial coherence       files]
+mintpy.load.ionConnCompFile = auto  #[path pattern of connected components    files], optional but recommended
+##---------offset stack (optional):
+mintpy.load.azOffFile       = auto  #[path pattern of azimuth offset file]
+mintpy.load.rgOffFile       = auto  #[path pattern of range   offset file]
+mintpy.load.azOffStdFile    = auto  #[path pattern of azimuth offset variance file], optional but recommended
+mintpy.load.rgOffStdFile    = auto  #[path pattern of range   offset variance file], optional but recommended
+mintpy.load.offSnrFile      = auto  #[path pattern of offset signal-to-noise ratio file], optional
+##---------geometry:
+mintpy.load.demFile         = auto  #[path of DEM file]
+mintpy.load.lookupYFile     = auto  #[path of latitude /row   /y coordinate file], not required for geocoded data
+mintpy.load.lookupXFile     = auto  #[path of longitude/column/x coordinate file], not required for geocoded data
+mintpy.load.incAngleFile    = auto  #[path of incidence angle file], optional but recommended
+mintpy.load.azAngleFile     = auto  #[path of azimuth   angle file], optional
+mintpy.load.shadowMaskFile  = auto  #[path of shadow mask file], optional but recommended
+mintpy.load.waterMaskFile   = auto  #[path of water  mask file], optional but recommended
+mintpy.load.bperpFile       = auto  #[path pattern of 2D perpendicular baseline file], optional
 ##---------subset (optional):
 ## if both yx and lalo are specified, use lalo option unless a) no lookup file AND b) dataset is in radar coord
-mintpy.subset.yx           = auto    #[y0:y1,x0:x1 / no], auto for no
-mintpy.subset.lalo         = auto    #[S:N,W:E / no], auto for no
+mintpy.subset.yx            = auto    #[y0:y1,x0:x1 / no], auto for no
+mintpy.subset.lalo          = auto    #[S:N,W:E / no], auto for no
+##---------multilook (optional):
+## multilook while loading data with the specified method, to reduce dataset size
+## method - nearest, mean and median methods are applicable to interferogram/ionosphere/offset stack(s), except for:
+##   connected components and all geometry datasets, for which nearest is hardwired.
+## Use the mean / median method with caution! It could smooth out the noise for a better SNR, but it could also smooth
+##   out the unwrapping errors, breaking the integer 2pi relationship that is used in the unwrapping error correction.
+##   If you really want to increase the SNR, consider re-generating your stack of interferograms with more looks instead.
+mintpy.multilook.method     = auto    #[nearest, mean, median], auto for nearest - lines/rows skipping approach
+mintpy.multilook.ystep      = auto    #[int >= 1], auto for 1 - no multilooking
+mintpy.multilook.xstep      = auto    #[int >= 1], auto for 1 - no multilooking
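+## e.g. (illustration): downsample the stack by 3x3 with spatial averaging via:
+##   mintpy.multilook.method = mean
+##   mintpy.multilook.ystep  = 3
+##   mintpy.multilook.xstep  = 3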
 
 
 ########## 2. modify_network
@@ -145,7 +154,7 @@ mintpy.unwrapError.connCompMinArea = aut
 ##     are used for all pixels within this common conn. comp.
 mintpy.unwrapError.numSample       = auto  #[int>1], auto for 100, number of samples to invert for common conn. comp.
 
-## briding options:
+## bridging options:
 ## ramp - a phase ramp could be estimated based on the largest reliable region, removed from the entire interferogram
 ##     before estimating the phase difference between reliable regions and added back after the correction.
 ## bridgePtsRadius - half size of the window used to calculate the median value of phase difference
@@ -166,7 +175,7 @@ mintpy.networkInversion.waterMaskFile
 mintpy.networkInversion.minNormVelocity = auto #[yes / no], auto for yes, min-norm deformation velocity / phase
 mintpy.networkInversion.residualNorm    = auto #[L2 ], auto for L2, norm minimization solution
 
-## mask options for unwrapPhase of each interferogram before inversion (recommed if weightFunct=no):
+## mask options for unwrapPhase of each interferogram before inversion (recommended if weightFunct=no):
 ## a. coherence              - mask out pixels with spatial coherence < maskThreshold
 ## b. connectComponent       - mask out pixels with False/0 value
 ## c. no                     - no masking [recommended].
@@ -191,7 +200,7 @@ mintpy.networkInversion.shadowMask  = au
 
 ########## correct_SET
 ## Solid Earth tides (SET) correction [need to install insarlab/PySolid]
-## reference: Milbert (2018); Fattahi et al. (2020, AGU)
+## reference: Milbert (2018); Yunjun et al. (2022, IEEE-TGRS)
 mintpy.solidEarthTides = auto #[yes / no], auto for no
 
 
@@ -258,7 +267,7 @@ mintpy.topographicResidual.pixelwiseGeom
 ########## 9.1 residual_RMS (root mean squares for noise evaluation)
 ## Calculate the Root Mean Square (RMS) of residual phase time-series for each acquisition
 ## reference: Yunjun et al. (2019, section 4.9 and 5.4)
-## To get rid of long wavelength component in space, a ramp is removed for each acquisition
+## To get rid of the long spatial-wavelength component, a ramp is removed from each acquisition
 ## Set optimal reference date to date with min RMS
 ## Set exclude dates (outliers) to dates with RMS > cutoff * median RMS (Median Absolute Deviation)
 mintpy.residualRMS.maskFile = auto  #[file name / no], auto for maskTempCoh.h5, mask for ramp estimation
@@ -273,24 +282,41 @@ mintpy.reference.date = auto   #[referen
 
 
 ########## 10. velocity
-## Estimate linear velocity and its standard deviation from time-series
-## and from tropospheric delay file if exists.
-## reference: Fattahi and Amelung (2015, JGR)
-mintpy.velocity.excludeDate    = auto   #[exclude_date.txt / 20080520,20090817 / no], auto for exclude_date.txt
-mintpy.velocity.startDate      = auto   #[20070101 / no], auto for no
-mintpy.velocity.endDate        = auto   #[20101230 / no], auto for no
-
-## Bootstrapping
-## refernce: Efron and Tibshirani (1986, Stat. Sci.)
-mintpy.velocity.bootstrap      = auto   #[yes / no], auto for no, use bootstrap
-mintpy.velocity.bootstrapCount = auto   #[int>1], auto for 400, number of iterations for bootstrapping
+## Estimate a suite of time functions [linear velocity by default]
+## from final displacement file (and from tropospheric delay file if exists)
+mintpy.timeFunc.startDate   = auto   #[20070101 / no], auto for no
+mintpy.timeFunc.endDate     = auto   #[20101230 / no], auto for no
+mintpy.timeFunc.excludeDate = auto   #[exclude_date.txt / 20080520,20090817 / no], auto for exclude_date.txt
+
+## Fit a suite of time functions
+## reference: Hetland et al. (2012, JGR) equation (2-9)
+## polynomial function    is  defined by its degree as an integer: 1 for linear, 2 for quadratic, etc.
+## periodic   function(s) are defined by a list of periods in decimal years: 1 for annual, 0.5 for semi-annual, etc.
+## step       function(s) are defined by a list of onset times as strings in YYYYMMDD(THHMM) format
+## exp & log  function(s) are defined by an onset time followed by a characteristic time in integer days.
+##   Multiple exp and log functions can be overlaid on top of each other, achieved via e.g.:
+##   20110311,60,120          - two functions sharing the same onset time OR
+##   20110311,60;20170908,120 - separated by ";"
+mintpy.timeFunc.polynomial = auto   #[int >= 0], auto for 1, degree of the polynomial function
+mintpy.timeFunc.periodic   = auto   #[1,0.5 / list_of_float / no], auto for no, periods in decimal years
+mintpy.timeFunc.step       = auto   #[20110311,20170908 / 20120928T1733 / no], auto for no, step function(s)
+mintpy.timeFunc.exp        = auto   #[20110311,60 / 20110311,60,120 / 20110311,60;20170908,120 / no], auto for no
+mintpy.timeFunc.log        = auto   #[20110311,60 / 20110311,60,120 / 20110311,60;20170908,120 / no], auto for no
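+## e.g. (illustration): linear rate + annual/semi-annual cycles + a coseismic step:
+##   mintpy.timeFunc.polynomial = 1
+##   mintpy.timeFunc.periodic   = 1,0.5
+##   mintpy.timeFunc.step       = 20110311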
+
+## Uncertainty quantification methods:
+## a. residue    - propagate from fitting residue assuming normal dist. in time (Fattahi & Amelung, 2015, JGR)
+## b. covariance - propagate from time series (co)variance matrix
+## c. bootstrap  - bootstrapping (independently resampling with replacement; Efron & Tibshirani, 1986, Stat. Sci.)
+mintpy.timeFunc.uncertaintyQuantification = auto   #[residue, covariance, bootstrap], auto for residue
+mintpy.timeFunc.timeSeriesCovFile         = auto   #[filename / no], auto for no, time series covariance file
+mintpy.timeFunc.bootstrapCount            = auto   #[int>1], auto for 400, number of iterations for bootstrapping
 
 
 ########## 11.1 geocode (post-processing)
 # for input dataset in radar coordinates only
 # commonly used resolution in meters and in degrees (on equator)
-# 100,         60,          50,          30,          20,          10
-# 0.000925926, 0.000555556, 0.000462963, 0.000277778, 0.000185185, 0.000092593
+# 100,         90,          60,          50,          30,          20,          10
+# 0.000925926, 0.000833334, 0.000555556, 0.000462963, 0.000277778, 0.000185185, 0.000092593
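+# i.e. (illustration): step_in_degrees ~= step_in_meters / 108000 on the equator,
+#   e.g. 90 / 108000 ~= 0.000833334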
 mintpy.geocode              = auto  #[yes / no], auto for yes
 mintpy.geocode.SNWE         = auto  #[-1.2,0.5,-92,-91 / none ], auto for none, output extent in degree
 mintpy.geocode.laloStep     = auto  #[-0.000555556,0.000555556 / None], auto for None, output resolution in degree
diff -pruN 1.3.3-2/mintpy/dem_error.py 1.4.0-1/mintpy/dem_error.py
--- 1.3.3-2/mintpy/dem_error.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/dem_error.py	2022-08-04 20:01:49.000000000 +0000
@@ -13,9 +13,10 @@ import argparse
 import h5py
 import numpy as np
 from scipy import linalg
+
 from mintpy.objects import timeseries, geometry, cluster
 from mintpy.defaults.template import get_template_content
-from mintpy.utils import arg_group, ptime, time_func, readfile, writefile, utils as ut
+from mintpy.utils import arg_utils, ptime, time_func, readfile, writefile, utils as ut
 
 
 # key configuration parameter name
@@ -31,6 +32,11 @@ configKeys = [
 ############################################################################
 TEMPLATE = get_template_content('correct_topography')
 
+REFERENCE = """reference:
+  Fattahi, H., and F. Amelung (2013), DEM Error Correction in InSAR Time Series,
+  IEEE Trans. Geosci. Remote Sens., 51(7), 4249-4259, doi:10.1109/TGRS.2012.2227761.
+"""
+
 EXAMPLE = """example:
   # correct DEM error with pixel-wise geometry parameters [slow]
   dem_error.py  timeseries_ERA5_ramp.h5 -g inputs/geometryRadar.h5 -t smallbaselineApp.cfg
@@ -44,16 +50,12 @@ EXAMPLE = """example:
   add.py demErr_msk.h5 dem.h5 -o demNew.h5
 """
 
-REFERENCE = """reference:
-  Fattahi, H., and F. Amelung (2013), DEM Error Correction in InSAR Time Series,
-  IEEE TGRS, 51(7), 4249-4259, doi:10.1109/TGRS.2012.2227761.
-"""
-
-
-def create_parser():
-    parser = argparse.ArgumentParser(description='DEM Error (Topographic Residual) Correction',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog='{}\n{}\n{}'.format(REFERENCE, TEMPLATE, EXAMPLE))
+def create_parser(subparsers=None):
+    synopsis = 'DEM Error (Topographic Residual) Correction'
+    epilog = REFERENCE + '\n' + TEMPLATE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = arg_utils.create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('timeseries_file',
                         help='Timeseries file to be corrected')
@@ -87,8 +89,8 @@ def create_parser():
                              'and newer than input interferograms file\n' +
                              '2) all configuration parameters are the same.')
     # computing
-    parser = arg_group.add_memory_argument(parser)
-    parser = arg_group.add_parallel_argument(parser)
+    parser = arg_utils.add_memory_argument(parser)
+    parser = arg_utils.add_parallel_argument(parser)
 
     return parser
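+# Note (illustration; assumed behavior of arg_utils.create_argument_parser):
+# when a `subparsers` object is passed in, the parser is registered as a
+# sub-command of a single top-level entry point; when it is None, a regular
+# standalone ArgumentParser is returned, so the script still runs on its own.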
 
@@ -176,7 +178,7 @@ def read_template2inps(template_file, in
         inps = cmd_line_parse()
     iDict = vars(inps)
     print('read options from template file: '+os.path.basename(template_file))
-    template = readfile.read_template(template_file)
+    template = readfile.read_template(template_file, skip_chars=['[', ']'])
     template = ut.check_template_auto_value(template)
 
     # Read template option
@@ -189,8 +191,7 @@ def read_template2inps(template_file, in
             if key in ['polyOrder']:
                 iDict[key] = int(value)
             elif key in ['excludeDate','stepFuncDate']:
-                value = value.replace('[','').replace(']','').replace(',', ' ')
-                iDict[key] = ptime.yyyymmdd(value.split())
+                iDict[key] = ptime.yyyymmdd(value.split(','))
 
     # computing configurations
     dask_key_prefix = 'mintpy.compute.'
@@ -676,7 +677,7 @@ def main(iargs=None):
     # run
     correct_dem_error(inps)
 
-    return inps.outfile
+    return
 
 
 ################################################################################
diff -pruN 1.3.3-2/mintpy/dem_gsi.py 1.4.0-1/mintpy/dem_gsi.py
--- 1.3.3-2/mintpy/dem_gsi.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/dem_gsi.py	2022-08-04 20:01:49.000000000 +0000
@@ -11,7 +11,9 @@ import sys
 import glob
 import argparse
 import numpy as np
+
 from mintpy.utils import writefile
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 # DEHM basic info
@@ -29,16 +31,18 @@ EXAMPLE = """example:
   dem_gsi.py -b 31.1 32.8 130.1 131.9 --grid-dir ~/data/DEM/GSI_DEHM10m
 """
 
-REFERENCE = """DEHM: Digital Ellipsoidal Height Model
+NOTE = """DEHM: Digital Ellipsoidal Height Model
 yyxx.dehm with yy and xx indicating the coordinates of the upper left corner of the first pixel.
 where longitude = xx + 100
       latitude  = (yy + 1) / 1.5
 """
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Prepare DEM from GSI (Japan) DEHM grib files.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Prepare DEM from GSI (Japan) DEHM grib files.'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('-b','--bbox', dest='SNWE', type=float, nargs=4, metavar=('S','N','W','E'), required=True,
                         help='Bounding box in latitude [-90, 90] and longitude [-180, 180].')
diff -pruN 1.3.3-2/mintpy/diff.py 1.4.0-1/mintpy/diff.py
--- 1.3.3-2/mintpy/diff.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/diff.py	2022-08-04 20:01:49.000000000 +0000
@@ -9,7 +9,6 @@
 import os
 import sys
 import time
-import argparse
 import numpy as np
 
 from mintpy.objects import (
@@ -20,6 +19,7 @@ from mintpy.objects import (
     ifgramDatasetNames,
 )
 from mintpy.utils import readfile, writefile
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 #####################################################################################
@@ -35,16 +35,18 @@ EXAMPLE = """example:
 """
 
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Generates the difference of two input files.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
-
-    parser.add_argument('file1', help='file to be substracted.')
-    parser.add_argument('file2', nargs='+', help='file used to substract')
-    parser.add_argument('-o', '--output', dest='outfile',
+def create_parser(subparsers=None):
+    synopsis = 'Generate the difference of two input files.'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
+
+    parser.add_argument('file1', help='file to be subtracted.')
+    parser.add_argument('file2', nargs='+', help='file used to subtract')
+    parser.add_argument('-o', '--output', dest='out_file',
                         help='output file name, default is file1_diff_file2.h5')
-    parser.add_argument('--force', action='store_true',
+    parser.add_argument('--force','--force-diff', dest='force_diff', action='store_true',
                         help='Enforce the differencing for the shared dates only for time-series files')
     return parser
 
@@ -53,11 +55,20 @@ def cmd_line_parse(iargs=None):
     parser = create_parser()
     inps = parser.parse_args(args=iargs)
 
-    # for timeseries and ifgramStack, only two files differencing is supported
-    atr = readfile.read_attribute(inps.file1)
-    if atr['FILE_TYPE'] in ['timeseries', 'ifgramStack']:
+    # ONLY TWO files differencing is supported for timeseries and ifgramStack types
+    ftype = readfile.read_attribute(inps.file1)['FILE_TYPE']
+    if ftype in ['timeseries', 'ifgramStack']:
         if len(inps.file2) > 1:
-            raise SystemExit('ERROR: only one file2 is inputed for {} type'.format(atr['FILE_TYPE']))
+            raise SystemExit(f'ERROR: ONLY ONE file2 is allowed for {ftype} type!')
+
+    # --output
+    if not inps.out_file:
+        if len(inps.file2) > 1:
+            raise ValueError('--output is required for >=2 files!')
+        fbase1, fext = os.path.splitext(inps.file1)
+        fbase2 = os.path.splitext(os.path.basename(inps.file2[0]))[0]
+        inps.out_file = f'{fbase1}_diff_{fbase2}{fext}'
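+        # e.g. (illustration): file1='timeseries.h5', file2=['inputs/ERA5.h5']
+        #   --> out_file='timeseries_diff_ERA5.h5'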
+
     return inps
 
 
@@ -71,46 +82,39 @@ def check_reference(atr1, atr2):
     # 1. reference date
     # if same, do nothing
     # if different, use the 1st one as the reference
-    if atr1['REF_DATE'] == atr2.get('REF_DATE', None):
+    ref_date1 = atr1.get('REF_DATE', None)
+    ref_date2 = atr2.get('REF_DATE', None)
+    if ref_date1 == ref_date2:
         ref_date = None
     else:
-        ref_date = atr1['REF_DATE']
+        ref_date = ref_date1
 
     # 2. reference point
     # if same, do nothing
     # if different, use the 1st one as the reference
-    ref_y = atr1.get('REF_Y', None)
-    ref_x = atr1.get('REF_X', None)
-    if ref_x == atr2.get('REF_X', None) or ref_y == atr2.get('REF_Y', None):
-        ref_y = None
-        ref_x = None
+    ref_yx1 = [atr1.get('REF_Y', None), atr1.get('REF_X', None)]
+    ref_yx2 = [atr2.get('REF_Y', None), atr2.get('REF_X', None)]
+    if ref_yx1 == ref_yx2:
+        ref_y, ref_x = None, None
     else:
-        ref_y = ref_y
-        ref_x = ref_x
+        ref_y, ref_x = ref_yx1
+
+    # ensure ref_y/x are integer
+    ref_y = int(ref_y) if ref_y is not None else None
+    ref_x = int(ref_x) if ref_x is not None else None
 
-    if ref_y is not None:
-        ref_y = int(ref_y)
-    if ref_x is not None:
-        ref_x = int(ref_x)
     return ref_date, ref_y, ref_x
 
 
-def diff_file(file1, file2, out_file=None, force=False, max_num_pixel=2e8):
+def diff_file(file1, file2, out_file, force_diff=False, max_num_pixel=2e8):
     """calculate/write file1 - file2
 
-    Parameters: file1    - str, path of file1
-                file2    - list of str, path of file2(s)
-                out_file - str, path of output file
-                force    - bool, overwrite existing output file
+    Parameters: file1         - str, path of file1
+                file2         - list(str), path of file2(s)
+                out_file      - str, path of output file
+                force_diff    - bool, enforce the differencing for the shared dates only (time-series files)
                 max_num_pixel - float, maximum number of pixels for each block
     """
-    start_time = time.time()
-
-    if not out_file:
-        fbase, fext = os.path.splitext(file1)
-        if len(file2) > 1:
-            raise ValueError('Output file name is needed for more than 2 files input.')
-        out_file = '{}_diff_{}{}'.format(fbase, os.path.splitext(os.path.basename(file2[0]))[0], fext)
     print('{} - {} --> {}'.format(file1, file2, out_file))
 
     # Read basic info
@@ -142,7 +146,7 @@ def diff_file(file1, file2, out_file=Non
         dateShared = np.ones((len(dateList1)), dtype=np.bool_)
         if dateListShared != dateList1:
             print('WARNING: {} does not contain all dates in {}'.format(file2, file1))
-            if force:
+            if force_diff:
                 dateListEx = list(set(dateList1) - set(dateListShared))
                 print('Continue and enforce the differencing for their shared dates only.')
                 print('\twith following dates are ignored for differencing:\n{}'.format(dateListEx))
@@ -255,7 +259,7 @@ def diff_file(file1, file2, out_file=Non
         # loop over each file
         dsDict = {}
         for ds_name in ds_names:
-            print('adding {} ...'.format(ds_name))
+            print('differencing {} ...'.format(ds_name))
             data = readfile.read(file1, datasetName=ds_name)[0]
             dtype = data.dtype
 
@@ -264,6 +268,12 @@ def diff_file(file1, file2, out_file=Non
                 ds_name2read = None if len(ds_names_list[i+1]) == 1 else ds_name
                 # read
                 data2 = readfile.read(fname, datasetName=ds_name2read)[0]
+                # do the referencing for velocity files
+                if ds_name == 'velocity':
+                    ref_y, ref_x = check_reference(atr1, atr2)[1:]
+                    if ref_y is not None and ref_x is not None:
+                        print('* referencing data from {} to y/x: {}/{}'.format(os.path.basename(file2[0]), ref_y, ref_x))
+                        data2 -= data2[ref_y, ref_x]
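+                        # note (illustration): this removes the constant offset
+                        # between the two velocity maps caused by different
+                        # reference pixels before differencing.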
                 # convert to float32 to apply the operation because some types, e.g. bool, do not support it.
                 # then convert back to the original data type
                 data = np.array(data, dtype=np.float32) - np.array(data2, dtype=np.float32)
@@ -275,18 +285,23 @@ def diff_file(file1, file2, out_file=Non
         print('use metadata from the 1st file: {}'.format(file1))
         writefile.write(dsDict, out_file=out_file, metadata=atr1, ref_file=file1)
 
-    m, s = divmod(time.time()-start_time, 60)
-    print('time used: {:02.0f} mins {:02.1f} secs'.format(m, s))
-
     return out_file
 
 
 def main(iargs=None):
     inps = cmd_line_parse(iargs)
+    start_time = time.time()
 
-    inps.outfile = diff_file(inps.file1, inps.file2, inps.outfile, force=inps.force)
+    diff_file(file1=inps.file1,
+              file2=inps.file2,
+              out_file=inps.out_file,
+              force_diff=inps.force_diff)
+
+    # used time
+    m, s = divmod(time.time()-start_time, 60)
+    print('time used: {:02.0f} mins {:02.1f} secs'.format(m, s))
 
-    return inps.outfile
+    return
 
 
 #####################################################################################
diff -pruN 1.3.3-2/mintpy/generate_mask.py 1.4.0-1/mintpy/generate_mask.py
--- 1.3.3-2/mintpy/generate_mask.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/generate_mask.py	2022-08-04 20:01:49.000000000 +0000
@@ -10,10 +10,10 @@ import os
 import sys
 import time
 import warnings
-import argparse
 import h5py
 import numpy as np
 from mintpy.utils import readfile, writefile, utils as ut
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 ################################################################################################
@@ -22,10 +22,10 @@ EXAMPLE = """example:
   generate_mask.py  temporalCoherence.h5 -m 0.7 -o maskTempCoh.h5 --base inputs/geometryRadar.h5 --base-dset shadow --base-value 1
   generate_mask.py  avgSpatialCoh.h5     -m 0.7 --base waterMask.h5 -o maskSpatialCoh.h5
 
-  # exlcude area by min/max value and/or subset in row/col direction
+  # exclude area by min/max value and/or subset in row/col direction
   generate_mask.py  081018_090118.unw -m 3 -M 8 -y 100 700 -x 200 800 -o mask_1.h5
 
-  # exlcude pixel cluster based on minimum number of pixels
+  # exclude pixel cluster based on minimum number of pixels
   generate_mask.py  maskTempCoh.h5 -p 10 mask_1.h5
 
   # exclude pixels with large velocity STD: |velocity| > cutoff (2 by default) * velocityStd
@@ -53,10 +53,12 @@ EXAMPLE = """example:
 """
 
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Generate mask file from input file',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Generate mask file from input file'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', help='input file')
     parser.add_argument('dset', nargs='?',
@@ -340,11 +342,12 @@ def main(iargs=None):
         return inps.outfile
 
     ##### Mask: Threshold
-    inps.outfile = create_threshold_mask(inps)
+    create_threshold_mask(inps)
 
     m, s = divmod(time.time()-start_time, 60)
     print('time used: {:02.0f} mins {:02.1f} secs.'.format(m, s))
-    return inps.outfile
+
+    return
 
 
 ################################################################################################
diff -pruN 1.3.3-2/mintpy/geocode.py 1.4.0-1/mintpy/geocode.py
--- 1.3.3-2/mintpy/geocode.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/geocode.py	2022-08-04 20:01:49.000000000 +0000
@@ -9,13 +9,12 @@
 import os
 import sys
 import time
-import argparse
 import numpy as np
 
 from mintpy.objects.resample import resample
 from mintpy.defaults.template import get_template_content
 from mintpy.utils import (
-    arg_group,
+    arg_utils,
     readfile,
     writefile,
     utils as ut,
@@ -51,10 +50,12 @@ degrees     --> meters on equator
 """
 
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Resample radar coded files into geo coordinates, or reverse',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=TEMPLATE+'\n'+EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Resample radar-coded files into geo-coordinates or vice versa'
+    epilog = TEMPLATE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = arg_utils.create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', nargs='+', help='File(s) to be geocoded')
     parser.add_argument('-d', '--dset', help='dataset to be geocoded, for example:\n' +
@@ -104,7 +105,7 @@ def create_parser():
     parser.add_argument('--outdir', '--output-dir', dest='out_dir', help='output directory.')
 
     # computing
-    parser = arg_group.add_memory_argument(parser)
+    parser = arg_utils.add_memory_argument(parser)
 
     return parser
 
@@ -159,15 +160,26 @@ def _check_inps(inps):
     # 2. lookup table is in radar coordinates
     if inps.laloStep:
         if not inps.radar2geo:
-            print('ERROR: --lalo-step can NOT be used together with --geo2radar!')
+            print('ERROR: "--lalo-step" can NOT be used together with "--geo2radar"!')
             sys.exit(0)
         atr = readfile.read_attribute(inps.lookupFile)
         if 'Y_FIRST' in atr.keys():
-            print('ERROR: --lalo-step can NOT be used with lookup table file in geo-coordinates!')
+            print('ERROR: "--lalo-step" can NOT be used with lookup table file in geo-coordinates!')
             sys.exit(0)
 
     # check 5 - number of processors for multiprocessing
     inps.nprocs = check_num_processor(inps.nprocs)
+
+
+    # check 6 - geo2radar
+    if not inps.radar2geo:
+        if inps.SNWE:
+            print('ERROR: "--geo2radar" can NOT be used together with "--bbox"!')
+            sys.exit(0)
+        if inps.software == 'scipy':
+            print('ERROR: "--geo2radar" is NOT supported for "--software scipy"!')
+            sys.exit(0)
+
     return inps
 
 
@@ -177,7 +189,7 @@ def read_template2inps(template_file, in
     if not inps:
         inps = cmd_line_parse()
     inps_dict = vars(inps)
-    template = readfile.read_template(template_file)
+    template = readfile.read_template(template_file, skip_chars=['[', ']'])
     template = ut.check_template_auto_value(template)
 
     prefix = 'mintpy.geocode.'
@@ -186,8 +198,7 @@ def read_template2inps(template_file, in
         value = template[prefix + key]
         if value:
             if key in ['SNWE', 'laloStep']:
-                value = value.replace('[','').replace(']','').replace(',',' ')
-                inps_dict[key] = [float(i) for i in value.split()]
+                inps_dict[key] = [float(i) for i in value.split(',')]
             elif key in ['interpMethod']:
                 inps_dict[key] = value
             elif key == 'fillValue':
diff -pruN 1.3.3-2/mintpy/ifgram_inversion.py 1.4.0-1/mintpy/ifgram_inversion.py
--- 1.3.3-2/mintpy/ifgram_inversion.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/ifgram_inversion.py	2022-08-04 20:01:49.000000000 +0000
@@ -12,14 +12,13 @@
 import os
 import sys
 import time
-import argparse
 import h5py
 import numpy as np
 from scipy import linalg   # more efficient than numpy.linalg
 from mintpy.objects import ifgramStack, cluster
 from mintpy.simulation import decorrelation as decor
 from mintpy.defaults.template import get_template_content
-from mintpy.utils import readfile, writefile, ptime, utils as ut, arg_group
+from mintpy.utils import readfile, writefile, ptime, utils as ut, arg_utils
 
 
 # key configuration parameter name
@@ -34,15 +33,6 @@ configKeys = ['obsDatasetName',
 
 
 ################################################################################################
-EXAMPLE = """example:
-  ifgram_inversion.py inputs/ifgramStack.h5 -t smallbaselineApp.cfg --update
-  ifgram_inversion.py inputs/ifgramStack.h5 -w no  # turn off weight for fast processing
-  ifgram_inversion.py inputs/ifgramStack.h5 -c no  # turn off parallel processing
-  # offset
-  ifgram_inversion.py inputs/ifgramStack.h5 -i rangeOffset   -w no -m waterMask.h5 --md offsetSNR --mt 5
-  ifgram_inversion.py inputs/ifgramStack.h5 -i azimuthOffset -w no -m waterMask.h5 --md offsetSNR --mt 5
-"""
-
 TEMPLATE = get_template_content('invert_network')
 
 REFERENCE = """references:
@@ -63,11 +53,22 @@ REFERENCE = """references:
     offset time series: noise reduction and uncertainty quantification, ID 590, FRINGE 2021, 31 May – 4 Jun, 2021, Virtual.
 """
 
+EXAMPLE = """example:
+  ifgram_inversion.py inputs/ifgramStack.h5 -t smallbaselineApp.cfg --update
+  ifgram_inversion.py inputs/ifgramStack.h5 -w no  # turn off weight for fast processing
+  ifgram_inversion.py inputs/ifgramStack.h5 -c no  # turn off parallel processing
+  # offset
+  ifgram_inversion.py inputs/ifgramStack.h5 -i rangeOffset   -w no -m waterMask.h5 --md offsetSNR --mt 5
+  ifgram_inversion.py inputs/ifgramStack.h5 -i azimuthOffset -w no -m waterMask.h5 --md offsetSNR --mt 5
+"""
+
+def create_parser(subparsers=None):
+    synopsis = 'Invert network of interferograms into time-series.'
+    epilog = REFERENCE + '\n' + TEMPLATE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = arg_utils.create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Invert network of interferograms into time-series.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=REFERENCE+'\n'+TEMPLATE+'\n'+EXAMPLE)
     # input dataset
     parser.add_argument('ifgramStackFile', help='interferograms stack file to be inverted')
     parser.add_argument('-t','--template', dest='templateFile', help='template text file with options')
@@ -121,8 +122,8 @@ def create_parser():
     #                  help='minimum area size to disable/ignore the threshold-based masking [for offset only]')
 
     # computing
-    parser = arg_group.add_memory_argument(parser)
-    parser = arg_group.add_parallel_argument(parser)
+    parser = arg_utils.add_memory_argument(parser)
+    parser = arg_utils.add_parallel_argument(parser)
 
     # update / skip
     parser.add_argument('--update', dest='update_mode', action='store_true',
@@ -187,7 +188,10 @@ def cmd_line_parse(iargs=None):
     # --output option
     if not inps.outfile:
         if inps.obsDatasetName.startswith('unwrapPhase'):
-            inps.outfile = ['timeseries.h5', 'temporalCoherence.h5', 'numInvIfgram.h5']
+            if os.path.basename(inps.ifgramStackFile).startswith('ion'):
+                inps.outfile = ['timeseriesIon.h5', 'temporalCoherenceIon.h5', 'numInvIon.h5']
+            else:
+                inps.outfile = ['timeseries.h5', 'temporalCoherence.h5', 'numInvIfgram.h5']
 
         elif inps.obsDatasetName.startswith('azimuthOffset'):
             inps.outfile = ['timeseriesAz.h5', 'residualInvAz.h5', 'numInvOffAz.h5']
@@ -195,9 +199,6 @@ def cmd_line_parse(iargs=None):
         elif inps.obsDatasetName.startswith('rangeOffset'):
             inps.outfile = ['timeseriesRg.h5', 'residualInvRg.h5', 'numInvOffRg.h5']
 
-        elif inps.obsDatasetName.startswith('ion'):
-            inps.outfile = ['timeseriesIon.h5', 'temporalCoherenceIon.h5', 'numInvIon.h5']
-
         else:
             raise ValueError('un-recognized input observation dataset name: {}'.format(inps.obsDatasetName))
 
@@ -335,7 +336,7 @@ def estimate_timeseries(A, B, y, tbase_d
                 y                 - 2D np.ndarray in size of (num_pair, num_pixel),
                                     phase/offset of all interferograms with no-data value: NaN.
                 tbase_diff        - 2D np.ndarray in size of (num_date-1, 1),
-                                    differential temporal baseline history
+                                    differential temporal baseline history, in the unit of years
                 weight_sqrt       - 2D np.ndarray in size of (num_pair, num_pixel),
                                     square root of weight of all interferograms
                 min_norm_velocity - bool, assume minimum-norm deformation velocity, or not
@@ -345,6 +346,7 @@ def estimate_timeseries(A, B, y, tbase_d
                 inv_quality_name  - str, inversion quality type/name
                                     temporalCoherence for phase
                                     residual          for offset
+                                    no to turn OFF the calculation
     Returns:    ts                - 2D np.ndarray in size of (num_date, num_pixel), phase time-series
                 inv_quality       - 1D np.ndarray in size of (num_pixel), temporal coherence (for phase) or residual (for offset)
                 num_inv_obs       - 1D np.ndarray in size of (num_pixel), number of observations (ifgrams / offsets)
@@ -395,10 +397,11 @@ def estimate_timeseries(A, B, y, tbase_d
                 X, e2 = linalg.lstsq(B, y, cond=rcond)[:2]
 
             # calc inversion quality
-            inv_quality = calc_inv_quality(B, X, y, e2,
-                                           inv_quality_name=inv_quality_name,
-                                           weight_sqrt=weight_sqrt,
-                                           print_msg=print_msg)
+            if inv_quality_name != 'no':
+                inv_quality = calc_inv_quality(B, X, y, e2,
+                                               inv_quality_name=inv_quality_name,
+                                               weight_sqrt=weight_sqrt,
+                                               print_msg=print_msg)
 
             # assemble time-series
             ts_diff = X * np.tile(tbase_diff, (1, num_pixel))
@@ -414,10 +417,11 @@ def estimate_timeseries(A, B, y, tbase_d
                 X, e2 = linalg.lstsq(A, y, cond=rcond)[:2]
 
             # calc inversion quality
-            inv_quality = calc_inv_quality(A, X, y, e2,
-                                           inv_quality_name=inv_quality_name,
-                                           weight_sqrt=weight_sqrt,
-                                           print_msg=print_msg)
+            if inv_quality_name != 'no':
+                inv_quality = calc_inv_quality(A, X, y, e2,
+                                               inv_quality_name=inv_quality_name,
+                                               weight_sqrt=weight_sqrt,
+                                               print_msg=print_msg)
 
             # assemble time-series
             ts[1: ,:] = X
@@ -569,41 +573,6 @@ def calc_inv_quality(G, X, y, e2, inv_qu
 
 
 ###################################### File IO ############################################
-def split2boxes(ifgram_file, max_memory=4, print_msg=True):
-    """Split into chunks in rows to reduce memory usage
-    Parameters: dataset_shape - tuple of 3 int
-                max_memory    - float, max memory to use in GB
-                print_msg     - bool
-    Returns:    box_list      - list of tuple of 4 int
-                num_box       - int, number of boxes
-    """
-    ifg_obj = ifgramStack(ifgram_file)
-    ifg_obj.open(print_msg=False)
-
-    # dataset size: defo obs (phase / offset) + weight + time-series
-    length = ifg_obj.length
-    width = ifg_obj.width
-    ds_size = (ifg_obj.numIfgram * 2 + ifg_obj.numDate + 5) * length * width * 4
-
-    num_box = int(np.ceil(ds_size * 1.5 / (max_memory * 1024**3)))
-    y_step = int(np.ceil((length / num_box) / 10) * 10)
-    num_box = int(np.ceil(length / y_step))
-    if print_msg and num_box > 1:
-        print('maximum memory size: %.1E GB' % max_memory)
-        print('split %d lines into %d patches for processing' % (length, num_box))
-        print('    with each patch up to %d lines' % y_step)
-
-    # y_step / num_box --> box_list
-    box_list = []
-    for i in range(num_box):
-        y0 = i * y_step
-        y1 = min([length, y0 + y_step])
-        box = (0, y0, width, y1)
-        box_list.append(box)
-
-    return box_list, num_box
-
-
 def check_design_matrix(ifgram_file, weight_func='var'):
     """
     Check Rank of Design matrix for weighted inversion
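
split2boxes() leaves this module and becomes a method on the ifgramStack object (the call-site change appears further below as stack_obj.split2boxes(max_memory=...)). For reference, the deleted chunking logic as a standalone sketch, with the stack geometry passed in explicitly (parameter names are illustrative):

    import numpy as np

    def split2boxes(length, width, num_ifgram, num_date, max_memory=4):
        """Split rows into patches so each one fits within max_memory (GB)."""
        # bytes: defo obs (phase/offset) + weight + time-series, float32
        ds_size = (num_ifgram * 2 + num_date + 5) * length * width * 4
        num_box = int(np.ceil(ds_size * 1.5 / (max_memory * 1024**3)))
        y_step = int(np.ceil((length / num_box) / 10) * 10)  # round up to 10 rows
        num_box = int(np.ceil(length / y_step))
        box_list = [(0, i * y_step, width, min(length, (i + 1) * y_step))
                    for i in range(num_box)]
        return box_list, num_box

    # split2boxes(3000, 2000, num_ifgram=100, num_date=40) -> list of (x0, y0, x1, y1)
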
@@ -859,6 +828,7 @@ def ifgram_inversion_patch(ifgram_file,
 
     stack_obj = ifgramStack(ifgram_file)
     stack_obj.open(print_msg=False)
+    stack_dir, stack_base = os.path.split(ifgram_file)
 
     ## debug on a specific pixel
     #y, x = 555, 612
@@ -982,17 +952,26 @@ def ifgram_inversion_patch(ifgram_file,
 
     # 1.3.3 Mask for zero quality measure (average spatial coherence/SNR)
     # usually due to lack of data in the processing
-    stack_quality_file = os.path.join(os.path.dirname(ifgram_file), '../avgSpatialCoh.h5')
-    inv_quality_name = 'temporalCoherence'
     if 'offset' in obs_ds_name.lower():
-        stack_quality_file = os.path.join(os.path.dirname(ifgram_file), '../avgSpatialSNR.h5')
         inv_quality_name = 'residual'
+        stack_quality_file = os.path.join(stack_dir, '../avgSpatialSNR.h5')
+
+    elif stack_base.startswith('ion'):
+        inv_quality_name = 'temporalCoherence'
+        stack_quality_file = os.path.join(stack_dir, '../avgSpatialCohIon.h5')
 
-    if stack_quality_file and os.path.isfile(stack_quality_file):
-        print('skip pixels with zero value in file: {}'.format(os.path.basename(stack_quality_file)))
-        quality = readfile.read(stack_quality_file, box=box)[0].flatten()
-        mask *= quality != 0.
-        del quality
+    else:
+        inv_quality_name = 'temporalCoherence'
+        stack_quality_file = os.path.join(stack_dir, '../avgSpatialCoh.h5')
+
+    if os.path.isfile(stack_quality_file):
+        atr_stack = readfile.read_attribute(stack_quality_file)
+        len_stack, wid_stack = int(atr_stack['LENGTH']), int(atr_stack['WIDTH'])
+        if (len_stack, wid_stack) == (stack_obj.length, stack_obj.width):
+            print('skip pixels with zero value in file: {}'.format(os.path.basename(stack_quality_file)))
+            quality = readfile.read(stack_quality_file, box=box)[0].flatten()
+            mask *= quality != 0.
+            del quality
 
     # invert pixels on mask 1+2
     num_pixel2inv = int(np.sum(mask))
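
The rewritten block above selects the zero-value mask source by stack type: offset stacks read avgSpatialSNR.h5, ionospheric stacks (file name starting with 'ion') read the new avgSpatialCohIon.h5, and everything else reads avgSpatialCoh.h5, with a size check added before the file is applied. The selection, restated as a compact sketch (paths as in the hunk):

    import os

    def pick_quality_file(stack_dir, stack_base, obs_ds_name):
        """Return (inv_quality_name, stack_quality_file) for a given stack."""
        if 'offset' in obs_ds_name.lower():
            return 'residual', os.path.join(stack_dir, '../avgSpatialSNR.h5')
        if stack_base.startswith('ion'):
            return 'temporalCoherence', os.path.join(stack_dir, '../avgSpatialCohIon.h5')
        return 'temporalCoherence', os.path.join(stack_dir, '../avgSpatialCoh.h5')

    # pick_quality_file('inputs', 'ionStack.h5', 'unwrapPhase')
    #   -> ('temporalCoherence', 'inputs/../avgSpatialCohIon.h5')
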
@@ -1019,8 +998,19 @@ def ifgram_inversion_patch(ifgram_file,
         num_inv_obs = num_inv_obs.reshape(num_row, num_col)
         return ts, ts_cov, inv_quality, num_inv_obs, box
 
+    # common inversion options
+    kwargs = {
+        'A'                 : A,
+        'B'                 : B,
+        'tbase_diff'        : tbase_diff,
+        'min_norm_velocity' : min_norm_velocity,
+        'min_redundancy'    : min_redundancy,
+        'inv_quality_name'  : inv_quality_name,
+    }
+
     # 2.2 un-weighted inversion (classic SBAS)
     if weight_sqrt is None:
+        msg = f'estimating time-series for pixels with valid {obs_ds_name} in'
 
         # a. split mask into mask_all/part_net
         # mask for valid (~NaN) observations in ALL ifgrams (share one B in sbas inversion)
@@ -1031,19 +1021,14 @@ def ifgram_inversion_patch(ifgram_file,
 
         # b. invert once for all pixels with obs in all ifgrams
         if np.sum(mask_all_net) > 0:
-            print(('estimating time-series for pixels with valid {} in all  ifgrams'
-                   ' ({:.0f} pixels; {:.1f}%) ...').format(obs_ds_name,
-                                                           np.sum(mask_all_net),
-                                                           np.sum(mask_all_net)/num_pixel2inv*100))
-            (tsi,
-             inv_quali,
-             num_obsi) = estimate_timeseries(A, B,
-                                             y=stack_obs[:, mask_all_net],
-                                             tbase_diff=tbase_diff,
-                                             weight_sqrt=None,
-                                             min_norm_velocity=min_norm_velocity,
-                                             min_redundancy=min_redundancy,
-                                             inv_quality_name=inv_quality_name)
+            num_pixel2inv_all = int(np.sum(mask_all_net))
+            print(f'{msg} all  ifgrams ({num_pixel2inv_all} pixels; {num_pixel2inv_all/num_pixel2inv*100:.1f}%) ...')
+
+            # run
+            tsi, inv_quali, num_obsi = estimate_timeseries(
+                y=stack_obs[:, mask_all_net],
+                weight_sqrt=None,
+                **kwargs)
 
             # save result to output matrices
             ts[:, mask_all_net] = tsi
@@ -1052,29 +1037,25 @@ def ifgram_inversion_patch(ifgram_file,
 
         # c. pixel-by-pixel for pixels with obs not in all ifgrams
         if np.sum(mask_part_net) > 0:
-            print(('estimating time-series for pixels with valid {} in some ifgrams'
-                   ' ({:.0f} pixels; {:.1f}%) ...').format(obs_ds_name,
-                                                           np.sum(mask_part_net),
-                                                           np.sum(mask_all_net)/num_pixel2inv*100))
             num_pixel2inv_part = int(np.sum(mask_part_net))
             idx_pixel2inv_part = np.where(mask_part_net)[0]
+            print(f'{msg} some ifgrams ({num_pixel2inv_part} pixels; {num_pixel2inv_part/num_pixel2inv*100:.1f}%) ...')
+
             prog_bar = ptime.progressBar(maxValue=num_pixel2inv_part)
             for i in range(num_pixel2inv_part):
                 idx = idx_pixel2inv_part[i]
-                (tsi,
-                 inv_quali,
-                 num_obsi) = estimate_timeseries(A, B,
-                                                 y=stack_obs[:, idx],
-                                                 tbase_diff=tbase_diff,
-                                                 weight_sqrt=None,
-                                                 min_norm_velocity=min_norm_velocity,
-                                                 min_redundancy=min_redundancy,
-                                                 inv_quality_name=inv_quality_name)
+
+                # run
+                tsi, inv_quali, num_obsi = estimate_timeseries(
+                    y=stack_obs[:, idx],
+                    weight_sqrt=None,
+                    **kwargs)
 
                 # save result to output matrices
                 ts[:, idx] = tsi.flatten()
                 inv_quality[idx] = inv_quali
                 num_inv_obs[idx] = num_obsi
+
                 prog_bar.update(i+1, every=200, suffix='{}/{} pixels'.format(i+1, num_pixel2inv_part))
             prog_bar.close()
 
@@ -1084,15 +1065,12 @@ def ifgram_inversion_patch(ifgram_file,
         prog_bar = ptime.progressBar(maxValue=num_pixel2inv)
         for i in range(num_pixel2inv):
             idx = idx_pixel2inv[i]
-            (tsi,
-             inv_quali,
-             num_obsi) = estimate_timeseries(A, B,
-                                             y=stack_obs[:, idx],
-                                             tbase_diff=tbase_diff,
-                                             weight_sqrt=weight_sqrt[:, idx],
-                                             min_norm_velocity=min_norm_velocity,
-                                             min_redundancy=min_redundancy,
-                                             inv_quality_name=inv_quality_name)
+
+            # run
+            tsi, inv_quali, num_obsi = estimate_timeseries(
+                y=stack_obs[:, idx],
+                weight_sqrt=weight_sqrt[:, idx],
+                **kwargs)
 
             # save result to output matrices
             ts[:, idx] = tsi.flatten()
@@ -1302,7 +1280,7 @@ def ifgram_inversion(inps=None):
     ## 3. run the inversion / estimation and write to disk
 
     # 3.1 split ifgram_file into blocks to save memory
-    box_list, num_box = split2boxes(inps.ifgramStackFile, max_memory=inps.maxMemory)
+    box_list, num_box = stack_obj.split2boxes(max_memory=inps.maxMemory)
 
     # 3.2 prepare the input arguments for *_patch()
     data_kwargs = {
@@ -1433,7 +1411,7 @@ def main(iargs=None):
         raise NotImplementedError('L1 norm minimization is not fully tested.')
         #ut.timeseries_inversion_L1(inps.ifgramStackFile, inps.tsFile)
 
-    return inps.outfile
+    return
 
 
 ################################################################################################
diff -pruN 1.3.3-2/mintpy/ifgram_reconstruction.py 1.4.0-1/mintpy/ifgram_reconstruction.py
--- 1.3.3-2/mintpy/ifgram_reconstruction.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/ifgram_reconstruction.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,76 +0,0 @@
-#!/usr/bin/env python3
-############################################################
-# Program is part of MintPy                                #
-# Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi         #
-# Author: Zhang Yunjun, Heresh Fattahi, 2013               #
-############################################################
-
-
-import sys
-import argparse
-import numpy as np
-from mintpy.objects import ifgramStack
-from mintpy.utils import readfile, writefile
-
-
-#####################################################################################
-EXAMPLE = """example:
-  ifgram_reconstruction.py timeseries_ERA5_ramp_demErr.h5
-  ifgram_reconstruction.py timeseries_ERA5_ramp_demErr.h5 -r inputs/ifgramStack.h5 -o ifgramStackRecon.h5
-"""
-
-def create_parser():
-    parser = argparse.ArgumentParser(description='Reconstruct network of interferograms from time-series',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
-
-    parser.add_argument('timeseries_file', type=str, help='time-series file.')
-    parser.add_argument('-r', dest='ifgram_file', type=str, default='./inputs/ifgramStack.h5',
-                        help='reference interferograms stack file')
-    parser.add_argument('-o','--output', dest='out_file', default='ifgramStackRecon.h5',
-                        help='output filename for the reconstructed interferograms.')
-    return parser
-
-def cmd_line_parse(iargs=None):
-    parser = create_parser()
-    inps = parser.parse_args(args=iargs)
-    return inps
-
-
-#####################################################################################
-def timeseries2ifgram(ts_file, ifgram_file, out_file='reconUnwrapIfgram.h5'):
-    # read time-series
-    atr = readfile.read_attribute(ts_file)
-    range2phase = -4.*np.pi / float(atr['WAVELENGTH'])
-    print('reading timeseries data from file {} ...'.format(ts_file))
-    ts_data = readfile.read(ts_file)[0] * range2phase
-    num_date, length, width = ts_data.shape
-    ts_data = ts_data.reshape(num_date, -1)
-
-    # reconstruct unwrapPhase
-    print('reconstructing the interferograms from timeseries')
-    stack_obj = ifgramStack(ifgram_file)
-    stack_obj.open(print_msg=False)
-    date12_list = stack_obj.get_date12_list(dropIfgram=False)
-    A = stack_obj.get_design_matrix4timeseries(date12_list, refDate='no')[0]
-    ifgram_est = np.dot(A, ts_data).reshape(A.shape[0], length, width)
-    ifgram_est = np.array(ifgram_est, dtype=ts_data.dtype)
-    del ts_data
-
-    # write to ifgram file
-    dsDict = {}
-    dsDict['unwrapPhase'] = ifgram_est
-    writefile.write(dsDict, out_file=out_file, ref_file=ifgram_file)
-    return ifgram_file
-
-
-def main(iargs=None):
-    inps = cmd_line_parse(iargs)
-    timeseries2ifgram(inps.timeseries_file, inps.ifgram_file, inps.out_file)
-    return
-
-
-#####################################################################################
-if __name__ == '__main__':
-    main(sys.argv[1:])
-
diff -pruN 1.3.3-2/mintpy/image_math.py 1.4.0-1/mintpy/image_math.py
--- 1.3.3-2/mintpy/image_math.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/image_math.py	2022-08-04 20:01:49.000000000 +0000
@@ -8,9 +8,9 @@
 
 import os
 import sys
-import argparse
 import numpy as np
 from mintpy.utils import readfile, writefile
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 #######################################################################################
@@ -23,10 +23,12 @@ EXAMPLE = """example:
 """
 
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Basic Mathmatic Operation of file',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Basic Mathematical Operation of file'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', help='input file')
     parser.add_argument('-o', '--output', dest='outfile',
@@ -100,10 +102,10 @@ def file_operation(fname, operator, oper
 def main(iargs=None):
     inps = cmd_line_parse(iargs)
 
-    inps.outfile = file_operation(inps.file, inps.operator, inps.operand, inps.outfile)
+    file_operation(inps.file, inps.operator, inps.operand, inps.outfile)
 
     print('Done.')
-    return inps.outfile
+    return
 
 
 #######################################################################################
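
This release migrates script after script from a private argparse.ArgumentParser to mintpy.utils.arg_utils.create_argument_parser, so one create_parser(subparsers=None) can build either a standalone parser or a sub-command of a single mintpy entry point. A rough sketch of how such a helper can behave; this is inferred from the call sites in this diff, not from arg_utils itself:

    import argparse

    def create_argument_parser(name, synopsis=None, description=None,
                               epilog=None, subparsers=None):
        """Build a standalone parser, or attach one as a sub-command."""
        kwargs = dict(description=description, epilog=epilog,
                      formatter_class=argparse.RawTextHelpFormatter)
        if subparsers is None:
            return argparse.ArgumentParser(prog=name, **kwargs)
        return subparsers.add_parser(name, help=synopsis, **kwargs)

    # standalone:   parser = create_argument_parser('image_math', synopsis='...')
    # sub-command:  sub = top_parser.add_subparsers()
    #               create_argument_parser('image_math', subparsers=sub)
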
diff -pruN 1.3.3-2/mintpy/image_stitch.py 1.4.0-1/mintpy/image_stitch.py
--- 1.3.3-2/mintpy/image_stitch.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/image_stitch.py	2022-08-04 20:01:49.000000000 +0000
@@ -9,7 +9,6 @@
 
 import os
 import sys
-import argparse
 import numpy as np
 import matplotlib.pyplot as plt
 try:
@@ -18,6 +17,7 @@ except ImportError:
     raise ImportError('Could not import skimage!')
 
 from mintpy.utils import readfile, writefile, plot as pp
+from mintpy.utils.arg_utils import create_argument_parser
 from mintpy.multilook import multilook_data
 
 
@@ -27,26 +27,33 @@ EXAMPLE = """example:
   image_stitch.py geom_AlosAT422.h5 geom_AlosAT423.h5 geom_AlosAT424.h5 geom_AlosAT425.h5 -o geom_AlosA.h5 --no-offset
 """
 
+NOTE = """
+  The function automatically:
+  1) finds the common area between adjacent input files
+  2) calculates the average offset between them
+  3) applies this average offset to the latter file
+"""
+
+def create_parser(subparsers=None):
+    synopsis = 'Stitch/mosaic multiple geocoded datasets into one.'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis+NOTE, epilog=epilog, subparsers=subparsers)
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Stitch >=2 geocoded datasets sharing common area into one.\n'
-                                                 '\tFunction automatically finds the common area and calculates\n'
-                                                 '\tthe average offset between the two velocity.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
     parser.add_argument('file1', help='file to stitch')
     parser.add_argument('file2s', nargs='+', metavar='file2', help='file(s) to stitch')
     parser.add_argument('-o', '--output', dest='outfile', required=True, help='output file name')
 
     # stitch option
     parser.add_argument('--no-offset','--no-off', dest='apply_offset', action='store_false',
-                        help='Do not apply offset if data sets are merely to be stitched '
-                             'and no adjustment of values needs to be made '
+                        help='Do not apply offset if 1) data sets are merely to be stitched '
+                             'AND 2) no adjustment of values needs to be made\n'
                              '(i.e., for two coherence maps), use this flag')
 
     # plot options
     parser.add_argument('--nodisplay', dest='disp_fig', action='store_false',
-                        help='do not display the result ploting.')
+                        help='do not display the result plotting.')
     return parser
 
 
@@ -355,7 +362,7 @@ def main(iargs=None):
                  apply_offset=inps.apply_offset,
                  disp_fig=inps.disp_fig)
 
-    return inps.outfile
+    return
 
 
 #############################################################################################
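
The NOTE block added above describes what the stitcher does; numerically it is just a mean shift over the overlap, as sketched below for two already-aligned arrays with NaN outside their coverage (a simplification of image_stitch's real grid handling):

    import numpy as np

    def stitch_pair(a, b, apply_offset=True):
        """Stitch b onto a: common area -> average offset -> applied to b."""
        both = ~np.isnan(a) & ~np.isnan(b)          # 1) common area
        if apply_offset and both.any():
            b = b + np.mean(a[both] - b[both])      # 2)+3) mean offset onto b
        return np.where(np.isnan(a), b, a)          # a wins where both exist

    a = np.array([[1., 2., np.nan]])
    b = np.array([[np.nan, 2.5, 3.5]])
    # overlap offset = 2.0 - 2.5 = -0.5  ->  stitched: [[1., 2., 3.]]
    print(stitch_pair(a, b))
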
diff -pruN 1.3.3-2/mintpy/info.py 1.4.0-1/mintpy/info.py
--- 1.3.3-2/mintpy/info.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/info.py	2022-08-04 20:01:49.000000000 +0000
@@ -8,11 +8,11 @@
 
 import os
 import sys
-import argparse
 import h5py
 import numpy as np
 
 from mintpy.utils import readfile, ptime
+from mintpy.utils.arg_utils import create_argument_parser
 from mintpy.objects import (
     giantIfgramStack,
     giantTimeseries,
@@ -51,11 +51,14 @@ EXAMPLE = """example:
 """
 
 
-def create_parser():
+def create_parser(subparsers=None):
     """Create command line parser."""
-    parser = argparse.ArgumentParser(description='Display Metadata / Structure information of ANY File',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+    synopsis = 'Display Metadata / Structure information of ANY File'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
+
     parser.add_argument('file', type=str, help='File to check')
     parser.add_argument('--compact', action='store_true',
                         help='show compact info by displaying only the top 20 metadata')
diff -pruN 1.3.3-2/mintpy/__init__.py 1.4.0-1/mintpy/__init__.py
--- 1.3.3-2/mintpy/__init__.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/__init__.py	2022-08-04 20:01:49.000000000 +0000
@@ -1,5 +1,5 @@
 # get version info
-from mintpy.version import version_num, logo
-__version__ = version_num
-__logo__ = logo
-
+from mintpy.version import (
+    version_num as __version__,
+    logo as __logo__,
+)
diff -pruN 1.3.3-2/mintpy/iono_tec.py 1.4.0-1/mintpy/iono_tec.py
--- 1.3.3-2/mintpy/iono_tec.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/iono_tec.py	2022-08-04 20:01:49.000000000 +0000
@@ -6,29 +6,30 @@
 ############################################################
 
 
+import datetime as dt
 import os
 import sys
 import time
-import argparse
+
 import h5py
 import numpy as np
 
 import mintpy
-from mintpy.objects import timeseries
+from mintpy.objects import timeseries, ionex
+from mintpy.objects.constants import SPEED_OF_LIGHT
 from mintpy.utils import ptime, readfile, writefile, utils as ut
+from mintpy.utils.arg_utils import create_argument_parser
 from mintpy.simulation import iono
 
 
-SPEED_OF_LIGHT = 299792458 # m/s
-
-
 
 #####################################################################################
 REFERENCE = """references:
-  Yunjun, Z., Fattahi, H., Pi, X., Rosen, P., Simons, M., Agram, P., & Aoki, Y. (2022). Range Geolocation Accuracy
-    of C/L-band SAR and its Implications for Operational Stack Coregistration. IEEE Trans. Geosci. Remote Sens. 
-  Schaer, S., Gurtner, W., & Feltens, J. (1998). IONEX: The ionosphere map exchange format version 1.1. 
-    Paper presented at the Proceedings of the IGS AC workshop, Darmstadt, Germany, Darmstadt, Germany.
+  Yunjun, Z., Fattahi, H., Pi, X., Rosen, P., Simons, M., Agram, P., & Aoki, Y. (2022).
+    Range Geolocation Accuracy of C-/L-band SAR and its Implications for Operational
+    Stack Coregistration. IEEE Trans. Geosci. Remote Sens., 60, doi:10.1109/TGRS.2022.3168509.
+  Schaer, S., Gurtner, W., & Feltens, J. (1998). IONEX: The ionosphere map exchange format
+    version 1.1. Paper presented at the Proceedings of the IGS AC workshop, Darmstadt, Germany.
 """
 
 EXAMPLE = """example:
@@ -36,21 +37,21 @@ EXAMPLE = """example:
   iono_tec.py timeseriesRg.h5 -g inputs/geometryRadar.h5 -s cod
 """
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Calculate ionospheric ramps using Global Iono Maps (GIM) from GNSS-based TEC products.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=REFERENCE+'\n'+EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Calculate ionospheric ramps using Global Iono Maps (GIM) from GNSS-based TEC products.'
+    epilog = REFERENCE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
+
     parser.add_argument('dis_file', help='displacement time-series HDF5 file, i.e. timeseries.h5')
     parser.add_argument('-g','--geomtry', dest='geom_file', type=str, required=True,
                         help='geometry file including incidence/azimuthAngle.')
-    parser.add_argument('-s','--sol','--tec-sol', dest='tec_sol', default='jpl',
-                        help='TEC solution center (default: %(default)s). \n'
-                             '    jpl - JPL (Final)\n'
-                             '    igs - IGS (Final)\n'
-                             '    cod - CODE (Final)\n'
-                             'Check more at:\n'
-                             '    https://cddis.nasa.gov/Data_and_Derived_Products/GNSS/atmospheric_products.html')
-    parser.add_argument('--tec-dir', dest='tec_dir', default='${WEATHER_DIR}/GIM_IGS',
+    parser.add_argument('-s','--sol','--sol-code', dest='sol_code', default='jpl',
+                        choices={'cod','esa','igs','jpl','upc','uqr'},
+                        help='GIM solution center code (default: %(default)s).\n'
+                             'https://cddis.nasa.gov/Data_and_Derived_Products/GNSS/atmospheric_products.html')
+    parser.add_argument('--tec-dir', dest='tec_dir', default='${WEATHER_DIR}/IONEX',
                         help='directory of downloaded GNSS TEC data (default: %(default)s).')
 
     # output
@@ -59,13 +60,15 @@ def create_parser():
     #parser.add_argument('-o', dest='cor_dis_file', help='Output file name for the corrected time-series.')
 
     # GIM extraction
-    tec_cfg = parser.add_argument_group('GIM extraction',
-                                        'Parameters to extract TEC at point of interest from GIM (mainly for impact demonstration).')
-    tec_cfg.add_argument('-i','--interp', dest='interp_method', default='linear3d', choices={'nearest', 'linear2d', 'linear3d'},
-                         help='Interpolation method to grab the GIM value at the point of interest (default: %(default)s).')
+    tec_cfg = parser.add_argument_group('GIM extraction', 'Parameters to extract TEC at point of interest from '
+                                        'GIM (mainly for impact demonstration).')
+    tec_cfg.add_argument('-i','--interp', dest='interp_method', default='linear3d',
+                         choices={'nearest', 'linear2d', 'linear3d'},
+                         help='Interpolation method to grab the GIM value at the point of interest'
+                              ' (default: %(default)s).')
     tec_cfg.add_argument('--norotate', dest='rotate_tec_map', action='store_false',
-                         help="Rotate TEC maps along the longitude direction to compensate the correlation between\n"
-                              "the ionosphere and the Sun's position, as suggested by Schaer et al. (1998).\n"
+                         help="Rotate TEC maps along the longitude direction to compensate the correlation\n"
+                              "between the ionosphere and the Sun's position (Schaer et al., 1998).\n"
                               "For 'interp_method == linear3d' ONLY. (default: %(default)s).")
     tec_cfg.add_argument('--ratio', dest='sub_tec_ratio', type=str,
                          help='Ratio to calculate the sub-orbital TEC from the total TEC.\n'
@@ -107,7 +110,7 @@ def cmd_line_parse(iargs=None):
 
     if not inps.iono_file:
         geom_dir = os.path.dirname(inps.geom_file)
-        inps.iono_file = os.path.join(geom_dir, 'TEC{}lr{}.h5'.format(inps.tec_sol[0], suffix))
+        inps.iono_file = os.path.join(geom_dir, 'TEC{}lr{}.h5'.format(inps.sol_code[0], suffix))
 
     #if not inps.cor_dis_file:
     #    dis_dir = os.path.dirname(inps.dis_file)
@@ -145,7 +148,8 @@ def run_or_skip(iono_file, grib_files, d
         date_list_ion = timeseries(iono_file).get_date_list()
         if ds_size_ion != ds_size_dis or any (x not in date_list_ion for x in date_list_dis):
             flag = 'run'
-            print(f'2) output file does NOT have the same len/wid as the geometry file {geom_file} or does NOT contain all dates')
+            print(f'2) output file does NOT have the same len/wid as the geometry file {geom_file}'
+                  ' or does NOT contain all dates')
         else:
             print('2) output file has the same len/wid as the geometry file and contains all dates')
 
@@ -163,17 +167,17 @@ def run_or_skip(iono_file, grib_files, d
 
 
 #####################################################################################
-def download_igs_tec(date_list, tec_dir, tec_sol='jpl'):
-    """Download IGS TEC products for the input list of dates.
+def download_ionex_files(date_list, tec_dir, sol_code='jpl'):
+    """Download IGS TEC products in IONEX format for the input list of dates.
 
     Parameters: date_list - list of str, in YYYYMMDD
                 tec_dir   - str, path to IGS_TEC directory, e.g. ~/data/aux/IGS_TEC
-                tec_sol   - str, TEC solution center, e.g. jpl, cod, igs
+                sol_code  - str, TEC solution center, e.g. jpl, cod, igs
     Returns:    fnames    - list of str, path of the downloaded TEC files
     """
     print("\n------------------------------------------------------------------------------")
-    print("downloading GNSS-based TEC products from NASA's Archive of Space Geodesy Data (CDDIS) ...")
-    print('Link: https://cddis.nasa.gov/Data_and_Derived_Products/GNSS/atmospheric_products.html')
+    print("downloading GNSS-based TEC products in IONEX format from NASA/CDDIS ...")
+    print('https://cddis.nasa.gov/Data_and_Derived_Products/GNSS/atmospheric_products.html')
     num_date = len(date_list)
     n = len(str(num_date))
     print(f'number of TEC files to download: {num_date}')
@@ -182,7 +186,7 @@ def download_igs_tec(date_list, tec_dir,
     # output file names/sizes
     fnames = []
     for date_str in date_list:
-        fnames.append(iono.get_igs_tec_filename(tec_dir, date_str, sol=tec_sol))
+        fnames.append(ionex.get_ionex_filename(date_str, tec_dir=tec_dir, sol_code=sol_code))
 
     # remove all existing files
     debug_mode = False
@@ -206,23 +210,24 @@ def download_igs_tec(date_list, tec_dir,
 
     num_date2dload = len(date_list2dload)
     if num_date2dload == 0:
-        print(f'ALL files exists with consistent file size (~{fsizec:.0f} KB) --> skip re-downloading.\n')
+        print(f'ALL files exist with consistent file size (~{fsizec:.0f} KB)'
+              ' --> skip re-downloading.\n')
 
     else:
         for i, date_str in enumerate(date_list2dload):
             print('-'*20)
             print('DATE {}/{}: {}'.format(i+1, num_date2dload, date_str))
-            iono.dload_igs_tec(date_str, tec_dir, sol=tec_sol, print_msg=True)
+            ionex.dload_ionex(date_str, tec_dir=tec_dir, sol_code=sol_code, print_msg=True)
 
         # print file size info, after downloading
         fsizes = [os.path.getsize(i) / 1024 if os.path.isfile(i) else 0 for i in fnames]
         for i in range(num_date):
-            print('[{i:0{n}d}/{N}] {f}: {s:.2f} KB'.format(n=n, i=i+1, N=num_date, f=fnames[i], s=fsizes[i]))
+            print(f'[{i+1:0{n}d}/{num_date}] {fnames[i]}: {fsizes[i]:.2f} KB')
 
     return fnames
 
 
-def calc_iono_ramp_timeseries_igs(tec_dir, tec_sol, interp_method, ts_file, geom_file, iono_file,
+def calc_iono_ramp_timeseries_igs(tec_dir, sol_code, interp_method, ts_file, geom_file, iono_file,
                                   rotate_tec_map=True, sub_tec_ratio=None, update_mode=True):
     """Calculate the time-series of 2D ionospheric delay from IGS TEC data.
     Considering the variation of the incidence angle along range direction.
@@ -241,26 +246,31 @@ def calc_iono_ramp_timeseries_igs(tec_di
     date_list = timeseries(ts_file).get_date_list()
     meta = readfile.read_attribute(ts_file)
     utc_sec = float(meta['CENTER_LINE_UTC'])
-    h, s = divmod(utc_sec, 3600)
-    m, s = divmod(s, 60)
-    print('UTC time: {:02.0f}:{:02.0f}:{:02.1f}'.format(h, m, s))
+    print(f'CENTER_LINE_UTC: {utc_sec}')
+
+    # UTC time & local solar time
+    # use an arbitrary date to construct the datetime object
+    lon_c = (float(meta['LON_REF1']) + float(meta['LON_REF2'])) / 2
+    utc_dt = dt.datetime(2020, 1, 1) + dt.timedelta(seconds=utc_sec)
+    local_dt = ptime.utc2solar_time(utc_dt, lon_c)
+    print('UTC time:', utc_dt.strftime("%H:%M:%S"))
+    print('Local solar time:', local_dt.strftime("%I:%M %p"))
 
     # read IGS TEC
-    vtec_list = []
     print('read IGS TEC file ...')
-    print('interpolation method: {}'.format(interp_method))
+    print(f'interpolation method: {interp_method}')
+    if interp_method == 'linear3d':
+        print(f'rotate TEC maps: {rotate_tec_map}')
+
+    vtec_list = []
     prog_bar = ptime.progressBar(maxValue=len(date_list))
     for i, date_str in enumerate(date_list):
         # read zenith TEC
-        tec_file = iono.get_igs_tec_filename(tec_dir, date_str, sol=tec_sol)
-        vtec = iono.get_igs_tec_value(
-            tec_file,
-            utc_sec,
-            lat=iono_lat,
-            lon=iono_lon,
-            interp_method=interp_method,
-            rotate_tec_map=rotate_tec_map,
-        )
+        tec_file = ionex.get_ionex_filename(date_str, tec_dir=tec_dir, sol_code=sol_code)
+        vtec = ionex.get_ionex_value(tec_file, utc_sec,
+                                     lat=iono_lat, lon=iono_lon,
+                                     interp_method=interp_method,
+                                     rotate_tec_map=rotate_tec_map)
         vtec_list.append(vtec)
         prog_bar.update(i+1, suffix=date_str)
     prog_bar.close()
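
Per acquisition date, the new loop above resolves the local IONEX file and samples the vertical TEC at the ionospheric piercing point. Condensed to its essentials (call signatures copied from the hunk; a downloaded IONEX file and valid lat/lon are assumed):

    import datetime as dt
    from mintpy.objects import ionex
    from mintpy.utils import ptime

    tec_dir, sol_code, date_str = '~/data/aux/IONEX', 'jpl', '20200101'
    tec_file = ionex.get_ionex_filename(date_str, tec_dir=tec_dir, sol_code=sol_code)
    vtec = ionex.get_ionex_value(tec_file, 12 * 3600,       # UTC seconds of day
                                 lat=0.0, lon=-91.0,
                                 interp_method='linear3d',
                                 rotate_tec_map=True)

    # local solar time, as printed by the hunk above (arbitrary reference date)
    utc_dt = dt.datetime(2020, 1, 1) + dt.timedelta(seconds=12 * 3600)
    local_dt = ptime.utc2solar_time(utc_dt, -91.0)
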
@@ -322,7 +332,7 @@ def vtec2iono_ramp_timeseries(date_list,
             dates = ptime.date_list2vector(date_list)[0]
             ydays = np.array([x.timetuple().tm_yday for x in dates])
             fc = np.loadtxt(top_perc_file, dtype=bytes).astype(np.float32)
-            print('multiply VTEC adaptively based on the day of the year from: {}'.format(top_perc_file))
+            print(f'multiply VTEC adaptively based on the day of the year from: {top_perc_file}')
             sub_perc = fc[:,2][np.array(ydays)]
             vtec_list = (np.array(vtec_list).flatten() * sub_perc).tolist()
 
@@ -392,13 +402,13 @@ def main(iargs=None):
 
     # download
     date_list = timeseries(inps.dis_file).get_date_list()
-    tec_files = download_igs_tec(date_list, tec_dir=inps.tec_dir, tec_sol=inps.tec_sol)
+    tec_files = download_ionex_files(date_list, tec_dir=inps.tec_dir, sol_code=inps.sol_code)
 
     # calculate
     if run_or_skip(inps.iono_file, tec_files, inps.dis_file, inps.geom_file) == 'run':
         calc_iono_ramp_timeseries_igs(
             tec_dir=inps.tec_dir,
-            tec_sol=inps.tec_sol,
+            sol_code=inps.sol_code,
             interp_method=inps.interp_method,
             ts_file=inps.dis_file,
             geom_file=inps.geom_file,
diff -pruN 1.3.3-2/mintpy/legacy/ifgram_reconstruction.py 1.4.0-1/mintpy/legacy/ifgram_reconstruction.py
--- 1.3.3-2/mintpy/legacy/ifgram_reconstruction.py	1970-01-01 00:00:00.000000000 +0000
+++ 1.4.0-1/mintpy/legacy/ifgram_reconstruction.py	2022-08-04 20:01:49.000000000 +0000
@@ -0,0 +1,76 @@
+#!/usr/bin/env python3
+############################################################
+# Program is part of MintPy                                #
+# Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi         #
+# Author: Zhang Yunjun, Heresh Fattahi, 2013               #
+############################################################
+
+
+import sys
+import argparse
+import numpy as np
+from mintpy.objects import ifgramStack
+from mintpy.utils import readfile, writefile
+
+
+#####################################################################################
+EXAMPLE = """example:
+  ifgram_reconstruction.py timeseries_ERA5_ramp_demErr.h5
+  ifgram_reconstruction.py timeseries_ERA5_ramp_demErr.h5 -r inputs/ifgramStack.h5 -o ifgramStackRecon.h5
+"""
+
+def create_parser():
+    parser = argparse.ArgumentParser(description='Reconstruct network of interferograms from time-series',
+                                     formatter_class=argparse.RawTextHelpFormatter,
+                                     epilog=EXAMPLE)
+
+    parser.add_argument('timeseries_file', type=str, help='time-series file.')
+    parser.add_argument('-r', dest='ifgram_file', type=str, default='./inputs/ifgramStack.h5',
+                        help='reference interferograms stack file')
+    parser.add_argument('-o','--output', dest='out_file', default='ifgramStackRecon.h5',
+                        help='output filename for the reconstructed interferograms.')
+    return parser
+
+def cmd_line_parse(iargs=None):
+    parser = create_parser()
+    inps = parser.parse_args(args=iargs)
+    return inps
+
+
+#####################################################################################
+def timeseries2ifgram(ts_file, ifgram_file, out_file='reconUnwrapIfgram.h5'):
+    # read time-series
+    atr = readfile.read_attribute(ts_file)
+    range2phase = -4.*np.pi / float(atr['WAVELENGTH'])
+    print('reading timeseries data from file {} ...'.format(ts_file))
+    ts_data = readfile.read(ts_file)[0] * range2phase
+    num_date, length, width = ts_data.shape
+    ts_data = ts_data.reshape(num_date, -1)
+
+    # reconstruct unwrapPhase
+    print('reconstructing the interferograms from timeseries')
+    stack_obj = ifgramStack(ifgram_file)
+    stack_obj.open(print_msg=False)
+    date12_list = stack_obj.get_date12_list(dropIfgram=False)
+    A = stack_obj.get_design_matrix4timeseries(date12_list, refDate='no')[0]
+    ifgram_est = np.dot(A, ts_data).reshape(A.shape[0], length, width)
+    ifgram_est = np.array(ifgram_est, dtype=ts_data.dtype)
+    del ts_data
+
+    # write to ifgram file
+    dsDict = {}
+    dsDict['unwrapPhase'] = ifgram_est
+    writefile.write(dsDict, out_file=out_file, ref_file=ifgram_file)
+    return ifgram_file
+
+
+def main(iargs=None):
+    inps = cmd_line_parse(iargs)
+    timeseries2ifgram(inps.timeseries_file, inps.ifgram_file, inps.out_file)
+    return
+
+
+#####################################################################################
+if __name__ == '__main__':
+    main(sys.argv[1:])
+
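
ifgram_reconstruction.py moves verbatim into mintpy/legacy/. Its core, timeseries2ifgram(), is simply the forward model of the network inversion: with a dates-to-pairs design matrix A (refDate='no'), every unwrapped interferogram is A @ ts. A self-contained numeric sketch of that single step:

    import numpy as np

    # 3 acquisition dates, 2 interferograms: d1_d2 and d2_d3
    A = np.array([[-1.,  1.,  0.],     # phase(d1_d2) = ts[d2] - ts[d1]
                  [ 0., -1.,  1.]])    # phase(d2_d3) = ts[d3] - ts[d2]
    ts = np.array([0.0, 2.0, 5.0])     # cumulative phase per date (radians)
    ifgram_est = A @ ts                # -> array([2., 3.])

For a full stack the same product is np.dot(A, ts_data.reshape(num_date, -1)), exactly as in the function above.
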
diff -pruN 1.3.3-2/mintpy/legacy/load2hdf5.py 1.4.0-1/mintpy/legacy/load2hdf5.py
--- 1.3.3-2/mintpy/legacy/load2hdf5.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/legacy/load2hdf5.py	2022-08-04 20:01:49.000000000 +0000
@@ -12,7 +12,7 @@ import argparse
 import numpy as np
 from mintpy.utils import (
     ptime,
-    arg_group,
+    arg_utils,
     readfile,
     writefile,
     utils as ut,
@@ -48,7 +48,7 @@ def create_parser():
     parser.add_argument('file', help='file to be loaded.')
     parser.add_argument('--dtype','--data-type', dest='data_type', choices=DATA_TYPE_STR2OBJ.keys(),
                         help='output data type')
-    parser = arg_group.add_subset_argument(parser, geo=False)
+    parser = arg_utils.add_subset_argument(parser, geo=False)
 
     # output
     parser.add_argument('--dname','--dset-name', dest='dset_name', help='output dataset name(s)')
diff -pruN 1.3.3-2/mintpy/legacy/prep_giant.py 1.4.0-1/mintpy/legacy/prep_giant.py
--- 1.3.3-2/mintpy/legacy/prep_giant.py	1970-01-01 00:00:00.000000000 +0000
+++ 1.4.0-1/mintpy/legacy/prep_giant.py	2022-08-04 20:01:49.000000000 +0000
@@ -0,0 +1,152 @@
+#!/usr/bin/env python3
+############################################################
+# Program is part of MintPy                                #
+# Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi         #
+# Author: Zhang Yunjun, Jul 2018                           #
+############################################################
+
+
+import os
+import sys
+import argparse
+from lxml import objectify
+from mintpy.utils import readfile, utils as ut
+from mintpy.objects import sensor
+
+
+key_giant2mintpy = {'xmin':'SUBSET_XMIN', 'xmax':'SUBSET_XMAX',
+                   'ymin':'SUBSET_YMIN', 'ymax':'SUBSET_YMAX',
+                  }
+
+
+##################################################################################################
+EXAMPLE = """example:
+  prep_giant.py  LS-PARAMS.h5
+  prep_giant.py  TS-PARAMS.h5
+  prep_giant.py  NSBAS-PARAMS.h5
+  prep_giant.py  RAW-STACK.h5
+  prep_giant.py  PROC-STACK.h5
+  prep_giant.py  LS-PARAMS.h5 -x ../data.xml ../sbas.xml ../mints.xml
+  prep_giant.py  LS-PARAMS.h5 -x ../data.xml ../sbas.xml ../mints.xml ../filt_fine.unw.rsc
+"""
+
+
+def create_parser():
+    parser = argparse.ArgumentParser(description='Prepare attributes for GIAnT timeseries file.',
+                                     formatter_class=argparse.RawTextHelpFormatter,
+                                     epilog=EXAMPLE)
+
+    parser.add_argument('file', help='GIAnT timeseries file')
+    parser.add_argument('-x','--xml', nargs='+', dest='xml_file',
+                        help='XML file with data setting info.')
+    return parser
+
+
+def cmd_line_parse(iargs=None):
+    parser = create_parser()
+    inps = parser.parse_args(args=iargs)
+    if not inps.xml_file:
+        inps.xml_file = auto_xml_file4giant(inps.file)
+    if not inps.xml_file:
+        parser.print_usage()
+        raise SystemExit('ERROR: no xml file found.')
+
+    return inps
+
+
+def auto_xml_file4giant(fname):
+    file_list = [os.path.join(os.path.dirname(fname), '../{}'.format(i))
+                 for i in ['data.xml',
+                           'sbas.xml',
+                           'mints.xml',
+                           'filt_fine.unw.rsc']]
+    file_list = [i for i in file_list if os.path.isfile(i)]
+    return file_list
+
+
+def read_giant_xml(fname):
+    odict = {}
+    root = objectify.parse(fname).getroot()
+
+    if root.find('master') is not None:
+        comp = root['master']
+        for key in ['wavelength', 'incidence']:
+            odict[key] = comp[key].value
+
+    if root.find('subimage') is not None:
+        comp = root['subimage']
+        for key in ['width', 'length',
+                    'xmin', 'xmax',
+                    'ymin', 'ymax',
+                    'rxmin', 'rxmax',
+                    'rymin', 'rymax']:
+            odict[key] = comp[key].value
+
+        odict = readfile.standardize_metadata(odict, standardKeys=key_giant2mintpy)
+        odict['REF_Y'] = int((int(odict['rymin']) +
+                              int(odict['rymax'])) / 2. + 0.5)
+        odict['REF_X'] = int((int(odict['rxmin']) +
+                              int(odict['rxmax'])) / 2. + 0.5)
+
+    if root.find('proc/masterdate') is not None:
+        odict['REF_DATE'] = root['proc']['masterdate'].value
+    return odict
+
+
+def prepare_metadata4giant(fname, meta_files=None):
+    """Extract metadata from xml files for GIAnT time-series file."""
+    # check xml files
+    if not meta_files:
+        meta_files = auto_xml_file4giant(fname)
+    if not meta_files:
+        raise FileNotFoundError("no xml file found.")
+
+    # extract metadata from xml files
+    rsc_files = [i for i in meta_files if i.endswith('.rsc')]
+    xml_files = [i for i in meta_files if i.endswith('.xml')]
+    xml_dict = {}
+    for rsc_file in rsc_files:
+        print('reading {}'.format(rsc_file))
+        rsc_dict = readfile.read_roipac_rsc(rsc_file)
+        for key in ['length', 'LENGTH', 'FILE_LENGTH', 'width', 'WIDTH']:
+            if key in rsc_dict.keys():
+                rsc_dict.pop(key)
+        xml_dict.update(rsc_dict)
+    for xml_file in xml_files:
+        print('reading {}'.format(xml_file))
+        xml_dict.update(read_giant_xml(xml_file))
+
+    if not xml_dict:
+        raise ValueError('No metadata found in file: '+xml_file)
+
+    # standardize metadata names
+    xml_dict = readfile.standardize_metadata(xml_dict)
+
+    # project name
+    sensor_name, project_name = sensor.project_name2sensor_name(os.path.abspath(fname))
+    if sensor_name:
+        xml_dict['PLATFORM'] = sensor_name
+    if project_name:
+        xml_dict['PROJECT_NAME'] = project_name
+        if sensor_name in project_name:
+            tmp = project_name.split(sensor_name)[1][0]
+            if tmp == 'A':
+                xml_dict['ORBIT_DIRECTION'] = 'ASCENDING'
+            else:
+                xml_dict['ORBIT_DIRECTION'] = 'DESCENDING'
+
+    # update GIAnT HDF5 file
+    fname = ut.add_attribute(fname, xml_dict, print_msg=True)
+    return fname
+
+
+##################################################################################################
+def main(iargs=None):
+    inps = cmd_line_parse(iargs)
+    prepare_metadata4giant(inps.file, inps.xml_file)
+    return
+
+
+###################################################################################################
+if __name__ == '__main__':
+    main(sys.argv[1:])
diff -pruN 1.3.3-2/mintpy/legacy/select_network.py 1.4.0-1/mintpy/legacy/select_network.py
--- 1.3.3-2/mintpy/legacy/select_network.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/legacy/select_network.py	2022-08-04 20:01:49.000000000 +0000
@@ -75,7 +75,7 @@ def create_parser():
     fig.add_argument('--fs', '--fontsize', type=int,
                      default=12, help='font size in points')
     fig.add_argument('--show-fig', dest='disp_fig', action='store_true',
-                     help='display network ploting result')
+                     help='display network plotting result')
     fig.add_argument('--figext', dest='figext', default='.pdf',
                      help='file extension to be saved.')
     fig.add_argument('--dpi', dest='figdpi', type=int, default=150,
diff -pruN 1.3.3-2/mintpy/legacy/tropo_pyaps.py 1.4.0-1/mintpy/legacy/tropo_pyaps.py
--- 1.3.3-2/mintpy/legacy/tropo_pyaps.py	1970-01-01 00:00:00.000000000 +0000
+++ 1.4.0-1/mintpy/legacy/tropo_pyaps.py	2022-08-04 20:01:49.000000000 +0000
@@ -0,0 +1,522 @@
+#!/usr/bin/env python
+############################################################
+# Program is part of MintPy                                #
+# Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi         #
+# Author: Heresh Fattahi, Zhang Yunjun, 2015               #
+############################################################
+
+
+import os
+import re
+import sys
+import subprocess
+try:
+    import pyaps as pa
+except ImportError:
+    raise ImportError('Cannot import pyaps!')
+
+import argparse
+import numpy as np
+from mintpy.objects import timeseries, geometry
+from mintpy.utils import readfile, writefile, ptime, utils as ut
+
+standardWeatherModelNames = {
+    'ERAI': 'ECMWF', 'ERAINT': 'ECMWF', 'ERAINTERIM': 'ECMWF',
+    'MERRA2': 'MERRA',
+}
+
+
+###############################################################
+EXAMPLE = """example:
+  # download reanalysis dataset, calculate tropospheric delays and correct time-series file.
+  tropo_pyaps.py -f timeseries.h5 -m ECMWF -g inputs/geometryRadar.h5 -w ${WEATHER_DIR}
+
+  # download reanalysis dataset, calculate tropospheric delays
+  tropo_pyaps.py -d date_list.txt     --hour 12 -m ECMWF -g inputs/geometryRadar.h5 --ref-yx 30 40
+  tropo_pyaps.py -d 20151002 20151003 --hour 12 -m MERRA -g inputs/geometryRadar.h5 --ref-yx 30 40
+
+  # download reanalysis dataset
+  tropo_pyaps.py -d date_list.txt     --hour 12 -m ECMWF
+"""
+
+REFERENCE = """reference:
+  Jolivet, R., R. Grandin, C. Lasserre, M.-P. Doin and G. Peltzer (2011), Systematic InSAR tropospheric
+  phase delay corrections from global meteorological reanalysis data, Geophys. Res. Lett., 38, L17311,
+  doi:10.1029/2011GL048757
+
+  Jolivet, R., P. S. Agram, N. Y. Lin, M. Simons, M. P. Doin, G. Peltzer, and Z. Li (2014), Improving
+  InSAR geodesy using global atmospheric models, Journal of Geophysical Research: Solid Earth, 119(3),
+  2324-2341.
+"""
+
+DATA_INFO = """
+  re-analysis_dataset        coverage   temporal_resolution    spatial_resolution      latency     analysis
+------------------------------------------------------------------------------------------------------------
+ERA-Interim (by ECMWF)        Global      00/06/12/18 UTC      0.75 deg (~83 km)       2-month      4D-var
+MERRA(2) (by NASA Goddard)    Global      00/06/12/18 UTC      0.5*0.625 (~50 km)     2-3 weeks     3D-var
+
+To download MERRA2, you need an Earthdata account and must pre-authorize the "NASA GESDISC DATA ARCHIVE" application
+    following https://disc.gsfc.nasa.gov/earthdata-login.
+"""
+
+WEATHER_DIR_DEMO = """--weather-dir ~/data/aux
+WEATHER/
+    /ECMWF
+        ERA-Int_20030329_06.grb
+        ERA-Int_20030503_06.grb
+    /MERRA
+        merra-20110126-06.nc4
+        merra-20110313-06.nc4
+"""
+
+
+def create_parser():
+    parser = argparse.ArgumentParser(description='Tropospheric correction using weather models\n' +
+                                     '  PyAPS is used to download and calculate the delay for each time-series epoch.',
+                                     formatter_class=argparse.RawTextHelpFormatter,
+                                     epilog=REFERENCE+'\n'+DATA_INFO+'\n'+EXAMPLE)
+    # For data download
+    parser.add_argument('-m', '--model', '-s', dest='trop_model', default='ECMWF',
+                        choices={'ECMWF', 'MERRA', 'NARR', 'ERA', 'MERRA1'},
+                        help='source of the atmospheric data.\nNARR is working for 1979-Jan to 2014-Oct.')
+    parser.add_argument('-d', '--date-list', dest='date_list', nargs='*',
+                        help='Read the first column of text file as list of date to download data\n' +
+                             'in YYYYMMDD or YYMMDD format')
+    parser.add_argument('--hour', help='time of data in HH, e.g. 12, 06')
+    parser.add_argument('-w', '--dir', '--weather-dir', dest='weather_dir', default='${WEATHER_DIR}',
+                        help='parent directory of downloaded weather data file. Default: ${WEATHER_DIR}\n' +
+                             'e.g.: '+WEATHER_DIR_DEMO)
+
+    # For delay calculation
+    parser.add_argument('-g','--geomtry', dest='geom_file', type=str,
+                        help='geometry file including height, incidenceAngle and/or latitude and longitude')
+    parser.add_argument('--ref-yx', dest='ref_yx', type=int,
+                        nargs=2, help='reference pixel in y/x')
+    parser.add_argument('--delay', dest='delay_type', default='comb', choices={'comb', 'dry', 'wet'},
+                        help='Delay type to calculate, comb contains both wet and dry delays')
+
+    # For delay correction
+    parser.add_argument('-f', '--file', dest='timeseries_file',
+                        help='timeseries HDF5 file, i.e. timeseries.h5')
+    parser.add_argument('-o', dest='outfile',
+                        help='Output file name for tropospheric-corrected timeseries.')
+    return parser
+
+
+def cmd_line_parse(iargs=None):
+    """Command line parser."""
+    parser = create_parser()
+    inps = parser.parse_args(args=iargs)
+
+    # check the input requirements
+    key_list = ['date_list', 'hour']
+    # with timeseries file
+    if inps.timeseries_file:
+        for key in key_list+['ref_yx']:
+            if vars(inps)[key]:
+                print(('input "{:<10}" is ignored because it will be extracted from '
+                       'timeseries file {}').format(key, inps.timeseries_file))
+
+    # without timeseries file
+    elif any(not vars(inps)[key] for key in key_list):
+        msg = 'No input timeseries file, all the following options are required: \n{}'.format(key_list)
+        msg += '\n\n'+EXAMPLE
+        raise ValueError(msg)
+
+
+    ## default values
+    # Get Grib Source
+    inps.trop_model = standardize_trop_model(inps.trop_model, standardWeatherModelNames)
+    print('weather model: '+inps.trop_model)
+
+    # weather_dir
+    inps.weather_dir = os.path.expanduser(inps.weather_dir)
+    inps.weather_dir = os.path.expandvars(inps.weather_dir)
+    # Fallback value if WEATHER_DIR is not defined as environment variable
+    if inps.weather_dir == '${WEATHER_DIR}':
+        inps.weather_dir = './'
+    print('weather data directory: '+inps.weather_dir)
+
+    return inps
+
+
+###############################################################
+def check_inputs(inps):
+    parser = create_parser()
+
+    # output directories/files
+    atr = dict()
+    mintpy_dir = None
+    if inps.timeseries_file:
+        atr = readfile.read_attribute(inps.timeseries_file)
+        mintpy_dir = os.path.dirname(inps.timeseries_file)
+        if not inps.outfile:
+            fbase = os.path.splitext(inps.timeseries_file)[0]
+            inps.outfile = '{}_{}.h5'.format(fbase, inps.trop_model)
+    elif inps.geom_file:
+        atr = readfile.read_attribute(inps.geom_file)
+        mintpy_dir = os.path.join(os.path.dirname(inps.geom_file), '..')
+    else:
+        mintpy_dir = os.path.abspath(os.getcwd())
+
+    # trop_file
+    inps.trop_file = os.path.join(mintpy_dir, 'inputs/{}.h5'.format(inps.trop_model))
+    print('output tropospheric delay file: {}'.format(inps.trop_file))
+
+    # hour
+    if not inps.hour:
+        if 'CENTER_LINE_UTC' in atr.keys():
+            inps.hour = closest_weather_product_time(atr['CENTER_LINE_UTC'], inps.trop_model)
+        else:
+            parser.print_usage()
+            raise Exception('no input for hour')
+    print('time of closest available product: {}:00 UTC'.format(inps.hour))
+
+    # date list
+    if inps.timeseries_file:
+        print('read date list from timeseries file: {}'.format(inps.timeseries_file))
+        ts_obj = timeseries(inps.timeseries_file)
+        ts_obj.open(print_msg=False)
+        inps.date_list = ts_obj.dateList
+    elif len(inps.date_list) == 1:
+        if os.path.isfile(inps.date_list[0]):
+            print('read date list from text file: {}'.format(inps.date_list[0]))
+            inps.date_list = ptime.yyyymmdd(np.loadtxt(inps.date_list[0],
+                                                       dtype=bytes,
+                                                       usecols=(0,)).astype(str).tolist())
+        else:
+            parser.print_usage()
+            raise Exception('ERROR: input date list < 2')
+
+    # Grib data directory
+    inps.grib_dir = os.path.join(inps.weather_dir, inps.trop_model)
+    if not os.path.isdir(inps.grib_dir):
+        os.makedirs(inps.grib_dir)
+        print('making directory: '+inps.grib_dir)
+
+    # Date list to grib file list
+    inps.grib_file_list = date_list2grib_file(inps.date_list,
+                                              inps.hour,
+                                              inps.trop_model,
+                                              inps.grib_dir)
+
+    if 'REF_Y' in atr.keys():
+        inps.ref_yx = [int(atr['REF_Y']), int(atr['REF_X'])]
+        print('reference pixel: {}'.format(inps.ref_yx))
+
+    # Coordinate system: geocoded or not
+    inps.geocoded = False
+    if 'Y_FIRST' in atr.keys():
+        inps.geocoded = True
+    print('geocoded: {}'.format(inps.geocoded))
+
+    # Prepare DEM, inc_angle, lat/lon file for PyAPS to read
+    if inps.geom_file:
+        geom_obj = geometry(inps.geom_file)
+        geom_obj.open()
+
+        print('converting DEM/incAngle for PyAPS to read')
+        # DEM
+        dem = readfile.read(inps.geom_file, datasetName='height', print_msg=False)[0]
+        inps.dem_file = 'pyapsDem.hgt'
+        writefile.write(dem, inps.dem_file, metadata=atr)
+
+        # inc_angle
+        if 'incidenceAngle' in geom_obj.datasetNames:
+            inps.inc_angle = readfile.read(inps.geom_file, datasetName='incidenceAngle', print_msg=False)[0]
+        else:
+            atr = readfile.read_attribute(inps.timeseries_file)
+            inps.inc_angle = ut.incidence_angle(atr, dem=dem, dimension=0)
+            inps.inc_angle = np.ones(dem.shape, dtype=np.float32) * inps.inc_angle
+        inps.inc_angle_file = 'pyapsIncAngle.flt'
+        writefile.write(inps.inc_angle, inps.inc_angle_file, metadata=atr)
+
+        # latitude
+        if 'latitude' in geom_obj.datasetNames:
+            data = readfile.read(inps.geom_file, datasetName='latitude', print_msg=False)[0]
+            print('converting lat for PyAPS to read')
+            inps.lat_file = 'pyapsLat.flt'
+            writefile.write(data, inps.lat_file, metadata=atr)
+        else:
+            inps.lat_file = None
+
+        # longitude
+        if 'longitude' in geom_obj.datasetNames:
+            data = readfile.read(inps.geom_file, datasetName='longitude', print_msg=False)[0]
+            print('converting lon for PyAPS to read')
+            inps.lon_file = 'pyapsLon.flt'
+            writefile.write(data, inps.lon_file, metadata=atr)
+        else:
+            inps.lon_file = None
+    return inps, atr
+
+
+###############################################################
+def closest_weather_product_time(sar_acquisition_time, grib_source='ECMWF'):
+    """Find closest available time of weather product from SAR acquisition time
+    Inputs:
+        sar_acquisition_time - string, SAR data acquisition time in seconds
+        grib_source - string, Grib Source of weather reanalysis product
+    Output:
+        grib_hr - string, time of closest available weather product
+    Example:
+        '06' = closest_weather_product_time(atr['CENTER_LINE_UTC'])
+        '12' = closest_weather_product_time(atr['CENTER_LINE_UTC'], 'NARR')
+    """
+    # Get hour/min of SAR acquisition time
+    sar_time = float(sar_acquisition_time)
+
+    # Find closest time in available weather products
+    grib_hr_list = [0, 6, 12, 18]
+    grib_hr = int(min(grib_hr_list, key=lambda x: abs(x-sar_time/3600.)))
+
+    # Adjust time output format
+    grib_hr = "%02d" % grib_hr
+    return grib_hr
+
+
+def standardize_trop_model(tropModel, standardWeatherModelNames):
+    tropModel = tropModel.replace('-', '').upper()
+    if tropModel in standardWeatherModelNames.keys():
+        tropModel = standardWeatherModelNames[tropModel]
+    return tropModel
+
+
+def date_list2grib_file(date_list, hour, trop_model, grib_dir):
+    grib_file_list = []
+    for d in date_list:
+        grib_file = grib_dir+'/'
+        if   trop_model == 'ECMWF' :  grib_file += 'ERA-Int_%s_%s.grb' % (d, hour)
+        elif trop_model == 'MERRA' :  grib_file += 'merra-%s-%s.nc4' % (d, hour)
+        elif trop_model == 'NARR'  :  grib_file += 'narr-a_221_%s_%s00_000.grb' % (d, hour)
+        elif trop_model == 'ERA'   :  grib_file += 'ERA_%s_%s.grb' % (d, hour)
+        elif trop_model == 'MERRA1':  grib_file += 'merra-%s-%s.hdf' % (d, hour)
+        grib_file_list.append(grib_file)
+    return grib_file_list
+
+
+def grib_file_name2trop_model_name(grib_file):
+    grib_file = os.path.basename(grib_file)
+    if grib_file.startswith('ERA-Int'):  trop_model = 'ECMWF'
+    elif grib_file.startswith('merra'):  trop_model = 'MERRA'
+    elif grib_file.startswith('narr'):   trop_model = 'NARR'
+    elif grib_file.startswith('ERA_'):   trop_model = 'ERA'
+    return trop_model
+
+
+def check_exist_grib_file(gfile_list, print_msg=True):
+    """Check input list of grib files, and return the existing ones with right size."""
+    gfile_exist = ut.get_file_list(gfile_list)
+    if gfile_exist:
+        file_sizes = [os.path.getsize(i) for i in gfile_exist
+                      if os.path.getsize(i) > 10e6]
+        if file_sizes:
+            comm_size = ut.most_common([i for i in file_sizes])
+            if print_msg:
+                print('common file size: {} bytes'.format(comm_size))
+                print('number of existing grib files   : {}'.format(len(gfile_exist)))
+
+            gfile_corrupt = []
+            for gfile in gfile_exist:
+                if os.path.getsize(gfile) < comm_size * 0.9:
+                    gfile_corrupt.append(gfile)
+        else:
+            gfile_corrupt = gfile_exist
+
+        if gfile_corrupt:
+            if print_msg:
+                print('------------------------------------------------------------------------------')
+                print('corrupted grib files detected! Deleting them to re-download ...')
+                print('number of grib files corrupted  : {}'.format(len(gfile_corrupt)))
+
+            for gfile in gfile_corrupt:
+                os.remove(gfile)
+                gfile_exist.remove(gfile)
+
+            if print_msg:
+                print('------------------------------------------------------------------------------')
+    return gfile_exist
+
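The check above treats any file smaller than 90% of the most common file size as a truncated download. A self-contained sketch of that heuristic, with made-up sizes and max(set(...), key=...) standing in for ut.most_common():

    sizes = [52_428_800, 52_428_800, 52_428_801, 11_000_000]   # bytes (made-up)
    comm_size = max(set(sizes), key=sizes.count)               # most common size
    corrupt = [s for s in sizes if s < comm_size * 0.9]        # truncated downloads
    print(corrupt)                                             # -> [11000000]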
+
+def dload_grib_pyaps(grib_file_list):
+    """Download weather re-analysis grib files using PyAPS
+    Parameters: grib_file_list : list of string of grib files
+    Returns:    grib_file_list : list of string
+    """
+    print('\n------------------------------------------------------------------------------')
+    print('downloading weather model data using PyAPS ...')
+
+    # Get date list to download (skip already downloaded files)
+    grib_file_exist = check_exist_grib_file(grib_file_list, print_msg=True)
+    grib_file2dload = sorted(list(set(grib_file_list) - set(grib_file_exist)))
+    date_list2dload = [str(re.findall(r'\d{8}', i)[0]) for i in grib_file2dload]
+    print('number of grib files to download: %d' % len(date_list2dload))
+    print('------------------------------------------------------------------------------\n')
+
+    # Download grib file using PyAPS
+    if len(date_list2dload) > 0:
+        hour = re.findall(r'\d{8}[-_]\d{2}', grib_file2dload[0])[0].replace('-', '_').split('_')[1]
+        grib_dir = os.path.dirname(grib_file2dload[0])
+
+        # try 3 times to download, then use whatever downloaded to calculate delay
+        trop_model = grib_file_name2trop_model_name(grib_file2dload[0])
+        i = 0
+        while i < 3:
+            i += 1
+            try:
+                if   trop_model == 'ECMWF' :  pa.ECMWFdload( date_list2dload, hour, grib_dir)
+                elif trop_model == 'MERRA' :  pa.MERRAdload( date_list2dload, hour, grib_dir)
+                elif trop_model == 'NARR'  :  pa.NARRdload(  date_list2dload, hour, grib_dir)
+                elif trop_model == 'ERA'   :  pa.ERAdload(   date_list2dload, hour, grib_dir)
+                elif trop_model == 'MERRA1':  pa.MERRA1dload(date_list2dload, hour, grib_dir)
+            except:
+                pass
+
+    grib_file_list = check_exist_grib_file(grib_file_list, print_msg=False)
+    return grib_file_list
+
+
+def get_delay(grib_file, inps):
+    """Get delay matrix using PyAPS for one acquisition
+    Inputs:
+        grib_file - string, grib file path
+        inps      - namespace, including the following attributes:
+                    dem_file    - string, DEM file path
+                    trop_model  - string, weather re-analysis data source
+                    delay_type  - string, comb/dry/wet
+                    ref_y/x     - string, reference pixel row/col number
+                    inc_angle   - np.array, 0/1/2 D
+    Output:
+        phs - 2D np.array, absolute tropospheric phase delay relative to ref_y/x
+    """
+    # initiate pyaps object
+    if inps.geocoded:
+        aps = pa.PyAPS_geo(grib_file, inps.dem_file, grib=inps.trop_model,
+                           demtype=np.float32, demfmt='RMG',
+                           verb=False, Del=inps.delay_type)
+    else:
+        aps = pa.PyAPS_rdr(grib_file, inps.dem_file, grib=inps.trop_model,
+                           demtype=np.float32, demfmt='RMG',
+                           verb=False, Del=inps.delay_type)
+
+    # estimate delay
+    phs = np.zeros((aps.ny, aps.nx), dtype=np.float32)
+    if not inps.geocoded and inps.lat_file is not None:
+        aps.getgeodelay(phs,
+                        lat=inps.lat_file,
+                        lon=inps.lon_file,
+                        inc=inps.inc_angle_file)
+    else:
+        aps.getdelay(phs, inc=0.)
+        phs /= np.cos(inps.inc_angle*np.pi/180.)
+
+    # Get relative phase delay in space
+    phs -= phs[inps.ref_yx[0], inps.ref_yx[1]]
+    phs *= -1    # reverse the sign for consistency between different phase correction steps/methods
+    return phs
+
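When no lat/lon files are available, the fallback branch above computes the zenith delay and projects it onto the line of sight by dividing by the cosine of the incidence angle. A worked sketch with made-up numbers:

    import numpy as np

    zenith_delay = 0.10                                  # zenith tropo delay [m] (made-up)
    inc_angle = 34.0                                     # incidence angle [deg] (made-up)
    slant_delay = zenith_delay / np.cos(np.deg2rad(inc_angle))
    print(round(slant_delay, 4))                         # -> 0.1206, i.e. ~20% longer path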
+
+def get_delay_timeseries(inps, atr):
+    """Calculate delay time-series and write it to HDF5 file.
+    Parameters: inps : namespace, all input parameters
+                atr  : dict, metadata to be saved in trop_file
+    Returns:    trop_file : str, file name of ECMWF.h5
+    """
+    def get_dataset_size(fname):
+        atr = readfile.read_attribute(fname)
+        return (atr['LENGTH'], atr['WIDTH'])
+
+    if (ut.run_or_skip(out_file=inps.trop_file, in_file=inps.grib_file_list, print_msg=False) == 'skip' 
+            and get_dataset_size(inps.trop_file) == get_dataset_size(inps.geom_file)):
+        print('{} file exists and is newer than all GRIB files, skip updating.'.format(inps.trop_file))
+    else:
+        if any(i is None for i in [inps.geom_file, inps.ref_yx]):
+            print('No DEM / incidenceAngle / ref_yx found, skip calculating tropospheric delays.')
+            if not os.path.isfile(inps.trop_file):
+                inps.trop_file = None
+            return
+
+        # calculate phase delay
+        length, width = int(atr['LENGTH']), int(atr['WIDTH'])
+        num_date = len(inps.grib_file_list)
+        date_list = [str(re.findall(r'\d{8}', i)[0]) for i in inps.grib_file_list]
+        trop_data = np.zeros((num_date, length, width), np.float32)
+
+        print('calculating delay for each date using PyAPS (Jolivet et al., 2011; 2014) ...')
+        print('number of grib files used: {}'.format(num_date))
+        prog_bar = ptime.progressBar(maxValue=num_date)
+        for i in range(num_date):
+            grib_file = inps.grib_file_list[i]
+            trop_data[i] = get_delay(grib_file, inps)
+            prog_bar.update(i+1, suffix=os.path.basename(grib_file))
+        prog_bar.close()
+
+        # Convert relative phase delay on reference date
+        try:
+            inps.ref_date = atr['REF_DATE']
+        except:
+            inps.ref_date = date_list[0]
+        print('convert to relative phase delay with reference date: '+inps.ref_date)
+        inps.ref_idx = date_list.index(inps.ref_date)
+        trop_data -= np.tile(trop_data[inps.ref_idx, :, :], (num_date, 1, 1))
+
+        # Write tropospheric delay to HDF5
+        atr['REF_Y'] = inps.ref_yx[0]
+        atr['REF_X'] = inps.ref_yx[1]
+        ts_obj = timeseries(inps.trop_file)
+        ts_obj.write2hdf5(data=trop_data,
+                          dates=date_list,
+                          metadata=atr,
+                          refFile=inps.timeseries_file)
+
+    # Delete temporary geometry files in ROI_PAC format
+    if inps.geom_file:
+        temp_files = [fname for fname in [inps.dem_file,
+                                          inps.inc_angle_file,
+                                          inps.lat_file,
+                                          inps.lon_file]
+                      if (fname is not None and 'pyaps' in fname)]
+        if temp_files:
+            print('delete temporary geometry files: {}'.format(temp_files))
+            for temp_file in temp_files:
+                os.remove(temp_file)
+                os.remove(temp_file+'.rsc')
+    return
+
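The temporal referencing above subtracts the delay of the reference epoch from every acquisition, so the reference epoch becomes identically zero. A minimal sketch on a toy (num_date, length, width) stack:

    import numpy as np

    trop_data = np.arange(12, dtype=np.float32).reshape(3, 2, 2)   # toy delay stack
    ref_idx = 0
    trop_data -= np.tile(trop_data[ref_idx, :, :], (3, 1, 1))      # reference in time
    print(trop_data[ref_idx])                                      # -> all zeros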
+
+def correct_timeseries(dis_file, tropo_file, cor_dis_file):
+    # diff.py can handle different reference in space and time
+    # between the absolute tropospheric delay and the double referenced time-series
+    print('\n------------------------------------------------------------------------------')
+    print('correcting relative delay for input time-series using diff.py')
+    from mintpy import diff
+
+    iargs = [dis_file, tropo_file, '-o', cor_dis_file, '--force']
+    print('diff.py', ' '.join(iargs))
+    diff.main(iargs)
+    return cor_dis_file
+
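A hypothetical invocation, with placeholder file names following the ECMWF.h5 naming mentioned in get_delay_timeseries() above:

    # placeholders for illustration only
    correct_timeseries(
        dis_file='timeseries.h5',
        tropo_file='ECMWF.h5',
        cor_dis_file='timeseries_ECMWF.h5')
    # equivalent CLI: diff.py timeseries.h5 ECMWF.h5 -o timeseries_ECMWF.h5 --force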
+
+###############################################################
+def main(iargs=None):
+    inps = cmd_line_parse(iargs)
+    inps, atr = check_inputs(inps)
+
+    inps.grib_file_list = dload_grib_pyaps(inps.grib_file_list)
+
+    if inps.trop_file:
+        get_delay_timeseries(inps, atr)
+
+    if atr and atr['FILE_TYPE'] == 'timeseries':
+        inps.outfile = correct_timeseries(inps.timeseries_file,
+                                          inps.trop_file,
+                                          cor_dis_file=inps.outfile)
+    else:
+        print('No input timeseries file, skip correcting tropospheric delays.')
+
+    return inps.outfile
+
+
+###############################################################
+if __name__ == '__main__':
+    main(sys.argv[1:])
diff -pruN 1.3.3-2/mintpy/load_data.py 1.4.0-1/mintpy/load_data.py
--- 1.3.3-2/mintpy/load_data.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/load_data.py	2022-08-04 20:01:49.000000000 +0000
@@ -10,7 +10,6 @@ import os
 import sys
 import glob
 import time
-import argparse
 import warnings
 
 from mintpy.defaults import auto_path
@@ -28,26 +27,39 @@ from mintpy.objects.stackDict import (
     ifgramDict,
 )
 from mintpy.utils import readfile, ptime, utils as ut
+from mintpy.utils.arg_utils import create_argument_parser
 from mintpy import subset
 
 
 #################################################################
 PROCESSOR_LIST = ['isce', 'aria', 'hyp3', 'gmtsar', 'snap', 'gamma', 'roipac', 'cosicorr']
 
-datasetName2templateKey = {
+# primary observation dataset names
+OBS_DSET_NAMES = ['unwrapPhase', 'rangeOffset', 'azimuthOffset']
+
+IFG_DSET_NAME2TEMPLATE_KEY = {
     'unwrapPhase'     : 'mintpy.load.unwFile',
     'coherence'       : 'mintpy.load.corFile',
     'connectComponent': 'mintpy.load.connCompFile',
     'wrapPhase'       : 'mintpy.load.intFile',
-    'ionoPhase'       : 'mintpy.load.ionoFile',
     'magnitude'       : 'mintpy.load.magFile',
+}
+
+ION_DSET_NAME2TEMPLATE_KEY = {
+    'unwrapPhase'     : 'mintpy.load.ionUnwFile',
+    'coherence'       : 'mintpy.load.ionCorFile',
+    'connectComponent': 'mintpy.load.ionConnCompFile',
+}
 
+OFF_DSET_NAME2TEMPLATE_KEY = {
     'azimuthOffset'   : 'mintpy.load.azOffFile',
     'azimuthOffsetStd': 'mintpy.load.azOffStdFile',
     'rangeOffset'     : 'mintpy.load.rgOffFile',
     'rangeOffsetStd'  : 'mintpy.load.rgOffStdFile',
     'offsetSNR'       : 'mintpy.load.offSnrFile',
+}
 
+GEOM_DSET_NAME2TEMPLATE_KEY = {
     'height'          : 'mintpy.load.demFile',
     'latitude'        : 'mintpy.load.lookupYFile',
     'longitude'       : 'mintpy.load.lookupXFile',
@@ -79,26 +91,42 @@ NOTE = """NOTE:
 """
 
 EXAMPLE = """example:
-  load_data.py -t GalapagosSenDT128.tempalte
+  # MUST run in the mintpy working directory!
+
+  # show example template file for ISCE/ROI_PAC/GAMMA products
+  load_data.py -H
+
+  # load & write the following HDF5 files:
+  # ./inputs/ifgramStack.h5   for interferogram        stack
+  # ./inputs/ionStack.h5      for ionosphere           stack
+  # ./inputs/offsetStack.h5   for range/azimuth offset stack
+  # ./inputs/geometryRadar.h5 for geometry in radar coordinates
+  # ./inputs/geometryGeo.h5   for geometry in geo   coordinates
   load_data.py -t smallbaselineApp.cfg
-  load_data.py -t smallbaselineApp.cfg GalapagosSenDT128.tempalte --project GalapagosSenDT128
-  load_data.py -H #Show example input template for ISCE/ROI_PAC/GAMMA products
+  load_data.py -t smallbaselineApp.cfg GalapagosSenDT128.txt --project GalapagosSenDT128
 
-  # load geometry only
-  # fill metaFile, baselineDir and geometry datasets in the template and run load_data.py
+  # load geometry ONLY
+  smallbaselineApp.py SaltonSeaSenDT173.txt -g
+  load_data.py -t smallbaselineApp.cfg --geom
 """
 
 
-def create_parser():
+def create_parser(subparsers=None):
     """Create command line parser."""
-    parser = argparse.ArgumentParser(description='Saving a stack of Interferograms to an HDF5 file',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=TEMPLATE+'\n'+NOTE+'\n'+EXAMPLE)
+    synopsis = 'Load stacks of interferograms to HDF5 files'
+    epilog = TEMPLATE + '\n' + NOTE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
+
     parser.add_argument('-H', dest='print_example_template', action='store_true',
                         help='Print/Show the example template file for loading.')
-    parser.add_argument('-t', '--template', type=str, nargs='+',
-                        dest='template_file', help='template file with path info.')
+    parser.add_argument('-t', '--template', dest='template_file', type=str, nargs='+',
+                        help='template file(s) with path info.')
+    parser.add_argument('--geom', '--geometry', dest='only_load_geometry', action='store_true',
+                        help='Load the geometry file(s) ONLY.')
 
+    # options from template file name & content
     parser.add_argument('--project', type=str, dest='PROJECT_NAME',
                         help='project name of dataset for INSARMAPS Web Viewer')
     parser.add_argument('--processor', type=str, dest='processor', choices=PROCESSOR_LIST,
@@ -108,11 +136,6 @@ def create_parser():
     parser.add_argument('--compression', choices={'gzip', 'lzf', None}, default=None,
                         help='compress loaded geometry while writing HDF5 file, default: None.')
 
-    parser.add_argument('-o', '--output', type=str, nargs=3, dest='outfile',
-                        default=['./inputs/ifgramStack.h5',
-                                 './inputs/geometryRadar.h5',
-                                 './inputs/geometryGeo.h5'],
-                        help='output HDF5 file')
     return parser
 
 
@@ -136,21 +159,21 @@ def cmd_line_parse(iargs=None):
         print('{} -H to show the example template file'.format(os.path.basename(__file__)))
         sys.exit(1)
 
-    inps.outfile = [os.path.abspath(i) for i in inps.outfile]
-    inps.outdir = os.path.dirname(inps.outfile[0])
-
     return inps
 
 
 #################################################################
 def read_inps2dict(inps):
-    """Read input Namespace object info into iDict
+    """Read input Namespace object info into iDict.
 
     It grabs the following contents into iDict:
     1. inps & all template files
     2. configurations: processor, autoPath, updateMode, compression, x/ystep
     3. extra metadata: PLATFORM, PROJECT_NAME,
     4. translate autoPath
+
+    Parameters: inps  - namespace, input arguments from command line & template file
+    Returns:    iDict - dict,      input arguments from command line & template file
     """
     # Read input info into iDict
     iDict = vars(inps)
@@ -167,14 +190,13 @@ def read_inps2dict(inps):
     if 'processor' in template.keys():
         template['mintpy.load.processor'] = template['processor']
 
+    # group - load
     prefix = 'mintpy.load.'
     key_list = [i.split(prefix)[1] for i in template.keys() if i.startswith(prefix)]
     for key in key_list:
         value = template[prefix+key]
         if key in ['processor', 'autoPath', 'updateMode', 'compression']:
             iDict[key] = template[prefix+key]
-        elif key in ['xstep', 'ystep']:
-            iDict[key] = int(template[prefix+key])
         elif value:
             iDict[prefix+key] = template[prefix+key]
     print('processor : {}'.format(iDict['processor']))
@@ -182,8 +204,17 @@ def read_inps2dict(inps):
     if iDict['compression'] == False:
         iDict['compression'] = None
 
-    iDict['xstep'] = iDict.get('xstep', 1)
-    iDict['ystep'] = iDict.get('ystep', 1)
+    # group - multilook
+    prefix = 'mintpy.multilook.'
+    key_list = [i.split(prefix)[1] for i in template.keys() if i.startswith(prefix)]
+    for key in key_list:
+        value = template[prefix+key]
+        if key in ['xstep', 'ystep', 'method']:
+            iDict[key] = template[prefix+key]
+
+    iDict['xstep']  = int(iDict.get('xstep', 1))
+    iDict['ystep']  = int(iDict.get('ystep', 1))
+    iDict['method'] = str(iDict.get('method', 'nearest'))
 
     # PROJECT_NAME --> PLATFORM
     if not iDict['PROJECT_NAME']:
@@ -203,51 +234,47 @@ def read_inps2dict(inps):
     if iDict.get('autoPath', False):
         print('use auto path defined in mintpy.defaults.auto_path for options in auto')
         iDict = auto_path.get_auto_path(processor=iDict['processor'],
-                                        work_dir=os.path.dirname(iDict['outdir']),
+                                        work_dir=os.getcwd(),
                                         template=iDict)
 
-    # copy global var dsName2templateKey to iDict as a local var
-    iDict['ds_name2key'] = dict()
-    for key, value in datasetName2templateKey.items():
-        iDict['ds_name2key'][key] = value
-
     return iDict
 
 
 def read_subset_box(iDict):
     """read the following items:
-    geocoded
-    box
-    box4geo_lut
+    geocoded - bool, whether the stack of observations is geocoded or not
+    box      - tuple of 4 int, pixel box for stackObj and geomRadarObj, for obs in geo & radar coordinates
+    box4geo  - tuple of 4 int, pixel box for geomGeoObj; the same as box, except for
+               obs in radar coordinates with a geo-coordinate lookup table [for gamma and roipac],
+               where box4geo is the geo bounding box of the box above.
     """
     # Read subset info from template
     iDict['box'] = None
-    iDict['box4geo_lut'] = None
+    iDict['box4geo'] = None
     pix_box, geo_box = subset.read_subset_template2box(iDict['template_file'][0])
 
     # Grab required info to read input geo_box into pix_box
-    try:
-        lookupFile = [glob.glob(str(iDict['mintpy.load.lookupYFile']))[0],
-                      glob.glob(str(iDict['mintpy.load.lookupXFile']))[0]]
-    except:
-        lookupFile = None
-
-    try:
-        pathKey = [i for i in iDict['ds_name2key'].values()
-                   if i in iDict.keys()][0]
-        file = glob.glob(str(iDict[pathKey]))[0]
-        atr = readfile.read_attribute(file)
-    except:
-        atr = dict()
+    lookup_y_files = glob.glob(str(iDict['mintpy.load.lookupYFile']))
+    lookup_x_files = glob.glob(str(iDict['mintpy.load.lookupXFile']))
+    if len(lookup_y_files) > 0 and len(lookup_x_files) > 0:
+        lookup_file = [lookup_y_files[0], lookup_x_files[0]]
+    else:
+        lookup_file = None
 
-    geocoded = None
-    if 'Y_FIRST' in atr.keys():
-        geocoded = True
+    # use DEM file attribute as reference, because
+    # 1) it is required AND
+    # 2) it is in the same coordinate type as observation files
+    dem_files = glob.glob(iDict['mintpy.load.demFile'])
+    if len(dem_files) > 0:
+        atr = readfile.read_attribute(dem_files[0])
     else:
-        geocoded = False
+        atr = dict()
+
+    geocoded = 'Y_FIRST' in atr.keys()
+    iDict['geocoded'] = geocoded
 
     # Check conflict
-    if geo_box and not geocoded and lookupFile is None:
+    if geo_box and not geocoded and lookup_file is None:
         geo_box = None
         print(('WARNING: mintpy.subset.lalo is not supported'
                ' if 1) no lookup file AND'
@@ -266,7 +293,7 @@ def read_subset_box(iDict):
             return iDict
 
     # geo_box --> pix_box
-    coord = ut.coordinate(atr, lookup_file=lookupFile)
+    coord = ut.coordinate(atr, lookup_file=lookup_file)
     if geo_box is not None:
         pix_box = coord.bbox_geo2radar(geo_box)
         pix_box = coord.check_box_within_data_coverage(pix_box)
@@ -275,24 +302,24 @@ def read_subset_box(iDict):
 
     # Get box for geocoded lookup table (for gamma/roipac)
     box4geo_lut = None
-    if lookupFile is not None:
-        atrLut = readfile.read_attribute(lookupFile[0])
+    if lookup_file is not None:
+        atrLut = readfile.read_attribute(lookup_file[0])
         if not geocoded and 'Y_FIRST' in atrLut.keys():
             geo_box = coord.bbox_radar2geo(pix_box)
             box4geo_lut = ut.coordinate(atrLut).bbox_geo2radar(geo_box)
             print('box to read for geocoded lookup file in y/x: {}'.format(box4geo_lut))
 
-    iDict['geocoded'] = geocoded
     iDict['box'] = pix_box
-    iDict['box4geo_lut'] = box4geo_lut
+    iDict['box4geo'] = box4geo_lut if box4geo_lut else pix_box
     return iDict
 
 
+#################################################################
 def update_box4files_with_inconsistent_size(fnames):
     """Check the size (row / column number) of a list of files
     SNAP geocoded products may have one line missing in some interferograms (Andre, 2019-07-16).
-    Parameters: fnames  : list of path for interferogram files
-    Returns:    pix_box : None if all files are in same size
+    Parameters: fnames  - list of path for interferogram files
+    Returns:    pix_box - None if all files are in same size
                           (0, 0, min_width, min_length) if not.
     """
     atr_list = [readfile.read_attribute(fname) for fname in fnames]
@@ -379,38 +406,51 @@ def skip_files_with_inconsistent_size(ds
     return dsPathDict
 
 
-def read_inps_dict2ifgram_stack_dict_object(iDict):
-    """Read input arguments into dict of ifgramStackDict object"""
+def read_inps_dict2ifgram_stack_dict_object(iDict, ds_name2template_key):
+    """Read input arguments into ifgramStackDict object.
+
+    Parameters: iDict                - dict, input arguments from command line & template file
+                ds_name2template_key - dict, to relate the HDF5 dataset name to the template key
+    Returns:    stackObj             - ifgramStackDict object or None
+    """
+    if iDict['only_load_geometry']:
+        return None
+
+    if 'mintpy.load.unwFile' in ds_name2template_key.values():
+        obs_type = 'interferogram'
+    elif 'mintpy.load.ionUnwFile' in ds_name2template_key.values():
+        obs_type = 'ionosphere'
+    elif 'mintpy.load.azOffFile' in ds_name2template_key.values():
+        obs_type = 'offset'
+
     # iDict --> dsPathDict
     print('-'*50)
-    print('searching interferometric pairs info')
+    print(f'searching {obs_type} pairs info')
     print('input data files:')
-    maxDigit = max([len(i) for i in list(iDict['ds_name2key'].keys())])
+    max_digit = max([len(i) for i in list(ds_name2template_key.keys())])
     dsPathDict = {}
-    for dsName in [i for i in ifgramDatasetNames
-                   if i in iDict['ds_name2key'].keys()]:
-        key = iDict['ds_name2key'][dsName]
+    for dsName in [i for i in ifgramDatasetNames if i in ds_name2template_key.keys()]:
+        key = ds_name2template_key[dsName]
         if key in iDict.keys():
             files = sorted(glob.glob(str(iDict[key])))
             if len(files) > 0:
                 dsPathDict[dsName] = files
-                print('{:<{width}}: {path}'.format(dsName,
-                                                   width=maxDigit,
-                                                   path=iDict[key]))
+                print(f'{dsName:<{max_digit}}: {iDict[key]}')
 
     # Check 1: required dataset
-    dsName0s = ['unwrapPhase', 'rangeOffset', 'azimuthOffset']
+    dsName0s = [x for x in OBS_DSET_NAMES if x in ds_name2template_key.keys()]
     dsName0 = [i for i in dsName0s if i in dsPathDict.keys()]
     if len(dsName0) == 0:
-        print('WARNING: No reqired {} data files found!'.format(dsName0s))
+        print(f'WARNING: No data files found for the required dataset: {dsName0s}! Skip loading for {obs_type} stack.')
         return None
     else:
         dsName0 = dsName0[0]
 
     # Check 2: data dimension for unwrapPhase files
-    dsPathDict = skip_files_with_inconsistent_size(dsPathDict,
-                                                   pix_box=iDict['box'],
-                                                   dsName=dsName0)
+    dsPathDict = skip_files_with_inconsistent_size(
+        dsPathDict=dsPathDict,
+        pix_box=iDict['box'],
+        dsName=dsName0)
 
     # Check 3: number of files for all dataset types
     # dsPathDict --> dsNumDict
@@ -418,7 +458,7 @@ def read_inps_dict2ifgram_stack_dict_obj
     for key in dsPathDict.keys():
         num_file = len(dsPathDict[key])
         dsNumDict[key] = num_file
-        print('number of {:<{width}}: {num}'.format(key, width=maxDigit, num=num_file))
+        print(f'number of {key:<{max_digit}}: {num_file}')
 
     dsNumList = list(dsNumDict.values())
     if any(i != dsNumList[0] for i in dsNumList):
@@ -456,7 +496,6 @@ def read_inps_dict2ifgram_stack_dict_obj
         # ifgramPathDict1 = {
         #     'unwrapPhase': /dirPathToFile/filt_fine.unw,
         #     'coherence'  : /dirPathToFile/filt_fine.cor,
-        #     'ionoPhase'  : /dirPathToFile/iono.bil,
         #     ...
         # }
         # All path of data file must contain the reference and secondary date, in file/dir name.
@@ -478,7 +517,6 @@ def read_inps_dict2ifgram_stack_dict_obj
 
                 if len(dsPath2) > 0:
                     ifgramPathDict[dsName] = dsPath2[0]
-
                 else:
                     print('WARNING: {:>18} file missing for pair {}'.format(dsName, date6s))
 
@@ -496,17 +534,23 @@ def read_inps_dict2ifgram_stack_dict_obj
     return stackObj
 
 
-def read_inps_dict2geometry_dict_object(iDict):
+def read_inps_dict2geometry_dict_object(iDict, dset_name2template_key):
+    """Read input arguments into geometryDict object(s).
+
+    Parameters: iDict                  - dict, input arguments from command line & template file
+                dset_name2template_key - dict, to relate the HDF5 dataset name to the template key
+    Returns:    geomGeoObj             - geometryDict object in geo   coordinates or None
+                geomRadarObj           - geometryDict object in radar coordinates or None
+    """
 
     # eliminate lookup table dsName for input files in radar-coordinates
     if iDict['processor'] in ['isce', 'doris']:
         # for processors with lookup table in radar-coordinates, remove azimuth/rangeCoord
-        iDict['ds_name2key'].pop('azimuthCoord')
-        iDict['ds_name2key'].pop('rangeCoord')
+        dset_name2template_key.pop('azimuthCoord')
+        dset_name2template_key.pop('rangeCoord')
     elif iDict['processor'] in ['roipac', 'gamma']:
         # for processors with lookup table in geo-coordinates, remove latitude/longitude
-        iDict['ds_name2key'].pop('latitude')
-        iDict['ds_name2key'].pop('longitude')
+        dset_name2template_key.pop('latitude')
+        dset_name2template_key.pop('longitude')
     elif iDict['processor'] in ['aria', 'gmtsar', 'hyp3', 'snap', 'cosicorr']:
         # for processors with geocoded products support only, do nothing for now.
         # check again when adding support for products in radar coordinates
@@ -518,11 +562,10 @@ def read_inps_dict2geometry_dict_object(
     print('-'*50)
     print('searching geometry files info')
     print('input data files:')
-    maxDigit = max([len(i) for i in list(iDict['ds_name2key'].keys())])
+    max_digit = max([len(i) for i in list(dset_name2template_key.keys())])
     dsPathDict = {}
-    for dsName in [i for i in geometryDatasetNames
-                   if i in iDict['ds_name2key'].keys()]:
-        key = iDict['ds_name2key'][dsName]
+    for dsName in [i for i in geometryDatasetNames if i in dset_name2template_key.keys()]:
+        key = dset_name2template_key[dsName]
         if key in iDict.keys():
             files = sorted(glob.glob(str(iDict[key])))
             if len(files) > 0:
@@ -532,33 +575,30 @@ def read_inps_dict2geometry_dict_object(
                         date = ptime.yyyymmdd(os.path.basename(os.path.dirname(file)))
                         bperpDict[date] = file
                     dsPathDict[dsName] = bperpDict
-                    print('{:<{width}}: {path}'.format(dsName,
-                                                       width=maxDigit,
-                                                       path=iDict[key]))
-                    print('number of bperp files: {}'.format(len(list(bperpDict.keys()))))
+                    print(f'{dsName:<{max_digit}}: {iDict[key]}')
+                    print(f'number of bperp files: {len(list(bperpDict.keys()))}')
                 else:
                     dsPathDict[dsName] = files[0]
-                    print('{:<{width}}: {path}'.format(dsName,
-                                                       width=maxDigit,
-                                                       path=files[0]))
+                    print(f'{dsName:<{max_digit}}: {files[0]}')
 
     # Check required dataset
     dsName0 = geometryDatasetNames[0]
     if dsName0 not in dsPathDict.keys():
         print('WARNING: No required {} data files found!'.format(dsName0))
 
-    # metadata
-    ifgramMetaGeo = None
-    ifgramMetaRadar = None
-    ifgramKey = iDict['ds_name2key']['unwrapPhase']
-    if ifgramKey in iDict.keys():
-        ifgramFiles = glob.glob(str(iDict[ifgramKey]))
-        if len(ifgramFiles) > 0:
-            atr = readfile.read_attribute(ifgramFiles[0])
+    # extra metadata from observations
+    # e.g. EARTH_RADIUS, HEIGHT, etc.
+    obsMetaGeo = None
+    obsMetaRadar = None
+    for obsName in OBS_DSET_NAMES:
+        obsFiles = sorted(glob.glob(iDict[dset_name2template_key[obsName]]))
+        if len(obsFiles) > 0:
+            atr = readfile.read_attribute(obsFiles[0])
             if 'Y_FIRST' in atr.keys():
-                ifgramMetaGeo = atr.copy()
+                obsMetaGeo = atr.copy()
             else:
-                ifgramMetaRadar = atr.copy()
+                obsMetaRadar = atr.copy()
+            break
 
     # dsPathDict --> dsGeoPathDict + dsRadarPathDict
     dsNameList = list(dsPathDict.keys())
@@ -574,56 +614,93 @@ def read_inps_dict2geometry_dict_object(
         else:
             dsRadarPathDict[dsName] = dsPathDict[dsName]
 
-    geomRadarObj = None
     geomGeoObj = None
-    if len(dsRadarPathDict) > 0:
-        geomRadarObj = geometryDict(processor=iDict['processor'],
-                                    datasetDict=dsRadarPathDict,
-                                    extraMetadata=ifgramMetaRadar)
+    geomRadarObj = None
     if len(dsGeoPathDict) > 0:
-        geomGeoObj = geometryDict(processor=iDict['processor'],
-                                  datasetDict=dsGeoPathDict,
-                                  extraMetadata=ifgramMetaGeo)
-    return geomRadarObj, geomGeoObj
+        geomGeoObj = geometryDict(
+            processor=iDict['processor'],
+            datasetDict=dsGeoPathDict,
+            extraMetadata=obsMetaGeo)
+    if len(dsRadarPathDict) > 0:
+        geomRadarObj = geometryDict(
+            processor=iDict['processor'],
+            datasetDict=dsRadarPathDict,
+            extraMetadata=obsMetaRadar)
+
+    return geomGeoObj, geomRadarObj
+
+
+#################################################################
+def run_or_skip(outFile, inObj, box, updateMode=True, xstep=1, ystep=1, geom_obj=None):
+    """Check if re-writing is necessary.
+
+    Do not write the HDF5 file if ALL of the following are met:
+        1. HDF5 file exists and is readable,
+        2. HDF5 file contains all the datasets with the same size,
+        3. For ifgramStackDict, HDF5 file contains all date12.
+
+    Parameters: outFile    - str, path to the output HDF5 file
+                inObj      - ifgramStackDict or geometryDict, object to write
+                box        - tuple of int, bounding box in (x0, y0, x1, y1)
+                updateMode - bool
+                x/ystep    - int
+                geom_obj   - geometryDict object or None, for ionosphere only
+    Returns:    flag       - str, run or skip
+    """
+
+    flag = 'run'
 
+    # skip if there is no dict object to write
+    if not inObj:
+        flag = 'skip'
+        return flag
+
+    # run if not in update mode
+    if not updateMode:
+        return flag
+
+    if ut.run_or_skip(outFile, check_readable=True) == 'skip':
+        kwargs = dict(box=box, xstep=xstep, ystep=ystep)
 
-def update_object(outFile, inObj, box, updateMode=True, xstep=1, ystep=1):
-    """Do not write h5 file if: 1) h5 exists and readable,
-                                2) it contains all date12 from ifgramStackDict,
-                                            or all datasets from geometryDict"""
-    write_flag = True
-    if updateMode and ut.run_or_skip(outFile, check_readable=True) == 'skip':
         if inObj.name == 'ifgramStack':
-            in_size = inObj.get_size(box=box, xstep=xstep, ystep=ystep)[1:]
+            in_size = inObj.get_size(geom_obj=geom_obj, **kwargs)[1:]
+            in_dset_list = inObj.get_dataset_list()
             in_date12_list = inObj.get_date12_list()
 
             outObj = ifgramStack(outFile)
-            out_size = outObj.get_size()[1:]
-            out_date12_list = outObj.get_date12_list(dropIfgram=False)
+            outObj.open(print_msg=False)
+            out_size = (outObj.length, outObj.width)
+            out_dset_list = outObj.datasetNames
+            out_date12_list = outObj.date12List
 
-            if out_size == in_size and set(in_date12_list).issubset(set(out_date12_list)):
+            if (out_size == in_size
+                    and set(in_dset_list).issubset(set(out_dset_list))
+                    and set(in_date12_list).issubset(set(out_date12_list))):
                 print(('All date12   exists in file {} with same size as required,'
                        ' no need to re-load.'.format(os.path.basename(outFile))))
-                write_flag = False
+                flag = 'skip'
 
         elif inObj.name == 'geometry':
-            in_size = inObj.get_size(box=box, xstep=xstep, ystep=ystep)
+            in_size = inObj.get_size(**kwargs)
             in_dset_list = inObj.get_dataset_list()
 
             outObj = geometry(outFile)
             outObj.open(print_msg=False)
-            out_size = outObj.get_size()
+            out_size = (outObj.length, outObj.width)
             out_dset_list = outObj.datasetNames
 
-            if out_size == in_size and set(in_dset_list).issubset(set(out_dset_list)):
+            if (out_size == in_size
+                    and set(in_dset_list).issubset(set(out_dset_list))):
                 print(('All datasets exists in file {} with same size as required,'
                        ' no need to re-load.'.format(os.path.basename(outFile))))
-                write_flag = False
+                flag = 'skip'
 
-    return write_flag
+    return flag
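In update mode, the function only skips the write when the existing file matches the dict object in size, dataset names and (for stacks) date12 list. A hedged usage sketch, where the stack object and box values are hypothetical:

    flag = run_or_skip(
        outFile='./inputs/ifgramStack.h5',
        inObj=stack_obj,                 # an ifgramStackDict instance (hypothetical)
        box=(200, 150, 800, 600),        # (x0, y0, x1, y1), made-up subset
        updateMode=True)
    if flag == 'run':
        print('(re-)writing ./inputs/ifgramStack.h5 ...')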
 
 
 def prepare_metadata(iDict):
+    """Prepare metadata via prep_{insar_processor}.py scripts."""
+
     processor = iDict['processor']
     script_name = 'prep_{}.py'.format(processor)
     print('-'*50)
@@ -643,7 +720,10 @@ def prepare_metadata(iDict):
             from mintpy import prep_cosicorr as prep_module
 
         # run prep_{processor} module
-        for key in [i for i in iDict.keys() if (i.startswith('mintpy.load.') and i.endswith('File') and i != 'mintpy.load.metaFile')]:
+        for key in [i for i in iDict.keys()
+                    if (i.startswith('mintpy.load.')
+                        and i.endswith('File')
+                        and i != 'mintpy.load.metaFile')]:
             if len(glob.glob(str(iDict[key]))) > 0:
                 # print command line
                 script_name = '{}.py'.format(os.path.basename(prep_module.__name__).split('.')[-1])
@@ -652,15 +732,15 @@ def prepare_metadata(iDict):
                     iargs += ['--sensor', iDict['PLATFORM'].lower()]
                 elif processor == 'cosicorr':
                     iargs += ['--metadata', iDict['mintpy.load.metaFile']]
-                print(script_name, ' '.join(iargs))
+                ut.print_command_line(script_name, iargs)
                 # run
                 prep_module.main(iargs)
 
     elif processor == 'isce':
         from mintpy import prep_isce
-        from mintpy.utils.isce_utils import get_processor
+        from mintpy.utils import s1_utils, isce_utils
 
-        # metadata
+        # --meta-file
         meta_files = sorted(glob.glob(iDict['mintpy.load.metaFile']))
         if len(meta_files) > 0:
             meta_file = meta_files[0]
@@ -668,53 +748,58 @@ def prepare_metadata(iDict):
             warnings.warn('No input metadata file found: {}'.format(iDict['mintpy.load.metaFile']))
             meta_file = 'auto'
 
-        # auxliary data
+        # --baseline-dir / --geometry-dir
         baseline_dir = iDict['mintpy.load.baselineDir']
         geom_dir = os.path.dirname(iDict['mintpy.load.demFile'])
 
-        # observation
-        obs_keys = ['mintpy.load.unwFile', 'mintpy.load.rgOffFile', 'mintpy.load.azOffFile']
-        obs_keys = [i for i in obs_keys if i in iDict.keys()]
+        # --dset-dir / --file-pattern
+        obs_keys = [
+            'mintpy.load.unwFile',
+            'mintpy.load.ionUnwFile',
+            'mintpy.load.rgOffFile',
+            'mintpy.load.azOffFile',
+        ]
         obs_paths = [iDict[key] for key in obs_keys if iDict[key].lower() != 'auto']
-        if len(obs_paths) > 0:
-            processor = get_processor(meta_file) if os.path.isfile(meta_file) else 'topsStack'
-            if processor == 'alosStack':
-                obs_dir = os.path.dirname(obs_paths[0])
-            else:
-                obs_dir = os.path.dirname(os.path.dirname(obs_paths[0]))
-            obs_file = os.path.basename(obs_paths[0])
-        else:
-            obs_dir = None
-            obs_file = None
+        obs_paths = [x for x in obs_paths if len(glob.glob(x)) > 0]
 
-        # geometry
+        # --geom-files for the basenames only
         geom_names = ['dem', 'lookupY', 'lookupX', 'incAngle', 'azAngle', 'shadowMask', 'waterMask']
         geom_keys = ['mintpy.load.{}File'.format(i) for i in geom_names]
         geom_files = [os.path.basename(iDict[key]) for key in geom_keys
-                      if (iDict[key] and iDict[key].lower() != 'auto')]
+                      if (iDict.get(key, 'auto') != 'auto')]
 
         # compose list of input arguments
         iargs = ['-m', meta_file, '-g', geom_dir]
         if baseline_dir:
             iargs += ['-b', baseline_dir]
-        if obs_dir is not None:
-            iargs += ['-d', obs_dir, '-f', obs_file]
+        if len(obs_paths) > 0:
+            iargs += ['-f'] + obs_paths
         if geom_files:
             iargs += ['--geom-files'] + geom_files
 
         # run module
-        print('prep_isce.py', ' '.join(iargs))
+        ut.print_command_line(script_name, iargs)
         try:
             prep_isce.main(iargs)
         except:
             warnings.warn('prep_isce.py failed. Assuming its result exists and continue...')
 
+        # [optional] for topsStack: SAFE_files.txt --> S1A/B_date.txt
+        if os.path.isfile(meta_file) and isce_utils.get_processor(meta_file) == 'topsStack':
+            safe_list_file = os.path.join(os.path.dirname(os.path.dirname(meta_file)), 'SAFE_files.txt')
+            if os.path.isfile(safe_list_file):
+                s1_utils.get_s1ab_date_list_file(
+                    mintpy_dir=os.getcwd(),
+                    safe_list_file=safe_list_file,
+                    print_msg=True)
+
     elif processor == 'aria':
         from mintpy import prep_aria
 
         ## compose input arguments
         # use the default template file if exists & input
-        default_temp_files = [fname for fname in iDict['template_file'] if fname.endswith('smallbaselineApp.cfg')]
+        default_temp_files = [fname for fname in iDict['template_file']
+                              if fname.endswith('smallbaselineApp.cfg')]
         if len(default_temp_files) > 0:
             temp_file = default_temp_files[0]
         else:
@@ -750,7 +835,7 @@ def prepare_metadata(iDict):
             iargs += ['--update']
 
         ## run
-        print('prep_aria.py', ' '.join(iargs))
+        ut.print_command_line(script_name, iargs)
         try:
             prep_aria.main(iargs)
         except:
@@ -760,13 +845,14 @@ def prepare_metadata(iDict):
         from mintpy import prep_gmtsar
 
         # use the custom template file if exists & input
-        custom_temp_files = [fname for fname in iDict['template_file'] if not fname.endswith('smallbaselineApp.cfg')]
+        custom_temp_files = [fname for fname in iDict['template_file']
+                             if not fname.endswith('smallbaselineApp.cfg')]
         if len(custom_temp_files) == 0:
             raise FileExistsError('Custom template file NOT found and is required for GMTSAR!')
 
         # run prep_*.py
-        iargs = [custom_temp_files[0], '--mintpy-dir', os.path.dirname(iDict['outdir'])]
-        print('prep_gmtsar.py', ' '.join(iargs))
+        iargs = [custom_temp_files[0]]
+        ut.print_command_line(script_name, iargs)
         try:
             prep_gmtsar.main(iargs)
         except:
@@ -780,28 +866,12 @@ def prepare_metadata(iDict):
     return
 
 
-def print_write_setting(iDict):
-    updateMode = iDict['updateMode']
-    comp = iDict['compression']
-    print('-'*50)
-    print('updateMode : {}'.format(updateMode))
-    print('compression: {}'.format(comp))
-    print('x/ystep: {}/{}'.format(iDict['xstep'], iDict['ystep']))
-
-    # box
-    box = iDict['box']
-    # box for geometry file in geo-coordinates
-    if not iDict.get('geocoded', False):
-        boxGeo = iDict['box4geo_lut']
-    else:
-        boxGeo = box
-
-    return updateMode, comp, box, boxGeo
-
-
 def get_extra_metadata(iDict):
     """Extra metadata with key names in MACRO_CASE to be written into stack file.
-    E.g.: PROJECT_NAME, PLATFORM, ORBIT_DIRECTION, SUBSET_X/YMIN, etc.
+
+    Parameters: iDict     - dict, input arguments from command line & template file
+    Returns:    extraDict - dict, extra metadata from template file:
+                            E.g. PROJECT_NAME, PLATFORM, ORBIT_DIRECTION, SUBSET_X/YMIN, etc.
     """
     extraDict = {}
     # all keys in MACRO_CASE
@@ -824,70 +894,91 @@ def main(iargs=None):
     # read input options
     iDict = read_inps2dict(inps)
 
-    # prepare metadata
+    ## prepare metadata
     prepare_metadata(iDict)
+    extraDict = get_extra_metadata(iDict)
 
     # skip data writing for aria as it is included in prep_aria
     if iDict['processor'] == 'aria':
         return
 
+    ## search & write data files
+    print('-'*50)
+    print('updateMode : {}'.format(iDict['updateMode']))
+    print('compression: {}'.format(iDict['compression']))
+    print('multilook x/ystep: {}/{}'.format(iDict['xstep'], iDict['ystep']))
+    print('multilook method : {}'.format(iDict['method']))
+    kwargs = dict(updateMode=iDict['updateMode'], xstep=iDict['xstep'], ystep=iDict['ystep'])
+
+    # read subset info [need the metadata from above]
     iDict = read_subset_box(iDict)
-    extraDict = get_extra_metadata(iDict)
 
-    # initiate objects
-    stackObj = read_inps_dict2ifgram_stack_dict_object(iDict)
-    geomRadarObj, geomGeoObj = read_inps_dict2geometry_dict_object(iDict)
-
-    # prepare write
-    updateMode, comp, box, boxGeo = print_write_setting(iDict)
-    if any([stackObj, geomRadarObj, geomGeoObj]) and not os.path.isdir(inps.outdir):
-        os.makedirs(inps.outdir)
-        print('create directory: {}'.format(inps.outdir))
-
-    # write
-    if stackObj and update_object(inps.outfile[0], stackObj, box,
-                                  updateMode=updateMode,
-                                  xstep=iDict['xstep'],
-                                  ystep=iDict['ystep']):
-        print('-'*50)
-        stackObj.write2hdf5(outputFile=inps.outfile[0],
-                            access_mode='w',
-                            box=box,
-                            xstep=iDict['xstep'],
-                            ystep=iDict['ystep'],
-                            compression=comp,
-                            extra_metadata=extraDict)
-
-    if geomRadarObj and update_object(inps.outfile[1], geomRadarObj, box,
-                                      updateMode=updateMode,
-                                      xstep=iDict['xstep'],
-                                      ystep=iDict['ystep']):
-        print('-'*50)
-        geomRadarObj.write2hdf5(outputFile=inps.outfile[1],
-                                access_mode='w',
-                                box=box,
-                                xstep=iDict['xstep'],
-                                ystep=iDict['ystep'],
-                                compression='lzf',
-                                extra_metadata=extraDict)
-
-    if geomGeoObj and update_object(inps.outfile[2], geomGeoObj, boxGeo,
-                                    updateMode=updateMode,
-                                    xstep=iDict['xstep'],
-                                    ystep=iDict['ystep']):
-        print('-'*50)
-        geomGeoObj.write2hdf5(outputFile=inps.outfile[2],
-                              access_mode='w',
-                              box=boxGeo,
-                              xstep=iDict['xstep'],
-                              ystep=iDict['ystep'],
-                              compression='lzf')
+    # geometry in geo / radar coordinates
+    geom_dset_name2template_key = {
+        **GEOM_DSET_NAME2TEMPLATE_KEY,
+        **IFG_DSET_NAME2TEMPLATE_KEY,
+        **OFF_DSET_NAME2TEMPLATE_KEY,
+    }
+    geom_geo_obj, geom_radar_obj = read_inps_dict2geometry_dict_object(iDict, geom_dset_name2template_key)
+    geom_geo_file = os.path.abspath('./inputs/geometryGeo.h5')
+    geom_radar_file = os.path.abspath('./inputs/geometryRadar.h5')
+
+    if run_or_skip(geom_geo_file, geom_geo_obj, iDict['box4geo'], **kwargs) == 'run':
+        geom_geo_obj.write2hdf5(
+            outputFile=geom_geo_file,
+            access_mode='w',
+            box=iDict['box4geo'],
+            xstep=iDict['xstep'],
+            ystep=iDict['ystep'],
+            compression='lzf')
+
+    if run_or_skip(geom_radar_file, geom_radar_obj, iDict['box'], **kwargs) == 'run':
+        geom_radar_obj.write2hdf5(
+            outputFile=geom_radar_file,
+            access_mode='w',
+            box=iDict['box'],
+            xstep=iDict['xstep'],
+            ystep=iDict['ystep'],
+            compression='lzf',
+            extra_metadata=extraDict)
+
+    # observations: ifgram, ion or offset
+    # loop over obs stacks
+    stack_ds_name2tmpl_key_list = [
+        IFG_DSET_NAME2TEMPLATE_KEY,
+        ION_DSET_NAME2TEMPLATE_KEY,
+        OFF_DSET_NAME2TEMPLATE_KEY,
+    ]
+    stack_files = ['ifgramStack.h5', 'ionStack.h5', 'offsetStack.h5']
+    stack_files = [os.path.abspath(os.path.join('./inputs', x)) for x in stack_files]
+    for ds_name2tmpl_opt, stack_file in zip(stack_ds_name2tmpl_key_list, stack_files):
+
+        # initiate dict objects
+        stack_obj = read_inps_dict2ifgram_stack_dict_object(iDict, ds_name2tmpl_opt)
+
+        # use geom_obj as size reference while loading ionosphere
+        geom_obj = None
+        if os.path.basename(stack_file).startswith('ion'):
+            geom_obj = geom_geo_obj if iDict['geocoded'] else geom_radar_obj
+
+        # write dict objects to HDF5 files
+        if run_or_skip(stack_file, stack_obj, iDict['box'], geom_obj=geom_obj, **kwargs) == 'run':
+            stack_obj.write2hdf5(
+                outputFile=stack_file,
+                access_mode='w',
+                box=iDict['box'],
+                xstep=iDict['xstep'],
+                ystep=iDict['ystep'],
+                mli_method=iDict['method'],
+                compression=iDict['compression'],
+                extra_metadata=extraDict,
+                geom_obj=geom_obj)
 
     # time info
     m, s = divmod(time.time()-start_time, 60)
     print('time used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s))
 
-    return inps.outfile
+    return
 
 
 #################################################################
diff -pruN 1.3.3-2/mintpy/load_gbis.py 1.4.0-1/mintpy/load_gbis.py
--- 1.3.3-2/mintpy/load_gbis.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/load_gbis.py	2022-08-04 20:01:49.000000000 +0000
@@ -8,7 +8,6 @@
 
 import os
 import sys
-import argparse
 import numpy as np
 import scipy.io as sio
 import matplotlib.pyplot as plt
@@ -17,17 +16,21 @@ import warnings
 warnings.filterwarnings("ignore", category=UserWarning, module="matplotlib")
 
 from mintpy.utils import writefile
+from mintpy.utils.arg_utils import create_argument_parser
 
 
+##############################################################################
 EXAMPLE = """example:
   load_gbis.py invert_1_2_C.mat
   load_gbis.py invert_1_2_C.mat --nodisplay
 """
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Load GBIS inversion result to HDF5 format.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Load GBIS inversion result to HDF5 format.'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', help='GBIS inversion mat file.')
     parser.add_argument('-o', '--output', dest='outfile', help='output file name.')
@@ -47,6 +50,7 @@ def cmd_line_parse(iargs=None):
     return inps
 
 
+##############################################################################
 def grab_data_paths_from_inp_file(inp_file):
     """Grab data paths from inp file."""
     data_paths = []
@@ -161,11 +165,11 @@ def gbis_mat2hdf5(inv_mat_file, display=
 def main(iargs=None):
     inps = cmd_line_parse(iargs)
 
-    out_files = gbis_mat2hdf5(inps.file, display=inps.disp_fig)
+    gbis_mat2hdf5(inps.file, display=inps.disp_fig)
 
-    return out_files
+    return
 
 
-##########################################################################
+##############################################################################
 if __name__ == '__main__':
     main(sys.argv[1:])
diff -pruN 1.3.3-2/mintpy/local_oscilator_drift.py 1.4.0-1/mintpy/local_oscilator_drift.py
--- 1.3.3-2/mintpy/local_oscilator_drift.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/local_oscilator_drift.py	2022-08-04 20:01:49.000000000 +0000
@@ -12,31 +12,34 @@
 
 import os
 import sys
-import argparse
 import numpy as np
+
 from mintpy.objects import timeseries
 from mintpy.defaults.template import get_template_content
 from mintpy.utils import readfile, writefile, ptime
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 #########################################################################################
 TEMPLATE = get_template_content('correct_LOD')
 
-EXAMPLE = """example:
-  local_oscilator_drift.py  timeseries.h5                 inputs/geometryRadar.h5
-  local_oscilator_drift.py  filt_101020_110220_4rlks.unw  inputs/geometryRadar.h5
-"""
-
 REFERENCE = """reference:
   Marinkovic, P., and Y. Larsen (2013), Consequences of long-term ASAR local oscillator 
   frequency decay - An empirical study of 10 years of data, in Living Planet Symposium,
   Edinburgh, U.K.
 """
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Local Oscilator Drift (LOD) correction of Envisat',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog='{}\n{}\n{}'.format(REFERENCE, TEMPLATE, EXAMPLE))
+EXAMPLE = """example:
+  local_oscilator_drift.py  timeseries.h5                 inputs/geometryRadar.h5
+  local_oscilator_drift.py  filt_101020_110220_4rlks.unw  inputs/geometryRadar.h5
+"""
+
+def create_parser(subparsers=None):
+    synopsis = 'Local Oscillator Drift (LOD) correction of Envisat'
+    epilog = REFERENCE + '\n' + TEMPLATE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument(dest='file', help='timeseries / interferograms file, i.e. timeseries.h5')
     parser.add_argument(dest='range_dist_file',
diff -pruN 1.3.3-2/mintpy/lookup_geo2radar.py 1.4.0-1/mintpy/lookup_geo2radar.py
--- 1.3.3-2/mintpy/lookup_geo2radar.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/lookup_geo2radar.py	2022-08-04 20:01:49.000000000 +0000
@@ -8,12 +8,13 @@
 
 import os
 import sys
-import argparse
 import h5py
 import numpy as np
-from mintpy.utils import readfile
 from scipy.interpolate import griddata
 
+from mintpy.utils import readfile
+from mintpy.utils.arg_utils import create_argument_parser
+
 try:
     from tqdm import tqdm
 except ImportError:
@@ -25,12 +26,29 @@ except ImportError:
     raise ImportError('Can not import concurrent!')
 
 
+################################################################################
 EXAMPLE = '''examples:
     lookup_geo2radar.py geometryGeo.h5 
     lookup_geo2radar.py geometryGeo.h5 -w geometryRadar.h5 
     lookup_geo2radar.py geometryGeo.h5 -w geometryRadar.h5 --parallel 4
 '''
 
+def create_parser(subparsers=None):
+    synopsis = 'Convert lookup table from geo-coord (GAMMA, ROI_PAC) into radar-coord (ISCE)'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
+
+    parser.add_argument('geometryGeo', help='geometryGeo file which includes the geo-coordinate based lookup table')
+    parser.add_argument('-w', '--write', dest='write', metavar='FILE', default='geometryRadar.h5',
+                        help='update geometryRadar.h5 file by adding the radar-coordinate based lookup table.')
+    parser.add_argument('--parallel', dest='parallelNumb', type=int, metavar='NUM', default=1,
+                        help='enable parallel processing and specify the number of processors to use [default: 1].')
+
+    return parser
+
+
 def write_h5(datasetDict, out_file, metadata=None, ref_file=None, compression=None):
 
     if os.path.isfile(out_file):
@@ -150,18 +168,8 @@ def function(data0):
 
 
 def cmd_line_parse(iargs=None):
-    parser = argparse.ArgumentParser(description='Convert lookup table from geo-coord (GAMMA, ROI_PAC) into radar-coord (ISCE)',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
-
-    parser.add_argument('geometryGeo',help='geometryGeo file which includes geo-coordinates based lookup-table')
-    parser.add_argument('-w','--write', dest='write', metavar='FILE', default = 'geometryRadar.h5',
-                      help='update geometryRadar.h5 file by adding the radar-coordinates based lookup-table.')
-    parser.add_argument('--parallel', dest='parallelNumb', type=int, metavar='NUM',default = 1,
-                      help='Enable parallel processing and specify the the used processor number.[default: 1]')
-
+    parser = create_parser()
     inps = parser.parse_args(args=iargs)
-
     return inps
 
 
diff -pruN 1.3.3-2/mintpy/__main__.py 1.4.0-1/mintpy/__main__.py
--- 1.3.3-2/mintpy/__main__.py	1970-01-01 00:00:00.000000000 +0000
+++ 1.4.0-1/mintpy/__main__.py	2022-08-04 20:01:49.000000000 +0000
@@ -0,0 +1,646 @@
+############################################################
+# Program is part of MintPy                                #
+# Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi         #
+# Author: Antonio Valentino, Aug 2022                      #
+############################################################
+
+
+"""Command line interface for MintPy.
+
+The Miami INsar Time-series software in PYthon (MintPy as /mint pai/)
+is an open-source package for Interferometric Synthetic Aperture Radar
+(InSAR) time series analysis.
+
+It reads the stack of interferograms (coregistered and unwrapped) in
+ISCE, ARIA, FRInGE, HyP3, GMTSAR, SNAP, GAMMA or ROI_PAC format, and
+produces three dimensional (2D in space and 1D in time) ground surface
+displacement in line-of-sight direction.
+
+It includes a routine time series analysis (`smallbaselineApp.py`) and
+some independent tools.
+
+This is research code provided to you "as is" with NO WARRANTIES OF
+CORRECTNESS. Use at your own risk.
+"""
+
+# PYTHON_ARGCOMPLETE_OK
+
+import sys
+import logging
+import argparse
+
+try:
+    from os import EX_OK
+except ImportError:
+    EX_OK = 0
+EX_FAILURE = 1
+EX_INTERRUPT = 130
+
+
+from . import __version__
+
+PROG = __package__
+LOGFMT = '%(asctime)s %(levelname)-8s -- %(message)s'
+
+
+
+################################################################################################
+def _autocomplete(parser):
+    try:
+        import argcomplete
+    except ImportError:
+        pass
+    else:
+        argcomplete.autocomplete(parser)
+
+
+#######################################  Sub-Parsers  ##########################################
+# in alphabetical order
+# A-G
+def get_add_parser(subparsers=None):
+    from . import add
+    parser = add.create_parser(subparsers)
+    parser.set_defaults(func=add.main)
+    return parser
+
+
+def get_asc_desc2horz_vert_parser(subparsers=None):
+    from . import asc_desc2horz_vert
+    parser = asc_desc2horz_vert.create_parser(subparsers)
+    parser.set_defaults(func=asc_desc2horz_vert.main)
+    return parser
+
+
+def get_bulk_plate_motion_parser(subparsers=None):
+    from . import bulk_plate_motion
+    parser = bulk_plate_motion.create_parser(subparsers)
+    parser.set_defaults(func=bulk_plate_motion.main)
+    return parser
+
+
+def get_closure_phase_bias_parser(subparsers=None):
+    from . import closure_phase_bias
+    parser = closure_phase_bias.create_parser(subparsers)
+    parser.set_defaults(func=closure_phase_bias.main)
+    return parser
+
+
+def get_dem_error_parser(subparsers=None):
+    from . import dem_error
+    parser = dem_error.create_parser(subparsers)
+    parser.set_defaults(func=dem_error.main)
+    return parser
+
+
+def get_dem_gsi_parser(subparsers=None):
+    from . import dem_gsi
+    parser = dem_gsi.create_parser(subparsers)
+    parser.set_defaults(func=dem_gsi.main)
+    return parser
+
+
+def get_diff_parser(subparsers=None):
+    from . import diff
+    parser = diff.create_parser(subparsers)
+    parser.set_defaults(func=diff.main)
+    return parser
+
+
+def get_generate_mask_parser(subparsers=None):
+    from . import generate_mask
+    parser = generate_mask.create_parser(subparsers)
+    parser.set_defaults(func=generate_mask.main)
+    return parser
+
+
+def get_geocode_parser(subparsers=None):
+    from . import geocode
+    parser = geocode.create_parser(subparsers)
+    parser.set_defaults(func=geocode.main)
+    return parser
+
+
+# H-N
+def get_ifgram_inversion_parser(subparsers=None):
+    from . import ifgram_inversion
+    parser = ifgram_inversion.create_parser(subparsers)
+    parser.set_defaults(func=ifgram_inversion.main)
+    return parser
+
+
+def get_image_math_parser(subparsers=None):
+    from . import image_math
+    parser = image_math.create_parser(subparsers)
+    parser.set_defaults(func=image_math.main)
+    return parser
+
+
+def get_image_stitch_parser(subparsers=None):
+    from . import image_stitch
+    parser = image_stitch.create_parser(subparsers)
+    parser.set_defaults(func=image_stitch.main)
+    return parser
+
+
+def get_info_parser(subparsers=None):
+    from . import info
+    parser = info.create_parser(subparsers)
+    parser.set_defaults(func=info.main)
+    return parser
+
+
+def get_iono_tec_parser(subparsers=None):
+    from . import iono_tec
+    parser = iono_tec.create_parser(subparsers)
+    parser.set_defaults(func=iono_tec.main)
+    return parser
+
+
+def get_mask_parser(subparsers=None):
+    from . import mask
+    parser = mask.create_parser(subparsers)
+    parser.set_defaults(func=mask.main)
+    return parser
+
+
+def get_multilook_parser(subparsers=None):
+    from . import multilook
+    parser = multilook.create_parser(subparsers)
+    parser.set_defaults(func=multilook.main)
+    return parser
+
+
+def get_load_data_parser(subparsers=None):
+    from . import load_data
+    parser = load_data.create_parser(subparsers)
+    parser.set_defaults(func=load_data.main)
+    return parser
+
+
+def get_load_gbis_parser(subparsers=None):
+    from . import load_gbis
+    parser = load_gbis.create_parser(subparsers)
+    parser.set_defaults(func=load_gbis.main)
+    return parser
+
+
+def get_local_oscilator_drift_parser(subparsers=None):
+    from . import local_oscilator_drift
+    parser = local_oscilator_drift.create_parser(subparsers)
+    parser.set_defaults(func=local_oscilator_drift.main)
+    return parser
+
+
+def get_lookup_geo2radar_parser(subparsers=None):
+    from . import lookup_geo2radar
+    parser = lookup_geo2radar.create_parser(subparsers)
+    parser.set_defaults(func=lookup_geo2radar.main)
+    return parser
+
+
+def get_modify_network_parser(subparsers=None):
+    from . import modify_network
+    parser = modify_network.create_parser(subparsers)
+    parser.set_defaults(func=modify_network.main)
+    return parser
+
+
+# O-Q
+def get_plot_coherence_matrix_parser(subparsers=None):
+    from . import plot_coherence_matrix
+    parser = plot_coherence_matrix.create_parser(subparsers)
+    parser.set_defaults(func=plot_coherence_matrix.main)
+    return parser
+
+
+def get_plot_network_parser(subparsers=None):
+    from . import plot_network
+    parser = plot_network.create_parser(subparsers)
+    parser.set_defaults(func=plot_network.main)
+    return parser
+
+
+def get_plot_transection_parser(subparsers=None):
+    from . import plot_transection
+    parser = plot_transection.create_parser(subparsers)
+    parser.set_defaults(func=plot_transection.main)
+    return parser
+
+
+def get_prep_aria_parser(subparsers=None):
+    from . import prep_aria
+    parser = prep_aria.create_parser(subparsers)
+    parser.set_defaults(func=prep_aria.main)
+    return parser
+
+
+def get_prep_cosicorr_parser(subparsers=None):
+    from . import prep_cosicorr
+    parser = prep_cosicorr.create_parser(subparsers)
+    parser.set_defaults(func=prep_cosicorr.main)
+    return parser
+
+
+def get_prep_fringe_parser(subparsers=None):
+    from . import prep_fringe
+    parser = prep_fringe.create_parser(subparsers)
+    parser.set_defaults(func=prep_fringe.main)
+    return parser
+
+
+def get_prep_gamma_parser(subparsers=None):
+    from . import prep_gamma
+    parser = prep_gamma.create_parser(subparsers)
+    parser.set_defaults(func=prep_gamma.main)
+    return parser
+
+
+def get_prep_gmtsar_parser(subparsers=None):
+    from . import prep_gmtsar
+    parser = prep_gmtsar.create_parser(subparsers)
+    parser.set_defaults(func=prep_gmtsar.main)
+    return parser
+
+
+def get_prep_hyp3_parser(subparsers=None):
+    from . import prep_hyp3
+    parser = prep_hyp3.create_parser(subparsers)
+    parser.set_defaults(func=prep_hyp3.main)
+    return parser
+
+
+def get_prep_isce_parser(subparsers=None):
+    from . import prep_isce
+    parser = prep_isce.create_parser(subparsers)
+    parser.set_defaults(func=prep_isce.main)
+    return parser
+
+
+def get_prep_roipac_parser(subparsers=None):
+    from . import prep_roipac
+    parser = prep_roipac.create_parser(subparsers)
+    parser.set_defaults(func=prep_roipac.main)
+    return parser
+
+
+def get_prep_snap_parser(subparsers=None):
+    from . import prep_snap
+    parser = prep_snap.create_parser(subparsers)
+    parser.set_defaults(func=prep_snap.main)
+    return parser
+
+
+# R-T
+def get_remove_ramp_parser(subparsers=None):
+    from . import remove_ramp
+    parser = remove_ramp.create_parser(subparsers)
+    parser.set_defaults(func=remove_ramp.main)
+    return parser
+
+
+def get_reference_date_parser(subparsers=None):
+    from . import reference_date
+    parser = reference_date.create_parser(subparsers)
+    parser.set_defaults(func=reference_date.main)
+    return parser
+
+
+def get_reference_point_parser(subparsers=None):
+    from . import reference_point
+    parser = reference_point.create_parser(subparsers)
+    parser.set_defaults(func=reference_point.main)
+    return parser
+
+
+def get_remove_hdf5_dataset(subparsers=None):
+    from . import remove_hdf5_dataset
+    parser = remove_hdf5_dataset.create_parser(subparsers)
+    parser.set_defaults(func=remove_hdf5_dataset.main)
+    return parser
+
+
+def get_s1ab_range_bias_parser(subparsers=None):
+    from . import s1ab_range_bias
+    parser = s1ab_range_bias.create_parser(subparsers)
+    parser.set_defaults(func=s1ab_range_bias.main)
+    return parser
+
+
+def get_save_gbis_parser(subparsers=None):
+    from . import save_gbis
+    parser = save_gbis.create_parser(subparsers)
+    parser.set_defaults(func=save_gbis.main)
+    return parser
+
+
+def get_save_gdal_parser(subparsers=None):
+    from . import save_gdal
+    parser = save_gdal.create_parser(subparsers)
+    parser.set_defaults(func=save_gdal.main)
+    return parser
+
+
+def get_save_gmt_parser(subparsers=None):
+    from . import save_gmt
+    parser = save_gmt.create_parser(subparsers)
+    parser.set_defaults(func=save_gmt.main)
+    return parser
+
+
+def get_save_hdfeos5_parser(subparsers=None):
+    from . import save_hdfeos5
+    parser = save_hdfeos5.create_parser(subparsers)
+    parser.set_defaults(func=save_hdfeos5.main)
+    return parser
+
+
+def get_save_kite_parser(subparsers=None):
+    from . import save_kite
+    parser = save_kite.create_parser(subparsers)
+    parser.set_defaults(func=save_kite.main)
+    return parser
+
+
+def get_save_kmz_timeseries_parser(subparsers=None):
+    from . import save_kmz_timeseries
+    parser = save_kmz_timeseries.create_parser(subparsers)
+    parser.set_defaults(func=save_kmz_timeseries.main)
+    return parser
+
+
+def get_save_kmz_parser(subparsers=None):
+    from . import save_kmz
+    parser = save_kmz.create_parser(subparsers)
+    parser.set_defaults(func=save_kmz.main)
+    return parser
+
+
+def get_save_qgis_parser(subparsers=None):
+    from . import save_qgis
+    parser = save_qgis.create_parser(subparsers)
+    parser.set_defaults(func=save_qgis.main)
+    return parser
+
+
+def get_save_roipac_parser(subparsers=None):
+    from . import save_roipac
+    parser = save_roipac.create_parser(subparsers)
+    parser.set_defaults(func=save_roipac.main)
+    return parser
+
+
+def get_smallbaselineApp_parser(subparsers=None):
+    from . import smallbaselineApp
+    parser = smallbaselineApp.create_parser(subparsers)
+    parser.set_defaults(func=smallbaselineApp.main)
+    return parser
+
+
+def get_solid_earth_tides_parser(subparsers=None):
+    from . import solid_earth_tides
+    parser = solid_earth_tides.create_parser(subparsers)
+    parser.set_defaults(func=solid_earth_tides.main)
+    return parser
+
+
+def get_spatial_average_parser(subparsers=None):
+    from . import spatial_average
+    parser = spatial_average.create_parser(subparsers)
+    parser.set_defaults(func=spatial_average.main)
+    return parser
+
+
+def get_spatial_filter_parser(subparsers=None):
+    from . import spatial_filter
+    parser = spatial_filter.create_parser(subparsers)
+    parser.set_defaults(func=spatial_filter.main)
+    return parser
+
+
+def get_subset_parser(subparsers=None):
+    from . import subset
+    parser = subset.create_parser(subparsers)
+    parser.set_defaults(func=subset.main)
+    return parser
+
+
+def get_temporal_average_parser(subparsers=None):
+    from . import temporal_average
+    parser = temporal_average.create_parser(subparsers)
+    parser.set_defaults(func=temporal_average.main)
+    return parser
+
+
+def get_temporal_derivative_parser(subparsers=None):
+    from . import temporal_derivative
+    parser = temporal_derivative.create_parser(subparsers)
+    parser.set_defaults(func=temporal_derivative.main)
+    return parser
+
+
+def get_temporal_filter_parser(subparsers=None):
+    from . import temporal_filter
+    parser = temporal_filter.create_parser(subparsers)
+    parser.set_defaults(func=temporal_filter.main)
+    return parser
+
+
+def get_timeseries_rms_parser(subparsers=None):
+    from . import timeseries_rms
+    parser = timeseries_rms.create_parser(subparsers)
+    parser.set_defaults(func=timeseries_rms.main)
+    return parser
+
+
+def get_timeseries2velocity_parser(subparsers=None):
+    from . import timeseries2velocity
+    parser = timeseries2velocity.create_parser(subparsers)
+    parser.set_defaults(func=timeseries2velocity.main)
+    return parser
+
+
+def get_tropo_gacos_parser(subparsers=None):
+    from . import tropo_gacos
+    parser = tropo_gacos.create_parser(subparsers)
+    parser.set_defaults(func=tropo_gacos.main)
+    return parser
+
+
+def get_tropo_phase_elevation_parser(subparsers=None):
+    from . import tropo_phase_elevation
+    parser = tropo_phase_elevation.create_parser(subparsers)
+    parser.set_defaults(func=tropo_phase_elevation.main)
+    return parser
+
+
+def get_tropo_pyaps3_parser(subparsers=None):
+    from . import tropo_pyaps3
+    parser = tropo_pyaps3.create_parser(subparsers)
+    parser.set_defaults(func=tropo_pyaps3.main)
+    return parser
+
+
+def get_tsview_parser(subparsers=None):
+    from . import tsview
+    parser = tsview.create_parser(subparsers)
+    parser.set_defaults(func=tsview.main)
+    return parser
+
+
+# U-Z
+def get_unwrap_error_bridging_parser(subparsers=None):
+    from . import unwrap_error_bridging
+    parser = unwrap_error_bridging.create_parser(subparsers)
+    parser.set_defaults(func=unwrap_error_bridging.main)
+    return parser
+
+
+def get_unwrap_error_phase_closure_parser(subparsers=None):
+    from . import unwrap_error_phase_closure
+    parser = unwrap_error_phase_closure.create_parser(subparsers)
+    parser.set_defaults(func=unwrap_error_phase_closure.main)
+    return parser
+
+
+def get_view_parser(subparsers=None):
+    from . import view
+    parser = view.create_parser(subparsers)
+    parser.set_defaults(func=view.main)
+    return parser
+
+
+#######################################  Main Parser  ##########################################
+def get_parser():
+    """Instantiate the command line argument parser."""
+    parser = argparse.ArgumentParser(prog=PROG, description=__doc__)
+    parser.add_argument("--version", action="version", version=f"%(prog)s {__version__}")
+
+    # Sub-command management
+    sp = parser.add_subparsers(title="sub-commands", dest='func', required=True, metavar='')
+
+    # workflow
+    get_smallbaselineApp_parser(sp)
+
+    # standard processing
+    get_asc_desc2horz_vert_parser(sp)
+    get_geocode_parser(sp)
+    get_ifgram_inversion_parser(sp)
+    get_mask_parser(sp)
+    get_modify_network_parser(sp)
+    get_multilook_parser(sp)
+    get_reference_date_parser(sp)
+    get_reference_point_parser(sp)
+    get_spatial_average_parser(sp)
+    get_spatial_filter_parser(sp)
+    get_temporal_average_parser(sp)
+    get_temporal_derivative_parser(sp)
+    get_temporal_filter_parser(sp)
+    get_timeseries_rms_parser(sp)
+    get_timeseries2velocity_parser(sp)
+
+    # image operations
+    get_add_parser(sp)
+    get_diff_parser(sp)
+    get_image_math_parser(sp)
+    get_image_stitch_parser(sp)
+    get_subset_parser(sp)
+
+    # noise reduction / error correction
+    try:
+        get_bulk_plate_motion_parser(sp)
+    except ImportError:
+        pass
+    get_closure_phase_bias_parser(sp)
+    get_dem_error_parser(sp)
+    get_iono_tec_parser(sp)
+    get_local_oscilator_drift_parser(sp)
+    get_remove_ramp_parser(sp)
+    get_s1ab_range_bias_parser(sp)
+    get_solid_earth_tides_parser(sp)
+    get_tropo_gacos_parser(sp)
+    get_tropo_phase_elevation_parser(sp)
+    get_tropo_pyaps3_parser(sp)
+    get_unwrap_error_bridging_parser(sp)
+    get_unwrap_error_phase_closure_parser(sp)
+
+    # misc
+    # get_add_attribute_parser(sp)
+    get_dem_gsi_parser(sp)
+    get_generate_mask_parser(sp)
+    try:
+        get_lookup_geo2radar_parser(sp)
+    except ImportError:
+        pass
+
+    # pre-processing
+    get_prep_aria_parser(sp)
+    get_prep_cosicorr_parser(sp)
+    get_prep_fringe_parser(sp)
+    get_prep_gamma_parser(sp)
+    get_prep_gmtsar_parser(sp)
+    get_prep_hyp3_parser(sp)
+    get_prep_isce_parser(sp)
+    get_prep_roipac_parser(sp)
+    get_prep_snap_parser(sp)
+
+    # I/O
+    get_load_data_parser(sp)
+    get_load_gbis_parser(sp)
+    get_remove_hdf5_dataset(sp)
+    get_save_gbis_parser(sp)
+    get_save_gdal_parser(sp)
+    get_save_gmt_parser(sp)
+    get_save_hdfeos5_parser(sp)
+    get_save_kite_parser(sp)
+    get_save_kmz_timeseries_parser(sp)
+    get_save_kmz_parser(sp)
+    get_save_qgis_parser(sp)
+    get_save_roipac_parser(sp)
+
+    # visualization
+    get_info_parser(sp)
+    # get_multi_transect_parser(sp)
+    get_plot_coherence_matrix_parser(sp)
+    get_plot_network_parser(sp)
+    get_plot_transection_parser(sp)
+    get_tsview_parser(sp)
+    get_view_parser(sp)
+
+    _autocomplete(parser)
+
+    return parser
+
+
+################################################################################################
+def main(*argv):
+    """Main CLI interface."""
+    # setup logging
+    logging.basicConfig(format=LOGFMT)
+    logging.captureWarnings(True)
+    log = logging.getLogger(PROG)
+
+    # parse cmd line arguments
+    parser = get_parser()
+    args = parser.parse_args(argv if argv else None)
+
+    # execute main tasks
+    exit_code = EX_OK
+    try:
+        return args.func(sys.argv[2:])
+    except Exception as exc:
+        log.critical(
+            "unexpected exception caught: {!r} {}".format(
+                type(exc).__name__, exc)
+        )
+        log.debug("stacktrace:", exc_info=True)
+        exit_code = EX_FAILURE
+    except KeyboardInterrupt:
+        log.warning("Keyboard interrupt received: exit the program")
+        exit_code = EX_INTERRUPT
+
+    return exit_code
+
+
+################################################################################################
+if __name__ == "__main__":
+    sys.exit(main())
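Each get_*_parser() above binds a sub-command to its module's main() via set_defaults(func=...), so main() can dispatch with a single args.func(...) call. A self-contained sketch of that mechanism (the 'hello' command and its main are made up for illustration):

    import argparse
    import sys

    def hello_main(iargs=None):
        """Stand-in for a sub-command's main(iargs) entry point."""
        print('hello, args =', iargs)
        return 0

    parser = argparse.ArgumentParser(prog='demo')
    sp = parser.add_subparsers(title='sub-commands', dest='cmd', required=True)
    sub = sp.add_parser('hello', help='toy sub-command')
    sub.set_defaults(func=hello_main)

    # e.g. `python demo.py hello` -> parses, then dispatches to hello_main
    args = parser.parse_args(sys.argv[1:])
    sys.exit(args.func(sys.argv[2:]))
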
diff -pruN 1.3.3-2/mintpy/mask.py 1.4.0-1/mintpy/mask.py
--- 1.3.3-2/mintpy/mask.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/mask.py	2022-08-04 20:01:49.000000000 +0000
@@ -9,9 +9,9 @@
 import os
 import sys
 import shutil
-import argparse
 import numpy as np
 from mintpy.utils import readfile, writefile
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 ############################################################
@@ -25,10 +25,12 @@ EXAMPLE = """example:
 """
 
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Mask file',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Mask file'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', help='File to be masked')
     parser.add_argument('-m', '--mask', dest='mask_file', required=True,
diff -pruN 1.3.3-2/mintpy/modify_network.py 1.4.0-1/mintpy/modify_network.py
--- 1.3.3-2/mintpy/modify_network.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/modify_network.py	2022-08-04 20:01:49.000000000 +0000
@@ -8,7 +8,6 @@
 
 import os
 import sys
-import argparse
 import h5py
 import numpy as np
 from matplotlib import pyplot as plt, dates as mdates
@@ -22,9 +21,12 @@ from mintpy.utils import (
     network as pnet,
     plot as pp,
 )
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 ###############################  Usage  ################################
+TEMPLATE = get_template_content('modify_network')
+
 REFERENCE = """reference:
   Yunjun, Z., Fattahi, H. and Amelung, F. (2019), Small baseline InSAR time series analysis:
   Unwrapping error correction and noise reduction, Computers & Geosciences, 133, 104331,
@@ -40,8 +42,6 @@ REFERENCE = """reference:
   California. Remote Sensing of Environment, 258, 112400. doi:10.1016/j.rse.2021.112400
 """
 
-TEMPLATE = get_template_content('modify_network')
-
 EXAMPLE = """example:
   modify_network.py inputs/ifgramStack.h5 -t smallbaselineApp.cfg
   modify_network.py inputs/ifgramStack.h5 --reset
@@ -49,17 +49,20 @@ EXAMPLE = """example:
 """
 
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Modify the network of interferograms',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=REFERENCE+'\n'+TEMPLATE+'\n'+EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Modify the network of interferograms'
+    epilog = REFERENCE + '\n' + TEMPLATE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
+
     parser.add_argument('file', help='Files to modify/drop network, e.g. inputs/ifgramStack.h5.')
     parser.add_argument('-t', '--template', dest='template_file',
                         help='Template file with input options')
     parser.add_argument('--reset', action='store_true',
                         help='restore all interferograms in the file, by marking all dropIfgram=True')
     parser.add_argument('--noaux', dest='update_aux', action='store_false',
-                        help='Do not update auxilary files, e.g.\n' +
+                        help='Do not update auxiliary files, e.g.\n' +
                              'maskConnComp.h5 or avgSpatialCoh.h5 from ifgramStack.h5')
 
     # 1. temp/perp baseline, num of conn., dates, pair index, etc.
@@ -169,7 +172,7 @@ def read_template2inps(template_file, in
         inps = cmd_line_parse()
     inpsDict = vars(inps)
     print('read options from template file: '+os.path.basename(template_file))
-    template = readfile.read_template(inps.template_file)
+    template = readfile.read_template(inps.template_file, skip_chars=['[', ']'])
     template = ut.check_template_auto_value(template)
 
     # Update inps if key existed in template file
@@ -187,12 +190,12 @@ def read_template2inps(template_file, in
             elif key in ['maskFile', 'referenceFile']:
                 inpsDict[key] = value
             elif key == 'aoiYX':
-                tmp = [i.replace('[','').replace(']','').strip() for i in value.split(',')]
+                tmp = [i.strip() for i in value.split(',')]
                 sub_y = sorted([int(i.strip()) for i in tmp[0].split(':')])
                 sub_x = sorted([int(i.strip()) for i in tmp[1].split(':')])
                 inps.aoi_pix_box = (sub_x[0], sub_y[0], sub_x[1], sub_y[1])
             elif key == 'aoiLALO':
-                tmp = [i.replace('[','').replace(']','').strip() for i in value.split(',')]
+                tmp = [i.strip() for i in value.split(',')]
                 sub_lat = sorted([float(i.strip()) for i in tmp[0].split(':')])
                 sub_lon = sorted([float(i.strip()) for i in tmp[1].split(':')])
                 inps.aoi_geo_box = (sub_lon[0], sub_lat[1], sub_lon[1], sub_lat[0])
@@ -204,11 +207,9 @@ def read_template2inps(template_file, in
             elif key in ['startDate', 'endDate']:
                 inpsDict[key] = ptime.yyyymmdd(value)
             elif key == 'excludeDate':
-                value = value.replace('[','').replace(']','').replace(',', ' ')
-                inpsDict[key] = ptime.yyyymmdd(value.split())
+                inpsDict[key] = ptime.yyyymmdd(value.split(','))
             elif key == 'excludeIfgIndex':
-                value = value.replace('[','').replace(']','').replace(',', ' ')
-                inpsDict[key] += value.split()
+                inpsDict[key] += value.split(',')
                 inpsDict[key] = read_input_index_list(inpsDict[key], stackFile=inps.file)
 
     # Turn reset on if 1) no input options found to drop ifgram AND 2) there is template input
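With read_template() now stripping the brackets via skip_chars=['[', ']'], each option only needs a comma split instead of the per-key replace() chains removed above. A rough before/after sketch for an excludeDate value (the raw string is a made-up example):

    raw = '[20080520, 20090817]'

    # old: strip brackets and commas per key, then split on whitespace
    old = raw.replace('[', '').replace(']', '').replace(',', ' ').split()

    # new: brackets already removed by read_template(skip_chars=['[', ']'])
    cleaned = '20080520, 20090817'
    new = [x.strip() for x in cleaned.split(',')]

    assert old == new == ['20080520', '20090817']
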
diff -pruN 1.3.3-2/mintpy/multilook.py 1.4.0-1/mintpy/multilook.py
--- 1.3.3-2/mintpy/multilook.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/multilook.py	2022-08-04 20:01:49.000000000 +0000
@@ -8,7 +8,6 @@
 
 import os
 import sys
-import argparse
 import warnings
 import h5py
 import numpy as np
@@ -24,6 +23,7 @@ from mintpy.utils import (
     utils1 as ut,
     attribute as attr,
 )
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 ##################################################################################################
@@ -36,10 +36,12 @@ EXAMPLE = """example:
 """
 
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Multilook.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Multilook the input file'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', nargs='+', help='File(s) to multilook')
     parser.add_argument('-r','--range','-x', dest='lks_x', type=int, default=1,
@@ -48,7 +50,7 @@ def create_parser():
                         help='number of multilooking in azimuth/y direction (default: %(default)s).')
     parser.add_argument('-o', '--outfile',
                         help='Output file name. Disabled when more than one input file is given.')
-    parser.add_argument('-m','--method', dest='method', type=str, default='average', choices=['average', 'nearest'],
+    parser.add_argument('-m','--method', dest='method', type=str, default='mean', choices=['mean', 'median', 'nearest'],
                         help='downsampling method (default: %(default)s) \n'
                             'e.g. nearest for geometry, mean for observations')
     parser.add_argument('--margin', dest='margin', type=int, nargs=4, metavar=('TOP','BOTTOM','LEFT','RIGHT'),
@@ -186,7 +188,7 @@ def multilook_data(data, lks_y=1, lks_x=
     return coarse_data
 
 
-def multilook_file(infile, lks_y, lks_x, outfile=None, method='average', margin=[0,0,0,0], max_memory=4):
+def multilook_file(infile, lks_y, lks_x, outfile=None, method='mean', margin=[0,0,0,0], max_memory=4):
     """ Multilook input file
     Parameters: infile - str, path of input file to be multilooked.
                 lks_y  - int, number of looks in y / row direction.
@@ -279,7 +281,7 @@ def multilook_file(infile, lks_y, lks_x,
                                      box=box_i,
                                      print_msg=False)[0]
 
-                data = multilook_data(data, lks_y, lks_x)
+                data = multilook_data(data, lks_y, lks_x, method=method)
 
             # output block
             if data.ndim == 3:
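Both new methods ('mean' and 'median') reduce each lks_y x lks_x window to one output pixel; only the aggregation function differs. A minimal numpy sketch of mean-multilooking (independent of MintPy's actual multilook_data, which additionally handles 3D stacks and edge margins):

    import numpy as np

    def multilook_mean(data, lks_y, lks_x):
        """Average lks_y x lks_x blocks of a 2D array (simplified sketch:
        crops the ragged margin instead of handling it)."""
        rows, cols = data.shape
        rows -= rows % lks_y
        cols -= cols % lks_x
        blocks = data[:rows, :cols].reshape(
            rows // lks_y, lks_y, cols // lks_x, lks_x)
        return blocks.mean(axis=(1, 3))

    data = np.arange(24, dtype=np.float32).reshape(4, 6)
    print(multilook_mean(data, 2, 3))   # -> 2x2 array of block means
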
diff -pruN 1.3.3-2/mintpy/multi_transect.py 1.4.0-1/mintpy/multi_transect.py
--- 1.3.3-2/mintpy/multi_transect.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/multi_transect.py	2022-08-04 20:01:49.000000000 +0000
@@ -720,7 +720,7 @@ def main(argv=None):
                     print(""" 
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
       
-      WARNING: nan value for InSAR data at the refernce pixel!
+      WARNING: nan value for InSAR data at the reference pixel!
                reference station should be a pixel with valid value in InSAR data.
                                
                please select another GPS station as the reference station.
diff -pruN 1.3.3-2/mintpy/objects/cluster.py 1.4.0-1/mintpy/objects/cluster.py
--- 1.3.3-2/mintpy/objects/cluster.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/objects/cluster.py	2022-08-04 20:01:49.000000000 +0000
@@ -302,7 +302,7 @@ class DaskCluster:
             # message
             num_future += 1
             sub_t = time.time() - submission_time
-            print("FUTURE #{} complete. Time used: {:.0f} seconds".format(num_future, sub_t))
+            print(f"\nFUTURE #{num_future} complete. Time used: {sub_t:.0f} seconds")
 
             # catch result - sub_box
             # and convert the absolute sub_box into local col/row start/end relative to the primary box
@@ -338,12 +338,14 @@ class DaskCluster:
     def close(self):
         """Close connections to dask client and cluster and moves dask output/error files. """
 
-        self.cluster.close()
-        print('close dask cluster')
-
+        # close client before cluster -> less likely to have the CancelledError
+        # https://github.com/dask/distributed/issues/2273
         self.client.close()
         print('close dask client')
 
+        self.cluster.close()
+        print('close dask cluster')
+
         # move *.o/.e files produced by dask in stdout/stderr
         self.move_dask_stdout_stderr_files()
 
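The new ordering (client first, then cluster) is the shutdown sequence recommended in the dask/distributed#2273 discussion referenced above. In bare dask.distributed terms it looks like this (LocalCluster is chosen here only for illustration; MintPy supports other cluster types):

    from dask.distributed import Client, LocalCluster

    cluster = LocalCluster(n_workers=2)   # scheduler + workers
    client = Client(cluster)              # connection used to submit futures

    # ... submit futures and gather results here ...

    client.close()    # disconnect the client first ...
    cluster.close()   # ... then tear down workers and scheduler
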
diff -pruN 1.3.3-2/mintpy/objects/constants.py 1.4.0-1/mintpy/objects/constants.py
--- 1.3.3-2/mintpy/objects/constants.py	1970-01-01 00:00:00.000000000 +0000
+++ 1.4.0-1/mintpy/objects/constants.py	2022-08-04 20:01:49.000000000 +0000
@@ -0,0 +1,19 @@
+#!/usr/bin/env python3
+############################################################
+# Program is part of MintPy                                #
+# Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi         #
+# Author: Zhang Yunjun, Feb 2022                           #
+############################################################
+# Recommend usage:
+#   from mintpy.objects.constants import SPEED_OF_LIGHT
+
+
+SPEED_OF_LIGHT = 299792458  # meters per second
+
+# Earth radius
+# equatorial radius: a = 6378.1370e3 
+# polar      radius: b = 6356.7523e3
+# arithmetic mean radius: R_1 = (2 * a + b) / 3 = 6371.0088e3
+#   defined by IUGG and used in geophysics
+EARTH_RADIUS = 6371.0088e3   # the arithmetic mean radius in meters
+
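The quoted value is easy to verify from the two radii in the comment above:

    a = 6378.1370e3           # equatorial radius in meters
    b = 6356.7523e3           # polar radius in meters
    R_1 = (2 * a + b) / 3     # IUGG arithmetic mean radius
    print(R_1)                # ~6371008.77 m, i.e. 6371.0088e3 as defined above
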
diff -pruN 1.3.3-2/mintpy/objects/gps.py 1.4.0-1/mintpy/objects/gps.py
--- 1.3.3-2/mintpy/objects/gps.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/objects/gps.py	2022-08-04 20:01:49.000000000 +0000
@@ -23,10 +23,11 @@ from mintpy.utils import ptime, time_fun
 unr_site_list_file = 'http://geodesy.unr.edu/NGLStationPages/DataHoldings.txt'
 
 
-def dload_site_list(print_msg=True):
+def dload_site_list(out_file=None, print_msg=True):
     """download DataHoldings.txt"""
     url = unr_site_list_file
-    out_file = os.path.basename(url)
+    if not out_file:
+        out_file = os.path.basename(url)
     if print_msg:
         print('downloading site list from UNR Geod Lab: {}'.format(url))
     urlretrieve(url, out_file)
@@ -47,8 +48,9 @@ def search_gps(SNWE, start_date=None, en
     # download site list file if it's not found in current directory
     if site_list_file is None:
         site_list_file = os.path.basename(unr_site_list_file)
-        if not os.path.isfile(site_list_file):
-            dload_site_list(print_msg=print_msg)
+
+    if not os.path.isfile(site_list_file):
+        dload_site_list(site_list_file, print_msg=print_msg)
 
     txt_data = np.loadtxt(site_list_file,
                           dtype=bytes,
@@ -159,6 +161,16 @@ def get_gps_los_obs(meta, obs_type, site
         fc = np.genfromtxt(csv_file, dtype=col_types, delimiter=',', names=True)
         site_obs = fc[col_names[obs_ind]]
 
+        # get obs for the input site names only
+        # in case the site_names are not consistent with the CSV file.
+        if num_row != num_site:
+            temp_names = fc[col_names[0]]
+            temp_obs = np.array(site_obs, dtype=np.float32)
+            site_obs = np.zeros(num_site, dtype=np.float32) * np.nan
+            for i, site_name in enumerate(site_names):
+                if site_name in temp_names:
+                    site_obs[i] = temp_obs[temp_names == site_name][0]
+
     else:
         # calculate and save to CSV file
         data_list = []
@@ -451,11 +463,11 @@ class GPS:
         elif gps_comp in ['u2los', 'up2los']:
             unit_vec[0] = 0.
             unit_vec[1] = 0.
-        elif gps_comp in ['horz']:
+        elif gps_comp in ['horz','horizontal']:
             unit_vec[0] = np.sin(horz_az_angle) * -1
             unit_vec[1] = np.cos(horz_az_angle)
             unit_vec[2] = 0.
-        elif gps_comp in ['vertical']:
+        elif gps_comp in ['vert','vertical']:
             unit_vec[0] = 0.
             unit_vec[1] = 0.
             unit_vec[2] = 1.
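Each unit_vec above is dotted with the GPS displacement in (east, north, up) order to obtain the requested component. A hedged sketch of that projection covering only the two branches visible in this hunk (the full LOS case also needs the incidence angle, which sits outside this diff; horz_az_angle is taken in radians in this sketch):

    import numpy as np

    def project_enu(d_e, d_n, d_u, gps_comp, horz_az_angle=0.0):
        """Dot an (east, north, up) displacement with the component
        unit vector; sketch of the 'horz' and 'vert' branches above."""
        if gps_comp in ['horz', 'horizontal']:
            unit_vec = [-np.sin(horz_az_angle), np.cos(horz_az_angle), 0.0]
        elif gps_comp in ['vert', 'vertical']:
            unit_vec = [0.0, 0.0, 1.0]
        else:
            raise ValueError(f'un-supported gps_comp: {gps_comp}')
        return d_e * unit_vec[0] + d_n * unit_vec[1] + d_u * unit_vec[2]

    print(project_enu(0.0, 0.0, 0.010, 'vertical'))  # 0.01: pure uplift
    print(project_enu(0.0, 0.005, 0.010, 'horz'))    # 0.005: north @ az=0
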
diff -pruN 1.3.3-2/mintpy/objects/ionex.py 1.4.0-1/mintpy/objects/ionex.py
--- 1.3.3-2/mintpy/objects/ionex.py	1970-01-01 00:00:00.000000000 +0000
+++ 1.4.0-1/mintpy/objects/ionex.py	2022-08-04 20:01:49.000000000 +0000
@@ -0,0 +1,389 @@
+#!/usr/bin/env python3
+############################################################
+# Program is part of MintPy                                #
+# Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi         #
+# Author: Zhang Yunjun, Jun 2022                           #
+############################################################
+# Links:
+#   IGS (NASA): https://cddis.nasa.gov/Data_and_Derived_Products/GNSS/atmospheric_products.html
+#   IMPC (DLR): https://impc.dlr.de/products/total-electron-content/near-real-time-tec/nrt-tec-global/
+# Recommend import:
+#   from mintpy.objects import ionex
+
+
+import os
+import re
+import datetime as dt
+
+import numpy as np
+from scipy import interpolate
+from matplotlib import pyplot as plt
+from matplotlib.animation import FuncAnimation
+from cartopy import crs as ccrs
+
+from mintpy.utils import ptime
+from mintpy.utils.map import draw_lalo_label, round_to_1
+
+
+IGS_SOLUTION_NAMES = {
+    'cod' : 'CODE (1-hour)',
+    'esa' : 'ESA (2-hour)',
+    'igs' : 'IGS (2-hour)',
+    'jpl' : 'JPL (2-hour)',
+    'upc' : 'UPC (2-hour)',
+    'uqr' : 'UPC (15-min; rapid)'
+}
+
+
+################################## Download ####################################
+
+def dload_ionex(date_str, tec_dir, sol_code='jpl', date_fmt='%Y%m%d', print_msg=False):
+    """Download IGS vertical TEC files in IONEX format.
+
+    Parameters: date_str         - str, date in date_fmt format.
+                tec_dir          - str, local directory to save the downloaded files.
+                sol_code         - str, IGS TEC analysis center code.
+                date_fmt         - str, date format code
+    Returns:    fname_dst_uncomp - str, path to the local uncompressed IONEX file.
+    """
+
+    # get the source (remote) and destination (local) file path/url
+    kwargs = dict(sol_code=sol_code, date_fmt=date_fmt)
+    fname_src = get_ionex_filename(date_str, tec_dir=None, **kwargs)
+    fname_dst = get_ionex_filename(date_str, tec_dir=tec_dir, **kwargs) + '.Z'
+    fname_dst_uncomp = fname_dst[:-2]
+
+    # download - compose cmd
+    cmd = 'wget --continue --auth-no-challenge "{}"'.format(fname_src)
+    if os.path.isfile(fname_dst) and os.path.getsize(fname_dst) > 1000:
+        cmd += ' --timestamping'
+    if not print_msg:
+        cmd += ' --quiet'
+    else:
+        print(cmd)
+
+    # download - run cmd in output dir
+    pwd = os.getcwd()
+    os.chdir(tec_dir)
+    os.system(cmd)
+    os.chdir(pwd)
+
+    # uncompress
+    # if output file 1) does not exist, 2) is smaller than 600k in size, or 3) is older than the compressed file
+    if (not os.path.isfile(fname_dst_uncomp)
+            or os.path.getsize(fname_dst_uncomp) < 600e3
+            or os.path.getmtime(fname_dst_uncomp) < os.path.getmtime(fname_dst)):
+        cmd = "gzip --force --keep --decompress {}".format(fname_dst)
+        if print_msg:
+            print(cmd)
+        os.system(cmd)
+
+    return fname_dst_uncomp
+
+
+#################################### Read ######################################
+
+def get_ionex_value(tec_file, utc_sec, lat, lon, interp_method='linear3d', rotate_tec_map=False,
+                    print_msg=True):
+    """Get the TEC value from input IONEX file for the input lat/lon/datetime.
+
+    Reference:
+        Schaer, S., Gurtner, W., & Feltens, J. (1998). IONEX: The ionosphere map exchange format
+        version 1.1. Paper presented at the Proceedings of the IGS AC workshop, Darmstadt, Germany.
+
+    Parameters: tec_file       - str, path of local TEC file
+                utc_sec        - float or 1D np.ndarray, UTC time of the day in seconds
+                lat/lon        - float or 1D np.ndarray, latitude / longitude in degrees
+                interp_method  - str, interpolation method
+                rotate_tec_map - bool, rotate the TEC map along the SUN direction.
+                                 for "interp_method = linear3d" only.
+                print_msg      - bool, print out progress bar or not.
+    Returns:    tec_val        - float or 1D np.ndarray, vertical TEC value in TECU
+    """
+
+    def interp_3d_rotate(interpfs, mins, lats, lons, utc_min, lat, lon):
+        ind0 = np.where((mins - utc_min) <= 0)[0][-1]
+        ind1 = ind0 + 1
+        lon0 = lon + (utc_min - mins[ind0]) * 360. / (24. * 60.)
+        lon1 = lon + (utc_min - mins[ind1]) * 360. / (24. * 60.)
+        tec_val0 = interpfs[ind0](lon0, lat)
+        tec_val1 = interpfs[ind1](lon1, lat)
+        tec_val = (  (mins[ind1] - utc_min) / (mins[ind1] - mins[ind0]) * tec_val0
+                   + (utc_min - mins[ind0]) / (mins[ind1] - mins[ind0]) * tec_val1 )
+        return tec_val
+
+    # time info
+    utc_min = utc_sec / 60.
+
+    # read TEC file
+    mins, lats, lons, tec_maps = read_ionex(tec_file)[:4]
+
+    # resample
+    if interp_method == 'nearest':
+        lon_ind = np.abs(lons - lon).argmin()
+        lat_ind = np.abs(lats - lat).argmin()
+        time_ind = np.abs(mins - utc_min).argmin()
+        tec_val = tec_maps[time_ind, lat_ind, lon_ind]
+
+    elif interp_method in ['linear', 'linear2d', 'bilinear']:
+        time_ind = np.abs(mins.reshape(-1,1) - utc_min).argmin(axis=0)
+
+        if isinstance(utc_min, np.ndarray):
+            num_pts = len(utc_min)
+            tec_val = np.zeros(num_pts, dtype=np.float32)
+            prog_bar = ptime.progressBar(maxValue=num_pts, print_msg=print_msg)
+            for i in range(num_pts):
+                tec_val[i] = interpolate.interp2d(
+                    x=lons,
+                    y=lats,
+                    z=tec_maps[time_ind[i], :, :],
+                    kind='linear',
+                )(lon[i], lat[i])
+
+                prog_bar.update(i+1, every=200)
+            prog_bar.close()
+
+        else:
+            tec_val = interpolate.interp2d(
+                x=lons,
+                y=lats,
+                z=tec_maps[time_ind[0], :, :],
+                kind='linear',
+            )(lon, lat)
+
+    elif interp_method in ['linear3d', 'trilinear']:
+        if not rotate_tec_map:
+            # option 1: interpolate between consecutive TEC maps
+            # testing shows better agreement with SAR obs than option 2.
+            tec_val = interpolate.interpn(
+                points=(mins, np.flip(lats), lons),
+                values=np.flip(tec_maps, axis=1),
+                xi=(utc_min, lat, lon),
+                method='linear',
+            )
+
+        else:
+            # option 2: interpolate between consecutive rotated TEC maps
+            # reference: equation (3) in Schaer and Gurtner (1998)
+
+            # prepare interpolation functions in advance to speed up
+            interpfs = []
+            for i in range(len(mins)):
+                interpfs.append(
+                    interpolate.interp2d(
+                        x=lons,
+                        y=lats,
+                        z=tec_maps[i, :, :],
+                        kind='linear',
+                    ),
+                )
+
+            if isinstance(utc_min, np.ndarray):
+                num_pts = len(utc_min)
+                tec_val = np.zeros(num_pts, dtype=np.float32)
+                prog_bar = ptime.progressBar(maxValue=num_pts, print_msg=print_msg)
+                for i in range(num_pts):
+                    tec_val[i] = interp_3d_rotate(
+                        interpfs,
+                        mins, lats, lons,
+                        utc_min[i], lat[i], lon[i],
+                    )
+                    prog_bar.update(i+1, every=200)
+                prog_bar.close()
+
+            else:
+                tec_val = interp_3d_rotate(
+                    interpfs,
+                    mins, lats, lons,
+                    utc_min, lat, lon,
+                )
+
+    else:
+        msg = f'Un-recognized interp_method input: {interp_method}!'
+        msg += '\nSupported inputs: nearest, linear2d, linear3d.'
+        raise ValueError(msg)
+
+    return tec_val
+
+
+def get_ionex_filename(date_str, tec_dir=None, sol_code='jpl', date_fmt='%Y%m%d'):
+    """Get the file name of IONEX files.
+
+    Parameters: date_str - str, date in date_fmt format
+                tec_dir  - str, path to the local TEC file directory
+                           Set to None for the http path.
+                sol_code - str, GIM analysis center code in 3 letters
+                           https://cddis.nasa.gov/Data_and_Derived_Products/GNSS/atmospheric_products.html
+                date_fmt - str, date format code
+                           https://docs.python.org/3/library/datetime.html#strftime-and-strptime-format-codes
+    Returns:    tec_file - str, path to the local uncompressed TEC file OR the remote compressed TEC file
+    """
+    dd = dt.datetime.strptime(date_str, date_fmt)
+    doy = '{:03d}'.format(dd.timetuple().tm_yday)
+    yy = str(dd.year)[2:4]
+
+    # file name base
+    fname = "{a}g{d}0.{y}i.Z".format(a=sol_code.lower(), d=doy, y=yy)
+
+    # full path
+    if tec_dir:
+        # local uncompressed file path
+        tec_file = os.path.join(tec_dir, fname[:-2])
+    else:
+        # remote compressed file path
+        url_dir = "https://cddis.nasa.gov/archive/gnss/products/ionex"
+        tec_file = os.path.join(url_dir, str(dd.year), doy, fname)
+
+    return tec_file
+
+
+def get_ionex_date(tec_file, date_fmt='%Y-%m-%d'):
+    """Get the date in the specified format from the input IONEX filename.
+
+    Parameters: tec_file - str, path to the TEC file in IONEX format
+                date_fmt - str, date format code
+    Returns:    date_str - str, date in date_fmt format
+                date_obj - datetime.datetime object
+    """
+    fbase = os.path.basename(tec_file)
+    year = fbase.split('.')[1][:2]
+    doy = fbase.split('.')[0].split('g')[1][:3]
+    date_obj = dt.datetime.strptime(year, '%y') + dt.timedelta(days=int(doy)-1)
+    date_str = dt.datetime.strftime(date_obj, date_fmt)
+    return date_str, date_obj
+
+
+def get_ionex_height(tec_file):
+    """Get the height of the thin-shell ionosphere from IONEX file.
+
+    Parameters: tec_file - str, path to the TEC file in IONEX format
+    Returns:    ion_hgt  - float, height above the surface in meters
+    """
+
+    with open(tec_file, 'r') as f:
+        lines = f.readlines()
+        for line in lines:
+            if line.strip().endswith('DHGT'):
+                ion_hgt = float(line.split()[0])
+                break
+
+    return ion_hgt
+
+
+def read_ionex(tec_file):
+    """Read TEC file in IONEX format.
+
+    Parameters: tec_file - str, path to the TEC file in IONEX format
+    Returns:    mins     - 1D np.ndarray in size of (num_map), time of the day in minutes
+                lats     - 1D np.ndarray in size of (num_lat), latitude  in degrees
+                lons     - 1D np.ndarray in size of (num_lon), longitude in degrees
+                tec_maps - 3D np.ndarray in size of (num_map, num_lat, num_lon), vertical TEC in TECU
+                rms_maps - 3D np.ndarray in size of (num_map, num_lat, num_lon), vertical TEC RMS in TECU
+    Examples:   tec_dir = os.path.expanduser('~/data/aux/IONEX')
+                tec_file = get_ionex_filename('20190519', tec_dir=tec_dir, sol_code='jpl')
+                mins, lats, lons, tec_maps = read_ionex(tec_file)[:4]
+    """
+
+    # functions for parsing ionex file
+    # link: https://github.com/daniestevez/jupyter_notebooks/blob/master/IONEX.ipynb
+    def parse_map(tec_map, key='TEC', exponent=-1):
+        tec_map = re.split(f'.*END OF {key} MAP', tec_map)[0]
+        tec_map = [np.fromstring(l, sep=' ') for l in re.split('.*LAT/LON1/LON2/DLON/H\\n', tec_map)[1:]]
+        return np.stack(tec_map) * 10**exponent
+
+    # read IONEX file
+    with open(tec_file, 'r') as f:
+        fc = f.read()
+
+        # read header
+        header = fc.split('END OF HEADER')[0].split('\n')
+        for line in header:
+            if line.strip().endswith('# OF MAPS IN FILE'):
+                num_map = int(line.split()[0])
+            elif line.strip().endswith('DLAT'):
+                lat0, lat1, lat_step = [float(x) for x in line.split()[:3]]
+            elif line.strip().endswith('DLON'):
+                lon0, lon1, lon_step = [float(x) for x in line.split()[:3]]
+            elif line.strip().endswith('EXPONENT'):
+                exponent = float(line.split()[0])
+
+        # spatial coordinates
+        num_lat = int((lat1 - lat0) / lat_step + 1)
+        num_lon = int((lon1 - lon0) / lon_step + 1)
+        lats = np.arange(lat0, lat0 + num_lat * lat_step, lat_step)
+        lons = np.arange(lon0, lon0 + num_lon * lon_step, lon_step)
+
+        # time stamps
+        min_step = 24 * 60 / (num_map - 1)
+        mins = np.arange(0, num_map * min_step, min_step)
+
+        # read TEC and its RMS maps
+        tec_maps = np.array([parse_map(t, key='TEC', exponent=exponent)
+                             for t in fc.split('START OF TEC MAP')[1:]], dtype=np.float32)
+        rms_maps = np.array([parse_map(t, key='RMS', exponent=exponent)
+                             for t in fc.split('START OF RMS MAP')[1:]], dtype=np.float32)
+
+    return mins, lats, lons, tec_maps, rms_maps
+
+
+#################################### Plot ######################################
+
+def plot_ionex(tec_file, save_fig=False):
+    """Plot the IONEX file as animation.
+
+    Parameters: tec_file - str, path to the local uncompressed IONEX file
+                save_fig - bool, save the animation to file
+    Returns:    out_fig  - str, path to the output animation file
+    """
+
+    # read TEC file
+    sol_code = os.path.basename(tec_file)[:3]
+    sol_name = IGS_SOLUTION_NAMES.get(sol_code, 'Unknown')
+    mins, lats, lons, tec_maps = read_ionex(tec_file)[:4]
+
+    # basic info
+    num_map = len(mins)
+    lat_step = np.median(np.diff(lats))
+    lon_step = np.median(np.diff(lons))
+    N = np.max(lats) - lat_step / 2;  S = np.min(lats) + lat_step / 2
+    W = np.min(lons) - lon_step / 2;  E = np.max(lons) + lon_step / 2
+    vmax = round_to_1(np.nanmax(tec_maps) * 0.9)
+    date_obj = get_ionex_date(tec_file)[1]
+
+    # init figure
+    proj_obj = ccrs.PlateCarree()
+    fig, ax = plt.subplots(figsize=[9, 4], subplot_kw=dict(projection=proj_obj))
+    im = ax.imshow(tec_maps[0,:,:], vmin=0, vmax=vmax, extent=(W, E, S, N),
+                   origin='upper', animated=True, interpolation='nearest')
+    ax.coastlines()
+    draw_lalo_label(ax, geo_box=(W, N, E, S), projection=proj_obj, print_msg=False)
+    # colorbar
+    cbar = fig.colorbar(im, shrink=0.5)
+    cbar.set_label('TECU')
+    fig.tight_layout()
+
+    # update image
+    global ind
+    ind = 0
+    def animate(*args):
+        global ind
+        ind += 1
+        if ind >= num_map:
+            ind -= num_map
+
+        # update image & title
+        im.set_array(tec_maps[ind,:,:])
+        dt_obj = date_obj + dt.timedelta(minutes=mins[ind])
+        ax.set_title(f'{dt_obj.isoformat()} - {sol_name}')
+        return im,
+
+    # play animation
+    ani = FuncAnimation(fig, animate, interval=200, blit=True, save_count=num_map)
+
+    # output
+    out_fig = '{}.gif'.format(os.path.abspath(tec_file))
+    if save_fig:
+        print('saving animation to {}'.format(out_fig))
+        ani.save(out_fig, writer='imagemagick', dpi=300)
+
+    print('showing animation ...')
+    plt.show()
+
+    return out_fig
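get_ionex_filename() and get_ionex_date() above round-trip through the IGS naming convention '{sol_code}g{doy}0.{yy}i'. A quick standalone check for JPL's map of 2019-05-19 (day-of-year 139), mirroring the string handling in those two functions:

    import datetime as dt

    d = dt.datetime(2019, 5, 19)
    doy = '{:03d}'.format(d.timetuple().tm_yday)     # '139'
    yy = str(d.year)[2:4]                            # '19'
    print('{}g{}0.{}i.Z'.format('jpl', doy, yy))     # jplg1390.19i.Z

    # and back from the file name: 2-digit year + day-of-year -> date
    back = dt.datetime.strptime(yy, '%y') + dt.timedelta(days=int(doy) - 1)
    assert back.date() == d.date()
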
diff -pruN 1.3.3-2/mintpy/objects/progress.py 1.4.0-1/mintpy/objects/progress.py
--- 1.3.3-2/mintpy/objects/progress.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/objects/progress.py	2022-08-04 20:01:49.000000000 +0000
@@ -4,8 +4,8 @@
 # Author: Zhang Yunjun, Dec 2020                           #
 ############################################################
 # Recommend import:
-#   from mintpy.objects.progress import progressBar OR
-#   from mintpy.utils.ptime import progressBar
+#    from mintpy.objects.progress import progressBar
+# OR from mintpy.utils import ptime
 
 
 import sys
diff -pruN 1.3.3-2/mintpy/objects/sensor.py 1.4.0-1/mintpy/objects/sensor.py
--- 1.3.3-2/mintpy/objects/sensor.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/objects/sensor.py	2022-08-04 20:01:49.000000000 +0000
@@ -16,7 +16,7 @@ SENSOR_NAME_VARIATION = {
     'csk'   : ['csk', 'csk1', 'csk2', 'csk3', 'csk4', 'cos', 'cosmo', 'cosmoskymed'],
     'env'   : ['env', 'envisat', 'asar'],
     'ers'   : ['ers', 'ers1', 'ers2', 'ers12'],
-    'gfen3' : ['gfen3', 'gaofen3', 'g3', 'gaofen'],
+    'gf3'   : ['gfen3', 'gaofen3', 'g3', 'gaofen'],
     'jers'  : ['jers', 'jers1'],
     'ksat5' : ['ksat5', 'kompsat5', 'kompsat', 'kmps5'],
     'ni'    : ['ni', 'nisar'],
@@ -283,22 +283,32 @@ RSAT2 = {
     'ground_range_pixel_size'    : 2.1,       # m
 }
 
+# GaoFen-3 
+# Table 2 & 6 in https://directory.eoportal.org/web/eoportal/satellite-missions/g/gaofen-3
+GF3 = {
+    'carrier_frequency'          : 5.4e9,     # Hz
+    'altitude'                   : 755e3,     # m
+    'antenna_length'             : 15,        # m
+    'sampling_frequency'         : 533.33e6,  # Hz
+}
+
 # Sentinel-1 Interferometric Wide (IW / TOPS) swath mode
 # Typical value:
 # azfact = azResolution / azPixelSize = 1.46
 # rgfact = rgResolution / rgPixelSize = 1.33
 # reference:
-#   1. Table 2 and Fig. 5d in Jung et al. (2014)
-#   2. Table 7-5 in https://sentinel.esa.int/documents/247904/1877131/Sentinel-1-Product-Definition
+#   1. Table 2 & Fig. 5d in Jung et al. (2014)
+#   2. Table 3-1 & 7-5 in https://sentinel.esa.int/documents/247904/1877131/Sentinel-1-Product-Definition
 SEN = {
     'carrier_frequency'          : 5.405e9,   # Hz
     'altitude'                   : 705e3,     # m, mean value
-    'antenna_length'             : 45.0,      # m
+    'antenna_length'             : 12.3,      # m
+    'antenna_width'              : 0.82,      # m
     'doppler_bandwidth'          : 380,       # Hz
-    'pulse_repetition_frequency' : 522,       # Hz
+    'pulse_repetition_frequency' : 1717.13,   # Hz, based on real data; 1000-3000 (programmable)
     'chirp_bandwidth'            : 56.50e6,   # Hz
     'sampling_frequency'         : 64.35e6,   # Hz
-    'azimuth_pixel_size'         : 14.1,      # m
+    'azimuth_pixel_size'         : 14.1,      # m, this is the ground azimuth pixel spacing, NOT on orbits!
     'range_pixel_size'           : 2.3,       # m
     'ground_range_pixel_size'    : 4.1,       # m
     'IW1' : {'range_resolution' : 2.7, 'azimuth_resolution': 22.5},
diff -pruN 1.3.3-2/mintpy/objects/stackDict.py 1.4.0-1/mintpy/objects/stackDict.py
--- 1.3.3-2/mintpy/objects/stackDict.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/objects/stackDict.py	2022-08-04 20:01:49.000000000 +0000
@@ -15,10 +15,7 @@ import time
 import warnings
 import h5py
 import numpy as np
-try:
-    from skimage.transform import resize
-except ImportError:
-    raise ImportError('Could not import skimage!')
+from skimage.transform import resize
 
 from mintpy.objects import (
     dataTypeDict,
@@ -31,7 +28,7 @@ from mintpy.utils import (
     utils0 as ut,
     attribute as attr,
 )
-
+from mintpy.multilook import multilook_data
 
 ########################################################################################
 class ifgramStackDict:
@@ -55,30 +52,37 @@ class ifgramStackDict:
         self.pairsDict = pairsDict
         self.dsName0 = dsName0        #reference dataset name, unwrapPhase OR azimuthOffset OR rangeOffset
 
-    def get_size(self, box=None, xstep=1, ystep=1):
-        self.numIfgram = len(self.pairsDict)
+    def get_size(self, box=None, xstep=1, ystep=1, geom_obj=None):
+        """Get size in 3D"""
+        num_ifgram = len(self.pairsDict)
         ifgramObj = [v for v in self.pairsDict.values()][0]
-        self.length, ifgramObj.width = ifgramObj.get_size(family=self.dsName0)
+        length, width = ifgramObj.get_size(family=self.dsName0)
+
+        # use the reference geometry obj size
+        # for low-reso ionosphere from isce2/topsStack
+        if geom_obj:
+            length, width = geom_obj.get_size()
 
         # update due to subset
         if box:
-            self.length = box[3] - box[1]
-            self.width = box[2] - box[0]
-        else:
-            self.length = ifgramObj.length
-            self.width = ifgramObj.width
+            length, width = box[3] - box[1], box[2] - box[0]
 
         # update due to multilook
-        self.length = self.length // ystep
-        self.width = self.width // xstep
+        length = length // ystep
+        width = width // xstep
 
-        return self.numIfgram, self.length, self.width
+        return num_ifgram, length, width
 
     def get_date12_list(self):
         pairs = [pair for pair in self.pairsDict.keys()]
         self.date12List = ['{}_{}'.format(i[0], i[1]) for i in pairs]
         return self.date12List
 
+    def get_dataset_list(self):
+        ifgramObj = [x for x in self.pairsDict.values()][0]
+        dsetList = [x for x in ifgramObj.datasetDict.keys()]
+        return dsetList
+
     def get_metadata(self):
         ifgramObj = [v for v in self.pairsDict.values()][0]
         self.metadata = ifgramObj.get_metadata(family=self.dsName0)
@@ -93,40 +97,73 @@ class ifgramStackDict:
         dataType = dataTypeDict[metadata.get('DATA_TYPE', 'float32').lower()]
         return dataType
 
-    def write2hdf5(self, outputFile='ifgramStack.h5', access_mode='w', box=None, xstep=1, ystep=1,
-                   compression=None, extra_metadata=None):
+    def write2hdf5(self, outputFile='ifgramStack.h5', access_mode='w', box=None, xstep=1, ystep=1, mli_method='nearest',
+                   compression=None, extra_metadata=None, geom_obj=None):
         """Save/write an ifgramStackDict object into an HDF5 file with the structure defined in:
 
         https://mintpy.readthedocs.io/en/latest/api/data_structure/#ifgramstack
 
-        Parameters: outputFile : str, Name of the HDF5 file for the InSAR stack
-                    access_mode : str, access mode of output File, e.g. w, r+
-                    box : tuple, subset range in (x0, y0, x1, y1)
-                    extra_metadata : dict, extra metadata to be added into output file
-        Returns:    outputFile
+        Parameters: outputFile     - str, Name of the HDF5 file for the InSAR stack
+                    access_mode    - str, access mode of output File, e.g. w, r+
+                    box            - tuple, subset range in (x0, y0, x1, y1)
+                    x/ystep        - int, multilook number in x/y direction
+                    mli_method     - str, multilook method, nearest, mean or median
+                    compression    - str, HDF5 dataset compression method, None, lzf or gzip
+                    extra_metadata - dict, extra metadata to be added into output file
+                    geom_obj       - geometryDict object, size reference to determine the resizing operation.
+        Returns:    outputFile     - str, Name of the HDF5 file for the InSAR stack
         """
+        print('-'*50)
+
+        # output directory
+        output_dir = os.path.dirname(outputFile)
+        if not os.path.isdir(output_dir):
+            os.makedirs(output_dir)
+            print(f'create directory: {output_dir}')
 
         self.pairs = sorted([pair for pair in self.pairsDict.keys()])
         self.dsNames = list(self.pairsDict[self.pairs[0]].datasetDict.keys())
         self.dsNames = [i for i in ifgramDatasetNames if i in self.dsNames]
         maxDigit = max([len(i) for i in self.dsNames])
-        self.get_size(box=box,
-                      xstep=xstep,
-                      ystep=ystep)
-
-        self.outputFile = outputFile
-        with h5py.File(self.outputFile, access_mode) as f:
-            print('create HDF5 file {} with {} mode'.format(self.outputFile, access_mode))
+        numIfgram, length, width = self.get_size(
+            box=box,
+            xstep=xstep,
+            ystep=ystep)
+
+        # check if resize is needed for a lower resolution stack, e.g. ionosphere from isce2/topsStack
+        resize2shape = None
+        if geom_obj and os.path.basename(outputFile).startswith('ion'):
+            # compare the original data size between ionosphere and geometry w/o subset/multilook
+            ion_size = self.get_size()[1:]
+            geom_size = geom_obj.get_size()
+            if ion_size != geom_size:
+                msg = 'lower resolution ionosphere file detected'
+                msg += f' --> resize from {ion_size} to {geom_size} via skimage.transform.resize ...'
+                print(msg)
+
+                # matrix shape for the original geometry size w/o subset/multilook
+                resize2shape = geom_size
+                # data size of the output HDF5 file w/ resize/subset/multilook
+                length, width = self.get_size(
+                    box=box,
+                    xstep=xstep,
+                    ystep=ystep,
+                    geom_obj=geom_obj)[1:]
+
+        # write HDF5 file
+        with h5py.File(outputFile, access_mode) as f:
+            print('create HDF5 file {} with {} mode'.format(outputFile, access_mode))
 
             ###############################
             # 3D datasets containing unwrapPhase, magnitude, coherence, connectComponent, wrapPhase, etc.
             for dsName in self.dsNames:
-                dsShape = (self.numIfgram, self.length, self.width)
+                dsShape = (numIfgram, length, width)
                 dsDataType = np.float32
                 dsCompression = compression
                 if dsName in ['connectComponent']:
                     dsDataType = np.int16
                     dsCompression = 'lzf'
+                    mli_method = 'nearest'
 
                 print(('create dataset /{d:<{w}} of {t:<25} in size of {s}'
                        ' with compression = {c}').format(d=dsName,
@@ -148,16 +185,24 @@ class ifgramStackDict:
                     if dsFile.endswith('cov.bip'):
                         print('convert variance to standard deviation.')
 
-                prog_bar = ptime.progressBar(maxValue=self.numIfgram)
-                for i in range(self.numIfgram):
-                    # read
-                    ifgramObj = self.pairsDict[self.pairs[i]]
+                # msg
+                if xstep * ystep > 1:
+                    print(f'apply {xstep} x {ystep} multilooking/downsampling via {mli_method} ...')
+
+                prog_bar = ptime.progressBar(maxValue=numIfgram)
+                for i, pair in enumerate(self.pairs):
+                    prog_bar.update(i+1, suffix=f'{pair[0]}_{pair[1]}')
+
+                    # read and/or resize
+                    ifgramObj = self.pairsDict[pair]
                     data = ifgramObj.read(dsName,
                                           box=box,
                                           xstep=xstep,
-                                          ystep=ystep)[0]
+                                          ystep=ystep,
+                                          mli_method=mli_method,
+                                          resize2shape=resize2shape)[0]
 
-                    # special handling to offset covariance file
+                    # special handling for offset covariance file
                     if dsName.endswith('OffsetStd'):
                         # set no-data value to np.nan
                         data[data == 99.] = np.nan
@@ -169,16 +214,15 @@ class ifgramStackDict:
 
                     # write
                     ds[i, :, :] = data
-                    prog_bar.update(i+1, suffix='{}_{}'.format(self.pairs[i][0],
-                                                               self.pairs[i][1]))
-                prog_bar.close()
+
                 ds.attrs['MODIFICATION_TIME'] = str(time.time())
+                prog_bar.close()
 
             ###############################
             # 2D dataset containing reference and secondary dates of all pairs
             dsName = 'date'
             dsDataType = np.string_
-            dsShape = (self.numIfgram, 2)
+            dsShape = (numIfgram, 2)
             print('create dataset /{d:<{w}} of {t:<25} in size of {s}'.format(d=dsName,
                                                                               w=maxDigit,
                                                                               t=str(dsDataType),
@@ -190,14 +234,14 @@ class ifgramStackDict:
             # 1D dataset containing perpendicular baseline of all pairs
             dsName = 'bperp'
             dsDataType = np.float32
-            dsShape = (self.numIfgram,)
+            dsShape = (numIfgram,)
             print('create dataset /{d:<{w}} of {t:<25} in size of {s}'.format(d=dsName,
                                                                               w=maxDigit,
                                                                               t=str(dsDataType),
                                                                               s=dsShape))
             # get bperp
-            data = np.zeros(self.numIfgram, dtype=dsDataType)
-            for i in range(self.numIfgram):
+            data = np.zeros(numIfgram, dtype=dsDataType)
+            for i in range(numIfgram):
                 ifgramObj = self.pairsDict[self.pairs[i]]
                 data[i] = ifgramObj.get_perp_baseline(family=self.dsName0)
             # write
@@ -207,7 +251,7 @@ class ifgramStackDict:
             # 1D dataset containing bool value of dropping the interferograms or not
             dsName = 'dropIfgram'
             dsDataType = np.bool_
-            dsShape = (self.numIfgram,)
+            dsShape = (numIfgram,)
             print('create dataset /{d:<{w}} of {t:<25} in size of {s}'.format(d=dsName,
                                                                               w=maxDigit,
                                                                               t=str(dsDataType),
@@ -217,30 +261,42 @@ class ifgramStackDict:
 
             ###############################
             # Attributes
-            self.get_metadata()
+            # read metadata from original data file w/o resize/subset/multilook
+            meta = self.get_metadata()
             if extra_metadata:
-                self.metadata.update(extra_metadata)
+                meta.update(extra_metadata)
                 print('add extra metadata: {}'.format(extra_metadata))
 
+            # update metadata due to resize
+            # for low resolution ionosphere from isce2/topsStack
+            if resize2shape:
+                print('update metadata due to resize')
+                meta = attr.update_attribute4resize(meta, resize2shape)
+
             # update metadata due to subset
-            self.metadata = attr.update_attribute4subset(self.metadata, box)
+            if box:
+                print('update metadata due to subset')
+                meta = attr.update_attribute4subset(meta, box)
+
             # update metadata due to multilook
             if xstep * ystep > 1:
-                self.metadata = attr.update_attribute4multilook(self.metadata, ystep, xstep)
+                print('update metadata due to multilook')
+                meta = attr.update_attribute4multilook(meta, ystep, xstep)
 
-            self.metadata['FILE_TYPE'] = self.name
-            for key, value in self.metadata.items():
+            # write metadata to HDF5 file at the root level
+            meta['FILE_TYPE'] = self.name
+            for key, value in meta.items():
                 f.attrs[key] = value
 
-        print('Finished writing to {}'.format(self.outputFile))
-        return self.outputFile
+        print('Finished writing to {}'.format(outputFile))
+        return outputFile
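
The three metadata updates above mirror the data operations and run in the same order: resize (native grid to geometry grid), then subset, then multilook. A minimal sketch of the bookkeeping with made-up sizes, using simplified stand-ins that only track LENGTH/WIDTH; the real attr.update_attribute4* helpers also rescale pixel sizes and related attributes:

import numpy as np  # noqa: F401  (kept for symmetry with the surrounding code)

# simplified stand-ins for attr.update_attribute4{resize,subset,multilook}
meta = {'LENGTH': '450', 'WIDTH': '300'}            # native ionosphere grid

resize2shape = (4500, 3000)                         # geometry grid (hypothetical)
meta['LENGTH'], meta['WIDTH'] = str(resize2shape[0]), str(resize2shape[1])

box = (20, 40, 220, 240)                            # (x0, y0, x1, y1)
meta['LENGTH'], meta['WIDTH'] = str(box[3] - box[1]), str(box[2] - box[0])

xstep = ystep = 2                                   # multilook
meta['LENGTH'] = str(int(meta['LENGTH']) // ystep)
meta['WIDTH']  = str(int(meta['WIDTH'])  // xstep)
print(meta)                                         # {'LENGTH': '100', 'WIDTH': '100'}
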
 
 
 ########################################################################################
 class ifgramDict:
     """
     Ifgram object for a single InSAR pair of interferogram. It includes dataset name (family) of:
-        'unwrapPhase','coherence','connectComponent','wrapPhase','ionoPhase','rangeOffset','azimuthOffset', etc.
+        'unwrapPhase','coherence','connectComponent','wrapPhase','rangeOffset','azimuthOffset', etc.
 
     Example:
         from mintpy.objects.insarobj import ifgramDict
@@ -248,7 +304,6 @@ class ifgramDict:
                        'coherence'       :'$PROJECT_DIR/merged/interferograms/20151220_20160206/filt_fine.cor',
                        'connectComponent':'$PROJECT_DIR/merged/interferograms/20151220_20160206/filt_fine.unw.conncomp',
                        'wrapPhase'       :'$PROJECT_DIR/merged/interferograms/20151220_20160206/filt_fine.int',
-                       'ionoPhase'       :'$PROJECT_DIR/merged/ionosphere/20151220_20160206/iono.bil.unwCor.filt',
                        'magnitude'       :'$PROJECT_DIR/merged/interferograms/20151220_20160206/filt_fine.unw',
                        ...
                       }
@@ -268,21 +323,71 @@ class ifgramDict:
             for key, value in metadata.items():
                 setattr(self, key, value)
 
-    def read(self, family, box=None, xstep=1, ystep=1):
+    def read(self, family, box=None, xstep=1, ystep=1, mli_method='nearest', resize2shape=None):
+        """Read data for the given dataset name.
+
+        Parameters: self         - ifgramDict object
+                    family       - str, dataset name
+                    box          - tuple of 4 int, in (x0, y0, x1, y1) with respect to the full resolution
+                    x/ystep      - int, number of pixels to skip, with respect to the full resolution
+                    mli_method   - str, multilook method: nearest, mean or median
+                    resize2shape - tuple of 2 int, resize the native matrix to the given shape;
+                                   set to None to disable resizing
+        Returns:    data         - 2D np.ndarray
+                    meta         - dict, metadata
+        """
         self.file = self.datasetDict[family]
-        data, metadata = readfile.read(self.file,
-                                       datasetName=family,
-                                       box=box,
-                                       xstep=xstep,
-                                       ystep=ystep)
-        return data, metadata
+        box2read = None if resize2shape else box
+
+        # 1. read input file
+        data, meta = readfile.read(self.file,
+                                   datasetName=family,
+                                   box=box2read,
+                                   xstep=1,
+                                   ystep=1)
+
+        # 2. resize
+        if resize2shape:
+            # link: https://scikit-image.org/docs/dev/api/skimage.transform.html#skimage.transform.resize
+            data = resize(data,
+                          output_shape=resize2shape,
+                          order=1,
+                          mode='constant',
+                          anti_aliasing=True,
+                          preserve_range=True)
+
+            # 3. subset by box
+            if box:
+                data = data[box[1]:box[3],
+                            box[0]:box[2]]
+
+        # 4. multilook
+        if xstep * ystep > 1:
+            if mli_method == 'nearest':
+                # multilook - nearest resampling
+                # output data size
+                xsize = int(data.shape[1] / xstep)
+                ysize = int(data.shape[0] / ystep)
+                # sampling
+                data = data[int(ystep/2)::ystep,
+                            int(xstep/2)::xstep]
+                data = data[:ysize, :xsize]
+
+            else:
+                # multilook - mean or median resampling
+                data = multilook_data(data,
+                                      lks_y=ystep,
+                                      lks_x=xstep,
+                                      method=mli_method)
+
+        return data, meta
 
     def get_size(self, family=ifgramDatasetNames[0]):
         self.file = self.datasetDict[family]
         metadata = readfile.read_attribute(self.file)
-        self.length = int(metadata['LENGTH'])
-        self.width = int(metadata['WIDTH'])
-        return self.length, self.width
+        length = int(metadata['LENGTH'])
+        width = int(metadata['WIDTH'])
+        return length, width
 
     def get_perp_baseline(self, family=ifgramDatasetNames[0]):
         self.file = self.datasetDict[family]
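
The reworked ifgramDict.read() above fixes the order of operations: read the full native grid, optionally resize to the full-resolution geometry shape, then subset by box, then multilook. A self-contained sketch of that order on synthetic data with hypothetical sizes (not the MintPy API):

import numpy as np
from skimage.transform import resize

data = np.random.rand(45, 30).astype(np.float32)  # native low-res grid
resize2shape = (450, 300)                         # full-resolution geometry shape
box = (20, 40, 220, 240)                          # (x0, y0, x1, y1) at full resolution
xstep = ystep = 2

# 2. resize first, so that box and x/ystep (defined at full resolution) stay valid
data = resize(data, output_shape=resize2shape, order=1,
              mode='constant', anti_aliasing=True, preserve_range=True)

# 3. subset by box
data = data[box[1]:box[3], box[0]:box[2]]

# 4. multilook via nearest: keep the center sample of each ystep x xstep window
ysize, xsize = data.shape[0] // ystep, data.shape[1] // xstep
data = data[int(ystep/2)::ystep, int(xstep/2)::xstep][:ysize, :xsize]
print(data.shape)                                 # (100, 100)
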
@@ -298,23 +403,6 @@ class ifgramDict:
         self.length = int(self.metadata['LENGTH'])
         self.width = int(self.metadata['WIDTH'])
 
-        # if self.processor is None:
-        #    ext = self.file.split('.')[-1]
-        #    if 'PROCESSOR' in self.metadata.keys():
-        #        self.processor = self.metadata['PROCESSOR']
-        #    elif os.path.exists(self.file+'.xml'):
-        #        self.processor = 'isce'
-        #    elif os.path.exists(self.file+'.rsc'):
-        #        self.processor = 'roipac'
-        #    elif os.path.exists(self.file+'.par'):
-        #        self.processor = 'gamma'
-        #    elif ext == 'grd':
-        #        self.processor = 'gmtsar'
-        #    #what for DORIS/SNAP
-        #    else:
-        #        self.processor = 'isce'
-        #self.metadata['PROCESSOR'] = self.processor
-
         if self.track:
             self.metadata['TRACK'] = self.track
 
@@ -518,22 +606,6 @@ class geometryDict:
         if 'UNIT' in self.metadata.keys():
             self.metadata.pop('UNIT')
 
-        # if self.processor is None:
-        #    ext = self.file.split('.')[-1]
-        #    if 'PROCESSOR' in self.metadata.keys():
-        #        self.processor = self.metadata['PROCESSOR']
-        #    elif os.path.exists(self.file+'.xml'):
-        #        self.processor = 'isce'
-        #    elif os.path.exists(self.file+'.rsc'):
-        #        self.processor = 'roipac'
-        #    elif os.path.exists(self.file+'.par'):
-        #        self.processor = 'gamma'
-        #    elif ext == 'grd':
-        #        self.processor = 'gmtsar'
-        #    #what for DORIS/SNAP
-        #    else:
-        #        self.processor = 'isce'
-        #self.metadata['PROCESSOR'] = self.processor
         return self.metadata
 
     def write2hdf5(self, outputFile='geometryRadar.h5', access_mode='w', box=None, xstep=1, ystep=1,
@@ -541,10 +613,17 @@ class geometryDict:
         """Save/write to HDF5 file with structure defined in:
             https://mintpy.readthedocs.io/en/latest/api/data_structure/#geometry
         """
+        print('-'*50)
         if len(self.datasetDict) == 0:
             print('No dataset file path in the object, skip HDF5 file writing.')
             return None
 
+        # output directory
+        output_dir = os.path.dirname(outputFile)
+        if not os.path.isdir(output_dir):
+            os.makedirs(output_dir)
+            print(f'create directory: {output_dir}')
+
         maxDigit = max([len(i) for i in geometryDatasetNames])
         length, width = self.get_size(box=box, xstep=xstep, ystep=ystep)
 
@@ -575,15 +654,19 @@ class geometryDict:
 
                     print('read coarse grid baseline files and linearly interpolate into full resolution ...')
                     prog_bar = ptime.progressBar(maxValue=self.numDate)
-                    for i in range(self.numDate):
-                        fname = self.datasetDict[dsName][self.dateList[i]]
+                    for i, date_str in enumerate(self.dateList):
+                        prog_bar.update(i+1, suffix=date_str)
+
+                        # read and resize
+                        fname = self.datasetDict[dsName][date_str]
                         data = read_isce_bperp_file(fname=fname,
                                                     full_shape=self.get_size(),
                                                     box=box,
                                                     xstep=xstep,
                                                     ystep=ystep)
+                        # write
                         ds[i, :, :] = data
-                        prog_bar.update(i+1, suffix=self.dateList[i])
+
                     prog_bar.close()
 
                     # Write 1D dataset date accompanying the 3D bperp
@@ -703,12 +786,12 @@ class geometryDict:
 
 ########################################################################################
 def read_isce_bperp_file(fname, full_shape, box=None, xstep=1, ystep=1):
-    """Read ISCE coarse grid perpendicular baseline file, and project it to full size
-    Parameters: self : geometry object,
-                fname : str, bperp file name
-                outShape : tuple of 2int, shape of file in full resolution
-                box : tuple of 4 int, subset range in (x0, y0, x1, y1) with respect to full resolution
-    Returns:    data : 2D array of float32
+    """Read ISCE-2 coarse grid perpendicular baseline file, and project it to full size
+    Parameters: fname      - str, bperp file name
+                full_shape - tuple of 2 int, shape of file in full resolution
+                box        - tuple of 4 int, subset range in (x0, y0, x1, y1) with respect to full resolution
+                x/ystep    - int, number of pixels to pick/multilook for each output pixel
+    Returns:    data       - 2D array of float32
     Example:    fname = '$PROJECT_DIR/merged/baselines/20160418/bperp'
                data = read_isce_bperp_file(fname, (3600,2200), box=(200,400,1000,1000))
     """
@@ -792,8 +875,8 @@ class platformTrack:
         for pair in pairs:
             length.append(self.pairs[pair].length)
             width.append(self.pairs[pair].width)
-        self.length = median(length)
-        self.width = median(width)
+        self.length = np.median(length)
+        self.width = np.median(width)
 
     def getDatasetNames(self):
         # extract the name of the datasets which are actually the keys of
diff -pruN 1.3.3-2/mintpy/objects/stack.py 1.4.0-1/mintpy/objects/stack.py
--- 1.3.3-2/mintpy/objects/stack.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/objects/stack.py	2022-08-04 20:01:49.000000000 +0000
@@ -57,6 +57,7 @@ geometryDatasetNames = [
 ]
 
 ifgramDatasetNames = [
+    # interferogram
     'unwrapPhase',
     'unwrapPhase_bridging_phaseClosure',
     'unwrapPhase_bridging',
@@ -64,8 +65,8 @@ ifgramDatasetNames = [
     'coherence',
     'connectComponent',
     'wrapPhase',
-    'ionoPhase',
     'magnitude',
+    # offset
     'azimuthOffset',
     'azimuthOffsetStd',
     'rangeOffset',
@@ -80,7 +81,6 @@ datasetUnitDict = {
     'coherence'        : '1',
     'connectComponent' : '1',
     'wrapPhase'        : 'radian',
-    'ionoPhase'        : 'radian',
     'magnitude'        : '1',
 
     # offset
@@ -966,10 +966,158 @@ class ifgramStack:
             num_conn[i] = np.where(Ai == 1)[0] - np.where(Ai == -1)[0]
         return np.max(num_conn)
 
-    # Functions for Unwrap error correction
+
+    def split2boxes(self, max_memory=4, dim0_size=None, print_msg=True):
+        """Split into chunks in rows to reduce memory usage.
+
+        Parameters: max_memory - float, max memory to use in GB
+                    dim0_size  - the 1st dimension size of all used datasets
+                                 e.g., dim0_size = num_pair * 2 + num_date
+                    print_msg  - bool
+        Returns:    box_list   - list of tuple of 4 int
+                    num_box    - int, number of boxes
+        """
+        self.open(print_msg=False)
+        length = self.length
+        width = self.width
+
+        # dimension in time: phase/offset, weight, timeseries, etc.
+        if not dim0_size:
+            # for time series estimation
+            dim0_size = self.numIfgram * 2 + self.numDate
+        ds_size = dim0_size * length * width * 4
+
+        num_box = int(np.ceil(ds_size * 1.5 / (max_memory * 1024**3)))
+        y_step = int(np.ceil((length / num_box) / 10) * 10)
+        num_box = int(np.ceil(length / y_step))
+        if print_msg and num_box > 1:
+            print('maximum memory size: %.1E GB' % max_memory)
+            print('split %d lines into %d patches for processing' % (length, num_box))
+            print('    with each patch up to %d lines' % y_step)
+
+        # y_step / num_box --> box_list
+        box_list = []
+        for i in range(num_box):
+            y0 = i * y_step
+            y1 = min([length, y0 + y_step])
+            box = (0, y0, width, y1)
+            box_list.append(box)
+
+        return box_list, num_box
+
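
split2boxes() derives the chunk height from the memory budget: estimate the float32 volume of all datasets as dim0_size * length * width * 4 bytes, inflate by 1.5x for intermediates, and round the per-chunk line count up to a multiple of 10. A standalone sketch with assumed dimensions:

import numpy as np

length, width = 4500, 3000
num_pair, num_date = 100, 40        # hypothetical stack dimensions
max_memory = 4                      # [GB]

dim0_size = num_pair * 2 + num_date             # phase + weight + timeseries
ds_size = dim0_size * length * width * 4        # float32 bytes

num_box = int(np.ceil(ds_size * 1.5 / (max_memory * 1024**3)))
y_step = int(np.ceil((length / num_box) / 10) * 10)   # round up to multiple of 10
num_box = int(np.ceil(length / y_step))

box_list = [(0, i * y_step, width, min(length, (i + 1) * y_step))
            for i in range(num_box)]
print(num_box, box_list[0], box_list[-1])
# 5 (0, 0, 3000, 900) (0, 3600, 3000, 4500)
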
+
+    # Functions for closure phase bias
+    def get_closure_phase_index(self, conn, dropIfgram=True):
+        """Get the indices of interferograms that forms the given connection level closure loop.
+
+        Parameters: conn       - int, connection level
+                    dropIfgram - bool, exclude the dropped interferograms.
+        Returns:    cp_idx     - 2D np.ndarray in int16 in size of (num_cp, conn + 1)
+                                 Each row for the indices of interferograms for one closure loop.
+                                 num_cp <= num_date - conn
+        """
+        date12_list = self.get_date12_list(dropIfgram=False)
+        date_list = self.get_date_list(dropIfgram=dropIfgram)
+        num_date = len(date_list)
+
+        # get the closure index
+        cp_idx = []
+        for i in range(num_date - conn):
+            # compose the connection-n pairs
+            cp_date12_list = []
+            for j in range(conn):
+                cp_date12_list.append('{}_{}'.format(date_list[i+j], date_list[i+j+1]))
+            cp_date12_list.append('{}_{}'.format(date_list[i], date_list[i+conn]))
+
+            # add to cp_idx, ONLY IF all pairs exist for this closure loop
+            if all(x in date12_list for x in cp_date12_list):
+                cp_idx.append([date12_list.index(x) for x in cp_date12_list])
+
+        # list(list) to 2D array
+        cp_idx = np.array(cp_idx, dtype=np.int16)
+        cp_idx = np.unique(cp_idx, axis=0)
+
+        return cp_idx
+
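
For connection level conn, each candidate loop consists of conn consecutive sequential pairs plus the single long pair that closes it, and is kept only if every member exists in the stack. A tiny illustration with hypothetical dates, where one loop is complete and the other is missing its closing pair:

date_list = ['20200101', '20200113', '20200125', '20200206']
date12_list = ['20200101_20200113', '20200113_20200125',
               '20200125_20200206', '20200101_20200125']

conn = 2
for i in range(len(date_list) - conn):
    loop = [f'{date_list[i+j]}_{date_list[i+j+1]}' for j in range(conn)]
    loop.append(f'{date_list[i]}_{date_list[i+conn]}')
    if all(x in date12_list for x in loop):
        print([date12_list.index(x) for x in loop])
# [0, 1, 3]  -> the two sequential pairs (plus) close with 20200101_20200125 (minus);
#              the second candidate loop is dropped: 20200113_20200206 is missing
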
+
+    def get_sequential_closure_phase(self, box, conn, post_proc=None):
+        """Computes wrapped sequential closure phases for a given conneciton level.
+
+        Reference: Equation (21) in Zheng et al. (2022, TGRS)
+        For conn = 5, seq_closure_phase = p12 + p23 + p34 + p45 + p56 - p16.
+
+        Parameters: box       - tuple of 4 int, bounding box in (x0, y0, x1, y1)
+                    conn      - int, connection level of the closure phase
+                    post_proc - str, post processing of the closure phase:
+                                None - 3D array in float32, seq closure phase
+                                sum  - 2D array in complex64, sum  in time of the complex seq closure phase
+                                mean - 2D array in complex64, mean in time of the complex seq closure phase
+        Returns:    cp_w      - 3D np.ndarray in float32 in size of (num_cp, box_len, box_wid)
+                                wrapped sequential closure phase for the given connection level.
+                    sum_cp    - None or 2D np.ndarray in complex64 in size of (box_len, box_wid)
+                                wrapped average seq closure phase for the given connection level,
+                                controlled by post_proc.
+                    num_cp    - int, number of seq closure phases for the given connection level.
+        """
+        # basic info
+        num_date = len(self.get_date_list(dropIfgram=True))
+        box_wid = box[2] - box[0]
+        box_len = box[3] - box[1]
+
+        ## get the closure index
+        cp_idx = self.get_closure_phase_index(conn=conn, dropIfgram=True)
+        num_cp = cp_idx.shape[0]
+        print(f'number of closure measurements expected: {num_date - conn}')
+        print(f'number of closure measurements found   : {num_cp}')
+
+        if not post_proc:
+            if num_cp < num_date - conn:
+                msg = f'num_cp ({num_cp}) < num_date - conn ({num_date - conn})'
+                msg += ' --> some interferograms are missing!'
+                raise Exception(msg)
+        else:
+            if num_cp < 1:
+                raise Exception(f"No triplets found at connection level: {conn}!")
+
+        ## read data
+        phase = self.read(box=box, print_msg=False)
+        ref_phase = self.get_reference_phase(dropIfgram=False)
+        for i in range(phase.shape[0]):
+            mask = phase[i] != 0.
+            phase[i][mask] -= ref_phase[i]
+
+        ## calculate the 3D complex seq closure phase
+        cp_w = np.zeros((num_cp, box_len, box_wid), dtype=np.complex64)
+        for i in range(num_cp):
+
+            # calculate closure phase - cp0_w
+            idx_plus, idx_minor = cp_idx[i, :-1], cp_idx[i, -1]
+            cp0_w = np.sum(phase[idx_plus], axis=0) - phase[idx_minor]
+
+            # get the wrapped closure phase
+            cp_w[i] = np.exp(1j * cp0_w)
+
+        ## post-processing
+        if not post_proc:
+            sum_cp = None
+
+        elif post_proc == 'sum':
+            sum_cp = np.sum(cp_w, axis=0)
+
+        elif post_proc == 'mean':
+            sum_cp = np.mean(cp_w, axis=0)
+
+        else:
+            raise ValueError(f'un-recognized post_proc={post_proc}! Available choices: sum, mean.')
+
+        return np.angle(cp_w), sum_cp, num_cp
+
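
Per Equation (21), a connection-conn closure phase is the sum of the conn sequential phases minus the long-span phase, and the wrapping happens in the complex domain so that 2-pi ambiguities drop out. A one-pixel numeric sketch at conn = 2, with made-up phase values:

import numpy as np

p12, p23, p13 = 1.0, 2.5, 3.1   # unwrapped phases [radian], made-up values
cp0_w = p12 + p23 - p13         # sequential pairs (plus) minus the long pair (minus)
cp_w = np.exp(1j * cp0_w)       # wrapped in the complex domain, as in cp_w above
print(np.angle(cp_w))           # ~0.4
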
+
+    # Functions for unwrapping error correction
     @staticmethod
     def get_design_matrix4triplet(date12_list):
         """Generate the design matrix of ifgram triangle for unwrap error correction using phase closure
+
         Parameters: date12_list : list of string in YYYYMMDD_YYYYMMDD format
         Returns:    C : 2D np.array in size of (num_tri, num_ifgram) consisting 0, 1, -1
                         for 3 SAR acquisition in t1, t2 and t3 in time order,
@@ -1021,10 +1169,12 @@ class ifgramStack:
 
         return np.stack(C_list).astype(np.float32)
 
-    # Functions for Network Inversion
+
+    # Functions for network inversion / time series estimation
     @staticmethod
     def get_design_matrix4timeseries(date12_list, refDate=None):
         """Return design matrix of the input ifgramStack for timeseries estimation
+
         Parameters: date12_list - list of string in YYYYMMDD_YYYYMMDD format
                     refDate     - str, date in YYYYMMDD format
                                   set to None for the 1st date
diff -pruN 1.3.3-2/mintpy/plot_coherence_matrix.py 1.4.0-1/mintpy/plot_coherence_matrix.py
--- 1.3.3-2/mintpy/plot_coherence_matrix.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/plot_coherence_matrix.py	2022-08-04 20:01:49.000000000 +0000
@@ -8,11 +8,12 @@
 
 import os
 import sys
-import argparse
 import numpy as np
 import matplotlib.pyplot as plt
+
 from mintpy.objects import ifgramStack
 from mintpy.utils import readfile, plot as pp, utils as ut
+from mintpy.utils.arg_utils import create_argument_parser
 from mintpy import view
 
 
@@ -32,10 +33,12 @@ EXAMPLE = """example:
   plot_coherence_matrix.py inputs/ifgramStack.h5 --cmap-vlist 0 0.4 1
 """
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Plot the coherence matrix of one pixel (interactive)',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Plot the coherence matrix of one pixel (interactive)'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('ifgram_file', help='interferogram stack file')
     parser.add_argument('--yx', type=int, metavar=('Y', 'X'), nargs=2, 
@@ -269,7 +272,9 @@ class coherenceMatrixViewer():
             msg += 'temporal coherence: {:.2f}'.format(tcoh)
         vprint(msg)
 
-        self.fig.canvas.draw()
+        # update figure
+        self.fig.canvas.draw_idle()
+        self.fig.canvas.flush_events()
         return
 
     def update_coherence_matrix(self, event):
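
The switch from draw() to draw_idle() plus flush_events() keeps the interactive viewer responsive: the redraw is scheduled for the next idle moment rather than forced inside the callback, and pending GUI events get processed. A bare-bones sketch (flush_events() is a no-op on non-interactive backends such as Agg):

import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.plot([0, 1], [0, 1])
fig.canvas.draw_idle()      # request a redraw at the next idle moment
fig.canvas.flush_events()   # let pending GUI events run in interactive backends
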
diff -pruN 1.3.3-2/mintpy/plot_network.py 1.4.0-1/mintpy/plot_network.py
--- 1.3.3-2/mintpy/plot_network.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/plot_network.py	2022-08-04 20:01:49.000000000 +0000
@@ -8,11 +8,13 @@
 
 import os
 import sys
-import argparse
 import numpy as np
 import matplotlib.pyplot as plt
+
 from mintpy.objects import ifgramStack
 from mintpy.utils import readfile, utils as ut, plot as pp
+from mintpy.utils.arg_utils import create_argument_parser
+
 # suppress UserWarning from matplotlib
 import warnings
 warnings.filterwarnings("ignore", category=UserWarning, module="matplotlib")
@@ -31,6 +33,12 @@ DATE12_LIST = """
 20070824_20071009
 """
 
+TEMPLATE = """
+mintpy.network.maskFile  = auto  #[file name, no], auto for waterMask.h5 or no for all pixels
+mintpy.network.aoiYX     = auto  #[y0:y1,x0:x1 / no], auto for no, area of interest for coherence calculation
+mintpy.network.aoiLALO   = auto  #[lat0:lat1,lon0:lon1 / no], auto for no - use the whole area
+"""
+
 EXAMPLE = """example:
   plot_network.py inputs/ifgramStack.h5
   plot_network.py inputs/ifgramStack.h5 -t smallbaselineApp.cfg --nodisplay   #Save figures to files without display
@@ -43,17 +51,13 @@ EXAMPLE = """example:
   plot_network.py inputs/ifgramStack.h5 -d offsetSNR -v 0 20 --cmap-vlist 0 0.2 1
 """
 
-TEMPLATE = """
-mintpy.network.maskFile  = auto  #[file name, no], auto for waterMask.h5 or no for all pixels
-mintpy.network.aoiYX     = auto  #[y0:y1,x0:x1 / no], auto for no, area of interest for coherence calculation
-mintpy.network.aoiLALO   = auto  #[lat0:lat1,lon0:lon1 / no], auto for no - use the whole area
-"""
-
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Display Network of Interferograms',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Display Network of Interferograms'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', help='file with network information, ifgramStack.h5 or coherenceSpatialAvg.txt')
     parser.add_argument('--show-kept', dest='disp_drop', action='store_false',
@@ -182,7 +186,10 @@ def read_network_info(inps):
             inps.cohList = stack_obj.tbaseIfgram.tolist()
 
         else:
-            raise ValueError(f'{inps.dsetName} NOT found in file: {inps.file}!')
+            inps.cohList = None
+            msg = f'{inps.dsetName} NOT found in file: {inps.file}! '
+            msg += 'Disable the color coding and continue'
+            print(msg)
 
     elif ext == '.txt':
         inps.date12List = np.loadtxt(inps.file, dtype=bytes).astype(str)[:,0].tolist()
@@ -265,20 +272,28 @@ def main(iargs=None):
 
     # Plot settings
     inps = check_colormap(inps)
+    ext = '.pdf'
+    if os.path.basename(inps.file).startswith('ion'):
+        ext = f'_ion{ext}'
+    kwargs = dict(bbox_inches='tight', transparent=True, dpi=inps.fig_dpi)
+
     if inps.dsetName == 'coherence':
-        fig_names = [i+'.pdf' for i in ['pbaseHistory', 'coherenceHistory', 'coherenceMatrix', 'network']]
+        fig_names = [i+ext for i in ['pbaseHistory', 'coherenceHistory', 'coherenceMatrix', 'network']]
         inps.ds_name = 'Coherence'
         inps.cbar_label = 'Average Spatial Coherence'
+
     elif inps.dsetName == 'offsetSNR':
-        fig_names = [i+'.pdf' for i in ['pbaseHistory', 'SNRHistory', 'SNRMatrix', 'network']]
+        fig_names = [i+ext for i in ['pbaseHistory', 'SNRHistory', 'SNRMatrix', 'network']]
         inps.ds_name = 'SNR'
         inps.cbar_label = 'Average Spatial SNR'
+
     elif inps.dsetName == 'tbase':
-        fig_names = [i+'.pdf' for i in ['pbaseHistory', 'tbaseHistory', 'tbaseMatrix', 'network']]
+        fig_names = [i+ext for i in ['pbaseHistory', 'tbaseHistory', 'tbaseMatrix', 'network']]
         inps.ds_name = 'Temporal Baseline'
         inps.cbar_label = 'Temporal Baseline [day]'
+
     elif inps.dsetName == 'pbase':
-        fig_names = [i+'.pdf' for i in ['pbaseHistory', 'pbaseRangeHistory', 'pbaseMatrix', 'network']]
+        fig_names = [i+ext for i in ['pbaseHistory', 'pbaseRangeHistory', 'pbaseMatrix', 'network']]
         inps.ds_name = 'Perp Baseline'
         inps.cbar_label = 'Perp Baseline [m]'
 
@@ -290,7 +305,7 @@ def main(iargs=None):
                                     vars(inps),
                                     inps.dateList_drop)
     if inps.save_fig:
-        fig.savefig(fig_names[0], bbox_inches='tight', transparent=True, dpi=inps.fig_dpi)
+        fig.savefig(fig_names[0], **kwargs)
         print('save figure to {}'.format(fig_names[0]))
 
     if inps.cohList is not None:
@@ -301,7 +316,7 @@ def main(iargs=None):
                                        inps.cohList,
                                        p_dict=vars(inps))
         if inps.save_fig:
-            fig.savefig(fig_names[1], bbox_inches='tight', transparent=True, dpi=inps.fig_dpi)
+            fig.savefig(fig_names[1], **kwargs)
             print('save figure to {}'.format(fig_names[1]))
 
         # Fig 3 - Coherence Matrix
@@ -312,7 +327,7 @@ def main(iargs=None):
                                       inps.date12List_drop,
                                       p_dict=vars(inps))[0]
         if inps.save_fig:
-            fig.savefig(fig_names[2], bbox_inches='tight', transparent=True, dpi=inps.fig_dpi)
+            fig.savefig(fig_names[2], **kwargs)
             print('save figure to {}'.format(fig_names[2]))
 
     # Fig 4 - Interferogram Network
@@ -324,7 +339,7 @@ def main(iargs=None):
                          vars(inps),
                          inps.date12List_drop)
     if inps.save_fig:
-        fig.savefig(fig_names[3], bbox_inches='tight', transparent=True, dpi=inps.fig_dpi)
+        fig.savefig(fig_names[3], **kwargs)
         print('save figure to {}'.format(fig_names[3]))
 
     if inps.disp_fig:
diff -pruN 1.3.3-2/mintpy/plot_transection.py 1.4.0-1/mintpy/plot_transection.py
--- 1.3.3-2/mintpy/plot_transection.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/plot_transection.py	2022-08-04 20:01:49.000000000 +0000
@@ -6,13 +6,12 @@
 ############################################################
 
 
-import os
 import sys
 import argparse
 import numpy as np
 from matplotlib import pyplot as plt, ticker
 
-from mintpy.utils import arg_group, readfile, utils as ut, plot as pp
+from mintpy.utils import arg_utils, readfile, utils as ut, plot as pp
 from mintpy import view
 
 
@@ -29,17 +28,21 @@ EXAMPLE = """example:
   plot_transection.py velocity.h5 --start-lalo 30.125 129.988 --end-lalo 30.250 130.116
   plot_transection.py velocity.h5 --line-file  transect_lonlat.xy --dem gsi10m.dem
 
-  # Multiple files
+  # multiple files
   plot_transection.py AlosA*/velocity.h5 AlosD*/velocity.h5 --off 2
   plot_transection.py Kirishima2017*.h5 Kirishima2008*.h5 --off 0 0 10 10
   plot_transection.py Kirishima2017*.h5 Kirishima2008*.h5 --off 0 0 10 10 --lalo0 31.947 130.843 --lalo1 31.947 130.860
+
+  # interactive plot: click two points to draw a profile
 """
 
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Generate transect/profile along a line',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Generate transect/profile along a line'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = arg_utils.create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', nargs='+',
                         help='input file to show transection')
@@ -71,8 +74,8 @@ def create_parser():
     lines.add_argument('--ms', '--markersize', dest='marker_size', type=float, default=2.0,
                        help='Point marker size. Default: 2.0')
 
-    parser = arg_group.add_figure_argument(parser)
-    parser = arg_group.add_save_argument(parser)
+    parser = arg_utils.add_figure_argument(parser)
+    parser = arg_utils.add_save_argument(parser)
     return parser
 
 
@@ -163,8 +166,8 @@ def get_view_cmd(iargs):
                         help='Display limits for matrix plotting.')
     parser.add_argument('--noverbose', dest='print_msg', action='store_false',
                         help='Disable the verbose message printing.')
-    parser = arg_group.add_figure_argument(parser)
-    parser = arg_group.add_save_argument(parser)
+    parser = arg_utils.add_figure_argument(parser)
+    parser = arg_utils.add_save_argument(parser)
 
     # get args that are applicable to view.py
     unique_args = parser.parse_known_args(iargs)[1]
@@ -203,7 +206,7 @@ class transectionViewer():
         self.ax_txn = None
 
         self.img = None
-        self.line = None
+        self.line_ann = None
         self.pts_idx = 0
         return
 
@@ -273,30 +276,68 @@ class transectionViewer():
             plt.show()
         return
 
+
+    ##---------- event function
+    def select_point(self, event):
+        """Event handling function for points selection"""
+        if event.inaxes == self.ax_img:
+            # get row/col number
+            if 'Y_FIRST' in self.atr.keys():
+                lalo = [event.ydata, event.xdata]
+                yx = self.coord.geo2radar(event.ydata, event.xdata, print_msg=False)[0:2]
+            else:
+                lalo = None
+                yx = [int(event.ydata+0.5), int(event.xdata+0.5)]
+
+            # insert selected points into self.start/end_yx member
+            # print('pts_idx: {}'.format(self.pts_idx)) #for debug
+            if self.pts_idx == 0:
+                self.start_lalo = lalo
+                self.start_yx = yx
+            else:
+                self.end_lalo = lalo
+                self.end_yx = yx
+
+            # update transection for every two clicks
+            self.pts_idx += 1
+            if self.pts_idx >= 2:
+                self.pts_idx = 0
+                self.draw_line(self.start_yx, self.end_yx)
+                self.draw_transection(self.start_yx, self.end_yx, self.start_lalo, self.end_lalo)
+        return
+
+
+    ##---------- plot functions
     def draw_line(self, start_yx, end_yx):
         """Draw the transect line in the map axes"""
         # erase existing line
-        if self.line is not None:
-            self.ax_img.lines.remove(self.line[0])
+        if self.line_ann is not None:
+            self.line_ann.remove()
 
         # convert coordinates accordingly
         if 'Y_FIRST' in self.atr.keys():
-            ys = self.coord.yx2lalo([self.start_yx[0], self.end_yx[0]], coord_type='y')
-            xs = self.coord.yx2lalo([self.start_yx[1], self.end_yx[1]], coord_type='x')
+            ys = self.coord.yx2lalo([start_yx[0], end_yx[0]], coord_type='y')
+            xs = self.coord.yx2lalo([start_yx[1], end_yx[1]], coord_type='x')
         else:
             ys = [start_yx[0], end_yx[0]]
             xs = [start_yx[1], end_yx[1]]
 
         # plot
-        self.line = self.ax_img.plot(xs, ys, 'k--')
-        self.fig.canvas.draw()
+        line = self.ax_img.plot(xs, ys, 'k--', alpha=0)[0]
+        self.line_ann = pp.add_arrow(line, position=xs[1])
+
+        self.fig.canvas.draw_idle()
+        self.fig.canvas.flush_events()
         return
 
+
     def draw_transection(self, start_yx, end_yx, start_lalo=None, end_lalo=None):
         """Plot the transect as dots"""
         self.ax_txn.cla()
 
-        # loop for all input files
+        # loop - extract transection data
+        txn_list = []
+        min_dist = 0
         for i in range(self.num_file):
             # get transection data
             if start_lalo is not None:
@@ -311,6 +352,12 @@ class transectionViewer():
                                      start_yx, end_yx,
                                      interpolation=self.interpolation)
 
+            # save txn
+            txn_list.append(txn)
+            min_dist = max(min_dist, txn['distance'][0])
+
+        # loop - plot transection
+        for i, txn in enumerate(txn_list):
             # distance unit and scaling
             if txn.get('distance_unit', 'm') == 'pixel':
                 dist_scale = 1.0
@@ -320,8 +367,9 @@ class transectionViewer():
                 dist_unit = 'km'
 
             # plot
-            self.ax_txn.scatter(txn['distance'] * dist_scale,
-                                txn['value'] - self.offset[i],
+            # update distance values by excluding the commonly masked out pixels in the beginning
+            self.ax_txn.scatter(x=(txn['distance'] - min_dist) * dist_scale,
+                                y=txn['value'] - self.offset[i],
                                 c=pp.mplColors[i],
                                 s=self.marker_size**2)
 
@@ -342,39 +390,15 @@ class transectionViewer():
         self.ax_txn.yaxis.set_minor_locator(ticker.AutoMinorLocator(10))
         self.ax_txn.set_ylabel(self.disp_unit, fontsize=self.font_size)
         self.ax_txn.set_xlabel(f'Distance [{dist_unit}]', fontsize=self.font_size)
-        self.ax_txn.set_xlim(0, txn['distance'][-1] * dist_scale)
-        self.fig.canvas.draw()
-        return
-
-    def select_point(self, event):
-        """Event handling function for points selection"""
-        if event.inaxes == self.ax_img:
-            # get row/col number
-            if 'Y_FIRST' in self.atr.keys():
-                lalo = [event.ydata, event.xdata]
-                yx = self.coord.geo2radar(event.ydata, event.xdata, print_msg=False)[0:2]
-            else:
-                lalo = None
-                yx = [int(event.ydata+0.5), int(event.xdata+0.5)]
+        self.ax_txn.set_xlim(0, (txn['distance'][-1] - min_dist) * dist_scale)
 
-            # insert selected points into self.start/end_yx member
-            # print('pts_idx: {}'.format(self.pts_idx)) #for debug
-            if self.pts_idx == 0:
-                self.start_lalo = lalo
-                self.start_yx = yx
-            else:
-                self.end_lalo = lalo
-                self.end_yx = yx
-
-            # update transection for every two clicks
-            self.pts_idx += 1
-            if self.pts_idx >= 2:
-                self.draw_line(self.start_yx, self.end_yx)
-                self.draw_transection(self.start_yx, self.end_yx, self.start_lalo, self.end_lalo)
-                self.pts_idx = 0
+        # update figure
+        self.fig.canvas.draw_idle()
+        self.fig.canvas.flush_events()
         return
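
The refactored draw_transection() above runs two passes: collect all transections first, then shift every distance axis by min_dist (the largest starting distance among the files, per the max() accumulation above), so profiles whose leading pixels are masked out share a common origin. A toy sketch of the shift, assuming non-negative distances:

import numpy as np

txn_list = [
    {'distance': np.array([120., 160., 200.]), 'value': np.array([1., 2., 3.])},
    {'distance': np.array([  0.,  40.,  80.]), 'value': np.array([4., 5., 6.])},
]
min_dist = max(txn['distance'][0] for txn in txn_list)   # 120.0
for txn in txn_list:
    print((txn['distance'] - min_dist).tolist())
# [0.0, 40.0, 80.0]
# [-120.0, -80.0, -40.0]
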
 
 
+
 ############################ Main ###################################
 def main(iargs=None):
     obj = transectionViewer(iargs=iargs)
diff -pruN 1.3.3-2/mintpy/prep_aria.py 1.4.0-1/mintpy/prep_aria.py
--- 1.3.3-2/mintpy/prep_aria.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/prep_aria.py	2022-08-04 20:01:49.000000000 +0000
@@ -10,7 +10,6 @@ import os
 import sys
 import time
 import glob
-import argparse
 import h5py
 import numpy as np
 
@@ -21,23 +20,12 @@ except ImportError:
 
 from mintpy.objects import ifgramStack, geometry, sensor
 from mintpy.utils import ptime, readfile, writefile, utils as ut, attribute as attr
+from mintpy.utils.arg_utils import create_argument_parser
 from mintpy.subset import read_subset_template2box
 from mintpy.multilook import multilook_data
 
 
 ####################################################################################
-EXAMPLE = """example:
-  prep_aria.py -t smallbaselineApp.cfg    # recommended
-  prep_aria.py -t SanFranSenDT42.txt
-  prep_aria.py -s ../stack/ -d ../DEM/SRTM_3arcsec.dem -i ../incidenceAngle/*.vrt
-  prep_aria.py -s ../stack/ -d ../DEM/SRTM_3arcsec.dem -i ../incidenceAngle/*.vrt -a ../azimuthAngle/*.vrt -w ../mask/watermask.msk
-
-  # download / extract / prepare inteferograms stack from ARIA using ARIA-tools:
-  # reference: https://github.com/aria-tools/ARIA-tools
-  ariaDownload.py -b '37.25 38.1 -122.6 -121.75' --track 42
-  ariaTSsetup.py -f 'products/*.nc' -b '37.25 38.1 -122.6 -121.75' --mask Download --num_threads 4 --verbose
-"""
-
 TEMPLATE = """template options:
   ########## 1. load_data
   ## no   - save   0% disk usage, fast [default]
@@ -56,22 +44,38 @@ TEMPLATE = """template options:
   mintpy.load.incAngleFile   = ../incidenceAngle/*.vrt
   mintpy.load.azAngleFile    = ../azimuthAngle/*.vrt
   mintpy.load.waterMaskFile  = ../mask/watermask.msk
-  ##---------multilook (optional):
-  ## multilook while loading data with nearest interpolation, to reduce dataset size
-  mintpy.load.ystep          = auto    #[int >= 1], auto for 1 - no multilooking
-  mintpy.load.xstep          = auto    #[int >= 1], auto for 1 - no multilooking
   ##---------subset (optional):
   ## if both yx and lalo are specified, use lalo option
   mintpy.subset.yx           = auto    #[y0:y1,x0:x1 / no], auto for no
   mintpy.subset.lalo         = auto    #[lat0:lat1,lon0:lon1 / no], auto for no
+  ##---------multilook (optional):
+  ## multilook while loading data with the specified method, to reduce dataset size
+  ## nearest, mean and median methods are applicable to interferogram/ionosphere/offset stack(s), except for:
+  ## connected components and all geometry datasets, for which nearest is hardwired.
+  mintpy.multilook.method    = auto    #[nearest, mean, median], auto for nearest - lines/rows skipping approach
+  mintpy.multilook.ystep     = auto    #[int >= 1], auto for 1 - no multilooking
+  mintpy.multilook.xstep     = auto    #[int >= 1], auto for 1 - no multilooking
 """
 
+EXAMPLE = """example:
+  prep_aria.py -t smallbaselineApp.cfg    # recommended
+  prep_aria.py -t SanFranSenDT42.txt
+  prep_aria.py -s ../stack/ -d ../DEM/SRTM_3arcsec.dem -i ../incidenceAngle/*.vrt
+  prep_aria.py -s ../stack/ -d ../DEM/SRTM_3arcsec.dem -i ../incidenceAngle/*.vrt -a ../azimuthAngle/*.vrt -w ../mask/watermask.msk
+
+  # download / extract / prepare interferogram stack from ARIA using ARIA-tools:
+  # reference: https://github.com/aria-tools/ARIA-tools
+  ariaDownload.py -b '37.25 38.1 -122.6 -121.75' --track 42
+  ariaTSsetup.py -f 'products/*.nc' -b '37.25 38.1 -122.6 -121.75' --mask Download --num_threads 4 --verbose
+"""
 
-def create_parser():
+def create_parser(subparsers=None):
     """Command line parser."""
-    parser = argparse.ArgumentParser(description='Prepare ARIA processed products for MintPy.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=TEMPLATE+'\n'+EXAMPLE)
+    synopsis = 'Prepare ARIA processed products for MintPy.'
+    epilog = TEMPLATE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('-t','--template', dest='template_file', type=str,
                         help='template file with the options')
@@ -122,15 +126,17 @@ def cmd_line_parse(iargs = None):
     parser = create_parser()
     inps = parser.parse_args(args=iargs)
 
-    # default x/ystep
+    # default multilook options
     iDict = vars(inps)
-    iDict['xstep'] = iDict.get('xstep', 1)
-    iDict['ystep'] = iDict.get('ystep', 1)
+    iDict['xstep'] = int(iDict.get('xstep', 1))
+    iDict['ystep'] = int(iDict.get('ystep', 1))
+    iDict['method'] = str(iDict.get('method', 'nearest'))
 
     # --template
     if inps.template_file:
         inps = read_template2inps(inps.template_file, inps)
-    print('x/ystep: {}/{}'.format(iDict['xstep'], iDict['ystep']))
+    print('multilook x/ystep: {}/{}'.format(iDict['xstep'], iDict['ystep']))
+    print('multilook method : {}'.format(iDict['method']))
 
     # --stack-dir
     if inps.stackDir is not None:
@@ -189,19 +195,28 @@ def read_template2inps(template_file, in
             template.pop(key)
 
     # pass options from template to inps
+    # group - load
     key_prefix = 'mintpy.load.'
     keys = [i for i in list(iDict.keys()) if key_prefix+i in template.keys()]
     for key in keys:
         value = template[key_prefix+key]
         if key in ['updateMode', 'compression']:
             iDict[key] = value
-        elif key in ['xstep', 'ystep']:
-            iDict[key] = int(value)
         elif key in ['unwFile']:
             iDict['stackDir'] = os.path.dirname(value)
         elif value:
             iDict[key] = str(value)
 
+    # group - multilook
+    prefix = 'mintpy.multilook.'
+    key_list = [i.split(prefix)[1] for i in template.keys() if i.startswith(prefix)]
+    for key in key_list:
+        value = template[prefix+key]
+        if key in ['xstep', 'ystep']:
+            iDict[key] = int(template[prefix+key])
+        elif key in ['method']:
+            iDict[key] = template[prefix+key]
+
     return inps
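
The new 'group - multilook' block picks up any mintpy.multilook.* keys from the template, casting the step sizes to int. A minimal sketch of that parsing against a hypothetical template dict (not the full read_template2inps):

template = {                    # hypothetical parsed template
    'mintpy.multilook.method': 'mean',
    'mintpy.multilook.xstep' : '3',
    'mintpy.multilook.ystep' : '3',
}

iDict = {}
prefix = 'mintpy.multilook.'
for key in [k.split(prefix)[1] for k in template if k.startswith(prefix)]:
    if key in ['xstep', 'ystep']:
        iDict[key] = int(template[prefix + key])
    elif key == 'method':
        iDict[key] = template[prefix + key]
print(iDict)    # {'method': 'mean', 'xstep': 3, 'ystep': 3}
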
 
 
@@ -481,7 +496,7 @@ def write_geometry(outfile, demFile, inc
 
 
 def write_ifgram_stack(outfile, unwStack, cohStack, connCompStack, ampStack=None,
-                       box=None, xstep=1, ystep=1):
+                       box=None, xstep=1, ystep=1, mli_method='nearest'):
     """Write ifgramStack HDF5 file from stack VRT files
     """
 
@@ -542,6 +557,11 @@ def write_ifgram_stack(outfile, unwStack
     else:
         kwargs = dict()
 
+    if xstep * ystep > 1:
+        msg = f'apply {xstep} x {ystep} multilooking/downsampling via {mli_method} to: unwrapPhase, coherence'
+        msg += ', magnitude' if dsAmp is not None else ''
+        msg += f'\napply {xstep} x {ystep} multilooking/downsampling via nearest to: connectComponent'
+        print(msg)
     print('writing data to HDF5 file {} with a mode ...'.format(outfile))
     with h5py.File(outfile, "a") as f:
 
@@ -557,7 +577,7 @@ def write_ifgram_stack(outfile, unwStack
 
             bnd = dsUnw.GetRasterBand(bndIdx)
             data = bnd.ReadAsArray(**kwargs)
-            data = multilook_data(data, ystep, xstep, method='nearest')
+            data = multilook_data(data, ystep, xstep, method=mli_method)
             data[data == noDataValueUnw] = 0      #assign pixel with no-data to 0
             data *= -1.0                          #date2_date1 -> date1_date2
             f["unwrapPhase"][ii,:,:] = data
@@ -568,7 +588,7 @@ def write_ifgram_stack(outfile, unwStack
 
             bnd = dsCoh.GetRasterBand(bndIdx)
             data = bnd.ReadAsArray(**kwargs)
-            data = multilook_data(data, ystep, xstep, method='nearest')
+            data = multilook_data(data, ystep, xstep, method=mli_method)
             data[data == noDataValueCoh] = 0      #assign pixel with no-data to 0
             f["coherence"][ii,:,:] = data
 
@@ -581,7 +601,7 @@ def write_ifgram_stack(outfile, unwStack
             if dsAmp is not None:
                 bnd = dsAmp.GetRasterBand(bndIdx)
                 data = bnd.ReadAsArray(**kwargs)
-                data = multilook_data(data, ystep, xstep, method='nearest')
+                data = multilook_data(data, ystep, xstep, method=mli_method)
                 data[data == noDataValueAmp] = 0  #assign pixel with no-data to 0
                 f["magnitude"][ii,:,:] = data
 
@@ -651,7 +671,8 @@ def main(iargs=None):
                            ampStack=inps.magFile,
                            box=box,
                            xstep=inps.xstep,
-                           ystep=inps.ystep)
+                           ystep=inps.ystep,
+                           mli_method=inps.method)
 
     ########## output file 2 - geometryGeo
     # define dataset structure for geometry
@@ -688,7 +709,7 @@ def main(iargs=None):
     m, s = divmod(time.time()-start_time, 60)
     print('time used: {:02.0f} mins {:02.1f} secs.'.format(m, s))
 
-    return inps.outfile
+    return
 
 
 ####################################################################################
diff -pruN 1.3.3-2/mintpy/prep_cosicorr.py 1.4.0-1/mintpy/prep_cosicorr.py
--- 1.3.3-2/mintpy/prep_cosicorr.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/prep_cosicorr.py	2022-08-04 20:01:49.000000000 +0000
@@ -8,18 +8,14 @@
 
 import os
 import sys
-import argparse
 from datetime import datetime
 import numpy as np
+
 from mintpy.utils import readfile, writefile, utils as ut
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 #########################################################################
-EXAMPLE = """example:
-  prep_cosicorr.py offsets/*offset.tif -m metadata.txt
-  prep_cosicorr.py snr/*snr.tif        -m metadata.txt
-"""
-
 EXAMPLE_META_FILE = """
 offset1NS.tif  20160206 20161122
 offset1EW.tif  20160206 20161122
@@ -30,11 +26,18 @@ offset2SNR.tif 20160206 20170225
 ...            ...   ...
 """
 
+EXAMPLE = """example:
+  prep_cosicorr.py offsets/*offset.tif -m metadata.txt
+  prep_cosicorr.py snr/*snr.tif        -m metadata.txt
+"""
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Prepare attributes file for COSI-Corr pixel offset product.\n',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    """Command line parser."""
+    synopsis = 'Prepare attributes file for COSI-Corr pixel offset product.'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', nargs='+', help='cosicorr file(s)')
     parser.add_argument('-m', '--metadata', type=str, dest='meta_file',
diff -pruN 1.3.3-2/mintpy/prep_fringe.py 1.4.0-1/mintpy/prep_fringe.py
--- 1.3.3-2/mintpy/prep_fringe.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/prep_fringe.py	2022-08-04 20:01:49.000000000 +0000
@@ -9,7 +9,6 @@
 import os
 import sys
 import glob
-import argparse
 import h5py
 import numpy as np
 import defusedxml.ElementTree as ET
@@ -20,7 +19,7 @@ except ImportError:
     raise ImportError("Can not import gdal!")
 
 from mintpy.utils import (
-    arg_group,
+    arg_utils,
     ptime,
     readfile,
     writefile,
@@ -48,11 +47,13 @@ EXAMPLE = """example:
   geocode.py velocity.h5 -l inputs/geometryRadar.h5
 """
 
-def create_parser():
+def create_parser(subparsers=None):
     """Command Line Parser"""
-    parser = argparse.ArgumentParser(description="Prepare FRInGE products for MintPy",
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+    synopsis = "Prepare FRInGE products for MintPy"
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = arg_utils.create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('-u', '--unw-file', dest='unwFile', type=str, default='./PS_DS/unwrap/*.unw',
                         help='path pattern of unwrapped interferograms (default: %(default)s).')
@@ -85,7 +86,7 @@ def create_parser():
     parser.add_argument('--geom-only', action='store_true',
                         help='Only create the geometry file (useful for geocoding a watermask).')
 
-    parser = arg_group.add_subset_argument(parser, geo=False)
+    parser = arg_utils.add_subset_argument(parser, geo=False)
 
     return parser
 
@@ -162,15 +163,6 @@ def prepare_metadata(meta_file, geom_src
                                                 box=box,
                                                 fext_list=[geom_ext])
 
-    # add LENGTH / WIDTH
-    atr = readfile.read_attribute(os.path.join(geom_src_dir, 'lat{}'.format(geom_ext)))
-    meta['LENGTH'] = atr['LENGTH']
-    meta['WIDTH'] = atr['WIDTH']
-
-    ## update metadata due to subset
-    print('update metadata due to subset with bounding box')
-    meta = attr.update_attribute4subset(meta, box)
-
     # apply optional user multilooking
     if nlks_x > 1:
         meta['RANGE_PIXEL_SIZE'] = str(float(meta['RANGE_PIXEL_SIZE']) * nlks_x)
@@ -360,7 +352,7 @@ def prepare_stack(outfile, unw_file, met
         return
 
     # get date info: date12_list
-    date12_list = [os.path.basename(x).split('.')[0] for x in unw_files]
+    date12_list = ptime.yyyymmdd_date12([os.path.basename(x).split('.')[0] for x in unw_files])
 
     # prepare baseline info
     if baseline_dir is not None:
@@ -492,7 +484,7 @@ def main(iargs=None):
         metadata=meta,
         box=pix_box)
 
-    ## 4 - prepare and ifgstack with connected components
+    ## 4 - ifgramStack for unwrapped phase and connected components
     prepare_stack(
         outfile=stack_file,
         unw_file=inps.unwFile,
diff -pruN 1.3.3-2/mintpy/prep_gamma.py 1.4.0-1/mintpy/prep_gamma.py
--- 1.3.3-2/mintpy/prep_gamma.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/prep_gamma.py	2022-08-04 20:01:49.000000000 +0000
@@ -9,10 +9,10 @@
 import os
 import sys
 import re
-import argparse
 import numpy as np
 from mintpy.objects import sensor
 from mintpy.utils import readfile, writefile, ptime, utils as ut
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 SPEED_OF_LIGHT = 299792458  # m/s
@@ -21,27 +21,17 @@ PAR_EXT_LIST = ['.amp.par', '.ramp.par',
 
 
 ##################################################################################################
-EXAMPLE = """example:
-  prep_gamma.py  diff_filt_HDR_20130118_20130129_4rlks.unw
-  prep_gamma.py  interferograms/*/diff_*rlks.unw --sensor sen
-  prep_gamma.py  interferograms/*/filt_*rlks.cor
-  prep_gamma.py  interferograms/*/diff_*rlks.int
-  prep_gamma.py  sim_20150911_20150922.hgt_sim
-  prep_gamma.py  sim_20150911_20150922.utm.dem
-  prep_gamma.py  sim_20150911_20150922.UTM_TO_RDC
-"""
-
-DESCRIPTION = """
+NOTE = """
   For each interferogram, including unwrapped/wrapped interferograms and coherence, 3 metadata files are required:
   1) reference .par file, e.g. 130118_4rlks.amp.par
   2) secondary .par file, e.g. 130129_4rlks.amp.par
   3) interferogram .off file, e.g. 130118-130129_4rlks.off
 
  Other metadata files are recommended and can be generated from the above 3 if they do not exist, more specifically:
-  4) baseline files, e.g. 130118-130129_4rlks.baseline and 130118-130129_4rlks.base_perp
-      It can be generated from file 1-3 with Gamma command base_orbit and base_perp)
-  5) corner files, e.g. 130118_4rlks.amp.corner_full and 130118_4rlks.amp.corner
-      It can be generated from file 1 with Gamma command SLC_corners)
+  4) baseline files, e.g. 130118-130129_4rlks.baseline and 130118-130129_4rlks.base_perp,
+      which can be generated from files 1-3 with the Gamma commands base_orbit and base_perp.
+  5) corner files, e.g. 130118_4rlks.amp.corner_full and 130118_4rlks.amp.corner,
+      which can be generated from file 1 with the Gamma command SLC_corners.
 
  This script will read all these files (generating 4 and 5 if they do not exist), merge them into one, convert their names from
  Gamma style to ROI_PAC style, and write them to a metadata file with the same name as the input binary data file plus the suffix .rsc,
@@ -96,12 +86,23 @@ DESCRIPTION = """
          if no multilooking applied, do not add "_4rlks" in your file names.
 """
 
+EXAMPLE = """example:
+  prep_gamma.py  diff_filt_HDR_20130118_20130129_4rlks.unw
+  prep_gamma.py  interferograms/*/diff_*rlks.unw --sensor sen
+  prep_gamma.py  interferograms/*/filt_*rlks.cor
+  prep_gamma.py  interferograms/*/diff_*rlks.int
+  prep_gamma.py  sim_20150911_20150922.hgt_sim
+  prep_gamma.py  sim_20150911_20150922.utm.dem
+  prep_gamma.py  sim_20150911_20150922.UTM_TO_RDC
+"""
+
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Prepare attributes file for Gamma product.\n'+
-                                     DESCRIPTION,
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Prepare attributes file for Gamma product.'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis+NOTE, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', nargs='+', help='Gamma file(s)')
     parser.add_argument('--sensor', dest='sensor', type=str, choices=sensor.SENSOR_NAMES,
diff -pruN 1.3.3-2/mintpy/prep_giant.py 1.4.0-1/mintpy/prep_giant.py
--- 1.3.3-2/mintpy/prep_giant.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/prep_giant.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,152 +0,0 @@
-#!/usr/bin/env python3
-############################################################
-# Program is part of MintPy                                #
-# Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi         #
-# Author: Zhang Yunjun, Jul 2018                           #
-############################################################
-
-
-import os
-import sys
-import argparse
-from lxml import objectify
-from mintpy.utils import readfile, utils as ut
-from mintpy.objects import sensor
-
-
-key_giant2mintpy = {'xmin':'SUBSET_XMIN', 'xmax':'SUBSET_XMAX',
-                   'ymin':'SUBSET_YMIN', 'ymax':'SUBSET_YMAX',
-                  }
-
-
-##################################################################################################
-EXAMPLE = """example:
-  prep_giant.py  LS-PARAMS.h5
-  prep_giant.py  TS-PARAMS.h5
-  prep_giant.py  NSBAS-PARAMS.h5
-  prep_giant.py  RAW-STACK.h5
-  prep_giant.py  PROC-STACK.h5
-  prep_giant.py  LS-PARAMS.h5 -x ../data.xml ../sbas.xml ../mints.xml
-  prep_giant.py  LS-PARAMS.h5 -x ../data.xml ../sbas.xml ../mints.xml ../filt_fine.unw.rsc
-"""
-
-
-def create_parser():
-    parser = argparse.ArgumentParser(description='Prepare attributes for GIAnT timeseries file.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
-
-    parser.add_argument('file', help='GIAnT timeseries file')
-    parser.add_argument('-x','--xml', nargs='+', dest='xml_file',
-                        help='XML file with data setting info.')
-    return parser
-
-
-def cmd_line_parse(iargs=None):
-    parser = create_parser()
-    inps = parser.parse_args(args=iargs)
-    if not inps.xml_file:
-        inps.xml_file = auto_xml_file4giant(inps.file)
-    if not inps.xml_file:
-        parser.print_usage()
-        raise SystemExit('ERROR: no xml file found.')
-
-    return inps
-
-
-def auto_xml_file4giant(fname):
-    file_list = [os.path.join(os.path.dirname(fname), '../{}'.format(i))
-                 for i in ['data.xml',
-                           'sbas.xml',
-                           'mints.xml',
-                           'filt_fine.unw.rsc']]
-    file_list = [i for i in file_list if os.path.isfile(i)]
-    return file_list
-
-
-def read_giant_xml(fname):
-    odict = {}
-    root = objectify.parse(fname).getroot()
-
-    if root.find('master') is not None:
-        comp = root['master']
-        for key in ['wavelength', 'incidence']:
-            odict[key] = comp[key].value
-
-    if root.find('subimage') is not None:
-        comp = root['subimage']
-        for key in ['width', 'length',
-                    'xmin', 'xmax',
-                    'ymin', 'ymax',
-                    'rxmin', 'rxmax',
-                    'rymin', 'rymax']:
-            odict[key] = comp[key].value
-
-        odict = readfile.standardize_metadata(odict, standardKeys=key_giant2mintpy)
-        odict['REF_Y'] = int((int(odict['rymin']) +
-                              int(odict['rymax'])) / 2. + 0.5)
-        odict['REF_X'] = int((int(odict['rxmin']) +
-                              int(odict['rxmax'])) / 2. + 0.5)
-
-    if root.find('proc/masterdate') is not None:
-        odict['REF_DATE'] = root['proc']['masterdate'].value
-    return odict
-
-
-def prepare_metadata4giant(fname, meta_files=None):
-    """Extract metadata from xml files for GIAnT time-series file."""
-    # check xml files
-    if not meta_files:
-        meta_files = auto_xml_file4giant(fname)
-    if not meta_files:
-        raise FileNotFoundError("no xml file found.")
-
-    # extract metadata from xml files
-    rsc_files = [i for i in meta_files if i.endswith('.rsc')]
-    xml_files = [i for i in meta_files if i.endswith('.xml')]
-    xml_dict = {}
-    for rsc_file in rsc_files:
-        print('reading {}'.format(rsc_file))
-        rsc_dict = readfile.read_roipac_rsc(rsc_file)
-        for key in ['length', 'LENGTH', 'FILE_LENGTH', 'width', 'WIDTH']:
-            if key in rsc_dict.keys():
-                rsc_dict.pop(key)
-        xml_dict.update(rsc_dict)
-    for xml_file in xml_files:
-        print('reading {}'.format(xml_file))
-        xml_dict.update(read_giant_xml(xml_file))
-
-    if not xml_dict:
-        raise ValueError('No metadata found in file: '+xml_file)
-
-    # standardize metadata names
-    xml_dict = readfile.standardize_metadata(xml_dict)
-
-    # project name
-    sensor_name, project_name = sensor.project_name2sensor_name(os.path.abspath(fname))
-    if sensor_name:
-        xml_dict['PLATFORM'] = sensor_name
-    if project_name:
-        xml_dict['PROJECT_NAME'] = project_name
-        if sensor_name in project_name:
-            tmp = project_name.split(sensor_name)[1][0]
-            if tmp == 'A':
-                xml_dict['ORBIT_DIRECTION'] = 'ASCENDING'
-            else:
-                xml_dict['ORBIT_DIRECTION'] = 'DESCENDING'
-
-    # update GIAnT HDF5 file
-    fname = ut.add_attribute(fname, xml_dict, print_msg=True)
-    return fname
-
-
-##################################################################################################
-def main(iargs=None):
-    inps = cmd_line_parse(iargs)
-    prepare_metadata4giant(inps.file, inps.xml_file)
-    return
-
-
-###################################################################################################
-if __name__ == '__main__':
-    main(sys.argv[1:])
diff -pruN 1.3.3-2/mintpy/prep_gmtsar.py 1.4.0-1/mintpy/prep_gmtsar.py
--- 1.3.3-2/mintpy/prep_gmtsar.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/prep_gmtsar.py	2022-08-04 20:01:49.000000000 +0000
@@ -9,7 +9,6 @@
 import os
 import sys
 import glob
-import argparse
 import numpy as np
 
 try:
@@ -23,7 +22,7 @@ from mintpy.utils import (
     writefile,
     utils as ut,
 )
-
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 #########################################################################
@@ -31,11 +30,14 @@ EXAMPLE = """example:
   prep_gmtsar.py StHelensEnvDT156.txt
 """
 
-def create_parser():
+def create_parser(subparsers=None):
     """Command line parser."""
-    parser = argparse.ArgumentParser(description='Prepare GMTSAR metadata files.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+    synopsis = 'Prepare GMTSAR metadata files.'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
+
     parser.add_argument('template_file', type=str, help='MintPy template file for GMTSAR products.')
     parser.add_argument('--mintpy-dir', dest='mintpy_dir', default='./',
                         help='MintPy directory (default: %(default)s).')
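
Each refactored script derives its subcommand name with __name__.split('.')[-1] rather
than hard-coding it, so the registered name always tracks the module file name. One
subtlety worth noting (an observation, not upstream documentation): the derived name is
only meaningful on the import path, since a directly executed script sees '__main__':

    # imported as a module: __name__ == 'mintpy.prep_gmtsar'
    assert 'mintpy.prep_gmtsar'.split('.')[-1] == 'prep_gmtsar'

    # executed directly as a script: __name__ == '__main__'
    assert '__main__'.split('.')[-1] == '__main__'
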
diff -pruN 1.3.3-2/mintpy/prep_hyp3.py 1.4.0-1/mintpy/prep_hyp3.py
--- 1.3.3-2/mintpy/prep_hyp3.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/prep_hyp3.py	2022-08-04 20:01:49.000000000 +0000
@@ -8,25 +8,17 @@
 
 import os
 import sys
-import argparse
 from datetime import datetime
 from mintpy.objects import sensor
 from mintpy.utils import readfile, writefile, utils as ut
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 SPEED_OF_LIGHT = 299792458  # m/s
 
 
 #########################################################################
-EXAMPLE = """example:
-  prep_hyp3.py  interferograms/*/*unw_phase_clip.tif
-  prep_hyp3.py  interferograms/*/*corr_clip.tif
-  prep_hyp3.py  interferograms/*/*dem_clip.tif
-  prep_hyp3.py  interferograms/*/*lv_theta_clip.tif
-  prep_hyp3.py  interferograms/*/*clip.tif
-"""
-
-DESCRIPTION = """
+NOTE = """
+  For each interferogram, the unwrapped interferogram, coherence, and metadata files are required, e.g.:
   1) S1AA_20161223T070700_20170116T070658_VVP024_INT80_G_ueF_74C2_unw_phase.tif
   2) S1AA_20161223T070700_20170116T070658_VVP024_INT80_G_ueF_74C2_corr.tif
@@ -71,12 +63,21 @@ DESCRIPTION = """
     from other satellites, changes will be needed. 
 """
 
+EXAMPLE = """example:
+  prep_hyp3.py  interferograms/*/*unw_phase_clip.tif
+  prep_hyp3.py  interferograms/*/*corr_clip.tif
+  prep_hyp3.py  interferograms/*/*dem_clip.tif
+  prep_hyp3.py  interferograms/*/*lv_theta_clip.tif
+  prep_hyp3.py  interferograms/*/*clip.tif
+"""
+
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Prepare attributes file for HyP3 InSAR product.\n'+
-                                     DESCRIPTION,
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Prepare attributes file for HyP3 InSAR product.'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis+NOTE, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', nargs='+', help='HyP3 file(s)')
     return parser
@@ -127,7 +128,7 @@ def add_hyp3_metadata(fname,meta,is_ifg=
     meta['HEADING'] = float(hyp3_meta['Heading']) % 360. - 360.
 
     # add LAT/LON_REF1/2/3/4 based on whether satellite ascending or descending
-    meta['ORBIT_DIRECTION'] = 'ASCENDING' if abs(meta['HEADING']) > 90 else 'DESCENDING'
+    meta['ORBIT_DIRECTION'] = 'ASCENDING' if abs(meta['HEADING']) < 90 else 'DESCENDING'
     N = float(meta['Y_FIRST'])
     W = float(meta['X_FIRST'])
     S = N + float(meta['Y_STEP']) * int(meta['LENGTH'])
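
The flipped comparison above fixes an inverted orbit-direction test. After
HEADING = Heading % 360. - 360., headings fall in [-360, 0), so a typical ascending
Sentinel-1 pass (true heading near 350 deg) maps to about -10 while a descending pass
(near 190 deg) maps to about -170; abs(HEADING) < 90 therefore selects ASCENDING for
the former only. A quick check of both branches with these illustrative values:

    for true_heading in (350.0, 190.0):        # ascending, descending pass
        heading = true_heading % 360. - 360.   # -> -10.0, -170.0
        orbit = 'ASCENDING' if abs(heading) < 90 else 'DESCENDING'
        print(f'{true_heading:5.1f} deg -> {heading:6.1f} -> {orbit}')
    # 350.0 deg ->  -10.0 -> ASCENDING
    # 190.0 deg -> -170.0 -> DESCENDING
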
diff -pruN 1.3.3-2/mintpy/prep_isce.py 1.4.0-1/mintpy/prep_isce.py
--- 1.3.3-2/mintpy/prep_isce.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/prep_isce.py	2022-08-04 20:01:49.000000000 +0000
@@ -9,9 +9,15 @@
 import os
 import sys
 import glob
-import argparse
 import numpy as np
-from mintpy.utils import ptime, readfile, writefile, isce_utils
+from mintpy.utils import (
+    attribute as attr,
+    isce_utils,
+    ptime,
+    readfile,
+    writefile,
+)
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 
@@ -19,47 +25,65 @@ from mintpy.utils import ptime, readfile
 GEOMETRY_PREFIXS = ['hgt', 'lat', 'lon', 'los', 'shadowMask', 'waterMask', 'incLocal']
 
 EXAMPLE = """example:
-  # interferogram stack
-  prep_isce.py -d ./merged/interferograms -m ./reference/IW1.xml -b ./baselines -g ./merged/geom_reference      #for topsStack
-  prep_isce.py -d ./Igrams -m ./referenceShelve/data.dat -b ./baselines -g ./geom_reference                     #for stripmapStack
-  prep_isce.py -m 20120507_slc_crop.xml -g ./geometry                                                           #for stripmapApp
-  prep_isce.py -d "pairs/*-*/insar" -m "pairs/*-*/150408.track.xml" -b baseline -g dates_resampled/150408/insar #for alosStack with 150408 as ref date
-
-  # offset stack
-  prep_isce.py -d ./offsets -f *Off*.bip -m ./../reference/IW1.xml -b ./../baselines -g ./offsets/geom_reference  #for topsStack
-  prep_isce.py -d ./offsets -f *Off*.bip -m ./SLC/*/data.dat       -b random         -g ./geometry                #for UAVSAR coregStack
+  ## topsStack
+  prep_isce.py -f "./merged/interferograms/*/filt_*.unw" -m ./reference/IW1.xml -b ./baselines/ -g ./merged/geom_reference/
+
+  # topsStack with ionosphere
+  prep_isce.py -f "./merged/interferograms/*/filt_*.unw" "./ion/*/ion_cal/filt.ion" -m ./reference/IW1.xml -b ./baselines/ -g ./merged/geom_reference/
+
+  # topsStack for offset
+  prep_isce.py -f "./merged/offsets/*/*Off*.bip" -m ./reference/IW1.xml -b ./baselines/ -g ./merged/geom_reference/
+
+  ## stripmapStack
+  prep_isce.py -f "./Igrams/*/filt_*.unw" -m ./referenceShelve/data.dat -b ./baselines/ -g ./geom_reference/
+
+  # stripmapApp
+  prep_isce.py -m 20120507_slc_crop.xml -g ./geometry 
+
+  ## alosStack
+  # where 150408 is the reference date
+  prep_isce.py -f "./pairs/*/insar/filt_*.unw" -m "pairs/150408-*/150408.track.xml" -b ./baseline/ -g ./dates_resampled/150408/insar/
+
+  ## UAVSAR
+  prep_isce.py -f "./Igrams/*/filt_*.unw" -m ./referenceShelve/data.dat -b ./baselines/ -g ./geometry/
+
+  # UAVSAR for offset
+  prep_isce.py -f "./offsets/*/*Off*.bip" -m "SLC/*/data.dat" -b random -g ./geometry/
 """
 
-def create_parser():
+def create_parser(subparsers=None):
     """Command line parser."""
-    parser = argparse.ArgumentParser(description='Prepare ISCE metadata files.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
-    # interferograms
-    parser.add_argument('-d', '--ds-dir', '--dset-dir', dest='dsetDir', type=str, default=None, required=True,
-                        help='The directory which contains all pairs\n'
-                             'e.g.: $PROJECT_DIR/merged/interferograms OR \n'
-                             '      $PROJECT_DIR/pairs/*-*/insar OR \n'
-                             '      $PROJECT_DIR/merged/offsets')
-    parser.add_argument('-f', '--file-pattern', nargs = '+', dest='dsetFiles', type=str, default=['filt_*.unw'],
-                        help='List of observation file basenames, e.g.: filt_fine.unw OR filtAz*.off')
+    synopsis = 'Prepare ISCE metadata files.'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
+
+    # observations
+    parser.add_argument('-f', dest='obs_files', type=str, nargs='+', default='./merged/interferograms/*/filt_*.unw',
+                        help='Wildcard path pattern for the primary observation files.\n'
+                             'E.g.: topsStack          : {dset_dir}/merged/interferograms/*/filt_*.unw\n'
+                             '      topsStack / iono   : {dset_dir}/ion/*/ion_cal/filt.ion\n'
+                             '      topsStack / offset : {dset_dir}/merged/offsets/*/*Off*.bip\n'
+                             '      stripmapStack      : {dset_dir}/Igrams/*_*/filt_*.unw\n'
+                             '      alosStack          : {dset_dir}/pairs/*/insar/filt_*.unw\n'
+                             '      UAVSAR / offset    : {dset_dir}/offsets/*/*Off*.bip')
 
     # metadata
-    parser.add_argument('-m', '--meta-file', dest='metaFile', type=str, default=None, required=True,
-                        help='Metadata file to extract common metada for the stack:\n'
-                             'e.g.: for ISCE/topsStack    : reference/IW3.xml;\n'
-                             '      for ISCE/stripmapStack: referenceShelve/data.dat;\n'
-                             '      for ISCE/alosStack    : pairs/150408-150701/150408.track.xml\n'
-                             '          where 150408 is the reference date of stack processing')
+    parser.add_argument('-m', '--meta-file', dest='meta_file', type=str, default=None, required=True,
+                        help='Metadata file to extract common metadata for the stack.\n'
+                             'E.g.: topsStack     : reference/IW3.xml\n'
+                             '      stripmapStack : referenceShelve/data.dat\n'
+                             '      alosStack     : pairs/{ref_date}-*/{ref_date}.track.xml\n'
+                             '      UAVSAR        : SLC/*/data.dat')
 
     # geometry
-    parser.add_argument('-b', '--baseline-dir', dest='baselineDir', type=str, default=None,
-                        help='Directory with baselines.'
-                             'Set "random" to generate baseline with random value from [-10,10].'
-                             'Set "random-100" to generate baseline with random value from [-100,100].')
-    parser.add_argument('-g', '--geometry-dir', dest='geometryDir', type=str, default=None, required=True,
+    parser.add_argument('-b', '--baseline-dir', dest='baseline_dir', type=str, default=None,
+                        help='Directory with baselines. '
+                             'Set "random" to generate baseline with random value from [-10,10].')
+    parser.add_argument('-g', '--geometry-dir', dest='geom_dir', type=str, default=None, required=True,
                         help='Directory with geometry files ')
-    parser.add_argument('--geom-files', dest='geometryFiles', type=str, nargs='*',
+    parser.add_argument('--geom-files', dest='geom_files', type=str, nargs='*',
                         default=['{}.rdr'.format(i) for i in GEOMETRY_PREFIXS],
                         help='List of geometry file basenames. Default: %(default)s.\n'
                              'All geometry files need to be in the same directory.')
@@ -73,17 +97,13 @@ def cmd_line_parse(iargs = None):
     parser = create_parser()
     inps = parser.parse_args(args=iargs)
 
-    # translate wildcard in metaFile
-    if "*" in inps.metaFile:
-        fnames = glob.glob(inps.metaFile)
+    # translate wildcard in meta_file
+    if "*" in inps.meta_file:
+        fnames = glob.glob(inps.meta_file)
         if len(fnames) > 0:
-            inps.metaFile = fnames[0]
+            inps.meta_file = fnames[0]
         else:
-            raise FileNotFoundError(inps.metaFile)
-
-    # random baseline input checking
-    if inps.baselineDir.lower().startswith('rand'):
-        inps.baselineDir = inps.baselineDir.lower().replace('_','-')
+            raise FileNotFoundError(inps.meta_file)
 
     return inps
 
@@ -114,7 +134,13 @@ def add_ifgram_metadata(metadata_in, dat
 
 
 def prepare_geometry(geom_dir, geom_files=[], metadata=dict(), processor='tops', update_mode=True):
-    """Prepare and extract metadata from geometry files"""
+    """Prepare and extract metadata from geometry files.
+
+    Parameters: geom_dir   - str, path to the directory for the geometry data files
+                geom_files - list(str), basenames of geometry data files
+                metadata   - dict, common metadata for the stack
+                processor  - str, isce-2 stack processor
+    """
 
     print('preparing RSC file for geometry files')
     geom_dir = os.path.abspath(geom_dir)
@@ -150,26 +176,26 @@ def prepare_geometry(geom_dir, geom_file
     # write rsc file for each file
     for geom_file in geom_files:
         # prepare metadata for current file
+        geom_meta = {**metadata}
         if os.path.isfile(geom_file+'.xml'):
-            geom_metadata = readfile.read_attribute(geom_file, metafile_ext='.xml')
+            geom_meta.update(readfile.read_attribute(geom_file, metafile_ext='.xml'))
         else:
-            geom_metadata = readfile.read_attribute(geom_file)
-        geom_metadata.update(metadata)
+            geom_meta.update(readfile.read_attribute(geom_file))
 
         # write .rsc file
         rsc_file = geom_file+'.rsc'
-        writefile.write_roipac_rsc(geom_metadata, rsc_file,
+        writefile.write_roipac_rsc(geom_meta, rsc_file,
                                    update_mode=update_mode,
                                    print_msg=True)
     return
 
 
-def gen_random_baseline_timeseries(dset_dir, dset_file, max_bperp=10):
-    """Generate a baseline time series with random values.
+def gen_random_baseline_timeseries(obs_file, max_bperp=10):
+    """Generate a baseline time series with random values
+    with the date12 pairs taken from the directory names matched by the obs_file path pattern.
     """
     # list of dates
-    fnames = glob.glob(os.path.join(dset_dir, '*', dset_file))
-    date12s = sorted([os.path.basename(os.path.dirname(x)) for x in fnames])
+    date12s = sorted([os.path.basename(os.path.dirname(x)) for x in glob.glob(obs_file)])
     date1s = [x.split('_')[0] for x in date12s]
     date2s = [x.split('_')[1] for x in date12s]
     date_list = sorted(list(set(date1s + date2s)))
@@ -185,42 +211,50 @@ def gen_random_baseline_timeseries(dset_
     return bDict
 
 
-def prepare_stack(inputDir, filePattern, metadata=dict(), baseline_dict=dict(), processor='tops', update_mode=True):
-    print('preparing RSC file for ', filePattern)
-    if processor in ['tops', 'stripmap']:
-        isce_files = sorted(glob.glob(os.path.join(os.path.abspath(inputDir), '*', filePattern)))
-    elif processor == 'alosStack':
-        isce_files = sorted(glob.glob(os.path.join(os.path.abspath(inputDir), filePattern)))
-    else:
-        raise ValueError('Un-recognized ISCE stack processor: {}'.format(processor))
+def prepare_stack(obs_file, metadata=dict(), baseline_dict=dict(), update_mode=True):
+    """Prepare metadata for a stack of observation data files.
 
+    Parameters: obs_file     : path pattern with wildcards for the primary observation files, e.g. *.unw file.
+                metadata     : dict, common metadata for the stack
+                baseline_dict: dict, baseline time series
+    """
+    print(f'preparing RSC file for: {obs_file}')
+    isce_files = sorted(glob.glob(obs_file))
     if len(isce_files) == 0:
-        raise FileNotFoundError('no file found in pattern: {}'.format(filePattern))
+        raise FileNotFoundError('NO file found with path pattern: {}'.format(obs_file))
+
+    # make a copy
+    meta = {**metadata}
+
+    # update A/RLOOKS, RANGE/AZIMUTH_PIXEL_SIZE, NCORRLOOKS
+    # for low resolution ionosphere from isce2/topsStack
+    keys = ['LENGTH', 'WIDTH']
+    if all(x in meta.keys() for x in keys):
+        atr = readfile.read_attribute(isce_files[0], metafile_ext='.xml')
+        if any(int(meta[x]) != int(atr[x]) for x in keys):
+            resize2shape = (int(atr['LENGTH']), int(atr['WIDTH']))
+            meta = attr.update_attribute4resize(meta, resize2shape)
 
     # write .rsc file for each interferogram file
     num_file = len(isce_files)
-    prog_bar = ptime.progressBar(maxValue=num_file)
-    for i in range(num_file):
-        # prepare metadata for current file
-        isce_file = isce_files[i]
-        if processor in ['tops', 'stripmap']:
-            dates = os.path.basename(os.path.dirname(isce_file)).split('_')  # to modify to YYYYMMDDTHHMMSS
-        elif processor == 'alosStack':
-            dates = os.path.basename(os.path.dirname(os.path.dirname(isce_file))).split('-')  # to modify to YYYYMMDDTHHMMSS
-            dates = ptime.yyyymmdd(dates)
-        else:
-            raise ValueError('Un-recognized ISCE stack processor: {}'.format(processor))
-
-        ifg_metadata = readfile.read_attribute(isce_file, metafile_ext='.xml')
-        ifg_metadata.update(metadata)
-        ifg_metadata = add_ifgram_metadata(ifg_metadata, dates, baseline_dict)
+    print_msg = num_file > 5   # do not print the progress bar for <= 5 files
+    prog_bar = ptime.progressBar(maxValue=num_file, print_msg=print_msg)
+    for i, isce_file in enumerate(isce_files):
+        # get date1/2
+        date12 = ptime.get_date12_from_path(isce_file)
+        dates = ptime.yyyymmdd(date12.replace('-','_').split('_'))
+        prog_bar.update(i+1, suffix=f'{dates[0]}_{dates[1]} {i+1}/{num_file}')
+
+        # merge metadata from: data.rsc, *.unw.xml and DATE12/P_BASELINE_TOP/BOTTOM_HDR
+        ifg_meta = {**meta}
+        ifg_meta.update(readfile.read_attribute(isce_file, metafile_ext='.xml'))
+        ifg_meta = add_ifgram_metadata(ifg_meta, dates, baseline_dict)
 
         # write .rsc file
         rsc_file = isce_file+'.rsc'
-        writefile.write_roipac_rsc(ifg_metadata, rsc_file,
+        writefile.write_roipac_rsc(ifg_meta, rsc_file,
                                    update_mode=update_mode,
                                    print_msg=False)
-        prog_bar.update(i+1, suffix='{}_{}'.format(dates[0], dates[1]))
     prog_bar.close()
     return
 
@@ -228,50 +262,46 @@ def prepare_stack(inputDir, filePattern,
 #########################################################################
 def main(iargs=None):
     inps = cmd_line_parse(iargs)
-    inps.processor = isce_utils.get_processor(inps.metaFile)
+    inps.processor = isce_utils.get_processor(inps.meta_file)
 
     # read common metadata
     metadata = {}
-    if inps.metaFile:
-        rsc_file = os.path.join(os.path.dirname(inps.metaFile), 'data.rsc')
-        metadata = isce_utils.extract_isce_metadata(inps.metaFile,
-                                                    geom_dir=inps.geometryDir,
-                                                    rsc_file=rsc_file,
-                                                    update_mode=inps.update_mode)[0]
+    if inps.meta_file:
+        rsc_file = os.path.join(os.path.dirname(inps.meta_file), 'data.rsc')
+        metadata = isce_utils.extract_isce_metadata(
+            inps.meta_file,
+            geom_dir=inps.geom_dir,
+            rsc_file=rsc_file,
+            update_mode=inps.update_mode)[0]
 
     # prepare metadata for geometry file
-    if inps.geometryDir:
-        prepare_geometry(inps.geometryDir,
-                         geom_files=inps.geometryFiles,
-                         metadata=metadata,
-                         processor=inps.processor,
-                         update_mode=inps.update_mode)
+    if inps.geom_dir:
+        prepare_geometry(
+            inps.geom_dir,
+            geom_files=inps.geom_files,
+            metadata=metadata,
+            processor=inps.processor,
+            update_mode=inps.update_mode)
 
     # read baseline info
     baseline_dict = {}
-    if inps.baselineDir:
-        if inps.baselineDir.startswith('rand') and inps.dsetDir and inps.dsetFiles:
-            if '-' in inps.baselineDir:
-                max_bperp = float(inps.baselineDir.split('-')[1])
-            else:
-                max_bperp = 10
-            baseline_dict = gen_random_baseline_timeseries(dset_dir=inps.dsetDir,
-                                                           dset_file=inps.dsetFiles[0],
-                                                           max_bperp=max_bperp)
-
+    if inps.baseline_dir:
+        if inps.baseline_dir.startswith('rand') and inps.obs_files:
+            baseline_dict = gen_random_baseline_timeseries(inps.obs_files[0])
         else:
-            baseline_dict = isce_utils.read_baseline_timeseries(inps.baselineDir,
-                                                                processor=inps.processor)
+            baseline_dict = isce_utils.read_baseline_timeseries(
+                inps.baseline_dir,
+                processor=inps.processor)
 
     # prepare metadata for ifgram file
-    if inps.dsetDir and inps.dsetFiles:
-        for namePattern in inps.dsetFiles:
-            prepare_stack(inps.dsetDir,
-                          namePattern,
-                          metadata=metadata,
-                          baseline_dict=baseline_dict,
-                          processor=inps.processor,
-                          update_mode=inps.update_mode)
+    if inps.obs_files:
+        for obs_file in inps.obs_files:
+            prepare_stack(
+                obs_file,
+                metadata=metadata,
+                baseline_dict=baseline_dict,
+                update_mode=inps.update_mode)
+
     print('Done.')
     return
 
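A subtle behavioral point in the prepare_geometry rewrite above: the old code read the
per-file attributes first and then called .update(metadata), so the stack-wide metadata
won every conflict; the new code starts from a copy of the stack metadata ({**metadata})
and updates it with the per-file attributes, so the per-file values win. A toy
illustration of the two merge orders (hypothetical values):

    common   = {'WIDTH': '100', 'PROCESSOR': 'isce'}   # stack-wide metadata
    per_file = {'WIDTH': '25'}                         # e.g. a low-resolution layer

    old = dict(per_file)
    old.update(common)       # common wins:   WIDTH stays '100'

    new = {**common}
    new.update(per_file)     # per-file wins: WIDTH becomes '25'

    assert old['WIDTH'] == '100' and new['WIDTH'] == '25'
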
diff -pruN 1.3.3-2/mintpy/prep_roipac.py 1.4.0-1/mintpy/prep_roipac.py
--- 1.3.3-2/mintpy/prep_roipac.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/prep_roipac.py	2022-08-04 20:01:49.000000000 +0000
@@ -9,20 +9,12 @@
 import os
 import sys
 import shutil
-import argparse
 from mintpy.utils import readfile, writefile, utils as ut
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 ##################################################################################################
-EXAMPLE = """example:
-  prep_roipac.py  filt_100901-110117-sim_HDR_4rlks_c10.unw
-  prep_roipac.py  ./interferograms/*/filt_*.unw
-  prep_roipac.py  ./interferograms/*/filt_*rlks.cor
-  prep_roipac.py  ./interferograms/*/filt_*rlks.int
-  prep_roipac.py  ./interferograms/*/filt_*_snap_connect.byt
-"""
-
-DESCRIPTION = """
+NOTE = """
   For each binary file (unwrapped/wrapped interferogram, spatial coherence file), there are 2 .rsc files:
  1) basic metadata file and 2) baseline parameter file. This script finds those two rsc files based on
   input binary file name, and merge those two metadata files into one.
@@ -32,16 +24,23 @@ DESCRIPTION = """
   one file: filt_100901-110117-sim_HDR_4rlks_c10.unw.rsc
 """
 
+EXAMPLE = """example:
+  prep_roipac.py  filt_100901-110117-sim_HDR_4rlks_c10.unw
+  prep_roipac.py  ./interferograms/*/filt_*.unw
+  prep_roipac.py  ./interferograms/*/filt_*rlks.cor
+  prep_roipac.py  ./interferograms/*/filt_*rlks.int
+  prep_roipac.py  ./interferograms/*/filt_*_snap_connect.byt
+"""
+
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Prepare attributes file for ROI_PAC products.\n' +
-                                     DESCRIPTION,
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Prepare attributes file for ROI_PAC products.'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis+NOTE, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', nargs='+', help='ROI_PAC file(s)')
-    parser.add_argument('--no-parallel', dest='parallel', action='store_false', default=True,
-                        help='Disable parallel processing. Diabled auto for 1 input file.')
     return parser
 
 
diff -pruN 1.3.3-2/mintpy/prep_snap.py 1.4.0-1/mintpy/prep_snap.py
--- 1.3.3-2/mintpy/prep_snap.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/prep_snap.py	2022-08-04 20:01:49.000000000 +0000
@@ -9,21 +9,21 @@
 
 import os
 import sys
-import argparse
 from mintpy.utils import readfile, writefile, utils as ut
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 SPEED_OF_LIGHT = 299792458  # m / s
 
 
 ##################################################################################################
-DESCRIPTION = """
+NOTE = """
  For each interferogram, coherence or unwrapped .dim product, this script will prepare .rsc
  metadata files for MintPy based on the .dim metadata file.
 
   The SNAP .dim file should contain all the required sensor / baseline metadata needed.
   The baseline metadata gets written during snap back-geocoding (co-registration).
-  prep_snap is run seperately for unw/ifg/cor files so neeeds seperate .dim/.data products
+  prep_snap is run separately for unw/ifg/cor files so needs separate .dim/.data products
   with only the relevant band in each product. Use Band Subset > save BEAM-DIMAP file.
 
   The file name should be yyyymmdd_yyyymmdd_type_tc.dim where type can be filt/unw/coh.
@@ -39,10 +39,12 @@ EXAMPLE = """example:
   prep_snap.py  ../dem_tc.data/dem*.img
 """
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Prepare attributes file for SNAP products.\n'+DESCRIPTION,
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Prepare attributes file for SNAP products.'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis+NOTE, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', nargs='+', help='SNAP data file(s) in *.img format.')
     return parser
diff -pruN 1.3.3-2/mintpy/reference_date.py 1.4.0-1/mintpy/reference_date.py
--- 1.3.3-2/mintpy/reference_date.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/reference_date.py	2022-08-04 20:01:49.000000000 +0000
@@ -10,14 +10,13 @@ import os
 import sys
 import time
 import shutil
-import argparse
 import h5py
 import numpy as np
 
 from mintpy.objects import timeseries
 from mintpy.objects.cluster import split_box2sub_boxes
 from mintpy.defaults.template import get_template_content
-from mintpy.utils import arg_group, readfile, writefile, ptime, utils as ut
+from mintpy.utils import arg_utils, readfile, writefile, ptime, utils as ut
 
 
 ##################################################################
@@ -29,10 +28,12 @@ EXAMPLE = """example:
 """
 
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Change reference date of timeseries.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=TEMPLATE+'\n'+EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Change reference date of timeseries.'
+    epilog = TEMPLATE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = arg_utils.create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('timeseries_file', nargs='+', help='timeseries file(s)')
     parser.add_argument('-r', '--ref-date', dest='refDate', default='minRMS',
@@ -48,7 +49,7 @@ def create_parser():
                         help='Force updating the data matrix.')
 
     # computing
-    parser = arg_group.add_memory_argument(parser)
+    parser = arg_utils.add_memory_argument(parser)
 
     return parser
 
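arg_group has been renamed to arg_utils throughout this release, and shared options such
as the memory limit above are attached by small helper functions instead of being
repeated in every script. A hypothetical sketch of that helper pattern (the flag names
and default below are assumptions for illustration, not the actual arg_utils API):

    def add_memory_argument(parser):
        """Attach a shared memory-limit option to an existing parser."""
        group = parser.add_argument_group('computing')
        group.add_argument('--ram', '--memory', dest='maxMemory', type=float,
                           default=4.0,
                           help='Max memory to use in GB (assumed default).')
        return parser
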
diff -pruN 1.3.3-2/mintpy/reference_point.py 1.4.0-1/mintpy/reference_point.py
--- 1.3.3-2/mintpy/reference_point.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/reference_point.py	2022-08-04 20:01:49.000000000 +0000
@@ -8,13 +8,14 @@
 
 import os
 import sys
-import argparse
 import h5py
 import numpy as np
 import random
+
 from mintpy.objects import timeseries
 from mintpy.defaults.template import get_template_content
-from mintpy.utils import readfile, writefile, utils as ut
+from mintpy.utils import ptime, readfile, writefile, utils as ut
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 #########################################  Usage  ##############################################
@@ -23,7 +24,7 @@ TEMPLATE = get_template_content('referen
 NOTE = """note: Reference value cannot be nan, thus the selected reference point must be:
  a. non-zero in mask, if mask is given
  b. non-nan in data (stack)
-  
+
   Priority:
       input reference_lat/lon
       input reference_y/x
@@ -55,10 +56,12 @@ EXAMPLE = """example:
 """
 
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Reference to the same pixel in space.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=NOTE+'\n'+TEMPLATE+'\n'+EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Reference to the same pixel in space.'
+    epilog = NOTE + '\n' + TEMPLATE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', type=str, help='file to be referenced.')
     parser.add_argument('-t', '--template', dest='template_file',
@@ -66,7 +69,7 @@ def create_parser():
     parser.add_argument('-m', '--mask', dest='maskFile', help='mask file')
 
     parser.add_argument('-o', '--outfile', type=str, default=None,
-                        help='output file name (default: %(default)s). This option is diabled for ifgramStack file.\n'
+                        help='output file name (default: %(default)s). This option is disabled for ifgramStack file.\n'
                              'None (default) to update the data value directly without writing to a new file.\n')
 
     parser.add_argument('--write-data', dest='write_data', action='store_true',
@@ -129,7 +132,7 @@ def read_template_file2inps(template_fil
     if not inps:
         inps = cmd_line_parse([''])
     inps_dict = vars(inps)
-    template = readfile.read_template(template_file)
+    template = readfile.read_template(template_file, skip_chars=['[', ']'])
     template = ut.check_template_auto_value(template)
 
     prefix = 'mintpy.reference.'
@@ -147,14 +150,12 @@ def read_template_file2inps(template_fil
     if key in template.keys():
         value = template[key]
         if value:
-            value = value.replace('[','').replace(']','')
             inps.ref_y, inps.ref_x = [int(i) for i in value.split(',')]
 
     key = prefix+'lalo'
     if key in template.keys():
         value = template[key]
         if value:
-            value = value.replace('[','').replace(']','')
             inps.ref_lat, inps.ref_lon = [float(i) for i in value.split(',')]
 
     return inps
@@ -180,8 +181,8 @@ def reference_file(inps):
     atr = readfile.read_attribute(inps.file)
 
     # update_mode
-    if (not inps.force 
-            and inps.ref_y is not None and inps.ref_y == int(atr.get('REF_Y', -999)) 
+    if (not inps.force
+            and inps.ref_y is not None and inps.ref_y == int(atr.get('REF_Y', -999))
             and inps.ref_x is not None and inps.ref_x == int(atr.get('REF_X', -999))):
         print('SAME reference pixel is already selected/saved in file, skip updating.')
         return inps.file
@@ -206,9 +207,10 @@ def reference_file(inps):
     else:
         # Find reference y/x
         if inps.method == 'maxCoherence':
-            inps.ref_y, inps.ref_x = select_max_coherence_yx(coh_file=inps.coherenceFile,
-                                                             mask=mask,
-                                                             min_coh=inps.minCoherence)
+            inps.ref_y, inps.ref_x = select_max_coherence_yx(
+                coh_file=inps.coherenceFile,
+                mask=mask,
+                min_coh=inps.minCoherence)
         elif inps.method == 'random':
             inps.ref_y, inps.ref_x = random_select_reference_yx(mask)
         elif inps.method == 'manual':
@@ -234,13 +236,22 @@ def reference_file(inps):
 
         if fext == '.h5':
             if inps.outfile == inps.file:
-                print('updating data value without re-writing to a new file')
+                print('updating dataset values without re-writing to a new file')
 
                 if k == 'ifgramStack':
                     with h5py.File(inps.file, 'r+') as f:
                         ds = f['unwrapPhase']
-                        for i in range(ds.shape[0]):
-                            ds[i, :, :] -= ds[i, inps.ref_y, inps.ref_x]
+                        num_date12 = ds.shape[0]
+                        prog_bar = ptime.progressBar(maxValue=num_date12)
+                        for i in range(num_date12):
+                            prog_bar.update(i+1, suffix=f'{i+1} / {num_date12}')
+
+                            # make a copy of ds[i] because h5py allows fancy indexing for 1D arrays only.
+                            data_2d = ds[i, :, :]
+                            # apply spatial referencing (skip pixels with no-data-value)
+                            data_2d[data_2d != 0.] -= data_2d[inps.ref_y, inps.ref_x]
+                            ds[i, :, :] = data_2d
+                        prog_bar.close()
 
                         print('update metadata')
                         f.attrs.update(atrNew)
@@ -265,7 +276,7 @@ def reference_file(inps):
                 print('writing the referenced data into file: {}'.format(inps.outfile))
 
                 # 1. read and update data value
-                data, atr = readfile.read(inps.file)
+                data, atr = readfile.read(inps.file, datasetName=k)
                 if len(data.shape) == 3:
                     # 3D matrix
                     for i in range(data.shape[0]):
@@ -321,7 +332,7 @@ def manual_select_reference_yx(data, inp
     """
     from matplotlib import pyplot as plt
     print('\nManual select reference point ...')
-    print('Click on a pixel that you want to choose as the refernce ')
+    print('Click on a pixel that you want to choose as the reference ')
     print('    pixel in the time-series analysis;')
     print('Then close the displayed window to continue.\n')
     if mask is not None:
@@ -464,9 +475,9 @@ def read_reference_input(inps):
         print('no input reference y/x.')
         if not inps.method:
             # Use existing REF_Y/X if 1) no ref_y/x input and 2) no method input and 3) ref_yx is in coverage
-            if (not inps.force 
+            if (not inps.force
                     and 'REF_X' in atr.keys()
-                    and 0 <= float(atr['REF_Y']) <= length 
+                    and 0 <= float(atr['REF_Y']) <= length
                     and 0 <= float(atr['REF_X']) <= width):
                 print('REF_Y/X exists in input file, skip updating.')
                 print('REF_Y: '+atr['REF_Y'])
@@ -502,6 +513,7 @@ def main(iargs=None):
 
     if inps.go_reference:
         reference_file(inps)
+
     print('Done.')
     return
 
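The new loop in reference_file above replaces a direct in-place subtraction on the HDF5
dataset with an explicit read-modify-write per 2-D slice: as the diff comment notes,
h5py supports that kind of fancy/boolean indexing on 1-D selections only, and the copy
also makes it easy to skip the zero-valued no-data pixels. A self-contained sketch of
the same pattern (file name, dataset name and reference pixel are hypothetical):

    import h5py

    with h5py.File('ifgramStack.h5', 'r+') as f:   # hypothetical file
        ds = f['unwrapPhase']                      # 3D: (date12, length, width)
        ref_y, ref_x = 100, 200                    # hypothetical reference pixel
        for i in range(ds.shape[0]):
            data_2d = ds[i, :, :]                  # pull one slice into memory
            # subtract the reference value, skipping no-data pixels (zeros)
            data_2d[data_2d != 0.] -= data_2d[ref_y, ref_x]
            ds[i, :, :] = data_2d                  # write the slice back
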
diff -pruN 1.3.3-2/mintpy/remove_hdf5_dataset.py 1.4.0-1/mintpy/remove_hdf5_dataset.py
--- 1.3.3-2/mintpy/remove_hdf5_dataset.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/remove_hdf5_dataset.py	2022-08-04 20:01:49.000000000 +0000
@@ -8,9 +8,9 @@
 
 import os
 import sys
-import argparse
 import h5py
 from mintpy.utils import writefile
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 ###########################################################################################
@@ -20,13 +20,17 @@ EXAMPLE = """Example:
   remove_hdf5_dataset.py  velocity.h5     velocityStd
 """
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Remove an existing dataset from HDF5 file',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+
+def create_parser(subparsers=None):
+    synopsis = 'Remove an existing dataset from HDF5 file'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', type=str, help='HDF5 file of interest')
     parser.add_argument('dset', type=str, nargs='+', help='dataset to be removed.')
+
     return parser
 
 def cmd_line_parse(iargs=None):
@@ -46,9 +50,10 @@ def main(iargs=None):
         raise ValueError(('input dataset does not exist: {}'
                           '\navailable datasets:\n{}').format(inps.dset, dset_list))
 
-    inps.file = writefile.remove_hdf5_dataset(inps.file, inps.dset, print_msg=True)
+    writefile.remove_hdf5_dataset(inps.file, inps.dset, print_msg=True)
+
     print('Done.')
-    return inps.file
+    return
 
 
 ###########################################################################################
diff -pruN 1.3.3-2/mintpy/remove_ramp.py 1.4.0-1/mintpy/remove_ramp.py
--- 1.3.3-2/mintpy/remove_ramp.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/remove_ramp.py	2022-08-04 20:01:49.000000000 +0000
@@ -8,16 +8,17 @@
 
 import os
 import sys
-import argparse
 import warnings
 from mintpy.objects import RAMP_LIST
 from mintpy.utils import readfile, utils as ut
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 # key configuration parameter name
-configKeys = ['mintpy.deramp',
-              'mintpy.deramp.maskFile',
-             ]
+configKeys = [
+    'mintpy.deramp',
+    'mintpy.deramp.maskFile',
+]
 
 
 ###########################################################################################
@@ -28,10 +29,12 @@ EXAMPLE = """example:
 """
 
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Remove phase ramp',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Remove 2D ramp(s) from the input file.'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', help='File for ramp removal')
     parser.add_argument('-m', '--mask', dest='mask_file', default='maskTempCoh.h5',
@@ -113,12 +116,13 @@ def main(iargs=None):
     if inps.update_mode and run_or_skip(inps) == 'skip':
         return inps.outfile
 
-    out_file = ut.run_deramp(inps.file,
-                             ramp_type=inps.surface_type,
-                             mask_file=inps.mask_file,
-                             out_file=inps.outfile,
-                             datasetName=inps.dset,
-                             save_ramp_coeff=inps.save_ramp_coeff)
+    out_file = ut.run_deramp(
+        inps.file,
+        ramp_type=inps.surface_type,
+        mask_file=inps.mask_file,
+        out_file=inps.outfile,
+        datasetName=inps.dset,
+        save_ramp_coeff=inps.save_ramp_coeff)
 
     # config parameter
     print('add/update the following configuration metadata to file:\n{}'.format(configKeys))
@@ -126,6 +130,7 @@ def main(iargs=None):
     atr_new['mintpy.deramp'] = inps.surface_type
     atr_new['mintpy.deramp.maskFile'] = inps.mask_file
     ut.add_attribute(out_file, atr_new)
+
     return
 
 
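remove_ramp records its configuration (the configKeys listed above) into the output
file's attributes, which is what allows --update runs to skip work when nothing has
changed. A hedged sketch of that comparison idea (run_or_skip itself is not shown in
this diff; the helper below is an illustration, not the upstream function):

    def config_unchanged(atr, inps):
        """Return True if the saved config matches the current inputs."""
        current = {
            'mintpy.deramp': inps.surface_type,
            'mintpy.deramp.maskFile': inps.mask_file,
        }
        return all(atr.get(key) == value for key, value in current.items())
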
diff -pruN 1.3.3-2/mintpy/s1ab_range_bias.py 1.4.0-1/mintpy/s1ab_range_bias.py
--- 1.3.3-2/mintpy/s1ab_range_bias.py	1970-01-01 00:00:00.000000000 +0000
+++ 1.4.0-1/mintpy/s1ab_range_bias.py	2022-08-04 20:01:49.000000000 +0000
@@ -0,0 +1,355 @@
+#!/usr/bin/env python3
+############################################################
+# Program is part of MintPy                                #
+# Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi         #
+# Author: Zhang Yunjun, Apr 2022                           #
+############################################################
+
+
+import os
+import sys
+import numpy as np
+from matplotlib import pyplot as plt, ticker, colors
+
+from mintpy.objects import timeseries
+from mintpy.utils import readfile, writefile, s1_utils, plot as pp
+from mintpy.utils.arg_utils import create_argument_parser
+
+
+
+####################################################################################
+REFERENCE = """reference:
+  Yunjun, Z., Fattahi, H., Pi, X., Rosen, P., Simons, M., Agram, P., & Aoki, Y. (2022). Range
+    Geolocation Accuracy of C-/L-band SAR and its Implications for Operational Stack Coregistration.
+    IEEE Trans. Geosci. Remote Sens., 60, doi:10.1109/TGRS.2022.3168509.
+"""
+
+EXAMPLE = """example:
+  # Requires a text file named "SAFE_files.txt" containing all Sentinel-1 SAFE filenames.
+  # It is generated in ISCE-2/topsStack by default, and could be generated as below if missing:
+  # ls ./SLC > SAFE_files.txt
+
+  # 1. compute the S1A/B range bias
+  # based on partially corrected TS file, for a more accurate estimation
+  s1ab_range_bias.py timeseriesRg_SET_ERA5.h5 -a compute
+  s1ab_range_bias.py timeseriesRg_SET_ERA5.h5 -a compute -b data
+  s1ab_range_bias.py timeseriesRg_SET_ERA5.h5 -a compute -b data --force
+  s1ab_range_bias.py timeseriesRg_SET_ERA5.h5 -a compute -b data --nodisplay
+
+  # 2. correct for the S1A/B range bias [from the 1st/raw TS file]
+  s1ab_range_bias.py timeseriesRg.h5 -a correct
+"""
+
+def create_parser(subparsers=None):
+    synopsis = 'Sentinel-1 A/B range bias correction'
+    epilog = REFERENCE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
+
+    # input/output files
+    parser.add_argument('ts_file', help='Range offset timeseries file to be corrected, e.g. timeseriesRg_SET_ERA5.h5.')
+    parser.add_argument('-g', '--geom', '--geometry', dest='geom_file', help='geometry file including datasets:\nheight')
+    parser.add_argument('-m', '--mask', dest='mask_file', help='mask file')
+
+    parser.add_argument('-s', '--safe-list', dest='safe_list_file',
+                        help='path to the SAFE_files.txt file, default: in the parent dir of mintpy work dir.')
+    parser.add_argument('-o', '--outfile', dest='ts_cor_file',
+                        help='Output file name for corrected time-series. Default: add "_S1Bias" suffix.')
+
+    # config
+    parser.add_argument('-a', '--action', dest='action', choices={'compute', 'correct'}, default='compute',
+                        help='Action to be executed:\n'
+                             'compute - estimate the S1A/B range bias and write to HDF5 file.\n'
+                             'correct - correct the input TS file using the bias file.')
+    parser.add_argument('-b','--method','--bias-method', dest='bias_method', choices={'hardwired', 'data'}, default='hardwired',
+                        help='Bias estimation method (default: %(default)s):\n'
+                             'hardwired - use hardwired values from section VII-A in Yunjun et al. (2022)\n'
+                             'data      - estimate from the input TS file, using the same method as in Yunjun et al. (2022)')
+    parser.add_argument('--force', dest='force', action='store_true', help='Force to re-generate the S1Bias.h5 file.')
+
+    # figure
+    fig = parser.add_argument_group('Plot the bias estimation result', 'For "--bias-method data" ONLY')
+    fig.add_argument('--save', dest='save_fig', action='store_true', help='save the figure')
+    fig.add_argument('--nodisplay', dest='disp_fig', action='store_false', help='save and do not display the figure')
+
+    return parser
+
+
+def cmd_line_parse(iargs=None):
+    """Command line parser."""
+    parser = create_parser()
+    inps = parser.parse_args(args=iargs)
+
+    inps.mintpy_dir = os.path.dirname(inps.ts_file)
+
+    # --geom
+    if not inps.geom_file:
+        inps.geom_file = os.path.join(inps.mintpy_dir, 'inputs', 'geometryRadar.h5')
+    if not os.path.isfile(inps.geom_file):
+        raise FileNotFoundError(f'No geometry file found in: {inps.geom_file}!')
+
+    # --mask
+    if not inps.mask_file:
+        inps.mask_file = os.path.join(inps.mintpy_dir, 'maskResInv.h5')
+    if not os.path.isfile(inps.mask_file):
+        inps.mask_file = None
+
+    # --save/nodisplay
+    if not inps.disp_fig:
+        inps.save_fig = True
+        plt.switch_backend('Agg')
+
+    return inps
+
+
+####################################################################################
+def estimate_s1ab_range_bias(ts_file, mask_file=None, safe_list_file=None):
+    """Estimate the S1A/B range bias based on the time series file.
+
+    Parameters: ts_file        - str, path of the time series range offset file
+                mask_file      - str, path of the mask file, e.g., maskResInv.h5 file.
+                safe_list_file - str, path of the SAFE_files.txt file
+    Returns:    bias_list      - list of float32, median bias per subswath in meters
+                bias_est       - 2D np.ndarray in size of (length, width) in float32, bias in meters    
+                mask_list      - list of 2D np.ndarray in size of (length, width) in bool
+    """
+    mintpy_dir = os.path.dirname(ts_file)
+    print(f'Estimating S1A/B range bias from file: {ts_file}')
+
+    # read time series
+    ts_data = readfile.read(ts_file)[0]
+    length, width = ts_data.shape[-2:]
+
+    # read mask
+    mask_file = mask_file if mask_file else os.path.join(mintpy_dir, 'maskResInv.h5')
+    if mask_file and os.path.isfile(mask_file):
+        mask = readfile.read(mask_file)[0]
+    else:
+        mask = np.ones((length, width), dtype=np.bool_)
+
+    # estimate bias - 2D map
+    bias_est_poi = s1_utils.estimate_s1ab_bias(
+        mintpy_dir,
+        ts_data[:, mask],
+        safe_list_file=safe_list_file)[0]
+
+    if bias_est_poi is None:
+        print('Exit without estimating S1A/B range bias from the input time series.')
+        return None, None, None
+
+    bias_est = np.ones((length, width), dtype=np.float32) * np.nan
+    bias_est[mask] = bias_est_poi
+
+    # estimate bias - median
+    geom_file = os.path.join(mintpy_dir, 'inputs', 'geometryRadar.h5')
+    flag = readfile.read(geom_file, datasetName='height')[0] != 0
+    mask_list = s1_utils.get_subswath_masks(flag, cut_overlap_in_half=False)[:3]
+    bias_list = [np.nanmedian(bias_est[x]) for x in mask_list]
+    print('IW1 : {:.3f} m'.format(bias_list[0]))
+    print('IW2 : {:.3f} m'.format(bias_list[1]))
+    print('IW3 : {:.3f} m'.format(bias_list[2]))
+
+    return bias_list, bias_est, mask_list
+
+
+def plot_s1ab_range_bias_est(bias_list, bias_est, mask_list, out_dir=None,
+                             save_fig=False, disp_fig=True):
+    """Plot the S1A/B range bias estimation results.
+
+    Parameters: bias_list - list of float, mean S1A/B range bias in meters
+                bias_est  - 2D np.ndarray in float32, pixel-wise S1A/B range bias in meters
+                mask_list - list of 2D np.ndarray in bool, mask array for IW1/2/3
+                out_dir   - str, output directory for the plotted figure.
+    """
+    vmin, vmax = 7, 14
+    font_size = 12
+    out_dir = out_dir if out_dir else os.getcwd()
+
+    ## figure 1 - map
+    fig_size = pp.auto_figure_size(ds_shape=bias_est.shape, disp_cbar=True, print_msg=True)
+    fig, ax = plt.subplots(figsize=fig_size)
+    cmap = colors.LinearSegmentedColormap.from_list('magma_t', plt.get_cmap('magma')(np.linspace(0.3, 1.0, 100)))
+    im = ax.imshow(bias_est*100., cmap=cmap, vmin=vmin, vmax=vmax, interpolation='nearest')
+
+    # axis format
+    ax.tick_params(which='both', direction='out', bottom=True, top=False, left=True, right=False)
+    ax.set_xlabel('Range [pixel]', fontsize=font_size)
+    ax.set_ylabel('Azimuth [pixel]', fontsize=font_size)
+    cbar = fig.colorbar(im, ax=ax)
+    cbar.set_label('S1A/B range bias [cm]', fontsize=font_size)
+
+    # output
+    if save_fig:
+        out_fig = os.path.join(out_dir, 's1ab_range_bias_map.pdf')
+        print('save figure to file', out_fig)
+        plt.savefig(out_fig, bbox_inches='tight', transparent=True, dpi=300)
+
+    ## figure 2 - histogram
+    clist = [cmap((x*100. - vmin) / (vmax - vmin)) for x in bias_list]
+    fig, ax = plt.subplots(nrows=1, ncols=1, figsize=[6, 2])
+    for bias, mask, c in zip(bias_list, mask_list, clist):
+        ax.hist(bias_est[mask].flatten()*100, bins=70, range=(vmin, vmax), density=False, alpha=0.7, color=c)
+        # mark the median value per subswath
+        ax.axvline(bias*100, color='k')
+    ax.tick_params(which='both', direction='out', bottom=True, top=True, left=True, right=True)
+    ax.xaxis.set_minor_locator(ticker.AutoMinorLocator())
+    ax.set_xlim(vmin, vmax)
+    ax.set_xlabel('S1A/B range bias [cm]')
+    ax.set_ylabel('# of pixels')
+    fig.tight_layout()
+
+    # output
+    if save_fig:
+        out_fig = os.path.join(out_dir, 's1ab_range_bias_hist.pdf')
+        print('save figure to file', out_fig)
+        plt.savefig(out_fig, bbox_inches='tight', transparent=True, dpi=300)
+
+    if disp_fig:
+        print('showing...')
+        plt.show()
+    else:
+        plt.close()
+
+    return
+
+
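The map figure above truncates matplotlib's 'magma' colormap to its brighter 70% so that low bias values do not render near-black; the same trick in isolation, as a self-contained sketch:

    import numpy as np
    import matplotlib.pyplot as plt
    from matplotlib import colors

    # sample the upper 70% of 'magma' and rebuild it as a standalone colormap
    magma_t = colors.LinearSegmentedColormap.from_list(
        'magma_t', plt.get_cmap('magma')(np.linspace(0.3, 1.0, 100)))
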
+def write_s1ab_bias_file(bias_file, bias_list, geom_file, force=False):
+    """Write estimated S1A/B range bias to HDF5 file.
+
+    Parameters: bias_file - str, path to the S1A/B range bias file
+                bias_list - list of float, constant S1A/B range bias per IW1/2/3
+                geom_file - str, path to the geometry file
+                force     - bool, overwrite existing bias file.
+    Returns:    bias_file - str, path to the S1A/B range bias file
+    """
+    # run or skip
+    if os.path.isfile(bias_file) and not force:
+        print(f'S1A/B bias file already exists: {bias_file}, skip re-writing.')
+        return bias_file
+
+    # get the list of masks for IW1/2/3
+    flag = readfile.read(geom_file, datasetName='height')[0] != 0
+    mask_list = s1_utils.get_subswath_masks(flag, cut_overlap_in_half=False)[:3]
+
+    bias_mat = np.full(flag.shape, np.nan, dtype=np.float32)
+    for bias, mask in zip(bias_list, mask_list):
+        bias_mat[mask] = bias
+
+    # write file
+    atr = readfile.read_attribute(geom_file)
+    atr['FILE_TYPE'] = 'offset'
+    atr['UNIT'] = 'm'
+    print('writing S1A/B range bias to file: {}'.format(bias_file))
+    writefile.write(bias_mat, out_file=bias_file, metadata=atr)
+
+    return bias_file
+
+
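The per-subswath fill above reduces to masked assignment into a NaN-initialized array. A self-contained toy version with stand-in masks (the real ones come from s1_utils.get_subswath_masks; the bias values are the hardwired ones used in main below):

    import numpy as np

    flag = np.ones((4, 9), dtype=bool)                               # toy footprint
    mask_list = [flag & (np.arange(9) // 3 == i) for i in range(3)]  # fake IW1/2/3
    bias_mat = np.full(flag.shape, np.nan, dtype=np.float32)
    for bias, mask in zip([0.087, 0.106, 0.123], mask_list):
        bias_mat[mask] = bias                                        # constant per swath
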
+def correct_s1ab_range_bias(ts_file, bias_file, ts_cor_file=None, safe_list_file=None):
+    """Correct input time series for the S1A/B range bias.
+
+    Parameters: ts_file        - str, path to the range offset time series file
+                bias_file      - str, path to the S1A/B range bias file
+                ts_cor_file    - str, path to the corrected range offset time series file
+                safe_list_file - str, path to the SAFE_files.txt file
+    Returns:    ts_cor_file    - str, path to the corrected range offset time series file
+    """
+
+    if not os.path.isfile(bias_file):
+        msg = f'No bias file found in: {bias_file}!'
+        msg += '\nRe-run with "--action compute" to generate it.'
+        raise FileNotFoundError(msg)
+
+    date_list = timeseries(ts_file).get_date_list()
+    num_date = len(date_list)
+
+    # date info for Sentinel-1B
+    mintpy_dir = os.path.dirname(os.path.dirname(bias_file))
+    s1b_date_list_file = s1_utils.get_s1ab_date_list_file(mintpy_dir, safe_list_file)[1]
+    s1b_date_list = np.loadtxt(s1b_date_list_file, dtype=str).tolist()
+    s1b_flag = np.array([x in s1b_date_list for x in date_list], dtype=np.bool_)
+
+    # read data
+    ts_data = readfile.read(ts_file)[0].reshape(num_date, -1)
+    bias = readfile.read(bias_file)[0].flatten()
+
+    # correct bias
+    mask = ts_data == 0
+    ts_data[s1b_flag] -= np.tile(bias.reshape(1, -1), (np.sum(s1b_flag), 1))
+    ts_data[mask] = 0                    # do not change zero values in the input TS file
+    ts_data[:, np.isnan(bias)] = np.nan  # set to NaN for pixels with NaN in the bias file
+
+    # write file
+    if not ts_cor_file:
+        ts_cor_file = '{}_S1Bias.h5'.format(os.path.splitext(ts_file)[0])
+    atr = readfile.read_attribute(ts_file)
+    length = int(atr['LENGTH'])
+    width = int(atr['WIDTH'])
+    writefile.write(ts_data.reshape(num_date, length, width),
+                    out_file=ts_cor_file,
+                    metadata=atr,
+                    ref_file=ts_file)
+
+    return ts_cor_file
+
+
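A note on the correction step above: since bias is 1-D over pixels, plain NumPy broadcasting produces the same result as the explicit np.tile; a quick check of the equivalence:

    import numpy as np

    ts = np.arange(12, dtype=np.float32).reshape(3, 4)  # (num_date, num_pixel)
    bias = np.array([1, 2, 3, 4], dtype=np.float32)     # (num_pixel,)
    s1b = np.array([True, False, True])                 # S1B acquisition flags

    a = ts.copy()
    a[s1b] -= np.tile(bias.reshape(1, -1), (s1b.sum(), 1))
    b = ts.copy()
    b[s1b] -= bias                                      # broadcasting, same values
    assert np.allclose(a, b)
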
+####################################################################################
+def main(iargs=None):
+    inps = cmd_line_parse(iargs)
+
+    # default bias file path
+    inps.bias_file = os.path.join(os.path.dirname(inps.geom_file), 'S1Bias.h5')
+
+    # calculate the S1A/B range bias
+    if inps.action == 'compute':
+        if inps.bias_method == 'hardwired':
+            # option 1 - use the hardwired values from section VII-A in Yunjun et al. (2022)
+            bias_list = [0.087, 0.106, 0.123]   # m
+            print('Using hardwired S1A/B range bias values from Yunjun et al. (2022):')
+            print('IW1 : {:.3f} m'.format(bias_list[0]))
+            print('IW2 : {:.3f} m'.format(bias_list[1]))
+            print('IW3 : {:.3f} m'.format(bias_list[2]))
+
+        else:
+            # option 2 - estimate from the input dataset's own time series
+            # estimate the optimal (median) value for each subswath, e.g. from SenDT156
+            bias_list, bias_est, mask_list = estimate_s1ab_range_bias(
+                ts_file=inps.ts_file,
+                mask_file=inps.mask_file,
+                safe_list_file=inps.safe_list_file)
+
+            # plot the estimation result
+            if bias_list:
+                plot_s1ab_range_bias_est(
+                    bias_list,
+                    bias_est,
+                    mask_list,
+                    out_dir=os.path.dirname(inps.ts_file),
+                    save_fig=inps.save_fig,
+                    disp_fig=inps.disp_fig)
+
+        # write S1Bias.h5 file
+        if bias_list:
+            write_s1ab_bias_file(
+                bias_file=inps.bias_file,
+                bias_list=bias_list,
+                geom_file=inps.geom_file,
+                force=inps.force)
+
+    # correct time series range offset file
+    elif inps.action == 'correct':
+        correct_s1ab_range_bias(
+            ts_file=inps.ts_file,
+            bias_file=inps.bias_file,
+            ts_cor_file=inps.ts_cor_file,
+            safe_list_file=inps.safe_list_file)
+
+    return
+
+
+####################################################################################
+if __name__ == '__main__':
+    main(sys.argv[1:])
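
Taken together, the intended workflow is one compute pass (writes S1Bias.h5 next to the geometry file) followed by one correct pass (writes <ts_file>_S1Bias.h5). A hypothetical invocation through the module API; only the --action flag is confirmed by the error message above, while the positional time-series argument and file names are assumptions, since the parser sits outside this excerpt:

    main(['timeseriesRg.h5', '--action', 'compute'])  # hypothetical: writes inputs/S1Bias.h5
    main(['timeseriesRg.h5', '--action', 'correct'])  # hypothetical: writes timeseriesRg_S1Bias.h5
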
diff -pruN 1.3.3-2/mintpy/save_gbis.py 1.4.0-1/mintpy/save_gbis.py
--- 1.3.3-2/mintpy/save_gbis.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/save_gbis.py	2022-08-04 20:01:49.000000000 +0000
@@ -6,10 +6,8 @@
 ############################################################
 
 
-
 import os
 import sys
-import argparse
 import numpy as np
 import scipy.io as sio
 import matplotlib.pyplot as plt
@@ -19,14 +17,10 @@ warnings.filterwarnings("ignore", catego
 
 from mintpy.objects import sensor
 from mintpy.utils import ptime, readfile, utils as ut
+from mintpy.utils.arg_utils import create_argument_parser
 
 
-EXAMPLE = """example:
-  save_gbis.py velocity.h5 -g inputs/geometryGeo.h5 -o AlosDT73_20081012_20100302.mat
-  save_gbis.py 20150223_20161031_msk.unw -g inputs/geometryGeo.h5 -o Alos2DT23_20150223_20161031.mat
-  save_gbis.py 20150223_20161031.unw -g inputs/geometryGeo.h5 --out-data ../Model/data --ellipsoid2geoid
-"""
-
+##############################################################################
 REFERENCE = """references:
   Bagnardi, M., and A. Hooper (2018), Inversion of Surface Deformation Data for Rapid Estimates of Source 
   Parameters and Uncertainties: A Bayesian Approach, Geochemistry, Geophysics, Geosystems, 19, 
@@ -36,10 +30,18 @@ REFERENCE = """references:
   with L-band InSAR time series, Geophysical Research Letters, 48(11), e2021GL092879. doi:10.1029/2021GL092879
 """
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Convert MintPy product to GBIS .mat format.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=REFERENCE+'\n'+EXAMPLE)
+EXAMPLE = """example:
+  save_gbis.py velocity.h5 -g inputs/geometryGeo.h5 -o AlosDT73_20081012_20100302.mat
+  save_gbis.py 20150223_20161031_msk.unw -g inputs/geometryGeo.h5 -o Alos2DT23_20150223_20161031.mat
+  save_gbis.py 20150223_20161031.unw -g inputs/geometryGeo.h5 --out-data ../Model/data --ellipsoid2geoid
+"""
+
+def create_parser(subparsers=None):
+    synopsis = 'Convert MintPy product to GBIS .mat format.'
+    epilog = REFERENCE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', help='deformation file.')
     parser.add_argument('dset', nargs='?',
@@ -77,6 +79,7 @@ def cmd_line_parse(iargs=None):
     return inps
 
 
+##############################################################################
 def read_data(inps):
     """
     Returns: defo: 2D np.array with in-valid/masked-out pixel in NaN
@@ -246,7 +249,8 @@ def main(iargs=None):
     if inps.disp_fig:
         print('showing...')
         plt.show()
-    return inps.outfile
+
+    return
 
 
 ##############################################################################
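
This parser refactor recurs across all of the save_*.py scripts below: the per-script argparse boilerplate is replaced with the shared create_argument_parser factory, so each script can run standalone or register itself as a sub-command. A sketch of the presumed composition (hypothetical top-level driver, not part of this diff):

    import argparse
    from mintpy import save_gbis, save_gmt

    # hypothetical driver registering the refactored scripts as sub-commands
    top = argparse.ArgumentParser(prog='mintpy')
    sub = top.add_subparsers(dest='cmd', required=True)
    save_gbis.create_parser(subparsers=sub)
    save_gmt.create_parser(subparsers=sub)
    inps = top.parse_args()
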
diff -pruN 1.3.3-2/mintpy/save_gdal.py 1.4.0-1/mintpy/save_gdal.py
--- 1.3.3-2/mintpy/save_gdal.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/save_gdal.py	2022-08-04 20:01:49.000000000 +0000
@@ -8,10 +8,9 @@
 
 import os
 import sys
-import argparse
-import numpy as np
 from osgeo import gdal, osr
 from mintpy.utils import readfile, utils0 as ut, plot as pp
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 # link: https://gdal.org/drivers/raster/index.html
@@ -33,10 +32,12 @@ EXAMPLE = """example:
 """
 
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Generate GDAL raster from MintPy h5 file.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Generate GDAL raster from MintPy h5 file.'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', help='file to be converted, in geo coordinate.')
     parser.add_argument('-d', '--dset', '--dataset', dest='dset',
diff -pruN 1.3.3-2/mintpy/save_gmt.py 1.4.0-1/mintpy/save_gmt.py
--- 1.3.3-2/mintpy/save_gmt.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/save_gmt.py	2022-08-04 20:01:49.000000000 +0000
@@ -8,26 +8,28 @@
 
 
 import sys
-import argparse
 import numpy as np
 from scipy.io import netcdf
 from mintpy.utils import readfile, plot as pp
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 ####################################################################################
 EXAMPLE = """example:
   save_gmt.py  geo_velocity.h5
-  save_gmt.py  geo_timeseries.h5     20071031
+  save_gmt.py  geo_timeseries.h5  20071031
   save_gmt.py  geo_timeseries.h5
   save_gmt.py  geo_filt_100608-101024-sim_HDR_16rlks_c10.unw
   save_gmt.py  gsi10m.dem
 """
 
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Export geocoded file to GMT grd file',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Export geocoded file to GMT grd file'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', help='file to be converted, in geo coordinate.')
     parser.add_argument('dset', nargs='?',
@@ -169,9 +171,10 @@ def main(iargs=None):
         outbase = pp.auto_figure_title(inps.file, datasetNames=inps.dset, inps_dict=vars(inps))
         inps.outfile = '{}.grd'.format(outbase)
 
-    inps.outfile = write_grd_file(data, atr, inps.outfile)
+    write_grd_file(data, atr, inps.outfile)
+
     print('Done.')
-    return inps.outfile
+    return
 
 
 ####################################################################################
diff -pruN 1.3.3-2/mintpy/save_hdfeos5.py 1.4.0-1/mintpy/save_hdfeos5.py
--- 1.3.3-2/mintpy/save_hdfeos5.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/save_hdfeos5.py	2022-08-04 20:01:49.000000000 +0000
@@ -8,7 +8,6 @@
 
 import os
 import sys
-import argparse
 import datetime as dt
 import h5py
 import numpy as np
@@ -16,6 +15,7 @@ import numpy as np
 from mintpy.objects import timeseries, geometry, sensor
 from mintpy.defaults.template import get_template_content
 from mintpy.utils import ptime, readfile
+from mintpy.utils.arg_utils import create_argument_parser
 from mintpy import info
 
 
@@ -34,13 +34,17 @@ EXAMPLE = """example:
   save_hdfeos5.py timeseries_ERA5_ramp_demErr.h5 --tc temporalCoherence.h5 --asc avgSpatialCoh.h5 -m maskTempCoh.h5 -g inputs/geometryGeo.h5
 """
 
+NOTE = """
+  https://earthdata.nasa.gov/esdis/eso/standards-and-references/hdf-eos5
+  https://mintpy.readthedocs.io/en/latest/hdfeos5/
+"""
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Convert MintPy timeseries product into HDF-EOS5 format\n' +
-                                     '  https://earthdata.nasa.gov/esdis/eso/standards-and-references/hdf-eos5\n' +
-                                     '  https://mintpy.readthedocs.io/en/latest/hdfeos5/',
-                                     formatter_class=argparse.RawDescriptionHelpFormatter,
-                                     epilog=TEMPALTE+'\n'+EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Convert MintPy timeseries product into HDF-EOS5 format'
+    epilog = TEMPALTE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis+NOTE, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('ts_file', default='timeseries.h5', help='Timeseries file')
     parser.add_argument('-t', '--template', dest='template_file',
@@ -52,6 +56,7 @@ def create_parser():
                         help='Average spatial coherence file, i.e. avgSpatialCoh.h5')
     parser.add_argument('-m', '--mask', dest='mask_file', help='Mask file')
     parser.add_argument('-g', '--geometry', dest='geom_file', help='geometry file')
+    parser.add_argument('--suffix', dest='suffix', help='suffix to be appended to file name (e.g. PS).')
 
     parser.add_argument('--update', action='store_true',
                         help='Enable update mode, a.k.a. put XXXXXXXX as endDate in filename if endDate < 1 year')
@@ -242,7 +247,7 @@ def metadata_mintpy2unavco(meta_in, date
     return unavco_meta
 
 
-def get_output_filename(metadata, update_mode=False, subset_mode=False):
+def get_output_filename(metadata, suffix=None, update_mode=False, subset_mode=False):
     """Get output file name of HDF-EOS5 time-series file."""
     SAT = metadata['mission']
     SW = metadata['beam_mode']
@@ -263,7 +268,10 @@ def get_output_filename(metadata, update
         print('Update mode is ON, put endDate as XXXXXXXX.')
         DATE2 = 'XXXXXXXX'
 
-    outName = SAT+'_'+SW+'_'+RELORB+'_'+FRAME+'_'+DATE1+'_'+DATE2+'.he5'
+    if not suffix:
+        outName = SAT+'_'+SW+'_'+RELORB+'_'+FRAME+'_'+DATE1+'_'+DATE2+'.he5'
+    else:
+        outName = SAT+'_'+SW+'_'+RELORB+'_'+FRAME+'_'+DATE1+'_'+DATE2+'_'+suffix+'.he5'
 
     if subset_mode:
         print('Subset mode is enabled, put subset range info in output filename.')
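
For illustration of the new --suffix option: an output that would otherwise be named like S1_IW12_128_0593_20141213_20180619.he5 (values hypothetical) is written as S1_IW12_128_0593_20141213_20180619_PS.he5 when run with --suffix PS.
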
@@ -457,24 +465,29 @@ def main(iargs=None):
     inps, template = read_template2inps(inps.template_file, inps)
 
     # Prepare Metadata
-    meta = prep_metadata(ts_file=inps.ts_file,
-                         template=template,
-                         print_msg=True)
+    meta = prep_metadata(
+        ts_file=inps.ts_file,
+        template=template,
+        print_msg=True)
 
     # Get output filename
-    out_file = get_output_filename(metadata=meta,
-                                   update_mode=inps.update,
-                                   subset_mode=inps.subset)
+    out_file = get_output_filename(
+        metadata=meta,
+        suffix=inps.suffix,
+        update_mode=inps.update,
+        subset_mode=inps.subset)
 
     # Open HDF5 File
-    write_hdf5_file(metadata=meta,
-                    out_file=out_file,
-                    ts_file=inps.ts_file,
-                    tcoh_file=inps.tcoh_file,
-                    scoh_file=inps.scoh_file,
-                    mask_file=inps.mask_file,
-                    geom_file=inps.geom_file)
-    return out_file
+    write_hdf5_file(
+        metadata=meta,
+        out_file=out_file,
+        ts_file=inps.ts_file,
+        tcoh_file=inps.tcoh_file,
+        scoh_file=inps.scoh_file,
+        mask_file=inps.mask_file,
+        geom_file=inps.geom_file)
+
+    return
 
 
 ################################################################
diff -pruN 1.3.3-2/mintpy/save_kite.py 1.4.0-1/mintpy/save_kite.py
--- 1.3.3-2/mintpy/save_kite.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/save_kite.py	2022-08-04 20:01:49.000000000 +0000
@@ -7,15 +7,18 @@
 
 
 import sys
-import argparse
 import datetime as dt
 import numpy as np
-from mintpy.utils import ptime, readfile, arg_group, attribute
+
+from mintpy.utils import ptime, readfile, arg_utils, attribute
 from mintpy import subset
 
+
 d2r = np.pi / 180.
 r2d = 180. / np.pi
 
+
+#########################################################################################################
 EXAMPLE = """example:
   ## displacement [event-type inversion]
   # option 1: use velocity file with step estimation from timeseries2velocity.py for co-seismic displacement
@@ -35,10 +38,13 @@ EXAMPLE = """example:
 
 KITE_URL = 'https://github.com/pyrocko/kite'
 
-def create_parser():
-    parser = argparse.ArgumentParser(description=f'Generate KITE ({KITE_URL}) npz and yaml from MintPy HDF5 file.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+
+def create_parser(subparsers=None):
+    synopsis = f'Generate KITE ({KITE_URL}) npz and yaml from MintPy HDF5 file.'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = arg_utils.create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', type=str, help='file to be converted, in geo coordinate.')
     parser.add_argument('-d', '--dset', '--dataset', dest='dset', type=str, required=True,
@@ -52,7 +58,7 @@ def create_parser():
                         help='mask file, or run mask.py to mask the input file beforehand.')
     parser.add_argument('-o', '--output', dest='outfile', type=str,
                         help='output filename')
-    parser = arg_group.add_subset_argument(parser)
+    parser = arg_utils.add_subset_argument(parser)
     return parser
 
 def cmd_line_parse(iargs=None):
@@ -198,9 +204,9 @@ def main(iargs=None):
         attr = attribute.update_attribute4subset(attr, inps.pix_box)
 
     # create kite container
-    scene = mintpy2kite(dis, attr, date1, date2, inc_angle, az_angle, out_file=inps.outfile)
+    mintpy2kite(dis, attr, date1, date2, inc_angle, az_angle, out_file=inps.outfile)
 
-    return scene
+    return
 
 #########################################################################################################
 if __name__ == "__main__":
diff -pruN 1.3.3-2/mintpy/save_kmz.py 1.4.0-1/mintpy/save_kmz.py
--- 1.3.3-2/mintpy/save_kmz.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/save_kmz.py	2022-08-04 20:01:49.000000000 +0000
@@ -11,7 +11,6 @@
 import os
 import sys
 import shutil
-import argparse
 import numpy as np
 from lxml import etree
 from zipfile import ZipFile
@@ -25,7 +24,7 @@ except ImportError:
 import mintpy
 from mintpy.objects import timeseriesKeyNames
 from mintpy.utils import (
-    arg_group,
+    arg_utils,
     attribute as attr,
     ptime,
     readfile,
@@ -53,18 +52,24 @@ EXAMPLE = """example:
 """
 
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Generate Google Earth KMZ file (overlay / placemarks for files in geo / radar coordinates).',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Generate Google Earth KMZ file (overlay / placemarks for files in geo / radar coordinates).'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = arg_utils.create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', help='file to be converted, in geo or radar coordinate.\n'
                         'Note: for files in radar-coordinate, the corresponding lookup table\n'
                         'in radar-coordinate (as provided by ISCE) is required.')
-    parser.add_argument('dset', nargs='?', help='date of timeseries, or date12 of interferograms to be converted')
-    parser.add_argument('-m','--mask', dest='mask_file', metavar='FILE', help='mask file for display')
-    parser.add_argument('--zero-mask', dest='zero_mask', action='store_true', help='Mask pixels with zero value.')
-    parser.add_argument('-o', '--output', dest='outfile', help='output file base name. Extension is fixed with .kmz')
+    parser.add_argument('dset', nargs='?',
+                        help='date of timeseries, or date12 of interferograms to be converted')
+    parser.add_argument('-m','--mask', dest='mask_file', metavar='FILE',
+                        help='mask file for display')
+    parser.add_argument('--zero-mask', dest='zero_mask', action='store_true',
+                        help='Mask pixels with zero value.')
+    parser.add_argument('-o', '--output', dest='outfile',
+                        help='output file base name. Extension is fixed with .kmz')
     parser.add_argument('--kk','--keep-kml','--keep-kml-file', dest='keep_kml_file', action='store_true',
                         help='Do not remove KML and data/resource files after compressing into KMZ file.')
 
@@ -76,11 +81,16 @@ def create_parser():
                              'For file in radar-coordinate ONLY.')
 
     # Data
-    parser.add_argument('-v','--vlim', dest='vlim', nargs=2, metavar=('MIN', 'MAX'), type=float, help='Y/value limits for plotting.')
+    parser.add_argument('-v','--vlim', dest='vlim', nargs=2, metavar=('MIN', 'MAX'), type=float,
+                        help='Y/value limits for plotting.')
     parser.add_argument('-u', dest='disp_unit', metavar='UNIT', help='unit for display.')
-    parser.add_argument('-c', '--cm', '--colormap', dest='colormap', default='jet', help='Colormap for plotting. Default: jet')
-    parser.add_argument('--wrap', action='store_true', help='re-wrap data to display data in fringes.')
-    parser.add_argument('--wrap-range', dest='wrap_range', type=float, nargs=2, default=[-1.*np.pi, np.pi], metavar=('MIN', 'MAX'),
+    parser.add_argument('-c', '--cm', '--colormap', dest='cmap_name', default='jet',
+                        help='Colormap for plotting (default: %(default)s), such as jet, RdBu, etc.\n'
+                             'More details at https://mintpy.readthedocs.io/en/latest/api/colormaps/')
+    parser.add_argument('--wrap', action='store_true',
+                        help='re-wrap data to display data in fringes.')
+    parser.add_argument('--wrap-range', dest='wrap_range', type=float, nargs=2,
+                        default=[-1.*np.pi, np.pi], metavar=('MIN', 'MAX'),
                         help='range of one cycle after wrapping, default: [-pi, pi]')
 
     # Figure
@@ -89,8 +99,9 @@ def create_parser():
                      help='Figure DPI (dots per inch). Default: 600')
     fig.add_argument('--figsize', dest='fig_size', metavar=('WID', 'LEN'), type=float, nargs=2,
                      help='Figure size in inches - width and length')
-    fig.add_argument('--cbar-loc', dest='cbar_loc', choices=['lower left','lower right','upper left', 'upper right'],
-                     default='lower left', help='Location of colorbar in the screen. Default: lower left.')
+    fig.add_argument('--cbar-loc', dest='cbar_loc', default='lower left',
+                     choices=['lower left','lower right','upper left', 'upper right'],
+                     help='Location of colorbar in the screen. Default: lower left.')
     fig.add_argument('--cbar-label', dest='cbar_label', metavar='LABEL', default='Mean LOS velocity',
                      help='Colorbar label. Default: Mean LOS velocity')
     fig.add_argument('--cbar-bin-num', dest='cbar_bin_num', metavar='NUM', type=int,
@@ -108,7 +119,7 @@ def create_parser():
                      help='marker symbol of reference point')
 
     # subset
-    parser = arg_group.add_subset_argument(parser)
+    parser = arg_utils.add_subset_argument(parser)
 
     return parser
 
@@ -163,8 +174,18 @@ def plot_colorbar(out_file, vmin, vmax,
     return out_file
 
 
-def generate_cbar_element(cbar_file, vmin, vmax, unit='cm/year', cmap='jet', loc='lower left',
+def generate_cbar_element(cbar_file, cmap, vmin, vmax, unit='cm/year', loc='lower left',
                           nbins=None, label='Mean LOS velocity'):
+    """Generate colorbar as an screen overlay object.
+
+    Parameters: cbar_file - str, colorbar image file path
+                cmap      - matplotlib.colors.Colormap instance
+                vmin/vmax - float, min/max value to display
+                unit      - str, display unit
+                loc       - str, location of colorbar on the screen.
+                            lower-left, lower-right, upper-left, upper-right
+    Returns:    cbar_overlay - KML.ScreenOverlay object
+    """
     # plot colobar and save as an image
     cbar_file = plot_colorbar(
         out_file=cbar_file,
@@ -173,8 +194,7 @@ def generate_cbar_element(cbar_file, vmi
         unit=unit,
         cmap=cmap,
         nbins=nbins,
-        label=label,
-    )
+        label=label)
 
     # colobar location
     if loc.split()[0] == 'lower':
@@ -276,7 +296,10 @@ def write_kmz_file(out_file_base, kml_do
     """Write KML and KMZ files.
     Parameters: out_file_base - str, output file name without extension
                 kml_doc       - KML.Document() object
+                data_files    - list of str, rel path of data files
                 res_files     - list of str, rel path of resource files
+                keep_kml_file - bool, do not remove KML files after zipping.
+    Returns:    kmz_file      - str, zipped KMZ file.
     """
     # default values
     data_files = [] if data_files is None else data_files
@@ -396,14 +419,13 @@ def write_kmz_overlay(data, meta, out_fi
     cbar_file = '{}_cbar.png'.format(out_file_base)
     cbar_overlay = generate_cbar_element(
         cbar_file,
+        cmap=inps.colormap,
         vmin=inps.vlim[0],
         vmax=inps.vlim[1],
         unit=inps.disp_unit,
-        cmap=inps.colormap,
         loc=inps.cbar_loc,
         nbins=inps.cbar_bin_num,
-        label=inps.cbar_label,
-    )
+        label=inps.cbar_label)
     kml_doc.append(cbar_overlay)
 
     # Write KML file
@@ -411,8 +433,7 @@ def write_kmz_overlay(data, meta, out_fi
         out_file_base,
         kml_doc,
         data_files=[data_png_file, cbar_file],
-        keep_kml_file=inps.keep_kml_file,
-    )
+        keep_kml_file=inps.keep_kml_file)
 
     return kmz_file
 
@@ -446,14 +467,13 @@ def write_kmz_placemark(data, meta, out_
     cbar_file = '{}_cbar.png'.format(out_file_base)
     cbar_overlay = generate_cbar_element(
         cbar_file,
+        cmap=inps.colormap,
         vmin=inps.vlim[0],
         vmax=inps.vlim[1],
         unit=inps.disp_unit,
-        cmap=inps.colormap,
         loc=inps.cbar_loc,
         nbins=inps.cbar_bin_num,
-        label=inps.cbar_label,
-    )
+        label=inps.cbar_label)
     kml_doc.append(cbar_overlay)
 
     # 2. reference point
@@ -472,8 +492,7 @@ def write_kmz_placemark(data, meta, out_
             col=rx + xmin,
             val=0.0,
             icon_file=star_file,
-            inps=inps,
-        )
+            inps=inps)
         ref_point.name = 'ReferencePoint'
         ref_point.Style.IconStyle.scale = 1.0
         kml_doc.append(ref_point)
@@ -502,8 +521,7 @@ def write_kmz_placemark(data, meta, out_
                     col=x + xmin,
                     val=value,
                     icon_file=dot_file,
-                    inps=inps,
-                )
+                    inps=inps)
                 data_folder.append(placemark)
 
         prog_bar.update(y+1, every=1, suffix=f'row={y+1}/{length}')
@@ -514,9 +532,9 @@ def write_kmz_placemark(data, meta, out_
     kmz_file = write_kmz_file(
         out_file_base,
         kml_doc,
+        data_files=[cbar_file],
         res_files=[dot_file, star_file],
-        keep_kml_file=inps.keep_kml_file,
-    )
+        keep_kml_file=inps.keep_kml_file)
 
     return kmz_file
 
@@ -547,7 +565,8 @@ def main(iargs=None):
         data -= readfile.read(inps.file, datasetName=inps.ref_date, box=inps.pix_box)[0]
 
     # mask
-    mask = pp.read_mask(inps.file, mask_file=inps.mask_file, datasetName=inps.dset, box=inps.pix_box)[0]
+    mask = pp.read_mask(inps.file, mask_file=inps.mask_file,
+                        datasetName=inps.dset, box=inps.pix_box)[0]
     if mask is not None:
         print('masking out pixels with zero value in file: {}'.format(inps.mask_file))
         data[mask == 0] = np.nan
@@ -574,9 +593,9 @@ def main(iargs=None):
     # disp min/max and colormap
     cmap_lut = 256
     if not inps.vlim:
-        cmap_lut, inps.vlim = pp.auto_adjust_colormap_lut_and_disp_limit(data)
-    inps.colormap = pp.auto_colormap_name(atr, inps.colormap)
-    inps.colormap = pp.ColormapExt(inps.colormap, cmap_lut).colormap
+        cmap_lut, inps.vlim = pp.auto_adjust_colormap_lut_and_disp_limit(data)[:2]
+    inps.cmap_name = pp.auto_colormap_name(atr, inps.cmap_name)
+    inps.colormap = pp.ColormapExt(inps.cmap_name, cmap_lut).colormap
     inps.norm = colors.Normalize(vmin=inps.vlim[0], vmax=inps.vlim[1])
 
     # Output filename
@@ -592,8 +611,7 @@ def main(iargs=None):
             data,
             meta=atr,
             out_file=inps.outfile,
-            inps=inps,
-        )
+            inps=inps)
 
     else:
         # create placemark KML for file in radar-coord
@@ -602,10 +620,9 @@ def main(iargs=None):
             meta=atr,
             out_file=inps.outfile,
             geom_file=inps.geom_file,
-            inps=inps,
-        )
+            inps=inps)
 
-    return inps.outfile
+    return
 
 
 #######################################################
diff -pruN 1.3.3-2/mintpy/save_kmz_timeseries.py 1.4.0-1/mintpy/save_kmz_timeseries.py
--- 1.3.3-2/mintpy/save_kmz_timeseries.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/save_kmz_timeseries.py	2022-08-04 20:01:49.000000000 +0000
@@ -8,7 +8,6 @@
 
 import os
 import sys
-import argparse
 from lxml import etree
 from zipfile import ZipFile
 import shutil
@@ -23,7 +22,8 @@ except ImportError:
 
 import mintpy
 from mintpy.objects import timeseries, deramp
-from mintpy.utils import readfile, plot, utils as ut
+from mintpy.utils import readfile, plot as pp, utils as ut
+from mintpy.utils.arg_utils import create_argument_parser
 from mintpy import save_kmz
 
 
@@ -35,10 +35,12 @@ EXAMPLE = """example:
   save_kmz_timeseries.py timeseries_ERA5_demErr.h5 --vel velocity.h5 --tcoh temporalCoherence.h5 --mask maskTempCoh.h5
 """
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Generare Google Earth KMZ file for time-series file.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Generate Google Earth KMZ file for time-series file.'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     args = parser.add_argument_group('Input files', 'File/Dataset to display')
 
@@ -62,10 +64,9 @@ def create_parser():
                       help='min/max range in cm/yr for color coding.')
     opts.add_argument('--wrap', dest='wrap', action='store_true',
                       help='re-wrap data to [VMIN, VMAX) for color coding.')
-    opts.add_argument('--colormap','-c', dest='colormap', default='jet',
+    opts.add_argument('--colormap','-c', dest='cmap_name', default='jet',
                       help='colormap used for display, i.e. jet, RdBu, hsv, jet_r, temperature, viridis,  etc.\n'
-                           'colormaps in Matplotlib - http://matplotlib.org/users/colormaps.html\n'
-                           'colormaps in GMT - http://soliton.vm.bytemark.co.uk/pub/cpt-city/')
+                           'More details at https://mintpy.readthedocs.io/en/latest/api/colormaps/')
 
     defo = parser.add_argument_group('HD for deforming areas', 'High resolution output for deforming areas')
     defo.add_argument('--cutoff', dest='cutoff', type=int, default=3,
@@ -231,7 +232,6 @@ def get_boxes4deforming_area(vel_file, m
 
 def create_reference_point_element(inps, lats, lons, ts_obj):
     """Create reference point element"""
-    colormap = mpl.cm.get_cmap(inps.colormap)  # set colormap
     norm = mpl.colors.Normalize(vmin=inps.vlim[0], vmax=inps.vlim[1])
 
     ref_yx = (int(ts_obj.metadata['REF_Y']), int(ts_obj.metadata['REF_X']))
@@ -240,7 +240,7 @@ def create_reference_point_element(inps,
     ref_point = KML.Placemark(
         KML.Style(
             KML.IconStyle(
-                KML.color(save_kmz.get_hex_color(0.0, colormap, norm)),
+                KML.color(save_kmz.get_hex_color(0.0, inps.colormap, norm)),
                 KML.scale(1.),
                 KML.Icon(
                     KML.href("{}".format(os.path.basename(inps.star_file)))
@@ -395,8 +395,7 @@ def create_kml_region_document(inps, box
         ## 2. Create KML Document
         kml_doc = KML.Document()
 
-        # 2.1 Set and normalize colormap to defined vlim
-        colormap = mpl.cm.get_cmap(inps.colormap)
+        # 2.1 Normalize colormap to defined vlim
         norm = mpl.colors.Normalize(vmin=inps.vlim[0], vmax=inps.vlim[1])
 
         # 2.2 Set number of pixels to use
@@ -423,7 +422,7 @@ def create_kml_region_document(inps, box
                     # 2.3.1 Create KML icon style element
                     style = KML.Style(
                         KML.IconStyle(
-                            KML.color(save_kmz.get_hex_color(vc, colormap, norm)),
+                            KML.color(save_kmz.get_hex_color(vc, inps.colormap, norm)),
                             KML.scale(0.5),
                             KML.Icon(KML.href("{}".format(dot_file)))
                         )
@@ -578,7 +577,7 @@ def main(iargs=None):
     if inps.outfile:
         inps.outfile_base = os.path.splitext(os.path.basename(inps.outfile))[0]
     else:
-        inps.outfile_base = plot.auto_figure_title(inps.ts_file, inps_dict=vars(inps))
+        inps.outfile_base = pp.auto_figure_title(inps.ts_file, inps_dict=vars(inps))
     kml_root_file = os.path.join(inps.work_dir, '{}_root.kml'.format(inps.outfile_base))
     kmz_file = os.path.join(inps.work_dir, '{}.kmz'.format(inps.outfile_base))
 
@@ -591,11 +590,13 @@ def main(iargs=None):
     print('input data shape in row/col: {}/{}'.format(length, width))
 
     vel = readfile.read(inps.vel_file, datasetName='velocity')[0] * 100.
-    # Set min/max velocity for colormap
+    # Set vmin/max and colormap
     if inps.vlim is None:
         inps.vlim = [np.nanmin(vel), np.nanmax(vel)]
     if inps.wrap:
         print('re-wrapping data to {} cm/year for color coding'.format(inps.vlim))
+    inps.colormap = pp.ColormapExt(inps.cmap_name).colormap
+
 
     ##--------- Create root KML file with network links to data KML files --------------##
     kml_root_doc = KML.Document()
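
The switch from mpl.cm.get_cmap to MintPy's ColormapExt, mirroring the change in save_kmz.py above, routes colormap lookups through the project's own helper (see the colormaps page linked in the new help text). Usage as it appears in this diff:

    from mintpy.utils import plot as pp

    cmap = pp.ColormapExt('jet').colormap        # matplotlib Colormap object
    cmap = pp.ColormapExt('jet', 256).colormap   # with an explicit LUT size
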
@@ -603,9 +604,9 @@ def main(iargs=None):
     # 1 Create Overlay element for colorbar
     cbar_overlay = save_kmz.generate_cbar_element(
         cbar_file=inps.cbar_file,
+        cmap=inps.colormap,
         vmin=inps.vlim[0],
         vmax=inps.vlim[1],
-        cmap=inps.colormap,
     )
     kml_root_doc.append(cbar_overlay)
 
@@ -666,6 +667,7 @@ def main(iargs=None):
     print('merged all files to {}'.format(kmz_file))
     print('Done.')
     print('Open {} in Google Earth and play!'.format(kmz_file))
+
     return
 
 
diff -pruN 1.3.3-2/mintpy/save_qgis.py 1.4.0-1/mintpy/save_qgis.py
--- 1.3.3-2/mintpy/save_qgis.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/save_qgis.py	2022-08-04 20:01:49.000000000 +0000
@@ -9,14 +9,16 @@
 import os
 import sys
 import errno
-import argparse
 import h5py
 import numpy as np
 from osgeo import ogr
+
 from mintpy.objects import timeseries
 from mintpy.utils import ptime, readfile, utils as ut
+from mintpy.utils.arg_utils import create_argument_parser
 
 
+#########################################################################################
 EXAMPLE = """example:
   save_qgis.py timeseries_ERA5_ramp_demErr.h5 -g inputs/geometrygeo.h5
   save_qgis.py timeseries_ERA5_ramp_demErr.h5 -g inputs/geometryRadar.h5
@@ -24,13 +26,13 @@ EXAMPLE = """example:
   save_qgis.py timeseries_ERA5_ramp_demErr.h5 -g inputs/geometryRadar.h5 -b 200 150 400 350
 """
 
-def cmd_line_parse(iargs=None):
-    '''
-    Command line parser.
-    '''
-    parser = argparse.ArgumentParser(description='Convert to QGIS compatible ps time-series',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Convert to QGIS compatible ps time-series'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
+
     parser.add_argument('ts_file', type=str, help='time-series HDF5 file')
     parser.add_argument('-g', '--geom', dest='geom_file', type=str, required=True,
                         help='geometry HDF5 file')
@@ -42,10 +44,22 @@ def cmd_line_parse(iargs=None):
     parser.add_argument('-B', '--geo-bbox', dest='geo_bbox', type=float, nargs=4, default=None,
                         metavar=('S','N','W','E'), help='bounding box in lat lon: South North West East')
 
-    return parser.parse_args(iargs)
+    return parser
+
 
+def cmd_line_parse(iargs=None):
+    '''Command line parser.'''
+    parser = create_parser()
+    inps = parser.parse_args(args=iargs)
 
-#################################################################
+    # --outshp option
+    if not inps.shp_file:
+        inps.shp_file = os.path.splitext(inps.ts_file)[0] + '.shp'
+
+    return inps
+
+
+#########################################################################################
 def add_metadata(feature, location, attrs):
     '''
     Create one point in compatible shape format.
@@ -250,30 +264,25 @@ def write_shape_file(fDict, shp_file, bo
     return shp_file
 
 
-#################################################################
+#########################################################################################
 def main(iargs=None):
-    '''Main driver
-    '''
-
-    #Parse command line
+    # Parse command line
     inps = cmd_line_parse(iargs)
 
-    #Read bounding box
+    # Read bounding box
     box = read_bounding_box(pix_box=inps.pix_bbox,
                             geo_box=inps.geo_bbox,
                             geom_file=inps.geom_file)
 
-    #Gather data files
+    # Gather data files
     fDict = gather_files(inps.ts_file, inps.geom_file)
 
-    #Write shape file
-    if not inps.shp_file:
-        inps.shp_file = os.path.splitext(inps.ts_file)[0]+'.shp'
+    # Write shape file
     write_shape_file(fDict, inps.shp_file, box=box)
 
-    return inps.shp_file
+    return
 
 
-#################################################################
+#########################################################################################
 if __name__ == '__main__':
     main(sys.argv[1:])
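
Note that the default output name is now resolved at parse time: for example, save_qgis.py timeseries_ERA5_ramp_demErr.h5 -g inputs/geometryRadar.h5 writes timeseries_ERA5_ramp_demErr.shp alongside the input, per the splitext logic in cmd_line_parse above.
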
diff -pruN 1.3.3-2/mintpy/save_roipac.py 1.4.0-1/mintpy/save_roipac.py
--- 1.3.3-2/mintpy/save_roipac.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/save_roipac.py	2022-08-04 20:01:49.000000000 +0000
@@ -8,10 +8,10 @@
 
 import os
 import sys
-import argparse
 import numpy as np
 from mintpy.objects import timeseries, HDFEOS
 from mintpy.utils import readfile, writefile, ptime, utils as ut
+from mintpy.utils.arg_utils import create_argument_parser
 from mintpy import view
 
 
@@ -47,10 +47,12 @@ EXAMPLE = """example:
 """
 
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Convert MintPy HDF5 file to ROI_PAC format.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Convert MintPy HDF5 file to ROI_PAC format.'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', help='HDF5 file to be converted.')
     parser.add_argument('dset', nargs='?', help='date/date12 of timeseries, or date12 of interferograms to be converted')
@@ -336,7 +338,8 @@ def main(iargs=None):
         atr = clean_metadata4roipac(atr)
 
     writefile.write(data, out_file=out_file, metadata=atr)
-    return inps.outfile
+
+    return
 
 
 ##########################################################################
diff -pruN 1.3.3-2/mintpy/sh/compare_velocity_with_diff_tropo.sh 1.4.0-1/mintpy/sh/compare_velocity_with_diff_tropo.sh
--- 1.3.3-2/mintpy/sh/compare_velocity_with_diff_tropo.sh	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/sh/compare_velocity_with_diff_tropo.sh	1970-01-01 00:00:00.000000000 +0000
@@ -1,44 +0,0 @@
-#! /bin/sh
-## Compare the estimated velocities from time-series with different troposphric delay corrections
-## Created by Zhang Yunjun, Sep 6th, 2018
-
-## Run Control: change to 0 to skip the part
-run_proc=1
-run_plot=1
-
-## Run Customized Processing
-if [ $run_proc -eq 1 ]; then
-    generate_mask.py temporalCoherence.h5 -m 0.8 -o maskTempCoh.h5
-
-    # Tropospheric Correction with ERA-Interim
-    tropo_pyaps.py -f timeseries.h5 -g inputs/geometryRadar.h5 -m ECMWF -w ~/insarlab/WEATHER
-    dem_error.py  timeseries_ECMWF.h5 -g inputs/geometryRadar.h5
-    remove_ramp.py timeseries_ECMWF_demErr.h5 -m maskTempCoh.h5 -s linear
-    timeseries2velocity.py timeseries_ECMWF_demErr_ramp.h5 -o velocity_tropECMWF.h5
-
-    # Tropospheric Correction with MERRA-2
-    tropo_pyaps.py -f timeseries.h5 -g inputs/geometryRadar.h5 -m MERRA -w ~/insarlab/WEATHER
-    dem_error.py  timeseries_MERRA.h5 -g inputs/geometryRadar.h5
-    remove_ramp.py timeseries_MERRA_demErr.h5 -m maskTempCoh.h5 -s linear
-    timeseries2velocity.py timeseries_MERRA_demErr_ramp.h5 -o velocity_tropMERRA.h5
-
-    # Tropospheric Correction with Phase/Elevation Ratio
-    dem_error.py timeseries.h5 -g -g inputs/geomtropo_phase_elevationetryRadar.h5
-    tropo_phase_elevation.py timeseries_demErr.h5 -g inputs/geometryRadar.h5 -m maskTempCoh.h5
-    remove_ramp.py timeseries_demErr_tropHgt.h5 -m maskTempCoh.h5 -s linear
-    timeseries2velocity.py timeseries_demErr_tropHgt_ramp.h5 -o velocity_tropHgt.h5
-
-    # No tropospheric Correction
-    remove_ramp.py timeseries_demErr.h5 -m maskTempCoh.h5 -s linear
-    timeseries2velocity.py timeseries_demErr_ramp.h5 -o velocity_tropNone.h5
-fi
-
-## Run plotting
-opt=' velocity --wrap --wrap-range -1 1 --nodisplay --cbar-nbins 2 --notitle --figsize 3 3 --fontsize 12 --notick --dpi 600 --ref-size 3 --dem inputs/geometryRadar.h5 --dem-nocontour '
-proj_name='AlcedoSenDT128'
-if [ $run_plot -eq 1 ]; then
-    view.py velocity_tropNone.h5  $opt -o ${proj_name}_velocity_0_tropNone.png
-    view.py velocity_tropECMWF.h5 $opt -o ${proj_name}_velocity_1_tropECMWF.png
-    view.py velocity_tropMERRA.h5 $opt -o ${proj_name}_velocity_2_tropMERRA.png
-    view.py velocity_tropHgt.h5   $opt -o ${proj_name}_velocity_3_tropHgt.png
-fi
diff -pruN 1.3.3-2/mintpy/sh/load_data_aoi.sh 1.4.0-1/mintpy/sh/load_data_aoi.sh
--- 1.3.3-2/mintpy/sh/load_data_aoi.sh	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/sh/load_data_aoi.sh	1970-01-01 00:00:00.000000000 +0000
@@ -1,62 +0,0 @@
-#! /bin/sh
-# Extract input datasets in mintpy/inputs for an area of interest (AOI) from the one of the whole frame
-# Parameters: SNWE : string for the AOI
-#             step : string/number, output resolution in degree
-#             inputs_dir_src : source mintpy/inputs folder for the whole track in radar coordinates
-# Returns:    inputs_dir_dst : destination mintpy/inputs folder for the AOI in geo coordinates
-# Author: Zhang Yunjun, 2019-05-18
-
-# setup input/output directories
-inputs_dir_src=$HOME'/insarlab/Kirishima/Alos2DT23F2970/mintpy/inputs'        #input dir
-inputs_dir_dst=$HOME'/insarlab/Kirishima/ShinmoedakeAlos2DT23/mintpy/inputs'  #output dir
-
-# setup AOI
-SNWE="31.88 31.94 130.85 130.91"
-step="0.000185185"   #degrees
-# degrees --> meters on equator
-# 0.000925926 --> 100 m
-# 0.000555556 --> 60 m
-# 0.000277778 --> 30 m
-# 0.000185185 --> 20 m
-# 0.000092593 --> 10 m
-
-
-###########################################################################
-# create destination directory
-if [ ! -d $inputs_dir_dst ]; then
-    echo "create "$inputs_dir_dst
-    mkdir -p $inputs_dir_dst
-fi
-
-# geocode - prepare script options
-lut_file="$inputs_dir_src/geometryRadar.h5"
-geocode_opt="-l $lut_file --bbox $SNWE --lat-step -$step --lon-step $step"
-
-echo "geocode & subset - geometry file"
-src_file=$inputs_dir_src"/geometryRadar.h5"
-dst_file=$inputs_dir_dst"/geometryGeo.h5"
-echo "geocode.py $src_file -o $dst_file $geocode_opt"
-geocode.py $src_file -o $dst_file $geocode_opt
-
-echo "geocode & subset - ifgramStack file"
-src_file=$inputs_dir_src"/ifgramStack.h5"
-dst_file=$inputs_dir_dst"/ifgramStack.h5"
-echo "geocode.py $src_file -o $dst_file $geocode_opt"
-geocode.py $src_file -o $dst_file $geocode_opt
-
-# subset - prepare script options
-# split SNWE into four variables to be used by subset.py
-S="$(cut -d' ' -f1 <<<"$SNWE")"
-N="$(cut -d' ' -f2 <<<"$SNWE")"
-W="$(cut -d' ' -f3 <<<"$SNWE")"
-E="$(cut -d' ' -f4 <<<"$SNWE")"
-subset_opt=" --lat $S $N --lon $W $E"
-
-echo "subset - DEM"
-src_file=$inputs_dir_src"/../../../DEM/gsi10m.dem.wgs84"   #adjust filename for specific dataset
-dst_file=$inputs_dir_dst"/gsi10m.dem.wgs84"                #adjust filename for specific dataset
-echo "subset.py $src_file -o $dst_file $subset_opt"
-subset.py $src_file -o $dst_file $subset_opt
-
-echo "Done."
-
diff -pruN 1.3.3-2/mintpy/sh/plot_smallbaselineApp.sh 1.4.0-1/mintpy/sh/plot_smallbaselineApp.sh
--- 1.3.3-2/mintpy/sh/plot_smallbaselineApp.sh	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/sh/plot_smallbaselineApp.sh	1970-01-01 00:00:00.000000000 +0000
@@ -1,109 +0,0 @@
-#! /bin/sh
-###############################################################
-# Plot Results from Routine Workflow with smallbaselineApp.py
-# Author: Zhang Yunjun, 2017-07-23
-# Latest update: 2021-03-08
-###############################################################
-# Update the date above to enable auto copyover/overwrite
-
-
-## Change to 0 if you do not want to re-plot loaded dataset again
-plot_key_files=1
-plot_loaded_data=1
-plot_loaded_data_aux=1
-plot_timeseries=1
-plot_geocoded_data=1
-plot_the_rest=1
-
-
-# Default file name
-mask_file='maskTempCoh.h5'
-dem_file='./inputs/geometryRadar.h5'
-if [ ! -f $dem_file ]; then
-    dem_file='./inputs/geometryGeo.h5'
-fi
-
-## Log File
-log_file='plot_smallbaselineApp.log'
-touch $log_file
-printf "\n\n\n\n\n" >> $log_file
-echo "########################  ./plot_smallbaselineApp.sh  ########################" >> $log_file
-date >> $log_file
-echo "##############################################################################" >> $log_file
-#use "echo 'yoyoyo' | tee -a log" to output message to both screen and file.
-#use "echo 'yoyoyo' >> log" to output message to file only.
-
-## Create pic folder
-if [ ! -d "pic" ]; then
-    echo 'Create ./pic folder'
-    mkdir pic
-fi
-
-## common view.py option for all files
-view='view.py --nodisplay --dpi 150 --update '
-
-## Plot Key files
-opt=' --dem '$dem_file' --mask '$mask_file' -u cm '
-#opt=' --dem '$dem_file' --mask '$mask_file' -u cm --vlim -2 2'
-if [ $plot_key_files -eq 1 ]; then
-    file='velocity.h5';              test -f $file && $view $file $opt               >> $log_file
-    file='temporalCoherence.h5';     test -f $file && $view $file -c gray --vlim 0 1 >> $log_file
-    file='maskTempCoh.h5';           test -f $file && $view $file -c gray --vlim 0 1 >> $log_file
-    file='inputs/geometryRadar.h5';  test -f $file && $view $file                    >> $log_file
-    file='inputs/geometryGeo.h5';    test -f $file && $view $file                    >> $log_file
-fi
-
-
-## Loaded Dataset
-if [ $plot_loaded_data -eq 1 ]; then
-    file='inputs/ifgramStack.h5'
-    test -f $file && h5ls $file/unwrapPhase      && $view $file unwrapPhase- --zero-mask --wrap -c cmy >> $log_file
-    test -f $file && h5ls $file/unwrapPhase      && $view $file unwrapPhase- --zero-mask               >> $log_file
-    test -f $file && h5ls $file/coherence        && $view $file coherence-         --mask no -v 0 1    >> $log_file
-    test -f $file && h5ls $file/connectComponent && $view $file connectComponent-  --mask no           >> $log_file
-
-    # phase-unwrapping error correction
-    for dset in 'unwrapPhase_bridging' 'unwrapPhase_phaseClosure' 'unwrapPhase_bridging_phaseClosure'; do
-        test -f $file && h5ls $file/$dset        && $view $file $dset-             --zero-mask        >> $log_file
-    done
-fi
-
-
-## Auxliary Files from loaded dataset
-if [ $plot_loaded_data_aux -eq 1 ]; then
-    file='avgPhaseVelocity.h5';   test -f $file && $view $file                      >> $log_file
-    file='avgSpatialCoh.h5';      test -f $file && $view $file -c gray --vlim 0 1   >> $log_file
-    file='maskConnComp.h5';       test -f $file && $view $file -c gray --vlim 0 1   >> $log_file
-fi
-
-
-## Time-series files
-opt='--mask '$mask_file' --noaxis -u cm --wrap --wrap-range -10 10 '
-if [ $plot_timeseries -eq 1 ]; then
-    file='timeseries.h5'; test -f $file && $view $file $opt >> $log_file
-    find . -name 'timeseries_*.h5' -exec   $view {}    $opt >> $log_file \;
-fi
-
-
-## Geo coordinates for UNAVCO Time-series InSAR Archive Product
-if [ $plot_geocoded_data -eq 1 ]; then
-    file='./geo/geo_maskTempCoh.h5';          test -f $file && $view $file -c gray  >> $log_file
-    file='./geo/geo_temporalCoherence.h5';    test -f $file && $view $file -c gray  >> $log_file
-    file='./geo/geo_velocity.h5';             test -f $file && $view $file velocity >> $log_file
-    find . -name './geo/geo_timeseries_*.h5' -exec             $view {}    $opt     >> $log_file \;
-fi
-
-
-if [ $plot_the_rest -eq 1 ]; then
-    for tropo in 'ERA5' 'ERAI' 'ECMWF' 'MERRA' 'NARR'; do
-        file='velocity'${tropo}'.h5';  test -f $file && $view $file --mask no >> $log_file
-    done
-    file='numInvIfgram.h5';            test -f $file && $view $file --mask no >> $log_file
-fi
-
-
-## Move/copy picture files to pic folder
-echo "Copy *.txt files into ./pic folder."
-cp *.txt pic/
-echo "Move *.png/pdf/kmz files into ./pic folder."
-mv *.png *.pdf *.kmz ./geo/*.kmz pic/
diff -pruN 1.3.3-2/mintpy/sh/post_aoi.sh 1.4.0-1/mintpy/sh/post_aoi.sh
--- 1.3.3-2/mintpy/sh/post_aoi.sh	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/sh/post_aoi.sh	1970-01-01 00:00:00.000000000 +0000
@@ -1,141 +0,0 @@
-#! /bin/sh
-# This is an example for creating your own processing recipe using MintPy
-# Created by Zhang Yunjun, 2016
-
-##### Initial Setting #####
-satellite='AlosA'
-track='T424'
-frame='F610_640'
-
-sub='-l 31.14:31.30 -L 130.48:130.68'
-seed='-l 31.2352 -L 130.5029'
-row_col='-r 5 -p 6'
-
-##### Process Flag ########
-subset=0
-timeseries=0
-velocity=0
-point_ts=1
-key_ifg=0
-
-
-##------------- Data Source ---------------------------------------##
-projectDir=$KYUSHU_PROJECT/${satellite}${track}${frame}/TSSAR
-#projectDir=$SC/Kyushu${track}${frame}${satellite}/TSSAR/Product
-workDir=$KYUSHU_PROJECT/Volcanoes/Ata/${satellite}${track}
-
-msk=$projectDir'/subset_geo_Mask_tempCoh_dis.h5'
-ts=$projectDir'/subset_Seeded_geo_timeseries_ECMWF_demCor_refDate_quadratic.h5'
-vel=$projectDir'/subset_Seeded_geo_velocity_ex_masked.h5'
-dem=$projectDir'/subset_gsi10m_30m.dem'
-coh=$projectDir'/subset_geo_temporal_coherence.h5'
-
-cd $workDir
-
-##--------------- Subset -------------------------------------------##
-if [ $subset -eq 1 ]; then
-
-    subset.py --outfill-nan  $sub -o ts.h5          -f ${ts}
-    subset.py --outfill-zero $sub -o gsi10m_30m.dem -f ${dem}
-    subset.py --outfill-zero $sub -o Mask.h5        -f ${msk}
-    subset.py --outfill-zero $sub -o tempCoh.h5     -f ${coh}
-    generate_mask.py -f tempCoh.h5 -m 0.7 -o Mask_tempCoh.h5
-
-    view.py -c gray --nodisplay -f Mask.h5
-    view.py -c gray --nodisplay -f tempCoh.h5
-    view.py -c gray --nodisplay -f Mask_tempCoh.h5
-    view.py         --nodisplay -f gsi10m_30m.dem
-
-    msk='Mask_tempCoh.h5'
-    view='view.py '${row_col}' -u cm -m -5 -M 5 -D gsi10m_30m.dem --dem-noshade --mask '$msk' --noaxis --nodisplay'
-    $view -f ts.h5
-
-    #generate_mask.py -f velocity_ex_masked.h5 -m 0.01 -o mask_vel_1.h5
-    #diff.py Mask_tempCoh.h5 mask_vel_1.h5 Mask_tempCoh_dis.h5 
-fi
-msk='Mask_tempCoh_dis.h5'
-dem='gsi10m_30m.dem'
-
-
-##--------------- Timeseries ----------------------------------------##
-if [ $timeseries -eq 1 ]; then
-
-    view='view.py '${row_col}' -u cm -m -5 -M 5 -D gsi10m_30m.dem --mask '$msk' --noaxis --nodisplay'
-
-    reference_point.py $seed -f ts.h5 -m $msk
-    $view -f Seeded_ts.h5
-
-    sum_epochs.py Seeded_ts.h5
-    $view -u 1 -m 0 -M 1 -f sum_Seeded_ts.h5 -c gray
-
-    #mean_spatial.py -f sum_Seeded_ts.h5 -m $msk
-    cir_par='31.1800,130.5290,60;31.1892,130.6147,40'
-    mean_spatial.py -f sum_Seeded_ts.h5 -m $msk --circle $cir_par
-
-    reference_date.py Seeded_ts.h5 reference_date.txt
-    masking.py -f Seeded_ts_refDate.h5 -m $msk
-    $view -f Seeded_ts_refDate.h5
-    $view -f Seeded_ts_refDate.h5 -E drop_date.txt
-
-    #$view -f Seeded_ts_refDate.h5 --ref-epoch 20110420
-fi
-
-ts='Seeded_ts_refDate.h5'
-##--------------- Velocity -------------------------------------------##
-if [ $velocity -eq 1 ]; then
-    timeseries2velocity.py -f $ts -E drop_date.txt
-    view='view.py -u cm -m -2 -M 2 -D '$dem' --nodisplay --mask '${msk}' -f '
-    $view velocity_ex.h5
-    masking.py -f velocity_ex.h5 -m $msk
-    save_kmz.py -f velocity_ex_masked.h5 -m -0.02 -M 0.02 
-fi
-
-
-##---------------- Point Time Series Displacement --------------------##
-if [ $point_ts -eq 1 ]; then
-
-    view='view.py --displacement -u cm -m -5 -M 5 --mask '$msk' -D '$dem' --nodisplay -f'
-    ##--- 1.1 Pre-eruptive Inflation
-    #save_unw.py Seeded_ts.h5 20100102 20101120
-    #$view 100102_101120.unw
-    #masking.py -f 100102_101120.unw -m Mask.h5
-    #save_kmz.py -f 100102_101120_masked.unw -m -0.05 -M 0.05 --displacement --cbar-label 'LOS displacement'
-
-    ##--- 2. Point TS
-    #masking.py -f Seeded_ts_refDate.h5 -m $msk
-    ts='Seeded_ts_refDate_masked.h5'
-    vel='velocity_ex_masked.h5'
-    tsview='tsview.py -f '${ts}' -E drop_date.txt -F '$ts' -r 3 -D '$dem' -v '$vel' -a -0.015 -b 0.015 --rect-color crimson'
-
-    #$tsview -l -10 -h 4 --lalo 31.1783,130.5364 --LALO 31.1853,130.5231      --nodisplay -t 20060623 -T 20110421
-    #$tsview -l -7  -h 4 --lalo 31.1923,130.6184 --LALO 31.1959,130.5971 -r 2 --nodisplay -t 20060623 -T 20110421
-    $tsview -l -7  -h 4 --lalo 31.1923,130.6184 -r 2 --nodisplay -t 20060623 -T 20110421
-
-    point_lalo='31.1923,130.6184'
-    view='view.py -u cm -m -2 -M 2 --mask '$msk' -D '$dem' --nodisplay'
-    sub='-l 31.155:31.21 -L 130.505:130.635'
-    lineFile='../transect_lonlat.xy'
-    $view -f velocity_ex.h5 --point-lalo $point_lalo -o velocity_ex_poi.png $sub --line-lalo $lineFile
-fi
-
-
-##----------------- Velocity and Interferograms ----------------------##
-if [ $key_ifg -eq 1 ]; then
-
-    view='view.py --mask '$msk' --displacement -u cm -m -5 -M 5 -D '$dem' --nodisplay'
-    save_kmz='save_kmz.py -m -0.05 -M 0.05 --displacement'
-
-    #### Post-eruptive
-    d1='110305'
-    d2='110420'
-    save_unw.py Seeded_ts.h5 $d1 $d2
-    #$view -f 091130_110118.unw
-    masking.py -f ${d1}_${d2}.unw -m $msk
-    $save_kmz -f ${d1}_${d2}_masked.unw --cbar-label 'LOS displacement'
-
-    ts='Seeded_ts_refDate_masked.h5'
-    vel='091130_110118_masked.unw'
-    tsview='tsview.py -f '${ts}' -E drop_date.txt -F '$ts' -r 3 -D '$dem' -v '$vel' --displacement -a -0.05 -b 0.05 --rect-color black'
-    #$tsview --lalo 31.9076,130.8284 -l -3 -h 3 --nodisplay --zoom-y 200:900 --zoom-x 700:1600
-fi
-
diff -pruN 1.3.3-2/mintpy/sh/post_process_Alos.sh 1.4.0-1/mintpy/sh/post_process_Alos.sh
--- 1.3.3-2/mintpy/sh/post_process_Alos.sh	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/sh/post_process_Alos.sh	1970-01-01 00:00:00.000000000 +0000
@@ -1,285 +0,0 @@
-#! /bin/sh
-
-##### Customized Processing After 1st smallbaselineApp
-project='KyushuT424F610_640AlosA'
-#####
-trans='geomap_4rlks.trans'
-tmpl=$TE/$project.template
-
-
-#cd $SC/$project/TSSAR
-
-smallbaselineApp=0
-plot_load_data=0
-modify_network=0
-unwrap_error=0
-phaseCor=0
-ts_sum=0
-ref_dates=0
-deramp=0
-timeseries_view=0
-velocity=0
-geo=1
-geo2=1
-
-
-##------------ MintPy routine processing --------------------##
-if [ $smallbaselineApp -eq 1 ]; then
-    smallbaselineApp.py $tmpl
-fi
-
-
-##------------ Plot Load Data ------------------------------##
-if [ $plot_load_data -eq 1 ]; then
-    ## Average Spatial Coherence, more reliable mask source than non-nan and temporal coherence
-    #generate_mask.py -f average_spatial_coherence.h5 -m 0.7 -o Mask_spatialCoh.h5
-
-    view='view4job.py -c gray -m 0 -M 1 --nodisplay -f'
-    #$view average_spatial_coherence.h5
-    #$view Mask_spatialCoh.h5
-    #$view Mask.h5
-    #$view temporal_coherence.h5
-
-    msk='Mask_spatialCoh.h5'
-    #remove_ramp.py Seeded_LoadedData.h5 quadratic $msk
-
-    view='view4job.py -r 3 -p 12 --nodisplay'
-    #$view -m  0 -M 1 -c gray     -f Coherence.h5
-    #$view -m -7 -M 7 --mask $msk -f Seeded_LoadedData.h5
-    #$view -m -7 -M 7 --mask $msk -f Seeded_LoadedData_quadratic.h5
-
-    #unwrap_error.py -f Seeded_LoadedData.h5 -m $msk -o Seeded_LoadedData_unwCorTri.h5
-    #remove_plane.py Seeded_LoadedData_unwCorTri.h5 quadratic $msk
-fi
-
-
-## Based on plotted Coherence and LoadedData image, select low coherent ifgs and put them in template file.
-##------------ Modify Network and Re-run smallbaselineApp ----------##
-if [ $modify_network -eq 1  ]; then
-    modify_network.py -f LoadedData.h5 -C Coherence.h5 -T $tmpl
-    mean_spatial_coherence.py Modified_Coherence.h5 Modified_average_spatial_coherence.h5
-#    generate_mask.py -f Modified_average_spatial_coherence.h5 -m 0.75 -o Mask_spatialCoh_Modified.h5
-
-    view='view4job.py -m 0 -M 1 -c gray --nodisplay -f'
-    $view Modified_Mask.h5
-    $view Modified_average_spatial_coherence.h5
-    #plot_network.py -f Modified_LoadedData.h5 --nodisplay
-fi
-
-
-
-##------------ PU Error correction -------------------------##
-##### PU Correction using bonding points
-igram='Seeded_Modified_LoadedData'
-if [ $unwrap_error -eq 1 ]; then
-
-  ## Generate Individual Mask for Unwrapping Error Correction
-  ue_generate_mask=0
-  if [ $ue_generate_mask -eq 1 ]; then
-    #reference_point.py -f Modified_LoadedData.h5 -t $tmpl
-    #cp temporal_coherence.h5 temporal_coherence_1.h5
-    #generate_mask.py -f temporal_coherence_1.h5 -m 0.7 -o mask_1.h5
-
-    ##### Calculate with Subset, faster
-    ## width = 2832, length = 6728
-    sub='-y 4500:6700 -x 0:1500'
-    sub2='-y -4500:2228 -x 0:2832'
-    subset.py -f ${igram}.h5 $sub
-    subset.py -f Mask_spatialCoh.h5 $sub
-
-    reference_point.py       -f subset_${igram}.h5 -x 550 -y 550 -M subset_Mask_spatialCoh.h5 -o subset_${igram}_2.h5
-    igram_inversion.py -f subset_${igram}_2.h5 -o subset_timeseries_2.h5
-    temporal_coherence.py subset_${igram}_2.h5    subset_timeseries_2.h5 subset_temporal_coherence_3.h5
-    subset.py -f subset_temporal_coherence_3.h5 $sub2 -o temporal_coherence_3.h5
-    generate_mask.py -f temporal_coherence_3.h5 -m 0.7 -o mask_3.h5
-    image_math.py mask_3.h5 '*' 3 mask_3_3.h5
-
-
-    #reference_point.py       -f ${igram}.h5 -x 840 -y 5390 -o ${igram}_2.h5
-    #igram_inversion.py -f ${igram}_2.h5 -o timeseries_2.h5
-    #temporal_coherence.py ${igram}_2.h5    timeseries_2.h5 temporal_coherence_2.h5
-    #generate_mask.py -f temporal_coherence_2.h5 -m 0.7 -o mask_2.h5
-    #image_math.py mask_2.h5 '*' 2 mask_2_2.h5
-
-    reference_point.py       -f ${igram}.h5 -x 550 -y 5050 -o ${igram}_2.h5
-    igram_inversion.py -f ${igram}_2.h5 -o timeseries_2.h5
-    temporal_coherence.py ${igram}_2.h5    timeseries_2.h5 temporal_coherence_3.h5
-    generate_mask.py -f temporal_coherence_3.h5 -m 0.7 -o mask_3.h5
-    image_math.py mask_3.h5 '*' 3 mask_3_3.h5
-
-    reference_point.py       -f ${igram}.h5 -x 630 -y 5860 -o ${igram}_2.h5
-    igram_inversion.py -f ${igram}_2.h5 -o timeseries_2.h5
-    temporal_coherence.py ${igram}_2.h5    timeseries_2.h5 temporal_coherence_4.h5
-    generate_mask.py -f temporal_coherence_4.h5 -m 0.7 -o mask_4.h5
-    image_math.py mask_4.h5 '*' 4 mask_4_4.h5
-
-  fi
-
-    #cd mask
-    #generate_mask.py -f temporal_coherence_2.h5 -m 0.7 -x 660:930 -y 5200:5500 -o mask_2.h5
-    #image_math.py mask_2.h5 '*' 2 mask_2_2.h5
-
-    #diff.py mask_1.h5 mask_2.h5 mask_1_2.h5
-    #generate_mask.py -f mask_1_2.h5 -m 0.5 -x 910:944 -y 5457:5496 -o mask_small_1.h5
-    #generate_mask.py -f mask_1_2.h5 -m 0.5 -x 877:985 -y 5263:5357 -o mask_small_2.h5
-    #diff.py mask_1_2.h5 mask_small_1.h5 mask_1_3.h5
-    #diff.py mask_1_3.h5 mask_small_2.h5 mask_1_4.h5
-
-    #generate_mask.py -f mask_1_4.h5 -m 0.5 -o mask_1_1.h5
-    #cd ..
-
-    #add.py -f mask_1_1.h5,mask_2_2.h5,mask_3_3.h5,mask_4_4.h5 -o Mask_all.h5
-    msk='Mask_all.h5'
-    view='view.py --nodisplay -f'
-    #$view mask_1.h5
-    #$view mask_2.h5
-    #$view $msk
-
-    #reference_point.py -f Modified_LoadedData.h5 -t $tmpl -M $msk
-
-    unwrap_error.py -f ${igram}.h5 -m $msk -t $tmpl --ramp quadratic --save-detrend
-    remove_plane.py ${igram}_unwCor.h5 quadratic $msk
-
-    view='view4job.py -r 6 -p 19 -m -7 -M 7 --mask Mask_all.h5 --noaxis --nodisplay -f'
-    #$view ${igram}.h5
-    $view ${igram}_unwCor.h5
-    $view ${igram}_unwCor_quadratic.h5
-
-    #### Mask with Bonding Points
-    #point_yx='5430,955,5429,865,5293,834,5178,832,5403,780,5714,612'
-    #line_yx='5430,955,5429,865;5293,834,5178,832;5403,780,5714,612'
-    #view.py --point=${point_yx} --line=${line_yx} --nodisplay -f $msk -o bonding_points.png
-
-    #smallbaselineApp.py $tmpl
-
-fi
-
-
-##### Display Iono Pairs
-view='view.py -r 3 -p 5 -m -7 -M 7 -f Seeded_Modified_LoadedData_unwCor_plane_masked.h5 --nodisplay'
-#$view -d 100606 -o Seeded_Modified_LoadedData_unwCor_plane_masked_100606.png
-#$view -d 060526 -o Seeded_Modified_LoadedData_unwCor_plane_masked_060526.png
-
-##------------ Mask ----------------------------------------##
-#igram_inversion.py    Seeded_Modified_LoadedData_unwCor.h5
-#temporal_coherence.py Seeded_Modified_LoadedData_unwCor.h5 timeseries.h5
-#generate_mask.py -f temporal_coherence.h5 -m 0.7 -x 180:2560 -o Mask_tempCoh_dis.h5
-msk='Mask_tempCoh_dis.h5'
-view='view.py -m 0 -M 1 -c gray --nodisplay -f'
-#$view $msk
-#$view temporal_coherence.h5
-
-
-##------------ Phase Correction ----------------------------##
-if [ $phaseCor -eq 1 ]; then
-    remove_plane.py -f timeseries_ECMWF.h5 -t $tmpl
-    dem_error.py timeseries_ECMWF_plane.h5 Seeded_Modified_LoadedData_unwCor.h5
-    masking.py -f DEM_error.h5 -m $msk
-    view.py -m -25 -M 25 --nodisplay -f DEM_error_masked.h5
-fi
-ts='timeseries_ECMWF_plane_demCor.h5'
-
-
-
-##------------ Reference & Exclude Dates -------------------##
-## Reference date and drop turbulence dates
-if [ $ts_sum -eq 1 ]; then
-    #sum_epochs.py timeseries_ECMWF_demCor_plane.h5 ts_sum_0.h5
-    view.py -r 3  -p 10 -m 0 -M 1 --mask $msk --noaxis --nodisplay -f ts_sum_0.h5
-fi
-
-#ex_date='20060526,20060826,20070111,20091019,20100606,20100906'
-ex_date='20060624,20070812,20080629,20090702,20090817,20091002,20100705'
-ref_date='20090214'
-
-if [ $ref_dates -eq 1 ]; then
-    #reference_date.py timeseries_ECMWF_demCor.h5 $ref_date
-    #remove_plane.py -f timeseries_ECMWF_demCor_ref${ref_date}.h5 -s quadratic -m $msk
-    ts='timeseries_ECMWF_demCor_ref'${ref_date}'_quadratic.h5'
-    #sum_epochs.py $ts ts_sum.h5
-    view4job.py -r 3 -p 10 -m 0 -M 1 --noaxis --nodisplay --mask $msk -f ts_sum.h5
-
-fi
-
-if [ $deramp -eq 1 ]; then
-    ysub='0,4000,3800,6728'
-    remove_plane.py -f timeseries_ECMWF_demCor_ref${ref_date}.h5 -s quadratic -m $msk -y $ysub --save-mask
-    ts='timeseries_ECMWF_demCor_ref'${ref_date}'_quadratic.h5'
-
-    #sum_epochs.py $ts ts_sum.h5
-    #view4job.py -r 2 -p 11 -m 0 -M 1 --noaxis --nodisplay --mask $msk -f ts_sum.h5
-    #diff.py timeseries_ECMWF_demCor_ref${ref_date}.h5 ${ts} 2quadratic.h5
-
-    view='view.py -r 4 -p 8 --noaxis --nodisplay --mask '$msk
-    #$view       -m  0  -M 1  -f ts_sum.h5
-    #$view -u cm -m -25 -M 25 -f quadratic.h5
-    #$view -u cm -m -5  -M 5  -f timeseries_ECMWF_demCor_quadratic.h5
-
-    #reference_date.py timeseries_ECMWF_demCor_quadratic.h5 $ref_date
-    #$view -u cm -m -5 -M 5 -f timeseries_ECMWF_demCor_quadratic_ref${ref_date}.h5
-    #$view -u cm -m -5 -M 5 -f timeseries_ECMWF_demCor_quadratic_ref${ref_date}.h5 -E ${ex_date}
-fi
-
-
-ts='timeseries_ECMWF_demCor_ref'${ref_date}'_quadratic.h5'
-##------------ Time Series Correction View -----------------##
-## MERRA
-#tropo_pyaps.py -f timeseries.h5 -d radar_4rlks.hgt -s MERRA -h 12:00 -i incidence_angle.h5
-if [ $timeseries_view -eq 1 ]; then
-    view='view4job.py -r 3 -p 10 -u cm -m -25 -M 25 --mask '$msk' --noaxis --nodisplay -f'
-    #$view timeseries.h5
-    #$view timeseries_ECMWF.h5
-    #$view timeseries_ECMWF_demCor.h5
-    #$view timeseries_ECMWF_demCor_ref${ref_date}.h5
-    #$view ${ts} -o ${ts}_0.png
-
-    view='view4job.py -r 3 -p 10 -u cm -m  -5 -M  5 --mask '$msk' --noaxis --nodisplay -f'
-    #$view ${ts}
-    $view ${ts} -E $ex_date
-
-    ##### error
-    #diff.py timeseries_ECMWF_demCor_plane.h5 timeseries_ECMWF_demCor.h5 plane.h5
-    #masking.py -m $msk -f ECMWF.h5
-    #masking.py -m $msk -f MERRA.h5
-    #masking.py -m $msk -f DEM_error.h5
-    #masking.py -m $msk -f plane.h5
-    #view.py -r 4 -p 8 -u cm  -m -5  -M 5   --noaxis --nodisplay -f ECMWF_masked.h5
-    #view.py -r 4 -p 8 -c jet -m -25 -M 25           --nodisplay -f DEM_error_masked.h5
-    #view.py -r 4 -p 8 -u cm  -m -25 -M 25  --noaxis --nodisplay -f plane_masked.h5
-fi
-
-
-##------------ Velocity ---------------------------------------##
-if [ $velocity -eq 1 ]; then
-    timeseries2velocity.py -f $ts -E $ex_date
-    view.py -u cm/yr -m -2 -M 2 --mask $msk --nodisplay -f velocity_ex.h5
-fi
-
-if [ $geo -eq 1 ]; then
-    ##### Geocoding
-    #geocode.py $msk $trans
-    #geocode.py velocity_ex.h5 $trans
-    #geocode.py ${ts} $trans
-    geocode.py Modified_Coherence.h5 $trans
-
-    ## Find subset.lalo based on geo_velocity.h5
-
-    ##### Seeding
-    #reference_point.py -t $tmpl -f geo_velocity_ex.h5
-    #reference_point.py -t $tmpl -f geo_${ts}
-fi
-
-if [ $geo2 -eq 1 ]; then
-    ##### Subset
-    #subset.py -t $tmpl -f Seeded_geo_*.h5,geo_$msk,gsi10m_30m.dem --parallel
-    subset.py -t $tmpl -f geo_Modified_Coherence.h5
-
-    ##### masking
-    #masking.py -m subset_geo_$msk -f subset_Seeded_geo_velocity_ex.h5
-    #masking.py -m subset_geo_$msk -f subset_Seeded_geo_${ts}
-
-    #multi_looking.py subset_gsi10m_30m.dem 5 5 subset_gsi10m_150m.dem
-    #view.py -u cm/yr -m -2 -M 2 --mask subset_geo_$msk -D subset_gsi10m_150m.dem --save -f subset_Seeded_geo_velocity_ex.h5
-    #save_kml.py -m -0.02 -M 0.02 -f subset_Seeded_geo_velocity_ex_masked.h5
-fi
-
diff -pruN 1.3.3-2/mintpy/sh/run_stripmap_stack.sh 1.4.0-1/mintpy/sh/run_stripmap_stack.sh
--- 1.3.3-2/mintpy/sh/run_stripmap_stack.sh	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/sh/run_stripmap_stack.sh	1970-01-01 00:00:00.000000000 +0000
@@ -1,26 +0,0 @@
-#! /bin/sh
-
-# clean folders before re-run
-if [ -d "Igrams" ]; then
-    echo "clean obsolete files/folders before reruning"
-    rm -r baselines/ configs/ coregSLC/ geom_reference/ Igrams/ merged/ offsets/ refineSecondaryTiming/ run_files/ SLC/
-    rm run_unPackALOS
-    cd download
-    rm -rf 20*
-    mv ARCHIVED_FILES/* .
-    cd ..
-fi
-
-# prepare SAR data
-prepRawALOS.py -i download/ -o SLC -t '' --dual2single
-chmod 755 run_unPackALOS
-./run_unPackALOS
-
-# stack processing
-stackStripMap.py -s SLC/ -d DEM/gsi*.dem -t 1800 -b 1800 -a 20 -r 8 -u snaphu -W interferogram -m 20080212 -f 0.5 --applyWaterMask
-submit_run_files4stripmap_stack.py
-
-# mintpy
-cd mintpy
-smallbaselineApp.py ../KirishimaAlosAT424F620_630.txt
-
diff -pruN 1.3.3-2/mintpy/simulation/defo_model.py 1.4.0-1/mintpy/simulation/defo_model.py
--- 1.3.3-2/mintpy/simulation/defo_model.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/simulation/defo_model.py	2022-08-04 20:01:49.000000000 +0000
@@ -10,7 +10,7 @@
 
 import numpy as np
 from matplotlib import pyplot as plt
-from mintpy.utils import utils0 as ut
+from mintpy.utils import utils0 as ut0
 
 
 def mogi(geometry, xloc, nu=0.25):
@@ -81,12 +81,9 @@ def mogi_los(shape, source_geom, resolut
     dis_n = dis_map[1, :].reshape(length, width)
     dis_u = dis_map[2, :].reshape(length, width)
 
-    dis_los = ut.enu2los(dis_e,
-                         dis_n,
-                         dis_u,
-                         inc_angle=34.,
-                         head_angle=-168.)
-
+    dis_los = ut0.enu2los(dis_e, dis_n, dis_u,
+                          inc_angle=34.,
+                          head_angle=-168.)
     dis_los[mask == 0.] = np.nan
     dis_los *= scale
 
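
Both versions of the hunk above project ENU displacements into the radar line of sight with a fixed incidence and heading angle. The exact sign convention lives inside ut0.enu2los; under one common convention (right-looking SAR, heading measured clockwise from north, incidence angle from vertical, LOS unit vector pointing from ground to satellite) the projection looks like the following sketch, which is illustrative rather than a copy of MintPy's implementation:

    import numpy as np

    def enu2los_sketch(d_e, d_n, d_u, inc_angle=34., head_angle=-168.):
        """Project east/north/up displacements onto the line of sight (LOS).
        Assumes right-looking SAR; the ground-to-satellite horizontal azimuth
        is then 90 degrees counter-clockwise of the heading angle."""
        inc = np.deg2rad(inc_angle)
        az = np.deg2rad(head_angle - 90.)   # ground -> satellite azimuth
        los_e = np.sin(az) * np.sin(inc)    # = -cos(head) * sin(inc)
        los_n = np.cos(az) * np.sin(inc)    # =  sin(head) * sin(inc)
        los_u = np.cos(inc)
        return d_e * los_e + d_n * los_n + d_u * los_u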
diff -pruN 1.3.3-2/mintpy/simulation/iono.py 1.4.0-1/mintpy/simulation/iono.py
--- 1.3.3-2/mintpy/simulation/iono.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/simulation/iono.py	2022-08-04 20:01:49.000000000 +0000
@@ -2,16 +2,11 @@
 ############################################################
 # Program is part of MintPy                                #
 # Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi         #
-# Author: Zhang Yunjun, Heresh Fattahi, Feb 2020           #
+# Author: Zhang Yunjun, Feb 2020                           #
 ############################################################
 # Useful links:
-#   IGS (at NASA's archive): https://cddis.nasa.gov/Data_and_Derived_Products/GNSS/atmospheric_products.html
+#   IGS (NASA): https://cddis.nasa.gov/Data_and_Derived_Products/GNSS/atmospheric_products.html
 #   IMPC (DLR): https://impc.dlr.de/products/total-electron-content/near-real-time-tec/nrt-tec-global/
-# Contents
-#   Ionospheric Mapping Functions
-#   JPL high resolution GIM - I/O
-#   IGS low  resolution GIM - download, I/O, plot
-#   Test
 # Recommend usage:
 #   from mintpy.simulation import iono
 
@@ -24,11 +19,7 @@ import numpy as np
 import matplotlib.pyplot as plt
 from scipy import interpolate
 
-from mintpy.utils import (
-    ptime,
-    readfile,
-    utils0 as ut,
-)
+from mintpy.utils import readfile, utils0 as ut
 
 
 # constants
@@ -47,10 +38,9 @@ SAR_BAND = {
 
 ######################## Ionospheric Mapping Functions #########################
 # Reference:
-#   Yunjun, Z., Fattahi, H., Pi, X., Rosen, P., Simons, M., Agram, P., & Aoki, Y. (2022). Range Geolocation Accuracy
-#     of C/L-band SAR and its Implications for Operational Stack Coregistration. IEEE Trans. Geosci. Remote Sens. 
-#   Schaer, S., Gurtner, W., & Feltens, J. (1998). IONEX: The ionosphere map exchange format version 1.1. 
-#     Paper presented at the Proceedings of the IGS AC workshop, Darmstadt, Germany, Darmstadt, Germany.
+#   Yunjun, Z., Fattahi, H., Pi, X., Rosen, P., Simons, M., Agram, P., & Aoki, Y. (2022). Range
+#     Geolocation Accuracy of C-/L-band SAR and its Implications for Operational Stack Coregistration.
+#     IEEE Trans. Geosci. Remote Sens., 60, doi:10.1109/TGRS.2022.3168509.
 
 def vtec2range_delay(vtec, inc_angle, freq, obs_type='phase'):
     """Calculate/predict the range delay in SAR from TEC in zenith direction.
@@ -111,6 +101,7 @@ def prep_geometry_iono(geom_file, box=No
                 iono_lat/lon   - float, latitude/longitude of LOS vector at the thin-shell in deg
                 iono_height    - float, height of the assumed effective thin-shell ionosphere in m
     """
+
     def get_center_lat_lon(geom_file, box=None):
         """Get the lat/lon of the scene center"""
         meta = readfile.read_attribute(geom_file)
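
prep_geometry_iono returns the incidence angle of the LOS where it pierces the assumed thin-shell ionosphere, which is smaller than at the ground because the shell sits a few hundred kilometers up. The standard single-layer mapping, as a sketch:

    import numpy as np

    R_EARTH = 6371e3  # mean Earth radius in meters

    def incidence_angle_at_shell(inc_angle_ground, iono_height=450e3):
        """Incidence angle in degrees at a thin-shell ionosphere of height
        iono_height in meters, from the ground incidence angle in degrees,
        using the single-layer mapping sin(z') = R / (R + h) * sin(z)."""
        sin_z = np.sin(np.deg2rad(inc_angle_ground))
        sin_zp = R_EARTH / (R_EARTH + iono_height) * sin_z
        return np.rad2deg(np.arcsin(sin_zp))

    # e.g. a 40 deg ground incidence angle maps to ~37 deg at a 450 km shell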
@@ -543,435 +534,3 @@ def check_date_list_against_reference(da
         dset_list[flag] = dset_list_bk
 
     return date_list, dset_list
-
-
-
-################################## IGS TEC #####################################
-# Low resolution ionospheric TEC products from IGS
-#   Including solutions from various centers, e.g., CODE, JPL, etc.
-#   Website: https://cddis.nasa.gov/Data_and_Derived_Products/GNSS/atmospheric_products.html
-#   Spatial  resolution in latitude / longitude [deg]: 2.5 / 5.0
-#   Temporal resolution [hour]: 2.0 / 1.0
-
-def calc_igs_iono_ramp(tec_dir, date_str, geom_file, box=None, print_msg=True):
-    """Get 2D ionospheric delay from IGS TEC data for one acquisition.
-    due to the variation of the incidence angle along LOS.
-
-    Parameters: tec_dir        - str, path of the local TEC directory, i.e. ~/data/aux/IGS_TEC
-                date_str       - str, date of interest in YYYYMMDD format
-                geom_file      - str, path of the geometry file including incidenceAngle data
-                box            - tuple of 4 int, subset in (x0, y0, x1, y1)
-    Returns:    range_delay    - 2D np.ndarray for the range delay in meters
-                vtec           - float, TEC value in zenith direction in TECU
-                iono_lat/lon   - float, latitude/longitude in degrees of LOS vector in ionosphere shell
-                iono_height    - float, height             in meter   of LOS vector in ionosphere shell
-                iono_inc_angle - float, incidence angle    in degrees of LOS vector in ionosphere shell
-    """
-    # geometry
-    tec_file = dload_igs_tec(date_str, tec_dir, print_msg=print_msg)
-    iono_height = grab_ionex_height(tec_file)
-    (iono_inc_angle,
-     iono_lat,
-     iono_lon) = prep_geometry_iono(geom_file, box=box, iono_height=iono_height)[:3]
-
-    # time
-    meta = readfile.read_attribute(geom_file)
-    utc_sec = float(meta['CENTER_LINE_UTC'])
-    if print_msg:
-        h, s = divmod(utc_sec, 3600)
-        m, s = divmod(s, 60)
-        print('UTC time: {:02.0f}:{:02.0f}:{:02.1f}'.format(h, m, s))
-
-    # extract zenith TEC
-    freq = SPEED_OF_LIGHT / float(meta['WAVELENGTH'])
-    vtec = get_igs_tec_value(tec_file, utc_sec, iono_lon, iono_lat)
-    range_delay = vtec2range_delay(vtec, iono_inc_angle, freq)
-
-    return range_delay, vtec, iono_lat, iono_lon, iono_height, iono_inc_angle
-
-
-def get_igs_tec_value(tec_file, utc_sec, lat, lon, interp_method='linear3d', rotate_tec_map=False, print_msg=True):
-    """Get the TEC value based on input lat/lon/datetime
-    Parameters: tec_file - str, path of local TEC file
-                utc_sec  - float or 1D np.ndarray, UTC time of the day in seconds
-                lat/lon  - float or 1D np.ndarray, latitude / longitude in degrees
-                interp_method  - str, interpolation method
-                rotate_tec_map - bool, rotate the TEC map along the SUN direction, for linear3d only.
-                print_msg      - bool, print out progress bar or not.
-    Returns:    tec_val  - float or 1D np.ndarray, TEC value in TECU
-    """
-
-    def interp_3d_rotate(interpfs, lons, lats, mins, lon, lat, utc_min):
-        ind0 = np.where((mins - utc_min) <=0)[0][-1]
-        ind1 = ind0 + 1
-        lon0 = lon + (utc_min - mins[ind0]) * 360. / (24. * 60.)
-        lon1 = lon + (utc_min - mins[ind1]) * 360. / (24. * 60.)
-        tec_val0 = interpfs[ind0](lon0, lat)
-        tec_val1 = interpfs[ind1](lon1, lat)
-        tec_val = (  (mins[ind1] - utc_min) / (mins[ind1] - mins[ind0]) * tec_val0
-                   + (utc_min - mins[ind0]) / (mins[ind1] - mins[ind0]) * tec_val1 )
-        return tec_val
-
-    # read TEC file
-    lons, lats, mins, tecs = read_ionex_tec(tec_file)[:4]
-    tec_maps = tecs[0]
-
-    # time info
-    utc_min = utc_sec / 60.
-
-    # resample
-    if interp_method == 'nearest':
-        lon_ind = np.abs(lons - lon).argmin()
-        lat_ind = np.abs(lats - lat).argmin()
-        time_ind = np.abs(mins - utc_min).argmin()
-        tec_val = tec_maps[lon_ind, lat_ind, time_ind]
-
-    elif interp_method in ['linear', 'linear2d', 'bilinear']:
-        time_ind = np.abs(mins.reshape(-1,1) - utc_min).argmin(axis=0)
-        if isinstance(time_ind, np.ndarray):
-            num = len(time_ind)
-            tec_val = np.zeros(num, dtype=np.float32)
-            prog_bar = ptime.progressBar(maxValue=num, print_msg=print_msg)
-            for i in range(num):
-                tec_val[i] = interpolate.interp2d(lons, lats, tec_maps[:, :, time_ind[i]].T, kind='linear')(lon[i], lat[i])
-                prog_bar.update(i+1, every=200)
-            prog_bar.close()
-        else:
-            tec_val = interpolate.interp2d(lons, lats, tec_maps[:, :, time_ind].T, kind='linear')(lon, lat)
-
-    elif interp_method in ['linear3d', 'trilinear']:
-        if not rotate_tec_map:
-            # option 1: interpolate between consecutive TEC maps
-            # testing shows better agreement with SAR obs than option 2.
-            tec_val = interpolate.interpn((lons, np.flip(lats), mins),
-                                          np.flip(tec_maps, axis=1),
-                                          (lon, lat, utc_min),
-                                          method='linear')
-
-        else:
-            # option 2: interpolate between consecutive rotated TEC maps
-            # reference: equation (3) in Schaer and Gurtner (1998)
-
-            # prepare interpolation functions in advance to speed up
-            interpfs = []
-            for i in range(len(mins)):
-                interpfs.append(interpolate.interp2d(lons, lats, tec_maps[:, :, i].T, kind='linear'))
-
-            if isinstance(utc_min, np.ndarray):
-                num = len(utc_min)
-                tec_val = np.zeros(num, dtype=np.float32)
-                prog_bar = ptime.progressBar(maxValue=num, print_msg=print_msg)
-                for i in range(num):
-                    tec_val[i] = interp_3d_rotate(interpfs, lons, lats, mins, lon[i], lat[i], utc_min[i])
-                    prog_bar.update(i+1, every=200)
-                prog_bar.close()
-            else:
-                tec_val = interp_3d_rotate(interpfs, lons, lats, mins, lon, lat, utc_min)
-
-    return tec_val
-
-
-def get_igs_tec_filename(tec_dir, date_str, sol='jpl', datefmt='%Y%m%d'):
-    """Get the local file name of downloaded IGS TEC product."""
-    dd = dt.datetime.strptime(date_str, datefmt)
-    doy = '{:03d}'.format(dd.timetuple().tm_yday)
-    yy = str(dd.year)[2:4]
-
-    fname = "{a}g{d}0.{y}i.Z".format(a=sol.lower(), d=doy, y=yy)
-    fbase = fname[:-2]
-    tec_file = os.path.join(tec_dir, fbase)
-    return tec_file
-
-
-def dload_igs_tec(d, out_dir, sol='jpl', datefmt='%Y%m%d', print_msg=False):
-    """Download IGS vertical TEC files computed by JPL
-    Link: https://cddis.nasa.gov/Data_and_Derived_Products/GNSS/atmospheric_products.html
-    """
-    # date info
-    dd = dt.datetime.strptime(d, datefmt)
-    doy = '{:03d}'.format(dd.timetuple().tm_yday)
-    yy = str(dd.year)[2:4]
-
-    fbase = "{a}g{d}0.{y}i.Z".format(a=sol.lower(), d=doy, y=yy)
-    src_dir = "https://cddis.nasa.gov/archive/gnss/products/ionex/{0}/{1}".format(dd.year, doy)
-
-    # input/output filename
-    fname_src = os.path.join(src_dir, fbase)
-    fname_dst = os.path.join(out_dir, fbase)
-    fname_dst_uncomp = fname_dst[:-2]
-
-    # download
-    cmd = 'wget --continue --auth-no-challenge "{}"'.format(fname_src)
-    if os.path.isfile(fname_dst) and os.path.getsize(fname_dst) > 1000:
-        cmd += ' --timestamping '
-    if not print_msg:
-        cmd += ' --quiet '
-    else:
-        print(cmd)
-
-    # run cmd in output dir
-    pwd = os.getcwd()
-    os.chdir(out_dir)
-    os.system(cmd)
-    os.chdir(pwd)
-
-    # uncompress
-    # if output file 1) does not exist or 2) smaller than 600k in size or 3) older
-    if (not os.path.isfile(fname_dst_uncomp)
-            or os.path.getsize(fname_dst_uncomp) < 600e3
-            or os.path.getmtime(fname_dst_uncomp) < os.path.getmtime(fname_dst)):
-        cmd = "gzip --force --keep --decompress {}".format(fname_dst)
-        if print_msg:
-            print(cmd)
-        os.system(cmd)
-
-    return fname_dst_uncomp
-
-
-def grab_ionex_height(tec_file):
-    """Grab the height of the thin-shell ionosphere from IONEX file"""
-    # read ionex file into list of lines
-    with open(tec_file, 'r') as f:
-        lines = f.readlines()
-
-    # search for height - DHGT
-    iono_height = None
-    for line in lines:
-        c = [i.strip() for i in line.strip().replace('\n', '').split()]
-        if c[-1] == 'DHGT':
-            iono_height = float(c[0]) * 1e3
-            break
-    return iono_height
-
-
-def read_ionex_tec(igs_file):
-    """Read IGS TEC file in IONEX format.
-    Download from https://cddis.nasa.gov/Data_and_Derived_Products/GNSS/atmospheric_products.html.
-    Parameters: igs_file  - str, path of the TEC file
-    Returns:    lon       - 1D np.ndarray for the longitude in size of (num_lon,) in degrees
-                lat       - 1D np.ndarray for the latitude  in size of (num_lat,) in degrees
-                tec_times - 1D np.ndarray for the time of the day in size of (num_map,) in minutes
-                tec_array - 4D np.ndarray for the vertical TEC value in size of (2, num_lon, num_lat, num_map) in TECU
-                            1 TECU = 10^16 electrons / m^2
-                tec_type  - str, TEC solution
-    """
-
-    igs_code_dict = {
-        'igr' : 'IGS (Rapid)',
-        'igs' : 'IGS (Final)',
-        'jpr' : 'JPL (Rapid)',
-        'jpl' : 'JPL (Final)',
-        'cor' : 'CODE (Rapid)',
-        'cod' : 'CODE (Final)',
-    }
-    tec_code = os.path.basename(igs_file)[:3]
-    tec_type = igs_code_dict.get(tec_code, None)
-
-    #print(tec_type)
-    ## =========================================================================
-    ##
-    ## The following section reads the lines of the ionex file for 1 day
-## (typically 13 maps) into an array a[]. It also retrieves the thin-shell
-    ## ionosphere height used by IGS, the lat./long. spacing, etc. for use
-    ## later in this script.
-    ##
-    ## =========================================================================
-    #print 'Transferring IONEX data format to a TEC/DTEC array for ',ymd_date
-
-    ## Opening and reading the IONEX file into memory as a list
-    linestring = open(igs_file, 'r').read()
-    LongList = linestring.split('\n')
-
-    ## Create two lists without the header and only with the TEC and DTEC maps (based on code from ionFR.py)
-    AddToList = 0
-    TECLongList = []
-    DTECLongList = []
-    for i in range(len(LongList)-1):
-        ## Once LongList[i] gets to the DTEC maps, append DTECLongList
-        if LongList[i].split()[-1] == 'MAP':
-            if LongList[i].split()[-2] == 'RMS':
-                AddToList = 2
-        if AddToList == 1:
-            TECLongList.append(LongList[i])
-        if AddToList == 2:
-            DTECLongList.append(LongList[i])
-        ## Determine the number of TEC/DTEC maps
-        if LongList[i].split()[-1] == 'FILE':
-            if LongList[i].split()[-3:-1] == ['MAPS','IN']:
-                num_maps = float(LongList[i].split()[0])
-        ## Determine the shell ionosphere height (usually 450 km for IGS IONEX files)
-        if LongList[i].split()[-1] == 'DHGT':
-            ion_H = float(LongList[i].split()[0])
-        ## Determine the range in lat. and long. in the ionex file
-        if LongList[i].split()[-1] == 'DLAT':
-            start_lat = float(LongList[i].split()[0])
-            end_lat   = float(LongList[i].split()[1])
-            incr_lat  = float(LongList[i].split()[2])
-        if LongList[i].split()[-1] == 'DLON':
-            start_lon = float(LongList[i].split()[0])
-            end_lon   = float(LongList[i].split()[1])
-            incr_lon  = float(LongList[i].split()[2])
-        ## Find the end of the header so TECLongList can be appended
-        if LongList[i].split()[0] == 'END':
-            if LongList[i].split()[2] == 'HEADER':
-                AddToList = 1
-
-    ## Variables that indicate the number of points in Lat. and Lon.
-    points_lon = ((end_lon - start_lon)/incr_lon) + 1
-    points_lat = ((end_lat - start_lat)/incr_lat) + 1   ## Note that incr_lat is defined as '-' here
-    num_row = int(np.ceil(points_lon/16))               ## Note there are 16 columns of data in IONEX format
-
-    ## 4-D array that will contain TEC & DTEC (a[0] and a[1], respectively) values
-    a = np.zeros((2,int(points_lon),int(points_lat),int(num_maps)))
-
-    ## Selecting only the TEC/DTEC values to store in the 4-D array.
-    for Titer in range(2):
-        counterMaps = 1
-        UseList = []
-        if Titer == 0:
-            UseList = TECLongList
-        elif Titer == 1:
-            UseList = DTECLongList
-        for i in range(len(UseList)):
-            ## Pointing to first map (out of 13 maps) then by changing 'counterMaps' the other maps are selected
-            if UseList[i].split()[0] == ''+str(counterMaps)+'':
-                if UseList[i].split()[-4] == 'START':
-                    ## Pointing to the starting Latitude then by changing 'counterLat' we select TEC data
-                    ## at other latitudes within the selected map
-                    counterLat = 0
-                    newstartLat = float(str(start_lat))
-                    for iLat in range(int(points_lat)):
-                        if UseList[i+2+counterLat].split()[0].split('-')[0] == ''+str(newstartLat)+'':
-                            ## Adding to array a[] a line of Latitude TEC data
-                            counterLon = 0
-                            for row_iter in range(num_row):
-                                for item in range(len(UseList[i+3+row_iter+counterLat].split())):
-                                    a[Titer,counterLon,iLat,counterMaps-1] = UseList[i+3+row_iter+counterLat].split()[item]
-                                    counterLon = counterLon + 1
-                        if '-'+UseList[i+2+counterLat].split()[0].split('-')[1] == ''+str(newstartLat)+'':
-                            ## Adding to array a[] a line of Latitude TEC data. Same chunk as above but
-                            ## in this case we account for the TEC values at negative latitudes
-                            counterLon = 0
-                            for row_iter in range(num_row):
-                                for item in range(len(UseList[i+3+row_iter+counterLat].split())):
-                                    a[Titer,counterLon,iLat,counterMaps-1] = UseList[i+3+row_iter+counterLat].split()[item]
-                                    counterLon = counterLon + 1
-                        counterLat = counterLat + row_iter + 2
-                        newstartLat = newstartLat + incr_lat
-                    counterMaps = counterMaps + 1
-
-    ## =========================================================================
-    ##
-    ## The section creates a new array that is a copy of a[], but with the lower
-    ## left-hand corner defined as the initial element (whereas a[] has the
-    ## upper left-hand corner defined as the initial element).  This also
-    ## accounts for the fact that IONEX data is in 0.1*TECU.
-    ##
-    ## =========================================================================
-
-    ## The native temporal sampling of the IGS maps, in minutes
-    incr_time = 24*60/int(num_maps-1)
-    tec_array = np.zeros((2,int(points_lon),int(points_lat),int(num_maps)))
-
-    for Titer in range(2):
-        #incr = 0
-        for ilat in range(int(points_lat)):
-            tec_array[Titer,:,ilat,:] = 0.1*a[Titer,:,int(points_lat-1-ilat),:]
-
-    lat = np.arange(start_lat, start_lat + points_lat*incr_lat, incr_lat)
-    lon = np.arange(start_lon, start_lon + points_lon*incr_lon, incr_lon)
-    tec_times = np.arange(0, incr_time*num_maps, incr_time)
-
-    return lon, lat, tec_times, tec_array, tec_type
-
-
-def plot_tec_animation(tec_file, save=False):
-    """Plot the input tec file as animation"""
-    from cartopy import crs as ccrs
-    from matplotlib.animation import FuncAnimation
-    from mintpy.utils import plot as pp
-
-    def grab_date(tec_file, datefmt='%Y-%m-%d'):
-        """Grab the date in YYYYMMDD format from the TEC filename"""
-        tec_file = os.path.basename(tec_file)
-        # year
-        year = tec_file.split('.')[1][:2]
-        if year[0] == '9':
-            year = '19' + year
-        else:
-            year = '20' + year
-        year = int(year)
-
-        # month and day
-        doy = int(tec_file.split('.')[0].split('g')[1][:3])
-        dt_obj = dt.datetime(year, 1, 1) + dt.timedelta(doy - 1)
-        date_str = dt_obj.strftime(datefmt)
-        return date_str
-
-    # read TEC file
-    lon, lat, tec_times, tec_array, tec_type = read_ionex_tec(tec_file)
-    vmax = ut.round_to_1(np.nanmax(tec_array[0]) * 0.9)
-    # SNWE info
-    geo_box = (np.min(lon), np.max(lat), np.max(lon), np.min(lat))  # (W, N, E, S)
-    extent = (geo_box[0], geo_box[2], geo_box[3], geo_box[1])       # (W, E, S, N)
-    # date/time info
-    date_str = grab_date(tec_file)
-    num_map = len(tec_times)
-
-    # init figure
-    fig, ax = plt.subplots(figsize=[9, 4], subplot_kw=dict(projection=ccrs.PlateCarree()))
-    ax.coastlines()
-    pp.draw_lalo_label(geo_box, ax, projection=ccrs.PlateCarree(), print_msg=False)
-    # init image
-    im = ax.imshow(tec_array[0,:,:,0].T, vmin=0, vmax=vmax, extent=extent,
-                   origin='upper', animated=True, interpolation='nearest')
-    # colorbar
-    cbar = fig.colorbar(im, shrink=0.5)
-    cbar.set_label('TECU')
-    fig.tight_layout()
-
-    # update image
-    global ind
-    ind = 0
-    def animate(*args):
-        global ind
-        ind += 1
-        if ind >= num_map:
-            ind -= num_map
-
-        # update image
-        data = tec_array[0,:,:,ind].T
-        im.set_array(data)
-
-        # update title
-        dt_obj = dt.datetime.strptime(date_str, '%Y-%m-%d') + dt.timedelta(minutes=tec_times[ind])
-        title = dt_obj.isoformat()
-        if tec_type:
-            title += ' - {}'.format(tec_type)
-        ax.set_title(title)
-
-        return im,
-
-    # play animation
-    ani = FuncAnimation(fig, animate, interval=300, blit=True, save_count=num_map)
-
-    # output
-    if save:
-        outfig = '{}.gif'.format(os.path.abspath(tec_file))
-        print('saving animation to {}'.format(outfig))
-        ani.save(outfig, writer='imagemagick', dpi=300)
-    print('showing animation ...')
-    plt.show()
-    return
-
-
-
-################################## test ########################################
-
-def main(iargs=None):
-    tec_dir = os.path.expanduser('~/data/aux/GIM_IGS')
-    tec_file = dload_igs_tec('20190409', tec_dir)
-    plot_tec_animation(tec_file, save=True)
-    return
-if __name__ == '__main__':
-    main(sys.argv[1:])
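
The IGS/IONEX download, parsing, and plotting utilities removed above appear to have a successor under mintpy/objects in 1.4.0. For reference, the legacy filename convention they used ({sol}g{doy}0.{yy}i.Z) can be reproduced directly from the removed get_igs_tec_filename logic:

    import datetime as dt

    def igs_tec_filename(date_str, sol='jpl', datefmt='%Y%m%d'):
        """Legacy IONEX product name, e.g. '20190409' -> 'jplg0990.19i.Z'
        (solution code, day of year 099, 2-digit year, compressed)."""
        d = dt.datetime.strptime(date_str, datefmt)
        doy = '{:03d}'.format(d.timetuple().tm_yday)
        yy = str(d.year)[2:4]
        return '{a}g{d}0.{y}i.Z'.format(a=sol.lower(), d=doy, y=yy)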
diff -pruN 1.3.3-2/mintpy/smallbaselineApp.py 1.4.0-1/mintpy/smallbaselineApp.py
--- 1.3.3-2/mintpy/smallbaselineApp.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/smallbaselineApp.py	2022-08-04 20:01:49.000000000 +0000
@@ -14,12 +14,12 @@ import glob
 import time
 import datetime
 import shutil
-import argparse
 import numpy as np
 
 import mintpy
 from mintpy.objects import sensor, cluster, RAMP_LIST
 from mintpy.utils import readfile, writefile, utils as ut
+from mintpy.utils.arg_utils import create_argument_parser
 from mintpy.defaults.template import STEP_LIST
 import mintpy.workflow   # dynamic import of modules for smallbaselineApp
 
@@ -36,6 +36,12 @@ previous run was done using one of the s
 through the step immediately preceding the starting step of the current run.
 """.format(STEP_LIST[0:5], STEP_LIST[5:11], STEP_LIST[11:])
 
+REFERENCE = """reference:
+  Yunjun, Z., H. Fattahi, and F. Amelung (2019), Small baseline InSAR time series analysis:
+  Unwrapping error correction and noise reduction, Computers & Geosciences, 133, 104331,
+  doi:10.1016/j.cageo.2019.104331.
+"""
+
 EXAMPLE = """example:
   smallbaselineApp.py                         #run with default template 'smallbaselineApp.cfg'
   smallbaselineApp.py <custom_template>       #run with default and custom templates
@@ -50,17 +56,13 @@ EXAMPLE = """example:
   smallbaselineApp.py GalapagosSenDT128.template --end load_data    #end after step 'load_data'
 """
 
-REFERENCE = """reference:
-  Yunjun, Z., H. Fattahi, and F. Amelung (2019), Small baseline InSAR time series analysis: 
-  Unwrapping error correction and noise reduction, Computers & Geosciences, 133, 104331,
-  doi:10.1016/j.cageo.2019.104331.
-"""
-
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Routine Time Series Analysis for Small Baseline InSAR Stack',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=REFERENCE+'\n'+EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Routine Time Series Analysis for Small Baseline InSAR Stack'
+    epilog = REFERENCE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('customTemplateFile', nargs='?',
                         help='custom template with option settings.\n' +
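
This release replaces the per-script argparse boilerplate with a shared factory, so each tool can double as a standalone script and as a sub-command. A hedged sketch of the pattern (the real mintpy.utils.arg_utils.create_argument_parser may differ in signature and details):

    import argparse

    def create_argument_parser(name, synopsis='', description='', epilog='',
                               subparsers=None):
        """Return a standalone parser, or register a sub-command parser when
        an argparse subparsers object is passed in (illustrative sketch)."""
        kwargs = dict(description=description, epilog=epilog,
                      formatter_class=argparse.RawTextHelpFormatter)
        if subparsers:
            return subparsers.add_parser(name, help=synopsis, **kwargs)
        return argparse.ArgumentParser(**kwargs)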
@@ -294,6 +296,10 @@ class TimeSeriesAnalysis:
             self.templateFile = ut.update_template_file(self.templateFile, self.customTemplate)
 
         # 2) backup custom/default template file in inputs/pic folder
+        flen = len(os.path.basename(self.templateFile))
+        if self.customTemplateFile:
+            flen = max(flen, len(os.path.basename(self.customTemplateFile)))
+
         for backup_dirname in ['inputs', 'pic']:
             backup_dir = os.path.join(self.workDir, backup_dirname)
             # create directory
@@ -306,8 +312,9 @@ class TimeSeriesAnalysis:
                                              check_readable=False,
                                              print_msg=False) == 'run':
                     shutil.copy2(tfile, backup_dir)
-                    print('copy {} to {:<8} directory for backup.'.format(os.path.basename(tfile),
-                                                                          os.path.basename(backup_dir)))
+                    print('copy {f:<{l}} to {d:<8} directory for backup.'.format(f=os.path.basename(tfile),
+                                                                                 l=flen,
+                                                                                 d=os.path.basename(backup_dir)))
 
         # 3) read default template file
         print('read default template file:', self.templateFile)
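
The new flen bookkeeping feeds a nested format specification, where the pad width is itself a format argument, so the backup messages line up no matter how long the template file names are. For example:

    # '{f:<{l}}' left-justifies f in a field of dynamic width l
    msg = 'copy {f:<{l}} to {d:<8} directory for backup.'
    print(msg.format(f='smallbaselineApp.cfg', l=26, d='inputs'))
    print(msg.format(f='GalapagosSenDT128.template', l=26, d='pic'))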
@@ -352,24 +359,18 @@ class TimeSeriesAnalysis:
         os.chdir(self.workDir)
 
         # 3) check loading result
-        load_complete, stack_file, geom_file = ut.check_loaded_dataset(self.workDir, print_msg=True)[0:3]
+        stack_file, geom_file, _, ion_file = ut.check_loaded_dataset(self.workDir, print_msg=True)[:4]
 
         # 4) add custom metadata (optional)
         if self.customTemplateFile:
-            print('updating {}, {} metadata based on custom template file: {}'.format(
-                os.path.basename(stack_file),
-                os.path.basename(geom_file),
-                os.path.basename(self.customTemplateFile)))
             # use ut.add_attribute() instead of add_attribute.py because of
             # better control of special metadata, such as SUBSET_X/YMIN
-            ut.add_attribute(stack_file, self.customTemplate)
-            ut.add_attribute(geom_file, self.customTemplate)
+            msg = f'updating metadata based on custom template file {os.path.basename(self.customTemplateFile)}'
+            for fname in [stack_file, ion_file, geom_file]:
+                if fname:
+                    print(f'{msg} for file: {os.path.basename(fname)}')
+                    ut.add_attribute(fname, self.customTemplate)
 
-        # 5) if not load_complete, plot and raise exception
-        if not load_complete:
-            self.plot_result(print_aux=False)
-            self.close(normal_end=False)
-            raise RuntimeError('step {}: NOT all required dataset found, exit.'.format(step_name))
         return
 
 
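
The index shifts running through this and the following hunks ([1:3] becoming [:2], [2] becoming [1], and so on) all trace back to one change: ut.check_loaded_dataset apparently no longer returns a leading load_complete flag and now appends the optional ionosphere stack. A hedged sketch of the new-style unpacking (names illustrative):

    def unpack_loaded_dataset(results):
        """Unpack the apparent 1.4.0 return order of ut.check_loaded_dataset:
        (stack, geometry, lookup table, ionosphere stack, ...); entries are
        None for files that were not loaded, hence the `if fname:` guards."""
        stack_file, geom_file, lookup_file, ion_file = results[:4]
        return stack_file, geom_file, lookup_file, ion_file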
@@ -391,7 +392,7 @@ class TimeSeriesAnalysis:
     def run_network_modification(self, step_name):
         """Modify network of interferograms before the network inversion."""
         # check the existence of ifgramStack.h5
-        stack_file, geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[1:3]
+        stack_file, geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[:2]
         coh_txt = os.path.join(self.workDir, 'coherenceSpatialAvg.txt')
         net_fig = [os.path.join(self.workDir, i, 'network.pdf') for i in ['', 'pic']]
         try:
@@ -444,7 +445,7 @@ class TimeSeriesAnalysis:
 
     def generate_ifgram_aux_file(self):
         """Generate auxiliary files from ifgramStack file"""
-        stack_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[1]
+        stack_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[0]
         dsNames = readfile.get_dataset_list(stack_file)
         mask_file = os.path.join(self.workDir, 'maskConnComp.h5')
         coh_file = os.path.join(self.workDir, 'avgSpatialCoh.h5')
@@ -476,7 +477,7 @@ class TimeSeriesAnalysis:
         self.generate_ifgram_aux_file()
 
         # 3) add REF_X/Y(/LAT/LON) of the reference point
-        stack_file, _, lookup_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[1:4]
+        stack_file, _, lookup_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[:3]
         coh_file = os.path.join(self.workDir, 'avgSpatialCoh.h5')
 
         iargs = [stack_file, '-t', self.templateFile, '-c', coh_file]
@@ -493,7 +494,7 @@ class TimeSeriesAnalysis:
             2) numTriNonzeroIntAmbiguity.h5: phase unwrapping errors through the integer ambiguity of phase closure
         """
         # check the existence of ifgramStack.h5
-        stack_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[1]
+        stack_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[0]
 
         # 1) stack interferograms
         pha_vel_file = os.path.join(self.workDir, 'avgPhaseVelocity.h5')
@@ -517,7 +518,7 @@ class TimeSeriesAnalysis:
             return
 
         # check required input files
-        stack_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[1]
+        stack_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[0]
         mask_file = os.path.join(self.workDir, 'maskConnComp.h5')
 
         iargs_bridge = [stack_file, '--template', self.templateFile, '--update']
@@ -553,7 +554,7 @@ class TimeSeriesAnalysis:
         2) temporalCoherence.h5 --> maskTempCoh.h5
         """
         # check the existence of ifgramStack.h5
-        stack_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[1]
+        stack_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[0]
 
         # 1) invert ifgramStack for time-series
         iargs = [stack_file, '-t', self.templateFile, '--update']
@@ -567,7 +568,7 @@ class TimeSeriesAnalysis:
 
     def generate_temporal_coherence_mask(self):
         """Generate reliable pixel mask from temporal coherence"""
-        geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2]
+        geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[1]
         tcoh_file = os.path.join(self.workDir, 'temporalCoherence.h5')
         mask_file = os.path.join(self.workDir, 'maskTempCoh.h5')
         tcoh_min = self.template['mintpy.networkInversion.minTempCoh']
@@ -717,7 +718,7 @@ class TimeSeriesAnalysis:
         Automatically applied for Envisat data.
         Automatically skipped for all the other data.
         """
-        geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2]
+        geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[1]
         fnames = self.get_timeseries_filename(self.template, self.workDir)[step_name]
         in_file = fnames['input']
         out_file = fnames['output']
@@ -735,7 +736,7 @@ class TimeSeriesAnalysis:
 
     def run_solid_earth_tides_correction(self, step_name):
         """Correct solid Earth tides (SET)."""
-        geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2]
+        geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[1]
         fnames = self.get_timeseries_filename(self.template, self.workDir)[step_name]
         in_file = fnames['input']
         out_file = fnames['output']
@@ -753,7 +754,7 @@ class TimeSeriesAnalysis:
 
     def run_tropospheric_delay_correction(self, step_name):
         """Correct tropospheric delays."""
-        geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2]
+        geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[1]
         mask_file = os.path.join(self.workDir, 'maskTempCoh.h5')
 
         fnames = self.get_timeseries_filename(self.template, self.workDir)[step_name]
@@ -850,7 +851,7 @@ class TimeSeriesAnalysis:
         """step - correct_topography
         Topographic residual (DEM error) correction (optional).
         """
-        geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2]
+        geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[1]
         fnames = self.get_timeseries_filename(self.template, self.workDir)[step_name]
         in_file = fnames['input']
         out_file = fnames['output']
@@ -929,7 +930,7 @@ class TimeSeriesAnalysis:
                 out_dir = os.path.join(self.workDir, 'geo')
                 os.makedirs(out_dir, exist_ok=True)
 
-                geom_file, lookup_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2:4]
+                geom_file, lookup_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[1:3]
                 in_files = [geom_file, 'temporalCoherence.h5', 'avgSpatialCoh.h5', ts_file, 'velocity.h5']
                 iargs = in_files + ['-l', lookup_file, '-t', self.templateFile, '--outdir', out_dir, '--update']
                 print('\ngeocode.py', ' '.join(iargs))
@@ -943,7 +944,7 @@ class TimeSeriesAnalysis:
 
                 iargs = [tcoh_file, '-m', tcoh_min, '-o', mask_file]
                 # exclude pixels in shadow if shadowMask dataset is available
-                if (self.template['mintpy.networkInversion.shadowMask'] is True 
+                if (self.template['mintpy.networkInversion.shadowMask'] is True
                         and 'shadowMask' in readfile.get_dataset_list(geom_file)):
                     iargs += ['--base', geom_file, '--base-dataset', 'shadowMask', '--base-value', '1']
                 print('\ngenerate_mask.py', ' '.join(iargs))
@@ -1000,7 +1001,7 @@ class TimeSeriesAnalysis:
             tcoh_file = os.path.join(self.workDir, 'temporalCoherence.h5')
             scoh_file = os.path.join(self.workDir, 'avgSpatialCoh.h5')
             mask_file = os.path.join(self.workDir, 'maskTempCoh.h5')
-            geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2]
+            geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[1]
             if 'geo' in ts_file:
                 tcoh_file = os.path.join(self.workDir, 'geo/geo_temporalCoherence.h5')
                 scoh_file = os.path.join(self.workDir, 'geo/geo_avgSpatialCoh.h5')
@@ -1099,7 +1100,10 @@ class TimeSeriesAnalysis:
         max_memory = abs(float(self.template['mintpy.compute.maxMemory']))
         fig_dpi = int(self.template['mintpy.plot.dpi'])
 
-        stack_file, geom_file, lookup_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[1:]
+        (stack_file,
+         geom_file,
+         lookup_file,
+         ion_file) = ut.check_loaded_dataset(self.workDir, print_msg=False)[:4]
         mask_file = os.path.join(self.workDir, 'maskTempCoh.h5')
         geo_dir = os.path.join(self.workDir, 'geo')
         pic_dir = os.path.join(self.workDir, 'pic')
@@ -1158,6 +1162,12 @@ class TimeSeriesAnalysis:
             ['numInvIfgram.h5',           '--mask', 'no'],
         ]
 
+        if ion_file:
+            iargs_list0 += [
+                [ion_file, 'unwrapPhase-', '--zero-mask'],
+                [ion_file, 'coherence-',   '--mask', 'no', '-v', '0', '1'],
+            ]
+
         # translate element list whose file path has *
         iargs_list = []
         for iargs in iargs_list0:
@@ -1239,13 +1249,13 @@ class TimeSeriesAnalysis:
         msg = """Explore more info & visualization options with the following scripts:
         info.py                    #check HDF5 file structure and metadata
         view.py                    #2D map view
-        tsview.py                  #1D point time-series (interactive)   
+        tsview.py                  #1D point time-series (interactive)
         transect.py                #1D profile (interactive)
         plot_coherence_matrix.py   #plot coherence matrix for one pixel (interactive)
-        plot_network.py            #plot network configuration of the dataset    
+        plot_network.py            #plot network configuration of the dataset
         plot_transection.py        #plot 1D profile along a line of a 2D matrix (interactive)
         save_kmz.py                #generate Google Earth KMZ file in raster image
-        save_kmz_timeseries.py     #generate Goodle Earth KMZ file in points for time-series (interactive)
+        save_kmz_timeseries.py     #generate Google Earth KMZ file in points for time-series (interactive)
         """
         if print_aux:
             print(msg)
diff -pruN 1.3.3-2/mintpy/solid_earth_tides.py 1.4.0-1/mintpy/solid_earth_tides.py
--- 1.3.3-2/mintpy/solid_earth_tides.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/solid_earth_tides.py	2022-08-04 20:01:49.000000000 +0000
@@ -12,7 +12,6 @@ import os
 import sys
 import time
 import datetime as dt
-import argparse
 import warnings
 import h5py
 import numpy as np
@@ -32,29 +31,33 @@ from mintpy.utils import (
     readfile,
     writefile,
     utils as ut,
-    attribute as attr,
 )
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 ###############################################################
 TEMPLATE = get_template_content('correct_SET')
 
+REFERENCE = """reference:
+  Milbert, D. (2018), "solid: Solid Earth Tide", [Online]. Available: http://geodesyworld.github.io/
+    SOFTS/solid.htm. Accessd on: 2020-09-06.
+  Yunjun, Z., Fattahi, H., Pi, X., Rosen, P., Simons, M., Agram, P., & Aoki, Y. (2022). Range 
+    Geolocation Accuracy of C-/L-band SAR and its Implications for Operational Stack Coregistration.
+    IEEE Trans. Geosci. Remote Sens., 60, doi:10.1109/TGRS.2022.3168509.
+"""
+
 EXAMPLE = """example:
   solid_earth_tides.py timeseries.h5 -g inputs/geometryRadar.h5
   solid_earth_tides.py timeseries.h5 -g inputs/geometryGeo.h5
   solid_earth_tides.py geo/geo_timeseries_ERA5_demErr.h5 -g geo/geo_geometryRadar.h5
 """
 
-REFERENCE = """reference:
-  Milbert, D., Solid Earth Tide, http://geodesyworld.github.io/SOFTS/solid.htm, Accessd 2020 September 6.
-  Fattahi, H., Z. Yunjun, X. Pi, P. S. Agram, P. Rosen, and Y. Aoki (2020), Absolute geolocation of SAR 
-    Big-Data: The first step for operational InSAR time-series analysis, AGU Fall Meeting 2020, 1-17 Dec 2020.
-"""
-
-def create_parser():
-    parser = argparse.ArgumentParser(description='Solid Earth tides (SET) correction',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog='{}\n{}\n{}'.format(REFERENCE, TEMPLATE, EXAMPLE))
+def create_parser(subparsers=None):
+    synopsis = 'Solid Earth tides (SET) correction via PySolid'
+    epilog = REFERENCE + '\n' + TEMPLATE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('dis_file', help='timeseries HDF5 file, i.e. timeseries.h5')
     parser.add_argument('-g','--geomtry', dest='geom_file', type=str, required=True,
@@ -113,78 +116,6 @@ def cmd_line_parse(iargs=None):
 
 
 ###############################################################
-def prepare_los_geometry(geom_file):
-    """Prepare LOS geometry data/info in geo-coordinates
-    Parameters: geom_file  - str, path of geometry file
-    Returns:    inc_angle  - 2D np.ndarray, incidence angle in radians
-                head_angle - 2D np.ndarray, heading   angle in radians
-                atr        - dict, metadata in geo-coordinate
-    """
-
-    print('prepare LOS geometry in geo-coordinates from file: {}'.format(geom_file))
-    atr = readfile.read_attribute(geom_file)
-
-    print('read incidenceAngle from file: {}'.format(geom_file))
-    inc_angle = readfile.read(geom_file, datasetName='incidenceAngle')[0]
-
-    if 'azimuthAngle' in readfile.get_dataset_list(geom_file):
-        print('read azimuthAngle   from file: {}'.format(geom_file))
-        print('convert azimuth angle to heading angle')
-        az_angle  = readfile.read(geom_file, datasetName='azimuthAngle')[0]
-        head_angle = ut.azimuth2heading_angle(az_angle)
-    else:
-        print('use the HEADING attribute as the mean heading angle')
-        head_angle = np.ones(inc_angle.shape, dtype=np.float32) * float(atr['HEADING'])
-
-    # geocode inc/az angle data if in radar-coord
-    if 'Y_FIRST' not in atr.keys():
-        print('-'*50)
-        print('geocoding the incidence / heading angles ...')
-        res_obj = resample(lut_file=geom_file, src_file=geom_file)
-        res_obj.open()
-        res_obj.prepare()
-
-        # resample data
-        box = res_obj.src_box_list[0]
-        inc_angle  = res_obj.run_resample(src_data=inc_angle[box[1]:box[3], box[0]:box[2]])
-        head_angle = res_obj.run_resample(src_data=head_angle[box[1]:box[3], box[0]:box[2]])
-
-        # update attribute
-        atr = attr.update_attribute4radar2geo(atr, res_obj=res_obj)
-
-    # for 'Y_FIRST' not in 'degree'
-    # e.g. meters for UTM projection from ASF HyP3
-    if not atr['Y_UNIT'].lower().startswith('deg'):
-        # get SNWE in meter
-        length, width = int(atr['LENGTH']), int(atr['WIDTH'])
-        N = float(atr['Y_FIRST'])
-        W = float(atr['X_FIRST'])
-        y_step = float(atr['Y_STEP'])
-        x_step = float(atr['X_STEP'])
-        S = N + y_step * length
-        E = W + x_step * width
-
-        # SNWE in meter --> degree
-        lat0, lon0 = ut.to_latlon(atr['OG_FILE_PATH'], W, N)
-        lat1, lon1 = ut.to_latlon(atr['OG_FILE_PATH'], E, S)
-        lat_step = (lat1 - lat0) / length
-        lon_step = (lon1 - lon0) / width
-
-        # update Y/X_FIRST/STEP/UNIT
-        atr['Y_FIRST'] = lat0
-        atr['X_FIRST'] = lon0
-        atr['Y_STEP'] = lat_step
-        atr['X_STEP'] = lon_step
-        atr['Y_UNIT'] = 'degrees'
-        atr['X_UNIT'] = 'degrees'
-
-    # unit: degree to radian
-    inc_angle *= np.pi / 180.
-    head_angle *= np.pi / 180.
-
-    return inc_angle, head_angle, atr
-
-
 def get_datetime_list(ts_file, date_wise_acq_time=False):
     """Prepare exact datetime for each acquisition in the time-series file.
 
@@ -284,7 +215,7 @@ def calc_solid_earth_tides_timeseries(ts
         return set_file
 
     # prepare LOS geometry: geocoding if in radar-coordinates
-    inc_angle, head_angle, atr_geo = prepare_los_geometry(geom_file)
+    inc_angle, head_angle, atr_geo = ut.prepare_geo_los_geometry(geom_file, unit='rad')
 
     # get LOS unit vector
     with warnings.catch_warnings():
@@ -308,7 +239,7 @@ def calc_solid_earth_tides_timeseries(ts
 
     # loop for calc
     print('\n'+'-'*50)
-    print('calculating solid Earth tides using solid.for (D. Milbert, 2018) ...')
+    print('calculating solid Earth tides using PySolid (Milbert, 2018; Yunjun et al., 2022) ...')
     prog_bar = ptime.progressBar(maxValue=num_date, print_msg=not verbose)
     for i, dt_obj in enumerate(dt_objs):
         # calculate tide in ENU direction
@@ -397,7 +328,7 @@ def main(iargs=None):
 
     m, s = divmod(time.time() - start_time, 60)
     print('time used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s))
-    return inps.cor_dis_file
+    return
 
 ###############################################################
 if __name__ == '__main__':
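
The per-date loop above computes the tides in ENU components and projects them onto the radar line of sight using the incidence and heading angles prepared by ut.prepare_geo_los_geometry(). A minimal sketch of that projection, assuming one common sign convention (positive motion towards the satellite, heading clockwise from north); the exact signs must match the processor's angle definitions:

    import numpy as np

    def enu2los(d_e, d_n, d_u, inc_angle, head_angle):
        """Project ENU displacements onto the LOS; angles in radians.
        Illustrative sign convention only, not necessarily MintPy's."""
        return (- d_e * np.sin(inc_angle) * np.sin(head_angle)
                + d_n * np.sin(inc_angle) * np.cos(head_angle)
                + d_u * np.cos(inc_angle))
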
diff -pruN 1.3.3-2/mintpy/spatial_average.py 1.4.0-1/mintpy/spatial_average.py
--- 1.3.3-2/mintpy/spatial_average.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/spatial_average.py	2022-08-04 20:01:49.000000000 +0000
@@ -7,9 +7,9 @@
 
 
 import sys
-import argparse
 import matplotlib.pyplot as plt
 from mintpy.utils import readfile, ptime, utils as ut, plot as pp
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 #################################  Usage  ####################################
@@ -19,10 +19,13 @@ EXAMPLE = """example:
 """
 
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Calculate average in space',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Calculate average in space'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
+
     parser.add_argument('file', help='File to calculate spatial average')
     parser.add_argument('-d', '--dset', '--dataset', dest='datasetName',
                         help='dataset used to calculate, for ifgramStack file only.')
@@ -42,24 +45,28 @@ def cmd_line_parse(iargs=None):
 #############################  Main Function  ################################
 def main(iargs=None):
     inps = cmd_line_parse(iargs)
+
     print('\n*************** Spatial Average ******************')
-    mean_list, date_list = ut.spatial_average(inps.file,
-                                              datasetName=inps.datasetName,
-                                              maskFile=inps.mask_file,
-                                              saveList=True)
+    mean_list, date_list = ut.spatial_average(
+        inps.file,
+        datasetName=inps.datasetName,
+        maskFile=inps.mask_file,
+        saveList=True,
+    )
+
     atr = readfile.read_attribute(inps.file)
-    k = atr['FILE_TYPE']
-    if inps.disp_fig and k == 'timeseries':
-        dates, datevector = ptime.date_list2vector(date_list)
+    if inps.disp_fig and atr['FILE_TYPE'] == 'timeseries':
+        dates = ptime.date_list2vector(date_list)[0]
         # plot
         fig = plt.figure()
         ax = fig.add_subplot(111)
         ax.plot(dates, mean_list, '-o')#, lw=2, ms=16, alpha=0.7) #, mfc='crimson')
         ax.set_title('Spatial Average', fontsize=12)
-        ax = pp.auto_adjust_xaxis_date(ax, datevector)[0]
+        ax = pp.auto_adjust_xaxis_date(ax, dates)[0]
         ax.set_xlabel('Time [years]', fontsize=12)
         ax.set_ylabel('Mean', fontsize=12)
         plt.show()
+
     return
 
 
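
The refactored main() is now a thin wrapper around ut.spatial_average(), so the same computation can be driven from Python directly. A short sketch, with placeholder file names:

    from mintpy.utils import utils as ut

    # mean value of each acquisition over the masked pixels;
    # saveList=True also writes the values to a text file
    mean_list, date_list = ut.spatial_average(
        'timeseries.h5',
        datasetName=None,
        maskFile='maskTempCoh.h5',
        saveList=True,
    )
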
diff -pruN 1.3.3-2/mintpy/spatial_filter.py 1.4.0-1/mintpy/spatial_filter.py
--- 1.3.3-2/mintpy/spatial_filter.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/spatial_filter.py	2022-08-04 20:01:49.000000000 +0000
@@ -9,16 +9,16 @@
 
 import os
 import sys
-import argparse
+import numpy as np
+from scipy import ndimage
 
 try:
     from skimage import filters, feature, morphology
 except ImportError:
     raise ImportError('Could not import skimage!')
 
-import numpy as np
-from scipy import ndimage
 from mintpy.utils import readfile, writefile
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 ################################################################################################
@@ -40,10 +40,12 @@ EXAMPLE = """example:
 """
 
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Spatial filtering of 2D image.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=REFERENCE+'\n'+EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Spatial filtering of 2D image.'
+    epilog = REFERENCE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', help='File to be filtered')
     parser.add_argument('dset', type=str, nargs='*', default=[],
@@ -231,13 +233,16 @@ def filter_file(fname, ds_names=None, fi
 def main(iargs=None):
     inps = cmd_line_parse(iargs)
 
-    inps.outfile = filter_file(inps.file,
-                               ds_names=inps.dset,
-                               filter_type=inps.filter_type,
-                               filter_par=inps.filter_par,
-                               fname_out=inps.outfile)
+    filter_file(
+        fname=inps.file,
+        ds_names=inps.dset,
+        filter_type=inps.filter_type,
+        filter_par=inps.filter_par,
+        fname_out=inps.outfile,
+    )
+
     print('Done.')
-    return inps.outfile
+    return
 
 
 ################################################################################################
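
The same create_parser(subparsers=None) refactor recurs in every module below. The new helper mintpy.utils.arg_utils.create_argument_parser is not shown in this diff; a plausible sketch of the dual-use pattern it enables (standalone script parser vs. registration as a sub-command), with a hypothetical implementation:

    import argparse

    def create_argument_parser(name, synopsis=None, description=None,
                               epilog=None, subparsers=None):
        """Hypothetical sketch: return a standalone parser, or register
        `name` as a sub-command on an existing subparsers object."""
        kwargs = dict(description=description, epilog=epilog,
                      formatter_class=argparse.RawTextHelpFormatter)
        if subparsers is None:
            return argparse.ArgumentParser(prog=name, **kwargs)
        return subparsers.add_parser(name, help=synopsis, **kwargs)
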
diff -pruN 1.3.3-2/mintpy/subset.py 1.4.0-1/mintpy/subset.py
--- 1.3.3-2/mintpy/subset.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/subset.py	2022-08-04 20:01:49.000000000 +0000
@@ -8,7 +8,6 @@
 
 import os
 import sys
-import argparse
 import h5py
 import numpy as np
 
@@ -19,6 +18,7 @@ from mintpy.utils import (
     utils as ut,
     attribute as attr,
 )
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 ###########################################################################################
@@ -47,10 +47,13 @@ EXAMPLE = """example:
   subset.py geomap_4rlks.trans --tight
 """
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Generate a subset from file/dataset',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=TEMPLATE+'\n'+EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Generate a subset from file/dataset'
+    epilog = TEMPLATE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
+
     parser.add_argument('file', nargs='+', help='File(s) to subset/crop')
 
     parser.add_argument('-x','--sub-x','--subset-x', dest='subset_x', type=int, nargs=2,
@@ -77,8 +80,6 @@ def create_parser():
                         help="fill subset area out of data coverage with input value. i.e. \n"
                              "np.nan, 0, 1000, ... \n"
                              "By default, it's None for no-outfill.")
-    parser.add_argument('--no-parallel', dest='parallel', action='store_false', default=True,
-                        help='Disable parallel processing. Diabled auto for 1 input file.\n\n')
 
     parser.add_argument('-o', '--output', dest='outfile',
                         help='output file name\n' +
@@ -120,7 +121,7 @@ def get_coverage_box(atr):
     width = int(atr['WIDTH'])
 
     # Get geo box
-    try:
+    if all(x in atr.keys() for x in ['Y_STEP', 'X_STEP', 'Y_FIRST', 'X_FIRST']):
         lat_step = float(atr['Y_STEP'])
         lon_step = float(atr['X_STEP'])
         ul_lat = float(atr['Y_FIRST'])
@@ -128,16 +129,18 @@ def get_coverage_box(atr):
         lr_lat = ul_lat + lat_step*length
         lr_lon = ul_lon + lon_step*width
         geo_box = (ul_lon, ul_lat, lr_lon, lr_lat)
-    except ValueError:
+    else:
         geo_box = None
 
     # Get pixel box
-    try:
-        pix_box = (int(atr['SUBSET_XMIN']),
-                   int(atr['SUBSET_YMIN']),
-                   int(atr['SUBSET_XMAX']),
-                   int(atr['SUBSET_YMAX']))
-    except ValueError:
+    if all(f'SUBSET_{x}' in atr.keys() for x in ['YMIN', 'XMIN', 'YMAX', 'XMAX']):
+        pix_box = (
+            int(atr['SUBSET_XMIN']),
+            int(atr['SUBSET_YMIN']),
+            int(atr['SUBSET_XMAX']),
+            int(atr['SUBSET_YMAX']),
+        )
+    else:
         pix_box = None
 
     return pix_box, geo_box
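
The switch from try/except to explicit key checks also fixes a latent bug: a missing attribute raises KeyError, which the old `except ValueError` never caught. A condensed, runnable sketch of the new pattern with a toy metadata dict:

    # test key membership up front instead of relying on exceptions
    atr = {'Y_STEP': '-0.001', 'X_STEP': '0.001'}   # toy metadata
    geo_keys = ('X_FIRST', 'Y_FIRST', 'X_STEP', 'Y_STEP')
    geo_box = None
    if all(key in atr for key in geo_keys):
        geo_box = tuple(float(atr[key]) for key in geo_keys)
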
diff -pruN 1.3.3-2/mintpy/temporal_average.py 1.4.0-1/mintpy/temporal_average.py
--- 1.3.3-2/mintpy/temporal_average.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/temporal_average.py	2022-08-04 20:01:49.000000000 +0000
@@ -9,9 +9,9 @@
 import os
 import sys
 import time
-import argparse
 import h5py
 from mintpy.utils import utils as ut, readfile
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 #################################  Usage  ####################################
@@ -20,10 +20,12 @@ EXAMPLE = """example:
   temporal_average.py ./inputs/ifgramStack.h5 -d coherence   -o avgSpatialCoh.h5
 """
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Calculate temporal average (stacking) of multi-temporal datasets',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Calculate temporal average (stacking) of multi-temporal datasets'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('file', type=str, help='input file with multi-temporal datasets')
     parser.add_argument('-d', '--ds', '--dataset', dest='datasetName', default='coherence',
diff -pruN 1.3.3-2/mintpy/temporal_derivative.py 1.4.0-1/mintpy/temporal_derivative.py
--- 1.3.3-2/mintpy/temporal_derivative.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/temporal_derivative.py	2022-08-04 20:01:49.000000000 +0000
@@ -8,9 +8,9 @@
 
 import os
 import sys
-import argparse
 import numpy as np
 from mintpy.objects import timeseries
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 ############################################################################
@@ -18,10 +18,13 @@ EXAMPLE = """example:
   temporal_derivative.py  timeseries.h5 
 """
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Calculate the temporal derivative of time-series.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Calculate the temporal derivative of time-series.'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
+
     parser.add_argument('file', type=str, help='time-series displacement file.')
     parser.add_argument('-o','--output', dest='outfile', type=str, help='output derivative time-series file.')
     return parser
@@ -54,7 +57,7 @@ def main(iargs=None):
     obj_out = timeseries(inps.outfile)
     obj_out.write2hdf5(ts_data_1d, refFile=inps.file)
 
-    return inps.outfile
+    return
 
 
 ############################################################################
diff -pruN 1.3.3-2/mintpy/temporal_filter.py 1.4.0-1/mintpy/temporal_filter.py
--- 1.3.3-2/mintpy/temporal_filter.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/temporal_filter.py	2022-08-04 20:01:49.000000000 +0000
@@ -8,26 +8,28 @@
 
 import os
 import sys
-import argparse
 import numpy as np
 from mintpy.objects import timeseries
 from mintpy.utils import ptime
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 ############################################################
+REFERENCE="""reference:
+  Wikipedia: https://en.wikipedia.org/wiki/Gaussian_blur
+"""
+
 EXAMPLE = """example:
  temporal_filter.py timeseries_ERA5_demErr.h5
  temporal_filter.py timeseries_ERA5_demErr.h5 -t 0.1
 """
 
-REFERENCE="""reference:
-  Wikipedia: https://en.wikipedia.org/wiki/Gaussian_blur
-"""
-
-def create_parser():
-    parser = argparse.ArgumentParser(description='Smoothing timeseries in time domain with a moving Gaussian window',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=REFERENCE+'\n'+EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Smoothing timeseries in time domain with a moving filter'
+    epilog = REFERENCE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('timeseries_file',
                         help='timeseries file to be smoothed.')
@@ -79,7 +81,8 @@ def main(iargs=None):
         inps.outfile = '{}_tempGaussian.h5'.format(os.path.splitext(inps.timeseries_file)[0])
     obj_out = timeseries(inps.outfile)
     obj_out.write2hdf5(ts_data_filt, refFile=inps.timeseries_file)
-    return inps.outfile
+
+    return
 
 
 ############################################################
diff -pruN 1.3.3-2/mintpy/timeseries2velocity.py 1.4.0-1/mintpy/timeseries2velocity.py
--- 1.3.3-2/mintpy/timeseries2velocity.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/timeseries2velocity.py	2022-08-04 20:01:49.000000000 +0000
@@ -2,33 +2,40 @@
 ############################################################
 # Program is part of MintPy                                #
 # Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi         #
-# Author: Zhang Yunjun, Heresh Fattahi, 2013               #
+# Author: Zhang Yunjun, Heresh Fattahi, Yuan-Kai Liu, 2013 #
 ############################################################
-# Add bootstrap method for std. dev. estimation, Emre Havazli, May 2020.
-# Add poly, periodic and step func., Yuan-Kai Liu, Aug 2020.
-# Add exp and log func., Yuan-Kai Liu, Jun 2021.
+# Recommend import:
+#   from mintpy import timeseries2velocity as ts2vel
 
 
 import os
 import sys
 import time
-import argparse
 import numpy as np
 from scipy import linalg
 
 from mintpy.defaults.template import get_template_content
 from mintpy.objects import timeseries, giantTimeseries, HDFEOS, cluster
-from mintpy.utils import arg_group, ptime, time_func, readfile, writefile, utils as ut
+from mintpy.utils import arg_utils, ptime, time_func, readfile, writefile, utils as ut
 
 
 dataType = np.float32
 # key configuration parameter name
-key_prefix = 'mintpy.velocity.'
+key_prefix = 'mintpy.timeFunc.'
 configKeys = [
+    # date
     'startDate',
     'endDate',
     'excludeDate',
-    'bootstrap',
+    # time functions
+    'polynomial',
+    'periodic',
+    'stepDate',
+    'exp',
+    'log',
+    # uncertainty quantification
+    'uncertaintyQuantification',
+    'timeSeriesCovFile',
     'bootstrapCount',
 ]
 
@@ -38,30 +45,27 @@ TEMPLATE = get_template_content('velocit
 
 REFERENCE = """references:
   Fattahi, H., and F. Amelung (2015), InSAR bias and uncertainty due to the systematic and stochastic
-  tropospheric delay, Journal of Geophysical Research: Solid Earth, 120(12), 8758-8773, doi:10.1002/2015JB012419.
-
+    tropospheric delay, J. Geophy. Res. Solid Earth, 120(12), 8758-8773, doi:10.1002/2015JB012419.
   Efron, B., and R. Tibshirani (1986), Bootstrap methods for standard errors, confidence intervals,
-  and other measures of statistical accuracy, Statistical science, 54-75, doi:10.1214/ss/1177013815.
+    and other measures of statistical accuracy, Statistical Science, 54-75, doi:10.1214/ss/1177013815.
 """
 
 EXAMPLE = """example:
-  timeseries2velocity.py  timeseries_ERA5_demErr.h5
-  timeseries2velocity.py  timeseries_ERA5_demErr_ramp.h5  -t KyushuT73F2980_2990AlosD.template
-  timeseries2velocity.py  timeseries.h5  --start-date 20080201  --end-date 20100508
-  timeseries2velocity.py  timeseries.h5  --exclude exclude_date.txt
-
-  timeseries2velocity.py  LS-PARAMS.h5
-  timeseries2velocity.py  NSBAS-PARAMS.h5
-  timeseries2velocity.py  TS-PARAMS.h5
-
-  # bootstrapping for STD calculation
-  timeseries2velocity.py timeseries_ERA5_demErr.h5 --bootstrap
+  timeseries2velocity.py timeseries_ERA5_demErr.h5
+  timeseries2velocity.py timeseries_ERA5_demErr_ramp.h5 -t KyushuAlosDT73.txt
+  timeseries2velocity.py timeseries.h5  --start-date 20080201  --end-date 20100508
+  timeseries2velocity.py timeseries.h5  --ex exclude_date.txt
 
   # complex time functions
-  timeseries2velocity.py timeseries_ERA5_ramp_demErr.h5 --poly 3 --period 1 0.5 --step 20170910
-  timeseries2velocity.py timeseries_ERA5_demErr.h5      --poly 1 --exp 20170910 90
-  timeseries2velocity.py timeseries_ERA5_demErr.h5      --poly 1 --log 20170910 60.4
-  timeseries2velocity.py timeseries_ERA5_demErr.h5      --poly 1 --log 20170910 60.4 200 --log 20171026 200.7
+  timeseries2velocity.py timeseries_ERA5_demErr.h5 --poly 3 --period 1 0.5 --step 20170910
+  timeseries2velocity.py timeseries_ERA5_demErr.h5 --poly 1 --exp 20170910 90
+  timeseries2velocity.py timeseries_ERA5_demErr.h5 --poly 1 --log 20170910 60.4
+  timeseries2velocity.py timeseries_ERA5_demErr.h5 --poly 1 --log 20170910 60.4 200 --log 20171026 200.7
+
+  # uncertainty quantification of the estimated time functions
+  timeseries2velocity.py timeseries_ERA5_demErr.h5 --uq residue
+  timeseries2velocity.py timeseries_ERA5_demErr.h5 --uq covariance --ts-cov timeseriesCov.h5
+  timeseries2velocity.py timeseries_ERA5_demErr.h5 --uq bootstrap
 """
 
 DROP_DATE_TXT = """exclude_date.txt:
@@ -71,55 +75,52 @@ DROP_DATE_TXT = """exclude_date.txt:
 """
 
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Estimate velocity / time functions from time-series.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=TEMPLATE+'\n'+REFERENCE+'\n'+EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Estimate velocity / time functions from time-series.'
+    epilog = REFERENCE + '\n' + TEMPLATE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = arg_utils.create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     # inputs
-    parser.add_argument('timeseries_file',
-                        help='Time series file for velocity inversion.')
+    parser.add_argument('timeseries_file', help='Time series file for time function estimation.')
     parser.add_argument('--template', '-t', dest='template_file', help='template file with options')
-    parser.add_argument('--ts-cov-file', dest='ts_cov_file',
-                        help='Time-series (co)variance file for velocity STD calculation')
 
     # outputs
     parser.add_argument('-o', '--output', dest='outfile', help='output file name')
     parser.add_argument('--update', dest='update_mode', action='store_true',
                         help='Enable update mode, and skip estimation if:\n'+
-                             '1) output velocity file already exists, readable '+
+                             '1) output file already exists, readable '+
                              'and newer than input file\n' +
                              '2) all configuration parameters are the same.')
 
     # reference in time and space
-    # for input file without reference info, e.g. ERA5.h5
-    parser.add_argument('--ref-lalo', dest='ref_lalo', metavar=('LAT', 'LON'), type=float, nargs=2,
-                        help='Change referene point LAT LON for estimation.')
-    parser.add_argument('--ref-yx', dest='ref_yx', metavar=('Y', 'X'), type=int, nargs=2,
-                        help='Change referene point Y X for estimation.')
-    parser.add_argument('--ref-date', dest='ref_date', metavar='DATE',
-                        help='Change reference date for estimation.')
+    # useful for input file without reference info, e.g. ERA5.h5
+    parser = arg_utils.add_reference_argument(parser, plot=False)
 
     # dates of interest
-    date = parser.add_argument_group('dates of interest')
-    date.add_argument('--start-date','-s', dest='startDate',
-                      help='start date for velocity estimation')
-    date.add_argument('--end-date','-e', dest='endDate',
-                      help='end date for velocity estimation')
-    date.add_argument('--exclude', '--ex', dest='excludeDate', nargs='+', default=[],
-                      help='date(s) not included in velocity estimation, i.e.:\n' +
+    date = parser.add_argument_group('Dates of interest')
+    date.add_argument('-s','--start-date', dest='startDate',
+                      help='start date for time function estimation')
+    date.add_argument('-e','--end-date', dest='endDate',
+                      help='end date for time function estimation')
+    date.add_argument('--ex','--ex-date', dest='excludeDate', nargs='+', default=[],
+                      help='date(s) not included in time function estimation, i.e.:\n' +
                            '--exclude 20040502 20060708 20090103\n' +
                            '--exclude exclude_date.txt\n'+DROP_DATE_TXT)
 
-    # bootstrap
-    bootstrap = parser.add_argument_group('bootstrapping', 'estimating the mean / STD of the velocity estimator')
-    bootstrap.add_argument('--bootstrap', '--bootstrapping', dest='bootstrap', action='store_true',
-                           help='Enable bootstrapping to estimate the mean and STD of the velocity estimator.')
-    bootstrap.add_argument('--bc', '--bootstrap-count', dest='bootstrapCount', type=int, default=400,
-                           help='number of iterations for bootstrapping (default: %(default)s).')
+    # Uncertainty quantification
+    uq = parser.add_argument_group('Uncertainty quantification (UQ)', 'Estimating the time function parameters STD')
+    uq.add_argument('--uq', '--uncertainty', dest='uncertaintyQuantification', metavar='VAL',
+                    default='residue', choices={'residue', 'covariance', 'bootstrap'},
+                    help='Uncertainty quantification method (default: %(default)s).')
+    uq.add_argument('--ts-cov','--ts-cov-file', dest='timeSeriesCovFile',
+                    help='4D time-series (co)variance file for time function STD calculation')
+    uq.add_argument('--bc', '--bootstrap-count', dest='bootstrapCount', type=int, default=400,
+                    help='number of iterations for bootstrapping (default: %(default)s).')
 
     # time functions
-    parser = arg_group.add_timefunc_argument(parser)
+    parser = arg_utils.add_timefunc_argument(parser)
 
     # residual file
     resid = parser.add_argument_group('Residual file', 'Save residual displacement time-series to HDF5 file.')
@@ -129,7 +130,7 @@ def create_parser():
                        help='Output file name for the residual time-series file (default: %(default)s).')
 
     # computing
-    parser = arg_group.add_memory_argument(parser)
+    parser = arg_utils.add_memory_argument(parser)
 
     return parser
 
@@ -138,26 +139,31 @@ def cmd_line_parse(iargs=None):
     """Command line parser."""
     parser = create_parser()
     inps = parser.parse_args(args=iargs)
-    inps.key = readfile.read_attribute(inps.timeseries_file)['FILE_TYPE']
-    if inps.key not in ['timeseries', 'giantTimeseries', 'HDFEOS']:
-        raise Exception('input file is {}, NOT timeseries!'.format(inps.key))
-
-    # check bootstrap count number
-    if inps.bootstrap and inps.bootstrapCount <= 1:
-        inps.bootstrap = False
-        print('bootstrap-count should be larger than 1, otherwise it does not make sense')
-        print('turn OFF bootstrapping and continue without it.')
-
-    if inps.bootstrap:
-        print('bootstrapping is turned ON.')
-        if (inps.polynomial != 1 or inps.periodic or inps.step or inps.exp or inps.log):
-            raise ValueError('bootstrapping currently support polynomial ONLY and ONLY with the order of 1!')
+
+    # check if input file is time series
+    inps.file_type = readfile.read_attribute(inps.timeseries_file)['FILE_TYPE']
+    if inps.file_type not in ['timeseries', 'giantTimeseries', 'HDFEOS']:
+        raise Exception('input file is {}, NOT timeseries!'.format(inps.file_type))
 
     if inps.template_file:
         inps = read_template2inps(inps.template_file, inps)
 
-    # Initialize the dictionaries of exp and log funcs
-    inps = init_exp_log_dicts(inps)
+    # --uq
+    if inps.uncertaintyQuantification == 'bootstrap':
+        # check 1 - bootstrap count number
+        if inps.bootstrapCount <= 1:
+            inps.uncertaintyQuantification = 'residue'
+            print('WARNING: bootstrapCount should be larger than 1!')
+            print('Change the uncertainty quantification method from bootstrap to residue, and continue.')
+        # check 2 - advanced time func
+        if (inps.polynomial != 1 or inps.periodic or inps.stepDate or inps.exp or inps.log):
+            raise ValueError('bootstrapping supports polynomial of order 1 ONLY!')
+
+    elif inps.uncertaintyQuantification == 'covariance':
+        if not inps.timeSeriesCovFile or not os.path.isfile(inps.timeSeriesCovFile):
+            inps.uncertaintyQuantification = 'residue'
+            print('WARNING: NO time series covariance file found!')
+            print('Change the uncertainty quantification method from covariance to residue, and continue.')
 
     # --ref-lalo option
     if inps.ref_lalo:
@@ -169,50 +175,16 @@ def cmd_line_parse(iargs=None):
             print('input reference point in (lat, lon): ({}, {})'.format(inps.ref_lalo[0], inps.ref_lalo[1]))
             print('corresponding   point in (y, x): ({}, {})'.format(inps.ref_yx[0], inps.ref_yx[1]))
 
-    return inps
-
-
-def init_exp_log_dicts(inps):
-    """Initialize the dictionaries of exp and log funcs
-    By trarnslating inps.exp/log into inps.expDict/logDict.
-    """
-    # --exp option: convert cmd inputs into dict format
-    inps.expDict = dict()
-    if inps.exp:
-        for exp_list in inps.exp:
-            onset_time, char_times = exp_list[0], exp_list[1:]
-            if len(onset_time) == 8:
-                if len(char_times) > 0:
-                    inps.expDict[onset_time] = np.array(char_times).astype(float).tolist()
-
-                else:
-                    msg = 'NO characteristic time found: {}\n'.format(char_times)
-                    msg += 'one or more characteristic time(s) are required for each onset date'
-                    msg += ' for the exp function, e.g.:\n'
-                    msg += '--exp 20181026 60 OR\n'
-                    msg += '--exp 20161231 80.5 200  # append as many char_times as you like!'
-                    raise ValueError(msg)
-            else:
-                raise ValueError('input onset time is NOT in YYYYMMDD format: {}'.format(onset_time))
-
-    # --log option: convert cmd inputs into dict format
-    inps.logDict = dict()
-    if inps.log:
-        for log_list in inps.log:
-            onset_time, char_times = log_list[0], log_list[1:]
-            if len(onset_time) == 8:
-                if len(char_times) > 0:
-                    inps.logDict[onset_time] = np.array(char_times).astype(float).tolist()
-
-                else:
-                    msg = 'NO characteristic time found: {}\n'.format(char_times)
-                    msg += 'one or more characteristic time(s) are required for each onset date'
-                    msg += ' for the log function, e.g.:\n'
-                    msg += '--exp 20181026 60 OR\n'
-                    msg += '--exp 20161231 80.5 200  # append as many char_times as you like!'
-                    raise ValueError(msg)
-            else:
-                raise ValueError('input onset time is NOT in YYYYMMDD format: {}'.format(onset_time))
+    # --output
+    if not inps.outfile:
+        # get suffix
+        ts_file_base = os.path.splitext(os.path.basename(inps.timeseries_file))[0]
+        if ts_file_base in ['timeseriesRg', 'timeseriesAz']:
+            suffix = ts_file_base.split('timeseries')[-1]
+        else:
+            suffix = ''
+        # compose default output filename
+        inps.outfile = f'velocity{suffix}.h5'
 
     return inps
 
@@ -223,23 +195,35 @@ def read_template2inps(template_file, in
         inps = cmd_line_parse()
     iDict = vars(inps)
     print('read options from template file: '+os.path.basename(template_file))
-    template = readfile.read_template(inps.template_file)
+    template = readfile.read_template(inps.template_file, skip_chars=['[', ']'])
     template = ut.check_template_auto_value(template)
 
     # Read template option
-    prefix = 'mintpy.velocity.'
+    prefix = 'mintpy.timeFunc.'
     keyList = [i for i in list(iDict.keys()) if prefix+i in template.keys()]
     for key in keyList:
         value = template[prefix+key]
-        if key in ['bootstrap']:
-            iDict[key] = value
         if value:
             if key in ['startDate', 'endDate']:
                 iDict[key] = ptime.yyyymmdd(value)
+
             elif key in ['excludeDate']:
-                value = value.replace('[','').replace(']','').replace(',', ' ')
-                iDict[key] = ptime.yyyymmdd(value.split())
-            elif key in ['bootstrapCount']:
+                iDict[key] = ptime.yyyymmdd(value.split(','))
+
+            elif key in ['periodic']:
+                iDict[key] = [float(x) for x in value.replace(';',',').split(',')]
+
+            elif key in ['stepDate']:
+                iDict[key] = value.replace(';',',').split(',')
+
+            elif key in ['exp', 'log']:
+                value = value.replace('/',';').replace('|',';')
+                iDict[key] = [x.split(',') for x in value.split(';')]
+
+            elif key in ['uncertaintyQuantification', 'timeSeriesCovFile']:
+                iDict[key] = value
+
+            elif key in ['polynomial', 'bootstrapCount']:
                 iDict[key] = int(value)
 
     key = 'mintpy.compute.maxMemory'
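
For reference, the separator handling above turns the mintpy.timeFunc.* template strings into Python values; a small sketch of the exp/log branch with a toy value:

    # '/' and '|' are accepted as aliases for ';' between exp/log terms
    value = '20181026,60/20161231,80.5,200'
    value = value.replace('/', ';').replace('|', ';')
    exp = [x.split(',') for x in value.split(';')]
    # -> [['20181026', '60'], ['20161231', '80.5', '200']]
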
@@ -282,180 +266,93 @@ def run_or_skip(inps):
 
 
 ############################################################################
-def read_exclude_date(inps, dateListAll):
-    # Merge ex_date/startDate/endDate into ex_date
-    yy_list_all = ptime.yyyymmdd2years(dateListAll)
-    exDateList = []
-
-    # ex_date
-    exDateList += ptime.read_date_list(list(inps.excludeDate), date_list_all=dateListAll)
-    if exDateList:
-        print('exclude date:'+str(exDateList))
-
-    # startDate
-    if inps.startDate:
-        print('start date: '+inps.startDate)
-        yy_min = ptime.yyyymmdd2years(ptime.yyyymmdd(inps.startDate))
-        for i in range(len(dateListAll)):
-            date = dateListAll[i]
-            if yy_list_all[i] < yy_min and date not in exDateList:
-                print('  remove date: '+date)
-                exDateList.append(date)
-
-    # endDate
-    if inps.endDate:
-        print('end date: '+inps.endDate)
-        yy_max = ptime.yyyymmdd2years(ptime.yyyymmdd(inps.endDate))
-        for i in range(len(dateListAll)):
-            date = dateListAll[i]
-            if yy_list_all[i] > yy_max and date not in exDateList:
-                print('  remove date: '+date)
-                exDateList.append(date)
-    exDateList = sorted(list(set(exDateList)))
-    return exDateList
-
-
 def read_date_info(inps):
     """Read dates used in the estimation and its related info.
+
     Parameters: inps - Namespace
-    Returns:    inps - Namespace
+    Returns:    inps - Namespace, adding the following new fields:
+                       date_list - list of str, dates used for estimation
+                       dropDate  - 1D np.ndarray in bool in size of all available dates
     """
-    if inps.key == 'timeseries':
-        tsobj = timeseries(inps.timeseries_file)
-    elif inps.key == 'giantTimeseries':
-        tsobj = giantTimeseries(inps.timeseries_file)
-    elif inps.key == 'HDFEOS':
-        tsobj = HDFEOS(inps.timeseries_file)
-    tsobj.open()
-    inps.excludeDate = read_exclude_date(inps, tsobj.dateList)
+    if inps.file_type == 'timeseries':
+        ts_obj = timeseries(inps.timeseries_file)
+    elif inps.file_type == 'giantTimeseries':
+        ts_obj = giantTimeseries(inps.timeseries_file)
+    elif inps.file_type == 'HDFEOS':
+        ts_obj = HDFEOS(inps.timeseries_file)
+    else:
+        raise ValueError(f'un-recognized time series file type: {inps.file_type}')
+    ts_obj.open()
 
-    # exclude dates without obs data [for offset time-series only for now]
+    # exclude dates - user inputs
+    ex_date_list = ptime.get_exclude_date_list(
+        date_list=ts_obj.dateList,
+        start_date=inps.startDate,
+        end_date=inps.endDate,
+        exclude_date=inps.excludeDate)
+
+    # exclude dates - no obs data [for offset time-series only for now]
     if os.path.basename(inps.timeseries_file).startswith('timeseriesRg'):
-        date_list = timeseries(inps.timeseries_file).get_date_list()
         data, atr = readfile.read(inps.timeseries_file)
         flag = np.nansum(data, axis=(1,2)) == 0
-        flag[date_list.index(atr['REF_DATE'])] = 0
+        flag[ts_obj.dateList.index(atr['REF_DATE'])] = 0
         if np.sum(flag) > 0:
             print('number of empty dates to exclude: {}'.format(np.sum(flag)))
-            inps.excludeDate += np.array(date_list)[flag].tolist()
-            inps.excludeDate = sorted(list(set(inps.excludeDate)))
+            ex_date_list += np.array(ts_obj.dateList)[flag].tolist()
+            ex_date_list = sorted(list(set(ex_date_list)))
+
+    # dates used for estimation - inps.date_list
+    inps.date_list = [i for i in ts_obj.dateList if i not in ex_date_list]
 
-    # Date used for estimation inps.dateList
-    inps.dateList = [i for i in tsobj.dateList if i not in inps.excludeDate]
-    inps.numDate = len(inps.dateList)
-    inps.startDate = inps.dateList[0]
-    inps.endDate = inps.dateList[-1]
+    # flag array for ts data reading
+    inps.dropDate = np.array([i not in ex_date_list for i in ts_obj.dateList], dtype=np.bool_)
+
+    # print out msg
     print('-'*50)
-    print('dates from input file: {}\n{}'.format(tsobj.numDate, tsobj.dateList))
+    print('dates from input file: {}\n{}'.format(ts_obj.numDate, ts_obj.dateList))
     print('-'*50)
-    if len(inps.dateList) == len(tsobj.dateList):
-        print('using all dates to calculate the velocity')
+    if len(inps.date_list) == len(ts_obj.dateList):
+        print('using all dates to calculate the time function')
     else:
-        print('dates used to estimate the velocity: {}\n{}'.format(inps.numDate, inps.dateList))
+        print(f'dates used to estimate the time function: {len(inps.date_list)}\n{inps.date_list}')
     print('-'*50)
 
-    # flag array for ts data reading
-    inps.dropDate = np.array([i not in inps.excludeDate for i in tsobj.dateList], dtype=np.bool_)
-
-    # output file name
-    if not inps.outfile:
-        fbase = os.path.splitext(os.path.basename(inps.timeseries_file))[0]
-        outname = 'velocity'
-        if inps.key == 'giantTimeseries':
-            prefix = os.path.basename(inps.timeseries_file).split('PARAMS')[0]
-            outname = prefix + outname
-        elif fbase in ['timeseriesRg', 'timeseriesAz']:
-            suffix = fbase.split('timeseries')[-1]
-            outname = outname + suffix
-        outname += '.h5'
-        inps.outfile = outname
-
     return inps
 
 
-def read_inps2model(inps, date_list=None):
-    """get model info from inps"""
-    # check model date limits
-    if not date_list:
-        date_list = inps.dateList
-    dmin, dmax = date_list[0], date_list[-1]
-    ymin = ptime.yyyymmdd2years(dmin)
-    ymax = ptime.yyyymmdd2years(dmax)
-
-    if inps.step:
-        for d_step in inps.step:
-            y_step = ptime.yyyymmdd2years(d_step)
-            if not (ymin < y_step < ymax):
-                raise ValueError(f'input step date "{d_step}" exceed date list min/max: {dmin}, {dmax}')
-
-    if inps.expDict:
-        for d_onset in inps.expDict.keys():
-            y_onset = ptime.yyyymmdd2years(d_onset)
-            if y_onset >= ymax:
-                raise ValueError(f'input exp onset date "{d_onset}" >= the last date: {dmax}')
-
-    if inps.logDict:
-        for d_onset in inps.logDict.keys():
-            y_onset = ptime.yyyymmdd2years(d_onset)
-            if y_onset >= ymax:
-                raise ValueError(f'input log onset date "{d_onset}" >= the last date: {dmax}')
-
-    model = dict()
-    model['polynomial'] = inps.polynomial
-    model['periodic']   = inps.periodic
-    model['step']       = inps.step
-    model['exp']        = inps.expDict
-    model['log']        = inps.logDict
-
-    # msg
-    print('estimate deformation model with the following assumed time functions:')
-    for key, value in model.items():
-        print('    {:<10} : {}'.format(key, value))
-
-    if 'polynomial' not in model.keys():
-        raise ValueError('linear/polynomial model is NOT included! Are you sure?!')
-
-    # number of parameters
-    num_param = (
-        model['polynomial'] + 1
-        + len(model['periodic']) * 2
-        + len(model['step'])
-        + sum([len(val) for key, val in model['exp'].items()])
-        + sum([len(val) for key, val in model['log'].items()])
-    )
-
-    return model, num_param
-
-
-############################################################################
 def run_timeseries2time_func(inps):
 
-    # basic info
+    # basic file info
     atr = readfile.read_attribute(inps.timeseries_file)
     length, width = int(atr['LENGTH']), int(atr['WIDTH'])
-    num_date = inps.numDate
-    dates = np.array(inps.dateList)
+
+    # read date info
+    inps = read_date_info(inps)
+    num_date = len(inps.date_list)
+    dates = np.array(inps.date_list)
     seconds = atr.get('CENTER_LINE_UTC', 0)
 
     # use the 1st date as reference if not found, e.g. timeseriesResidual.h5 file
     if "REF_DATE" not in atr.keys() and not inps.ref_date:
-        inps.ref_date = inps.dateList[0]
+        inps.ref_date = inps.date_list[0]
         print('WARNING: No REF_DATE found in time-series file or input in command line.')
-        print('  Set "--ref-date {}" and continue.'.format(inps.dateList[0]))
+        print('  Set "--ref-date {}" and continue.'.format(inps.date_list[0]))
 
-    # get deformation model from parsers
-    model, num_param = read_inps2model(inps)
+    # get deformation model from inputs
+    model = time_func.inps2model(inps, date_list=inps.date_list)
+    num_param = time_func.get_num_param(model)
 
 
     ## output preparation
 
     # time_func_param: attributes
+    date0, date1 = inps.date_list[0], inps.date_list[-1]
     atrV = dict(atr)
     atrV['FILE_TYPE'] = 'velocity'
     atrV['UNIT'] = 'm/year'
-    atrV['START_DATE'] = inps.dateList[0]
-    atrV['END_DATE'] = inps.dateList[-1]
-    atrV['DATE12'] = '{}_{}'.format(inps.dateList[0], inps.dateList[-1])
+    atrV['START_DATE'] = date0
+    atrV['END_DATE'] = date1
+    atrV['DATE12'] = f'{date0}_{date1}'
     if inps.ref_yx:
         atrV['REF_Y'] = inps.ref_yx[0]
         atrV['REF_X'] = inps.ref_yx[1]
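
read_date_info() now exposes the exclusion result as a boolean dropDate flag over all acquisitions, which downstream code uses to index the time-series stack. A toy sketch of the flag construction:

    import numpy as np

    date_list_all = ['20200101', '20200113', '20200125']
    ex_date_list = ['20200113']
    # True for dates kept in the estimation, False for excluded ones
    drop_date = np.array([d not in ex_date_list for d in date_list_all],
                         dtype=np.bool_)
    # ts_data[drop_date, :, :] then selects only the kept acquisitions
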
@@ -482,19 +379,19 @@ def run_timeseries2time_func(inps):
             if key in atrR.keys():
                 atrR.pop(key)
         # prepare ds_name_dict manually, instead of using ref_file, to support --ex option
-        date_len = len(inps.dateList[0])
+        date_digit = len(inps.date_list[0])
         ds_name_dict = {
-            "date"       : [np.dtype(f'S{date_len}'), (num_date,), np.array(inps.dateList, dtype=np.string_)],
-            "timeseries" : [np.float32,               (num_date, length, width), None]
+            "date" : [np.dtype(f'S{date_digit}'), (num_date,), np.array(inps.date_list, np.string_)],
+            "timeseries" : [np.float32, (num_date, length, width), None]
         }
         writefile.layout_hdf5(inps.res_file, ds_name_dict=ds_name_dict, metadata=atrR)
-    
+
 
     ## estimation
 
     # calc number of box based on memory limit
     memoryAll = (num_date + num_param * 2 + 2) * length * width * 4
-    if inps.bootstrap:
+    if inps.uncertaintyQuantification == 'bootstrap':
         memoryAll += inps.bootstrapCount * num_param * length * width * 4
     num_box = int(np.ceil(memoryAll * 3 / (inps.maxMemory * 1024**3)))
     box_list = cluster.split_box2sub_boxes(box=(0, 0, width, length),
@@ -524,7 +421,7 @@ def run_timeseries2time_func(inps):
         # for file w/o reference info. e.g. ERA5.h5
         if inps.ref_date:
             print('referencing to date: {}'.format(inps.ref_date))
-            ref_ind = inps.dateList.index(inps.ref_date)
+            ref_ind = inps.date_list.index(inps.ref_date)
             ts_data -= np.tile(ts_data[ref_ind, :, :], (ts_data.shape[0], 1, 1))
 
         if inps.ref_yx:
@@ -534,26 +431,26 @@ def run_timeseries2time_func(inps):
             ts_data -= np.tile(ref_val.reshape(ts_data.shape[0], 1, 1),
                                (1, ts_data.shape[1], ts_data.shape[2]))
 
-        ts_data = ts_data[inps.dropDate, :, :].reshape(inps.numDate, -1)
+        ts_data = ts_data[inps.dropDate, :, :].reshape(num_date, -1)
         if atrV['UNIT'] == 'mm':
             ts_data *= 1./1000.
 
         ts_cov = None
-        if inps.ts_cov_file:
-            print(f'reading time-series covariance matrix from file {inps.ts_cov_file} ...')
-            ts_cov = readfile.read(inps.ts_cov_file, box=box)[0]
+        if inps.uncertaintyQuantification == 'covariance':
+            print(f'reading time-series covariance matrix from file {inps.timeSeriesCovFile} ...')
+            ts_cov = readfile.read(inps.timeSeriesCovFile, box=box)[0]
             if len(ts_cov.shape) == 4:
                 # full covariance matrix in 4D --> 3D
-                if inps.numDate < ts_cov.shape[0]:
+                if num_date < ts_cov.shape[0]:
                     ts_cov = ts_cov[inps.dropDate, :, :, :]
                     ts_cov = ts_cov[:, inps.dropDate, :, :]
-                ts_cov = ts_cov.reshape(inps.numDate, inps.numDate, -1)
+                ts_cov = ts_cov.reshape(num_date, num_date, -1)
 
             elif len(ts_cov.shape) == 3:
                 # diaginal variance matrix in 3D --> 2D
-                if inps.numDate < ts_cov.shape[0]:
+                if num_date < ts_cov.shape[0]:
                     ts_cov = ts_cov[inps.dropDate, :, :]
-                ts_cov = ts_cov.reshape(inps.numDate, -1)
+                ts_cov = ts_cov.reshape(num_date, -1)
 
             ## set zero value to a fixed small value to avoid divide by zero
             #epsilon = 1e-5
@@ -585,7 +482,7 @@ def run_timeseries2time_func(inps):
         ### estimation / solve Gm = d
         print('estimating time functions via linalg.lstsq ...')
 
-        if inps.bootstrap:
+        if inps.uncertaintyQuantification == 'bootstrap':
             ## option 1 - least squares with bootstrapping
             # Bootstrapping is a resampling method which can be used to estimate properties
             # of an estimator. The method relies on independently sampling the data set with
@@ -599,7 +496,7 @@ def run_timeseries2time_func(inps):
             prog_bar = ptime.progressBar(maxValue=inps.bootstrapCount)
             for i in range(inps.bootstrapCount):
                 # bootstrap resampling
-                boot_ind = rng.choice(inps.numDate, size=inps.numDate, replace=True)
+                boot_ind = rng.choice(num_date, size=num_date, replace=True)
                 boot_ind.sort()
 
                 # estimation
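
Each bootstrap iteration resamples the acquisition dates with replacement and refits the model; the spread of the refit parameters across iterations serves as their STD. A self-contained sketch of one iteration with synthetic G and d:

    import numpy as np

    rng = np.random.default_rng(0)
    num_date = 20
    G = np.stack([np.ones(num_date), np.arange(num_date)], axis=1)
    d = G @ np.array([0.01, 0.002]) + rng.normal(0, 0.005, num_date)
    # resample dates with replacement, then refit via least squares
    boot_ind = rng.choice(num_date, size=num_date, replace=True)
    boot_ind.sort()
    m_boot = np.linalg.lstsq(G[boot_ind], d[boot_ind], rcond=None)[0]
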
@@ -619,43 +516,46 @@ def run_timeseries2time_func(inps):
             del m_boot
 
             # get design matrix to calculate the residual time series
-            G = time_func.get_design_matrix4time_func(inps.dateList, model=model, ref_date=inps.ref_date, seconds=seconds)
+            G = time_func.get_design_matrix4time_func(inps.date_list, model=model, ref_date=inps.ref_date, seconds=seconds)
 
 
         else:
             ## option 2 - least squares with uncertainty propagation
             G, m[:, mask], e2 = time_func.estimate_time_func(
                 model=model,
-                date_list=inps.dateList,
+                date_list=inps.date_list,
                 dis_ts=ts_data,
                 seconds=seconds)
             #del ts_data
 
             ## Compute the covariance matrix for model parameters:
-            #       G * m = d
-            #     C_m_hat = G+ * C_d * G+.T
+            #       G * m = d                                       (1)
+            #       m_hat = G+ * d                                  (2)
+            #     C_m_hat = G+ * C_d * G+.T                         (3)
             #
-            # For ordinary least squares estimation:
-            #     G+ = (G.T * G)^-1 * G.T                       (option 2.1)
+            # [option 2.1] For weighted least squares estimation:
+            #          G+ = (G.T * C_d^-1 * G)^-1 * G.T * C_d^-1    (4)
+            # =>  C_m_hat = (G.T * C_d^-1 * G)^-1                   (5)
             #
-            # For weighted least squares estimation:
-            #          G+ = (G.T * C_d^-1 * G)^-1 * G.T * C_d^-1
-            # =>  C_m_hat = (G.T * C_d^-1 * G)^-1               (option 2.2)
+            # [option 2.2] For ordinary least squares estimation:
+            #          G+ = (G.T * G)^-1 * G.T                      (6)
+            #     C_m_hat = G+ * C_d * G+.T                         (7)
             #
-            # Assuming normality of the observation errors (in the time domain) with a variance of sigma^2
-            # we have C_d = sigma^2 * I, then the above equation is simplfied into:
-            #     C_m_hat = sigma^2 * (G.T * G)^-1              (option 2.3)
+            # [option 2.3] Assuming normality of the observation errors (in the time domain) with
+            # the variance of sigma^2, we have C_d = sigma^2 * I, then eq. (3) is simplified into:
+            #     C_m_hat = sigma^2 * (G.T * G)^-1                  (8)
             #
-            # Based on the law of integrated expectation, we estimate the obs sigma^2 using
+            # Using the law of integrated expectation, we estimate the obs sigma^2 using
             # the OLS estimation residual as:
-            #           e_hat = d - d_hat
-            # =>  sigma_hat^2 = (e_hat.T * e_hat) / N
-            # =>      sigma^2 = sigma_hat^2 * N / (N - P)       (option 2.4)
-            #                 = (e_hat.T * e_hat) / (N - P)
-            # which is the equation (10) from Fattahi and Amelung (2015, JGR)
+            #           e_hat = d - d_hat                           (9)
+            # =>  sigma_hat^2 = (e_hat.T * e_hat) / N               (10)
+            # =>      sigma^2 = sigma_hat^2 * N / (N - P)           (11)
+            #                 = (e_hat.T * e_hat) / (N - P)         (12)
+            # 
+            # Eq. (10) in Fattahi & Amelung (2015, JGR) is a simplified form of eq. (12) for linear velocity.
 
-            if ts_cov is not None:
-                # option 2.1 - linear propagation from time-series (co)variance matrix
+            if inps.uncertaintyQuantification == 'covariance':
+                # option 2.2 - linear propagation from time-series (co)variance matrix
                 # TO DO: save the full covariance matrix of the time function parameters
                 # only the STD is saved right now
                 covar_flag = True if len(ts_cov.shape) == 3 else False
@@ -681,15 +581,15 @@ def run_timeseries2time_func(inps):
                     prog_bar.update(i+1, every=200, suffix='{}/{} pixels'.format(i+1, num_pixel2inv))
                 prog_bar.close()
 
-            else:
+            elif inps.uncertaintyQuantification == 'residue':
                 # option 2.3 - assume obs errors following normal dist. in time
                 print('estimating time function STD from time-series fitting residual ...')
                 G_inv = linalg.inv(np.dot(G.T, G))
                 m_var = e2.reshape(1, -1) / (num_date - num_param)
                 m_std[:, mask] = np.sqrt(np.dot(np.diag(G_inv).reshape(-1, 1), m_var))
 
-                # option 2.4 - simplified form for linear velocity (without matrix linear algebra)
-                # The STD can also be calculated using Eq. (10) from Fattahi and Amelung (2015, JGR)
+                # simplified form for linear velocity (without matrix linear algebra)
+                # equation (10) in Fattahi & Amelung (2015, JGR)
                 # ts_diff = ts_data - np.dot(G, m)
                 # t_diff = G[:, 1] - np.mean(G[:, 1])
                 # vel_std = np.sqrt(np.sum(ts_diff ** 2, axis=0) / np.sum(t_diff ** 2)  / (num_date - 2))
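
A numerical sketch of option 2.3 (eqs. 8 and 12) with synthetic data: fit by ordinary least squares, scale the residual sum of squares by N - P, and propagate through diag((G.T G)^-1):

    import numpy as np

    rng = np.random.default_rng(0)
    t = np.linspace(0, 5, 30)                    # decimal years
    G = np.stack([np.ones_like(t), t], axis=1)   # [offset, velocity]
    d = 0.002 * t + rng.normal(0, 0.005, t.size)
    m, e2 = np.linalg.lstsq(G, d, rcond=None)[:2]
    num_date, num_param = G.shape
    m_var = e2 / (num_date - num_param)          # sigma^2 = e'e / (N - P)
    m_std = np.sqrt(np.diag(np.linalg.inv(G.T @ G)) * m_var)
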
@@ -738,7 +638,7 @@ def model2hdf5_dataset(model, m=None, m_
     # deformation model info
     poly_deg   = model['polynomial']
     num_period = len(model['periodic'])
-    num_step   = len(model['step'])
+    num_step   = len(model['stepDate'])
     num_exp    = sum([len(val) for key, val in model['exp'].items()])
 
     # init output
@@ -793,7 +693,7 @@ def model2hdf5_dataset(model, m=None, m_
         elif period == 0.5:
             dsNames = [f'semiAnnual{x}' for x in dsNameSuffixes]
         else:
-            dsNames = [f'periodY{period}{x}' for x in dsNameSuffixes]
+            dsNames = [f'period{period}Y{x}' for x in dsNameSuffixes]
 
         # calculate the amplitude and phase of the periodic signal
         # following equation (9-10) in Minchew et al. (2017, JGR)
@@ -804,7 +704,8 @@ def model2hdf5_dataset(model, m=None, m_
             period_pha = np.zeros(num_pixel, dtype=dataType)
             # avoid divided by zero warning
             if not np.all(coef_sin[mask] == 0):
-                period_pha[mask] = np.arctan(coef_cos[mask] / coef_sin[mask])
+                # use atan2, instead of atan, to get phase within [-pi, pi]
+                period_pha[mask] = np.arctan2(coef_cos[mask], coef_sin[mask])
 
             # assign ds_dict
             for dsName, data in zip(dsNames, [period_amp, period_pha]):
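
The arctan2 change matters because recovering the phase of A*sin(wt) + B*cos(wt) = C*sin(wt + phi) requires the full (-pi, pi] range; arctan folds the result into (-pi/2, pi/2) and loses the quadrant. A toy check:

    import numpy as np

    coef_sin, coef_cos = -1.0, 1.0            # A, B
    amp = np.hypot(coef_sin, coef_cos)        # C = sqrt(A^2 + B^2)
    pha = np.arctan2(coef_cos, coef_sin)      # 3*pi/4; arctan gives -pi/4
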
@@ -820,7 +721,7 @@ def model2hdf5_dataset(model, m=None, m_
     p0 = (poly_deg + 1) + (2 * num_period)
     for i in range(num_step):
         # dataset name
-        dsName = 'step{}'.format(model['step'][i])
+        dsName = 'step{}'.format(model['stepDate'][i])
 
         # assign ds_dict
         if m is not None:
@@ -839,7 +740,7 @@ def model2hdf5_dataset(model, m=None, m_
     for exp_onset in model['exp'].keys():
         for exp_tau in model['exp'][exp_onset]:
             # dataset name
-            dsName = 'exp{}Tau{}'.format(exp_onset, exp_tau)
+            dsName = 'exp{}Tau{}D'.format(exp_onset, exp_tau)
 
             # assign ds_dict
             if m is not None:
@@ -861,7 +762,7 @@ def model2hdf5_dataset(model, m=None, m_
     for log_onset in model['log'].keys():
         for log_tau in model['log'][log_onset]:
             # dataset name
-            dsName = 'log{}Tau{}'.format(log_onset, log_tau)
+            dsName = 'log{}Tau{}D'.format(log_onset, log_tau)
 
             # assign ds_dict
             if m is not None:
@@ -885,19 +786,17 @@ def main(iargs=None):
     inps = cmd_line_parse(iargs)
     start_time = time.time()
 
-    inps = read_date_info(inps)
-
     # --update option
     if inps.update_mode and run_or_skip(inps) == 'skip':
         return inps.outfile
 
+    # run
     run_timeseries2time_func(inps)
 
     # time info
     m, s = divmod(time.time()-start_time, 60)
     print('time used: {:02.0f} mins {:02.1f} secs.'.format(m, s))
-
-    return inps.outfile
+    return
 
 
 ############################################################################
diff -pruN 1.3.3-2/mintpy/timeseries_rms.py 1.4.0-1/mintpy/timeseries_rms.py
--- 1.3.3-2/mintpy/timeseries_rms.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/timeseries_rms.py	2022-08-04 20:01:49.000000000 +0000
@@ -8,34 +8,39 @@
 
 import os
 import sys
-import argparse
 import numpy as np
 import matplotlib.pyplot as plt
 from mpl_toolkits.axes_grid1 import make_axes_locatable
 from mintpy.defaults.template import get_template_content
 from mintpy.utils import readfile, ptime, utils as ut, plot as pp
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 ######################################################################################################
 TEMPLATE = get_template_content('residual_RMS')
 
+REFERENCE="""reference:
+  Yunjun, Z., Fattahi, H. and Amelung, F. (2019), Small baseline InSAR time series analysis:
+    Unwrapping error correction and noise reduction, Computers & Geosciences, 133, 104331,
+    doi:10.1016/j.cageo.2019.104331.
+  Rousseeuw, P. J., and M. Hubert (2011), Robust statistics for outlier detection,
+    Wiley Interdisciplinary Reviews: Data Mining and Knowledge Discovery, 1(1),
+    73-79, doi:10.1002/widm.2.
+"""
+
 EXAMPLE = """example:
   timeseries_rms.py  timeseriesResidual.h5
   timeseries_rms.py  timeseriesResidual.h5  --template smallbaselineApp.cfg
   timeseries_rms.py  timeseriesResidual.h5  -m maskTempCoh.h5  --cutoff 3
 """
 
-REFERENCE="""reference:
-Rousseeuw, P. J., and M. Hubert (2011), Robust statistics for outlier detection,
-    Wiley Interdisciplinary Reviews: Data Mining and Knowledge Discovery, 1(1),
-    73-79, doi:doi:10.1002/widm.2.
-"""
-
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Calculate Root Mean Square (RMS) of deramped residual phase time-series.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=TEMPLATE+'\n'+EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Calculate Root Mean Square (RMS) of deramped residual phase time-series.'
+    epilog = TEMPLATE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('timeseries_file', help='Timeseries file')
     parser.add_argument('-t', '--template', dest='template_file',
diff -pruN 1.3.3-2/mintpy/tropo_gacos.py 1.4.0-1/mintpy/tropo_gacos.py
--- 1.3.3-2/mintpy/tropo_gacos.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/tropo_gacos.py	2022-08-04 20:01:49.000000000 +0000
@@ -9,7 +9,6 @@
 import os
 import sys
 import re
-import argparse
 import h5py
 import numpy as np
 from skimage.transform import resize
@@ -17,7 +16,7 @@ from scipy.interpolate import RegularGri
 
 from mintpy.objects import timeseries
 from mintpy.utils import ptime, readfile, writefile, utils as ut
-
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 ############################################################################
@@ -46,10 +45,12 @@ EXAMPLE = """example:
 """
 
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Tropospheric correction using GACOS (http://www.gacos.net) delays\n',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=REFERENCE+'\n'+DIR_DEMO+'\n'+EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Tropospheric correction using GACOS (http://www.gacos.net) delays'
+    epilog = REFERENCE + '\n' + DIR_DEMO + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('-f', '--file', dest='dis_file', required=True,
                         help='timeseries HDF5 file, i.e. timeseries.h5')
@@ -358,22 +359,25 @@ def main(iargs=None):
     inps = cmd_line_parse(iargs)
 
     # calculate tropo delay and save to h5 file
-    calculate_delay_timeseries(tropo_file=inps.tropo_file,
-                               dis_file=inps.dis_file,
-                               geom_file=inps.geom_file,
-                               GACOS_dir=inps.GACOS_dir)
+    calculate_delay_timeseries(
+        tropo_file=inps.tropo_file,
+        dis_file=inps.dis_file,
+        geom_file=inps.geom_file,
+        GACOS_dir=inps.GACOS_dir)
 
     # correct tropo delay from dis time-series
     ftype = readfile.read_attribute(inps.dis_file)['FILE_TYPE']
     if ftype == 'timeseries':
-        correct_timeseries(dis_file=inps.dis_file,
-                           tropo_file=inps.tropo_file,
-                           cor_dis_file=inps.cor_dis_file)
+        correct_timeseries(
+            dis_file=inps.dis_file,
+            tropo_file=inps.tropo_file,
+            cor_dis_file=inps.cor_dis_file)
 
     elif ftype == '.unw':
-        correct_single_ifgram(dis_file=inps.dis_file,
-                              tropo_file=inps.tropo_file,
-                              cor_dis_file=inps.cor_dis_file)
+        correct_single_ifgram(
+            dis_file=inps.dis_file,
+            tropo_file=inps.tropo_file,
+            cor_dis_file=inps.cor_dis_file)
     else:
         print('input file {} is not timeseries nor .unw, correction is not supported yet.'.format(ftype))
 
diff -pruN 1.3.3-2/mintpy/tropo_phase_elevation.py 1.4.0-1/mintpy/tropo_phase_elevation.py
--- 1.3.3-2/mintpy/tropo_phase_elevation.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/tropo_phase_elevation.py	2022-08-04 20:01:49.000000000 +0000
@@ -10,29 +10,32 @@ import os
 import sys
 import argparse
 import numpy as np
+
 from mintpy.objects import timeseries
 from mintpy.utils import readfile, writefile
+from mintpy.utils.arg_utils import create_argument_parser
 from mintpy.multilook import multilook_data
 from mintpy.mask import mask_matrix
 
 
 ############################################################################
-EXAMPLE = """example:
-  tropo_phase_elevation.py  timeseries_demErr.h5      -g inputs/geometryRadar.h5  -m maskTempCoh.h5    
-  tropo_phase_elevation.py  geo_timeseries_demErr.h5  -g geo_geometryRadar.h5     -m geo_maskTempCoh.h5
-"""
-
 REFERENCE = """reference:
   Doin, M. P., C. Lasserre, G. Peltzer, O. Cavalie, and C. Doubre (2009), Corrections of stratified 
   tropospheric delays in SAR interferometry: Validation with global atmospheric models, J App. Geophy.,
   69(1), 35-50, doi:http://dx.doi.org/10.1016/j.jappgeo.2009.03.010.
 """
 
+EXAMPLE = """example:
+  tropo_phase_elevation.py  timeseries_demErr.h5      -g inputs/geometryRadar.h5  -m maskTempCoh.h5    
+  tropo_phase_elevation.py  geo_timeseries_demErr.h5  -g geo_geometryRadar.h5     -m geo_maskTempCoh.h5
+"""
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Correct Topo-correlated Stratified tropospheric delay',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=REFERENCE+'\n'+EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Correct Topo-correlated Stratified tropospheric delay'
+    epilog = REFERENCE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('timeseries_file', help='time-series file to be corrected')
     parser.add_argument('-g', '--geometry', dest='geom_file', required=True,
@@ -220,12 +223,13 @@ def main(iargs=None):
     ts_data[mask] = 0.
 
     # write time-series file
-    metadata = dict(obj.metadata)
-    metadata['mintpy.troposphericDelay.polyOrder'] = str(inps.poly_order)
+    meta = dict(obj.metadata)
+    meta['mintpy.troposphericDelay.polyOrder'] = str(inps.poly_order)
     if not inps.outfile:
         inps.outfile = '{}_tropHgt.h5'.format(os.path.splitext(inps.timeseries_file)[0])
-    writefile.write(ts_data, out_file=inps.outfile, metadata=metadata, ref_file=inps.timeseries_file)
-    return inps.outfile
+    writefile.write(ts_data, out_file=inps.outfile, metadata=meta, ref_file=inps.timeseries_file)
+
+    return
 
 
 ############################################################################
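
The default output name in the hunk above is derived from the input file by appending a suffix before the extension. A small sketch of that convention (the suffix value is taken from the code above; the helper name is illustrative):

    import os

    def default_out_name(in_file, suffix='tropHgt'):
        # timeseries_demErr.h5 -> timeseries_demErr_tropHgt.h5
        fbase = os.path.splitext(in_file)[0]
        return f'{fbase}_{suffix}.h5'

    print(default_out_name('timeseries_demErr.h5'))
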
diff -pruN 1.3.3-2/mintpy/tropo_pyaps3.py 1.4.0-1/mintpy/tropo_pyaps3.py
--- 1.3.3-2/mintpy/tropo_pyaps3.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/tropo_pyaps3.py	2022-08-04 20:01:49.000000000 +0000
@@ -10,11 +10,12 @@ import os
 import sys
 import re
 from configparser import ConfigParser
-import argparse
 import h5py
 import numpy as np
+
 from mintpy.objects import timeseries, geometry
 from mintpy.utils import ptime, readfile, writefile, utils as ut
+from mintpy.utils.arg_utils import create_argument_parser
 
 try:
     import pyaps3 as pa
@@ -30,6 +31,21 @@ WEATHER_MODEL_HOURS = {
 
 
 ###############################################################
+REFERENCE = """reference:
+  Jolivet, R., R. Grandin, C. Lasserre, M.-P. Doin and G. Peltzer (2011), Systematic InSAR tropospheric
+  phase delay corrections from global meteorological reanalysis data, Geophys. Res. Lett., 38, L17311,
+  doi:10.1029/2011GL048757
+
+  Jolivet, R., P. S. Agram, N. Y. Lin, M. Simons, M. P. Doin, G. Peltzer, and Z. Li (2014), Improving
+  InSAR geodesy using global atmospheric models, Journal of Geophysical Research: Solid Earth, 119(3),
+  2324-2341, doi:10.1002/2013JB010588.
+
+  # ERA5
+  Hersbach, H., Bell, B., Berrisford, P., Hirahara, S., Horányi, A., Muñoz-Sabater, J., et al. (2020). 
+  The ERA5 global reanalysis. Quarterly Journal of the Royal Meteorological Society, 146(730), 1999–2049.
+  https://doi.org/10.1002/qj.3803
+"""
+
 EXAMPLE = """example:
   # download datasets, calculate tropospheric delays and correct time-series file.
   tropo_pyaps3.py -f timeseries.h5 -g inputs/geometryRadar.h5
@@ -52,21 +68,6 @@ SAFE_FILE = """SAFE_files.txt:
     ...
 """
 
-REFERENCE = """reference:
-  Jolivet, R., R. Grandin, C. Lasserre, M.-P. Doin and G. Peltzer (2011), Systematic InSAR tropospheric
-  phase delay corrections from global meteorological reanalysis data, Geophys. Res. Lett., 38, L17311,
-  doi:10.1029/2011GL048757
-
-  Jolivet, R., P. S. Agram, N. Y. Lin, M. Simons, M. P. Doin, G. Peltzer, and Z. Li (2014), Improving
-  InSAR geodesy using global atmospheric models, Journal of Geophysical Research: Solid Earth, 119(3),
-  2324-2341, doi:10.1002/2013JB010588.
-
-  # ERA5
-  Hersbach, H., Bell, B., Berrisford, P., Hirahara, S., Horányi, A., Muñoz-Sabater, J., et al. (2020). 
-  The ERA5 global reanalysis. Quarterly Journal of the Royal Meteorological Society, 146(730), 1999–2049.
-  https://doi.org/10.1002/qj.3803
-"""
-
 DATA_INFO = """Global Atmospheric Models:
   re-analysis_dataset      coverage  temp_resolution  spatial_resolution       latency       assimilation
   --------------------------------------------------------------------------------------------------------
@@ -94,11 +95,12 @@ atmosphere/
 """
 
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Tropospheric correction using weather models\n' +
-                                     '  PyAPS is used to download and calculate the delay for each acquisition.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=REFERENCE+'\n'+DATA_INFO+'\n'+EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Tropospheric correction using weather models via PyAPS'
+    epilog = REFERENCE + '\n' + DATA_INFO + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('-f', '--file', dest='dis_file',
                         help='timeseries HDF5 file, i.e. timeseries.h5')
@@ -556,7 +558,7 @@ def check_pyaps_account_config(tropo_mod
         for opt in SECTION_OPTS[section]:
             val = cfg.get(section, opt)
             if not val or val in default_values:
-                raise ValueError('PYAPS: No account info found for {} in {} section in file: {}'.format(tropo_model, section, cfg_file))
+                raise ValueError(f'PYAPS: No account info found for {tropo_model} in {section} section in file: {cfg_file}')
 
     return
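
check_pyaps_account_config() above validates the PyAPS account file with configparser, rejecting options that are empty or still set to placeholder values. A minimal sketch of that pattern; the section/option names, the placeholder list, and the helper name here are illustrative rather than the real model.cfg contents:

    from configparser import ConfigParser

    def check_account(cfg_file, section, opts, default_values=('', 'your-key')):
        cfg = ConfigParser()
        cfg.read(cfg_file)
        for opt in opts:
            val = cfg.get(section, opt)
            if not val or val in default_values:
                raise ValueError(f'PYAPS: No account info found for {section} '
                                 f'section in file: {cfg_file}')

    # e.g. check_account('~/.cdsapirc-like.cfg', 'CDS', ['key'])  # illustrative
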
 
@@ -837,9 +839,10 @@ def main(iargs=None):
     get_grib_info(inps)
 
     # download
-    inps.grib_files = dload_grib_files(inps.grib_files, 
-                                       tropo_model=inps.tropo_model,
-                                       snwe=inps.snwe)
+    inps.grib_files = dload_grib_files(
+        inps.grib_files, 
+        tropo_model=inps.tropo_model,
+        snwe=inps.snwe)
 
     # calculate tropo delay and save to h5 file
     if inps.geom_file:
@@ -852,21 +855,22 @@ def main(iargs=None):
     if inps.dis_file:
         ftype = inps.atr['FILE_TYPE']
         if ftype == 'timeseries':
-            correct_timeseries(dis_file=inps.dis_file,
-                               tropo_file=inps.tropo_file,
-                               cor_dis_file=inps.cor_dis_file)
+            correct_timeseries(
+                dis_file=inps.dis_file,
+                tropo_file=inps.tropo_file,
+                cor_dis_file=inps.cor_dis_file)
 
         elif ftype == '.unw':
-            correct_single_ifgram(dis_file=inps.dis_file,
-                                  tropo_file=inps.tropo_file,
-                                  cor_dis_file=inps.cor_dis_file)
+            correct_single_ifgram(
+                dis_file=inps.dis_file,
+                tropo_file=inps.tropo_file,
+                cor_dis_file=inps.cor_dis_file)
         else:
             print('input file {} is not timeseries nor .unw, correction is not supported yet.'.format(ftype))
 
     else:
         print('No input displacement file, skip correcting tropospheric delays.')
 
-    
     return
 
 ###############################################################
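
Since main(iargs=None) takes an explicit argument list, the corrected workflow can also be driven from Python, mirroring the first CLI example above:

    from mintpy import tropo_pyaps3

    # equivalent to: tropo_pyaps3.py -f timeseries.h5 -g inputs/geometryRadar.h5
    tropo_pyaps3.main(['-f', 'timeseries.h5', '-g', 'inputs/geometryRadar.h5'])
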
diff -pruN 1.3.3-2/mintpy/tropo_pyaps.py 1.4.0-1/mintpy/tropo_pyaps.py
--- 1.3.3-2/mintpy/tropo_pyaps.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/tropo_pyaps.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,522 +0,0 @@
-#!/usr/bin/env python
-############################################################
-# Program is part of MintPy                                #
-# Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi         #
-# Author: Heresh Fattahi, Zhang Yunjun, 2015               #
-############################################################
-
-
-import os
-import re
-import sys
-import subprocess
-try:
-    import pyaps as pa
-except ImportError:
-    raise ImportError('Cannot import pyaps!')
-
-import argparse
-import numpy as np
-from mintpy.objects import timeseries, geometry
-from mintpy.utils import readfile, writefile, ptime, utils as ut
-
-standardWeatherModelNames = {
-    'ERAI': 'ECMWF', 'ERAINT': 'ECMWF', 'ERAINTERIM': 'ECMWF',
-    'MERRA2': 'MERRA',
-}
-
-
-###############################################################
-EXAMPLE = """example:
-  # download reanalysys dataset, calculate tropospheric delays and correct time-series file.
-  tropo_pyaps.py -f timeseries.h5 -m ECMWF -g inputs/geometryRadar.h5 -w ${WEATHER_DIR}
-
-  # download reanalysys dataset, calculate tropospheric delays
-  tropo_pyaps.py -d date_list.txt     --hour 12 -m ECMWF -g inputs/geometryRadar.h5 --ref-yx 30 40
-  tropo_pyaps.py -d 20151002 20151003 --hour 12 -m MERRA -g inputs/geometryRadar.h5 --ref-yx 30 40
-
-  # download reanalysys dataset
-  tropo_pyaps.py -d date_list.txt     --hour 12 -m ECMWF
-"""
-
-REFERENCE = """reference:
-  Jolivet, R., R. Grandin, C. Lasserre, M.-P. Doin and G. Peltzer (2011), Systematic InSAR tropospheric
-  phase delay corrections from global meteorological reanalysis data, Geophys. Res. Lett., 38, L17311,
-  doi:10.1029/2011GL048757
-
-  Jolivet, R., P. S. Agram, N. Y. Lin, M. Simons, M. P. Doin, G. Peltzer, and Z. Li (2014), Improving
-  InSAR geodesy using global atmospheric models, Journal of Geophysical Research: Solid Earth, 119(3),
-  2324-2341.
-"""
-
-DATA_INFO = """
-  re-analysis_dataset        coverage   temporal_resolution    spatial_resolution      latency     analysis
-------------------------------------------------------------------------------------------------------------
-ERA-Interim (by ECMWF)        Global      00/06/12/18 UTC      0.75 deg (~83 km)       2-month      4D-var
-MERRA(2) (by NASA Goddard)    Global      00/06/12/18 UTC      0.5*0.625 (~50 km)     2-3 weeks     3D-var
-
-To download MERRA2, you need an Earthdata account, and pre-authorize the "NASA GESDISC DATA ARCHIVE" application
-    following https://disc.gsfc.nasa.gov/earthdata-login.
-"""
-
-WEATHER_DIR_DEMO = """--weather-dir ~/data/aux
-WEATHER/
-    /ECMWF
-        ERA-Int_20030329_06.grb
-        ERA-Int_20030503_06.grb
-    /MERRA
-        merra-20110126-06.nc4
-        merra-20110313-06.nc4
-"""
-
-
-def create_parser():
-    parser = argparse.ArgumentParser(description='Tropospheric correction using weather models\n' +
-                                     '  PyAPS is used to download and calculate the delay for each time-series epoch.',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=REFERENCE+'\n'+DATA_INFO+'\n'+EXAMPLE)
-    # For data download
-    parser.add_argument('-m', '--model', '-s', dest='trop_model', default='ECMWF',
-                        choices={'ECMWF', 'MERRA', 'NARR', 'ERA', 'MERRA1'},
-                        help='source of the atmospheric data.\nNARR is working for 1979-Jan to 2014-Oct.')
-    parser.add_argument('-d', '--date-list', dest='date_list', nargs='*',
-                        help='Read the first column of text file as list of date to download data\n' +
-                             'in YYYYMMDD or YYMMDD format')
-    parser.add_argument('--hour', help='time of data in HH, e.g. 12, 06')
-    parser.add_argument('-w', '--dir', '--weather-dir', dest='weather_dir', default='${WEATHER_DIR}',
-                        help='parent directory of downloaded weather data file. Default: ${WEATHER_DIR}\n' +
-                             'e.g.: '+WEATHER_DIR_DEMO)
-
-    # For delay calculation
-    parser.add_argument('-g','--geomtry', dest='geom_file', type=str,
-                        help='geometry file including height, incidenceAngle and/or latitude and longitude')
-    parser.add_argument('--ref-yx', dest='ref_yx', type=int,
-                        nargs=2, help='reference pixel in y/x')
-    parser.add_argument('--delay', dest='delay_type', default='comb', choices={'comb', 'dry', 'wet'},
-                        help='Delay type to calculate, comb contains both wet and dry delays')
-
-    # For delay correction
-    parser.add_argument('-f', '--file', dest='timeseries_file',
-                        help='timeseries HDF5 file, i.e. timeseries.h5')
-    parser.add_argument('-o', dest='outfile',
-                        help='Output file name for trospheric corrected timeseries.')
-    return parser
-
-
-def cmd_line_parse(iargs=None):
-    """Command line parser."""
-    parser = create_parser()
-    inps = parser.parse_args(args=iargs)
-
-    # check the input requirements
-    key_list = ['date_list', 'hour']
-    # with timeseries file
-    if inps.timeseries_file:
-        for key in key_list+['ref_yx']:
-            if vars(inps)[key]:
-                print(('input "{:<10}" is ignored because it will be extracted from '
-                       'timeseries file {}').format(key, inps.timeseries_file))
-
-    # without timeseries file
-    elif any(not vars(inps)[key] for key in key_list):
-        msg = 'No input timeseries file, all the following options are required: \n{}'.format(key_list)
-        msg += '\n\n'+EXAMPLE
-        raise ValueError(msg)
-
-
-    ## default values
-    # Get Grib Source
-    inps.trop_model = standardize_trop_model(inps.trop_model, standardWeatherModelNames)
-    print('weather model: '+inps.trop_model)
-
-    # weather_dir
-    inps.weather_dir = os.path.expanduser(inps.weather_dir)
-    inps.weather_dir = os.path.expandvars(inps.weather_dir)
-    # Fallback value if WEATHER_DIR is not defined as environment variable
-    if inps.weather_dir == '${WEATHER_DIR}':
-        inps.weather_dir = './'
-    print('weather data directory: '+inps.weather_dir)
-
-    return inps
-
-
-###############################################################
-def check_inputs(inps):
-    parser = create_parser()
-
-    # output directories/files
-    atr = dict()
-    mintpy_dir = None
-    if inps.timeseries_file:
-        atr = readfile.read_attribute(inps.timeseries_file)
-        mintpy_dir = os.path.dirname(inps.timeseries_file)
-        if not inps.outfile:
-            fbase = os.path.splitext(inps.timeseries_file)[0]
-            inps.outfile = '{}_{}.h5'.format(fbase, inps.trop_model)
-    elif inps.geom_file:
-        atr = readfile.read_attribute(inps.geom_file)
-        mintpy_dir = os.path.join(os.path.dirname(inps.geom_file), '..')
-    else:
-        mintpy_dir = os.path.abspath(os.getcwd())
-
-    # trop_file
-    inps.trop_file = os.path.join(mintpy_dir, 'inputs/{}.h5'.format(inps.trop_model))
-    print('output tropospheric delay file: {}'.format(inps.trop_file))
-
-    # hour
-    if not inps.hour:
-        if 'CENTER_LINE_UTC' in atr.keys():
-            inps.hour = closest_weather_product_time(atr['CENTER_LINE_UTC'], inps.trop_model)
-        else:
-            parser.print_usage()
-            raise Exception('no input for hour')
-    print('time of cloest available product: {}:00 UTC'.format(inps.hour))
-
-    # date list
-    if inps.timeseries_file:
-        print('read date list from timeseries file: {}'.format(inps.timeseries_file))
-        ts_obj = timeseries(inps.timeseries_file)
-        ts_obj.open(print_msg=False)
-        inps.date_list = ts_obj.dateList
-    elif len(inps.date_list) == 1:
-        if os.path.isfile(inps.date_list[0]):
-            print('read date list from text file: {}'.format(inps.date_list[0]))
-            inps.date_list = ptime.yyyymmdd(np.loadtxt(inps.date_list[0],
-                                                       dtype=bytes,
-                                                       usecols=(0,)).astype(str).tolist())
-        else:
-            parser.print_usage()
-            raise Exception('ERROR: input date list < 2')
-
-    # Grib data directory
-    inps.grib_dir = os.path.join(inps.weather_dir, inps.trop_model)
-    if not os.path.isdir(inps.grib_dir):
-        os.makedirs(inps.grib_dir)
-        print('making directory: '+inps.grib_dir)
-
-    # Date list to grib file list
-    inps.grib_file_list = date_list2grib_file(inps.date_list,
-                                              inps.hour,
-                                              inps.trop_model,
-                                              inps.grib_dir)
-
-    if 'REF_Y' in atr.keys():
-        inps.ref_yx = [int(atr['REF_Y']), int(atr['REF_X'])]
-        print('reference pixel: {}'.format(inps.ref_yx))
-
-    # Coordinate system: geocoded or not
-    inps.geocoded = False
-    if 'Y_FIRST' in atr.keys():
-        inps.geocoded = True
-    print('geocoded: {}'.format(inps.geocoded))
-
-    # Prepare DEM, inc_angle, lat/lon file for PyAPS to read
-    if inps.geom_file:
-        geom_obj = geometry(inps.geom_file)
-        geom_obj.open()
-
-        print('converting DEM/incAngle for PyAPS to read')
-        # DEM
-        dem = readfile.read(inps.geom_file, datasetName='height', print_msg=False)[0]
-        inps.dem_file = 'pyapsDem.hgt'
-        writefile.write(dem, inps.dem_file, metadata=atr)
-
-        # inc_angle
-        if 'incidenceAngle' in geom_obj.datasetNames:
-            inps.inc_angle = readfile.read(inps.geom_file, datasetName='incidenceAngle', print_msg=False)[0]
-        else:
-            atr = readfile.read_attribute(inps.timeseries_file)
-            inps.inc_angle = ut.incidence_angle(atr, dem=dem, dimension=0)
-            inps.inc_angle = np.ones(dem.shape, dtype=np.float32) * inps.inc_angle
-        inps.inc_angle_file = 'pyapsIncAngle.flt'
-        writefile.write(inps.inc_angle, inps.inc_angle_file, metadata=atr)
-
-        # latitude
-        if 'latitude' in geom_obj.datasetNames:
-            data = readfile.read(inps.geom_file, datasetName='latitude', print_msg=False)[0]
-            print('converting lat for PyAPS to read')
-            inps.lat_file = 'pyapsLat.flt'
-            writefile.write(data, inps.lat_file, metadata=atr)
-        else:
-            inps.lat_file = None
-
-        # longitude
-        if 'longitude' in geom_obj.datasetNames:
-            data = readfile.read(inps.geom_file, datasetName='longitude', print_msg=False)[0]
-            print('converting lon for PyAPS to read')
-            inps.lon_file = 'pyapsLon.flt'
-            writefile.write(data, inps.lon_file, metadata=atr)
-        else:
-            inps.lon_file = None
-    return inps, atr
-
-
-###############################################################
-def closest_weather_product_time(sar_acquisition_time, grib_source='ECMWF'):
-    """Find closest available time of weather product from SAR acquisition time
-    Inputs:
-        sar_acquisition_time - string, SAR data acquisition time in seconds
-        grib_source - string, Grib Source of weather reanalysis product
-    Output:
-        grib_hr - string, time of closest available weather product
-    Example:
-        '06' = closest_weather_product_time(atr['CENTER_LINE_UTC'])
-        '12' = closest_weather_product_time(atr['CENTER_LINE_UTC'], 'NARR')
-    """
-    # Get hour/min of SAR acquisition time
-    sar_time = float(sar_acquisition_time)
-
-    # Find closest time in available weather products
-    grib_hr_list = [0, 6, 12, 18]
-    grib_hr = int(min(grib_hr_list, key=lambda x: abs(x-sar_time/3600.)))
-
-    # Adjust time output format
-    grib_hr = "%02d" % grib_hr
-    return grib_hr
-
-
-def standardize_trop_model(tropModel, standardWeatherModelNames):
-    tropModel = tropModel.replace('-', '').upper()
-    if tropModel in standardWeatherModelNames.keys():
-        tropModel = standardWeatherModelNames[tropModel]
-    return tropModel
-
-
-def date_list2grib_file(date_list, hour, trop_model, grib_dir):
-    grib_file_list = []
-    for d in date_list:
-        grib_file = grib_dir+'/'
-        if   trop_model == 'ECMWF' :  grib_file += 'ERA-Int_%s_%s.grb' % (d, hour)
-        elif trop_model == 'MERRA' :  grib_file += 'merra-%s-%s.nc4' % (d, hour)
-        elif trop_model == 'NARR'  :  grib_file += 'narr-a_221_%s_%s00_000.grb' % (d, hour)
-        elif trop_model == 'ERA'   :  grib_file += 'ERA_%s_%s.grb' % (d, hour)
-        elif trop_model == 'MERRA1':  grib_file += 'merra-%s-%s.hdf' % (d, hour)
-        grib_file_list.append(grib_file)
-    return grib_file_list
-
-
-def grib_file_name2trop_model_name(grib_file):
-    grib_file = os.path.basename(grib_file)
-    if grib_file.startswith('ERA-Int'):  trop_model = 'ECMWF'
-    elif grib_file.startswith('merra'):  trop_model = 'MERRA'
-    elif grib_file.startswith('narr'):   trop_model = 'NARR'
-    elif grib_file.startswith('ERA_'):   trop_model = 'ERA'
-    return trop_model
-
-
-def check_exist_grib_file(gfile_list, print_msg=True):
-    """Check input list of grib files, and return the existing ones with right size."""
-    gfile_exist = ut.get_file_list(gfile_list)
-    if gfile_exist:
-        file_sizes = [os.path.getsize(i) for i in gfile_exist
-                      if os.path.getsize(i) > 10e6]
-        if file_sizes:
-            comm_size = ut.most_common([i for i in file_sizes])
-            if print_msg:
-                print('common file size: {} bytes'.format(comm_size))
-                print('number of grib files existed    : {}'.format(len(gfile_exist)))
-
-            gfile_corrupt = []
-            for gfile in gfile_exist:
-                if os.path.getsize(gfile) < comm_size * 0.9:
-                    gfile_corrupt.append(gfile)
-        else:
-            gfile_corrupt = gfile_exist
-
-        if gfile_corrupt:
-            if print_msg:
-                print('------------------------------------------------------------------------------')
-                print('corrupted grib files detected! Delete them and re-download...')
-                print('number of grib files corrupted  : {}'.format(len(gfile_corrupt)))
-
-            for gfile in gfile_corrupt:
-                os.remove(gfile)
-                gfile_exist.remove(gfile)
-
-            if print_msg:
-                print('------------------------------------------------------------------------------')
-    return gfile_exist
-
-
-def dload_grib_pyaps(grib_file_list):
-    """Download weather re-analysis grib files using PyAPS
-    Parameters: grib_file_list : list of string of grib files
-    Returns:    grib_file_list : list of string
-    """
-    print('\n------------------------------------------------------------------------------')
-    print('downloading weather model data using PyAPS ...')
-
-    # Get date list to download (skip already downloaded files)
-    grib_file_exist = check_exist_grib_file(grib_file_list, print_msg=True)
-    grib_file2dload = sorted(list(set(grib_file_list) - set(grib_file_exist)))
-    date_list2dload = [str(re.findall('\d{8}', i)[0]) for i in grib_file2dload]
-    print('number of grib files to download: %d' % len(date_list2dload))
-    print('------------------------------------------------------------------------------\n')
-
-    # Download grib file using PyAPS
-    if len(date_list2dload) > 0:
-        hour = re.findall('\d{8}[-_]\d{2}', grib_file2dload[0])[0].replace('-', '_').split('_')[1]
-        grib_dir = os.path.dirname(grib_file2dload[0])
-
-        # try 3 times to download, then use whatever downloaded to calculate delay
-        trop_model = grib_file_name2trop_model_name(grib_file2dload[0])
-        i = 0
-        while i < 3:
-            i += 1
-            try:
-                if   trop_model == 'ECMWF' :  pa.ECMWFdload( date_list2dload, hour, grib_dir)
-                elif trop_model == 'MERRA' :  pa.MERRAdload( date_list2dload, hour, grib_dir)
-                elif trop_model == 'NARR'  :  pa.NARRdload(  date_list2dload, hour, grib_dir)
-                elif trop_model == 'ERA'   :  pa.ERAdload(   date_list2dload, hour, grib_dir)
-                elif trop_model == 'MERRA1':  pa.MERRA1dload(date_list2dload, hour, grib_dir)
-            except:
-                pass
-
-    grib_file_list = check_exist_grib_file(grib_file_list, print_msg=False)
-    return grib_file_list
-
-
-def get_delay(grib_file, inps):
-    """Get delay matrix using PyAPS for one acquisition
-    Inputs:
-        grib_file - strng, grib file path
-        atr       - dict, including the following attributes:
-                    dem_file    - string, DEM file path
-                    trop_model - string, Weather re-analysis data source
-                    delay_type  - string, comb/dry/wet
-                    ref_y/x     - string, reference pixel row/col number
-                    inc_angle   - np.array, 0/1/2 D
-    Output:
-        phs - 2D np.array, absolute tropospheric phase delay relative to ref_y/x
-    """
-    # initiate pyaps object
-    if inps.geocoded:
-        aps = pa.PyAPS_geo(grib_file, inps.dem_file, grib=inps.trop_model,
-                           demtype=np.float32, demfmt='RMG',
-                           verb=False, Del=inps.delay_type)
-    else:
-        aps = pa.PyAPS_rdr(grib_file, inps.dem_file, grib=inps.trop_model,
-                           demtype=np.float32, demfmt='RMG',
-                           verb=False, Del=inps.delay_type)
-
-    # estimate delay
-    phs = np.zeros((aps.ny, aps.nx), dtype=np.float32)
-    if not inps.geocoded and inps.lat_file is not None:
-        aps.getgeodelay(phs,
-                        lat=inps.lat_file,
-                        lon=inps.lon_file,
-                        inc=inps.inc_angle_file)
-    else:
-        aps.getdelay(phs, inc=0.)
-        phs /= np.cos(inps.inc_angle*np.pi/180.)
-
-    # Get relative phase delay in space
-    phs -= phs[inps.ref_yx[0], inps.ref_yx[1]]
-    phs *= -1    # reverse the sign for consistency between different phase correction steps/methods
-    return phs
-
-
-def get_delay_timeseries(inps, atr):
-    """Calculate delay time-series and write it to HDF5 file.
-    Parameters: inps : namespace, all input parameters
-                atr  : dict, metadata to be saved in trop_file
-    Returns:    trop_file : str, file name of ECMWF.h5
-    """
-    def get_dataset_size(fname):
-        atr = readfile.read_attribute(fname)
-        return (atr['LENGTH'], atr['WIDTH'])
-
-    if (ut.run_or_skip(out_file=inps.trop_file, in_file=inps.grib_file_list, print_msg=False) == 'skip' 
-            and get_dataset_size(inps.trop_file) == get_dataset_size(inps.geom_file)):
-        print('{} file exists and is newer than all GRIB files, skip updating.'.format(inps.trop_file))
-    else:
-        if any(i is None for i in [inps.geom_file, inps.ref_yx]):
-            print('No DEM / incidenceAngle / ref_yx found, skip calculating tropospheric delays.')
-            if not os.path.isfile(inps.trop_file):
-                inps.trop_file = None
-            return
-
-        # calculate phase delay
-        length, width = int(atr['LENGTH']), int(atr['WIDTH'])
-        num_date = len(inps.grib_file_list)
-        date_list = [str(re.findall('\d{8}', i)[0]) for i in inps.grib_file_list]
-        trop_data = np.zeros((num_date, length, width), np.float32)
-
-        print('calculating delay for each date using PyAPS (Jolivet et al., 2011; 2014) ...')
-        print('number of grib files used: {}'.format(num_date))
-        prog_bar = ptime.progressBar(maxValue=num_date)
-        for i in range(num_date):
-            grib_file = inps.grib_file_list[i]
-            trop_data[i] = get_delay(grib_file, inps)
-            prog_bar.update(i+1, suffix=os.path.basename(grib_file))
-        prog_bar.close()
-
-        # Convert relative phase delay on reference date
-        try:
-            inps.ref_date = atr['REF_DATE']
-        except:
-            inps.ref_date = date_list[0]
-        print('convert to relative phase delay with reference date: '+inps.ref_date)
-        inps.ref_idx = date_list.index(inps.ref_date)
-        trop_data -= np.tile(trop_data[inps.ref_idx, :, :], (num_date, 1, 1))
-
-        # Write tropospheric delay to HDF5
-        atr['REF_Y'] = inps.ref_yx[0]
-        atr['REF_X'] = inps.ref_yx[1]
-        ts_obj = timeseries(inps.trop_file)
-        ts_obj.write2hdf5(data=trop_data,
-                          dates=date_list,
-                          metadata=atr,
-                          refFile=inps.timeseries_file)
-
-    # Delete temporary DEM file in ROI_PAC format
-    if inps.geom_file:
-        temp_files =[fname for fname in [inps.dem_file,
-                                         inps.inc_angle_file,
-                                         inps.lat_file,
-                                         inps.lon_file]
-                     if (fname is not None and 'pyaps' in fname)]
-        if temp_files:
-            print('delete temporary geometry files: {}'.format(temp_files))
-            for temp_file in temp_files:
-                os.remove(temp_file)
-                os.remove(temp_file+'.rsc')
-    return
-
-
-def correct_timeseries(dis_file, tropo_file, cor_dis_file):
-    # diff.py can handle different reference in space and time
-    # between the absolute tropospheric delay and the double referenced time-series
-    print('\n------------------------------------------------------------------------------')
-    print('correcting relative delay for input time-series using diff.py')
-    from mintpy import diff
-
-    iargs = [dis_file, tropo_file, '-o', cor_dis_file, '--force']
-    print('diff.py', ' '.join(iargs))
-    diff.main(iargs)
-    return cor_dis_file
-
-
-###############################################################
-def main(iargs=None):
-    inps = cmd_line_parse(iargs)
-    inps, atr = check_inputs(inps)
-
-    inps.grib_file_list = dload_grib_pyaps(inps.grib_file_list)
-
-    if inps.trop_file:
-        get_delay_timeseries(inps, atr)
-
-    if atr and atr['FILE_TYPE'] == 'timeseries':
-        inps.outfile = correct_timeseries(inps.timeseries_file,
-                                          inps.trop_file,
-                                          out_file=inps.outfile)
-    else:
-        print('No input timeseries file, skip correcting tropospheric delays.')
-
-    return inps.outfile
-
-
-###############################################################
-if __name__ == '__main__':
-    main(sys.argv[1:])
diff -pruN 1.3.3-2/mintpy/tsview.py 1.4.0-1/mintpy/tsview.py
--- 1.3.3-2/mintpy/tsview.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/tsview.py	2022-08-04 20:01:49.000000000 +0000
@@ -12,10 +12,10 @@ import sys
 import argparse
 import numpy as np
 from scipy import linalg, stats
-from matplotlib import pyplot as plt, ticker, widgets, patches
+from matplotlib import pyplot as plt, widgets, patches
 
 from mintpy.objects import timeseries, giantTimeseries, HDFEOS
-from mintpy.utils import arg_group, ptime, time_func, readfile, utils as ut, plot as pp
+from mintpy.utils import arg_utils, ptime, time_func, readfile, utils as ut, plot as pp
 from mintpy.multilook import multilook_data
 from mintpy import subset, view, timeseries2velocity as ts2vel
 
@@ -36,31 +36,50 @@ EXAMPLE = """example:
 """
 
 
-def create_parser():
-    parser = argparse.ArgumentParser(description='Interactive time-series viewer',
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Interactive time-series viewer'
+    epilog = EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = arg_utils.create_argument_parser(
+        name, synopsis=synopsis, description=synopsis, epilog=epilog, subparsers=subparsers)
+
     parser.add_argument('file', nargs='+',
                         help='time-series file to display\n'
                              'i.e.: timeseries_ERA5_ramp_demErr.h5 (MintPy)\n'
                              '      LS-PARAMS.h5 (GIAnT)\n'
                              '      S1_IW12_128_0593_0597_20141213_20180619.he5 (HDF-EOS5)')
-    parser.add_argument('--label', dest='file_label', nargs='*', help='labels to display for multiple input files')
-    parser.add_argument('--ylim', dest='ylim', nargs=2, metavar=('YMIN', 'YMAX'), type=float, help='Y limits for point plotting.')
-    parser.add_argument('--tick-right', dest='tick_right', action='store_true', help='set tick and tick label to the right')
-    parser.add_argument('-l','--lookup', dest='lookup_file', type=str, help='lookup table file')
-
-    parser.add_argument('-n', dest='idx', metavar='NUM', type=int, help='Epoch/slice number for initial display.')
-    parser.add_argument('--error', dest='error_file', help='txt file with error for each date.')
+    parser.add_argument('--label', dest='file_label', nargs='*',
+                        help='labels to display for multiple input files')
+    parser.add_argument('--ylim', dest='ylim', nargs=2, metavar=('YMIN', 'YMAX'), type=float,
+                        help='Y limits for point plotting.')
+    parser.add_argument('--tick-right', dest='tick_right', action='store_true',
+                        help='set tick and tick label to the right')
+    parser.add_argument('-l','--lookup', dest='lookup_file', type=str,
+                        help='lookup table file')
+    parser.add_argument('--no-show-img','--not-show-image', dest='disp_fig_img', action='store_false',
+                        help='do NOT show the map figure.\n'
+                             'Useful for plotting a point time series only.\n'
+                             'This option requires --yx/lalo input.')
+
+    parser.add_argument('-n', dest='idx', metavar='NUM', type=int,
+                        help='Epoch/slice number for initial display.')
+    parser.add_argument('--error', dest='error_file',
+                        help='txt file with error for each date.')
 
     # time info
-    parser.add_argument('--start-date', dest='start_date', type=str, help='start date of displacement to display')
-    parser.add_argument('--end-date', dest='end_date', type=str, help='end date of displacement to display')
-    parser.add_argument('--exclude', '--ex', dest='ex_date_list', nargs='*', default=['exclude_date.txt'], help='Exclude date shown as gray.')
-    parser.add_argument('--zf', '--zero-first', dest='zero_first', action='store_true', help='Set displacement at first acquisition to zero.')
-    parser.add_argument('--off','--offset', dest='offset', type=float, help='Offset for each timeseries file.')
+    parser.add_argument('--start-date', dest='start_date', type=str,
+                        help='start date of displacement to display')
+    parser.add_argument('--end-date', dest='end_date', type=str,
+                        help='end date of displacement to display')
+    parser.add_argument('--exclude', '--ex', dest='ex_date_list', nargs='*', default=['exclude_date.txt'],
+                        help='Exclude date shown as gray.')
+    parser.add_argument('--zf', '--zero-first', dest='zero_first', action='store_true',
+                        help='Set displacement at first acquisition to zero.')
+    parser.add_argument('--off','--offset', dest='offset', type=float,
+                        help='Offset for each timeseries file.')
 
-    parser.add_argument('--noverbose', dest='print_msg', action='store_false', help='Disable the verbose message printing.')
+    parser.add_argument('--noverbose', dest='print_msg', action='store_false',
+                        help='Disable the verbose message printing.')
 
     # temporal model fitting
     parser.add_argument('--nomodel', '--nofit', dest='plot_model', action='store_false',
@@ -68,31 +87,38 @@ def create_parser():
     parser.add_argument('--plot-model-conf-int', '--plot-fit-conf-int', dest='plot_model_conf_int', action='store_true',
                         help='Plot the time function prediction confidence intervals.\n'
                              '[!-- Preliminary feature alert! --!]\n'
-                             '[!-- This feature is NOT throughly checked. Read the code before use. Interpret at your own risk! --!]')
+                             '[!-- This feature is NOT thoroughly checked. '
+                             'Read the code before use. Interpret at your own risk! --!]')
 
-    parser = arg_group.add_timefunc_argument(parser)
+    parser = arg_utils.add_timefunc_argument(parser)
 
     # pixel of interest
     pixel = parser.add_argument_group('Pixel Input')
-    pixel.add_argument('--yx', type=int, metavar=('Y', 'X'), nargs=2, help='initial pixel to plot in Y/X coord')
-    pixel.add_argument('--lalo', type=float, metavar=('LAT', 'LON'), nargs=2, help='initial pixel to plot in lat/lon coord')
-
-    pixel.add_argument('--marker', type=str, default='o', help='marker style (default: %(default)s).')
-    pixel.add_argument('--ms', '--markersize', dest='marker_size', type=float, default=6.0, help='marker size (default: %(default)s).')
-    pixel.add_argument('--lw', '--linewidth', dest='linewidth', type=float, default=0, help='line width (default: %(default)s).')
-    pixel.add_argument('--ew', '--edgewidth', dest='edge_width', type=float, default=1.0, help='Edge width for the error bar (default: %(default)s)')
+    pixel.add_argument('--yx', type=int, metavar=('Y', 'X'), nargs=2,
+                       help='initial pixel to plot in Y/X coord')
+    pixel.add_argument('--lalo', type=float, metavar=('LAT', 'LON'), nargs=2,
+                       help='initial pixel to plot in lat/lon coord')
+
+    pixel.add_argument('--marker', type=str, default='o',
+                       help='marker style (default: %(default)s).')
+    pixel.add_argument('--ms', '--markersize', dest='marker_size', type=float, default=6.0,
+                       help='marker size (default: %(default)s).')
+    pixel.add_argument('--lw', '--linewidth', dest='linewidth', type=float, default=0,
+                       help='line width (default: %(default)s).')
+    pixel.add_argument('--ew', '--edgewidth', dest='edge_width', type=float, default=1.0,
+                       help='Edge width for the error bar (default: %(default)s)')
 
     # other groups
-    parser = arg_group.add_data_disp_argument(parser)
-    parser = arg_group.add_dem_argument(parser)
-    parser = arg_group.add_figure_argument(parser)
-    parser = arg_group.add_gps_argument(parser)
-    parser = arg_group.add_mask_argument(parser)
-    parser = arg_group.add_map_argument(parser)
-    parser = arg_group.add_memory_argument(parser)
-    parser = arg_group.add_reference_argument(parser)
-    parser = arg_group.add_save_argument(parser)
-    parser = arg_group.add_subset_argument(parser)
+    parser = arg_utils.add_data_disp_argument(parser)
+    parser = arg_utils.add_dem_argument(parser)
+    parser = arg_utils.add_figure_argument(parser)
+    parser = arg_utils.add_gps_argument(parser)
+    parser = arg_utils.add_mask_argument(parser)
+    parser = arg_utils.add_map_argument(parser)
+    parser = arg_utils.add_memory_argument(parser)
+    parser = arg_utils.add_reference_argument(parser)
+    parser = arg_utils.add_save_argument(parser)
+    parser = arg_utils.add_subset_argument(parser)
 
     return parser
 
@@ -124,13 +150,15 @@ def cmd_line_parse(iargs=None):
     inps.colormap = inps.colormap if inps.colormap else 'jet'
     inps.fig_size = inps.fig_size if inps.fig_size else [8.0, 4.5]
 
-    # temporal model fitting, initialize the dicts of exp and log funcs
-    inps = ts2vel.init_exp_log_dicts(inps)
-
     # verbose print using --noverbose option
     global vprint
     vprint = print if inps.print_msg else lambda *args, **kwargs: None
 
+    if not inps.disp_fig_img:
+        if not inps.yx and not inps.lalo:
+            inps.disp_fig_img = True
+            print('WARNING: NO --yx/lalo input found for --no-show-img, turn it OFF and continue')
+
     if not inps.disp_fig:
         plt.switch_backend('Agg')
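
plt.switch_backend('Agg') above puts matplotlib into a non-interactive mode, which is what makes combinations like --no-show-img plus a save option workable on headless machines. A minimal standalone sketch (file name illustrative):

    import matplotlib.pyplot as plt

    # switch to the non-interactive Agg backend before creating any figure,
    # so plots can be rendered and saved without a display server
    plt.switch_backend('Agg')
    fig, ax = plt.subplots()
    ax.plot([1, 2, 3], [0.0, 1.5, 1.0])
    fig.savefig('ts_point.png', dpi=150)
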
 
@@ -331,7 +359,7 @@ def read_init_info(inps):
     inps.cbar_label += '[{}]'.format(inps.disp_unit_img)
 
     ## fit a suite of time func to the time series
-    inps.model, inps.num_param = ts2vel.read_inps2model(inps, date_list=inps.date_list)
+    inps.model = time_func.inps2model(inps, date_list=inps.date_list, print_msg=inps.print_msg)
 
     # dense TS for plotting
     inps.date_list_fit = ptime.get_date_range(inps.date_list[0], inps.date_list[-1])
@@ -447,7 +475,7 @@ def read_timeseries_data(inps):
     if not inps.vlim:
         inps.cmap_lut, inps.vlim = pp.auto_adjust_colormap_lut_and_disp_limit(ts_data[0],
                                                                               num_multilook=10,
-                                                                              print_msg=inps.print_msg)
+                                                                              print_msg=inps.print_msg)[:2]
     vprint('data    range: {} {}'.format(inps.dlim, inps.disp_unit))
     vprint('display range: {} {}'.format(inps.vlim, inps.disp_unit))
 
@@ -768,17 +796,15 @@ class timeseriesViewer():
                 disp_cbar=True,
                 disp_slider=True,
                 print_msg=self.print_msg)
-        self.fig_img = plt.figure(self.figname_img, figsize=self.figsize_img)
+        subplot_kw = dict(projection=self.map_proj_obj) if self.map_proj_obj is not None else {}
+        self.fig_img, self.ax_img = plt.subplots(figsize=self.figsize_img, subplot_kw=subplot_kw)
 
         # Figure 1 - Axes 1 - Displacement Map
-        axes_kw = dict(projection=self.map_proj_obj) if self.map_proj_obj is not None else {}
-        self.ax_img = self.fig_img.add_axes([0.125, 0.25, 0.75, 0.65], **axes_kw)
         img_data = np.array(self.ts_data[0][self.idx, :, :])
         img_data[self.mask == 0] = np.nan
         self.plot_init_image(img_data)
 
         # Figure 1 - Axes 2 - Time Slider
-        self.ax_tslider = self.fig_img.add_axes([0.125, 0.1, 0.75, 0.07])
         self.plot_init_time_slider(init_idx=self.idx, ref_idx=self.ref_idx)
         self.tslider.on_changed(self.update_time_slider)
 
@@ -792,8 +818,8 @@ class timeseriesViewer():
                 save_ts_data_and_plot(self.yx, d_ts, m_strs, self)
 
         # Final linking of the canvas to the plots.
-        self.fig_img.canvas.mpl_connect('button_press_event', self.update_plot_timeseries)
-        self.fig_img.canvas.mpl_connect('key_press_event', self.on_key_event)
+        self.fig_img.canvas.mpl_connect('button_press_event', self.update_point_timeseries)
+        self.fig_img.canvas.mpl_connect('key_press_event', self.update_image)
         if self.disp_fig:
             vprint('showing ...')
             msg = '\n------------------------------------------------------------------------'
@@ -802,11 +828,90 @@ class timeseriesViewer():
             msg += '\n2) Press left or right arrow key (if not responding, click the image and try again).'
             msg += '\n------------------------------------------------------------------------'
             vprint(msg)
+
+            # --no-show-map option
+            # requires --yx/lalo input
+            if self.yx and not self.disp_fig_img:
+                plt.close(self.fig_img)
+
             plt.show()
         return
 
 
+    ##---------- event functions
+    def update_point_timeseries(self, event):
+        """Event function to get y/x from button press"""
+        if event.inaxes == self.ax_img:
+            # get row/col number
+            if self.fig_coord == 'geo':
+                y, x = self.coord.geo2radar(event.ydata, event.xdata, print_msg=False)[0:2]
+            else:
+                y, x = int(event.ydata+0.5), int(event.xdata+0.5)
+
+            # plot time-series displacement
+            self.plot_point_timeseries((y, x))
+        return
+
+
+    def update_image(self, event):
+        """Slide images with left/right key on keyboard"""
+        if event.inaxes and event.inaxes.figure == self.fig_img:
+            idx = None
+            if event.key == 'left':
+                idx = max(self.idx - 1, 0)
+            elif event.key == 'right':
+                idx = min(self.idx + 1, self.num_date - 1)
+
+            if idx is not None and idx != self.idx:
+                # update title
+                disp_date = self.dates[idx].strftime(self.disp_date_format)
+                sub_title = 'N = {n}, Time = {t}'.format(n=idx, t=disp_date)
+                self.ax_img.set_title(sub_title, fontsize=self.font_size)
+
+                # read data
+                data_img = np.array(self.ts_data[0][idx, :, :])
+                data_img[self.mask == 0] = np.nan
+                if self.wrap:
+                    if self.disp_unit_img == 'radian':
+                        data_img *= self.range2phase
+                    data_img = ut.wrap(data_img, wrap_range=self.wrap_range)
+
+                # update
+                self.tslider.set_val(idx)         # update slider
+                self.img.set_data(data_img)       # update image
+                self.idx = idx
+                self.fig_img.canvas.draw_idle()
+                self.fig_img.canvas.flush_events()
+        return
+
+
+    def update_time_slider(self, val):
+        """Update Displacement Map using Slider"""
+        self.idx = self.tslider.val
+
+        # update title
+        disp_date = self.dates[self.idx].strftime(self.disp_date_format)
+        sub_title = 'N = {n}, Time = {t}'.format(n=self.idx, t=disp_date)
+        self.ax_img.set_title(sub_title, fontsize=self.font_size)
+
+        # read/update 2D image data
+        data_img = self.ts_data[0][self.idx, :, :]
+        data_img[self.mask == 0] = np.nan
+        if self.wrap:
+            if self.disp_unit_img == 'radian':
+                data_img *= self.range2phase
+            data_img = ut.wrap(data_img, wrap_range=self.wrap_range)
+        self.img.set_data(data_img)
+
+        # update figure
+        self.fig_img.canvas.draw_idle()
+        self.fig_img.canvas.flush_events()
+        return
+
+
+    ##---------- plot functions
     def plot_init_image(self, img_data):
+        """Plot the initial 2D image."""
         # prepare data
         if self.wrap:
             if self.disp_unit_img == 'radian':
@@ -814,7 +919,8 @@ class timeseriesViewer():
             img_data = ut.wrap(img_data, wrap_range=self.wrap_range)
 
         # Title and Axis Label
-        disp_date = self.dates[self.idx].strftime('%Y-%m-%d')
+        self.disp_date_format = ptime.get_compact_isoformat(self.date_list[0])
+        disp_date = self.dates[self.idx].strftime(self.disp_date_format)
         self.fig_title = 'N = {}, Time = {}'.format(self.idx, disp_date)
 
         # Initial Pixel of interest
@@ -827,63 +933,34 @@ class timeseriesViewer():
 
         # call view.py to plot
         self.img, self.cbar_img = view.plot_slice(self.ax_img, img_data, self.atr, self)[2:4]
+        self.fig_img.canvas.set_window_title(self.figname_img)
+        self.fig_img.tight_layout(rect=(0,0,1,0.97))
+
         return self.img, self.cbar_img
 
 
     def plot_init_time_slider(self, init_idx=-1, ref_idx=None):
-        val_step = np.min(np.diff(self.yearList))
-        val_min = self.yearList[0]
-        val_max = self.yearList[-1]
+        """Plot the initial slider."""
+        # initiate axes
+        self.fig_img.subplots_adjust(bottom=0.16)
+        self.ax_tslider = self.fig_img.add_axes([0.125, 0.05, 0.75, 0.03])
 
+        # plot slider
         self.tslider = widgets.Slider(
-            self.ax_tslider,
-            label='Time',
-            valinit=self.yearList[init_idx],
-            valmin=val_min,
-            valmax=val_max,
-            valstep=val_step)
-
-        bar_width = val_step / 4.
-        datex = np.array(self.yearList) - bar_width / 2.
-        self.tslider.ax.bar(datex, np.ones(len(datex)), bar_width, facecolor='black', ecolor=None)
+            ax=self.ax_tslider,
+            label='Image',
+            valinit=init_idx,
+            valmin=0,
+            valmax=self.num_date-1,
+            valstep=1)
+
+        # plot reference date as a gray dot
         if ref_idx is not None:
-            self.tslider.ax.bar(datex[ref_idx], 1., bar_width*3, facecolor='crimson', ecolor=None)
+            self.tslider.ax.scatter(ref_idx, 0.5, s=8**2, marker='o', color='gray', edgecolors='w')
 
-        # xaxis tick format
-        if np.floor(val_max) == np.floor(val_min):
-            digit = 10.
-        else:
-            digit = 1.
-        self.tslider.ax.set_xticks(np.round(np.linspace(val_min, val_max, num=5) * digit) / digit)
-        self.tslider.ax.xaxis.set_minor_locator(ticker.MultipleLocator(1./12.))
-        self.tslider.ax.set_xlim([val_min, val_max])
-        self.tslider.ax.set_yticks([])
-        self.tslider.valtext.set_visible(False)   #hide slider values
         return self.tslider
 
 
-    def update_time_slider(self, val):
-        """Update Displacement Map using Slider"""
-        idx = np.argmin(np.abs(np.array(self.yearList) - self.tslider.val))
-        # update title
-        disp_date = self.dates[idx].strftime('%Y-%m-%d')
-        sub_title = 'N = {n}, Time = {t}'.format(n=idx, t=disp_date)
-        self.ax_img.set_title(sub_title, fontsize=self.font_size)
-
-        # read data
-        data_img = np.array(self.ts_data[0][idx, :, :])
-        data_img[self.mask == 0] = np.nan
-        if self.wrap:
-            if self.disp_unit_img == 'radian':
-                data_img *= self.range2phase
-            data_img = ut.wrap(data_img, wrap_range=self.wrap_range)
-
-        # update data
-        self.img.set_data(data_img)
-        self.idx = idx
-        self.fig_img.canvas.draw()
-        return
-
     def plot_point_timeseries(self, yx):
         """Plot point displacement time-series at pixel [y, x]
         Parameters: yx     : list of 2 int
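
The reworked plot_init_time_slider() above replaces the decimal-year slider with one integer position per acquisition (valstep=1), which removes the nearest-year lookup in the old update_time_slider(). A standalone sketch of the pattern, with random data standing in for the time-series stack:

    import numpy as np
    import matplotlib.pyplot as plt
    from matplotlib import widgets

    data = np.random.rand(10, 50, 50)              # (num_date, length, width)
    fig, ax = plt.subplots()
    fig.subplots_adjust(bottom=0.16)
    img = ax.imshow(data[0])
    ax_slider = fig.add_axes([0.125, 0.05, 0.75, 0.03])
    tslider = widgets.Slider(ax=ax_slider, label='Image', valinit=0,
                             valmin=0, valmax=data.shape[0] - 1, valstep=1)

    def update(val):
        img.set_data(data[int(tslider.val)])       # one slider step = one image
        fig.canvas.draw_idle()

    tslider.on_changed(update)
    plt.show()
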
@@ -987,56 +1064,14 @@ class timeseriesViewer():
                 vprint(f'    {m_str}')
 
             # update figure
-            self.fig_pts.canvas.draw()
+            # use fig.canvas.draw_idle() instead of fig.canvas.draw()
+            # reference: https://stackoverflow.com/questions/64789437
+            self.fig_pts.canvas.draw_idle()
+            self.fig_pts.canvas.flush_events()
 
         return ts_dis, m_strs
 
 
-    def update_plot_timeseries(self, event):
-        """Event function to get y/x from button press"""
-        if event.inaxes == self.ax_img:
-            # get row/col number
-            if self.fig_coord == 'geo':
-                y, x = self.coord.geo2radar(event.ydata, event.xdata, print_msg=False)[0:2]
-            else:
-                y, x = int(event.ydata+0.5), int(event.xdata+0.5)
-
-            # plot time-series displacement
-            self.plot_point_timeseries((y, x))
-        return
-
-
-    def on_key_event(self, event):
-        """Slide images with left/right key on keyboard"""
-        if event.inaxes and event.inaxes.figure == self.fig_img:
-            idx = None
-            if event.key == 'left':
-                idx = max(self.idx - 1, 0)
-            elif event.key == 'right':
-                idx = min(self.idx + 1, self.num_date - 1)
-
-            if idx is not None and idx != self.idx:
-                # update title
-                disp_date = self.dates[idx].strftime('%Y-%m-%d')
-                sub_title = 'N = {n}, Time = {t}'.format(n=idx, t=disp_date)
-                self.ax_img.set_title(sub_title, fontsize=self.font_size)
-
-                # read data
-                data_img = np.array(self.ts_data[0][idx, :, :])
-                data_img[self.mask == 0] = np.nan
-                if self.wrap:
-                    if self.disp_unit_img == 'radian':
-                        data_img *= self.range2phase
-                    data_img = ut.wrap(data_img, wrap_range=self.wrap_range)
-
-                # update
-                self.tslider.set_val(self.yearList[idx]) # update slider
-                self.img.set_data(data_img)              # update image
-                self.idx = idx
-                self.fig_img.canvas.draw()
-        return
-
-
 ###########################################################################################
 def main(iargs=None):
     obj = timeseriesViewer(iargs=iargs)
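
The draw_idle()/flush_events() pair adopted throughout this file schedules a repaint on the next event-loop pass instead of redrawing synchronously, which keeps the GUI responsive while the slider or arrow keys fire rapid events. A minimal sketch of the pattern:

    import matplotlib.pyplot as plt

    fig, ax = plt.subplots()
    line, = ax.plot([0, 1, 2], [0.0, 1.0, 0.5])

    def refresh(ydata):
        line.set_ydata(ydata)
        fig.canvas.draw_idle()      # coalesce repeated requests into one repaint
        fig.canvas.flush_events()   # let the GUI process pending events now

    refresh([0.2, 0.8, 0.4])
    plt.show()
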
diff -pruN 1.3.3-2/mintpy/unwrap_error_bridging.py 1.4.0-1/mintpy/unwrap_error_bridging.py
--- 1.3.3-2/mintpy/unwrap_error_bridging.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/unwrap_error_bridging.py	2022-08-04 20:01:49.000000000 +0000
@@ -9,16 +9,14 @@
 import os
 import sys
 import time
-import argparse
 import h5py
 import numpy as np
+
 from mintpy.objects import ifgramStack
 from mintpy.objects.conncomp import connectComponent
 from mintpy.defaults.template import get_template_content
-from mintpy.utils import (ptime,
-                          readfile,
-                          writefile,
-                          utils as ut)
+from mintpy.utils import ptime, readfile, writefile, utils as ut
+from mintpy.utils.arg_utils import create_argument_parser
 
 
 # key configuration parameter name
@@ -32,11 +30,7 @@ configKeys = [
 
 
 ####################################################################################################
-EXAMPLE = """Example:
-  unwrap_error_bridging.py  ./inputs/ifgramStack.h5  -t GalapagosSenDT128.template --update
-  unwrap_error_bridging.py  ./inputs/ifgramStack.h5  --water-mask waterMask.h5
-  unwrap_error_bridging.py  20180502_20180619.unw    --water-mask waterMask.h5
-"""
+TEMPLATE = get_template_content('correct_unwrap_error')
 
 REFERENCE = """reference:
   Yunjun, Z., H. Fattahi, and F. Amelung (2019), Small baseline InSAR time series analysis:
@@ -44,18 +38,23 @@ REFERENCE = """reference:
   doi:10.1016/j.cageo.2019.104331.
 """
 
+EXAMPLE = """Example:
+  unwrap_error_bridging.py  ./inputs/ifgramStack.h5  -t GalapagosSenDT128.template --update
+  unwrap_error_bridging.py  ./inputs/ifgramStack.h5  --water-mask waterMask.h5
+  unwrap_error_bridging.py  20180502_20180619.unw    --water-mask waterMask.h5
+"""
+
 NOTE = """
   by connecting reliable regions with MST bridges. This method assumes the phase differences
   between neighboring regions are less than pi rad in magnitude.
 """
 
-TEMPLATE = get_template_content('correct_unwrap_error')
-
-
-def create_parser():
-    parser = argparse.ArgumentParser(description='Unwrapping Error Correction with Bridging'+NOTE,
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=REFERENCE+'\n'+TEMPLATE+'\n'+EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Unwrapping Error Correction with Bridging'
+    epilog = REFERENCE + '\n' + TEMPLATE + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis+NOTE, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('ifgram_file', type=str, help='interferograms file to be corrected')
     parser.add_argument('-r','--radius', dest='bridgePtsRadius', type=int, default=50,
@@ -297,13 +296,14 @@ def main(iargs=None):
 
     start_time = time.time()
     # run bridging
-    run_unwrap_error_bridge(inps.ifgram_file,
-                            water_mask_file=inps.waterMaskFile,
-                            ramp_type=inps.ramp,
-                            radius=inps.bridgePtsRadius,
-                            cc_min_area=inps.connCompMinArea,
-                            dsNameIn=inps.datasetNameIn,
-                            dsNameOut=inps.datasetNameOut)
+    run_unwrap_error_bridge(
+        ifgram_file=inps.ifgram_file,
+        water_mask_file=inps.waterMaskFile,
+        ramp_type=inps.ramp,
+        radius=inps.bridgePtsRadius,
+        cc_min_area=inps.connCompMinArea,
+        dsNameIn=inps.datasetNameIn,
+        dsNameOut=inps.datasetNameOut)
 
     # config parameter
     if os.path.splitext(inps.ifgram_file)[1] in ['.h5', '.he5']:
@@ -315,7 +315,7 @@ def main(iargs=None):
 
     m, s = divmod(time.time()-start_time, 60)
     print('\ntime used: {:02.0f} mins {:02.1f} secs\nDone.'.format(m, s))
-    return inps.ifgram_file
+    return
 
 
 ####################################################################################################
diff -pruN 1.3.3-2/mintpy/unwrap_error_phase_closure.py 1.4.0-1/mintpy/unwrap_error_phase_closure.py
--- 1.3.3-2/mintpy/unwrap_error_phase_closure.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/unwrap_error_phase_closure.py	2022-08-04 20:01:49.000000000 +0000
@@ -8,7 +8,6 @@
 
 import os
 import sys
-import argparse
 import time
 import h5py
 import numpy as np
@@ -26,6 +25,7 @@ except ImportError:
 from mintpy.objects import ifgramStack, conncomp
 from mintpy.defaults.template import get_template_content
 from mintpy.utils import ptime, readfile, writefile, utils as ut, plot as pp
+from mintpy.utils.arg_utils import create_argument_parser
 from mintpy.utils.solvers import l1regls
 from mintpy import ifgram_inversion as ifginv
 
@@ -33,6 +33,15 @@ from mintpy import ifgram_inversion as i
 key_prefix = 'mintpy.unwrapError.'
 
 ##########################################################################################
+TEMPLATE1 = get_template_content('quick_overview')
+TEMPLATE2 = get_template_content('correct_unwrap_error')
+
+REFERENCE = """reference:
+  Yunjun, Z., H. Fattahi, and F. Amelung (2019), Small baseline InSAR time series analysis:
+  Unwrapping error correction and noise reduction, Computers & Geosciences, 133, 104331,
+  doi:10.1016/j.cageo.2019.104331.
+"""
+
 EXAMPLE = """example:
   # correct phase unwrapping error with phase closure
   unwrap_error_phase_closure.py  ./inputs/ifgramStack.h5  --cc-mask maskConnComp.h5  -t smallbaselineApp.cfg   --update
@@ -54,21 +63,12 @@ NOTE = """
     unwrapping errors, then the minority of correct interferograms will be turned into wrong ones.
 """
 
-REFERENCE = """reference:
-  Yunjun, Z., H. Fattahi, and F. Amelung (2019), Small baseline InSAR time series analysis:
-  Unwrapping error correction and noise reduction, Computers & Geosciences, 133, 104331,
-  doi:10.1016/j.cageo.2019.104331.
-"""
-
-TEMPLATE1 = get_template_content('quick_overview')
-TEMPLATE2 = get_template_content('correct_unwrap_error')
-
-
-
-def create_parser():
-    parser = argparse.ArgumentParser(description='Unwrapping Error Correction based on Phase Closure'+NOTE,
-                                     formatter_class=argparse.RawTextHelpFormatter,
-                                     epilog=REFERENCE+'\n'+TEMPLATE1+'\n'+TEMPLATE2+'\n'+EXAMPLE)
+def create_parser(subparsers=None):
+    synopsis = 'Unwrapping Error Correction based on Phase Closure'
+    epilog = REFERENCE + '\n' + TEMPLATE1 + '\n' + TEMPLATE2 + '\n' + EXAMPLE
+    name = __name__.split('.')[-1]
+    parser = create_argument_parser(
+        name, synopsis=synopsis, description=synopsis+NOTE, epilog=epilog, subparsers=subparsers)
 
     parser.add_argument('ifgram_file', help='interferograms file to be corrected')
     parser.add_argument('-c','--cc-mask', dest='cc_mask_file', default='maskConnComp.h5',
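
Both unwrap_error_*.py scripts above migrate from a bare argparse.ArgumentParser to the shared create_argument_parser() factory, so each tool keeps working standalone while also being registrable as a 'mintpy' sub-command. A hedged sketch of the two call paths (the 'demo' command name is hypothetical):

    import argparse

    def create_parser(subparsers=None):
        synopsis = 'Demo tool'
        if subparsers is not None:
            # sub-command usage, e.g. "mintpy demo --num 2"
            parser = subparsers.add_parser('demo', description=synopsis, help=synopsis)
        else:
            # regular standalone usage, e.g. "demo.py --num 2"
            parser = argparse.ArgumentParser(description=synopsis)
        parser.add_argument('--num', type=int, default=1)
        return parser

    # standalone script
    inps = create_parser().parse_args(['--num', '2'])

    # registered under a top-level entry point
    top = argparse.ArgumentParser(prog='mintpy')
    create_parser(top.add_subparsers(dest='cmd'))
    inps = top.parse_args(['demo', '--num', '2'])
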
diff -pruN 1.3.3-2/mintpy/utils/arg_group.py 1.4.0-1/mintpy/utils/arg_group.py
--- 1.3.3-2/mintpy/utils/arg_group.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/utils/arg_group.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,378 +0,0 @@
-#!/usr/bin/env python3
-#############################################################
-# Program is part of MintPy                                 #
-# Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi          #
-# Author: Zhang Yunjun, Nov 2020                            #
-#############################################################
-# Recommend import:
-#     from mintpy.utils import arg_group
-
-
-import argparse
-import numpy as np
-
-
-
-def add_data_disp_argument(parser):
-    """Argument group parser for data display options"""
-    data = parser.add_argument_group('Data Display Options', 'Options to adjust the dataset display')
-    data.add_argument('-v','--vlim', dest='vlim', nargs=2, metavar=('VMIN', 'VMAX'), type=float,
-                      help='Display limits for matrix plotting.')
-    data.add_argument('-u', '--unit', dest='disp_unit', metavar='UNIT',
-                      help='unit for display.  Its priority > wrap')
-    data.add_argument('--nd','--no-data-val','--no-data-value', dest='no_data_value', type=float,
-                      help='Specify the no-data-value to be ignored and masked.')
-
-    data.add_argument('--wrap', action='store_true',
-                      help='re-wrap data to display data in fringes.')
-    data.add_argument('--wrap-range', dest='wrap_range', type=float, nargs=2,
-                      default=[-1.*np.pi, np.pi], metavar=('MIN', 'MAX'),
-                      help='range of one cycle after wrapping (default: %(default)s).')
-
-    data.add_argument('--flip-lr', dest='flip_lr',
-                      action='store_true', help='flip left-right')
-    data.add_argument('--flip-ud', dest='flip_ud',
-                      action='store_true', help='flip up-down')
-    data.add_argument('--noflip', dest='auto_flip', action='store_false',
-                      help='turn off auto flip for radar coordinate file')
-
-    data.add_argument('--nmli','--num-multilook','--multilook-num', dest='multilook_num', type=int, default=1, metavar='NUM',
-                      help='multilook data in X and Y direction with a factor for display (default: %(default)s).')
-    data.add_argument('--nomultilook', '--no-multilook', dest='multilook', action='store_false',
-                      help='do not multilook, for high quality display. \n'
-                           'If multilook is True and multilook_num=1, multilook_num will be estimated automatically.\n'
-                           'Useful when displaying big datasets.')
-    data.add_argument('--alpha', dest='transparency', type=float,
-                      help='Data transparency. \n'
-                           '0.0 - fully transparent, 1.0 - no transparency.')
-    return parser
-
-
-def add_dem_argument(parser):
-    """Argument group parser for DEM display options"""
-    dem = parser.add_argument_group('DEM', 'display topography in the background')
-    dem.add_argument('-d', '--dem', dest='dem_file', metavar='DEM_FILE',
-                     help='DEM file to show topography as background')
-    dem.add_argument('--mask-dem', dest='mask_dem', action='store_true',
-                     help='Mask out DEM pixels not coincident with valid data pixels')
-    dem.add_argument('--dem-noshade', dest='disp_dem_shade', action='store_false',
-                     help='do not show DEM shaded relief')
-    dem.add_argument('--dem-nocontour', dest='disp_dem_contour', action='store_false',
-                     help='do not show DEM contour lines')
-
-    dem.add_argument('--contour-smooth', dest='dem_contour_smooth', type=float, default=3.0,
-                     help='Background topography contour smooth factor - sigma of Gaussian filter. \n'
-                          'Set to 0.0 for no smoothing; (default: %(default)s).')
-    dem.add_argument('--contour-step', dest='dem_contour_step', metavar='NUM', type=float, default=200.0,
-                     help='Background topography contour step in meters (default: %(default)s).')
-    dem.add_argument('--contour-linewidth', dest='dem_contour_linewidth', metavar='NUM', type=float, default=0.5,
-                     help='Background topography contour linewidth (default: %(default)s).')
-
-    dem.add_argument('--shade-az', dest='shade_azdeg', type=float, default=315., metavar='DEG',
-                     help='The azimuth (0-360, degrees clockwise from North) of the light source (default: %(default)s).')
-    dem.add_argument('--shade-alt', dest='shade_altdeg', type=float, default=45., metavar='DEG',
-                     help='The altitude (0-90, degrees up from horizontal) of the light source (default: %(default)s).')
-
-    dem.add_argument('--shade-min', dest='shade_min', type=float, default=-4000., metavar='MIN',
-                     help='The min height in m of colormap of shaded relief topography (default: %(default)s).')
-    dem.add_argument('--shade-max', dest='shade_max', type=float, default=999., metavar='MAX',
-                     help='The max height of colormap of shaded relief topography (default: max(DEM)+2000).')
-    dem.add_argument('--shade-exag', dest='shade_exag', type=float, default=0.5,
-                     help='Vertical exaggeration ratio (default: %(default)s).')
-    return parser
-
-
-def add_figure_argument(parser):
-    """Argument group parser for figure options"""
-    fig = parser.add_argument_group('Figure', 'Figure settings for display')
-    fig.add_argument('--fontsize', dest='font_size',
-                     type=int, help='font size')
-    fig.add_argument('--fontcolor', dest='font_color',
-                     default='k', help='font color (default: %(default)s).')
-
-    # axis format
-    fig.add_argument('--nowhitespace', dest='disp_whitespace',
-                     action='store_false', help='do not display white space')
-    fig.add_argument('--noaxis', dest='disp_axis',
-                     action='store_false', help='do not display axis')
-    fig.add_argument('--notick', dest='disp_tick',
-                     action='store_false', help='do not display tick in x/y axis')
-
-    # colormap
-    fig.add_argument('-c', '--colormap', dest='colormap',
-                     help='colormap used for display, i.e. jet, cmy, RdBu, hsv, jet_r, temperature, viridis, etc.\n'
-                          'More at https://mintpy.readthedocs.io/en/latest/api/colormaps/')
-    fig.add_argument('--cm-lut','--cmap-lut', dest='cmap_lut', type=int, default=256, metavar='NUM',
-                     help='number of increment of colormap lookup table (default: %(default)s).')
-    fig.add_argument('--cm-vlist','--cmap-vlist', dest='cmap_vlist', type=float, nargs=3, default=[0.0, 0.7, 1.0],
-                     help='list of 3 float numbers, for truncated colormap only (default: %(default)s).')
-
-    # colorbar
-    fig.add_argument('--nocbar', '--nocolorbar', dest='disp_cbar',
-                     action='store_false', help='do not display colorbar')
-    fig.add_argument('--cbar-nbins', dest='cbar_nbins', metavar='NUM',
-                     type=int, help='number of bins for colorbar.')
-    fig.add_argument('--cbar-ext', dest='cbar_ext', default=None,
-                     choices={'neither', 'min', 'max', 'both', None},
-                     help='Extend setting of colorbar; based on data stat by default.')
-    fig.add_argument('--cbar-label', dest='cbar_label', default=None, help='colorbar label')
-    fig.add_argument('--cbar-loc', dest='cbar_loc', type=str, default='right',
-                     help='colorbar location for single plot (default: %(default)s).')
-    fig.add_argument('--cbar-size', dest='cbar_size', type=str, default="2%",
-                     help='colorbar size and pad (default: %(default)s).')
-
-    # title
-    fig.add_argument('--notitle', dest='disp_title',
-                     action='store_false', help='do not display title')
-    fig.add_argument('--title-in', dest='fig_title_in',
-                     action='store_true', help='draw title in/out of axes')
-    fig.add_argument('--figtitle', dest='fig_title',
-                     help='Title shown in the figure.')
-    fig.add_argument('--title4sen','--title4sentinel1', dest='disp_title4sentinel1', action='store_true',
-                     help='display Sentinel-1 A/B and IPF info in title.')
-
-    # size, subplots number and space
-    fig.add_argument('--figsize', dest='fig_size', metavar=('WID', 'LEN'), type=float, nargs=2,
-                     help='figure size in inches - width and length')
-    fig.add_argument('--dpi', dest='fig_dpi', metavar='DPI', type=int, default=300,
-                     help='DPI - dot per inch - for display/write (default: %(default)s).')
-    fig.add_argument('--figext', dest='fig_ext', default='.png',
-                     choices=['.emf', '.eps', '.pdf', '.png', '.ps', '.raw', '.rgba', '.svg', '.svgz'],
-                     help='File extension for figure output file (default: %(default)s).')
-
-    fig.add_argument('--fignum', dest='fig_num', type=int, metavar='NUM',
-                     help='number of figure windows')
-    fig.add_argument('--nrows', dest='fig_row_num', type=int, default=1, metavar='NUM',
-                     help='subplot number in row')
-    fig.add_argument('--ncols', dest='fig_col_num', type=int, default=1, metavar='NUM',
-                     help='subplot number in column')
-
-    fig.add_argument('--wspace', dest='fig_wid_space', type=float,
-                     help='width space between subplots in inches')
-    fig.add_argument('--hspace', dest='fig_hei_space', type=float,
-                     help='height space between subplots in inches')
-    fig.add_argument('--no-tight-layout', dest='fig_tight_layout', action='store_false',
-                     help='disable automatic tight layout for multiple subplots')
-
-    fig.add_argument('--coord', dest='fig_coord', choices=['radar', 'geo'], default='geo',
-                     help='Display in radar/geo coordination system (for geocoded file only; default: %(default)s).')
-    fig.add_argument('--animation', action='store_true',
-                     help='enable animation mode')
-
-    return parser
-
-
-def add_gps_argument(parser):
-    """Argument group parser for GPS options"""
-    gps = parser.add_argument_group('GPS', 'GPS data to display')
-    gps.add_argument('--show-gps', dest='disp_gps', action='store_true',
-                     help='Show UNR GPS location within the coverage.')
-    gps.add_argument('--mask-gps', dest='mask_gps', action='store_true',
-                     help='Mask out GPS stations not coincident with valid data pixels')
-    gps.add_argument('--gps-label', dest='disp_gps_label', action='store_true',
-                     help='Show GPS site name')
-    gps.add_argument('--gps-ms', dest='gps_marker_size', type=float, default=6,
-                     help='Plot GPS value as scatter in size of ms**2 (default: %(default)s).')
-    gps.add_argument('--gps-comp', dest='gps_component', choices={'enu2los', 'hz2los', 'up2los', 'horz', 'vert'},
-                     help='Plot GPS in color indicating deformation velocity direction')
-    gps.add_argument('--gps-redo', dest='gps_redo', action='store_true',
-                     help='Re-calculate GPS observations in LOS direction, instead of read from existing CSV file.')
-    gps.add_argument('--ref-gps', dest='ref_gps_site', type=str, help='Reference GPS site')
-    gps.add_argument('--ex-gps', dest='ex_gps_sites', type=str, nargs='*', help='Exclude GPS sites, require --gps-comp.')
-
-    gps.add_argument('--gps-start-date', dest='gps_start_date', type=str, metavar='YYYYMMDD',
-                     help='start date of GPS data, default is date of the 1st SAR acquisition')
-    gps.add_argument('--gps-end-date', dest='gps_end_date', type=str, metavar='YYYYMMDD',
-                     help='start date of GPS data, default is date of the last SAR acquisition')
-    gps.add_argument('--horz-az','--hz-az', dest='horz_az_angle', type=float, default=-90.,
-                     help='Azimuth angle (anti-clockwise from the north) of the horizontal movement in degrees\n'
-                             'E.g.: -90. for east  direction [default]\n'
-                             '       0.  for north direction\n'
-                             'Set to the azimuth angle of the strike-slip fault to show the fault-parallel displacement.')
-    return parser
-
-
-def add_mask_argument(parser):
-    """Argument group parser for mask options"""
-    mask = parser.add_argument_group('Mask', 'Mask file/options')
-    mask.add_argument('-m','--mask', dest='mask_file', metavar='FILE',
-                      help='mask file for display. "no" to turn OFF masking.')
-    mask.add_argument('--mask-vmin', dest='mask_vmin', type=float,
-                      help='hide pixels with mask value < vmin (default: %(default)s).')
-    mask.add_argument('--mask-vmax', dest='mask_vmax', type=float,
-                      help='hide pixels with mask value > vmax (default: %(default)s).')
-
-    mask.add_argument('--zm','--zero-mask', dest='zero_mask', action='store_true',
-                      help='mask pixels with zero value.')
-    return parser
-
-
-def add_map_argument(parser):
-    """Argument group parser for map options"""
-    mapg = parser.add_argument_group('Map', 'for one subplot in geo-coordinates only')
-    mapg.add_argument('--coastline', dest='coastline', type=str, choices={'10m', '50m', '110m'},
-                      help="Draw coastline with specified resolution (default: %(default)s).\n"
-                           "This will enable --lalo-label option.\n"
-                           "Link: https://scitools.org.uk/cartopy/docs/latest/matplotlib/geoaxes.html"
-                           "#cartopy.mpl.geoaxes.GeoAxes.coastlines")
-
-    # lalo label
-    mapg.add_argument('--lalo-label', dest='lalo_label', action='store_true',
-                      help='Show N, S, E, W tick label for plot in geo-coordinate.\n'
-                           'Useful for final figure output.')
-    mapg.add_argument('--lalo-step', dest='lalo_step', metavar='DEG',
-                      type=float, help='Lat/lon step for lalo-label option.')
-    mapg.add_argument('--lalo-max-num', dest='lalo_max_num', type=int, default=3, metavar='NUM',
-                      help='Maximum number of lalo tick label (default: %(default)s).')
-    mapg.add_argument('--lalo-loc', dest='lalo_loc', type=int, nargs=4, default=[1, 0, 0, 1],
-                      metavar=('left', 'right', 'top', 'bottom'),
-                      help='Draw lalo label in [left, right, top, bottom] (default: %(default)s).')
-
-    #mapg.add_argument('--proj', '--projection', '--map-proj', dest='map_projection', metavar='NAME',
-    #                  help='map projection when plotting in geo-coordinate.\n'
-    #                       'Default: PlateCarree / UTM for units in degrees / meters.\n'
-    #                       'Check the link below for the full list of supported projections:\n'
-    #                       'https://scitools.org.uk/cartopy/docs/latest/crs/projections.html\n\n')
-
-    # scale bar
-    mapg.add_argument('--scalebar', nargs=3, metavar=('LEN', 'X', 'Y'), type=float,
-                      default=[0.2, 0.2, 0.1],
-                      help='scale bar distance and location in ratio (default: %(default)s).\n' +
-                           '\tdistance in ratio of total width\n' +
-                           '\tlocation in X/Y in ratio with respect to the lower left corner\n' +
-                           '--scalebar 0.2 0.2 0.1  #for lower left  corner\n' +
-                           '--scalebar 0.2 0.2 0.8  #for upper left  corner\n' +
-                           '--scalebar 0.2 0.8 0.1  #for lower right corner\n' +
-                           '--scalebar 0.2 0.8 0.8  #for upper right corner\n')
-    mapg.add_argument('--noscalebar', '--nosbar', dest='disp_scalebar',
-                      action='store_false', help='do not display scale bar.')
-    mapg.add_argument('--scalebar-pad','--sbar-pad', dest='scalebar_pad', type=float,
-                      default=0.05, help='scale bar label pad in ratio of scalebar width (default: %(default)s).')
-    return parser
-
-
-def add_memory_argument(parser):
-    """Argument parser for memory usage options"""
-    parser.add_argument('--ram', '--memory', dest='maxMemory', type=float, default=4.0,
-                        help='Max amount of memory in GB to use (default: %(default)s).\n' +
-                             'Adjust according to your computer memory.')
-    return parser
-
-
-def add_parallel_argument(parser):
-    """Argument group parser for parallel computing options"""
-    from mintpy.objects.cluster import CLUSTER_LIST
-
-    par = parser.add_argument_group('parallel', 'parallel processing using dask')
-    par.add_argument('-c', '--cluster', '--cluster-type', dest='cluster', type=str,
-                     choices=CLUSTER_LIST,
-                     help='Cluster to use for parallel computing (default: %(default)s to turn OFF).')
-    par.add_argument('--num-worker', dest='numWorker', type=str, default='4',
-                     help='Number of workers to use (default: %(default)s).')
-    par.add_argument('--config', '--config-name', dest='config', type=str, default=None,
-                     help='Configuration name to use in dask.yaml (default: %(default)s).')
-    return parser
-
-
-def add_point_argument(parser):
-    """Argument group parser for point display options"""
-    ppt = parser.add_argument_group('Point', 'Plot points defined by y/x or lat/lon')
-    ppt.add_argument('--pts-marker', dest='pts_marker', type=str, default='k^',
-                     help='Marker of points of interest (default: %(default)s).')
-    ppt.add_argument('--pts-ms', dest='pts_marker_size', type=float, default=6.,
-                     help='Marker size for points of interest (default: %(default)s).')
-
-    pts = ppt.add_mutually_exclusive_group(required=False)
-    pts.add_argument('--pts-yx', dest='pts_yx', type=int, nargs=2, metavar=('Y', 'X'),
-                     help='Point in Y/X')
-    pts.add_argument('--pts-lalo', dest='pts_lalo', type=float, nargs=2, metavar=('LAT', 'LON'),
-                     help='Point in Lat/Lon')
-    pts.add_argument('--pts-file', dest='pts_file', type=str,
-                     help='Text file for point(s) in lat/lon column')
-    return parser
-
-
-def add_reference_argument(parser):
-    """Argument group parser for (spatial / temporal) referencing options"""
-    ref = parser.add_argument_group('Reference', 'Show / Modify reference in time and space for display')
-    # reference date
-    ref.add_argument('--ref-date', dest='ref_date', metavar='DATE',
-                     help='Change reference date for display')
-
-    # reference pixel
-    ref.add_argument('--ref-lalo', dest='ref_lalo', metavar=('LAT', 'LON'), type=float, nargs=2,
-                     help='Change referene point LAT LON for display')
-    ref.add_argument('--ref-yx', dest='ref_yx', metavar=('Y', 'X'), type=int, nargs=2,
-                     help='Change referene point Y X for display')
-
-    # reference pixel style
-    ref.add_argument('--noreference', dest='disp_ref_pixel',
-                     action='store_false', help='do not show reference point')
-    ref.add_argument('--ref-marker', dest='ref_marker', default='ks',
-                     help='marker of reference pixel (default: %(default)s).')
-    ref.add_argument('--ref-size', dest='ref_marker_size', metavar='NUM', type=int, default=6,
-                     help='marker size of reference point (default: %(default)s).')
-    return parser
-
-
-def add_save_argument(parser):
-    """Argument group parser for figure save options"""
-    save = parser.add_argument_group('Save/Output', 'Save figure and write to file(s)')
-    save.add_argument('-o', '--outfile', type=str, nargs='*',
-                      help="save the figure with assigned filename.\n"
-                           "By default, it's calculated based on the input file name.")
-    save.add_argument('--save', dest='save_fig', action='store_true',
-                      help='save the figure')
-    save.add_argument('--nodisplay', dest='disp_fig', action='store_false',
-                      help='save and do not display the figure')
-    save.add_argument('--update', dest='update_mode', action='store_true',
-                      help='enable update mode for save figure: skip running if\n'+
-                           '\t1) output file already exists AND\n'+
-                           '\t2) output file is newer than input file.')
-    return parser
-
-
-def add_subset_argument(parser, geo=True):
-    """Argument group parser for subset options"""
-    sub = parser.add_argument_group('Subset', 'Display dataset in subset range')
-    sub.add_argument('--sub-x','--subx','--subset-x', dest='subset_x', type=int, nargs=2, metavar=('XMIN', 'XMAX'),
-                     help='subset display in x/cross-track/range direction')
-    sub.add_argument('--sub-y','--suby','--subset-y', dest='subset_y', type=int, nargs=2, metavar=('YMIN', 'YMAX'),
-                     help='subset display in y/along-track/azimuth direction')
-    if geo:
-        sub.add_argument('--sub-lat','--sublat','--subset-lat', dest='subset_lat', type=float, nargs=2, metavar=('LATMIN', 'LATMAX'),
-                         help='subset display in latitude')
-        sub.add_argument('--sub-lon','--sublon','--subset-lon', dest='subset_lon', type=float, nargs=2, metavar=('LONMIN', 'LONMAX'),
-                         help='subset display in longitude')
-    return parser
-
-
-def add_timefunc_argument(parser):
-    """Argument group parser for time functions"""
-    model = parser.add_argument_group('Deformation Model', 'A suite of time functions')
-    model.add_argument('--poly', '--polynomial', '--poly-order', dest='polynomial', type=int, default=1,
-                      help='a polynomial function with the input degree (default: %(default)s). E.g.:\n' +
-                           '--poly 1                                  # linear\n' +
-                           '--poly 2                                  # quadratic\n' +
-                           '--poly 3                                  # cubic\n')
-    model.add_argument('--periodic', '--period', '--peri', dest='periodic', type=float, nargs='+', default=[],
-                      help='periodic function(s) with period in decimal years (default: %(default)s). E.g.:\n' +
-                           '--periodic 1.0                            # an annual cycle\n' +
-                           '--periodic 1.0 0.5                        # an annual cycle plus a semi-annual cycle\n')
-    model.add_argument('--step', dest='step', type=str, nargs='+', default=[],
-                      help='step function(s) at YYYYMMDD (default: %(default)s). E.g.:\n' +
-                           '--step 20061014                           # coseismic step  at 2006-10-14T00:00\n' +
-                           '--step 20110311 20120928T1733             # coseismic steps at 2011-03-11T00:00 and 2012-09-28T17:33\n')
-    model.add_argument('--exp', '--exponential', dest='exp', type=str, nargs='+', action='append', default=[],
-                      help='exponential function(s) at YYYYMMDD with characteristic time(s) tau in decimal days (default: %(default)s). E.g.:\n' +
-                           '--exp  20181026 60                        # exp onset at 2006-10-14T00:00 with tau=60 days\n' +
-                           '--exp  20181026T1355 60 120               # exp onset at 2006-10-14T13:55 with tau=60 days overlayed by a tau=145 days\n' +
-                           '--exp  20161231 80.5 --exp 20190125 100   # 1st exp onset at 2011-03-11 with tau=80.5 days and\n' +
-                           '                                          # 2nd exp onset at 2012-09-28 with tau=100  days')
-    model.add_argument('--log', '--logarithmic', dest='log', type=str, nargs='+', action='append', default=[],
-                      help='logarithmic function(s) at YYYYMMDD with characteristic time(s) tau in decimal days (default: %(default)s). E.g.:\n' +
-                           '--log  20181016 90.4                      # log onset at 2006-10-14T00:00 with tau=90.4 days\n' +
-                           '--log  20181016T1733 90.4 240             # log onset at 2006-10-14T17:33 with tau=90.4 days overlayed by a tau=240 days\n' +
-                           '--log  20161231 60 --log 20190125 180.2   # 1st log onset at 2011-03-11 with tau=60 days and\n' +
-                           '                                          # 2nd log onset at 2012-09-28 with tau=180.2 days\n')
-    return parser
diff -pruN 1.3.3-2/mintpy/utils/arg_utils.py 1.4.0-1/mintpy/utils/arg_utils.py
--- 1.3.3-2/mintpy/utils/arg_utils.py	1970-01-01 00:00:00.000000000 +0000
+++ 1.4.0-1/mintpy/utils/arg_utils.py	2022-08-04 20:01:49.000000000 +0000
@@ -0,0 +1,448 @@
+#!/usr/bin/env python3
+#############################################################
+# Program is part of MintPy                                 #
+# Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi          #
+# Author: Zhang Yunjun, Nov 2020                            #
+#############################################################
+# Recommend import:
+#     from mintpy.utils import arg_utils
+#     from mintpy.utils.arg_utils import create_argument_parser
+
+
+import argparse
+import numpy as np
+
+
+##################################  generic parser  ####################################
+def create_argument_parser(name=None, synopsis=None, description=None, epilog=None,
+                           subparsers=None, formatter_class=argparse.RawTextHelpFormatter):
+    """Create an argument parser.
+
+    Parameters: name            - str, sub-command name, for sub-parser
+                synopsis        - str, a brief summary of the script, for sub-parser
+                description     - str, same as synopsis, plus optional note
+                epilog          - str, reference, template options and example usage
+                subparsers      - argparse._SubParsersAction
+                                  https://docs.python.org/3/library/argparse.html#sub-commands
+                formatter_class - argparse formatting class object
+                                  https://docs.python.org/3/library/argparse.html#formatter-class
+    Returns:    parser          - argparse.ArgumentParser object
+    Examples:
+        def create_parser(subparsers=None):
+            synopsis = 'Resample radar-coded files into geo-coordinates or vice versa.'
+            epilog = REFERENCE + '\n' + EXAMPLE
+            name = __name__.split('.')[-1]
+            parser = create_argument_parser(
+                name, synopsis=synopsis, description=synopsis+NOTE, epilog=epilog, subparsers=subparsers)
+    """
+    if subparsers:
+        # for mintpy sub-command [used in linux with apt install]
+        parser = subparsers.add_parser(
+            name, description=description, formatter_class=formatter_class, epilog=epilog, help=synopsis)
+
+    else:
+        # for regular command usage
+        parser = argparse.ArgumentParser(
+            description=description, formatter_class=formatter_class, epilog=epilog)
+
+    return parser
+
+
+
+##################################  argument group  ####################################
+def add_data_disp_argument(parser):
+    """Argument group parser for data display options"""
+    data = parser.add_argument_group('Data Display Options', 'Options to adjust the dataset display')
+    data.add_argument('-v','--vlim', dest='vlim', nargs=2, metavar=('VMIN', 'VMAX'), type=float,
+                      help='Display limits for matrix plotting.')
+    data.add_argument('-u', '--unit', dest='disp_unit', metavar='UNIT',
+                      help='unit for display; it takes precedence over --wrap.')
+    data.add_argument('--nd','--no-data-val','--no-data-value', dest='no_data_value', type=float,
+                      help='Specify the no-data-value to be ignored and masked.')
+
+    data.add_argument('--wrap', action='store_true',
+                      help='re-wrap data to display data in fringes.')
+    data.add_argument('--wrap-range', dest='wrap_range', type=float, nargs=2,
+                      default=[-1.*np.pi, np.pi], metavar=('MIN', 'MAX'),
+                      help='range of one cycle after wrapping (default: %(default)s).')
+
+    data.add_argument('--flip-lr', dest='flip_lr',
+                      action='store_true', help='flip left-right')
+    data.add_argument('--flip-ud', dest='flip_ud',
+                      action='store_true', help='flip up-down')
+    data.add_argument('--noflip', dest='auto_flip', action='store_false',
+                      help='turn off auto flip for radar coordinate file')
+
+    data.add_argument('--nmli','--num-multilook','--multilook-num', dest='multilook_num',
+                      type=int, default=1, metavar='NUM',
+                      help='multilook data in X and Y direction with a factor for display '
+                           '(default: %(default)s).')
+    data.add_argument('--nomultilook', '--no-multilook', dest='multilook', action='store_false',
+                      help='do not multilook, for high quality display. \n'
+                           'If multilook is True and multilook_num=1, '
+                           'multilook_num will be estimated automatically.\n'
+                           'Useful when displaying big datasets.')
+    data.add_argument('--alpha', dest='transparency', type=float,
+                      help='Data transparency. \n'
+                           '0.0 - fully transparent, 1.0 - no transparency.')
+    return parser
+
+
+def add_dem_argument(parser):
+    """Argument group parser for DEM display options"""
+    dem = parser.add_argument_group('DEM', 'display topography in the background')
+    dem.add_argument('-d', '--dem', dest='dem_file', metavar='DEM_FILE',
+                     help='DEM file to show topography as background')
+    dem.add_argument('--mask-dem', dest='mask_dem', action='store_true',
+                     help='Mask out DEM pixels not coincident with valid data pixels')
+    dem.add_argument('--dem-noshade', dest='disp_dem_shade', action='store_false',
+                     help='do not show DEM shaded relief')
+    dem.add_argument('--dem-nocontour', dest='disp_dem_contour', action='store_false',
+                     help='do not show DEM contour lines')
+
+    dem.add_argument('--contour-smooth', dest='dem_contour_smooth', type=float, default=3.0,
+                     help='Background topography contour smooth factor - sigma of Gaussian filter. \n'
+                          'Set to 0.0 for no smoothing; (default: %(default)s).')
+    dem.add_argument('--contour-step', dest='dem_contour_step', metavar='NUM', type=float, default=200.0,
+                     help='Background topography contour step in meters (default: %(default)s).')
+    dem.add_argument('--contour-lw','--contour-linewidth', dest='dem_contour_linewidth',
+                     metavar='NUM', type=float, default=0.5,
+                     help='Background topography contour linewidth (default: %(default)s).')
+
+    dem.add_argument('--shade-az', dest='shade_azdeg', type=float, default=315., metavar='DEG',
+                     help='The azimuth (0-360, degrees clockwise from North) of the light source '
+                          '(default: %(default)s).')
+    dem.add_argument('--shade-alt', dest='shade_altdeg', type=float, default=45., metavar='DEG',
+                     help='The altitude (0-90, degrees up from horizontal) of the light source '
+                          '(default: %(default)s).')
+
+    dem.add_argument('--shade-min', dest='shade_min', type=float, default=-4000., metavar='MIN',
+                     help='The min height in m of colormap of shaded relief topography (default: %(default)s).')
+    dem.add_argument('--shade-max', dest='shade_max', type=float, default=999., metavar='MAX',
+                     help='The max height of colormap of shaded relief topography (default: max(DEM)+2000).')
+    dem.add_argument('--shade-exag', dest='shade_exag', type=float, default=0.5,
+                     help='Vertical exaggeration ratio (default: %(default)s).')
+    return parser
+
+
+def add_figure_argument(parser):
+    """Argument group parser for figure options"""
+    fig = parser.add_argument_group('Figure', 'Figure settings for display')
+    fig.add_argument('--fontsize', dest='font_size',
+                     type=int, help='font size')
+    fig.add_argument('--fontcolor', dest='font_color',
+                     default='k', help='font color (default: %(default)s).')
+
+    # axis format
+    fig.add_argument('--nowhitespace', dest='disp_whitespace',
+                     action='store_false', help='do not display white space')
+    fig.add_argument('--noaxis', dest='disp_axis',
+                     action='store_false', help='do not display axis')
+    fig.add_argument('--notick', dest='disp_tick',
+                     action='store_false', help='do not display tick in x/y axis')
+    fig.add_argument('--ylabel-rot', dest='ylabel_rot', type=float,
+                     help='Y-axis tick label rotation in degrees anti-clockwise (default: %(default)s).\n'
+                          'Set to 90 for vertical y-axis tick labels')
+
+    # colormap
+    fig.add_argument('-c', '--colormap', dest='colormap',
+                     help='colormap used for display, i.e. jet, cmy, RdBu, hsv, jet_r, viridis, etc.\n'
+                          'More at https://mintpy.readthedocs.io/en/latest/api/colormaps/')
+    fig.add_argument('--cm-lut','--cmap-lut', dest='cmap_lut', type=int, default=256, metavar='NUM',
+                     help='number of increments in the colormap lookup table (default: %(default)s).')
+    fig.add_argument('--cm-vlist','--cmap-vlist', dest='cmap_vlist', type=float, nargs=3, default=[0.0, 0.7, 1.0],
+                     help='list of 3 float numbers, for truncated colormap only (default: %(default)s).')
+
+    # colorbar
+    fig.add_argument('--nocbar', '--nocolorbar', dest='disp_cbar',
+                     action='store_false', help='do not display colorbar')
+    fig.add_argument('--cbar-nbins', dest='cbar_nbins', metavar='NUM',
+                     type=int, help='number of bins for colorbar.')
+    fig.add_argument('--cbar-ext', dest='cbar_ext', default=None,
+                     choices={'neither', 'min', 'max', 'both', None},
+                     help='Extend setting of colorbar; based on data stat by default.')
+    fig.add_argument('--cbar-label', dest='cbar_label', default=None, help='colorbar label')
+    fig.add_argument('--cbar-loc', dest='cbar_loc', type=str, default='right',
+                     help='colorbar location for single plot (default: %(default)s).')
+    fig.add_argument('--cbar-size', dest='cbar_size', type=str, default="2%",
+                     help='colorbar size and pad (default: %(default)s).')
+
+    # title
+    fig.add_argument('--notitle', dest='disp_title',
+                     action='store_false', help='do not display title')
+    fig.add_argument('--title-in', dest='fig_title_in',
+                     action='store_true', help='draw title in/out of axes')
+    fig.add_argument('--title','--fig-title','--figtitle', dest='fig_title',
+                     help='Title shown in the figure.')
+    fig.add_argument('--title4sen','--title4sentinel1', dest='disp_title4sentinel1', action='store_true',
+                     help='display Sentinel-1 A/B and IPF info in title.')
+
+    # size, subplots number and space
+    fig.add_argument('--figsize', dest='fig_size', metavar=('WID', 'LEN'), type=float, nargs=2,
+                     help='figure size in inches - width and length')
+    fig.add_argument('--dpi', dest='fig_dpi', metavar='DPI', type=int, default=300,
+                     help='DPI - dot per inch - for display/write (default: %(default)s).')
+    fig.add_argument('--figext', dest='fig_ext', default='.png',
+                     choices=['.emf', '.eps', '.pdf', '.png', '.ps', '.raw', '.rgba', '.svg', '.svgz'],
+                     help='File extension for figure output file (default: %(default)s).')
+
+    fig.add_argument('--fignum', dest='fig_num', type=int, metavar='NUM',
+                     help='number of figure windows')
+    fig.add_argument('--nrows', dest='fig_row_num', type=int, default=1, metavar='NUM',
+                     help='subplot number in row')
+    fig.add_argument('--ncols', dest='fig_col_num', type=int, default=1, metavar='NUM',
+                     help='subplot number in column')
+
+    fig.add_argument('--wspace', dest='fig_wid_space', type=float,
+                     help='width space between subplots in inches')
+    fig.add_argument('--hspace', dest='fig_hei_space', type=float,
+                     help='height space between subplots in inches')
+    fig.add_argument('--no-tight-layout', dest='fig_tight_layout', action='store_false',
+                     help='disable automatic tight layout for multiple subplots')
+
+    fig.add_argument('--coord', dest='fig_coord', choices=['radar', 'geo'], default='geo',
+                     help='Display in radar/geo coordination system '
+                          '(for geocoded file only; default: %(default)s).')
+    fig.add_argument('--animation', action='store_true',
+                     help='enable animation mode')
+
+    return parser
+
+
+def add_gps_argument(parser):
+    """Argument group parser for GPS options"""
+    gps = parser.add_argument_group('GPS', 'GPS data to display')
+    gps.add_argument('--show-gps', dest='disp_gps', action='store_true',
+                     help='Show UNR GPS location within the coverage.')
+    gps.add_argument('--mask-gps', dest='mask_gps', action='store_true',
+                     help='Mask out GPS stations not coincident with valid data pixels')
+    gps.add_argument('--gps-label', dest='disp_gps_label', action='store_true',
+                     help='Show GPS site name')
+    gps.add_argument('--gps-ms', dest='gps_marker_size', type=float, default=6,
+                     help='Plot GPS value as scatter in size of ms**2 (default: %(default)s).')
+    gps.add_argument('--gps-comp', dest='gps_component',
+                     choices={'enu2los', 'hz2los', 'up2los', 'horz', 'vert'},
+                     help='Plot GPS in color indicating deformation velocity direction')
+    gps.add_argument('--gps-redo', dest='gps_redo', action='store_true',
+                     help='Re-calculate GPS observations in LOS direction, '
+                          'instead of reading from the existing CSV file.')
+    gps.add_argument('--ref-gps', dest='ref_gps_site', type=str, help='Reference GPS site')
+    gps.add_argument('--ex-gps', dest='ex_gps_sites', type=str, nargs='*',
+                     help='Exclude GPS sites, require --gps-comp.')
+
+    gps.add_argument('--gps-start-date', dest='gps_start_date', type=str, metavar='YYYYMMDD',
+                     help='start date of GPS data, default is date of the 1st SAR acquisition')
+    gps.add_argument('--gps-end-date', dest='gps_end_date', type=str, metavar='YYYYMMDD',
+                     help='end date of GPS data, default is date of the last SAR acquisition')
+    gps.add_argument('--horz-az','--hz-az', dest='horz_az_angle', type=float, default=-90.,
+                     help='Azimuth angle (anti-clockwise from the north) of the horizontal movement in degrees\n'
+                          'E.g.: -90. for east  direction [default]\n'
+                          '       0.  for north direction\n'
+                          'Set to the azimuth angle of the strike-slip fault to '
+                          'show the fault-parallel displacement.')
+    return parser
+
+
+def add_mask_argument(parser):
+    """Argument group parser for mask options"""
+    mask = parser.add_argument_group('Mask', 'Mask file/options')
+    mask.add_argument('-m','--mask', dest='mask_file', metavar='FILE',
+                      help='mask file for display. "no" to turn OFF masking.')
+    mask.add_argument('--mask-vmin', dest='mask_vmin', type=float,
+                      help='hide pixels with mask value < vmin (default: %(default)s).')
+    mask.add_argument('--mask-vmax', dest='mask_vmax', type=float,
+                      help='hide pixels with mask value > vmax (default: %(default)s).')
+
+    mask.add_argument('--zm','--zero-mask', dest='zero_mask', action='store_true',
+                      help='mask pixels with zero value.')
+    return parser
+
+
+def add_map_argument(parser):
+    """Argument group parser for map options"""
+    mapg = parser.add_argument_group('Map', 'for one subplot in geo-coordinates only')
+    mapg.add_argument('--coastline', dest='coastline', type=str, choices={'10m', '50m', '110m'},
+                      help="Draw coastline with specified resolution (default: %(default)s).\n"
+                           "This will enable --lalo-label option.\n"
+                           "Link: https://scitools.org.uk/cartopy/docs/latest/matplotlib/geoaxes.html"
+                           "#cartopy.mpl.geoaxes.GeoAxes.coastlines")
+    mapg.add_argument('--coastline-lw', '--coastline-linewidth', dest='coastline_linewidth',
+                      metavar='NUM', type=float, default=1,
+                      help='Coastline linewidth (default: %(default)s).')
+
+    # lalo label
+    mapg.add_argument('--lalo-label', dest='lalo_label', action='store_true',
+                      help='Show N, S, E, W tick label for plot in geo-coordinate.\n'
+                           'Useful for final figure output.')
+    mapg.add_argument('--lalo-step', dest='lalo_step', metavar='DEG',
+                      type=float, help='Lat/lon step for lalo-label option.')
+    mapg.add_argument('--lalo-max-num', dest='lalo_max_num', type=int, default=3, metavar='NUM',
+                      help='Maximum number of lalo tick label (default: %(default)s).')
+    mapg.add_argument('--lalo-loc', dest='lalo_loc', type=int, nargs=4, default=[1, 0, 0, 1],
+                      metavar=('left', 'right', 'top', 'bottom'),
+                      help='Draw lalo label in [left, right, top, bottom] (default: %(default)s).')
+    mapg.add_argument('--lalo-off','--lalo-offset', dest='lalo_offset', type=float, nargs=2,
+                      help='Distance between tick and label in points (default: %(default)s).\n'
+                           'Set to negative value, e.g. -36 -18, to move the ticklabel inside the plot.')
+    mapg.add_argument('--lalo-fs','--lalo-fontsize', dest='lalo_font_size', type=float,
+                      help='Lalo label font size in points (default: %(default)s).')
+
+    #mapg.add_argument('--proj', '--projection', '--map-proj', dest='map_projection', metavar='NAME',
+    #                  help='map projection when plotting in geo-coordinate.\n'
+    #                       'Default: PlateCarree / UTM for units in degrees / meters.\n'
+    #                       'Check the link below for the full list of supported projections:\n'
+    #                       'https://scitools.org.uk/cartopy/docs/latest/crs/projections.html\n\n')
+
+    # scale bar
+    mapg.add_argument('--scalebar', nargs=3, metavar=('LEN', 'X', 'Y'), type=float,
+                      default=[0.2, 0.2, 0.1],
+                      help='scale bar distance and location in ratio (default: %(default)s).\n' +
+                           '\tdistance in ratio of total width\n' +
+                           '\tlocation in X/Y in ratio with respect to the lower left corner\n' +
+                           '--scalebar 0.2 0.2 0.1  #for lower left  corner\n' +
+                           '--scalebar 0.2 0.2 0.8  #for upper left  corner\n' +
+                           '--scalebar 0.2 0.8 0.1  #for lower right corner\n' +
+                           '--scalebar 0.2 0.8 0.8  #for upper right corner\n')
+    mapg.add_argument('--noscalebar', '--nosbar', dest='disp_scalebar',
+                      action='store_false', help='do not display scale bar.')
+    mapg.add_argument('--scalebar-pad','--sbar-pad', dest='scalebar_pad', type=float, default=0.05,
+                      help='scale bar label pad in ratio of scalebar width (default: %(default)s).')
+    return parser
+
+
+def add_memory_argument(parser):
+    """Argument parser for memory usage options"""
+    parser.add_argument('--ram', '--memory', dest='maxMemory', type=float, default=4.0,
+                        help='Max amount of memory in GB to use (default: %(default)s).\n' +
+                             'Adjust according to your computer memory.')
+    return parser
+
+
+def add_parallel_argument(parser):
+    """Argument group parser for parallel computing options"""
+    from mintpy.objects.cluster import CLUSTER_LIST
+
+    par = parser.add_argument_group('parallel', 'parallel processing using dask')
+    par.add_argument('-c', '--cluster', '--cluster-type', dest='cluster', type=str,
+                     choices=CLUSTER_LIST,
+                     help='Cluster to use for parallel computing (default: %(default)s to turn OFF).')
+    par.add_argument('--num-worker', dest='numWorker', type=str, default='4',
+                     help='Number of workers to use (default: %(default)s).')
+    par.add_argument('--config', '--config-name', dest='config', type=str, default=None,
+                     help='Configuration name to use in dask.yaml (default: %(default)s).')
+    return parser
+
+
+def add_point_argument(parser):
+    """Argument group parser for point display options"""
+    ppt = parser.add_argument_group('Point', 'Plot points defined by y/x or lat/lon')
+    ppt.add_argument('--pts-marker', dest='pts_marker', type=str, default='k^',
+                     help='Marker of points of interest (default: %(default)s).')
+    ppt.add_argument('--pts-ms', dest='pts_marker_size', type=float, default=6.,
+                     help='Marker size for points of interest (default: %(default)s).')
+
+    pts = ppt.add_mutually_exclusive_group(required=False)
+    pts.add_argument('--pts-yx', dest='pts_yx', type=int, nargs=2, metavar=('Y', 'X'),
+                     help='Point in Y/X')
+    pts.add_argument('--pts-lalo', dest='pts_lalo', type=float, nargs=2, metavar=('LAT', 'LON'),
+                     help='Point in Lat/Lon')
+    pts.add_argument('--pts-file', dest='pts_file', type=str,
+                     help='Text file for point(s) in lat/lon column')
+    return parser
+
+
+def add_reference_argument(parser, plot=True):
+    """Argument group parser for (spatial / temporal) referencing options"""
+
+    goal = 'display' if plot else 'estimation'
+    ref = parser.add_argument_group('Reference date / point',
+                                    f'Modify reference in time / space for {goal}')
+
+    # reference date / pixel
+    ref.add_argument('--ref-date', dest='ref_date', metavar='DATE',
+                     help=f'Change reference date for {goal}')
+    ref.add_argument('--ref-lalo', dest='ref_lalo', metavar=('LAT', 'LON'), type=float, nargs=2,
+                     help=f'Change reference point in LAT/LON for {goal}')
+    ref.add_argument('--ref-yx', dest='ref_yx', metavar=('Y', 'X'), type=int, nargs=2,
+                     help=f'Change reference point in Y/X for {goal}')
+
+    # reference pixel - plotting style
+    if plot:
+        ref.add_argument('--noreference', dest='disp_ref_pixel',
+                         action='store_false', help='do not show reference point')
+        ref.add_argument('--ref-marker', dest='ref_marker', default='ks',
+                         help='marker of reference pixel (default: %(default)s).')
+        ref.add_argument('--ref-size', dest='ref_marker_size', metavar='NUM', type=int, default=6,
+                         help='marker size of reference point (default: %(default)s).')
+    return parser
+
+
+def add_save_argument(parser):
+    """Argument group parser for figure save options"""
+    save = parser.add_argument_group('Save/Output', 'Save figure and write to file(s)')
+    save.add_argument('-o', '--outfile', type=str, nargs='*',
+                      help="save the figure with assigned filename.\n"
+                           "By default, it's calculated based on the input file name.")
+    save.add_argument('--save', dest='save_fig', action='store_true',
+                      help='save the figure')
+    save.add_argument('--nodisplay', dest='disp_fig', action='store_false',
+                      help='save and do not display the figure')
+    save.add_argument('--update', dest='update_mode', action='store_true',
+                      help='enable update mode for save figure: skip running if\n'+
+                           '\t1) output file already exists AND\n'+
+                           '\t2) output file is newer than input file.')
+    return parser
+
+
+def add_subset_argument(parser, geo=True):
+    """Argument group parser for subset options"""
+    sub = parser.add_argument_group('Subset', 'Display dataset in subset range')
+    sub.add_argument('--sub-x','--subx','--subset-x', dest='subset_x', type=int, nargs=2,
+                     metavar=('XMIN', 'XMAX'), help='subset display in x/cross-track/range direction')
+    sub.add_argument('--sub-y','--suby','--subset-y', dest='subset_y', type=int, nargs=2,
+                     metavar=('YMIN', 'YMAX'), help='subset display in y/along-track/azimuth direction')
+    if geo:
+        sub.add_argument('--sub-lat','--sublat','--subset-lat', dest='subset_lat', type=float, nargs=2,
+                         metavar=('LATMIN', 'LATMAX'), help='subset display in latitude')
+        sub.add_argument('--sub-lon','--sublon','--subset-lon', dest='subset_lon', type=float, nargs=2,
+                         metavar=('LONMIN', 'LONMAX'), help='subset display in longitude')
+    return parser
+
+
+def add_timefunc_argument(parser):
+    """Argument group parser for time functions"""
+    model = parser.add_argument_group('Deformation Model', 'A suite of time functions')
+
+    model.add_argument('--poly', '--polynomial', '--poly-order', dest='polynomial', type=int, default=1,
+                       help='a polynomial function with the input degree (default: %(default)s). E.g.:\n'
+                            '--poly 1                               # linear\n'
+                            '--poly 2                               # quadratic\n'
+                            '--poly 3                               # cubic\n')
+
+    model.add_argument('--periodic', '--period', '--peri', dest='periodic', type=float, nargs='+', default=[],
+                       help='periodic function(s) with period in decimal years (default: %(default)s). E.g.:\n'
+                            '--periodic 1.0                         # an annual cycle\n'
+                            '--periodic 1.0 0.5                     # an annual cycle plus a semi-annual cycle\n')
+
+    model.add_argument('--step','--step-date', dest='stepDate', type=str, nargs='+', default=[],
+                       help='step function(s) at YYYYMMDD (default: %(default)s). E.g.:\n'
+                            '--step 20061014                        # coseismic step  at 2006-10-14T00:00\n'
+                            '--step 20110311 20120928T1733          # coseismic steps at 2011-03-11T00:00 and 2012-09-28T17:33\n')
+
+    model.add_argument('--exp', '--exponential', dest='exp', type=str, nargs='+', action='append', default=[],
+                       help='exponential function(s) defined by onset time(s) and characteristic time(s) tau in days (default: %(default)s). E.g.:\n'
+                            '--exp  20181026 60                     # one exp w/ onset at 2018-10-26       w/ tau=60  days\n'
+                            '--exp  20181026T1355 60 120            # 1st exp w/ onset at 2018-10-26T13:55 w/ tau=60  days\n'
+                            '                                       # 2nd exp w/ onset at 2018-10-26T13:55 w/ tau=120 days\n'
+                            '--exp  20161231 80 --exp 20190125 100  # 1st exp w/ onset at 2016-12-31       w/ tau=80  days\n'
+                            '                                       # 2nd exp w/ onset at 2019-01-25       w/ tau=100 days')
+
+    model.add_argument('--log', '--logarithmic', dest='log', type=str, nargs='+', action='append', default=[],
+                       help='logarithmic function(s) defined by onset time(s) and characteristic time(s) tau in days (default: %(default)s). E.g.:\n'
+                            '--log  20181016 90                     # one log w/ onset at 2018-10-16       w/ tau=90  days\n'
+                            '--log  20181016T1733 90 240            # 1st log w/ onset at 2018-10-16T17:33 w/ tau=90  days\n'
+                            '                                       # 2nd log w/ onset at 2018-10-16T17:33 w/ tau=240 days\n'
+                            '--log  20161231 60 --log 20190125 180  # 1st log w/ onset at 2016-12-31       w/ tau=60  days\n'
+                            '                                       # 2nd log w/ onset at 2019-01-25       w/ tau=180 days\n')
+
+    return parser
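+
+
+# Usage sketch (illustrative values): a linear trend plus an annual cycle and
+# a coseismic step on 2011-03-11:
+#   import argparse
+#   parser = add_timefunc_argument(argparse.ArgumentParser())
+#   args = parser.parse_args('--poly 1 --periodic 1.0 --step 20110311'.split())
+#   # args.polynomial == 1; args.periodic == [1.0]; args.stepDate == ['20110311']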
diff -pruN 1.3.3-2/mintpy/utils/attribute.py 1.4.0-1/mintpy/utils/attribute.py
--- 1.3.3-2/mintpy/utils/attribute.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/utils/attribute.py	2022-08-04 20:01:49.000000000 +0000
@@ -15,6 +15,42 @@ from mintpy.utils import readfile
 
 
 
+def update_attribute4resize(atr_in, resize2shape, print_msg=True):
+    """update input dictionary of attributes due to resizing
+
+    Parameters: atr_in       - dict, input dictionary of attributes
+                resize2shape - tuple of 2 int, for the resized shape
+                print_msg    - bool, print verbose messages
+    Returns:    atr          - dict, updated dictionary of attributes
+    """
+    vprint = print if print_msg else lambda *args, **kwargs: None
+    # make a copy of original meta dict
+    atr = {**atr_in}
+
+    yscale = int(atr['LENGTH']) / resize2shape[0]
+    xscale = int(atr['WIDTH']) / resize2shape[1]
+    vprint('output data in size: {}, {}'.format(resize2shape[0], resize2shape[1]))
+
+    atr['LENGTH'] = resize2shape[0]
+    atr['WIDTH'] = resize2shape[1]
+    atr['ALOOKS'] = np.rint(int(atr.get('ALOOKS', 1)) * yscale).astype(int)
+    atr['RLOOKS'] = np.rint(int(atr.get('RLOOKS', 1)) * xscale).astype(int)
+    vprint('update LENGTH, WIDTH, Y/XMIN/MAX, A/RLOOKS')
+
+    if 'AZIMUTH_PIXEL_SIZE' in atr.keys():
+        atr['AZIMUTH_PIXEL_SIZE'] = float(atr['AZIMUTH_PIXEL_SIZE']) * yscale
+        vprint('update AZIMUTH_PIXEL_SIZE')
+
+    if 'RANGE_PIXEL_SIZE' in atr.keys():
+        atr['RANGE_PIXEL_SIZE'] = float(atr['RANGE_PIXEL_SIZE']) * xscale
+        vprint('update RANGE_PIXEL_SIZE')
+
+    if 'NCORRLOOKS' in atr.keys():
+        atr['NCORRLOOKS'] = float(atr['NCORRLOOKS']) * yscale * xscale
+        vprint('update NCORRLOOKS')
+
+    return atr
+
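+# Worked example (sketch, illustrative values): halving a 1000x2000 dataset
+# doubles the effective looks and pixel sizes:
+#   atr = update_attribute4resize(
+#       {'LENGTH': '1000', 'WIDTH': '2000', 'ALOOKS': '2', 'RLOOKS': '8',
+#        'RANGE_PIXEL_SIZE': '2.3'},
+#       resize2shape=(500, 1000), print_msg=False)
+#   # atr['ALOOKS'] == 4; atr['RLOOKS'] == 16; atr['RANGE_PIXEL_SIZE'] == 4.6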
+
 def update_attribute4multilook(atr_in, lks_y, lks_x, box=None, print_msg=True):
     """update input dictionary of attributes due to multilooking
 
@@ -25,11 +61,10 @@ def update_attribute4multilook(atr_in, l
                          if --margin option is used in multilook.py
     Returns:    atr    - dict, updated dictionary of attributes
     """
+    vprint = print if print_msg else lambda *args, **kwargs: None
 
     # make a copy of original meta dict
-    atr = dict()
-    for key, value in iter(atr_in.items()):
-        atr[key] = str(value)
+    atr = {**atr_in}
 
     if box is None:
         box = (0, 0, int(atr['WIDTH']), int(atr['LENGTH']))
@@ -37,7 +72,7 @@ def update_attribute4multilook(atr_in, l
 
     length_mli = length // lks_y
     width_mli = width // lks_x
-    print('output data in size: {}, {}'.format(length_mli, width_mli))
+    vprint('output data in size: {}, {}'.format(length_mli, width_mli))
 
     # Update attributes
     atr['LENGTH'] = str(length_mli)
@@ -48,38 +83,32 @@ def update_attribute4multilook(atr_in, l
     atr['YMAX'] = str(length_mli - 1 + box[1])
     atr['RLOOKS'] = str(int(atr.get('RLOOKS', '1')) * lks_x)
     atr['ALOOKS'] = str(int(atr.get('ALOOKS', '1')) * lks_y)
-    if print_msg:
-        print('update LENGTH, WIDTH, Y/XMIN/MAX, A/RLOOKS')
+    vprint('update LENGTH, WIDTH, Y/XMIN/MAX, A/RLOOKS')
 
     if 'Y_STEP' in atr.keys():
         atr['Y_STEP'] = str(lks_y * float(atr['Y_STEP']))
         atr['X_STEP'] = str(lks_x * float(atr['X_STEP']))
-        if print_msg:
-            print('update Y/X_STEP')
+        vprint('update Y/X_STEP')
 
     if 'AZIMUTH_PIXEL_SIZE' in atr.keys():
         atr['AZIMUTH_PIXEL_SIZE'] = str(lks_y * float(atr['AZIMUTH_PIXEL_SIZE']))
-        if print_msg:
-            print('update AZIMUTH_PIXEL_SIZE')
+        vprint('update AZIMUTH_PIXEL_SIZE')
 
     if 'RANGE_PIXEL_SIZE' in atr.keys():
         atr['RANGE_PIXEL_SIZE'] = str(lks_x * float(atr['RANGE_PIXEL_SIZE']))
-        if print_msg:
-            print('update RANGE_PIXEL_SIZE')
+        vprint('update RANGE_PIXEL_SIZE')
 
     if 'REF_Y' in atr.keys():
         atr['REF_Y'] = str( (int(atr['REF_Y']) - box[1]) // lks_y )
         atr['REF_X'] = str( (int(atr['REF_X']) - box[0]) // lks_x )
-        if print_msg:
-            print('update REF_Y/X')
+        vprint('update REF_Y/X')
 
     if 'SUBSET_XMIN' in atr.keys():
         atr['SUBSET_YMIN'] = str( (int(atr['SUBSET_YMIN']) - box[1]) // lks_y )
         atr['SUBSET_YMAX'] = str( (int(atr['SUBSET_YMAX']) - box[1]) // lks_y )
         atr['SUBSET_XMIN'] = str( (int(atr['SUBSET_XMIN']) - box[0]) // lks_x )
         atr['SUBSET_XMAX'] = str( (int(atr['SUBSET_XMAX']) - box[0]) // lks_x )
-        if print_msg:
-            print('update SUBSET_XMIN/XMAX/YMIN/YMAX')
+        vprint('update SUBSET_XMIN/XMAX/YMIN/YMAX')
     return atr
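+
+# Worked example (sketch, illustrative values): 2x2 multilooking of a
+# 1000x2000 scene with box=None:
+#   atr = update_attribute4multilook(
+#       {'LENGTH': '1000', 'WIDTH': '2000', 'REF_Y': '301', 'REF_X': '401'},
+#       lks_y=2, lks_x=2, print_msg=False)
+#   # atr['LENGTH'] == '500'; atr['WIDTH'] == '1000'
+#   # atr['REF_Y'] == '150'; atr['REF_X'] == '200'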
 
 
@@ -94,7 +123,7 @@ def update_attribute4geo2radar(atr_in, s
     Returns:    atr     - dict, updated dictionary of attributes
     """
     # make a copy of original meta dict
-    atr = dict(atr_in)
+    atr = {**atr_in}
 
     # grab info from res_obj
     if res_obj is not None:
@@ -128,7 +157,7 @@ def update_attribute4radar2geo(atr_in, s
     Returns:    atr     - dict, updated dictionary of attributes
     """
     # make a copy of original meta dict
-    atr = dict(atr_in)
+    atr = {**atr_in}
 
     # grab info from res_obj
     if res_obj is not None:
@@ -185,6 +214,8 @@ def update_attribute4subset(atr_in, subs
                 subset_box - 4-tuple of int, subset box defined in (x0, y0, x1, y1)
     Returns:    atr        - dict, updated data attributes
     """
+    vprint = print if print_msg else lambda *args, **kwargs: None
+
     if subset_box is None:
         return atr_in
 
@@ -192,49 +223,43 @@ def update_attribute4subset(atr_in, subs
     sub_y = [subset_box[1], subset_box[3]]
 
     # Update attribute variable
-    atr = dict(atr_in)
+    atr = {**atr_in}
     atr['LENGTH'] = str(sub_y[1]-sub_y[0])
     atr['WIDTH'] = str(sub_x[1]-sub_x[0])
     atr['YMAX'] = str(sub_y[1]-sub_y[0] - 1)
     atr['XMAX'] = str(sub_x[1]-sub_x[0] - 1)
-    if print_msg:
-        print('update LENGTH, WIDTH, Y/XMAX')
+    vprint('update LENGTH, WIDTH, Y/XMAX')
 
     # Subset attribute
     atr['SUBSET_YMAX'] = str(sub_y[1] + int(atr_in.get('SUBSET_YMIN', '0')))
     atr['SUBSET_YMIN'] = str(sub_y[0] + int(atr_in.get('SUBSET_YMIN', '0')))
     atr['SUBSET_XMAX'] = str(sub_x[1] + int(atr_in.get('SUBSET_XMIN', '0')))
     atr['SUBSET_XMIN'] = str(sub_x[0] + int(atr_in.get('SUBSET_XMIN', '0')))
-    if print_msg:
-        print(('update/add SUBSET_XMIN/YMIN/XMAX/YMAX: '
-               '{x0}/{y0}/{x1}/{y1}').format(x0=atr['SUBSET_XMIN'],
-                                             y0=atr['SUBSET_YMIN'],
-                                             x1=atr['SUBSET_XMAX'],
-                                             y1=atr['SUBSET_YMAX']))
+    vprint(('update/add SUBSET_XMIN/YMIN/XMAX/YMAX: '
+            '{x0}/{y0}/{x1}/{y1}').format(x0=atr['SUBSET_XMIN'],
+                                          y0=atr['SUBSET_YMIN'],
+                                          x1=atr['SUBSET_XMAX'],
+                                          y1=atr['SUBSET_YMAX']))
 
     # Geo coord
     if 'Y_FIRST' in atr.keys():
         atr['Y_FIRST'] = str(float(atr['Y_FIRST']) + sub_y[0]*float(atr['Y_STEP']))
         atr['X_FIRST'] = str(float(atr['X_FIRST']) + sub_x[0]*float(atr['X_STEP']))
-        if print_msg:
-            print('update Y/X_FIRST')
+        vprint('update Y/X_FIRST')
 
     # Reference in space
     if 'REF_Y' in atr.keys():
         atr['REF_Y'] = str(int(atr['REF_Y']) - sub_y[0])
         atr['REF_X'] = str(int(atr['REF_X']) - sub_x[0])
-        if print_msg:
-            print('update REF_Y/X')
+        vprint('update REF_Y/X')
 
     # Starting Range for file in radar coord
     if 'Y_FIRST' not in atr_in.keys():
         try:
             atr['STARTING_RANGE'] = float(atr['STARTING_RANGE'])
             atr['STARTING_RANGE'] += float(atr['RANGE_PIXEL_SIZE'])*sub_x[0]
-            if print_msg:
-                print('update STARTING_RANGE')
+            vprint('update STARTING_RANGE')
         except:
             pass
 
     return atr
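+
+# Worked example (sketch, illustrative values): cropping to the box
+# (x0, y0, x1, y1) = (100, 200, 600, 700):
+#   atr = update_attribute4subset(
+#       {'LENGTH': '1000', 'WIDTH': '2000', 'REF_Y': '450', 'REF_X': '350'},
+#       (100, 200, 600, 700), print_msg=False)
+#   # atr['LENGTH'] == '500'; atr['WIDTH'] == '500'
+#   # atr['REF_Y'] == '250'; atr['REF_X'] == '250'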
-
diff -pruN 1.3.3-2/mintpy/utils/constants.py 1.4.0-1/mintpy/utils/constants.py
--- 1.3.3-2/mintpy/utils/constants.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/utils/constants.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,19 +0,0 @@
-#!/usr/bin/env python3
-############################################################
-# Program is part of MintPy                                #
-# Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi         #
-# Author: Zhang Yunjun, Feb 2022                           #
-############################################################
-# Recommend usage:
-#   from mintpy.utils.constants import SPEED_OF_LIGHT
-
-
-SPEED_OF_LIGHT = 299792458  # meters per second
-
-# Earth radius
-# equatorial radius: a = 6378.1370e3 
-# polar      radius: b = 6356.7523e3
-# arithmetic mean radius: R_1 = (2 * a + b) / 3 = 6371.0088e3
-#   defined by IUGG and used in geophysics
-EARTH_RADIUS = 6371.0088e3   # the arithmetic mean radius in meters
-
diff -pruN 1.3.3-2/mintpy/utils/isce_utils.py 1.4.0-1/mintpy/utils/isce_utils.py
--- 1.3.3-2/mintpy/utils/isce_utils.py	2022-04-14 21:14:12.000000000 +0000
+++ 1.4.0-1/mintpy/utils/isce_utils.py	2022-08-04 20:01:49.000000000 +0000
@@ -5,6 +5,7 @@
 ############################################################
 # 2020-07: Talib Oliver-Cabrera, add UAVSAR support w/in stripmapStack
 # 2020-10: Cunren Liang, add alosStack support
+# 2022-06: Yujie Zheng, add standard processing from isce2
 # Group contents:
 #     metadata
 #     geometry
@@ -16,13 +17,24 @@
 
 
 import os
-import re
+import datetime
 import glob
 import shelve
-import datetime
+import re
+import time
+
 import numpy as np
+from scipy import ndimage
+
+from mintpy.objects.constants import SPEED_OF_LIGHT, EARTH_RADIUS
 from mintpy.objects import sensor
-from mintpy.utils import ptime, readfile, writefile, utils1 as ut
+from mintpy.utils import (
+    ptime,
+    readfile,
+    writefile,
+    attribute as attr,
+    utils1 as ut,
+)
 
 # suppress matplotlib DEBUG message
 import logging
@@ -30,10 +42,6 @@ mpl_logger = logging.getLogger('matplotl
 mpl_logger.setLevel(logging.WARNING)
 
 
-SPEED_OF_LIGHT = 299792458  # m/s
-EARTH_RADIUS = 6378122.65   # m
-
-
 
 def get_processor(meta_file):
     """
@@ -360,16 +368,16 @@ def extract_alosStack_metadata(meta_file
     width = img.width
     length = img.length
     data = np.memmap(lat_file, dtype='float64', mode='r', shape=(length, width))
-    meta['LAT_REF1'] = str(data[0+edge, 0+edge])
-    meta['LAT_REF2'] = str(data[0+edge, -1-edge])
-    meta['LAT_REF3'] = str(data[-1-edge, 0+edge])
+    meta['LAT_REF1'] = str(data[ 0+edge,  0+edge])
+    meta['LAT_REF2'] = str(data[ 0+edge, -1-edge])
+    meta['LAT_REF3'] = str(data[-1-edge,  0+edge])
     meta['LAT_REF4'] = str(data[-1-edge, -1-edge])
 
     lon_file = glob.glob(os.path.join(geom_dir, '*_{}rlks_{}alks.lon'.format(rlooks, alooks)))[0]
     data = np.memmap(lon_file, dtype='float64', mode='r', shape=(length, width))
-    meta['LON_REF1'] = str(data[0+edge, 0+edge])
-    meta['LON_REF2'] = str(data[0+edge, -1-edge])
-    meta['LON_REF3'] = str(data[-1-edge, 0+edge])
+    meta['LON_REF1'] = str(data[ 0+edge,  0+edge])
+    meta['LON_REF2'] = str(data[ 0+edge, -1-edge])
+    meta['LON_REF3'] = str(data[-1-edge,  0+edge])
     meta['LON_REF4'] = str(data[-1-edge, -1-edge])
 
     los_file = glob.glob(os.path.join(geom_dir, '*_{}rlks_{}alks.los'.format(rlooks, alooks)))[0]
@@ -477,6 +485,9 @@ def extract_geometry_metadata(geom_dir,
 
     extract A/RLOOKS by comparing hgt.xml and hgt.full.xml file
     update azimuthPixelSize / rangePixelSize based on A/RLOOKS
+
+    extract LENGTH/WIDTH from the first geom file
+    update corresponding metadata if box is not None
     """
 
     def get_nonzero_row_number(data, buffer=2):
@@ -516,6 +527,15 @@ def extract_geometry_metadata(geom_dir,
     meta['rangePixelSize'] *= meta['RLOOKS']
     meta['azimuthPixelSize'] *= meta['ALOOKS']
 
+    # get LENGTH/WIDTH
+    atr = readfile.read_attribute(geom_files[0])
+    meta['LENGTH'] = atr['LENGTH']
+    meta['WIDTH'] = atr['WIDTH']
+
+    # update due to subset
+    if box:
+        meta = attr.update_attribute4subset(meta, box)
+
     # get LAT/LON_REF1/2/3/4 into metadata
     for geom_file in geom_files:
         if 'lat' in os.path.basename(geom_file):
@@ -570,7 +590,7 @@ def read_tops_baseline(baseline_file):
 def read_stripmap_baseline(baseline_file):
     """Read baseline file generated by ISCE/stripmapStack processor.
 
-    Example: 
+    Example:
     baselines/20200111_20200125.txt
         PERP_BASELINE_BOTTOM 173.97914535263297
         PERP_BASELINE_TOP 174.05612879066618
@@ -801,56 +821,6 @@ def get_IPF(proj_dir, ts_file):
     return date_list, IPF_IW1, IPF_IW2, IPF_IW3
 
 
-def safe_list_file2sensor_list(safe_list_file, date_list=None, print_msg=True):
-    """Get list of Sentinel-1 sensor names from txt file with SAFE file names.
-
-    Parameters: safe_list_file - str, path of the text file with Sentinel-1 SAFE file path
-                                 E.g. SAFE_files.txt
-                date_list      - list of str in YYYYMMDD format, reference list of dates
-    Returns:    sensor_list    - list of str in S1A or S1B
-                date_list      - list of str in YYYYMMDD format
-    Example:
-        date_list = timeseries('timeseries.h5').get_date_list()
-        sensor_list = safe_list_file2sensor_list('../SAFE_files.txt',
-                                                 date_list=date_list,
-                                                 print_msg=False)[0]
-        s1b_dates = [i for i, j in zip(date_list, sensor_list) if j == 'S1B']
-        np.savetxt('S1B_date.txt', np.array(s1b_dates).reshape(-1,1), fmt='%s')
-    """
-    # read txt file
-    fc = np.loadtxt(safe_list_file, dtype=str).astype(str).tolist()
-    safe_fnames = [os.path.basename(i) for i in fc]
-
-    # get date_list
-    date_list_out = [re.findall('_\d{8}T', i)[0][1:-1] for i in safe_fnames]
-    date_list_out = sorted(list(set(date_list_out)))
-
-    # get sensor_list
-    sensor_list = []
-    for d in date_list_out:
-        safe_fname = [i for i in safe_fnames if d in i][0]
-        sensor = safe_fname.split('_')[0]
-        sensor_list.append(sensor)
-
-    # update against date_list_ref
-    if date_list is not None:
-        # check possible missing dates
-        dates_missing = [i for i in date_list if i not in date_list_out]
-        if dates_missing:
-            raise ValueError('The following dates are missing:\n{}'.format(dates_missing))
-
-        # prune dates not-needed
-        flag = np.array([i in date_list for i in date_list_out], dtype=np.bool_)
-        if np.sum(flag) > 0:
-            sensor_list = np.array(sensor_list)[flag].tolist()
-            dates_removed = np.array(date_list_out)[~flag].tolist()
-            date_list_out = np.array(date_list_out)[flag].tolist()
-            if print_msg:
-                print('The following dates are not needed and removed:\n{}'.format(dates_removed))
-
-    return sensor_list, date_list
-
-
 def get_sensing_datetime_list(proj_dir, date_list=None):
     """Get the sensing datetime objects from ISCE stack results.
     It assumes the default directory structure from topsStack, as below:
@@ -917,3 +887,212 @@ def get_sensing_datetime_list(proj_dir,
 
     return sensingMid, sensingStart, sensingStop
 
+
+
+############################## Standard Processing ###########################################
+
+def gaussian_kernel(sx, sy, sig_x, sig_y):
+    '''Generate a Gaussian kernel (whose elements sum to 1).
+
+    Parameters: sx/y    - int, dimensions of the kernel
+                sig_x/y - float, standard deviation of the Gaussian distribution
+    '''
+    # ensure sx/y are odd numbers
+    sx += 1 if np.mod(sx, 2) == 0 else 0
+    sy += 1 if np.mod(sy, 2) == 0 else 0
+
+    x, y = np.meshgrid(np.arange(sx), np.arange(sy))
+    x += 1
+    y += 1
+
+    xc = (sx + 1) / 2
+    yc = (sy + 1) / 2
+    fx = ((x-xc)**2.) / (2.*sig_x**2.)
+    fy = ((y-yc)**2.) / (2.*sig_y**2.)
+
+    k = np.exp(-1.0 * (fx+fy))
+    a = 1./np.sum(k)
+    k = a * k
+
+    return k
+
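+# Quick check (sketch): the returned kernel is normalized, and even sizes are
+# bumped up to the next odd number:
+#   k = gaussian_kernel(5, 5, 1.0, 1.0)
+#   # k.shape == (5, 5) and abs(k.sum() - 1.0) < 1e-12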
+
+def convolve(data, kernel):
+    '''Convolve / filter the complex data based on the given kernel.
+
+    Parameters: data   - 2D np.ndarray in complex
+                kernel - 2D np.ndarray in float, convolution kernel
+    '''
+    R = ndimage.convolve(data.real, kernel, mode='constant', cval=0.0)
+    I = ndimage.convolve(data.imag, kernel, mode='constant', cval=0.0)
+    return R + 1J*I
+
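+# Usage sketch (illustrative): low-pass filter a complex interferogram patch
+# by convolving the real/imaginary parts separately:
+#   igram = np.exp(1j * np.random.rand(100, 100))
+#   filt = convolve(igram, gaussian_kernel(5, 5, 1.0, 1.0))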
+
+def estimate_coherence(intfile, corfile):
+    '''Estimate the spatial coherence (phase sigma) of the wrapped interferogram.
+
+    Parameters: intfile - str, path to the *.int file
+                corfile - str, path to the output correlation file
+    '''
+    import isce
+    import isceobj
+    from mroipac.icu.Icu import Icu
+
+    # create filt interferogram file object
+    filtImage = isceobj.createIntImage()
+    filtImage.load(intfile + '.xml')
+    filtImage.setAccessMode('read')
+    filtImage.createImage()
+
+    # create phase sigma correlation file object
+    phsigImage = isceobj.createImage()
+    phsigImage.dataType = 'FLOAT'
+    phsigImage.bands = 1
+    phsigImage.setWidth(filtImage.getWidth())
+    phsigImage.setFilename(corfile)
+    phsigImage.setAccessMode('write')
+    phsigImage.createImage()
+
+    # setup Icu() object
+    icuObj = Icu(name='sentinel_filter_icu')
+    icuObj.configure()
+    icuObj.unwrappingFlag = False
+    icuObj.useAmplitudeFlag = False
+    #icuObj.correlationType = 'NOSLOPE'
+
+    # run
+    icuObj.icu(intImage=filtImage, phsigImage=phsigImage)
+    phsigImage.renderHdr()
+
+    # close
+    filtImage.finalizeImage()
+    phsigImage.finalizeImage()
+
+    return
+
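+# Usage sketch (file names are illustrative; requires a working ISCE-2
+# installation and an existing interferogram with its .xml metadata):
+#   estimate_coherence('filt_fine.int', 'filt_fine.cor')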
+
+def unwrap_snaphu(int_file, cor_file, unw_file, defo_max=2.0, max_comp=32,
+                  init_only=True, init_method='MCF', cost_mode='SMOOTH'):
+    '''Unwrap interferograms using SNAPHU via isce2.
+
+    Modified from ISCE-2/topsStack/unwrap.py
+    Notes from Piyush:
+        SNAPHU is an iterative solver, starting from the initial solution. It can get
+            stuck in an infinite loop.
+        The initial solution is created using the MCF or MST method. The MST initial
+            solution typically requires many iterations and may not be a good starting point.
+        DEFO cost mode requires geometry info for the program to interpret the coherence
+            correctly and set up costs based on that. DEFO always sounds more theoretical
+            to me, but I have not fully explored it. TOPO cost mode requires the spatial baseline.
+            SMOOTH cost mode is purely data driven.
+        Amplitude of zero is a mask in all cost modes. For TOPO mode, amplitude is used to find
+            layover; for SMOOTH mode, only non-zero amplitude matters.
+
+    Default configurations in ISCE-2/topsStack:
+        init_only = True
+        init_method = 'MCF'
+        cost_mode = 'SMOOTH'
+    Default configurations in FRInGE:
+        init_only = False
+        init_method = 'MST'
+        cost_mode = 'DEFO'
+
+    Parameters: int_file    - str, path to the wrapped interferogram file
+                cor_file    - str, path to the correlation file
+                unw_file    - str, path to the output unwrapped interferogram file
+                defo_max    - float, maximum number of cycles for the deformation phase
+                max_comp    - int, maximum number of connected components
+                init_only   - bool, initialize-only mode
+                init_method - str, algorithm used for initialization: MCF, MST
+                cost_mode   - str, statistical-cost mode: TOPO, DEFO, SMOOTH, NOSTATCOSTS
+    Returns:    unw_file    - str, path to the output unwrapped interferogram file
+    '''
+    import isce
+    from contrib.Snaphu.Snaphu import Snaphu
+
+    start_time = time.time()
+
+    # configurations - atr
+    atr = readfile.read_attribute(int_file)
+    width = int(atr['WIDTH'])
+    length = int(atr['LENGTH'])
+    altitude = float(atr['HEIGHT'])
+    earth_radius = float(atr['EARTH_RADIUS'])
+    wavelength = float(atr['WAVELENGTH'])
+    rg_looks = int(atr['RLOOKS'])
+    az_looks = int(atr['ALOOKS'])
+    corr_looks = float(atr.get('NCORRLOOKS', rg_looks * az_looks / 1.94))
+
+    ## setup SNAPHU
+    # https://web.stanford.edu/group/radar/softwareandlinks/sw/snaphu/snaphu.conf.full
+    # https://github.com/isce-framework/isce2/blob/main/contrib/Snaphu/Snaphu.py
+    print('phase unwrapping with SNAPHU ...')
+    print('SNAPHU cost mode: {}'.format(cost_mode))
+    print('SNAPHU init only: {}'.format(init_only))
+    print('SNAPHU init method: {}'.format(init_method))
+    print('SNAPHU max number of connected components: {}'.format(max_comp))
+
+    snp = Snaphu()
+
+    # file IO
+    snp.setInput(int_file)
+    snp.setOutput(unw_file)
+    snp.setCorrfile(cor_file)
+    snp.setCorFileFormat('FLOAT_DATA')
+    snp.setWidth(width)
+
+    # runtime options
+    snp.setCostMode(cost_mode)
+    snp.setInitOnly(init_only)
+    snp.setInitMethod(init_method)
+
+    # geometry parameters
+    # ba