diff --git a/setup.py b/setup.py
index 0c6072ba..4f08b57a 100644
--- a/setup.py
+++ b/setup.py
@@ -49,9 +49,16 @@
# If third party package is found compile as well
if os.path.isdir(relaxPath):
print('Installing ARIA-tools with support for RelaxIV')
- module1 = Extension('ARIAtools.demo', sources=['tools/bindings/relaxIVdriver.cpp',
- 'tools/bindings/unwcompmodule.cpp',
- os.path.join(relaxPath, 'RelaxIV/RelaxIV.C')], include_dirs=['tools/include', os.path.join(relaxPath, 'MCFClass'), os.path.join(relaxPath, 'OPTUtils'), os.path.join(relaxPath, 'RelaxIV')])
+ module1 = Extension(
+ 'ARIAtools.demo',
+ sources=[
+ 'tools/bindings/relaxIVdriver.cpp',
+ 'tools/bindings/unwcompmodule.cpp',
+ os.path.join(relaxPath, 'RelaxIV/RelaxIV.C')],
+ include_dirs=[
+ 'tools/include', os.path.join(relaxPath, 'MCFClass'),
+ os.path.join(relaxPath, 'OPTUtils'),
+ os.path.join(relaxPath, 'RelaxIV')])
setup(name='ARIAtools',
version=version0,
@@ -59,7 +66,10 @@
ext_modules=[module1],
packages=['ARIAtools'],
package_dir={'': 'tools'},
- scripts=['tools/bin/ariaPlot.py', 'tools/bin/ariaDownload.py', 'tools/bin/ariaExtract.py', 'tools/bin/ariaTSsetup.py', 'tools/bin/ariaAOIassist.py', 'tools/bin/ariaMisclosure.py'])
+ scripts=['tools/bin/ariaPlot.py', 'tools/bin/ariaDownload.py',
+ 'tools/bin/ariaExtract.py', 'tools/bin/ariaTSsetup.py',
+ 'tools/bin/ariaAOIassist.py', 'tools/bin/ariaMisclosure.py',
+ 'tools/bin/export_product.py'])
else:
# Third party package RelaxIV not found
print('Installing ARIA-tools without support for RelaxIV')
@@ -69,4 +79,7 @@
description='This is the ARIA tools package without RelaxIV support',
packages=['ARIAtools'],
package_dir={'': 'tools'},
- scripts=['tools/bin/ariaPlot.py', 'tools/bin/ariaDownload.py', 'tools/bin/ariaExtract.py', 'tools/bin/ariaTSsetup.py', 'tools/bin/ariaAOIassist.py', 'tools/bin/ariaMisclosure.py'])
+ scripts=['tools/bin/ariaPlot.py', 'tools/bin/ariaDownload.py',
+ 'tools/bin/ariaExtract.py', 'tools/bin/ariaTSsetup.py',
+ 'tools/bin/ariaAOIassist.py', 'tools/bin/ariaMisclosure.py',
+ 'tools/bin/export_product.py'])
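The trailing comma after 'tools/bin/ariaMisclosure.py' in both branches is load-bearing: without it, Python's implicit concatenation of adjacent string literals silently merges two entries into a single nonexistent path instead of raising an error. A minimal sketch of the pitfall (toy list, not the real scripts entries):

    # Adjacent literals with no comma between them are concatenated at compile time.
    scripts_bad = ['ariaMisclosure.py'
                   'export_product.py']   # one entry: 'ariaMisclosure.pyexport_product.py'
    scripts_ok = ['ariaMisclosure.py',
                  'export_product.py']    # two entries, as intended
    assert len(scripts_bad) == 1 and len(scripts_ok) == 2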
diff --git a/tests/regression/validate_test.py b/tests/regression/validate_test.py
index 3a51236e..4da07a79 100755
--- a/tests/regression/validate_test.py
+++ b/tests/regression/validate_test.py
@@ -233,7 +233,7 @@ class AriaToolsScriptTester():
@pytest.fixture(scope='class')
def tester(self):
with tarfile.open(os.path.join(
- 'golden_test_outputs', self.FLAVOR+'.tar.gz')) as tar:
+ 'golden_test_outputs', self.FLAVOR + '.tar.gz')) as tar:
tar.extractall(os.path.join('golden_test_outputs'))
test_dir = os.path.join('test_outputs', self.FLAVOR)
@@ -271,7 +271,7 @@ class TestAriaDownload():
@pytest.fixture(scope='class')
def tester(self):
with tarfile.open(os.path.join(
- 'golden_test_outputs', self.FLAVOR+'.tar.gz')) as tar:
+ 'golden_test_outputs', self.FLAVOR + '.tar.gz')) as tar:
tar.extractall(os.path.join('golden_test_outputs'))
test_dir = os.path.join('test_outputs', self.FLAVOR)
diff --git a/tools/ARIAtools/extractProduct.py b/tools/ARIAtools/extractProduct.py
index d7d772d4..0ccddaa0 100644
--- a/tools/ARIAtools/extractProduct.py
+++ b/tools/ARIAtools/extractProduct.py
@@ -530,25 +530,25 @@ def merged_productbbox(
def create_raster_from_gunw(fname, data_lis, proj, driver, hgt_field=None):
"""Wrapper to create raster and apply projection"""
# open original raster
- osgeo.gdal.BuildVRT(fname+'_temp.vrt', data_lis)
- da = rioxarray.open_rasterio(fname+'_temp.vrt', masked=True)
+ osgeo.gdal.BuildVRT(fname + '_temp.vrt', data_lis)
+ da = rioxarray.open_rasterio(fname + '_temp.vrt', masked=True)
# Reproject the raster to the desired projection
reproj_da = da.rio.reproject(f'EPSG:{proj}',
resampling=rasterio.enums.Resampling.nearest,
nodata=0)
reproj_da.rio.to_raster(fname, driver=driver, crs=f'EPSG:{proj}')
- os.remove(fname+'_temp.vrt')
+ os.remove(fname + '_temp.vrt')
da.close()
reproj_da.close()
buildvrt_options = osgeo.gdal.BuildVRTOptions(outputSRS=f'EPSG:{proj}')
- osgeo.gdal.BuildVRT(fname+'.vrt', fname, options=buildvrt_options)
+ osgeo.gdal.BuildVRT(fname + '.vrt', fname, options=buildvrt_options)
if hgt_field is not None:
# write height layers
hgt_meta = osgeo.gdal.Open(data_lis[0]).GetMetadataItem(hgt_field)
osgeo.gdal.Open(
- fname+'.vrt').SetMetadataItem(hgt_field, hgt_meta)
+ fname + '.vrt').SetMetadataItem(hgt_field, hgt_meta)
return
@@ -1374,7 +1374,7 @@ def export_products(
end_time = time.time()
LOGGER.debug(
- "export_product_worker took %f seconds" % (end_time-start_time))
+ "export_product_worker took %f seconds" % (end_time - start_time))
# delete directory for quality control plots if empty
plots_subdir = os.path.abspath(
@@ -1398,7 +1398,6 @@ def finalize_metadata(outname, bbox_bounds, arrres, dem_bounds, prods_TOTbbox,
3D layers with a DEM.
Lat/lon arrays must also be passed for this process.
"""
- #arrshape = [dem.RasterYSize, dem.RasterXSize]
ref_geotrans = dem.GetGeoTransform()
dem_arrres = [abs(ref_geotrans[1]), abs(ref_geotrans[-1])]
@@ -1498,8 +1497,9 @@ def finalize_metadata(outname, bbox_bounds, arrres, dem_bounds, prods_TOTbbox,
gdal_warp_kwargs = {
'format': outputFormat, 'cutlineDSName': prods_TOTbbox,
'outputBounds': dem_bounds, 'dstNodata': data_array_nodata,
- 'xRes': dem_arrres[0], 'yRes': dem_arrres[1], 'targetAlignedPixels': True,
- 'multithread': True, 'options': [f'NUM_THREADS={num_threads}']}
+ 'xRes': dem_arrres[0], 'yRes': dem_arrres[1],
+ 'targetAlignedPixels': True, 'multithread': True,
+ 'options': [f'NUM_THREADS={num_threads}']}
warp_options = osgeo.gdal.WarpOptions(**gdal_warp_kwargs)
osgeo.gdal.Warp(tmp_name + '_temp', tmp_name, options=warp_options)
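The reflowed gdal_warp_kwargs dict leaves the warp behaviour unchanged; note that targetAlignedPixels only makes sense together with explicit xRes/yRes, which is why those keys travel together here. A minimal, self-contained sketch of the pattern (paths, bounds and resolutions below are placeholders, not values from this changeset):

    from osgeo import gdal

    # Hypothetical inputs: a source VRT, a cutline polygon and a target grid.
    gdal_warp_kwargs = {
        'format': 'ENVI', 'cutlineDSName': 'prods_TOTbbox.json',
        'outputBounds': (-118.0, 34.0, -117.0, 35.0), 'dstNodata': 0,
        'xRes': 0.000833, 'yRes': 0.000833,
        'targetAlignedPixels': True, 'multithread': True,
        'options': ['NUM_THREADS=2']}
    warp_options = gdal.WarpOptions(**gdal_warp_kwargs)
    gdal.Warp('layer_cropped', 'layer.vrt', options=warp_options)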
diff --git a/tools/ARIAtools/phaseMinimization.py b/tools/ARIAtools/phaseMinimization.py
index 3981da3e..6908e9db 100644
--- a/tools/ARIAtools/phaseMinimization.py
+++ b/tools/ARIAtools/phaseMinimization.py
@@ -392,7 +392,8 @@ def __init__(self, x=None, y=None, phase=None, compNum=None, redArcs=0):
else:
self.loops = self.__createTriangulation(
delauneyTri, vertices, edges)
- self.neutralResidue, self.neutralNodeIdx = self.__computeNeutralResidue()
+ self.neutralResidue, self.neutralNodeIdx = (
+ self.__computeNeutralResidue())
# Saving some variables for plotting
self.__redArcs = redArcs
@@ -420,16 +421,19 @@ def __getTriSeq(va, vb, vc):
'''
Get Sequence of triangle points
'''
- def line(va, vb, vc): return (((vc.y - va.y) * (vb.x - va.x))
- ) - ((vc.x - va.x) * (vb.y - va.y))
+ def line(va, vb, vc):
+ return (((vc.y - va.y) * (vb.x - va.x)) -
+ ((vc.x - va.x) * (vb.y - va.y)))
# Line equation through pt0 and pt1
# Test for pt3 - Does it lie to the left or to the right ?
pos3 = line(va, vb, vc)
- if(pos3 > 0):
- # left
+
+ # left
+ if pos3 > 0:
return (va, vc, vb)
- else: # right
+ # right
+ else:
return (va, vb, vc)
# Create Delaunay Triangulation.
@@ -578,8 +582,9 @@ def __createTriangulation(self, delauneyTri, vertices, edges):
# Generate the points in a sequence
def getSeq(va, vb, vc):
- def line(va, vb, vc): return (
- ((vc.y - va.y) * (vb.x - va.x))) - ((vc.x - va.x) * (vb.y - va.y))
+ def line(va, vb, vc):
+ return (((vc.y - va.y) * (vb.x - va.x)) -
+ ((vc.x - va.x) * (vb.y - va.y)))
# Line equation through pt0 and pt1
# Test for pt3 - Does it lie to the left or to the right ?
@@ -739,8 +744,8 @@ def __MCFRelaxIV(edgeLen, fileName="network.dmx"):
try:
from . import unwcomp
except BaseException:
- raise Exception("MCF requires RelaxIV solver - Please drop the RelaxIV software \
- into the src folder and re-make")
+ raise Exception("MCF requires RelaxIV solver - Please drop the "
+ "RelaxIV software into the src folder and re-make")
return unwcomp.relaxIVwrapper_Py(fileName)
def solve(self, solver, filename="network.dmx"):
@@ -764,19 +769,24 @@ def __solveEdgeCost__(self, solver, fileName="network.dmx"):
# Solve the objective function
if solver == 'glpk':
LOGGER.info('Using GLPK MIP solver')
- def MIPsolver(): return self.__prob__.solve(pulp.GLPK(msg=0))
+
+ def MIPsolver():
+ return self.__prob__.solve(pulp.GLPK(msg=0))
+
elif solver == 'pulp':
LOGGER.info('Using PuLP MIP solver')
- def MIPsolver(): return self.__prob__.solve()
+
+ def MIPsolver():
+ return self.__prob__.solve()
+
elif solver == 'gurobi':
LOGGER.info('Using Gurobi MIP solver')
- def MIPsolver(): return self.__prob__.solve(pulp.GUROBI_CMD())
+
+ def MIPsolver():
+ return self.__prob__.solve(pulp.GUROBI_CMD())
LOGGER.info(
- 'Time Taken (in sec) to solve: %f',
- T.timeit(
- MIPsolver,
- number=1))
+ 'Time Taken (in sec) to solve: %f', T.timeit(MIPsolver, number=1))
# Get solution
for v, edge in self.__edges.items():
@@ -916,18 +926,24 @@ def firstPassCommandLine():
parser = argparse.ArgumentParser(description='Phase Unwrapping')
# Positional argument - Input XML file
- parser.add_argument('--inputType', choices=['plane', 'sinc', 'sine'],
- help='Type of input to unwrap', default='plane', dest='inputType')
- parser.add_argument('--dim', type=int, default=100,
- help='Dimension of the image (square)', dest='dim')
- parser.add_argument('-c', action='store_true',
- help='Component-wise unwrap test', dest='compTest')
- parser.add_argument('-MCF', action='store_true',
- help='Minimum Cost Flow', dest='mcf')
- parser.add_argument('--redArcs', type=int, default=0,
- help='Redundant Arcs', dest='redArcs')
- parser.add_argument('--solver', choices=['glpk', 'pulp', 'gurobi'],
- help='Type of solver', default='pulp', dest='solver')
+ parser.add_argument(
+ '--inputType', choices=['plane', 'sinc', 'sine'],
+ help='Type of input to unwrap', default='plane', dest='inputType')
+ parser.add_argument(
+ '--dim', type=int, default=100,
+ help='Dimension of the image (square)', dest='dim')
+ parser.add_argument(
+ '-c', action='store_true',
+ help='Component-wise unwrap test', dest='compTest')
+ parser.add_argument(
+ '-MCF', action='store_true',
+ help='Minimum Cost Flow', dest='mcf')
+ parser.add_argument(
+ '--redArcs', type=int, default=0,
+ help='Redundant Arcs', dest='redArcs')
+ parser.add_argument(
+ '--solver', choices=['glpk', 'pulp', 'gurobi'],
+ help='Type of solver', default='pulp', dest='solver')
# Parse input
args = parser.parse_args()
@@ -1011,7 +1027,8 @@ def main():
xidx, yidx, wrapImg[xidx, yidx], redArcs=redArcs)
else:
phaseunwrap = PhaseUnwrap(
- xidx, yidx, wrapImg[xidx, yidx], compImg[xidx, yidx], redArcs=redArcs)
+ xidx, yidx, wrapImg[xidx, yidx], compImg[xidx, yidx],
+ redArcs=redArcs)
# Including the neutral node for min cost flow
phaseunwrap.solve(solver)
@@ -1022,10 +1039,6 @@ def main():
# phaseunwrap.plotSpanningTree("spanningTree%d.png"%(redArcs))
phaseunwrap.plotResult("final%d.png" % (redArcs))
- #import pdb; pdb.set_trace()
- #fig = plt.figure()
- #ax = fig.add_subplot(111, projection='3d')
-
if __name__ == '__main__':
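The line() helpers promoted from one-line lambdas to proper defs implement the standard 2D cross-product orientation test used to order triangle vertices. A small illustration of that test (names here are illustrative, not taken from the module):

    from collections import namedtuple

    Vertex = namedtuple('Vertex', 'x y')

    def orientation(va, vb, vc):
        # z-component of (vb - va) x (vc - va):
        # > 0: vc lies left of the directed line va->vb (counter-clockwise),
        # < 0: vc lies to the right (clockwise), == 0: collinear.
        return (vb.x - va.x) * (vc.y - va.y) - (vb.y - va.y) * (vc.x - va.x)

    a, b, c = Vertex(0, 0), Vertex(1, 0), Vertex(0, 1)
    assert orientation(a, b, c) > 0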
diff --git a/tools/ARIAtools/product.py b/tools/ARIAtools/product.py
index 77a59a2e..e80887e5 100644
--- a/tools/ARIAtools/product.py
+++ b/tools/ARIAtools/product.py
@@ -87,8 +87,9 @@ def package_dict(scene, new_scene, scene_ind,
# IFG corresponding to reference product already exists, append to dict
if sorted_dict:
dict_vals = [[
- subitem for item in a for subitem in (item if
- isinstance(item, list) else [item])] for a in zip(
+ subitem for item in a for subitem in (
+ item if isinstance(item, list) else [item])]
+ for a in zip(
sorted_dict[dict_ind][scene_ind].values(),
new_scene[scene_ind].values())]
@@ -924,7 +925,7 @@ def __NISARmappingData__(self, fname, rdrmetadata_dict, sdskeys, version):
datalyr_dict[
'productBoundingBoxFrames'] = fname + '":' + sdskeys[0]
for i in enumerate(layerkeys):
- datalyr_dict[i[1]] = fname + '":'+sdskeys[i[0]]
+ datalyr_dict[i[1]] = fname + '":' + sdskeys[i[0]]
# Rewrite tropo and iono keys
datalyr_dict['ionosphere'] = datalyr_dict.pop(
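The comprehension re-wrapped in package_dict above flattens each pair of existing and new scene values while tolerating a mix of scalars and lists. A toy example of the same shape (hypothetical keys and values, only to show what the comprehension produces):

    old = {'pair_name': ['a'], 'unwrappedPhase': ['u1']}
    new = {'pair_name': 'b', 'unwrappedPhase': 'u2'}

    dict_vals = [[
        subitem for item in a for subitem in (
            item if isinstance(item, list) else [item])]
        for a in zip(old.values(), new.values())]

    assert dict_vals == [['a', 'b'], ['u1', 'u2']]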
diff --git a/tools/ARIAtools/stack.py b/tools/ARIAtools/stack.py
index 994dce2a..81e2e8a7 100644
--- a/tools/ARIAtools/stack.py
+++ b/tools/ARIAtools/stack.py
@@ -19,6 +19,7 @@
LOGGER = logging.getLogger(__name__)
+
# STACK OBJECT ---
class Stack:
'''
@@ -170,7 +171,8 @@ def __formatDates__(self):
def __formatExcludePairs__(self):
'''
Check that exclude dates are in one of two formats:
- 1. a string containing the pairs in YOUNGER_OLDER format, space-separated
+ 1. a string containing the pairs in YOUNGER_OLDER format,
+ space-separated
2. a .txt file with lines of the same formatting
Formatting should match "pair" formatting: [[master,slave]]
'''
@@ -231,8 +233,9 @@ def plotPairs(self):
# Legend
handles, labels = pairAx.get_legend_handles_labels()
uniqueLabels = dict(zip(labels, handles))
- pairAx.legend(uniqueLabels.values(), uniqueLabels.keys(),
- bbox_to_anchor=(0.005, 0.99), loc='upper left', borderaxespad=0.)
+ pairAx.legend(
+ uniqueLabels.values(), uniqueLabels.keys(),
+ bbox_to_anchor=(0.005, 0.99), loc='upper left', borderaxespad=0.)
# Other formatting
pairAx.set_yticks([])
@@ -279,14 +282,15 @@ def createTriplets(self, minTime=None, maxTime=None, printTriplets=False):
self.nTriplets = len(self.triplets)
# Print to text file
- with open(os.path.join(self.workdir, 'ValidTriplets.txt'), 'w') as tripletFile:
+ with open(os.path.join(
+ self.workdir, 'ValidTriplets.txt'), 'w') as tripletFile:
for triplet in self.triplets:
strPair = [self.__datePair2strPair__(pair) for pair in triplet]
tripletFile.write('{}\n'.format(strPair))
tripletFile.close()
# Report if requested
- if printTriplets == True:
+ if printTriplets:
# Print to screen
LOGGER.info('Existing triplets:')
@@ -294,7 +298,7 @@ def createTriplets(self, minTime=None, maxTime=None, printTriplets=False):
LOGGER.info([
self.__datePair2strPair__(pair) for pair in triplet])
- if self.verbose == True:
+ if self.verbose:
LOGGER.info(
'%s existing triplets found based on search criteria',
self.nTriplets)
@@ -425,11 +429,11 @@ def XY2LoLa(self, x, y):
# Reference point formatting
def __referencePoint__(self, refXY, refLoLa):
'''
- Determine the reference point in XY coordinates. The reference point can be
- automatically or manually selected by the user and is subtracted
- from each interferogram.
- The point can be given in pixels or lon/lat coordinates. If given in Lat/Lon, determine
- the location in XY.
+ Determine the reference point in XY coordinates. The reference point
+ can be automatically or manually selected by the user and is
+ subtracted from each interferogram.
+ The point can be given in pixels or lon/lat coordinates. If given in
+ Lat/Lon, determine the location in XY.
'''
LOGGER.debug('Determining reference point...')
@@ -460,7 +464,8 @@ def __referencePoint__(self, refXY, refLoLa):
# Random reference point
def __autoReferencePoint__(self):
'''
- Use the coherence stack to automatically determine a suitable reference point.
+ Use the coherence stack to automatically determine a suitable
+ reference point.
'''
# Load coherence data from cohStack.vrt
cohfile = os.path.join(self.imgdir, 'cohStack.vrt')
@@ -479,7 +484,7 @@ def __autoReferencePoint__(self):
# Loop until suitable reference point is found
n = 0
- while cohMask[self.refY, self.refX] == False:
+ while not cohMask[self.refY, self.refX]:
# Reselect reference points
self.refX = np.random.randint(cohDS.RasterXSize)
@@ -606,7 +611,7 @@ def __plotSeries__(self, ax, data, title):
Plot misclosure timeseries.
'''
# Plot data
- if self.plotTimeIntervals == False:
+ if not self.plotTimeIntervals:
ax.plot([tripletDate[1]
for tripletDate in self.tripletDates], data, '-k.')
else:
@@ -760,7 +765,8 @@ def __misclosureQuery__(self, queryXY=None, queryLoLa=None):
qLon, qLat = self.XY2LoLa(queryXY[0], queryXY[1])
LOGGER.debug(
- 'Query point: X %s / Y %s; Lon %.4f / Lat %.4f', qx, qy, qLon, qLat)
+ 'Query point: X %s / Y %s; Lon %.4f / Lat %.4f',
+ qx, qy, qLon, qLat)
# Plot query points on map
self.netMscAx.plot(
diff --git a/tools/ARIAtools/unwrapStitching.py b/tools/ARIAtools/unwrapStitching.py
index c0a6223b..a5a3de88 100644
--- a/tools/ARIAtools/unwrapStitching.py
+++ b/tools/ARIAtools/unwrapStitching.py
@@ -327,11 +327,10 @@ def __createImages__(self):
self.outFileUnw + '.vrt', unwFiles,
options=osgeo.gdal.BuildVRTOptions(srcNodata=0))
-
warp_options = osgeo.gdal.WarpOptions(
format=self.outputFormat, cutlineDSName=self.setTotProdBBoxFile,
- outputBounds=self.bbox_file, xRes=self.arrres[0], yRes=self.arrres[1],
- targetAlignedPixels=True)
+ outputBounds=self.bbox_file, xRes=self.arrres[0],
+ yRes=self.arrres[1], targetAlignedPixels=True)
osgeo.gdal.Warp(
self.outFileUnw, self.outFileUnw + '.vrt', options=warp_options)
@@ -343,7 +342,8 @@ def __createImages__(self):
# Apply mask (if specified).
if self.mask is not None:
- update_file = osgeo.gdal.Open(self.outFileUnw, osgeo.gdal.GA_Update)
+ update_file = osgeo.gdal.Open(
+ self.outFileUnw, osgeo.gdal.GA_Update)
msk_arr = self.mask.ReadAsArray() * \
osgeo.gdal.Open(self.outFileUnw + '.vrt').ReadAsArray()
update_file = update_file.GetRasterBand(1).WriteArray(msk_arr)
@@ -421,11 +421,13 @@ def UnwrapOverlap(self):
raise Exception
if self.ccFile is None:
- LOGGER.error("Input Connected Components file(s) is (are) not set.")
+ LOGGER.error(
+ "Input Connected Components file(s) is (are) not set.")
raise Exception
if self.prodbboxFile is None:
- LOGGER.error("Input product Bounding box file(s) is (are) not set.")
+ LOGGER.error(
+ "Input product Bounding box file(s) is (are) not set.")
raise Exception
# Verify if all the inputs are well-formed/GDAL compatible
@@ -503,18 +505,20 @@ def __calculateCyclesOverlap__(self):
warp_options = osgeo.gdal.WarpOptions(
format="MEM", cutlineDSName=outname,
- outputBounds=polyOverlap.bounds, dstNodata=connCompNoData1,
- xRes=self.arrres[0], yRes=self.arrres[1],
- targetAlignedPixels=True)
+ outputBounds=polyOverlap.bounds,
+ dstNodata=connCompNoData1,
+ xRes=self.arrres[0], yRes=self.arrres[1],
+ targetAlignedPixels=True)
connCompFile1 = osgeo.gdal.Warp(
'', self.ccFile[counter], options=warp_options)
# need to specify spacing to avoid inconsistent dimensions
warp_options = osgeo.gdal.WarpOptions(
format="MEM", cutlineDSName=outname,
- outputBounds=polyOverlap.bounds, dstNodata=connCompNoData2,
- xRes=self.arrres[0], yRes=self.arrres[1],
- targetAlignedPixels=True, multithread=True)
+ outputBounds=polyOverlap.bounds,
+ dstNodata=connCompNoData2, xRes=self.arrres[0],
+ yRes=self.arrres[1], targetAlignedPixels=True,
+ multithread=True)
connCompFile2 = osgeo.gdal.Warp(
'', self.ccFile[counter + 1], options=warp_options)
@@ -719,7 +723,8 @@ def unwrapComponents(self):
raise Exception
if self.ccFile is None:
- LOGGER.error("Input Connected Components file(s) is (are) not set.")
+ LOGGER.error(
+ "Input Connected Components file(s) is (are) not set.")
raise Exception
if self.solver not in SOLVER_TYPES:
@@ -729,7 +734,8 @@ def unwrapComponents(self):
str(unwTreeTypes))
if self.redArcs not in REDARCS_TYPES.keys():
- raise ValueError(self.redArcs + ' must be in ' + str(REDARCS_TYPES))
+ raise ValueError(
+ self.redArcs + ' must be in ' + str(REDARCS_TYPES))
# Verify if all the inputs are well-formed/GDAL compatible
# Update files to be vrt if they exist and remove files which failed
@@ -741,8 +747,8 @@ def unwrapComponents(self):
# in "polyTableDict" variable.
# Key is a unique polygon [1, max number of unique polygons of all
# products]
- # Note: there can be components with mutiple polygons (e.g. inner/outer
- # edges)
+ # Note: there can be components with multiple polygons (e.g.
+ # inner/outer edges)
# Each key will have a corresponding dictionary with keys:
# "connFile" = Original connected component file
# "connCompID" = Original ID connected component id (wrt original
@@ -832,8 +838,8 @@ def __populatePolyTable__(self):
polygonDict['sizeData'] = sizeData
# Track the mapping from connCompID to connCompID_unique
- connCompMapping.append(
- [ccomp[poly_counter],
+ connCompMapping.append([
+ ccomp[poly_counter],
ccomp[poly_counter] + connComp_offset])
# tracking how much the next file needs to be offsetted in
@@ -963,8 +969,8 @@ def __populatePointTable__(self):
# parse inputs as a tuple
pairing = (
polygon1, polygon2, connFile1, connFile2)
- pointDistance_temp, points_temp = minDistancePoints(
- pairing)
+ pointDistance_temp, points_temp = \
+ minDistancePoints(pairing)
points.append(points_temp)
pointDistance.append(pointDistance_temp)
@@ -1048,8 +1054,9 @@ def __populatePointTable__(self):
connCompID1_temp = self.tablePoints[counter, 1]
connFile1_temp = self.tablePoints[counter, 2]
unwFile1_temp = self.tablePoints[counter, 3]
- point1_temp = np.array(
- [self.tablePoints[counter, 4], self.tablePoints[counter, 5]])
+ point1_temp = np.array([
+ self.tablePoints[counter, 4],
+ self.tablePoints[counter, 5]])
geoTrans1_temp = self.tablePoints[counter, 6]
# parse inputs as a tuple
@@ -1083,8 +1090,9 @@ def __populatePointTable__(self):
connCompID1_temp = self.tablePoints[counter, 1]
connFile1_temp = self.tablePoints[counter, 2]
unwFile1_temp = self.tablePoints[counter, 3]
- point1_temp = np.array(
- [self.tablePoints[counter, 4], self.tablePoints[counter, 5]])
+ point1_temp = np.array([
+ self.tablePoints[counter, 4],
+ self.tablePoints[counter, 5]])
geoTrans1_temp = self.tablePoints[counter, 6]
# parse inputs as a tuple
inputs = (
@@ -1517,8 +1525,8 @@ def createConnComp_Int(inputs):
unwVRTName = os.path.abspath(
os.path.join(saveDir, 'unw', saveNameID + '.vrt'))
buildSumVRT(
- unwVRTName, unwRangeOffsetVRTName, scaleVRTName, connProj, connGeoTrans,
- length, width, description=inputs['description'])
+ unwVRTName, unwRangeOffsetVRTName, scaleVRTName, connProj,
+ connGeoTrans, length, width, description=inputs['description'])
return [connDataName, unwVRTName]
@@ -1624,7 +1632,8 @@ def buildScaleOffsetVRT(
"""
- #
+ #
# the inputs needed to build the vrt
# load the width and length from the GDAL file in case not specified
@@ -1688,8 +1697,9 @@ def buildSumVRT(output, File1, File2, proj, geoTrans,
with open('{0}'.format(output), 'w') as fid:
fid.write(
vrttmpl.format(
- width=width, length=length, File1=File1, File2=File2, proj=proj,
- geoTrans=str(geoTrans)[1:-1], description=description))
+ width=width, length=length, File1=File1, File2=File2,
+ proj=proj, geoTrans=str(geoTrans)[1:-1],
+ description=description))
def point2unwPhase(inputs):
"""
diff --git a/tools/ARIAtools/util/dem.py b/tools/ARIAtools/util/dem.py
index 3cbafcae..64668bb7 100644
--- a/tools/ARIAtools/util/dem.py
+++ b/tools/ARIAtools/util/dem.py
@@ -20,6 +20,7 @@
LOGGER = logging.getLogger(__name__)
+
def prep_dem(demfilename, bbox_file, prods_TOTbbox, prods_TOTbbox_metadatalyr,
proj, arrres=None, workdir='./',
outputFormat='ENVI', num_threads='2', dem_name: str = 'glo_90',
@@ -47,8 +48,8 @@ def prep_dem(demfilename, bbox_file, prods_TOTbbox, prods_TOTbbox_metadatalyr,
if demfilename.lower() == 'download':
if dem_name not in dem_stitcher.datasets.DATASETS:
raise ValueError(
- '%s must be in %s' % (dem_name, ', '.join(
- dem_stitcher.datasets.DATASETS)))
+ '%s must be in %s' % (
+ dem_name, ', '.join(dem_stitcher.datasets.DATASETS)))
LOGGER.info("Downloading DEM...")
demfilename = download_dem(
@@ -146,5 +147,3 @@ def download_dem(
vrt_path = f'{root}_uncropped.vrt'
ds = osgeo.gdal.BuildVRT(vrt_path, dem_tile_paths)
return vrt_path
-
-
diff --git a/tools/ARIAtools/util/interp.py b/tools/ARIAtools/util/interp.py
index a0273177..0a53706c 100644
--- a/tools/ARIAtools/util/interp.py
+++ b/tools/ARIAtools/util/interp.py
@@ -8,6 +8,7 @@
import numpy as np
import scipy.interpolate
+
class InterpCube(object):
"""Class to interpolate intersection of cube with DEM."""
@@ -34,4 +35,3 @@ def __call__(self, line, pix, h):
vals = np.array([x(line, pix)[0, 0] for x in self.interp])
est = scipy.interpolate.interp1d(self.hgts, vals, kind='cubic')
return est(h) + self.offset
-
diff --git a/tools/ARIAtools/util/log.py b/tools/ARIAtools/util/log.py
index 0d60a7e3..0b9e741e 100644
--- a/tools/ARIAtools/util/log.py
+++ b/tools/ARIAtools/util/log.py
@@ -59,18 +59,3 @@ def formatMessage(self, record):
if record.levelno >= logging.WARNING:
message = ": ".join((record.levelname, message))
return message
-
-# TODO decide to keep or remove this older code
-# stdout_handler = logging.StreamHandler(sys.stdout)
-# stdout_handler.setFormatter(CustomFormatter(use_color=os.name != "nt"))
-#
-# errorfile_handler = logging.FileHandler("error.log")
-# errorfile_handler.setFormatter(logging.Formatter(
-# "[{asctime}] {funcName:>20}:{lineno:<5} {levelname:<10} {message}",
-# style="{"))
-# errorfile_handler.setLevel(logging.WARNING)
-#
-# logger.addHandler(stdout_handler)
-# logger.addHandler(errorfile_handler)
-# logger = logging.getLogger("ARIAtools")
-# logger.setLevel(logging.INFO)
diff --git a/tools/ARIAtools/util/mask.py b/tools/ARIAtools/util/mask.py
index 336bd548..9d7454c7 100644
--- a/tools/ARIAtools/util/mask.py
+++ b/tools/ARIAtools/util/mask.py
@@ -195,4 +195,3 @@ def prep_mask(
# return filename of mask
return maskfilename
-
diff --git a/tools/ARIAtools/util/misc.py b/tools/ARIAtools/util/misc.py
index 18075255..056a5552 100644
--- a/tools/ARIAtools/util/misc.py
+++ b/tools/ARIAtools/util/misc.py
@@ -9,6 +9,7 @@
import time
import numpy as np
+
class ProgressBar:
"""Creates a text-based progress bar. Call the object with
the simple print command to see the progress bar, which looks
diff --git a/tools/ARIAtools/util/plot.py b/tools/ARIAtools/util/plot.py
index 6b935471..22e60b2b 100644
--- a/tools/ARIAtools/util/plot.py
+++ b/tools/ARIAtools/util/plot.py
@@ -21,6 +21,7 @@
LOGGER = logging.getLogger(__name__)
+
class PlotClass(object):
""" Class to generate standard plots for ARIA products. """
mpl._log.setLevel('ERROR')
@@ -108,8 +109,8 @@ def __design_matrix__(self):
pbaseline_nodata = pbaseline_nodata.GetRasterBand(
1).GetNoDataValue()
pbaseline_val = gdal.BuildVRT('', i[1]).ReadAsArray()
- pbaseline_val = np.ma.masked_where(pbaseline_val == pbaseline_nodata,
- pbaseline_val)
+ pbaseline_val = np.ma.masked_where(
+ pbaseline_val == pbaseline_nodata, pbaseline_val)
pbaseline_val = pbaseline_val.mean()
# Record baseline val for histogram
baseline_hist.append(pbaseline_val)
@@ -454,13 +455,13 @@ def plotbperpcoh(self):
slaves.append(pd.to_datetime(i[1][:8]))
masters.append(pd.to_datetime(i[1][9:]))
# Open coherence file
- coh_file = gdal.Warp('', self.product_dict[2][i[0]], format="MEM",
- cutlineDSName=self.prods_TOTbbox,
- outputBounds=self.bbox_file, resampleAlg='average',
- targetAlignedPixels=True,
- xRes=self.arrres[0], yRes=self.arrres[1],
- multithread=True,
- options=[f'NUM_THREADS={self.num_threads}'])
+ coh_file = gdal.Warp(
+ '', self.product_dict[2][i[0]], format="MEM",
+ cutlineDSName=self.prods_TOTbbox,
+ outputBounds=self.bbox_file, resampleAlg='average',
+ targetAlignedPixels=True,
+ xRes=self.arrres[0], yRes=self.arrres[1], multithread=True,
+ options=[f'NUM_THREADS={self.num_threads}'])
# Apply mask (if specified).
if self.mask is not None:
diff --git a/tools/ARIAtools/util/vrt.py b/tools/ARIAtools/util/vrt.py
index d5edfdaa..3e1154fe 100644
--- a/tools/ARIAtools/util/vrt.py
+++ b/tools/ARIAtools/util/vrt.py
@@ -159,10 +159,12 @@ def resampleRaster(
unwmap = np.array(unwmap)
# finalize unw array shape
- indx0 = int(decimal.Decimal(ds_unw.shape[0] / multilooking).quantize(
- 0, decimal.ROUND_HALF_UP))
- indx1 = int(decimal.Decimal(ds_unw.shape[1] / multilooking).quantize(
- 0, decimal.ROUND_HALF_UP))
+ indx0 = int(decimal.Decimal(
+ ds_unw.shape[0] / multilooking).quantize(
+ 0, decimal.ROUND_HALF_UP))
+ indx1 = int(decimal.Decimal(
+ ds_unw.shape[1] / multilooking).quantize(
+ 0, decimal.ROUND_HALF_UP))
unwmap = unwmap[0:indx0, 0:indx1]
unwmap = np.ma.masked_invalid(unwmap)
np.ma.set_fill_value(unwmap, ds_unw_nodata)
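The decimal.Decimal(...).quantize(...) calls re-indented here are how resampleRaster computes the decimated array shape with half-up rounding; the built-in round() uses banker's rounding and can differ by one row or column. A minimal illustration (the helper name is ours, not the module's):

    import decimal

    def round_half_up(value):
        # Round ties away from zero instead of to the nearest even integer.
        return int(decimal.Decimal(value).quantize(
            decimal.Decimal(1), rounding=decimal.ROUND_HALF_UP))

    assert round(2.5) == 2          # built-in: banker's rounding
    assert round_half_up(2.5) == 3  # behaviour relied on for the array shape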
diff --git a/tools/bin/ariaAOIassist.py b/tools/bin/ariaAOIassist.py
index 1cf3eb53..47c1093b 100755
--- a/tools/bin/ariaAOIassist.py
+++ b/tools/bin/ariaAOIassist.py
@@ -25,32 +25,62 @@ def createParser():
Use product metadata to assist in area of interest (AOI) creation.
'''
parser = argparse.ArgumentParser(
- description='Preparing preliminary plot of frame extents. First go to the ASF search page, push all SLCs over defined search area to cart, download CSV under the metadata option, and pass the CSV through to this script with the -f flag.')
- parser.add_argument('-f', '--file', dest='imgfile', type=str, required=True,
- help='Full path to CSV file containing SLC frame metadata.')
- parser.add_argument('-w', '--workdir', dest='workdir', default='./',
- help='Specify directory to deposit all outputs. Default is local directory where script is launched.')
- parser.add_argument('-t', '--tracks', dest='tracks', type=str, default='all',
- help='Include only specified track number in results. Can be multiple, separated by spaces. Default : All')
- parser.add_argument('-l', '--lat_bounds', dest='latBounds', type=str, default=None,
- help='Specify a search for only frames that fall within these lat bounds. Default : None')
- parser.add_argument('-s', '--start_date', dest='startDate', type=str, default=None,
- help='Start date. Default : None')
- parser.add_argument('-e', '--end_date', dest='endDate', type=str, default=None,
- help='End date. Default : None')
- parser.add_argument('-x', '--exclude_dates', dest='excludeDates', type=str, default=None,
- help='List of dates to exclude from kml generation. This can be provided as space-separated string in format YYYYMMDD (e.g., \'20180101 20181213 20190428\'), or as a text file with one date to exclude per line. Default : None')
- parser.add_argument('--plot_raw', dest='plotRaw', action='store_true',
- help='Plot raw frames if included in .csv')
- parser.add_argument('--flag_partial_coverage', dest='flagPartialCoverage', action='store_true',
- help='Flag dates that do not cover the full lat/lon extent. This does not remove dates from the lat centers plot, only highlights the dates in red.')
- parser.add_argument('--remove_incomplete_dates', dest='removeIncomplete', action='store_true',
- help='Automatically detect and remove dates that do not entirely fill the given latitude bounds. Note that if lat bounds are left as default, only dates with gaps will be automatically excluded.')
- parser.add_argument('--approximate_AOI', dest='approxAOI', action='store_true',
- help='Create KML of approximate AOI. NOTE: ~20 km or 1 burst must be removed from either end of the AOI--this must be confirmed by the user.')
- parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
- help='Verbose mode')
-
+ description='Preparing preliminary plot of frame extents. First go '
+ 'to the ASF search page, push all SLCs over defined '
+ 'search area to cart, download CSV under the metadata '
+ 'option, and pass the CSV through to this script with '
+ 'the -f flag.')
+ parser.add_argument(
+ '-f', '--file', dest='imgfile', type=str, required=True,
+ help='Full path to CSV file containing SLC frame metadata.')
+ parser.add_argument(
+ '-w', '--workdir', dest='workdir', default='./',
+ help='Specify directory to deposit all outputs. Default is local '
+ 'directory where script is launched.')
+ parser.add_argument(
+ '-t', '--tracks', dest='tracks', type=str, default='all',
+ help='Include only specified track number in results. Can be '
+ 'multiple, separated by spaces. Default : All')
+ parser.add_argument(
+ '-l', '--lat_bounds', dest='latBounds', type=str, default=None,
+ help='Specify a search for only frames that fall within these lat '
+ 'bounds. Default : None')
+ parser.add_argument(
+ '-s', '--start_date', dest='startDate', type=str, default=None,
+ help='Start date. Default : None')
+ parser.add_argument(
+ '-e', '--end_date', dest='endDate', type=str, default=None,
+ help='End date. Default : None')
+ parser.add_argument(
+ '-x', '--exclude_dates', dest='excludeDates', type=str, default=None,
+ help='List of dates to exclude from kml generation. This can be '
+ 'provided as space-separated string in format YYYYMMDD '
+ '(e.g., \'20180101 20181213 20190428\'), or as a text file '
+ 'with one date to exclude per line. Default : None')
+ parser.add_argument(
+ '--plot_raw', dest='plotRaw', action='store_true',
+ help='Plot raw frames if included in .csv')
+ parser.add_argument(
+ '--flag_partial_coverage', dest='flagPartialCoverage',
+ action='store_true',
+ help='Flag dates that do not cover the full lat/lon extent. This '
+ 'does not remove dates from the lat centers plot, only '
+ 'highlights the dates in red.')
+ parser.add_argument(
+ '--remove_incomplete_dates', dest='removeIncomplete',
+ action='store_true',
+ help='Automatically detect and remove dates that do not entirely '
+ 'fill the given latitude bounds. Note that if lat bounds are '
+ 'left as default, only dates with gaps will be automatically '
+ 'excluded.')
+ parser.add_argument(
+ '--approximate_AOI', dest='approxAOI', action='store_true',
+ help='Create KML of approximate AOI. NOTE: ~20 km or 1 burst must '
+ 'be removed from either end of the AOI--this must be confirmed '
+ 'by the user.')
+ parser.add_argument(
+ '-v', '--verbose', dest='verbose', action='store_true',
+ help='Verbose mode')
return parser
@@ -62,19 +92,22 @@ def cmdLineParse(iargs=None):
# Metadata class
class SentinelMetadata:
'''
- Class for parsing, filting, and displaying metadata from ASF Vertex csv using Pandas
- functionality. The self.metadata property is a pandas dataframe in which all metadata are
- carried. Columns are originally those from the ASF csv spreadsheet, and some others will
- be added for further sorting. Filtering is done in-place using the dataframe. Additional
- parameters and flags are added without removing metadata entries.
+ Class for parsing, filtering, and displaying metadata from ASF Vertex
+ csv using Pandas functionality. The self.metadata property is a pandas
+ dataframe in which all metadata are carried. Columns are originally
+ those from the ASF csv spreadsheet, and some others will be added for
+ further sorting. Filtering is done in-place using the dataframe.
+ Additional parameters and flags are added without removing metadata
+ entries.
'''
# Load data from csv and pre-format
def __init__(self, imgfile, track, workdir='./',
excludeDates=None, verbose=False):
'''
- Initialize class for parsing Sentinel-1 metadata downloaded from the ASF archive.
- See help(SentinelMetadata) for further description.
+ Initialize class for parsing Sentinel-1 metadata downloaded from
+ the ASF archive.
+ See help(SentinelMetadata) for further description.
'''
# Record parameters
self.track = track
@@ -109,27 +142,28 @@ def __init__(self, imgfile, track, workdir='./',
def __assignFrameID__(self):
'''
- Develop a unique "frame identification code" comprising orbit+path+frame
+ Develop a unique "frame identification code" comprising
+ orbit+path+frame
'''
frameIDs = []
for frameNdx, frame in self.metadata.iterrows():
frameProperties = list(
frame[['Orbit', 'Path Number', 'Frame Number']])
- frameProperties = [str(frameProperty)
- for frameProperty in frameProperties] # to string
- frameID = ''.join(frameProperties) # concatenate to single string
+ frameProperties = [
+ str(frameProperty) for frameProperty in frameProperties]
+ frameID = ''.join(frameProperties)
frameIDs.append(frameID)
self.metadata['frameID'] = frameIDs
def __assignDatetimes__(self):
'''
- Add a column with Python datetime objects to represent precisely the date and time at
- which a frame was captured in a format that can be used computationally.
- Additionally, assign a "common" date to which all acquisitions closely correspond. This
- will help with sorting by date, especially in cases where the satellite crosses the
- midnight boundary.
- This function assumes that acqusitions within 24 hours of each other correspond to the
- same date.
+ Add a column with Python datetime objects to represent precisely
+ the date and time at which a frame was captured in a format that
+ can be used computationally. Additionally, assign a "common" date
+ to which all acquisitions closely correspond. This will help with
+ sorting by date, especially in cases where the satellite crosses the
+ midnight boundary. This function assumes that acquisitions within 24
+ hours of each other correspond to the same date.
'''
# Reformat Acquistion Date string into datetime object
dateFmt = '%Y-%m-%dT%H:%M:%S'
@@ -157,9 +191,9 @@ def __assignDatetimes__(self):
def __formatExcludeDates__(self):
'''
- Determine whether the --exclude_dates input is specified as an input string or a text
- file. If a text file is given, format and transfer the contents to a list attached to the
- input object.
+ Determine whether the --exclude_dates input is specified as an
+ input string or a text file. If a text file is given, format and
+ transfer the contents to a list attached to the input object.
'''
# Format user-specified dates to exclude
# Check whether exclude dates list is given, otherwise, provide an
@@ -174,13 +208,14 @@ def __formatExcludeDates__(self):
# remove new line formatting
self.excludeDates = [line.strip('\n') for line in lines]
else:
- self.excludeDates = self.excludeDates.split() # split by spaces
+ self.excludeDates = self.excludeDates.split()
else:
self.excludeDates = []
def __determineLatBounds__(self):
'''
- Automatically determine the latitude bounds baesd on the existing data set.
+ Automatically determine the latitude bounds based on the existing
+ data set.
'''
# Determine min/max of data set
self.minLat = self.metadata['Center Lat'].min()
@@ -190,8 +225,8 @@ def __determineLatBounds__(self):
def __filterByTrack__(self):
'''
- Remove tracks not specified by user. Modify self.tracks parameter to include only those
- tracks.
+ Remove tracks not specified by user. Modify self.tracks parameter
+ to include only those tracks.
'''
dropIndicesTrack = self.metadata[self.metadata['Path Number']
!= self.track].index
@@ -205,24 +240,26 @@ def __filterByTrack__(self):
def __filterByBeamMode__(self):
'''
- Remove frames if beam mode is not IW.
+ Remove frames if beam mode is not IW.
'''
dropIndicesIW = self.metadata[self.metadata['Beam Mode'] != 'IW'].index
self.metadata.drop(dropIndicesIW, inplace=True)
def __filterByProcessingLevel__(self):
'''
- Remove if processing level is not "SLC" or "RAW". Only include "RAW" frames if "SLC" is
- not available for that location and time.
+ Remove if processing level is not "SLC" or "RAW". Only include
+ "RAW" frames if "SLC" is not available for that location and time.
'''
# Ensure that only "RAW" and "SLC" frames are included
- dropIndicesProc = self.metadata[(self.metadata['Processing Level'] != 'SLC') &
- (self.metadata['Processing Level'] != 'RAW')].index
+ dropIndicesProc = self.metadata[
+ (self.metadata['Processing Level'] != 'SLC') &
+ (self.metadata['Processing Level'] != 'RAW')].index
self.metadata.drop(dropIndicesProc, inplace=True)
# SLC frame IDs as integers to compare with RAW frame IDs
- slcIDs = np.array([int(frame['frameID']) for ndx, frame in self.metadata.iterrows() if
- frame['Processing Level'] == 'SLC'])
+ slcIDs = np.array([
+ int(frame['frameID']) for ndx, frame in self.metadata.iterrows() if
+ frame['Processing Level'] == 'SLC'])
# Loop through raw frames to check that a similar SLC does not exist,
# based on frameID
@@ -238,8 +275,8 @@ def __filterByProcessingLevel__(self):
# Additional filtering
def filterByDate(self, startDate=None, endDate=None):
'''
- Clip the data set based on start and end dates if provided. Provide dates in format
- YYYYMMDD.
+ Clip the data set based on start and end dates if provided. Provide
+ dates in format YYYYMMDD.
'''
# Convert start and end dates to datetimes
if startDate is not None:
@@ -259,8 +296,9 @@ def filterByDate(self, startDate=None, endDate=None):
def filterByLatitude(self, minLat=None, maxLat=None):
'''
- Remove scenes if they do not meet the specified latitude requirements.
- Loop through all IW SLCs frames to confirm which fall within latitude bounds.
+ Remove scenes if they do not meet the specified latitude requirements.
+ Loop through all IW SLCs frames to confirm which fall within latitude
+ bounds.
'''
# Update latitude bounds
if minLat is not None:
@@ -272,8 +310,9 @@ def filterByLatitude(self, minLat=None, maxLat=None):
dropIndicesLats = []
for frameNdx, frame in self.metadata.iterrows():
# All latitude positions
- frameLats = frame[['Near Start Lat',
- 'Far Start Lat', 'Near End Lat', 'Far End Lat']]
+ frameLats = frame[[
+ 'Near Start Lat', 'Far Start Lat',
+ 'Near End Lat', 'Far End Lat']]
# Check that frames are within lat bounds
if frameLats.max() < self.minLat:
@@ -285,16 +324,17 @@ def filterByLatitude(self, minLat=None, maxLat=None):
# Spatial checks
def checkContinuity(self, removeIncompleteDates=False):
'''
- Check whether an SLC is missing in the track for any given date. Specifically, we want to
- see whether the spacing between frames exceeds a threshold (e.g., one and a half frames).
- If that is the case, we want to assign different colors to the different subsets of frames.
- This information will be carried through the metadata as new columns: "nGaps" (i.e., the
- number of gaps with frames missing), and "subGroup".
- This function also checks whether the frames cover the full latitude extent provided.
- Dates with incomplete coverage will not be automatically excluded, they will simply
- be flagged.
- If the --removeIncompleteDates option is selected, update the self.excludeDates
- property with the incomplete dates.
+ Check whether an SLC is missing in the track for any given date.
+ Specifically, we want to see whether the spacing between frames
+ exceeds a threshold (e.g., one and a half frames). If that is the
+ case, we want to assign different colors to the different subsets of
+ frames. This information will be carried through the metadata as new
+ columns: "nGaps" (i.e., the number of gaps with frames missing), and
+ "subGroup". This function also checks whether the frames cover the
+ full latitude extent provided. Dates with incomplete coverage will
+ not be automatically excluded, they will simply be flagged.
+ If the --removeIncompleteDates option is selected, update the
+ self.excludeDates property with the incomplete dates.
'''
# Add columns for min/max Lat extremes to each frame
self.__addPassLats__()
@@ -320,7 +360,8 @@ def checkContinuity(self, removeIncompleteDates=False):
passIndices = list(set(SLCindices).intersection(dateIndices))
# Sort tracks south-north and compare latitude extents
- # "satPass" refers to all the acquisitions from a single satellite pass
+ # "satPass" refers to all the acquisitions from a single satellite
+ # pass
satPass = self.metadata.loc[passIndices,
:].sort_values(by='Center Lat')
@@ -331,8 +372,8 @@ def checkContinuity(self, removeIncompleteDates=False):
ndxSouth = satPass.index[i]
ndxNorth = satPass.index[i + 1]
- # Action required if N extent of south frame < S extent of north frame
- # i.e., no overlap
+ # Action required if N extent of south frame < S extent of
+ # north frame i.e., no overlap
if satPass.loc[ndxSouth,
'maxLat'] < satPass.loc[ndxNorth, 'minLat']:
# Update gaps detected for all acquisitions in pass
@@ -355,19 +396,19 @@ def checkContinuity(self, removeIncompleteDates=False):
# If there are no gaps and latitude extremes are covered, consider
# coverage complete
if (sum(self.metadata.loc[passIndices, 'gaps']) == 0) and (
- latExtremesMet == True):
+ latExtremesMet):
self.metadata.loc[passIndices, 'Extent Covered'] = True
# If removeIncompleteDates is specified, update self.excludeDates
self.partialDatesRemoved = False
- if removeIncompleteDates == True:
+ if removeIncompleteDates:
# Update parameter
self.partialDatesRemoved = True
# Check extent covered
for frameNdx, frame in self.metadata.iterrows():
- if frame['Extent Covered'] == False:
+ if not frame['Extent Covered']:
self.excludeDates.append(frame['Common Date'])
self.excludeDates = list(set(self.excludeDates))
self.excludeDates.sort()
@@ -382,8 +423,9 @@ def checkContinuity(self, removeIncompleteDates=False):
# Warn user if no dates remain which satisfy all spatial and temporal
# criteria
- remainingDates = [date for date in self.metadata['Common Date'] if date not in
- self.excludeDates]
+ remainingDates = [
+ date for date in self.metadata['Common Date'] if date not in
+ self.excludeDates]
self.nRemainingDates = len(remainingDates)
if self.nRemainingDates == 0:
excludeDates_warning = '''
@@ -394,8 +436,8 @@ def checkContinuity(self, removeIncompleteDates=False):
def __addPassLats__(self):
'''
- Add one column each for the minimum and maximum latitude extents. This is designed to ease
- continuity checks.
+ Add one column each for the minimum and maximum latitude extents.
+ This is designed to ease continuity checks.
'''
minLats = []
maxLats = []
@@ -424,8 +466,7 @@ def plotFrameCenters(self, flagPartialCoverage=False, plotRaw=False):
labels = []
for frameNdx, frame in self.metadata.loc[SLCindices, :].iterrows():
# Color based on spatial extent, if extent is not automatic
- if (flagPartialCoverage == True) and (
- frame['Extent Covered'] == False):
+ if flagPartialCoverage and not frame['Extent Covered']:
color = 'r'
label = 'Incomplete for lat.s {:.1f}-{:.1f}'.format(
self.minLat, self.maxLat)
@@ -434,7 +475,7 @@ def plotFrameCenters(self, flagPartialCoverage=False, plotRaw=False):
label = 'Complete track'
# Color based on gaps
- if (flagPartialCoverage == True) and (frame['gaps'] > 0):
+ if flagPartialCoverage and (frame['gaps'] > 0):
color = (0.6, 0, 0) # dark red
label = 'Incomplete due to gaps'
@@ -444,18 +485,20 @@ def plotFrameCenters(self, flagPartialCoverage=False, plotRaw=False):
label = 'Excluded date'
# Plot frame
- self.ax.scatter(frame['Common Datetime'], frame['Center Lat'], s=100, color=color,
- label=label)
+ self.ax.scatter(
+ frame['Common Datetime'], frame['Center Lat'], s=100,
+ color=color, label=label)
# Plot RAW frames if specified
- if plotRaw == True:
+ if plotRaw:
RAWindices = self.metadata[self.metadata['Processing Level']
== 'RAW'].index
# Loop through each RAW acquisition
for frameNdx, frame in self.metadata.loc[RAWindices, :].iterrows():
- self.ax.scatter(frame['Common Datetime'], frame['Center Lat'], s=100, color='m',
- label='Raw frame')
+ self.ax.scatter(
+ frame['Common Datetime'], frame['Center Lat'], s=100,
+ color='m', label='Raw frame')
# Format x-axis
dates = sorted(set(self.metadata['Common Datetime']))
@@ -468,24 +511,26 @@ def plotFrameCenters(self, flagPartialCoverage=False, plotRaw=False):
self.ax.set_xlabel('Date')
# Highlight dates with only partial coverage
- if flagPartialCoverage == True:
+ if flagPartialCoverage:
slcIndices = self.metadata[self.metadata['Processing Level']
== 'SLC'].index
- partialIndices = self.metadata[self.metadata['Extent Covered']
- == False].index
+ partialIndices = self.metadata[
+ ~self.metadata['Extent Covered']].index
partialIndices = list(set(slcIndices).intersection(partialIndices))
- partialDates = list(
- set([date for date in self.metadata.loc[partialIndices, 'Common Date']]))
+ partialDates = list(set([
+ date for date in
+ self.metadata.loc[partialIndices, 'Common Date']]))
# Change date label to red if only partial coverage
- [self.ax.get_xticklabels()[n].set_color('r') for n, date in enumerate(datelabels) if
- date in partialDates]
+ [self.ax.get_xticklabels()[n].set_color('r')
+ for n, date in enumerate(datelabels) if date in partialDates]
# Format legend
handles, labels = self.ax.get_legend_handles_labels()
uniqueLabels = dict(zip(labels, handles))
- self.ax.legend(uniqueLabels.values(), uniqueLabels.keys(),
- bbox_to_anchor=(1.005, 1), loc='upper left', borderaxespad=0.)
+ self.ax.legend(
+ uniqueLabels.values(), uniqueLabels.keys(),
+ bbox_to_anchor=(1.005, 1), loc='upper left', borderaxespad=0.)
# Other formatting
title = 'Track {}'.format(self.trackCode)
@@ -502,7 +547,7 @@ def plotFrameCenters(self, flagPartialCoverage=False, plotRaw=False):
def saveEpochs(self):
'''
- Save a list of unique epochs and report if requested.
+ Save a list of unique epochs and report if requested.
'''
# Detect number of epochs
epochs = self.metadata['Common Date'].to_list()
@@ -510,7 +555,7 @@ def saveEpochs(self):
Nepochs = len(epochs)
# Report if requested
- if self.verbose == True:
+ if self.verbose:
print('{} unique epochs'.format(Nepochs))
# Save to list
@@ -525,7 +570,8 @@ def saveEpochs(self):
def save2kml(self):
'''
- Save frame boundaries to kml file - use only SLC frames and non-exluded dates.
+ Save frame boundaries to kml file - use only SLC frames and
+ non-excluded dates.
'''
# Open KML data set
kmlname = 'track{}_frames.kml'.format(self.trackCode)
@@ -548,10 +594,8 @@ def save2kml(self):
# Create KML layer
layer = DS.CreateLayer(date, None, ogr.wkbPolygon)
- layer.CreateField(
- ogr.FieldDefn(
- 'id',
- ogr.OFTInteger)) # add 1 attribute
+ # add 1 attribute
+ layer.CreateField(ogr.FieldDefn('id', ogr.OFTInteger))
# Add frame polygons
for frameNdx, frame in self.metadata.loc[dateIndices, :].iterrows(
@@ -570,25 +614,24 @@ def save2kml(self):
def __polygonFromFrame__(self, frame):
'''
- Create a Shapely polygon from the coordinates of a frame.
+ Create a Shapely polygon from the coordinates of a frame.
'''
- P = Polygon([(float(frame['Near Start Lon']), float(frame['Near Start Lat'])),
- (float(frame['Far Start Lon']),
- float(frame['Far Start Lat'])),
- (float(frame['Far End Lon']),
- float(frame['Far End Lat'])),
- (float(frame['Near End Lon']), float(frame['Near End Lat']))])
+ P = Polygon([
+ (float(frame['Near Start Lon']), float(frame['Near Start Lat'])),
+ (float(frame['Far Start Lon']), float(frame['Far Start Lat'])),
+ (float(frame['Far End Lon']), float(frame['Far End Lat'])),
+ (float(frame['Near End Lon']), float(frame['Near End Lat']))])
return P
# Suggested AOI based on intersection of all polygons
def intersectionAOI(self):
'''
- Create a "suggested" AOI based on the intersection of all frame polygons.
- This function does not guarantee a result.
+ Create a "suggested" AOI based on the intersection of all frame
+ polygons. This function does not guarantee a result.
'''
# Exclude dates based on spatial criteria if not already done
- if self.partialDatesRemoved == False:
+ if not self.partialDatesRemoved:
self.checkContinuity(removeIncompleteDates=True)
# Use only SLCs
@@ -603,7 +646,8 @@ def intersectionAOI(self):
dateUnion = self.__mergeFramesbyDate__(dates[0])
sceneIntersection = dateUnion
- validOverlap = True # assume all frames overlap until detected otherwise
+ # assume all frames overlap until detected otherwise
+ validOverlap = True
for date in dates[1:]:
# Take union of frames in date
dateUnion = self.__mergeFramesbyDate__(date)
@@ -611,8 +655,8 @@ def intersectionAOI(self):
# Take intersection of dates
sceneIntersection = sceneIntersection.intersection(dateUnion)
- # Check whether area of intersection remains a single polygon, or if it is broken into
- # multiple pieces
+ # Check whether area of intersection remains a single polygon,
+ # or if it is broken into multiple pieces
if isinstance(sceneIntersection, MultiPolygon):
print('COULD NOT AUTOMATICALLY DETERMINE AOI DUE TO GAPS.')
print(
@@ -621,7 +665,7 @@ def intersectionAOI(self):
break
# Remove ~20km from either end and save suggested AOI
- if validOverlap == True:
+ if validOverlap:
# Simply polygon by removing points within several hundred meters
# of each other
sceneIntersection = self.__simplifyAOI__(sceneIntersection)
@@ -644,7 +688,8 @@ def __mergeFramesbyDate__(self, date):
== 'SLC'].index
# Collect indices of date
- dateIndices = self.metadata[self.metadata['Common Date'] == date].index
+ dateIndices = self.metadata[
+ self.metadata['Common Date'] == date].index
dateIndices = list(set(slcIndices).intersection(dateIndices))
# Compute polygons
@@ -662,7 +707,8 @@ def __mergeFramesbyDate__(self, date):
def __simplifyAOI__(self, AOI):
'''
- Simplify AOI polygon by removing points within several kilometers of each other.
+ Simplify AOI polygon by removing points within several kilometers
+ of each other.
'''
# Assign search distance
d = 0.05 # 0.01 degrees ~ 1 km
@@ -704,9 +750,9 @@ def __simplifyAOI__(self, AOI):
def __trimAOIedges__(self, AOI):
'''
- Remove ~20 km or 1 burst from the N-S edges of the suggested AOI.
- First, remove data centroid. Then, rotate into a semi-vertical orientation.
- Remove ~20 km from top and bottom points.
+ Remove ~20 km or 1 burst from the N-S edges of the suggested AOI.
+ First, remove data centroid. Then, rotate into a semi-vertical
+ orientation. Remove ~20 km from top and bottom points.
'''
# Assign trim distance
trimDist = 0.2 # degrees
@@ -796,9 +842,9 @@ def __saveAOI__(self, AOI):
print('Generating outputs for track: {}'.format(track))
# Instantiate metadata object and load metadata from csv
- track_metadata = SentinelMetadata(imgfile=inps.imgfile, track=track, workdir=inps.workdir,
- excludeDates=inps.excludeDates,
- verbose=inps.verbose)
+ track_metadata = SentinelMetadata(
+ imgfile=inps.imgfile, track=track, workdir=inps.workdir,
+ excludeDates=inps.excludeDates, verbose=inps.verbose)
# Filtering -- remove frames from metadata
# Clip based on start and end date
@@ -815,16 +861,16 @@ def __saveAOI__(self, AOI):
track_metadata.filterByLatitude(
minLat=inps.latBounds[0], maxLat=inps.latBounds[1])
- # Check spatial criteria -- does not remove scenes, only highlights potentially problematic
- # ones
+ # Check spatial criteria -- does not remove scenes, only highlights
+ # potentially problematic ones
# Check for gaps
track_metadata.checkContinuity(
removeIncompleteDates=inps.removeIncomplete)
# Outputs
# Plot frame centers
- track_metadata.plotFrameCenters(flagPartialCoverage=inps.flagPartialCoverage,
- plotRaw=inps.plotRaw)
+ track_metadata.plotFrameCenters(
+ flagPartialCoverage=inps.flagPartialCoverage, plotRaw=inps.plotRaw)
# Save epochs to list
track_metadata.saveEpochs()
@@ -833,7 +879,7 @@ def __saveAOI__(self, AOI):
track_metadata.save2kml()
# Create suggested AOI based on intersection of all polygons
- if inps.approxAOI == True:
+ if inps.approxAOI:
track_metadata.intersectionAOI()
print('Products generated.')
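The 'Extent Covered' filter in plotFrameCenters has to invert its boolean mask element-wise with ~; applying plain `not` to a pandas Series raises "The truth value of a Series is ambiguous" rather than selecting rows. A toy sketch of the distinction:

    import pandas as pd

    df = pd.DataFrame({'Extent Covered': [True, False, True]})

    # Element-wise inversion keeps only the rows where the flag is False.
    partial = df[~df['Extent Covered']].index
    assert list(partial) == [1]

    # df[not df['Extent Covered']] would raise ValueError instead.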
diff --git a/tools/bin/ariaDownload.py b/tools/bin/ariaDownload.py
index 654a4df7..e518abd2 100755
--- a/tools/bin/ariaDownload.py
+++ b/tools/bin/ariaDownload.py
@@ -33,14 +33,14 @@ def createParser():
parser = argparse.ArgumentParser(
description='Command line interface to download GUNW products from '
'the ASF DAAC. GUNW products are hosted at the NASA ASF '
- 'DAAC.\nDownloading them requires a NASA Earthdata URS '
- 'user login and requires users to add "GRFN Door (PROD)" '
- 'and "ASF Datapool Products" to their URS approved '
- 'applications.',
+ 'DAAC.\nDownloading them requires a NASA Earthdata URS '
+ 'user login and requires users to add "GRFN Door (PROD)" '
+ 'and "ASF Datapool Products" to their URS approved '
+ 'applications.',
epilog='Examples of use:\n'
- '\t ariaDownload.py --track 004 --output count\n'
- '\t ariaDownload.py --bbox "36.75 37.225 -76.655 -75.928"\n'
- '\t ariaDownload.py -t 004,077 --start 20190101 -o count',
+ '\t ariaDownload.py --track 004 --output count\n'
+ '\t ariaDownload.py --bbox "36.75 37.225 -76.655 -75.928"\n'
+ '\t ariaDownload.py -t 004,077 --start 20190101 -o count',
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument(
@@ -69,8 +69,9 @@ def createParser():
help='End date as YYYYMMDD. If none provided, ends today.')
parser.add_argument(
'-u', '--user', default=None, type=str,
- help='NASA Earthdata URS user login. Users must add "GRFN Door (PROD)" '
- 'and "ASF Datapool Products" to their URS approved applications.')
+ help='NASA Earthdata URS user login. Users must add '
+ '"GRFN Door (PROD)" and "ASF Datapool Products" to their URS '
+ 'approved applications.')
parser.add_argument(
'-p', '--pass', dest='passw', default=None, type=str,
help='NASA Earthdata URS user password. Users must add "GRFN Door '
@@ -78,7 +79,8 @@ def createParser():
'applications.')
parser.add_argument(
'-l', '--daysless', dest='dayslt', default=math.inf, type=int,
- help='Take pairs with a temporal baseline -- days less than this value.')
+ help='Take pairs with a temporal baseline -- days less than this '
+ 'value.')
parser.add_argument(
'-m', '--daysmore', dest='daysgt', default=0, type=int,
help='Take pairs with a temporal baseline -- days greater than this '
@@ -87,8 +89,8 @@ def createParser():
parser.add_argument(
'-nt', '--num_threads', default='1', type=str,
help='Specify number of threads for multiprocessing download. By '
- 'default "1". Can also specify "All" to use all available '
- 'threads.')
+ 'default "1". Can also specify "All" to use all available '
+ 'threads.')
parser.add_argument(
'-i', '--ifg', default=None, type=str,
help='Retrieve one interferogram by its start/end date, specified as '
@@ -137,8 +139,8 @@ def make_bbox(inp_bbox):
except BaseException:
raise Exception('Cannot understand the --bbox argument. '
- 'Input string was entered incorrectly or path does not '
- 'exist.')
+ 'Input string was entered incorrectly or path '
+ 'does not exist.')
return shapely.geometry.Polygon([(W, N), (W, S), (E, S), (E, N)])
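For reference, the 'S N W E' ordering that the error message and the `shapely` call above rely on, condensed into a standalone sketch (the real `make_bbox` also accepts a path to a shapefile, which is omitted here):

```python
import shapely.geometry


def make_bbox_sketch(inp_bbox):
    # Expects 'S N W E', as in the --bbox example in the epilog above.
    try:
        S, N, W, E = (float(val) for val in inp_bbox.strip().split())
    except BaseException:
        raise Exception('Cannot understand the --bbox argument. '
                        'Input string was entered incorrectly or path '
                        'does not exist.')
    return shapely.geometry.Polygon([(W, N), (W, S), (E, S), (E, N)])


print(make_bbox_sketch('36.75 37.225 -76.655 -75.928').bounds)
```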
@@ -228,7 +230,8 @@ def __call__(self):
else:
sten_chk = sti >= self.args.start and eni <= self.args.end
elap = (eni - sti).days
- elap_chk = elap >= self.args.daysgt and elap <= self.args.dayslt
+ elap_chk = (
+ elap >= self.args.daysgt and elap <= self.args.dayslt)
if sten_chk and elap_chk:
idx.append(i)
else:
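The parenthesised form above is purely a line-length fix; an equivalent way to express the same filter (not adopted by this patch) is a chained comparison:

```python
def elapsed_ok(elap, daysgt, dayslt):
    # Same test as elap_chk above, written as a chained comparison.
    return daysgt <= elap <= dayslt


assert elapsed_ok(12, 0, 48)
assert not elapsed_ok(60, 0, 48)
```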
@@ -266,7 +269,8 @@ def __call__(self):
# Suppress warnings on files already downloaded
with warnings.catch_warnings():
warnings.simplefilter("ignore")
- scenes.download(self.args.wd, processes=nt, session=session)
+ scenes.download(
+ self.args.wd, processes=nt, session=session)
else:
# Suppress warnings on files already downloaded
@@ -311,6 +315,7 @@ def query_asf(self):
return scenes
+
def main():
parser = createParser()
args = parser.parse_args()
@@ -328,5 +333,6 @@ def main():
raise Exception('Must specify either a bbox or track')
Downloader(args)()
+
if __name__ == '__main__':
main()
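The threaded-download hunk above re-wraps a call into `asf_search`, the package ariaDownload.py is built on. A rough, self-contained sketch of that code path; the search parameters are illustrative only, since the real query is assembled inside `query_asf` from --track/--bbox and the Earthdata credentials:

```python
import asf_search

# Illustrative only: credentials normally come from ~/.netrc or
# --user/--pass, and the WKT below is just the epilog's example bbox.
session = asf_search.ASFSession()
scenes = asf_search.geo_search(
    intersectsWith='POLYGON((-76.655 36.75, -75.928 36.75, -75.928 37.225, '
                   '-76.655 37.225, -76.655 36.75))',
    processingLevel='GUNW_STD')
scenes.download(path='products', processes=4, session=session)
```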
diff --git a/tools/bin/ariaKml2box.py b/tools/bin/ariaKml2box.py
index 0e9c080f..81ba5463 100755
--- a/tools/bin/ariaKml2box.py
+++ b/tools/bin/ariaKml2box.py
@@ -16,6 +16,7 @@
LOGGER = logging.getLogger('ariaKml2box.py')
+
def createParser():
'''
Convert .kml files of Google Earth polygons to GeoJSON files which
@@ -39,6 +40,7 @@ def createParser():
'--log-level', default='warning', help='Logger log level')
return parser
+
def main():
parser = createParser()
args = parser.parse_args()
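The blank lines added in this file (and in ariaDownload.py, ariaPlot.py and export_product.py) only bring the scripts in line with PEP 8's E302/E305 rules: two blank lines around every top-level definition. Schematically:

```python
import logging

LOGGER = logging.getLogger('example.py')


def createParser():
    # two blank lines above this def satisfy E302
    ...


def main():
    # and again between top-level defs
    ...


if __name__ == '__main__':
    # two blank lines after the last def satisfy E305
    main()
```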
diff --git a/tools/bin/ariaMisclosure.py b/tools/bin/ariaMisclosure.py
index 8c0da68c..48dd4b1b 100755
--- a/tools/bin/ariaMisclosure.py
+++ b/tools/bin/ariaMisclosure.py
@@ -42,8 +42,8 @@
history of any pixel by clicking on the maps.
Thumbnail images of the misclosure associated with any given triplet are
-saved in the MisclosureFigs folder. Additionally, georeferenced tiffs of the cumulative misclosure and absolute cumulative
-misclosure maps are saved.
+saved in the MisclosureFigs folder. Additionally, georeferenced tiffs of the
+cumulative misclosure and absolute cumulative misclosure maps are saved.
'''
EXAMPLES = '''EXAMPLES
@@ -59,6 +59,7 @@
ariaMisclosure.py -f stack/unwrapStack.vrt --mintime 12 --maxtime 48
'''
+
def create_parser():
parser = argparse.ArgumentParser(
description=DESCRIPTION, formatter_class=argparse.RawTextHelpFormatter,
@@ -166,7 +167,7 @@ def main(inps=None):
verbose=args.verbose)
# Plot pairs if requested
- if args.plotPairs == True:
+ if args.plotPairs:
dataStack.plotPairs()
# Create list of triplets
@@ -175,7 +176,7 @@ def main(inps=None):
printTriplets=args.printTriplets)
# Plot triplets if requested
- if args.plotTriplets == True:
+ if args.plotTriplets:
dataStack.plotTriplets()
# Compute misclosure
diff --git a/tools/bin/ariaPlot.py b/tools/bin/ariaPlot.py
index da15b0f2..08373061 100755
--- a/tools/bin/ariaPlot.py
+++ b/tools/bin/ariaPlot.py
@@ -24,6 +24,7 @@
osgeo.gdal.PushErrorHandler('CPLQuietErrorHandler')
LOGGER = logging.getLogger('ariaPlot.py')
+
def createParser():
'''
Make any of the following specified plot(s): ⊥ baseline + histogram,
@@ -53,8 +54,8 @@ def createParser():
'have invalid/valid data represented by 0/1, respectively. '
'If "Download", will use GSHHS water mask. If "NLCD", will mask '
'classes 11, 12, 90, 95; see: https://www.mrlc.gov/national'
- '-land-cover-database-nlcd-201://www.mrlc.gov/national-land-cover'
- '-database-nlcd-2016')
+            '-land-cover-database'
+            '-nlcd-2016')
parser.add_argument(
'-at', '--amp_thresh', dest='amp_thresh', default=None, type=str,
help='Amplitude threshold below which to mask. Specify "None" to '
@@ -71,16 +72,18 @@ def createParser():
parser.add_argument(
'-croptounion', '--croptounion', action='store_true',
dest='croptounion',
- help='If turned on, IFGs cropped to bounds based off of union and bbox '
- '(if specified). Program defaults to crop all IFGs to bounds '
- 'based off of common intersection and bbox (if specified).')
+ help='If turned on, IFGs cropped to bounds based off of union and '
+ 'bbox (if specified). Program defaults to crop all IFGs to '
+ 'bounds based off of common intersection and bbox (if '
+ 'specified).')
parser.add_argument(
'-plottracks', '--plottracks', action='store_true', dest='plottracks',
help='Make plot of track latitude extents vs bounding bbox/common '
'track extent.')
parser.add_argument(
'-plotbperp', '--plotbperp', action='store_true', dest='plotbperp',
- help="Make a baseline plot, and a histogram of perpendicular baseline.")
+ help="Make a baseline plot, and a histogram of perpendicular "
+ "baseline.")
parser.add_argument(
'-plotbperpcoh', '--plotbperpcoh', action='store_true',
dest='plotbperpcoh',
@@ -115,7 +118,8 @@ def createParser():
help='Specify version as str, e.g. 2_0_4 or all prods; default: all')
parser.add_argument(
'--nc_version', dest='nc_version', default='1b',
- help='Specify netcdf version as str, e.g. 1c or all prods; default: 1b')
+ help='Specify netcdf version as str, e.g. 1c or all prods; default: '
+ '1b')
parser.add_argument(
'-v', '--verbose', action='store_true', dest='verbose',
help="Toggle verbose mode on.")
@@ -123,6 +127,7 @@ def createParser():
'--log-level', default='warning', help='Logger log level')
return parser
+
def main(inps=None):
parser = createParser()
args = parser.parse_args()
@@ -158,31 +163,35 @@ def main(inps=None):
# pass number of threads for gdal multiprocessing computation
if args.num_threads.lower() == 'all':
import multiprocessing
- LOGGER.info('User specified use of all %s threads for gdal multiprocessing',
- multiprocessing.cpu_count())
+ LOGGER.info(
+ 'User specified use of all %s threads for gdal multiprocessing',
+ multiprocessing.cpu_count())
args.num_threads = 'ALL_CPUS'
LOGGER.info(
'Thread count specified for gdal multiprocessing = %s',
args.num_threads)
if args.plottracks or args.plotcoh or args.makeavgoh or args.plotbperpcoh:
- # extract/merge productBoundingBox layers for each pair and update dict,
- # report common track bbox (default is to take common intersection, but
- # user may specify union), and expected shape for DEM.
+ # extract/merge productBoundingBox layers for each pair and update
+ # dict, report common track bbox (default is to take common
+ # intersection, but user may specify union), and expected shape for
+ # DEM.
# TODO make LHS a tuple
standardproduct_info.products[0], standardproduct_info.products[1], \
standardproduct_info.bbox_file, prods_TOTbbox, \
- prods_TOTbbox_metadatalyr, arrres, proj = ARIAtools.extractProduct.merged_productbbox(
- standardproduct_info.products[0], standardproduct_info.products[1],
- os.path.join(args.workdir, 'productBoundingBox'),
- standardproduct_info.bbox_file, args.croptounion,
- num_threads=args.num_threads,
- minimumOverlap=args.minimumOverlap, verbose=args.verbose)
+ prods_TOTbbox_metadatalyr, arrres, proj = \
+ ARIAtools.extractProduct.merged_productbbox(
+ standardproduct_info.products[0],
+ standardproduct_info.products[1],
+ os.path.join(args.workdir, 'productBoundingBox'),
+ standardproduct_info.bbox_file, args.croptounion,
+ num_threads=args.num_threads,
+ minimumOverlap=args.minimumOverlap, verbose=args.verbose)
# Load or download mask (if specified).
if args.mask is not None:
- # TODO refactor these list comps, this is not understandable in any way
+ # TODO refactor these list comps, this is not understandable
args.mask = ARIAtools.util.mask.prep_mask(
[[item for sublist in [list(set(d['amplitude'])) for d in standardproduct_info.products[1] if 'amplitude' in d] for item in sublist],
[item for sublist in [list(set(d['pair_name'])) for d in standardproduct_info.products[1] if 'pair_name' in d] for item in sublist]],
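The TODO retained above flags the nested list comprehensions fed to `prep_mask`. One possible shape for that refactor (a sketch only, not part of this patch) is to flatten the two layer lists with plain loops before the call:

```python
def collect_mask_layers(product_dicts):
    """Hypothetical helper: flatten the unique 'amplitude' and 'pair_name'
    entries of each product dict into two lists, mirroring what the nested
    comprehensions above compute."""
    amplitudes, pair_names = [], []
    for prod in product_dicts:
        if 'amplitude' in prod:
            amplitudes.extend(set(prod['amplitude']))
        if 'pair_name' in prod:
            pair_names.extend(set(prod['pair_name']))
    return [amplitudes, pair_names]


# e.g. args.mask = ARIAtools.util.mask.prep_mask(
#          collect_mask_layers(standardproduct_info.products[1]), ...)
```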
diff --git a/tools/bin/export_product.py b/tools/bin/export_product.py
index eaf33126..c7434a38 100755
--- a/tools/bin/export_product.py
+++ b/tools/bin/export_product.py
@@ -12,6 +12,7 @@
import ARIAtools.extractProduct
+
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
@@ -34,5 +35,6 @@ def main():
with open(outfile, 'w') as ofp:
json.dump(outputs, ofp)
+
if __name__ == "__main__":
main()