Skip to content

Commit

Permalink
PEP 8 fixes
Browse files Browse the repository at this point in the history
  • Loading branch information
alexfore committed May 7, 2024
1 parent fa13bf0 commit 5d7b87f
Show file tree
Hide file tree
Showing 20 changed files with 401 additions and 305 deletions.
23 changes: 18 additions & 5 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,17 +49,27 @@
# If the third-party RelaxIV package is found, compile the C++ extension
# as well and install with RelaxIV support.
if os.path.isdir(relaxPath):
    print('Installing ARIA-tools with support for RelaxIV')
    # Build the unwrapping-component extension module against the
    # RelaxIV minimum-cost-flow solver sources.
    module1 = Extension(
        'ARIAtools.demo',
        sources=[
            'tools/bindings/relaxIVdriver.cpp',
            'tools/bindings/unwcompmodule.cpp',
            os.path.join(relaxPath, 'RelaxIV/RelaxIV.C')],
        include_dirs=[
            'tools/include', os.path.join(relaxPath, 'MCFClass'),
            os.path.join(relaxPath, 'OPTUtils'),
            os.path.join(relaxPath, 'RelaxIV')])

    setup(name='ARIAtools',
          version=version0,
          description='This is the ARIA tools package with RelaxIV support',
          ext_modules=[module1],
          packages=['ARIAtools'],
          package_dir={'': 'tools'},
          # BUG FIX: the original list read
          #   ... 'tools/bin/ariaMisclosure.py'
          #   'tools/bin/export_product.py'])
          # with no comma, so implicit string concatenation silently
          # produced the single bogus entry
          # 'tools/bin/ariaMisclosure.pytools/bin/export_product.py'.
          scripts=['tools/bin/ariaPlot.py', 'tools/bin/ariaDownload.py',
                   'tools/bin/ariaExtract.py', 'tools/bin/ariaTSsetup.py',
                   'tools/bin/ariaAOIassist.py',
                   'tools/bin/ariaMisclosure.py',
                   'tools/bin/export_product.py'])
else:
# Third party package RelaxIV not found
print('Installing ARIA-tools without support for RelaxIV')
Expand All @@ -69,4 +79,7 @@
description='This is the ARIA tools package without RelaxIV support',
packages=['ARIAtools'],
package_dir={'': 'tools'},
scripts=['tools/bin/ariaPlot.py', 'tools/bin/ariaDownload.py', 'tools/bin/ariaExtract.py', 'tools/bin/ariaTSsetup.py', 'tools/bin/ariaAOIassist.py', 'tools/bin/ariaMisclosure.py'])
scripts=['tools/bin/ariaPlot.py', 'tools/bin/ariaDownload.py',
'tools/bin/ariaExtract.py', 'tools/bin/ariaTSsetup.py',
'tools/bin/ariaAOIassist.py', 'tools/bin/ariaMisclosure.py',
'tools/bin/export_product.py'])
4 changes: 2 additions & 2 deletions tests/regression/validate_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -233,7 +233,7 @@ class AriaToolsScriptTester():
@pytest.fixture(scope='class')
def tester(self):
with tarfile.open(os.path.join(
'golden_test_outputs', self.FLAVOR+'.tar.gz')) as tar:
'golden_test_outputs', self.FLAVOR + '.tar.gz')) as tar:
tar.extractall(os.path.join('golden_test_outputs'))

test_dir = os.path.join('test_outputs', self.FLAVOR)
Expand Down Expand Up @@ -271,7 +271,7 @@ class TestAriaDownload():
@pytest.fixture(scope='class')
def tester(self):
with tarfile.open(os.path.join(
'golden_test_outputs', self.FLAVOR+'.tar.gz')) as tar:
'golden_test_outputs', self.FLAVOR + '.tar.gz')) as tar:
tar.extractall(os.path.join('golden_test_outputs'))

test_dir = os.path.join('test_outputs', self.FLAVOR)
Expand Down
18 changes: 9 additions & 9 deletions tools/ARIAtools/extractProduct.py
Original file line number Diff line number Diff line change
Expand Up @@ -530,25 +530,25 @@ def merged_productbbox(
def create_raster_from_gunw(fname, data_lis, proj, driver, hgt_field=None):
    """Create a raster from GUNW layers and reproject it.

    Builds a temporary VRT over *data_lis*, reprojects it to
    EPSG:*proj* with nearest-neighbour resampling (nodata=0), writes
    the result to *fname* with the given GDAL *driver*, and wraps the
    output in a ``fname + '.vrt'`` VRT. If *hgt_field* is given, the
    height metadata item is copied from the first input onto the
    output VRT.
    """
    # Open the original raster via a throwaway VRT mosaic of the inputs.
    osgeo.gdal.BuildVRT(fname + '_temp.vrt', data_lis)
    da = rioxarray.open_rasterio(fname + '_temp.vrt', masked=True)

    # Reproject the raster to the desired projection.
    reproj_da = da.rio.reproject(f'EPSG:{proj}',
                                 resampling=rasterio.enums.Resampling.nearest,
                                 nodata=0)
    reproj_da.rio.to_raster(fname, driver=driver, crs=f'EPSG:{proj}')
    os.remove(fname + '_temp.vrt')
    da.close()
    reproj_da.close()

    # Expose the written raster through a VRT carrying the output SRS.
    buildvrt_options = osgeo.gdal.BuildVRTOptions(outputSRS=f'EPSG:{proj}')
    osgeo.gdal.BuildVRT(fname + '.vrt', fname, options=buildvrt_options)

    if hgt_field is not None:
        # Copy the height-layer metadata from the first input product.
        # NOTE(review): the dataset returned by gdal.Open here is not
        # kept or explicitly closed, so the metadata write relies on
        # the handle being garbage-collected to flush — confirm this
        # persists on all GDAL versions in use.
        hgt_meta = osgeo.gdal.Open(data_lis[0]).GetMetadataItem(hgt_field)
        osgeo.gdal.Open(
            fname + '.vrt').SetMetadataItem(hgt_field, hgt_meta)

    return

Expand Down Expand Up @@ -1374,7 +1374,7 @@ def export_products(

end_time = time.time()
LOGGER.debug(
"export_product_worker took %f seconds" % (end_time-start_time))
"export_product_worker took %f seconds" % (end_time - start_time))

# delete directory for quality control plots if empty
plots_subdir = os.path.abspath(
Expand All @@ -1398,7 +1398,6 @@ def finalize_metadata(outname, bbox_bounds, arrres, dem_bounds, prods_TOTbbox,
3D layers with a DEM.
Lat/lon arrays must also be passed for this process.
"""
#arrshape = [dem.RasterYSize, dem.RasterXSize]
ref_geotrans = dem.GetGeoTransform()
dem_arrres = [abs(ref_geotrans[1]), abs(ref_geotrans[-1])]

Expand Down Expand Up @@ -1498,8 +1497,9 @@ def finalize_metadata(outname, bbox_bounds, arrres, dem_bounds, prods_TOTbbox,
gdal_warp_kwargs = {
'format': outputFormat, 'cutlineDSName': prods_TOTbbox,
'outputBounds': dem_bounds, 'dstNodata': data_array_nodata,
'xRes': dem_arrres[0], 'yRes': dem_arrres[1], 'targetAlignedPixels': True,
'multithread': True, 'options': [f'NUM_THREADS={num_threads}']}
'xRes': dem_arrres[0], 'yRes': dem_arrres[1],
'targetAlignedPixels': True, 'multithread': True,
'options': [f'NUM_THREADS={num_threads}']}
warp_options = osgeo.gdal.WarpOptions(**gdal_warp_kwargs)
osgeo.gdal.Warp(tmp_name + '_temp', tmp_name, options=warp_options)

Expand Down
81 changes: 47 additions & 34 deletions tools/ARIAtools/phaseMinimization.py
Original file line number Diff line number Diff line change
Expand Up @@ -392,7 +392,8 @@ def __init__(self, x=None, y=None, phase=None, compNum=None, redArcs=0):
else:
self.loops = self.__createTriangulation(
delauneyTri, vertices, edges)
self.neutralResidue, self.neutralNodeIdx = self.__computeNeutralResidue()
self.neutralResidue, self.neutralNodeIdx = (
self.__computeNeutralResidue())

# Saving some variables for plotting
self.__redArcs = redArcs
Expand Down Expand Up @@ -420,16 +421,19 @@ def __getTriSeq(va, vb, vc):
'''
Get Sequence of triangle points
'''
def line(va, vb, vc): return (((vc.y - va.y) * (vb.x - va.x))
) - ((vc.x - va.x) * (vb.y - va.y))
def line(va, vb, vc):
return (((vc.y - va.y) * (vb.x - va.x)) -
((vc.x - va.x) * (vb.y - va.y)))

# Line equation through pt0 and pt1
# Test for pt3 - Does it lie to the left or to the right ?
pos3 = line(va, vb, vc)
if(pos3 > 0):
# left

# left
if pos3 > 0:
return (va, vc, vb)
else: # right
# right
else:
return (va, vb, vc)

# Create Delaunay Triangulation.
Expand Down Expand Up @@ -578,8 +582,9 @@ def __createTriangulation(self, delauneyTri, vertices, edges):

# Generate the points in a sequence
def getSeq(va, vb, vc):
def line(va, vb, vc): return (
((vc.y - va.y) * (vb.x - va.x))) - ((vc.x - va.x) * (vb.y - va.y))
def line(va, vb, vc):
return (((vc.y - va.y) * (vb.x - va.x)) -
((vc.x - va.x) * (vb.y - va.y)))

# Line equation through pt0 and pt1
# Test for pt3 - Does it lie to the left or to the right ?
Expand Down Expand Up @@ -739,8 +744,8 @@ def __MCFRelaxIV(edgeLen, fileName="network.dmx"):
try:
from . import unwcomp
except BaseException:
raise Exception("MCF requires RelaxIV solver - Please drop the RelaxIV software \
into the src folder and re-make")
raise Exception("MCF requires RelaxIV solver - Please drop the "
"RelaxIV software into the src folder and re-make")
return unwcomp.relaxIVwrapper_Py(fileName)

def solve(self, solver, filename="network.dmx"):
Expand All @@ -764,19 +769,24 @@ def __solveEdgeCost__(self, solver, fileName="network.dmx"):
# Solve the objective function
if solver == 'glpk':
LOGGER.info('Using GLPK MIP solver')
def MIPsolver(): return self.__prob__.solve(pulp.GLPK(msg=0))

def MIPsolver():
return self.__prob__.solve(pulp.GLPK(msg=0))

elif solver == 'pulp':
LOGGER.info('Using PuLP MIP solver')
def MIPsolver(): return self.__prob__.solve()

def MIPsolver():
return self.__prob__.solve()

elif solver == 'gurobi':
LOGGER.info('Using Gurobi MIP solver')
def MIPsolver(): return self.__prob__.solve(pulp.GUROBI_CMD())

def MIPsolver():
return self.__prob__.solve(pulp.GUROBI_CMD())

LOGGER.info(
'Time Taken (in sec) to solve: %f',
T.timeit(
MIPsolver,
number=1))
'Time Taken (in sec) to solve: %f', T.timeit(MIPsolver, number=1))

# Get solution
for v, edge in self.__edges.items():
Expand Down Expand Up @@ -916,18 +926,24 @@ def firstPassCommandLine():
parser = argparse.ArgumentParser(description='Phase Unwrapping')

# Positional argument - Input XML file
parser.add_argument('--inputType', choices=['plane', 'sinc', 'sine'],
help='Type of input to unwrap', default='plane', dest='inputType')
parser.add_argument('--dim', type=int, default=100,
help='Dimension of the image (square)', dest='dim')
parser.add_argument('-c', action='store_true',
help='Component-wise unwrap test', dest='compTest')
parser.add_argument('-MCF', action='store_true',
help='Minimum Cost Flow', dest='mcf')
parser.add_argument('--redArcs', type=int, default=0,
help='Redundant Arcs', dest='redArcs')
parser.add_argument('--solver', choices=['glpk', 'pulp', 'gurobi'],
help='Type of solver', default='pulp', dest='solver')
parser.add_argument(
'--inputType', choices=['plane', 'sinc', 'sine'],
help='Type of input to unwrap', default='plane', dest='inputType')
parser.add_argument(
'--dim', type=int, default=100,
help='Dimension of the image (square)', dest='dim')
parser.add_argument(
'-c', action='store_true',
help='Component-wise unwrap test', dest='compTest')
parser.add_argument(
'-MCF', action='store_true',
help='Minimum Cost Flow', dest='mcf')
parser.add_argument(
'--redArcs', type=int, default=0,
help='Redundant Arcs', dest='redArcs')
parser.add_argument(
'--solver', choices=['glpk', 'pulp', 'gurobi'],
help='Type of solver', default='pulp', dest='solver')

# Parse input
args = parser.parse_args()
Expand Down Expand Up @@ -1011,7 +1027,8 @@ def main():
xidx, yidx, wrapImg[xidx, yidx], redArcs=redArcs)
else:
phaseunwrap = PhaseUnwrap(
xidx, yidx, wrapImg[xidx, yidx], compImg[xidx, yidx], redArcs=redArcs)
xidx, yidx, wrapImg[xidx, yidx], compImg[xidx, yidx],
redArcs=redArcs)

# Including the neutral node for min cost flow
phaseunwrap.solve(solver)
Expand All @@ -1022,10 +1039,6 @@ def main():
# phaseunwrap.plotSpanningTree("spanningTree%d.png"%(redArcs))
phaseunwrap.plotResult("final%d.png" % (redArcs))

#import pdb; pdb.set_trace()
#fig = plt.figure()
#ax = fig.add_subplot(111, projection='3d')


if __name__ == '__main__':

Expand Down
7 changes: 4 additions & 3 deletions tools/ARIAtools/product.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,8 +87,9 @@ def package_dict(scene, new_scene, scene_ind,
# IFG corresponding to reference product already exists, append to dict
if sorted_dict:
dict_vals = [[
subitem for item in a for subitem in (item if
isinstance(item, list) else [item])] for a in zip(
subitem for item in a for subitem in (
item if isinstance(item, list) else [item])]
for a in zip(
sorted_dict[dict_ind][scene_ind].values(),
new_scene[scene_ind].values())]

Expand Down Expand Up @@ -924,7 +925,7 @@ def __NISARmappingData__(self, fname, rdrmetadata_dict, sdskeys, version):
datalyr_dict[
'productBoundingBoxFrames'] = fname + '":' + sdskeys[0]
for i in enumerate(layerkeys):
datalyr_dict[i[1]] = fname + '":'+sdskeys[i[0]]
datalyr_dict[i[1]] = fname + '":' + sdskeys[i[0]]

# Rewrite tropo and iono keys
datalyr_dict['ionosphere'] = datalyr_dict.pop(
Expand Down
36 changes: 21 additions & 15 deletions tools/ARIAtools/stack.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@

LOGGER = logging.getLogger(__name__)


# STACK OBJECT ---
class Stack:
'''
Expand Down Expand Up @@ -170,7 +171,8 @@ def __formatDates__(self):
def __formatExcludePairs__(self):
'''
Check that exclude dates are in one of two formats:
1. a string containing the pairs in YOUNGER_OLDER format, space-separated
1. a string containing the pairs in YOUNGER_OLDER format,
space-separated
2. a .txt file with lines of the same formatting
Formatting should match "pair" formatting: [[master,slave]]
'''
Expand Down Expand Up @@ -231,8 +233,9 @@ def plotPairs(self):
# Legend
handles, labels = pairAx.get_legend_handles_labels()
uniqueLabels = dict(zip(labels, handles))
pairAx.legend(uniqueLabels.values(), uniqueLabels.keys(),
bbox_to_anchor=(0.005, 0.99), loc='upper left', borderaxespad=0.)
pairAx.legend(
uniqueLabels.values(), uniqueLabels.keys(),
bbox_to_anchor=(0.005, 0.99), loc='upper left', borderaxespad=0.)

# Other formatting
pairAx.set_yticks([])
Expand Down Expand Up @@ -279,22 +282,23 @@ def createTriplets(self, minTime=None, maxTime=None, printTriplets=False):
self.nTriplets = len(self.triplets)

# Print to text file
with open(os.path.join(self.workdir, 'ValidTriplets.txt'), 'w') as tripletFile:
with open(os.path.join(
self.workdir, 'ValidTriplets.txt'), 'w') as tripletFile:
for triplet in self.triplets:
strPair = [self.__datePair2strPair__(pair) for pair in triplet]
tripletFile.write('{}\n'.format(strPair))
tripletFile.close()

# Report if requested
if printTriplets == True:
if printTriplets:

# Print to screen
LOGGER.info('Existing triplets:')
for triplet in self.triplets:
LOGGER.info([
self.__datePair2strPair__(pair) for pair in triplet])

if self.verbose == True:
if self.verbose:
LOGGER.info(
'%s existing triplets found based on search criteria',
self.nTriplets)
Expand Down Expand Up @@ -425,11 +429,11 @@ def XY2LoLa(self, x, y):
# Reference point formatting
def __referencePoint__(self, refXY, refLoLa):
'''
Determine the reference point in XY coordinates. The reference point can be
automatically or manually selected by the user and is subtracted
from each interferogram.
The point can be given in pixels or lon/lat coordinates. If given in Lat/Lon, determine
the location in XY.
Determine the reference point in XY coordinates. The reference point
can be automatically or manually selected by the user and is
subtracted from each interferogram.
The point can be given in pixels or lon/lat coordinates. If given in
Lat/Lon, determine the location in XY.
'''
LOGGER.debug('Determining reference point...')

Expand Down Expand Up @@ -460,7 +464,8 @@ def __referencePoint__(self, refXY, refLoLa):
# Random reference point
def __autoReferencePoint__(self):
'''
Use the coherence stack to automatically determine a suitable reference point.
Use the coherence stack to automatically determine a suitable
reference point.
'''
# Load coherence data from cohStack.vrt
cohfile = os.path.join(self.imgdir, 'cohStack.vrt')
Expand All @@ -479,7 +484,7 @@ def __autoReferencePoint__(self):

# Loop until suitable reference point is found
n = 0
while cohMask[self.refY, self.refX] == False:
while not cohMask[self.refY, self.refX]:

# Reselect reference points
self.refX = np.random.randint(cohDS.RasterXSize)
Expand Down Expand Up @@ -606,7 +611,7 @@ def __plotSeries__(self, ax, data, title):
Plot misclosure timeseries.
'''
# Plot data
if self.plotTimeIntervals == False:
if not self.plotTimeIntervals:
ax.plot([tripletDate[1]
for tripletDate in self.tripletDates], data, '-k.')
else:
Expand Down Expand Up @@ -760,7 +765,8 @@ def __misclosureQuery__(self, queryXY=None, queryLoLa=None):
qLon, qLat = self.XY2LoLa(queryXY[0], queryXY[1])

LOGGER.debug(
'Query point: X %s / Y %s; Lon %.4f / Lat %.4f', qx, qy, qLon, qLat)
'Query point: X %s / Y %s; Lon %.4f / Lat %.4f',
qx, qy, qLon, qLat)

# Plot query points on map
self.netMscAx.plot(
Expand Down
Loading

0 comments on commit 5d7b87f

Please sign in to comment.