From e49e11c4a760b09657da49d93c711cdd5e9bb6bc Mon Sep 17 00:00:00 2001 From: Forrest Williams Date: Thu, 29 Aug 2024 16:00:08 -0500 Subject: [PATCH 01/53] update readme --- README.md | 33 +++++++++++++++++++++++++++------ 1 file changed, 27 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 8619a4d..5b8d713 100644 --- a/README.md +++ b/README.md @@ -50,19 +50,40 @@ In order for Docker to be able to use the host's GPU, the host must have the [NV The process is different for different OS's and Linux distros. The setup process for the most common distros, including Ubuntu, can be found [here](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html#configuration). Make sure to follow the [Docker configuration steps](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html#configuration) after installing the package. +The AWS ECS-optimized GPU AMI has the configuration described already set up. You can find the latest version of this AMI by calling: +```bash +aws ssm get-parameters --names /aws/service/ecs/optimized-ami/amazon-linux-2/gpu/recommended --region us-west-2 +``` + +### GPU Docker Container +Once you have a compute environment set up as described above, you can build the GPU version of the container by running: +```bash +docker build --build-arg="GPU_ARCH={YOUR_ARCH}" -t ghcr.io/asfhyp3/hyp3-srg:{RELEASE}.gpu -f Dockerfile.gpu . +``` + +You can get the value of `COMPUTE_CAPABILITY_VERSION` by running `nvidia-smi` on the instance to obtain GPU type, then cross-reference this information with NVIDIA's [GPU type compute capability list](https://developer.nvidia.com/cuda-gpus). For a g6.2xlarge instance, this would be: +```bash +docker --build-arg="GPU_ARCH=89" -t ghcr.io/asfhyp3/hyp3-srg:{RELEASE}.gpu -f Dockerfile.gpu . +``` +The compute capability version will always be the same for a given instance type, so you will only need to look this up once per instance type. 
+The default value for this argument is `89` - the correct value for g6.2xlarge instances. +**THE COMPUTE CAPABILITY VERSION MUST MATCH ON BOTH THE BUILDING AND RUNNING MACHINE!** + +The value of `RELEASE` can be obtained from the git tags. + +You can push a manual container to HyP3-SRG's container repository by following [this guide](https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry#pushing-container-images). + ### EC2 Setup > [!CAUTION] -> Running the docker container on an Amazon Linux 2023 Deep Learning AMI runs, but will result in all zero outputs. Work is ongoing to determine what is causing this issue. For now, we recommend using option 2.i. +> Running the docker container on an Amazon Linux 2023 Deep Learning AMI runs, but will result in all zero outputs. Work is ongoing to determine what is causing this issue. For now, we recommend using option 2.3. When running on an EC2 instance, the following setup is recommended: 1. Create a [G6-family EC2 instance](https://aws.amazon.com/ec2/instance-types/g6/) that has **at least 32 GB of memory**. 2. Launch your instance with one of the following setups (**option i is recommended**): 1. Use the latest [Amazon Linux 2023 AMI](https://docs.aws.amazon.com/linux/al2023/ug/ec2.html) with `scripts/amazon_linux_setup.sh` as the [user script on launch](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/user-data.html). 2. Use the latest [Ubuntu AMI](https://cloud-images.ubuntu.com/locator/ec2/) with the `scripts/ubuntu_setup.sh` as the [user script on launch](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/user-data.html). - 3. Use the [Ubuntu Deep Learning Base OSS Nvidia Driver GPU AMI](https://aws.amazon.com/releasenotes/aws-deep-learning-base-gpu-ami-ubuntu-22-04/) (no install script required). -3. Build the GPU docker container with the correct compute capability version. 
To determine this value, run `nvidia-smi` on the instance to obtain GPU type, then cross-reference this information with NVIDIA's [GPU type compute capability list](https://developer.nvidia.com/cuda-gpus). For a g6.2xlarge instance, this would be: + 3. Use the latest AWS ECS-optimized GPU AMI (`aws ssm get-parameters --names /aws/service/ecs/optimized-ami/amazon-linux-2/gpu/recommended --region us-west-2`) +3. Build the GPU docker container with the correct compute capability version (see section above). To determine this value, run `nvidia-smi` on the instance to obtain GPU type, then cross-referencke this information with NVIDIA's [GPU type compute capability list](https://developer.nvidia.com/cuda-gpus). For a g6.2xlarge instance, this would be: ```bash -docker --build-arg="GPU_ARCH=89" -t hyp3-srg:gpu-89 -f Dockerfile.gpu . +docker --build-arg="GPU_ARCH=89" -t ghcr.io/asfhyp3/hyp3-srg:{RELEASE}.gpu -f Dockerfile.gpu . ``` -The compute capability version will always be the same for a given instance type, so you will only need to look this up once per instance type. -The default value for this argument is `89` - the correct value for g6.2xlarge instances. 
From 6d1fc13e8e7da3f6176375db42ef42a7a18c7055 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Wed, 4 Sep 2024 13:11:23 -0500 Subject: [PATCH 02/53] time series entrypoint --- pyproject.toml | 1 + src/hyp3_srg/timeseries.py | 48 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 49 insertions(+) create mode 100644 src/hyp3_srg/timeseries.py diff --git a/pyproject.toml b/pyproject.toml index 2fb98be..e1d4492 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,6 +43,7 @@ Documentation = "https://hyp3-docs.asf.alaska.edu" [project.entry-points.hyp3] back_projection = "hyp3_srg.back_projection:main" +timeseries = "hyp3_srg.timeseries:main" [tool.pytest.ini_options] testpaths = ["tests"] diff --git a/src/hyp3_srg/timeseries.py b/src/hyp3_srg/timeseries.py new file mode 100644 index 0000000..436444f --- /dev/null +++ b/src/hyp3_srg/timeseries.py @@ -0,0 +1,48 @@ +""" +Sentinel-1 GSLC timeseries interferogram processing +""" + +import argparse +import logging +from pathlib import Path +from typing import Iterable, Optional + +log = logging.getLogger(__name__) + + +def timeseries( + granules: Iterable[str], + bucket: str = None, + bucket_prefix: str = '', + work_dir: Optional[Path] = None, +): + """Create a timeseries interferogram from a set of Sentinel-1 GSLCs. + + Args: + granules: List of Sentinel-1 GSLCs + bucket: AWS S3 bucket for uploading the final product(s) + bucket_prefix: Add a bucket prefix to the product(s) + work_dir: Working directory for processing + """ + pass + + +def main(): + """Timeseries entrypoint. 
+ + Example command: + python -m hyp3_srg ++process timeseries \ + S1A_IW_RAW__0SDV_20231229T134339_20231229T134411_051870_064437_4F42.geo \ + S1A_IW_RAW__0SDV_20231229T134404_20231229T134436_051870_064437_5F38.geo + """ + parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser.add_argument('--bucket', help='AWS S3 bucket HyP3 for upload the final product(s)') + parser.add_argument('--bucket-prefix', default='', help='Add a bucket prefix to product(s)') + parser.add_argument('granules', type=str.split, nargs='+', help='GSLC granules.') + args = parser.parse_args() + args.granules = [item for sublist in args.granules for item in sublist] + timeseries(**args.__dict__) + + +if __name__ == '__main__': + main() From ec782b52fb9f77de063d08a6a0f0217e929a91aa Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Wed, 4 Sep 2024 13:25:19 -0500 Subject: [PATCH 03/53] add entrypoint to __main__ --- src/hyp3_srg/__main__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/hyp3_srg/__main__.py b/src/hyp3_srg/__main__.py index f02c266..2ebb1d6 100644 --- a/src/hyp3_srg/__main__.py +++ b/src/hyp3_srg/__main__.py @@ -14,7 +14,7 @@ def main(): parser = argparse.ArgumentParser(prefix_chars='+', formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument( '++process', - choices=['back_projection'], + choices=['back_projection', 'timeseries'], default='back_projection', help='Select the HyP3 entrypoint to use', # HyP3 entrypoints are specified in `pyproject.toml` ) From 67e75efa1d058d52b2276b0a63202f6d1bade636 Mon Sep 17 00:00:00 2001 From: jacquelynsmale Date: Wed, 4 Sep 2024 11:48:34 -0800 Subject: [PATCH 04/53] add in basics for running timeseries --- README.md | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 5b8d713..8bfde6d 100644 --- a/README.md +++ b/README.md @@ -9,6 +9,7 @@ HyP3 plugin for Stanford Radar Group (SRG) SAR 
Processor The HyP3-SRG plugin provides a set of workflows (currently only accessible via the docker container) that can be used to process SAR data using the [Stanford Radar Group Processor](https://github.com/asfhyp3/srg). The workflows currently included in this plugin are: - `back_projection`: A workflow for creating geocoded Sentinel-1 SLCs from Level-0 data using the [back-projection methodology](https://doi.org/10.1109/LGRS.2017.2753580). +- `timeseries`: A workflow for creating a deformation timeseries of geocoded Sentinel-1 SLCs from Level-0 data using the [time-series methodology](#TO DO: Add citation here ?https://doi.org/10.1109/JSTARS.2022.3216964). To run a workflow, you'll first need to build the docker container: ```bash @@ -33,7 +34,17 @@ docker run -it --rm \ S1A_IW_RAW__0SDV_20231229T134339_20231229T134411_051870_064437_4F42-RAW \ S1A_IW_RAW__0SDV_20231229T134404_20231229T134436_051870_064437_5F38-RAW ``` - +Similary, an example command for the `timeseries` workflow is: +``` +docker run -it --rm \ + -e EARTHDATA_USERNAME=[YOUR_USERNAME_HERE] \ + -e EARTHDATA_PASSWORD=[YOUR_PASSWORD_HERE] \ + hyp3-srg:latest \ + ++process timeseries \ + S1A_IW_RAW__0SDV_20240828T020812_20240828T020844_055407_06C206_6EA7 \ + S1A_IW_RAW__0SDV_20240816T020812_20240816T020844_055232_06BB8A_C7CA \ + S1A_IW_RAW__0SDV_20240804T020812_20240804T020844_055057_06B527_1346 +``` ## Earthdata Login For all workflows, the user must provide their Earthdata Login credentials in order to download input data. 
From 3c003464b1a06fcd5e58aebd6470feecae56e4f0 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Wed, 4 Sep 2024 15:49:33 -0500 Subject: [PATCH 05/53] move create_param_file to utils --- src/hyp3_srg/back_projection.py | 15 +-------------- 1 file changed, 1 insertion(+), 14 deletions(-) diff --git a/src/hyp3_srg/back_projection.py b/src/hyp3_srg/back_projection.py index a62f6ba..60881ad 100644 --- a/src/hyp3_srg/back_projection.py +++ b/src/hyp3_srg/back_projection.py @@ -18,19 +18,6 @@ log = logging.getLogger(__name__) -def create_param_file(dem_path: Path, dem_rsc_path: Path, output_dir: Path): - """Create a parameter file for the processor. - - Args: - dem_path: Path to the DEM file - dem_rsc_path: Path to the DEM RSC file - output_dir: Directory to save the parameter file in - """ - lines = [str(dem_path), str(dem_rsc_path)] - with open(output_dir / 'params', 'w') as f: - f.write('\n'.join(lines)) - - def check_required_files(required_files: Iterable, work_dir: Path) -> None: for file in required_files: if not (work_dir / file).exists(): @@ -129,7 +116,7 @@ def back_project( full_bbox = unary_union(bboxs).buffer(0.1) dem_path = dem.download_dem_for_srg(full_bbox, work_dir) - create_param_file(dem_path, dem_path.with_suffix('.dem.rsc'), work_dir) + utils.create_param_file(dem_path, dem_path.with_suffix('.dem.rsc'), work_dir) back_project_granules(granule_orbit_pairs, work_dir=work_dir, gpu=gpu) From 0d61ab1acba8f1d1ed5af39f84269b5e5af8e5f6 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Wed, 4 Sep 2024 15:50:02 -0500 Subject: [PATCH 06/53] get bboxs and dem --- src/hyp3_srg/timeseries.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/src/hyp3_srg/timeseries.py b/src/hyp3_srg/timeseries.py index 436444f..a659b80 100644 --- a/src/hyp3_srg/timeseries.py +++ b/src/hyp3_srg/timeseries.py @@ -7,6 +7,10 @@ from pathlib import Path from typing import Iterable, Optional +from shapely import unary_union + +from hyp3_srg import 
dem, utils + log = logging.getLogger(__name__) @@ -24,6 +28,17 @@ def timeseries( bucket_prefix: Add a bucket prefix to the product(s) work_dir: Working directory for processing """ + if work_dir is None: + work_dir = Path.cwd() + + bboxs = [] + for granule in granules: + bboxs.append(utils.get_bbox(granule)) + + full_bbox = unary_union(bboxs).buffer(0.1) + dem_path = dem.download_dem_for_srg(full_bbox, work_dir) + utils.create_param_file(dem_path, dem_path.with_suffix('.dem.rsc'), work_dir) + pass @@ -32,8 +47,8 @@ def main(): Example command: python -m hyp3_srg ++process timeseries \ - S1A_IW_RAW__0SDV_20231229T134339_20231229T134411_051870_064437_4F42.geo \ - S1A_IW_RAW__0SDV_20231229T134404_20231229T134436_051870_064437_5F38.geo + S1A_IW_RAW__0SDV_20231229T134339_20231229T134411_051870_064437_4F42 \ + S1A_IW_RAW__0SDV_20231229T134404_20231229T134436_051870_064437_5F38 """ parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('--bucket', help='AWS S3 bucket HyP3 for upload the final product(s)') From 7b9dc8e635b49807686e30428fddc0eddc26405b Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Wed, 4 Sep 2024 15:57:49 -0500 Subject: [PATCH 07/53] get_bbox and create_param_file --- src/hyp3_srg/utils.py | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/src/hyp3_srg/utils.py b/src/hyp3_srg/utils.py index 7611cea..f708c26 100644 --- a/src/hyp3_srg/utils.py +++ b/src/hyp3_srg/utils.py @@ -154,6 +154,26 @@ def download_raw_granule(granule_name: str, output_dir: Path, unzip: bool = Fals return out_path, bbox +def get_bbox(granule_name: str) -> Tuple[Path, Polygon]: + """Get the buffered extent from asf_search. 
+ + Args: + granule_name: Name of the granule to download + + Returns: + bbox: the buffered extent polygon + """ + granule_name = granule_name.split('.')[0] + + if not granule_name.endswith('-RAW'): + granule_name += '-RAW' + + result = asf_search.granule_search([granule_name])[0] + bbox = shape(result.geojson()['geometry']) + + return bbox + + def download_orbit(granule_name: str, output_dir: Path) -> Path: """Download a S1 orbit file. Prefer using the ESA API, but fallback to ASF if needed. @@ -169,6 +189,19 @@ def download_orbit(granule_name: str, output_dir: Path) -> Path: return orbit_path +def create_param_file(dem_path: Path, dem_rsc_path: Path, output_dir: Path): + """Create a parameter file for the processor. + + Args: + dem_path: Path to the DEM file + dem_rsc_path: Path to the DEM RSC file + output_dir: Directory to save the parameter file in + """ + lines = [str(dem_path), str(dem_rsc_path)] + with open(output_dir / 'params', 'w') as f: + f.write('\n'.join(lines)) + + def call_stanford_module(local_name, args: List = [], work_dir: Optional[Path] = None) -> None: """Call a Stanford Processor modules (via subprocess) with the given arguments. From 0e2e779f7f8383007d0088562906b38a3444ce60 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Wed, 4 Sep 2024 16:04:06 -0500 Subject: [PATCH 08/53] next todos --- src/hyp3_srg/timeseries.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/src/hyp3_srg/timeseries.py b/src/hyp3_srg/timeseries.py index a659b80..a0c754e 100644 --- a/src/hyp3_srg/timeseries.py +++ b/src/hyp3_srg/timeseries.py @@ -28,6 +28,18 @@ def timeseries( bucket_prefix: Add a bucket prefix to the product(s) work_dir: Working directory for processing """ + + """ + DONE: Get the BBOXs for the DEM. + DONE: Retrieve the SRG DEM files. 
+ TODO: Default values for looks, baselines, tropo, and thresholds + TODO: Call merge_slcs.py if necessary + TODO: Create the SBAS list with sbas_list.py + TODO: Get the size of the .geo files from the .rsc file. + TODO: Form the interferograms. + TODO: ... + """ + if work_dir is None: work_dir = Path.cwd() From b160d707264e2e73832405c24254a8324615f610 Mon Sep 17 00:00:00 2001 From: jacquelynsmale Date: Wed, 4 Sep 2024 15:10:44 -0800 Subject: [PATCH 09/53] add in more info for each job type --- README.md | 23 ++++++++++++++++------- 1 file changed, 16 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index 8bfde6d..f36b14d 100644 --- a/README.md +++ b/README.md @@ -6,10 +6,10 @@ HyP3 plugin for Stanford Radar Group (SRG) SAR Processor > [!WARNING] > Running the workflows in this repository requires a compiled version of the [Stanford Radar Group Processor](https://github.com/asfhyp3/srg). For this reason, running this repository's workflows in a standard Python is not implemented yet. Instead, we recommend running the workflows from the docker container as outlined below. -The HyP3-SRG plugin provides a set of workflows (currently only accessible via the docker container) that can be used to process SAR data using the [Stanford Radar Group Processor](https://github.com/asfhyp3/srg). The workflows currently included in this plugin are: +The HyP3-SRG plugin provides a set of workflows (currently only accessible via the docker container) that can be used to process SAR data using the [Stanford Radar Group Processor](https://github.com/asfhyp3/srg). This set of workflow uses the [SRG alogorithms]((https://doi.org/10.1109/LGRS.2017.2753580)) to process Level-0 Sentinel-1 (S1) data to geocoded, user-friendly products that can be used for time-series analysis. 
The workflows currently included in this plugin are: -- `back_projection`: A workflow for creating geocoded Sentinel-1 SLCs from Level-0 data using the [back-projection methodology](https://doi.org/10.1109/LGRS.2017.2753580). -- `timeseries`: A workflow for creating a deformation timeseries of geocoded Sentinel-1 SLCs from Level-0 data using the [time-series methodology](#TO DO: Add citation here ?https://doi.org/10.1109/JSTARS.2022.3216964). +- [`back_projection`](#back-projection): A workflow for creating geocoded Sentinel-1 SLCs, +- [`timeseries`](#time-series-analysis): A workflow for creating a deformation timeseries of geocoded Sentinel-1 SLCs. To run a workflow, you'll first need to build the docker container: ```bash @@ -24,7 +24,10 @@ docker run -it --rm \ ++process [WORKFLOW_NAME] \ [WORKFLOW_ARGS] ``` -Here is an example command for the `back_projection` workflow: + +### Back-projection +The `back_projection` processing type produces geocoded SLCs for raw S1 data. The workflow takes a list of Level-0 S1 granule names and outputs them as geocoded SLCs (GSLCs). +An example command for the `back_projection` workflow is: ```bash docker run -it --rm \ -e EARTHDATA_USERNAME=[YOUR_USERNAME_HERE] \ @@ -34,7 +37,11 @@ docker run -it --rm \ S1A_IW_RAW__0SDV_20231229T134339_20231229T134411_051870_064437_4F42-RAW \ S1A_IW_RAW__0SDV_20231229T134404_20231229T134436_051870_064437_5F38-RAW ``` -Similary, an example command for the `timeseries` workflow is: + +### Time-series +The `timeseries` workflow takes a list of up to 50 Level-0 S1 granule names and produces a time-series of the respective geocoded SLCs. Stacks are created with looks `10x10`and baselines of `1000x1000`. A trophospheric correction is applied using an elevation-dependent regression. +This workflow will output interferograms and time-series files for all input granules. 
+ The following command will run the `timeseries` workflow: ``` docker run -it --rm \ -e EARTHDATA_USERNAME=[YOUR_USERNAME_HERE] \ @@ -45,7 +52,7 @@ docker run -it --rm \ S1A_IW_RAW__0SDV_20240816T020812_20240816T020844_055232_06BB8A_C7CA \ S1A_IW_RAW__0SDV_20240804T020812_20240804T020844_055057_06B527_1346 ``` -## Earthdata Login +### Earthdata Login For all workflows, the user must provide their Earthdata Login credentials in order to download input data. @@ -56,7 +63,9 @@ Your credentials can be passed to the workflows via environment variables (`EART If you haven't set up a `.netrc` file before, check out this [guide](https://harmony.earthdata.nasa.gov/docs#getting-started) to get started. -## GPU Setup: + +## Developer setup +### GPU Setup In order for Docker to be able to use the host's GPU, the host must have the [NVIDIA Container Toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/index.html) installed and configured. The process is different for different OS's and Linux distros. The setup process for the most common distros, including Ubuntu, can be found [here](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html#configuration). Make sure to follow the [Docker configuration steps](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html#configuration) after installing the package. 
From b352ed454a9bb4c5167631abf1aa61880c61fca3 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Thu, 5 Sep 2024 14:27:33 -0500 Subject: [PATCH 10/53] added all steps from sentinel_timeseries --- src/hyp3_srg/timeseries.py | 179 ++++++++++++++++++++++++++++++++++--- 1 file changed, 165 insertions(+), 14 deletions(-) diff --git a/src/hyp3_srg/timeseries.py b/src/hyp3_srg/timeseries.py index a0c754e..30e27f9 100644 --- a/src/hyp3_srg/timeseries.py +++ b/src/hyp3_srg/timeseries.py @@ -4,6 +4,7 @@ import argparse import logging +from os import cp from pathlib import Path from typing import Iterable, Optional @@ -14,13 +15,172 @@ log = logging.getLogger(__name__) +def get_size_from_dem(dem_file: str) -> tuple[int]: + """ Get the length and width from a .rsc DEM file + + Args: + dem_file: path to the .rsc dem file. + + Returns: + dem_width, dem_length: tuple containing the dem width and dem length + """ + fe = open(dem_file,'r') + width_line = fe.readline() + dem_width = width_line.split()[1] + length_line = fe.readline() + dem_length = length_line.split()[1] + fe.close() + + return dem_width, dem_length + + +def generate_wrapped_interferograms( + looks: tuple[int], + baselines: tuple[int], + dem_shape: tuple[int], + work_dir: Path +) -> None: + """ Generates wrapped interferograms from GSLCs + + Args: + looks: tuple containing the number range looks and azimuth looks + baselines: tuple containing the time baseline and spatial baseline + dem_shape: tuple containing the dem width and dem length + work_dir: the directory containing the GSLCs + """ + dem_width, dem_length = dem_shape + looks_down, looks_across = looks + time_baseline, spatial_baseline = baselines + + utils.call_stanford_module( + 'sentinel/sbas_list.py', + args=[time_baseline, spatial_baseline], + work_dir=work_dir + ) + + sbas_args = [ + 'sbas_list ../elevation.dem.rsc 1 1', + dem_width, + dem_length, + looks_down, + looks_across + ] + utils.call_stanford_module('sentinel/ps_sbas_igrams.py', 
args=sbas_args, work_dir=work_dir) + + +def unwrap_interferograms( + dem_shape: tuple[int], + unw_shape: tuple[int], + work_dir: Path +) -> None: + """ Unwraps wrapped interferograms in parallel + + Args: + dem_shape: tuple containing the dem width and dem length + unw_shape: tuple containing the width and length from the dem.rsc file + work_dir: the directory containing the wrapped interferograms + """ + dem_width, dem_length = dem_shape + unw_width, unw_length = unw_shape + + reduce_dem_args = [ + '../elevation.dem dem', + dem_width, + dem_width // unw_width, + dem_length // unw_length + ] + utils.call_stanford_module('util/nbymi2.py', args=reduce_dem_args, work_dir=work_dir) + utils.call_stanford_module('util/unwrap_parallel.py', args=[unw_width], work_dir=work_dir) + + +def compute_sbas_velocity_solution( + threshold: float, + do_tropo_correction: bool, + unw_shape: tuple[int], + work_dir: Path +) -> None: + """ Computes the sbas velocity solution from the unwrapped interferograms + + Args: + threshold: ... + do_tropo_correction: ... 
+ unw_shape: tuple containing the width and length from the dem.rsc file + work_dir: the directory containing the wrapped interferograms + """ + utils.call_stanford_module('sbas/sbas_setup.py', args=['sbas_list geolist'], work_dir=work_dir) + cp('./intlist', 'unwlist') + utils.call_stanford_module('util/sed.py', args=["'s/int/unw/g' unwlist"], work_dir=work_dir) + + num_unw_files = 0 + num_slcs = 0 + with (open('unwlist', 'r'), open('geolist', 'r')) as (unw_list, slc_list): + num_unw_files = len(unw_list.readlines()) + num_slcs = len(slc_list.readlines()) + + ref_point_args = [ + 'unwlist', + unw_shape[0], + unw_shape[1], + threshold + ] + utils.call_stanford_module('int/findrefpoints', args=ref_point_args, work_dir=work_dir) + + if do_tropo_correction: + tropo_correct_args = [ + 'unwlist', + unw_shape[0], + unw_shape[1] + ] + utils.call_stanford_module('int/tropocorrect.py', args=tropo_correct_args, work_dir=work_dir) + + sbas_velocity_args = [ + 'unwlist', + str(num_unw_files.decode()).rstrip(), + str(num_slcs.decode()).rstrip(), + unw_shape[0], + 'ref_locs' + ] + utils.call_stanford_module('sbas/sbas', args=sbas_velocity_args, work_dir=work_dir) + + + +def create_timeseries( + looks: tuple[int] = (10, 10), + baselines: tuple[int] = (1000, 1000), + threshold: float = 0.5, + do_tropo_correction: bool = True, + work_dir: Path | None = None +) -> None: + """ Creates a timeseries from a stack of GSLCs consisting of interferograms and a velocity solution + + Args: + looks: tuple containing the number range looks and azimuth looks + baselines: tuple containing the time baseline and spatial baseline + threshold: ... + do_tropo_correction: ... 
+ work_dir: the directory containing the GSLCs to do work in + """ + dem_shape = get_size_from_dem('../elevation.dem.rsc') + generate_wrapped_interferograms(looks=looks, baselines=baselines, dem_shape=dem_shape) + + unw_shape = get_size_from_dem('../dem.rsc') + unwrap_interferograms(dem_shape=dem_shape, unw_shape=unw_shape) + + compute_sbas_velocity_solution( + threshold=threshold, + do_tropo_correction=do_tropo_correction, + unw_shape=unw_shape, + work_dir=work_dir + ) + + def timeseries( granules: Iterable[str], bucket: str = None, bucket_prefix: str = '', work_dir: Optional[Path] = None, ): - """Create a timeseries interferogram from a set of Sentinel-1 GSLCs. + """Create and package a timeseries stack from a set of Sentinel-1 GSLCs. Args: granules: List of Sentinel-1 GSLCs @@ -28,18 +188,6 @@ def timeseries( bucket_prefix: Add a bucket prefix to the product(s) work_dir: Working directory for processing """ - - """ - DONE: Get the BBOXs for the DEM. - DONE: Retrieve the SRG DEM files. - TODO: Default values for looks, baselines, tropo, and thresholds - TODO: Call merge_slcs.py if necessary - TODO: Create the SBAS list with sbas_list.py - TODO: Get the size of the .geo files from the .rsc file. - TODO: Form the interferograms. - TODO: ... 
- """ - if work_dir is None: work_dir = Path.cwd() @@ -51,7 +199,10 @@ def timeseries( dem_path = dem.download_dem_for_srg(full_bbox, work_dir) utils.create_param_file(dem_path, dem_path.with_suffix('.dem.rsc'), work_dir) - pass + utils.call_stanford_module('util/merge_slcs.py', work_dir=work_dir) + + create_timeseries(work_dir=work_dir) + def main(): From 1381c9e9f2d607cf0e01537f1173729329ec26f2 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Thu, 5 Sep 2024 14:29:52 -0500 Subject: [PATCH 11/53] timeseries to time_series --- pyproject.toml | 2 +- src/hyp3_srg/__main__.py | 2 +- src/hyp3_srg/{timeseries.py => time_series.py} | 12 ++++++------ 3 files changed, 8 insertions(+), 8 deletions(-) rename src/hyp3_srg/{timeseries.py => time_series.py} (95%) diff --git a/pyproject.toml b/pyproject.toml index e1d4492..6404d46 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,7 +43,7 @@ Documentation = "https://hyp3-docs.asf.alaska.edu" [project.entry-points.hyp3] back_projection = "hyp3_srg.back_projection:main" -timeseries = "hyp3_srg.timeseries:main" +time_series = "hyp3_srg.time_series:main" [tool.pytest.ini_options] testpaths = ["tests"] diff --git a/src/hyp3_srg/__main__.py b/src/hyp3_srg/__main__.py index 2ebb1d6..6d863a4 100644 --- a/src/hyp3_srg/__main__.py +++ b/src/hyp3_srg/__main__.py @@ -14,7 +14,7 @@ def main(): parser = argparse.ArgumentParser(prefix_chars='+', formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument( '++process', - choices=['back_projection', 'timeseries'], + choices=['back_projection', 'time_series'], default='back_projection', help='Select the HyP3 entrypoint to use', # HyP3 entrypoints are specified in `pyproject.toml` ) diff --git a/src/hyp3_srg/timeseries.py b/src/hyp3_srg/time_series.py similarity index 95% rename from src/hyp3_srg/timeseries.py rename to src/hyp3_srg/time_series.py index 30e27f9..b5ecd02 100644 --- a/src/hyp3_srg/timeseries.py +++ b/src/hyp3_srg/time_series.py @@ -144,14 +144,14 @@ def 
compute_sbas_velocity_solution( -def create_timeseries( +def create_time_series( looks: tuple[int] = (10, 10), baselines: tuple[int] = (1000, 1000), threshold: float = 0.5, do_tropo_correction: bool = True, work_dir: Path | None = None ) -> None: - """ Creates a timeseries from a stack of GSLCs consisting of interferograms and a velocity solution + """ Creates a time series from a stack of GSLCs consisting of interferograms and a velocity solution Args: looks: tuple containing the number range looks and azimuth looks @@ -174,13 +174,13 @@ def create_timeseries( ) -def timeseries( +def time_series( granules: Iterable[str], bucket: str = None, bucket_prefix: str = '', work_dir: Optional[Path] = None, ): - """Create and package a timeseries stack from a set of Sentinel-1 GSLCs. + """Create and package a time series stack from a set of Sentinel-1 GSLCs. Args: granules: List of Sentinel-1 GSLCs @@ -201,7 +201,7 @@ def timeseries( utils.call_stanford_module('util/merge_slcs.py', work_dir=work_dir) - create_timeseries(work_dir=work_dir) + create_time_series(work_dir=work_dir) @@ -219,7 +219,7 @@ def main(): parser.add_argument('granules', type=str.split, nargs='+', help='GSLC granules.') args = parser.parse_args() args.granules = [item for sublist in args.granules for item in sublist] - timeseries(**args.__dict__) + time_series(**args.__dict__) if __name__ == '__main__': From f3da67ed62ad375b567d97d820dc34ac1d408a2e Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Thu, 5 Sep 2024 14:33:02 -0500 Subject: [PATCH 12/53] os to shutil --- src/hyp3_srg/time_series.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/hyp3_srg/time_series.py b/src/hyp3_srg/time_series.py index b5ecd02..fb1d7e3 100644 --- a/src/hyp3_srg/time_series.py +++ b/src/hyp3_srg/time_series.py @@ -4,7 +4,7 @@ import argparse import logging -from os import cp +from shutil import copyfile from pathlib import Path from typing import Iterable, Optional @@ -108,7 +108,7 @@ def 
compute_sbas_velocity_solution( work_dir: the directory containing the wrapped interferograms """ utils.call_stanford_module('sbas/sbas_setup.py', args=['sbas_list geolist'], work_dir=work_dir) - cp('./intlist', 'unwlist') + copyfile('./intlist', 'unwlist') utils.call_stanford_module('util/sed.py', args=["'s/int/unw/g' unwlist"], work_dir=work_dir) num_unw_files = 0 From e731dd48c4dd8b4d83ce9204d897dd06fa2099b2 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Thu, 5 Sep 2024 14:35:04 -0500 Subject: [PATCH 13/53] comments --- src/hyp3_srg/time_series.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/hyp3_srg/time_series.py b/src/hyp3_srg/time_series.py index fb1d7e3..4307a53 100644 --- a/src/hyp3_srg/time_series.py +++ b/src/hyp3_srg/time_series.py @@ -1,5 +1,5 @@ """ -Sentinel-1 GSLC timeseries interferogram processing +Sentinel-1 GSLC time series processing """ import argparse @@ -206,7 +206,7 @@ def time_series( def main(): - """Timeseries entrypoint. + """Entrypoint for the GSLC time series workflow. 
Example command: python -m hyp3_srg ++process timeseries \ From 4287d21338e03ac9574a36626bdad9c09b8843f1 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Thu, 5 Sep 2024 14:36:25 -0500 Subject: [PATCH 14/53] .open to width --- src/hyp3_srg/time_series.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/src/hyp3_srg/time_series.py b/src/hyp3_srg/time_series.py index 4307a53..ebcd85d 100644 --- a/src/hyp3_srg/time_series.py +++ b/src/hyp3_srg/time_series.py @@ -24,12 +24,11 @@ def get_size_from_dem(dem_file: str) -> tuple[int]: Returns: dem_width, dem_length: tuple containing the dem width and dem length """ - fe = open(dem_file,'r') - width_line = fe.readline() - dem_width = width_line.split()[1] - length_line = fe.readline() - dem_length = length_line.split()[1] - fe.close() + with open(dem_file) as dem: + width_line = dem.readline() + dem_width = width_line.split()[1] + length_line = dem.readline() + dem_length = length_line.split()[1] return dem_width, dem_length From f23899c46a502e4cbc76df9ed06b9edf5a086126 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Thu, 5 Sep 2024 14:37:18 -0500 Subject: [PATCH 15/53] formatting --- src/hyp3_srg/time_series.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/hyp3_srg/time_series.py b/src/hyp3_srg/time_series.py index ebcd85d..9180bb2 100644 --- a/src/hyp3_srg/time_series.py +++ b/src/hyp3_srg/time_series.py @@ -178,7 +178,7 @@ def time_series( bucket: str = None, bucket_prefix: str = '', work_dir: Optional[Path] = None, -): +) -> None: """Create and package a time series stack from a set of Sentinel-1 GSLCs. Args: @@ -203,7 +203,6 @@ def time_series( create_time_series(work_dir=work_dir) - def main(): """Entrypoint for the GSLC time series workflow. 
From d0466ed4f4f2dfb3067e17c9d866c2f09ded7e98 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Thu, 5 Sep 2024 17:38:37 -0500 Subject: [PATCH 16/53] various fixes and cleanup --- src/hyp3_srg/time_series.py | 106 +++++++++++++++++++++++++++--------- 1 file changed, 81 insertions(+), 25 deletions(-) diff --git a/src/hyp3_srg/time_series.py b/src/hyp3_srg/time_series.py index 9180bb2..fb9078f 100644 --- a/src/hyp3_srg/time_series.py +++ b/src/hyp3_srg/time_series.py @@ -4,10 +4,13 @@ import argparse import logging +import zipfile +from os import mkdir from shutil import copyfile from pathlib import Path from typing import Iterable, Optional +from hyp3lib.aws import upload_file_to_s3 from shapely import unary_union from hyp3_srg import dem, utils @@ -30,7 +33,7 @@ def get_size_from_dem(dem_file: str) -> tuple[int]: length_line = dem.readline() dem_length = length_line.split()[1] - return dem_width, dem_length + return int(dem_width), int(dem_length) def generate_wrapped_interferograms( @@ -58,7 +61,10 @@ def generate_wrapped_interferograms( ) sbas_args = [ - 'sbas_list ../elevation.dem.rsc 1 1', + 'sbas_list', + '../elevation.dem.rsc', + 1, + 1, dem_width, dem_length, looks_down, @@ -83,12 +89,13 @@ def unwrap_interferograms( unw_width, unw_length = unw_shape reduce_dem_args = [ - '../elevation.dem dem', + '../elevation.dem', + 'dem', dem_width, dem_width // unw_width, dem_length // unw_length ] - utils.call_stanford_module('util/nbymi2.py', args=reduce_dem_args, work_dir=work_dir) + utils.call_stanford_module('util/nbymi2', args=reduce_dem_args, work_dir=work_dir) utils.call_stanford_module('util/unwrap_parallel.py', args=[unw_width], work_dir=work_dir) @@ -106,20 +113,23 @@ def compute_sbas_velocity_solution( unw_shape: tuple containing the width and length from the dem.rsc file work_dir: the directory containing the wrapped interferograms """ - utils.call_stanford_module('sbas/sbas_setup.py', args=['sbas_list geolist'], work_dir=work_dir) - copyfile('./intlist', 
'unwlist') - utils.call_stanford_module('util/sed.py', args=["'s/int/unw/g' unwlist"], work_dir=work_dir) + unw_width, unw_length = unw_shape + + utils.call_stanford_module('sbas/sbas_setup.py', args=['sbas_list', 'geolist'], work_dir=work_dir) + copyfile(work_dir / 'intlist', work_dir / 'unwlist') + utils.call_stanford_module('util/sed.py', args=['s/int/unw/g', 'unwlist'], work_dir=work_dir) num_unw_files = 0 num_slcs = 0 - with (open('unwlist', 'r'), open('geolist', 'r')) as (unw_list, slc_list): + with open(work_dir / 'unwlist', 'r') as unw_list: num_unw_files = len(unw_list.readlines()) + with open(work_dir / 'geolist', 'r') as slc_list: num_slcs = len(slc_list.readlines()) ref_point_args = [ 'unwlist', - unw_shape[0], - unw_shape[1], + unw_width, + unw_length, threshold ] utils.call_stanford_module('int/findrefpoints', args=ref_point_args, work_dir=work_dir) @@ -127,22 +137,21 @@ def compute_sbas_velocity_solution( if do_tropo_correction: tropo_correct_args = [ 'unwlist', - unw_shape[0], - unw_shape[1] + unw_width, + unw_length ] utils.call_stanford_module('int/tropocorrect.py', args=tropo_correct_args, work_dir=work_dir) sbas_velocity_args = [ 'unwlist', - str(num_unw_files.decode()).rstrip(), - str(num_slcs.decode()).rstrip(), - unw_shape[0], + num_unw_files, + num_slcs, + unw_width, 'ref_locs' ] utils.call_stanford_module('sbas/sbas', args=sbas_velocity_args, work_dir=work_dir) - def create_time_series( looks: tuple[int] = (10, 10), baselines: tuple[int] = (1000, 1000), @@ -159,11 +168,20 @@ def create_time_series( do_tropo_correction: ... 
work_dir: the directory containing the GSLCs to do work in """ - dem_shape = get_size_from_dem('../elevation.dem.rsc') - generate_wrapped_interferograms(looks=looks, baselines=baselines, dem_shape=dem_shape) + dem_shape = get_size_from_dem('elevation.dem.rsc') + generate_wrapped_interferograms( + looks=looks, + baselines=baselines, + dem_shape=dem_shape, + work_dir=work_dir + ) - unw_shape = get_size_from_dem('../dem.rsc') - unwrap_interferograms(dem_shape=dem_shape, unw_shape=unw_shape) + unw_shape = get_size_from_dem(work_dir / 'dem.rsc') + unwrap_interferograms( + dem_shape=dem_shape, + unw_shape=unw_shape, + work_dir=work_dir + ) compute_sbas_velocity_solution( threshold=threshold, @@ -173,6 +191,34 @@ def create_time_series( ) +# TODO: Package the time series files +def package_time_series(work_dir) -> Path: + """Package the time series into a product zip file. + + Args: + work_dir: Working directory for completed back-projection run + + Returns: + Path to the created zip file + """ + gslc_path = list(work_dir.glob('S1*.geo'))[0] + product_name = gslc_path.with_suffix('').name + zip_path = work_dir / f'{product_name}.zip' + + parameter_file = work_dir / f'{product_name}.txt' + input_granules = [x.with_suffix('').name for x in work_dir.glob('S1*.SAFE')] + with open(parameter_file, 'w') as f: + f.write('Process: time-series\n') + f.write(f"Input Granules: {', '.join(input_granules)}\n") + + # We don't compress the data because SLC data is psuedo-random + with zipfile.ZipFile(zip_path, 'w', compression=zipfile.ZIP_STORED) as z: + z.write(gslc_path, gslc_path.name) + z.write(parameter_file, parameter_file.name) + + return zip_path + + def time_series( granules: Iterable[str], bucket: str = None, @@ -189,27 +235,37 @@ def time_series( """ if work_dir is None: work_dir = Path.cwd() + sbas_dir = work_dir / 'sbas' + if not sbas_dir.exists(): + mkdir(sbas_dir) bboxs = [] for granule in granules: bboxs.append(utils.get_bbox(granule)) - full_bbox = 
unary_union(bboxs).buffer(0.1) + dem_path = dem.download_dem_for_srg(full_bbox, work_dir) + utils.create_param_file(dem_path, dem_path.with_suffix('.dem.rsc'), work_dir) utils.call_stanford_module('util/merge_slcs.py', work_dir=work_dir) - create_time_series(work_dir=work_dir) + create_time_series(work_dir=sbas_dir) + + zip_path = package_time_series(work_dir) + if bucket: + upload_file_to_s3(zip_path, bucket, bucket_prefix) + + print(f'Finished time-series processing for {list(sbas_dir.glob("S1*.geo"))[0].with_suffix("").name}!') def main(): """Entrypoint for the GSLC time series workflow. Example command: - python -m hyp3_srg ++process timeseries \ - S1A_IW_RAW__0SDV_20231229T134339_20231229T134411_051870_064437_4F42 \ - S1A_IW_RAW__0SDV_20231229T134404_20231229T134436_051870_064437_5F38 + python -m hyp3_srg ++process time_series \ + S1A_IW_RAW__0SDV_20231229T134339_20231229T134411_051870_064437_4F42.geo \ + S1A_IW_RAW__0SDV_20231229T134404_20231229T134436_051870_064437_5F38.geo """ parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('--bucket', help='AWS S3 bucket HyP3 for upload the final product(s)') From e45865e45f0f6e2aff1a068c6de689737c0be524 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Thu, 5 Sep 2024 17:39:29 -0500 Subject: [PATCH 17/53] copy snaphu to the correct dir --- Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Dockerfile b/Dockerfile index 23baba4..08efa49 100644 --- a/Dockerfile +++ b/Dockerfile @@ -47,6 +47,7 @@ SHELL ["/bin/bash", "-l", "-c"] USER ${CONDA_UID} WORKDIR /home/conda/ +COPY --chown=${CONDA_UID}:${CONDA_GID} --from=builder /srg/snaphu_v2.0b0.0.0/bin/snaphu /srg/bin/snaphu COPY --chown=${CONDA_UID}:${CONDA_GID} --from=builder /srg /srg COPY --chown=${CONDA_UID}:${CONDA_GID} --from=builder /hyp3-srg /hyp3-srg From 2e627a173d260b361569530ae0dee424898b782b Mon Sep 17 00:00:00 2001 From: Forrest Williams Date: Fri, 6 Sep 2024 12:53:26 +0000 
Subject: [PATCH 18/53] fix formatting --- src/hyp3_srg/time_series.py | 97 ++++++++----------------------------- 1 file changed, 21 insertions(+), 76 deletions(-) diff --git a/src/hyp3_srg/time_series.py b/src/hyp3_srg/time_series.py index fb9078f..d419d65 100644 --- a/src/hyp3_srg/time_series.py +++ b/src/hyp3_srg/time_series.py @@ -6,8 +6,8 @@ import logging import zipfile from os import mkdir -from shutil import copyfile from pathlib import Path +from shutil import copyfile from typing import Iterable, Optional from hyp3lib.aws import upload_file_to_s3 @@ -15,11 +15,12 @@ from hyp3_srg import dem, utils + log = logging.getLogger(__name__) def get_size_from_dem(dem_file: str) -> tuple[int]: - """ Get the length and width from a .rsc DEM file + """Get the length and width from a .rsc DEM file Args: dem_file: path to the .rsc dem file. @@ -37,12 +38,9 @@ def get_size_from_dem(dem_file: str) -> tuple[int]: def generate_wrapped_interferograms( - looks: tuple[int], - baselines: tuple[int], - dem_shape: tuple[int], - work_dir: Path + looks: tuple[int], baselines: tuple[int], dem_shape: tuple[int], work_dir: Path ) -> None: - """ Generates wrapped interferograms from GSLCs + """Generates wrapped interferograms from GSLCs Args: looks: tuple containing the number range looks and azimuth looks @@ -54,31 +52,14 @@ def generate_wrapped_interferograms( looks_down, looks_across = looks time_baseline, spatial_baseline = baselines - utils.call_stanford_module( - 'sentinel/sbas_list.py', - args=[time_baseline, spatial_baseline], - work_dir=work_dir - ) + utils.call_stanford_module('sentinel/sbas_list.py', args=[time_baseline, spatial_baseline], work_dir=work_dir) - sbas_args = [ - 'sbas_list', - '../elevation.dem.rsc', - 1, - 1, - dem_width, - dem_length, - looks_down, - looks_across - ] + sbas_args = ['sbas_list', '../elevation.dem.rsc', 1, 1, dem_width, dem_length, looks_down, looks_across] utils.call_stanford_module('sentinel/ps_sbas_igrams.py', args=sbas_args, 
work_dir=work_dir) -def unwrap_interferograms( - dem_shape: tuple[int], - unw_shape: tuple[int], - work_dir: Path -) -> None: - """ Unwraps wrapped interferograms in parallel +def unwrap_interferograms(dem_shape: tuple[int], unw_shape: tuple[int], work_dir: Path) -> None: + """Unwraps wrapped interferograms in parallel Args: dem_shape: tuple containing the dem width and dem length @@ -88,24 +69,15 @@ def unwrap_interferograms( dem_width, dem_length = dem_shape unw_width, unw_length = unw_shape - reduce_dem_args = [ - '../elevation.dem', - 'dem', - dem_width, - dem_width // unw_width, - dem_length // unw_length - ] + reduce_dem_args = ['../elevation.dem', 'dem', dem_width, dem_width // unw_width, dem_length // unw_length] utils.call_stanford_module('util/nbymi2', args=reduce_dem_args, work_dir=work_dir) utils.call_stanford_module('util/unwrap_parallel.py', args=[unw_width], work_dir=work_dir) def compute_sbas_velocity_solution( - threshold: float, - do_tropo_correction: bool, - unw_shape: tuple[int], - work_dir: Path + threshold: float, do_tropo_correction: bool, unw_shape: tuple[int], work_dir: Path ) -> None: - """ Computes the sbas velocity solution from the unwrapped interferograms + """Computes the sbas velocity solution from the unwrapped interferograms Args: threshold: ... 
@@ -115,7 +87,7 @@ def compute_sbas_velocity_solution( """ unw_width, unw_length = unw_shape - utils.call_stanford_module('sbas/sbas_setup.py', args=['sbas_list', 'geolist'], work_dir=work_dir) + utils.call_stanford_module('sbas/sbas_setup.py', args=['sbas_list', 'geolist'], work_dir=work_dir) copyfile(work_dir / 'intlist', work_dir / 'unwlist') utils.call_stanford_module('util/sed.py', args=['s/int/unw/g', 'unwlist'], work_dir=work_dir) @@ -126,29 +98,14 @@ def compute_sbas_velocity_solution( with open(work_dir / 'geolist', 'r') as slc_list: num_slcs = len(slc_list.readlines()) - ref_point_args = [ - 'unwlist', - unw_width, - unw_length, - threshold - ] + ref_point_args = ['unwlist', unw_width, unw_length, threshold] utils.call_stanford_module('int/findrefpoints', args=ref_point_args, work_dir=work_dir) if do_tropo_correction: - tropo_correct_args = [ - 'unwlist', - unw_width, - unw_length - ] + tropo_correct_args = ['unwlist', unw_width, unw_length] utils.call_stanford_module('int/tropocorrect.py', args=tropo_correct_args, work_dir=work_dir) - sbas_velocity_args = [ - 'unwlist', - num_unw_files, - num_slcs, - unw_width, - 'ref_locs' - ] + sbas_velocity_args = ['unwlist', num_unw_files, num_slcs, unw_width, 'ref_locs'] utils.call_stanford_module('sbas/sbas', args=sbas_velocity_args, work_dir=work_dir) @@ -157,9 +114,9 @@ def create_time_series( baselines: tuple[int] = (1000, 1000), threshold: float = 0.5, do_tropo_correction: bool = True, - work_dir: Path | None = None + work_dir: Path | None = None, ) -> None: - """ Creates a time series from a stack of GSLCs consisting of interferograms and a velocity solution + """Creates a time series from a stack of GSLCs consisting of interferograms and a velocity solution Args: looks: tuple containing the number range looks and azimuth looks @@ -169,25 +126,13 @@ def create_time_series( work_dir: the directory containing the GSLCs to do work in """ dem_shape = get_size_from_dem('elevation.dem.rsc') - 
generate_wrapped_interferograms( - looks=looks, - baselines=baselines, - dem_shape=dem_shape, - work_dir=work_dir - ) + generate_wrapped_interferograms(looks=looks, baselines=baselines, dem_shape=dem_shape, work_dir=work_dir) unw_shape = get_size_from_dem(work_dir / 'dem.rsc') - unwrap_interferograms( - dem_shape=dem_shape, - unw_shape=unw_shape, - work_dir=work_dir - ) + unwrap_interferograms(dem_shape=dem_shape, unw_shape=unw_shape, work_dir=work_dir) compute_sbas_velocity_solution( - threshold=threshold, - do_tropo_correction=do_tropo_correction, - unw_shape=unw_shape, - work_dir=work_dir + threshold=threshold, do_tropo_correction=do_tropo_correction, unw_shape=unw_shape, work_dir=work_dir ) From db309b5021ac851bc515a8e2fa46164ed3930def Mon Sep 17 00:00:00 2001 From: Forrest Williams Date: Fri, 6 Sep 2024 13:12:28 +0000 Subject: [PATCH 19/53] update product loading --- src/hyp3_srg/time_series.py | 48 ++++++++++++++++++++++++++++++++++++- 1 file changed, 47 insertions(+), 1 deletion(-) diff --git a/src/hyp3_srg/time_series.py b/src/hyp3_srg/time_series.py index d419d65..421518e 100644 --- a/src/hyp3_srg/time_series.py +++ b/src/hyp3_srg/time_series.py @@ -4,21 +4,65 @@ import argparse import logging +import shutil import zipfile from os import mkdir from pathlib import Path from shutil import copyfile from typing import Iterable, Optional +from boto3 import client from hyp3lib.aws import upload_file_to_s3 +from hyp3lib.fetch import download_file as download_from_http from shapely import unary_union from hyp3_srg import dem, utils +S3 = client('s3') log = logging.getLogger(__name__) +def download_from_s3(uri: str, dest_dir: Optional[Path] = None) -> None: + """Download a file from an S3 bucket + + Args: + uri: URI of the file to download + """ + if dest_dir is None: + dest_dir = Path.cwd() + + simple_s3_uri = Path(uri.replace('s3://', '')) + bucket = simple_s3_uri.parts[0] + key = '/'.join(simple_s3_uri.parts[1:]) + out_path = dest_dir / simple_s3_uri.parts[-1] + 
S3.download_file(bucket, key, out_path) + return out_path + + +def load_products(uris: Iterable[str]): + """Load the products from the provided URIs + + Args: + uris: list of URIs to the SRG GSLC products + """ + work_dir = Path.cwd() + for uri in uris: + name = Path(Path(uri).name) + + if name.with_suffix('.zip').exists() or name.with_suffix('.geo').exists(): + pass + elif uri.startswith('s3'): + download_from_s3(uri, dest_dir=work_dir) + elif uri.startswith('http'): + download_from_http(uri, directory=work_dir) + elif len(Path(uri).parts) > 1: + shutil.copy(uri, work_dir) + + if not name.with_suffix('.geo').exists(): + shutil.unpack_archive(name.with_suffix('.zip'), work_dir) + + def get_size_from_dem(dem_file: str) -> tuple[int]: """Get the length and width from a .rsc DEM file @@ -184,6 +228,8 @@ def time_series( if not sbas_dir.exists(): mkdir(sbas_dir) + load_products(granules) + bboxs = [] for granule in granules: bboxs.append(utils.get_bbox(granule)) @@ -201,7 +247,7 @@ def time_series( if bucket: upload_file_to_s3(zip_path, bucket, bucket_prefix) - print(f'Finished time-series processing for {list(sbas_dir.glob("S1*.geo"))[0].with_suffix("").name}!') + print(f'Finished time-series processing for {list(work_dir.glob("S1*.geo"))[0].with_suffix("").name}!') def main(): From 4ee738ccaedc91d5699bbee1441da76517bf14e7 Mon Sep 17 00:00:00 2001 From: Forrest Williams Date: Fri, 6 Sep 2024 13:31:39 +0000 Subject: [PATCH 20/53] further product loading updates --- src/hyp3_srg/time_series.py | 24 +++++++++++++++++------- 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/src/hyp3_srg/time_series.py b/src/hyp3_srg/time_series.py index 421518e..1fe981c 100644 --- a/src/hyp3_srg/time_series.py +++ b/src/hyp3_srg/time_series.py @@ -40,17 +40,22 @@ def download_from_s3(uri: str, dest_dir: Optional[Path] = None) -> None: return out_path -def load_products(uris: Iterable[str]): +def load_products(uris: Iterable[str], overwrite: bool = False): """Load the products 
from the provided URIs Args: uris: list of URIs to the SRG GSLC products + overwrite: overwrite existing products """ work_dir = Path.cwd() + granule_names = [] for uri in uris: name = Path(Path(uri).name) + geo_name = name.with_suffix('.geo') + zip_name = name.with_suffix('.zip') - if name.with_suffix('.zip').exists() or name.with_suffix('.geo').exists(): + product_exists = geo_name.exists() or zip_name.exists() + if product_exists and not overwrite: pass elif uri.startswith('s3'): download_from_s3(uri, dest_dir=work_dir) @@ -59,9 +64,13 @@ def load_products(uris: Iterable[str]): elif len(Path(uri).parts) > 1: shutil.copy(uri, work_dir) - if not name.with_suffix('.geo').exists(): + if not geo_name.exists(): shutil.unpack_archive(name.with_suffix('.zip'), work_dir) + granule_names.append(str(name)) + + return granule_names + def get_size_from_dem(dem_file: str) -> tuple[int]: """Get the length and width from a .rsc DEM file @@ -228,11 +237,12 @@ def time_series( if not sbas_dir.exists(): mkdir(sbas_dir) - load_products(granules) + granule_names = load_products(granules) bboxs = [] - for granule in granules: - bboxs.append(utils.get_bbox(granule)) + for name in granule_names: + # TODO: This may not work for a GSLC product created using multiple L0 granules + bboxs.append(utils.get_bbox(name)) full_bbox = unary_union(bboxs).buffer(0.1) dem_path = dem.download_dem_for_srg(full_bbox, work_dir) @@ -247,7 +257,7 @@ def time_series( if bucket: upload_file_to_s3(zip_path, bucket, bucket_prefix) - print(f'Finished time-series processing for {list(work_dir.glob("S1*.geo"))[0].with_suffix("").name}!') + print(f'Finished time-series processing for {", ".join(granule_names)}!') def main(): From a298753284276edaced1ced0fd14c82c44f99046 Mon Sep 17 00:00:00 2001 From: Forrest Williams Date: Fri, 6 Sep 2024 14:08:05 +0000 Subject: [PATCH 21/53] update packaging --- src/hyp3_srg/time_series.py | 40 +++++++++++++++++++++++-------------- 1 file changed, 25 insertions(+), 15 
deletions(-) diff --git a/src/hyp3_srg/time_series.py b/src/hyp3_srg/time_series.py index 1fe981c..5d81159 100644 --- a/src/hyp3_srg/time_series.py +++ b/src/hyp3_srg/time_series.py @@ -189,8 +189,7 @@ def create_time_series( ) -# TODO: Package the time series files -def package_time_series(work_dir) -> Path: +def package_time_series(work_dir: Optional[Path] = None) -> Path: """Package the time series into a product zip file. Args: @@ -199,21 +198,32 @@ def package_time_series(work_dir) -> Path: Returns: Path to the created zip file """ - gslc_path = list(work_dir.glob('S1*.geo'))[0] - product_name = gslc_path.with_suffix('').name + if work_dir is None: + work_dir = Path.cwd() + sbas_dir = work_dir / 'sbas' + # TODO: create name based on input granules + product_name = 'time_series' + product_path = work_dir / product_name + product_path.mkdir(exist_ok=True, parents=True) zip_path = work_dir / f'{product_name}.zip' - parameter_file = work_dir / f'{product_name}.txt' - input_granules = [x.with_suffix('').name for x in work_dir.glob('S1*.SAFE')] - with open(parameter_file, 'w') as f: - f.write('Process: time-series\n') - f.write(f"Input Granules: {', '.join(input_granules)}\n") - - # We don't compress the data because SLC data is psuedo-random - with zipfile.ZipFile(zip_path, 'w', compression=zipfile.ZIP_STORED) as z: - z.write(gslc_path, gslc_path.name) - z.write(parameter_file, parameter_file.name) - + to_keep = [ + # Metadata + 'sbas_list', + 'parameters', + 'reflocs', + 'dem.rsc', + # Datasets + 'dem', + 'locs', + 'npts', + 'displacement', + 'stackmht', + 'stacktime', + 'velocity', + ] + [shutil.copy(sbas_dir / f, product_path / f) for f in to_keep] + shutil.make_archive(product_path, 'zip', product_path) return zip_path From 2ff749f6d6778e603533b46000b21aeef17100ea Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Fri, 6 Sep 2024 11:57:40 -0500 Subject: [PATCH 22/53] fixed name --- src/hyp3_srg/time_series.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
diff --git a/src/hyp3_srg/time_series.py b/src/hyp3_srg/time_series.py index 5d81159..cf79f7f 100644 --- a/src/hyp3_srg/time_series.py +++ b/src/hyp3_srg/time_series.py @@ -211,7 +211,7 @@ def package_time_series(work_dir: Optional[Path] = None) -> Path: # Metadata 'sbas_list', 'parameters', - 'reflocs', + 'ref_locs', 'dem.rsc', # Datasets 'dem', From 8f07db1472287a482c7a1d1328d6e2789641915b Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Fri, 6 Sep 2024 13:08:52 -0500 Subject: [PATCH 23/53] unused import --- src/hyp3_srg/time_series.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/hyp3_srg/time_series.py b/src/hyp3_srg/time_series.py index cf79f7f..224ee35 100644 --- a/src/hyp3_srg/time_series.py +++ b/src/hyp3_srg/time_series.py @@ -5,7 +5,6 @@ import argparse import logging import shutil -import zipfile from os import mkdir from pathlib import Path from shutil import copyfile From e99dca129bc75766cdcaf62866157801c217d9c1 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Fri, 6 Sep 2024 14:58:00 -0500 Subject: [PATCH 24/53] add elevation.dem.rsc to product --- src/hyp3_srg/back_projection.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/hyp3_srg/back_projection.py b/src/hyp3_srg/back_projection.py index a62f6ba..265584f 100644 --- a/src/hyp3_srg/back_projection.py +++ b/src/hyp3_srg/back_projection.py @@ -77,6 +77,7 @@ def create_product(work_dir) -> Path: gslc_path = list(work_dir.glob('S1*.geo'))[0] product_name = gslc_path.with_suffix('').name orbit_path = work_dir / f'{product_name}.orbtiming' + rsc_path = work_dir / f'elevation.dem.rsc' zip_path = work_dir / f'{product_name}.zip' parameter_file = work_dir / f'{product_name}.txt' @@ -89,6 +90,7 @@ def create_product(work_dir) -> Path: with zipfile.ZipFile(zip_path, 'w', compression=zipfile.ZIP_STORED) as z: z.write(gslc_path, gslc_path.name) z.write(orbit_path, orbit_path.name) + z.write(rsc_path, rsc_path.name) z.write(parameter_file, parameter_file.name) return zip_path From 
e997bd63f4bfbf14a5be6b63274c7d3db94bf008 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Fri, 6 Sep 2024 15:00:49 -0500 Subject: [PATCH 25/53] flake8 --- src/hyp3_srg/back_projection.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/hyp3_srg/back_projection.py b/src/hyp3_srg/back_projection.py index 265584f..bc89a9c 100644 --- a/src/hyp3_srg/back_projection.py +++ b/src/hyp3_srg/back_projection.py @@ -77,7 +77,7 @@ def create_product(work_dir) -> Path: gslc_path = list(work_dir.glob('S1*.geo'))[0] product_name = gslc_path.with_suffix('').name orbit_path = work_dir / f'{product_name}.orbtiming' - rsc_path = work_dir / f'elevation.dem.rsc' + rsc_path = work_dir / 'elevation.dem.rsc' zip_path = work_dir / f'{product_name}.zip' parameter_file = work_dir / f'{product_name}.txt' From 43bf051f97253556c748b6a226749f6924cc4946 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Fri, 6 Sep 2024 15:00:57 -0500 Subject: [PATCH 26/53] updated changelog --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6ecb3b8..f3999df 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,11 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/) and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.7.1] + +### Added +* The back-projection product now includes the elevation.dem.rsc file. 
+ ## [0.7.0] ### Changed From bbb19da81ee1bcf413a4172cc87ea51914d66613 Mon Sep 17 00:00:00 2001 From: jacquelynsmale Date: Mon, 9 Sep 2024 11:52:56 -0800 Subject: [PATCH 27/53] add - to timeseries --- README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index f36b14d..f65d2dd 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ HyP3 plugin for Stanford Radar Group (SRG) SAR Processor The HyP3-SRG plugin provides a set of workflows (currently only accessible via the docker container) that can be used to process SAR data using the [Stanford Radar Group Processor](https://github.com/asfhyp3/srg). This set of workflow uses the [SRG alogorithms]((https://doi.org/10.1109/LGRS.2017.2753580)) to process Level-0 Sentinel-1 (S1) data to geocoded, user-friendly products that can be used for time-series analysis. The workflows currently included in this plugin are: - [`back_projection`](#back-projection): A workflow for creating geocoded Sentinel-1 SLCs, -- [`timeseries`](#time-series-analysis): A workflow for creating a deformation timeseries of geocoded Sentinel-1 SLCs. +- [`time_series`](#time-series): A workflow for creating a deformation timeseries of geocoded Sentinel-1 SLCs. To run a workflow, you'll first need to build the docker container: ```bash @@ -39,15 +39,15 @@ docker run -it --rm \ ``` ### Time-series -The `timeseries` workflow takes a list of up to 50 Level-0 S1 granule names and produces a time-series of the respective geocoded SLCs. Stacks are created with looks `10x10`and baselines of `1000x1000`. A trophospheric correction is applied using an elevation-dependent regression. +The `time_series` workflow takes a list of up to 50 Level-0 S1 granule names and produces a time-series of the respective geocoded SLCs. Stacks are created with looks `10x10` and baselines of `1000x1000`. A tropospheric correction is applied using an elevation-dependent regression.
This workflow will output interferograms and time-series files for all input granules. - The following command will run the `timeseries` workflow: + The following command will run the `time_series` workflow: ``` docker run -it --rm \ -e EARTHDATA_USERNAME=[YOUR_USERNAME_HERE] \ -e EARTHDATA_PASSWORD=[YOUR_PASSWORD_HERE] \ hyp3-srg:latest \ - ++process timeseries \ + ++process time_series \ S1A_IW_RAW__0SDV_20240828T020812_20240828T020844_055407_06C206_6EA7 \ S1A_IW_RAW__0SDV_20240816T020812_20240816T020844_055232_06BB8A_C7CA \ S1A_IW_RAW__0SDV_20240804T020812_20240804T020844_055057_06B527_1346 From 1ee8440691e06c228130204d7955ce8d55393732 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Tue, 17 Sep 2024 12:20:45 -0500 Subject: [PATCH 28/53] pick only 1 bbox and run with it --- src/hyp3_srg/time_series.py | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/src/hyp3_srg/time_series.py b/src/hyp3_srg/time_series.py index 224ee35..e5b7fca 100644 --- a/src/hyp3_srg/time_series.py +++ b/src/hyp3_srg/time_series.py @@ -247,17 +247,10 @@ def time_series( mkdir(sbas_dir) granule_names = load_products(granules) - - bboxs = [] - for name in granule_names: - # TODO: This may not work for a GSLC product created using multiple L0 granules - bboxs.append(utils.get_bbox(name)) - full_bbox = unary_union(bboxs).buffer(0.1) - - dem_path = dem.download_dem_for_srg(full_bbox, work_dir) + bbox = utils.get_bbox(granule_names[0]).buffer(0.1) + dem_path = dem.download_dem_for_srg(bbox, work_dir) utils.create_param_file(dem_path, dem_path.with_suffix('.dem.rsc'), work_dir) - utils.call_stanford_module('util/merge_slcs.py', work_dir=work_dir) create_time_series(work_dir=sbas_dir) From 86ffc9960e99f8c7503b08f2eb0cf13a6b5b5cbc Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Tue, 17 Sep 2024 14:00:59 -0500 Subject: [PATCH 29/53] create product name for timeseries --- src/hyp3_srg/time_series.py | 56 +++++++++++++++++++++++++++++++++---- 1 file changed, 51 
insertions(+), 5 deletions(-) diff --git a/src/hyp3_srg/time_series.py b/src/hyp3_srg/time_series.py index e5b7fca..91bbf1b 100644 --- a/src/hyp3_srg/time_series.py +++ b/src/hyp3_srg/time_series.py @@ -7,14 +7,14 @@ import shutil from os import mkdir from pathlib import Path +from secrets import token_hex from shutil import copyfile from typing import Iterable, Optional from boto3 import client from hyp3lib.aws import upload_file_to_s3 from hyp3lib.fetch import download_file as download_from_http -from shapely import unary_union - +from shapely.geometry import Polygon from hyp3_srg import dem, utils @@ -188,7 +188,54 @@ def create_time_series( ) -def package_time_series(work_dir: Optional[Path] = None) -> Path: +def create_time_series_product_name( + granule_names: list[str], + bbox: Polygon, +): + prefix = "S1_SRG_SBAS" + split_names = [granule.split("_") for granule in granule_names] + + absolute_orbit = split_names[0][7] + if split_names[0][0] == "S1A": + relative_orbit = str(((int(absolute_orbit) - 73) % 175) + 1) + else: + relative_orbit = str(((int(absolute_orbit) - 27) % 175) + 1) + + start_dates = sorted([name[5] for name in split_names]) + earliest_granule = start_dates[0] + latest_granule = start_dates[-1] + + lons, lats = bbox.exterior.coords.xy + + def lat_string(lat): + return ('N' if lat >= 0 else 'S') + f"{('%.1f' % abs(lat)).zfill(4)}".replace('.', '_') + + def lon_string(lon): + return ('E' if lon >= 0 else 'W') + f"{('%.1f' % abs(lon)).zfill(5)}".replace('.', '_') + + lat_lims = [lat_string(lat) for lat in [min(lats), max(lats)]] + lon_lims = [lon_string(lon) for lon in [min(lons), max(lons)]] + + product_id = token_hex(2).upper() + + return '_'.join([ + prefix, + relative_orbit, + lon_lims[0], + lat_lims[0], + lon_lims[1], + lat_lims[1], + earliest_granule, + latest_granule, + product_id + ]) + + +def package_time_series( + granules: list[str], + bbox: Polygon, + work_dir: Optional[Path] = None +) -> Path: """Package the time series into a 
product zip file. Args: @@ -200,8 +247,7 @@ def package_time_series(work_dir: Optional[Path] = None) -> Path: if work_dir is None: work_dir = Path.cwd() sbas_dir = work_dir / 'sbas' - # TODO: create name based on input granules - product_name = 'time_series' + product_name = create_time_series_product_name(granules, bbox) product_path = work_dir / product_name product_path.mkdir(exist_ok=True, parents=True) zip_path = work_dir / f'{product_name}.zip' From 26c03d35893bc4b5268257601138820400ad1f83 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Tue, 17 Sep 2024 14:02:10 -0500 Subject: [PATCH 30/53] import newline --- src/hyp3_srg/time_series.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/hyp3_srg/time_series.py b/src/hyp3_srg/time_series.py index 91bbf1b..d8d40e5 100644 --- a/src/hyp3_srg/time_series.py +++ b/src/hyp3_srg/time_series.py @@ -15,6 +15,7 @@ from hyp3lib.aws import upload_file_to_s3 from hyp3lib.fetch import download_file as download_from_http from shapely.geometry import Polygon + from hyp3_srg import dem, utils From 0c1cae64be50f637c68d23623653fcdcea83366d Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Tue, 17 Sep 2024 14:04:41 -0500 Subject: [PATCH 31/53] moved create_param_file test --- tests/test_back_projection.py | 15 --------------- tests/test_utils.py | 15 +++++++++++++++ 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/tests/test_back_projection.py b/tests/test_back_projection.py index bf4c564..99cb8b5 100644 --- a/tests/test_back_projection.py +++ b/tests/test_back_projection.py @@ -5,21 +5,6 @@ from hyp3_srg import back_projection, utils -def test_create_param_file(tmp_path): - dem_path = tmp_path / 'elevation.dem' - dem_rsc_path = tmp_path / 'elevation.dem.rsc' - output_dir = tmp_path - back_projection.create_param_file(dem_path, dem_rsc_path, output_dir) - assert (tmp_path / 'params').exists() - - with open(tmp_path / 'params') as f: - lines = [x.strip() for x in f.readlines()] - - assert len(lines) == 2 - 
assert lines[0] == str(dem_path) - assert lines[1] == str(dem_rsc_path) - - def test_back_project_granules(tmp_path, monkeypatch): granule_path = tmp_path / 'granule.SAFE' orbit_path = tmp_path / 'orbit.xml' diff --git a/tests/test_utils.py b/tests/test_utils.py index 3a66718..fad3bad 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -8,6 +8,21 @@ from hyp3_srg import utils +def test_create_param_file(tmp_path): + dem_path = tmp_path / 'elevation.dem' + dem_rsc_path = tmp_path / 'elevation.dem.rsc' + output_dir = tmp_path + utils.create_param_file(dem_path, dem_rsc_path, output_dir) + assert (tmp_path / 'params').exists() + + with open(tmp_path / 'params') as f: + lines = [x.strip() for x in f.readlines()] + + assert len(lines) == 2 + assert lines[0] == str(dem_path) + assert lines[1] == str(dem_rsc_path) + + def test_get_proc_home(tmp_path, monkeypatch): with monkeypatch.context() as m: m.setenv('PROC_HOME', str(tmp_path)) From 376f688fb93f93dcb7f125a27427cb7dcbc4ae68 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Tue, 17 Sep 2024 14:06:34 -0500 Subject: [PATCH 32/53] updated changelog --- CHANGELOG.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6ecb3b8..6213b7a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,11 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/) and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.8.0] + +### Added +* Added `time_series` workflow for time series processing of GSLC stacks. 
+ ## [0.7.0] ### Changed From 9e0dde1068a5bcb0b37ed7ef8f246c2f53c47a6a Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Tue, 17 Sep 2024 14:08:39 -0500 Subject: [PATCH 33/53] merged changelog --- CHANGELOG.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6213b7a..33ea153 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,7 +9,8 @@ and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html). ## [0.8.0] ### Added -* Added `time_series` workflow for time series processing of GSLC stacks. +* `time_series` workflow for time series processing of GSLC stacks. +* The back-projection product now includes the elevation.dem.rsc file. ## [0.7.0] @@ -79,4 +80,3 @@ and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html). ### Added * Initial version of repository. - From 8b1b66468e53b15c6d2623598f2d7b0c1af57b20 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Wed, 18 Sep 2024 15:52:51 -0500 Subject: [PATCH 34/53] add bounds as optional arg to back-projection --- src/hyp3_srg/back_projection.py | 10 +++++++--- src/hyp3_srg/dem.py | 15 +++++++++++++++ 2 files changed, 22 insertions(+), 3 deletions(-) diff --git a/src/hyp3_srg/back_projection.py b/src/hyp3_srg/back_projection.py index 60881ad..dd102d9 100644 --- a/src/hyp3_srg/back_projection.py +++ b/src/hyp3_srg/back_projection.py @@ -83,6 +83,7 @@ def create_product(work_dir) -> Path: def back_project( granules: Iterable[str], + bounds: list[float] = None, earthdata_username: str = None, earthdata_password: str = None, bucket: str = None, @@ -113,9 +114,11 @@ def back_project( orbit_path = utils.download_orbit(granule, work_dir) bboxs.append(granule_bbox) granule_orbit_pairs.append((granule_path, orbit_path)) - - full_bbox = unary_union(bboxs).buffer(0.1) - dem_path = dem.download_dem_for_srg(full_bbox, work_dir) + if bounds == None: + full_bbox = unary_union(bboxs).buffer(0.1) + dem_path = dem.download_dem_for_srg(full_bbox, work_dir) + else: + 
dem_path = dem.download_dem_from_bounds(bounds, work_dir) utils.create_param_file(dem_path, dem_path.with_suffix('.dem.rsc'), work_dir) back_project_granules(granule_orbit_pairs, work_dir=work_dir, gpu=gpu) @@ -143,6 +146,7 @@ def main(): parser.add_argument('--bucket', help='AWS S3 bucket HyP3 for upload the final product(s)') parser.add_argument('--bucket-prefix', default='', help='Add a bucket prefix to product(s)') parser.add_argument('--gpu', default=False, action='store_true', help='Use the GPU-based version of the workflow.') + parser.add_argument('--bounds', default=None, help='Bounds for DEM (max lat, min lat, min lon, max lon)') parser.add_argument('granules', type=str.split, nargs='+', help='Level-0 S1 granule(s) to back-project.') args = parser.parse_args() args.granules = [item for sublist in args.granules for item in sublist] diff --git a/src/hyp3_srg/dem.py b/src/hyp3_srg/dem.py index faffe5d..d09dce2 100644 --- a/src/hyp3_srg/dem.py +++ b/src/hyp3_srg/dem.py @@ -1,6 +1,7 @@ """Prepare a Copernicus GLO-30 DEM virtual raster (VRT) covering a given geometry""" import logging from pathlib import Path +from typing import Optional import requests from shapely.geometry import Polygon @@ -50,3 +51,17 @@ def download_dem_for_srg( args = [str(dem_path), str(dem_rsc), *stanford_bounds] utils.call_stanford_module('DEM/createDEMcop.py', args, work_dir=work_dir) return dem_path + + +def download_dem_from_bounds(bounds: list[float], work_dir: Optional[Path]): + if (bounds[0] <= bounds[1] or bounds[2] >= bounds[3]): + raise ValueError("Improper bounding box formatting, should be [max latitude, min latitude, min longitude, max longitude].") + + dem_path = work_dir / 'elevation.dem' + dem_rsc = work_dir / 'elevation.dem.rsc' + + ensure_egm_model_available() + + args = [str(dem_path), str(dem_rsc), *bounds] + utils.call_stanford_module('DEM/createDEMcop.py', args, work_dir=work_dir) + return dem_path From 26640dcf301ffc585374e65c7bba689aa9743c9c Mon Sep 17 00:00:00 
2001 From: Andrew Player Date: Wed, 18 Sep 2024 16:52:11 -0500 Subject: [PATCH 35/53] correct bounds type and save bounds file --- src/hyp3_srg/back_projection.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/hyp3_srg/back_projection.py b/src/hyp3_srg/back_projection.py index 33e3dd0..c1ab2c4 100644 --- a/src/hyp3_srg/back_projection.py +++ b/src/hyp3_srg/back_projection.py @@ -65,6 +65,7 @@ def create_product(work_dir) -> Path: product_name = gslc_path.with_suffix('').name orbit_path = work_dir / f'{product_name}.orbtiming' rsc_path = work_dir / 'elevation.dem.rsc' + bounds_path = work_dir / 'bounds' zip_path = work_dir / f'{product_name}.zip' parameter_file = work_dir / f'{product_name}.txt' @@ -78,6 +79,7 @@ def create_product(work_dir) -> Path: z.write(gslc_path, gslc_path.name) z.write(orbit_path, orbit_path.name) z.write(rsc_path, rsc_path.name) + z.write(bounds_path, bounds_path.name) z.write(parameter_file, parameter_file.name) return zip_path @@ -148,7 +150,7 @@ def main(): parser.add_argument('--bucket', help='AWS S3 bucket HyP3 for upload the final product(s)') parser.add_argument('--bucket-prefix', default='', help='Add a bucket prefix to product(s)') parser.add_argument('--gpu', default=False, action='store_true', help='Use the GPU-based version of the workflow.') - parser.add_argument('--bounds', default=None, help='Bounds for DEM (max lat, min lat, min lon, max lon)') + parser.add_argument('--bounds', default=None, type=float, nargs=4, help='Bounds for DEM (max lat, min lat, min lon, max lon)') parser.add_argument('granules', type=str.split, nargs='+', help='Level-0 S1 granule(s) to back-project.') args = parser.parse_args() args.granules = [item for sublist in args.granules for item in sublist] From 66697cf98412682857e4a65053c2f16e46d266f9 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Wed, 18 Sep 2024 16:53:32 -0500 Subject: [PATCH 36/53] dem from bounds --- src/hyp3_srg/dem.py | 22 +++++++++++++--------- 1 file 
changed, 13 insertions(+), 9 deletions(-) diff --git a/src/hyp3_srg/dem.py b/src/hyp3_srg/dem.py index d09dce2..1556eb9 100644 --- a/src/hyp3_srg/dem.py +++ b/src/hyp3_srg/dem.py @@ -41,25 +41,29 @@ def download_dem_for_srg( Returns: The path to the downloaded DEM """ - dem_path = work_dir / 'elevation.dem' - dem_rsc = work_dir / 'elevation.dem.rsc' - - ensure_egm_model_available() - - # bounds produces min x, min y, max x, max y; stanford wants toplat, botlat, leftlon, rightlon stanford_bounds = [footprint.bounds[i] for i in [3, 1, 0, 2]] - args = [str(dem_path), str(dem_rsc), *stanford_bounds] - utils.call_stanford_module('DEM/createDEMcop.py', args, work_dir=work_dir) - return dem_path + return download_dem_from_bounds(stanford_bounds, work_dir) def download_dem_from_bounds(bounds: list[float], work_dir: Optional[Path]): + """Download the DEM for the given stanford bounds. + + Args: + footprint: The footprint to download a DEM for + work_dir: The directory to save create the DEM in + + Returns: + The path to the downloaded DEM + """ if (bounds[0] <= bounds[1] or bounds[2] >= bounds[3]): raise ValueError("Improper bounding box formatting, should be [max latitude, min latitude, min longitude, max longitude].") dem_path = work_dir / 'elevation.dem' dem_rsc = work_dir / 'elevation.dem.rsc' + with open(work_dir / 'bounds', 'w') as bounds_file: + bounds_file.write(' '.join([str(bound) for bound in bounds])) + ensure_egm_model_available() args = [str(dem_path), str(dem_rsc), *bounds] From 675a8e5583384e130d5fdd4db26aa2a7d283eb09 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Thu, 19 Sep 2024 09:31:25 -0500 Subject: [PATCH 37/53] updated changelog --- CHANGELOG.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 33ea153..0fe4e88 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,7 +9,10 @@ and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
## [0.8.0] ### Added -* `time_series` workflow for time series processing of GSLC stacks. +* New `time_series` workflow for time series processing of GSLC stacks. + +### Changed +* The `back_projection` workflow now accepts an optional `--bounds` parameter to specify the DEM extent * The back-projection product now includes the elevation.dem.rsc file. ## [0.7.0] From 61ef231264f1699bef976a6156682bd62277d585 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Thu, 19 Sep 2024 09:31:51 -0500 Subject: [PATCH 38/53] refactoring for using bounds --- src/hyp3_srg/time_series.py | 31 ++++++++++++------------------- 1 file changed, 12 insertions(+), 19 deletions(-) diff --git a/src/hyp3_srg/time_series.py b/src/hyp3_srg/time_series.py index d8d40e5..64a39cf 100644 --- a/src/hyp3_srg/time_series.py +++ b/src/hyp3_srg/time_series.py @@ -14,7 +14,6 @@ from boto3 import client from hyp3lib.aws import upload_file_to_s3 from hyp3lib.fetch import download_file as download_from_http -from shapely.geometry import Polygon from hyp3_srg import dem, utils @@ -191,7 +190,7 @@ def create_time_series( def create_time_series_product_name( granule_names: list[str], - bbox: Polygon, + bounds: list[float], ): prefix = "S1_SRG_SBAS" split_names = [granule.split("_") for granule in granule_names] @@ -206,35 +205,28 @@ def create_time_series_product_name( earliest_granule = start_dates[0] latest_granule = start_dates[-1] - lons, lats = bbox.exterior.coords.xy - def lat_string(lat): return ('N' if lat >= 0 else 'S') + f"{('%.1f' % abs(lat)).zfill(4)}".replace('.', '_') def lon_string(lon): return ('E' if lon >= 0 else 'W') + f"{('%.1f' % abs(lon)).zfill(5)}".replace('.', '_') - lat_lims = [lat_string(lat) for lat in [min(lats), max(lats)]] - lon_lims = [lon_string(lon) for lon in [min(lons), max(lons)]] - - product_id = token_hex(2).upper() - return '_'.join([ prefix, relative_orbit, - lon_lims[0], - lat_lims[0], - lon_lims[1], - lat_lims[1], + lon_string(bounds[2]), + lat_string(bounds[0]), + 
lon_string(bounds[3]), + lat_string(bounds[1]), earliest_granule, latest_granule, - product_id + token_hex(2).upper() ]) def package_time_series( granules: list[str], - bbox: Polygon, + bounds: list[float], work_dir: Optional[Path] = None ) -> Path: """Package the time series into a product zip file. @@ -248,7 +240,7 @@ def package_time_series( if work_dir is None: work_dir = Path.cwd() sbas_dir = work_dir / 'sbas' - product_name = create_time_series_product_name(granules, bbox) + product_name = create_time_series_product_name(granules, bounds) product_path = work_dir / product_name product_path.mkdir(exist_ok=True, parents=True) zip_path = work_dir / f'{product_name}.zip' @@ -275,6 +267,7 @@ def package_time_series( def time_series( granules: Iterable[str], + bounds: list[float], bucket: str = None, bucket_prefix: str = '', work_dir: Optional[Path] = None, @@ -294,15 +287,14 @@ def time_series( mkdir(sbas_dir) granule_names = load_products(granules) - bbox = utils.get_bbox(granule_names[0]).buffer(0.1) - dem_path = dem.download_dem_for_srg(bbox, work_dir) + dem_path = dem.download_dem_from_bounds(bounds, work_dir) utils.create_param_file(dem_path, dem_path.with_suffix('.dem.rsc'), work_dir) utils.call_stanford_module('util/merge_slcs.py', work_dir=work_dir) create_time_series(work_dir=sbas_dir) - zip_path = package_time_series(work_dir) + zip_path = package_time_series(granule_names, bounds, work_dir) if bucket: upload_file_to_s3(zip_path, bucket, bucket_prefix) @@ -318,6 +310,7 @@ def main(): S1A_IW_RAW__0SDV_20231229T134404_20231229T134436_051870_064437_5F38.geo """ parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser.add_argument('--bounds', default=None, type=float, nargs=4, help='Bounds for DEM (max lat, min lat, min lon, max lon)') parser.add_argument('--bucket', help='AWS S3 bucket HyP3 for upload the final product(s)') parser.add_argument('--bucket-prefix', default='', help='Add a bucket 
prefix to product(s)') parser.add_argument('granules', type=str.split, nargs='+', help='GSLC granules.') From 7358e60f4b2f1a682ee71d4ff559a58050846616 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Thu, 19 Sep 2024 09:32:07 -0500 Subject: [PATCH 39/53] negative cases for dem download --- tests/test_dem.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/tests/test_dem.py b/tests/test_dem.py index ca4f6c0..4a35c0f 100644 --- a/tests/test_dem.py +++ b/tests/test_dem.py @@ -1,3 +1,5 @@ +import pytest + from pathlib import Path from unittest import mock @@ -19,3 +21,26 @@ def test_download_dem_for_srg(monkeypatch): [str(Path.cwd() / 'elevation.dem'), str(Path.cwd() / 'elevation.dem.rsc'), 3, 1, 0, 2], work_dir=Path.cwd(), ) + + +def test_download_dem_from_bounds(monkeypatch): + with monkeypatch.context() as m: + mock_ensure_egm_model_available = mock.Mock() + m.setattr(dem, 'ensure_egm_model_available', mock_ensure_egm_model_available) + mock_call_stanford_module = mock.Mock() + m.setattr(utils, 'call_stanford_module', mock_call_stanford_module) + dem.download_dem_from_bounds([1.0, 0.0, -1.0, 1.0], Path.cwd()) + mock_ensure_egm_model_available.assert_called_once() + mock_call_stanford_module.assert_called_once_with( + 'DEM/createDEMcop.py', + [str(Path.cwd() / 'elevation.dem'), str(Path.cwd() / 'elevation.dem.rsc'), 1.0, 0.0, -1.0, 1.0], + work_dir=Path.cwd(), + ) + bad_bboxs = [ + [0.0, 1.0, -1.0, 1.0], + [1.0, 1.0, -1.0, 1.0], + [1.0, 0.0, 1.0, -1.0] + ] + for bbox in bad_bboxs: + with pytest.raises(ValueError, match=r'Improper bounding box formatting*'): + dem.download_dem_from_bounds(bbox, Path.cwd()) From 2de20d5d3e98cd64f43752117d1d200381612f05 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Thu, 19 Sep 2024 10:22:47 -0500 Subject: [PATCH 40/53] refactoring --- src/hyp3_srg/back_projection.py | 6 ++++-- src/hyp3_srg/dem.py | 4 +++- src/hyp3_srg/time_series.py | 4 +++- tests/test_dem.py | 3 +-- 4 files changed, 11 insertions(+), 
6 deletions(-) diff --git a/src/hyp3_srg/back_projection.py b/src/hyp3_srg/back_projection.py index c1ab2c4..9ac63fc 100644 --- a/src/hyp3_srg/back_projection.py +++ b/src/hyp3_srg/back_projection.py @@ -118,7 +118,7 @@ def back_project( orbit_path = utils.download_orbit(granule, work_dir) bboxs.append(granule_bbox) granule_orbit_pairs.append((granule_path, orbit_path)) - if bounds == None: + if bounds is None: full_bbox = unary_union(bboxs).buffer(0.1) dem_path = dem.download_dem_for_srg(full_bbox, work_dir) else: @@ -150,7 +150,9 @@ def main(): parser.add_argument('--bucket', help='AWS S3 bucket HyP3 for upload the final product(s)') parser.add_argument('--bucket-prefix', default='', help='Add a bucket prefix to product(s)') parser.add_argument('--gpu', default=False, action='store_true', help='Use the GPU-based version of the workflow.') - parser.add_argument('--bounds', default=None, type=float, nargs=4, help='Bounds for DEM (max lat, min lat, min lon, max lon)') + parser.add_argument( + '--bounds', default=None, type=float, nargs=4, help='Bounds for DEM (max lat, min lat, min lon, max lon)' + ) parser.add_argument('granules', type=str.split, nargs='+', help='Level-0 S1 granule(s) to back-project.') args = parser.parse_args() args.granules = [item for sublist in args.granules for item in sublist] diff --git a/src/hyp3_srg/dem.py b/src/hyp3_srg/dem.py index 1556eb9..e559631 100644 --- a/src/hyp3_srg/dem.py +++ b/src/hyp3_srg/dem.py @@ -56,7 +56,9 @@ def download_dem_from_bounds(bounds: list[float], work_dir: Optional[Path]): The path to the downloaded DEM """ if (bounds[0] <= bounds[1] or bounds[2] >= bounds[3]): - raise ValueError("Improper bounding box formatting, should be [max latitude, min latitude, min longitude, max longitude].") + raise ValueError( + "Improper bounding box formatting, should be [max latitude, min latitude, min longitude, max longitude]." 
+ ) dem_path = work_dir / 'elevation.dem' dem_rsc = work_dir / 'elevation.dem.rsc' diff --git a/src/hyp3_srg/time_series.py b/src/hyp3_srg/time_series.py index 64a39cf..6420022 100644 --- a/src/hyp3_srg/time_series.py +++ b/src/hyp3_srg/time_series.py @@ -310,7 +310,9 @@ def main(): S1A_IW_RAW__0SDV_20231229T134404_20231229T134436_051870_064437_5F38.geo """ parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) - parser.add_argument('--bounds', default=None, type=float, nargs=4, help='Bounds for DEM (max lat, min lat, min lon, max lon)') + parser.add_argument( + '--bounds', default=None, type=float, nargs=4, help='Bounds for DEM (max lat, min lat, min lon, max lon)' + ) parser.add_argument('--bucket', help='AWS S3 bucket HyP3 for upload the final product(s)') parser.add_argument('--bucket-prefix', default='', help='Add a bucket prefix to product(s)') parser.add_argument('granules', type=str.split, nargs='+', help='GSLC granules.') diff --git a/tests/test_dem.py b/tests/test_dem.py index 4a35c0f..5d37af7 100644 --- a/tests/test_dem.py +++ b/tests/test_dem.py @@ -1,8 +1,7 @@ -import pytest - from pathlib import Path from unittest import mock +import pytest from shapely.geometry import box from hyp3_srg import dem, utils From af689a18d5a7aabaa77bf64ed69e0a95d898bb2f Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Thu, 19 Sep 2024 14:48:40 -0500 Subject: [PATCH 41/53] Update README.md Co-authored-by: Forrest Williams <31411324+forrestfwilliams@users.noreply.github.com> --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index f65d2dd..d860e53 100644 --- a/README.md +++ b/README.md @@ -26,7 +26,7 @@ docker run -it --rm \ ``` ### Back-projection -The `back_projection` processing type produces geocoded SLCs for raw S1 data. The workflow takes a list of Level-0 S1 granule names and outputs them as geocoded SLCs (GSLCs). 
+The `back_projection` processing type produces geocoded SLCs using Level-0 Sentinel-1 data as input. The workflow takes a list of Level-0 Sentinel-1 granule names and outputs them as geocoded SLCs (GSLCs). An example command for the `back_projection` workflow is: ```bash docker run -it --rm \ From 172aa5976aaa35fc300126e59156b2dd095be0ff Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Thu, 19 Sep 2024 15:09:47 -0500 Subject: [PATCH 42/53] add snaphu --- Dockerfile.gpu | 1 + 1 file changed, 1 insertion(+) diff --git a/Dockerfile.gpu b/Dockerfile.gpu index d1632a2..e4e579a 100644 --- a/Dockerfile.gpu +++ b/Dockerfile.gpu @@ -69,6 +69,7 @@ SHELL ["/bin/bash", "-l", "-c"] USER ${CONDA_UID} WORKDIR /home/conda/ +COPY --chown=${CONDA_UID}:${CONDA_GID} --from=builder /srg/snaphu_v2.0b0.0.0/bin/snaphu /srg/bin/snaphu COPY --chown=${CONDA_UID}:${CONDA_GID} --from=builder /srg /srg COPY --chown=${CONDA_UID}:${CONDA_GID} --from=builder /hyp3-srg /hyp3-srg From 78c9ecd4baac7c61ccabbdffb0f9e107c57faad3 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Thu, 19 Sep 2024 15:09:57 -0500 Subject: [PATCH 43/53] doc strings --- src/hyp3_srg/time_series.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/hyp3_srg/time_series.py b/src/hyp3_srg/time_series.py index 6420022..b4a303c 100644 --- a/src/hyp3_srg/time_series.py +++ b/src/hyp3_srg/time_series.py @@ -132,8 +132,8 @@ def compute_sbas_velocity_solution( """Computes the sbas velocity solution from the unwrapped interferograms Args: - threshold: ... - do_tropo_correction: ... 
+ threshold: correlation threshold for picking reference points + do_tropo_correction: whether or not to apply tropospheric correction unw_shape: tuple containing the width and length from the dem.rsc file work_dir: the directory containing the wrapped interferograms """ @@ -173,8 +173,8 @@ def create_time_series( Args: looks: tuple containing the number range looks and azimuth looks baselines: tuple containing the time baseline and spatial baseline - threshold: ... - do_tropo_correction: ... + threshold: correlation threshold for picking reference points + do_tropo_correction: whether or not to apply tropospheric correction work_dir: the directory containing the GSLCs to do work in """ dem_shape = get_size_from_dem('elevation.dem.rsc') @@ -232,6 +232,8 @@ def package_time_series( """Package the time series into a product zip file. Args: + granules: list of the granule names + bounds: bounds that were used to aquire the dem extent work_dir: Working directory for completed back-projection run Returns: @@ -276,6 +278,7 @@ def time_series( Args: granules: List of Sentinel-1 GSLCs + bounds: bounding box that was used to generate the GSLCs for aquiring the DEM bucket: AWS S3 bucket for uploading the final product(s) bucket_prefix: Add a bucket prefix to the product(s) work_dir: Working directory for processing @@ -311,7 +314,7 @@ def main(): """ parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument( - '--bounds', default=None, type=float, nargs=4, help='Bounds for DEM (max lat, min lat, min lon, max lon)' + '--bounds', default=None, type=float, nargs=4, help='Bounding box that was used to generate the GSLCs' ) parser.add_argument('--bucket', help='AWS S3 bucket HyP3 for upload the final product(s)') parser.add_argument('--bucket-prefix', default='', help='Add a bucket prefix to product(s)') From a9fa06b960823f7aae6dff9f43d816886da3889d Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Thu, 19 
Sep 2024 15:10:05 -0500 Subject: [PATCH 44/53] updated readme --- README.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/README.md b/README.md index d860e53..ac7e6a0 100644 --- a/README.md +++ b/README.md @@ -39,8 +39,7 @@ docker run -it --rm \ ``` ### Time-series -The `time_series` workflow takes a list of up to 50 Level-0 S1 granule names and produces a time-series of the respective geocoded SLCs. Stacks are created with looks `10x10`and baselines of `1000x1000`. A trophospheric correction is applied using an elevation-dependent regression. -This workflow will output interferograms and time-series files for all input granules. +The `time_series` workflow takes a list of up to 50 Sentinel-1 GSLC granule names, along with a bounding box, and produces a time-series. **Note that all of the input GSLSs must have been generated with the provided bounding box.** Stacks are created with `10` range looks, `10` azimuth looks, and temporal and spatial baselines of `1000` and `1000`, respectively. Candidate reference points are chosen with a correlation threshold of `0.5` - meaning the correlation must be above `0.5` in all scenes at that point. A tropospheric correction is applied using an elevation-dependent regression. 
The following command will run the `time_series` workflow: ``` docker run -it --rm \ From 3096321436a932c87df68853c1a246cb386966e6 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Thu, 19 Sep 2024 15:11:38 -0500 Subject: [PATCH 45/53] Update src/hyp3_srg/time_series.py Co-authored-by: Forrest Williams <31411324+forrestfwilliams@users.noreply.github.com> --- src/hyp3_srg/time_series.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/src/hyp3_srg/time_series.py b/src/hyp3_srg/time_series.py index b4a303c..c66e543 100644 --- a/src/hyp3_srg/time_series.py +++ b/src/hyp3_srg/time_series.py @@ -143,13 +143,6 @@ def compute_sbas_velocity_solution( copyfile(work_dir / 'intlist', work_dir / 'unwlist') utils.call_stanford_module('util/sed.py', args=['s/int/unw/g', 'unwlist'], work_dir=work_dir) - num_unw_files = 0 - num_slcs = 0 - with open(work_dir / 'unwlist', 'r') as unw_list: - num_unw_files = len(unw_list.readlines()) - with open(work_dir / 'geolist', 'r') as slc_list: - num_slcs = len(slc_list.readlines()) - ref_point_args = ['unwlist', unw_width, unw_length, threshold] utils.call_stanford_module('int/findrefpoints', args=ref_point_args, work_dir=work_dir) @@ -157,6 +150,14 @@ def compute_sbas_velocity_solution( tropo_correct_args = ['unwlist', unw_width, unw_length] utils.call_stanford_module('int/tropocorrect.py', args=tropo_correct_args, work_dir=work_dir) + num_unw_files = 0 + with open(work_dir / 'unwlist', 'r') as unw_list: + num_unw_files = len(unw_list.readlines()) + + num_slcs = 0 + with open(work_dir / 'geolist', 'r') as slc_list: + num_slcs = len(slc_list.readlines()) + sbas_velocity_args = ['unwlist', num_unw_files, num_slcs, unw_width, 'ref_locs'] utils.call_stanford_module('sbas/sbas', args=sbas_velocity_args, work_dir=work_dir) From aa9ddeb983e3507289bbb0aa39ec489898f5e427 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Thu, 19 Sep 2024 15:28:12 -0500 Subject: [PATCH 46/53] shapely bounds instead of stanford bounds 
--- src/hyp3_srg/back_projection.py | 10 +++++----- src/hyp3_srg/dem.py | 28 ++++++---------------------- src/hyp3_srg/time_series.py | 21 +++++++++++++++------ 3 files changed, 26 insertions(+), 33 deletions(-) diff --git a/src/hyp3_srg/back_projection.py b/src/hyp3_srg/back_projection.py index 9ac63fc..3f31972 100644 --- a/src/hyp3_srg/back_projection.py +++ b/src/hyp3_srg/back_projection.py @@ -99,6 +99,7 @@ def back_project( Args: granules: List of Sentinel-1 level-0 granules to back-project + bounds: DEM extent bounding box [min_lon, min_lat, max_lon, max_lat] earthdata_username: Username for NASA's EarthData service earthdata_password: Password for NASA's EarthData service bucket: AWS S3 bucket for uploading the final product(s) @@ -118,11 +119,10 @@ def back_project( orbit_path = utils.download_orbit(granule, work_dir) bboxs.append(granule_bbox) granule_orbit_pairs.append((granule_path, orbit_path)) + if bounds is None: - full_bbox = unary_union(bboxs).buffer(0.1) - dem_path = dem.download_dem_for_srg(full_bbox, work_dir) - else: - dem_path = dem.download_dem_from_bounds(bounds, work_dir) + bounds = unary_union(bboxs).buffer(0.1).bounds + dem_path = dem.download_dem_for_srg(bounds, work_dir) utils.create_param_file(dem_path, dem_path.with_suffix('.dem.rsc'), work_dir) back_project_granules(granule_orbit_pairs, work_dir=work_dir, gpu=gpu) @@ -151,7 +151,7 @@ def main(): parser.add_argument('--bucket-prefix', default='', help='Add a bucket prefix to product(s)') parser.add_argument('--gpu', default=False, action='store_true', help='Use the GPU-based version of the workflow.') parser.add_argument( - '--bounds', default=None, type=float, nargs=4, help='Bounds for DEM (max lat, min lat, min lon, max lon)' + '--bounds', default=None, type=float, nargs=4, help='DEM extent bbox: [min_lon, min_lat, max_lon, max_lat].' 
) parser.add_argument('granules', type=str.split, nargs='+', help='Level-0 S1 granule(s) to back-project.') args = parser.parse_args() diff --git a/src/hyp3_srg/dem.py b/src/hyp3_srg/dem.py index e559631..6f7afe4 100644 --- a/src/hyp3_srg/dem.py +++ b/src/hyp3_srg/dem.py @@ -28,34 +28,17 @@ def ensure_egm_model_available(): f.write(chunk) -def download_dem_for_srg( - footprint: Polygon, - work_dir: Path, -) -> Path: - """Download the given DEM for the given extent. +def download_dem_for_srg(bounds: list[float], work_dir: Optional[Path]): + """Download the DEM for the given bounds - [min_lon, min_lat, max_lon, max_lat]. Args: - footprint: The footprint to download a DEM for + bounds: The bounds of the extent of the desired DEM - [min_lon, min_lat, max_lon, max_lat]. work_dir: The directory to save create the DEM in Returns: The path to the downloaded DEM """ - stanford_bounds = [footprint.bounds[i] for i in [3, 1, 0, 2]] - return download_dem_from_bounds(stanford_bounds, work_dir) - - -def download_dem_from_bounds(bounds: list[float], work_dir: Optional[Path]): - """Download the DEM for the given stanford bounds. - - Args: - footprint: The footprint to download a DEM for - work_dir: The directory to save create the DEM in - - Returns: - The path to the downloaded DEM - """ - if (bounds[0] <= bounds[1] or bounds[2] >= bounds[3]): + if (bounds[0] >= bounds[2] or bounds[1] >= bounds[3]): raise ValueError( "Improper bounding box formatting, should be [max latitude, min latitude, min longitude, max longitude]." 
) @@ -68,6 +51,7 @@ def download_dem_from_bounds(bounds: list[float], work_dir: Optional[Path]): ensure_egm_model_available() - args = [str(dem_path), str(dem_rsc), *bounds] + stanford_bounds = [bounds[i] for i in [3, 1, 0, 2]] + args = [str(dem_path), str(dem_rsc), *stanford_bounds] utils.call_stanford_module('DEM/createDEMcop.py', args, work_dir=work_dir) return dem_path diff --git a/src/hyp3_srg/time_series.py b/src/hyp3_srg/time_series.py index b4a303c..2cca595 100644 --- a/src/hyp3_srg/time_series.py +++ b/src/hyp3_srg/time_series.py @@ -192,6 +192,15 @@ def create_time_series_product_name( granule_names: list[str], bounds: list[float], ): + """Create a product name for the given granules. + + Args: + granules: list of the granule names + bounds: bounding box that was used to generate the GSLCs + + Returns: + the product name as a string. + """ prefix = "S1_SRG_SBAS" split_names = [granule.split("_") for granule in granule_names] @@ -214,10 +223,10 @@ def lon_string(lon): return '_'.join([ prefix, relative_orbit, - lon_string(bounds[2]), - lat_string(bounds[0]), - lon_string(bounds[3]), + lon_string(bounds[0]), lat_string(bounds[1]), + lon_string(bounds[2]), + lat_string(bounds[3]), earliest_granule, latest_granule, token_hex(2).upper() @@ -233,7 +242,7 @@ def package_time_series( Args: granules: list of the granule names - bounds: bounds that were used to aquire the dem extent + bounds: bounding box that was used to generate the GSLCs work_dir: Working directory for completed back-projection run Returns: @@ -278,7 +287,7 @@ def time_series( Args: granules: List of Sentinel-1 GSLCs - bounds: bounding box that was used to generate the GSLCs for aquiring the DEM + bounds: bounding box that was used to generate the GSLCs bucket: AWS S3 bucket for uploading the final product(s) bucket_prefix: Add a bucket prefix to the product(s) work_dir: Working directory for processing @@ -290,7 +299,7 @@ def time_series( mkdir(sbas_dir) granule_names = load_products(granules) 
- dem_path = dem.download_dem_from_bounds(bounds, work_dir) + dem_path = dem.download_dem_for_srg(bounds, work_dir) utils.create_param_file(dem_path, dem_path.with_suffix('.dem.rsc'), work_dir) utils.call_stanford_module('util/merge_slcs.py', work_dir=work_dir) From c61e6f260218319551476da80ae384c6f95c8e06 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Thu, 19 Sep 2024 15:52:57 -0500 Subject: [PATCH 47/53] updated tests --- tests/test_dem.py | 23 ++++---------------- tests/test_time_series.py | 45 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 49 insertions(+), 19 deletions(-) create mode 100644 tests/test_time_series.py diff --git a/tests/test_dem.py b/tests/test_dem.py index 5d37af7..b34be8e 100644 --- a/tests/test_dem.py +++ b/tests/test_dem.py @@ -13,33 +13,18 @@ def test_download_dem_for_srg(monkeypatch): m.setattr(dem, 'ensure_egm_model_available', mock_ensure_egm_model_available) mock_call_stanford_module = mock.Mock() m.setattr(utils, 'call_stanford_module', mock_call_stanford_module) - dem.download_dem_for_srg(box(0, 1, 2, 3), Path.cwd()) + dem.download_dem_for_srg(box(0, 1, 2, 3).bounds, Path.cwd()) mock_ensure_egm_model_available.assert_called_once() mock_call_stanford_module.assert_called_once_with( 'DEM/createDEMcop.py', [str(Path.cwd() / 'elevation.dem'), str(Path.cwd() / 'elevation.dem.rsc'), 3, 1, 0, 2], work_dir=Path.cwd(), ) - - -def test_download_dem_from_bounds(monkeypatch): - with monkeypatch.context() as m: - mock_ensure_egm_model_available = mock.Mock() - m.setattr(dem, 'ensure_egm_model_available', mock_ensure_egm_model_available) - mock_call_stanford_module = mock.Mock() - m.setattr(utils, 'call_stanford_module', mock_call_stanford_module) - dem.download_dem_from_bounds([1.0, 0.0, -1.0, 1.0], Path.cwd()) - mock_ensure_egm_model_available.assert_called_once() - mock_call_stanford_module.assert_called_once_with( - 'DEM/createDEMcop.py', - [str(Path.cwd() / 'elevation.dem'), str(Path.cwd() / 'elevation.dem.rsc'), 1.0, 0.0, -1.0, 
1.0], - work_dir=Path.cwd(), - ) bad_bboxs = [ [0.0, 1.0, -1.0, 1.0], - [1.0, 1.0, -1.0, 1.0], - [1.0, 0.0, 1.0, -1.0] + [1.0, 1.0, -1.0, 2.0], + [1.0, 0.0, 2.0, -1.0] ] for bbox in bad_bboxs: with pytest.raises(ValueError, match=r'Improper bounding box formatting*'): - dem.download_dem_from_bounds(bbox, Path.cwd()) + dem.download_dem_for_srg(bbox, Path.cwd()) diff --git a/tests/test_time_series.py b/tests/test_time_series.py new file mode 100644 index 0000000..b9bb0a5 --- /dev/null +++ b/tests/test_time_series.py @@ -0,0 +1,45 @@ +from unittest import mock + +import pytest + +from hyp3_srg import time_series, utils + + +def test_create_time_series_product_name(): + granule_names = [ + 'S1A_IW_RAW__0SDV_001_003_054532_06A2F8_8276.zip', + 'S1A_IW_RAW__0SDV_004_005_054882_06AF26_2CE5.zip', + 'S1A_IW_RAW__0SDV_010_020_055057_06B527_1346.zip' + ] + bounds = [-100, 45, -90, 50] + name = time_series.create_time_series_product_name(granule_names, bounds) + assert name.startswith('S1_SRG_SBAS_35_W100_0_N45_0_W090_0_N50_0_001_010') + + bounds = [101.5123, -34.333, 56.866, -25.8897] + name = time_series.create_time_series_product_name(granule_names, bounds) + assert name.startswith('S1_SRG_SBAS_35_E101_5_S34_3_E056_9_S25_9_001_010') + + +def test_get_size_from_dem(tmp_path): + rsc_content = """ + WIDTH 1235 + FILE_LENGTH 873 + X_FIRST -124.41472222 + Y_FIRST 39.52388889 + X_STEP 0.0027777778 + Y_STEP -0.0027777778 + X_UNIT degrees + Y_UNIT degrees + Z_OFFSET 0 + Z_SCALE 1 + PROJECTION LL + xstart 1 + ystart 1 + xsize 12357 + ysize 8731""" + + rsc_path = tmp_path / 'elevation.dem.rsc' + with open(rsc_path, 'w') as rsc_file: + rsc_file.write(rsc_content.strip()) + dem_width, dem_height = time_series.get_size_from_dem(dem_file=rsc_path) + assert dem_width, dem_height == (1235, 873) From aa73b40b94185e3e1ed673cb7ceb0f79b39b682c Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Thu, 19 Sep 2024 15:54:29 -0500 Subject: [PATCH 48/53] flake8 --- src/hyp3_srg/dem.py | 1 - 
 src/hyp3_srg/time_series.py | 2 +-
 tests/test_time_series.py | 6 +-----
 3 files changed, 2 insertions(+), 7 deletions(-)

diff --git a/src/hyp3_srg/dem.py b/src/hyp3_srg/dem.py
index 6f7afe4..d092b19 100644
--- a/src/hyp3_srg/dem.py
+++ b/src/hyp3_srg/dem.py
@@ -4,7 +4,6 @@
 from typing import Optional
 
 import requests
-from shapely.geometry import Polygon
 
 from hyp3_srg import utils
 
diff --git a/src/hyp3_srg/time_series.py b/src/hyp3_srg/time_series.py
index b226433..458411b 100644
--- a/src/hyp3_srg/time_series.py
+++ b/src/hyp3_srg/time_series.py
@@ -157,7 +157,7 @@ def compute_sbas_velocity_solution(
     num_slcs = 0
     with open(work_dir / 'geolist', 'r') as slc_list:
         num_slcs = len(slc_list.readlines())
-    
+
     sbas_velocity_args = ['unwlist', num_unw_files, num_slcs, unw_width, 'ref_locs']
     utils.call_stanford_module('sbas/sbas', args=sbas_velocity_args, work_dir=work_dir)
 
diff --git a/tests/test_time_series.py b/tests/test_time_series.py
index b9bb0a5..53212dc 100644
--- a/tests/test_time_series.py
+++ b/tests/test_time_series.py
@@ -1,8 +1,4 @@
-from unittest import mock
-
-import pytest
-
-from hyp3_srg import time_series, utils
+from hyp3_srg import time_series
 
 
 def test_create_time_series_product_name():
From 454c3f788c90a5f1b444a36890a81f19ac1e70c5 Mon Sep 17 00:00:00 2001
From: Andrew Player
Date: Fri, 20 Sep 2024 13:34:39 -0500
Subject: [PATCH 49/53] unit test for getting s3 download arguments

---
 src/hyp3_srg/time_series.py | 21 +++++++++++++++++++--
 tests/test_time_series.py | 10 ++++++++++
 2 files changed, 29 insertions(+), 2 deletions(-)

diff --git a/src/hyp3_srg/time_series.py b/src/hyp3_srg/time_series.py
index 458411b..6914574 100644
--- a/src/hyp3_srg/time_series.py
+++ b/src/hyp3_srg/time_series.py
@@ -22,11 +22,17 @@
 log = logging.getLogger(__name__)
 
 
-def download_from_s3(uri: str, dest_dir: Optional[Path] = None) -> None:
-    """Download a file from an S3 bucket
+def get_s3_args(uri: str, dest_dir: Optional[Path] = None) -> tuple:
+    """Retrieve the arguments 
for downloading from an S3 bucket
 
     Args:
         uri: URI of the file to download
+        dest_dir: the directory to place the downloaded file in
+
+    Returns:
+        bucket: the s3 bucket to download from
+        key: the path to the file following the s3 bucket
+        out_path: the destination path of the file to download
     """
     if dest_dir is None:
         dest_dir = Path.cwd()
@@ -35,6 +41,17 @@ def download_from_s3(uri: str, dest_dir: Optional[Path] = None) -> None:
     bucket = simple_s3_uri.parts[0]
     key = '/'.join(simple_s3_uri.parts[1:])
     out_path = dest_dir / simple_s3_uri.parts[-1]
+    return bucket, key, out_path
+
+
+def download_from_s3(uri: str, dest_dir: Optional[Path] = None) -> Path:
+    """Download a file from an S3 bucket
+
+    Args:
+        uri: URI of the file to download
+        dest_dir: the directory to place the downloaded file in
+    """
+    bucket, key, out_path = get_s3_args(uri, dest_dir)
     S3.download_file(bucket, key, out_path)
     return out_path
 
diff --git a/tests/test_time_series.py b/tests/test_time_series.py
index 53212dc..49392b5 100644
--- a/tests/test_time_series.py
+++ b/tests/test_time_series.py
@@ -1,3 +1,5 @@
+from pathlib import Path
+
 from hyp3_srg import time_series
 
 
@@ -39,3 +41,11 @@ def test_get_size_from_dem(tmp_path):
         rsc_file.write(rsc_content.strip())
     dem_width, dem_height = time_series.get_size_from_dem(dem_file=rsc_path)
     assert dem_width, dem_height == (1235, 873)
+
+
+def test_get_s3_args():
+    s3_uri_1 = 's3://foo/bar.zip'
+    s3_uri_2 = 's3://foo/bing/bong/bar.zip'
+    dest_dir = Path('output')
+    assert time_series.get_s3_args(s3_uri_1) == ('foo', 'bar.zip', Path.cwd() / "bar.zip")
+    assert time_series.get_s3_args(s3_uri_2, dest_dir) == ('foo', 'bing/bong/bar.zip', dest_dir / 'bar.zip')
From f51231382ff9c74a48e44b4e12a55df5bda5e8c0 Mon Sep 17 00:00:00 2001
From: Andrew Player
Date: Mon, 23 Sep 2024 12:40:36 -0500
Subject: [PATCH 50/53] Update README.md

Co-authored-by: Forrest Williams <31411324+forrestfwilliams@users.noreply.github.com>
---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 
1 deletion(-) diff --git a/README.md b/README.md index ac7e6a0..231e6bd 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ HyP3 plugin for Stanford Radar Group (SRG) SAR Processor The HyP3-SRG plugin provides a set of workflows (currently only accessible via the docker container) that can be used to process SAR data using the [Stanford Radar Group Processor](https://github.com/asfhyp3/srg). This set of workflow uses the [SRG alogorithms]((https://doi.org/10.1109/LGRS.2017.2753580)) to process Level-0 Sentinel-1 (S1) data to geocoded, user-friendly products that can be used for time-series analysis. The workflows currently included in this plugin are: -- [`back_projection`](#back-projection): A workflow for creating geocoded Sentinel-1 SLCs, +- [`back_projection`](#back-projection): A workflow for creating geocoded Sentinel-1 SLCs. - [`time_series`](#time-series): A workflow for creating a deformation timeseries of geocoded Sentinel-1 SLCs. To run a workflow, you'll first need to build the docker container: From e6441bea59e7c5edc020b0926685027a31301eab Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Mon, 23 Sep 2024 14:04:32 -0500 Subject: [PATCH 51/53] add sentinel value bounds --- src/hyp3_srg/back_projection.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/hyp3_srg/back_projection.py b/src/hyp3_srg/back_projection.py index 3f31972..63938f6 100644 --- a/src/hyp3_srg/back_projection.py +++ b/src/hyp3_srg/back_projection.py @@ -120,7 +120,7 @@ def back_project( bboxs.append(granule_bbox) granule_orbit_pairs.append((granule_path, orbit_path)) - if bounds is None: + if bounds is None or bounds == [0, 0, 0, 0]: bounds = unary_union(bboxs).buffer(0.1).bounds dem_path = dem.download_dem_for_srg(bounds, work_dir) utils.create_param_file(dem_path, dem_path.with_suffix('.dem.rsc'), work_dir) From e7fea6f4d2fee142da4c8929883780f038498d43 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Mon, 23 Sep 2024 14:16:01 -0500 Subject: [PATCH 52/53] 
accept bounds as space delimited string --- src/hyp3_srg/back_projection.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/hyp3_srg/back_projection.py b/src/hyp3_srg/back_projection.py index 63938f6..6d54c70 100644 --- a/src/hyp3_srg/back_projection.py +++ b/src/hyp3_srg/back_projection.py @@ -151,11 +151,15 @@ def main(): parser.add_argument('--bucket-prefix', default='', help='Add a bucket prefix to product(s)') parser.add_argument('--gpu', default=False, action='store_true', help='Use the GPU-based version of the workflow.') parser.add_argument( - '--bounds', default=None, type=float, nargs=4, help='DEM extent bbox: [min_lon, min_lat, max_lon, max_lat].' + '--bounds', default=None, type=str.split, nargs='+', help='DEM extent bbox in EPSG:4326: [min_lon, min_lat, max_lon, max_lat].' ) parser.add_argument('granules', type=str.split, nargs='+', help='Level-0 S1 granule(s) to back-project.') args = parser.parse_args() args.granules = [item for sublist in args.granules for item in sublist] + if args.bounds is not None: + args.bounds = [float(item) for sublist in args.bounds for item in sublist] + if len(args.bounds) != 4: + parser.error('Bounds must have exactly 4 values: [min lon, min lat, max lon, max lat] in EPSG:4326.') back_project(**args.__dict__) From 59c7d61f7c88e94ee15cfe4ee754d8a961d8b3fe Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Mon, 23 Sep 2024 14:19:59 -0500 Subject: [PATCH 53/53] flake8 --- src/hyp3_srg/back_projection.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/hyp3_srg/back_projection.py b/src/hyp3_srg/back_projection.py index 6d54c70..3b6d9a0 100644 --- a/src/hyp3_srg/back_projection.py +++ b/src/hyp3_srg/back_projection.py @@ -151,7 +151,11 @@ def main(): parser.add_argument('--bucket-prefix', default='', help='Add a bucket prefix to product(s)') parser.add_argument('--gpu', default=False, action='store_true', help='Use the GPU-based version of the workflow.') 
parser.add_argument( - '--bounds', default=None, type=str.split, nargs='+', help='DEM extent bbox in EPSG:4326: [min_lon, min_lat, max_lon, max_lat].' + '--bounds', + default=None, + type=str.split, + nargs='+', + help='DEM extent bbox in EPSG:4326: [min_lon, min_lat, max_lon, max_lat].' ) parser.add_argument('granules', type=str.split, nargs='+', help='Level-0 S1 granule(s) to back-project.') args = parser.parse_args()