From 5fe49cbf9791c79c0b2532c0ee97ed2179bf67a3 Mon Sep 17 00:00:00 2001 From: Jake Herrmann Date: Wed, 28 Jun 2023 19:10:34 -0800 Subject: [PATCH 01/16] Add `create-jira-issue.yml` --- .github/workflows/create-jira-issue.yml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 .github/workflows/create-jira-issue.yml diff --git a/.github/workflows/create-jira-issue.yml b/.github/workflows/create-jira-issue.yml new file mode 100644 index 00000000..8ce649de --- /dev/null +++ b/.github/workflows/create-jira-issue.yml @@ -0,0 +1,15 @@ +name: Create Jira issue + +on: + issues: + types: [labeled] + +jobs: + call-create-jira-issue-workflow: + uses: ASFHyP3/actions/.github/workflows/reusable-create-jira-issue.yml@v0.8.0 + secrets: + JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} + JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} + JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} + JIRA_PROJECT: ${{ secrets.JIRA_PROJECT }} + JIRA_FIELDS: ${{ secrets.JIRA_FIELDS }} From 8a25e76c71a8aa3f2165d94bfa33d57efd75b96f Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Thu, 24 Aug 2023 14:32:47 -0800 Subject: [PATCH 02/16] Update v.description for S1 data --- hyp3_autorift/vend/netcdf_output.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/hyp3_autorift/vend/netcdf_output.py b/hyp3_autorift/vend/netcdf_output.py index 08060edc..d12a8882 100755 --- a/hyp3_autorift/vend/netcdf_output.py +++ b/hyp3_autorift/vend/netcdf_output.py @@ -639,10 +639,7 @@ def netCDF_packaging(VX, VY, DX, DY, INTERPMASK, CHIPSIZEX, CHIPSIZEY, SSM, SSM1 var = nc_outfile.createVariable('v', np.dtype('int16'), ('y', 'x'), fill_value=NoDataValue, zlib=True, complevel=2, shuffle=True, chunksizes=ChunkSize) var.setncattr('standard_name', 'land_ice_surface_velocity') - if pair_type == 'radar': - var.setncattr('description', 'velocity magnitude from radar range and azimuth measurements') - else: - var.setncattr('description', 'velocity magnitude') + var.setncattr('description', 'velocity magnitude') var.setncattr('units', 'meter/year') var.setncattr('grid_mapping', mapping_var_name) From ad8298dd03932074da210a91988e7e691675c4a2 Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Thu, 24 Aug 2023 14:45:00 -0800 Subject: [PATCH 03/16] state s1 metadata changes --- CHANGELOG.md | 6 ++++++ hyp3_autorift/vend/CHANGES-227.diff | 15 +++++++++++++++ hyp3_autorift/vend/README.md | 4 ++++ 3 files changed, 25 insertions(+) create mode 100644 hyp3_autorift/vend/CHANGES-227.diff diff --git a/CHANGELOG.md b/CHANGELOG.md index 2cb73c1d..eaf82e90 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,12 @@ and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/) and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+## [0.11.1] + +### Fixed +* Patch [227](hyp3_autorift/vend/CHANGES-227.diff) was applied to align the S1 granules velocity description with the + optical products + ## [0.11.1] ### Fixed diff --git a/hyp3_autorift/vend/CHANGES-227.diff b/hyp3_autorift/vend/CHANGES-227.diff new file mode 100644 index 00000000..53202d5f --- /dev/null +++ b/hyp3_autorift/vend/CHANGES-227.diff @@ -0,0 +1,15 @@ +diff --git netcdf_output.py netcdf_output.py +--- netcdf_output.py ++++ netcdf_output.py +@@ -639,10 +639,7 @@ def netCDF_packaging(VX, VY, DX, DY, INTERPMASK, CHIPSIZEX, CHIPSIZEY, SSM, SSM1 + var = nc_outfile.createVariable('v', np.dtype('int16'), ('y', 'x'), fill_value=NoDataValue, + zlib=True, complevel=2, shuffle=True, chunksizes=ChunkSize) + var.setncattr('standard_name', 'land_ice_surface_velocity') +- if pair_type == 'radar': +- var.setncattr('description', 'velocity magnitude from radar range and azimuth measurements') +- else: +- var.setncattr('description', 'velocity magnitude') ++ var.setncattr('description', 'velocity magnitude') + var.setncattr('units', 'meter/year') + var.setncattr('grid_mapping', mapping_var_name) + diff --git a/hyp3_autorift/vend/README.md b/hyp3_autorift/vend/README.md index c6eaf42d..f0f9defd 100644 --- a/hyp3_autorift/vend/README.md +++ b/hyp3_autorift/vend/README.md @@ -66,3 +66,7 @@ We've replaced it with `hyp3_autorift.io.get_topsinsar_config`. from Sentinel-1 pairs that were created using HyP3 autoRIFT versions < 0.9.0, which was released November 2, 2022 9. The changes listed in `CHANGES-223.diff` were applied in [ASFHyP3/hyp3-autorift#223](https://github.com/ASFHyP3/hyp3-autorift/pull/223) were applied to the S1 correction workflow so that the scene's polarization was set correctly +10. The changes listed in `CHANGES-227.diff` were applied in [ASFHyP3/hyp3-autorift#227](https://github.com/ASFHyP3/hyp3-autorift/pull/227) + were applied to align the S1 granules velocity description with the optical products. These changes have been + [proposed upstream](https://github.com/nasa-jpl/autoRIFT/pull/87) and should be applied in the next + `nasa-jpl/autoRIFT` release. From 95a5e3c6c6a5307b12298c39b0a5e782aff83ecc Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Thu, 24 Aug 2023 14:47:05 -0800 Subject: [PATCH 04/16] Update CHANGELOG.md --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index eaf82e90..fbacede8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,7 +7,7 @@ and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/) and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
-## [0.11.1] +## [0.11.2] ### Fixed * Patch [227](hyp3_autorift/vend/CHANGES-227.diff) was applied to align the S1 granules velocity description with the From 12b4e30409d8209a3dbcb01459add5ecd103ec32 Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Fri, 1 Sep 2023 11:50:12 -0800 Subject: [PATCH 05/16] fix typo --- hyp3_autorift/vend/netcdf_output.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hyp3_autorift/vend/netcdf_output.py b/hyp3_autorift/vend/netcdf_output.py index d12a8882..f20582e4 100755 --- a/hyp3_autorift/vend/netcdf_output.py +++ b/hyp3_autorift/vend/netcdf_output.py @@ -806,7 +806,7 @@ def netCDF_packaging(VX, VY, DX, DY, INTERPMASK, CHIPSIZEX, CHIPSIZEY, SSM, SSM1 var.setncattr('stable_shift_flag', stable_shift_applied) var.setncattr('stable_shift_flag_description', 'flag for applying velocity bias correction: 0 = no correction; ' '1 = correction from overlapping stable surface mask ' - '(stationary or slow-flowing surfaces with velocity < 15 meter/yearr)' + '(stationary or slow-flowing surfaces with velocity < 15 meter/year)' '(top priority); 2 = correction from slowest 25% of overlapping ' 'velocities (second priority)') From eaa4f45f65e476e6c8a310f48cac41d33070e39c Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Fri, 1 Sep 2023 11:51:31 -0800 Subject: [PATCH 06/16] fix typo --- hyp3_autorift/vend/CHANGES-189.diff | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hyp3_autorift/vend/CHANGES-189.diff b/hyp3_autorift/vend/CHANGES-189.diff index acb15fa0..c76e9a7e 100644 --- a/hyp3_autorift/vend/CHANGES-189.diff +++ b/hyp3_autorift/vend/CHANGES-189.diff @@ -295,7 +295,7 @@ diff --git netcdf_output.py netcdf_output.py var.setncattr('stable_shift_flag_description', 'flag for applying velocity bias correction: 0 = no correction; ' '1 = correction from overlapping stable surface mask ' - '(stationary or slow-flowing surfaces with velocity < 15 m/yr)' -+ '(stationary or slow-flowing surfaces with velocity < 15 meter/yearr)' ++ '(stationary or slow-flowing surfaces with velocity < 15 meter/year)' '(top priority); 2 = correction from slowest 25% of overlapping ' 'velocities (second priority)') From 60e3f83677b2d9cde48c98802f188fd5976e9d99 Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Thu, 7 Sep 2023 14:13:52 -0800 Subject: [PATCH 07/16] First pass at cropping granules to valid data --- environment.yml | 2 + hyp3_autorift/crop.py | 117 +++++++++++++++++++++++++++++++++++++++ hyp3_autorift/process.py | 3 + setup.py | 2 + 4 files changed, 124 insertions(+) create mode 100644 hyp3_autorift/crop.py diff --git a/environment.yml b/environment.yml index 0cb3eab9..ff5aafab 100644 --- a/environment.yml +++ b/environment.yml @@ -31,5 +31,7 @@ dependencies: - matplotlib-base - netCDF4 - numpy<1.24 # https://github.com/isce-framework/isce2/pull/639 + - pyproj - requests - scipy + - xarray diff --git a/hyp3_autorift/crop.py b/hyp3_autorift/crop.py new file mode 100644 index 00000000..e2b2b18e --- /dev/null +++ b/hyp3_autorift/crop.py @@ -0,0 +1,117 @@ +# MIT License +# +# Copyright (c) 2020 NASA Jet Propulsion Laboratory +# Modifications (c) Copyright 2023 Alaska Satellite Facility +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit 
persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Crop HyP3 AutoRIFT products to their valid data range, inplace + +This module is based on the ITS_LIVE production script for cropping V2 products +after they have been generated and has been heavily refactored for use in this HyP3 plugin: + +The original script: +https://github.com/nasa-jpl/its_live_production/blob/957e9aba627be2abafcc9601712a7f9c4dd87849/src/tools/crop_v2_granules.py +""" + +from pathlib import Path + +import numpy as np +import pyproj +import xarray as xr + + +ENCODING_TEMPLATE = { + 'interp_mask': {'_FillValue': 0.0, 'dtype': 'ubyte', "zlib": True, "complevel": 2, "shuffle": True}, + 'chip_size_height': {'_FillValue': 0.0, 'dtype': 'ushort', "zlib": True, "complevel": 2, "shuffle": True}, + 'chip_size_width': {'_FillValue': 0.0, 'dtype': 'ushort', "zlib": True, "complevel": 2, "shuffle": True}, + 'M11': {'_FillValue': -32767, 'dtype': 'short', "zlib": True, "complevel": 2, "shuffle": True}, + 'M12': {'_FillValue': -32767, 'dtype': 'short', "zlib": True, "complevel": 2, "shuffle": True}, + 'v': {'_FillValue': -32767.0, 'dtype': 'short', "zlib": True, "complevel": 2, "shuffle": True}, + 'vx': {'_FillValue': -32767.0, 'dtype': 'short', "zlib": True, "complevel": 2, "shuffle": True}, + 'vy': {'_FillValue': -32767.0, 'dtype': 'short', "zlib": True, "complevel": 2, "shuffle": True}, + 'v_error': {'_FillValue': -32767.0, 'dtype': 'short', "zlib": True, "complevel": 2, "shuffle": True}, + 'va': {'_FillValue': -32767.0, 'dtype': 'short', "zlib": True, "complevel": 2, "shuffle": True}, + 'vr': {'_FillValue': -32767.0, 'dtype': 'short', "zlib": True, "complevel": 2, "shuffle": True}, + 'x': {'_FillValue': None}, + 'y': {'_FillValue': None} + } + + +def crop_netcdf_product(netcdf_file: Path): + with xr.open_dataset(netcdf_file) as ds: + # this will drop X/Y coordinates, so drop non-None values just to get X/Y extends + xy_ds = ds.where(ds.v.notnull(), drop=True) + + x_values = xy_ds.x.values + grid_x_min, grid_x_max = x_values.min(), x_values.max() + + y_values = xy_ds.y.values + grid_y_min, grid_y_max = y_values.min(), y_values.max() + + # Based on X/Y extends, mask original dataset + mask_lon = (ds.x >= grid_x_min) & (ds.x <= grid_x_max) + mask_lat = (ds.y >= grid_y_min) & (ds.y <= grid_y_max) + mask = (mask_lon & mask_lat) + + cropped_ds = ds.where(mask, drop=True) + cropped_ds = cropped_ds.load() + + # Reset data for grid_mapping and img_pair_info data variables as ds.where() extends data of all data variables + # to the dimensions of the "mask" + cropped_ds['grid_mapping'] = ds['grid_mapping'] + cropped_ds['img_pair_info'] = ds['img_pair_info'] + + # Compute centroid longitude/latitude + center_x = (grid_x_min + grid_x_max) / 2 + center_y = (grid_y_min + grid_y_max) / 2 + + # Convert to lon/lat coordinates + projection = ds['mapping'].attrs['spatial_epsg'] + 
to_lon_lat_transformer = pyproj.Transformer.from_crs( + f"EPSG:{projection}", + 'EPSG:4326', + always_xy=True + ) + + # Update centroid information for the granule + center_lon_lat = to_lon_lat_transformer.transform(center_x, center_y) + + cropped_ds['mapping'].attrs['latitude'] = round(center_lon_lat[1], 2) + cropped_ds['img_pair_info'].attrs['longitude'] = round(center_lon_lat[0], 2) + + # Update mapping.GeoTransform + x_cell = x_values[1] - x_values[0] + y_cell = y_values[1] - y_values[0] + + # It was decided to keep all values in GeoTransform center-based + cropped_ds['mapping'].attrs['GeoTransform'] = f"{x_values[0]} {x_cell} 0 {y_values[0]} 0 {y_cell}" + + # Compute chunking like AutoRIFT does: + # https://github.com/ASFHyP3/hyp3-autorift/blob/develop/hyp3_autorift/vend/netcdf_output.py#L410-L411 + dims = cropped_ds.dims + chunk_lines = np.min([np.ceil(8192 / dims['y']) * 128, dims['y']]) + two_dim_chunks_settings = (chunk_lines, dims['x']) + + encoding = ENCODING_TEMPLATE.copy() + for _, attributes in encoding.items(): + if attributes['_FillValue'] is not None: + attributes['chunksizes'] = two_dim_chunks_settings + + cropped_ds.to_netcdf(netcdf_file, engine='h5netcdf', encoding=encoding) diff --git a/hyp3_autorift/process.py b/hyp3_autorift/process.py index 0138d688..9ec57980 100644 --- a/hyp3_autorift/process.py +++ b/hyp3_autorift/process.py @@ -26,6 +26,7 @@ from osgeo import gdal from hyp3_autorift import geometry, image, io +from hyp3_autorift.crop import crop_netcdf_product log = logging.getLogger(__name__) @@ -484,6 +485,8 @@ def process(reference: str, secondary: str, parameter_file: str = DEFAULT_PARAME product_file = Path(netcdf_file.replace('.nc', '_IL_ASF_OD.nc')) shutil.move(netcdf_file, str(product_file)) + crop_netcdf_product(product_file) + with Dataset(product_file) as nc: velocity = nc.variables['v'] data = np.ma.masked_values(velocity, -32767.).filled(0) diff --git a/setup.py b/setup.py index 5fd55996..2bf0a134 100644 --- a/setup.py +++ b/setup.py @@ -41,8 +41,10 @@ 'matplotlib', 'netCDF4', 'numpy', + 'pyproj', 'requests', 'scipy', + 'xarray', ], extras_require={ From c10044c1befc46f7288c4aef6af31c48bb771b05 Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Thu, 7 Sep 2023 14:25:37 -0800 Subject: [PATCH 08/16] drop radar only vars for optical products --- hyp3_autorift/crop.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/hyp3_autorift/crop.py b/hyp3_autorift/crop.py index e2b2b18e..67e99cf5 100644 --- a/hyp3_autorift/crop.py +++ b/hyp3_autorift/crop.py @@ -110,6 +110,10 @@ def crop_netcdf_product(netcdf_file: Path): two_dim_chunks_settings = (chunk_lines, dims['x']) encoding = ENCODING_TEMPLATE.copy() + if not netcdf_file.name.startswith('S1'): + for radar_variable in ['M11', 'M12', 'va', 'vr']: + encoding.pop(radar_variable) + for _, attributes in encoding.items(): if attributes['_FillValue'] is not None: attributes['chunksizes'] = two_dim_chunks_settings From 740f044cba33a1daad6be7453fc89b713c1ae36f Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Thu, 7 Sep 2023 14:37:29 -0800 Subject: [PATCH 09/16] del not pop since I don't care removed items --- hyp3_autorift/crop.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/hyp3_autorift/crop.py b/hyp3_autorift/crop.py index 67e99cf5..41245955 100644 --- a/hyp3_autorift/crop.py +++ b/hyp3_autorift/crop.py @@ -55,6 +55,11 @@ def crop_netcdf_product(netcdf_file: Path): + """ + + Args: + netcdf_file: + """ with xr.open_dataset(netcdf_file) as ds: # this will drop X/Y coordinates, 
so drop non-None values just to get X/Y extends xy_ds = ds.where(ds.v.notnull(), drop=True) @@ -112,7 +117,7 @@ def crop_netcdf_product(netcdf_file: Path): encoding = ENCODING_TEMPLATE.copy() if not netcdf_file.name.startswith('S1'): for radar_variable in ['M11', 'M12', 'va', 'vr']: - encoding.pop(radar_variable) + del encoding[radar_variable] for _, attributes in encoding.items(): if attributes['_FillValue'] is not None: From 4f078adea07cb7bf6d6d59d1b51cd5c19795dd4b Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Thu, 7 Sep 2023 16:34:24 -0800 Subject: [PATCH 10/16] a little cleanup/refactoring --- hyp3_autorift/crop.py | 11 +++++++---- hyp3_autorift/process.py | 12 +++++++----- 2 files changed, 14 insertions(+), 9 deletions(-) diff --git a/hyp3_autorift/crop.py b/hyp3_autorift/crop.py index 41245955..7f661687 100644 --- a/hyp3_autorift/crop.py +++ b/hyp3_autorift/crop.py @@ -54,7 +54,7 @@ } -def crop_netcdf_product(netcdf_file: Path): +def crop_netcdf_product(netcdf_file: Path) -> Path: """ Args: @@ -78,9 +78,9 @@ def crop_netcdf_product(netcdf_file: Path): cropped_ds = ds.where(mask, drop=True) cropped_ds = cropped_ds.load() - # Reset data for grid_mapping and img_pair_info data variables as ds.where() extends data of all data variables + # Reset data for mapping and img_pair_info data variables as ds.where() extends data of all data variables # to the dimensions of the "mask" - cropped_ds['grid_mapping'] = ds['grid_mapping'] + cropped_ds['mapping'] = ds['mapping'] cropped_ds['img_pair_info'] = ds['img_pair_info'] # Compute centroid longitude/latitude @@ -123,4 +123,7 @@ def crop_netcdf_product(netcdf_file: Path): if attributes['_FillValue'] is not None: attributes['chunksizes'] = two_dim_chunks_settings - cropped_ds.to_netcdf(netcdf_file, engine='h5netcdf', encoding=encoding) + cropped_file = netcdf_file.with_stem(f'{netcdf_file.stem}_cropped') + cropped_ds.to_netcdf(cropped_file, engine='h5netcdf', encoding=encoding) + + return cropped_file \ No newline at end of file diff --git a/hyp3_autorift/process.py b/hyp3_autorift/process.py index 9ec57980..86371b67 100644 --- a/hyp3_autorift/process.py +++ b/hyp3_autorift/process.py @@ -472,20 +472,22 @@ def process(reference: str, secondary: str, parameter_file: str = DEFAULT_PARAME if netcdf_file is None: raise Exception('Processing failed! 
Output netCDF file not found') + netcdf_file = Path(netcdf_file) + cropped_file = crop_netcdf_product(netcdf_file) + netcdf_file.unlink() + if naming_scheme == 'ITS_LIVE_PROD': - product_file = Path(netcdf_file) + product_file = netcdf_file elif naming_scheme == 'ASF': product_name = get_product_name( reference, secondary, orbit_files=(reference_state_vec, secondary_state_vec), pixel_spacing=parameter_info['xsize'], ) product_file = Path(f'{product_name}.nc') - shutil.move(netcdf_file, str(product_file)) else: - product_file = Path(netcdf_file.replace('.nc', '_IL_ASF_OD.nc')) - shutil.move(netcdf_file, str(product_file)) + product_file = netcdf_file.with_stem(f'{netcdf_file.stem}_IL_ASF_OD') - crop_netcdf_product(product_file) + shutil.move(cropped_file, str(product_file)) with Dataset(product_file) as nc: velocity = nc.variables['v'] From bf2c3d75370aacf2c807675c98cb3fd5f13a357f Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Fri, 8 Sep 2023 09:46:35 -0800 Subject: [PATCH 11/16] Add h5netcdf for performance reasons --- environment.yml | 1 + hyp3_autorift/crop.py | 2 +- setup.py | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/environment.yml b/environment.yml index ff5aafab..d713bda9 100644 --- a/environment.yml +++ b/environment.yml @@ -23,6 +23,7 @@ dependencies: - wheel # For running - gdal>=3 + - h5netcdf - hyp3lib=1.7.0 - isce2=2.6.1.dev7 - autorift=1.5.0 diff --git a/hyp3_autorift/crop.py b/hyp3_autorift/crop.py index 7f661687..fa94e45f 100644 --- a/hyp3_autorift/crop.py +++ b/hyp3_autorift/crop.py @@ -126,4 +126,4 @@ def crop_netcdf_product(netcdf_file: Path) -> Path: cropped_file = netcdf_file.with_stem(f'{netcdf_file.stem}_cropped') cropped_ds.to_netcdf(cropped_file, engine='h5netcdf', encoding=encoding) - return cropped_file \ No newline at end of file + return cropped_file diff --git a/setup.py b/setup.py index 2bf0a134..ddd5742f 100644 --- a/setup.py +++ b/setup.py @@ -37,6 +37,7 @@ 'boto3', 'botocore', 'gdal', + 'h5netcdf', 'hyp3lib==1.7.0', 'matplotlib', 'netCDF4', From f562e6a66bac46b19e9d3246706a1b9baaaeff16 Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Fri, 8 Sep 2023 09:54:12 -0800 Subject: [PATCH 12/16] Update README for cropping --- CHANGELOG.md | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index fbacede8..f3c5cf6b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,7 +7,14 @@ and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/) and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
-## [0.11.2] +## [0.12.0] + +### Added +* [`hyp3_autorift.crop`](hyp3_autorift/crop.py) provides a `crop_netcdf_product` function to crop HyP3 AutoRIFT products + to the extent of valid `v` data + +### Changed +* HyP3 AutoRIFT products generated with the main workflow will be cropped to the extent of the valid `v` data ### Fixed * Patch [227](hyp3_autorift/vend/CHANGES-227.diff) was applied to align the S1 granules velocity description with the From d9494205db36df65f0e384d01de617312d23975a Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Fri, 15 Sep 2023 15:40:09 -0500 Subject: [PATCH 13/16] use dropna instead of drop=True --- hyp3_autorift/crop.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/hyp3_autorift/crop.py b/hyp3_autorift/crop.py index fa94e45f..221f9370 100644 --- a/hyp3_autorift/crop.py +++ b/hyp3_autorift/crop.py @@ -62,7 +62,7 @@ def crop_netcdf_product(netcdf_file: Path) -> Path: """ with xr.open_dataset(netcdf_file) as ds: # this will drop X/Y coordinates, so drop non-None values just to get X/Y extends - xy_ds = ds.where(ds.v.notnull(), drop=True) + xy_ds = ds.where(ds.v.notnull()).dropna(dim='x', how='all').dropna(dim='y', how='all') x_values = xy_ds.x.values grid_x_min, grid_x_max = x_values.min(), x_values.max() @@ -75,7 +75,7 @@ def crop_netcdf_product(netcdf_file: Path) -> Path: mask_lat = (ds.y >= grid_y_min) & (ds.y <= grid_y_max) mask = (mask_lon & mask_lat) - cropped_ds = ds.where(mask, drop=True) + cropped_ds = ds.where(mask).dropna(dim='x', how='all').dropna(dim='y', how='all') cropped_ds = cropped_ds.load() # Reset data for mapping and img_pair_info data variables as ds.where() extends data of all data variables From 2c343c8dc4b9385be3d59a294ef266c343b39619 Mon Sep 17 00:00:00 2001 From: Andrew Player Date: Fri, 15 Sep 2023 16:11:22 -0500 Subject: [PATCH 14/16] update changelog and add diff --- CHANGELOG.md | 5 +++++ hyp3_autorift/vend/CHANGES-232.diff | 21 +++++++++++++++++++++ 2 files changed, 26 insertions(+) create mode 100644 hyp3_autorift/vend/CHANGES-232.diff diff --git a/CHANGELOG.md b/CHANGELOG.md index f3c5cf6b..4277dc5a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,11 @@ and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/) and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.12.1] + +### Fixed +* Patch [232](hyp3_autorift/vend/CHANGES-232.diff) was applied to significantly improve the performance of `crop_netcdf_product`. 
+ ## [0.12.0] ### Added diff --git a/hyp3_autorift/vend/CHANGES-232.diff b/hyp3_autorift/vend/CHANGES-232.diff new file mode 100644 index 00000000..4237611c --- /dev/null +++ b/hyp3_autorift/vend/CHANGES-232.diff @@ -0,0 +1,21 @@ +diff --git crop.py crop.py +--- a/hyp3_autorift/crop.py ++++ b/hyp3_autorift/crop.py +@@ -62,7 +62,7 @@ def crop_netcdf_product(netcdf_file: Path) -> Path: + """ + with xr.open_dataset(netcdf_file) as ds: + # this will drop X/Y coordinates, so drop non-None values just to get X/Y extends +- xy_ds = ds.where(ds.v.notnull(), drop=True) ++ xy_ds = ds.where(ds.v.notnull()).dropna(dim='x', how='all').dropna(dim='y', how='all') + + x_values = xy_ds.x.values + grid_x_min, grid_x_max = x_values.min(), x_values.max() +@@ -75,7 +75,7 @@ def crop_netcdf_product(netcdf_file: Path) -> Path: + mask_lat = (ds.y >= grid_y_min) & (ds.y <= grid_y_max) + mask = (mask_lon & mask_lat) + +- cropped_ds = ds.where(mask, drop=True) ++ cropped_ds = ds.where(mask).dropna(dim='x', how='all').dropna(dim='y', how='all') + cropped_ds = cropped_ds.load() + + # Reset data for mapping and img_pair_info data variables as ds.where() extends data of all data variables \ No newline at end of file From dab90fbd585cca295fb7177b7c96194a32510db0 Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Tue, 19 Sep 2023 13:56:36 -0800 Subject: [PATCH 15/16] Haven't release yet and don't need to state changes to (not in the vend dir) --- CHANGELOG.md | 5 ----- hyp3_autorift/vend/CHANGES-232.diff | 21 --------------------- 2 files changed, 26 deletions(-) delete mode 100644 hyp3_autorift/vend/CHANGES-232.diff diff --git a/CHANGELOG.md b/CHANGELOG.md index 4277dc5a..f3c5cf6b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,11 +7,6 @@ and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/) and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html). -## [0.12.1] - -### Fixed -* Patch [232](hyp3_autorift/vend/CHANGES-232.diff) was applied to significantly improve the performance of `crop_netcdf_product`. 
- ## [0.12.0] ### Added diff --git a/hyp3_autorift/vend/CHANGES-232.diff b/hyp3_autorift/vend/CHANGES-232.diff deleted file mode 100644 index 4237611c..00000000 --- a/hyp3_autorift/vend/CHANGES-232.diff +++ /dev/null @@ -1,21 +0,0 @@ -diff --git crop.py crop.py ---- a/hyp3_autorift/crop.py -+++ b/hyp3_autorift/crop.py -@@ -62,7 +62,7 @@ def crop_netcdf_product(netcdf_file: Path) -> Path: - """ - with xr.open_dataset(netcdf_file) as ds: - # this will drop X/Y coordinates, so drop non-None values just to get X/Y extends -- xy_ds = ds.where(ds.v.notnull(), drop=True) -+ xy_ds = ds.where(ds.v.notnull()).dropna(dim='x', how='all').dropna(dim='y', how='all') - - x_values = xy_ds.x.values - grid_x_min, grid_x_max = x_values.min(), x_values.max() -@@ -75,7 +75,7 @@ def crop_netcdf_product(netcdf_file: Path) -> Path: - mask_lat = (ds.y >= grid_y_min) & (ds.y <= grid_y_max) - mask = (mask_lon & mask_lat) - -- cropped_ds = ds.where(mask, drop=True) -+ cropped_ds = ds.where(mask).dropna(dim='x', how='all').dropna(dim='y', how='all') - cropped_ds = cropped_ds.load() - - # Reset data for mapping and img_pair_info data variables as ds.where() extends data of all data variables \ No newline at end of file From 39a01c4624ced7cc10ed2f27e8568f57d0f81492 Mon Sep 17 00:00:00 2001 From: Joseph H Kennedy Date: Tue, 19 Sep 2023 13:58:21 -0800 Subject: [PATCH 16/16] fix attribute typo --- hyp3_autorift/crop.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hyp3_autorift/crop.py b/hyp3_autorift/crop.py index 221f9370..4e14ab6a 100644 --- a/hyp3_autorift/crop.py +++ b/hyp3_autorift/crop.py @@ -98,7 +98,7 @@ def crop_netcdf_product(netcdf_file: Path) -> Path: # Update centroid information for the granule center_lon_lat = to_lon_lat_transformer.transform(center_x, center_y) - cropped_ds['mapping'].attrs['latitude'] = round(center_lon_lat[1], 2) + cropped_ds['img_pair_info'].attrs['latitude'] = round(center_lon_lat[1], 2) cropped_ds['img_pair_info'].attrs['longitude'] = round(center_lon_lat[0], 2) # Update mapping.GeoTransform
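A minimal usage sketch, appended after the patch series for illustration only: it shows how the `crop_netcdf_product` helper introduced in PATCH 07/16 (and refined in PATCH 10/16 to return the cropped file's path) is expected to be called. The filename `example.nc` is hypothetical, and the sketch assumes the patched `hyp3_autorift` package and its dependencies (`xarray`, `h5netcdf`, `pyproj`) are installed; it is not part of the patches themselves.

```python
# Illustrative sketch only -- assumes the hyp3_autorift package from this patch
# series is installed and that `example.nc` is an existing AutoRIFT netCDF product.
from pathlib import Path

import xarray as xr

from hyp3_autorift.crop import crop_netcdf_product

product = Path('example.nc')             # hypothetical input product
cropped = crop_netcdf_product(product)   # writes `example_cropped.nc` next to the input

with xr.open_dataset(cropped) as ds:
    # After cropping, the x/y dimensions match the bounding box of valid `v` data,
    # and mapping.GeoTransform has been updated to the new (center-based) origin.
    print(ds.sizes['x'], ds.sizes['y'])
    print(ds['mapping'].attrs['GeoTransform'])
```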