diff --git a/.gitignore b/.gitignore index 1333c69..a44f3d6 100644 --- a/.gitignore +++ b/.gitignore @@ -162,4 +162,6 @@ scripts/optimize/param_sds !/results/site_param_rmses/*.csv !/contrib/palomaki/*.csv -*.sqlite \ No newline at end of file +*.sqlite + +*.nfs \ No newline at end of file diff --git a/SnowEx-Data/.nfs000000013a9d95c700004d8b b/SnowEx-Data/.nfs000000013a9d95c700004d8b new file mode 100644 index 0000000..235f25d Binary files /dev/null and b/SnowEx-Data/.nfs000000013a9d95c700004d8b differ diff --git a/SnowEx-Data/.nfs000000013ac787a900004d8c b/SnowEx-Data/.nfs000000013ac787a900004d8c new file mode 100644 index 0000000..8b0c883 Binary files /dev/null and b/SnowEx-Data/.nfs000000013ac787a900004d8c differ diff --git a/contrib/gagliano/wet_snow_testing.ipynb b/contrib/gagliano/wet_snow_testing.ipynb index decc39d..f6d6635 100644 --- a/contrib/gagliano/wet_snow_testing.ipynb +++ b/contrib/gagliano/wet_snow_testing.ipynb @@ -96,7 +96,7 @@ " ds_name = f.split('stacks/')[-1].split('.')[0]\n", " print(datetime.now(), f' -- starting {ds_name}')\n", "\n", - " if Path(f'rmse_out/{ds_name}_wet_flag.nc').is_file():\n", + " if Path(f'rmse_out_with_si/{ds_name}_wet_flag.nc').is_file():\n", " print('This file already exists, continuing.')\n", " continue\n", " \n", @@ -124,7 +124,7 @@ " ds = calc_snow_index(ds)\n", " ds = calc_snow_index_to_snow_depth(ds, C=c, inplace=False)\n", " ds = id_newly_wet_snow(ds,wet_thresh=wst)\n", - " ds = id_wet_negative_si_test(ds)\n", + " ds = id_wet_negative_si(ds,wet_SI_thresh=0) #change to test depending on whether we want neg SI to flag as wet snow\n", " ds = id_newly_frozen_snow(ds,freeze_thresh=-1*wst)\n", " ds = flag_wet_snow(ds)\n", " # Compare snow depths - mask wet snow\n", @@ -144,9 +144,9 @@ " print(f'Frac valid pixels = {mask_wet.sum() /mask.sum():0.2f}')\n", "\n", " # After loop, save RMSE results per file\n", - " rmse_wet_flag.to_netcdf(f'rmse_out/{ds_name}_wet_flag.nc')\n", - " rmse_no_flag.to_netcdf(f'rmse_out/{ds_name}_no_flag.nc')\n", - " valid_pixels.to_netcdf(f'rmse_out/{ds_name}_valid_pixels.nc')\n", + " rmse_wet_flag.to_netcdf(f'rmse_out_with_si/{ds_name}_wet_flag.nc')\n", + " rmse_no_flag.to_netcdf(f'rmse_out_with_si/{ds_name}_no_flag.nc')\n", + " valid_pixels.to_netcdf(f'rmse_out_with_si/{ds_name}_valid_pixels.nc')\n", " " ] }, @@ -156,11 +156,21 @@ "metadata": {}, "outputs": [], "source": [ - "results = sorted(glob('rmse_out/*.nc'))\n", + "with_neg_si_flag_wet_snow = True\n", + "\n", + "directory = 'rmse_out_with_si'\n", + "#directory = 'rmse_out'\n", + "\n", + "#if with_neg_si_flag_wet_snow == True:\n", + "# directory = 'rmse_out_full_with_si'\n", + "#else:\n", + "# directory = 'rmse_out_full'\n", + "\n", + "results = sorted(glob(f'{directory}/*.nc'))\n", "names = []\n", "for f in results:\n", " if 'no_flag' in f:\n", - " ds_name = f.split('rmse_out/')[-1]\n", + " ds_name = f.split(f'{directory}/')[-1]\n", " ds_name = ds_name.split('_no')[0]\n", " names.append(ds_name)\n", "\n", @@ -172,19 +182,19 @@ "for f in results:\n", " if 'wet_flag' in f:\n", " r = xr.open_dataarray(f).load()\n", - " ds_name = f.split('rmse_out/')[-1]\n", + " ds_name = f.split(f'{directory}/')[-1]\n", " ds_name = ds_name.split('_wet')[0]\n", " for ind,val in zip(r.wet_snow_thresh.values,r.values):\n", " thresh_results.loc[ind,ds_name] = val\n", " if 'no_flag' in f:\n", " r = xr.open_dataarray(f).load()\n", - " ds_name = f.split('rmse_out/')[-1]\n", + " ds_name = f.split(f'{directory}/')[-1]\n", " ds_name = ds_name.split('_no')[0]\n", " for ind,val in 
zip(r.wet_snow_thresh.values,r.values):\n", " no_thresh_results.loc[ind,ds_name] = val\n", " if 'valid' in f:\n", " r = xr.open_dataarray(f).load()\n", - " ds_name = f.split('rmse_out/')[-1]\n", + " ds_name = f.split(f'{directory}/')[-1]\n", " ds_name = ds_name.split('_valid')[0]\n", " for ind,val in zip(r.wet_snow_thresh.values,r.values):\n", " valid_pixels_results.loc[ind,ds_name] = val\n" @@ -251,7 +261,7 @@ " ds_name = f.split('stacks/')[-1].split('.')[0]\n", " print(datetime.now(), f' -- starting {ds_name}')\n", "\n", - " if Path(f'rmse_out_full/{ds_name}_wet_flag.nc').is_file():\n", + " if Path(f'rmse_out_full_with_si/{ds_name}_wet_flag.nc').is_file():\n", " print('This file already exists, continuing.')\n", " continue\n", "\n", @@ -280,7 +290,7 @@ " ds = calc_snow_index(ds)\n", " ds = calc_snow_index_to_snow_depth(ds, C=c, inplace=False)\n", " ds = id_newly_wet_snow(ds,wet_thresh=wst)\n", - " ds = id_wet_negative_si_test(ds)\n", + " ds = id_wet_negative_si(ds) #change to test depending on whether to remove neg SI = wet snow\n", " ds = id_newly_frozen_snow(ds,freeze_thresh=-1*wst)\n", " ds = flag_wet_snow(ds)\n", " # Compare snow depths - mask wet snow\n", @@ -300,9 +310,9 @@ " print(f'Frac valid pixels = {mask_wet.sum()/ mask.sum():0.2f}')\n", "\n", " # After loop, save RMSE results per file\n", - " rmse_wet_flag.to_netcdf(f'rmse_out_full/{ds_name}_wet_flag.nc')\n", - " rmse_no_flag.to_netcdf(f'rmse_out_full/{ds_name}_no_flag.nc')\n", - " valid_pixels.to_netcdf(f'rmse_out_full/{ds_name}_valid_pixels.nc')\n", + " rmse_wet_flag.to_netcdf(f'rmse_out_full_with_si/{ds_name}_wet_flag.nc')\n", + " rmse_no_flag.to_netcdf(f'rmse_out_full_with_si/{ds_name}_no_flag.nc')\n", + " valid_pixels.to_netcdf(f'rmse_out_full_with_si/{ds_name}_valid_pixels.nc')\n", + " " ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ + "directory = 'rmse_out_full'\n", + "directory = 'rmse_out_full_with_si'\n", + "\n", "which_site = 5\n", "\n", - "results1 = sorted(glob('rmse_out_full/*wet*.nc'))\n", - "results2 = sorted(glob('rmse_out_full/*no*.nc'))\n", - "results3 = sorted(glob('rmse_out_full/*valid*.nc'))\n", + "results1 = sorted(glob(f'{directory}/*wet*.nc'))\n", + "results2 = sorted(glob(f'{directory}/*no*.nc'))\n", + "results3 = sorted(glob(f'{directory}/*valid*.nc'))\n", "\n", "wet_snow = xr.open_dataarray(results1[which_site])\n", "all_snow = xr.open_dataarray(results2[which_site])\n", @@ -345,6 +358,27 @@ " plt.tight_layout()" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "files = sorted(glob('spicy_s1_stacks/*.nc'))\n", + "\n", + "\n", + "for f in files:\n", + " ds_name = f.split('stacks/')[-1].split('.')[0]\n", + " print(datetime.now(), f' -- starting {ds_name}')\n", + "\n", + " # Open dataset \n", + " ds_ = xr.open_dataset(f).load()\n", + " dataset = ds_[['s1','deltaVV','ims','fcf','lidar-sd']]\n", + " td = abs(pd.to_datetime(dataset.time) - pd.to_datetime(dataset.attrs['lidar-flight-time']))\n", + " closest_ts_idx = np.where(td == td.min())[0][0]\n", + " closest_ts = dataset.time[closest_ts_idx]" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -356,7 +390,7 @@ "c = 0.55\n", "wst = -3\n", "\n", - "for wst in [-4,-3,-2,-1,0,1,2,3,4]:\n", + "for wst in [-4,-3,-2,-1,0]:\n", " ds = calc_delta_cross_ratio(dataset, A=a, inplace=False)\n", " ds = calc_delta_gamma(ds, B=b, inplace=False)\n", " print(f'A={a:0.2f}; B={b:0.2f}; C={c:0.2f}; wst={wst:0.2f}')\n", diff --git a/contrib/keskinen/optimization/param_ridgeplots.ipynb 
b/contrib/keskinen/optimization/param_ridgeplots.ipynb new file mode 100644 index 0000000..dc591ac --- /dev/null +++ b/contrib/keskinen/optimization/param_ridgeplots.ipynb @@ -0,0 +1,475 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from pathlib import Path\n", + "\n", + "import numpy as np\n", + "import pandas as pd\n", + "import xarray as xr\n", + "\n", + "import matplotlib.pyplot as plt\n", + "import seaborn as sns" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "data_fp = '/bsuhome/zacharykeskinen/spicy-snow/data/res_ds_iter_large.nc'\n", + "\n", + "ds = xr.open_dataset(data_fp)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import matplotlib.gridspec as grid_spec\n", + "from matplotlib import colormaps\n", + "cmap = colormaps.get_cmap('RdBu')\n", + "colors = cmap(np.linspace(0.1, 0.4, len(ds.location)))\n", + "\n", + "fig = plt.figure(layout='constrained', figsize=(10, 4))\n", + "subfigs = fig.subfigures(1, 3, wspace=0.07)\n", + "params = ['A', 'B', 'C']\n", + "\n", + "# sorted = ds.reindex(location = ds['pearsonr'].isel(ds['pearsonr'].argmin(['A', 'C'])).idxmin('B').mean('iteration').to_dataframe().sort_values('B').to_xarray().location[::-1])\n", + "\n", + "loc_sorted = ['Little_Cottonwood_2021-03-18', 'Mores_2021-03-15',\\\n", + " 'Frasier_2020-02-11', 'Banner_2020-02-18', 'Banner_2021-03-15',\\\n", + " 'Dry_Creek_2020-02-19'] #, 'Cameron_2021-03-19', 'Frasier_2021-03-19' 'Mores_2020-02-09'\n", + "\n", + "sorted = ds.reindex(location = loc_sorted)\n", + "\n", + "clips = [[0.5, 3], [0, 1], [0, 1]]\n", + "\n", + "param_stat = {'A':'pearsonr', 'B':'pearsonr', 'C':'mae'}\n", + "\n", + "for i, (param, subfig, clip,) in enumerate(zip(params, subfigs, clips)):\n", + " axes = subfig.subplots(len(sorted.location), gridspec_kw = {'hspace': -0.7})\n", + " other_params = params[:]\n", + " other_params.remove(param)\n", + "\n", + " stat_name = param_stat[param]\n", + "\n", + " for loc, color, ax in zip(sorted.location, colors, axes):\n", + "\n", + " param_fp = Path('/bsuhome/zacharykeskinen/scratch/param_regional')\n", + " lidar_orig = np.load(param_fp.joinpath(str(loc.data), 'lidar.npy'))\n", + " \n", + " param_1, param_2 = other_params\n", + " a = sorted.loc[{param_1 : sorted[param_stat[param_1]].idxmax(param_1)}]\n", + " if param_2 == 'C':\n", + " data = a.loc[{param_2 : a[param_stat[param_2]].idxmin(param_2)}]\n", + " else:\n", + " data = a.loc[{param_2 : a[param_stat[param_2]].idxmax(param_2)}]\n", + " \n", + " if param == 'C':\n", + " print(data.sel(location = loc)['mae'].min())\n", + " data = data.idxmin(param)\n", + " else:\n", + " data = data.idxmax(param)\n", + " \n", + " data = data.sel(location = loc)\n", + " \n", + " data = data[param_stat[param]].data\n", + " # data = data + np.random.random(data.shape)/1000\n", + "\n", + " sns.kdeplot(data, color = color, \\\n", + " fill = True, alpha = 1.0, ax= ax, clip = clip, warn_singular=False, zorder = 1)\n", + " ax.set_xlim(clip)\n", + " ax.set_yticks([])\n", + " ax.set_ylabel('')\n", + "\n", + " rect = ax.patch\n", + " rect.set_alpha(0)\n", + "\n", + " spines = [\"top\", \"right\", \"left\", \"bottom\"]\n", + " for s in spines:\n", + " ax.spines[s].set_visible(False)\n", + " \n", + " if i == 0:\n", + " site_name = str(loc.data).replace('_', ' ').replace('Little Cottonwood', 'LCC').split('-')[0]\n", + " ax.text(-0.02, 0, 
site_name, fontweight = 'bold', ha = 'right', transform = ax.transAxes, zorder = 1e5)\n", + "\n", + "\n", + " for ax in axes[:-1]:\n", + " ax.set_xticks([])\n", + "\n", + " stat_title= {'mae':'Mean Error Minimized', 'pearsonr':'Correlation Maximized'}\n", + " subfig.suptitle(f'{param} - {stat_title[param_stat[param]]}')\n", + "\n", + "plt.tight_layout()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import matplotlib.gridspec as grid_spec\n", + "from matplotlib import colormaps\n", + "cmap = colormaps.get_cmap('RdBu')\n", + "colors = cmap(np.linspace(0.1, 0.4, len(ds.location)))\n", + "\n", + "fig = plt.figure(layout='constrained', figsize=(10, 4))\n", + "subfigs = fig.subfigures(2, 3, wspace=0.07)\n", + "params = ['A', 'B', 'C']\n", + "\n", + "# sorted = ds.reindex(location = ds['pearsonr'].isel(ds['pearsonr'].argmin(['A', 'C'])).idxmin('B').mean('iteration').to_dataframe().sort_values('B').to_xarray().location[::-1])\n", + "\n", + "loc_sorted = ['Little_Cottonwood_2021-03-18', 'Mores_2021-03-15',\\\n", + " 'Frasier_2020-02-11', 'Banner_2020-02-18', 'Banner_2021-03-15',\\\n", + " 'Dry_Creek_2020-02-19'] #'Cameron_2021-03-19', 'Frasier_2021-03-19' 'Mores_2020-02-09',\n", + "\n", + "sorted = ds.reindex(location = loc_sorted)\n", + "sorted = sorted.isel(iteration = slice(50))\n", + "\n", + "clips = [[1, 3], [0, 1], [0, 1]]\n", + "\n", + "param_stat = {'A':'pearsonr', 'B':'pearsonr', 'C':'mae'}\n", + "\n", + "# for i, (param, subfig, clip,) in enumerate(zip(params, subfigs[0, :], clips)):\n", + "# axes = subfig.subplots(len(ds.location), gridspec_kw = {'hspace': -0.7})\n", + "# other_params = params[:]\n", + "# other_params.remove(param)\n", + "\n", + "# stat_name = param_stat[param]\n", + "\n", + "# for loc, color, ax in zip(sorted.location, colors, axes):\n", + "\n", + "# param_fp = Path('/bsuhome/zacharykeskinen/scratch/param_regional')\n", + "# lidar_orig = np.load(param_fp.joinpath(str(loc.data), 'lidar.npy'))\n", + " \n", + "# param_1, param_2 = other_params\n", + "# a = sorted.loc[{param_1 : sorted[param_stat[param_1]].idxmax(param_1)}]\n", + "# if param_2 == 'C':\n", + "# data = a.loc[{param_2 : a[param_stat[param_2]].idxmin(param_2)}]\n", + "# else:\n", + "# data = a.loc[{param_2 : a[param_stat[param_2]].idxmax(param_2)}]\n", + " \n", + "# if param == 'C':\n", + "# data = data.idxmin(param)\n", + "# else:\n", + "# data = data.idxmax(param)\n", + " \n", + "# data = data.sel(location = loc)\n", + " \n", + "# data = data[param_stat[param]].data\n", + "# # data = data + np.random.random(data.shape)/1000\n", + "\n", + "# sns.kdeplot(data, color = color, \\\n", + "# fill = True, alpha = 1.0, ax= ax, clip = clip, warn_singular=False, zorder = 1)\n", + "# ax.set_xlim(clip)\n", + "# ax.set_yticks([])\n", + "# ax.set_ylabel('')\n", + "\n", + "# rect = ax.patch\n", + "# rect.set_alpha(0)\n", + "\n", + "# spines = [\"top\", \"right\", \"left\", \"bottom\"]\n", + "# for s in spines:\n", + "# ax.spines[s].set_visible(False)\n", + " \n", + "# if i == 0:\n", + "# site_name = str(loc.data).replace('_', ' ').replace('Little Cottonwood', 'LCC').split('-')[0]\n", + "# ax.text(-0.02, 0, site_name, fontweight = 'bold', ha = 'right', transform = ax.transAxes, zorder = 1e5)\n", + "\n", + "\n", + "# for ax in axes[:-1]:\n", + "# ax.set_xticks([])\n", + "\n", + "# stat_title= {'mae':'Mean Error Minimized', 'pearsonr':'Correlation Maximized'}\n", + "# subfig.suptitle(f'{param} - {stat_title[param_stat[param]]}')\n", + "\n", + "\n", + "for i, 
(subfig, param) in enumerate(zip(subfigs[1, :], params)):\n", + "\n", + " ax = subfig.subplots(1, 1)\n", + " other_params = params[:]\n", + " other_params.remove(param)\n", + "\n", + " for loc in sorted.location:\n", + "\n", + " ds_fp = Path('/bsuhome/zacharykeskinen/scratch/SnowEx-Data')\n", + " full_ds = xr.load_dataset(ds_fp.joinpath(str(loc.data)+'.nc'))\n", + " \n", + " param_1, param_2 = other_params\n", + " a = sorted.loc[{param_1 : sorted[param_stat[param_1]].idxmax(param_1)}]\n", + " if param_2 == 'C':\n", + " data = a.loc[{param_2 : a[param_stat[param_2]].idxmin(param_2)}]\n", + " else:\n", + " data = a.loc[{param_2 : a[param_stat[param_2]].idxmax(param_2)}]\n", + " \n", + " if param == 'C':\n", + " data = data.idxmin(param)\n", + " else:\n", + " data = data.idxmax(param)\n", + " \n", + " data = data.sel(location = loc)\n", + " \n", + " data = data[param_stat[param]].data\n", + "\n", + " if param == 'C':\n", + " CR = full_ds['s1'].sel(band = 'VH') - full_ds['s1'].sel(band = 'VV')\n", + " x = CR.max('time').mean()\n", + " # x = full_ds['deltaGamma'].max('time').mean()\n", + " # x = full_ds['s1'].sel(band = 'VV').max('time').mean()\n", + " else:\n", + " trees = full_ds.where(full_ds['fcf'] > 0.5)\n", + " x = (full_ds['deltaVV'].mean('time') / full_ds['lidar-sd']).mean()\n", + " # y = trees['deltaVV'].mean(dim = 'time')\n", + " # x = (y / (trees['fcf'] + 0.01)).mean()\n", + " # x = full_ds['s1'].sel(band = 'VV').max('time')# / full_ds['fcf'] # / full_ds['deltaCR']\n", + " # s1 = full_ds['s1']\n", + " # trees = full_ds.where(full_ds['fcf'] > 0.5)\n", + " # x = full_ds['s1'].sel(band = 'VH').diff('time').mean('time') / (full_ds['fcf'] + 1)\n", + " # x = xr.where(x > 1000, np.nan, x)\n", + " # x = xr.where(x > -1000, x, np.nan)\n", + " # x = trees['deltaCR'] / trees['deltaVV']\n", + " # CR = full_ds['s1'].sel(band = 'VH') - full_ds['s1'].sel(band = 'VV')\n", + " # x = full_ds['fcf']\n", + " # x = x.mean()\n", + " # if x < -0.5:\n", + " # print(loc)\n", + " # x = np.nan\n", + " \n", + " ax.scatter(x, np.mean(data), label = loc)\n", + " # plt.legend()\n", + "plt.tight_layout()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "params = ['A', 'B', 'C']\n", + "\n", + "loc_sorted = [ 'Cameron_2021-03-19','Little_Cottonwood_2021-03-18', 'Mores_2020-02-09', 'Mores_2021-03-15',\\\n", + " 'Frasier_2020-02-11', 'Frasier_2021-03-19', 'Banner_2020-02-18', 'Banner_2021-03-15',\\\n", + " 'Dry_Creek_2020-02-19']\n", + "sorted = ds.reindex(location = loc_sorted)\n", + "\n", + "best_c = sorted.loc[{'C' : sorted['mae'].idxmin('C')}]\n", + "best_c = best_c.mean('iteration')\n", + "for loc, sub_ds in best_c.groupby('location'):\n", + " sub_ds['pearsonr'].plot()\n", + " plt.show()" + ] + }, + { + "cell_type": "code", + "execution_count": 134, + "metadata": {}, + "outputs": [], + "source": [ + "files = Path('/bsuhome/zacharykeskinen/spicy-snow/Lidar_s1_stacks/').glob('*.nc')" + ] + }, + { + "cell_type": "code", + "execution_count": 135, + "metadata": {}, + "outputs": [], + "source": [ + "import py3dep\n", + "import rioxarray as rxa\n", + "files = Path('/bsuhome/zacharykeskinen/spicy-snow/Lidar_s1_stacks/').glob('*.nc')\n", + "for fp in files:\n", + " ds = xr.load_dataset(fp)\n", + " dem = py3dep.get_dem(ds.rio.bounds(), 30)\n", + " ds['dem'] = dem.drop(['band', 'spatial_ref']).interp_like(ds)\n", + " ds = ds.drop('lidar-dem')\n", + " ds.to_netcdf(fp.with_suffix('.fix.nc'))\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + 
"metadata": {}, + "outputs": [], + "source": [ + "files = Path('/bsuhome/zacharykeskinen/spicy-snow/SnowEx-Data').glob('*fix.nc')\n", + "for fp in files:\n", + " ds = xr.load_dataset(fp)" + ] + }, + { + "cell_type": "code", + "execution_count": 164, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "2023-08-29 17:47:10.403288 -- starting Frasier_2020-02-11\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + " 0%| | 0/5 [00:00 71\u001b[0m ds \u001b[39m=\u001b[39m flag_wet_snow(ds)\n\u001b[1;32m 72\u001b[0m \u001b[39mfor\u001b[39;00m c \u001b[39min\u001b[39;00m C:\n\u001b[1;32m 73\u001b[0m \u001b[39m# print(f' c: {c}')\u001b[39;00m\n\u001b[1;32m 74\u001b[0m \u001b[39m# print(f'A={a}; B={b}; C={c}')\u001b[39;00m\n\u001b[1;32m 76\u001b[0m ds \u001b[39m=\u001b[39m calc_snow_index_to_snow_depth(ds, C \u001b[39m=\u001b[39m c)\n", + "File \u001b[0;32m~/spicy-snow/spicy_snow/processing/wet_snow.py:153\u001b[0m, in \u001b[0;36mflag_wet_snow\u001b[0;34m(dataset, inplace)\u001b[0m\n\u001b[1;32m 150\u001b[0m prev_time \u001b[39m=\u001b[39m \u001b[39mNone\u001b[39;00m\n\u001b[1;32m 152\u001b[0m \u001b[39m# loop through time steps in this orbit\u001b[39;00m\n\u001b[0;32m--> 153\u001b[0m \u001b[39mfor\u001b[39;00m ts \u001b[39min\u001b[39;00m orbit_dataset\u001b[39m.\u001b[39mtime:\n\u001b[1;32m 154\u001b[0m \n\u001b[1;32m 155\u001b[0m \u001b[39m# if not first round then pull in previous time step's values for wet-snow\u001b[39;00m\n\u001b[1;32m 156\u001b[0m \u001b[39mif\u001b[39;00m prev_time:\n\u001b[1;32m 157\u001b[0m dataset[\u001b[39m'\u001b[39m\u001b[39mwet_snow\u001b[39m\u001b[39m'\u001b[39m]\u001b[39m.\u001b[39mloc[\u001b[39mdict\u001b[39m(time \u001b[39m=\u001b[39m ts)] \u001b[39m=\u001b[39m dataset\u001b[39m.\u001b[39msel(time \u001b[39m=\u001b[39m prev_time)[\u001b[39m'\u001b[39m\u001b[39mwet_snow\u001b[39m\u001b[39m'\u001b[39m]\n", + "File \u001b[0;32m~/miniconda3/envs/spicy/lib/python3.11/site-packages/xarray/core/common.py:192\u001b[0m, in \u001b[0;36mAbstractArray._iter\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 190\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39m_iter\u001b[39m(\u001b[39mself\u001b[39m: Any) \u001b[39m-\u001b[39m\u001b[39m>\u001b[39m Iterator[Any]:\n\u001b[1;32m 191\u001b[0m \u001b[39mfor\u001b[39;00m n \u001b[39min\u001b[39;00m \u001b[39mrange\u001b[39m(\u001b[39mlen\u001b[39m(\u001b[39mself\u001b[39m)):\n\u001b[0;32m--> 192\u001b[0m \u001b[39myield\u001b[39;00m \u001b[39mself\u001b[39;49m[n]\n", + "File \u001b[0;32m~/miniconda3/envs/spicy/lib/python3.11/site-packages/xarray/core/dataarray.py:816\u001b[0m, in \u001b[0;36mDataArray.__getitem__\u001b[0;34m(self, key)\u001b[0m\n\u001b[1;32m 813\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_getitem_coord(key)\n\u001b[1;32m 814\u001b[0m \u001b[39melse\u001b[39;00m:\n\u001b[1;32m 815\u001b[0m \u001b[39m# xarray-style array indexing\u001b[39;00m\n\u001b[0;32m--> 816\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49misel(indexers\u001b[39m=\u001b[39;49m\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_item_key_to_dict(key))\n", + "File \u001b[0;32m~/miniconda3/envs/spicy/lib/python3.11/site-packages/xarray/core/dataarray.py:1407\u001b[0m, in \u001b[0;36mDataArray.isel\u001b[0;34m(self, indexers, drop, missing_dims, **indexers_kwargs)\u001b[0m\n\u001b[1;32m 1403\u001b[0m coord_indexers \u001b[39m=\u001b[39m {\n\u001b[1;32m 1404\u001b[0m k: v \u001b[39mfor\u001b[39;00m k, v 
\u001b[39min\u001b[39;00m indexers\u001b[39m.\u001b[39mitems() \u001b[39mif\u001b[39;00m k \u001b[39min\u001b[39;00m coord_value\u001b[39m.\u001b[39mdims\n\u001b[1;32m 1405\u001b[0m }\n\u001b[1;32m 1406\u001b[0m \u001b[39mif\u001b[39;00m coord_indexers:\n\u001b[0;32m-> 1407\u001b[0m coord_value \u001b[39m=\u001b[39m coord_value\u001b[39m.\u001b[39;49misel(coord_indexers)\n\u001b[1;32m 1408\u001b[0m \u001b[39mif\u001b[39;00m drop \u001b[39mand\u001b[39;00m coord_value\u001b[39m.\u001b[39mndim \u001b[39m==\u001b[39m \u001b[39m0\u001b[39m:\n\u001b[1;32m 1409\u001b[0m \u001b[39mcontinue\u001b[39;00m\n", + "File \u001b[0;32m~/miniconda3/envs/spicy/lib/python3.11/site-packages/xarray/core/variable.py:1328\u001b[0m, in \u001b[0;36mVariable.isel\u001b[0;34m(self, indexers, missing_dims, **indexers_kwargs)\u001b[0m\n\u001b[1;32m 1325\u001b[0m indexers \u001b[39m=\u001b[39m drop_dims_from_indexers(indexers, \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mdims, missing_dims)\n\u001b[1;32m 1327\u001b[0m key \u001b[39m=\u001b[39m \u001b[39mtuple\u001b[39m(indexers\u001b[39m.\u001b[39mget(dim, \u001b[39mslice\u001b[39m(\u001b[39mNone\u001b[39;00m)) \u001b[39mfor\u001b[39;00m dim \u001b[39min\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mdims)\n\u001b[0;32m-> 1328\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mself\u001b[39;49m[key]\n", + "File \u001b[0;32m~/miniconda3/envs/spicy/lib/python3.11/site-packages/xarray/core/variable.py:879\u001b[0m, in \u001b[0;36mVariable.__getitem__\u001b[0;34m(self, key)\u001b[0m\n\u001b[1;32m 877\u001b[0m \u001b[39mif\u001b[39;00m new_order:\n\u001b[1;32m 878\u001b[0m data \u001b[39m=\u001b[39m np\u001b[39m.\u001b[39mmoveaxis(data, \u001b[39mrange\u001b[39m(\u001b[39mlen\u001b[39m(new_order)), new_order)\n\u001b[0;32m--> 879\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_finalize_indexing_result(dims, data)\n", + "File \u001b[0;32m~/miniconda3/envs/spicy/lib/python3.11/site-packages/xarray/core/variable.py:883\u001b[0m, in \u001b[0;36mVariable._finalize_indexing_result\u001b[0;34m(self, dims, data)\u001b[0m\n\u001b[1;32m 881\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39m_finalize_indexing_result\u001b[39m(\u001b[39mself\u001b[39m: T_Variable, dims, data) \u001b[39m-\u001b[39m\u001b[39m>\u001b[39m T_Variable:\n\u001b[1;32m 882\u001b[0m \u001b[39m \u001b[39m\u001b[39m\"\"\"Used by IndexVariable to return IndexVariable objects when possible.\"\"\"\u001b[39;00m\n\u001b[0;32m--> 883\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49m_replace(dims\u001b[39m=\u001b[39;49mdims, data\u001b[39m=\u001b[39;49mdata)\n", + "File \u001b[0;32m~/miniconda3/envs/spicy/lib/python3.11/site-packages/xarray/core/variable.py:1088\u001b[0m, in \u001b[0;36mVariable._replace\u001b[0;34m(self, dims, data, attrs, encoding)\u001b[0m\n\u001b[1;32m 1086\u001b[0m \u001b[39mif\u001b[39;00m encoding \u001b[39mis\u001b[39;00m _default:\n\u001b[1;32m 1087\u001b[0m encoding \u001b[39m=\u001b[39m copy\u001b[39m.\u001b[39mcopy(\u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_encoding)\n\u001b[0;32m-> 1088\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mtype\u001b[39;49m(\u001b[39mself\u001b[39;49m)(dims, data, attrs, encoding, fastpath\u001b[39m=\u001b[39;49m\u001b[39mTrue\u001b[39;49;00m)\n", + "File \u001b[0;32m~/miniconda3/envs/spicy/lib/python3.11/site-packages/xarray/core/variable.py:361\u001b[0m, in \u001b[0;36mVariable.__init__\u001b[0;34m(self, dims, data, attrs, encoding, fastpath)\u001b[0m\n\u001b[1;32m 
341\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39m__init__\u001b[39m(\u001b[39mself\u001b[39m, dims, data, attrs\u001b[39m=\u001b[39m\u001b[39mNone\u001b[39;00m, encoding\u001b[39m=\u001b[39m\u001b[39mNone\u001b[39;00m, fastpath\u001b[39m=\u001b[39m\u001b[39mFalse\u001b[39;00m):\n\u001b[1;32m 342\u001b[0m \u001b[39m \u001b[39m\u001b[39m\"\"\"\u001b[39;00m\n\u001b[1;32m 343\u001b[0m \u001b[39m Parameters\u001b[39;00m\n\u001b[1;32m 344\u001b[0m \u001b[39m ----------\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 359\u001b[0m \u001b[39m unrecognized encoding items.\u001b[39;00m\n\u001b[1;32m 360\u001b[0m \u001b[39m \"\"\"\u001b[39;00m\n\u001b[0;32m--> 361\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_data \u001b[39m=\u001b[39m as_compatible_data(data, fastpath\u001b[39m=\u001b[39;49mfastpath)\n\u001b[1;32m 362\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_dims \u001b[39m=\u001b[39m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_parse_dimensions(dims)\n\u001b[1;32m 363\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_attrs \u001b[39m=\u001b[39m \u001b[39mNone\u001b[39;00m\n", + "File \u001b[0;32m~/miniconda3/envs/spicy/lib/python3.11/site-packages/xarray/core/variable.py:291\u001b[0m, in \u001b[0;36mas_compatible_data\u001b[0;34m(data, fastpath)\u001b[0m\n\u001b[1;32m 288\u001b[0m data \u001b[39m=\u001b[39m np\u001b[39m.\u001b[39masarray(data)\n\u001b[1;32m 290\u001b[0m \u001b[39mif\u001b[39;00m \u001b[39misinstance\u001b[39m(data, np\u001b[39m.\u001b[39mndarray) \u001b[39mand\u001b[39;00m data\u001b[39m.\u001b[39mdtype\u001b[39m.\u001b[39mkind \u001b[39min\u001b[39;00m \u001b[39m\"\u001b[39m\u001b[39mOMm\u001b[39m\u001b[39m\"\u001b[39m:\n\u001b[0;32m--> 291\u001b[0m data \u001b[39m=\u001b[39m _possibly_convert_objects(data)\n\u001b[1;32m 292\u001b[0m \u001b[39mreturn\u001b[39;00m _maybe_wrap_data(data)\n", + "File \u001b[0;32m~/miniconda3/envs/spicy/lib/python3.11/site-packages/xarray/core/variable.py:217\u001b[0m, in \u001b[0;36m_possibly_convert_objects\u001b[0;34m(values)\u001b[0m\n\u001b[1;32m 205\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39m_possibly_convert_objects\u001b[39m(values):\n\u001b[1;32m 206\u001b[0m \u001b[39m \u001b[39m\u001b[39m\"\"\"Convert arrays of datetime.datetime and datetime.timedelta objects into\u001b[39;00m\n\u001b[1;32m 207\u001b[0m \u001b[39m datetime64 and timedelta64, according to the pandas convention. 
For the time\u001b[39;00m\n\u001b[1;32m 208\u001b[0m \u001b[39m being, convert any non-nanosecond precision DatetimeIndex or TimedeltaIndex\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 215\u001b[0m \u001b[39m if they are not.\u001b[39;00m\n\u001b[1;32m 216\u001b[0m \u001b[39m \"\"\"\u001b[39;00m\n\u001b[0;32m--> 217\u001b[0m as_series \u001b[39m=\u001b[39m pd\u001b[39m.\u001b[39;49mSeries(values\u001b[39m.\u001b[39;49mravel(), copy\u001b[39m=\u001b[39;49m\u001b[39mFalse\u001b[39;49;00m)\n\u001b[1;32m 218\u001b[0m \u001b[39mif\u001b[39;00m as_series\u001b[39m.\u001b[39mdtype\u001b[39m.\u001b[39mkind \u001b[39min\u001b[39;00m \u001b[39m\"\u001b[39m\u001b[39mmM\u001b[39m\u001b[39m\"\u001b[39m:\n\u001b[1;32m 219\u001b[0m as_series \u001b[39m=\u001b[39m _as_nanosecond_precision(as_series)\n", + "File \u001b[0;32m~/miniconda3/envs/spicy/lib/python3.11/site-packages/pandas/core/series.py:483\u001b[0m, in \u001b[0;36mSeries.__init__\u001b[0;34m(self, data, index, dtype, name, copy, fastpath)\u001b[0m\n\u001b[1;32m 481\u001b[0m \u001b[39mobject\u001b[39m\u001b[39m.\u001b[39m\u001b[39m__setattr__\u001b[39m(\u001b[39mself\u001b[39m, \u001b[39m\"\u001b[39m\u001b[39m_name\u001b[39m\u001b[39m\"\u001b[39m, name)\n\u001b[1;32m 482\u001b[0m \u001b[39melse\u001b[39;00m:\n\u001b[0;32m--> 483\u001b[0m \u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49mname \u001b[39m=\u001b[39m name\n\u001b[1;32m 484\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_set_axis(\u001b[39m0\u001b[39m, index)\n", + "File \u001b[0;32m~/miniconda3/envs/spicy/lib/python3.11/site-packages/pandas/core/generic.py:5924\u001b[0m, in \u001b[0;36mNDFrame.__setattr__\u001b[0;34m(self, name, value)\u001b[0m\n\u001b[1;32m 5922\u001b[0m \u001b[39mobject\u001b[39m\u001b[39m.\u001b[39m\u001b[39m__setattr__\u001b[39m(\u001b[39mself\u001b[39m, name, value)\n\u001b[1;32m 5923\u001b[0m \u001b[39melif\u001b[39;00m name \u001b[39min\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_metadata:\n\u001b[0;32m-> 5924\u001b[0m \u001b[39mobject\u001b[39;49m\u001b[39m.\u001b[39;49m\u001b[39m__setattr__\u001b[39;49m(\u001b[39mself\u001b[39;49m, name, value)\n\u001b[1;32m 5925\u001b[0m \u001b[39melse\u001b[39;00m:\n\u001b[1;32m 5926\u001b[0m \u001b[39mtry\u001b[39;00m:\n", + "File \u001b[0;32m~/miniconda3/envs/spicy/lib/python3.11/site-packages/pandas/core/series.py:661\u001b[0m, in \u001b[0;36mSeries.name\u001b[0;34m(self, value)\u001b[0m\n\u001b[1;32m 613\u001b[0m \u001b[39m \u001b[39m\u001b[39m\"\"\"\u001b[39;00m\n\u001b[1;32m 614\u001b[0m \u001b[39m Return the name of the Series.\u001b[39;00m\n\u001b[1;32m 615\u001b[0m \n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 657\u001b[0m \u001b[39m 'Even Numbers'\u001b[39;00m\n\u001b[1;32m 658\u001b[0m \u001b[39m \"\"\"\u001b[39;00m\n\u001b[1;32m 659\u001b[0m \u001b[39mreturn\u001b[39;00m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39m_name\n\u001b[0;32m--> 661\u001b[0m \u001b[39m@name\u001b[39m\u001b[39m.\u001b[39msetter\n\u001b[1;32m 662\u001b[0m \u001b[39mdef\u001b[39;00m \u001b[39mname\u001b[39m(\u001b[39mself\u001b[39m, value: Hashable) \u001b[39m-\u001b[39m\u001b[39m>\u001b[39m \u001b[39mNone\u001b[39;00m:\n\u001b[1;32m 663\u001b[0m validate_all_hashable(value, error_name\u001b[39m=\u001b[39m\u001b[39mf\u001b[39m\u001b[39m\"\u001b[39m\u001b[39m{\u001b[39;00m\u001b[39mtype\u001b[39m(\u001b[39mself\u001b[39m)\u001b[39m.\u001b[39m\u001b[39m__name__\u001b[39m\u001b[39m}\u001b[39;00m\u001b[39m.name\u001b[39m\u001b[39m\"\u001b[39m)\n\u001b[1;32m 664\u001b[0m 
\u001b[39mobject\u001b[39m\u001b[39m.\u001b[39m\u001b[39m__setattr__\u001b[39m(\u001b[39mself\u001b[39m, \u001b[39m\"\u001b[39m\u001b[39m_name\u001b[39m\u001b[39m\"\u001b[39m, value)\n", + "\u001b[0;31mKeyboardInterrupt\u001b[0m: " + ] + } + ], + "source": [ + "# FOR GENERATING THE PARAMETER DATASETS! #\n", + "\n", + "import numpy as np\n", + "import pandas as pd\n", + "import xarray as xr\n", + "import matplotlib.pyplot as plt\n", + "from shapely import wkt\n", + "from shapely.geometry import box\n", + "from pathlib import Path\n", + "from datetime import datetime\n", + "from scipy.stats import pearsonr\n", + "from sklearn.metrics import mean_squared_error\n", + "from tqdm import tqdm\n", + "\n", + "import sys\n", + "sys.path.append(Path('../..'))\n", + "\n", + "from spicy_snow.processing.snow_index import calc_delta_cross_ratio, calc_delta_gamma, \\\n", + " clip_delta_gamma_outlier, calc_snow_index, calc_snow_index_to_snow_depth\n", + "from spicy_snow.processing.wet_snow import id_newly_wet_snow, id_wet_negative_si, \\\n", + " id_newly_frozen_snow, flag_wet_snow\n", + "\n", + "def get_idx(x, y, z, q):\n", + " # ravel to numpy arrays\n", + " x = x.values.ravel()\n", + " y = y.values.ravel()\n", + " z = z.values.ravel()\n", + " q = q.values.ravel()\n", + "\n", + " # find non nans\n", + " id = (~np.isnan(x)) & (~np.isnan(y)) & (~np.isnan(z)) & (~np.isnan(q))\n", + " return id\n", + "\n", + "# Create parameter space\n", + "A = np.round(np.arange(1, 3.1, 0.5), 2)\n", + "B = np.round(np.arange(0, 1.01, 0.1), 2)\n", + "C = np.round(np.arange(0, 1.001, 0.01), 2)\n", + "\n", + "files = Path('/bsuhome/zacharykeskinen/spicy-snow/Lidar_s1_stacks/').glob('*fix.nc')\n", + "\n", + "param_dir = Path('~/scratch/param_regional_v2').expanduser()\n", + "param_dir.mkdir(exist_ok = True)\n", + "for f in files:\n", + " # get dataset\n", + " ds_name = f.name.split('stacks/')[-1].split('.')[0]\n", + " print(datetime.now(), f' -- starting {ds_name}')\n", + " ds_ = xr.open_dataset(f).load()\n", + " dataset = ds_[['s1','deltaVV','ims','fcf', 'lidar-sd', 'dem']]\n", + "\n", + " # find closest timestep to lidar\n", + " td = abs(pd.to_datetime(dataset.time) - pd.to_datetime(dataset.attrs['lidar-flight-time']))\n", + " closest_ts = dataset.time[np.argmin(td)]\n", + "\n", + " if 'Frasier_2020-02-11.nc' in f.name:\n", + " closest_ts = '2020-02-16T13:09:43'\n", + "\n", + " param_dir.joinpath(ds_name).mkdir(exist_ok = True)\n", + "\n", + " # Brute-force processing loop\n", + " for a in tqdm(A):\n", + " # print(f'A: {a}')\n", + " ds = calc_delta_cross_ratio(dataset, A = a)\n", + " for b in B:\n", + " # print(f' B: {b}')\n", + " ds = calc_delta_gamma(ds, B = b, inplace=False)\n", + " ds = clip_delta_gamma_outlier(ds)\n", + " ds = calc_snow_index(ds)\n", + " ds = id_newly_wet_snow(ds)\n", + " ds = id_wet_negative_si(ds)\n", + " ds = id_newly_frozen_snow(ds)\n", + " ds = flag_wet_snow(ds)\n", + " for c in C:\n", + " # print(f' c: {c}')\n", + " # print(f'A={a}; B={b}; C={c}')\n", + "\n", + " ds = calc_snow_index_to_snow_depth(ds, C = c)\n", + "\n", + " sub = ds.sel(time = closest_ts)[['snow_depth', 'lidar-sd', 'fcf', 'dem']]\n", + " id = get_idx(sub['snow_depth'], sub['lidar-sd'], sub['fcf'], sub['dem'])\n", + " spicy_sd = sub['snow_depth'].values.ravel()[id]\n", + " np.save(param_dir.joinpath(ds_name, f'{a}_{b}_{c}.npy'), spicy_sd)\n", + "\n", + " if not param_dir.joinpath(ds_name, 'lidar.npy').exists():\n", + " lidar_sd = sub['lidar-sd'].values.ravel()[id]\n", + " np.save(param_dir.joinpath(ds_name, f'lidar.npy'), 
lidar_sd)\n", + " fcf = sub['fcf'].values.ravel()[id]\n", + " np.save(param_dir.joinpath(ds_name, f'fcf.npy'), fcf)\n", + " dem = sub['dem'].values.ravel()[id]\n", + " np.save(param_dir.joinpath(ds_name, f'dem.npy'), dem)\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "spicy", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.0" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/scripts/data_acquisition/add_elevation.py b/scripts/data_acquisition/add_elevation.py index 74f1505..8141305 100644 --- a/scripts/data_acquisition/add_elevation.py +++ b/scripts/data_acquisition/add_elevation.py @@ -7,4 +7,5 @@ ds = xr.load_dataset(fp) dem = py3dep.get_dem(ds.rio.bounds(), 30) ds['dem'] = dem.drop(['band', 'spatial_ref']).interp_like(ds) + ds = ds.drop('lidar-dem') ds.to_netcdf(fp.with_suffix('.fix.nc')) \ No newline at end of file diff --git a/scripts/optimize/generate_param_ds.py b/scripts/optimize/generate_param_ds.py index 76f61f4..d2e4761 100644 --- a/scripts/optimize/generate_param_ds.py +++ b/scripts/optimize/generate_param_ds.py @@ -20,30 +20,32 @@ from spicy_snow.processing.wet_snow import id_newly_wet_snow, id_wet_negative_si, \ id_newly_frozen_snow, flag_wet_snow -def get_numpy(x, y): +def get_idx(x, y, z, q): # ravel to numpy arrays x = x.values.ravel() y = y.values.ravel() + z = z.values.ravel() + q = q.values.ravel() - # remove nans - x, y = x[(~np.isnan(x)) & (~np.isnan(y))], y[(~np.isnan(x)) & (~np.isnan(y))] - - return x, y + # find non nans + id = (~np.isnan(x)) & (~np.isnan(y)) & (~np.isnan(z)) & (~np.isnan(q)) + return id # Create parameter space A = np.round(np.arange(1, 3.1, 0.5), 2) B = np.round(np.arange(0, 1.01, 0.1), 2) C = np.round(np.arange(0, 1.001, 0.01), 2) -files = Path('/bsuhome/zacharykeskinen/spicy-snow/Lidar_s1_stacks/').glob('*.nc') +files = Path('/bsuhome/zacharykeskinen/spicy-snow/Lidar_s1_stacks/').glob('*fix.nc') -param_dir = Path('~/scratch/param_regional').expanduser() +param_dir = Path('~/scratch/param_regional_v2').expanduser() +param_dir.mkdir(exist_ok = True) for f in files: # get dataset ds_name = f.name.split('stacks/')[-1].split('.')[0] print(datetime.now(), f' -- starting {ds_name}') ds_ = xr.open_dataset(f).load() - dataset = ds_[['s1','deltaVV','ims','fcf', 'lidar-sd']] + dataset = ds_[['s1','deltaVV','ims','fcf', 'lidar-sd', 'dem']] # find closest timestep to lidar td = abs(pd.to_datetime(dataset.time) - pd.to_datetime(dataset.attrs['lidar-flight-time'])) @@ -73,11 +75,16 @@ def get_numpy(x, y): ds = calc_snow_index_to_snow_depth(ds, C = c) - sub = ds.sel(time = closest_ts)[['snow_depth', 'lidar-sd']] - spicy_sd, _ = get_numpy(sub['snow_depth'], sub['lidar-sd']) + sub = ds.sel(time = closest_ts)[['snow_depth', 'lidar-sd', 'fcf', 'dem']] + id = get_idx(sub['snow_depth'], sub['lidar-sd'], sub['fcf'], sub['dem']) + spicy_sd = sub['snow_depth'].values.ravel()[id] # param_np = np.vstack([spicy_sd, spicy_wet]) np.save(param_dir.joinpath(ds_name, f'{a}_{b}_{c}.npy'), spicy_sd) if not param_dir.joinpath(ds_name, 'lidar.npy').exists(): - _, lidar_sd = get_numpy(sub['snow_depth'], sub['lidar-sd']) + lidar_sd = sub['lidar-sd'].values.ravel()[id] np.save(param_dir.joinpath(ds_name, f'lidar.npy'), lidar_sd) + fcf = sub['fcf'].values.ravel()[id] + np.save(param_dir.joinpath(ds_name, 
f'fcf.npy'), fcf) + dem = sub['dem'].values.ravel()[id] + np.save(param_dir.joinpath(ds_name, f'dem.npy'), dem) diff --git a/scripts/optimize/param_ridgeplots.ipynb b/scripts/optimize/param_ridgeplots.ipynb deleted file mode 100644 index da10583..0000000 --- a/scripts/optimize/param_ridgeplots.ipynb +++ /dev/null @@ -1,331 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from pathlib import Path\n", - "\n", - "import numpy as np\n", - "import pandas as pd\n", - "import xarray as xr\n", - "\n", - "import matplotlib.pyplot as plt\n", - "import seaborn as sns" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "data_fp = '/bsuhome/zacharykeskinen/spicy-snow/data/res_ds_iter_large.nc'\n", - "\n", - "ds = xr.open_dataset(data_fp)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import matplotlib.gridspec as grid_spec\n", - "from matplotlib import colormaps\n", - "cmap = colormaps.get_cmap('RdBu')\n", - "colors = cmap(np.linspace(0.1, 0.4, len(ds.location)))\n", - "\n", - "fig = plt.figure(layout='constrained', figsize=(10, 4))\n", - "subfigs = fig.subfigures(1, 3, wspace=0.07)\n", - "params = ['A', 'B', 'C']\n", - "\n", - "# sorted = ds.reindex(location = ds['pearsonr'].isel(ds['pearsonr'].argmin(['A', 'C'])).idxmin('B').mean('iteration').to_dataframe().sort_values('B').to_xarray().location[::-1])\n", - "\n", - "loc_sorted = ['Little_Cottonwood_2021-03-18', 'Mores_2021-03-15',\\\n", - " 'Frasier_2020-02-11', 'Banner_2020-02-18', 'Banner_2021-03-15',\\\n", - " 'Dry_Creek_2020-02-19'] #, 'Cameron_2021-03-19', 'Frasier_2021-03-19' 'Mores_2020-02-09'\n", - "\n", - "sorted = ds.reindex(location = loc_sorted)\n", - "\n", - "clips = [[0.5, 3], [0, 1], [0, 1]]\n", - "\n", - "param_stat = {'A':'pearsonr', 'B':'pearsonr', 'C':'mae'}\n", - "\n", - "for i, (param, subfig, clip,) in enumerate(zip(params, subfigs, clips)):\n", - " axes = subfig.subplots(len(sorted.location), gridspec_kw = {'hspace': -0.7})\n", - " other_params = params[:]\n", - " other_params.remove(param)\n", - "\n", - " stat_name = param_stat[param]\n", - "\n", - " for loc, color, ax in zip(sorted.location, colors, axes):\n", - "\n", - " param_fp = Path('/bsuhome/zacharykeskinen/scratch/param_regional')\n", - " lidar_orig = np.load(param_fp.joinpath(str(loc.data), 'lidar.npy'))\n", - " \n", - " param_1, param_2 = other_params\n", - " a = sorted.loc[{param_1 : sorted[param_stat[param_1]].idxmax(param_1)}]\n", - " if param_2 == 'C':\n", - " data = a.loc[{param_2 : a[param_stat[param_2]].idxmin(param_2)}]\n", - " else:\n", - " data = a.loc[{param_2 : a[param_stat[param_2]].idxmax(param_2)}]\n", - " \n", - " if param == 'C':\n", - " print(data.sel(location = loc)['mae'].min())\n", - " data = data.idxmin(param)\n", - " else:\n", - " data = data.idxmax(param)\n", - " \n", - " data = data.sel(location = loc)\n", - " \n", - " data = data[param_stat[param]].data\n", - " # data = data + np.random.random(data.shape)/1000\n", - "\n", - " sns.kdeplot(data, color = color, \\\n", - " fill = True, alpha = 1.0, ax= ax, clip = clip, warn_singular=False, zorder = 1)\n", - " ax.set_xlim(clip)\n", - " ax.set_yticks([])\n", - " ax.set_ylabel('')\n", - "\n", - " rect = ax.patch\n", - " rect.set_alpha(0)\n", - "\n", - " spines = [\"top\", \"right\", \"left\", \"bottom\"]\n", - " for s in spines:\n", - " ax.spines[s].set_visible(False)\n", - " \n", - " if i 
== 0:\n", - " site_name = str(loc.data).replace('_', ' ').replace('Little Cottonwood', 'LCC').split('-')[0]\n", - " ax.text(-0.02, 0, site_name, fontweight = 'bold', ha = 'right', transform = ax.transAxes, zorder = 1e5)\n", - "\n", - "\n", - " for ax in axes[:-1]:\n", - " ax.set_xticks([])\n", - "\n", - " stat_title= {'mae':'Mean Error Minimized', 'pearsonr':'Correlation Maximized'}\n", - " subfig.suptitle(f'{param} - {stat_title[param_stat[param]]}')\n", - "\n", - "plt.tight_layout()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import matplotlib.gridspec as grid_spec\n", - "from matplotlib import colormaps\n", - "cmap = colormaps.get_cmap('RdBu')\n", - "colors = cmap(np.linspace(0.1, 0.4, len(ds.location)))\n", - "\n", - "fig = plt.figure(layout='constrained', figsize=(10, 4))\n", - "subfigs = fig.subfigures(2, 3, wspace=0.07)\n", - "params = ['A', 'B', 'C']\n", - "\n", - "# sorted = ds.reindex(location = ds['pearsonr'].isel(ds['pearsonr'].argmin(['A', 'C'])).idxmin('B').mean('iteration').to_dataframe().sort_values('B').to_xarray().location[::-1])\n", - "\n", - "loc_sorted = ['Little_Cottonwood_2021-03-18', 'Mores_2021-03-15',\\\n", - " 'Frasier_2020-02-11', 'Banner_2020-02-18', 'Banner_2021-03-15',\\\n", - " 'Dry_Creek_2020-02-19'] #'Cameron_2021-03-19', 'Frasier_2021-03-19' 'Mores_2020-02-09',\n", - "\n", - "sorted = ds.reindex(location = loc_sorted)\n", - "sorted = sorted.isel(iteration = slice(50))\n", - "\n", - "clips = [[1, 3], [0, 1], [0, 1]]\n", - "\n", - "param_stat = {'A':'pearsonr', 'B':'pearsonr', 'C':'mae'}\n", - "\n", - "# for i, (param, subfig, clip,) in enumerate(zip(params, subfigs[0, :], clips)):\n", - "# axes = subfig.subplots(len(ds.location), gridspec_kw = {'hspace': -0.7})\n", - "# other_params = params[:]\n", - "# other_params.remove(param)\n", - "\n", - "# stat_name = param_stat[param]\n", - "\n", - "# for loc, color, ax in zip(sorted.location, colors, axes):\n", - "\n", - "# param_fp = Path('/bsuhome/zacharykeskinen/scratch/param_regional')\n", - "# lidar_orig = np.load(param_fp.joinpath(str(loc.data), 'lidar.npy'))\n", - " \n", - "# param_1, param_2 = other_params\n", - "# a = sorted.loc[{param_1 : sorted[param_stat[param_1]].idxmax(param_1)}]\n", - "# if param_2 == 'C':\n", - "# data = a.loc[{param_2 : a[param_stat[param_2]].idxmin(param_2)}]\n", - "# else:\n", - "# data = a.loc[{param_2 : a[param_stat[param_2]].idxmax(param_2)}]\n", - " \n", - "# if param == 'C':\n", - "# data = data.idxmin(param)\n", - "# else:\n", - "# data = data.idxmax(param)\n", - " \n", - "# data = data.sel(location = loc)\n", - " \n", - "# data = data[param_stat[param]].data\n", - "# # data = data + np.random.random(data.shape)/1000\n", - "\n", - "# sns.kdeplot(data, color = color, \\\n", - "# fill = True, alpha = 1.0, ax= ax, clip = clip, warn_singular=False, zorder = 1)\n", - "# ax.set_xlim(clip)\n", - "# ax.set_yticks([])\n", - "# ax.set_ylabel('')\n", - "\n", - "# rect = ax.patch\n", - "# rect.set_alpha(0)\n", - "\n", - "# spines = [\"top\", \"right\", \"left\", \"bottom\"]\n", - "# for s in spines:\n", - "# ax.spines[s].set_visible(False)\n", - " \n", - "# if i == 0:\n", - "# site_name = str(loc.data).replace('_', ' ').replace('Little Cottonwood', 'LCC').split('-')[0]\n", - "# ax.text(-0.02, 0, site_name, fontweight = 'bold', ha = 'right', transform = ax.transAxes, zorder = 1e5)\n", - "\n", - "\n", - "# for ax in axes[:-1]:\n", - "# ax.set_xticks([])\n", - "\n", - "# stat_title= {'mae':'Mean Error Minimized', 
'pearsonr':'Correlation Maximized'}\n", - "# subfig.suptitle(f'{param} - {stat_title[param_stat[param]]}')\n", - "\n", - "\n", - "for i, (subfig, param) in enumerate(zip(subfigs[1, :], params)):\n", - "\n", - " ax = subfig.subplots(1, 1)\n", - " other_params = params[:]\n", - " other_params.remove(param)\n", - "\n", - " for loc in sorted.location:\n", - "\n", - " ds_fp = Path('/bsuhome/zacharykeskinen/scratch/SnowEx-Data')\n", - " full_ds = xr.load_dataset(ds_fp.joinpath(str(loc.data)+'.nc'))\n", - " \n", - " param_1, param_2 = other_params\n", - " a = sorted.loc[{param_1 : sorted[param_stat[param_1]].idxmax(param_1)}]\n", - " if param_2 == 'C':\n", - " data = a.loc[{param_2 : a[param_stat[param_2]].idxmin(param_2)}]\n", - " else:\n", - " data = a.loc[{param_2 : a[param_stat[param_2]].idxmax(param_2)}]\n", - " \n", - " if param == 'C':\n", - " data = data.idxmin(param)\n", - " else:\n", - " data = data.idxmax(param)\n", - " \n", - " data = data.sel(location = loc)\n", - " \n", - " data = data[param_stat[param]].data\n", - "\n", - " if param == 'C':\n", - " CR = full_ds['s1'].sel(band = 'VH') - full_ds['s1'].sel(band = 'VV')\n", - " x = CR.max('time').mean()\n", - " # x = full_ds['deltaGamma'].max('time').mean()\n", - " # x = full_ds['s1'].sel(band = 'VV').max('time').mean()\n", - " else:\n", - " trees = full_ds.where(full_ds['fcf'] > 0.5)\n", - " x = (full_ds['deltaVV'].mean('time') / full_ds['lidar-sd']).mean()\n", - " # y = trees['deltaVV'].mean(dim = 'time')\n", - " # x = (y / (trees['fcf'] + 0.01)).mean()\n", - " # x = full_ds['s1'].sel(band = 'VV').max('time')# / full_ds['fcf'] # / full_ds['deltaCR']\n", - " # s1 = full_ds['s1']\n", - " # trees = full_ds.where(full_ds['fcf'] > 0.5)\n", - " # x = full_ds['s1'].sel(band = 'VH').diff('time').mean('time') / (full_ds['fcf'] + 1)\n", - " # x = xr.where(x > 1000, np.nan, x)\n", - " # x = xr.where(x > -1000, x, np.nan)\n", - " # x = trees['deltaCR'] / trees['deltaVV']\n", - " # CR = full_ds['s1'].sel(band = 'VH') - full_ds['s1'].sel(band = 'VV')\n", - " # x = full_ds['fcf']\n", - " # x = x.mean()\n", - " # if x < -0.5:\n", - " # print(loc)\n", - " # x = np.nan\n", - " \n", - " ax.scatter(x, np.mean(data), label = loc)\n", - " # plt.legend()\n", - "plt.tight_layout()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "params = ['A', 'B', 'C']\n", - "\n", - "loc_sorted = [ 'Cameron_2021-03-19','Little_Cottonwood_2021-03-18', 'Mores_2020-02-09', 'Mores_2021-03-15',\\\n", - " 'Frasier_2020-02-11', 'Frasier_2021-03-19', 'Banner_2020-02-18', 'Banner_2021-03-15',\\\n", - " 'Dry_Creek_2020-02-19']\n", - "sorted = ds.reindex(location = loc_sorted)\n", - "\n", - "best_c = sorted.loc[{'C' : sorted['mae'].idxmin('C')}]\n", - "best_c = best_c.mean('iteration')\n", - "for loc, sub_ds in best_c.groupby('location'):\n", - " sub_ds['pearsonr'].plot()\n", - " plt.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "files = Path('/bsuhome/zacharykeskinen/spicy-snow/Lidar_s1_stacks/').glob('*.nc')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import py3dep\n", - "import rioxarray as rxa\n", - "files = Path('/bsuhome/zacharykeskinen/spicy-snow/SnowEx-Data').glob('*.nc')\n", - "for fp in files:\n", - " ds = xr.load_dataset(fp)\n", - " dem = py3dep.get_dem(ds.rio.bounds(), 30)\n", - " ds['dem'] = dem.drop(['band', 'spatial_ref']).interp_like(ds)\n", - " ds = 
ds.drop('lidar-dem')\n", - " ds.to_netcdf(fp.with_suffix('.fix.nc'))\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "files = Path('/bsuhome/zacharykeskinen/spicy-snow/SnowEx-Data').glob('*fix.nc')\n", - "for fp in files:\n", - " ds = xr.load_dataset(fp)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "spicy", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.0" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/spicy_snow/processing/wet_snow.py b/spicy_snow/processing/wet_snow.py index 3e75345..91b61fe 100644 --- a/spicy_snow/processing/wet_snow.py +++ b/spicy_snow/processing/wet_snow.py @@ -37,12 +37,12 @@ def id_newly_wet_snow(dataset: xr.Dataset, wet_thresh: int = -2, inplace: bool = dataset['wet_flag'] = xr.zeros_like(dataset['deltaVV']) # identify possible newly wet snow in regions FCF < 0.5 with deltaCR - dataset['wet_flag'] = dataset['wet_flag'].where(((dataset['fcf'] > 0.5) | (dataset['deltaCR'] > wet_thresh)), 1) + dataset['wet_flag'] = dataset['wet_flag'].where(((dataset['fcf'] > 0.5) | (dataset['deltaCR'] > wet_thresh) | (dataset['deltaCR'].isnull())), 1) # identify possible newly wet snow in regions FCF > 0.5 with deltaVV - dataset['wet_flag'] = dataset['wet_flag'].where(((dataset['fcf'] < 0.5) | (dataset['deltaVV'] > wet_thresh)), 1) + dataset['wet_flag'] = dataset['wet_flag'].where(((dataset['fcf'] < 0.5) | (dataset['deltaVV'] > wet_thresh) | (dataset['deltaVV'].isnull())), 1) # mask nans from Sentinel-1 data - dataset['wet_flag'] = dataset['wet_flag'].where(~dataset['deltaVV'].isnull()) + dataset['wet_flag'] = dataset['wet_flag'].where(~dataset['s1'].sel(band = 'VV').isnull(),np.nan) if not inplace: return dataset @@ -74,10 +74,10 @@ def id_newly_frozen_snow(dataset: xr.Dataset, freeze_thresh: int = 2, inplace: b dataset['freeze_flag'] = xr.zeros_like(dataset['deltaVV']) # identify possible re-freezing by increases of deltaGammaNaught of 2dB - dataset['freeze_flag'] = dataset['freeze_flag'].where(dataset['deltaGamma'] < freeze_thresh, 1) + dataset['freeze_flag'] = dataset['freeze_flag'].where((dataset['deltaGamma'] < freeze_thresh) | (dataset['deltaGamma'].isnull()), 1) # mask nans from Sentinel-1 data - dataset['freeze_flag'] = dataset['freeze_flag'].where(~dataset['deltaGamma'].isnull()) + dataset['freeze_flag'] = dataset['freeze_flag'].where(~dataset['snow_index'].isnull()) if not inplace: return dataset @@ -109,10 +109,10 @@ def id_wet_negative_si(dataset: xr.Dataset, wet_SI_thresh = 0, inplace: bool = F dataset['alt_wet_flag'] = xr.zeros_like(dataset['deltaVV']) # identify wetting of snow by negative snow index with snow present - dataset['alt_wet_flag'] = dataset['alt_wet_flag'].where(((dataset['ims'] != 4) | (dataset['snow_index'] > wet_SI_thresh)), 1) + dataset['alt_wet_flag'] = dataset['alt_wet_flag'].where(((dataset['ims'] != 4) | (dataset['snow_index'] > wet_SI_thresh) | (dataset['snow_index'].isnull())), 1) # mask nans from Sentinel-1 data - dataset['alt_wet_flag'] = dataset['alt_wet_flag'].where(~dataset['deltaVV'].isnull()) + dataset['alt_wet_flag'] = dataset['alt_wet_flag'].where(~dataset['snow_index'].isnull()) if not inplace: return dataset @@ -166,20 +166,21 @@ def flag_wet_snow(dataset: xr.Dataset, inplace: bool = 
False) -> Union[None, xr. dataset['wet_snow'].loc[dict(time = ts)] = dataset.sel(time = prev_time)['wet_snow'] # add newly wet snow flags to old wet snow and then bound at 1 - dataset['wet_snow'].loc[dict(time = ts)]= dataset.sel(time = ts)['wet_snow'] + dataset.sel(time = ts)['wet_flag'] - dataset['wet_snow'].loc[dict(time = ts)] = dataset.sel(time = ts)['wet_snow'] + dataset.sel(time = ts)['alt_wet_flag'] - dataset['wet_snow'].loc[dict(time = ts)] = dataset.sel(time = ts)['wet_snow'].where(dataset.sel(time = ts)['wet_snow'] < 1, 1) - + + dataset['wet_snow'].loc[dict(time = ts)] = xr.where(~dataset.sel(time = ts)['wet_flag'].isnull(),dataset.sel(time = ts)['wet_snow'] + dataset.sel(time = ts)['wet_flag'],np.nan) + dataset['wet_snow'].loc[dict(time = ts)] = xr.where(~dataset.sel(time = ts)['alt_wet_flag'].isnull(),dataset.sel(time = ts)['wet_snow'] + dataset.sel(time = ts)['alt_wet_flag'],np.nan) + dataset['wet_snow'].loc[dict(time = ts)] = dataset.sel(time = ts)['wet_snow'].where((dataset.sel(time = ts)['wet_snow'] < 1) | (dataset.sel(time = ts)['wet_snow'].isnull()), 1) + # add newly frozen snow flags to old wet snow and then bound at 0 to avoid negatives - dataset['wet_snow'].loc[dict(time = ts)] = dataset.sel(time = ts)['wet_snow'] - dataset.sel(time = ts)['freeze_flag'] - dataset['wet_snow'].loc[dict(time = ts)] = dataset.sel(time = ts)['wet_snow'].where(dataset.sel(time = ts)['wet_snow'] > 0, 0) + dataset['wet_snow'].loc[dict(time = ts)] = xr.where(~dataset.sel(time = ts)['freeze_flag'].isnull(),dataset.sel(time = ts)['wet_snow'] - dataset.sel(time = ts)['freeze_flag'],np.nan) + dataset['wet_snow'].loc[dict(time = ts)] = dataset.sel(time = ts)['wet_snow'].where((dataset.sel(time = ts)['wet_snow'] > 0) | (dataset.sel(time = ts)['wet_snow'].isnull()), 0) + + # set non snow (IMS != 4) to not wet (0) + dataset['wet_snow'].loc[dict(time = ts)] = dataset.sel(time = ts)['wet_snow'].where(dataset.sel(time = ts)['ims'] == 4, 0) # make nans at areas without S1 data dataset['wet_snow'].loc[dict(time = ts)] = dataset.sel(time = ts)['wet_snow'].where(~dataset['s1'].sel(time = ts, band = 'VV').isnull(), np.nan) - - # make - dataset['wet_snow'].loc[dict(time = ts)] = dataset.sel(time = ts)['wet_snow'].where(dataset.sel(time = ts)['ims'] == 4, 0) - + prev_time = ts # if >50% wet of last 4 cycles after feb 1 then set remainer till @@ -207,7 +208,7 @@ def flag_wet_snow(dataset: xr.Dataset, inplace: bool = False) -> Union[None, xr. # now if we are over 1 (ie it was flagged wet by dB and negative SI flags) we should floor those back to 1 dataset['perma_wet'].loc[dict(time = melt_orbit)] = \ - dataset['perma_wet'].loc[dict(time = melt_orbit)].where(dataset['perma_wet'].loc[dict(time = melt_orbit)] <= 1 , 1) + dataset['perma_wet'].loc[dict(time = melt_orbit)].where((dataset['perma_wet'].loc[dict(time = melt_orbit)] <= 1) | (dataset['perma_wet'].loc[dict(time = melt_orbit)].isnull()) , 1) # now calculate the rolling mean of the perma wet so we have a % 0-1 of days out of 4 that were flagged dataset['perma_wet'].loc[dict(time = melt_orbit)] = \ @@ -217,6 +218,7 @@ def flag_wet_snow(dataset: xr.Dataset, inplace: bool = False) -> Union[None, xr. 
# this will fail if bottleneck is installed due to it lacking the min_periods keyword # see: https://github.com/pydata/xarray/issues/4922 + if 'bottleneck' not in sys.modules: dataset['perma_wet'].loc[dict(time = melt_orbit)] = \ dataset['perma_wet'].loc[dict(time = melt_orbit)].rolling(time = len(orbit_dataset.time), min_periods = 1).max() diff --git a/spicy_snow/retrieval.py b/spicy_snow/retrieval.py index 067a5a7..e091a42 100644 --- a/spicy_snow/retrieval.py +++ b/spicy_snow/retrieval.py @@ -191,7 +191,7 @@ def retrieval_from_parameters(dataset: xr.Dataset, C: float, wet_SI_thresh: float = 0, freezing_snow_thresh: float = 2, - wet_snow_thres: float = -2): + wet_snow_thresh: float = -2): """ Retrieve snow depth with varied parameter set from an already pre-processed dataset. @@ -228,7 +228,7 @@ def retrieval_from_parameters(dataset: xr.Dataset, dataset = calc_snow_index_to_snow_depth(dataset, C = C) # find newly wet snow - dataset = id_newly_wet_snow(dataset, wet_thresh = wet_snow_thres) + dataset = id_newly_wet_snow(dataset, wet_thresh = wet_snow_thresh) dataset = id_wet_negative_si(dataset, wet_SI_thresh = wet_SI_thresh) # find newly frozen snow diff --git a/tests/test_wetsnow.py b/tests/test_wetsnow.py index aa54bb3..eca50fe 100644 --- a/tests/test_wetsnow.py +++ b/tests/test_wetsnow.py @@ -21,6 +21,7 @@ class TestWetSnowFlags(unittest.TestCase): fcf = np.random.randn(10, 10)/10 + 0.5 deltaVV = np.random.randn(10, 10, 6) * 3 deltaCR = np.random.randn(10, 10, 6) * 3 + s1 = np.random.randn(10, 10, 6, 3) ims = np.full((10, 10, 6), 4, dtype = int) times = [np.datetime64(t) for t in ['2020-01-01','2020-01-02', '2020-01-07','2020-01-08', '2020-01-14', '2020-01-15']] x = np.linspace(0, 9, 10) @@ -32,6 +33,7 @@ class TestWetSnowFlags(unittest.TestCase): deltaVV = (["x", "y", "time"], deltaVV), deltaCR = (["x", "y", "time"], deltaCR), ims = (["x", "y", "time"], ims), + s1 = (["x", "y", "time", "band"], s1) ), coords = dict( lon = (["x", "y"], lon),
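The `.isnull()` guards added to id_newly_wet_snow, id_newly_frozen_snow, and id_wet_negative_si in spicy_snow/processing/wet_snow.py all close the same hole: xarray's .where(cond, other) writes `other` wherever cond evaluates False, and any comparison against NaN evaluates False, so a pixel with missing Sentinel-1 data would be flagged 1 (newly wet or refrozen) instead of staying 0 until the final NaN mask. A minimal sketch of the failure mode, with toy values in place of real deltaCR data:

# Toy demonstration of the NaN guard in id_newly_wet_snow (hypothetical values, not project data).
import numpy as np
import xarray as xr

wet_thresh = -2
deltaCR = xr.DataArray([np.nan, -3.0, 1.0])  # missing, wet-looking, dry-looking
flag = xr.zeros_like(deltaCR)

# Unguarded: NaN > wet_thresh is False, so the missing pixel is wrongly flagged wet.
unguarded = flag.where(deltaCR > wet_thresh, 1)
print(unguarded.values)  # [1. 1. 0.]

# Guarded: the missing pixel keeps 0 and is later set to NaN by the Sentinel-1 mask.
guarded = flag.where((deltaCR > wet_thresh) | (deltaCR.isnull()), 1)
print(guarded.values)  # [0. 1. 0.]

Separately, the keyword rename in spicy_snow/retrieval.py means any caller passing the old misspelled wet_snow_thres= keyword to retrieval_from_parameters must switch to wet_snow_thresh=.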