Skip to content

Commit

Permalink
Merge branch 'main' of https://github.com/SnowEx/spicy-snow into main
Browse files Browse the repository at this point in the history
  • Loading branch information
gbrencher committed Sep 6, 2023
2 parents 8f9de7a + e646a4c commit 067fed3
Show file tree
Hide file tree
Showing 11 changed files with 574 additions and 382 deletions.
4 changes: 3 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -162,4 +162,6 @@ scripts/optimize/param_sds
!/results/site_param_rmses/*.csv
!/contrib/palomaki/*.csv

*.sqlite
*.sqlite

*.nfs
Binary file added SnowEx-Data/.nfs000000013a9d95c700004d8b
Binary file not shown.
Binary file added SnowEx-Data/.nfs000000013ac787a900004d8c
Binary file not shown.
72 changes: 53 additions & 19 deletions contrib/gagliano/wet_snow_testing.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@
" ds_name = f.split('stacks/')[-1].split('.')[0]\n",
" print(datetime.now(), f' -- starting {ds_name}')\n",
"\n",
" if Path(f'rmse_out/{ds_name}_wet_flag.nc').is_file():\n",
" if Path(f'rmse_out_with_si/{ds_name}_wet_flag.nc').is_file():\n",
" print('This file already exists, continuing.')\n",
" continue\n",
" \n",
Expand Down Expand Up @@ -124,7 +124,7 @@
" ds = calc_snow_index(ds)\n",
" ds = calc_snow_index_to_snow_depth(ds, C=c, inplace=False)\n",
" ds = id_newly_wet_snow(ds,wet_thresh=wst)\n",
" ds = id_wet_negative_si_test(ds)\n",
" ds = id_wet_negative_si(ds,wet_SI_thresh=0) #change to test depending on whether we want neg SI to flag as wet snow\n",
" ds = id_newly_frozen_snow(ds,freeze_thresh=-1*wst)\n",
" ds = flag_wet_snow(ds)\n",
" # Compare snow depths - mask wet snow\n",
Expand All @@ -144,9 +144,9 @@
" print(f'Frac valid pixels = {mask_wet.sum() /mask.sum():0.2f}')\n",
"\n",
" # After loop, save RMSE results per file\n",
" rmse_wet_flag.to_netcdf(f'rmse_out/{ds_name}_wet_flag.nc')\n",
" rmse_no_flag.to_netcdf(f'rmse_out/{ds_name}_no_flag.nc')\n",
" valid_pixels.to_netcdf(f'rmse_out/{ds_name}_valid_pixels.nc')\n",
" rmse_wet_flag.to_netcdf(f'rmse_out_with_si/{ds_name}_wet_flag.nc')\n",
" rmse_no_flag.to_netcdf(f'rmse_out_with_si/{ds_name}_no_flag.nc')\n",
" valid_pixels.to_netcdf(f'rmse_out_with_si/{ds_name}_valid_pixels.nc')\n",
" "
]
},
Expand All @@ -156,11 +156,21 @@
"metadata": {},
"outputs": [],
"source": [
"results = sorted(glob('rmse_out/*.nc'))\n",
"with_neg_si_flag_wet_snow = True\n",
"\n",
"directory = 'rmse_out_with_si'\n",
"#directory = 'rmse_out'\n",
"\n",
"#if with_neg_si_flag_wet_snow == True:\n",
"# directory = 'rmse_out_full_with_si'\n",
"#else:\n",
"# directory = 'rmse_out_full'\n",
"\n",
"results = sorted(glob(f'{directory}/*.nc'))\n",
"names = []\n",
"for f in results:\n",
" if 'no_flag' in f:\n",
" ds_name = f.split('rmse_out/')[-1]\n",
" ds_name = f.split(f'{directory}/')[-1]\n",
" ds_name = ds_name.split('_no')[0]\n",
" names.append(ds_name)\n",
"\n",
Expand All @@ -172,19 +182,19 @@
"for f in results:\n",
" if 'wet_flag' in f:\n",
" r = xr.open_dataarray(f).load()\n",
" ds_name = f.split('rmse_out/')[-1]\n",
" ds_name = f.split(f'{directory}/')[-1]\n",
" ds_name = ds_name.split('_wet')[0]\n",
" for ind,val in zip(r.wet_snow_thresh.values,r.values):\n",
" thresh_results.loc[ind,ds_name] = val\n",
" if 'no_flag' in f:\n",
" r = xr.open_dataarray(f).load()\n",
" ds_name = f.split('rmse_out/')[-1]\n",
" ds_name = f.split(f'{directory}/')[-1]\n",
" ds_name = ds_name.split('_no')[0]\n",
" for ind,val in zip(r.wet_snow_thresh.values,r.values):\n",
" no_thresh_results.loc[ind,ds_name] = val\n",
" if 'valid' in f:\n",
" r = xr.open_dataarray(f).load()\n",
" ds_name = f.split('rmse_out/')[-1]\n",
" ds_name = f.split(f'{directory}/')[-1]\n",
" ds_name = ds_name.split('_valid')[0]\n",
" for ind,val in zip(r.wet_snow_thresh.values,r.values):\n",
" valid_pixels_results.loc[ind,ds_name] = val\n"
Expand Down Expand Up @@ -251,7 +261,7 @@
" ds_name = f.split('stacks/')[-1].split('.')[0]\n",
" print(datetime.now(), f' -- starting {ds_name}')\n",
"\n",
" if Path(f'rmse_out_full/{ds_name}_wet_flag.nc').is_file():\n",
" if Path(f'rmse_out_full_with_si/{ds_name}_wet_flag.nc').is_file():\n",
" print('This file already exists, continuing.')\n",
" continue\n",
"\n",
Expand Down Expand Up @@ -280,7 +290,7 @@
" ds = calc_snow_index(ds)\n",
" ds = calc_snow_index_to_snow_depth(ds, C=c, inplace=False)\n",
" ds = id_newly_wet_snow(ds,wet_thresh=wst)\n",
" ds = id_wet_negative_si_test(ds)\n",
"    ds = id_wet_negative_si(ds) #change to test depending on whether to remove neg SI = wet snow\n",
" ds = id_newly_frozen_snow(ds,freeze_thresh=-1*wst)\n",
" ds = flag_wet_snow(ds)\n",
" # Compare snow depths - mask wet snow\n",
Expand All @@ -300,9 +310,9 @@
" print(f'Frac valid pixels = {mask_wet.sum()/ mask.sum():0.2f}')\n",
"\n",
" # After loop, save RMSE results per file\n",
" rmse_wet_flag.to_netcdf(f'rmse_out_full/{ds_name}_wet_flag.nc')\n",
" rmse_no_flag.to_netcdf(f'rmse_out_full/{ds_name}_no_flag.nc')\n",
" valid_pixels.to_netcdf(f'rmse_out_full/{ds_name}_valid_pixels.nc')\n",
" rmse_wet_flag.to_netcdf(f'rmse_out_full_with_si/{ds_name}_wet_flag.nc')\n",
" rmse_no_flag.to_netcdf(f'rmse_out_full_with_si/{ds_name}_no_flag.nc')\n",
" valid_pixels.to_netcdf(f'rmse_out_full_with_si/{ds_name}_valid_pixels.nc')\n",
" "
]
},
Expand All @@ -312,11 +322,14 @@
"metadata": {},
"outputs": [],
"source": [
"directory = 'rmse_out_full'\n",
"directory = 'rmse_out_full_with_si'\n",
"\n",
"which_site = 5\n",
"\n",
"results1 = sorted(glob('rmse_out_full/*wet*.nc'))\n",
"results2 = sorted(glob('rmse_out_full/*no*.nc'))\n",
"results3 = sorted(glob('rmse_out_full/*valid*.nc'))\n",
"results1 = sorted(glob(f'{directory}/*wet*.nc'))\n",
"results2 = sorted(glob(f'{directory}/*no*.nc'))\n",
"results3 = sorted(glob(f'{directory}/*valid*.nc'))\n",
"\n",
"wet_snow = xr.open_dataarray(results1[which_site])\n",
"all_snow = xr.open_dataarray(results2[which_site])\n",
Expand Down Expand Up @@ -345,6 +358,27 @@
" plt.tight_layout()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"files = sorted(glob('spicy_s1_stacks/*.nc'))\n",
"\n",
"\n",
"for f in files:\n",
" ds_name = f.split('stacks/')[-1].split('.')[0]\n",
" print(datetime.now(), f' -- starting {ds_name}')\n",
"\n",
" # Open dataset \n",
" ds_ = xr.open_dataset(f).load()\n",
" dataset = ds_[['s1','deltaVV','ims','fcf','lidar-sd']]\n",
" td = abs(pd.to_datetime(dataset.time) - pd.to_datetime(dataset.attrs['lidar-flight-time']))\n",
" closest_ts_idx = np.where(td == td.min())[0][0]\n",
" closest_ts = dataset.time[closest_ts_idx]"
]
},
{
"cell_type": "code",
"execution_count": null,
Expand All @@ -356,7 +390,7 @@
"c = 0.55\n",
"wst = -3\n",
"\n",
"for wst in [-4,-3,-2,-1,0,1,2,3,4]:\n",
"for wst in [-4,-3,-2,-1,0]:\n",
" ds = calc_delta_cross_ratio(dataset, A=a, inplace=False)\n",
" ds = calc_delta_gamma(ds, B=b, inplace=False)\n",
" print(f'A={a:0.2f}; B={b:0.2f}; C={c:0.2f}; wst={wst:0.2f}')\n",
Expand Down
Loading

0 comments on commit 067fed3

Please sign in to comment.