Skip to content

Commit 50b9778

Browse files
committed
catch notebooks up with changes
1 parent 0560155 commit 50b9778

File tree

6 files changed

+102
-68
lines changed

6 files changed

+102
-68
lines changed

examples/01_multi-process_models.ipynb

+58-29
Original file line numberDiff line numberDiff line change
@@ -241,7 +241,7 @@
241241
" time_step=np.timedelta64(24, \"h\"),\n",
242242
" options={\n",
243243
" \"input_dir\": domain_dir,\n",
244-
" \"budget_type\": None,\n",
244+
" \"budget_type\": \"error\",\n",
245245
" \"netcdf_output_dir\": nb_output_dir / \"nhm_memory\",\n",
246246
" },\n",
247247
")\n",
@@ -349,8 +349,10 @@
349349
"source": [
350350
"run_dir = pl.Path(nb_output_dir / \"nhm_yaml\")\n",
351351
"run_dir.mkdir(exist_ok=True)\n",
352-
"control_yaml_file = run_dir / \"control.yml\"\n",
353-
"control.to_yaml(control_yaml_file)"
352+
"control_yaml_file = run_dir / \"control.yaml\"\n",
353+
"control_yaml = deepcopy(control)\n",
354+
"control_yaml.options[\"netcdf_output_dir\"] = nb_output_dir / \"nhm_yaml\"\n",
355+
"control_yaml.to_yaml(control_yaml_file)"
354356
]
355357
},
356358
{
@@ -366,6 +368,16 @@
366368
"We add the option `netcdf_output_dir` to the control since we assume we won't be able to do so at run time. Note that this option and the `input_dir` option are `pathlib.Path` objects. These are not what we want to write to file. We want their string version. We could do `str()` on each one by hand, but it will be more handy to write a small, recursive function to do this on a supplied dictionary since this will be a recurring task with the model dictionary we will create after the control YAML file."
367369
]
368370
},
371+
{
372+
"cell_type": "code",
373+
"execution_count": null,
374+
"id": "8044016c-7728-4149-9be5-f52b8f43975d",
375+
"metadata": {},
376+
"outputs": [],
377+
"source": [
378+
"control"
379+
]
380+
},
369381
{
370382
"cell_type": "code",
371383
"execution_count": null,
@@ -471,7 +483,7 @@
471483
"metadata": {},
472484
"outputs": [],
473485
"source": [
474-
"model_dict_yaml_file = run_dir / \"model_dict.yml\"\n",
486+
"model_dict_yaml_file = run_dir / \"model_dict.yaml\"\n",
475487
"with open(model_dict_yaml_file, \"w\") as file:\n",
476488
" _ = yaml.dump(model_dict, file)"
477489
]
@@ -491,7 +503,7 @@
491503
"metadata": {},
492504
"outputs": [],
493505
"source": [
494-
"! cat 01_multi-process_models/nhm_yaml/control.yml"
506+
"! cat 01_multi-process_models/nhm_yaml/control.yaml"
495507
]
496508
},
497509
{
@@ -501,7 +513,7 @@
501513
"metadata": {},
502514
"outputs": [],
503515
"source": [
504-
"! cat 01_multi-process_models/nhm_yaml/model_dict.yml"
516+
"! cat 01_multi-process_models/nhm_yaml/model_dict.yaml"
505517
]
506518
},
507519
{
@@ -519,8 +531,8 @@
519531
"metadata": {},
520532
"outputs": [],
521533
"source": [
522-
"model_yml = pws.Model.from_yaml(model_dict_yaml_file)\n",
523-
"model_yml"
534+
"model_yaml = pws.Model.from_yaml(model_dict_yaml_file)\n",
535+
"model_yaml"
524536
]
525537
},
526538
{
@@ -533,7 +545,7 @@
533545
"show_params = not (platform == \"darwin\" and processor() == \"arm\")\n",
534546
"try:\n",
535547
" pws.analysis.ModelGraph(\n",
536-
" model_yml,\n",
548+
" model_yaml,\n",
537549
" hide_variables=False,\n",
538550
" process_colors=palette,\n",
539551
" show_params=show_params,\n",
@@ -558,8 +570,8 @@
558570
"outputs": [],
559571
"source": [
560572
"%%time\n",
561-
"model_yml.run()\n",
562-
"model_yml.finalize()"
573+
"model_yaml.run()\n",
574+
"model_yaml.finalize()"
563575
]
564576
},
565577
{
@@ -584,11 +596,23 @@
584596
"outputs": [],
585597
"source": [
586598
"mem_out_dir = nb_output_dir / \"nhm_memory\"\n",
587-
"yml_out_dir = nb_output_dir / \"nhm_yaml\"\n",
599+
"yaml_out_dir = nb_output_dir / \"nhm_yaml\"\n",
588600
"mem_files = sorted(mem_out_dir.glob(\"*.nc\"))\n",
589-
"yml_files = sorted(yml_out_dir.glob(\"*.nc\"))\n",
601+
"yaml_files = sorted(yaml_out_dir.glob(\"*.nc\"))\n",
590602
"# We get all the same output files\n",
591-
"assert set([ff.name for ff in mem_files]) == set([ff.name for ff in yml_files])"
603+
"assert set([ff.name for ff in mem_files]) == set(\n",
604+
" [ff.name for ff in yaml_files]\n",
605+
")"
606+
]
607+
},
608+
{
609+
"cell_type": "code",
610+
"execution_count": null,
611+
"id": "310a8d35-5ddb-4ef1-b4a1-65a823d4aa7c",
612+
"metadata": {},
613+
"outputs": [],
614+
"source": [
615+
"set([ff.name for ff in yaml_files]) - set([ff.name for ff in mem_files])"
592616
]
593617
},
594618
{
@@ -604,16 +628,29 @@
604628
"Now compare the values of all variables:"
605629
]
606630
},
631+
{
632+
"cell_type": "code",
633+
"execution_count": null,
634+
"id": "523633e0-bb6e-4fa7-8a7a-bd2de3d602b0",
635+
"metadata": {},
636+
"outputs": [],
637+
"source": [
638+
"nb_output_dir.resolve()"
639+
]
640+
},
607641
{
608642
"cell_type": "code",
609643
"execution_count": null,
610644
"id": "db0d920e-0227-45ba-92c6-8ffacab9ca3c",
611645
"metadata": {},
612646
"outputs": [],
613647
"source": [
614-
"for mf, yf in zip(mem_files, yml_files):\n",
648+
"for mf, yf in zip(mem_files, yaml_files):\n",
615649
" var = mf.with_suffix(\"\").name\n",
616-
" # print(var)\n",
650+
"\n",
651+
" if \"budget\" in var.lower():\n",
652+
" continue\n",
653+
"\n",
617654
" mda = xr.open_dataset(mf)[var]\n",
618655
" yda = xr.open_dataset(yf)[var]\n",
619656
" xr.testing.assert_equal(mda, yda)\n",
@@ -681,7 +718,7 @@
681718
"metadata": {},
682719
"outputs": [],
683720
"source": [
684-
"run_dir = pl.Path(nb_output_dir / \"yml_less_output\").resolve()\n",
721+
"run_dir = pl.Path(nb_output_dir / \"yaml_less_output\").resolve()\n",
685722
"run_dir.mkdir(exist_ok=True)"
686723
]
687724
},
@@ -704,7 +741,7 @@
704741
"]\n",
705742
"print(control_cp) # .to_dict(), sort_dicts=False)\n",
706743
"\n",
707-
"control_yaml_file = run_dir / \"control.yml\"\n",
744+
"control_yaml_file = run_dir / \"control.yaml\"\n",
708745
"control_cp.to_yaml(control_yaml_file)"
709746
]
710747
},
@@ -725,7 +762,7 @@
725762
"source": [
726763
"model_dict_copy = deepcopy(model_dict)\n",
727764
"model_dict_copy[\"control\"] = str(control_yaml_file)\n",
728-
"model_dict_yaml_file = run_dir / \"model_dict.yml\""
765+
"model_dict_yaml_file = run_dir / \"model_dict.yaml\""
729766
]
730767
},
731768
{
@@ -928,7 +965,7 @@
928965
"control_cp = deepcopy(control)\n",
929966
"control_cp.options[\"input_dir\"] = yaml_output_dir.resolve()\n",
930967
"control_cp.options[\"netcdf_output_dir\"] = run_dir.resolve()\n",
931-
"control_yaml_file = run_dir / \"control.yml\"\n",
968+
"control_yaml_file = run_dir / \"control.yaml\"\n",
932969
"control_cp.to_yaml(control_yaml_file)\n",
933970
"pprint(control.to_dict(), sort_dicts=False)"
934971
]
@@ -949,7 +986,7 @@
949986
"outputs": [],
950987
"source": [
951988
"model_dict[\"control\"] = str(control_yaml_file)\n",
952-
"model_dict_yaml_file = run_dir / \"model_dict.yml\"\n",
989+
"model_dict_yaml_file = run_dir / \"model_dict.yaml\"\n",
953990
"keep_procs = [\"prms_soilzone\", \"prms_groundwater\", \"prms_channel\"]\n",
954991
"model_dict[\"model_order\"] = keep_procs\n",
955992
"for kk in list(model_dict.keys()):\n",
@@ -1205,14 +1242,6 @@
12051242
"* Regan, R. S., Markstrom, S. L., Hay, L. E., Viger, R. J., Norton, P. A., Driscoll, J. M., & LaFontaine, J. H. (2018). Description of the national hydrologic model for use with the precipitation-runoff modeling system (prms) (No. 6-B9). US Geological Survey.\n",
12061243
"* Regan, R.S., Markstrom, S.L., LaFontaine, J.H., 2022, PRMS version 5.2.1: Precipitation-Runoff Modeling System (PRMS): U.S. Geological Survey Software Release, 02/10/2022."
12071244
]
1208-
},
1209-
{
1210-
"cell_type": "code",
1211-
"execution_count": null,
1212-
"id": "ff4dbac1-046f-4354-94e6-2610269c2de4",
1213-
"metadata": {},
1214-
"outputs": [],
1215-
"source": []
12161245
}
12171246
],
12181247
"metadata": {

examples/02_prms_legacy_models.ipynb

+1-13
Original file line numberDiff line numberDiff line change
@@ -69,11 +69,7 @@
6969
{
7070
"cell_type": "markdown",
7171
"id": "39abd69f-13a3-4f23-9678-a57c0b1f848d",
72-
"metadata": {
73-
"jupyter": {
74-
"source_hidden": true
75-
}
76-
},
72+
"metadata": {},
7773
"source": [
7874
"The domain directory is where we have all the required inputs to run this model (among others) and `nb_output_dir` is where this notebook will write its output. "
7975
]
@@ -493,14 +489,6 @@
493489
"%%time\n",
494490
"submodel.run(finalize=True)"
495491
]
496-
},
497-
{
498-
"cell_type": "code",
499-
"execution_count": null,
500-
"id": "cfcc3aa8-cc8f-40b9-9a72-124567c2c8bf",
501-
"metadata": {},
502-
"outputs": [],
503-
"source": []
504492
}
505493
],
506494
"metadata": {

examples/03_compare_pws_prms.ipynb

+27-12
Original file line numberDiff line numberDiff line change
@@ -96,7 +96,7 @@
9696
"calc_method: str = \"numba\"\n",
9797
"budget_type: str = None\n",
9898
"\n",
99-
"run_prms: bool = True ## always forced/overwrite\n",
99+
"run_prms: bool = False ## always forced/overwrite\n",
100100
"\n",
101101
"run_pws: bool = True # run if the output does not exist on disk\n",
102102
"force_pws_run: bool = True # if it exists on disk, re-run it and overwrite?"
@@ -194,7 +194,22 @@
194194
" input_dir = domain_dir / \"output\"\n",
195195
" run_dir = nb_output_dir / f\"{domain_name}_subset_nhm\"\n",
196196
"\n",
197-
" control = pws.Control.load(domain_dir / \"control.test\")\n",
197+
" control = pws.Control.load_prms(\n",
198+
" domain_dir / \"control.test\", warn_unused_options=False\n",
199+
" )\n",
200+
" output_var_names = control.options[\"netcdf_output_var_names\"] + [\n",
201+
" \"soltab_horad_potsw\",\n",
202+
" \"soltab_potsw\",\n",
203+
" \"soltab_sunhrs\",\n",
204+
" ]\n",
205+
" control.options = control.options | {\n",
206+
" \"input_dir\": input_dir,\n",
207+
" \"budget_type\": budget_type,\n",
208+
" \"calc_method\": calc_method,\n",
209+
" \"netcdf_output_dir\": run_dir,\n",
210+
" \"netcdf_output_var_names\": output_var_names,\n",
211+
" }\n",
212+
"\n",
198213
" params = pws.parameters.PrmsParameters.load(domain_dir / \"myparam.param\")\n",
199214
"\n",
200215
" if run_dir.exists():\n",
@@ -205,13 +220,6 @@
205220
"\n",
206221
" print(f\"PWS writing output to {run_dir}\")\n",
207222
"\n",
208-
" control.options = control.options | {\n",
209-
" \"input_dir\": input_dir,\n",
210-
" \"budget_type\": budget_type,\n",
211-
" \"calc_method\": calc_method,\n",
212-
" \"netcdf_output_dir\": run_dir,\n",
213-
" }\n",
214-
"\n",
215223
" nhm = pws.Model(\n",
216224
" nhm_processes,\n",
217225
" control=control,\n",
@@ -248,11 +256,16 @@
248256
" var_meta = pws.meta.find_variables(var_name)[var_name]\n",
249257
" ylabel = f\"{fill(var_meta['desc'], 40)}\\n({var_meta['units']})\"\n",
250258
"\n",
259+
" pws_file = run_dir / f\"{var_name}.nc\"\n",
251260
" prms_file = domain_dir / f\"output/{var_name}.nc\"\n",
252261
" if not prms_file.exists():\n",
253262
" return None\n",
263+
" if not pws_file.exists():\n",
264+
" print(f\"PWS file {pws_file} DNE, skipping.\")\n",
265+
" return None\n",
266+
"\n",
254267
" prms_var = xr.open_dataarray(prms_file)\n",
255-
" pws_var = xr.open_dataarray(run_dir / f\"{var_name}.nc\")\n",
268+
" pws_var = xr.open_dataarray(pws_file)\n",
256269
"\n",
257270
" if rmse_min is not None:\n",
258271
" if \"time\" in prms_var.dims:\n",
@@ -313,7 +326,9 @@
313326
" return None\n",
314327
" prms = xr.open_dataarray(prms_file, decode_timedelta=False)\n",
315328
" pws_file = run_dir / f\"{var_name}.nc\"\n",
316-
" assert pws_file.exists()\n",
329+
" if not pws_file.exists():\n",
330+
" print(f\"PWS file '{prms_file}' DNE, skipping.\")\n",
331+
" return None\n",
317332
" nhm_after = xr.open_dataarray(pws_file, decode_timedelta=False)\n",
318333
" if \"time\" in prms.dims:\n",
319334
" time_dim = \"time\"\n",
@@ -549,7 +564,7 @@
549564
"outputs": [],
550565
"source": [
551566
"if pws.PRMSGroundwater in nhm_processes:\n",
552-
" compare_var_timeseries(\"gwres_flow_vol\")"
567+
" compare_var_timeseries(\"gwres_flow\")"
553568
]
554569
},
555570
{

pywatershed/atmosphere/prms_atmosphere.py

+5-6
Original file line numberDiff line numberDiff line change
@@ -138,12 +138,11 @@ def __init__(
138138
self._calculated = False
139139

140140
self._netcdf_initialized = False
141-
self.netcdf_output_dir = netcdf_output_dir
142-
if self.netcdf_output_dir:
141+
if self._netcdf_output_dir:
143142
self.initialize_netcdf(
144-
output_dir=pl.Path(netcdf_output_dir),
145-
separate_variables=netcdf_separate_files,
146-
output_vars=netcdf_output_vars,
143+
output_dir=pl.Path(self._netcdf_output_dir),
144+
separate_variables=self._netcdf_separate_files,
145+
output_vars=self._netcdf_output_vars,
147146
)
148147

149148
return
@@ -835,7 +834,7 @@ def initialize_netcdf(
835834
output_vars: list = None,
836835
**kwargs,
837836
):
838-
if self._netcdf_initialized:
837+
if self._netcdf_initialized and self.control.options["verbosity"] > 5:
839838
msg = (
840839
f"{self.name} class previously initialized netcdf output "
841840
f"in {self._netcdf_output_dir}"

pywatershed/atmosphere/prms_solar_geometry.py

+5-6
Original file line numberDiff line numberDiff line change
@@ -97,12 +97,11 @@ def __init__(
9797
self._calculated = False
9898

9999
self._netcdf_initialized = False
100-
if self.netcdf_output_dir:
101-
self._calculate_all_time()
100+
if self._netcdf_output_dir:
102101
self.initialize_netcdf(
103-
output_dir=pl.Path(netcdf_output_dir),
104-
separate_variables=netcdf_separate_files,
105-
output_vars=netcdf_output_vars,
102+
output_dir=pl.Path(self._netcdf_output_dir),
103+
separate_variables=self._netcdf_separate_files,
104+
output_vars=self._netcdf_output_vars,
106105
)
107106

108107
return
@@ -481,7 +480,7 @@ def initialize_netcdf(
481480
output_vars: list = None,
482481
**kwargs,
483482
):
484-
if self._netcdf_initialized:
483+
if self._netcdf_initialized and self.control.options["verbosity"] > 5:
485484
msg = (
486485
f"{self.name} class previously initialized netcdf output "
487486
f"in {self._netcdf_output_dir}"

0 commit comments

Comments
 (0)