Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion .github/workflows/test_suite.yml
Original file line number Diff line number Diff line change
Expand Up @@ -69,4 +69,8 @@ jobs:
- name: 'Run tests'
run: |
python3 -m coverage erase
python3 -m pytest --cov=pygem -v --durations=20 pygem/tests
# run each test file explicitly in the desired order
python3 -m pytest --cov=pygem -v --durations=20 pygem/tests/test_01_basics.py
python3 -m pytest --cov=pygem -v --durations=20 pygem/tests/test_02_config.py
python3 -m pytest --cov=pygem -v --durations=20 pygem/tests/test_03_notebooks.py
python3 -m pytest --cov=pygem -v --durations=20 pygem/tests/test_04_postproc.py
749 changes: 145 additions & 604 deletions pygem/bin/postproc/postproc_compile_simulations.py

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions pygem/bin/postproc/postproc_monthly_mass.py
Original file line number Diff line number Diff line change
Expand Up @@ -225,7 +225,7 @@ def main():
simpath = None
if args.simdir:
# get list of sims
simpath = glob.glob(args.simdir + '*.nc')
simpath = glob.glob(args.simdir + '/*.nc')
else:
if args.simpath:
simpath = args.simpath
Expand All @@ -238,8 +238,8 @@ def main():
ncores = 1

# Parallel processing
print('Processing with ' + str(args.ncores) + ' cores...')
with multiprocessing.Pool(args.ncores) as p:
print('Processing with ' + str(ncores) + ' cores...')
with multiprocessing.Pool(ncores) as p:
p.map(run, simpath)

print('Total processing time:', time.time() - time_start, 's')
Expand Down
File renamed without changes.
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,14 @@

import pytest

from pygem.setup.config import ConfigManager

# instantiate ConfigManager
config_manager = ConfigManager()
# update export_extra_vars to True before running tests
config_manager.update_config({'sim.out.export_extra_vars': True})


# Get all notebooks in the PyGEM-notebooks repository
nb_dir = os.environ.get('PYGEM_NOTEBOOKS_DIRPATH') or os.path.join(
os.path.expanduser('~'), 'PyGEM-notebooks'
Expand Down
122 changes: 122 additions & 0 deletions pygem/tests/test_04_postproc.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
import glob
import os
import subprocess

import numpy as np
import pytest
import xarray as xr

from pygem.setup.config import ConfigManager


@pytest.fixture(scope='module')
def rootdir():
    """Module-scoped fixture: the PyGEM root directory read from the user config."""
    # ConfigManager locates and parses the PyGEM configuration file;
    # 'root' is the base directory all Output paths hang off.
    return ConfigManager().read_config()['root']


def test_postproc_monthly_mass(rootdir):
    """Run the postproc_monthly_mass CLI script against the region-01 sims."""
    # Directory of per-glacier simulation statistics NetCDF files
    # (RGI region 01, CESM2, ssp245).
    stats_dir = os.path.join(
        rootdir, 'Output', 'simulations', '01', 'CESM2', 'ssp245', 'stats'
    )

    # check=True makes the test fail with CalledProcessError on a
    # non-zero exit status from the CLI script.
    subprocess.run(['postproc_monthly_mass', '-simdir', stats_dir], check=True)


def test_postproc_compile_simulations(rootdir):
    """Run the postproc_compile_simulations CLI script and check it produced output."""
    # CLI flag/value pairs for the compilation run (dicts preserve
    # insertion order, so argv order matches the original invocation).
    cli_opts = {
        '-rgi_region01': '01',
        '-option_calibration': 'MCMC',
        '-sim_climate_name': 'CESM2',
        '-sim_climate_scenario': 'ssp245',
        '-sim_startyear': '2000',
        '-sim_endyear': '2100',
    }
    cmd = ['postproc_compile_simulations']
    for flag, value in cli_opts.items():
        cmd += [flag, value]

    # check=True fails the test on a non-zero exit status.
    subprocess.run(cmd, check=True)

    # At least one compiled NetCDF product must exist somewhere under
    # the glacier_stats compile directory.
    compdir = os.path.join(rootdir, 'Output', 'simulations', 'compile', 'glacier_stats')
    output_files = glob.glob(os.path.join(compdir, '**', '*.nc'), recursive=True)
    assert output_files, f'No output files found in {compdir}'


def test_check_compiled_product(rootdir):
    """
    Verify the contents of the files created by postproc_compile_simulations.

    For each multi-dimensional variable in one single-glacier simulation
    file, the corresponding compiled (all-glacier) product must exist and
    hold identical data for that glacier.
    """
    # skip variables that are not in the compiled products
    vars_to_skip = [
        'glac_temp_monthly',
        'glac_mass_change_ignored_annual',
        'offglac_prec_monthly',
        'offglac_refreeze_monthly',
        'offglac_melt_monthly',
        'offglac_snowpack_monthly',
    ]

    # Single-glacier simulation output produced by the earlier tests.
    simpath = os.path.join(
        rootdir,
        'Output',
        'simulations',
        '01',
        'CESM2',
        'ssp245',
        'stats',
        '1.03622_CESM2_ssp245_MCMC_ba1_50sets_2000_2100_all.nc',
    )
    compdir = os.path.join(rootdir, 'Output', 'simulations', 'compile', 'glacier_stats')

    with xr.open_dataset(simpath) as simds:
        # only multi-dimensional variables are compiled
        vars_to_check = [
            name for name, var in simds.variables.items() if len(var.dims) > 1
        ]
        vars_to_check = [item for item in vars_to_check if item not in vars_to_skip]

        for var in vars_to_check:
            # skip median-absolute-deviation companions
            if 'mad' in var:
                continue
            simvar = simds[var]
            # BUGFIX: assert on the glob result before indexing it —
            # indexing an empty list raised IndexError before the original
            # os.path.isfile assert (with its helpful message) could run.
            pattern = os.path.join(compdir, var, '01', f'R01_{var}*.nc')
            matches = glob.glob(pattern)
            assert matches, f'Compiled product not found for {var} matching {pattern}'
            comppath = matches[0]
            with xr.open_dataset(comppath) as compds:
                compvar = compds[var]

                # compiled product has one extra leading dimension
                # (the `model` axis) relative to the single-sim variable
                assert compvar.ndim == simvar.ndim + 1

                # pull data values; index 0 on the leading axis selects
                # this glacier's slice of the compiled product
                simvals = simvar.values
                compvals = compvar.values[0, :, :]

                # check that compiled product has same shape as original data
                assert simvals.shape == compvals.shape, (
                    f'Compiled product shape {compvals.shape} does not match original data shape {simvals.shape}'
                )
                # np.array_equal already returns a single bool — the original
                # np.all(...) wrapper was redundant
                assert np.array_equal(simvals, compvals), (
                    f'Compiled product for {var} does not match original data'
                )