diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000..3abc0453 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,3 @@ +tests/data/wrfnat_hrconus_16.grib2 filter=lfs diff=lfs merge=lfs -text +tests/data/wrfprs_hrconus_16.grib2 filter=lfs diff=lfs merge=lfs -text +tests/data/wrf* filter=lfs diff=lfs merge=lfs -text diff --git a/.github/workflows/graphics_tests.yml b/.github/workflows/graphics_tests.yml index 3fae5462..b5f37e1c 100644 --- a/.github/workflows/graphics_tests.yml +++ b/.github/workflows/graphics_tests.yml @@ -12,21 +12,20 @@ jobs: runs-on: ubuntu-latest defaults: run: - shell: bash -l {0} + shell: bash -el {0} steps: - name: Checkout repo - uses: actions/checkout@v2 + uses: actions/checkout@v6 with: lfs: true - - name: Install Micromamba with pygraf environment - uses: mamba-org/setup-micromamba@v1 + - name: Install Miniforge with pygraf environment + uses: conda-incubator/setup-miniconda@v3 with: environment-file: environment.yml - cache-downloads: true - cache-environment: true - - name: Lint code - run: find . -type f -name "*.py" | xargs pylint - shell: bash -el {0} + miniforge-version: 25.11.0-0 + - name: Install dev pkgs + run: make devenv - name: Test code - run: python -m pytest --nat-file tests/data/wrfnat_hrconus_07.grib2 --prs-file tests/data/wrfprs_hrconus_07.grib2 --ignore=tests/test_hrrr_maps.py - shell: bash -el {0} + run: | + conda activate pygraf + make test diff --git a/.github/workflows/hrrr_maps_tests.yml b/.github/workflows/hrrr_maps_tests.yml deleted file mode 100644 index 40f6791d..00000000 --- a/.github/workflows/hrrr_maps_tests.yml +++ /dev/null @@ -1,54 +0,0 @@ -name: hrrr_maps_tests -env: - data_loc: ${{ github.workspace }}/input_data - output_loc: ${{ github.workspace }}/output -on: - push: - branches: - - main - pull_request: - branches: - - main - workflow_dispatch: -jobs: - test_hrrr_maps: - runs-on: ubuntu-latest - defaults: - run: - shell: bash -l {0} - steps: - - name: Checkout repo - uses: actions/checkout@v3 - - name: Create data and output folders - run: | - mkdir -p $output_loc - mkdir -p $data_loc - - name: Fetch Grib Files - run: | - wget -N -P $data_loc -i - << EOF - https://noaa-hrrr-bdp-pds.s3.amazonaws.com/hrrr.20230315/conus/hrrr.t00z.wrfprsf00.grib2 - https://noaa-hrrr-bdp-pds.s3.amazonaws.com/hrrr.20230315/conus/hrrr.t00z.wrfprsf01.grib2 - https://noaa-hrrr-bdp-pds.s3.amazonaws.com/hrrr.20230315/conus/hrrr.t00z.wrfprsf02.grib2 - https://noaa-hrrr-bdp-pds.s3.amazonaws.com/hrrr.20230315/conus/hrrr.t00z.wrfprsf03.grib2 - https://noaa-hrrr-bdp-pds.s3.amazonaws.com/hrrr.20230315/conus/hrrr.t00z.wrfprsf04.grib2 - https://noaa-hrrr-bdp-pds.s3.amazonaws.com/hrrr.20230315/conus/hrrr.t00z.wrfprsf05.grib2 - https://noaa-hrrr-bdp-pds.s3.amazonaws.com/hrrr.20230315/conus/hrrr.t00z.wrfprsf06.grib2 - https://noaa-hrrr-bdp-pds.s3.amazonaws.com/hrrr.20230315/conus/hrrr.t00z.wrfprsf07.grib2 - https://noaa-hrrr-bdp-pds.s3.amazonaws.com/hrrr.20230315/conus/hrrr.t00z.wrfprsf08.grib2 - https://noaa-hrrr-bdp-pds.s3.amazonaws.com/hrrr.20230315/conus/hrrr.t00z.wrfprsf09.grib2 - https://noaa-hrrr-bdp-pds.s3.amazonaws.com/hrrr.20230315/conus/hrrr.t00z.wrfprsf10.grib2 - https://noaa-hrrr-bdp-pds.s3.amazonaws.com/hrrr.20230315/conus/hrrr.t00z.wrfprsf11.grib2 - https://noaa-hrrr-bdp-pds.s3.amazonaws.com/hrrr.20230315/conus/hrrr.t00z.wrfprsf12.grib2 - EOF - ls - - name: Install Micromamba with pygraf environment - uses: mamba-org/setup-micromamba@v1 - with: - environment-file: environment.yml - cache-downloads: true - cache-env: true - - 
name: Test code - run: | - export GITHUB_WORKSPACE=$(pwd) - python -m pytest tests/test_hrrr_maps.py - shell: bash -el {0} diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..52e99e04 --- /dev/null +++ b/Makefile @@ -0,0 +1,30 @@ +TARGETS = devenv env format lint test typecheck unittest +DEVPKGS = $(shell cat devpkgs) +ENVNAME = pygraf + +.PHONY: $(TARGETS) + + +devenv: env + mamba install -y -n $(ENVNAME) $(DEVPKGS) + +env: + mamba env create -y -f environment.yml + +format: + @./format + +lint: + ruff check . + +test: lint typecheck unittest + +typecheck: + mypy --install-types --non-interactive . + +unittest: + pytest --cov -k "not hrrr_maps" -n 4 . + +memtest: + pytest --memray -k "not hrrr" . + diff --git a/README.md b/README.md index 18c70ac4..02dc0ddc 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,5 @@ # ADB Graphics Creation for UPP Model Output -> Note: This repository is under heavy development. Use at your own risk! - This repository houses a Python-based implementation of the graphics package that is responsible for generating maps for the RAP/HRRR/FV3/RRFS data. It has replaced NCL as the real-time graphics creation package at NOAA GSL for maps and @@ -40,10 +38,30 @@ activate this environment, do the following: ``` module use -a /contrib/miniconda3/modulefiles -module load miniconda3 +module load miniconda3/25.11.0 conda activate pygraf ``` +This environment contains the necessary development packages. + +## Installing with conda + +Pygraf comes with an `environment.yml` file for use with any conda installation. Ensure the conda base +environment is activated, and run the following command to create a `pygraf` environment suitable +for creating graphics: + +``` +cd pygraf +make env +``` + +For developers who want to run the test suite before contributing new changes to the repository, +additional development packages are required. To install those, run: + +``` +cd pygraf +make devenv +``` ## Stage data @@ -143,7 +161,7 @@ python create_graphics.py \ --tiles full,ATL,CA-NV,CentralCA ``` -NOTE: The graphics already run as a workflow step in the RRFS Retros! They may be +> NOTE: The graphics already run as a workflow step in the RRFS Retros! They may be zipped by default, so you can unzip those files to see your images on disk. ### Creating Skew-T Diagrams @@ -274,16 +292,58 @@ guidelines: - All code must pass tests, and tests must be updated to accommodate new code. - Style beyond linting: - Alphabetize lists (anywhere another order is not more obvious to everyone) - - A single white space line before and after comments. - - A single white space after each method/function. Two after classes. - - Lists are maintained with each item on a single line followed by a comma, - even the last item. This repository is using a minor variation on GitLab flow, requiring new work be contributed via Pull Request from a branch with reviewers (required). Releases will be handled with tags (as opposed to branches, in the original GitLab flow), and will be marked as versions with v[major].[minor].[update]. + +# Running Tests + +GitHub Actions is configured to run several code quality checks, including linting, formatting, and +sorting with `ruff`, type checking with `mypy`, and unit tests with `pytest`. 
To perform the same +checks locally, developers can run: + +``` +(pygraf) $ make format && make test +``` + +# Working with ecCodes for grib2 + +Two command line utilities are available in the conda environment that will help navigate the +ecCodes interpretation of a grib2 file. `grib_ls` gives a single-line record listing of the entire file by +default, while `grib_dump` provides all the metadata for each record. + +Documentation is available from ECMWF: +* https://confluence.ecmwf.int/display/ECC/grib_ls +* https://confluence.ecmwf.int/display/ECC/grib_dump + + +There are many examples in the documentation, but here are a couple that could help with `pygraf` +specifically. + +Show the common `pygraf` parameters for the 6th record: +``` +$ grib_ls -p shortName,parameterNumber,typeOfLevel,stepType,level -w count=6 hrrr.t05z.wrfnatf08.grib2 +hrrr.t05z.wrfnatf08.grib2 +shortName parameterNumber typeOfLevel stepType level +grle 32 hybrid instant 1 +``` + +The `count` parameter is nice to use in conjunction with `wgrib2` output where that tool may show +the information needed to identify a variable where the `shortName` from ecCodes may be "unknown". + +To see all available information from that record: + +``` +grib_dump -w count=6 hrrr.t05z.wrfnatf08.grib2 +``` + +> NOTE: When using the `-w` flag with items other than `count`, multiple records may be included in +the output. + + # Contact | Name | Email | diff --git a/adb_graphics/conversions.py b/adb_graphics/conversions.py index 1917a1f5..e7f0a216 100644 --- a/adb_graphics/conversions.py +++ b/adb_graphics/conversions.py @@ -1,109 +1,109 @@ -# pylint: disable=unused-argument,invalid-name -''' +""" This module contains functions for converting the units of a field. The interface requires a single atmospheric field in a Numpy array, and returns the converted values as output. -''' +""" -import numpy as np +from numpy import ndarray +from xarray import DataArray +from xarray.ufuncs import sqrt, square -def k_to_c(field, **kwargs): - ''' Conversion from Kelvin to Celsius ''' +def k_to_c(field: ndarray, **_kwargs): + """Conversion from Kelvin to Celsius.""" return field - 273.15 -def k_to_f(field, **kwargs): - ''' Conversion from Kelvin to Farenheit ''' +def k_to_f(field: ndarray, **_kwargs): + """Conversion from Kelvin to Fahrenheit.""" + return (field - 273.15) * 9 / 5 + 32 - return (field - 273.15) * 9/5 + 32 -def kgm2_to_in(field, **kwargs): - - ''' Conversion from kg per m^2 to inches ''' +def kgm2_to_in(field: ndarray, **_kwargs): + """Conversion from kg per m^2 to inches.""" return field * 0.03937 -def magnitude(a, b, **kwargs): - ''' Return the magnitude of vector components ''' +def magnitude(a: DataArray, b: DataArray, **_kwargs) -> DataArray: + """Return the magnitude of vector components.""" - return np.sqrt(np.square(a) + np.square(b)) + return DataArray(sqrt(square(a) + square(b))) -def m_to_dm(field, **kwargs): - ''' Conversion from meters to decameters ''' +def m_to_dm(field: ndarray, **_kwargs): + """Conversion from meters to decameters.""" - return field / 10. 
+ return field / 10.0 -def m_to_in(field, **kwargs): - ''' Conversion from meters to inches ''' +def m_to_in(field: ndarray, **_kwargs): + """Conversion from meters to inches.""" return field * 39.3701 -def m_to_kft(field, **kwargs): - ''' Conversion from meters to kilofeet ''' +def m_to_kft(field: ndarray, **_kwargs): + """Conversion from meters to kilofeet.""" return field / 304.8 -def m_to_mi(field, **kwargs): - ''' Conversion from meters to miles ''' +def m_to_mi(field: ndarray, **_kwargs): + """Conversion from meters to miles.""" return field / 1609.344 -def ms_to_kt(field, **kwargs): - ''' Conversion from m s-1 to knots ''' +def ms_to_kt(field: ndarray, **_kwargs): + """Conversion from m s-1 to knots.""" return field * 1.9438 -def pa_to_hpa(field, **kwargs): - ''' Conversion from Pascals to hectopascals ''' +def pa_to_hpa(field: ndarray, **_kwargs): + """Conversion from Pascals to hectopascals.""" - return field / 100. + return field / 100.0 -def percent(field, **kwargs): - ''' Conversion from values between 0 - 1 to percent ''' +def percent(field: ndarray, **_kwargs): + """Conversion from values between 0 - 1 to percent.""" - return field * 100. + return field * 100.0 -def to_micro(field, **kwargs): - ''' Convert field to micro ''' +def sden_to_slr(field: ndarray, **_kwargs): + """Convert snow density (kg m-3) to snow-liquid ratio.""" - return field * 1E6 + return 1000.0 / field -def to_micrograms_per_m3(field, **kwargs): - ''' Convert field to micrograms per cubic meter ''' +def to_micro(field: ndarray, **_kwargs): + """Convert field to micro.""" - return field * 1E9 + return field * 1e6 -def vvel_scale(field, **kwargs): - ''' Scale vertical velocity for plotting ''' +def to_micrograms_per_m3(field: ndarray, **_kwargs): + """Convert field to micrograms per cubic meter.""" - return field * -10 + return field * 1e9 -def vort_scale(field, **kwargs): - ''' Scale vorticity for plotting ''' +def vvel_scale(field: ndarray, **_kwargs): + """Scale vertical velocity for plotting.""" - return field / 1E-05 + return field * -10 -def weasd_to_1hsnw(field, **kwargs): - ''' Conversion from snow wter equiv to snow (10:1 ratio) ''' +def vort_scale(field: ndarray, **_kwargs): + """Scale vorticity for plotting.""" - return field * 10. + return field / 1e-05 -def sden_to_slr(field, **kwargs): - ''' Convert snow density (kg m-3) to snow-liquid ratio ''' +def weasd_to_1hsnw(field: ndarray, **_kwargs): + """Conversion from snow water equiv to snow (10:1 ratio).""" - return 1000. / field + return field * 10.0 diff --git a/adb_graphics/datahandler/gribdata.py b/adb_graphics/datahandler/gribdata.py index af24425c..63949b76 100644 --- a/adb_graphics/datahandler/gribdata.py +++ b/adb_graphics/datahandler/gribdata.py @@ -1,254 +1,158 @@ -# pylint: disable=invalid-name, too-many-public-methods, too-many-lines - -''' +""" Classes that handle the specifics of grib files from UPP. -''' +""" import abc -import datetime -from functools import lru_cache -from string import digits, ascii_letters +from copy import deepcopy +from datetime import datetime, timedelta +from functools import cached_property -from matplotlib import cm import numpy as np -import xarray as xr +from matplotlib.pyplot import get_cmap +from pandas import to_datetime +from uwtools.api.config import YAMLConfig +from xarray import DataArray, Dataset, ufuncs, where -from .. import conversions -from .. import errors -from .. import specs -from .. 
import utils +from adb_graphics import conversions, errors, specs, utils -class UPPData(specs.VarSpec): - ''' +class UPPData(specs.VarSpec): + """ Class provides interface for accessing field data from UPP in Grib2 format. Input: ds: xarray dataset from grib file + model: name of the model from the image list short_name: name of variable corresponding to entry in specs configuration - - Keyword Arguments: - config: path to a user-specified configuration file - model: string describing the model type - ''' - - def __init__(self, ds, short_name, **kwargs): - - - # Parse kwargs first - config = kwargs.get('config', 'adb_graphics/default_specs.yml') - self.model = kwargs.get('model') - self.filetype = kwargs.get('filetype', 'prs') - - - specs.VarSpec.__init__(self, config) - - self.spec = self.yml + spec: full specs dictionary + """ + + def __init__( + self, + fhr: int, + ds: dict[str, Dataset], + model: str, + short_name: str, + spec: dict | YAMLConfig, + level: str | None = None, + ): + self.model = model + self.spec = spec self.short_name = short_name - self.level = 'ua' - - self.fhr = str(kwargs['fhr']) + self.level = level or "ua" + self.fhr = fhr + cf = deepcopy(self.vspec) + utils.set_level(level=str(level), model=self.model, spec=cf) + cf = utils.cfgrib_spec(cf["cfgrib"], self.model) + key = "typeOfLevel" + try: + self.vertical_coord = cf[key] + except KeyError: # pragma: no cover + msg = f"{key} is not a key for {short_name} at {level}. cf: {cf}" + raise KeyError(msg) from None self.ds = ds @property - def anl_dt(self) -> datetime.datetime: - - ''' Returns the initial time of the grib file as a datetime object from - the grib file.''' - - return datetime.datetime.strptime(self.field.initial_time, '%m/%d/%Y (%H:%M)') + def anl_dt(self) -> datetime: + """ + Returns the initial time of the GRIB file as a datetime object from + the GRIB file. + """ + ret: datetime = to_datetime(self.field.time.values) + return ret @property def clevs(self) -> np.ndarray: - - ''' + """ Uses the information contained in the yaml config file to determine the set of levels to be contoured. Returns the list of levels. - The yaml file "clevs" key may contain a list, a range, or a call to a - function. The logic to parse those options is included here. - ''' - - clev = np.asarray(self.vspec.get('clevs', [])) + The yaml file "clevs" key may contain a list or a range. + """ - # Is clevs a list? - if isinstance(clev, (list, np.ndarray)): - return np.asarray(clev) - - # Is clev a call to another function? - try: - return utils.get_func(clev)() - except ImportError: - print(f'Check yaml file definition of CLEVS for {self.short_name}. ', - 'Must be a list, range, or function call!') + return np.asarray(self.vspec.get("clevs", [])) @staticmethod def date_to_str(date: datetime) -> str: - - ''' Returns a formatted string (for graphic title) from a datetime - object''' - - return date.strftime('%Y%m%d %H UTC') - - @property - def field(self): - - ''' Wrapper that calls get_field method for the current variable. - Returns the NioVariable object ''' - - return self._get_field(self.ncl_name(self.vspec)) - - def field_column_max(self, values, variable, level, **kwargs): - - # pylint: disable=unused-argument - - ''' Returns the column max of the values. ''' - - vals = self.values(name=variable, level=level, one_lev=False) - maxvals = vals.max(axis=0) - - return maxvals - - def field_sum(self, values, variable2, level2, **kwargs): - - # pylint: disable=unused-argument - - ''' Return the sum of the values. 
''' - - value2 = self.values(name=variable2, level=level2) - sum2 = values + value2 - value2.close() - - return sum2 - - def field_diff(self, values, variable2, level2, **kwargs): - - # pylint: disable=unused-argument - - ''' Subtracts the values from variable2 from self.field. ''' - - value2 = self.values(name=variable2, level=level2) - diff = values - value2 - value2.close() - - return diff - - def field_mean(self, values, variable, levels, global_levels, **kwargs): - - # pylint: disable=unused-argument - - ''' Returns the mean of the values. ''' - - fsum = np.zeros_like(values) - - chosen_levels = global_levels if 'global' in self.model else levels - for level in global_levels: - val_lev = self.values(name=variable, level=level) - fsum = fsum + val_lev - val_lev.close() - - return fsum / len(chosen_levels) - - def _get_data_levels(self, vertical_dim): - - ''' Return a list of vertical dimension values corresponding to the - requested vertical dimension to get the values of those dimensions ''' - - fcst_hr = 0 if self.ds.sizes.get('fcst_hr', 0) <= 1 else int(self.fhr) - - ret = [] - for dim in [var for var in self.ds.variables \ - if vertical_dim in var]: - - # Get the current forecast hour slice, if it's in the dataset - selector = {'fcst_hr': fcst_hr} if 'fcst_hr' in self.ds[dim].dims else {} - ret.append(self.ds[dim].sel(**selector).values) - return ret - - def _get_field(self, ncl_name): - - ''' Given an ncl_name, return the NioVariable object. ''' - - try: - field = self.ds[ncl_name.format(level_type=self.level_type)] - except KeyError: - raise errors.GribReadError(f'{ncl_name}') - return field - - def _get_level(self, field, level, spec, **kwargs): - - ''' Returns the value of the level to for a 3D array - - Arguments: - - field dataset object for a given variable - level string describing the level atmospheric level; corresponds - to a key in default specs - spec the specifications dictionary to use for the variable in + """ + Returns a formatted string (for graphic title) from a datetime object. + """ + + return date.strftime("%Y%m%d %H UTC") + + @cached_property + def field(self) -> DataArray: + """ + Get the first DataArray out of the Dataset. + """ + return self._get_field(self.vspec["cfgrib"].get(self.model, self.vspec["cfgrib"])) + + def _get_data_levels(self, vertical_coord: str): + """ + Values of the vertical dimension. + + Arg: + vertical_coord the name of the vertical dimension + """ + dim = [str(coord) for coord in self.field.coords if vertical_coord in str(coord)][0] + return self.field.coords[dim].to_numpy() + + def _get_field(self, cfgribspec: dict) -> DataArray: + """ + Given a cfgrib block, return the DataArray. + + Arg: + cfgribspec the specifications dictionary to use for the variable in question - - - Keyword Arguments: - split bool sometimes passed in through transforms that indicates - a level string should be split, e.g. 06km. - - - Return: - - Integer value corresponding to the array index for the atmospheric - level. - ''' - - # The index of the requested level - lev = spec.get('vertical_index') - if lev is not None: - return lev - - vertical_dim = self.vertical_dim(field) - - # numeric_level returns a list of length 1 (e.g. 
[500] for 500 mb) or of - # length 2 when split=True and it's like 0-6 km, so returns [0, 6000] - requested_level, _ = self.numeric_level(level=level, - split=kwargs.get('split', spec.get('split')), - ) - - # data_levels contains a list of vertical dimension values - data_levels = self._get_data_levels(vertical_dim) - - # For split-level variables, like 0-6km, find the matching index by - # looping through both the possible vertical level arrays. - if len(data_levels) == 2 and len(requested_level) == 2: - for lev, levset in enumerate(zip(*[list(lev) for lev in data_levels])): - if sorted(levset) == requested_level: - return lev - - # For single-level variables, like 500mb, use the argwhere function to - # return the matching index - if len(requested_level) == 1: - for dim_levels in data_levels: - lev = np.argwhere(dim_levels == requested_level[0]) - try: - if lev or lev == [0]: - lev = int(lev[0]) - return lev - except ValueError: - print(f'BAD LEVEL is {lev} for {field.name}') - - print(f"Could not find a level for {field.name} at requested \ - level = {requested_level} for variable levels = {data_levels}. Index \ - was {lev}.") - - # If neither of those cases worked out appropriately, raise an error. - msg = f'Length of requested_level ({len(requested_level)}) or '\ - f'data_levels ({len(data_levels)}) bad!' \ - f' {level} {field.name}' - raise ValueError(msg) - - def get_transform(self, transforms, val): - - ''' Applies a set of one or more transforms to an np.array of + """ + + def _find_var(): + if ds.get(short_name) is not None: + return short_name + + for var in ds: + if ds[var].attrs["GRIB_shortName"] == short_name: + return var + return None # pragma: no cover + + short_name = cfgribspec.get("shortName", "unknown") + vertical_coord = cfgribspec["typeOfLevel"] + step_type = cfgribspec.get("stepType", "instant") + var_id = f"{short_name}_{vertical_coord}_{step_type}" + vertical_coord = "level" if vertical_coord == "unknown" else vertical_coord + ds: Dataset | dict = self.ds.get(var_id, {}) + if ds == {}: + msg = f"{var_id} is not a valid key for the dataset" + raise ValueError(msg) + var = _find_var() + if var is not None: + field = ds[var] + level = cfgribspec.get("level") + layered = False + if level is None: + top = cfgribspec.get("topLevel", cfgribspec.get("scaledValueOfFirstFixedSurface")) + bottom = cfgribspec.get( + "bottomLevel", cfgribspec.get("scaledValueOfSecondFixedSurface") + ) + layered = top is not None or bottom is not None + level = top if top in field.coords[vertical_coord] else bottom + if level is None: + level = utils.numeric_level(self.level)[0] + level = None if level == "" else level + leveled = level is not None and vertical_coord != "hybrid" + if len(field.coords[vertical_coord].shape) > 0 and (layered or leveled): + if vertical_coord == "depthBelowLandLayer" and level: + level = float(level) / 100.0 # pragma: no cover + field = field.sel(**{vertical_coord: level}) + return DataArray(field) + msg = f"Variable {short_name} not found in dataset." # pragma: no cover + raise ValueError(msg) # pragma: no cover + + def get_transform(self, transforms: dict | list | str, val: DataArray) -> DataArray: + """ + Applies a set of one or more transforms to an np.array of data values. 
Input: @@ -258,217 +162,122 @@ def get_transform(self, transforms, val): transformed Return: - val: updated values after transforms have been applied - ''' - transform_kwargs = {} + """ + + transform_kwargs: dict = {} if isinstance(transforms, dict): - transform_list = transforms.get('funcs') + transform_list = transforms.get("funcs") if not isinstance(transform_list, list): transform_list = [transform_list] - transform_kwargs = transforms.get('kwargs') + transform_kwargs = transforms.get("kwargs", {}) elif isinstance(transforms, str): transform_list = [transforms] else: transform_list = transforms for transform in transform_list: - if len(transform.split('.')) == 1: + if len(transform.split(".")) == 1: val = self.__getattribute__(transform)(val, **transform_kwargs) else: val = utils.get_func(transform)(val, **transform_kwargs) return val - @lru_cache() - def get_xypoint(self, site_lat, site_lon) -> tuple: - - ''' + def get_xypoint(self, site_lat: float, site_lon: float) -> tuple: + """ Return the X, Y grid point corresponding to the site location. No interpolation is used. - ''' + """ lats, lons = self.latlons() adjust = 360 if np.any(lons < 0) else 0 lons = lons + adjust - max_x, max_y = np.shape(lats) - # Numpy magic to grab the X, Y grid point nearest the profile site - # pylint: disable=unbalanced-tuple-unpacking - x, y = np.unravel_index((np.abs(lats - site_lat) \ - + np.abs(lons - site_lon)).argmin(), lats.shape) - # pylint: enable=unbalanced-tuple-unpacking + msg = f"site location is outside your domain! {site_lat} {site_lon}" + if not lats.min() < site_lat < lats.max() or not lons.min() < site_lon < lons.max(): + print(msg) + return (-1, -1) - if x <= 0 or y <= 0 or x >= max_x or y >= max_y: - print(f'site location is outside your domain! {site_lat} {site_lon}') - return(-1, -1) + # Numpy magic to grab the X, Y grid point nearest the profile site + x, y = np.unravel_index( + (np.abs(lats - site_lat) + np.abs(lons - site_lon)).argmin(), lats.shape + ) return (x, y) - @property - def grid_suffix(self): - - ''' Return the suffix of the first variable with 4 sections (split on _) - in the file. This should correspond to the grid tag. ''' - - for var in self.ds.keys(): - vsplit = var.split('_') - if len(vsplit) == 4: - return vsplit[-1] - return 'GRID NOT FOUND' - - - def latlons(self): - - ''' Returns the set of latitudes and longitudes ''' - - coords = sorted([c for c in list(self.ds.coords) if - any(ele in c for ele in ['lat', 'lon'])]) - return [self.ds.coords[c].values for c in coords] - - @property - def lev_descriptor(self): - - ''' Returns the descriptor for the variable's level type. ''' - - return self.field.level_type - - @property - def level_type(self): - - ''' Returns a Grib2 code for type of level. 10 is used for - entire atmosphere in HRRR, while 200 is used in RRFS. ''' - - if self.filetype == 'prs': - if self.model == 'rrfs' or self.model == 'regional_mpas': - return 200 - return 10 - return 105 - - def ncl_name(self, spec: dict): - - ''' Get the ncl_name from the specified spec dict. ''' - - name = spec.get('ncl_name') - - if isinstance(name, dict): - if self.model in name.keys(): - name = name.get(self.model) - else: - name = name.get(self.filetype) - - if name is None: - print(f"Cannot find ncl_name for: ") - for key, value in spec.items(): - print(f'{key}: {value}') - raise KeyError - - # The level_type for the entire atmosphere could be L10 or L200. Thanks - # Grib2! Handle that in "try" statement when reading file. 
- - name = name if isinstance(name, list) else [name] - - try_name = '' - for try_name in name: - try_name = try_name.format(fhr=self.fhr, - grid=self.grid_suffix, - level_type=self.level_type) - - try: - self._get_field(try_name) - except errors.GribReadError: - continue - else: - return try_name - - msg = f'Could not find any of {try_name} in input file' - raise errors.GribReadError(msg) - - def numeric_level(self, index_match=True, level=None, split=None): - - ''' - Split the numeric level and unit associated with the level key. - - A blank string is returned for lev_val for levels that do not contain a - numeric, e.g., 'sfc' or 'ua'. - ''' - - level = level if level else self.level - - # Gather all the numbers in the string - lev_val = ''.join([c for c in level if (c in digits or c == '.')]) - - # Convert the numbers to a list, and make integers or floats - if lev_val: - if split is not None: - lev_val = [int(lev) for lev in lev_val] - else: - lev_val = [float(lev_val) if '.' in lev_val else int(lev_val)] - - # Gather all the letters - lev_unit = ''.join([c for c in level if c in ascii_letters]) - - if index_match: - if lev_unit == 'cm': - lev_val = [val / 100. for val in lev_val] - if lev_unit in ['mb', 'mxmb']: - lev_val = [val * 100. for val in lev_val] - if lev_unit in ['in', 'km', 'mn', 'mx', 'sr']: - lev_val = [val * 1000. for val in lev_val] - - return lev_val, lev_unit + def latlons(self) -> list[np.ndarray]: + """Returns the set of latitudes and longitudes.""" + + coords = sorted( + [ + str(c) + for c in list(self.field.coords) + if any(ele in str(c) for ele in ["lat", "lon"]) + ] + ) + lat = self.field.coords[coords[0]].to_numpy() + if len(lat.shape) == 1 and lat[-1] < lat[0]: + lat = lat[::-1] + lon = self.field.coords[coords[-1]].to_numpy() + return [lat, lon] @staticmethod - def opposite(values, **kwargs): - # pylint: disable=unused-argument + def opposite(values: DataArray, **_kwargs) -> DataArray: + """Returns the opposite of input values.""" - ''' Returns the opposite of input values ''' - - return - values + return -values @property - def valid_dt(self) -> datetime.datetime: - - ''' Returns a datetime object corresponding to the forecast hour's valid - time as set in the Grib file. ''' + def valid_dt(self) -> datetime: + """ + Returns a datetime object corresponding to the forecast hour's valid + time as set in the Grib file. + """ - fh = datetime.timedelta(hours=int(self.fhr)) + fh = timedelta(hours=int(self.fhr)) return self.anl_dt + fh @abc.abstractmethod - def values(self, level=None, name=None, **kwargs): - - ''' Returns the values of a given variable. ''' - ... - - @staticmethod - def vertical_dim(field): - - ''' Determine the vertical dimension of the variable by looking through - the field's dimensions for one that includes "lv". Return the first - matching instance. ''' - - vert_dim = [dim for dim in field.dims if ('lv' in dim or 'probability' in dim)] - if vert_dim: - return vert_dim[0] - return '' + def get_values( + self, level: str | None = None, name: str | None = None, do_transform: bool = True + ) -> DataArray: + """Returns the values of a given variable.""" + + def vector_magnitude( + self, + field1: DataArray, + field2_id: str, + **_kwargs, + ): + """ + Returns the vector magnitude of two component vector fields. + + The second field can be specified by either a dict of cfkeys or a default_specs identifier + in the form _. 
+ + """ + var, lev = field2_id.split("_") if "_" in field2_id else (field2_id, self.level) + field2 = self.get_values(level=lev, name=var, do_transform=False) + mag = conversions.magnitude(field1, field2) + field1.close() + field2.close() + return mag @property def vspec(self): - - ''' Return the graphics specification for a given level. ''' + """Return the graphics specification for a given level.""" vspec = self.spec.get(self.short_name, {}).get(self.level) if not vspec: - raise errors.NoGraphicsDefinitionForVariable(self.short_name, self.level) + raise errors.NoGraphicsDefinitionForVariableError(self.short_name, self.level) return vspec -class fieldData(UPPData): - - ''' +class FieldData(UPPData): + """ Class provides interface for accessing field (2D plan view) data from UPP in Grib2 format. @@ -481,128 +290,185 @@ class fieldData(UPPData): config: path to a user-specified configuration file member: integer describing the ensemble member number to grab data for - ''' - - def __init__(self, ds, level, short_name, **kwargs): - - super().__init__(ds, short_name, **kwargs) + """ + + def __init__( + self, + fhr: int, + ds: dict[str, Dataset], + level: str, + model: str, + short_name: str, + spec: dict | YAMLConfig, + member: str | None = None, + contour_kwargs: dict | None = None, + ): + super().__init__( + fhr=fhr, + ds=ds, + level=level, + model=model, + short_name=short_name, + spec=spec, + ) self.level = level - self.contour_kwargs = kwargs.get('contour_kwargs', {}) - self.mem = kwargs.get('member', None) + self.contour_kwargs = {} if contour_kwargs is None else contour_kwargs + self.mem = member - def aviation_flight_rules(self, values, **kwargs): - # pylint: disable=unused-argument - - ''' - Generates a field of Aviation Flight Rules from Ceil and Vis - ''' + def aviation_flight_rules(self, values: DataArray, **_kwargs): + """ + Generates a field of Aviation Flight Rules from Ceil and Vis. + """ ceil = values - vis = self.values(name='vis', level='sfc') + vis = self.get_values(name="vis", level="sfc") - flru = np.where((ceil > 1.) & (ceil < 3.), 1.01, 0.0) - flru = np.where((vis > 3.) & (vis < 5.), 1.01, flru) - flru = np.where((ceil > 0.5) & (ceil < 1.), 2.01, flru) - flru = np.where((vis > 1.) & (vis < 3.), 2.01, flru) - flru = np.where((ceil > 0.0) & (ceil < 0.5), 3.01, flru) - flru = np.where((vis < 1.), 3.01, flru) + flru = where((ceil > 1.0) & (ceil < 3.0), 1.01, 0.0) + flru = where((vis > 3.0) & (vis < 5.0), 1.01, flru) + flru = where((ceil > 0.5) & (ceil < 1.0), 2.01, flru) + flru = where((vis > 1.0) & (vis < 3.0), 2.01, flru) + flru = where((ceil > 0.0) & (ceil < 0.5), 3.01, flru) + flru = where((vis < 1.0), 3.01, flru) vis.close() - return xr.DataArray(flru) + return DataArray(flru) @property def cmap(self): + """ + The LinearSegmentedColormap specified by the config key 'cmap'. + """ - ''' Returns the LinearSegmentedColormap specified by the config key - "cmap" ''' - - return cm.get_cmap(self.vspec['cmap']) + return get_cmap(self.vspec["cmap"]) @property def colors(self) -> np.ndarray: - - ''' - Returns a list of colors, specified by the config key "colors". - - The yaml file "colors" key may contain a list or a function to be - called. - ''' - - color_spec = self.vspec.get('colors') - - if isinstance(color_spec, (list, np.ndarray)): - return np.asarray(color_spec) + """ + Returns an array of colors, specified by the config key "colors". 
+ """ + + color_spec = self.vspec.get("colors", "") + if not color_spec: + msg = f"No colors definition found for {self.short_name} at {self.level}" + raise errors.NoGraphicsDefinitionForVariableError(msg) try: ret = self.__getattribute__(color_spec) - if callable(ret): - return ret() - return ret - except AttributeError: - return color_spec + except AttributeError as e: + msg = f"There is no color definition named {color_spec}" + raise AttributeError(msg) from e + if callable(ret): + return np.asarray(ret()) # pragma: no cover + return np.asarray(ret) @property def corners(self) -> list: + """ - ''' - Returns lat and lon of lower left (ll) and upper right(ur) corners: + Returns lat and lon of lower left (ll) and upper right (ur) corners. + + Order: ll_lat, ur_lat, ll_lon, ur_lon - ''' + """ lat, lon = self.latlons() - if self.model in ['global', 'hfip', 'obs']: - ret = [lat[-1], lat[0], lon[0], lon[-1]] - elif self.model == 'global_mpas': - ret = [lat[0], lat[-1], lon[0], lon[-1]] - else: - ret = [lat[0, 0], lat[-1, -1], lon[0, 0], lon[-1, -1]] + if len(lat.shape) == 2: + return [ + np.round(x, decimals=6) for x in [lat[0, 0], lat[-1, -1], lon[0, 0], lon[-1, -1]] + ] + return [np.round(x, decimals=6) for x in [lat[0], lat[-1], lon[0], lon[-1]]] - return ret + @property + def data(self) -> DataArray: + """ + Sets the data property on the object for use when we need to update + the values associated with a given object -- helpful for differences. + """ + if not hasattr(self, "_data"): + self._data = self.get_values() + return self._data - def fire_weather_index(self, values, **kwargs): + @data.setter + def data(self, value: DataArray): + self._data = value + + def field_column_max(self, values: DataArray, **_kwargs): + """Returns the column max of the values.""" + + return values.max(dim=self.vertical_coord) + + def field_diff(self, values: DataArray, variable2: str, level2: str, **kwargs): + """Subtracts the values from variable2 from self.field.""" + + value2 = self.get_values( + name=variable2, level=level2, do_transform=kwargs.get("do_transform", True) + ) + diff = values - value2 + value2.close() + + return diff + + def field_mean( + self, + values: DataArray, + levels: list, + **kwargs, + ): + """Returns the mean of the values over the vertical dimension.""" + + levels = kwargs["global_levels"] if "global" in self.model else levels + levs = [int(x[:-2]) for x in levels] + return values.sel(isobaricInhPa=levs).mean("isobaricInhPa") + + def field_sum(self, values: DataArray, variable2: str, level2: str, **kwargs): + """Returns the sum of the values.""" - # pylint: disable=unused-argument + value2 = self.get_values( + name=variable2, level=level2, do_transform=kwargs.get("do_transform", True) + ) + sum2 = values + value2 + value2.close() + + return sum2 - ''' - Generates a field of Fire Weather Index + def fire_weather_index(self, values: DataArray, **_kwargs): + """ + Generates a field of Fire Weather Index. This method uses wrfprs data to find regions where weather conditions are most likely to lead to wildfires. 
- ''' + """ # Gather fields from the input - veg = values # Chose this value as the main one in the default_specs - temp = self.values(name='temp', level='2m', do_transform=False) - dewpt = self.values(name='dewp', level='2m', do_transform=False) - weasd = self.values(name='weasd', level='sfc', do_transform=False) - gust = self.values(name='gust', level='10m', do_transform=False) - soilm = self.values(name='soilm', level='sfc', do_transform=False) + veg = values + + temp = self.get_values(level="2m", name="temp", do_transform=False) + dewpt = self.get_values(level="2m", name="dewp", do_transform=False) + weasd = self.get_values(level="sfc", name="weasd", do_transform=False) + gust = self.get_values(level="10m", name="gust", do_transform=False) + soilm = self.get_values(level="sfc", name="soilm", do_transform=False) # A few derived fields dewpt_depression = temp - dewpt - dewpt_depression = np.where(dewpt_depression < 0, 0, dewpt_depression) - dewpt_depression = np.maximum(15.0, dewpt_depression) + dewpt_depression = where(dewpt_depression < 0, 0, dewpt_depression) + dewpt_depression = ufuncs.maximum(15.0, dewpt_depression) gust_max = np.maximum(3.0, gust) snowc = (25.0 - weasd) / 25.0 - snowc = np.where(snowc > 0.0, snowc, 0.0) + snowc = where(snowc > 0.0, snowc, 0.0) - mois = 0.01*(100.0 - soilm) + mois = 0.01 * (100.0 - soilm) # Set urban (13), snow/ice (15), barren (16), and water (17) to 0. for vegtype in [13, 15, 16, 17]: - veg = np.where(veg == vegtype, 0, veg) + veg = where(veg == vegtype, 0, veg) # Set all others vegetation types to 1 - veg = np.where(veg > 0, 1, veg) + veg = where(veg > 0, 1, veg) - fwi = veg * (2.37 * - (gust_max ** 1.11) * - (dewpt_depression ** 0.92) * - (mois ** 6.95) * - snowc) + fwi = veg * (2.37 * (gust_max**1.11) * (dewpt_depression**0.92) * (mois**6.95) * snowc) fwi = fwi / 10.0 @@ -614,114 +480,103 @@ def fire_weather_index(self, values, **kwargs): return fwi - def grid_info(self): - - ''' Returns a dict that includes the grid info for the full grid. 
''' + def grid_info(self) -> dict: + """Returns a dict that includes the grid info for the full grid.""" # Keys are grib names, values are Basemap argument names - ncl_to_basemap = dict( - CenterLon='lon_0', - CenterLat='lat_0', - Latin2='lat_1', - Latin1='lat_2', - Lov='lon_0', - La1='lat_0', - La2='lat_2', - Lo1='lon_1', - Lo2='lon_2', - ) - - # Last coordinate listed should be latitude or longitude - lat_var = [var for var in self.field.coords if 'lat' in var][0] - - # Get the latitude variable - lat = self.ds[lat_var] - - grid_info = {} - if self.model != 'hrrrhi': - grid_info['corners'] = self.corners - if self.grid_suffix in ['GLC0']: - attrs = ['Latin1', 'Latin2', 'Lov'] - grid_info['projection'] = 'lcc' - grid_info['lat_0'] = 39.0 - elif self.grid_suffix == 'GST0': - attrs = ['Lov'] - grid_info['projection'] = 'stere' - grid_info['lat_0'] = 90 - elif self.grid_suffix == 'GLL0': - attrs = [] - grid_info['projection'] = 'cyl' - else: - attrs = [] - grid_info['projection'] = 'rotpole' - - # CenterLon in RAP and Longitude_of_southern_pole in RRFS - lon_0 = lat.attrs.get('CenterLon', lat.attrs.get('Longitude_of_southern_pole')) - grid_info['lon_0'] = lon_0[0] - 360 - - # CenterLat in RAP and Latitude_of_southern_pole in RRFS - center_lat = lat.attrs.get('CenterLat', lat.attrs.get('Latitude_of_southern_pole')) - grid_info['o_lat_p'] = - center_lat[0] if center_lat[0] < 0 else 90 - center_lat[0] - - grid_info['o_lon_p'] = 180 - - for attr in attrs: - bm_arg = ncl_to_basemap[attr] - val = lat.attrs[attr] - val = val[0] if isinstance(val, np.ndarray) else val - grid_info[bm_arg] = val - del val - - if self.model == 'hrrrhi': - grid_info['lat_0'] = 20.44 - grid_info['lon_0'] = 202.54 - grid_info['width'] = 2000000 - grid_info['height'] = 2000000 - - del lat + keys_to_basemap = dict( + CenterLon="lon_0", + CenterLat="lat_0", + GRIB_Latin2InDegrees="lat_1", + GRIB_Latin1InDegrees="lat_2", + GRIB_LoVInDegrees="lon_0", + GRIB_orientationOfTheGridInDegrees="lon_0", + Latin2="lat_1", + Latin1="lat_2", + Lov="lon_0", + La1="lat_0", + La2="lat_2", + Lo1="lon_1", + Lo2="lon_2", + ) + + grid_info: dict[str, str | float | int | list] = {} + var_info = self.field + grid_def = var_info.attrs["GRIB_gridDefinitionDescription"].lower() + match grid_def: # pragma: no cover + case x if "lambert" in x: + attrs = [ + "GRIB_Latin1InDegrees", + "GRIB_Latin2InDegrees", + "GRIB_LoVInDegrees", + ] + grid_info["projection"] = "lcc" + grid_info["lat_0"] = 39.0 + case x if "polar stereographic" in x: + attrs = ["GRIB_orientationOfTheGridInDegrees"] + grid_info["projection"] = "stere" + grid_info["lat_0"] = 90 + case "rotated latitude/longitude": # RRFS NA + attrs = [] + grid_info["projection"] = "rotpole" + lon_0: float = var_info.attrs["GRIB_longitudeOfSouthernPoleInDegrees"] + grid_info["lon_0"] = lon_0 - 360 + center_lat: float = var_info.attrs["GRIB_latitudeOfSouthernPoleInDegrees"] + grid_info["o_lat_p"] = -center_lat if center_lat < 0 else 90 - center_lat + grid_info["o_lon_p"] = 180 + + case x if "equidistant cylindrical" in x: # GFS + attrs = [] + grid_info["projection"] = "cyl" + case _: + msg = f"Can't define grid for {grid_def}" + raise ValueError(msg) + if self.model != "hrrrhi": + if not grid_info.get("corners"): + grid_info["corners"] = self.corners + + for attr in attrs: + bm_arg = keys_to_basemap[attr] + val = var_info.attrs[attr] + val = val[0] if isinstance(val, np.ndarray) else val + grid_info[bm_arg] = val + del val + + else: # pragma: no cover + grid_info["lat_0"] = 20.44 + grid_info["lon_0"] = 
202.54 + grid_info["width"] = 2000000 + grid_info["height"] = 2000000 return grid_info - def icing_adjust_trace(self, values, **kwargs): - - # pylint: disable=unused-argument,no-self-use - - ''' Changes the value of ICSEV trace from 4.0 to 0.5, to maintain ascending order ''' - - vals = np.where(values == 4.0, 0.5, values) - - return vals - - def run_max(self, values, **kwargs): - - ''' Finds the max hourly value over all the forecast lead times available. ''' - - # pylint: disable=unused-argument,no-self-use - - return values.max(dim='fcst_hr') - - def run_min(self, values, **kwargs): - - ''' Finds the min hourly value over all the forecast lead times available. ''' - - # pylint: disable=unused-argument,no-self-use + @staticmethod + def icing_adjust_trace(values: DataArray, **_kwargs): + """Changes the value of ICSEV trace from 4.0 to 0.5, to maintain ascending order.""" - return values.min(dim='fcst_hr') + return where(values == 4.0, 0.5, values) - def run_total(self, values, **kwargs): + @staticmethod + def run_max(values: DataArray, **_kwargs): + """Finds the max hourly value over all the forecast lead times available.""" - ''' Sums over all the forecast lead times available. ''' + return values.max(dim="time") # pragma: no cover - # pylint: disable=unused-argument,no-self-use + @staticmethod + def run_min(values: DataArray, **_kwargs): + """Finds the min hourly value over all the forecast lead times available.""" - return values.sum(dim='fcst_hr') + return values.min(dim="time") # pragma: no cover - def supercooled_liquid_water(self, values, **kwargs): + @staticmethod + def run_total(values: DataArray, **_kwargs): + """Sums over all the forecast lead times available.""" - # pylint: disable=unused-argument + return values.sum(dim="time") # pragma: no cover - ''' - Generates a field of Supercooled Liquid Water + def supercooled_liquid_water(self, **_kwargs): + """ + Generates a field of Supercooled Liquid Water. This method uses wrfnat data to find regions where cloud and rain moisture are in below-freezing temps. @@ -732,29 +587,31 @@ def supercooled_liquid_water(self, values, **kwargs): columns, and (3) uses the layer depth to find the pressure at the next sigma level. - The process is iterative to the topof the atmosphere. - ''' - - pres_sfc = self.values(name='pres', level='sfc') * 100. # convert back to Pa - pres_nat_lev = self.values(name='pres', level='ua', one_lev=False) - temp = self.values(name='temp', level='ua', one_lev=False) - cloud_mixing_ratio = self.values(name='clwmr', level='ua', one_lev=False) - rain_mixing_ratio = self.values(name='rwmr', level='ua', one_lev=False) + The process is iterative to the top of the atmosphere. + """ + pres_sfc = self.get_values(name="pres", level="sfc") * 100.0 # convert back to Pa + pres_nat_lev = self.get_values(name="pres", level="ua") + temp = self.get_values(name="temp", level="ua") + cloud_mixing_ratio = self.get_values(name="clwmr", level="ua") + rain_mixing_ratio = self.get_values(name="rwmr", level="ua") gravity = 9.81 - slw = pres_sfc * 0. 
# start with array of zero values + slw = pres_sfc * 0.0 # start with array of zero values - nlevs = np.shape(pres_nat_lev)[0] # determine number of vertical levels + nlevs = np.shape(pres_nat_lev)[0] # determine number of vertical levels for n in range(nlevs): if n == 0: pres_layer = 2 * (pres_sfc[:, :] - pres_nat_lev[n, :, :]) # layer depth - pres_sigma = pres_sfc - pres_layer # pressure at next sigma level + pres_sigma = pres_sfc - pres_layer # pressure at next sigma level else: - pres_layer = 2 * (pres_sigma[:, :] - pres_nat_lev[n, :, :]) # layer depth - pres_sigma = pres_sigma - pres_layer # pressure at next sigma level + pres_layer = 2 * (pres_sigma[:, :] - pres_nat_lev[n, :, :]) # layer depth + pres_sigma = pres_sigma - pres_layer # pressure at next sigma level # compute supercooled water in layer and add to previous values - supercool_locs = np.where((temp[n, :, :] < 0.0), \ - cloud_mixing_ratio[n, :, :]+rain_mixing_ratio[n, :, :], 0.0) + supercool_locs = where( + (temp[n, ::] < 0.0), + cloud_mixing_ratio[n, ::] + rain_mixing_ratio[n, ::], + 0.0, + ) slw = slw + pres_layer / gravity * supercool_locs pres_sfc.close() @@ -766,179 +623,57 @@ def supercooled_liquid_water(self, values, **kwargs): @property def ticks(self) -> int: + """ + Returns the number of color bar tick marks from the yaml config + settings. + """ - ''' Returns the number of color bar tick marks from the yaml config - settings. ''' - - return self.vspec.get('ticks', 10) + return int(self.vspec.get("ticks", 10)) @property def units(self) -> str: - - ''' Returns the variable unit from the yaml config, if available. If not - specified in the yaml file, returns the value set in the Grib file. ''' - - return self.vspec.get('unit', self.field.units) - - @property - def data(self): - ''' Sets the data property on the object for use when we need to update - the values associated with a given object -- helpful for differences.''' - if not hasattr(self, '_data'): - return self.values() - return self._data - - @data.setter - def data(self, value): - self._data = value - - def values(self, level=None, name=None, **kwargs): - - ''' - Returns the numpy array of values at the requested level for the + """ + Returns the variable unit from the yaml config, if available. If not + specified in the yaml file, returns the value set in the Grib file. + """ + + return str(self.vspec.get("unit", self.field.units)) + + def get_values( + self, level: str | None = None, name: str | None = None, do_transform: bool = True + ) -> DataArray: + """ + Returns the FieldData array of values at the requested level for the variable after applying any unit conversion to the original data. Optional Input: - name the name of a field other than defined in self - level the desired level of the named field - - Keyword Args: - do_transform bool flag. to call, or not, the transform specified - in specs (default: True) - ncl_name the NCL-assigned Grib2 name (default: '') - one_lev bool flag. if True, get the single level of the variable - (default: True) - vertical_index the index (int) of the desired vertical level - ''' + level the desired level of the named field + name the name of a field other than defined in self + do_transform apply a standard transformation of units, etc.? 
- level = level if level else self.level + """ - one_lev = kwargs.get('one_lev', True) - vertical_index = kwargs.get('vertical_index') - - ncl_name = kwargs.get('ncl_name', '') - ncl_name = ncl_name.format(fhr=self.fhr, grid=self.grid_suffix) - - do_transform = kwargs.get('do_transform', True) - - if name is None and not ncl_name: - - # Use field and spec from the current object - field = self.field - spec = self.vspec - - else: + level = str(level or self.level) + vals = self.field + spec = self.vspec + if name is not None: # Get the spec dict and ncl_name for the given variable name - spec = self.spec.get(name, {}).get(level, {}) - if not spec and name is not None: - raise errors.NoGraphicsDefinitionForVariable(name, level) - field = self._get_field(ncl_name or self.ncl_name(spec)) - - lev = vertical_index - vals = field - if one_lev: - - # Check if it's a 3D variable (lv in any dimension field) - dim_name = self.vertical_dim(field) - - if dim_name: # Field has a vertical dimension - - # Use vertical_index if provided in kwargs - lev = vertical_index if vertical_index is not None else \ - self._get_level(field, level, spec) - - if lev is None or dim_name is None: - print(f'ERROR: Could not find dim_name ({dim_name}) or' \ - f'lev {lev} for {vals}') - raise ValueError - - try: - vals = vals.isel(**{dim_name: lev}) - except: - print(f'Error for {vals.name} : {dim_name} {lev} \ - {level} {spec}') - raise - - if self.mem is not None: - vals = vals.isel(**{'ens_mem': self.mem}) - - # Select a single forecast hour (only if there are many) - if not spec.get('accumulate', False): - if 'fcst_hr' in vals.dims: - fcst_hr = 0 if self.ds.sizes['fcst_hr'] <= 1 else int(self.fhr) - vals = vals.sel(**{'fcst_hr': fcst_hr}) - - transforms = spec.get('transform') + spec = deepcopy(self.spec.get(name, {}).get(level, {})) + if not spec: + raise errors.NoGraphicsDefinitionForVariableError(name, level) + utils.set_level(level=level, model=self.model, spec=spec) + vals = self._get_field(spec["cfgrib"].get(self.model, spec["cfgrib"])) + + transforms = spec.get("transform") if transforms and do_transform: vals = self.get_transform(transforms, vals) - return vals - - def vector_magnitude(self, field1, field2, level=None, vertical_index=None, **kwargs): - - # pylint: disable=unused-argument - - ''' - Returns the vector magnitude of two component vector fields. The - input fields can be either NCL names (string) or full data fields. The - first layer of a variable is returned if none is provided. - ''' - - if isinstance(field1, str): - field1 = self.values( - level=level, - ncl_name=field1, - vertical_index=vertical_index, - **kwargs, - ) - - if isinstance(field2, str): - field2 = self.values( - level=level, - ncl_name=field2, - vertical_index=vertical_index, - **kwargs, - ) - - mag = conversions.magnitude(field1, field2) - field1.close() - field2.close() - - return mag - - def wind(self, level) -> [np.ndarray, np.ndarray]: - - ''' - Returns the u, v wind components as a list (length 2) of arrays. - - Input: - level bool or level key. If True, use same level as self, - if a string level key is provided, use wind at that - level. 
- ''' - - level = self.level if level and isinstance(level, bool) else level - - # Just in case wind gets called with level=False - if not level: - return False - - # Create fieldData objects for u, v components - field_lambda = lambda ds, level, var: fieldData( - ds=ds, - fhr=self.fhr, - level=level, - short_name=var, - ) - u, v = [field_lambda(self.ds, level, var) for var in ['u', 'v']] - - return [component.values() for component in [u, v]] - + return vals if "global" not in self.model else vals[::-1, :] -class profileData(UPPData): - ''' +class ProfileData(UPPData): + """ Class provides methods for getting profiles from a specific lat/lon location from a grib file. @@ -954,17 +689,32 @@ class profileData(UPPData): Only used for base classes. - ''' - - def __init__(self, ds, loc, short_name, **kwargs): - - super().__init__(ds, short_name, **kwargs) + """ + + def __init__( + self, + fhr: int, + ds: dict[str, Dataset], + model: str, + loc: str, + short_name: str, + spec: dict | YAMLConfig, + level: str | None = None, + ): + super().__init__( + fhr=fhr, + ds=ds, + level=level or "ua", + model=model, + short_name=short_name, + spec=spec, + ) - # The first 31 columns are space delimted - self.site_code, _, self.site_num, lat, lon = \ - loc[:31].split() + self.loc = loc + # The first 31 columns are space-delimited + self.site_code, _, self.site_num, lat, lon = loc[:31].split() - # The variable lenght site name is included past column 37 + # The variable length site name is included past column 37 self.site_name = loc[37:].rstrip() # Convert the string to a number. Longitude should be positive for all @@ -974,98 +724,49 @@ def __init__(self, ds, loc, short_name, **kwargs): # minus sign to convert the longitude to deg East, and then need to # adjust to the 0 to 360 system. self.site_lat = float(lat) - self.site_lon = -float(lon) # lons are -180 but without minus sign in input file + self.site_lon = -float(lon) # lons are -180 but without minus sign in input file if self.site_lon < 0: self.site_lon = self.site_lon + 360.0 - def values(self, level=None, name=None, **kwargs): - - ''' + def get_values( + self, + level: str | None = None, + name: str | None = None, + do_transform: bool = False, + ) -> DataArray: + """ Returns the numpy array of values at the object's x, y location for the - requested variable. Transforms are performed in the child class. + requested variable. Optional Input: name the short name of a field other than defined in self level the level of the alternate field to use, default='ua' for upper air - Keyword Args: - ncl_name the NCL name of the variable to be retrieved - one_lev bool flag. if True, get the single level of the variable - split bool flag. if True, level string numbers are split - into a list, e.g. used to get [0, 6000] from 06km - vertical_index the index of the required level - ''' + """ + + assert do_transform is False # not supported by this class # Set the defaults here since this is an instance of an abstract method # level refers to the level key in the specs file. 
- level = level if level is not None else 'ua' - - if not name: - name = self.short_name - - one_lev = kwargs.get('one_lev', False) - vertical_index = kwargs.get('vertical_index') - split = kwargs.get('split') + level = level if level is not None else "ua" + if name is not None: + # Get the spec dict and ncl_name for the given variable name + spec = deepcopy(self.spec.get(name, {}).get(level, {})) + if not spec: + raise errors.NoGraphicsDefinitionForVariableError(name, level) + utils.set_level(level=level, model=self.model, spec=spec) + profile = self._get_field(spec["cfgrib"].get(self.model, spec["cfgrib"])).squeeze() + else: + profile = self.field.squeeze() # Retrive the location for the profile x, y = self.get_xypoint(self.site_lat, self.site_lon) - # Retrieve the default_specs section for the specified level - var_spec = self.spec.get(name, {}).get(level, {}) - - # Set the NCL name from the specs section, unless otherwise specified - ncl_name = kwargs.get('ncl_name') or self.ncl_name(var_spec) - ncl_name = ncl_name.format(fhr=self.fhr, grid=self.grid_suffix) - - if not ncl_name: - raise errors.NoGraphicsDefinitionForVariable( - name, - 'ua', - ) - - # Get the full 2- or 3-D field - field = self.ds[ncl_name] - - profile = field[::] - lev = 0 + # 2D if len(profile.shape) == 2: profile = profile[x, y] + # 3D elif len(profile.shape) == 3: - if one_lev: - lev = vertical_index - if vertical_index is None: - lev = self._get_level(field, level, var_spec, split=split) - profile = profile[lev, x, y] - else: - profile = profile[:, x, y] + profile = profile[:, x, y] return profile - - def vector_magnitude(self, field1, field2, level='ua', vertical_index=None, **kwargs): - - ''' - Returns the vector magnitude of two component vector profiles. The - input fields can be either NCL names (string) or full data fields. - - If no layer or level is provided, the default 'ua' will be used in - self.values. - ''' - - - if isinstance(field1, str): - field1 = self.values( - level=level, - ncl_name=field1, - vertical_index=vertical_index, - **kwargs, - ) - - if isinstance(field2, str): - field2 = self.values( - level=level, - ncl_name=field2, - vertical_index=vertical_index, - **kwargs, - ) - - return conversions.magnitude(field1, field2) diff --git a/adb_graphics/datahandler/gribfile.py b/adb_graphics/datahandler/gribfile.py index 66140a6f..a70d2a32 100644 --- a/adb_graphics/datahandler/gribfile.py +++ b/adb_graphics/datahandler/gribfile.py @@ -1,42 +1,30 @@ -# pylint: disable=invalid-name,too-few-public-methods,too-many-locals,too-many-branches,too-many-statements - -''' +""" Classes that load grib files. -''' - -import xarray as xr - -class GribFile(): - - ''' Wrappers and helper functions for interfacing with pyNIO.''' - - def __init__(self, filename, **kwargs): +""" - # pylint: disable=unused-argument +import warnings +from functools import cached_property +from pathlib import Path - self.filename = filename - self.contents = self._load() - - def _load(self): - - ''' Internal method that opens the grib file. Returns a grib message - iterator. ''' - - return xr.open_dataset(self.filename, - engine='pynio', - lock=False, - backend_kwargs=dict(format="grib2"), - ) +import cfgrib +import xarray as xr -class GribFiles(): +warnings.filterwarnings("ignore", category=FutureWarning, module="cfgrib") - ''' Class for loading in a set of grib files and combining them over - forecast hours. 
''' - def __init__(self, coord_dims, filenames, filetype, **kwargs): +class GribFiles: # pragma: no cover + """ + Class for loading in a set of grib files and combining them over + forecast hours. + """ - ''' - Arguments: + def __init__( + self, + filenames: list[Path], + cfgrib_config: dict, + ): + """ + Initialize GribFiles object. coord_dims dict containing the name of the dimension to concat (key), and a list of its values (value). @@ -45,249 +33,72 @@ def __init__(self, coord_dims, filenames, filetype, **kwargs): forecast lead times ('01fcst'), and all the free forecast hours after that ('free_fcst'). filetype key to use for dict when setting variable_names - - Keyword Arguments: model string describing the model type - ''' - - self.model = kwargs.get('model', '') + """ self.filenames = filenames - self.filetype = filetype - self.coord_dims = coord_dims - self.grid_suffix = self._get_grid_suffix(filenames) - self.contents = self._load() - - - def append(self, filenames): - - ''' Add a single new slice to existing data set. Must match coord_dims - and filetype of original dataset. Updates current contents of Object''' - - self.contents = self._load(filenames) - - def free_fcst_names(self, ds, fcst_type): - - ''' Given an opened dataset, return a dict of original variable names - (key) and the desired name (value) ''' - - ret = {} - - fhr = self.coord_dims['fcst_hr'][-1] - - special_suffixes = ['max', 'min', 'acc', 'avg'] - for var in ds.variables: - suffix = var.split('_')[-1] - - # Keeping lists of misbehaving "accumulated" variables here because - # there doesn't seem to be another way to know.... - - if fcst_type == '01fcst': - # Don't rename these variables at early hours - odd_variables = [ - 'ASNOW', - 'FROZR', - 'FRZR', - 'LRGHR', - ] - if self.model == 'rrfs': - odd_variables.append('WEASD') - if self.model != 'rrfs': - odd_variables.extend([ - 'CDLYR', - 'TCDC', - ]) - needs_renaming = var.split('_')[0] not in odd_variables - if suffix in special_suffixes and needs_renaming: - if 'global' not in self.model or self.model == 'global_mpas': - new_suffix = f'{suffix}1h' - else: - new_suffix = f'{suffix}6h' - ret[var] = var.replace(suffix, new_suffix) - # MASSDEN is a special case when ending in "avg_1'" - if var.split('_')[0] == 'MASSDEN' and var.split('_')[-2] == 'avg': - print(f'Special change to MASSDEN avg_1 name to avg1h_1') - ret[var] = var.replace('avg', 'avg1h') - else: - # Only rename these variables at late hours - odd_variables = [ - 'APCP', - 'CDLYR', - 'FROZR', - 'FRZR', - 'LRGHR', - 'TCDC', - 'TSNOWP', - 'WEASD', - ] - if self.model == 'rrfs': - odd_variables.remove('WEASD') - variable = var.split('_')[0] - needs_renaming = variable in odd_variables - contains_suffix = [] - for suf in special_suffixes: - - # The LRGHR variable behaves differently in RRFS than in all - # others! At 7 hours, it starts averaging since 6h. From 0-6 - # h it's named with suffix avg, after its named avg1h, - # avg2h, etc. - if self.model == 'rrfs' and \ - variable == 'LRGHR' and \ - suffix == f'{suf}1h': - contains_suffix.append(suf) - - # RRFS_A has fields that have the suffix 'acc0h' but we don't - # want those. Drop them if they come up. 
- bad_0h_vars = ['APCP_P8_L1_GLL0_acc0h', \ - 'FROZR_P8_L1_GLC0_acc0h', 'FRZR_P8_L1_GLC0_acc0h', \ - 'CDLYR_P8_L200_GLC0_avg0h', 'TCDC_P8_L200_GLC0_avg0h', \ - 'APCP_P8_L1_GLC0_acc0h', 'APCP_P8_L1_GST0_acc0h'] - if fhr != 0 and var in bad_0h_vars: - print(f'dropping {var}') - ds.drop(var) - continue - # mpas_global has fields that have the suffix 'acc1h' but we don't - # want those since the output is 6h. Drop them if they come up. - bad_1h_vars = ['APCP_P8_L1_GLL0_acc1h', \ - 'FROZR_P8_L1_GLL0_acc1h', 'FRZR_P8_L1_GLL0_acc1h', \ - 'CDLYR_P8_L200_GLL0_avg1h', 'TCDC_P8_L200_GLL0_avg1h', \ - 'APCP_P8_L1_GLL0_acc1h', 'APCP_P8_L1_GST0_acc1h', \ - 'WEASD_P8_L1_GLL0_acc1h'] - if self.model == 'global_mpas' and fhr != 0 and var in bad_1h_vars: - print(f'dropping {var}') - ds.drop(var) - continue - # For the RAP CONUS and AK domains, the APCP, WEASD, and FROZR - # variables all have 3h accumulation fields in addition to - # the 1h accumulation fields. This causes problems with the - # renaming, so just drop those fields from the dataset. - bad_3h_vars = ['APCP_P8_L1_GLC0_acc3h', \ - 'WEASD_P8_L1_GLC0_acc3h', 'FROZR_P8_L1_GLC0_acc3h', \ - 'APCP_P8_L1_GST0_acc3h', 'WEASD_P8_L1_GST0_acc3h', \ - 'FROZR_P8_L1_GST0_acc3h'] - if self.model == 'rap' and fhr != 3 and var in bad_3h_vars: - print(f'dropping {var}') - ds.drop(var) - continue - - # Some global models will start producing 12h accumulations at - # lead times past 246h. These cause problems with the renaming, - # so we can drop those fields. - bad_12h_vars = ['APCP_P8_L1_GLL0_acc12h', \ - 'APCP_P8_L1_GLC0_acc12h', 'APCP_P8_L1_GST0_acc12h'] - if fhr != 12 and var in bad_12h_vars: - print(f'dropping {var}') - ds.drop(var) - continue - - # All the variables that need to be renamed. In most cases, - # exclude the "1h" ("6h" for global) accumulated variables - accum_freq = 6 if 'global' in self.model else 1 - if suf in suffix and suffix != f'{suf}{accum_freq}h': - contains_suffix.append(suf) - - if contains_suffix and needs_renaming: - ret[var] = var.replace(suffix, contains_suffix[0]) - - return ret - - @staticmethod - def _get_grid_suffix(filenames): - - ''' Return the suffix of the first variable with 4 sections (split on _) - in the file. This should correspond to the grid tag. ''' - - for files in filenames.values(): - if files: - gfile = xr.open_dataset(files[0], - cache=False, - engine='pynio', - lock=False, - backend_kwargs=dict(format="grib2"), - ) - for var in gfile.keys(): - vsplit = var.split('_') - if len(vsplit) == 4: - gfile.close() - return vsplit[-1] - return 'GRID NOT FOUND' - - def _load(self, filenames=None): - - ''' Load the set of files into a single XArray structure. ''' - - all_leads = [] if filenames is None else [self.contents] - filenames = self.filenames if filenames is None else filenames - - # 0h and 1h accumulated forecast variables are named differently than - # the rest of the forecast hours. Rename those accumulated variables if - # needed. 
- for fcst_type in ['01fcst', 'free_fcst']: - - if filenames.get(fcst_type): - for filename in filenames.get(fcst_type): - print(f'Loading grib2 file: {fcst_type}, {filename}') - - # Rename variables to match free forecast variables - dataset = xr.open_mfdataset( - filenames[fcst_type], - **self.open_kwargs, - ) - - renaming = self.free_fcst_names(dataset, fcst_type) - if renaming and self.model not in ['hrrre', 'rrfse']: - print(f'RENAMING VARIABLES:') - for old_name, new_name in renaming.items(): - print(f' {old_name:>30s} -> {new_name}') - dataset = dataset.rename_vars(renaming) - - if len(all_leads) == 1: - # Check that specific variables exist in the xarray that is - # already loaded (presumably 0hr), and add them if they - # don't. This implementation is relying on pointers to - # update "in place" - og_ds = all_leads[0] - bad_vars = [ - 'APCP_P8_L1_{grid}_acc', - 'ACPCP_P8_L1_{grid}_acc', - 'FROZR_P8_L1_{grid}_acc', - 'NCPCP_P8_L1_{grid}_acc', - 'WEASD_P8_L1_{grid}_acc', - ] - bad_vars = [v.format(grid=self.grid_suffix) for v in \ - bad_vars] - for bad_var in bad_vars: - # Check to see if the bad variable is in the current - # dataset and NOT in the original dataset. - if bad_var not in og_ds.variables and \ - dataset.get(bad_var) is not None: - print(f'Adding {bad_var} to og ds') - # Duplicate the accumulated variable with the - # required name - og_ds[bad_var] = og_ds.get(f'{bad_var}1h') - all_leads.append(dataset) - - ret = xr.combine_nested(all_leads, - compat='override', - concat_dim=list(self.coord_dims.keys())[0], - coords='minimal', - data_vars='all', - ) - return ret - - @property - def open_kwargs(self): - - ''' Defines the key word arguments used by the various calls to XArray - open_mfdataset ''' + self.cfgrib_config = cfgrib_config + + @cached_property + def datasets(self): + """Load the set of files into a single Xarray structure.""" + ds = xr.open_mfdataset( + self.filenames, + engine="cfgrib", + concat_dim="time", + combine="nested", + compat="override", + coords="minimal", + backend_kwargs=( + { + "filter_by_keys": self.cfgrib_config, + "indexpath": "", + "read_keys": ["orientationOfTheGridInDegrees"], + } + ), + ) + return {_var_id(ds, list(ds.data_vars)[0]): ds} + + +class WholeGribFile: + """ + Class for loading a whole gribfile into a dictionary for different categories of data, mostly + separated by vertical coordinate and bucket type (avg, max, etc.). 
+ """ + + def __init__( + self, + filename: Path, + ): + self.filename = filename - return dict( - backend_kwargs=dict(format="grib2"), - cache=False, - combine='nested', - compat='override', - concat_dim=list(self.coord_dims.keys())[0], - coords='minimal', - engine='pynio', - lock=False, - ) + @cached_property + def datasets(self): + datasets = cfgrib.open_datasets( + str(self.filename), + read_keys=["orientationOfTheGridInDegrees", "parameterNumber"], + backend_kwargs=( + { + "indexpath": "", + "read_keys": ["orientationOfTheGridInDegrees"], + } + ), + ) + + all_fields: dict = {} + for ds in datasets: + for var in ds.data_vars: + var_id = _var_id(ds, str(var)) + if all_fields.get(var_id) is None: + all_fields[var_id] = ds + else: # pragma: no cover + msg = f"Multiple entries for {var_id} when opening {self.filename}" + raise ValueError(msg) + return all_fields + + +def _var_id(ds: xr.Dataset, var: str): + vertical_dim = ds[list(ds.data_vars)[0]].attrs.get("GRIB_typeOfLevel", "unknown") + var_name = ds[var].attrs.get("GRIB_shortName") + step_type = ds[var].attrs.get("GRIB_stepType", "nostepType") + return f"{var_name}_{vertical_dim}_{step_type}" diff --git a/adb_graphics/default_specs.yml b/adb_graphics/default_specs.yml index 680a5216..5e0449dc 100644 --- a/adb_graphics/default_specs.yml +++ b/adb_graphics/default_specs.yml @@ -11,8 +11,8 @@ # # clevs: specifies the contour levels by one of the following methods # - a list with no quotes -# - a numpy.arange specified as "!!python/object/apply:numpy.arange [list]" -# with quotes. Specify "list" as with numpy.anumpy.arange() like this -- +# - a numpy.arange specified as "!arange [list]" +# with quotes. Specify "list" as with numpy.arange() like this -- # [[start, ]stop[, increment]]]. start and increment are options. # - the name of a function to be called that will return a list. 
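The cfgrib-based loaders above index each sub-dataset by the key built in `_var_id`, i.e. `<shortName>_<typeOfLevel>_<stepType>`. A minimal sketch of inspecting that dictionary is shown below; the file path is a placeholder, and the example key for 2 m temperature is only what cfgrib typically reports, so the exact keys depend on the GRIB metadata in a given file.

```
from pathlib import Path

from adb_graphics.datahandler.gribfile import WholeGribFile

# Placeholder path -- point this at any UPP GRIB2 file on disk.
gribfile = WholeGribFile(Path("/path/to/wrfprs_example.grib2"))

# Each key has the form "<shortName>_<typeOfLevel>_<stepType>"; 2 m temperature,
# for instance, typically appears as "2t_heightAboveGround_instant".
for var_id, ds in sorted(gribfile.datasets.items()):
    print(var_id, list(ds.data_vars))
```

Listing these keys is also a quick way to work out the `cfgrib` filter values needed when adding a new entry to `default_specs.yml`.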
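The revised `clevs` comment above documents the `!arange` tag that replaces the old `!!python/object/apply:numpy.arange` form. How the tag is registered is not shown in this section, so the snippet below is only a minimal sketch, assuming a PyYAML constructor, of how such a tag can expand into a `numpy.arange` call; the name `_arange_constructor` is illustrative, and `!join_ranges` (used further down) would follow the same pattern.

```
import numpy as np
import yaml


def _arange_constructor(loader, node):
    # Expand "!arange [start, stop, increment]" into numpy.arange(start, stop, increment).
    return np.arange(*loader.construct_sequence(node))


yaml.SafeLoader.add_constructor("!arange", _arange_constructor)

spec = yaml.safe_load("clevs: !arange [5, 76, 5]")
print(spec["clevs"])  # [ 5 10 15 ... 70 75]
```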
# @@ -79,6 +79,23 @@ 1hsnw: # 1 hr Accumulated Snow Using 10:1 Ratio sfc: + cfgrib: + hrrr: &1hrsnw_hrrr + shortName: sdwe + level: 0 + typeOfLevel: surface + stepType: accum + stepRange: '{{ "%d-%d" % (fhr-1, fhr) }}' + hrrrcar: + <<: *1hrsnw_hrrr + hrrrhi: + <<: *1hrsnw_hrrr + rrfs: + parameterNumber: 50 + level: 0 + typeOfLevel: surface + stepType: accum + stepRange: '{{ "%d-%d" % (fhr-1, fhr) }}' clevs: [0.03, 0.05, 0.1, 0.5, 1, 2, 3, 4, 5, 6, 7, 8] cmap: gist_ncar colors: snow_colors @@ -94,7 +111,30 @@ unit: in 1ref: # Reflectivity at 1 km AGL 1000m: &refl - clevs: !!python/object/apply:numpy.arange [5, 76, 5] + cfgrib: + hrrr: &hrrr_1ref + shortName: refd + typeOfLevel: heightAboveGround + level: 1000 + hrrrcar: + <<: *hrrr_1ref + shortName: unknown + parameterCategory: 16 + parameterNumber: 195 + hrrrhi: + <<: *hrrr_1ref + shortName: unknown + parameterCategory: 16 + parameterNumber: 195 + global: + <<: *hrrr_1ref + mpas: + <<: *hrrr_1ref + rrfs: + shortName: rare + typeOfLevel: heightAboveGround + level: 1000 + clevs: !arange [5, 76, 5] cmap: NWSReflectivity colors: cref_colors ncl_name: REFD_P0_L103_{grid} @@ -103,6 +143,11 @@ unit: dBZ acfrozr: # Run Total Graupel sfc: &graupel + cfgrib: + parameterNumber: 227 + stepRange: '{{ "%d-%d" % (0, fhr) }}' + typeOfLevel: surface + stepType: accum clevs: [0.002, 0.01, 0.05, 0.1, 0.25, 0.5, 0.75, 1, 2] cmap: gist_ncar colors: pcp_colors @@ -114,10 +159,45 @@ acfrozr: # Run Total Graupel acfrzr: # Run Total Freezing Rain sfc: <<: *graupel + cfgrib: + hrrr: &named_frzr + shortName: frzr + level: 0 + typeOfLevel: surface + stepType: accum + hrrrcar: + <<: *named_frzr + shortName: unknown + parameterCategory: 1 + parameterNumber: 225 + mpas: + <<: *named_frzr + rrfs: + <<: *named_frzr ncl_name: FRZR_P8_L1_GLC0_acc title: Run Total Freezing Rain acpcp: # Accumulated run total precipitation sfc: + cfgrib: + hrrr: &named_acctp + shortName: tp + level: 0 + typeOfLevel: surface + stepRange: '{{ "%d-%d" % (0, fhr) }}' + stepType: accum + hrrrcar: &unnamed_acctp + <<: *named_acctp + shortName: unknown + parameterCategory: 1 + parameterNumber: 8 + hrrrhi: + <<: *unnamed_acctp + global: + <<: *named_acctp + mpas: + <<: *named_acctp + rrfs: + <<: *named_acctp clevs: [0.01, 0.1, 0.25, 0.5, 1, 2, 3, 5, 10, 15, 20, 40] cmap: gist_ncar colors: rainbow12_colors @@ -220,6 +300,11 @@ acpcpens6: # ensemble probability of precipitation, 6h title: Probability of 6-hr Precipitation >= 0.5 in within 40km acsnod: # Accumulated snow sfc: &snow + cfgrib: + level: 0 + typeOfLevel: surface + stepType: accum + parameterName: Total snowfall clevs: [0.01, 0.1, 1, 2, 3, 4, 6, 8, 10, 12, 18, 24] cmap: gist_ncar colors: snow_colors @@ -230,6 +315,20 @@ acsnod: # Accumulated snow unit: in acsnw: # Run Total Accumulated Snow Using 10:1 Ratio sfc: + cfgrib: + hrrr: &named_weasd + shortName: sdwe + level: 0 + typeOfLevel: surface + stepType: accum + stepRange: '{{ "%d-%d" % (0, fhr) }}' + hrrrhi: &unnamed_weasd + <<: *named_weasd + shortName: unknown + parameterCategory: 1 + parameterNumber: 13 + rrfs: + <<: *unnamed_weasd clevs: [0.01, 0.1, 1, 2, 3, 4, 6, 8, 10, 12, 18, 24] cmap: gist_ncar colors: snow_colors @@ -245,6 +344,10 @@ acsnw: # Run Total Accumulated Snow Using 10:1 Ratio unit: in aodbc: # Black Carbon AOD sfc: &aod + cfgrib: + parameterNumber: 102 + typeOfLevel: atmosphere + constituentType: 62009 clevs: [.1, .16, .23, .29, .36, .42, .49, .55, .61, .68, .74, .81, .87, 1] cmap: jet colors: aod_colors @@ -255,35 +358,64 @@ aodbc: # Black Carbon AOD aodfd: # Fine Dust 
AOD sfc: <<: *aod + cfgrib: + parameterNumber: 102 + typeOfLevel: atmosphere + constituentType: 62001 ncl_name: AOTK_P48_L10_{grid}_A62001 title: Fine Dust Aerosol Optical Depth aodhg: # OPT2 AOD sfc: <<: *aod + cfgrib: + parameterNumber: 102 + typeOfLevel: atmosphere + constituentType: 62007 ncl_name: AOTK_P48_L10_{grid}_A62007 title: Total OPT2 Aerosol Optical Depth aodoc: # Organic Carbon AOD sfc: <<: *aod + cfgrib: + parameterNumber: 102 + typeOfLevel: atmosphere + constituentType: 62010 ncl_name: AOTK_P48_L10_{grid}_A62010 title: Organic Carbon Aerosol Optical Depth aodss: # Sea Salt AOD sfc: <<: *aod + cfgrib: + parameterNumber: 102 + typeOfLevel: atmosphere + constituentType: 62008 ncl_name: AOTK_P48_L10_{grid}_A62008 title: Sea Salt Aerosol Optical Depth aodsulf: # Sulfate AOD sfc: <<: *aod + cfgrib: + parameterNumber: 102 + typeOfLevel: atmosphere + constituentType: 62006 ncl_name: AOTK_P48_L10_{grid}_A62006 title: Sulfate Aerosol Optical Depth aodtot: # Total AOD sfc: <<: *aod + cfgrib: + parameterNumber: 102 + typeOfLevel: atmosphere + constituentType: 62000 ncl_name: AOTK_P48_L10_{grid}_A62000 title: Total Aerosol Optical Depth bc: # Black Carbon sfc: &bcsfc + cfgrib: + shortName: pmtf + typeOfLevel: hybrid + constituentType: 62014 + level: 1 clevs: [0.05, 0.1, 0.2, 0.5, 1, 2, 3, 4, 5, 7, 10, 15, 20, 30] cmap: jet colors: aod_colors @@ -300,17 +432,45 @@ bc: # Black Carbon bc1: # Black Carbon 1 sfc: <<: *bcsfc + cfgrib: + shortName: pmtf + typeOfLevel: hybrid + constituentType: 62014 + level: 1 ncl_name: PMTF_P48_L105_{grid}_A62014 title: Surface Black Carbon 1 transform: [] bc2: # Black Carbon 2 sfc: <<: *bcsfc + cfgrib: + shortName: pmtf + typeOfLevel: hybrid + constituentType: 62013 + level: 1 ncl_name: PMTF_P48_L105_{grid}_A62013 title: Surface Black Carbon 2 transform: [] cape: mu: &cape # Most Unstable CAPE + cfgrib: + hrrr: &named_cape + shortName: cape + topLevel: 25500 + typeOfLevel: pressureFromGroundLayer + hrrrcar: &unnamed_cape + <<: *named_cape + shortName: unknown + parameterCategory: 7 + parameterNumber: 6 + hrrrhi: + <<: *unnamed_cape + global: + <<: *named_cape + mpas: + <<: *named_cape + rrfs: + <<: *named_cape clevs: [1, 100, 500, 1000, 1500, 2000, 2500, 3000, 3500, 4000, 4500, 5000] cmap: gist_ncar colors: vort_colors @@ -335,6 +495,24 @@ cape: vertical_index: 2 mul: # Most Unstable Layer CAPE <<: *cape + cfgrib: + hrrr: &named_mulcape + shortName: cape + topLevel: 18000 + typeOfLevel: pressureFromGroundLayer + hrrrcar: + <<: *named_mulcape + shortName: unknown + parameterCategory: 7 + parameterNumber: 6 + hrrrhi: + <<: *named_mulcape + global: + <<: *named_mulcape + mpas: + <<: *named_mulcape + rrfs: + <<: *named_mulcape contours: cape: colors: white @@ -354,6 +532,24 @@ cape: vertical_index: 1 mx90mb: # Lowest 90 mb Mixed Layer CAPE <<: *cape + cfgrib: + hrrr: &named_mixcape + shortName: cape + topLevel: 9000 + typeOfLevel: pressureFromGroundLayer + hrrrcar: + <<: *named_mixcape + shortName: unknown + parameterCategory: 7 + parameterNumber: 6 + hrrrhi: + <<: *named_mixcape + global: + <<: *named_mixcape + mpas: + <<: *named_mixcape + rrfs: + <<: *named_mixcape contours: cape: colors: white @@ -373,6 +569,24 @@ cape: title: Lowest 90 mb Mixed Layer CAPE sfc: <<: *cape + cfgrib: + hrrr: &named_sfccape + shortName: cape + level: 0 + typeOfLevel: surface + hrrrcar: &unnamed_sfccape + <<: *named_sfccape + shortName: unknown + parameterCategory: 7 + parameterNumber: 6 + hrrrhi: + <<: *unnamed_sfccape + global: + <<: *named_sfccape + mpas: + <<: *named_sfccape + 
rrfs: + <<: *named_sfccape contours: cin: colors: 'k' @@ -387,14 +601,38 @@ cape: title: Surface CAPE cell: # Storm cell motion ua: + cfgrib: + shortName: ustm + typeOfLevel: heightAboveGroundLayer + bottomLevel: 6000 ncl_name: USTM_P0_2L103_{grid} transform: funcs: [vector_magnitude, conversions.ms_to_kt] kwargs: - field2: VSTM_P0_2L103_{grid} + field2_id: cellv unit: kt +cellv: + ua: + cfgrib: + shortName: vstm + typeOfLevel: heightAboveGroundLayer + bottomLevel: 6000 ceil: # Ceiling ua: &ceil + cfgrib: + hrrr: &named_ceil + shortName: gh + typeOfLevel: cloudCeiling + level: 0 + hrrrhi: + <<: *named_ceil + typeOfLevel: cloudBase + global: + <<: *named_ceil + mpas: + <<: *named_ceil + rrfs: + <<: *named_ceil clevs: [0, 0.1, 0.3, 0.5, 1, 2, 3, 5, 10, 15, 20, 30, 52] cmap: gist_ncar colors: ceil_colors @@ -410,12 +648,20 @@ ceil: # Ceiling ceilexp: # Ceiling - experimental ua: <<: *ceil + cfgrib: + shortName: ceil + typeOfLevel: cloudCeiling + level: 0 ncl_name: regional_mpas: CEIL_P0_L215_{grid} title: Ceiling (exp) ceilexp2: # Ceiling - experimental no.2 ua: <<: *ceil + cfgrib: + shortName: gh + typeOfLevel: cloudBase + level: 0 ncl_name: # Note that the "HGT_P0_L2_{grid}" ncl_names in RAPv5/HRRRv4 (below) # seemingly correspond to cloud-base height. This is intentional; i.e., @@ -431,18 +677,70 @@ ceilexp2: # Ceiling - experimental no.2 cloudbase: # Cloud-base height ua: <<: *ceil + cfgrib: + shortName: gh + typeOfLevel: cloudBase + level: 0 ncl_name: rrfs: HGT_P0_L2_{grid} regional_mpas: HGT_P0_L2_{grid} title: Cloud-Base Height cfrzr: # Categorical Freezing Rain sfc: + cfgrib: + hrrr: &named_cfrzr + shortName: cfrzr + typeOfLevel: surface + hrrrcar: &unnamed_cfrzr + parameterCategory: 1 + parameterNumber: 193 + typeOfLevel: unknown + hrrrhi: + <<: *unnamed_cfrzr + mpas: + <<: *named_cfrzr + rrfs: + <<: *named_cfrzr ncl_name: CFRZR_P0_L1_{grid} cicep: # Categorical Ice Pellets sfc: + cfgrib: + hrrr: &named_cicep + shortName: cicep + typeOfLevel: surface + level: 0 + stepType: instant + hrrrcar: &unnamed_cicep + parameterCategory: 1 + parameterNumber: 193 + typeOfLevel: unknown + hrrrhi: + <<: *unnamed_cicep + mpas: + <<: *named_cicep + rrfs: + <<: *named_cicep ncl_name: CICEP_P0_L1_{grid} cin: # Surface Convective Inhibition mu: + cfgrib: + hrrr: &named_cin + shortName: cin + level: 25500 + typeOfLevel: pressureFromGroundLayer + hrrrcar: &unnamed_cin + <<: *named_cin + shortName: unknown + parameterCategory: 7 + parameterNumber: 7 + hrrrhi: + <<: *unnamed_cin + global: + <<: *named_cin + mpas: + <<: *named_cin + rrfs: + <<: *named_cin clevs: [-300, -200, -150, -100, -75, -50, -40, -30, -20, -10, -1] cmap: gist_ncar colors: cin_colors @@ -450,10 +748,47 @@ cin: # Surface Convective Inhibition unit: J/kg vertical_index: 2 mx90mb: # Lowest 90 mb Mixed Layer CIN + cfgrib: + hrrr: &named_mixcin + shortName: cin + level: 9000 + typeOfLevel: pressureFromGroundLayer + hrrrcar: + <<: *named_mixcin + shortName: unknown + parameterCategory: 7 + parameterNumber: 7 + hrrrhi: + <<: *named_mixcin + global: + <<: *named_mixcin + mpas: + <<: *named_mixcin + rrfs: + <<: *named_mixcin + clevs: [-300, -200, -150, -100, -75, -50, -40, -30, -20, -10, -1] ncl_name: CIN_P0_2L108_{grid} title: 'ML CIN < -50' vertical_index: 0 sfc: &sfc_cin + cfgrib: + hrrr: &named_sfccin + shortName: cin + level: 0 + typeOfLevel: surface + hrrrcar: &unnamed_sfccin + <<: *named_sfccin + shortName: unknown + parameterCategory: 7 + parameterNumber: 7 + hrrrhi: + <<: *unnamed_sfccin + global: + <<: *named_sfccin + mpas: + <<: 
*named_sfccin + rrfs: + <<: *named_sfccin clevs: [-300, -200, -150, -100, -75, -50, -40, -30, -20, -10, -1] cmap: gist_ncar colors: cin_colors @@ -467,6 +802,10 @@ cin: # Surface Convective Inhibition title: Surface CIN < -50 cloudcover: bndylay: &cld_cover # PBL ... 1 km Cloud Cover + cfgrib: + shortName: tcc + typeOfLevel: boundaryLayerCloudLayer + level: 0 clevs: [2, 5, 10, 20, 30, 40, 50, 60, 70, 80, 90, 95] cmap: gist_ncar colors: cldcov_colors @@ -476,6 +815,23 @@ cloudcover: unit: '%' high: <<: *cld_cover + cfgrib: + hrrr: &high_cld_cover + shortName: hcc + typeOfLevel: highCloudLayer + level: 0 + hrrrcar: + <<: *high_cld_cover + typeOfLevel: unknown + hrrrhi: + <<: *high_cld_cover + typeOfLevel: unknown + global: + <<: *high_cld_cover + mpas: + <<: *high_cld_cover + rrfs: + <<: *high_cld_cover clevs: [2, 5, 10, 20, 30, 40, 50, 60, 70, 80, 90, 95] cmap: gist_ncar colors: cldcov_colors @@ -485,21 +841,78 @@ cloudcover: unit: '%' low: <<: *cld_cover + cfgrib: + hrrr: &low_cld_cover + shortName: lcc + typeOfLevel: lowCloudLayer + level: 0 + hrrrcar: + <<: *low_cld_cover + typeOfLevel: unknown + hrrrhi: + <<: *low_cld_cover + typeOfLevel: unknown + global: + <<: *low_cld_cover + mpas: + <<: *low_cld_cover + rrfs: + <<: *low_cld_cover ncl_name: LCDC_P0_L214_{grid} title: Low-Level Cloud Cover mid: <<: *cld_cover + cfgrib: + hrrr: &mid_cld_cover + shortName: mcc + typeOfLevel: middleCloudLayer + level: 0 + hrrrcar: + <<: *mid_cld_cover + typeOfLevel: unknown + hrrrhi: + <<: *mid_cld_cover + typeOfLevel: unknown + global: + <<: *mid_cld_cover + mpas: + <<: *mid_cld_cover + rrfs: + <<: *mid_cld_cover ncl_name: MCDC_P0_L224_{grid} title: Mid-Level Cloud Cover total: <<: *cld_cover + cfgrib: + hrrr: &tcc + shortName: tcc + typeOfLevel: atmosphere + stepType: instant + level: 0 + hrrrcar: + <<: *tcc + shortName: unknown + parameterNumber: 1 + parameterCategory: 6 + hrrrhi: + <<: *tcc + shortName: unknown + parameterNumber: 1 + parameterCategory: 6 + mpas: + <<: *tcc + typeOfLevel: atmosphereSingleLayer + rrfs: + <<: *tcc + typeOfLevel: atmosphereSingleLayer ncl_name: TCDC_P0_L{level_type}_{grid} title: Total Cloud Cover clwmr: # Cloud water Mixing Ratio ua: - ncl_name: - nat: CLWMR_P0_L105_{grid} - prs: CLWMR_P0_L100_{grid} + cfgrib: + shortName: clwmr + typeOfLevel: '{{ "isobaricInhPa" if file_type == "prs" else "hybrid" }}' + ncl_name: CLWMR_P0_L105_{grid} coarsedust: # Coarse dust int: &coldust clevs: [1, 4, 7, 11, 15, 20, 25, 30, 40, 50, 75, 150, 250, 500] @@ -523,6 +936,10 @@ coarsedust: # Coarse dust colbc: # Column Black Carbon sfc: &col # clevs: [0.00000005, 0.0000001, 0.00000015, 0.0000002, 0.0000005, 0.000001, 0.000002, 0.000003, 0.000004, 0.000005, 0.0000075, 0.00001, 0.000015, 0.00002] + cfgrib: + parameterNumber: 1 + typeOfLevel: atmosphere + constituentType: 62009 clevs: [0.05, 0.1, 0.15, 0.2, 0.5, 1, 2, 3, 4, 5, 7, 10, 15, 20] cmap: jet colors: aod_colors @@ -535,36 +952,62 @@ colbc: # Column Black Carbon colfd: # Column Fine Dust sfc: <<: *col + cfgrib: + parameterNumber: 1 + typeOfLevel: atmosphere + constituentType: 62001 clevs: [2.5, 5, 7.5, 10, 25, 50, 100, 150, 200, 250, 375, 500, 750, 1000] ncl_name: COLMD_P48_L10_{grid}_A62001 title: Column Fine Dust coloc: # Column Organic Carbon sfc: <<: *col + cfgrib: + parameterNumber: 1 + typeOfLevel: atmosphere + constituentType: 62010 clevs: [0.5, 1, 5, 10, 20, 30, 40, 50, 80, 100, 120, 150, 200, 250] ncl_name: COLMD_P48_L10_{grid}_A62010 title: Column Organic Carbon colpm10: # Column PM10 sfc: <<: *col + cfgrib: + 
parameterNumber: 1 + typeOfLevel: atmosphere + constituentType: 62000 + scaledValueOfFirstSize: 10 clevs: [4, 5, 10, 20, 30, 40, 50, 100, 200, 300, 400, 500, 1000, 2000] ncl_name: COLMD_P48_L10_{grid}_A62000 title: Column PM10 colpm25: # Column PM25 sfc: <<: *col + cfgrib: + parameterNumber: 1 + typeOfLevel: atmosphere + constituentType: 62000 + scaledValueOfFirstSize: 25 clevs: [0.25, 0.5, 0.75, 1, 2.5, 5, 10, 15, 20, 25, 30, 50, 75, 100] ncl_name: COLMD_P48_L10_{grid}_A62000_1 title: Column PM25 colss: # Column Sea Salt sfc: <<: *col + cfgrib: + parameterNumber: 1 + typeOfLevel: atmosphere + constituentType: 62008 clevs: [0.1, 0.6, 3, 6, 10, 13, 16, 20, 23, 26, 30, 33, 36, 100] ncl_name: COLMD_P48_L10_{grid}_A62008 title: Column Sea Salt colsu: # Column Sulfate sfc: <<: *col + cfgrib: + parameterNumber: 1 + typeOfLevel: atmosphere + constituentType: 62006 clevs: [2, 3, 4, 5, 7.5, 10, 15, 20, 30, 40, 50, 100, 200, 300] ncl_name: COLMD_P48_L10_{grid}_A62006 title: Column Sulfate @@ -578,6 +1021,9 @@ colto: # Column Total Ozone unit: DU cpofp: # Frozen Precipitation Percentage sfc: + cfgrib: + shortName: cpofp + typeOfLevel: surface clevs: [-0.1, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100] cmap: gist_ncar colors: frzn_colors @@ -587,6 +1033,20 @@ cpofp: # Frozen Precipitation Percentage unit: '%' crain: # Categorical Rain sfc: + cfgrib: + hrrr: &named_crain + shortName: crain + typeOfLevel: surface + hrrrcar: &unnamed_crain + parameterCategory: 1 + parameterName: 192 + typeOfLevel: surface + hrrrhi: + <<: *unnamed_crain + mpas: + <<: *named_crain + rrfs: + <<: *named_crain ncl_name: CRAIN_P0_L1_{grid} cref: # Composite reflectivity esbl: @@ -599,13 +1059,36 @@ cref: # Composite reflectivity title: Observed Composite Reflectivity sfc: <<: *refl + cfgrib: + hrrr: + shortName: refc + typeOfLevel: atmosphere + level: 0 + hrrrcar: &refc_noname + parameterCategory: 16 + parameterNumber: 196 + level: 0 + typeOfLevel: atmosphere + hrrrhi: + <<: *refc_noname + global: + shortName: refc + typeOfLevel: atmosphereSingleLayer + level: 0 + mpas: + <<: *refc_noname + rrfs: + parameterNumber: 5 + parameterCategory: 16 + typeOfLevel: atmosphereSingleLayer + level: 0 ncl_name: REFC_P0_L{level_type}_{grid} title: Composite Reflectivity include_obs: True crefmax: # Comp reflectivity (max over forecast) sfc: accumulate: True - clevs: !!python/object/apply:numpy.arange [5, 76, 5] + clevs: !arange [5, 76, 5] cmap: NWSReflectivity colors: cref_colors ncl_name: REFC_P0_L200_{grid} @@ -615,10 +1098,18 @@ crefmax: # Comp reflectivity (max over forecast) unit: dBZ csnow: # Categorical Snow sfc: + cfgrib: + shortName: csnow + typeOfLevel: surface + level: 0 ncl_name: CSNOW_P0_L1_{grid} ctop: # Cloud top height ua: - clevs: !!python/object/apply:numpy.arange [0, 61, 5] + cfgrib: + shortName: gh + typeOfLevel: cloudTop + level: 0 + clevs: !arange [0, 61, 5] cmap: gist_ncar colors: ceil_colors ncl_name: HGT_P0_L3_{grid} @@ -628,17 +1119,25 @@ ctop: # Cloud top height unit: kft asl dewp: # Dew point temperature 2m: - clevs: !!python/object/apply:numpy.arange [-50, 141, 10] + cfgrib: + shortName: 2d + level: 2 + typeOfLevel: heightAboveGround + clevs: !arange [-50, 141, 10] cmap: gist_ncar colors: tsfc_colors ncl_name: DPT_P0_L103_{grid} ticks: 10 + title: 2m Dewpoint Temperature transform: conversions.k_to_f unit: F wind: 10m dlwrf: # Downward Longwave Radiation Flux sfc: &radiation_flux - clevs: !!python/object/apply:numpy.arange [200, 501, 12] + cfgrib: + shortName: sdlwrf + typeOfLevel: surface + clevs: !arange [200, 501, 
12] cmap: gist_ncar colors: radiation_colors ncl_name: DLWRF_P0_L1_{grid} @@ -646,7 +1145,7 @@ dlwrf: # Downward Longwave Radiation Flux title: Downward LW Radiation Flux, Surface unit: W/m$^{2}$ top: # Nominal top of atmosphere - clevs: !!python/object/apply:numpy.arange [80, 341, 2] + clevs: !arange [80, 341, 2] cmap: ir_rgbv_r colors: radiation_mix_colors ncl_name: DLWRF_P0_L8_{grid} @@ -656,7 +1155,11 @@ dlwrf: # Downward Longwave Radiation Flux dlwrfavg: # Downward Longwave Radiation Flux Average sfc: <<: *radiation_flux - clevs: !!python/object/apply:numpy.arange [200, 501, 12] + cfgrib: + shortName: sdlwrf + typeOfLevel: surface + stepType: avg + clevs: !arange [200, 501, 12] cmap: gist_ncar colors: radiation_colors ncl_name: DLWRF_P0_L1_{grid}_avg6h @@ -666,6 +1169,9 @@ dlwrfavg: # Downward Longwave Radiation Flux Average dswrf: # Downward Shortwave Radiation Flux sfc: <<: *radiation_flux + cfgrib: + shortName: sdswrf + typeOfLevel: surface clevs: [0, 50, 100, 150, 200, 250, 300, 400, 500, 600, 700, 800, 900, 1000] colors: rainbow12_colors ncl_name: DSWRF_P0_L1_{grid} @@ -673,7 +1179,7 @@ dswrf: # Downward Shortwave Radiation Flux title: Downward SW Radiation Flux, Surface top: # Nominal top of atmosphere <<: *radiation_flux - clevs: !!python/object/apply:numpy.arange [50, 851, 10] + clevs: !arange [50, 851, 10] cmap: Greys_r colors: radiation_bw_colors ncl_name: DSWRF_P0_L8_{grid} @@ -682,6 +1188,10 @@ dswrf: # Downward Shortwave Radiation Flux dswrfavg: # Downward Shortwave Radiation Flux Average sfc: <<: *radiation_flux + cfgrib: + shortName: sdswrf + typeOfLevel: surface + stepType: avg clevs: [0, 50, 100, 150, 200, 250, 300, 400, 500, 600, 700, 800, 900, 1000] colors: rainbow12_colors ncl_name: DSWRF_P8_L1_{grid}_avg6h @@ -689,6 +1199,11 @@ dswrfavg: # Downward Shortwave Radiation Flux Average title: Downward SW Radiation Flux 6h Avg, Surface fd: # Fine dust, global chem sfc: + cfgrib: + shortName: pmtf + typeOfLevel: hybrid + level: 1 + constituentType: 62001 clevs: [1, 2, 4, 6, 8, 12, 16, 20, 25, 30, 40, 60, 100, 200] cmap: jet colors: aod_colors @@ -719,7 +1234,24 @@ fullintdust: # Full vertically integrated dust (Fine dust + Coarse dust) level2: int echotop: # Echo Top sfc: - clevs: !!python/object/apply:numpy.arange [4, 50, 3] + cfgrib: + hrrr: &named_echotop + parameterNumber: 3 + parameterCategory: 16 + typeOfLevel: cloudTop + level: 0 + hrrrcar: + <<: *named_echotop + hrrrhi: + <<: *named_echotop + mpas: &unnamed_echotop + parameterNumber: 3 + parameterCategory: 16 + typeOfLevel: atmosphereSingleLayer + level: 0 + rrfs: + <<: *unnamed_echotop + clevs: !arange [4, 50, 3] cmap: NWSReflectivity colors: cref_colors ncl_name: @@ -769,6 +1301,11 @@ ffldro: # Ensemble flash flood runoff title: Prob of 6-hr Precip > RFC flash flood guidance w/in 40 km firewx: # Fire Weather Index sfc: &firewx + cfgrib: + rrfs: + parameterNumber: 26 + parameterCategory: 4 + typeOfLevel: surface clevs: [10, 20, 30, 40, 50, 60, 70, 80, 90, 95, 100] cmap: gist_ncar colors: rainbow11_colors @@ -776,13 +1313,41 @@ firewx: # Fire Weather Index ticks: 0 title: Hourly Wildfire Potential unit: "%" -firewx-pygraf: # Fire Weather Index computed by pygraf (for HRRR) +firewx-pygraf: sfc: <<: *firewx + cfgrib: + hrrr: &named_vgtyp + shortName: vgtyp + typeOfLevel: surface + level: 0 + hrrrhi: + <<: *named_vgtyp + shortName: gppbfas + parameterCategory: 0 + parameterNumber: 198 + rrfs: + <<: *named_vgtyp ncl_name: VGTYP_P0_L1_{grid} transform: fire_weather_index flru: # Aviation Flight Rules sfc: + cfgrib: + 
hrrr: &cloud_base + shortName: gh + typeOfLevel: cloudCeiling + level: 0 + hrrrcar: + <<: *cloud_base + typeOfLevel: cloudBase + hrrrhi: + <<: *cloud_base + typeOfLevel: cloudBase + mpas: + <<: *cloud_base + rrfs: + <<: *cloud_base + clevs: [0.0, 1.0, 2.0, 3.0, 4.0] cmap: gist_ncar colors: flru_colors @@ -797,7 +1362,30 @@ flru: # Aviation Flight Rules unit: "category" G113bt: # GOES-W Brightness temperature, water vapor (Ch 3) sat: &goes_sat - clevs: !!python/object/apply:numpy.arange [-80, 41, 1] + cfgrib: + hrrr: + shortName: SBT113 + typeOfLevel: nominalTop + level: 0 + hrrrcar: + parameterCategory: 192 + parameterNumber: 7 + typeOfLevel: nominalTop + level: 0 + hrrrhi: + parameterCategory: 192 + typeOfLevel: nominalTop + parameterNumber: 7 + level: 0 + mpas: + parameterNumber: 242 + typeOfLevel: atmosphere + level: 0 + rrfs: + parameterNumber: 242 + typeOfLevel: atmosphere + level: 0 + clevs: !arange [-80, 41, 1] cmap: WVCIMSS_r colors: goes_colors ncl_name: SBT113_P0_L8_{grid} @@ -808,23 +1396,78 @@ G113bt: # GOES-W Brightness temperature, water vapor (Ch 3) G114bt: # GOES-W Brightness temperature, infrared (Ch 4) sat: <<: *goes_sat + cfgrib: + hrrr: &g114_bt + shortName: SBT114 + typeOfLevel: nominalTop + level: 0 + hrrrcar: + parameterCategory: 192 + parameterNumber: 7 + typeOfLevel: nominalTop + level: 0 + hrrrhi: + parameterCategory: 192 + parameterNumber: 7 + typeOfLevel: nominalTop + level: 0 + mpas: + <<: *g114_bt + rrfs: + <<: *g114_bt ncl_name: SBT114_P0_L8_{grid} title: GOES-W Brightness Temperature, Infrared unit: ch 4 G123bt: # GOES-E Brightness temperature, water vapor (Ch 3) sat: <<: *goes_sat + cfgrib: + hrrr: &g123_bt + shortName: SBT123 + typeOfLevel: nominalTop + level: 0 + hrrrcar: &unnamed_g123_bt + parameterCategory: 192 + parameterNumber: 1 + level: 0 + typeOfLevel: nominalTop + hrrrhi: + <<: *unnamed_g123_bt + mpas: + <<: *g123_bt + rrfs: + <<: *g123_bt ncl_name: SBT123_P0_L8_{grid} title: GOES-E Brightness Temperature, Water Vapor G124bt: # GOES-E Brightness temperature, infrared (Ch 4) sat: <<: *goes_sat + cfgrib: + hrrr: &g124_bt + shortName: SBT124 + typeOfLevel: nominalTop + level: 0 + hrrrcar: &unnamed_g124_bt + typeOfLevel: nominalTop + parameterCategory: 192 + parameterNumber: 2 + level: 0 + hrrrhi: + <<: *unnamed_g124_bt + mpas: + <<: *g124_bt + rrfs: + <<: *g124_bt + ncl_name: SBT124_P0_L8_{grid} title: GOES-E Brightness Temperature, Infrared unit: ch 4 gh: # Geopotential height 5mb: &ua_gh - clevs: !!python/object/apply:numpy.arange [6, 4680, 6] + cfgrib: + shortName: gh + typeOfLevel: isobaricInhPa + clevs: !arange [6, 4680, 6] cmap: rainbow colors: terrain_colors ncl_name: @@ -841,23 +1484,27 @@ gh: # Geopotential height <<: *ua_gh 500mb: <<: *ua_gh - clevs: !!python/object/apply:numpy.arange [504, 601, 6] + clevs: !arange [504, 601, 6] ticks: 6 700mb: <<: *ua_gh - clevs: !!python/object/apply:numpy.arange [201, 373, 3] + clevs: !arange [201, 373, 3] 850mb: <<: *ua_gh - clevs: !!python/object/apply:numpy.arange [3, 600, 3] + clevs: !arange [3, 600, 3] 925mb: <<: *ua_gh - clevs: !!python/object/apply:numpy.arange [3, 600, 3] + clevs: !arange [3, 600, 3] 1000mb: <<: *ua_gh - clevs: !!python/object/apply:numpy.arange [500, 600, 10] + clevs: !arange [500, 600, 10] sfc: <<: *ua_gh - clevs: !!python/object/apply:numpy.arange [0, 5000, 250] + cfgrib: + shortName: orog + typeOfLevel: surface + level: 0 + clevs: !arange [0, 5000, 250] cmap: gist_earth ncl_name: HGT_P0_L1_{grid} ticks: 0 @@ -865,8 +1512,15 @@ gh: # Geopotential height unit: gpm ua: <<: 
*ua_gh + cfgrib: + shortName: gh + typeOfLevel: hybrid ghtfl: # Ground Heat Flux sfc: + cfgrib: + shortName: gflux + typeOfLevel: surface + stepType: instant cmap: magma_r clevs: [-200, -150, -100, -50, -25, 0, 25, 50, 100, 150, 200, 300] cmap: PuOr @@ -877,12 +1531,19 @@ ghtfl: # Ground Heat Flux unit: W/m$^{2}$ grle: # Graupel ua: + cfgrib: + shortName: grle + typeOfLevel: '{{ "isobaricInhPa" if file_type == "prs" else "hybrid" }}' ncl_name: nat: GRLE_P0_L105_{grid} prs: GRLE_P0_L100_{grid} gust: 10m: - clevs: !!python/object/apply:numpy.arange [5, 95, 5] + cfgrib: + shortName: gust + typeOfLevel: surface + level: 0 + clevs: !arange [5, 95, 5] cmap: gist_ncar colors: wind_colors ncl_name: GUST_P0_L1_{grid} @@ -902,6 +1563,20 @@ gust: unit: in hail: # Max 1h Hail diameter maxsfc: &hail # surface + cfgrib: + hrrr: &hrrr_hail + shortName: hail + typeOfLevel: sigma + level: 0 + stepType: max + hrrrcar: + <<: *hrrr_hail + hrrrhi: + <<: *hrrr_hail + rrfs: + shortName: hail + typeOfLevel: surface + stepType: max clevs: [0.10, 0.25, 0.50, 0.75, 1.0, 1.5, 2.0, 2.5, 3.0] cmap: gist_ncar colors: hail_colors @@ -912,6 +1587,21 @@ hail: # Max 1h Hail diameter unit: in max: # total atmosphere <<: *hail + cfgrib: + hrrr: &hrrr_maxhail + shortName: hail + typeOfLevel: atmosphere + level: 0 + stepType: max + hrrrcar: + <<: *hrrr_maxhail + hrrrhi: + <<: *hrrr_maxhail + mpas: + <<: *hrrr_maxhail + rrfs: + <<: *hrrr_maxhail + typeOfLevel: surface ncl_name: HAIL_P8_L10_{grid}_max1h title: Max 1h Hail/Graupel Diameter, Entire Column hailcast: # Max 1h Hail diameter @@ -921,7 +1611,7 @@ hailcast: # Max 1h Hail diameter title: Max 1h Hail Diameter at Sfc from HAILCAST hlcy: # Helicity in16: &hlcy # Hourly updraft helicity over 1-6 km layer - clevs: !!python/object/apply:numpy.arange [25, 301, 25] + clevs: !arange [25, 301, 25] cmap: gist_ncar colors: rainbow12_colors ncl_name: UPHL_P0_2L103_{grid} @@ -931,12 +1621,23 @@ hlcy: # Helicity unit: $m^2 / s^2$ in25: # Hourly updraft helicity over 2-5 km layer <<: *hlcy - clevs: !!python/object/apply:numpy.arange [25, 301, 25] + cfgrib: + parameterNumber: 15 + typeOfLevel: heightAboveGroundLayer + topLevel: 5000 + bottomLevel: 2000 + clevs: !arange [25, 301, 25] ncl_name: UPHL_P0_2L103_{grid} split: True title: 2-5km Updraft Helicity mn02: &hlcy_mn # Hourly minimum of updraft helicity over 0-2 km layer - clevs: !!python/object/apply:numpy.arange [-300, -24, 25] + cfgrib: + parameterNumber: 200 + typeOfLevel: heightAboveGroundLayer + topLevel: 2000 + bottomLevel: 0 + stepType: min + clevs: !arange [-300, -24, 25] cmap: gist_ncar colors: rainbow12_reverse ncl_name: VAR_0_7_200_P8_2L103_{grid}_min1h @@ -946,15 +1647,33 @@ hlcy: # Helicity unit: $m^2 / s^2$ mn03: &hlcy_mn03 # Hourly minimum of updraft helicity over 0-3 km layer <<: *hlcy_mn + cfgrib: + parameterNumber: 200 + typeOfLevel: heightAboveGroundLayer + topLevel: 3000 + bottomLevel: 0 + stepType: min title: 0-3km Min Updraft Helicity (over prv hr) mn16: &hlcy_mn16 # Hourly minimum of updraft helicity over 1-6 km layer <<: *hlcy_mn title: 1-6km Min Updraft Helicity (over prv hr) mn25: &hlcy_mn25 # Hourly minimum of updraft helicity over 2-5 km layer <<: *hlcy_mn + cfgrib: + parameterNumber: 200 + typeOfLevel: heightAboveGroundLayer + topLevel: 5000 + bottomLevel: 2000 + stepType: min title: 2-5km Min Updraft Helicity (over prv hr) mx02: &hlcy_mx02 # Hourly maximum of updraft helicity over 0-2 km layer <<: *hlcy + cfgrib: + parameterNumber: 199 + typeOfLevel: heightAboveGroundLayer + topLevel: 2000 + bottomLevel: 0 
+ stepType: max clevs: !join_ranges [[12.5, 87.6, 12.5], [100, 301, 25]] colors: rainbow16_colors ncl_name: MXUPHL_P8_2L103_{grid}_max1h @@ -963,6 +1682,12 @@ hlcy: # Helicity title: 0-2km Max Updraft Helicity (over prv hr) mx03: &hlcy_mx03 # Hourly maximum of updraft helicity over 0-3 km layer <<: *hlcy + cfgrib: + parameterNumber: 199 + typeOfLevel: heightAboveGroundLayer + topLevel: 3000 + bottomLevel: 0 + stepType: max clevs: !join_ranges [[12.5, 87.6, 12.5], [100, 301, 25]] colors: rainbow16_colors ncl_name: MXUPHL_P8_2L103_{grid}_max1h @@ -979,6 +1704,12 @@ hlcy: # Helicity title: 1-6km Max Updraft Helicity (over prv hr) mx25: &hlcy_mx25 # Hourly maximum of updraft helicity over 2-5 km layer <<: *hlcy + cfgrib: + parameterNumber: 199 + typeOfLevel: heightAboveGroundLayer + topLevel: 5000 + bottomLevel: 2000 + stepType: max clevs: !join_ranges [[25, 176, 25], [200, 601, 50]] colors: rainbow16_colors ncl_name: MXUPHL_P8_2L103_{grid}_max1h @@ -987,6 +1718,11 @@ hlcy: # Helicity title: 2-5km Max Updraft Helicity (over prv hr) sr01: # 0-1 km Storm Relative Helicity <<: *hlcy + cfgrib: + shortName: hlcy + typeOfLevel: heightAboveGroundLayer + topLevel: 1000 + bottomLevel: 0 clevs: [25, 50, 100, 150, 200, 250, 300, 400, 500, 600, 700, 800] ncl_name: HLCY_P0_2L103_{grid} unit: $m^2 / s^2$ @@ -995,6 +1731,11 @@ hlcy: # Helicity title: 0-1 km Storm Relative Helicity sr03: # 0-3 km Storm Relative Helicity <<: *hlcy + cfgrib: + shortName: hlcy + typeOfLevel: heightAboveGroundLayer + topLevel: 3000 + bottomLevel: 0 clevs: [25, 50, 100, 150, 200, 250, 300, 400, 500, 600, 700, 800] ncl_name: HLCY_P0_2L103_{grid} unit: $m^2 / s^2$ @@ -1043,7 +1784,23 @@ hlcytot: title: Run Total 2-5km Max Updraft Helicity transform: run_max hpbl: # Height of Planetary Boundary Layer - sfc: + sfc: + cfgrib: + hrrr: &hpbl + shortName: blh + typeOfLevel: surface + hrrrcar: &unnamed_blh + parameterNumber: 196 + parameterCategory: 3 + typeOfLevel: surface + hrrrhi: + <<: *unnamed_blh + global: + <<: *unnamed_blh + mpas: + <<: *hpbl + rrfs: + <<: *hpbl clevs: [25, 50, 100, 200, 300, 500, 750, 1000, 1500, 2000, 2500, 3000, 4000, 5000] cmap: ir_rgbv colors: pbl_colors @@ -1053,16 +1810,23 @@ hpbl: # Height of Planetary Boundary Layer unit: m icmr: # Ice Water Mixing Ratio ua: + cfgrib: + shortName: icmr + typeOfLevel: isobaricInhPa ncl_name: nat: - ICMR_P0_L105_{grid} - CIMIXR_P0_L105_{grid} prs: ICMR_P0_L100_{grid} + uanat: + cfgrib: + parameterNumber: 82 + typeOfLevel: hybrid icprb: # Icing Probability # levels chosen are arbitrary based on initial plot samples # additional levels may be requested in the future. 
500ft: &icprb - clevs: !!python/object/apply:numpy.arange [5, 86, 10] + clevs: !arange [5, 86, 10] cmap: gist_ncar colors: icprb_colors ncl_name: ICPRB_P0_L102_{grid} @@ -1120,7 +1884,11 @@ icsev: # Icing Severity variable: icsev lcl: # Lifted condensation level sfc: &lcl - clevs: !!python/object/apply:numpy.arange [0, 5000, 250] + cfgrib: + shortName: gh + typeOfLevel: adiabaticCondensation + level: 0 + clevs: !arange [0, 5000, 250] cmap: rainbow colors: lcl_colors contours: @@ -1137,6 +1905,9 @@ lcl: # Lifted condensation level unit: m lhtfl: # Latent Heat Net Flux sfc: &lhtflsfc + cfgrib: + shortName: slhtf + typeOfLevel: surface cmap: magma_r clevs: [-100, -50, -25, -10, 0, 10, 25, 50, 100, 150, 200, 250, 300, 400, 500, 750] cmap: BrBG @@ -1148,11 +1919,33 @@ lhtfl: # Latent Heat Net Flux lhtflavg: sfc: <<: *lhtflsfc + cfgrib: + shortName: avg_slhtf + typeOfLevel: surface ncl_name: LHTFL_P8_L1_{grid}_avg6h title: Latent Heat Net Flux 6h Avg li: # Lifted Index best: &lifted_index - clevs: !!python/object/apply:numpy.arange [-15, 16] + cfgrib: + hrrr: &named_bestli + shortName: 4lftx + typeOfLevel: pressureFromGroundLayer + topLevel: 18000 + hrrrcar: + <<: *named_bestli + shortName: unknown + parameterCategory: 7 + parameterNumber: 193 + hrrrhi: + <<: *named_bestli + shortName: unknown + parameterCategory: 7 + parameterNumber: 193 + mpas: + <<: *named_bestli + rrfs: + <<: *named_bestli + clevs: !arange [-15, 16] cmap: Spectral colors: lifted_index_colors ncl_name: 4LFTX_P0_2L108_{grid} @@ -1161,10 +1954,44 @@ li: # Lifted Index unit: C sfc: <<: *lifted_index + cfgrib: + hrrr: &named_lftx + shortName: lftx + typeOfLevel: isobaricLayer + level: 500 + hrrrcar: + <<: *named_lftx + shortName: unknown + parameterCategory: 7 + parameterNumber: 192 + hrrrhi: + <<: *named_lftx + mpas: + <<: *named_lftx + rrfs: + <<: *named_lftx ncl_name: LFTX_P0_2L100_{grid} title: Surface Lifted Index lpl: # Lifted parcel level agl: + cfgrib: &cfgrib_lpl + hrrr: &named_plpl + shortName: plpl + typeOfLevel: pressureFromGroundLayer + level: 25500 + hrrrcar: &unnamed_plpl + <<: *named_plpl + shortName: unknown + parameterCategory: 3 + parameterNumber: 200 + hrrrhi: + <<: *unnamed_plpl + global: + <<: *named_plpl + mpas: + <<: *named_plpl + rrfs: + <<: *named_plpl ncl_name: PLPL_P0_2L108_{grid} title: Lifted Parcel Level AGL >50 transform: @@ -1174,11 +2001,27 @@ lpl: # Lifted parcel level level2: sfc unit: hPa ua: + cfgrib: + <<: *cfgrib_lpl ncl_name: PLPL_P0_2L108_{grid} transform: conversions.pa_to_hpa unit: hPa ltg3: # Lightning Threat (LTG1 ... LTG2) sfc: + cfgrib: + hrrr: &named_ltng + shortName: ltng + typeOfLevel: atmosphere + level: 0 + hrrrcar: &unnamed_ltng + <<: *named_ltng + shortName: unknown + parameterCategory: 17 + parameterNumber: 192 + hrrrhi: + <<: *unnamed_ltng + rrfs: + <<: *named_ltng clevs: [0.02, 0.5, 1.0, 1.5, 2.0, 2.5, 3, 4, 5, 6, 7, 8, 10, 12] cmap: NWSReflectivity colors: cref_colors @@ -1188,7 +2031,12 @@ ltg3: # Lightning Threat (LTG1 ... 
LTG2) unit: flashes / km$^{2}$ / 5 min ltng: # Lightning sfc: - clevs: !!python/object/apply:numpy.arange [5, 91, 5] + cfgrib: + shortName: ltng + typeOfLevel: atmosphere + level: 0 + stepType: max + clevs: !arange [5, 91, 5] cmap: jet colors: graupel_colors ncl_name: LTNG_P8_L10_{grid}_max1h @@ -1196,6 +2044,11 @@ ltng: # Lightning unit: strikes / hr lwtp: # Lightning with total precip sfc: + cfgrib: + shortName: tp + typeOfLevel: surface + stepType: accum + stepRange: '{{ "%d-%d" % (fhr-1, fhr) }}' clevs: [0.25, 0.50, 0.75, 1.0, 2.0] cmap: gist_ncar colors: pcp_colors_high @@ -1210,6 +2063,9 @@ lwtp: # Lightning with total precip unit: in mfrp: # Fire radiative power sfc: + cfgrib: + shortName: cfnsf + typeOfLevel: surface clevs: [0, 10, 25, 50, 100, 250] colors: fire_power_colors ncl_name: CFNSF_P0_L1_{grid} @@ -1221,10 +2077,20 @@ mfrp: # Fire radiative power mref: # Maximum reflectivity for past hour at 1 km AGL sfc: <<: *refl + cfgrib: + parameterNumber: 198 + typeOfLevel: heightAboveGround + stepType: max + level: 1000 ncl_name: MAXREF_P8_L103_{grid}_max1h title: Max 1km agl Reflectivity (over prev hr) oc: # Organic Carbon sfc: &ocsfc + cfgrib: + shortName: pmtf + typeOfLevel: hybrid + level: 1 + constituentType: 62014 clevs: [0.05, 0.1, 0.5, 1, 2, 3, 5, 7, 10, 15, 20, 30, 50, 100] cmap: jet colors: aod_colors @@ -1241,17 +2107,32 @@ oc: # Organic Carbon oc1: # Organic Carbon 1 sfc: <<: *ocsfc + cfgrib: + shortName: pmtf + typeOfLevel: hybrid + level: 1 + constituentType: 62016 ncl_name: PMTF_P48_L105_{grid}_A62016 title: Surface Organic Carbon 1 transform: [] oc2: # Organic Carbon 2 sfc: <<: *ocsfc + cfgrib: + shortName: pmtf + typeOfLevel: hybrid + level: 1 + constituentType: 62015 ncl_name: PMTF_P48_L105_{grid}_A62015 title: Surface Organic Carbon 2 transform: [] PM10: # PM10, global chem sfc: &pmsfc + cfgrib: + shortName: pmtc + typeOfLevel: surface + constituentType: 62000 + scaledValueOfFirstSize: 10 clevs: [0.5, 1, 2, 5, 10, 20, 30, 50, 70, 100, 150, 200, 500, 1000] cmap: jet colors: aod_colors @@ -1263,19 +2144,49 @@ PM10: # PM10, global chem PM25: # PM25, global chem sfc: <<: *pmsfc + cfgrib: + shortName: pmtf + typeOfLevel: surface + constituentType: 62000 + scaledValueOfFirstSize: 25 ncl_name: PMTF_P48_L1_{grid}_A62000 title: PM25 pres: sfc: - clevs: !!python/object/apply:numpy.arange [650, 1051, 4] + cfgrib: + shortName: sp + level: 0 + typeOfLevel: surface + clevs: !arange [650, 1051, 4] cmap: gist_ncar colors: ps_colors ncl_name: PRES_P0_L1_{grid} ticks: 20 + title: Surface Pressure transform: conversions.pa_to_hpa unit: hPa msl: - clevs: !!python/object/apply:numpy.arange [976, 1051, 4] + cfgrib: + hrrr: &named_mslp + shortName: mslma + typeOfLevel: meanSea + level: 0 + hrrrcar: &unnamed_mslp + <<: *named_mslp + shortName: unknown + parameterCategory: 3 + parameterNumber: 198 + hrrrhi: + <<: *unnamed_mslp + global: + <<: *named_mslp + shortName: prmsl + mpas: + <<: *named_mslp + rrfs: + <<: *named_mslp + shortName: mslet + clevs: !arange [976, 1051, 4] cmap: Spectral_r colors: pmsl_colors ncl_name: @@ -1296,12 +2207,19 @@ pres: unit: hPa wind: 10m ua: + cfgrib: + shortName: pres + typeOfLevel: hybrid ncl_name: PRES_P0_L105_{grid} presmin: msl: accumulate: True annotate: True - clevs: !!python/object/apply:numpy.arange [860, 1001, 10] + cfgrib: + shortName: mslet + typeOfLevel: meanSea + level: 0 + clevs: !arange [860, 1001, 10] cmap: NWSReflectivity colors: cref_colors ncl_name: @@ -1322,7 +2240,22 @@ presmin: # wind: 10m ptmp: # Potential temperature 2m: - clevs: 
!!python/object/apply:numpy.arange [210, 350, 5] + cfgrib: + hrrr: &hrrr_pt + shortName: pt + typeOfLevel: heightAboveGround + level: 2 + hrrrcar: + <<: *hrrr_pt + hrrrhi: + <<: *hrrr_pt + mpas: + <<: *hrrr_pt + rrfs: + shortName: pt + typeOfLevel: surface + level: 0 + clevs: !arange [210, 350, 5] cmap: jet colors: t_colors ncl_name: POT_P0_L103_{grid} @@ -1331,6 +2264,24 @@ ptmp: # Potential temperature wind: 10m ptyp: # Hourly total precipitation sfc: + cfgrib: &cfgrib_precip + hrrr: &named_tp + shortName: tp + typeOfLevel: surface + level: 0 + stepRange: '{{ "%d-%d" % (fhr-1, fhr) }}' + stepType: accum + hrrrcar: &unnamed_tp + <<: *named_tp + shortName: unknown + parameterCategory: 1 + parameterNumber: 8 + hrrrhi: + <<: *unnamed_tp + mpas: + <<: *named_tp + rrfs: + <<: *named_tp clevs: [0.002, 0.01, 0.05, 0.1, 0.25, 0.50, 0.75, 1.0, 2.0] cmap: gist_ncar colors: pcp_colors @@ -1385,32 +2336,92 @@ ptyp: # Hourly total precipitation transform: conversions.kgm2_to_in unit: in pwtr: # Precipitable water - sfc: - clevs: !!python/object/apply:numpy.arange [4, 81, 4] + sfc: &pwtr + cfgrib: + hrrr: &pwat + shortName: pwat + typeOfLevel: atmosphereSingleLayer + level: 0 + hrrrcar: + <<: *pwat + typeOfLevel: unknown + hrrrhi: + <<: *pwat + typeOfLevel: unknown + global: + <<: *pwat + mpas: + <<: *pwat + rrfs: + <<: *pwat + clevs: !arange [4, 81, 4] cmap: gist_ncar colors: pw_colors ncl_name: PWAT_P0_L200_{grid} ticks: 4 unit: mm + ref: # Maximum reflectivity for past hour at 1 km AGL m10: <<: *refl + cfgrib: + hrrr: &named_refd + shortName: refd + typeOfLevel: isothermal + level: 263 + stepType: instant + hrrrcar: + <<: *named_refd + parameterCategory: 16 + parameterNumber: 195 + shortName: unknown + hrrrhi: + <<: *named_refd + parameterCategory: 16 + parameterNumber: 195 + shortName: unknown + mpas: + <<: *named_refd + rrfs: + <<: *named_refd + shortName: rare ncl_name: REFD_P0_L20_{grid} title: -10C Isothermal Reflectivity maxm10: <<: *refl + cfgrib: + hrrr: &named_maxm10 + shortName: refd + typeOfLevel: isothermal + level: 263 + stepType: max + hrrrcar: + <<: *named_maxm10 + shortName: unknown + parameterCategory: 16 + parameterNumber: 195 + mpas: + <<: *named_maxm10 ncl_name: REFD_P8_L20_{grid}_max1h title: Max 1h -10C Isothermal Reflectivity rh: # Relative Humidity 2m: &rh + cfgrib: + shortName: 2r + typeOfLevel: heightAboveGround + level: 2 clevs: [10, 20, 30, 40, 50, 60, 70, 80, 90, 95, 100, 105] cmap: gist_ncar colors: rainbow12_colors ncl_name: RH_P0_L103_{grid} + title: 2 m Relative Humidity ticks: 10 unit: '%' 500mb: &rh_ua <<: *rh + cfgrib: + shortName: r + typeOfLevel: isobaricInhPa ncl_name: RH_P0_L100_{grid} contours: pres_sfc: @@ -1476,6 +2487,9 @@ rh: # Relative Humidity variable: rh mean: # Mean RH 850mb to 500mb <<: *rh + cfgrib: + shortName: r + typeOfLevel: isobaricInhPa print_units: false title: Mean 850-500mb RH (%, shaded), 700mb Wind (kt) transform: @@ -1488,10 +2502,29 @@ rh: # Relative Humidity wind: 700mb pw: # RH wrt Precipitable Water <<: *rh + cfgrib: + parameterNumber: 242 + typeOfLevel: atmosphere + level: 0 ncl_name: RHPW_P0_L10_{grid} title: Relative Humidity wrt Precipitable Water rvil: # Radar-derived Vertically Integrated Liquid sfc: &vert_int_liq + cfgrib: + hrrr: &hrrr_rvil + shortName: veril + typeOfLevel: atmosphere + level: 0 + hrrrcar: + <<: *hrrr_rvil + hrrrhi: + <<: *hrrr_rvil + mpas: + <<: *hrrr_rvil + typeOfLevel: atmosphereSingleLayer + rrfs: + <<: *hrrr_rvil + typeOfLevel: atmosphereSingleLayer clevs: [0.05, 0.15, 0.76, 3.47, 6.92, 12, 31.6, 35, 
40, 45, 50, 55, 60, 65, 70] cmap: NWSReflectivity colors: cref_colors @@ -1507,11 +2540,16 @@ rvil: # Radar-derived Vertically Integrated Liquid unit: kg/m$^{2}$ rwmr: # Rain Mixing Ratio ua: - ncl_name: - nat: RWMR_P0_L105_{grid} - prs: RWMR_P0_L100_{grid} + cfgrib: + shortName: rwmr + typeOfLevel: '{{ "isobaricInhPa" if file_type == "prs" else "hybrid" }}' + ncl_name: RWMR_P0_L105_{grid} seasalt: # Fine dust, global chem sfc: + cfgrib: + shortName: pmtf + typeOfLevel: surface + constituentType: 62008 clevs: [0.05, 0.1, 0.5, 1, 2, 3, 5, 10, 15, 20, 30, 40, 50, 100] cmap: jet colors: aod_colors @@ -1522,6 +2560,12 @@ seasalt: # Fine dust, global chem unit: $\mu g/m^3$ shear: 01km: &shear # 0-1 km + cfgrib: + shortName: vucsh + typeOfLevel: heightAboveGroundLayer + level: 0 + topLevel: 0 + bottomLevel: 1000 clevs: [5, 10, 20, 30, 40, 50, 60] cmap: gist_ncar colors: wind_colors_high @@ -1530,27 +2574,46 @@ shear: transform: funcs: [vector_magnitude, conversions.ms_to_kt] kwargs: - field2: VVCSH_P0_2L103_{grid} - one_lev: True - split: True - level: 01km + field2_id: vshear_01km ticks: 0 title: 0–1 km Bulk Shear unit: kt 06km: # 0-6 km <<: *shear + cfgrib: + shortName: vucsh + typeOfLevel: heightAboveGroundLayer + level: 0 + topLevel: 0 + bottomLevel: 6000 clevs: [20, 30, 40, 50, 60, 70, 80, 90, 100] split: True transform: funcs: [vector_magnitude, conversions.ms_to_kt] kwargs: - field2: VVCSH_P0_2L103_{grid} - one_lev: True - split: True - level: 06km + field2_id: vshear_06km title: 0–6 km Bulk Shear +vshear: + 01km: + cfgrib: + shortName: vvcsh + typeOfLevel: heightAboveGroundLayer + topLevel: 0 + level: 0 + bottomLevel: 1000 + 06km: + cfgrib: + shortName: vvcsh + typeOfLevel: heightAboveGroundLayer + level: 0 + topLevel: 0 + bottomLevel: 6000 shtfl: # Sensible Heat Net Flux sfc: &shtflsfc + cfgrib: + shortName: ishf + typeOfLevel: surface + stepType: instant cmap: magma_r clevs: [-100, -50, -25, -10, 0, 10, 25, 50, 100, 150, 200, 250, 300, 400, 500, 750] cmap: RdYlBu_r @@ -1562,13 +2625,17 @@ shtfl: # Sensible Heat Net Flux shtflavg: sfc: <<: *shtflsfc + cfgrib: + shortName: avg_ishf + typeOfLevel: surface + stepType: avg ncl_name: SHTFL_P8_L1_{grid}_avg6h title: Sensible Heat Net Flux 6h Avg sipd: # Supercooled Large Droplet Icing # levels chosen are arbitrary based on initial plot samples # additional levels may be requested in the future. 500ft: &sipd - clevs: !!python/object/apply:numpy.arange [5, 86, 10] + clevs: !arange [5, 86, 10] cmap: gist_ncar colors: icprb_colors ncl_name: SIPD_P0_L102_{grid} @@ -1593,8 +2660,11 @@ sipd: # Supercooled Large Droplet Icing kwargs: level: 1000ft variable: sipd -slw: # Supercooled Liquid Water +slw: # Supercooled Liquid Water -- requires nat data sfc: + cfgrib: + shortName: t + typeOfLevel: hybrid clevs: [0.05, 0.1, 0.2, .3, .4, .5, .75, 1., 1.25, 1.5, 2., 3., 4., 5., 6.] 
cmap: gist_ncar colors: slw_colors @@ -1605,16 +2675,36 @@ slw: # Supercooled Liquid Water unit: kg/m$^{2}$ snmr: # Snow Mixing Ratio ua: + cfgrib: + shortName: snmr + typeOfLevel: '{{ "isobaricInhPa" if file_type == "prs" else "hybrid" }}' ncl_name: nat: SNMR_P0_L105_{grid} prs: SNMR_P0_L100_{grid} snod: # Snow Depth sfc: <<: *snow + cfgrib: + shortName: sde + typeOfLevel: surface ncl_name: SNOD_P0_L1_{grid} title: Cycled Snow Depth soilm: # Soil Moisture Availability sfc: + cfgrib: + hrrr: &named_mstav + shortName: mstav + typeOfLevel: depthBelowLand + hrrrcar: + <<: *named_mstav + hrrrhi: &unnamed_mstav + <<: *named_mstav + shortName: unknown + parameterNumber: 194 + parameterCategory: 0 + rrfs: + <<: *named_mstav + level: 0 clevs: [0, 5, 15, 25, 35, 45, 55, 65, 75, 85, 95] cmap: Spectral colors: soilm_colors @@ -1630,7 +2720,12 @@ soilm: # Soil Moisture Availability unit: "%" soilt: # Soil Temperature 0cm: &soilt_levs - clevs: !!python/object/apply:numpy.arange [235, 331, 5] + cfgrib: + shortName: st + typeOfLevel: depthBelowLandLayer + scaledValueOfFirstFixedSurface: 0 + scaledValueOfSecondFixedSurface: 0 + clevs: !arange [235, 331, 5] cmap: gist_ncar colors: tsfc_colors ncl_name: TSOIL_P0_2L106_{grid} @@ -1639,33 +2734,114 @@ soilt: # Soil Temperature unit: K 1cm: <<: *soilt_levs + cfgrib: + shortName: st + typeOfLevel: depthBelowLandLayer + scaledValueOfFirstFixedSurface: 1 + scaledValueOfSecondFixedSurface: 1 title: Soil Temperature at 1cm 4cm: <<: *soilt_levs + cfgrib: + shortName: st + typeOfLevel: depthBelowLandLayer + scaledValueOfFirstFixedSurface: 4 + scaledValueOfSecondFixedSurface: 4 title: Soil Temperature at 4cm 10cm: <<: *soilt_levs + cfgrib: + hrrr: &st_10cm + shortName: st + typeOfLevel: depthBelowLandLayer + scaledValueOfFirstFixedSurface: 10 + scaledValueOfSecondFixedSurface: 10 + hrrrcar: + <<: *st_10cm + hrrrhi: + <<: *st_10cm + global: + <<: *st_10cm + scaledValueOfFirstFixedSurface: 0 + mpas: + <<: *st_10cm + rrfs: + <<: *st_10cm title: Soil Temperature at 10cm 30cm: <<: *soilt_levs + cfgrib: + shortName: st + typeOfLevel: depthBelowLandLayer + scaledValueOfFirstFixedSurface: 30 + scaledValueOfSecondFixedSurface: 30 title: Soil Temperature at 30cm 40cm: <<: *soilt_levs title: Soil Temperature at 30cm 60cm: <<: *soilt_levs + cfgrib: + shortName: st + typeOfLevel: depthBelowLandLayer + scaledValueOfFirstFixedSurface: 60 + scaledValueOfSecondFixedSurface: 60 title: Soil Temperature at 60cm 1m: <<: *soilt_levs + cfgrib: + hrrr: &st_1m + shortName: st + typeOfLevel: depthBelowLandLayer + scaledValueOfFirstFixedSurface: 100 + scaledValueOfSecondFixedSurface: 100 + hrrrcar: + <<: *st_1m + hrrrhi: + <<: *st_1m + global: + <<: *st_1m + scaledValueOfFirstFixedSurface: 40 + mpas: + <<: *st_1m + rrfs: + <<: *st_1m title: Soil Temperature at 1m 1.6m: <<: *soilt_levs + cfgrib: + shortName: st + typeOfLevel: depthBelowLandLayer + scaledValueOfFirstFixedSurface: 160 + scaledValueOfSecondFixedSurface: 160 title: Soil Temperature at 1.6m 3m: <<: *soilt_levs + cfgrib: + shortName: st + typeOfLevel: depthBelowLandLayer + scaledValueOfFirstFixedSurface: 300 + scaledValueOfSecondFixedSurface: 300 title: Soil Temperature at 3m soilw: # Soil Moisture 0cm: &soilw_levs + cfgrib: + hrrr: &soilw_0cm + shortName: soilw + typeOfLevel: depthBelowLandLayer + scaledValueOfFirstFixedSurface: 0 + scaledValueOfSecondFixedSurface: 0 + hrrrcar: + <<: *soilw_0cm + shortName: unknown + parameterCategory: 0 + parameterNumber: 2 + hrrrhi: + <<: *soilw_0cm + mpas: + <<: *soilw_0cm + rrfs: + <<: *soilw_0cm 
clevs: [0, 0.01, 0.05, 0.10, 0.15, 0.20, 0.25, 0.30, 0.35, 0.40, 0.45, 0.50] cmap: jet_r colors: soilw_colors @@ -1675,33 +2851,206 @@ soilw: # Soil Moisture unit: fraction 1cm: <<: *soilw_levs + cfgrib: + hrrr: &soilw_1cm + shortName: soilw + typeOfLevel: depthBelowLandLayer + scaledValueOfFirstFixedSurface: 1 + scaledValueOfSecondFixedSurface: 1 + hrrrcar: + <<: *soilw_1cm + shortName: unknown + parameterCategory: 0 + parameterNumber: 2 + hrrrhi: + <<: *soilw_1cm + mpas: + <<: *soilw_1cm + rrfs: + <<: *soilw_1cm title: Soil Moisture at 1cm 4cm: <<: *soilw_levs + cfgrib: + hrrr: &soilw_4cm + shortName: soilw + typeOfLevel: depthBelowLandLayer + scaledValueOfFirstFixedSurface: 4 + scaledValueOfSecondFixedSurface: 4 + hrrrcar: + <<: *soilw_4cm + shortName: unknown + parameterCategory: 0 + parameterNumber: 2 + hrrrhi: + <<: *soilw_4cm + mpas: + <<: *soilw_4cm + rrfs: + <<: *soilw_4cm title: Soil Moisture at 4cm 10cm: <<: *soilw_levs + cfgrib: + hrrr: &soilw_cfgrib_10cm + shortName: soilw + typeOfLevel: depthBelowLandLayer + scaledValueOfFirstFixedSurface: 10 + scaledValueOfSecondFixedSurface: 10 + hrrrcar: + <<: *soilw_cfgrib_10cm + shortName: unknown + parameterCategory: 0 + parameterNumber: 2 + hrrrhi: + <<: *soilw_cfgrib_10cm + global: + <<: *soilw_cfgrib_10cm + scaledValueOfFirstFixedSurface: 0 + mpas: + <<: *soilw_cfgrib_10cm + rrfs: + <<: *soilw_cfgrib_10cm title: Soil Moisture at 10cm 30cm: <<: *soilw_levs + cfgrib: + hrrr: &soilw_30cm + shortName: soilw + typeOfLevel: depthBelowLandLayer + scaledValueOfFirstFixedSurface: 30 + scaledValueOfSecondFixedSurface: 30 + hrrrcar: + <<: *soilw_30cm + shortName: unknown + parameterCategory: 0 + parameterNumber: 2 + hrrrhi: + <<: *soilw_30cm + mpas: + <<: *soilw_30cm + rrfs: + <<: *soilw_30cm title: Soil Moisture at 30cm 40cm: <<: *soilw_levs + cfgrib: + hrrr: &soilw_40cm + shortName: soilw + typeOfLevel: depthBelowLandLayer + scaledValueOfFirstFixedSurface: 40 + scaledValueOfSecondFixedSurface: 40 + hrrrcar: + <<: *soilw_40cm + shortName: unknown + parameterCategory: 0 + parameterNumber: 2 + hrrrhi: + <<: *soilw_40cm + rrfs: + <<: *soilw_40cm title: Soil Moisture at 40cm 60cm: <<: *soilw_levs + cfgrib: + hrrr: &soilw_60cm + shortName: soilw + typeOfLevel: depthBelowLandLayer + scaledValueOfFirstFixedSurface: 60 + scaledValueOfSecondFixedSurface: 60 + hrrrcar: + <<: *soilw_60cm + shortName: unknown + parameterCategory: 0 + parameterNumber: 2 + hrrrhi: + <<: *soilw_60cm + mpas: + <<: *soilw_60cm + rrfs: + <<: *soilw_60cm title: Soil Moisture at 60cm 1m: <<: *soilw_levs + cfgrib: + hrrr: &soilw_cfgrib_100cm + shortName: soilw + typeOfLevel: depthBelowLandLayer + scaledValueOfFirstFixedSurface: 100 + scaledValueOfSecondFixedSurface: 100 + hrrrcar: + <<: *soilw_cfgrib_100cm + shortName: unknown + parameterCategory: 0 + parameterNumber: 2 + hrrrhi: + <<: *soilw_cfgrib_100cm + global: + <<: *soilw_cfgrib_100cm + scaledValueOfFirstFixedSurface: 40 + mpas: + <<: *soilw_cfgrib_100cm + rrfs: + <<: *soilw_cfgrib_100cm title: Soil Moisture at 1m 1.6m: <<: *soilw_levs + cfgrib: + hrrr: &soilw_cfgrib_160cm + shortName: soilw + typeOfLevel: depthBelowLandLayer + scaledValueOfFirstFixedSurface: 160 + scaledValueOfSecondFixedSurface: 160 + hrrrcar: + <<: *soilw_cfgrib_160cm + shortName: unknown + parameterCategory: 0 + parameterNumber: 2 + hrrrhi: + <<: *soilw_cfgrib_160cm + mpas: + <<: *soilw_cfgrib_160cm + rrfs: + <<: *soilw_cfgrib_160cm title: Soil Moisture at 1.6m 3m: <<: *soilw_levs + cfgrib: + hrrr: &soilw_cfgrib_300cm + shortName: soilw + typeOfLevel: 
depthBelowLandLayer + scaledValueOfFirstFixedSurface: 300 + scaledValueOfSecondFixedSurface: 300 + hrrrcar: + <<: *soilw_cfgrib_300cm + shortName: unknown + parameterCategory: 0 + parameterNumber: 2 + hrrrhi: + <<: *soilw_cfgrib_300cm + mpas: + <<: *soilw_cfgrib_300cm + rrfs: + <<: *soilw_cfgrib_300cm title: Soil Moisture at 3m solar: # Incoming Solar Radiation sfc: &incoming_radiation + cfgrib: + hrrr: &sdswrf + shortName: sdswrf + level: 0 + typeOfLevel: surface + hrrrcar: + <<: *sdswrf + hrrrhi: + <<: *sdswrf + global: + <<: *sdswrf + mpas: + <<: *sdswrf + rrfs: + <<: *sdswrf + shortName: csdsf clevs: [50, 100, 200, 300, 400, 500, 600, 700, 800, 900, 1000, 1100] cmap: gist_ncar colors: vort_colors @@ -1711,11 +3060,22 @@ solar: # Incoming Solar Radiation unit: W/m$^{2}$ sphum: # Specific humidity 2m: + cfgrib: + shortName: 2sh + typeOfLevel: heightAboveGround ncl_name: SPFH_P0_L103_{grid} ua: + cfgrib: + shortName: q + typeOfLevel: hybrid ncl_name: SPFH_P0_L105_{grid} ssrun: # Storm Surface Runoff sfc: &precip + cfgrib: + shortName: ssrun + typeOfLevel: surface + level: 0 + stepType: accum clevs: [0.002, 0.01, 0.05, 0.1, 0.25, 0.50, 0.75, 1.0, 2.0] cmap: gist_ncar colors: pcp_colors @@ -1726,6 +3086,11 @@ ssrun: # Storm Surface Runoff unit: in sulf: # Sulfate, global chem sfc: + cfgrib: + shortName: pmtf + typeOfLevel: hybrid + level: 1 + constituentType: 62006 clevs: [0.05, 0.1, 0.2, 0.5, 1, 2, 3, 5, 10, 15, 20, 30 ,40 ,50] cmap: jet colors: aod_colors @@ -1736,7 +3101,11 @@ sulf: # Sulfate, global chem unit: $\mu g/m^3$ temp: # Temperature 2ds: # 2m - Sfc - clevs: !!python/object/apply:numpy.arange [-32, 33, 2] + cfgrib: + shortName: 2t + level: 2 + typeOfLevel: heightAboveGround + clevs: !arange [-32, 33, 2] cmap: Spectral_r colors: centered_diff ncl_name: TMP_P0_L103_{grid} # 2m Temp @@ -1751,16 +3120,24 @@ temp: # Temperature wind: False 2m: &sfc_temp annotate: True - clevs: !!python/object/apply:numpy.arange [-50, 141, 10] + cfgrib: + shortName: 2t + level: 2 + typeOfLevel: heightAboveGround + clevs: !arange [-50, 141, 10] cmap: gist_ncar colors: tsfc_colors ncl_name: TMP_P0_L103_{grid} ticks: 10 + title: 2m Temperature transform: conversions.k_to_f unit: F wind: 10m 500mb: &ua_temp - clevs: !!python/object/apply:numpy.arange [-40, 40, 2.5] + cfgrib: + shortName: t + typeOfLevel: isobaricInhPa + clevs: !arange [-40, 40, 2.5] cmap: jet colors: ua_temp_colors contours: @@ -1818,11 +3195,18 @@ temp: # Temperature levels: [0, 925] sfc: <<: *sfc_temp + cfgrib: + shortName: t + level: 0 + typeOfLevel: surface ncl_name: TMP_P0_L1_{grid} title: Skin Temperature wind: False ua: <<: *ua_temp + cfgrib: + shortName: t + typeOfLevel: '{{ "isobaricInhPa" if file_type == "prs" else "hybrid" }}' thick: 500mb: <<: *ua_gh @@ -1835,14 +3219,16 @@ thick: totp: # Hourly total precipitation sfc: <<: *precip + cfgrib: + <<: *cfgrib_precip contours: pres_msl: colors: red linewidths: 0.4 - levels: !!python/object/apply:numpy.arange [650, 1051, 2] + levels: !arange [650, 1051, 2] thick_500mb: colors: blue - levels: !!python/object/apply:numpy.arange [402, 601, 6] + levels: !arange [402, 601, 6] linewidths: 0.4 linestyles: dashed ncl_name: APCP_P8_L1_{grid}_acc1h @@ -1850,20 +3236,29 @@ totp: # Hourly total precipitation totp6h: # 6-hourly total precipitation sfc: <<: *precip + cfgrib: + shortName: tp + typeOfLevel: surface + stepRange: '{{ "%d-%d" % (0, fhr) }}' + stepType: accum contours: pres_msl: colors: red linewidths: 0.4 - levels: !!python/object/apply:numpy.arange [650, 1051, 2] + levels: !arange 
[650, 1051, 2] thick_500mb: colors: blue - levels: !!python/object/apply:numpy.arange [402, 601, 6] + levels: !arange [402, 601, 6] linewidths: 0.4 linestyles: dashed ncl_name: APCP_P8_L1_{grid}_acc6h title: 6 hr Total Precip trc1: int: + cfgrib: + parameterNumber: 1 + typeOfLevel: atmosphereSingleLayer + level: 0 clevs: [1, 4, 7, 11, 15, 20, 25, 30, 40, 50, 75, 150, 250, 500] colors: smoke_colors ncl_name: @@ -1877,7 +3272,8 @@ trc1: title: Vertically Integrated Smoke transform: conversions.to_micro unit: $mg/m^2$ - 1000ft: &tracer1 + 1000ft: &tracer1 + # requires nat data clevs: [1, 2, 4, 6, 8, 12, 16, 20, 25, 30, 40, 60, 100, 200] colors: smoke_colors ncl_name: MASSDEN_P0_L105_{grid} @@ -1889,10 +3285,15 @@ trc1: unit: $\mu g/m^3$ vertical_index: 4 6000ft: + # requires nat data <<: *tracer1 title: 6000ft AGL Smoke vertical_index: 11 sfc: + cfgrib: + parameterNumber: 0 + typeOfLevel: heightAboveGround + level: 8 clevs: [1, 2, 4, 6, 8, 12, 16, 20, 25, 30, 40, 60, 100, 200] colors: smoke_colors ncl_name: @@ -1909,12 +3310,34 @@ trc1: wind: 10m u: 10m: &agl_uwind + cfgrib: + shortName: 10u + level: 10 + typeOfLevel: heightAboveGround ncl_name: UGRD_P0_L103_{grid} transform: conversions.ms_to_kt - 80m: *agl_uwind - 160m: *agl_uwind - 320m: *agl_uwind + 80m: + <<: *agl_uwind + cfgrib: + shortName: u + level: 80 + typeOfLevel: heightAboveGround + 160m: + <<: *agl_uwind + cfgrib: + shortName: u + level: 160 + typeOfLevel: heightAboveGround + 320m: + <<: *agl_uwind + cfgrib: + shortName: u + level: 320 + typeOfLevel: heightAboveGround 5mb: &ua_uwind + cfgrib: + shortName: u + typeOfLevel: isobaricInhPa ncl_name: prs: UGRD_P0_L100_{grid} nat: UGRD_P0_L105_{grid} @@ -1934,14 +3357,24 @@ u: <<: *nat_uwind vertical_index: 11 max: + cfgrib: + shortName: u + typeOfLevel: maxWind ncl_name: MAXUW_P8_L103_{grid}_max1h transform: conversions.ms_to_kt ua: <<: *ua_uwind + cfgrib: + shortName: u + typeOfLevel: hybrid ulwrf: # Upward Longwave Radiation Flux sfc: <<: *radiation_flux - clevs: !!python/object/apply:numpy.arange [350, 601, 10] + cfgrib: + shortName: sulwrf + typeOfLevel: surface + stepType: instant + clevs: !arange [350, 601, 10] cmap: gist_ncar colors: radiation_colors ncl_name: ULWRF_P0_L1_{grid} @@ -1949,7 +3382,21 @@ ulwrf: # Upward Longwave Radiation Flux title: Upward LW Radiation Flux, Surface unit: W/m$^{2}$ top: # Nominal top of atmosphere - clevs: !!python/object/apply:numpy.arange [80, 341, 2] + cfgrib: + hrrr: &ulwrf + parameterNumber: 4 + typeOfLevel: nominalTop + level: 0 + hrrrcar: + <<: *ulwrf + hrrrhi: + <<: *ulwrf + mpas: + <<: *ulwrf + rrfs: + <<: *ulwrf + stepType: avg + clevs: !arange [80, 341, 2] cmap: ir_rgbv_r colors: radiation_mix_colors ncl_name: ULWRF_P0_L8_{grid} @@ -1959,7 +3406,11 @@ ulwrf: # Upward Longwave Radiation Flux ulwrfavg: # Upward Longwave Radiation Flux sfc: <<: *radiation_flux - clevs: !!python/object/apply:numpy.arange [350, 601, 10] + cfgrib: + shortName: sulwrf + typeOfLevel: surface + stepType: avg + clevs: !arange [350, 601, 10] cmap: gist_ncar colors: radiation_colors ncl_name: ULWRF_P0_L1_{grid}_avg6h @@ -1967,7 +3418,12 @@ ulwrfavg: # Upward Longwave Radiation Flux title: Upward LW Radiation Flux, Surface unit: W/m$^{2}$ top: # Nominal top of atmosphere - clevs: !!python/object/apply:numpy.arange [80, 341, 2] + cfgrib: + parameterCategory: 5 + parameterNumber: 4 + typeOfLevel: nominalTop + stepType: avg + clevs: !arange [80, 341, 2] cmap: ir_rgbv_r colors: radiation_mix_colors ncl_name: ULWRF_P0_L8_{grid}_avg6h @@ -1977,6 +3433,10 @@ ulwrfavg: # 
Upward Longwave Radiation Flux uswrf: # Upward Shortwave Radiation Flux sfc: <<: *radiation_flux + cfgrib: + shortName: suswrf + typeOfLevel: surface + stepType: instant clevs: [0, 50, 100, 150, 200, 250, 300, 400, 500, 600, 700, 800, 900, 1000] colors: rainbow12_colors ncl_name: USWRF_P0_L1_{grid} @@ -1984,7 +3444,13 @@ uswrf: # Upward Shortwave Radiation Flux title: Upward SW Radiation Flux, Surface top: # Nominal top of atmosphere <<: *radiation_flux - clevs: !!python/object/apply:numpy.arange [50, 851, 10] + cfgrib: + parameterNumber: 8 + parameterCategory: 4 + typeOfLevel: nominalTop + level: 0 + clevs: !arange [50, 851, 10] cmap: Greys_r colors: radiation_bw_colors ncl_name: USWRF_P0_L8_{grid} @@ -1993,6 +3459,10 @@ uswrf: # Upward Shortwave Radiation Flux uswrfavg: # Upward Shortwave Radiation Flux Average sfc: <<: *radiation_flux + cfgrib: + shortName: suswrf + typeOfLevel: surface + stepType: avg clevs: [0, 50, 100, 150, 200, 250, 300, 400, 500, 600, 700, 800, 900, 1000] colors: rainbow12_colors ncl_name: USWRF_P8_L1_{grid}_avg6h @@ -2000,7 +3470,7 @@ uswrfavg: # Upward Shortwave Radiation Flux Average title: Upward SW Radiation Flux 6h Avg, Surface top: # Nominal top of atmosphere <<: *radiation_flux - clevs: !!python/object/apply:numpy.arange [50, 851, 10] + clevs: !arange [50, 851, 10] cmap: Greys_r colors: radiation_bw_colors ncl_name: USWRF_P8_L8_{grid}_avg6h @@ -2008,12 +3478,31 @@ uswrfavg: # Upward Shortwave Radiation Flux Average title: Outgoing SW Radiation Flux 6h Avg, TOA v: 10m: &agl_wind + cfgrib: + shortName: 10v + level: 10 + typeOfLevel: heightAboveGround ncl_name: VGRD_P0_L103_{grid} transform: conversions.ms_to_kt - 80m: *agl_wind - 160m: *agl_wind - 320m: *agl_wind + 80m: + <<: *agl_wind + cfgrib: + shortName: v + level: 80 + typeOfLevel: heightAboveGround + 160m: + <<: *agl_wind + cfgrib: + shortName: v + level: 160 + typeOfLevel: heightAboveGround + 320m: + <<: *agl_wind + cfgrib: + shortName: v + level: 320 + typeOfLevel: heightAboveGround 5mb: &ua_vwind + cfgrib: + shortName: v + typeOfLevel: isobaricInhPa ncl_name: prs: VGRD_P0_L100_{grid} nat: VGRD_P0_L105_{grid} @@ -2033,23 +3522,79 @@ v: <<: *nat_vwind vertical_index: 11 max: + cfgrib: + shortName: v + typeOfLevel: maxWind ncl_name: MAXVW_P8_L103_{grid}_max1h transform: conversions.ms_to_kt ua: <<: *ua_vwind + cfgrib: + shortName: v + typeOfLevel: hybrid vbdsf: # Incoming Direct Radiation sfc: <<: *incoming_radiation + cfgrib: + hrrr: &named_vbdsf + shortName: vbdsf + typeOfLevel: surface + hrrrcar: &unnamed_vbdsf + parameterCategory: 4 + parameterNumber: 200 + typeOfLevel: unknown + hrrrhi: + <<: *unnamed_vbdsf + parameterCategory: 4 + parameterNumber: 200 + typeOfLevel: unknown + mpas: + <<: *named_vbdsf + rrfs: + <<: *named_vbdsf ncl_name: VBDSF_P0_L1_{grid} title: Incoming Direct Radiation vddsf: # Incoming Diffuse Radiation sfc: <<: *incoming_radiation + cfgrib: + hrrr: + shortName: vddsf + typeOfLevel: surface + hrrrcar: + parameterCategory: 4 + parameterNumber: 201 + typeOfLevel: unknown + hrrrhi: + parameterCategory: 4 + parameterNumber: 201 + typeOfLevel: unknown + rrfs: + shortName: vddsf + typeOfLevel: surface ncl_name: VDDSF_P0_L1_{grid} title: Incoming Diffuse Radiation vig: # Vertically-integrated graupel sfc: - clevs: !!python/object/apply:numpy.arange [5, 91, 5] + cfgrib: + hrrr: &hrrr_vig + parameterCategory: 1 + parameterNumber: 74 + typeOfLevel: atmosphereSingleLayer + level: 0 + hrrrcar: + <<: *hrrr_vig + typeOfLevel: unknown + hrrrhi: + <<: *hrrr_vig + typeOfLevel:
unknown + mpas: + <<: *hrrr_vig + rrfs: + <<: *hrrr_vig + clevs: !arange [5, 91, 5] cmap: jet colors: graupel_colors ncl_name: TCOLG_P8_L200_{grid}_max1h @@ -2063,6 +3608,9 @@ vil: # Vertically Integrated Liquid title: Vertically Integrated Liquid vis: # Visibility sfc: &vis + cfgrib: + shortName: vis + typeOfLevel: surface # see the description provided in adb_graphics/utils.py, for the join_ranges method. clevs: !join_ranges [[0, 10, 0.1], [10, 51, 1.0]] cmap: gist_ncar @@ -2079,7 +3627,10 @@ visbsn: # Visibility incl. blowing snow title: Sfc Visibility incl. blowing snow (exp) vort: # Absolute vorticity 500mb: - clevs: !!python/object/apply:numpy.arange [6, 29, 2] + cfgrib: + shortName: absv + typeOfLevel: isobaricInhPa + clevs: !arange [6, 29, 2] cmap: gist_ncar colors: vort_colors contours: @@ -2091,7 +3642,25 @@ vort: # Absolute vorticity unit: 1E-5/s vvel: # Vertical velocity 700mb: - clevs: !!python/object/apply:numpy.arange [-17, 34, 5] + cfgrib: + global: &named_vvel + shortName: w + typeOfLevel: isobaricInhPa + hrrr: + <<: *named_vvel + hrrrcar: + parameterCategory: 1 + parameterNumber: 74 + typeOfLevel: unknown + hrrrhi: + <<: *named_vvel + mpas: + <<: *named_vvel + shortName: wz + rrfs: + <<: *named_vvel + shortName: wz + clevs: !arange [-17, 34, 5] cmap: gist_ncar colors: vvel_colors contours: @@ -2102,6 +3671,11 @@ vvel: # Vertical velocity transform: conversions.vvel_scale unit: -Pa/s * 10 mean: # Mean Vertical Velocity + cfgrib: + shortName: wz + typeOfLevel: sigmaLayer + stepType: avg + level: 1 clevs: [-20, -15, -10, -5, -1, -0.75, -0.5, -0.25, -0.1, 0.1, 0.25, 0.5, 0.75, 1, 5, 10, 15, 20] cmap: Spectral_r colors: mean_vvel_colors @@ -2111,7 +3685,12 @@ vvel: # Vertical velocity unit: m/s vvort: # Vertical vorticity mx01: &vvort # Hourly maximum of vertical vorticity over 0-2 km layer - clevs: !!python/object/apply:numpy.arange [0.0025, 0.0301, 0.0025] + cfgrib: + shortName: max_vo + typeOfLevel: heightAboveGroundLayer + topLevel: 1000 + stepType: max + clevs: !arange [0.0025, 0.0301, 0.0025] cmap: gist_ncar colors: vort_colors ncl_name: RELV_P8_2L103_{grid}_max1h @@ -2120,9 +3699,18 @@ vvort: # Vertical vorticity unit: 1/s mx02: # Hourly maximum of vertical vorticity over 0-2 km layer <<: *vvort + cfgrib: + shortName: max_vo + typeOfLevel: heightAboveGroundLayer + topLevel: 2000 + stepType: max title: 0-2km Max Vertical Vorticity (over prev hour) weasd: # Water equivalent of accumulated snow depth sfc: + cfgrib: + shortName: sdwe + typeOfLevel: surface + stepType: instant clevs: [0.01, 0.1, 0.3, 0.5, 1, 2, 3, 4, 5, 7.5, 10, 20] cmap: gist_ncar colors: snow_colors @@ -2133,6 +3721,9 @@ weasd: # Water equivalent of accumulated snow depth unit: in snoliqr: # Snow-liquid ratio (from U. Utah diagnostic in UPP) sfc: + cfgrib: + shortName: rsn + typeOfLevel: surface clevs: [6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28] cmap: gist_ncar colors: snow_colors @@ -2144,7 +3735,12 @@ snoliqr: # Snow-liquid ratio (from U. 
Utah diagnostic in UPP) windmax: 10m: accumulate: True - clevs: !!python/object/apply:numpy.arange [5, 95, 5] + cfgrib: + shortName: max_10si + typeOfLevel: heightAboveGround + level: 10 + stepType: max + clevs: !arange [5, 95, 5] cmap: gist_ncar colors: wind_colors ncl_name: WIND_P8_L103_{grid}_max1h @@ -2154,7 +3750,11 @@ windmax: unit: kt wspeed: # Wind Speed 10m: &ua_wspeed - clevs: !!python/object/apply:numpy.arange [5, 95, 5] + cfgrib: + shortName: 10u + level: 10 + typeOfLevel: heightAboveGround + clevs: !arange [5, 95, 5] cmap: gist_ncar colors: wind_colors ncl_name: UGRD_P0_L103_{grid} @@ -2163,10 +3763,13 @@ wspeed: # Wind Speed transform: funcs: [vector_magnitude, conversions.ms_to_kt] kwargs: - field2: VGRD_P0_L103_{grid} + field2_id: v_10m unit: kt wind: True 5mb: &ua_wspeed_high + cfgrib: + shortName: u + typeOfLevel: isobaricInhPa clevs: [20, 40, 60, 70, 80, 90, 100, 110, 120, 140, 160, 180, 200] cmap: gist_ncar colors: wind_colors_high @@ -2180,7 +3783,7 @@ wspeed: # Wind Speed transform: funcs: [vector_magnitude, conversions.ms_to_kt] kwargs: - field2: VGRD_P0_L100_{grid} + field2_id: v unit: kt wind: True 10mb: @@ -2200,9 +3803,20 @@ wspeed: # Wind Speed title: 320m Wind 80m: <<: *ua_wspeed + cfgrib: + shortName: u + typeOfLevel: heightAboveGround + level: 80 title: 80m Wind + transform: + funcs: [vector_magnitude, conversions.ms_to_kt] + kwargs: + field2_id: v_80m 850mb: <<: *ua_wspeed + cfgrib: + shortName: u + typeOfLevel: isobaricInhPa contours: gh: colors: white @@ -2212,13 +3826,41 @@ wspeed: # Wind Speed transform: funcs: [vector_magnitude, conversions.ms_to_kt] kwargs: - field2: VGRD_P0_L100_{grid} + field2_id: v max: # Hourly Maximum 10m Wind <<: *ua_wspeed + cfgrib: + shortName: max_10si + typeOfLevel: heightAboveGround + level: 10 + stepType: max ncl_name: WIND_P8_L103_{grid}_max1h title: Max 10m Wind (over prev hour) transform: conversions.ms_to_kt + wind: 10m mdn: # Hourly Maximum Downdraft Velocity + cfgrib: + hrrr: &wspeed_mdn + parameterCategory: 2 + parameterNumber: 221 + typeOfLevel: pressureFromGroundLayer + topLevel: 10000 + bottomLevel: 100000 + stepType: max + hrrrcar: + <<: *wspeed_mdn + hrrrhi: + <<: *wspeed_mdn + mpas: + <<: *wspeed_mdn + typeOfLevel: isobaricLayer + topLevel: 100 + bottomLevel: 1000 + rrfs: + <<: *wspeed_mdn + typeOfLevel: isobaricLayer + topLevel: 100 + bottomLevel: 1000 clevs: [-40, -35, -30, -25, -22.5, -20, -17.5, -15, -12.5, -10, -7.5, -5, -2.5, -2, -1.5, -1, -0.5] cmap: jet colors: mdn_colors @@ -2227,6 +3869,28 @@ wspeed: # Wind Speed title: Max Downdraft Velocity (over prev hour) unit: m/s mup: # Hourly Maximum Updraft Velocity + cfgrib: + hrrr: &wspeed_mup + parameterCategory: 2 + parameterNumber: 220 + typeOfLevel: pressureFromGroundLayer + topLevel: 10000 + bottomLevel: 100000 + stepType: max + hrrrcar: + <<: *wspeed_mup + hrrrhi: + <<: *wspeed_mup + mpas: + <<: *wspeed_mup + typeOfLevel: isobaricLayer + topLevel: 100 + bottomLevel: 1000 + rrfs: + <<: *wspeed_mup + typeOfLevel: isobaricLayer + topLevel: 100 + bottomLevel: 1000 clevs: [0.5, 1, 1.5, 2, 2.5, 5, 7.5, 10, 12.5, 15, 17.5, 20, 22.5, 25, 30, 35, 40] cmap: jet colors: mup_colors diff --git a/adb_graphics/errors.py b/adb_graphics/errors.py index f3532d99..48d7f6b4 100644 --- a/adb_graphics/errors.py +++ b/adb_graphics/errors.py @@ -1,17 +1,14 @@ -''' Errors specific to the ADB Graphics package. 
''' +"""Errors specific to the ADB Graphics package.""" -class Error(Exception): - '''Base class for handling errors''' +class FieldNotUniqueError(Exception): + """Exception raised when multiple Grib fields are found with input parameters.""" -class FieldNotUnique(Error): - '''Exception raised when multiple Grib fields are found with input parameters''' +class GribReadError(Exception): # pragma: no cover + """Exception raised when there is an error reading the grib file.""" -class GribReadError(Error): - '''Exception raised when there is an error reading the grib file.''' - - def __init__(self, name, message="was not found"): + def __init__(self, name: str, message: str = "was not found"): self.name = name self.message = message @@ -20,11 +17,6 @@ def __init__(self, name, message="was not found"): def __str__(self): return f'"{self.name}" {self.message}' -class NoGraphicsDefinitionForVariable(Error): - '''Exception raised when there is no configuration for the variable.''' - -class LevelNotFound(Error): - '''Exception raised when there is no configuration for the variable.''' -class OutsideDomain(Error): - '''Exception raised when there is no configuration for the variable.''' +class NoGraphicsDefinitionForVariableError(Exception): + """Exception raised when there is no configuration for the variable.""" diff --git a/adb_graphics/figure_builders.py b/adb_graphics/figure_builders.py index bbefc30e..e4ae1dc5 100644 --- a/adb_graphics/figure_builders.py +++ b/adb_graphics/figure_builders.py @@ -1,171 +1,158 @@ -# pylint: disable=invalid-name -''' +""" This module is where pieces of the figures are put together. Data is -compbined with maps and skewts to provide the final product. -''' +combined with maps and skewts to provide the final product. +""" import gc -import os +from argparse import Namespace +from pathlib import Path import matplotlib.pyplot as plt import numpy as np +from matplotlib import axes +from xarray import Dataset from adb_graphics.datahandler import gribfile -from adb_graphics.datahandler import gribdata -import adb_graphics.errors as errors -from adb_graphics.figures import maps from adb_graphics.figures import skewt - -AIRPORTS = 'static/Airports_locs.txt' - -def add_obs_panel(ax, model_name, obs_file, proj_info, short_name, tile): - - # pylint: disable=too-many-arguments - ''' Plot observation data provided by the obs_file - path using the assigned projection. ''' - - gribobs = gribfile.GribFile(filename=obs_file) - ax.axis('on') - field = gribdata.fieldData( - ds=gribobs.contents, +from adb_graphics.figures.maps import DataMap, DiffMap, Map, MapFields, MultiPanelDataMap + +AIRPORTS = Path(__file__).resolve().parent.parent / "static" / "Airports_locs.txt" + + +def add_obs_panel( + ax: axes.Axes, + model_name: str, + dataset: dict[str, Dataset], + proj_info: dict, + spec: dict, + short_name: str, + tile: str, +): + """ + Plot observation data provided by the dataset + path using the assigned projection. 
+ """ + + ax.axis("on") + map_fields = MapFields( fhr=0, - level='obs', - model='obs', - short_name=short_name, - ) - map_fields = maps.MapFields(main_field=field) - m = maps.Map( + fields_spec=spec, + ds=dataset, + level="obs", + model="obs", + name=short_name, + map_type="maps", + ) + m = Map( airport_fn=AIRPORTS, ax=ax, grid_info=proj_info, - model='obs', + model="obs", tile=tile, - ) - dm = maps.MultiPanelDataMap( + ) + dm = MultiPanelDataMap( map_fields=map_fields, map_=m, - member='obs', + member="obs", model_name=model_name, - ) + ) # Draw the map - dm.draw(show=True) - -def parallel_maps(cla, fhr, ds, level, model, spec, variable, workdir, - tile='full', ds2=None): - - # pylint: disable=too-many-arguments,too-many-locals - # pylint: disable=too-many-branches,too-many-statements - - ''' + return dm.draw() + + +def parallel_maps( # noqa: PLR0915, PLR0912 + cla: Namespace, + fhr: int, + dataset: dict[str, Dataset], + level: str, + variable: str, + workdir: Path, + tile: str = "full", + ds2: Path | None = None, +): + """ Function that creates plan-view maps, either a single panel, or multipanel for a forecast ensemble. Can be used in parallel. Input: fhr forecast hour - ds xarray dataset from the grib file + dataset loaded data level the vertical level of the variable to be plotted corresponding to a key in the specs file - model model name: rap, hrrr, hrrre, rrfs, rtma - spec the dictionary of specifications for the given variable - and level variable the name of the variable section in the specs file workdir output directory Optional: tile the label of the tile being plotted - ''' - - fig, axes = set_figure(cla.model_name, cla.graphic_type, tile) + ds2 second dataset + """ + fig, axes = set_figure(cla.images[0], cla.graphic_type, tile) + spec = cla.specs[variable][level] # set last_panel to send into DataMap for colorbar control last_panel = False # Declare the type of object depending on graphic type map_classes = { - "enspanel": maps.MultiPanelDataMap, - "diff": maps.DiffMap, - } - map_class = map_classes.get(cla.graphic_type, maps.DataMap) - + "enspanel": MultiPanelDataMap, + "diff": DiffMap, + } + map_class = map_classes.get(cla.graphic_type, DataMap) + + top_left = 0 + center_left = 4 + lower_left = 8 for index, current_ax in enumerate(axes): - - if current_ax is axes[-1] or index == cla.ens_size: + if current_ax is axes[-1]: last_panel = True mem = None - if cla.graphic_type == 'enspanel': + if cla.graphic_type == "enspanel": # Don't put data in the top left or bottom left panels. - if index in (0, 8): - current_ax.axis('off') - - # If we have less than 10 members, skip the remaining panels. - if index > cla.ens_size: - continue - - # Shenanigans to match ensemble member to panel index - mem = 0 if index == 4 else index - mem = mem if mem < 4 else index - 1 - mem = mem if mem < 8 else index - 2 - - # Object to be plotted on the map in filled contours. - field = gribdata.fieldData( - ds=ds, + if index in (top_left, lower_left): + current_ax.axis("off") + + # Shenanigans to match ensemble member to panel index. 
Here's where the ensemble members + # should go: + # ---------------- + # | | 1 | 2 | 3 | + # ---------------- + # | 0 | 4 | 5 | 6 | + # ---------------- + # | o | 7 | 8 | 9 | + # ---------------- + match index: + case x if x in (top_left, center_left, lower_left): + mem = 0 + case x if x > lower_left: + mem = index - 2 + case x if x > center_left: + mem = index - 1 + case x if x < center_left: + mem = index + + # Create an object that holds all the fields for this map + map_fields = MapFields( + ds=dataset, + ds2=ds2, fhr=fhr, - filetype=cla.file_type, + fields_spec=cla.specs, level=level, - member=mem, - model=model, - short_name=variable, - config=cla.specs['file'] - ) - - try: - field.field - except errors.GribReadError: - print(f'Cannot find grib2 variable for {variable} at {level}. Skipping.') - return - - if cla.graphic_type == "diff": - - field2 = gribdata.fieldData( - ds=ds2, - fhr=fhr, - filetype=cla.file_type, - level=level, - member=mem, - model=model, - short_name=variable, - config=cla.specs['file'] - ) - - try: - field2.field - except errors.GribReadError: - print((f'Cannot find grib2 variable for {variable} at {level} in', - '2nd set of files. Skipping.')) - return - - field.data = field.values() - field2.values() - - - map_fields = maps.MapFields( - fields_spec=spec, - main_field=field, + name=variable, map_type=cla.graphic_type, - model=model, + model=cla.images[0], tile=tile, - ) - + ) # Generate a map object - m = maps.Map( + m = Map( airport_fn=AIRPORTS, ax=current_ax, - grid_info=field.grid_info(), - model=model, - plot_airports=spec.get('plot_airports', True), + grid_info=map_fields.shaded.grid_info(), + model=cla.images[0], + plot_airports=spec.get("plot_airports", True), tile=tile, - ) + ) # Send all objects (map_field, contours, hatches) to a DataMap object dm = map_class( @@ -174,48 +161,50 @@ def parallel_maps(cla, fhr, ds, level, model, spec, variable, workdir, member=mem, model_name=cla.model_name, last_panel=last_panel, - ) + ) # Draw the map - if cla.graphic_type == 'enspanel': + if cla.graphic_type == "enspanel": if index == 0: dm.title() dm.add_logo(current_ax) - elif index == 8: - if spec.get('include_obs', False) and cla.obs_file_path: + elif index == lower_left: + if spec.get("include_obs", False) and cla.obs_file_path: # Add observation panel to lower left. Currently only # supported for composite reflectivity. + obs_ds = gribfile.WholeGribFile(cla.obs_file_path).datasets add_obs_panel( ax=axes[8], model_name=cla.model_name, - obs_file=cla.obs_file_path, - proj_info=field.grid_info(), + dataset=obs_ds, + proj_info=map_fields.shaded.grid_info(), short_name=variable, + spec=cla.specs, tile=tile, - ) + ) else: dm.draw(show=True) else: dm.draw(show=True) # Build the output path - png_file = f'{variable}_{tile}_{level}_f{fhr:03d}.png' + png_file = f"{variable}_{tile}_{level}_f{fhr:03d}.png" png_file = png_file.replace("__", "_") - png_path = os.path.join(workdir, png_file) + png_path = workdir / png_file - print('*' * 120) + print("*" * 120) print(f"Creating image file: {png_path}") - print('*' * 120) + print("*" * 120) # Save the png file to disk plt.savefig( png_path, - bbox_inches='tight', + bbox_inches="tight", dpi=cla.img_res, - format='png', - orientation='landscape', - pil_kwargs={'optimize': True}, - ) + format="png", + orientation="landscape", + pil_kwargs={"optimize": True}, + ) fig.clear() # Clear the current axes. @@ -223,15 +212,15 @@ def parallel_maps(cla, fhr, ds, level, model, spec, variable, workdir, # Clear the current figure. 
plt.clf() # Closes all the figure windows. - plt.close('all') - del field - del m + plt.close("all") gc.collect() -def parallel_skewt(cla, fhr, ds, site, workdir): - ''' - Function that creates a single SkewT plot. Can be used in parallel. +def parallel_skewt(cla: Namespace, fhr: int, dataset: dict[str, Dataset], site: str, workdir: Path): + """ + Function that creates a single SkewT plot. + + Can be used in parallel. Input: cla command line arguments Namespace object @@ -239,80 +228,89 @@ def parallel_skewt(cla, fhr, ds, site, workdir): fhr the forecast hour integer site the string representation of the site from the sites file workdir output directory - ''' - + """ + # deduce model name + possible_namers = (cla.model_name, cla.data_root, cla.file_tmpl) + model = "" + for name in ("global", "hrrr", "rrfs"): + if any( + name in namer.lower() if isinstance(namer, str) else name in str(namer[0]).lower() + for namer in possible_namers + ): + model = name + break skew = skewt.SkewTDiagram( - ds=ds, fhr=fhr, - filetype=cla.file_type, + ds=dataset, loc=site, + model=model, + spec=cla.specs, max_plev=cla.max_plev, model_name=cla.model_name, - ) + ) skew.create_diagram() outfile = f"{skew.site_code}_{skew.site_num}_skewt_f{fhr:03d}.png" - png_path = os.path.join(workdir, outfile) - - print('*' * 80) + png_path = workdir / outfile + print("*" * 80) print(f"Creating image file: {png_path}") - print('*' * 80) + print("*" * 80) - # pylint: disable=duplicate-code plt.savefig( png_path, - bbox_inches='tight', + bbox_inches="tight", dpi=cla.img_res, - format='png', - orientation='landscape', - ) + format="png", + orientation="landscape", + ) - start_time = cla.start_time.strftime('%Y%m%d%H') + start_time = cla.start_time.strftime("%Y%m%d%H") csvfile = f"{skew.site_code}.{skew.site_num}.skewt.{start_time}_f{fhr:03d}.csv" - csv_path = os.path.join(workdir, csvfile) - print('*' * 80) + csv_path = workdir / csvfile + print("*" * 80) print(f"Creating csv file: {csv_path}") - print('*' * 80) + print("*" * 80) skew.create_csv(csv_path) plt.close() -def set_figure(model_name, graphic_type, tile): - ''' Create the figure and subplots appropriate for the model and - graphics type. Return the figure handle and list of axes. ''' +def set_figure(model_name: str, graphic_type: str, tile: str): + """ + Create the figure and subplots appropriate for the model and + graphics type. Return the figure handle and list of axes. 
+ """ - if model_name == "HRRR-HI": - inches = 12.2 - else: - inches = 10 + inches = 12.2 if model_name == "HRRR-HI" else 10 # Settings for a default single map - x_aspect = 1 - y_aspect = 1 + x_aspect = 1.0 + y_aspect = 1.0 nrows = 1 ncols = 1 - if graphic_type == 'enspanel': + if graphic_type == "enspanel": nrows = 3 ncols = 4 inches = 20 # Most rough-square subdomains can use the 0.8 y_aspect y_aspect = 0.8 x_aspect = 1 - if tile in ['full', 'NW']: + if tile in ["full", "NW"]: # Horizontal rectangle subdomains, and CONUS need more # squashed horizontal rectangles y_aspect = 0.5 - if tile in ['SE']: + if tile in ["SE"]: # Vertical rectangle subdomains can use a bit more height # than the others y_aspect = 0.95 - fig, ax = plt.subplots(nrows, ncols, - figsize=(x_aspect*inches, y_aspect*inches), - sharex=True, - sharey=True, - ) + fig, ax = plt.subplots( + nrows, + ncols, + figsize=(x_aspect * inches, y_aspect * inches), + sharex=True, + sharey=True, + ) # Flatten the 2D array and number panel axes from top left to bottom right # sequentially ax = ax.flatten() if isinstance(ax, np.ndarray) else [ax] diff --git a/adb_graphics/figures/maps.py b/adb_graphics/figures/maps.py index 5658c254..a3ac7839 100644 --- a/adb_graphics/figures/maps.py +++ b/adb_graphics/figures/maps.py @@ -1,22 +1,28 @@ -# pylint: disable=invalid-name,too-few-public-methods - -''' +""" Module contains classes relevant to plotting maps. The Map class handles all the functionality related to a Basemap, and adding airports to a blank map. The DataMap class takes as input a Map object and a DataHandler object (e.g., UPPData object) and creates a standard plot with shaded fields, contours, wind barbs, and descriptive annotation. -''' +""" -import copy +from collections.abc import Callable +from copy import copy, deepcopy from math import isnan -import matplotlib.pyplot as plt +from pathlib import Path + import matplotlib.image as mpimg import matplotlib.offsetbox as mpob import matplotlib.patches as mpatches -from mpl_toolkits.basemap import Basemap -from mpl_toolkits.basemap import shiftgrid +import matplotlib.pyplot as plt import numpy as np +from matplotlib.axes import Axes +from matplotlib.contour import QuadContourSet +from mpl_toolkits.basemap import Basemap, shiftgrid +from xarray import Dataset + +from adb_graphics.datahandler import gribdata +from adb_graphics.utils import numeric_level, set_level # FULL_TILES is a list of strings that includes the labels GSL attaches to some of # the wgrib2 cutouts used for larger domains like RAP, RRFS NA, and global. @@ -28,59 +34,209 @@ "hrrr", "hrrrak", "NHemi", - ] +] # TILE_DEFS is a dict of dicts with predefined tiles specifying the corners of the grid # to be plotted, and the stride and length of the wind barbs. 
# Order for corners: [lower left lat, upper right lat, lower left lon, upper right lon] - -TILE_DEFS = { - 'NC': {'corners': [36, 51, -109, -85], 'stride': 10, 'length': 4}, - 'NE': {'corners': [36, 48, -91, -62], 'stride': 10, 'length': 4}, - 'NW': {'corners': [35, 52, -126, -102], 'stride': 10, 'length': 4}, - 'SC': {'corners': [24, 41, -107, -86], 'stride': 10, 'length': 4}, - 'SE': {'corners': [22, 37, -93.5, -72], 'stride': 10, 'length': 4}, - 'SW': {'corners': [24.5, 45, -122, -103], 'stride': 10, 'length': 4}, - 'Africa': {'corners': [-40, 40, -40, 60], 'stride': 7, 'length': 5}, - 'AKZoom': {'corners': [52, 73, -162, -132], 'stride': 4, 'length': 5}, - 'AKZoom2': {'corners': [37.9, 80.8, 180, -105.7], 'stride': 8, 'length': 5}, - 'AKRange': {'corners': [62.0, 67.0, -152.0, -143.0], 'stride': 4, 'length': 4}, - 'Anchorage': {'corners': [58.59, 62.776, -152.749, -146.218], 'stride': 4, 'length': 4}, - 'ATL': {'corners': [31.2, 35.8, -87.4, -79.8], 'stride': 4, 'length': 4}, - 'Beijing': {'corners': [25, 53, 102, 133], 'stride': 3, 'length': 5}, - 'CA-NV': {'corners': [30, 45, -124, -114], 'stride': 10, 'length': 4}, - 'Cambodia': {'corners': [0, 24, 90, 118], 'stride': 3, 'length': 5}, - 'CentralCA': {'corners': [34.5, 40.5, -124, -118], 'stride': 4, 'length': 4}, - 'CHI-DET': {'corners': [39, 44, -92, -83], 'stride': 4, 'length': 4}, - 'DCArea': {'corners': [36.7, 40, -81, -72], 'stride': 4, 'length': 4}, - 'EastCO': {'corners': [36.5, 41.5, -108, -101.8], 'stride': 4, 'length': 4}, - 'EPacific': {'corners': [0, 60, 180, 300], 'stride': 10, 'length': 5}, - 'Europe': {'corners': [15, 75, -30, 75], 'stride': 10, 'length': 5}, - 'Florida': {'corners': [19.2305, 29.521, -86.1119, -73.8189], 'stride': 10, 'length': 5}, - 'GreatLakes': {'corners': [37, 50, -96, -70], 'stride': 10, 'length': 4}, - 'HI': {'corners': [16.6, 24.6, -157.6, -157.5], 'stride': 1, 'length': 4}, - 'HI-zoom': {'corners': None, 'width': 800000, 'height': 800000, 'stride': 4, 'length': 4}, - 'HFIP': {'corners': [8.35, 51.6, 244., 336.], 'stride': 30, 'length': 4}, - 'Hurr-Car': {'corners': [21, 28, -96, -69], 'stride': 10, 'length': 4}, - 'Juneau': {'corners': [55.741, 59.629, -140.247, -129.274], 'stride': 4, 'length': 4}, - 'NW-large': {'corners': [29.5787, 52.6127, -121.666, -96.5617], 'stride': 15, 'length': 4}, - 'NYC-BOS': {'corners': [39, 43.5, -77, -66.5], 'stride': 4, 'length': 4}, - 'PuertoRico': {'corners': [15.5257, 24.0976, -74.6703, -61.848], 'stride': 10, 'length': 5}, - 'SEA-POR': {'corners': [43, 50, -125, -119], 'stride': 4, 'length': 4}, - 'SouthCA': {'corners': [31, 37, -120, -114], 'stride': 4, 'length': 4}, - 'SouthFL': {'corners': [24, 28.5, -84, -77], 'stride': 4, 'length': 4}, - 'Taiwan': {'corners': [19, 28, 116, 126], 'stride': 1, 'length': 5}, - 'VortexSE': {'corners': [30, 37, -92.5, -82], 'stride': 4, 'length': 4}, - 'WAtlantic': {'corners': [-0.25, 50.25, 261.75, 330.25], 'stride': 5, 'length': 5}, - 'WFIP3-d01': {'corners': [33.66, 46.86, -78.83, -61.01], 'stride': 10, 'length': 4}, - 'WFIP3-d02': {'corners': [37.84, 43.22, -74.77, -66.50], 'stride': 5, 'length': 5}, - 'WPacific': {'corners': [-40, 50, 90, 240], 'stride': 10, 'length': 5}, +TILE_DEFS: dict = { + "NC": {"corners": [36, 51, -109, -85], "stride": 10, "length": 4}, + "NE": {"corners": [36, 48, -91, -62], "stride": 10, "length": 4}, + "NW": {"corners": [35, 52, -126, -102], "stride": 10, "length": 4}, + "SC": {"corners": [24, 41, -107, -86], "stride": 10, "length": 4}, + "SE": {"corners": [22, 37, -93.5, -72], "stride": 10, 
"length": 4}, + "SW": {"corners": [24.5, 45, -122, -103], "stride": 10, "length": 4}, + "Africa": {"corners": [-40, 40, -40, 60], "stride": 7, "length": 5}, + "AKZoom": {"corners": [52, 73, -162, -132], "stride": 4, "length": 5}, + "AKZoom2": {"corners": [37.9, 80.8, 180, -105.7], "stride": 8, "length": 5}, + "AKRange": {"corners": [62.0, 67.0, -152.0, -143.0], "stride": 4, "length": 4}, + "Anchorage": { + "corners": [58.59, 62.776, -152.749, -146.218], + "stride": 4, + "length": 4, + }, + "ATL": {"corners": [31.2, 35.8, -87.4, -79.8], "stride": 4, "length": 4}, + "Beijing": {"corners": [25, 53, 102, 133], "stride": 3, "length": 5}, + "CA-NV": {"corners": [30, 45, -124, -114], "stride": 10, "length": 4}, + "Cambodia": {"corners": [0, 24, 90, 118], "stride": 3, "length": 5}, + "CentralCA": {"corners": [34.5, 40.5, -124, -118], "stride": 4, "length": 4}, + "CHI-DET": {"corners": [39, 44, -92, -83], "stride": 4, "length": 4}, + "DCArea": {"corners": [36.7, 40, -81, -72], "stride": 4, "length": 4}, + "EastCO": {"corners": [36.5, 41.5, -108, -101.8], "stride": 4, "length": 4}, + "EPacific": {"corners": [0, 60, 180, 300], "stride": 10, "length": 5}, + "Europe": {"corners": [15, 75, -30, 75], "stride": 10, "length": 5}, + "Florida": { + "corners": [19.2305, 29.521, -86.1119, -73.8189], + "stride": 10, + "length": 5, + }, + "GreatLakes": {"corners": [37, 50, -96, -70], "stride": 10, "length": 4}, + "HI": {"corners": [16.6, 24.6, -157.6, -157.5], "stride": 1, "length": 4}, + "HI-zoom": { + "corners": None, + "width": 800000, + "height": 800000, + "stride": 4, + "length": 4, + }, + "HFIP": {"corners": [8.35, 51.6, 244.0, 336.0], "stride": 30, "length": 4}, + "Hurr-Car": {"corners": [21, 28, -96, -69], "stride": 10, "length": 4}, + "Juneau": { + "corners": [55.741, 59.629, -140.247, -129.274], + "stride": 4, + "length": 4, + }, + "NW-large": { + "corners": [29.5787, 52.6127, -121.666, -96.5617], + "stride": 15, + "length": 4, + }, + "NYC-BOS": {"corners": [39, 43.5, -77, -66.5], "stride": 4, "length": 4}, + "PuertoRico": { + "corners": [15.5257, 24.0976, -74.6703, -61.848], + "stride": 10, + "length": 5, + }, + "SEA-POR": {"corners": [43, 50, -125, -119], "stride": 4, "length": 4}, + "SouthCA": {"corners": [31, 37, -120, -114], "stride": 4, "length": 4}, + "SouthFL": {"corners": [24, 28.5, -84, -77], "stride": 4, "length": 4}, + "Taiwan": {"corners": [19, 28, 116, 126], "stride": 1, "length": 5}, + "VortexSE": {"corners": [30, 37, -92.5, -82], "stride": 4, "length": 4}, + "WAtlantic": {"corners": [-0.25, 50.25, 261.75, 330.25], "stride": 5, "length": 5}, + "WFIP3-d01": {"corners": [33.66, 46.86, -78.83, -61.01], "stride": 10, "length": 4}, + "WFIP3-d02": {"corners": [37.84, 43.22, -74.77, -66.50], "stride": 5, "length": 5}, + "WPacific": {"corners": [-40, 50, 90, 240], "stride": 10, "length": 5}, } -class Map(): - # pylint: disable=too-many-instance-attributes +class MapFields: + """ + Class that packages all the field objects needed for producing + desired map content, i.e. an object that contains all filled + contours, hatched spaces, and overlayed contours needed for a full + product. 
+ """ + + def __init__( + self, + ds: dict[str, Dataset], + fhr: int, + fields_spec: dict, + level: str, + map_type: str, + name: str, + ds2: Path | str | None = None, + model: str | None = None, + tile: str | None = None, + ): + self.fhr = fhr + self.fields_spec = deepcopy(fields_spec) + self.ds = ds + self.level = level + self.map_type = map_type + self.model = "" if model is None else model + self.name = name + self.tile = tile or "full" + + self.map_spec = deepcopy(self.fields_spec[self.name][self.level]) + set_level(self.level, self.model, self.map_spec) + # Required if map_type is "diff" + if map_type == "diff": # pragma: no cover + self.ds2 = ds2 + if not self.ds2: + msg = "Diff map requires a second grib path. Provide ds2 argument!" + raise ValueError(msg) + + @property + def shaded(self): + """ + The main field to be shaded on the map. + """ + args = { + "fhr": self.fhr, + "level": self.level, + "model": self.model, + "short_name": self.name, + "spec": self.fields_spec, + "ds": self.ds, + } + field = gribdata.FieldData(**args) # type: ignore[arg-type] + if self.map_type == "diff": # pragma: no cover + args["ds"] = self.ds2 + field2 = gribdata.FieldData(**args) # type: ignore[arg-type] + field.data = field.data - field2.data + + return field + + @property + def contours(self): + """Return the list of contour FieldData objects.""" + + # We won't plot contours on multipanel plots, or full global + # plots. + if self.map_type == "enspanel": # pragma: no cover + return [] + + if "global" in self.model and self.tile in ["full"]: # pragma: no cover + return [] - ''' + return self._overlay_fields("contours") + + @property + def hatches(self): + """Return the list of hatch FieldData objects.""" + + return self._overlay_fields("hatches") + + def wind_fields(self, level: str | None = None): # pragma: no cover + """Return u, v tuple of wind fields.""" + + lev = level or self.level + winds = [] + for var in ("u", "v"): + wind_spec = self.fields_spec[var][lev] + set_level(lev, self.model, wind_spec) + + args = { + "fhr": self.fhr, + "level": lev, + "model": self.model, + "short_name": var, + "spec": self.fields_spec, + "ds": self.ds, + } + winds.append(gribdata.FieldData(**args)) # type: ignore[arg-type] + return winds + + def _overlay_fields(self, spec_sect: str) -> list: # pragma: no cover + """ + Create FieldData objects for the specified overlay type - hatches or contours. + """ + + overlay_fields = [] + for overlay, overlay_kwargs in self.map_spec.get(spec_sect, {}).items(): + if "_" in overlay: + var, lev = overlay.split("_") + else: + var, lev = overlay, self.level + overlay_spec = deepcopy(self.fields_spec[var][lev]) + set_level(lev, self.model, overlay_spec) + args = { + "fhr": self.fhr, + "level": lev, + "model": self.model, + "short_name": var, + "spec": self.fields_spec, + "ds": self.ds, + } + overlay_obj = gribdata.FieldData(**args) + # Set the attributes for the overlay field + overlay_obj.contour_kwargs = overlay_kwargs + overlay_fields.append(overlay_obj) + return overlay_fields + + +class Map: + """ Class includes utilities needed to create a Basemap object, add airport locations, and draw the blank map. @@ -89,179 +245,154 @@ class Map(): airport_fn full path to airport file ax figure axis - Keyword arguments: - - map_proj dict describing the map projection to use. + Keyword Arguments: + grid_info dict describing the map projection to use. 
The only options currently are for lcc settings in _get_basemap() - corners list of values lat and lon of lower left (ll) and upper - right(ur) corners: - ll_lat, ur_lat, ll_lon, ur_lon model model designation used to trigger higher resolution maps if needed also used to turn off plotting of airports on global maps plot_airports bool to allow airport plotting to be turned off for certain plots, default is True tile a string corresponding to a pre-defined tile in the TILE_DEFS dictionary - ''' - def __init__(self, airport_fn, ax, **kwargs): + """ + def __init__(self, airport_fn: Path, ax: Axes, **kwargs): self.ax = ax - self.grid_info = kwargs.get('grid_info', {}) - self.model = kwargs.get('model') - self.plot_airports = kwargs.get('plot_airports', True) - self.tile = kwargs.get('tile', 'full') - self.airports = self.load_airports(airport_fn) - - if self.model == 'hrrr' and 'WFIP3' in self.tile: - self.grid_info.update({'lat_1': 40.6, 'lat_2': 40.6, 'lon_0': 289.2}) - if self.model != 'hrrrhi': + self.airport_fn = airport_fn + self.grid_info = kwargs.get("grid_info", {}) + self.model = kwargs.get("model", "") + self.plot_airports = kwargs.get("plot_airports", True) + self.tile = kwargs.get("tile", "full") + + if self.model == "hrrr" and "WFIP3" in self.tile: + self.grid_info.update( + {"lat_1": 40.6, "lat_2": 40.6, "lon_0": 289.2} + ) # pragma: no cover + if self.model != "hrrrhi": if self.tile in FULL_TILES: - self.corners = self.grid_info.pop('corners') - else: - self.corners = self.get_corners() - self.grid_info.pop('corners') - else: + self.corners = self.grid_info.pop("corners") + else: # pragma: no cover + self.corners = TILE_DEFS[self.tile]["corners"] + self.grid_info.pop("corners") + else: # pragma: no cover self.corners = None if self.tile in FULL_TILES: - self.width = self.grid_info.pop('width') - self.height = self.grid_info.pop('height') + self.width = self.grid_info.pop("width") + self.height = self.grid_info.pop("height") else: - self.width = self.get_width() - self.grid_info.pop('width') - self.height = self.get_height() - self.grid_info.pop('height') + self.width = TILE_DEFS[self.tile]["width"] + self.grid_info.pop("width") + self.height = TILE_DEFS[self.tile]["height"] + self.grid_info.pop("height") # Some of Hawaii's smaller islands and islands in the Caribbean don't # show up with a larger threshold. area_thresh = 1000 - if self.tile in ['HI', 'Florida', 'PuertoRico'] or self.model in ['hrrrhi', 'hrrrcar']: + if self.tile in ["HI", "Florida", "PuertoRico"] or self.model in [ + "hrrrhi", + "hrrrcar", + ]: # pragma: no cover area_thresh = 100 self.m = self._get_basemap(area_thresh=area_thresh, **self.grid_info) - if self.model == 'hrrrhi': - parallels = np.arange(0., 81, 5.) + if self.model == "hrrrhi": # pragma: no cover + parallels = np.arange(0.0, 81, 5.0) self.m.drawparallels(parallels, labels=[False, True, True, False]) - meridians = np.arange(10., 351., 5.) + meridians = np.arange(10.0, 351.0, 5.0) self.m.drawmeridians(meridians, labels=[True, False, False, True]) def boundaries(self): - - ''' Draws map boundaries - coasts, states, countries. 
''' + """Draws map boundaries - coasts, states, countries.""" try: self.m.drawcoastlines(linewidth=0.5) - except ValueError: - self.m.drawcounties(color='black', - linewidth=0.4, - zorder=2, - ) + except ValueError: # pragma: no cover + self.m.drawcounties( + color="black", + linewidth=0.4, + zorder=2, + ) else: - if self.model not in ['global', 'hfip'] and self.tile not in FULL_TILES: - self.m.drawcounties(antialiased=False, - color='black', - linewidth=0.1, - zorder=2, - ) + if ( + self.model not in ["global", "hfip"] and self.tile not in FULL_TILES + ): # pragma: no cover + self.m.drawcounties( + antialiased=False, + color="black", + linewidth=0.1, + zorder=2, + ) self.m.drawstates() self.m.drawcountries() def draw(self): - - ''' Draw a map with political boundaries and airports only. ''' + """Draw a map with political boundaries and airports only.""" self.boundaries() - if self.plot_airports and 'global' not in self.model: # airports are too dense in global + if self.plot_airports and "global" not in self.model: # airports are too dense in global self.draw_airports() def draw_airports(self): - - ''' Plot each of the airport locations on the map. ''' - - lats = self.airports[:, 0] - lons = 360 + self.airports[:, 1] # Convert to positive longitude + """Plot each of the airport locations on the map.""" + airports = self.load_airports() + lats = airports[:, 0] + lons = 360 + airports[:, 1] # Convert to positive longitude x, y = self.m(lons, lats) - self.m.plot(x, y, 'ko', - ax=self.ax, - color='w', - fillstyle='full', - markeredgecolor='k', - markeredgewidth=0.5, - markersize=4, - ) + self.m.plot( + x, + y, + "wo", + ax=self.ax, + fillstyle="full", + markeredgecolor="k", + markeredgewidth=0.5, + markersize=4, + ) del x del y def _get_basemap(self, **get_basemap_kwargs): - - ''' Wrapper around basemap creation ''' + """Wrapper around basemap creation.""" basemap_args = dict( ax=self.ax, - resolution='i', - ) + resolution="i", + ) if self.corners is not None: corners = self.corners - basemap_args.update(dict( - llcrnrlat=corners[0], - llcrnrlon=corners[2], - urcrnrlat=corners[1], - urcrnrlon=corners[3], - )) + basemap_args.update( + dict( + llcrnrlat=corners[0], + llcrnrlon=corners[2], + urcrnrlat=corners[1], + urcrnrlon=corners[3], + ) + ) else: - basemap_args.update(dict( - width=self.width, - height=self.height, - )) + basemap_args.update( + dict( + width=self.width, + height=self.height, + ) + ) # pragma: no cover basemap_args.update(get_basemap_kwargs) return Basemap(**basemap_args) - def get_corners(self): - - ''' - Gather the corners for a specific tile. Corners are supplied in the - following format: - - lat and lon of lower left (ll) and upper right(ur) corners: - ll_lat, ur_lat, ll_lon, ur_lon - ''' - - return TILE_DEFS[self.tile]["corners"] - - def get_width(self): - - ''' - Gather the width for a specific tile. - ''' - - return TILE_DEFS[self.tile]["width"] - - def get_height(self): - - ''' - Gather the height for a specific tile. - ''' - - return TILE_DEFS[self.tile]["height"] - - @staticmethod - def load_airports(fn): - - ''' Load lat, lon pairs from a text file, return a list of lists. 
''' - - with open(fn, 'r') as f: + def load_airports(self): + """Load lat, lon pairs from a text file, return a list of lists.""" + with self.airport_fn.open() as f: data = f.readlines() - return np.array([l.strip().split(',') for l in data], dtype=float) + return np.array([line.strip().split(",") for line in data], dtype=float) -class DataMap(): - #pylint: disable=too-many-arguments - ''' +class DataMap: + """ Class that combines the input data and the chosen map to plot both together. Input: @@ -272,24 +403,26 @@ class DataMap(): fields map maps object - ''' - - #pylint: disable=unused-argument - def __init__(self, map_fields, map_, model_name=None, **kwargs): + """ - self.field = map_fields.main_field + def __init__(self, map_fields: MapFields, map_: Map, model_name: str | None = None, **_kwargs): + self.field = map_fields.shaded self.contour_fields = map_fields.contours self.hatch_fields = map_fields.hatches + self.map_fields = map_fields self.map = map_ self.model_name = model_name - self.plot_scatter = map_fields.fields_spec.get('plot_scatter', False) + self.plot_scatter = map_fields.fields_spec.get("plot_scatter", False) - @staticmethod - def add_logo(ax): + def wind_fields(self, level: str): # pragma: no cover + return self.map_fields.wind_fields(level) - ''' Puts the NOAA logo at the bottom left of the matplotlib axes. ''' + @staticmethod + def add_logo(ax: Axes): + """Puts the NOAA logo at the bottom left of the matplotlib axes.""" - logo = mpimg.imread('static/noaa-logo-50x50.png') + logo_path = Path(__file__).resolve().parent.parent.parent / "static" / "noaa-logo-50x50.png" + logo = mpimg.imread(logo_path) imagebox = mpob.OffsetImage(logo) ab = mpob.AnnotationBbox( @@ -297,48 +430,54 @@ def add_logo(ax): (0, 0), box_alignment=(-0.2, -0.2), frameon=False, - xycoords='axes points', - ) + xycoords="axes points", + ) ax.add_artist(ab) - - def _colorbar(self, cc, ax): - - ''' Internal method that plots the color bar for a contourf field. - If ticks is set to zero, use a user-defined list of clevs from default_specs - If ticks is less than zero, use abs(ticks) as the step for labeling clevs ''' + def _colorbar(self, cc: QuadContourSet, ax: Axes): + """ + Plot the colorbar for the contourf field. + If ticks is set to zero, use a user-defined list of clevs from default_specs. + If ticks is less than zero, use abs(ticks) as the step for labeling clevs. 
+ """ if self.field.ticks > 0: - ticks = np.arange(np.amin(self.field.clevs), - np.amax(self.field.clevs+1), self.field.ticks) - elif self.field.ticks == 0: + ticks = np.arange( + np.amin(self.field.clevs), + np.amax(self.field.clevs + 1), + self.field.ticks, + ) + elif self.field.ticks == 0: # pragma: no cover ticks = self.field.clevs - else: - ticks = self.field.clevs[0:len(self.field.clevs):-self.field.ticks] + else: # pragma: no cover + ticks = self.field.clevs[0 : len(self.field.clevs) : -self.field.ticks] ticks = np.around(ticks, 4) - cbar = plt.colorbar(cc, - ax=ax, - orientation='horizontal', - pad=0.02, - shrink=1.0, - ticks=ticks, - ) + cbar = plt.colorbar( + cc, + ax=ax, + orientation="horizontal", + pad=0.02, + shrink=1.0, + ticks=ticks, + ) - if self.field.short_name == 'flru': - ticks = [label.rjust(30) for label in ['VFR', 'MVFR', 'IFR', 'LIFR']] + tick_labels = [str(t) for t in ticks] + if self.field.short_name == "flru": # pragma: no cover + tick_labels = [label.rjust(30) for label in ["VFR", "MVFR", "IFR", "LIFR", ""]] # this step is done to allow proper order of icing severity levels (trace before light) - if self.field.short_name == 'icsev': - ticks = [label.rjust(30) for label in ['TRACE', 'LIGHT', 'MODERATE', 'HEAVY']] - - cbar.ax.set_xticklabels(ticks, fontsize=12) + if self.field.short_name == "icsev": # pragma: no cover + tick_labels = [label.rjust(30) for label in ["TRACE", "LIGHT", "MODERATE", "HEAVY", ""]] - def draw(self, show=False): + cbar.ax.set_xticklabels(tick_labels, fontsize=12) - ''' Main method for creating the plot. Set show=True to display the - figure from the command line. ''' + def draw(self, show: bool = False): + """ + Main method for creating the plot. Set show=True to display the + figure from the command line. 
+ """ cf = self._draw_panel() @@ -351,124 +490,136 @@ def draw(self, show=False): # Create a pop-up to display the figure, if show=True if show: plt.tight_layout() - plt.show() self.add_logo(self.map.ax) - def _draw_panel(self, wind_barbs=True): # pylint: disable=too-many-locals, too-many-branches - + def _draw_panel(self, wind_barbs: bool = True): ax = self.map.ax # Draw a map and add the shaded field self.map.draw() - cf = self._draw_field(ax=ax, - colors=self.field.colors, - extend='both', - field=self.field, - func=self.map.m.contourf, - levels=self.field.clevs, - ) + cf = self._draw_field( + ax=ax, + colors=self.field.colors, + extend="both", + field=self.field, + func=self.map.m.contourf, + levels=self.field.clevs, + ) not_labeled = [self.field.short_name] - if self.hatch_fields: + if self.hatch_fields: # pragma: no cover not_labeled.extend([h.short_name for h in self.hatch_fields]) # Contour secondary fields, if requested - if self.contour_fields: + if self.contour_fields: # pragma: no cover self._draw_contours(ax, not_labeled) # Add hatched fields, if requested - if self.hatch_fields: + if self.hatch_fields: # pragma: no cover self._draw_hatches(ax) # Add wind barbs, if requested - add_wind = self.field.vspec.get('wind', False) - if add_wind and wind_barbs: + add_wind = self.field.vspec.get("wind", False) + if add_wind and wind_barbs: # pragma: no cover self._wind_barbs(add_wind) # Add field values at airports - annotate = self.field.vspec.get('annotate', False) + annotate = self.field.vspec.get("annotate", False) model_name = self.model_name - if annotate and 'global' not in self.map.model: # too dense in global - if model_name not in ['RRFS NA 3km']: # too dense in full RRFS domain - if model_name == 'RAP-NCEP' and self.map.tile not in ['full']: - self._draw_field_values(ax) + # too dense in global and rrfs NA + if ( + annotate + and "global" not in self.map.model + and model_name not in ["RRFS NA 3km"] + and model_name == "RAP-NCEP" + and self.map.tile not in ["full"] + ): # pragma: no cover + self._draw_field_values(ax) # Add scatter plot, if requested - if self.plot_scatter: + if self.plot_scatter: # pragma: no cover self._draw_scatter(ax) return cf - def _draw_contours(self, ax, not_labeled): - - ''' Draw the contour fields requested. 
''' + def _draw_contours(self, ax: Axes, not_labeled: list[str]): # pragma: no cover + """Draw the contour fields requested.""" - model_name = self.model_name main_field = self.field.short_name for contour_field in self.contour_fields: - levels = contour_field.contour_kwargs.pop('levels', - contour_field.clevs) - - if model_name in ["RAP-NCEP", "RRFS-NCEP", "RRFS NA 3km"]: - if main_field == "totp" and contour_field.short_name == "pres" and \ - self.map.tile == "full": - levels = np.arange(650, 1051, 8) - - cc = self._draw_field(ax=ax, - field=contour_field, - func=self.map.m.contour, - levels=levels, - **contour_field.contour_kwargs, - ) + levels = contour_field.contour_kwargs.pop("levels", contour_field.clevs) + + if self.model_name in ["RAP-NCEP", "RRFS-NCEP", "RRFS NA 3km"] and ( + main_field == "totp" + and contour_field.short_name == "pres" + and self.map.tile == "full" + ): + levels = np.arange(650, 1051, 8) + + cc = self._draw_field( + ax=ax, + field=contour_field, + func=self.map.m.contour, + levels=levels, + **contour_field.contour_kwargs, + ) if contour_field.short_name not in not_labeled: try: - plt.clabel(cc, levels[::4], - colors='k', - fmt='%1.0f', - fontsize=10, - inline=1, - ) + plt.clabel( + cc, + levels[::4], + colors="k", + fmt="%1.0f", + fontsize=10, + inline=1, + ) except ValueError: - print(f'Cannot add contour labels to map for {self.field.short_name} \ - {self.field.level}') - - def _draw_scatter(self, ax): + print( + f"Cannot add contour labels to map for {self.field.short_name} \ + {self.field.level}" + ) - ''' Plot dots at locations on the map that meet a threshold. ''' + def _draw_scatter(self, ax: Axes): # pragma: no cover + """Plot dots at locations on the map that meet a threshold.""" field = self.field levels = self.field.clevs colors = self.field.colors - vals = self.field.values() + vals = self.field.data - value_to_color = np.full_like(vals, colors[0], dtype='object') + value_to_color = np.full_like(vals, colors[0], dtype="object") num_levels = len(levels) for i in range(num_levels): if i != num_levels - 1: - value_to_color = np.where((vals > levels[i]) & \ - (vals <= levels[i+1]), colors[i+1], value_to_color) + value_to_color = np.where( + (vals > levels[i]) & (vals <= levels[i + 1]), + colors[i + 1], + value_to_color, + ) else: - value_to_color = np.where(vals > levels[i], colors[i+1], value_to_color) + value_to_color = np.where(vals > levels[i], colors[i + 1], value_to_color) vtc1d = np.ravel(value_to_color) # Scatter plot dots are sized by value. Doing this here alters the size # without altering the colors we just set. - field.data = np.log10(field.values()) * 20 - - self._draw_field(ax=ax, - field=field, - alpha=1.0, - c=vtc1d, - func=self.map.m.scatter, - **field.contour_kwargs, - ) + field.data = np.log10(field.data) * 20 - def _draw_field(self, ax, field, func, **kwargs): - - ''' + self._draw_field( + ax=ax, + field=field, + alpha=1.0, + c=vtc1d, + func=self.map.m.scatter, + **field.contour_kwargs, + ) + + def _draw_field( + self, ax: Axes, field: gribdata.FieldData, func: Callable, **kwargs + ): # pragma: no cover + """ Internal implementation that calls a matplotlib function. Input args: @@ -476,29 +627,33 @@ def _draw_field(self, ax, field, func, **kwargs): field: Field to be plotted func: Matplotlib function to be called. - Keyword args: + kwargs: Can be any of the keyword args accepted by original func in matplotlib. Return: The return from the function called. 
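`_draw_scatter` above assigns a named color to each data value by walking the contour-level bins before flattening the result for the scatter call. A self-contained sketch of that binning, with made-up levels and colors (the helper name is hypothetical):

```
import numpy as np

def values_to_colors(vals, levels, colors):
    # colors needs one more entry than levels: colors[0] is the "below the
    # first level" color, colors[i + 1] covers (levels[i], levels[i + 1]].
    out = np.full_like(vals, colors[0], dtype="object")
    for i, low in enumerate(levels):
        if i != len(levels) - 1:
            out = np.where((vals > low) & (vals <= levels[i + 1]), colors[i + 1], out)
        else:
            out = np.where(vals > low, colors[i + 1], out)
    return out

vals = np.array([0.5, 3.0, 12.0])
print(values_to_colors(vals, [1.0, 5.0, 10.0], ["white", "green", "orange", "red"]))
# -> white, green, red
```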
- ''' + + """ x, y = self._xy_mesh(field) vals = field.data # For global lat-lon models, make 2D arrays for x and y # Shift the map and data if needed - if self.map.model in ['global', 'global_mpas', 'hfip']: + if self.map.model in ["global", "global_mpas", "hfip"]: tile = self.map.tile - if tile in ['Africa', 'Europe']: - vals, x = shiftgrid(180., vals, x, start=False) - y, x = np.meshgrid(y, x, sparse=False, indexing='ij') - - ret = func(x, y, vals, - ax=ax, - **kwargs, - ) + if tile in ["Africa", "Europe"]: + vals, x = shiftgrid(180.0, vals, x, start=False) + y, x = np.meshgrid(y, x, sparse=False, indexing="ij") + + ret = func( + x, + y, + vals, + ax=ax, + **kwargs, + ) del x del y @@ -506,71 +661,77 @@ def _draw_field(self, ax, field, func, **kwargs): vals.close() except AttributeError: del vals - print(f'CLOSE ERROR: {field.short_name} {field.level}') + print(f"CLOSE ERROR: {field.short_name} {field.level}") return ret - def _draw_field_values(self, ax): - - ''' Add the text value of the field at airport locations. ''' - annotate_decimal = self.field.vspec.get('annotate_decimal', 0) - lats = self.map.airports[:, 0] - lons = 360 + self.map.airports[:, 1] + def _draw_field_values(self, ax: Axes): # pragma: no cover + """Add the text value of the field at airport locations.""" + annotate_decimal = self.field.vspec.get("annotate_decimal", 0) + airports = self.map.load_airports() + lats = airports[:, 0] + lons = 360 + airports[:, 1] x, y = self.map.m(lons, lats) if self.map.corners is None: return data_values = self.field.data - crnrs = copy.copy(self.map.corners) + crnrs = copy(self.map.corners) if crnrs[2] < 0: crnrs[2] = 360 + crnrs[2] if crnrs[3] < 0: crnrs[3] = 360 + crnrs[3] for i, lat in enumerate(lats): - if crnrs[1] > lat > crnrs[0] and \ - crnrs[3] > lons[i] > crnrs[2]: + if crnrs[1] > lat > crnrs[0] and crnrs[3] > lons[i] > crnrs[2]: xgrid, ygrid = self.field.get_xypoint(lat, lons[i]) data_value = data_values[xgrid, ygrid].values.item() - if xgrid > 0 and ygrid > 0: - if (not isnan(data_value)) and (data_value != 0.): - ax.annotate(f"{data_value:.{annotate_decimal}f}", \ - xy=(x[i], y[i]), fontsize=10) + if xgrid > 0 and ygrid > 0 and (not isnan(data_value)) and (data_value != 0.0): + ax.annotate( + f"{data_value:.{annotate_decimal}f}", + xy=(x[i], y[i]), + fontsize=10, + ) data_values.close() - def _draw_hatches(self, ax): - - ''' Draw the hatched regions requested. ''' + def _draw_hatches(self, ax: Axes): # pragma: no cover + """Draw the hatched regions requested.""" # Levels should be included in the settings dict here since they don't # correspond to a full field of contours. 
handles = [] for field in self.hatch_fields: - colors = field.contour_kwargs.get('colors', 'k') - hatches = field.contour_kwargs.get('hatches', '----') - labels = field.contour_kwargs.get('labels', 'XXXX') - linewidths = field.contour_kwargs.get('linewidths', 0.1) - handles.append(mpatches.Patch(edgecolor=colors[-1], facecolor='lightgrey', \ - label=labels, hatch=hatches[-1])) - - cf = self._draw_field(ax=ax, - extend='both', - field=field, - func=self.map.m.contourf, - **field.contour_kwargs, - ) + colors = field.contour_kwargs.get("colors", "k") + hatches = field.contour_kwargs.get("hatches", "----") + labels = field.contour_kwargs.get("labels", "XXXX") + linewidths = field.contour_kwargs.get("linewidths", 0.1) + handles.append( + mpatches.Patch( + edgecolor=colors[-1], + facecolor="lightgrey", + label=labels, + hatch=hatches[-1], + ) + ) + cf = self._draw_field( + ax=ax, + extend="both", + field=field, + func=self.map.m.contourf, + **field.contour_kwargs, + ) # For each level, we set the color of its hatch - for collection in cf.collections: - collection.set_edgecolor(colors) - collection.set_facecolor(['None']) - collection.set_linewidth(linewidths) + cf.set_edgecolor(colors) + cf.set_facecolor("None") + cf.set_linewidth(linewidths) # Create legend for precip type field - if self.field.short_name == 'ptyp': + if self.field.short_name == "ptyp": plt.legend(handles=handles, loc=[0.25, 0.03]) - def _set_overlay_string(self): - - ''' Creates the main title of the plot with select hatched and - contoured fields defined. ''' + def _set_overlay_string(self) -> str: + """ + Creates the main title of the plot with select hatched and + contoured fields defined. + """ f = self.field @@ -578,95 +739,88 @@ def _set_overlay_string(self): contoured = [] contoured_units = [] not_labeled = [f.short_name] - if self.hatch_fields: + if self.hatch_fields: # pragma: no cover cf = self.hatch_fields[0] not_labeled.extend([h.short_name for h in self.hatch_fields]) - if not any(list(set(cf.short_name).intersection(['pres']))): - title = cf.vspec.get('title', cf.field.long_name) - contoured.append(f'{title} ({cf.units}, hatched)') + if not any(list(set(cf.short_name).intersection(["pres"]))): + user_title = cf.vspec.get("title", cf.field.long_name) + contoured.append(f"{user_title} ({cf.units}, hatched)") # Add descriptor string for the important contoured fields - if self.contour_fields: + if self.contour_fields: # pragma: no cover for cf in self.contour_fields: if cf.short_name not in not_labeled: - title = cf.vspec.get('title', cf.field.long_name) - title = title.replace("Geopotential", "Geop.") - contoured.append(f'{title}') - contoured_units.append(f'{cf.units}') + user_title = cf.vspec.get("title", cf.field.long_name) + user_title = user_title.replace("Geopotential", "Geop.") + contoured.append(f"{user_title}") + contoured_units.append(f"{cf.units}") - contoured = '\n'.join(contoured) # Make 'contoured' a string with linefeeds - if contoured_units: - contoured = f"{contoured} ({', '.join(contoured_units)}, contoured)" + title = "\n".join(contoured) # Make 'contoured' a multiline string + if contoured_units: # pragma: no cover + title = f"{title} ({', '.join(contoured_units)}, contoured)" - return contoured + return title def _title(self): - - ''' Draw the title for a map. 
''' + """Draw the title for a map.""" f = self.field atime = f.date_to_str(f.anl_dt) vtime = f.date_to_str(f.valid_dt) # Analysis time (top) and forecast hour with Valid Time (bottom) on the left - plt.title(f"{self.model_name}: {atime}\nFcst Hr: {f.fhr}, Valid Time {vtime}", - alpha=None, - fontsize=14, - loc='left', - ) - - level, lev_unit = f.numeric_level(index_match=False) - if f.vspec.get('print_units', True): - units = f'({f.units}, shaded)' - else: - units = f'' + plt.title( + f"{self.model_name}: {atime}\nFcst Hr: {f.fhr}, Valid Time {vtime}", + alpha=None, + fontsize=14, + loc="left", + ) + + units = f"({f.units}, shaded)" if f.vspec.get("print_units", True) else "" # Title or Atmospheric level and unit in the high center - if f.vspec.get('title'): + if f.vspec.get("title"): title = f"{f.vspec.get('title')} {units}" - else: - level = level if not isinstance(level, list) else level[0] - title = f'{level} {lev_unit} {f.field.long_name} {units}' - plt.title(f"{title}", position=(0.5, 1.10), fontsize=18) + else: # pragma: no cover + level, lev_unit = numeric_level(f.level) + title = f"{level} {lev_unit} {f.field.long_name} {units}" + plt.title(f"{title}", loc="center", y=1.10, fontsize=18) # Two lines for hatched data (top), and contoured data (bottom) on the right contoured = self._set_overlay_string() - plt.title(f"{contoured}", - loc='right', - fontsize=14, - ) - - def _wind_barbs(self, level): - - ''' Draws the wind barbs. A decent stride can be found if you divide the - number of grid points on the shorter side by 35. Subdomains are defined - by lat,lon so the stride is set in the TILE_DEFS. For the globalCONUS - subdomains, further dividing by 2.5 works well. ''' - - u, v = self.field.wind(level) - + plt.title( + f"{contoured}", + loc="right", + fontsize=14, + ) + + def _wind_barbs(self, level: bool | str): # pragma: no cover + """ + Draws the wind barbs. A decent stride can be found if you divide the + number of grid points on the shorter side by 35. Subdomains are defined + by lat,lon so the stride is set in the TILE_DEFS. For the globalCONUS + subdomains, further dividing by 2.5 works well. + """ + + lev = level if not isinstance(level, bool) else self.field.level + u, v = [f.data for f in self.wind_fields(lev)] tile = self.map.tile full_tile = tile in FULL_TILES # Set the stride and size of the barbs to be plotted with a masked array. 
if full_tile: - if u.shape[0] < u.shape[1]: - stride = int(round(u.shape[0] / 35)) - else: - stride = int(round(u.shape[1] / 35)) + stride = round(u.shape[0] / 35) if u.shape[0] < u.shape[1] else round(u.shape[1] / 35) length = 5 else: stride = TILE_DEFS[tile]["stride"] length = TILE_DEFS[tile]["length"] - if self.map.model == 'globalCONUS': - stride = int(round(stride / 2.5)) + if self.map.model == "globalCONUS": + stride = round(stride / 2.5) length = 5 - if self.map.model == 'hrrr' and self.model_name == 'WFIP3-FULL' and \ - tile == 'WFIP3-d02': + if self.map.model == "hrrr" and self.model_name == "WFIP3-FULL" and tile == "WFIP3-d02": stride = 6 - if self.map.model == 'hrrr' and self.model_name == 'WFIP3-NEST' and \ - tile == 'WFIP3-d02': + if self.map.model == "hrrr" and self.model_name == "WFIP3-NEST" and tile == "WFIP3-d02": stride = 17 mask = np.ones_like(u) @@ -676,75 +830,79 @@ def _wind_barbs(self, level): # For global lat-lon models, make 2D arrays for x and y # Shift the map and data if needed - if self.map.m.projection == 'cyl': - if tile in ['Africa', 'Europe']: + if self.map.m.projection == "cyl": + if tile in ["Africa", "Europe"]: savex = x - u, x = shiftgrid(180., u, x, start=False) - v, savex = shiftgrid(180., v, savex, start=False) - y, x = np.meshgrid(y, x, sparse=False, indexing='ij') + u, x = shiftgrid(180.0, u, x, start=False) + v, savex = shiftgrid(180.0, v, savex, start=False) + y, x = np.meshgrid(y, x, sparse=False, indexing="ij") + mu: np.ma.MaskedArray + mv: np.ma.MaskedArray mu, mv = [np.ma.masked_array(c, mask=mask) for c in [u, v]] - self.map.m.barbs(x, y, mu, mv, - barbcolor='k', - flagcolor='k', - length=length, - linewidth=0.2, - sizes={'spacing': 0.25}, - ) - - def _xy_mesh(self, field): - - ''' Helper function to create mesh for various plot. ''' + self.map.m.barbs( + x, + y, + mu, + mv, + barbcolor="k", + flagcolor="k", + length=length, + linewidth=0.2, + sizes={"spacing": 0.25}, + ) + + def _xy_mesh(self, field: gribdata.FieldData): + """Helper function to create mesh for various plot.""" lat, lon = field.latlons() - if self.map.model == 'obs': - lat, lon = np.meshgrid(lat, lon, sparse=False, indexing='ij') - adjust = 360 if np.any(lon < 0) else 0 return self.map.m(adjust + lon, lat) + class DiffMap(DataMap): - ''' + """ Extends DataMap for handling difference plots, which need different titles, and will not plot overlays and such. - ''' + """ - def _colorbar(self, cc, ax): - - ''' Set the colorbar for a difference field. ''' + def _colorbar(self, cc: QuadContourSet, ax: Axes): # pragma: no cover + """Set the colorbar for a difference field.""" plt.colorbar( cc, ax=ax, - orientation='horizontal', + orientation="horizontal", pad=0.02, shrink=1.0, - ) + ) - def _draw_panel(self, wind_barbs=False): - - ''' Draw a map of the difference field. ''' + def _draw_panel(self, wind_barbs: bool = False): # pragma: no cover + """Draw a map of the difference field.""" ax = self.map.ax # Draw a map and add the shaded field self.map.draw() + if wind_barbs: + print("Wind barbs are not drawn for diff plots") + # The number of levels (nlev) here, should be the same number as is used # in the linspace call in self._eq_contours. 21 seems reasonable, but is # arbitrary. 
- colors = self.field.centered_diff(cmap='Spectral_r', nlev=21) - cf = self._draw_field(ax=ax, - colors=colors, - extend='both', - field=self.field, - func=self.map.m.contourf, - levels=self._eq_contours(), - ) - return cf + colors = self.field.centered_diff(cmap="Spectral_r", nlev=21) + return self._draw_field( + ax=ax, + colors=colors, + extend="both", + field=self.field, + func=self.map.m.contourf, + levels=self._eq_contours(), + ) - def _eq_contours(self): - ''' Center the contours based on the data min/max ''' + def _eq_contours(self): # pragma: no cover + """Center the contours based on the data min/max.""" minval = np.amin(self.field.data) maxval = np.amax(self.field.data) @@ -753,55 +911,54 @@ def _eq_contours(self): maxval = max(abs(minval), abs(maxval)) return np.linspace(-maxval, maxval, 21) - def _title(self): - ''' Draw the title for a map. ''' + def _title(self): # pragma: no cover + """Draw the title for a map.""" f = self.field atime = f.date_to_str(f.anl_dt) vtime = f.date_to_str(f.valid_dt) # Analysis time (top) and forecast hour with Valid Time (bottom) on the left - plt.title(f"{self.model_name}: {atime}\nFcst Hr: {f.fhr}, Valid Time {vtime}", - alpha=None, - fontsize=14, - loc='left', - ) - - level, lev_unit = f.numeric_level(index_match=False) - if f.vspec.get('print_units', True): - units = f'({f.units}, shaded)' - else: - units = f'' + plt.title( + f"{self.model_name}: {atime}\nFcst Hr: {f.fhr}, Valid Time {vtime}", + alpha=None, + fontsize=14, + loc="left", + ) + + units = f"({f.units}, shaded)" if f.vspec.get("print_units", True) else "" # Title or Atmospheric level and unit in the high center - if f.vspec.get('title'): + if f.vspec.get("title"): title = f"Diff: {f.vspec.get('title')} {units}" else: - level = level if not isinstance(level, list) else level[0] - title = f'Diff: {level} {lev_unit} {f.field.long_name} {units}' + level, lev_unit = numeric_level(f.level) + title = f"Diff: {level} {lev_unit} {f.field.long_name} {units}" plt.title(f"{title}", position=(0.5, 1.08), fontsize=18) - class MultiPanelDataMap(DataMap): - ''' + """ Class that extends a DataMap for handling multiple panels. - Keyword arguments: + Keyword Arguments: last_panel flag for multipanel plots to designate last panel drawn - ''' - def __init__(self, map_fields, map_, member, model_name=None, **kwargs): + """ + def __init__( + self, map_fields: MapFields, map_: Map, member: str, model_name: str | None = None, **kwargs + ): super().__init__(map_fields, map_, model_name=model_name) - self.last_panel = kwargs.get('last_panel', False) + self.last_panel = kwargs.get("last_panel", False) self.member = str(member) - def draw(self, show=False): - - ''' Main method for creating the plot. Set show=True to display the - figure from the command line. ''' + def draw(self, show: bool = False): + """ + Main method for creating the plot. Set show=True to display the + figure from the command line. + """ cf = self._draw_panel(wind_barbs=False) @@ -809,35 +966,33 @@ def draw(self, show=False): # Finish with the colorbar on the last panel only # Plot it on the full figure scale. 
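The comment above notes that `nlev` must match the count used in `self._eq_contours`: the color list and the level list are built to the same length (21 here), with the levels symmetric about zero and the middle colors forced to white. A standalone sketch of both pieces (`Spectral_r` mirrors the call above; the helper names are hypothetical):

```
import numpy as np
from matplotlib.pyplot import get_cmap

def symmetric_levels(data, nlev=21):
    # Span +/- the largest magnitude so zero sits exactly in the middle.
    bound = max(abs(np.amin(data)), abs(np.amax(data)))
    return np.linspace(-bound, bound, nlev)

def white_centered_colors(cmap="Spectral_r", nlev=21):
    # Sample nlev colors, then blank the two middle entries so small
    # differences render as white.
    colors = get_cmap(cmap, nlev)(range(nlev))
    mid = nlev // 2
    colors[mid] = colors[mid - 1] = [1, 1, 1, 1]
    return colors

diff = np.array([-3.0, -0.5, 0.0, 1.2, 7.5])
print(symmetric_levels(diff, nlev=5))        # -7.5, -3.75, 0.0, 3.75, 7.5
print(white_centered_colors(nlev=5).shape)   # (5, 4)
```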
- if self.last_panel: - cax = plt.axes([0.0, 0.0, 1.0, 0.2]) + if self.last_panel: # pragma: no cover + cax = plt.axes((0.0, 0.0, 1.0, 0.2)) self._colorbar(ax=cax, cc=cf) - cax.axis('off') + cax.axis("off") # Create a pop-up to display the figure, if show=True - if show: + if show: # pragma: no cover plt.tight_layout() - plt.show() return cf def _label_member(self): - - ''' Add the member label to the top left of the plot ''' + """Add the member label to the top left of the plot.""" ax = self.map.ax ax.text( - 0.05, 0.90, + 0.05, + 0.90, self.member, fontsize=18, - fontweight='bold', + fontweight="bold", backgroundcolor="white", transform=ax.transAxes, - ) + ) - def title(self): - - ''' Draw the title for a map. ''' + def title(self): # pragma: no cover + """Draw the title for a map.""" f = self.field atime = f.date_to_str(f.anl_dt) @@ -845,100 +1000,43 @@ def title(self): ax = self.map.ax # Analysis time (top) and forecast hour with Valid Time (bottom) on the left - ax.text(0.0, 0.5, - f"{self.model_name}: {atime}\nFcst Hr: {f.fhr}, Valid Time {vtime}", - alpha=None, - fontsize=14, - horizontalalignment='left', - verticalalignment='top', - transform=ax.transAxes, - ) + ax.text( + 0.0, + 0.5, + f"{self.model_name}: {atime}\nFcst Hr: {f.fhr}, Valid Time {vtime}", + alpha=None, + fontsize=14, + horizontalalignment="left", + verticalalignment="top", + transform=ax.transAxes, + ) - level, lev_unit = f.numeric_level(index_match=False) - if f.vspec.get('print_units', True): - units = f'({f.units}, shaded)' - else: - units = f'' + units = f"({f.units}, shaded)" if f.vspec.get("print_units", True) else "" # Title or Atmospheric level and unit in the high center - if f.vspec.get('title'): + if f.vspec.get("title"): title = f"{f.vspec.get('title')} {units}" else: - level = level if not isinstance(level, list) else level[0] - title = f'{level} {lev_unit} {f.field.long_name} {units}' - ax.text(0, 0.7, - f"{title}", - horizontalalignment='left', - verticalalignment='top', - fontsize=16, - transform=ax.transAxes, - ) + level, lev_unit = numeric_level(f.level) + title = f"{level} {lev_unit} {f.field.long_name} {units}" + ax.text( + 0, + 0.7, + f"{title}", + horizontalalignment="left", + verticalalignment="top", + fontsize=16, + transform=ax.transAxes, + ) # Two lines for hatched data (top), and contoured data (bottom) on the right contoured = self._set_overlay_string() - ax.text(0, 0.6, - f"{contoured}", - horizontalalignment='left', - verticalalignment='top', - fontsize=14, - transform=ax.transAxes, - ) - - -class MapFields(): - ''' Class that packages all the field objects need for producing - desired map content, i.e. an object that contains all filled - contours, hatched spaces, and overlayed contours needed for a full - product. ''' - - def __init__(self, main_field, fields_spec=None, map_type=None, - **kwargs): - - self.main_field = main_field - self.fields_spec = fields_spec if fields_spec is not None else {} - self.map_type = map_type - self.model = kwargs.get('model') - self.tile = kwargs.get('tile', 'full') - - @property - def contours(self): - ''' Return the list of contour fieldData objects''' - - # We won't plot contours on multipanel plots, or full global - # plots. 
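`_overlay_fields` below derives each overlay's variable and level from its spec key: a key like `gh_500` carries its own level, while a bare key inherits the main field's level before the main field object is deep-copied and re-pointed at the overlay. A minimal sketch of that key convention (the helper name and sample values are hypothetical):

```
def split_overlay_key(key, default_level):
    # "shortname_level" keys carry their own level; bare keys inherit the
    # level of the main field being plotted.
    if "_" in key:
        var, lev = key.split("_")
        return var, lev
    return key, default_level

print(split_overlay_key("gh_500", "850mb"))   # ('gh', '500')
print(split_overlay_key("pres", "850mb"))     # ('pres', '850mb')
```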
- if self.map_type == 'enspanel': - return [] - - if 'global' in self.model and self.tile in ['full']: - return [] - - return self._overlay_fields('contours') - - @property - def hatches(self): - ''' Return the list of hatch fieldData objects''' - - return self._overlay_fields('hatches') - - def _overlay_fields(self, spec_sect): - - ''' Generate a list of fieldData objects for the specified type - of overlay -- hatches or contours ''' - overlays = self.fields_spec.get(spec_sect) - overlay_fields = [] - if overlays is not None: - for overlay, overlay_kwargs in overlays.items(): - if '_' in overlay: - var, lev = overlay.split('_') - else: - var, lev = overlay, self.main_field.level - - # Make a copy of the main object, and change the - # attributes to match the overlay field - overlay_obj = copy.deepcopy(self.main_field) - overlay_obj.contour_kwargs = overlay_kwargs - overlay_obj.short_name = var - overlay_obj.level = lev - - overlay_fields.append(overlay_obj) - return overlay_fields + ax.text( + 0, + 0.6, + f"{contoured}", + horizontalalignment="left", + verticalalignment="top", + fontsize=14, + transform=ax.transAxes, + ) diff --git a/adb_graphics/figures/skewt.py b/adb_graphics/figures/skewt.py index 56bafbd7..4781ed7a 100644 --- a/adb_graphics/figures/skewt.py +++ b/adb_graphics/figures/skewt.py @@ -1,31 +1,47 @@ # pylint: disable=invalid-name -''' +""" The module the contains the SkewTDiagram class responsible for creating a Skew-T Log-P diagram using MetPy. -''' +""" -from collections import OrderedDict -from functools import lru_cache -import numpy as np +from functools import cached_property +from math import atan2, degrees +from pathlib import Path +from typing import TYPE_CHECKING, Any, TypedDict import matplotlib.font_manager as fm -import matplotlib.pyplot as plt -from matplotlib.ticker import FixedLocator import matplotlib.lines as mlines -from matplotlib.lines import Line2D +import matplotlib.pyplot as plt import metpy.calc as mpcalc +import numpy as np +import pandas as pd +from matplotlib.axes import Axes +from matplotlib.lines import Line2D +from matplotlib.ticker import FixedLocator from metpy.plots import Hodograph, SkewT from metpy.units import units from mpl_toolkits.axes_grid1.inset_locator import inset_axes -import pandas as pd +from uwtools.api.config import YAMLConfig +from xarray import DataArray, Dataset, where + +from adb_graphics import errors +from adb_graphics.datahandler import gribdata + +if TYPE_CHECKING: + from pint import UnitRegistry + -import adb_graphics.datahandler.gribdata as gribdata -import adb_graphics.errors as errors -import adb_graphics.utils as utils +class HydroPlotSettings(TypedDict): + color: str + label: str + marker: str + scale: float + units: str -class SkewTDiagram(gribdata.profileData): - ''' The class responsible for gathering all data needed from a grib file to +class SkewTDiagram(gribdata.ProfileData): + """ + The class responsible for gathering all data needed from a grib file to produce a Skew-T Log-P diagram. Input: @@ -39,255 +55,260 @@ class SkewTDiagram(gribdata.profileData): max_plev maximum pressure level to plot in mb model_name model name to use for plotting - Additional keyword arguments for the gribdata.profileData base class should also + Additional keyword arguments for the gribdata.ProfileData base class should also be included. 
- ''' - - def __init__(self, ds, loc, **kwargs): - + """ + + def __init__( + self, + fhr: int, + ds: dict[str, Dataset], + loc: str, + model: str, + spec: dict | YAMLConfig, + max_plev: int | None = 0, + model_name: str | None = None, + ): # Initialize on the temperature field since we need to gather # field-specific data from this object, e.g. dates, lat, lon, etc. - super().__init__(ds=ds, - loc=loc, - short_name='temp', - **kwargs, - ) - - self.max_plev = kwargs.get('max_plev', 0) - self.model_name = kwargs.get('model_name', 'Analysis') - - def _add_hydrometeors(self, hydro_subplot): - # pylint: disable=too-many-locals - mixing_ratios = OrderedDict({ - 'clwmr': { - 'color': 'blue', - 'label': 'CWAT', - 'marker': 's', - 'scale': 1.0, - 'units': 'g/m2' + super().__init__(fhr=fhr, ds=ds, loc=loc, model=model, short_name="temp", spec=spec) + + self.max_plev = max_plev + self.model_name = model_name or "Analysis" + + def _add_hydrometeors(self, hydro_subplot: Axes): + mixing_ratios: dict[str, HydroPlotSettings] = { + "clwmr": { + "color": "blue", + "label": "CWAT", + "marker": "s", + "scale": 1.0, + "units": "g/m2", }, - 'icmr': { - 'color': 'red', - 'label': 'CICE', - 'marker': '^', - 'scale': 10.0, - 'units': 'g/m2' + "icmr": { + "color": "red", + "label": "CICE", + "marker": "^", + "scale": 10.0, + "units": "g/m2", }, - 'rwmr': { - 'color': 'cyan', - 'label': 'RAIN', - 'marker': 'o', - 'scale': 1.0, - 'units': 'g/m2' + "rwmr": { + "color": "cyan", + "label": "RAIN", + "marker": "o", + "scale": 1.0, + "units": "g/m2", }, - 'snmr': { - 'color': 'purple', - 'label': 'SNOW', - 'marker': '*', - 'scale': 1.0, - 'units': 'g/m2' + "snmr": { + "color": "purple", + "label": "SNOW", + "marker": "*", + "scale": 1.0, + "units": "g/m2", }, - 'grle': { - 'color': 'orange', - 'label': 'GRPL', - 'marker': 'D', - 'scale': 1.0, - 'units': 'g/m2' + "grle": { + "color": "orange", + "label": "GRPL", + "marker": "D", + "scale": 1.0, + "units": "g/m2", }, - }) + } - profiles = self.atmo_profiles # dictionary - pres = profiles.get('pres').get('data') - temp = profiles.get('temp').get('data') - nlevs = len(pres) # determine number of vertical levels - pres_sfc = pres[0] # need correct surface pressure value! + pres = self.atmo_profiles["pres"]["data"] + temp = self.atmo_profiles["temp"]["data"] handles = [] - gravity = 9.81 # m/s^2 + gravity = 9.81 # m/s^2 - lines = ['Vert. Integrated Amt\n(Resolved, Total)\n'\ - +'(supercool layers shaded,\nwith filled markers)'] + lines = [ + "Vert. Integrated Amt\n(Resolved, Total)\n" + "(supercool layers shaded,\nwith filled markers)" + ] + freezing_f = 32.0 for mixr, settings in mixing_ratios.items(): # Get the profile values - scale = settings.get('scale') + scale = settings.get("scale", 1.0) try: - profile = np.asarray(self.values(name=mixr)) * 1000. * scale - except errors.GribReadError: - print(f'missing {mixr} for hydrometeor plot, skipping that field.') - continue - mixr_total = 0. - for n in range(nlevs): - if n == 0: - pres_layer = 2 * (pres_sfc - pres[n]) # layer depth - pres_sigma = pres_sfc - pres_layer # pressure at next sigma level - else: - pres_layer = 2 * (pres_sigma - pres[n]) # layer depth - pres_sigma = pres_sigma - pres_layer # pressure at next sigma level - mixr_total = mixr_total + pres_layer / gravity * profile[n] - - # limit values to upper and lower values of lotting range - profile = np.where((profile > 0.) 
& (profile < 1.e-4), 1.e-4, profile) - profile = np.where((profile > 10.), 10., profile) + profile = self.get_values(name=mixr) * 1000.0 * scale + except (errors.NoGraphicsDefinitionForVariableError, IndexError, ValueError): + try: + profile = self.get_values(name=mixr, level="uanat") * 1000.0 * scale + except errors.NoGraphicsDefinitionForVariableError: # pragma: no cover + print(f"missing {mixr} for hydrometeor plot, skipping that field.") + continue + mixr_total: units = 0.0 + if profile.any(): + for n in range(len(pres)): + if n == 0: + pres_sigma = pres[0] + else: + pres_layer = 2 * (pres_sigma - pres[n]) # layer depth + pres_sigma = pres_sigma - pres_layer # pressure at next sigma level + mixr_total = mixr_total + pres_layer / gravity * profile[n] + mixr_total = mixr_total.to_numpy() + + # limit values to upper and lower values of plotting range + profile = where((profile > 0.0) & (profile < 1.0e-4), 1.0e-4, profile) # noqa: PLR2004 + profile = where((profile > 10.0), 10.0, profile) # noqa: PLR2004 # plot line - hydro_subplot.plot(profile, pres, - settings.get('color'), - fillstyle='none', - linewidth=0.5, - marker=settings.get('marker'), - markersize=6, - ) - if mixr in ['clwmr', 'rwmr']: - hydro_subplot.plot(profile[temp.magnitude < 32.0], pres[temp.magnitude < 32.0], - settings.get('color'), - fillstyle='full', - linewidth=0.5, - marker=settings.get('marker'), - markersize=6, - ) - layer = False - for i, profile_lev in enumerate(profile): - if ((profile_lev > 0.0 and temp[i].magnitude < 32.0) and not layer): - layer = True - p_base = pres[i].magnitude - elif ((profile_lev <= 0.0 or temp[i].magnitude > 32.0) and layer): - # Shade the supercooled water depth - p_top = pres[i-1].magnitude - rect = plt.Rectangle((0, p_top), 100, (p_base-p_top),\ - facecolor=settings.get('color'), alpha=0.1) - hydro_subplot.add_patch(rect) - layer = False + profile = profile[: pres.shape[0]] + hydro_subplot.plot( + profile, + pres, + settings.get("color", ""), + fillstyle="none", + linewidth=0.5, + marker=settings.get("marker"), + markersize=6, + ) + if mixr in ["clwmr", "rwmr"]: + freezing_levs = profile.where( + (profile > 0.0) & (temp.magnitude < freezing_f), profile, 0 + ).to_numpy() + if freezing_levs.any(): + hydro_subplot.plot( + profile[temp.magnitude < freezing_f], + pres[temp.magnitude < freezing_f], + settings.get("color", ""), + fillstyle="full", + linewidth=0.5, + marker=settings.get("marker"), + markersize=6, + ) + pres_levs = pres[freezing_levs > 0].magnitude + rect = plt.Rectangle( + (0, pres_levs[-1]), + 100, + (pres_levs[0] - pres_levs[-1]), + facecolor=settings.get("color"), + alpha=0.1, + ) + hydro_subplot.add_patch(rect) # compute vertically integrated amount and add legend line - line = f"{settings.get('label'):<7s} {mixr_total.magnitude:>10.3f} "\ - f"{settings.get('units')}" + label = settings.get("label") + line = f"{label:<7s} {mixr_total:>10.3f} {settings.get('units')}" if scale != 1.0: - line = f"{settings.get('label'):<5s}(x{scale}) {mixr_total.magnitude:.3f} "\ - f"{settings.get('units')}" + line = f"{label:<5s}(x{scale}) {mixr_total:.3f} {settings.get('units')}" lines.append(line) - label = f"{settings.get('label'):<7s}" - if scale != 1.0: - label = f"{settings.get('label'):<5s}(x{scale})" - handles.append(mlines.Line2D([], [], - color=settings.get('color'), - fillstyle='none', - label=label, - linewidth=1.0, - marker=settings.get('marker'), - markersize=8, - ) - ) + handles.append( + mlines.Line2D( + [], + [], + color=settings.get("color"), + fillstyle="none", + 
label=f"{label:<5s}(x{scale})" if scale != 1.0 else f"{label:<7s}", + linewidth=1.0, + marker=settings.get("marker"), + markersize=8, + ) + ) hydro_subplot.legend(handles=handles, loc=[0.05, 0.65]) - contents = '\n'.join(lines) + contents = "\n".join(lines) # Draw the vertically integrated amounts box - hydro_subplot.text(0.02, 0.95, contents, - bbox=dict(facecolor='white', edgecolor='black', alpha=0.7), - fontproperties=fm.FontProperties(family='monospace'), - size=8, - transform=hydro_subplot.transAxes, - verticalalignment='top', - ) - - def _add_thermo_inset(self, skew): - + hydro_subplot.text( + 0.02, + 0.98, + contents, + bbox=dict(facecolor="white", edgecolor="black", alpha=0.7), + fontproperties=fm.FontProperties(family="monospace"), + size=8, + transform=hydro_subplot.transAxes, + verticalalignment="top", + ) + + def _add_thermo_inset(self, skew: SkewT): # Build up the text that goes in the thermo-dyniamics box lines = [] for name, items in self.thermo_variables.items(): - # Magic to get the desired number of decimals to appear. - decimals = items.get('decimals', 0) - value = items['data'] - if value != '--': - value = int(value) if decimals == 0 else value.round(decimals=decimals).values + decimals = items.get("decimals", 0) + data = items["data"] + value = int(data) if decimals == 0 else data.round(decimals=decimals).to_numpy() # Sure would have been nice to use a variable in the f string to # denote the format per variable. - line = f"{name.upper():<7s} {str(value):>6} {items['units']}" + line = f"{name.upper():<7s} {value!s:>6} {items['units']}" lines.append(line) - contents = '\n'.join(lines) + contents = "\n".join(lines) # Draw the text box - skew.ax.text(0.75, 0.98, contents, - bbox=dict(facecolor='white', edgecolor='black', alpha=0.7), - fontproperties=fm.FontProperties(family='monospace'), - size=8, - transform=skew.ax.transAxes, - verticalalignment='top', - ) - - @property - @lru_cache() + skew.ax.text( + 0.75, + 0.98, + contents, + bbox=dict(facecolor="white", edgecolor="black", alpha=0.7), + fontproperties=fm.FontProperties(family="monospace"), + size=8, + transform=skew.ax.transAxes, + verticalalignment="top", + ) + + @cached_property def atmo_profiles(self): - - ''' + """ Return a dictionary of atmospheric data profiles for each variable needed by the skewT. - Each of these variables must be have units set appropriately for use + Each of these variables must have units set appropriately for use with MetPy SkewT. Handle those units and conversions here since it differs from the requirements of other graphics units/transforms. - ''' + """ - # OrderedDict because we need to get pressure profile first. Entries in + # We need to get pressure profile first. Entries in # the dict are as follows: # # Variable short name: consistent with default_specs.yml # transform: units string to pass to MetPy's to() function # units: the end unit of the field (after transform, # if applicable). 
- atmo_vars = OrderedDict({ - 'pres': { - 'transform': 'hectoPa', - 'units': units.Pa, - }, - 'gh': { - 'units': units.gpm, - }, - 'sphum': { - 'units': units.dimensionless, - }, - 'temp': { - 'transform': 'degF', - 'units': units.degK, - }, - 'u': { - 'transform': 'knots', - 'units': units.meter_per_second, - }, - 'v': { - 'transform': 'knots', - 'units': units.meter_per_second, - }, - }) - - top = None - for var, items in atmo_vars.items(): + atmo_vars = { + "pres": { + "transform": "hectoPa", + "units": units.Pa, + }, + "gh": { + "units": units.gpm, + }, + "sphum": { + "units": units.dimensionless, + }, + "temp": { + "transform": "degF", + "units": units.degK, + }, + "u": { + "transform": "knots", + "units": units.meter_per_second, + }, + "v": { + "transform": "knots", + "units": units.meter_per_second, + }, + } + for var, items in atmo_vars.items(): # Get the profile values and attach MetPy units - tmp = np.asarray(self.values(name=var)) * items['units'] - - # Apply any needed transdecimals - transform = items.get('transform') - if transform: - tmp = tmp.to(transform) + vals = self.get_values(name=var).to_numpy() * items["units"] - # Only return values up to the maximum pressure level requested - if var == 'pres' and top is None: - top = np.sum(np.where(tmp.magnitude >= self.max_plev)) - 1 - - atmo_vars[var]['data'] = tmp[:top] + # Apply any needed transformations + transform = items.get("transform") + atmo_vars[var]["data"] = vals.to(transform) if transform else vals return atmo_vars def create_diagram(self): - - ''' Calls the private methods for creating each component of the SkewT - Diagram. ''' + """ + Calls the private methods for creating each component of the SkewT + Diagram. + """ skew, hydro_subplot = self._setup_diagram() self._title() @@ -299,15 +320,12 @@ def create_diagram(self): self._add_thermo_inset(skew) self._add_hydrometeors(hydro_subplot) - def create_csv(self, csv_path): - - ''' Calls the private methods for writing each of the SkewT Data. ''' + def create_csv(self, csv_path: Path | str): + """Calls the private methods for writing each of the SkewT Data.""" self._write_profile(csv_path) - def _plot_hodograph(self, skew): - - + def _plot_hodograph(self, skew: SkewT): # Create an array that indicates which layer (10-3, 3-1, 0-1 km) the # wind belongs to. The array, agl, will be set to the height # corresponding to the top of the layer. The resulting array will look @@ -318,119 +336,120 @@ def _plot_hodograph(self, skew): # Where the values above 10 km are unchanged, and there are three levels # in each of the 3 layers of interest. # - agl = np.copy(self.atmo_profiles.get('gh', {}).get('data')).to('km') + data_copy: units = np.copy(self.atmo_profiles["gh"]["data"]) + agl = data_copy.to("km") # Retrieve the wind data profiles - u_wind = self.atmo_profiles.get('u', {}).get('data') - v_wind = self.atmo_profiles.get('v', {}).get('data') + u_wind = self.atmo_profiles["u"]["data"] + v_wind = self.atmo_profiles["v"]["data"] # Create an inset axes object that is 28% width and height of the # figure and put it in the upper left hand corner. - ax = inset_axes(skew.ax, '25%', '25%', loc=2) - h = Hodograph(ax, component_range=80.) 
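The comment above explains that the hodograph trace is colored by which height layer (0-1, 1-3, 3-10 km AGL) each wind sample falls in, which is what the `intervals`/`colors` pair passed to `plot_colormapped` encodes. A quick illustration of that bucketing with `np.digitize` (a parallel sketch of the idea, not the MetPy code path):

```
import numpy as np

heights_km = np.array([0.2, 0.8, 2.0, 5.0, 12.0])
intervals = np.array([0, 1, 3, 10])                # layer boundaries in km
layer_index = np.digitize(heights_km, intervals)   # 1 = 0-1 km, 2 = 1-3 km, 3 = 3-10 km, 4 = above
print(layer_index)                                 # [1 1 2 3 4]
```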
+ ax = inset_axes(skew.ax, "25%", "25%", loc=2) + h = Hodograph(ax, component_range=80.0) h.add_grid(increment=20, linewidth=0.5) - intervals = [0, 1, 3, 10] * agl.units - colors = ['xkcd:salmon', 'xkcd:aquamarine', 'xkcd:navy blue'] + intervals: UnitRegistry = np.array([0, 1, 3, 10]) * agl.units + colors = ["xkcd:salmon", "xkcd:aquamarine", "xkcd:navy blue"] line_width = 1.5 # Plot the line colored by height AGL only up to the 10km level - lines = h.plot_colormapped(u_wind, v_wind, agl, - colors=colors, - intervals=intervals, - linewidth=line_width, - ) + lines = h.plot_colormapped( + u_wind, + v_wind, + agl, + colors=colors, + intervals=intervals, + linewidth=line_width, + ) # Local function to create a proxy line object for creating a legend on # a LineCollection returned from plot_colormapped. Using lines and # colors from outside scope. - def make_proxy(zval, idx=None, **kwargs): - color = colors[idx] if idx < len(colors) else lines.cmap(zval-1) - return Line2D([0, 1], [0, 1], color=color, linewidth=line_width, **kwargs) + def make_proxy(zval: int, idx: int): + color = colors[idx] if idx < len(colors) else lines.cmap(zval - 1) + return Line2D([0, 1], [0, 1], color=color, linewidth=line_width) # Make a list of proxies - proxies = [make_proxy(item, idx=i) for i, item in - enumerate(intervals.magnitude)] + proxies = [make_proxy(item, i) for i, item in enumerate(np.asarray(intervals.magnitude))] # Draw the legend - ax.legend(proxies[:-1], - ['0-1 km', '1-3 km', '3-10 km', ''], - fontsize='small', - loc='lower left', - ) + ax.legend( + proxies[:-1], + ["0-1 km", "1-3 km", "3-10 km", ""], + fontsize="small", + loc="lower left", + ) @staticmethod - def _plot_labels(skew): - - skew.ax.set_xlabel('Temperature (F)') - skew.ax.set_ylabel('Pressure (hPa)') - - def _write_profile(self, csv_path): - - profiles = self.atmo_profiles # dictionary - pres = profiles.get('pres').get('data') - u = profiles.get('u').get('data') - v = profiles.get('v').get('data') - temp = profiles.get('temp').get('data').to('degC') - sphum = profiles.get('sphum').get('data') - - dewpt = np.array(mpcalc.dewpoint_from_specific_humidity( - sphum, temp, pres).to('degC')) - wspd = np.array(mpcalc.wind_speed(u, v)) - wdir = np.array(mpcalc.wind_direction(u, v)) - - pres = np.array(pres) - temp = np.array(temp) - - profile = pd.DataFrame({ - 'LEVEL': pres, - 'TEMP': temp, - 'DWPT': dewpt, - 'WDIR': wdir, - 'WSPD': wspd, - }) + def _plot_labels(skew: SkewT): + skew.ax.set_xlabel("Temperature (F)") + skew.ax.set_ylabel("Pressure (hPa)") + + def _write_profile(self, csv_path: str | Path): + profiles = self.atmo_profiles # dictionary + pres = profiles["pres"]["data"] + u = profiles["u"]["data"] + v = profiles["v"]["data"] + temp = profiles["temp"]["data"].to("degC") + sphum = profiles["sphum"]["data"] + + dewpt = mpcalc.dewpoint_from_specific_humidity(pressure=pres, specific_humidity=sphum).to( + "degC" + ) + wspd = mpcalc.wind_speed(u, v) + wdir = mpcalc.wind_direction(u, v) + + profile = pd.DataFrame( + { + "LEVEL": pres.magnitude, + "TEMP": temp.magnitude, + "DWPT": dewpt.magnitude, + "WDIR": wdir.magnitude, + "WSPD": wspd.magnitude, + } + ) profile.to_csv(csv_path, index=False, float_format="%10.2f") - def _plot_profile(self, skew): - - profiles = self.atmo_profiles # dictionary - pres = profiles.get('pres').get('data') - temp = profiles.get('temp').get('data') - sphum = profiles.get('sphum').get('data') + def _plot_profile(self, skew: SkewT): + profiles = self.atmo_profiles # dictionary + pres = profiles.get("pres").get("data") 
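`_write_profile` above converts the u/v profiles to speed and direction with MetPy before writing the sounding CSV. A reduced, runnable sketch of that step using only `wind_speed` and `wind_direction`; the values and output file name are made up:

```
import metpy.calc as mpcalc
import numpy as np
import pandas as pd
from metpy.units import units

u = np.array([0.0, 10.0]) * units.knots
v = np.array([10.0, 0.0]) * units.knots

wspd = mpcalc.wind_speed(u, v)        # vector magnitude
wdir = mpcalc.wind_direction(u, v)    # meteorological direction (degrees from north)

profile = pd.DataFrame({"WSPD": wspd.magnitude, "WDIR": wdir.magnitude})
profile.to_csv("sounding_winds.csv", index=False, float_format="%10.2f")
```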
+ temp = profiles.get("temp").get("data") + sphum = profiles.get("sphum").get("data") - dewpt = mpcalc.dewpoint_from_specific_humidity(sphum, temp, pres).to('degF') + dewpt = mpcalc.dewpoint_from_specific_humidity(pressure=pres, specific_humidity=sphum).to( + "degF" + ) # Pressure vs temperature - skew.plot(pres, temp, 'r', linewidth=1.5) + skew.plot(pres, temp, "r", linewidth=1.5) # Pressure vs dew point temperature - skew.plot(pres, dewpt, 'blue', linewidth=1.5) + skew.plot(pres, dewpt, "blue", linewidth=1.5) # Compute parcel profile and plot it - parcel_profile = mpcalc.parcel_profile(pres, - temp[0], - dewpt[0]).to('degC') - skew.plot(pres, - parcel_profile, - 'orange', - linestyle='dashed', - linewidth=1.2, - ) - - def _plot_wind_barbs(self, skew): - + parcel_profile = mpcalc.parcel_profile(pres, temp[0], dewpt[0]).to("degC") + skew.plot( + pres, + parcel_profile, + "orange", + linestyle="dashed", + linewidth=1.2, + ) + + def _plot_wind_barbs(self, skew: SkewT): # Pressure vs wind - skew.plot_barbs(self.atmo_profiles.get('pres', {}).get('data'), - self.atmo_profiles.get('u', {}).get('data'), - self.atmo_profiles.get('v', {}).get('data'), - color='blue', - linewidth=0.2, - y_clip_radius=0, - ) + skew.plot_barbs( + self.atmo_profiles["pres"]["data"], + self.atmo_profiles["u"]["data"], + self.atmo_profiles["v"]["data"], + color="blue", + linewidth=0.2, + y_clip_radius=0, + ) def _setup_diagram(self): - # Create a new figure. The dimensions here give a good aspect ratio. fig = plt.figure(figsize=(12, 12)) gs = plt.GridSpec(4, 5) @@ -445,104 +464,106 @@ def _setup_diagram(self): # display in Fahrenheit. # Fahrenheit tick labels that will display - labels_F = list(range(-20, 125, 20)) * units.degF + labels_f = list(range(-20, 125, 20)) * units.degF - # Celcius VALUES for those tick marks. These put the ticks in the right + # Celsius VALUES for those tick marks. These put the ticks in the right # spot. - labels = labels_F.to('degC').magnitude + labels = labels_f.to("degC").magnitude - # Set the MINOR tick values to the CELCIUS values. + # Set the MINOR tick values to the CELSIUS values. skew.ax.xaxis.set_minor_locator(FixedLocator(labels)) # Set the MINOR tick labels to the FAHRENHEIT values. - skew.ax.set_xticklabels(labels_F.magnitude, minor=True) - skew.ax.tick_params(which='minor', - length=8) + skew.ax.set_xticklabels(labels_f.magnitude, minor=True) + skew.ax.tick_params(which="minor", length=8) - # Turn off the MAJOR (celcius) tick marks, label the grid lines inside + # Turn off the MAJOR (celsius) tick marks, label the grid lines inside # the axes. 
- skew.ax.tick_params(axis='x', - labelbottom=True, - labelcolor='gray', - labelright=True, - labelrotation=45, - labeltop=True, - length=0, - pad=-25, - which='major', - ) + skew.ax.tick_params( + axis="x", + labelbottom=True, + labelcolor="gray", + labelright=True, + labelrotation=45, + labeltop=True, + length=0, + pad=-25, + which="major", + ) # Add the relevant special lines with their labels dry_adiabats = np.arange(-40, 210, 10) * units.degC - skew.plot_dry_adiabats(dry_adiabats, - colors='tan', - linestyles='solid', - linewidth=0.7, - ) - utils.label_lines(ax=skew.ax, - lines=skew.dry_adiabats, - labels=dry_adiabats.magnitude, - end='top', - offset=1, - ) + skew.plot_dry_adiabats( + dry_adiabats, + colors="tan", + linestyles="solid", + linewidth=0.7, + ) + label_lines( + ax=skew.ax, + lines=skew.dry_adiabats, + labels=dry_adiabats.magnitude, + end="top", + offset=1, + ) moist_adiabats = np.arange(8, 36, 4) * units.degC moist_pr = np.arange(1001, 220, -10) * units.hPa - skew.plot_moist_adiabats(moist_adiabats, - moist_pr, - colors='green', - linestyles='solid', - linewidth=0.7, - ) - utils.label_lines(ax=skew.ax, - lines=skew.moist_adiabats, - labels=moist_adiabats.magnitude, - end='top', - ) - - mixing_lines = np.array([1, 2, 3, 5, 8, 12, 16, 20]).reshape(-1, 1) / 1000 + skew.plot_moist_adiabats( + moist_adiabats, + moist_pr, + colors="green", + linestyles="solid", + linewidth=0.7, + ) + label_lines( + ax=skew.ax, + lines=skew.moist_adiabats, + labels=moist_adiabats.magnitude, + end="top", + ) + + mixing_lines = np.array([1, 2, 3, 5, 8, 12, 16, 20]).reshape(-1, 1) / 1000 mix_pr = np.arange(1001, 400, -50) * units.hPa - skew.plot_mixing_lines(w=mixing_lines, p=mix_pr, - colors='green', - linestyles=(0, (5, 10)), - linewidth=0.7, - ) - utils.label_lines(ax=skew.ax, - lines=skew.mixing_lines, - labels=mixing_lines * 1000, - ) + skew.plot_mixing_lines( + mixing_ratio=mixing_lines, + pressure=mix_pr, + colors="green", + linestyles=(0, (5, 10)), + linewidth=0.7, + ) + label_lines( + ax=skew.ax, + lines=skew.mixing_lines, + labels=mixing_lines * 1000, + ) hydro_subplot = fig.add_subplot(gs[:, -1], sharey=skew.ax) hydro_subplot.set_xlim(0.0001, 10.0) hydro_subplot.set_xscale("log") hydro_subplot.yaxis.tick_right() - hydro_subplot.set_aspect(23) # completely arbitrary + hydro_subplot.set_aspect(23) # completely arbitrary - plt.grid(which='major', axis='both') + plt.grid(which="major", axis="both") plt.xlabel("hydrometeors") plt.ylabel("") return skew, hydro_subplot - @property - @lru_cache() + @cached_property def thermo_variables(self): - - ''' - Return an ordered dictionary of thermodynamic variables needed for the skewT. - Ordered because we want to print these values in this order on the SkewT - diagram. - The return dictionary contains a 'data' entry for each variable that - includes the value of the metric. - - Variables' transforms and units are handled by default specs in much the - same way as in fieldData class since these are not used by MetPy - explictly. - ''' - - # OrderedDict so that we get the thermodynamic variables printed in the - # same order every time in the resulting SkewT inset. The fields - # include: + """ + Return a dictionary of thermodynamic variables needed for the skewT. + Ensure it's ordered because we want to print these values in this order on the SkewT + diagram. The return dictionary contains a 'data' entry for each variable that includes the + value of the metric. 
+ + Variables' transforms and units are handled by default specs in much the same way as in + FieldData class since these are not used by MetPy explictly. + """ + + # We want the thermodynamic variables printed in the same order every time in the resulting + # SkewT inset. The fields include: # # Variable short name: can be consistent with default_specs.yml. # If not, must provide level and variable @@ -554,111 +575,213 @@ def thermo_variables(self): # decimals: (optional) number of decimal places to # include when formatting output. Defaults # to 0 (integer). - thermo = OrderedDict({ - 'cape': { # Convective available potential energy - 'level': 'sfc', - }, - 'cin': { # Convective inhibition - 'level': 'sfc', - }, - 'mucape': { # Most Unstable CAPE - 'level': 'mu', - 'variable': 'cape', - }, - 'mucin': { # CIN from MUCAPE level - 'level': 'mu', - 'variable': 'cin', - }, - 'li': { # Lifted Index - 'decimals': 1, - 'level': 'sfc', - }, - 'bli': { # Best Lifted Index - 'decimals': 1, - 'level': 'best', - 'variable': 'li', - }, - 'lcl': { # Lifted Condensation Level - }, - 'lpl': { # Lifted Parcel Level - }, - 'srh03': { # 0-3 km Storm relative helicity - 'level': 'sr03', - 'variable': 'hlcy', - }, - 'srh01': { # 0-1 km Storm relative helicity - 'level': 'sr01', - 'variable': 'hlcy', - }, - 'shr06': { # 0-6 km Shear - 'level': '06km', - 'variable': 'shear', - }, - 'shr01': { # 0-1 km Shear - 'level': '01km', - 'variable': 'shear', - }, - 'cell': { # Cell motion - }, - 'pwtr': { # Precipitable water - 'decimals': 1, - 'level': 'sfc', - }, - }) + thermo: dict = { + "cape": { # Convective available potential energy + "level": "sfc", + }, + "cin": { # Convective inhibition + "level": "sfc", + }, + "mucape": { # Most Unstable CAPE + "level": "mu", + "variable": "cape", + }, + "mucin": { # CIN from MUCAPE level + "level": "mu", + "variable": "cin", + }, + "li": { # Lifted Index + "decimals": 1, + "level": "sfc", + }, + "bli": { # Best Lifted Index + "decimals": 1, + "level": "best", + "variable": "li", + }, + "lcl": { # Lifted Condensation Level + }, + "lpl": { # Lifted Parcel Level + }, + "srh03": { # 0-3 km Storm relative helicity + "level": "sr03", + "variable": "hlcy", + }, + "srh01": { # 0-1 km Storm relative helicity + "level": "sr01", + "variable": "hlcy", + }, + "shr06": { # 0-6 km Shear + "level": "06km", + "variable": "shear", + }, + "shr01": { # 0-1 km Shear + "level": "01km", + "variable": "shear", + }, + "cell": { # Cell motion + }, + "pwtr": { # Precipitable water + "decimals": 1, + "level": "sfc", + }, + } for var, items in thermo.items(): - - varname = items.get('variable', var) - lev = items.get('level', 'ua') + varname = items.get("variable", var) + lev = items.get("level", "ua") spec = self.spec.get(varname, {}).get(lev) - if not spec: - raise errors.NoGraphicsDefinitionForVariable(varname, lev) + if not spec: # pragma: no cover + raise errors.NoGraphicsDefinitionForVariableError(varname, lev) try: - tmp = self.values(level=lev, name=varname, one_lev=True) - - - transforms = spec.get('transform') + vals = self.get_values(level=lev, name=varname) + transforms = spec.get("transform") if transforms: - tmp = self.get_transform(transforms, tmp) - - except errors.GribReadError: - - tmp = '--' + vals = self.get_transform(transforms, vals) - thermo[var]['data'] = tmp - thermo[var]['units'] = spec.get('unit') + except errors.GribReadError: # pragma: no cover + vals = DataArray([]) + thermo[var]["data"] = vals + thermo[var]["units"] = spec.get("unit") return thermo def _title(self): - - 
''' Creates standard annotation for a skew-T. ''' + """Creates standard annotation for a skew-T.""" atime = self.date_to_str(self.anl_dt) vtime = self.date_to_str(self.valid_dt) # Top Left - plt.title(f"{self.model_name}: {atime}\nFcst Hr: {self.fhr}", - fontsize=16, - loc='left', - position=(-4.8, 1.03), - ) + plt.title( + f"{self.model_name}: {atime}\nFcst Hr: {self.fhr}", + fontsize=16, + loc="left", + x=-4.8, + y=1.03, + ) # Top Right - plt.title(f"Valid: {vtime}", - fontsize=16, - loc='right', - position=(-0.20, 1.03), - ) + plt.title( + f"Valid: {vtime}", + fontsize=16, + loc="right", + x=-0.20, + y=1.03, + ) # Center site = f"{self.site_code} {self.site_num} {self.site_name}" site_loc = f"{self.site_lat}, {self.site_lon}" site_title = f"{site} at nearest grid pt over land {site_loc}" - plt.title(site_title, - fontsize=12, - loc='center', - position=(-2.5, 1.0), - ) + plt.title( + site_title, + fontsize=12, + loc="center", + x=-2.5, + y=1.0, + ) + + +def label_line(ax: Axes, label: str, segment: np.ndarray, **kwargs): + """ + Label a single line with line2D label data. + + Input: + + ax the SkewT object axis + label label to be used for the current line + segment a list (array) of values for the current line + + Key Word Arguments + + align optional bool to enable the rotation of the label to line angle + end the end of the line at which to put the label. 'bottom' or 'top' + offset index to use for the "end" of the array + + Any kwargs accepted by matplotlib's text box. + """ + + # Strip non-text-box key word arguments and set default if they don't exist + align = kwargs.pop("align", True) + end = kwargs.pop("end", "bottom") + offset = kwargs.pop("offset", 0) + + # Label location + if end == "bottom": + x, y = segment[0 + offset, :] + ip = 1 + offset + elif end == "top": + x, y = segment[-1 - offset, :] + ip = -1 - offset + + if align: + # Compute the slope + dx = segment[ip, 0] - segment[ip - 1, 0] + dy = segment[ip, 1] - segment[ip - 1, 1] + ang = degrees(atan2(dy, dx)) + + # Transform to screen co-ordinates + pt = np.array([x, y]).reshape((1, 2)) + trans_angle = ax.transData.transform_angles(np.array((ang,)), pt)[0] + + if end == "top": + trans_angle -= 180 + + else: # pragma: no cover + trans_angle = 0 + + # Set a bunch of keyword arguments + if ("horizontalalignment" not in kwargs) and ("ha" not in kwargs): + kwargs["ha"] = "center" + + if ("verticalalignment" not in kwargs) and ("va" not in kwargs): + kwargs["va"] = "center" + + if "backgroundcolor" not in kwargs: + kwargs["backgroundcolor"] = ax.get_facecolor() + + if "clip_on" not in kwargs: + kwargs["clip_on"] = True + + if "fontsize" not in kwargs: + kwargs["fontsize"] = "larger" + + if "fontweight" not in kwargs: + kwargs["fontweight"] = "bold" + + # Larger value (e.g., 2.0) to move box in front of other diagram elements + if "zorder" not in kwargs: + kwargs["zorder"] = 1.50 + + # Place the text box label on the line. + ax.text(x, y, label, rotation=trans_angle, **kwargs) + + +def label_lines(ax: Axes, lines: Any, labels: np.ndarray, offset: float = 0, **kwargs): + """ + Plots labels on a set of lines from SkewT. + + Input: + + ax the SkewT object axis + lines the SkewT object special lines + labels list of labels to be used + offset index to use for the "end" of the array + + Key Word Arguments + + color line color + + Along with any other kwargs accepted by matplotlib's text box. 
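`label_line` above rotates each label to follow its line: it takes the slope of the last segment in data space, then uses `transform_angles` to re-express that angle in screen space so the text still tracks the drawn line when the axes are stretched. A standalone sketch of those two steps on an ordinary Matplotlib axes (the segment values are made up):

```
from math import atan2, degrees

import matplotlib.pyplot as plt
import numpy as np

fig, ax = plt.subplots()
segment = np.array([[0.0, 0.0], [1.0, 2.0]])   # two points of a line, in data space
ax.plot(segment[:, 0], segment[:, 1])

# Slope angle of the last segment in data coordinates.
dx, dy = segment[-1] - segment[-2]
ang = degrees(atan2(dy, dx))

# Re-express that angle in screen coordinates so the text follows the drawn
# line even when the axes aspect ratio stretches the data space.
pt = segment[-1].reshape((1, 2))
trans_angle = ax.transData.transform_angles(np.array((ang,)), pt)[0]

ax.text(*segment[-1], "label", rotation=trans_angle, ha="center", va="center")
```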
+ """ + + if "color" not in kwargs: + kwargs["color"] = lines.get_color()[0] + + for i, line in enumerate(lines.get_segments()): + assert not labels[i].ndim > 1 + label = int(labels[i]) + label_line(ax, str(label), line, align=True, offset=offset, **kwargs) diff --git a/adb_graphics/specs.py b/adb_graphics/specs.py index a4088e17..f7896240 100644 --- a/adb_graphics/specs.py +++ b/adb_graphics/specs.py @@ -1,520 +1,457 @@ -# pylint: disable=too-many-public-methods -''' +""" This module sets the specifications for certain atmospheric variables. Typically -this is related to a spec that needs some level of computation, i.e. a set of +this is related to a spec that needs some level of computation, e.g. a set of colors from a color map. -''' +""" import abc from itertools import chain -from matplotlib import cm -from matplotlib import colors as mpcolors + import numpy as np -import yaml +from matplotlib import colors as mpcolors +from matplotlib.pyplot import get_cmap from metpy.plots import ctables -class VarSpec(abc.ABC): - ''' +class VarSpec(abc.ABC): + """ Loads a yaml config file with spec settings. Also defines methods for declaring more complex specifications for variables based on settings within the config file. - ''' - - def __init__(self, config): - - with open(config, 'r') as cfg: - self.yml = yaml.load(cfg, Loader=yaml.Loader) + """ @property def aod_colors(self) -> np.ndarray: + """Default color map for AOD products and chem products.""" - ''' Default color map for AOD products and chem products ''' - - grays = cm.get_cmap('Greys', 2)([0]) - others = cm.get_cmap(self.vspec.get('cmap'), 15)(range(1, 15, 1), alpha=0.6) + grays = get_cmap("Greys", 2)([0]) + others = get_cmap(self.vspec.get("cmap"), 15)(range(1, 15, 1)) return np.concatenate((grays, others)) - def centered_diff(self, cmap=None, nlev=None): - - ''' Returns the colors specified by levels and cmap in default spec, but - with white center. ''' + def centered_diff(self, cmap: str | None = None, nlev: int | None = None) -> np.ndarray: + """ + Returns the colors specified by levels and cmap in default spec, but + with white center. + """ if nlev is None: - clevs = self.vspec.get('clevs') + clevs = self.vspec.get("clevs", self.clevs) nlev = len(clevs) + 1 if cmap is None: - cmap = self.vspec.get('cmap') + cmap = self.vspec.get("cmap") - colors = cm.get_cmap(cmap, nlev)(range(nlev)) + colors = get_cmap(cmap, nlev)(range(nlev)) mid = nlev // 2 colors[mid] = [1, 1, 1, 1] - colors[mid-1] = [1, 1, 1, 1] + colors[mid - 1] = [1, 1, 1, 1] return colors @property def cin_colors(self) -> np.ndarray: + """Default color map for Convective Inhibition.""" - ''' Default color map for Convective Inhibition ''' - - ncar = cm.get_cmap(self.vspec.get('cmap'), 128) \ - ([120, 100, 90, 85, 80, 70, 60, 50, 25, 20, 18]) - grays = cm.get_cmap('Greys', 2)([0]) + ncar = get_cmap(self.vspec.get("cmap"), 128)([120, 100, 90, 85, 80, 70, 60, 50, 25, 20, 18]) + grays = get_cmap("Greys", 2)([0]) return np.concatenate((ncar, grays)) @property @abc.abstractmethod def clevs(self) -> np.ndarray: - - ''' An abstract method responsible for returning the np.ndarray of contour - levels for a given field. Numpy arange supports non-integer values. ''' + """ + An abstract method responsible for returning the np.ndarray of contour + levels for a given field. Numpy arange supports non-integer values. + """ @property @abc.abstractproperty def vspec(self): - - ''' The variable plotting specification. The level-specific subgroup - from a config file like default_specs.yml. 
''' + """ + The variable plotting specification. The level-specific subgroup + from a config file like default_specs.yml. + """ @property def ceil_colors(self) -> np.ndarray: + """Default color map for Ceiling.""" - ''' Default color map for Ceiling ''' - - grays = cm.get_cmap('Greys', 2)([0]) - ncar = cm.get_cmap(self.vspec.get('cmap'), 128) \ - ([15, 18, 20, 25, 50, 60, 70, 80, 85, 90, 100, 120]) + grays = get_cmap("Greys", 2)([0]) + ncar = get_cmap(self.vspec.get("cmap"), 128)( + [15, 18, 20, 25, 50, 60, 70, 80, 85, 90, 100, 120] + ) return np.concatenate((grays, ncar, grays)) @property def cldcov_colors(self) -> np.ndarray: + """Default color map for Cloud Cover.""" - ''' Default color map for Cloud Cover ''' - - grays = cm.get_cmap('Greys', 7)([0, 1, 3]) - ncar = cm.get_cmap(self.vspec.get('cmap'), 128) \ - ([120, 100, 90, 85, 80, 70, 60, 50, 25, 20]) + grays = get_cmap("Greys", 7)([0, 1, 3]) + ncar = get_cmap(self.vspec.get("cmap"), 128)([120, 100, 90, 85, 80, 70, 60, 50, 25, 20]) return np.concatenate((grays, ncar)) @property def cref_colors(self) -> np.ndarray: + """Default color map for Reflectivity.""" - ''' Default color map for Reflectivity ''' - - ncolors = len(self.clevs)-1 - grays = cm.get_cmap('Greys', 5)([0]) - nws = ctables.colortables.get_colortable(self.vspec.get('cmap'))(range(ncolors)) - white = cm.get_cmap('Greys', 5)([0]) + ncolors = len(self.clevs) - 1 + grays = get_cmap("Greys", 5)([0]) + nws = ctables.colortables.get_colortable(self.vspec.get("cmap"))(range(ncolors)) + white = get_cmap("Greys", 5)([0]) return np.concatenate((grays, nws, white)) @property - def fire_power_colors(self) -> np.ndarray: - - ''' Default color map for fire power plot. ''' + def fire_power_colors(self) -> list[str]: + """Default color map for fire power plot.""" # The scatter plot utility won't accept anything but named colors - colors = ['white', 'lightskyblue', 'darkblue', 'green', 'darkorange', \ - 'indianred', 'firebrick'] - - return colors + return [ + "white", + "lightskyblue", + "darkblue", + "green", + "darkorange", + "indianred", + "firebrick", + ] @property - def smoke_emissions_colors(self) -> np.ndarray: - - ''' Default color map for smoke emissions plot. 
''' - - # The scatter plot utility won't accept anything but named colors - colors = ['white', 'rebeccapurple', 'royalblue', 'cadetblue', \ - 'yellowgreen', 'mediumaquamarine', 'lightgreen', 'yellow', \ - 'gold', 'orange', 'darkorange', 'orangered', 'red', \ - 'firebrick'] - - return colors - def flru_colors(self) -> np.ndarray: + """Default color map for Ceiling.""" - ''' Default color map for Ceiling ''' - - ctable = cm.get_cmap(self.vspec.get('cmap'), 128) \ - ([50, 15, 90, 120]) - return ctable + return get_cmap(self.vspec.get("cmap"), 128)([50, 15, 90, 120]) @property def frzn_colors(self) -> np.ndarray: + """Default color map for Frozen Precip %.""" - ''' Default color map for Frozen Precip % ''' - - grays = cm.get_cmap('Greys', 7)([0, 2]) - ncar = cm.get_cmap(self.vspec.get('cmap'), 128) \ - ([120, 90, 85, 80, 70, 60, 50, 25, 20, 15]) + grays = get_cmap("Greys", 7)([0, 2]) + ncar = get_cmap(self.vspec.get("cmap"), 128)([120, 90, 85, 80, 70, 60, 50, 25, 20, 15]) return np.concatenate((grays, ncar)) @property def goes_colors(self) -> np.ndarray: + """Default color map for simulated GOES IR satellite.""" - ''' Default color map for simulated GOES IR satellite ''' - - grays = cm.get_cmap('Greys_r', 33)(range(33)) - ctable2 = ctables.colortables.get_colortable(self.vspec.get('cmap')) \ - (range(65, 150)) + grays = get_cmap("Greys_r", 33)(range(33)) + ctable2 = ctables.colortables.get_colortable(self.vspec.get("cmap"))(range(65, 150)) return np.concatenate((grays[-1:], grays, ctable2, grays[1:])) @property def graupel_colors(self) -> np.ndarray: + """Default color map for Max Vertically Integrated Graupel.""" - ''' Default color map for Max Vertically Integrated Graupel ''' - - grays = cm.get_cmap('Greys', 3)([0]) - ncar = cm.get_cmap(self.vspec.get('cmap'), 128) \ - (range(20, 128, 6)) + grays = get_cmap("Greys", 3)([0]) + ncar = get_cmap(self.vspec.get("cmap"), 128)(range(20, 128, 6)) return np.concatenate((grays, ncar)) @property def hail_colors(self) -> np.ndarray: + """Default color map for Hail diameter.""" - ''' Default color map for Hail diameter ''' - - grays = cm.get_cmap('Greys', 2)([0]) - ncar = cm.get_cmap(self.vspec.get('cmap'), 128) \ - ([100, 15, 18, 20, 25, 60, 80, 85, 90]) + grays = get_cmap("Greys", 2)([0]) + ncar = get_cmap(self.vspec.get("cmap"), 128)([100, 15, 18, 20, 25, 60, 80, 85, 90]) return np.concatenate((grays, ncar)) - @property - def heat_flux_colors(self) -> np.ndarray: - - ''' Default color map for Latent/Sensible Heat Flux ''' - - grays = cm.get_cmap('Greys', 8)([6, 5, 4, 3, 2]) - ctable = ctables.colortables.get_colortable(self.vspec.get('cmap')) \ - (range(0, 33, 2)) - return np.concatenate((grays, ctable)) - @property def heat_flux_colors_g(self) -> np.ndarray: + """Default color map for ground heat flux.""" - ''' Default color map for Latent/Sensible Heat Flux ''' - - colors = cm.get_cmap(self.vspec.get('cmap'), 128) \ - (range(15, 112, 8)) - return colors + return get_cmap(self.vspec.get("cmap"), 128)(range(15, 112, 8)) @property def heat_flux_colors_l(self) -> np.ndarray: + """Default color map for net latent heat flux.""" - ''' Default color map for Latent/Sensible Heat Flux ''' - - colors = cm.get_cmap(self.vspec.get('cmap'), 128) \ - (range(32, 129, 6)) - return colors + return get_cmap(self.vspec.get("cmap"), 128)(range(32, 129, 6)) @property def heat_flux_colors_s(self) -> np.ndarray: + """Default color map for sensible heat flux.""" - ''' Default color map for Latent/Sensible Heat Flux ''' - - colors = cm.get_cmap(self.vspec.get('cmap'), 128) 
\ - (range(32, 129, 6)) - return colors + return get_cmap(self.vspec.get("cmap"), 128)(range(32, 129, 6)) @property def icprb_colors(self) -> np.ndarray: + """Default color map for Icing Probability.""" - ''' Default color map for Icing Probability ''' - - grays = cm.get_cmap('Greys', 2)([0]) - ncar = cm.get_cmap(self.vspec.get('cmap'), 128) \ - ([25, 35, 50, 60, 70, 80, 85, 90, 100]) + grays = get_cmap("Greys", 2)([0]) + ncar = get_cmap(self.vspec.get("cmap"), 128)([25, 35, 50, 60, 70, 80, 85, 90, 100]) return np.concatenate((grays, ncar)) + @property def icsev_colors(self) -> np.ndarray: + """Default color map for Icing Severity.""" - ''' Default color map for Icing Severity ''' - - white = cm.get_cmap('Greys', 2)([0]) - blues = cm.get_cmap(self.vspec.get('cmap'), 9) \ - ([2, 3, 4, 6, 8]) + white = get_cmap("Greys", 2)([0]) + blues = get_cmap(self.vspec.get("cmap"), 9)([2, 3, 4, 6, 8]) return np.concatenate((white, blues)) @property def lcl_colors(self) -> np.ndarray: + """Default color map for Lifted Condensation Level.""" - ''' Default color map for Lifted Condensation Level ''' - - ctable = ctables.colortables.get_colortable(self.vspec.get('cmap')) \ - (range(50, 180, 7)) # rainbow - return ctable + # rainbow + return np.asarray( + ctables.colortables.get_colortable(self.vspec.get("cmap"))(range(50, 180, 7)) + ) @property def lifted_index_colors(self) -> np.ndarray: + """Default color map for Lifted Index.""" - ''' Default color map for Lifted Index ''' - - ctable = cm.get_cmap(self.vspec.get('cmap'), 128) \ - (range(4, 125, 4)) + ctable = get_cmap(self.vspec.get("cmap"), 128)(range(4, 125, 4)) ctable[14] = [1, 1, 1, 1] ctable[15] = [1, 1, 1, 1] return ctable @property def mdn_colors(self) -> np.ndarray: + """Default color map for Max Downdraft.""" - ''' Default color map for Max Downdraft ''' - - grays = cm.get_cmap('Greys', 2)([0]) - others = cm.get_cmap(self.vspec.get('cmap'), 18)(range(18, 1, -1), alpha=0.6) + grays = get_cmap("Greys", 2)([0]) + others = get_cmap(self.vspec.get("cmap"), 18)(range(18, 1, -1)) return np.concatenate((others, grays)) @property def mean_vvel_colors(self) -> np.ndarray: + """Default color map for Mean Vertical Velocity.""" - ''' Default color map for Mean Vertical Velocity ''' - - ctable = cm.get_cmap(self.vspec.get('cmap'), 128)(range(0, 114, 6)) + ctable = get_cmap(self.vspec.get("cmap"), 128)(range(0, 114, 6)) ctable[9] = [1, 1, 1, 1] return ctable @property def mup_colors(self) -> np.ndarray: + """Default color map for Max Updraft.""" - ''' Default color map for Max Updraft ''' - - grays = cm.get_cmap('Greys', 2)([0]) - others = cm.get_cmap(self.vspec.get('cmap'), 18)(range(1, 18, 1), alpha=0.6) + grays = get_cmap("Greys", 2)([0]) + others = get_cmap(self.vspec.get("cmap"), 18)(range(1, 18, 1)) return np.concatenate((grays, others)) @property def pbl_colors(self) -> np.ndarray: + """Default color map for PBL Height.""" - ''' Default color map for PBL Height ''' - - return ctables.colortables.get_colortable(self.vspec.get('cmap')) \ - (range(15, 60, 3)) + return np.asarray( + ctables.colortables.get_colortable(self.vspec.get("cmap"))(range(15, 60, 3)) + ) @property def pcp_colors(self) -> np.ndarray: + """Default color map for Hourly Precipitation.""" - ''' Default color map for Hourly Precipitation ''' - - grays = cm.get_cmap('Greys', 6)([0, 3]) - ncar = cm.get_cmap(self.vspec.get('cmap'), 128) \ - ([25, 50, 60, 70, 80, 85, 90, 115]) + grays = get_cmap("Greys", 6)([0, 3]) + ncar = get_cmap(self.vspec.get("cmap"), 128)([25, 50, 60, 70, 80, 85, 90, 
115]) return np.concatenate((grays, ncar)) @property def pcp_colors_high(self) -> np.ndarray: + """High values color map for Hourly Precipitation.""" - ''' High values color map for Hourly Precipitation ''' - - grays = cm.get_cmap('Greys', 2)([0]) - ncar = cm.get_cmap(self.vspec.get('cmap'), 128) \ - ([70, 80, 85, 90, 115]) + grays = get_cmap("Greys", 2)([0]) + ncar = get_cmap(self.vspec.get("cmap"), 128)([70, 80, 85, 90, 115]) return np.concatenate((grays, ncar)) @property def pmsl_colors(self) -> np.ndarray: + """Default color map for mean sea level pressure.""" - ''' Default color map for Surface Pressure ''' - - ncolors = len(self.vspec.get('clevs')) + ncolors = len(self.vspec.get("clevs", self.clevs)) incr = 128 // ncolors - colors = cm.get_cmap(self.vspec.get('cmap'), 128)(range(incr, 128, incr)) + colors = get_cmap(self.vspec.get("cmap"), 128)(range(incr, 128, incr)) return np.asarray(colors) @property def ps_colors(self) -> np.ndarray: + """Default color map for surface pressure.""" - ''' Default color map for Surface Pressure ''' - - grays = cm.get_cmap('Greys', 13)(range(13)) + grays = get_cmap("Greys", 13)(range(13)) segments = [[16, 53], [86, 105], [110, 151, 2], [172, 202, 2]] - ncar = cm.get_cmap('gist_ncar', 200)(list(chain(*[range(*i) for i in segments]))) + ncar = get_cmap("gist_ncar", 200)(list(chain(*[range(*i) for i in segments]))) return np.concatenate((grays, ncar)) @property def pw_colors(self) -> np.ndarray: - - ''' Default color map for Precipitable Water ''' - - grays = cm.get_cmap('Greys', 5)([1, 3]) - ncar = cm.get_cmap(self.vspec.get('cmap'), 128) \ - ([120, 100, 95, 85, 80, 70, 65, 50, 25, 22, 20, 17]) - bupu = cm.get_cmap('BuPu', 15)([13, 14]) - cool = cm.get_cmap('cool', 15)([10, 9, 12, 7, 5]) + """Default color map for Precipitable Water.""" + + grays = get_cmap("Greys", 5)([1, 3]) + ncar = get_cmap(self.vspec.get("cmap"), 128)( + [120, 100, 95, 85, 80, 70, 65, 50, 25, 22, 20, 17] + ) + bupu = get_cmap("BuPu", 15)([13, 14]) + cool = get_cmap("cool", 15)([10, 9, 12, 7, 5]) return np.concatenate((grays, ncar, bupu, cool)) @property def radiation_colors(self) -> np.ndarray: + """Default color map for Longwave Radiation.""" - ''' Default color map for Longwave Radiation ''' - - grays = cm.get_cmap('Greys', 2)([0]) - ncar = cm.get_cmap(self.vspec.get('cmap'), 128) \ - (range(0, 126, 5)) + grays = get_cmap("Greys", 2)([0]) + ncar = get_cmap(self.vspec.get("cmap"), 128)(range(0, 126, 5)) return np.concatenate((grays, ncar)) @property def radiation_bw_colors(self) -> np.ndarray: + """Default grayscale map for Outgoing Shortwave Radiation.""" - ''' Default grayscale map for Outgoing Shortwave Radiation ''' - - return cm.get_cmap(self.vspec.get('cmap'), 128) \ - (range(30, 110)) + return get_cmap(self.vspec.get("cmap"), 128)(range(30, 110)) @property def radiation_mix_colors(self) -> np.ndarray: + """Default color map for Longwave Radiation.""" - ''' Default color map for Longwave Radiation ''' - - ncar = ctables.colortables.get_colortable(self.vspec.get('cmap')) \ - (range(0, 40)) - grays = cm.get_cmap('Greys', 100)(range(10, 100)) + ncar = ctables.colortables.get_colortable(self.vspec.get("cmap"))(range(40)) + grays = get_cmap("Greys", 100)(range(10, 100)) return np.concatenate((ncar, grays)) @property def rainbow11_colors(self) -> np.ndarray: + """Default color map for Hourly Wildfire Potential.""" - ''' Default color map for Hourly Wildfire Potential ''' - - grays = cm.get_cmap('Greys', 2)([0]) - ncar = cm.get_cmap(self.vspec.get('cmap'), 128) \ - ([18, 20, 25, 
50, 60, 70, 80, 85, 90, 100, 120]) + grays = get_cmap("Greys", 2)([0]) + ncar = get_cmap(self.vspec.get("cmap"), 128)([18, 20, 25, 50, 60, 70, 80, 85, 90, 100, 120]) return np.concatenate((grays, ncar)) @property def rainbow12_colors(self) -> np.ndarray: + """Default color map for ACPCP, ACSNOD, HLCY, RH, and SNOD.""" - ''' Default color map for ACPCP, ACSNOD, HLCY, RH, and SNOD ''' - - grays = cm.get_cmap('Greys', 2)([0]) - ncar = cm.get_cmap(self.vspec.get('cmap'), 128) \ - ([15, 18, 20, 25, 50, 60, 70, 80, 85, 90, 100, 120]) + grays = get_cmap("Greys", 2)([0]) + ncar = get_cmap(self.vspec.get("cmap"), 128)( + [15, 18, 20, 25, 50, 60, 70, 80, 85, 90, 100, 120] + ) return np.concatenate((grays, ncar)) @property def rainbow12_reverse(self) -> np.ndarray: - - ''' Default color map for min helicity ''' + """Default color map for min helicity.""" return np.flip(self.rainbow12_colors, 0) @property def rainbow16_colors(self) -> np.ndarray: + """Default color map for helicity.""" - ''' Default color map for helicity ''' - - grays = cm.get_cmap('Greys', 5)([0, 2]) - ncar = cm.get_cmap(self.vspec.get('cmap'), 128) \ - ([9, 15, 18, 20, 25, 48, 57, 65, 74, 79, 87, 94, 102, 109, 120]) + grays = get_cmap("Greys", 5)([0, 2]) + ncar = get_cmap(self.vspec.get("cmap"), 128)( + [9, 15, 18, 20, 25, 48, 57, 65, 74, 79, 87, 94, 102, 109, 120] + ) return np.concatenate((grays, ncar)) @property def shear_colors(self) -> np.ndarray: + """Default color map for Vertical Shear.""" - ''' Default color map for Vertical Shear ''' - - ctable = cm.get_cmap(self.vspec.get('cmap'), 16) \ - (range(5, 15)) + ctable = get_cmap(self.vspec.get("cmap"), 16)(range(5, 15)) ctable[9] = [1, 1, 1, 1] return ctable @property def slw_colors(self) -> np.ndarray: + """Default color map for Max Vertically Integrated Graupel.""" - ''' Default color map for Max Vertically Integrated Graupel ''' - - white = cm.get_cmap('Greys', 3)([0]) - purples = cm.get_cmap('nipy_spectral', 30)([3, 1]) - ncar = cm.get_cmap(self.vspec.get('cmap'), 15) \ - ([2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]) + white = get_cmap("Greys", 3)([0]) + purples = get_cmap("nipy_spectral", 30)([3, 1]) + ncar = get_cmap(self.vspec.get("cmap"), 15)([2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14]) return np.concatenate((white, purples, ncar)) @property def smoke_colors(self) -> np.ndarray: + """Default color map for smoke plots.""" - ''' Default color map for smoke plots. 
''' - - white = cm.get_cmap('Greys', 2)([0]) - blues = cm.get_cmap('Blues', 6)(range(1, 5)) - green_yellow_red = cm.get_cmap('RdYlGn_r', 18)([1, 3, 5, 9, 12, 13, 14, 16, 18]) - purple = np.array([mpcolors.to_rgba('xkcd:vivid purple')]) + white = get_cmap("Greys", 2)([0]) + blues = get_cmap("Blues", 6)(range(1, 5)) + green_yellow_red = get_cmap("RdYlGn_r", 18)([1, 3, 5, 9, 12, 13, 14, 16, 18]) + purple = np.array([mpcolors.to_rgba("xkcd:vivid purple")]) return np.concatenate((white, blues, green_yellow_red, purple)) + @property + def smoke_emissions_colors(self) -> list[str]: + """Default color map for smoke emissions plot.""" + + # The scatter plot utility won't accept anything but named colors + return [ + "white", + "rebeccapurple", + "royalblue", + "cadetblue", + "yellowgreen", + "mediumaquamarine", + "lightgreen", + "yellow", + "gold", + "orange", + "darkorange", + "orangered", + "red", + "firebrick", + ] @property def snow_colors(self) -> np.ndarray: + """Default color map for Snow fields.""" - ''' Default color map for Snow fields ''' - - grays = cm.get_cmap('Greys', 5)([0, 2]) - ncar = cm.get_cmap(self.vspec.get('cmap'), 128) \ - ([15, 18, 20, 25, 50, 60, 74, 81, 85, 90, 100]) + grays = get_cmap("Greys", 5)([0, 2]) + ncar = get_cmap(self.vspec.get("cmap"), 128)([15, 18, 20, 25, 50, 60, 74, 81, 85, 90, 100]) return np.concatenate((grays, ncar)) @property def soilm_colors(self) -> np.ndarray: + """Default color map for Soil Moisture Availability.""" - ''' Default color map for Soil Moisture Availability ''' - - ncar = cm.get_cmap(self.vspec.get('cmap'), 128)(range(0, 122, 11)) - return ncar + return get_cmap(self.vspec.get("cmap"), 128)(range(0, 122, 11)) @property def soilw_colors(self) -> np.ndarray: + """Default color map for Soil Moisture.""" - ''' Default color map for Soil Moisture ''' - - grays = cm.get_cmap('Greys', 2)([1]) - ncar = cm.get_cmap(self.vspec.get('cmap'), 110) \ - ([0, 10, 20, 25, 35, 40, 60, 73, 80, 85, 95, 105]) + grays = get_cmap("Greys", 2)([1]) + ncar = get_cmap(self.vspec.get("cmap"), 110)( + [0, 10, 20, 25, 35, 40, 60, 73, 80, 85, 95, 105] + ) return np.concatenate((grays, ncar)) @property def t_colors(self) -> np.ndarray: - - ''' Default color map for Potential Temperature ''' + """Default color map for Potential Temperature.""" ncolors = len(self.clevs) - return cm.get_cmap(self.vspec.get('cmap', 'jet'), ncolors)(range(ncolors)) + return get_cmap(self.vspec.get("cmap", "jet"), ncolors)(range(ncolors)) @property def tsfc_colors(self) -> np.ndarray: + """Default color map for Surface Temperature.""" - ''' Default color map for Surface Temperature ''' - - purples = cm.get_cmap('Purples', 16)([14, 12, 8, 6, 4, 2]) - ncar = cm.get_cmap(self.vspec.get('cmap'), 128) \ - ([15, 20, 25, 33, 50, 60, 70, 80, 85, 90, 115]) - grays = cm.get_cmap('Greys', 15)([2, 4, 6, 8]) + purples = get_cmap("Purples", 16)([14, 12, 8, 6, 4, 2]) + ncar = get_cmap(self.vspec.get("cmap"), 128)([15, 20, 25, 33, 50, 60, 70, 80, 85, 90, 115]) + grays = get_cmap("Greys", 15)([2, 4, 6, 8]) return np.concatenate((purples, ncar, grays)) @property def terrain_colors(self) -> np.ndarray: + """Default color map for Terrain.""" - ''' Default color map for Terrain ''' - - ctable = ctables.colortables.get_colortable(self.vspec.get('cmap')) \ - (range(54, 157, 6)) - return ctable + return np.asarray( + ctables.colortables.get_colortable(self.vspec.get("cmap"))(range(54, 157, 6)) + ) @property def ua_temp_colors(self) -> np.ndarray: + """Default color map for Upper-Air Temperature.""" - ''' Default 
color map for Upper-Air Temperature ''' - - grays = cm.get_cmap('Greys', 27)(range(17, 1, -2)) - purples = cm.get_cmap('Purples', 27)(range(17, 1, -2)) - ncar = cm.get_cmap(self.vspec.get('cmap'), 128) \ - ([30, 34, 36, 40, 45, 55, 60, 65, 70, \ - 75, 80, 85, 90, 95, 100, 115]) + grays = get_cmap("Greys", 27)(range(17, 1, -2)) + purples = get_cmap("Purples", 27)(range(17, 1, -2)) + ncar = get_cmap(self.vspec.get("cmap"), 128)( + [30, 34, 36, 40, 45, 55, 60, 65, 70, 75, 80, 85, 90, 95, 100, 115] + ) return np.concatenate((grays, purples, ncar)) @property def vis_colors(self) -> np.ndarray: - - ''' Default color map for Visibility + """ + Default color map for Visibility. section names are based on Aviation Flight Rule visibility categories LIFR (Low Instrument Flight Rules) -- less than 1 mile @@ -523,55 +460,52 @@ def vis_colors(self) -> np.ndarray: VFR (Visual Flight Rules) -- greater than 5 miles the gray range is arbitrary compared to the official flight levels - ''' + """ - lifr = cm.get_cmap('RdPu_r', 20)(range(0, 11)) - ifr = cm.get_cmap('autumn', 30)(range(0, 30)) - mvfr = cm.get_cmap('Blues', 20)(range(10, 20)) - vfr1 = cm.get_cmap('YlGn_r', 60)(range(0, 50)) - vfr2 = cm.get_cmap('Greys', 25)(np.full(10, 9)) - hi01 = cm.get_cmap('Greys', 25)(np.full(10, 6)) - hi02 = cm.get_cmap('Greys', 25)(np.full(20, 3)) - hi03 = cm.get_cmap('Greys', 25)(np.full(1, 0)) + lifr = get_cmap("RdPu_r", 20)(range(11)) + ifr = get_cmap("autumn", 30)(range(30)) + mvfr = get_cmap("Blues", 20)(range(10, 20)) + vfr1 = get_cmap("YlGn_r", 60)(range(50)) + vfr2 = get_cmap("Greys", 25)(np.full(10, 9)) + hi01 = get_cmap("Greys", 25)(np.full(10, 6)) + hi02 = get_cmap("Greys", 25)(np.full(20, 3)) + hi03 = get_cmap("Greys", 25)(np.full(1, 0)) return np.concatenate((lifr, ifr, mvfr, vfr1, vfr2, hi01, hi02, hi03)) @property def vvel_colors(self) -> np.ndarray: + """Default color map for Vetical Velocity.""" - ''' Default color map for Vetical Velocity ''' - - ncar1 = cm.get_cmap(self.vspec.get('cmap'), 128)([15, 18, 20, 25]) - grays = cm.get_cmap('Greys', 2)([0]) - ncar2 = cm.get_cmap(self.vspec.get('cmap'), 128)([60, 70, 80, 85, 90, 100, 120]) + ncar1 = get_cmap(self.vspec.get("cmap"), 128)([15, 18, 20, 25]) + grays = get_cmap("Greys", 2)([0]) + ncar2 = get_cmap(self.vspec.get("cmap"), 128)([60, 70, 80, 85, 90, 100, 120]) return np.concatenate((ncar1, grays, ncar2)) @property def vort_colors(self) -> np.ndarray: + """Default color map for Absolute Vorticity.""" - ''' Default color map for Absolute Vorticity ''' - - grays = cm.get_cmap('Greys', 2)([0]) - ncar = cm.get_cmap(self.vspec.get('cmap'), 128) \ - ([15, 18, 20, 25, 50, 60, 70, 80, 83, 90, 100, 120]) + grays = get_cmap("Greys", 2)([0]) + ncar = get_cmap(self.vspec.get("cmap"), 128)( + [15, 18, 20, 25, 50, 60, 70, 80, 83, 90, 100, 120] + ) return np.concatenate((grays, ncar)) @property def wind_colors(self) -> np.ndarray: + """Default color map for Wind Speed.""" - ''' Default color map for Wind Speed ''' - - low = cm.get_cmap(self.vspec.get('cmap'), 129)(range(129, 109, -5)) - high1 = cm.get_cmap(self.vspec.get('cmap'), 129)(range(16, 29, 3)) - high2 = cm.get_cmap(self.vspec.get('cmap'), 129)(range(48, 103, 6)) + low = get_cmap(self.vspec.get("cmap"), 129)(range(129, 109, -5)) + high1 = get_cmap(self.vspec.get("cmap"), 129)(range(16, 29, 3)) + high2 = get_cmap(self.vspec.get("cmap"), 129)(range(48, 103, 6)) return np.concatenate((low, high1, high2)) @property def wind_colors_high(self) -> np.ndarray: + """Default color map for High Wind Speed.""" - ''' Default 
color map for High Wind Speed ''' - - low = cm.get_cmap(self.vspec.get('cmap'), 129)(range(129, 108, -7)) - high1 = cm.get_cmap(self.vspec.get('cmap'), 129)(range(16, 29, 4)) - high2 = cm.get_cmap(self.vspec.get('cmap'), 129)(range(46, 95, 7)) + low = get_cmap(self.vspec.get("cmap"), 129)(range(129, 108, -7)) + high1 = get_cmap(self.vspec.get("cmap"), 129)(range(16, 29, 4)) + high2 = get_cmap(self.vspec.get("cmap"), 129)(range(46, 95, 7)) return np.concatenate((low, high1, high2)) diff --git a/adb_graphics/utils.py b/adb_graphics/utils.py index f91c8122..7b953b06 100644 --- a/adb_graphics/utils.py +++ b/adb_graphics/utils.py @@ -1,66 +1,94 @@ -# pylint: disable=invalid-name -''' +""" A set of generic utilities available to all the adb_graphics components. -''' +""" -import argparse -import datetime as dt import functools import glob -import importlib as il -from math import atan2, degrees -from multiprocessing import Process -import os -import subprocess +import re import sys import time +from collections.abc import Callable +from datetime import datetime, timedelta +from importlib import import_module +from importlib.util import find_spec +from pathlib import Path +from typing import Any +from zipfile import ZipFile import numpy as np import yaml +from uwtools.api.config import YAMLConfig +from uwtools.config.support import uw_yaml_loader + + +def cfgrib_spec(config: dict, model: str) -> dict: + """ + Given a cfgrib block and a model, return the appropriate sub-block, if it exists. + """ + spec: dict = config.get(model, {}) + if spec and isinstance(spec, dict): + return spec + return config + + +def _write_zip(files_to_zip: list[Path], zipf: Path | str): + """Write the zip file, overwriting existing files that have a newer modification timestamp.""" + print(f"Writing to zip file {zipf} for files like: {files_to_zip[0].name}") + overwrite = {} + with ZipFile(zipf, "a") as zf: + arcfiles = zf.namelist() + for file in files_to_zip: + if file.name in arcfiles: + arcinfo = zf.getinfo(file.name) + arc_mod_time = datetime(*arcinfo.date_time) + file_mod_time = datetime.fromtimestamp(file.stat().st_mtime) + if file_mod_time > arc_mod_time: + overwrite[file.name] = file + else: + zf.write(file, arcname=Path(file).name) + if overwrite: + tmp_path = Path(f"{zipf}.tmp") + with ZipFile(tmp_path, "w") as tmp: + for item in zf.namelist(): + if (arcfile := zf.getinfo(item).filename) not in overwrite: + tmp.write(arcfile, str(zf.read(item))) + for arcname, file in overwrite.items(): + tmp.write(file, arcname=arcname) + tmp_path.rename(zipf) -def create_zip(files_to_zip, zipf): - ''' Create a zip file. Use a locking mechanism -- write a lock file to disk. ''' +def create_zip(files_to_zip: list[Path], zipf: Path | str): + """Create a zip file. Use a locking mechanism -- write a lock file to disk.""" - lock_file = f'{zipf}._lock' + lock_file = Path(f"{zipf}._lock") retry = 2 count = 0 while True: - if not os.path.exists(lock_file): + if not lock_file.exists(): # Create the lock - fd = open(lock_file, 'w') - print(f'Writing to zip file {zipf} for files like: {files_to_zip[0][-10:]}') - - cmd = f'zip -uj {zipf} {" ".join(files_to_zip)}' - print(f'Running command: {cmd}') + lock_file.touch() try: - subprocess.run(cmd, - check=True, - shell=True, - ) - except: # pylint: disable=bare-except - print(f'Error on writing zip file! {sys.exc_info()[0]}') + _write_zip(files_to_zip, zipf) + except Exception as e: count += 1 if count >= retry: - raise + msg = "Error writing zip file!" 
+ raise RuntimeError(msg) from e else: # Zipping was successful. Remove files that were zipped for file_to_zip in files_to_zip: - if os.path.exists(file_to_zip): - os.remove(file_to_zip) + file_to_zip.unlink(missing_ok=True) + break finally: - # Remove the lock - fd.close() - if os.path.exists(lock_file): - os.remove(lock_file) - break + lock_file.unlink(missing_ok=True) + # Wait before trying to obtain the lock on the file - time.sleep(5) + time.sleep(1) -def fhr_list(args): - ''' +def fhr_list(args: list[int]) -> list[int]: + """ Given an argparse list argument, return the sequence of forecast hours to process. @@ -72,9 +100,7 @@ def fhr_list(args): Length > 3: List as is argparse should provide a list of at least one item (nargs='+'). - - Must ensure that the list contains integers. - ''' + """ args = args if isinstance(args, list) else [args] arg_len = len(args) @@ -84,41 +110,37 @@ def fhr_list(args): return args -def from_datetime(date): - ''' Return a string like YYYYMMDDHH given a datetime object. ''' - return dt.datetime.strftime(date, '%Y%m%d%H') + +def from_datetime(date: datetime) -> str: + """Return a string like YYYYMMDDHH given a datetime object.""" + return datetime.strftime(date, "%Y%m%d%H") + def get_func(val: str): + """ + Gets a callable function. - ''' Given an input string, val, returns the corresponding callable function. This function is borrowed from stackoverflow.com response to "Python: YAML dictionary of functions: how to load without converting to strings." - ''' + """ - if '.' in val: - module_name, fun_name = val.rsplit('.', 1) - else: - module_name = '__main__' - fun_name = val + module_name, fun_name = val.rsplit(".", 1) - mod_spec = il.util.find_spec(module_name, package='adb_graphics') + mod_spec = find_spec(module_name, package="adb_graphics") + if mod_spec is None: + mod_spec = find_spec("." + module_name, package="adb_graphics") if mod_spec is None: - mod_spec = il.util.find_spec('.' + module_name, package='adb_graphics') + msg = f"Could not find {module_name} in current environment." + raise ValueError(msg) - try: - __import__(mod_spec.name) - except ImportError as exc: - print(f'Could not load {module_name} while trying to locate function in get_func') - raise exc + import_module(mod_spec.name) module = sys.modules[mod_spec.name] - fun = getattr(module, fun_name) - return fun + return getattr(module, fun_name) -# pylint: disable=unused-argument -def join_ranges(loader, node): - ''' +def join_ranges(_loader: yaml.SafeLoader, node: yaml.Node) -> Any: + """ Merge two or more different ranges into a single array for color bar clevs. e.g.: in default_specs.yml, clevs for visibility can be assigned as @@ -130,220 +152,167 @@ def join_ranges(loader, node): resolution than the rest, while keeping the colorbar from looking squished. Note that a "yaml.add_constructor" is required, as shown after the method.
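+
+    A minimal sketch of the behavior (the values here are illustrative, not
+    taken from default_specs.yml):
+
+        clevs: !join_ranges
+            - [0, 5, 1]
+            - [5, 30, 5]
+
+    loads as np.concatenate((np.arange(0, 5, 1), np.arange(5, 30, 5))).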
- ''' + """ list_ = [] for seq_node in node.value: - range_args = [] - for scalar_node in seq_node.value: - range_args.append(float(scalar_node.value)) + range_args = [float(scalar_node.value) for scalar_node in seq_node.value] list_.append(np.arange(*range_args)) return np.concatenate(list_, axis=0) -# SafeLoader doesn't seem compatible with our numpy contructors, using Loader here -yaml.add_constructor("!join_ranges", join_ranges, Loader=yaml.Loader) -# pylint: disable=invalid-name, too-many-locals -def label_line(ax, label, segment, **kwargs): +def arange_constructor(_loader: yaml.SafeLoader, node: yaml.Node) -> Any: + return np.arange(*[float(n.value) for n in node.value]) - ''' - Label a single line with line2D label data. - Input: +def load_yaml(config: Path | str) -> YAMLConfig: + yaml.add_constructor("!join_ranges", join_ranges, Loader=uw_yaml_loader()) + yaml.add_constructor("!arange", arange_constructor, Loader=uw_yaml_loader()) + return YAMLConfig(config) + - ax the SkewT object axis - label label to be used for the current line - segment a list (array) of values for the current line +def load_sites(arg: str | Path) -> list[str]: + """Return the contents of the sites file, if it exists.""" + path = Path(arg) + with path.open() as sites_file: + sites: list[str] = sites_file.readlines() + return sites - Key Word Arguments - align optional bool to enable the rotation of the label to line angle - end the end of the line at which to put the label. 'bottom' or 'top' - offset index to use for the "end" of the array +def load_specs(arg: str | Path) -> YAMLConfig: + """Check to make sure arg file exists. Return its contents.""" - Any kwargs accepted by matplotlib's text box. - ''' + spec_file = Path(arg) + if not spec_file.exists(): + msg = f"The spec file {spec_file} does not exist." + raise FileNotFoundError(msg) + specs = load_yaml(spec_file) + specs["file"] = spec_file - # Strip non-text-box key word arguments and set default if they don't exist - align = kwargs.pop('align', True) - end = kwargs.pop('end', 'bottom') - offset = kwargs.pop('offset', 0) + return specs - # Label location - if end == 'bottom': - x, y = segment[0 + offset, :] - ip = 1 + offset - elif end == 'top': - x, y = segment[-1 - offset, :] - ip = -1 - offset - if align: - #Compute the slope - dx = segment[ip, 0] - segment[ip-1, 0] - dy = segment[ip, 1] - segment[ip-1, 1] - ang = degrees(atan2(dy, dx)) +def numeric_level(level: str | None = None) -> tuple[float | int | str, str]: + """ + Split the numeric level and unit associated with the level key. - #Transform to screen co-ordinates - pt = np.array([x, y]).reshape((1, 2)) - trans_angle = ax.transData.transform_angles(np.array((ang, )), pt)[0] + A blank string is returned for lev_val for levels that do not contain a + numeric, e.g., 'sfc' or 'ua'. + """ - if end == 'top': - trans_angle -= 180 + level = level if level is not None else "" + if m := re.match(r"^([0-9.]+)?([a-z]+)?([0-9.]+)?$", level): + groups = m.groups() + units = groups[1] + value = groups[0] or groups[2] + for convert in (int, float): + try: + return convert(value), units + except (TypeError, ValueError): # noqa: PERF203 + pass + return "", "" - else: - trans_angle = 0 - #Set a bunch of keyword arguments - if ('horizontalalignment' not in kwargs) and ('ha' not in kwargs): - kwargs['ha'] = 'center' +def old_enough(age: int, file_path: Path | str): + """ + Helper function to test the age of a file. 
- if ('verticalalignment' not in kwargs) and ('va' not in kwargs): - kwargs['va'] = 'center' + Input: - if 'backgroundcolor' not in kwargs: - kwargs['backgroundcolor'] = ax.get_facecolor() + age desired age in minutes + file_path full path to file to check + + Output: - if 'clip_on' not in kwargs: - kwargs['clip_on'] = True + bool whether the file is at least age minutes old + """ - if 'fontsize' not in kwargs: - kwargs['fontsize'] = 'larger' + file_path = Path(file_path) if isinstance(file_path, str) else file_path - if 'fontweight' not in kwargs: - kwargs['fontweight'] = 'bold' + file_time = datetime.fromtimestamp(file_path.stat().st_ctime) + max_age = datetime.now() - timedelta(minutes=age) - # Larger value (e.g., 2.0) to move box in front of other diagram elements - if 'zorder' not in kwargs: - kwargs['zorder'] = 1.50 + return file_time < max_age - # Place the text box label on the line. - ax.text(x, y, label, rotation=trans_angle, **kwargs) -def label_lines(ax, lines, labels, offset=0, **kwargs): +def path_exists(path: Path | str): + """Checks whether a file exists, and returns the path if it does.""" - ''' - Plots labels on a set of lines from SkewT. + ret_path = Path(path) + if not ret_path.exists(): + msg = f"{path} does not exist!" + raise FileNotFoundError(msg) - Input: + return ret_path - ax the SkewT object axis - lines the SkewT object special lines - labels list of labels to be used - offset index to use for the "end" of the array - Key Word Arguments +def set_level(level: str, model: str, spec: dict): + """ + Given the default_specs level string, extract and set a numeric level in the cfgrib block. + """ + nlevel, _ = numeric_level(level=level) + level_info = any( + key + for keys in cfgrib_spec(spec["cfgrib"], model) + for key in ("level", "top", "bottom", "Surface") + if key in keys + ) + if nlevel and not level_info: + if spec["cfgrib"].get(model) is not None: + spec["cfgrib"][model]["level"] = nlevel + else: + spec["cfgrib"]["level"] = nlevel - color line color - Along with any other kwargs accepted by matplotlib's text box. - ''' +def timer(func: Callable): + """Decorator function that provides an elapsed time for a method.""" - if 'color' not in kwargs: - kwargs['color'] = lines.get_color()[0] + @functools.wraps(func) + def wrapper_timer(*args, **kwargs): + tic = time.perf_counter() + value = func(*args, **kwargs) + toc = time.perf_counter() + elapsed_time = toc - tic + print(f"{func.__name__} Elapsed time: {elapsed_time:0.4f} seconds") + return value - for i, line in enumerate(lines.get_segments()): - label = int(labels[i]) - label_line(ax, label, line, align=True, offset=offset, **kwargs) + return wrapper_timer -def load_sites(arg): - ''' Check that the sites file exists, and return its contents. ''' +def to_datetime(string: str): + """Return a datetime object given a string like YYYYMMDDHH.""" - # Check that the file exists - path = path_exists(arg) + return datetime.strptime(string, "%Y%m%d%H") - with open(path, 'r') as sites_file: - sites = sites_file.readlines() - return sites -def uniq_wgrib2_list(inlist): - ''' Given a list of wgrib2 output fields, returns a uniq list of fields for +def uniq_wgrib2_list(inlist: list[str]): + """ + Given a list of wgrib2 output fields, returns a unique list of fields for simplifying a grib2 dataset. Uniqueness is defined by the wgrib output from field 3 (colon delimted) onward, although the original full grib record must be included in the wgrib2 command below. 
- ''' + """ uniq_field_set = set() uniq_list = [] for infield in inlist: - infield_info = infield.split(':') - if len(infield_info) <= 3: + infield_info = infield.split(":") + if len(infield_info) <= 3: # noqa: PLR2004 continue - infield_str = ':'.join(infield_info[3:]) + infield_str = ":".join(infield_info[3:]) if infield_str not in uniq_field_set: uniq_list.append(infield) uniq_field_set.add(infield_str) return uniq_list -def load_specs(arg): - - ''' Check to make sure arg file exists. Return its contents. ''' - - spec_file = path_exists(arg) - - with open(spec_file, 'r') as fn: - specs = yaml.load(fn, Loader=yaml.Loader) - - specs['file'] = spec_file - - return specs - -def old_enough(age, file_path): - - ''' - Helper function to test the age of a file. - - Input: - - age desired age in minutes - file_path full path to file to check - - Output: - - bool whether the file is at least age minutes old - ''' - - file_time = dt.datetime.fromtimestamp(os.path.getctime(file_path)) - max_age = dt.datetime.now() - dt.timedelta(minutes=age) - - return file_time < max_age - -def path_exists(path: str): - - ''' Checks whether a file exists, and returns the path if it does. ''' - - if not os.path.exists(path): - msg = f'{path} does not exist!' - raise argparse.ArgumentTypeError(msg) - - return path - -def timer(func): - - ''' Decorator function that provides an elapsed time for a method. ''' - - @functools.wraps(func) - def wrapper_timer(*args, **kwargs): - tic = time.perf_counter() - value = func(*args, **kwargs) - toc = time.perf_counter() - elapsed_time = toc - tic - print(f"{func.__name__} Elapsed time: {elapsed_time:0.4f} seconds") - return value - return wrapper_timer - -def to_datetime(string): - ''' Return a datetime object give a string like YYYYMMDDHH. ''' - - return dt.datetime.strptime(string, '%Y%m%d%H') @timer -def zip_products(fhr, workdir, zipfiles): - - ''' Spin up a subprocess to zip all the product files into the staged zip files. +def zip_products(fhr: int, workdir: Path, zipfiles: dict) -> None: # pragma: no cover + """ + Spin up a subprocess to zip all the product files into the staged zip files. Input: @@ -353,18 +322,13 @@ def zip_products(fhr, workdir, zipfiles): Output: None - ''' + """ for tile, zipf in zipfiles.items(): - if tile == 'skewt_csv': - file_tmpl = f'*.skewt.*_f{fhr:03d}.csv' + if tile == "skewt_csv": + file_tmpl = f"*.skewt.*_f{fhr:03d}.csv" else: - file_tmpl = f'*_{tile}_*{fhr:02d}.png' - product_files = glob.glob(os.path.join(workdir, file_tmpl)) + file_tmpl = f"*_{tile}_*{fhr:02d}.png" + product_files = [Path(f) for f in glob.glob(str(workdir / file_tmpl))] if product_files: - zip_proc = Process(group=None, - target=create_zip, - args=(product_files, zipf), - ) - zip_proc.start() - zip_proc.join() + create_zip(product_files, zipf) diff --git a/conftest.py b/conftest.py index ea5a1a3c..47395548 100644 --- a/conftest.py +++ b/conftest.py @@ -1,37 +1,69 @@ -''' +""" Add command line options to the pytest suite. Each CLA needs to be defined in pytest_addoption and to have a pytest.fixture function defined. 
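+
+For example, the grib files used by the tests can be overridden on the command
+line (the paths shown are also the defaults returned by the fixtures below):
+
+    pytest --nat-file tests/data/wrfnat_hrconus_16.grib2 \
+           --prs-file tests/data/wrfprs_hrconus_16.grib2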
-''' +""" -import pytest +import glob +from pathlib import Path + +from pytest import fixture + +from adb_graphics.datahandler import gribfile + + +@fixture(scope="session", autouse=True) +def cleanup_data_idx(): + yield # Nothing to be done before tests + print("Removing idx files from test data") + for path in glob.glob("tests/data/*.idx"): + Path(path).unlink() def pytest_addoption(parser): + """Define command line arguments to be parsed.""" + + parser.addoption( + "--nat-file", + action="store", + help="Path to nat-file.", + ) + + parser.addoption( + "--prs-file", + action="store", + help="Path to prs-file.", + ) + - ''' Define command line arguments to be parsed. ''' +@fixture(scope="session") +def natfile(pytestconfig): + """Interface to pass a grib file to pytest.""" + if path := pytestconfig.getoption("--nat-file"): + return Path(path) + return Path("tests", "data", "wrfnat_hrconus_16.grib2") - parser.addoption('--nat-file', - action='store', - help='Path to nat-file.', - ) - parser.addoption('--prs-file', - action='store', - help='Path to prs-file.', - ) +@fixture(scope="session") +def prsfile(pytestconfig): + """Interface to pass a grib file to pytest.""" + if path := pytestconfig.getoption("--prs-file"): + return Path(path) + return Path("tests", "data", "wrfprs_hrconus_16.grib2") -@pytest.fixture -def natfile(request): - ''' Interface to pass a grib file to pytest''' +@fixture(scope="session") +def spec_file(): + """Interface to pass a grib file to pytest.""" + return Path("adb_graphics", "default_specs.yml") - return request.config.getoption('--nat-file') -@pytest.fixture -def prsfile(request): +@fixture(scope="session") +def prs_ds(prsfile): + return gribfile.WholeGribFile(prsfile).datasets - ''' Interface to pass a grib file to pytest''' - return request.config.getoption('--prs-file') +@fixture(scope="session") +def nat_ds(natfile): + return gribfile.WholeGribFile(natfile).datasets diff --git a/create_graphics.py b/create_graphics.py index 9b070101..c140df6e 100644 --- a/create_graphics.py +++ b/create_graphics.py @@ -1,368 +1,368 @@ -# pylint: disable=invalid-name -''' +""" Driver for creating all the SkewT diagrams needed for a specific input dataset. -''' +""" -# pylint: disable=wrong-import-position, wrong-import-order import matplotlib as mpl -mpl.use('Agg') -# pylint: enable=wrong-import-position, wrong-import-order -import argparse -import copy +mpl.use("Agg") +import copy import glob -from multiprocessing import Pool -import os import random import string import subprocess import sys import time +from argparse import ArgumentError, ArgumentParser, Namespace +from multiprocessing import Pool +from pathlib import Path import yaml +from adb_graphics import errors, utils from adb_graphics.datahandler import gribfile -import adb_graphics.errors as errors from adb_graphics.figure_builders import parallel_maps, parallel_skewt from adb_graphics.figures import maps -import adb_graphics.utils as utils - -AIRPORTS = 'static/Airports_locs.txt' +AIRPORTS = "static/Airports_locs.txt" -COMBINED_FN = 'combined_{fhr:03d}_{uniq}.grib2' -TMP_FN = 'combined_{fhr:03d}_{uniq}.tmp.grib2' +COMBINED_FN = "combined_{fhr:03d}_{uniq}.grib2" +TMP_FN = "combined_{fhr:03d}_{uniq}.tmp.grib2" LOG_BREAK = f"{('-' * 80)}\n{('-' * 80)}" -def check_file(cla, fhr, data_root=None, file_tmpl=None, mem=None): - ''' Given the command line arguments, the forecast hour, and a potential - ensemble member, build a full path to the file and ensure it exists. 
''' + +def check_file( + cla: Namespace, + fhr: int, + data_root: Path | None = None, + file_tmpl: str | None = None, + mem: int | None = None, +) -> tuple[Path, bool]: # pragma: no cover + """ + Given the command line arguments, the forecast hour, and a potential + ensemble member, build a full path to the file and ensure it exists. + """ if data_root is None: data_root = cla.data_root[0] if file_tmpl is None: file_tmpl = cla.file_tmpl[0] - grib_path = os.path.join(data_root, file_tmpl) + grib_path = data_root / file_tmpl if mem is not None: - grib_path = grib_path.format(FCST_TIME=fhr, mem=mem) + grib_path = str(grib_path).format(FCST_TIME=fhr, mem=mem) else: - grib_path = grib_path.format(FCST_TIME=fhr) + grib_path = str(grib_path).format(FCST_TIME=fhr) + grib_path = Path(grib_path) - print(f'Checking on file {grib_path}') - old_enough = utils.old_enough(cla.data_age, grib_path) if \ - os.path.exists(grib_path) else False + print(f"Checking on file {grib_path}") + old_enough = utils.old_enough(cla.data_age, grib_path) if grib_path.exists() else False return grib_path, old_enough -def create_skewt(cla, fhr, grib_path, workdir): - ''' Generate arguments for parallel processing of Skew T graphics, - and generate a pool of workers to complete the tasks. ''' - - # Create the file object to load the contents - gfile = gribfile.GribFile(grib_path) - - args = [(cla, fhr, gfile.contents, site, workdir) for site in cla.sites] - - print(f'Queueing {len(args)} Skew Ts') +def create_skewt(cla: Namespace, fhr: int, grib_path: Path, workdir: Path): # pragma: no cover + """ + Generate arguments for parallel processing of Skew T graphics, + and generate a pool of workers to complete the tasks. + """ + ds = gribfile.WholeGribFile(grib_path).datasets + args = [(cla, fhr, ds, site, workdir) for site in cla.sites] + print(f"Queueing {len(args)} Skew Ts") with Pool(processes=cla.nprocs) as pool: pool.starmap(parallel_skewt, args) -def create_maps(cla, fhr, grib_contents, workdir, grib_contents2=None): - ''' Generate arguments for parallel processing of plan-view maps and - generate a pool of workers to complete the task. ''' +def create_maps( + cla: Namespace, + fhr: int, + grib_paths: list[Path], + workdir: Path, + grib_path2: Path | None = None, +): # pragma: no cover + """ + Generate arguments for parallel processing of plan-view maps and + generate a pool of workers to complete the task. 
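+
+    Note: accumulated fields (and grib records whose cfgrib stepRange starts
+    with "-1" or equals "0-0") are skipped at forecast hour 0, since there is
+    nothing to accumulate yet.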
+ """ - model = cla.images[0] + ds = gribfile.WholeGribFile(grib_paths[-1]).datasets for tile in cla.tiles: args = [] for variable, levels in cla.images[1].items(): for level in levels: - # Load the spec for the current variable spec = cla.specs.get(variable, {}).get(level) if not spec: - msg = f'graphics: {variable} {level}' - raise errors.NoGraphicsDefinitionForVariable(msg) - - args.append((cla, fhr, grib_contents, level, model, spec, - variable, workdir, tile, grib_contents2)) + msg = f"graphics: {variable} {level}" + raise errors.NoGraphicsDefinitionForVariableError(msg) + accumulate = spec.get("accumulate", False) + vspec = utils.cfgrib_spec(spec["cfgrib"], cla.images[0]) + grib_acc = ( + vspec.get("stepRange", "").startswith("-1") or vspec.get("stepRange") == "0-0" + ) + if (accumulate or grib_acc) and fhr == 0: + continue + if accumulate: + ads = gribfile.GribFiles(grib_paths, vspec).datasets + + args.append( + ( + cla, + fhr, + ads if accumulate else ds, + level, + variable, + workdir, + tile, + grib_path2, + ) + ) - print(f'Queueing {len(args)} maps') + print(f"Queueing {len(args)} maps") with Pool(processes=cla.nprocs) as pool: pool.starmap(parallel_maps, args) -def gather_gribfiles(cla, fhr, filename, gribfiles): - - ''' Returns the appropriate gribfiles object for the type of graphics being - generated -- whether it's for a single forecast time or all forecast lead - times. ''' - - filenames = {'01fcst': [], 'free_fcst': []} - - fcst_hour = int(fhr) - - first_fcst = 6 if 'global' in cla.images[0] else 1 - if fcst_hour <= first_fcst: - filenames['01fcst'].append(filename) - else: - filenames['free_fcst'].append(filename) - - if gribfiles is None or not cla.all_leads: - - # Create a new GribFiles object, include all hours, or just this one, - # depending on command line argument flag - - gribfiles = gribfile.GribFiles( - coord_dims={'fcst_hr': [fhr]}, - filenames=filenames, - filetype=cla.file_type, - model=cla.images[0], - ) - else: - # Append a single forecast hour to the existing GribFiles object. - gribfiles.coord_dims.get('fcst_hr').append(fhr) - gribfiles.append(filenames) - - return gribfiles - -def generate_tile_list(arg_list): - - ''' Given the input arguments -- a list if the argument is provided, return +def generate_tile_list(arg_list: list) -> list[str]: # pragma: no cover + """ + Given the input arguments -- a list if the argument is provided, return the list. If no arg is provided, defaults to the full domain, and if 'all' - is provided, the full domain, and all subdomains are plotted. ''' + is provided, the full domain, and all subdomains are plotted. + """ if not arg_list: - return ['full'] + return ["full"] - if ',' in arg_list[0]: - arg_list = arg_list[0].split(',') + if "," in arg_list[0]: + arg_list = arg_list[0].split(",") - hrrr_ak_only = ('Anchorage', 'AKRange', 'Juneau') - rap_only = ('AK', 'AKZoom', 'conus', 'HI') - if 'all' in arg_list: - all_list = ['full'] + list(maps.TILE_DEFS.keys()) + hrrr_ak_only = ("Anchorage", "AKRange", "Juneau") + rap_only = ("AK", "AKZoom", "conus", "HI") + if "all" in arg_list: + all_list = ["full", *list(maps.TILE_DEFS.keys())] return [tile for tile in all_list if tile not in hrrr_ak_only + rap_only] return arg_list -def load_images(arg): - ''' Check that input image file exists, and that it contains the +def load_images(arg: list[Path | str]): # pragma: no cover + """ + Check that input image file exists, and that it contains the requested section. 
Return a 2-list (required by argparse) of the file path and dictionary of images to be created. - ''' + """ - # Agument is expected to be a 2-list of file name and internal + # Argument is expected to be a 2-list of file name and internal # section name. - image_file = arg[0] + image_file = Path(arg[0]) image_set = arg[1] # Check that the file exists - image_file = utils.path_exists(image_file) + assert image_file.exists() # Load yaml file - with open(image_file, 'r') as fn: + with Path.open(image_file) as fn: images = yaml.load(fn, Loader=yaml.Loader)[image_set] - return [images.get('model'), images.get('variables')] + return [images.get("model"), images.get("variables")] -def parse_args(argv): - ''' Set up argparse command line arguments, and return the Namespace - containing the settings. ''' +def parse_args(argv: list) -> Namespace: # pragma: no cover + """ + Set up argparse command line arguments, and return the Namespace + containing the settings. + """ - parser = argparse.ArgumentParser(description='Script to drive the \ - creation of graphices files.') + parser = ArgumentParser(description="Script to drive the creation of graphics files.") # Positional argument parser.add_argument( - 'graphic_type', - choices=['maps', 'skewts', 'enspanel', 'diff'], - help='The type of graphics to create.', - ) + "graphic_type", + choices=["maps", "skewts", "enspanel", "diff"], + help="The type of graphics to create.", + ) # Short args parser.add_argument( - '-r', - dest='img_res', + "-r", + dest="img_res", default=72, required=False, - help='Resolution of output images in DPI. Recommended to stay below 1000. Default = 72', + help="Resolution of output images in DPI. Recommended to stay below 1000. Default = 72", type=int, - ) + ) parser.add_argument( - '-a', - dest='data_age', + "-a", + dest="data_age", default=3, - help='Age in minutes required for data files to be complete. Default = 3', + help="Age in minutes required for data files to be complete. Default = 3", type=int, - ) + ) parser.add_argument( - '-d', - dest='data_root', - help='Cycle-independant data directory location. Provide more than one \ + "-d", + dest="data_root", + help="Cycle-independant data directory location. Provide more than one \ data path if data input files should be combined. When providing \ multiple options, the same number of options is required for the \ - --file_tmpl flag.', - nargs='+', + --file_tmpl flag.", + nargs="+", required=True, - ) + type=Path, + ) parser.add_argument( - '-f', - dest='fcst_hour', - help='A list describing forecast hours. If one argument, \ + "-f", + dest="fcst_hour", + help="A list describing forecast hours. If one argument, \ one fhr will be processed. If 2 or 3 arguments, a sequence \ of forecast hours [start, stop, [increment]] will be \ processed. 
If more than 3 arguments, the list is processed \ - as-is.', - nargs='+', + as-is.", + nargs="+", required=True, type=int, - ) + ) parser.add_argument( - '-m', - default='Unnamed Experiment', - dest='model_name', - help='string to use in title of graphic.', + "-m", + default="Unnamed Experiment", + dest="model_name", + help="String to use in title of graphic.", type=str, - ) + ) parser.add_argument( - '-n', + "-n", default=1, - dest='nprocs', - help='Number of processes to use for parallelization.', + dest="nprocs", + help="Number of processes to use for parallelization.", type=int, - ) + ) parser.add_argument( - '-o', - dest='output_path', - help='Directory location desired for the output graphics files.', + "-o", + dest="output_path", + help="Directory location desired for the output graphics files.", required=True, - ) + type=Path, + ) parser.add_argument( - '-s', - dest='start_time', - help='Start time in YYYYMMDDHH format', + "-s", + dest="start_time", + help="Start time in YYYYMMDDHH format", required=True, type=utils.to_datetime, - ) + ) parser.add_argument( - '-w', - dest='wait_time', + "-w", + dest="wait_time", default=10, - help='Time in minutes to wait on data files to be available. Default = 10', + help="Time in minutes to wait on data files to be available. Default = 10", type=int, - ) + ) parser.add_argument( - '-z', - dest='zip_dir', - help='Full path to zip directory.', - ) + "-z", + dest="zip_dir", + help="Full path to zip directory.", + ) # Long args parser.add_argument( - '--all_leads', - action='store_true', - help='Use --all_leads to accumulate all forecast lead times.', - ) + "--all_leads", + action="store_true", + help="Use --all_leads to accumulate all forecast lead times.", + ) parser.add_argument( - '--file_tmpl', - default='wrfnat_hrconus_{FCST_TIME:02d}.grib2', - nargs='+', - help='File naming convention. Use FCST_TIME to indicate forecast hour. \ + "--file_tmpl", + default="wrfnat_hrconus_{FCST_TIME:02d}.grib2", + nargs="+", + help="File naming convention. Use FCST_TIME to indicate forecast hour. \ Provide more than one template when data files should be combined. \ When providing multiple options, the same number of options is required \ - for the -d flag.', \ - ) + for the -d flag.", + ) parser.add_argument( - '--file_type', - choices=('nat', 'prs'), - default='nat', - help='Type of levels contained in grib file.', - ) + "--file_type", + choices=("nat", "prs"), + default="nat", + help="Type of levels contained in grib file.", + ) # SkewT-specific args - skewt_group = parser.add_argument_group('SkewT Arguments') + skewt_group = parser.add_argument_group("SkewT Arguments") skewt_group.add_argument( - '--max_plev', - help='Maximum pressure level to plot for profiles.', + "--max_plev", + help="Maximum pressure level to plot for profiles.", type=int, - ) + ) skewt_group.add_argument( - '--sites', - help='Path to a sites file.', + "--sites", + help="Path to a sites file.", type=utils.load_sites, - ) + ) # Map-specific args - map_group = parser.add_argument_group('Map Arguments') + map_group = parser.add_argument_group("Map Arguments") map_group.add_argument( - '--images', - help='Path to YAML config file specifying which \ - variables to map and the top-level section to use.', - metavar=('[FILE,', 'SECTION]'), + "--images", + help="Path to YAML config file specifying which \ + variables to map and the top-level section to use.", + metavar=("[FILE,", "SECTION]"), nargs=2, - ) + ) map_group.add_argument( - '--obs_file_path', - help='Path to an observation file. 
Currently this \ + "--obs_file_path", + help="Path to an observation file. Currently this \ feature is only supported for ensemble panel plots and \ - composite reflectivity.', + composite reflectivity.", type=utils.path_exists, - ) + ) map_group.add_argument( - '--specs', - default='adb_graphics/default_specs.yml', - help='Path to the specs YAML file.', - ) + "--specs", + default="adb_graphics/default_specs.yml", + help="Path to the specs YAML file.", + ) map_group.add_argument( - '--subh_freq', + "--subh_freq", default=60, - help='Sub-hourly frequency in minutes.', - ) + help="Sub-hourly frequency in minutes.", + ) map_group.add_argument( - '--tiles', - default=['full'], - help='The domains to plot. Choose from any of those listed. Special ' \ - 'choices: full is full model output domain, and all is the full domain, ' \ - 'plus all of the sub domains. ' \ - f'Choices: {["full", "all"] + maps.FULL_TILES + list(maps.TILE_DEFS.keys())}', - nargs='+', - ) + "--tiles", + default=["full"], + help="The domains to plot. Choose from any of those listed. Special " + "choices: full is full model output domain, and all is the full domain, " + "plus all of the sub domains. " + f"Choices: {['full', 'all', *maps.FULL_TILES, *list(maps.TILE_DEFS.keys())]}", + nargs="+", + ) # Ensemble panel-specific args - ens_group = parser.add_argument_group('Ensemble Panel Arguments') + ens_group = parser.add_argument_group("Ensemble Panel Arguments") ens_group.add_argument( - '--ens_size', + "--ens_size", default=10, - help='Number of ensemble members.', + help="Number of ensemble members.", type=int, - ) + ) # Diff args - diff_group = parser.add_argument_group('Difference Maps Arguments') + diff_group = parser.add_argument_group("Difference Maps Arguments") diff_group.add_argument( - '--data_root2', - help='Cycle-independant data directory location. The order of the ' \ - 'difference will be generated in order: data_root - data_root2.', - ) + "--data_root2", + help="Cycle-independant data directory location. The order of the " + "difference will be generated in order: data_root - data_root2.", + ) diff_group.add_argument( - '--file_tmpl2', - default='wrfnat_hrconus_{FCST_TIME:02d}.grib2', - help='File naming convention for second set of files used in \ - difference maps. Use FCST_TIME to indicate forecast hour.', - ) + "--file_tmpl2", + default="wrfnat_hrconus_{FCST_TIME:02d}.grib2", + help="File naming convention for second set of files used in \ + difference maps. Use FCST_TIME to indicate forecast hour.", + ) return parser.parse_args(argv) -def pre_proc_grib_files(cla, fhr): - ''' Use the command line argument object (cla) to determine the grib file +def pre_proc_grib_files(cla: Namespace, fhr: int) -> tuple[Path, bool]: # pragma: no cover + """ + Use the command line argument object (cla) to determine the grib file location at a given forecast hour. If multiple data input paths and file templates are provided by user, concatenate the files and remove the duplicates. Return the file path of the file to be used by the graphics data @@ -378,110 +378,113 @@ def pre_proc_grib_files(cla, fhr): old_enough bool stating whether the file is old enough as defined by user settings. Combined files here are presumed old enough by default - ''' + """ if len(cla.data_root) == 1 and len(cla.file_tmpl) == 1: # Nothing to do, return the original file location return check_file(cla, fhr) # Generate a list of files to be joined. 
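+    # For example (illustrative values, not the defaults): two data roots and
+    # two templates such as "wrfnat_{FCST_TIME:02d}.grib2" and
+    # "wrfprs_{FCST_TIME:02d}.grib2" yield, at fhr=6, a file_list like
+    # [<root1>/wrfnat_06.grib2, <root2>/wrfprs_06.grib2], one entry per source.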
- file_list = [os.path.join(*path).format(FCST_TIME=fhr) for path in - zip(cla.data_root, cla.file_tmpl)] + file_list = [ + "/".join(path).format(FCST_TIME=fhr) + for path in zip(cla.data_root, cla.file_tmpl, strict=True) + ] for file_path in file_list: - if not os.path.exists(file_path) \ - or not utils.old_enough(cla.data_age, file_path): - return file_path, False + if not Path(file_path).exists() or not utils.old_enough(cla.data_age, file_path): + return Path(file_path), False - print(f'Combining input files: ') + print("Combining input files: ") for fn in file_list: - print(f' {fn}') - - file_rand = ''.join([random.choice(string.ascii_letters + string.digits) \ - for _ in range(8)]) - combined_fp = os.path.join(cla.output_path, - COMBINED_FN.format(fhr=fhr, uniq=file_rand)) - tmp_fp = os.path.join(cla.output_path, - TMP_FN.format(fhr=fhr, uniq=file_rand)) - - cmd = f'cat {" ".join(file_list)} > {tmp_fp}' - output = subprocess.run(cmd, - capture_output=True, - check=True, - shell=True, - ) + print(f" {fn}") + + file_rand = "".join([random.choice(string.ascii_letters + string.digits) for _ in range(8)]) + combined_fp = Path(cla.output_path, COMBINED_FN.format(fhr=fhr, uniq=file_rand)) + tmp_fp = Path(cla.output_path, TMP_FN.format(fhr=fhr, uniq=file_rand)) + + cmd = f"cat {' '.join(file_list)} > {tmp_fp}" + output = subprocess.run( + cmd, + capture_output=True, + check=True, + shell=True, + ) if output.returncode != 0: - msg = f'{cmd} returned exit status: {output.returncode}!' + msg = f"{cmd} returned exit status: {output.returncode}!" raise OSError(msg) # Gather all grib2 entries from combined file - cmd = f'wgrib2 {tmp_fp} -submsg 1' - output = subprocess.run(cmd, - capture_output=True, - check=True, - shell=True, - ) - wgrib2_list = output.stdout.decode("utf-8").split('\n') + cmd = f"wgrib2 {tmp_fp} -submsg 1" + output = subprocess.run( + cmd, + capture_output=True, + check=True, + shell=True, + ) + wgrib2_list = output.stdout.decode("utf-8").split("\n") # Create a unique list of grib fields. wgrib2_list = utils.uniq_wgrib2_list(wgrib2_list) # Remove duplicate grib2 entries in grib file - cmd = f'wgrib2 -i {tmp_fp} -GRIB {combined_fp}' - input_arg = '\n'.join(wgrib2_list).encode("utf-8") - - output = subprocess.run(cmd, - capture_output=True, - check=True, - input=input_arg, - shell=True, - ) + cmd = f"wgrib2 -i {tmp_fp} -GRIB {combined_fp}" + input_arg = "\n".join(wgrib2_list).encode("utf-8") + + output = subprocess.run( + cmd, + capture_output=True, + check=True, + input=input_arg, + shell=True, + ) if output.returncode != 0: - msg = f'{cmd} returned exit status: {output.returncode}' + msg = f"{cmd} returned exit status: {output.returncode}" raise OSError(msg) - os.remove(f'{tmp_fp}') + tmp_fp.unlink() - return f'{combined_fp}', True + return combined_fp, True -def remove_accumulated_images(cla): - ''' Searches for all images that correspond with specs that have the +def remove_accumulated_images(cla: Namespace): # pragma: no cover + """ + Searches for all images that correspond with specs that have the accumulate entry set to True and removes them from the list of images to - create. ''' + create. 
+ """ for variable, levels in cla.images[1].items(): for level in levels: spec = cla.specs.get(variable, {}).get(level) if not spec: - msg = f'graphics: {variable} {level}' - raise errors.NoGraphicsDefinitionForVariable(msg) - accumulate = spec.get('accumulate', False) + msg = f"graphics: {variable} {level}" + raise errors.NoGraphicsDefinitionForVariableError(msg) + accumulate = spec.get("accumulate", False) if accumulate: - print(f'Will not plot {variable}:{level}') + print(f"Will not plot {variable}:{level}") cla.images[1][variable].remove(level) if not cla.images[1][variable]: del cla.images[1][variable] -def remove_proc_grib_files(cla): - ''' Find all processed grib files produced by this script and remove them. - ''' +def remove_proc_grib_files(cla: Namespace) -> None: # pragma: no cover + """Find all processed grib files produced by this script and remove them.""" # Prepare template with all viable forecast hours -- glob accepts * - combined_fn = COMBINED_FN.format(fhr=999, uniq=999).replace('999', '*') - combined_fp = os.path.join(cla.output_path, combined_fn) + combined_fn = COMBINED_FN.format(fhr=999, uniq=999).replace("999", "*") + combined_fp = cla.output_path / combined_fn - combined_files = glob.glob(combined_fp) + combined_files = glob.glob(str(combined_fp)) if combined_files: - print(f'Removing combined files: ') + print("Removing combined files: ") for file_path in combined_files: - print(f' {file_path}') - os.remove(file_path) + print(f" {file_path}") + Path(file_path).unlink() -def stage_zip_files(tiles, zip_dir): - ''' Stage the zip files in the appropriate directory for each tile to be +def stage_zip_files(tiles: list, zip_dir: Path) -> dict: # pragma: no cover + """ + Stage the zip files in the appropriate directory for each tile to be plotted. Return the dictionary of zipfile paths. Input: @@ -494,25 +497,23 @@ def stage_zip_files(tiles, zip_dir): Returns: zipfiles dictionary of tile keys, and zip directory values. - ''' + """ zipfiles = {} for tile in tiles: - tile_zip_dir = os.path.join(zip_dir, tile) - os.makedirs(tile_zip_dir, exist_ok=True) - - tile_zip_file = os.path.join(tile_zip_dir, 'files.zip') + tile_zip_dir = Path(zip_dir, tile) + tile_zip_dir.mkdir(parents=True, exist_ok=True) + tile_zip_file = tile_zip_dir / "files.zip" zipfiles[tile] = tile_zip_file return zipfiles -@utils.timer -def graphics_driver(cla): - # pylint: disable=too-many-statements +@utils.timer +def graphics_driver(cla: Namespace): # pragma: no cover + # ruff: noqa: PLR0915, PLR0912 # This whole script has likely reached the point of neededing refactoring - # into an object oriented design....each graphics type is it's own object + # into an object oriented design....each graphics type is its own object # sharing a base class. - - ''' + """ Function that interprets the command line arguments to locate the input grib file, create the output directory, and call the graphic-specifc function. @@ -520,15 +521,13 @@ def graphics_driver(cla): cla Namespace object containing command line arguments. 
- ''' - - # pylint: disable=too-many-branches, too-many-locals + """ # Create an empty zip file if cla.zip_dir: - tiles = cla.tiles if cla.graphic_type in ["maps", "enspanel"] else ['skewt'] - if 'skewt' in tiles: - tiles.append('skewt_csv') + tiles = cla.tiles if cla.graphic_type in ["maps", "enspanel"] else ["skewt"] + if "skewt" in tiles: + tiles.append("skewt_csv") zipfiles = stage_zip_files(tiles, cla.zip_dir) fcst_hours = copy.deepcopy(cla.fcst_hour) @@ -536,26 +535,28 @@ def graphics_driver(cla): # Initialize a timer used for killing the program timer_end = time.time() - gribfiles = None - gribfiles2 = None - + grib_paths = [] # When accummulating variables for preparing a single lead time, # load all of those into gribfiles up front. # This is not an operational feature. Exit if files don't exist. - if cla.graphic_type == 'maps': - first_fcst = 6 if 'global' in cla.images[0] else 0 - fcst_inc = 6 if 'global' in cla.images[0] else 1 + if cla.graphic_type == "maps": + first_fcst = 6 if "global" in cla.images[0] else 0 + fcst_inc = 6 if "global" in cla.images[0] else 1 if len(cla.fcst_hour) == 1 and cla.all_leads: for fhr in range(first_fcst, int(cla.fcst_hour[0]), fcst_inc): grib_path, old_enough = pre_proc_grib_files(cla, fhr) - if not os.path.exists(grib_path) or not old_enough: - msg = (f'File {grib_path} does not exist! Cannot accumulate', - f'data for this forecast lead time!') + if not grib_path.exists() or not old_enough: + msg = ( + f"File {grib_path} does not exist! Cannot accumulate", + "data for this forecast lead time!", + ) remove_proc_grib_files(cla) - raise FileNotFoundError(' '.join(msg)) - gribfiles = gather_gribfiles(cla, fhr, grib_path, gribfiles) + raise FileNotFoundError(" ".join(msg)) + if old_enough: + grib_paths.append(grib_path) + orig_spec = copy.deepcopy(cla.specs) # Allow this task to run concurrently with UPP by continuing to check for # new files as they become available. @@ -563,16 +564,17 @@ def graphics_driver(cla): timer_sleep = time.time() old_enough = False for fhr in sorted(fcst_hours): - if cla.graphic_type == 'enspanel': + if cla.graphic_type == "enspanel": # Expand template to create a list of ensemble member files and # check if they exist and that they're old enough - grib_paths = [] + ens_paths = [] ens_members = list(range(cla.ens_size)) for mem in ens_members: mem_path, mem_old_enough = check_file(cla, fhr, mem=mem) if mem_old_enough: - grib_paths.append(mem_path) - old_enough = len(grib_paths) == cla.ens_size + ens_paths.append(mem_path) + old_enough = len(ens_paths) == cla.ens_size + grib_paths = ens_paths else: # Only checks existence/age of base file for diffs grib_path, old_enough = pre_proc_grib_files(cla, fhr) @@ -580,74 +582,71 @@ def graphics_driver(cla): # UPP is most likely done writing if it hasn't written in data_age # mins (default is 3 to address most CONUS-sized domains) if old_enough: + grib_paths.append(grib_path) fcst_hours.remove(fhr) - fhr_as_list = [fhr] else: if cla.all_leads: # Wait on the missing file for an arbitrary 90% of wait time - if time.time() - timer_end > cla.wait_time * 60 * .9: - print(f"Giving up waiting on {grib_path}. \n", - f"Removing accumulated variables from image list \n", - f"{LOG_BREAK}\n") + if time.time() - timer_end > cla.wait_time * 60 * 0.9: + print( + f"Giving up waiting on {grib_path}. 
\n", + "Removing accumulated variables from image list \n", + f"{LOG_BREAK}\n", + ) remove_accumulated_images(cla) # Explicitly set -all_leads to False cla.all_leads = False else: # Break out of loop, wait for the desired period, and start # back at this forecast hour. - print(f'Waiting for {grib_path} to be available.') + print(f"Waiting for {grib_path} to be available.") break # It's safe to continue on processing the next forecast hour - print(f'Cannot find specified file(s), continuing to check on \n \ - next forecast hour.') + print("Cannot find specified file(s), continuing to check on next forecast hour.") continue # Create the working directory - workdir = os.path.join(cla.output_path, - f"{utils.from_datetime(cla.start_time)}{fhr:02d}") - os.makedirs(workdir, exist_ok=True) - - print(f'{LOG_BREAK}\n', - f'Graphics will be created for input files\n', - f'Output graphics directory: {workdir} \n' - f'{LOG_BREAK}') + workdir = Path(cla.output_path, f"{utils.from_datetime(cla.start_time)}{fhr:02d}") + workdir.mkdir(parents=True, exist_ok=True) - if cla.graphic_type == 'skewts': + print( + f"{LOG_BREAK}\n", + "Graphics will be created for input files\n", + f"Output graphics directory: {workdir} \n{LOG_BREAK}", + ) + full_spec = utils.load_yaml(orig_spec) + full_spec.dereference(context={"fhr": int(fhr), "file_type": cla.file_type}) + cla.specs = full_spec + if cla.graphic_type == "skewts": create_skewt(cla, fhr, grib_path, workdir) - elif cla.graphic_type == 'maps': - gribfiles = gather_gribfiles(cla, fhr, grib_path, gribfiles) - create_maps(cla, - fhr=fhr, - grib_contents=gribfiles.contents, - workdir=workdir, - ) - elif cla.graphic_type == 'diff': - gribfiles = gather_gribfiles(cla, fhr, grib_path, gribfiles) + elif cla.graphic_type == "maps": + create_maps( + cla, + fhr=fhr, + grib_paths=grib_paths, + workdir=workdir, + ) + elif cla.graphic_type == "diff": grib_path2, _ = check_file( cla, fhr, data_root=cla.data_root2, - file_tmpl=cla.file_tmpl2,) - gribfiles2 = gather_gribfiles(cla, fhr, grib_path2, gribfiles2) - - create_maps(cla, - fhr=fhr, - grib_contents=gribfiles.contents, - grib_contents2=gribfiles2.contents, - workdir=workdir, - ) - else: - gribfiles = gribfile.GribFiles( - coord_dims={'ens_mem': ens_members, 'fcst_hr': fhr_as_list}, - filenames={'free_fcst': grib_paths}, - filetype=cla.file_type, - model=cla.images[0], - ) - create_maps(cla, - fhr=fhr, - grib_contents=gribfiles.contents, - workdir=workdir, - ) + file_tmpl=cla.file_tmpl2, + ) + create_maps( + cla, + fhr=fhr, + grib_paths=[grib_path], + grib_path2=grib_path2, + workdir=workdir, + ) + else: # enspanel + create_maps( + cla, + fhr=fhr, + grib_paths=grib_paths, + workdir=workdir, + ) # Zip png files and remove the originals in a subprocess if cla.zip_dir: @@ -660,74 +659,77 @@ def graphics_driver(cla): # wait_time mins. This accounts for slower UPP processes. Default for # most CONUS-sized domains is 10 mins. 
         if time.time() - timer_end > cla.wait_time * 60:
-            print(f"Exiting with forecast hours remaining: {fcst_hours}",
-                  f"{LOG_BREAK}")
+            print(f"Exiting with forecast hours remaining: {fcst_hours}", f"{LOG_BREAK}")
             break

         # Wait for a bit if it's been < 2 minutes (about the length of time UPP
         # takes) since starting last loop
-        if fcst_hours and time.time() - timer_sleep < 120:
-            print(f"Waiting for a minute for forecast hours: {fcst_hours}",
-                  f"{LOG_BREAK}")
+        two_mins = 120
+        if fcst_hours and time.time() - timer_sleep < two_mins:
+            print(f"Waiting for a minute for forecast hours: {fcst_hours}", f"{LOG_BREAK}")
             time.sleep(60)

     remove_proc_grib_files(cla)


-def create_graphics(argv):
-    '''
+def create_graphics(argv: list):  # pragma: no cover
+    """
     Function to perform a series of checks on command line arguments.
-    '''
-    CLARGS = parse_args(argv)
-    CLARGS.fcst_hour = utils.fhr_list(CLARGS.fcst_hour)
+    """
+    clargs = parse_args(argv)
+    clargs.fcst_hour = utils.fhr_list(clargs.fcst_hour)

     # Check that the same number of entries exists in -d and --file_tmpl
-    if len(CLARGS.data_root) != len(CLARGS.file_tmpl):
+    if len(clargs.data_root) != len(clargs.file_tmpl):
         errmsg = "Must specify the same number of arguments for -d and --file_tmpl"
-        argparse.ArgumentParser.exit(0, errmsg)
+        sys.exit(errmsg)

     # Ensure wgrib command is available in environment before getting too far
     # down this path...
-    if len(CLARGS.data_root) > 1:
-        retcode = subprocess.run('which wgrib2', shell=True, check=True)
+    if len(clargs.data_root) > 1:
+        retcode = subprocess.run("/usr/bin/which wgrib2", shell=True, check=True)
         if retcode.returncode != 0:
-            errmsg = 'Could not find wgrib2, please make sure it is loaded \
-                in your environment.'
+            errmsg = "Could not find wgrib2, please make sure it is loaded \
+                in your environment."
             raise OSError(errmsg)

     # Only need to load the default in memory if we're making maps.
-    if CLARGS.graphic_type in ['maps', 'enspanel', 'diff']:
-        CLARGS.specs = utils.load_specs(CLARGS.specs)
+    if clargs.graphic_type in ["maps", "enspanel", "diff"]:
+        clargs.specs = utils.load_specs(clargs.specs)

-        CLARGS.images = load_images(CLARGS.images)
-        CLARGS.tiles = generate_tile_list(CLARGS.tiles)
+        clargs.images = load_images(clargs.images)
+        clargs.tiles = generate_tile_list(clargs.tiles)

     # Make sure the second data root is provided when doing diffs
-    if CLARGS.graphic_type == 'diff':
-        if not CLARGS.data_root2:
+    if clargs.graphic_type == "diff":
+        if not clargs.data_root2:
             errmsg = "Must specify a second data root (--data_root2) for creating difference maps"
-            raise argparse.ArgumentError(CLARGS.data_root2, errmsg)
-        if CLARGS.all_leads:
-            warning = ("Warning! Plotting differences in graphics-accumulated ",
-                       "fields is not supported!")
+            raise ArgumentError(clargs.data_root2, errmsg)
+        if clargs.all_leads:
+            warning = (
+                "Warning! Plotting differences in graphics-accumulated ",
+                "fields is not supported!",
+            )
             print(warning)

     # Make sure both required arguments (--max_plev, --sites) are provided when doing skewTs
-    if CLARGS.graphic_type == 'skewts':
-        if not CLARGS.max_plev:
-            argparse.ArgumentParser.exit(0, "Must specify maximum pressure level \
-                (--max_plev) when creating skewTs")
-        if not CLARGS.sites:
-            argparse.ArgumentParser.exit(0, "Must specify sites (--sites) when creating skewTs")
-
-    print(f"Running script for {CLARGS.graphic_type} with args: ",
-          f"{LOG_BREAK}")
-
-    for name, val in CLARGS.__dict__.items():
-        if name not in ['specs', 'sites']:
+    if clargs.graphic_type == "skewts":
+        if not clargs.max_plev:
+            errmsg = (
+                "Must specify maximum pressure level "
+                "(--max_plev) when creating skewTs"
+            )
+            sys.exit(errmsg)
+        if not clargs.sites:
+            sys.exit("Must specify sites (--sites) when creating skewTs")
+
+    print(f"Running script for {clargs.graphic_type} with args: ", f"{LOG_BREAK}")
+
+    for name, val in clargs.__dict__.items():
+        if name not in ["specs", "sites"]:
             print(f"{name:>15s}: {val}")

-    graphics_driver(CLARGS)
+    graphics_driver(clargs)


-if __name__ == '__main__':
-    create_graphics(sys.argv[1:])
+if __name__ == "__main__":
+    create_graphics(sys.argv[1:])  # pragma: no cover
diff --git a/devpkgs b/devpkgs
new file mode 100644
index 00000000..8f2c350c
--- /dev/null
+++ b/devpkgs
@@ -0,0 +1,5 @@
+mypy==1.19.*
+pytest-cov==7.0.*
+pytest-xdist==3.8.*
+pytest==9.0.*
+ruff==0.14.*
diff --git a/environment.yml b/environment.yml
index cef291f7..1c555643 100644
--- a/environment.yml
+++ b/environment.yml
@@ -1,16 +1,18 @@
 name: pygraf
 channels:
   - conda-forge
-  - defaults
+  - ufs-community
+  - nodefaults
 dependencies:
-  - python=3.7*
-  - basemap=1.2*
-  - basemap-data-hires=1.2*
-  - pynio=1.5.5
-  - matplotlib=3.2*
-  - metpy=0.12.1
-  - pylint=2.4*
-  - pytest=6.1*
-  - pyyaml=5.3*
-  - xarray=0.15*
-  - dask
+  - python=3.13.*
+  - basemap=2.0.*
+  - basemap-data-hires=2.0.*
+  - cfgrib=0.9.*
+  - dask=2025.11.*
+  - matplotlib=3.10.*
+  - metpy=1.7.*
+  - notebook=7.5.*
+  - numpy=2.3.*
+  - pint=0.25.*
+  - uwtools=2.12.*
+  - xarray=2025.11.*
diff --git a/format b/format
new file mode 100755
index 00000000..7e75b41f
--- /dev/null
+++ b/format
@@ -0,0 +1,5 @@
+echo "=> Formatting code"
+ruff format .
+
+echo "=> Sorting imports"
+ruff check --select I --fix .
diff --git a/image_lists/global.yml b/image_lists/global.yml index c99d9b92..8105b8cd 100644 --- a/image_lists/global.yml +++ b/image_lists/global.yml @@ -18,19 +18,14 @@ hourly: - high - low - mid - - total cpofp: - sfc cref: - sfc dewp: - 2m - dlwrf: - - sfc dlwrfavg: - sfc - dswrf: - - sfc dswrfavg: - sfc gh: @@ -42,48 +37,14 @@ hourly: - sr03 hpbl: - sfc - lhtfl: - - sfc - lhtflavg: - - sfc - pres: - - msl pwtr: - sfc - rh: - - 2m - - 850mb - - mean - shtfl: - - sfc - shtflavg: - - sfc snod: - sfc soilt: &soilt_levs - 10cm - 1m soilw: *soilt_levs - temp: - - 2ds - - 2m - - 500mb - - 700mb - - 850mb - - 925mb - - sfc - totp6h: - - sfc - ulwrf: - - sfc - ulwrfavg: - - sfc - - top - uswrf: - - sfc - uswrfavg: - - sfc - - top vis: - sfc vort: @@ -92,11 +53,3 @@ hourly: - 700mb weasd: - sfc - wspeed: - - 10m - - 10mb - - 20mb - - 250mb - - 5mb - - 80m - - 850mb diff --git a/image_lists/global_chem.yml b/image_lists/global_chem.yml index 09009335..96d5bc31 100644 --- a/image_lists/global_chem.yml +++ b/image_lists/global_chem.yml @@ -7,8 +7,6 @@ hourly: - sfc aodfd: - sfc - aodhg: - - sfc aodoc: - sfc aodss: @@ -23,20 +21,6 @@ hourly: - sfc bc2: - sfc - cape: - - mu - - mul - - mx90mb - - sfc - ceil: - - ua - cin: - - sfc - cloudcover: - - high - - low - - mid - - total colbc: - sfc colfd: @@ -51,29 +35,10 @@ hourly: - sfc colsu: - sfc - colto: - - sfc - cpofp: - - sfc cref: - sfc - ctop: - - ua - dewp: - - 2m fd: - sfc - flru: - - sfc - gust: - - 10m - hlcy: - - sr01 - - sr03 - hpbl: - - sfc - lhtfl: - - sfc oc: - sfc oc1: @@ -86,52 +51,19 @@ hourly: - sfc pres: - msl - ptyp: - - sfc - pwtr: - - sfc rh: - 850mb seasalt: - sfc - shtfl: - - sfc - snod: - - sfc - soilt: &soilt_levs - - 10cm - - 1m - soilw: *soilt_levs sulf: - sfc temp: - - 2ds - - 2m - - 500mb - 700mb - 850mb - 925mb - sfc totp6h: - sfc - totp: - - sfc - ulwrf: - - sfc - - top - uswrf: - - sfc - - top - vis: - - sfc - vort: - - 500mb - vvel: - - 700mb - weasd: - - sfc wspeed: - 10m - - 80m - - 250mb - 850mb diff --git a/image_lists/hrrr_subset.yml b/image_lists/hrrr_subset.yml index 517c4d27..af9fd3aa 100644 --- a/image_lists/hrrr_subset.yml +++ b/image_lists/hrrr_subset.yml @@ -15,8 +15,6 @@ hourly: - sfc acsnw: - sfc - snoliqr: - - sfc cape: - mu - mul @@ -24,14 +22,11 @@ hourly: - sfc ceil: - ua - ceilexp: - - ua ceilexp2: - ua cin: - sfc cloudcover: - - bndylay - high - low - mid @@ -64,7 +59,6 @@ hourly: - max - maxsfc hlcy: - - in25 - mn02 - mn03 - mn25 diff --git a/image_lists/hrrr_test.yml b/image_lists/hrrr_test.yml index 9646336c..70c2186c 100644 --- a/image_lists/hrrr_test.yml +++ b/image_lists/hrrr_test.yml @@ -2,9 +2,9 @@ hourly: model: hrrr variables: hlcytot: - - mn02 - - mn03 - - mn25 - - mx02 - - mx03 - - mx25 \ No newline at end of file + - mn02 + - mn03 + - mn25 + - mx02 + - mx03 + - mx25 diff --git a/image_lists/hrrrcar_subset.yml b/image_lists/hrrrcar_subset.yml index 5f08167d..7f1783f3 100644 --- a/image_lists/hrrrcar_subset.yml +++ b/image_lists/hrrrcar_subset.yml @@ -16,16 +16,9 @@ hourly: - mul - mx90mb - sfc - ceil: - - ua - ceilexp: - - ua - ceilexp2: - - ua cin: - sfc cloudcover: - - bndylay - high - low - mid @@ -40,14 +33,10 @@ hourly: - 2m echotop: - sfc - firewx-pygraf: - - sfc flru: - sfc G113bt: - sat - G114bt: - - sat G123bt: - sat G124bt: @@ -85,8 +74,6 @@ hourly: - sfc ltg3: - sfc - mfrp: - - sfc mref: - sfc pres: @@ -142,9 +129,6 @@ hourly: - sfc totp: - sfc - trc1: - - sfc - - int ulwrf: - sfc - top diff --git a/image_lists/rap.yml b/image_lists/rap.yml deleted file mode 100644 index 
cb548158..00000000 --- a/image_lists/rap.yml +++ /dev/null @@ -1,114 +0,0 @@ -hourly: - model: rap - variables: - 1hsnw: - - sfc - acsnod: - - sfc - acsnw: - - sfc - acfrozr: - - sfc - acfrzr: - - sfc - acpcp: - - sfc - cape: - - mu - - mul - - mx90mb - - sfc - ceil: - - ua - cin: - - sfc - cloudcover: - - high - - low - - mid - - total - cref: - - sfc - ctop: - - ua - dewp: - - 2m - echotop: - - sfc - flru: - - sfc - gust: - - 10m - hpbl: - - sfc - lhtfl: - - sfc - ltng: - - sfc - pchg: - - sfc - ptmp: - - 2m - ptyp: - - sfc - pwtr: - - sfc - rh: - - 2m - - 850mb - - mean - rhpw: - - sfc - rvil: - - sfc - shtfl: - - sfc - snod: - - sfc - soilt: - - 1cm - - 4cm - - 10cm - soilw: - - 0cm - - 1cm - - 4cm - - 10cm - solar: - - sfc - temp: - - 2ds - - 2m - - 500mb - - 700mb - - 850mb - - 925mb - - sfc - totp: - - sfc - ulwrf: - - sfc - - nta - uswrf: - - sfc - - nta - vil: - - sfc - vis: - - sfc - vort: - - 500mb - vvel: - - 700mb - wchg: - - 80m - wind: - - 10m - - 80m - - 850mb - - 250mb - wmag: - - 250mb - - 850mb - weasd: - - sfc diff --git a/image_lists/rap_subset.yml b/image_lists/rap_subset.yml deleted file mode 100644 index 261b8feb..00000000 --- a/image_lists/rap_subset.yml +++ /dev/null @@ -1,106 +0,0 @@ -hourly: - model: rap - variables: - 1hsnw: - - sfc - acsnod: - - sfc - acsnw: - - sfc - acfrozr: - - sfc - acfrzr: - - sfc - acpcp: - - sfc - cape: - - mu - - mul - - mx90mb - - sfc - ceil: - - ua - cin: - - sfc - cloudcover: - - high - - low - - mid - - total - cref: - - sfc - ctop: - - ua - dewp: - - 2m - echotop: - - sfc - flru: - - sfc - gust: - - 10m - hpbl: - - sfc - lhtfl: - - sfc - ptmp: - - 2m - ptyp: - - sfc - pwtr: - - sfc - rh: - - 2m - - 850mb - - mean - - pw - rvil: - - sfc - shtfl: - - sfc - snod: - - sfc - soilt: &soilt_levs - - 0cm - - 1cm - - 4cm - - 10cm - - 30cm - - 60cm - - 1m - - 1.6m - - 3m - soilw: *soilt_levs - solar: - - sfc - temp: - - 2ds - - 2m - - 500mb - - 700mb - - 850mb - - 925mb - - sfc - totp: - - sfc - ulwrf: - - sfc - - top - uswrf: - - sfc - - top - vil: - - sfc - vis: - - sfc - vort: - - 500mb - vvel: - - 700mb - weasd: - - sfc - wspeed: - - 10m - - 80m - - 250mb - - 850mb diff --git a/image_lists/regional_mpas_subset.yml b/image_lists/regional_mpas_subset.yml index 6641cccd..86d2d520 100644 --- a/image_lists/regional_mpas_subset.yml +++ b/image_lists/regional_mpas_subset.yml @@ -1,5 +1,5 @@ hourly: - model: regional_mpas + model: mpas variables: 1ref: - 1000m @@ -56,11 +56,8 @@ hourly: - sat gust: - 10m - 1hachail: - - sfc hail: - max - - maxsfc hlcy: - in25 - mn02 @@ -87,8 +84,6 @@ hourly: li: - best - sfc - ltg3: - - sfc mfrp: - sfc mref: @@ -119,8 +114,6 @@ hourly: - sfc snod: - sfc - soilm: - - sfc soilt: &soilt_levs - 0cm - 1cm diff --git a/image_lists/rrfs_subset.yml b/image_lists/rrfs_subset.yml index 358d666c..0bed0587 100644 --- a/image_lists/rrfs_subset.yml +++ b/image_lists/rrfs_subset.yml @@ -36,8 +36,6 @@ hourly: - low - mid - total - coarsedust: - - sfc cpofp: - sfc cref: @@ -48,24 +46,10 @@ hourly: - 2m echotop: - sfc - emissions: - - sfc - finedust: - - sfc firewx: - sfc flru: - sfc - fullintdust: - - int - G113bt: - - sat - G114bt: - - sat - G123bt: - - sat - G124bt: - - sat ghtfl: - sfc gust: @@ -93,27 +77,23 @@ hourly: - sfc li: - best - - sfc - ltg3: - - sfc ltng: - sfc lwtp: - sfc mref: - sfc + ptmp: + - 2m pres: - msl - sfc - ptmp: - - 2m ptyp: - sfc pwtr: - sfc ref: - m10 - - maxm10 rh: - 2m - 850mb @@ -153,11 +133,9 @@ hourly: - 850mb - 925mb - sfc + totp: - sfc - trc1: - - int - - sfc ulwrf: - sfc - top @@ -168,17 
+146,12 @@ hourly: - sfc vddsf: - sfc - vig: - - sfc vis: - sfc - visbsn: - - sfc vort: - 500mb vvel: - 700mb - - mean vvort: - mx01 - mx02 diff --git a/pre.sh b/pre.sh index aac7f804..f6048564 100644 --- a/pre.sh +++ b/pre.sh @@ -3,7 +3,7 @@ module purge module use -a /contrib/miniconda3/modulefiles -module load miniconda3/4.12.0 +module load miniconda3/25.11.0 conda activate pygraf module list diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..43e1f556 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,77 @@ +[tool.coverage.report] +exclude_also = ["if TYPE_CHECKING:"] +fail_under = 100 +show_missing = true +skip_covered = true +omit = ["conftest.py", "tests/*"] + + +[tool.mypy] +check_untyped_defs = true +follow_untyped_imports = true # needed for mpl_toolkits.basemap +pretty = true +warn_return_any = true + +[tool.ruff] +line-length = 100 + +[tool.ruff.lint] +select = ["ALL"] +ignore = [ + "ANN002", # missing-type-args + "ANN003", # missing-type-kwargs + "ANN201", # missing-return-type-class-method + "ANN202", # missing-return-type-private-function + "ANN204", # missing-return-type-special-method + "ANN205", # missing-return-type-static-method + "ANN401", # any-type + "C408", # unnecessary-collection-call + "C901", # complex-structure + "COM812", # missing-trailing-comma + "D100", # undocumented-public-module + "D101", # undocumented-public-class + "D102", # undocumented-public-method + "D103", # undocumented-public-function + "D104", # undocumented-public-package + "D105", # undocumented-magic-method + "D107", # undocumented-public-init + "D200", # unnecessary-multiline-docstring + "D202", # blank-line-after-function + "D203", # incorrect-blank-line-before-class + "D205", # missing-blank-line-after-summary + "D212", # multi-line-summary-first-line + "D401", # non-imperative-mood + "D404", # docstring-starts-with-this + "DTZ001", # call-datetime-without-tzinfo + "DTZ005", # call-datetime-now-without-tzinfo + "DTZ006", # call-datetime-fromtimestamp + "DTZ007", # call-datetime-strptime-without-zone + "E731", # lambda-assignment + "FBT001", # boolean-type-hint-positional-argument + "FBT002", # boolean-default-value-positional-argument + "PLR0913", # too-many-arguments + "PTH207", # glob + "RUF015", # unnecessary-iterable-allocation-for-first-element + "S101", # assert + "S311", # suspicious-non-cryptographic-random-usage + "S506", # unsafe-yaml-load + "S602", # subprocess-popen-with-shell-equals-true + "T201", # print +] + +[tool.ruff.lint.per-file-ignores] +"conftest.py" = [ + "ANN001", # missing-type-function-argument + "PLR2004", # magic-value-comparison + "PT013", # pytest-incorrect-pytest-import + "SLF001", # private-member-access +] +"adb_graphics/datahandler/gribdata.py" = [ + "PLR2004", # magic-value-comparison +] +"tests/*" = [ + "ANN001", # missing-type-function-argument + "PLR2004", # magic-value-comparison + "PT013", # pytest-incorrect-pytest-import + "SLF001", # private-member-access +] diff --git a/tests/data/wgrib2_submsg1.txt b/tests/data/wgrib2_submsg1.txt new file mode 100644 index 00000000..53d3b76d --- /dev/null +++ b/tests/data/wgrib2_submsg1.txt @@ -0,0 +1,1847 @@ +1:0:d=2025100600:PRES:1 hybrid level:16 hour fcst: +2:2259236:d=2025100600:CLMR:1 hybrid level:16 hour fcst: +3:2275399:d=2025100600:CIMIXR:1 hybrid level:16 hour fcst: +4:2275766:d=2025100600:RWMR:1 hybrid level:16 hour fcst: +5:2589683:d=2025100600:SNMR:1 hybrid level:16 hour fcst: +6:2590771:d=2025100600:GRLE:1 hybrid level:16 hour fcst: +7:2591646:d=2025100600:NCONCD:1 hybrid 
level:16 hour fcst: +8:2612852:d=2025100600:NCCICE:1 hybrid level:16 hour fcst: +9:2613203:d=2025100600:SPNCR:1 hybrid level:16 hour fcst: +10:2928563:d=2025100600:PMTF:1 hybrid level:16 hour fcst: +11:3580683:d=2025100600:PMTC:1 hybrid level:16 hour fcst: +12:4221636:d=2025100600:FRACCC:1 hybrid level:16 hour fcst: +13:4271096:d=2025100600:HGT:1 hybrid level:16 hour fcst: +14:6575263:d=2025100600:TMP:1 hybrid level:16 hour fcst: +15:7737704:d=2025100600:SPFH:1 hybrid level:16 hour fcst: +16:9287179:d=2025100600:UGRD:1 hybrid level:16 hour fcst: +17:10495303:d=2025100600:VGRD:1 hybrid level:16 hour fcst: +18:11641941:d=2025100600:VVEL:1 hybrid level:16 hour fcst: +19:12856890:d=2025100600:TKE:1 hybrid level:16 hour fcst: +20:13469298:d=2025100600:MASSDEN:1 hybrid level:16 hour fcst: +21:14223354:d=2025100600:PRES:2 hybrid level:16 hour fcst: +22:16482381:d=2025100600:CLMR:2 hybrid level:16 hour fcst: +23:16498523:d=2025100600:CIMIXR:2 hybrid level:16 hour fcst: +24:16498892:d=2025100600:RWMR:2 hybrid level:16 hour fcst: +25:16814928:d=2025100600:SNMR:2 hybrid level:16 hour fcst: +26:16816097:d=2025100600:GRLE:2 hybrid level:16 hour fcst: +27:16817048:d=2025100600:NCONCD:2 hybrid level:16 hour fcst: +28:16838583:d=2025100600:NCCICE:2 hybrid level:16 hour fcst: +29:16838933:d=2025100600:SPNCR:2 hybrid level:16 hour fcst: +30:17158611:d=2025100600:PMTF:2 hybrid level:16 hour fcst: +31:17803941:d=2025100600:PMTC:2 hybrid level:16 hour fcst: +32:18444712:d=2025100600:FRACCC:2 hybrid level:16 hour fcst: +33:18493071:d=2025100600:HGT:2 hybrid level:16 hour fcst: +34:20867924:d=2025100600:TMP:2 hybrid level:16 hour fcst: +35:22010707:d=2025100600:SPFH:2 hybrid level:16 hour fcst: +36:23517214:d=2025100600:UGRD:2 hybrid level:16 hour fcst: +37:24740523:d=2025100600:VGRD:2 hybrid level:16 hour fcst: +38:25884357:d=2025100600:VVEL:2 hybrid level:16 hour fcst: +39:27311692:d=2025100600:TKE:2 hybrid level:16 hour fcst: +40:27873402:d=2025100600:MASSDEN:2 hybrid level:16 hour fcst: +41:28614767:d=2025100600:PRES:3 hybrid level:16 hour fcst: +42:30872983:d=2025100600:CLMR:3 hybrid level:16 hour fcst: +43:30893270:d=2025100600:CIMIXR:3 hybrid level:16 hour fcst: +44:30893645:d=2025100600:RWMR:3 hybrid level:16 hour fcst: +45:31214039:d=2025100600:SNMR:3 hybrid level:16 hour fcst: +46:31215405:d=2025100600:GRLE:3 hybrid level:16 hour fcst: +47:31216522:d=2025100600:NCONCD:3 hybrid level:16 hour fcst: +48:31244871:d=2025100600:NCCICE:3 hybrid level:16 hour fcst: +49:31245225:d=2025100600:SPNCR:3 hybrid level:16 hour fcst: +50:31572258:d=2025100600:PMTF:3 hybrid level:16 hour fcst: +51:32728250:d=2025100600:PMTC:3 hybrid level:16 hour fcst: +52:33358261:d=2025100600:FRACCC:3 hybrid level:16 hour fcst: +53:33432292:d=2025100600:HGT:3 hybrid level:16 hour fcst: +54:35834968:d=2025100600:TMP:3 hybrid level:16 hour fcst: +55:36965853:d=2025100600:SPFH:3 hybrid level:16 hour fcst: +56:38452065:d=2025100600:UGRD:3 hybrid level:16 hour fcst: +57:39658610:d=2025100600:VGRD:3 hybrid level:16 hour fcst: +58:40793223:d=2025100600:VVEL:3 hybrid level:16 hour fcst: +59:42312783:d=2025100600:TKE:3 hybrid level:16 hour fcst: +60:42870505:d=2025100600:MASSDEN:3 hybrid level:16 hour fcst: +61:43612659:d=2025100600:PRES:4 hybrid level:16 hour fcst: +62:45870590:d=2025100600:CLMR:4 hybrid level:16 hour fcst: +63:45900384:d=2025100600:CIMIXR:4 hybrid level:16 hour fcst: +64:45900766:d=2025100600:RWMR:4 hybrid level:16 hour fcst: +65:46227687:d=2025100600:SNMR:4 hybrid level:16 hour fcst: +66:46229485:d=2025100600:GRLE:4 hybrid 
level:16 hour fcst: +67:46230869:d=2025100600:NCONCD:4 hybrid level:16 hour fcst: +68:46274552:d=2025100600:NCCICE:4 hybrid level:16 hour fcst: +69:46274902:d=2025100600:SPNCR:4 hybrid level:16 hour fcst: +70:46736460:d=2025100600:PMTF:4 hybrid level:16 hour fcst: +71:47898688:d=2025100600:PMTC:4 hybrid level:16 hour fcst: +72:48536911:d=2025100600:FRACCC:4 hybrid level:16 hour fcst: +73:48664654:d=2025100600:HGT:4 hybrid level:16 hour fcst: +74:51105192:d=2025100600:TMP:4 hybrid level:16 hour fcst: +75:52219055:d=2025100600:SPFH:4 hybrid level:16 hour fcst: +76:53704102:d=2025100600:UGRD:4 hybrid level:16 hour fcst: +77:54870906:d=2025100600:VGRD:4 hybrid level:16 hour fcst: +78:55988172:d=2025100600:VVEL:4 hybrid level:16 hour fcst: +79:57592013:d=2025100600:TKE:4 hybrid level:16 hour fcst: +80:58155898:d=2025100600:MASSDEN:4 hybrid level:16 hour fcst: +81:58909005:d=2025100600:PRES:5 hybrid level:16 hour fcst: +82:61167057:d=2025100600:CLMR:5 hybrid level:16 hour fcst: +83:61215063:d=2025100600:CIMIXR:5 hybrid level:16 hour fcst: +84:61215444:d=2025100600:RWMR:5 hybrid level:16 hour fcst: +85:61552899:d=2025100600:SNMR:5 hybrid level:16 hour fcst: +86:61555598:d=2025100600:GRLE:5 hybrid level:16 hour fcst: +87:61557426:d=2025100600:NCONCD:5 hybrid level:16 hour fcst: +88:61631176:d=2025100600:NCCICE:5 hybrid level:16 hour fcst: +89:61631552:d=2025100600:SPNCR:5 hybrid level:16 hour fcst: +90:62113501:d=2025100600:PMTF:5 hybrid level:16 hour fcst: +91:63295398:d=2025100600:PMTC:5 hybrid level:16 hour fcst: +92:63965202:d=2025100600:FRACCC:5 hybrid level:16 hour fcst: +93:64205990:d=2025100600:HGT:5 hybrid level:16 hour fcst: +94:66682669:d=2025100600:TMP:5 hybrid level:16 hour fcst: +95:67776334:d=2025100600:SPFH:5 hybrid level:16 hour fcst: +96:69285663:d=2025100600:UGRD:5 hybrid level:16 hour fcst: +97:70402945:d=2025100600:VGRD:5 hybrid level:16 hour fcst: +98:71504531:d=2025100600:VVEL:5 hybrid level:16 hour fcst: +99:73179084:d=2025100600:TKE:5 hybrid level:16 hour fcst: +100:73743682:d=2025100600:MASSDEN:5 hybrid level:16 hour fcst: +101:74536683:d=2025100600:PRES:6 hybrid level:16 hour fcst: +102:76795229:d=2025100600:CLMR:6 hybrid level:16 hour fcst: +103:76873140:d=2025100600:CIMIXR:6 hybrid level:16 hour fcst: +104:76873637:d=2025100600:RWMR:6 hybrid level:16 hour fcst: +105:77221811:d=2025100600:SNMR:6 hybrid level:16 hour fcst: +106:77225972:d=2025100600:GRLE:6 hybrid level:16 hour fcst: +107:77228537:d=2025100600:NCONCD:6 hybrid level:16 hour fcst: +108:77310951:d=2025100600:NCCICE:6 hybrid level:16 hour fcst: +109:77311418:d=2025100600:SPNCR:6 hybrid level:16 hour fcst: +110:77816658:d=2025100600:PMTF:6 hybrid level:16 hour fcst: +111:79055142:d=2025100600:PMTC:6 hybrid level:16 hour fcst: +112:79750834:d=2025100600:FRACCC:6 hybrid level:16 hour fcst: +113:80148145:d=2025100600:HGT:6 hybrid level:16 hour fcst: +114:82656766:d=2025100600:TMP:6 hybrid level:16 hour fcst: +115:83736727:d=2025100600:SPFH:6 hybrid level:16 hour fcst: +116:85292207:d=2025100600:UGRD:6 hybrid level:16 hour fcst: +117:86379379:d=2025100600:VGRD:6 hybrid level:16 hour fcst: +118:87474107:d=2025100600:VVEL:6 hybrid level:16 hour fcst: +119:89194151:d=2025100600:TKE:6 hybrid level:16 hour fcst: +120:89745576:d=2025100600:MASSDEN:6 hybrid level:16 hour fcst: +121:90558716:d=2025100600:PRES:7 hybrid level:16 hour fcst: +122:92818049:d=2025100600:CLMR:7 hybrid level:16 hour fcst: +123:92912032:d=2025100600:CIMIXR:7 hybrid level:16 hour fcst: +124:92912949:d=2025100600:RWMR:7 hybrid level:16 hour fcst: 
+125:93265192:d=2025100600:SNMR:7 hybrid level:16 hour fcst: +126:93271327:d=2025100600:GRLE:7 hybrid level:16 hour fcst: +127:93273063:d=2025100600:NCONCD:7 hybrid level:16 hour fcst: +128:93373552:d=2025100600:NCCICE:7 hybrid level:16 hour fcst: +129:93374596:d=2025100600:SPNCR:7 hybrid level:16 hour fcst: +130:93770961:d=2025100600:PMTF:7 hybrid level:16 hour fcst: +131:95032830:d=2025100600:PMTC:7 hybrid level:16 hour fcst: +132:95754882:d=2025100600:FRACCC:7 hybrid level:16 hour fcst: +133:96311106:d=2025100600:HGT:7 hybrid level:16 hour fcst: +134:98842296:d=2025100600:TMP:7 hybrid level:16 hour fcst: +135:99918570:d=2025100600:SPFH:7 hybrid level:16 hour fcst: +136:101499179:d=2025100600:UGRD:7 hybrid level:16 hour fcst: +137:102583109:d=2025100600:VGRD:7 hybrid level:16 hour fcst: +138:103681320:d=2025100600:VVEL:7 hybrid level:16 hour fcst: +139:105434623:d=2025100600:TKE:7 hybrid level:16 hour fcst: +140:105938506:d=2025100600:MASSDEN:7 hybrid level:16 hour fcst: +141:107161342:d=2025100600:PRES:8 hybrid level:16 hour fcst: +142:109419689:d=2025100600:CLMR:8 hybrid level:16 hour fcst: +143:109547928:d=2025100600:CIMIXR:8 hybrid level:16 hour fcst: +144:109549399:d=2025100600:RWMR:8 hybrid level:16 hour fcst: +145:109907635:d=2025100600:SNMR:8 hybrid level:16 hour fcst: +146:109916027:d=2025100600:GRLE:8 hybrid level:16 hour fcst: +147:109920926:d=2025100600:NCONCD:8 hybrid level:16 hour fcst: +148:110056714:d=2025100600:NCCICE:8 hybrid level:16 hour fcst: +149:110057889:d=2025100600:SPNCR:8 hybrid level:16 hour fcst: +150:110465761:d=2025100600:PMTF:8 hybrid level:16 hour fcst: +151:111740944:d=2025100600:PMTC:8 hybrid level:16 hour fcst: +152:112484037:d=2025100600:FRACCC:8 hybrid level:16 hour fcst: +153:113135608:d=2025100600:HGT:8 hybrid level:16 hour fcst: +154:115680851:d=2025100600:TMP:8 hybrid level:16 hour fcst: +155:116762737:d=2025100600:SPFH:8 hybrid level:16 hour fcst: +156:118340814:d=2025100600:UGRD:8 hybrid level:16 hour fcst: +157:119429428:d=2025100600:VGRD:8 hybrid level:16 hour fcst: +158:120533965:d=2025100600:VVEL:8 hybrid level:16 hour fcst: +159:122264667:d=2025100600:TKE:8 hybrid level:16 hour fcst: +160:122691241:d=2025100600:MASSDEN:8 hybrid level:16 hour fcst: +161:124121792:d=2025100600:PRES:9 hybrid level:16 hour fcst: +162:126375635:d=2025100600:CLMR:9 hybrid level:16 hour fcst: +163:126550943:d=2025100600:CIMIXR:9 hybrid level:16 hour fcst: +164:126552635:d=2025100600:RWMR:9 hybrid level:16 hour fcst: +165:126911932:d=2025100600:SNMR:9 hybrid level:16 hour fcst: +166:126923047:d=2025100600:GRLE:9 hybrid level:16 hour fcst: +167:126929788:d=2025100600:NCONCD:9 hybrid level:16 hour fcst: +168:127103679:d=2025100600:NCCICE:9 hybrid level:16 hour fcst: +169:127105009:d=2025100600:SPNCR:9 hybrid level:16 hour fcst: +170:127522975:d=2025100600:PMTF:9 hybrid level:16 hour fcst: +171:128784043:d=2025100600:PMTC:9 hybrid level:16 hour fcst: +172:129539853:d=2025100600:FRACCC:9 hybrid level:16 hour fcst: +173:130171588:d=2025100600:HGT:9 hybrid level:16 hour fcst: +174:132727896:d=2025100600:TMP:9 hybrid level:16 hour fcst: +175:133812786:d=2025100600:SPFH:9 hybrid level:16 hour fcst: +176:135370771:d=2025100600:UGRD:9 hybrid level:16 hour fcst: +177:136451528:d=2025100600:VGRD:9 hybrid level:16 hour fcst: +178:137544390:d=2025100600:VVEL:9 hybrid level:16 hour fcst: +179:139252359:d=2025100600:TKE:9 hybrid level:16 hour fcst: +180:139578372:d=2025100600:MASSDEN:9 hybrid level:16 hour fcst: +181:141196166:d=2025100600:PRES:10 hybrid level:16 hour fcst: 
+182:143425289:d=2025100600:CLMR:10 hybrid level:16 hour fcst: +183:143586309:d=2025100600:CIMIXR:10 hybrid level:16 hour fcst: +184:143587576:d=2025100600:RWMR:10 hybrid level:16 hour fcst: +185:143928891:d=2025100600:SNMR:10 hybrid level:16 hour fcst: +186:143943503:d=2025100600:GRLE:10 hybrid level:16 hour fcst: +187:143948355:d=2025100600:NCONCD:10 hybrid level:16 hour fcst: +188:144098712:d=2025100600:NCCICE:10 hybrid level:16 hour fcst: +189:144100256:d=2025100600:SPNCR:10 hybrid level:16 hour fcst: +190:144493630:d=2025100600:PMTF:10 hybrid level:16 hour fcst: +191:145745709:d=2025100600:PMTC:10 hybrid level:16 hour fcst: +192:146503615:d=2025100600:FRACCC:10 hybrid level:16 hour fcst: +193:147063899:d=2025100600:HGT:10 hybrid level:16 hour fcst: +194:149626521:d=2025100600:TMP:10 hybrid level:16 hour fcst: +195:150702451:d=2025100600:SPFH:10 hybrid level:16 hour fcst: +196:152232961:d=2025100600:UGRD:10 hybrid level:16 hour fcst: +197:153297472:d=2025100600:VGRD:10 hybrid level:16 hour fcst: +198:154373776:d=2025100600:VVEL:10 hybrid level:16 hour fcst: +199:156052405:d=2025100600:TKE:10 hybrid level:16 hour fcst: +200:156258566:d=2025100600:MASSDEN:10 hybrid level:16 hour fcst: +201:157870227:d=2025100600:PRES:11 hybrid level:16 hour fcst: +202:160069255:d=2025100600:CLMR:11 hybrid level:16 hour fcst: +203:160181462:d=2025100600:CIMIXR:11 hybrid level:16 hour fcst: +204:160183128:d=2025100600:RWMR:11 hybrid level:16 hour fcst: +205:160490394:d=2025100600:SNMR:11 hybrid level:16 hour fcst: +206:160509280:d=2025100600:GRLE:11 hybrid level:16 hour fcst: +207:160517947:d=2025100600:NCONCD:11 hybrid level:16 hour fcst: +208:160625274:d=2025100600:NCCICE:11 hybrid level:16 hour fcst: +209:160627369:d=2025100600:SPNCR:11 hybrid level:16 hour fcst: +210:160969924:d=2025100600:PMTF:11 hybrid level:16 hour fcst: +211:162168406:d=2025100600:PMTC:11 hybrid level:16 hour fcst: +212:162923793:d=2025100600:FRACCC:11 hybrid level:16 hour fcst: +213:163409762:d=2025100600:HGT:11 hybrid level:16 hour fcst: +214:165972684:d=2025100600:TMP:11 hybrid level:16 hour fcst: +215:167021814:d=2025100600:SPFH:11 hybrid level:16 hour fcst: +216:168507484:d=2025100600:UGRD:11 hybrid level:16 hour fcst: +217:169551660:d=2025100600:VGRD:11 hybrid level:16 hour fcst: +218:170605892:d=2025100600:VVEL:11 hybrid level:16 hour fcst: +219:172258645:d=2025100600:TKE:11 hybrid level:16 hour fcst: +220:172358446:d=2025100600:MASSDEN:11 hybrid level:16 hour fcst: +221:174160630:d=2025100600:PRES:12 hybrid level:16 hour fcst: +222:176338548:d=2025100600:CLMR:12 hybrid level:16 hour fcst: +223:176448554:d=2025100600:CIMIXR:12 hybrid level:16 hour fcst: +224:176451111:d=2025100600:RWMR:12 hybrid level:16 hour fcst: +225:176658455:d=2025100600:SNMR:12 hybrid level:16 hour fcst: +226:176683532:d=2025100600:GRLE:12 hybrid level:16 hour fcst: +227:176697831:d=2025100600:NCONCD:12 hybrid level:16 hour fcst: +228:176804621:d=2025100600:NCCICE:12 hybrid level:16 hour fcst: +229:176807686:d=2025100600:SPNCR:12 hybrid level:16 hour fcst: +230:177132499:d=2025100600:PMTF:12 hybrid level:16 hour fcst: +231:178267701:d=2025100600:PMTC:12 hybrid level:16 hour fcst: +232:178989260:d=2025100600:FRACCC:12 hybrid level:16 hour fcst: +233:179452104:d=2025100600:HGT:12 hybrid level:16 hour fcst: +234:182010428:d=2025100600:TMP:12 hybrid level:16 hour fcst: +235:183026832:d=2025100600:SPFH:12 hybrid level:16 hour fcst: +236:184462619:d=2025100600:UGRD:12 hybrid level:16 hour fcst: +237:185484614:d=2025100600:VGRD:12 hybrid level:16 hour fcst: 
+238:186519978:d=2025100600:VVEL:12 hybrid level:16 hour fcst: +239:188157629:d=2025100600:TKE:12 hybrid level:16 hour fcst: +240:188201701:d=2025100600:MASSDEN:12 hybrid level:16 hour fcst: +241:189998383:d=2025100600:PRES:13 hybrid level:16 hour fcst: +242:192403129:d=2025100600:CLMR:13 hybrid level:16 hour fcst: +243:192511965:d=2025100600:CIMIXR:13 hybrid level:16 hour fcst: +244:192515437:d=2025100600:RWMR:13 hybrid level:16 hour fcst: +245:192718698:d=2025100600:SNMR:13 hybrid level:16 hour fcst: +246:192757294:d=2025100600:GRLE:13 hybrid level:16 hour fcst: +247:192767756:d=2025100600:NCONCD:13 hybrid level:16 hour fcst: +248:192871182:d=2025100600:NCCICE:13 hybrid level:16 hour fcst: +249:192875472:d=2025100600:SPNCR:13 hybrid level:16 hour fcst: +250:193197239:d=2025100600:PMTF:13 hybrid level:16 hour fcst: +251:194237341:d=2025100600:PMTC:13 hybrid level:16 hour fcst: +252:194936176:d=2025100600:FRACCC:13 hybrid level:16 hour fcst: +253:195371408:d=2025100600:HGT:13 hybrid level:16 hour fcst: +254:197920180:d=2025100600:TMP:13 hybrid level:16 hour fcst: +255:198900287:d=2025100600:SPFH:13 hybrid level:16 hour fcst: +256:200280195:d=2025100600:UGRD:13 hybrid level:16 hour fcst: +257:201279491:d=2025100600:VGRD:13 hybrid level:16 hour fcst: +258:202290408:d=2025100600:VVEL:13 hybrid level:16 hour fcst: +259:203930442:d=2025100600:TKE:13 hybrid level:16 hour fcst: +260:203950997:d=2025100600:MASSDEN:13 hybrid level:16 hour fcst: +261:205671795:d=2025100600:PRES:14 hybrid level:16 hour fcst: +262:208042580:d=2025100600:CLMR:14 hybrid level:16 hour fcst: +263:208171861:d=2025100600:CIMIXR:14 hybrid level:16 hour fcst: +264:208178895:d=2025100600:RWMR:14 hybrid level:16 hour fcst: +265:208369530:d=2025100600:SNMR:14 hybrid level:16 hour fcst: +266:208399861:d=2025100600:GRLE:14 hybrid level:16 hour fcst: +267:208417805:d=2025100600:NCONCD:14 hybrid level:16 hour fcst: +268:208531050:d=2025100600:NCCICE:14 hybrid level:16 hour fcst: +269:208538900:d=2025100600:SPNCR:14 hybrid level:16 hour fcst: +270:208732582:d=2025100600:PMTF:14 hybrid level:16 hour fcst: +271:209683732:d=2025100600:PMTC:14 hybrid level:16 hour fcst: +272:210350428:d=2025100600:FRACCC:14 hybrid level:16 hour fcst: +273:210714134:d=2025100600:HGT:14 hybrid level:16 hour fcst: +274:213247894:d=2025100600:TMP:14 hybrid level:16 hour fcst: +275:214234613:d=2025100600:SPFH:14 hybrid level:16 hour fcst: +276:215543299:d=2025100600:UGRD:14 hybrid level:16 hour fcst: +277:216514147:d=2025100600:VGRD:14 hybrid level:16 hour fcst: +278:217473521:d=2025100600:VVEL:14 hybrid level:16 hour fcst: +279:219088252:d=2025100600:TKE:14 hybrid level:16 hour fcst: +280:219099074:d=2025100600:MASSDEN:14 hybrid level:16 hour fcst: +281:220644193:d=2025100600:PRES:15 hybrid level:16 hour fcst: +282:222975012:d=2025100600:CLMR:15 hybrid level:16 hour fcst: +283:223067280:d=2025100600:CIMIXR:15 hybrid level:16 hour fcst: +284:223080495:d=2025100600:RWMR:15 hybrid level:16 hour fcst: +285:223224492:d=2025100600:SNMR:15 hybrid level:16 hour fcst: +286:223270365:d=2025100600:GRLE:15 hybrid level:16 hour fcst: +287:223298641:d=2025100600:NCONCD:15 hybrid level:16 hour fcst: +288:223382383:d=2025100600:NCCICE:15 hybrid level:16 hour fcst: +289:223396746:d=2025100600:SPNCR:15 hybrid level:16 hour fcst: +290:223603236:d=2025100600:PMTF:15 hybrid level:16 hour fcst: +291:224449524:d=2025100600:PMTC:15 hybrid level:16 hour fcst: +292:225097756:d=2025100600:FRACCC:15 hybrid level:16 hour fcst: +293:225351292:d=2025100600:HGT:15 hybrid level:16 hour 
fcst: +294:227864038:d=2025100600:TMP:15 hybrid level:16 hour fcst: +295:228830683:d=2025100600:SPFH:15 hybrid level:16 hour fcst: +296:230051170:d=2025100600:UGRD:15 hybrid level:16 hour fcst: +297:230995691:d=2025100600:VGRD:15 hybrid level:16 hour fcst: +298:231930835:d=2025100600:VVEL:15 hybrid level:16 hour fcst: +299:233512199:d=2025100600:TKE:15 hybrid level:16 hour fcst: +300:233517133:d=2025100600:MASSDEN:15 hybrid level:16 hour fcst: +301:234718560:d=2025100600:PRES:16 hybrid level:16 hour fcst: +302:236997902:d=2025100600:CLMR:16 hybrid level:16 hour fcst: +303:237075364:d=2025100600:CIMIXR:16 hybrid level:16 hour fcst: +304:237103785:d=2025100600:RWMR:16 hybrid level:16 hour fcst: +305:237216572:d=2025100600:SNMR:16 hybrid level:16 hour fcst: +306:237286101:d=2025100600:GRLE:16 hybrid level:16 hour fcst: +307:237323026:d=2025100600:NCONCD:16 hybrid level:16 hour fcst: +308:237391434:d=2025100600:NCCICE:16 hybrid level:16 hour fcst: +309:237421180:d=2025100600:SPNCR:16 hybrid level:16 hour fcst: +310:237542635:d=2025100600:PMTF:16 hybrid level:16 hour fcst: +311:238304783:d=2025100600:PMTC:16 hybrid level:16 hour fcst: +312:238913428:d=2025100600:FRACCC:16 hybrid level:16 hour fcst: +313:239126786:d=2025100600:HGT:16 hybrid level:16 hour fcst: +314:241610643:d=2025100600:TMP:16 hybrid level:16 hour fcst: +315:242536194:d=2025100600:SPFH:16 hybrid level:16 hour fcst: +316:244424473:d=2025100600:UGRD:16 hybrid level:16 hour fcst: +317:245337746:d=2025100600:VGRD:16 hybrid level:16 hour fcst: +318:246236852:d=2025100600:VVEL:16 hybrid level:16 hour fcst: +319:247780895:d=2025100600:TKE:16 hybrid level:16 hour fcst: +320:247782608:d=2025100600:MASSDEN:16 hybrid level:16 hour fcst: +321:249046038:d=2025100600:PRES:17 hybrid level:16 hour fcst: +322:251229111:d=2025100600:CLMR:17 hybrid level:16 hour fcst: +323:251287039:d=2025100600:CIMIXR:17 hybrid level:16 hour fcst: +324:251312077:d=2025100600:RWMR:17 hybrid level:16 hour fcst: +325:251373673:d=2025100600:SNMR:17 hybrid level:16 hour fcst: +326:251469200:d=2025100600:GRLE:17 hybrid level:16 hour fcst: +327:251513964:d=2025100600:NCONCD:17 hybrid level:16 hour fcst: +328:251563899:d=2025100600:NCCICE:17 hybrid level:16 hour fcst: +329:251591393:d=2025100600:SPNCR:17 hybrid level:16 hour fcst: +330:251667379:d=2025100600:PMTF:17 hybrid level:16 hour fcst: +331:252333066:d=2025100600:PMTC:17 hybrid level:16 hour fcst: +332:252914715:d=2025100600:FRACCC:17 hybrid level:16 hour fcst: +333:253109024:d=2025100600:HGT:17 hybrid level:16 hour fcst: +334:255537548:d=2025100600:TMP:17 hybrid level:16 hour fcst: +335:256442471:d=2025100600:SPFH:17 hybrid level:16 hour fcst: +336:258192619:d=2025100600:UGRD:17 hybrid level:16 hour fcst: +337:259078857:d=2025100600:VGRD:17 hybrid level:16 hour fcst: +338:259929093:d=2025100600:VVEL:17 hybrid level:16 hour fcst: +339:261413238:d=2025100600:TKE:17 hybrid level:16 hour fcst: +340:261413726:d=2025100600:MASSDEN:17 hybrid level:16 hour fcst: +341:262830486:d=2025100600:PRES:18 hybrid level:16 hour fcst: +342:265192764:d=2025100600:CLMR:18 hybrid level:16 hour fcst: +343:265237007:d=2025100600:CIMIXR:18 hybrid level:16 hour fcst: +344:265272527:d=2025100600:RWMR:18 hybrid level:16 hour fcst: +345:265338898:d=2025100600:SNMR:18 hybrid level:16 hour fcst: +346:265442023:d=2025100600:GRLE:18 hybrid level:16 hour fcst: +347:265481338:d=2025100600:NCONCD:18 hybrid level:16 hour fcst: +348:265537952:d=2025100600:NCCICE:18 hybrid level:16 hour fcst: +349:265577490:d=2025100600:SPNCR:18 hybrid level:16 hour 
fcst: +350:265674638:d=2025100600:PMTF:18 hybrid level:16 hour fcst: +351:266284308:d=2025100600:PMTC:18 hybrid level:16 hour fcst: +352:266852256:d=2025100600:FRACCC:18 hybrid level:16 hour fcst: +353:267056284:d=2025100600:HGT:18 hybrid level:16 hour fcst: +354:269439148:d=2025100600:TMP:18 hybrid level:16 hour fcst: +355:270325235:d=2025100600:SPFH:18 hybrid level:16 hour fcst: +356:271963195:d=2025100600:UGRD:18 hybrid level:16 hour fcst: +357:272828246:d=2025100600:VGRD:18 hybrid level:16 hour fcst: +358:273675097:d=2025100600:VVEL:18 hybrid level:16 hour fcst: +359:275113051:d=2025100600:TKE:18 hybrid level:16 hour fcst: +360:275113386:d=2025100600:MASSDEN:18 hybrid level:16 hour fcst: +361:276563973:d=2025100600:PRES:19 hybrid level:16 hour fcst: +362:278919289:d=2025100600:CLMR:19 hybrid level:16 hour fcst: +363:278956875:d=2025100600:CIMIXR:19 hybrid level:16 hour fcst: +364:278994546:d=2025100600:RWMR:19 hybrid level:16 hour fcst: +365:279055785:d=2025100600:SNMR:19 hybrid level:16 hour fcst: +366:279165894:d=2025100600:GRLE:19 hybrid level:16 hour fcst: +367:279197366:d=2025100600:NCONCD:19 hybrid level:16 hour fcst: +368:279244562:d=2025100600:NCCICE:19 hybrid level:16 hour fcst: +369:279284736:d=2025100600:SPNCR:19 hybrid level:16 hour fcst: +370:279364694:d=2025100600:PMTF:19 hybrid level:16 hour fcst: +371:279940446:d=2025100600:PMTC:19 hybrid level:16 hour fcst: +372:280488775:d=2025100600:FRACCC:19 hybrid level:16 hour fcst: +373:280706424:d=2025100600:HGT:19 hybrid level:16 hour fcst: +374:283042094:d=2025100600:TMP:19 hybrid level:16 hour fcst: +375:283907427:d=2025100600:SPFH:19 hybrid level:16 hour fcst: +376:285439024:d=2025100600:UGRD:19 hybrid level:16 hour fcst: +377:286312464:d=2025100600:VGRD:19 hybrid level:16 hour fcst: +378:287151296:d=2025100600:VVEL:19 hybrid level:16 hour fcst: +379:288549292:d=2025100600:TKE:19 hybrid level:16 hour fcst: +380:288549605:d=2025100600:MASSDEN:19 hybrid level:16 hour fcst: +381:289772340:d=2025100600:PRES:20 hybrid level:16 hour fcst: +382:292027986:d=2025100600:CLMR:20 hybrid level:16 hour fcst: +383:292048307:d=2025100600:CIMIXR:20 hybrid level:16 hour fcst: +384:292085072:d=2025100600:RWMR:20 hybrid level:16 hour fcst: +385:292123234:d=2025100600:SNMR:20 hybrid level:16 hour fcst: +386:292229013:d=2025100600:GRLE:20 hybrid level:16 hour fcst: +387:292253822:d=2025100600:NCONCD:20 hybrid level:16 hour fcst: +388:292278343:d=2025100600:NCCICE:20 hybrid level:16 hour fcst: +389:292319558:d=2025100600:SPNCR:20 hybrid level:16 hour fcst: +390:292370976:d=2025100600:PMTF:20 hybrid level:16 hour fcst: +391:292887521:d=2025100600:PMTC:20 hybrid level:16 hour fcst: +392:293436045:d=2025100600:FRACCC:20 hybrid level:16 hour fcst: +393:293659346:d=2025100600:HGT:20 hybrid level:16 hour fcst: +394:295907199:d=2025100600:TMP:20 hybrid level:16 hour fcst: +395:296745417:d=2025100600:SPFH:20 hybrid level:16 hour fcst: +396:298162232:d=2025100600:UGRD:20 hybrid level:16 hour fcst: +397:299027187:d=2025100600:VGRD:20 hybrid level:16 hour fcst: +398:299852191:d=2025100600:VVEL:20 hybrid level:16 hour fcst: +399:301219001:d=2025100600:TKE:20 hybrid level:16 hour fcst: +400:301219455:d=2025100600:MASSDEN:20 hybrid level:16 hour fcst: +401:302445099:d=2025100600:PRES:21 hybrid level:16 hour fcst: +402:304849718:d=2025100600:CLMR:21 hybrid level:16 hour fcst: +403:304858923:d=2025100600:CIMIXR:21 hybrid level:16 hour fcst: +404:304897978:d=2025100600:RWMR:21 hybrid level:16 hour fcst: +405:304922528:d=2025100600:SNMR:21 hybrid level:16 hour 
[wgrib2 short inventory, continued: records 406–1642 of the 16-hour forecast file. Each hybrid level from 21 through 50 repeats the same set of fields (PRES, CLMR, CIMIXR, RWMR, SNMR, GRLE, NCONCD, NCCICE, SPNCR, PMTF, PMTC, FRACCC, HGT, TMP, SPFH, UGRD, VGRD, VVEL, TKE, MASSDEN). These are followed by surface and column diagnostics (soil moisture, reflectivity and echo tops, updraft helicity, lightning, 2 m and 10 m fields, precipitation and runoff accumulations, precipitation-type flags, surface fluxes and radiation, cloud layers and ceiling/base/top, CAPE/CIN, storm-relative helicity and shear, freezing-level and boundary-layer heights, brightness temperatures), and then by isobaric-level fields (HGT, TMP, RH, DPT, SPFH, VVEL, UGRD, VGRD, ABSV, and hydrometeor mixing ratios) on levels from 50 mb to 950 mb in 25 mb steps. Individual record numbers and byte offsets are omitted from this placeholder.]
+1643:1011732999:d=2025100600:RH:950 mb:16 hour fcst: +1644:1012619346:d=2025100600:DPT:950 mb:16 hour fcst: +1645:1013787161:d=2025100600:SPFH:950 mb:16 hour fcst: +1646:1015351874:d=2025100600:VVEL:950 mb:16 hour fcst: +1647:1015996761:d=2025100600:UGRD:950 mb:16 hour fcst: +1648:1016626030:d=2025100600:VGRD:950 mb:16 hour fcst: +1649:1017241333:d=2025100600:ABSV:950 mb:16 hour fcst: +1650:1017795711:d=2025100600:CLMR:950 mb:16 hour fcst: +1651:1017853730:d=2025100600:CIMIXR:950 mb:16 hour fcst: +1652:1017987388:d=2025100600:RWMR:950 mb:16 hour fcst: +1653:1018097336:d=2025100600:SNMR:950 mb:16 hour fcst: +1654:1018097817:d=2025100600:GRLE:950 mb:16 hour fcst: +1655:1018098376:d=2025100600:HGT:975 mb:16 hour fcst: +1656:1018903251:d=2025100600:TMP:975 mb:16 hour fcst: +1657:1019519356:d=2025100600:RH:975 mb:16 hour fcst: +1658:1020400791:d=2025100600:DPT:975 mb:16 hour fcst: +1659:1021566265:d=2025100600:SPFH:975 mb:16 hour fcst: +1660:1023145309:d=2025100600:VVEL:975 mb:16 hour fcst: +1661:1023766661:d=2025100600:UGRD:975 mb:16 hour fcst: +1662:1024399773:d=2025100600:VGRD:975 mb:16 hour fcst: +1663:1025015539:d=2025100600:ABSV:975 mb:16 hour fcst: +1664:1025568606:d=2025100600:CLMR:975 mb:16 hour fcst: +1665:1025599439:d=2025100600:CIMIXR:975 mb:16 hour fcst: +1666:1025800280:d=2025100600:RWMR:975 mb:16 hour fcst: +1667:1025896499:d=2025100600:SNMR:975 mb:16 hour fcst: +1668:1025977876:d=2025100600:GRLE:975 mb:16 hour fcst: +1669:1025978571:d=2025100600:TMP:1000 mb:16 hour fcst: +1670:1026603921:d=2025100600:RH:1000 mb:16 hour fcst: +1671:1027474002:d=2025100600:DPT:1000 mb:16 hour fcst: +1672:1028643425:d=2025100600:SPFH:1000 mb:16 hour fcst: +1673:1030245802:d=2025100600:VVEL:1000 mb:16 hour fcst: +1674:1030821025:d=2025100600:UGRD:1000 mb:16 hour fcst: +1675:1031460645:d=2025100600:VGRD:1000 mb:16 hour fcst: +1676:1032076265:d=2025100600:ABSV:1000 mb:16 hour fcst: +1677:1032626421:d=2025100600:CLMR:1000 mb:16 hour fcst: +1678:1032640918:d=2025100600:CIMIXR:1000 mb:16 hour fcst: +1679:1032781407:d=2025100600:RWMR:1000 mb:16 hour fcst: +1680:1032850332:d=2025100600:SNMR:1000 mb:16 hour fcst: +1681:1032935480:d=2025100600:GRLE:1000 mb:16 hour fcst: +1682:1032935848:d=2025100600:HGT:1013.2 mb:16 hour fcst: +1683:1033783505:d=2025100600:TMP:1013.2 mb:16 hour fcst: +1684:1034412423:d=2025100600:RH:1013.2 mb:16 hour fcst: +1685:1035281592:d=2025100600:DPT:1013.2 mb:16 hour fcst: +1686:1036448937:d=2025100600:SPFH:1013.2 mb:16 hour fcst: +1687:1038057663:d=2025100600:VVEL:1013.2 mb:16 hour fcst: +1688:1038555681:d=2025100600:UGRD:1013.2 mb:16 hour fcst: +1689:1039195806:d=2025100600:VGRD:1013.2 mb:16 hour fcst: +1690:1039809497:d=2025100600:ABSV:1013.2 mb:16 hour fcst: +1691:1040357372:d=2025100600:CLMR:1013.2 mb:16 hour fcst: +1692:1040372100:d=2025100600:CIMIXR:1013.2 mb:16 hour fcst: +1693:1040450676:d=2025100600:RWMR:1013.2 mb:16 hour fcst: +1694:1040491986:d=2025100600:SNMR:1013.2 mb:16 hour fcst: +1695:1040570562:d=2025100600:GRLE:1013.2 mb:16 hour fcst: +1696:1040619661:d=2025100600:TSOIL:0-0 m below ground:16 hour fcst: +1697:1042504545:d=2025100600:SOILW:0-0 m below ground:16 hour fcst: +1698:1043745921:d=2025100600:TSOIL:0.01-0.01 m below ground:16 hour fcst: +1699:1045586734:d=2025100600:SOILW:0.01-0.01 m below ground:16 hour fcst: +1700:1046848063:d=2025100600:TSOIL:0.04-0.04 m below ground:16 hour fcst: +1701:1048664384:d=2025100600:SOILW:0.04-0.04 m below ground:16 hour fcst: +1702:1049923586:d=2025100600:TSOIL:0.1-0.1 m below ground:16 hour fcst: 
+1703:1051730291:d=2025100600:SOILW:0.1-0.1 m below ground:16 hour fcst: +1704:1052989010:d=2025100600:TSOIL:0.3-0.3 m below ground:16 hour fcst: +1705:1054823078:d=2025100600:SOILW:0.3-0.3 m below ground:16 hour fcst: +1706:1056079831:d=2025100600:TSOIL:0.6-0.6 m below ground:16 hour fcst: +1707:1057967812:d=2025100600:SOILW:0.6-0.6 m below ground:16 hour fcst: +1708:1059208197:d=2025100600:TSOIL:1-1 m below ground:16 hour fcst: +1709:1061152929:d=2025100600:SOILW:1-1 m below ground:16 hour fcst: +1710:1062367353:d=2025100600:TSOIL:1.6-1.6 m below ground:16 hour fcst: +1711:1064355673:d=2025100600:SOILW:1.6-1.6 m below ground:16 hour fcst: +1712:1065561972:d=2025100600:TSOIL:3-3 m below ground:16 hour fcst: +1713:1067127234:d=2025100600:SOILW:3-3 m below ground:16 hour fcst: +1714:1067959601:d=2025100600:REFC:entire atmosphere:16 hour fcst: +1715:1068489475:d=2025100600:RETOP:cloud top:16 hour fcst: +1716:1068698949:d=2025100600:VIL:entire atmosphere:16 hour fcst: +1717:1069008591:d=2025100600:VIS:surface:16 hour fcst: +1718:1070506897:d=2025100600:REFD:1000 m above ground:16 hour fcst: +1719:1070854140:d=2025100600:REFD:4000 m above ground:16 hour fcst: +1720:1071037831:d=2025100600:REFD:263 K level:16 hour fcst: +1721:1071219268:d=2025100600:GUST:surface:16 hour fcst: +1722:1072404850:d=2025100600:MAXUVV:100-1000 mb above ground:15-16 hour max fcst: +1723:1073226739:d=2025100600:MAXDVV:100-1000 mb above ground:15-16 hour max fcst: +1724:1073985363:d=2025100600:DZDT:0.5-0.8 sigma layer:15-16 hour ave fcst: +1725:1074591563:d=2025100600:MSLMA:mean sea level:16 hour fcst: +1726:1075202848:d=2025100600:HGT:1000 mb:16 hour fcst: +1727:1075908737:d=2025100600:MAXREF:1000 m above ground:15-16 hour max fcst: +1728:1076225102:d=2025100600:REFD:263 K level:15-16 hour max fcst: +1729:1076451957:d=2025100600:MXUPHL:5000-2000 m above ground:15-16 hour max fcst: +1730:1076509400:d=2025100600:MNUPHL:5000-2000 m above ground:15-16 hour min fcst: +1731:1076564924:d=2025100600:MXUPHL:2000-0 m above ground:15-16 hour max fcst: +1732:1076612751:d=2025100600:MNUPHL:2000-0 m above ground:15-16 hour min fcst: +1733:1076642591:d=2025100600:MXUPHL:3000-0 m above ground:15-16 hour max fcst: +1734:1076697859:d=2025100600:MNUPHL:3000-0 m above ground:15-16 hour min fcst: +1735:1076736970:d=2025100600:RELV:2000-0 m above ground:15-16 hour max fcst: +1736:1079354414:d=2025100600:RELV:1000-0 m above ground:15-16 hour max fcst: +1737:1082261441:d=2025100600:HAIL:entire atmosphere:15-16 hour max fcst: +1738:1082495735:d=2025100600:HAIL:0.1 sigma level:15-16 hour max fcst: +1739:1082496911:d=2025100600:TCOLG:entire atmosphere (considered as a single layer):15-16 hour max fcst: +1740:1082551191:d=2025100600:LTNG:entire atmosphere:16 hour fcst: +1741:1082583747:d=2025100600:UGRD:80 m above ground:16 hour fcst: +1742:1083791462:d=2025100600:VGRD:80 m above ground:16 hour fcst: +1743:1084926811:d=2025100600:PRES:surface:16 hour fcst: +1744:1086438027:d=2025100600:HGT:surface:16 hour fcst: +1745:1088591722:d=2025100600:TMP:surface:16 hour fcst: +1746:1089947701:d=2025100600:ASNOW:surface:0-16 hour acc fcst: +1747:1089991653:d=2025100600:MSTAV:0 m underground:16 hour fcst: +1748:1091464941:d=2025100600:CNWAT:surface:16 hour fcst: +1749:1091555050:d=2025100600:WEASD:surface:16 hour fcst: +1750:1091573436:d=2025100600:SNOWC:surface:16 hour fcst: +1751:1091587158:d=2025100600:SNOD:surface:16 hour fcst: +1752:1091602543:d=2025100600:TMP:2 m above ground:16 hour fcst: +1753:1092816255:d=2025100600:POT:2 m above ground:16 hour 
fcst: +1754:1093956113:d=2025100600:SPFH:2 m above ground:16 hour fcst: +1755:1095570387:d=2025100600:DPT:2 m above ground:16 hour fcst: +1756:1096792009:d=2025100600:RH:2 m above ground:16 hour fcst: +1757:1098437177:d=2025100600:MASSDEN:8 m above ground:16 hour fcst: +1758:1099191233:d=2025100600:UGRD:10 m above ground:16 hour fcst: +1759:1101572848:d=2025100600:VGRD:10 m above ground:16 hour fcst: +1760:1103954463:d=2025100600:WIND:10 m above ground:15-16 hour max fcst: +1761:1105180726:d=2025100600:MAXUW:10 m above ground:15-16 hour max fcst: +1762:1106545458:d=2025100600:MAXVW:10 m above ground:15-16 hour max fcst: +1763:1107855495:d=2025100600:CPOFP:surface:16 hour fcst: +1764:1107985117:d=2025100600:PRATE:surface:16 hour fcst: +1765:1108056789:d=2025100600:APCP:surface:0-16 hour acc fcst: +1766:1108783098:d=2025100600:WEASD:surface:0-16 hour acc fcst: +1767:1108820516:d=2025100600:FROZR:surface:0-16 hour acc fcst: +1768:1108866830:d=2025100600:FRZR:surface:0-16 hour acc fcst: +1769:1108884820:d=2025100600:SSRUN:surface:15-16 hour acc fcst: +1770:1108903122:d=2025100600:BGRUN:surface:15-16 hour acc fcst: +1771:1108903506:d=2025100600:APCP:surface:15-16 hour acc fcst: +1772:1109146172:d=2025100600:WEASD:surface:15-16 hour acc fcst: +1773:1109149685:d=2025100600:FROZR:surface:15-16 hour acc fcst: +1774:1109152250:d=2025100600:CSNOW:surface:16 hour fcst: +1775:1109152887:d=2025100600:CICEP:surface:16 hour fcst: +1776:1109153128:d=2025100600:CFRZR:surface:16 hour fcst: +1777:1109153572:d=2025100600:CRAIN:surface:16 hour fcst: +1778:1109233213:d=2025100600:SFCR:surface:16 hour fcst: +1779:1111151141:d=2025100600:FRICV:surface:16 hour fcst: +1780:1112289677:d=2025100600:SHTFL:surface:16 hour fcst: +1781:1113790686:d=2025100600:LHTFL:surface:16 hour fcst: +1782:1115397941:d=2025100600:GFLUX:surface:16 hour fcst: +1783:1116047551:d=2025100600:VGTYP:surface:16 hour fcst: +1784:1116828730:d=2025100600:LFTX:500-1000 mb:16 hour fcst: +1785:1117792889:d=2025100600:CAPE:surface:16 hour fcst: +1786:1118394693:d=2025100600:CIN:surface:16 hour fcst: +1787:1119082953:d=2025100600:PWAT:entire atmosphere (considered as a single layer):16 hour fcst: +1788:1120125669:d=2025100600:AOTK:entire atmosphere (considered as a single layer):16 hour fcst: +1789:1121248508:d=2025100600:COLMD:entire atmosphere (considered as a single layer):16 hour fcst: +1790:1122153549:d=2025100600:LCDC:low cloud layer:16 hour fcst: +1791:1123121857:d=2025100600:MCDC:middle cloud layer:16 hour fcst: +1792:1123579737:d=2025100600:HCDC:high cloud layer:16 hour fcst: +1793:1123957084:d=2025100600:TCDC:entire atmosphere:16 hour fcst: +1794:1124903566:d=2025100600:HGT:cloud ceiling:16 hour fcst: +1795:1126285661:d=2025100600:HGT:cloud base:16 hour fcst: +1796:1128763214:d=2025100600:PRES:cloud base:16 hour fcst: +1797:1129980237:d=2025100600:PRES:cloud top:16 hour fcst: +1798:1130681892:d=2025100600:HGT:cloud top:16 hour fcst: +1799:1131904680:d=2025100600:ULWRF:top of atmosphere:16 hour fcst: +1800:1133914499:d=2025100600:DSWRF:surface:16 hour fcst: +1801:1136566613:d=2025100600:DLWRF:surface:16 hour fcst: +1802:1138747220:d=2025100600:USWRF:surface:16 hour fcst: +1803:1140860760:d=2025100600:ULWRF:surface:16 hour fcst: +1804:1142545317:d=2025100600:CFNSF:surface:16 hour fcst: +1805:1142550196:d=2025100600:VBDSF:surface:16 hour fcst: +1806:1144594745:d=2025100600:VDDSF:surface:16 hour fcst: +1807:1147340319:d=2025100600:USWRF:top of atmosphere:16 hour fcst: +1808:1149979618:d=2025100600:HLCY:3000-0 m above ground:16 hour fcst: 
+1809:1151176234:d=2025100600:HLCY:1000-0 m above ground:16 hour fcst: +1810:1153101642:d=2025100600:USTM:0-6000 m above ground:16 hour fcst: +1811:1154211554:d=2025100600:VSTM:0-6000 m above ground:16 hour fcst: +1812:1155276908:d=2025100600:VUCSH:0-1000 m above ground:16 hour fcst: +1813:1157658523:d=2025100600:VVCSH:0-1000 m above ground:16 hour fcst: +1814:1160040138:d=2025100600:VUCSH:0-6000 m above ground:16 hour fcst: +1815:1162421753:d=2025100600:VVCSH:0-6000 m above ground:16 hour fcst: +1816:1165041510:d=2025100600:HGT:0C isotherm:16 hour fcst: +1817:1167332629:d=2025100600:RH:0C isotherm:16 hour fcst: +1818:1168095454:d=2025100600:PRES:0C isotherm:16 hour fcst: +1819:1168839873:d=2025100600:HGT:highest tropospheric freezing level:16 hour fcst: +1820:1169611391:d=2025100600:RH:highest tropospheric freezing level:16 hour fcst: +1821:1170360924:d=2025100600:PRES:highest tropospheric freezing level:16 hour fcst: +1822:1171102483:d=2025100600:HGT:263 K level:16 hour fcst: +1823:1171796252:d=2025100600:HGT:253 K level:16 hour fcst: +1824:1172418577:d=2025100600:4LFTX:180-0 mb above ground:16 hour fcst: +1825:1173388996:d=2025100600:CAPE:180-0 mb above ground:16 hour fcst: +1826:1173989560:d=2025100600:CIN:180-0 mb above ground:16 hour fcst: +1827:1174696724:d=2025100600:HPBL:surface:16 hour fcst: +1828:1177708488:d=2025100600:HGT:level of adiabatic condensation from sfc:16 hour fcst: +1829:1180692920:d=2025100600:CAPE:90-0 mb above ground:16 hour fcst: +1830:1181189531:d=2025100600:CIN:90-0 mb above ground:16 hour fcst: +1831:1181835767:d=2025100600:CAPE:255-0 mb above ground:16 hour fcst: +1832:1182455716:d=2025100600:CIN:255-0 mb above ground:16 hour fcst: +1833:1183156518:d=2025100600:HGT:equilibrium level:16 hour fcst: +1834:1185479983:d=2025100600:PLPL:255-0 mb above ground:16 hour fcst: +1835:1186523596:d=2025100600:CAPE:0-3000 m above ground:16 hour fcst: +1836:1187551632:d=2025100600:HGT:level of free convection:16 hour fcst: +1837:1190458414:d=2025100600:EFHL:surface:16 hour fcst: +1838:1191529063:d=2025100600:CANGLE:0-500 m above ground:16 hour fcst: +1839:1193819829:d=2025100600:LAYTH:261 K level - 256 K level:16 hour fcst: +1840:1195152590:d=2025100600:ESP:0-3000 m above ground:16 hour fcst: +1841:1196147073:d=2025100600:RHPW:entire atmosphere:16 hour fcst: +1842:1197389107:d=2025100600:LAND:surface:16 hour fcst: +1843:1197439583:d=2025100600:ICEC:surface:16 hour fcst: +1844:1197439816:d=2025100600:SBT123:top of atmosphere:16 hour fcst: +1845:1199096684:d=2025100600:SBT124:top of atmosphere:16 hour fcst: +1846:1200883864:d=2025100600:SBT113:top of atmosphere:16 hour fcst: +1847:1202421514:d=2025100600:SBT114:top of atmosphere:16 hour fcst: diff --git a/tests/data/wrfnat_hrconus_07.grib2 b/tests/data/wrfnat_hrconus_07.grib2 deleted file mode 100644 index 4bacf7bb..00000000 --- a/tests/data/wrfnat_hrconus_07.grib2 +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:faea9adc1df68d3e848435ba9f693f1fbfb2e31a0991a5e8c23b119741ed7faf -size 740036834 diff --git a/tests/data/wrfnat_hrconus_16.grib2 b/tests/data/wrfnat_hrconus_16.grib2 new file mode 100644 index 00000000..cde057bb --- /dev/null +++ b/tests/data/wrfnat_hrconus_16.grib2 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6818f89f3b43f486b5a972b294bc66c7117594217945bb1cc9a32696768bff49 +size 769413571 diff --git a/tests/data/wrfprs_hrconus_07.grib2 b/tests/data/wrfprs_hrconus_07.grib2 deleted file mode 100644 index efb98a13..00000000 --- 
a/tests/data/wrfprs_hrconus_07.grib2 +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:76ae4d355ff35a8e3ad3693d1a0b392a1e7025a8fddbe195c1389ff73657e3fd -size 460393025 diff --git a/tests/data/wrfprs_hrconus_16.grib2 b/tests/data/wrfprs_hrconus_16.grib2 new file mode 100644 index 00000000..f99f0c4b --- /dev/null +++ b/tests/data/wrfprs_hrconus_16.grib2 @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6d2a7b6a201ed89058f4c3856db00cefaa2ea4179ddd6b7b69f7f9225fd58675 +size 434714591 diff --git a/tests/datahandler/__init__.py b/tests/datahandler/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/datahandler/test_gribdata.py b/tests/datahandler/test_gribdata.py new file mode 100644 index 00000000..5d10e10e --- /dev/null +++ b/tests/datahandler/test_gribdata.py @@ -0,0 +1,524 @@ +from datetime import datetime + +import numpy as np +from matplotlib.pyplot import get_cmap +from pytest import fixture, mark, raises +from uwtools.api.config import get_yaml_config +from xarray import DataArray, ones_like, zeros_like + +from adb_graphics import errors, utils +from adb_graphics.datahandler import gribdata + + +class ConcreteUPPData(gribdata.UPPData): + def get_values( + self, + level: str | None = None, # noqa: ARG002 + name: str | None = None, # noqa: ARG002 + do_transform: bool = True, # noqa: ARG002 + ) -> DataArray: + return self.field # type: ignore[no-any-return] + + +@fixture +def fielddata_obj(prs_ds, spec): + spec = get_yaml_config(spec) + spec.dereference(context={"file_type": "prs"}) + return gribdata.FieldData( + fhr=16, + ds=prs_ds, + level="sfc", + model="hrrr", + short_name="temp", + spec=spec, + ) + + +@fixture(scope="module") +def fielddata_obj_ro(prs_ds, spec): + spec = get_yaml_config(spec) + spec.dereference(context={"file_type": "prs"}) + return gribdata.FieldData( + fhr=16, + ds=prs_ds, + level="sfc", + model="hrrr", + short_name="temp", + spec=spec, + ) + + +@fixture(scope="module") +def profiledata_obj(nat_ds, spec): + spec = get_yaml_config(spec) + spec.dereference(context={"file_type": "nat"}) + return gribdata.ProfileData( + fhr=16, + ds=nat_ds, + loc=" DNR 23062 72469 39.77 104.88 1611 Denver, CO", + model="hrrr", + short_name="temp", + spec=spec, + ) + + +@fixture(scope="module") +def spec(spec_file): + spec = utils.load_yaml(spec_file) + spec.dereference(context={"fhr": 16}) + return spec + + +@fixture +def uppdata_obj(nat_ds, spec): + spec = get_yaml_config(spec) + spec.dereference(context={"file_type": "nat"}) + return ConcreteUPPData( + level="ua", + model="hrrr", + short_name="temp", + spec=spec, + fhr=16, + ds=nat_ds, + ) + + +@fixture(scope="module") +def uppdata_obj_ro(nat_ds, spec): + spec = get_yaml_config(spec) + spec.dereference(context={"file_type": "nat"}) + return ConcreteUPPData( + level="ua", + model="hrrr", + short_name="temp", + spec=spec, + fhr=16, + ds=nat_ds, + ) + + +@fixture +def uppdata_multilev_obj(nat_ds, spec): + spec = get_yaml_config(spec) + spec.dereference(context={"file_type": "nat"}) + return ConcreteUPPData( + model="hrrr", + short_name="temp", + spec=spec, + fhr=16, + ds=nat_ds, + ) + + +@fixture +def uppdata_multilev_prs_obj(prs_ds, spec): + spec = get_yaml_config(spec) + spec.dereference(context={"file_type": "prs"}) + return ConcreteUPPData( + level="500mb", + model="hrrr", + short_name="temp", + spec=spec, + fhr=16, + ds=prs_ds, + ) + + +def test_uppdata_anl_dt(uppdata_obj_ro): + dt = uppdata_obj_ro.anl_dt + assert dt == datetime(2025, 10, 6, 0) + + 
+def test_uppdata_clevs_array(uppdata_obj_ro): + assert np.array_equal(uppdata_obj_ro.clevs, np.arange(-40, 40, 2.5)) + + +def test_uppdata_clevs_list(uppdata_obj): + uppdata_obj.spec["temp"]["ua"]["clevs"] = [1, 2, 3] + assert np.array_equal(uppdata_obj.clevs, np.asarray([1, 2, 3])) + + +def test_uppdata_date_to_str(uppdata_obj_ro): + assert uppdata_obj_ro.date_to_str(uppdata_obj_ro.anl_dt) == "20251006 00 UTC" + + +def test_uppdata_field(uppdata_obj_ro): + assert np.array_equal(uppdata_obj_ro.field, uppdata_obj_ro.ds["t_hybrid_instant"].t) + + +def test_uppdata_field_column_max(prs_ds, spec): + spec = get_yaml_config(spec) + spec.dereference(context={"file_type": "prs"}) + fd = gribdata.FieldData( + fhr=16, + ds=prs_ds, + level="ua", + model="hrrr", + short_name="temp", + spec=spec, + ) + column_max = fd.field_column_max(values=fd.field) + assert np.array_equal(column_max, fd.field.max(axis=0)) + assert column_max.shape == (1059, 1799) + + +def test_uppdata_field_diff(fielddata_obj_ro): + summed_field = fielddata_obj_ro.field_diff( + values=fielddata_obj_ro.field, variable2="temp", level2="sfc", do_transform=False + ) + assert np.array_equal(summed_field, fielddata_obj_ro.field * 0) + + +def test_uppdata_field_mean(prs_ds, spec): + dataobj = gribdata.FieldData( + level="mean", + model="hrrr", + short_name="rh", + spec=spec, + fhr=16, + ds=prs_ds, + ) + levels = ["500mb", "800mb"] + mean = dataobj.field_mean(values=dataobj.field, levels=levels) + assert np.array_equal( + mean, dataobj.field.squeeze().sel(isobaricInhPa=[500, 800]).mean("isobaricInhPa") + ) + assert mean.shape == (1059, 1799) + + +def test_uppdata_field_sum(fielddata_obj_ro): + summed_field = fielddata_obj_ro.field_sum( + values=fielddata_obj_ro.field, variable2="temp", level2="sfc", do_transform=False + ) + assert np.array_equal(summed_field, fielddata_obj_ro.field * 2) + + +def test_uppdata__get_data_levels(uppdata_multilev_prs_obj): + assert np.array_equal( + uppdata_multilev_prs_obj._get_data_levels("isobaricInhPa"), + uppdata_multilev_prs_obj.field.coords["isobaricInhPa"].to_numpy(), + ) + + +def test_uppdata__get_field(uppdata_multilev_prs_obj): + spec = {"shortName": "t", "typeOfLevel": "isobaricInhPa", "level": 500} + field = uppdata_multilev_prs_obj._get_field(cfgribspec=spec) + assert np.array_equal( + field, uppdata_multilev_prs_obj.ds["t_isobaricInhPa_instant"].t.sel(isobaricInhPa=500) + ) + + +@mark.parametrize( + "transforms", + [ + "conversions.percent", + ["conversions.percent", "opposite"], + { + "funcs": "field_diff", + "kwargs": {"variable2": "temp", "level2": "sfc", "do_transform": False}, + }, + ], +) +def test_uppdata_get_transform(fielddata_obj_ro, transforms): + temp = fielddata_obj_ro.field + val = ones_like(temp) if not isinstance(transforms, dict) else temp + field = fielddata_obj_ro.get_transform(transforms, val) + expected = 0 + match transforms: + case dict(): + expected = zeros_like(temp) + case list(): + expected = val * -100.0 + case str(): + expected = val * 100.0 + assert np.array_equal(field, expected) + + +@mark.parametrize( + ("lat", "lon", "expected"), + [(40.019, 360 - 105.2747, (595, 679)), (25.7617, 360 - 80.1918, (109, 1487))], +) +def test_uppdata_get_xypoint(expected, lat, lon, uppdata_obj_ro): + assert uppdata_obj_ro.get_xypoint(lat, lon) == expected + + +@mark.parametrize(("lat", "lon"), [(88.0, 270.0), (40, 180), (10, 330), (30, 345)]) +def test_uppdata_get_xypoint_outside(lat, lon, uppdata_obj_ro): + assert uppdata_obj_ro.get_xypoint(lat, lon) == (-1, -1) + + +def 
test_uppdata_latlons(uppdata_obj_ro): + lats = uppdata_obj_ro.field.coords["latitude"].to_numpy() + lons = uppdata_obj_ro.field.coords["longitude"].to_numpy() + assert [ + np.array_equal(act, exp) + for act, exp in zip(uppdata_obj_ro.latlons(), [lats, lons], strict=True) + ] + + +def test_uppdata_latlons_lats_flipped(uppdata_obj): + # Test a 1D latitude option (like in Global, etc.) + ds = uppdata_obj.ds["t_hybrid_instant"].sel(y=500) + lats = ds.coords["latitude"].to_numpy() + ds.coords["latitude"] = (("x"), lats[::-1]) + lons = ds.coords["longitude"].to_numpy() + uppdata_obj.ds = {"t_hybrid_instant": ds} + assert [ + np.array_equal(act, exp) + for act, exp in zip(uppdata_obj.latlons(), [lats, lons], strict=True) + ] + + +@mark.parametrize("factor", [1, -1, 0, -20.0, 6543.0]) +def test_uppdata_opposite(factor, uppdata_obj_ro): + ds = ones_like(uppdata_obj_ro.field) * factor + assert np.array_equal(uppdata_obj_ro.opposite(ds), -ds) + + +def test_uppdata_valid_dt(uppdata_obj_ro): + assert uppdata_obj_ro.valid_dt == datetime(2025, 10, 6, 16) + + +def test_uppdata_vector_magnitude(prs_ds, spec): + fd = ConcreteUPPData( + model="hrrr", + level="250mb", + fhr=16, + ds=prs_ds, + short_name="u", + spec=spec, + ) + vm = fd.vector_magnitude(field1=fd.field, field2_id="v_250mb") + assert not np.array_equal(vm, fd.field) + + +def test_uppdata_vspec(uppdata_obj_ro): + expected = { + "cfgrib": {"shortName": "t", "typeOfLevel": "hybrid"}, + "clevs": np.arange(-40, 40, 2.5), + "cmap": "jet", + "colors": "ua_temp_colors", + "contours": { + "pres_sfc": {"levels": [0, 500], "colors": "k", "linewidths": 0.6}, + "gh": {"colors": "grey"}, + }, + "hatches": {"pres_sfc": {"hatches": ["", "..."], "levels": [0, 500]}}, + "ncl_name": {"prs": "TMP_P0_L100_{grid}", "nat": "TMP_P0_L105_{grid}"}, + "ticks": 5, + "transform": "conversions.k_to_c", + "unit": "C", + "wind": True, + } + vspec = uppdata_obj_ro.vspec + # Can't test the array items with ==, so check them separately. 
+ actual_clevs = vspec.pop("clevs") + expected_clevs = expected.pop("clevs") + assert np.array_equal(actual_clevs, np.asarray(expected_clevs)) + assert uppdata_obj_ro.vspec == expected + + +def test_uppdata_vspec_bad(uppdata_obj): + uppdata_obj.short_name = "foo" + with raises(errors.NoGraphicsDefinitionForVariableError): + uppdata_obj.vspec # noqa: B018 + + +def test_fielddata_aviation_flight_rules(prs_ds, spec): + fd = gribdata.FieldData( + fhr=16, + ds=prs_ds, + level="sfc", + model="hrrr", + short_name="flru", + spec=spec, + ) + + flru = fd.aviation_flight_rules(fd.field) + assert flru.max() == 3.01 + assert flru.min() == 0.0 + + +def test_fielddata_cmap(fielddata_obj_ro): + assert fielddata_obj_ro.cmap == get_cmap("gist_ncar") + + +@mark.parametrize("color_def", ["aod_colors", "shear_colors", "vvel_colors"]) +def test_fielddata_colors(color_def, fielddata_obj): + fielddata_obj.vspec["colors"] = color_def + assert fielddata_obj.colors.min() == 0.0 + assert fielddata_obj.colors.max() == 1.0 + + +def test_fielddata_colors_undefined(fielddata_obj): + del fielddata_obj.vspec["colors"] + with raises(errors.NoGraphicsDefinitionForVariableError): + fielddata_obj.colors # noqa: B018 + + +def test_fielddata_colors_bad(fielddata_obj): + fielddata_obj.vspec["colors"] = "foo" + with raises(AttributeError) as e: + fielddata_obj.colors # noqa: B018 + assert "There is no color definition named foo" in str(e.value) + + +def test_fielddata_corners(fielddata_obj): + assert fielddata_obj.corners == [ + 21.138123, + 47.842195, + 237.280472, + 299.082807, + ] + + +def test_fielddata_corners_single_dim(fielddata_obj): + # Remove one dimension for the purposes of the test + fielddata_obj.field.coords["latitude"] = fielddata_obj.field.coords["latitude"][:, 0] + assert fielddata_obj.corners == [ + 21.138123, + 47.838623, + 237.280472, + 225.904520, + ] + + +def test_fielddata_data_getter_and_setter(fielddata_obj): + assert np.array_equal(fielddata_obj.data, fielddata_obj.get_values()) + new_data = ones_like(fielddata_obj.field) + fielddata_obj.data = new_data + assert np.array_equal(fielddata_obj.data, new_data) + + +def test_fielddata_fire_weather_index(prs_ds, spec): + fd = gribdata.FieldData( + fhr=16, + ds=prs_ds, + level="sfc", + model="hrrr", + short_name="firewx-pygraf", + spec=spec, + ) + + firewx = fd.fire_weather_index(fd.field) + assert firewx.max() <= 100 + assert firewx.min() == 0 + + +def test_fielddata_grid_info_lambert(fielddata_obj_ro): + grid_info = fielddata_obj_ro.grid_info() + assert grid_info == { + "corners": [21.138123, 47.842195, 237.280472, 299.082807], + "lat_0": 39.0, + "lat_1": 38.5, + "lat_2": 38.5, + "lon_0": 262.5, + "projection": "lcc", + } + + +def test_fielddata_icing_adjust_trace(prs_ds, spec): + spec = get_yaml_config(spec) + spec.dereference(context={"file_type": "prs"}) + fd = gribdata.FieldData( + fhr=16, + ds=prs_ds, + level="sfc", + model="hrrr", + short_name="flru", + spec=spec, + ) + field = ones_like(fd.field) * 4 + icing_adjust_trace = fd.icing_adjust_trace(field) + assert np.array_equal(icing_adjust_trace, ones_like(field) * 0.5) + + +def test_fielddata_supercooled_liquid_water(nat_ds, spec): + spec = get_yaml_config(spec) + spec.dereference(context={"file_type": "nat"}) + fd = gribdata.FieldData( + fhr=16, + ds=nat_ds, + level="sfc", + model="hrrr", + short_name="slw", + spec=spec, + ) + slw = fd.supercooled_liquid_water() + assert not np.array_equal(slw, fd.field) + + +def test_fielddata_ticks_default(fielddata_obj_ro): + assert fielddata_obj_ro.ticks == 10 + + 
+def test_fielddata_ticks_in_vspec(fielddata_obj): + ticks = 22 + fielddata_obj.vspec["ticks"] = ticks + assert fielddata_obj.ticks == ticks + + +def test_fielddata_units_default(fielddata_obj_ro): + assert fielddata_obj_ro.units == "F" + + +def test_fielddata_units_in_vspec(fielddata_obj): + units = "foo" + fielddata_obj.vspec["unit"] = units + assert fielddata_obj.units == units + + +@mark.parametrize( + ("var", "lev"), [("pres", "sfc"), ("1ref", "1000m"), ("acsnw", "sfc"), ("rh", "500mb")] +) +def test_fielddata_values_args_no_transform(fielddata_obj, lev, var): + fielddata_obj.vspec["transform"] = None + fielddata_obj.model = "hrrr" + assert not np.array_equal(fielddata_obj.get_values(level=lev, name=var), fielddata_obj.field) + + +def test_fielddata_values_args_transform(fielddata_obj): + fielddata_obj.vspec["transform"] = "opposite" + fielddata_obj.model = "hrrr" + assert np.array_equal(fielddata_obj.get_values(level="sfc", name="temp"), -fielddata_obj.field) + + +def test_fielddata_values_no_args_no_transform(fielddata_obj): + field = ones_like(fielddata_obj.ds["t_surface_instant"]) + fielddata_obj.ds = {"t_surface_instant": field} + fielddata_obj.vspec["transform"] = None + assert np.array_equal(fielddata_obj.get_values(), field.t) + + +def test_fielddata_values_no_args_transform(fielddata_obj): + field = ones_like(fielddata_obj.ds["t_surface_instant"]) + fielddata_obj.ds = {"t_surface_instant": field} + fielddata_obj.vspec["transform"] = "opposite" + assert np.array_equal(fielddata_obj.get_values(), -field.t.squeeze()) + + +def test_fielddata_values_bad_name_level(fielddata_obj_ro): + with raises(errors.NoGraphicsDefinitionForVariableError): + fielddata_obj_ro.get_values(level="foo", name="temp") + with raises(errors.NoGraphicsDefinitionForVariableError): + fielddata_obj_ro.get_values(level="sfc", name="foo") + with raises(errors.NoGraphicsDefinitionForVariableError): + fielddata_obj_ro.get_values(level="bar", name="foo") + + +def test_profiledata_values(profiledata_obj): + assert profiledata_obj.get_values().shape == (50,) + + +def test_profiledata_values_bad_name_level(profiledata_obj): + with raises(errors.NoGraphicsDefinitionForVariableError): + profiledata_obj.get_values(level="foo", name="temp") + with raises(errors.NoGraphicsDefinitionForVariableError): + profiledata_obj.get_values(level="sfc", name="foo") + with raises(errors.NoGraphicsDefinitionForVariableError): + profiledata_obj.get_values(level="bar", name="foo") + + +def test_profiledata_values_one_level(profiledata_obj): + value = profiledata_obj.get_values(name="hlcy", level="sr01") + assert value.shape == () # A single number + assert value == 47.7 diff --git a/tests/datahandler/test_gribfile.py b/tests/datahandler/test_gribfile.py new file mode 100644 index 00000000..433fb1e3 --- /dev/null +++ b/tests/datahandler/test_gribfile.py @@ -0,0 +1,27 @@ +from pathlib import Path + +from pytest import mark +from xarray import Dataset + +from adb_graphics.datahandler import gribfile + + +@mark.skip(reason="This test requires test data that is not yet available.") +def test_gribfiles(): + paths = [ + "/Users/cholt/work/pygraf_cfgrib/sample_data/rrfs_a/2025101312/rrfs.t12z.prslev.3km.f016.conus.grib2", + "/Users/cholt/work/pygraf_cfgrib/sample_data/rrfs_a/rrfs.t12z.prslev.3km.f016.conus.grib2", + ] + gribfiles = [Path(f) for f in paths] + gf = gribfile.GribFiles( + filenames=gribfiles, + cfgrib_config={ + "shortName": "sp", + "typeOfLevel": "surface", + }, + ) + assert isinstance(gf.datasets, dict) + assert 
isinstance(gf.datasets["sp_surface_instant"], Dataset) + assert len(gf.datasets) == 1 + assert len(gf.datasets["sp_surface_instant"].data_vars) == 1 + assert len(gf.datasets["sp_surface_instant"].data_vars["sp"].shape) == 3 diff --git a/tests/test_common.py b/tests/test_common.py index 1d7393ac..37b3d670 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -1,7 +1,7 @@ -# pylint: disable=invalid-name +""" +Pytests for the common utilities included in this package. -''' -Pytests for the common utilities included in this package. Includes: +Includes: - conversions.py - specs.py @@ -11,75 +11,25 @@ python -m pytest --nat-file [path/to/gribfile] --prs-file [path/to/gribfile] -''' +""" +import warnings from inspect import getfullargspec from string import ascii_letters, digits -import warnings +import numpy as np from matplotlib import cm from matplotlib import colors as mcolors from metpy.plots import ctables -import numpy as np -import yaml - -import adb_graphics.conversions as conversions -import adb_graphics.specs as specs -import adb_graphics.utils as utils -import adb_graphics.datahandler.gribdata as gribdata - -def test_conversion(): - - ''' Test that conversions return at numpy array for input of np.ndarray, - list, or int ''' - - a = np.ones([3, 2]) * 300 - c = a[0, 0] - - # Check for the right answer - assert np.array_equal(conversions.k_to_c(a), a - 273.15) - assert np.array_equal(conversions.k_to_f(a), (a - 273.15) * 9/5 + 32) - assert np.array_equal(conversions.kgm2_to_in(a), a * 0.03937) - assert np.array_equal(conversions.m_to_dm(a), a / 10) - assert np.array_equal(conversions.m_to_in(a), a * 39.3701) - assert np.array_equal(conversions.m_to_kft(a), a / 304.8) - assert np.array_equal(conversions.m_to_mi(a), a / 1609.344) - assert np.array_equal(conversions.ms_to_kt(a), a * 1.9438) - assert np.array_equal(conversions.pa_to_hpa(a), a / 100) - assert np.array_equal(conversions.percent(a), a * 100) - assert np.array_equal(conversions.to_micro(a), a * 1E6) - assert np.array_equal(conversions.to_micrograms_per_m3(a), a * 1E9) - assert np.array_equal(conversions.vvel_scale(a), a * -10) - assert np.array_equal(conversions.vort_scale(a), a / 1E-05) - assert np.array_equal(conversions.weasd_to_1hsnw(a), a * 10) - - functions = [ - conversions.k_to_c, - conversions.k_to_f, - conversions.kgm2_to_in, - conversions.m_to_dm, - conversions.m_to_in, - conversions.m_to_kft, - conversions.m_to_mi, - conversions.ms_to_kt, - conversions.pa_to_hpa, - conversions.percent, - conversions.to_micro, - conversions.to_micrograms_per_m3, - conversions.vvel_scale, - conversions.vort_scale, - conversions.weasd_to_1hsnw, - ] - # Check that all functions return a np.ndarray given a collection, or single float - for f in functions: - for collection in [a, c]: - assert isinstance(f(collection), type(collection)) +from adb_graphics import specs, utils +from adb_graphics.datahandler import gribdata class MockSpecs(specs.VarSpec): + """Mock class for the VarSpec abstract class.""" - ''' Mock class for the VarSpec abstract class ''' + cfg = utils.load_yaml("adb_graphics/default_specs.yml") @property def clevs(self): @@ -90,125 +40,104 @@ def vspec(self): return {} -def test_specs(): - - ''' Test VarSpec properties. 
''' - - config = 'adb_graphics/default_specs.yml' - varspec = MockSpecs(config) - - # Ensure correct return type - assert isinstance(varspec.t_colors, np.ndarray) - assert isinstance(varspec.ps_colors, np.ndarray) - assert isinstance(varspec.yml, dict) - - # Ensure the appropriate number of colors is returned - assert np.shape(varspec.t_colors) == (len(varspec.clevs), 4) - assert np.shape(varspec.ps_colors) == (105, 4) - - def test_utils(): + """Test that utils works appropriately.""" - ''' Test that utils works appropriately. ''' + assert callable(utils.get_func("conversions.k_to_c")) - assert callable(utils.get_func('conversions.k_to_c')) +def test_join_ranges_constructor(tmp_path): + """Test that the join_ranges constructor works as expected.""" -def test_join_ranges_constructor(): - - ''' Test that the join_ranges constructor works as expected. ''' - - yaml.add_constructor('!join_ranges', utils.join_ranges, Loader=yaml.SafeLoader) - yaml_str = ''' + cfg_file = tmp_path / "cfg.yaml" + cfg_file.write_text(""" foo: !join_ranges [[0, 15, 0.1], [20, 61, 20]] foo2: !join_ranges [[0, 15, 0.1]] foo3: !join_ranges [[0, 15, 0.1], [20, 40, 10], [40, 61, 20]] - ''' - cfg = yaml.load(yaml_str, Loader=yaml.SafeLoader) + """) + cfg = utils.load_yaml(cfg_file) - expected = np.concatenate((np.arange(0, 15, 0.1), - np.arange(20, 61, 20)), axis=0) + expected = np.concatenate((np.arange(0, 15, 0.1), np.arange(20, 61, 20)), axis=0) expected2 = np.arange(0, 15, 0.1) - expected3 = np.concatenate((np.arange(0, 15, 0.1), - np.arange(20, 40, 10), - np.arange(40, 61, 20)), axis=0) - - assert np.array_equal(expected, cfg['foo']) - assert np.array_equal(expected2, cfg['foo2']) - assert np.array_equal(expected3, cfg['foo3']) + expected3 = np.concatenate( + (np.arange(0, 15, 0.1), np.arange(20, 40, 10), np.arange(40, 61, 20)), axis=0 + ) + assert np.array_equal(expected, cfg["foo"]) + assert np.array_equal(expected2, cfg["foo2"]) + assert np.array_equal(expected3, cfg["foo3"]) -class TestDefaultSpecs(): - ''' Test contents of default_specs.yml. ''' +class TestDefaultSpecs: + """Test contents of default_specs.yml.""" - config = 'adb_graphics/default_specs.yml' - varspec = MockSpecs(config) - - cfg = varspec.yml + config = "adb_graphics/default_specs.yml" + varspec = MockSpecs() + cfg = utils.load_yaml("adb_graphics/default_specs.yml") @property def allowable(self): - - ''' Each entry in the dict names a function that tests a key in - default_specs.yml. ''' + """ + Each entry in the dict names a function that tests a key in + default_specs.yml. 
+ """ return { - 'accumulate': self.is_bool, - 'annotate': self.is_bool, - 'annotate_decimal': self.is_int, - 'clevs': self.is_a_clev, - 'cmap': self.is_a_cmap, - 'colors': self.is_a_color, - 'contours': self.is_a_contour_dict, - 'include_obs': self.is_bool, - 'hatches': self.is_a_contourf_dict, - 'labels': self.is_a_contourf_dict, - 'ncl_name': True, - 'plot_airports': self.is_bool, - 'plot_scatter': self.is_bool, - 'print_units': True, - 'split': self.is_bool, - 'ticks': self.is_number, - 'title': self.is_string, - 'transform': self.check_transform, - 'unit': self.is_string, - 'vertical_index': self.is_int, - 'vertical_level_name': self.is_string, - 'wind': self.is_wind, - } + "accumulate": self.is_bool, + "annotate": self.is_bool, + "annotate_decimal": self.is_int, + "clevs": self.is_a_clev, + "cmap": self.is_a_cmap, + "cfgrib": self.is_dict, + "colors": self.is_a_color, + "contours": self.is_a_contour_dict, + "include_obs": self.is_bool, + "hatches": self.is_a_contourf_dict, + "labels": self.is_a_contourf_dict, + "level": self.is_number, + "ncl_name": True, + "plot_airports": self.is_bool, + "plot_scatter": self.is_bool, + "print_units": True, + "split": self.is_bool, + "ticks": self.is_number, + "title": self.is_string, + "transform": self.check_transform, + "unit": self.is_string, + "vertical_index": self.is_int, + "vertical_level_name": self.is_string, + "wind": self.is_wind, + } def check_kwargs(self, accepted_args, kwargs): - - ''' Ensure a dictionary entry matches the kwargs accepted by a function. - ''' + """Ensure a dictionary entry matches the kwargs accepted by a function.""" assert isinstance(kwargs, dict) for key, args in kwargs.items(): - lev = None - if '_' in key: - short_name, lev = key.split('_') + if "_" in key: + short_name, lev = key.split("_") else: short_name = key assert self.is_a_key(short_name) if lev: - assert self.cfg.get(short_name).get(lev) is not None + assert self.cfg.get(short_name, {}).get(lev) is not None - for arg in args.keys(): + for arg in args: assert arg in accepted_args return True def check_transform(self, entry): + """ + Check structure of transform entry. - ''' - Check that the transform entry is either a single transformation - function, a list of transformation functions, or a dictionary containing - the functions list and the kwargs list like so: + The transform entry should be either a single transformation function, a list of + transformation functions, or a dictionary containing the functions list and the kwargs list + like so: transform: funcs: [list, of, functions] @@ -217,7 +146,7 @@ def check_transform(self, entry): sec_arg: value The functions listed under functions MUST be methods, not attributes! - ''' + """ kwargs = dict() @@ -228,22 +157,19 @@ def check_transform(self, entry): # If the transform entry is a dictionary, check that it has the # appropriate contents elif isinstance(entry, dict): - - funcs = entry.get('funcs') + funcs = entry.get("funcs") assert funcs is not None # Make sure funcs is a list funcs = funcs if isinstance(funcs, list) else [funcs] # Key word arguments may not be present. 
- kwargs = entry.get('kwargs') - + kwargs = entry.get("kwargs", {}) transforms = [] for func in funcs: callables = self.get_callable(func) - callables = callables if isinstance(callables, list) else \ - [callables] + callables = callables if isinstance(callables, list) else [callables] transforms.extend(callables) # The argspecs bit gives us a list of all the accepted arguments @@ -251,68 +177,63 @@ def check_transform(self, entry): # when provided arguments don't appear in all_params. # arguments not in that list, we fail. if kwargs: - argspecs = [getfullargspec(func) for func in transforms if - callable(func)] + argspecs = [getfullargspec(fx) for fx in transforms if callable(fx)] - all_params = [] + all_params: list = [] for argspec in argspecs: - # Make sure all functions accept key word arguments assert argspec.varkw is not None - parameters = [] + parameters: list = [] for argtype in [argspec.args, argspec.varargs, argspec.varkw]: if argtype is not None: parameters.extend(argtype) all_params.extend(parameters) - for key in kwargs.keys(): + for key in kwargs: if key not in all_params: - msg = f'Function key {key} is not an expicit parameter \ - in any of the transforms: {funcs}!' - warnings.warn(msg, UserWarning) - + msg = ( + f"Function key {key} is not an explicit parameter " + f"in any of the transforms: {funcs}!" + ) + warnings.warn(msg, UserWarning, stacklevel=2) return True - - # pylint: disable=inconsistent-return-statements def get_callable(self, func): - - - ''' Return the callable function given a function name. ''' + """Return the callable function given a function name.""" if func in dir(self.varspec): return self.varspec.__getattribute__(func) # Check datahandler.gribdata objects if a single word is provided - if len(func.split('.')) == 1: - + if len(func.split(".")) == 1: # Check all the classes in the gribdata module for attr in dir(gribdata): - # pylint: disable=no-member # Check the methods in each class if func in dir(gribdata.__getattribute__(attr)): method = gribdata.__getattribute__(attr).__dict__.get(func) if method is not None: + if isinstance(method, staticmethod): + return method.__func__ return method if callable(utils.get_func(func)): return utils.get_func(func) - raise ValueError('{func} is not a known callable function!') + msg = f"{func} is not a known callable function!" + raise ValueError(msg) @staticmethod def is_a_clev(clev): - - ''' Returns true for a clev that is a list, a range, or a callable function. ''' + """Returns true for a clev that is a list, a range, or a callable function.""" if isinstance(clev, (list, np.ndarray)): return True - if 'range' in clev.split('[')[0]: - clean = lambda x: x.strip().split('-')[-1].replace('.', '1') - items = clev.split(' ', 1)[1].strip('[').strip(']').split(',') + if "range" in clev.split("[")[0]: + clean = lambda x: x.strip().split("-")[-1].replace(".", "1") + items = clev.split(" ", 1)[1].strip("[").strip("]").split(",") nums = [clean(i).isnumeric() for i in items] return all(nums) @@ -320,19 +241,38 @@ def is_a_clev(clev): @staticmethod def is_a_cmap(cmap): - - ''' Returns true for a cmap that is a Colormap object. ''' + """Returns true for a cmap that is a Colormap object.""" return cmap in dir(cm) + list(ctables.colortables.keys()) def is_a_contour_dict(self, entry): - - ''' Set up the accepted arguments for plt.contour, and check the given - arguments. 
''' - - args = ['X', 'Y', 'Z', 'levels', - 'corner_mask', 'colors', 'alpha', 'cmap', 'norm', 'vmin', - 'vmax', 'origin', 'extent', 'locator', 'extend', 'xunits', - 'yunits', 'antialiased', 'nchunk', 'linewidths', 'linestyles'] + """ + Set up the accepted arguments for plt.contour, and check the given + arguments. + """ + + args = [ + "X", + "Y", + "Z", + "levels", + "corner_mask", + "colors", + "alpha", + "cmap", + "norm", + "vmin", + "vmax", + "origin", + "extent", + "locator", + "extend", + "xunits", + "yunits", + "antialiased", + "nchunk", + "linewidths", + "linestyles", + ] if entry is None: return True @@ -340,15 +280,35 @@ def is_a_contour_dict(self, entry): return self.check_kwargs(args, entry) def is_a_contourf_dict(self, entry): - - ''' Set up the accepted arguments for plt.contourf, and check the given - arguments. ''' - - args = ['X', 'Y', 'Z', 'levels', - 'corner_mask', 'colors', 'alpha', 'cmap', 'labels', 'norm', 'vmin', - 'vmax', 'origin', 'extent', 'locator', 'extend', 'xunits', - 'yunits', 'antialiased', 'nchunk', 'linewidths', - 'hatches'] + """ + Set up the accepted arguments for plt.contourf, and check the given + arguments. + """ + + args = [ + "X", + "Y", + "Z", + "levels", + "corner_mask", + "colors", + "alpha", + "cmap", + "labels", + "norm", + "vmin", + "vmax", + "origin", + "extent", + "locator", + "extend", + "xunits", + "yunits", + "antialiased", + "nchunk", + "linewidths", + "hatches", + ] if entry is None: return True @@ -356,24 +316,21 @@ def is_a_contourf_dict(self, entry): return self.check_kwargs(args, entry) def is_a_color(self, color): + """Returns true if color is contained in the list of recognized colors.""" - ''' Returns true if color is contained in the list of recognized colors. ''' + colors = dict(mcolors.BASE_COLORS, **mcolors.CSS4_COLORS, **ctables.colortables) - colors = dict(mcolors.BASE_COLORS, **mcolors.CSS4_COLORS, - **ctables.colortables) - - if color in colors.keys(): + if color in colors: return True - if color in dir(self.varspec): - return True + return color in dir(self.varspec) - return False + def is_a_dict(self, cfgrib): + return isinstance(cfgrib, dict) @staticmethod def is_a_level(key): - - ''' + """ Returns true if the key fits one of the level descriptor formats. Allowable formats include: @@ -382,56 +339,59 @@ def is_a_level(key): [numeric][lev_type] e.g. 500mb, or 2m [stat][numeric] e.g. mn02, mx25 - ''' + """ + # fmt: off allowed_levels = [ - 'agl', # above ground level - 'best', # Best - 'bndylay', # boundary layer cld cover - 'esbl', # ??? - 'esblmn', # ??? - 'high', # high clouds - 'int', # vertical integral - 'low', # low clouds - 'max', # maximum in column - 'maxsfc', # max surface value - 'mdn', # maximum downward - 'mid', # mid-level clouds - 'mnsfc', # min surface value - 'msl', # mean sea level - 'mu', # most unstable - 'mul', # most unstable layer - 'mup', # maximum upward - 'mu', # most unstable - 'obs', # observations - 'pw', # wrt precipitable water - 'sat', # satellite - 'sfc', # surface - 'sfclt', # surface (less than) - 'top', # nominal top of atmosphere - 'total', # total clouds - 'ua', # upper air - ] + "agl", # above ground level + "best", # Best + "bndylay", # boundary layer cld cover + "esbl", # ??? 
+ "esblmn", # ensemble mean + "high", # high clouds + "int", # vertical integral + "low", # low clouds + "max", # maximum in column + "maxsfc", # max surface value + "mdn", # maximum downward + "mid", # mid-level clouds + "mnsfc", # min surface value + "msl", # mean sea level + "mu", # most unstable + "mul", # most unstable layer + "mup", # maximum upward + "mu", # most unstable + "obs", # observations + "pw", # wrt precipitable water + "sat", # satellite + "sfc", # surface + "sfclt", # surface (less than) + "top", # nominal top of atmosphere + "total", # total clouds + "ua", # upper air + "uanat", # upper air native file + ] allowed_lev_type = [ - 'cm', # centimeters - 'ds', # difference - 'ft', # feet - 'km', # kilometers - 'm', # meters - 'mm', # millimeters - 'mb', # milibars - 'sr', # storm relative - ] + "cm", # centimeters + "ds", # difference + "ft", # feet + "km", # kilometers + "m", # meters + "mm", # millimeters + "mb", # milibars + "sr", # storm relative + ] allowed_stat = [ - 'in', # ??? - 'ens', # ensemble - 'm', # ??? - 'maxm', # ??? - 'mn', # minimum - 'mx', # maximum - ] + "in", # ??? + "ens", # ensemble + "m", # ??? + "maxm", # ??? + "mn", # minimum + "mx", # maximum + ] + # fmt: on # Easy check first -- it is in the allowed_levels list if key in allowed_levels: @@ -440,11 +400,11 @@ def is_a_level(key): # Check for [numeric][lev_type] or [lev_type][numeric] pattern # Numbers come at beginning or end, only - numeric = ''.join([c for c in key if c in digits + '.']) in key + numeric = "".join([c for c in key if c in digits + "."]) in key # The level is allowed level_str = [c for c in key if c in ascii_letters] - allowed = ''.join(level_str) in allowed_lev_type + allowed_stat + allowed = "".join(level_str) in allowed_lev_type + allowed_stat # Check the other direction - level string contains one of the allowed # types. @@ -454,27 +414,21 @@ def is_a_level(key): allowed = True break - if numeric and allowed: - return True - - return False + return numeric and allowed def is_a_key(self, key): - - ''' Returns true if key exists as a key in the config file. ''' + """Returns true if key exists as a key in the config file.""" return self.cfg.get(key) is not None @staticmethod def is_bool(k): - - ''' Returns true if k is a boolean variable. ''' + """Returns true if k is a boolean variable.""" return isinstance(k, bool) def is_callable(self, funcs): - - ''' Returns true if func in funcs list is the name of a callable function. ''' + """Returns true if func in funcs list is the name of a callable function.""" funcs = funcs if isinstance(funcs, list) else [funcs] @@ -484,9 +438,7 @@ def is_callable(self, funcs): callable_ = callable_ if isinstance(callable_, list) else [callable_] for clbl in callable_: - if isinstance(clbl, np.ndarray): - callables.append(True) - elif callable(clbl): + if isinstance(clbl, np.ndarray) or callable(clbl): callables.append(True) else: callables.append(False) @@ -495,46 +447,42 @@ def is_callable(self, funcs): @staticmethod def is_dict(d): - - ''' Returns true if d is a dictionary ''' + """Returns true if d is a dictionary.""" return isinstance(d, dict) @staticmethod def is_int(i): - - ''' Returns true if i is an integer. ''' + """Returns true if i is an integer.""" if isinstance(i, int): return True - return i.isnumeric() and len(i.split('.')) == 1 + return i.isnumeric() and len(i.split(".")) == 1 @staticmethod def is_number(i): - - ''' Returns true if i is a number. 
''' + """Returns true if i is a number.""" if isinstance(i, (int, float)): return True - return i.isnumeric() and len(i.split('.')) <= 2 + return i.isnumeric() and len(i.split(".")) <= 2 @staticmethod def is_string(s): - - ''' Returns true if s is a string. ''' + """Returns true if s is a string.""" return isinstance(s, str) def is_wind(self, wind): - - ''' Returns true if wind is a bool or is_a_level. ''' + """Returns true if wind is a bool or is_a_level.""" return isinstance(wind, bool) or self.is_a_level(wind) def check_keys(self, d, depth=0): - - ''' Helper function that recursively checks the keys in the dictionary by calling the - function defined in allowable. ''' + """ + Helper function that recursively checks the keys in the dictionary by calling the + function defined in allowable. + """ max_depth = 2 @@ -546,11 +494,11 @@ def check_keys(self, d, depth=0): if depth >= max_depth: return - level = depth+1 + level = depth + 1 for k, v in d.items(): # Check that the key is allowable - assert (k in self.allowable.keys()) or self.is_a_level(k) + assert (k in self.allowable) or self.is_a_level(k) # Call a checker if one exists for the key, otherwise descend into # next level of dict @@ -560,14 +508,12 @@ def check_keys(self, d, depth=0): assert checker else: assert checker(v) - else: - if isinstance(v, dict): - self.check_keys(v, depth=level) + elif isinstance(v, dict): + self.check_keys(v, depth=level) def test_keys(self): - - ''' Tests each of top-level variables in the config file by calling the helper function. ''' + """Tests each of top-level variables in the config file by calling the helper function.""" for short_name, spec in self.cfg.items(): - assert '_' not in short_name + assert "_" not in short_name self.check_keys(spec) diff --git a/tests/test_conversions.py b/tests/test_conversions.py new file mode 100644 index 00000000..73dec4d6 --- /dev/null +++ b/tests/test_conversions.py @@ -0,0 +1,83 @@ +import numpy as np +from pytest import fixture +from xarray import DataArray + +from adb_graphics import conversions + + +@fixture +def array(): + return np.ones([3, 2]) * 300 + + +def test_k_to_c(array): + assert np.array_equal(conversions.k_to_c(array), array - 273.15) + + +def test_k_to_f(array): + assert np.array_equal(conversions.k_to_f(array), (array - 273.15) * 9 / 5 + 32) + + +def test_kgm2_to_in(array): + assert np.array_equal(conversions.kgm2_to_in(array), array * 0.03937) + + +def test_magnitude(): + ones = DataArray(np.ones([3, 2])) + field1 = ones * 3 + field2 = ones * 4 + out = conversions.magnitude(field1, field2) + assert np.array_equal(out, ones * 5) + + +def test_m_to_dm(array): + assert np.array_equal(conversions.m_to_dm(array), array / 10.0) + assert conversions.m_to_dm(array).dtype == np.float64 + + +def test_m_to_in(array): + assert np.array_equal(conversions.m_to_in(array), array * 39.3701) + + +def test_m_to_kft(array): + assert np.array_equal(conversions.m_to_kft(array), array / 304.8) + + +def test_m_to_mi(array): + assert np.array_equal(conversions.m_to_mi(array), array / 1609.344) + + +def test_ms_to_kt(array): + assert np.array_equal(conversions.ms_to_kt(array), array * 1.9438) + + +def test_pa_to_hpa(array): + assert np.array_equal(conversions.pa_to_hpa(array), array / 100) + + +def test_percent(array): + assert np.array_equal(conversions.percent(array), array * 100) + + +def test_sden_to_slr(array): + assert np.array_equal(conversions.sden_to_slr(array), 1000.0 / array) + + +def test_to_micro(array): + assert np.array_equal(conversions.to_micro(array), 
array * 1e6)
+
+
+def test_to_micrograms_per_m3(array):
+    assert np.array_equal(conversions.to_micrograms_per_m3(array), array * 1e9)
+
+
+def test_vvel_scale(array):
+    assert np.array_equal(conversions.vvel_scale(array), array * -10)
+
+
+def test_vort_scale(array):
+    assert np.array_equal(conversions.vort_scale(array), array / 1e-05)
+
+
+def test_weasd_to_1hsnw(array):
+    assert np.array_equal(conversions.weasd_to_1hsnw(array), array * 10)
diff --git a/tests/test_figure_builders.py b/tests/test_figure_builders.py
new file mode 100644
index 00000000..ac9096cd
--- /dev/null
+++ b/tests/test_figure_builders.py
@@ -0,0 +1,184 @@
+import gc
+import tracemalloc
+from argparse import Namespace
+from datetime import datetime
+from unittest.mock import call, patch
+
+import numpy as np
+from pytest import fixture
+from uwtools.api.config import get_yaml_config
+
+from adb_graphics import figure_builders, utils
+from adb_graphics.datahandler import gribdata
+
+
+@fixture
+def fielddata_obj(prs_ds, spec):
+    spec = get_yaml_config(spec)
+    spec.dereference(context={"file_type": "prs"})
+    return gribdata.FieldData(
+        model="hrrr",
+        fhr=16,
+        ds=prs_ds,
+        level="sfc",
+        short_name="cref",
+        spec=spec,
+    )
+
+
+@fixture
+def parallel_maps_args(prs_ds, spec, tmp_path):
+    spec = get_yaml_config(spec)
+    spec.dereference(context={"file_type": "prs"})
+    cla = Namespace(
+        **{  # noqa: PIE804
+            "ens_size": 0,
+            "graphic_type": "maps",
+            "img_res": 72,
+            "model_name": "hrrr",
+            "specs": spec,
+            "images": ["hrrr", []],
+        }
+    )
+    return {
+        "cla": cla,
+        "fhr": 16,
+        "dataset": prs_ds,
+        "level": "sfc",
+        "variable": "temp",
+        "workdir": tmp_path,
+    }
+
+
+@fixture
+def parallel_skewt_args(nat_ds, spec, tmp_path):
+    spec = get_yaml_config(spec)
+    spec.dereference(context={"file_type": "nat"})
+    cla = Namespace(
+        **{  # noqa: PIE804
+            "file_type": "nat",
+            "img_res": 72,
+            "max_plev": 100,
+            "model_name": "hrrr",
+            "data_root": ["path"],
+            "file_tmpl": ["filename"],
+            "start_time": datetime(2025, 10, 6, 0),
+            "specs": spec,
+            "images": ["hrrr", []],
+        }
+    )
+    return {
+        "cla": cla,
+        "fhr": 16,
+        "dataset": nat_ds,
+        "site": " DNR 23062 72469 39.77 104.88 1611 Denver, CO",
+        "workdir": tmp_path,
+    }
+
+
+@fixture(scope="module")
+def spec(spec_file):
+    spec = utils.load_yaml(spec_file)
+    spec.dereference(context={"fhr": 16})
+    return spec
+
+
+def test_add_obs_panel(fielddata_obj, nat_ds, spec):
+    spec = get_yaml_config(spec)
+    spec.dereference(context={"file_type": "nat"})
+    fig, ax = figure_builders.set_figure("hrrr", "enspanel", "full")
+    # Overwriting this explicitly since the cfgrib should always come from the model data.
+ spec["cref"]["obs"]["cfgrib"] = spec["1ref"]["1000m"]["cfgrib"]["hrrr"] + args = { + "ax": ax[8], + "model_name": "hrrr", + "dataset": nat_ds, # fake it with model data + "proj_info": fielddata_obj.grid_info(), + "spec": spec, + "short_name": "cref", + "tile": "full", + } + dm = figure_builders.add_obs_panel(**args) + assert dm.figure == fig + assert np.array_equal(dm.levels, np.arange(5, 76, 5)) + + +def test_parallel_maps(parallel_maps_args, tmp_path): + figure_builders.parallel_maps(**parallel_maps_args) + assert (tmp_path / "temp_full_sfc_f016.png").is_file() + + +def test_parallel_maps_enspanel(parallel_maps_args, prsfile, tmp_path): + parallel_maps_args["cla"].ens_size = 9 + parallel_maps_args["cla"].graphic_type = "enspanel" + parallel_maps_args["cla"].obs_file_path = prsfile + parallel_maps_args["cla"].specs["temp"]["sfc"]["include_obs"] = True + + with ( + patch.object(figure_builders, "MapFields") as fields, + patch.object(figure_builders, "Map") as m, + patch.object(figure_builders, "MultiPanelDataMap") as mpdm, + patch.object(figure_builders, "add_obs_panel") as aop, + ): + mpdm_calls = [ + call( + **{ # noqa: PIE804 + "map_fields": fields(), + "map_": m(), + "member": mem, + "model_name": "hrrr", + "last_panel": mem == 9, + } + ) + for mem in [0, 1, 2, 3, 0, 4, 5, 6, 0, 7, 8, 9] + ] + figure_builders.parallel_maps(**parallel_maps_args) + assert mpdm.call_args_list == mpdm_calls + call.title().assert_called_once() + call.add_logo().assert_called_once() + aop.assert_called_once() + assert (tmp_path / "temp_full_sfc_f016.png").is_file() + + +def test_parallel_maps_mem_leak(parallel_maps_args): + gc.collect() + tracemalloc.start() + snapshot_before = tracemalloc.take_snapshot() + figure_builders.parallel_maps(**parallel_maps_args) + snapshot_after = tracemalloc.take_snapshot() + gc.collect() + tracemalloc.stop() + # Compare memory usage + stats_diff = snapshot_after.compare_to(snapshot_before, "lineno") + total_diff_mb = sum(stat.size_diff for stat in stats_diff) / (1024 * 1024) + assert total_diff_mb < 92 # Appropriate size when test was written + + +def test_parallel_skewt(parallel_skewt_args, tmp_path): + figure_builders.parallel_skewt(**parallel_skewt_args) + assert (tmp_path / "DNR_72469_skewt_f016.png").is_file() + assert (tmp_path / "DNR.72469.skewt.2025100600_f016.csv").is_file() + + +def test_set_figure_enspanel_full(): + fig, ax = figure_builders.set_figure("hrrr", "enspanel", "full") + assert len(ax) == 12 + assert list(fig.get_size_inches()) == [20.0, 10.0] + + +def test_set_figure_enspanel_other(): + fig, ax = figure_builders.set_figure("hrrr", "enspanel", "other") + assert len(ax) == 12 + assert list(fig.get_size_inches()) == [20.0, 16.0] + + +def test_set_figure_enspanel_se(): + fig, ax = figure_builders.set_figure("hrrr", "enspanel", "SE") + assert len(ax) == 12 + assert list(fig.get_size_inches()) == [20.0, 19.0] + + +def test_set_figure_maps_full(): + fig, ax = figure_builders.set_figure("hrrr", "maps", "full") + assert len(ax) == 1 + assert list(fig.get_size_inches()) == [10.0, 10.0] diff --git a/tests/test_grib.py b/tests/test_grib.py deleted file mode 100644 index a9f42b63..00000000 --- a/tests/test_grib.py +++ /dev/null @@ -1,111 +0,0 @@ -# pylint: disable=invalid-name -''' Test suite for grib datahandler. 
''' - -import datetime - -import numpy as np -from matplotlib import colors as mcolors -import xarray as xr - -import adb_graphics.datahandler.gribdata as gribdata -import adb_graphics.datahandler.gribfile as gribfile - -DATAARRAY = xr.core.dataarray.DataArray - -def test_UPPData(natfile, prsfile): - - ''' Test the UPPData class methods on both types of input files. ''' - - nat_ds = gribfile.GribFile(natfile) - prs_ds = gribfile.GribFile(prsfile) - - class UPP(gribdata.UPPData): - - ''' Test class needed to define the values as an abstract class ''' - - def values(self, level=None, name=None, **kwargs): - return 1 - - upp_nat = UPP(nat_ds.contents, fhr=2, filetype='nat', short_name='temp') - upp_prs = UPP(prs_ds.contents, fhr=2, short_name='temp') - - # Ensure appropriate typing and size (where applicable) - for upp in [upp_nat, upp_prs]: - assert isinstance(upp.anl_dt, datetime.datetime) - assert isinstance(upp.clevs, np.ndarray) - assert isinstance(upp.date_to_str(datetime.datetime.now()), str) - assert isinstance(upp.fhr, str) - assert isinstance(upp.field, DATAARRAY) - assert isinstance(upp.latlons(), list) - assert isinstance(upp.lev_descriptor, str) - assert isinstance(upp.ncl_name(upp.vspec), str) - assert isinstance(upp.numeric_level(), tuple) - assert isinstance(upp.spec, dict) - assert isinstance(upp.valid_dt, datetime.datetime) - assert isinstance(upp.vspec, dict) - # Test for appropriate date formatting - test_date = datetime.datetime(2020, 12, 5, 12) - assert upp.date_to_str(test_date) == '20201205 12 UTC' - -def test_fieldData(prsfile): - - ''' Test the fieldData class methods on a prs file''' - - prs_ds = gribfile.GribFile(prsfile) - field = gribdata.fieldData(prs_ds.contents, fhr=2, level='500mb', short_name='temp') - - assert isinstance(field.cmap, mcolors.Colormap) - assert isinstance(field.colors, np.ndarray) - assert isinstance(field.corners, list) - assert isinstance(field.ticks, int) - assert isinstance(field.units, str) - assert isinstance(field.values(), DATAARRAY) - assert isinstance(field.aviation_flight_rules(field.values()), DATAARRAY) - assert isinstance(field.wind(True), list) - assert len(field.corners) == 4 - assert len(field.wind(True)) == 2 - assert len(field.wind('850mb')) == 2 - for component in field.wind(True): - assert isinstance(component, DATAARRAY) - - # Test retrieving other values - assert np.array_equal(field.values(), field.values(name='temp', level='500mb')) - - # Return zeros by subtracting same field - diff = field.field_diff(field.values(), variable2='temp', level2='500mb') - assert isinstance(diff, DATAARRAY) - assert not np.any(diff) - - # Test transform - assert np.array_equal(field.get_transform('conversions.k_to_f', field.values()), \ - (field.values() - 273.15) * 9/5 +32) - - field2 = gribdata.fieldData(prs_ds.contents, fhr=2, level='ua', short_name='ceil') - transforms = field2.vspec.get('transform') - assert np.array_equal(field2.get_transform(transforms, field2.values()), \ - field2.field_diff(field2.values(), variable2='gh', level2='sfc') / 304.8) - - # Expected size of values - assert len(np.shape((field.values()))) == 2 - assert len(np.shape((field.values(name='u')))) == 2 - assert len(np.shape((field.values(name='u', level='850mb')))) == 2 - -def test_profileData(natfile): - - ''' Test the profileData class methods on a nat file''' - - nat_ds = gribfile.GribFile(natfile) - loc = ' BNA 9999 99999 36.12 86.69 597 Nashville, TN\n' - profile = gribdata.profileData(nat_ds.contents, - fhr=2, - filetype='nat', - loc=loc, - 
short_name='temp', - ) - - assert isinstance(profile.get_xypoint(40., -100.), tuple) - assert isinstance(profile.values(), DATAARRAY) - - # The values should return a single number (0) or a 1D array (1) - assert len(np.shape((profile.values(level='best', name='li')))) == 0 - assert len(np.shape((profile.values(name='temp')))) == 1 diff --git a/tests/test_hrrr_maps.py b/tests/test_hrrr_maps.py index 19fdd277..67099003 100644 --- a/tests/test_hrrr_maps.py +++ b/tests/test_hrrr_maps.py @@ -1,52 +1,81 @@ -#pylint: disable=unused-variable -''' Tests for create_graphics driver ''' +"""Tests for create_graphics driver.""" + import os -import pytest -from create_graphics import create_graphics -from create_graphics import parse_args - -DATA_LOC = os.environ.get("data_loc") -OUTPUT_LOC = os.environ.get("data_loc") - -@pytest.fixture(name="_setup") -def build_maps(): - ''' Builds HRRR 12-hour accumulated maps ''' - args = ['maps', '-d', DATA_LOC, '-f', '0', '12', '1', '-o', OUTPUT_LOC,\ - '-s', '2023031500', '--file_tmpl', 'hrrr.t00z.wrfprsf{FCST_TIME:02d}.grib2', \ - '--images', './image_lists/hrrr_test.yml', 'hourly', '--all_leads', '--file_type=prs'] - create_graphics(args) - - -def test_parse_args(): - ''' Test parse_args for basic parsing success. - Checks if parse_args returns 'maps' in the graphic_type field. - ''' - args = ['maps', '-d', DATA_LOC, '-f', '0', '12', '1', '-o', OUTPUT_LOC,\ - '-s', '2021052315', '--file_tmpl', 'hrrr.t00z.wrfprsf{FCST_TIME:02d}.grib2', \ - '--images', './image_lists/hrrr_test.yml', 'hourly', '--all_leads', '--file_type=prs'] - test_args = parse_args(args) - assert test_args.graphic_type == 'maps' +from pytest import fixture + +from create_graphics import create_graphics, parse_args + +DATA_LOC = os.environ.get("DATA_LOC") -def test_folder_existence(_setup): - ''' Tests for existence of folders. - Can be extended to cover multiple folders. - ''' - folder = "/202303150000" - full_path = OUTPUT_LOC + folder - file_path = os.path.isdir(full_path) - assert file_path + +@fixture +def maps_args(tmp_path) -> list: + """Builds HRRR 12-hour accumulated maps.""" + return [ + "maps", + "-a", + "1", + "-d", + DATA_LOC, + "-f", + "0", + "6", + "1", + "-o", + str(tmp_path / "output"), + "-s", + "2023031500", + "--file_tmpl", + "hrrr.t00z.wrfprsf{FCST_TIME:02d}.grib2", + "--images", + "./image_lists/hrrr_test.yml", + "hourly", + "--all_leads", + "--file_type=prs", + ] + + +def test_hrrr_maps_parse_args(tmp_path): + """ + Test parse_args for basic parsing success. + Checks if parse_args returns 'maps' in the graphic_type field. + """ + args = [ + "maps", + "-d", + DATA_LOC, + "-f", + "0", + "12", + "1", + "-o", + str(tmp_path / "output"), + "-s", + "2021052315", + "--file_tmpl", + "hrrr.t00z.wrfprsf{FCST_TIME:02d}.grib2", + "--images", + "./image_lists/hrrr_test.yml", + "hourly", + "--all_leads", + "--file_type=prs", + ] + test_args = parse_args(args) + assert test_args.graphic_type == "maps" -def test_file_count(_setup): - ''' Test for file count in directory. - Can be extended to cover multiple folders. - ''' +def test_hrrr_maps_file_count(maps_args, tmp_path): + """ + Test for file count in directory. + Can be extended to cover multiple folders. 
+ """ # Based on the hrrr_test.yml file, only 6 maps will be created - map_count = 6 + create_graphics(maps_args) + map_count = 1 count = 0 - folder = "/202303150000/" - for file_name in os.listdir(OUTPUT_LOC + folder): - if os.path.isfile(OUTPUT_LOC + folder + file_name): + output = tmp_path / "output" / "202303150003" + for file_name in output.iterdir(): + if (output / file_name).is_file(): count += 1 assert count == map_count diff --git a/tests/test_specs.py b/tests/test_specs.py new file mode 100644 index 00000000..e278eaa9 --- /dev/null +++ b/tests/test_specs.py @@ -0,0 +1,98 @@ +from pathlib import Path + +import numpy as np +from pytest import fixture, mark + +from adb_graphics import specs, utils + + +class Spec(specs.VarSpec): + """ + Concrete class for the VarSpec abstract class. + """ + + cfg = utils.load_yaml(Path("adb_graphics/default_specs.yml")) + + @property + def clevs(self): + return np.asarray(range(15)) + + @property + def vspec(self): + return {"cmap": "rainbow"} + + +@fixture +def spec(): + return Spec() + + +def test_aod_colors(spec): + colors = spec.aod_colors + assert len(colors) == 15 + + +@mark.parametrize(("levels", "expected"), [(3, 3), (4, 4), (None, 16)]) +def test_centered_diff(levels, expected, spec): + colors = spec.centered_diff(nlev=levels) + assert len(colors) == expected + + +@mark.parametrize( + ("func", "expected"), + [ + ("aod_colors", 15), + ("cin_colors", 12), + ("ceil_colors", 14), + ("cldcov_colors", 13), + ("cref_colors", 16), + ("fire_power_colors", 7), + ("flru_colors", 4), + ("frzn_colors", 12), + ("goes_colors", 151), + ("graupel_colors", 19), + ("hail_colors", 10), + ("heat_flux_colors_g", 13), + ("heat_flux_colors_l", 17), + ("heat_flux_colors_s", 17), + ("icprb_colors", 10), + ("icsev_colors", 6), + ("lcl_colors", 19), + ("lifted_index_colors", 31), + ("mdn_colors", 18), + ("mean_vvel_colors", 19), + ("mup_colors", 18), + ("pbl_colors", 15), + ("pcp_colors", 10), + ("pcp_colors_high", 6), + ("pmsl_colors", 15), + ("ps_colors", 105), + ("pw_colors", 21), + ("radiation_colors", 27), + ("radiation_bw_colors", 80), + ("radiation_mix_colors", 130), + ("rainbow11_colors", 12), + ("rainbow12_colors", 13), + ("rainbow12_reverse", 13), + ("rainbow16_colors", 17), + ("shear_colors", 10), + ("slw_colors", 16), + ("smoke_colors", 15), + ("smoke_emissions_colors", 14), + ("snow_colors", 13), + ("soilm_colors", 12), + ("soilw_colors", 13), + ("t_colors", 15), + ("tsfc_colors", 21), + ("terrain_colors", 18), + ("ua_temp_colors", 32), + ("vis_colors", 142), + ("vvel_colors", 12), + ("vort_colors", 13), + ("wind_colors", 19), + ("wind_colors_high", 14), + ], +) +def test_colors(expected, func, spec): + colors = spec.__getattribute__(func) + assert len(colors) == expected diff --git a/tests/test_utils.py b/tests/test_utils.py new file mode 100644 index 00000000..f527a487 --- /dev/null +++ b/tests/test_utils.py @@ -0,0 +1,410 @@ +import signal +import time +from contextlib import contextmanager +from copy import deepcopy +from datetime import datetime, timedelta +from os import utime +from pathlib import Path +from unittest.mock import patch +from zipfile import ZipFile + +import numpy as np +import yaml +from pytest import mark, raises + +from adb_graphics import conversions, utils + + +@contextmanager +def timeout(duration): + def timeout_handler(signum, frame): # noqa: ARG001 + raise TimeoutError + + signal.signal(signal.SIGALRM, timeout_handler) + signal.alarm(duration) + try: + yield + finally: + signal.alarm(0) + + +def test_cfgrib_spec_no_model(): + 
config = {"foo": {"bar": "baz"}} + answer = utils.cfgrib_spec(config, "model") + assert answer == config + + +def test_cfgrib_spec_model(): + config = {"model": {"foo": {"bar": "baz"}}} + answer = utils.cfgrib_spec(config, "model") + assert answer == config["model"] + + +def test_create_zip(tmp_path): + afile = tmp_path / "a.txt" + bfile = tmp_path / "b.txt" + afile.touch() + bfile.touch() + zipf = tmp_path / "file.zip" + utils.create_zip([afile, bfile], zipf) + with ZipFile(zipf, "r") as zf: + assert zf.namelist() == ["a.txt", "b.txt"] + assert not afile.is_file() + assert not bfile.is_file() + + +def test_create_zip_existing_empty(tmp_path): + afile = tmp_path / "a.txt" + bfile = tmp_path / "b.txt" + afile.write_text("foo") + bfile.write_text("bar") + zipf = tmp_path / "file.zip" + zipf.touch() + assert zipf.stat().st_size == 0 + utils.create_zip([afile, bfile], zipf) + assert zipf.stat().st_size > 0 + with ZipFile(zipf, "r") as zf: + assert zf.namelist() == ["a.txt", "b.txt"] + assert not afile.is_file() + assert not bfile.is_file() + + +def test_create_zip_existing_nonempty(tmp_path): + afile = tmp_path / "a.txt" + bfile = tmp_path / "b.txt" + afile.write_text("foo") + a_mod_time = datetime(2025, 1, 1, 1, 0, 0).timestamp() + utime(afile, (a_mod_time, a_mod_time)) + bfile.write_text("bar") + zipf = tmp_path / "file.zip" + with ZipFile(zipf, "w") as zf: + zf.write(afile, arcname=afile.name) + utils.create_zip([afile, bfile], zipf) + with ZipFile(zipf, "r") as zf: + assert zf.namelist() == ["a.txt", "b.txt"] + # Make sure the file has the older modify time. + assert datetime(*zf.getinfo("a.txt").date_time) == datetime.fromtimestamp(a_mod_time) + assert not afile.is_file() + assert not bfile.is_file() + # Call again and make sure that the "overwrite" branch is not executed. + with patch.object(utils, "ZipFile") as zf: + utils.create_zip([afile, bfile], zipf) + zf.assert_called_once_with(zipf, "a") + + +def test_create_zip_existing_nonempty_overwrite(tmp_path): + afile = tmp_path / "a.txt" + bfile = tmp_path / "b.txt" + afile.write_text("foo") + bfile.write_text("bar") + zipf = tmp_path / "file.zip" + with ZipFile(zipf, "w") as zf: + zf.write(afile, arcname=afile.name) + # A newer archive file (mod time > previously archived file) will overwrite an older one. + a_mod_time = ( + datetime.now().replace(second=0, microsecond=0) + timedelta(minutes=5) + ).timestamp() + utime(afile, (a_mod_time, a_mod_time)) + utils.create_zip([afile, bfile], zipf) + with ZipFile(zipf, "r") as zf: + assert zf.namelist() == ["a.txt", "b.txt"] + # Make sure the archived file has the newer time. + assert datetime(*zf.getinfo("a.txt").date_time) == datetime.fromtimestamp(a_mod_time) + assert not afile.is_file() + assert not bfile.is_file() + + +def test_create_zip_error(tmp_path): + zipf = tmp_path / "file.zip" + # Using a different error here (not Exception or RuntimeError) to make sure anything gets + # caught in code under test. 
+ with ( + patch.object(utils.ZipFile, "write", side_effect=ValueError) as run, + raises(RuntimeError, match="Error writing zip file!"), + ): + utils.create_zip([Path(f) for f in ("afile", "bfile")], zipf) + assert run.call_count == 2 + + +def test_create_zip_locked(tmp_path): + afile = tmp_path / "a.txt" + bfile = tmp_path / "b.txt" + afile.touch() + bfile.touch() + zipf = tmp_path / "file.zip" + zipf_lock = tmp_path / "file.zip._lock" + zipf_lock.touch() + with raises(TimeoutError), timeout(2): + utils.create_zip([afile, bfile], zipf) + assert not zipf.is_file() + assert afile.is_file() + assert bfile.is_file() + + +@mark.parametrize( + ("arg", "expected"), + [ + ([1], [1]), + ([1, 9], list(range(1, 10))), + ([1, 9, 3], list(range(1, 10, 3))), + ([3, 4, 7, 19], [3, 4, 7, 19]), + ], +) +def test_fhr_list(arg, expected): + assert utils.fhr_list(arg) == expected + + +@mark.parametrize( + ("arg", "expected"), + [ + (datetime(2025, 10, 31, 12), "2025103112"), + (datetime(2025, 10, 31), "2025103100"), + (datetime(2025, 10, 31, 12, 1, 2), "2025103112"), + ], +) +def test_from_datetime(arg, expected): + assert utils.from_datetime(arg) == expected + + +@mark.parametrize( + ("arg", "expected"), + [ + ("conversions.to_micro", conversions.to_micro), + ("utils.join_ranges", utils.join_ranges), + ], +) +def test_get_func(arg, expected): + assert utils.get_func(arg) == expected + + +def test_get_func_undefined(): + with raises(ValueError): # noqa: PT011 + utils.get_func("foo.bar") + + +def test_join_ranges(): + yaml_str = """ + a: !join_ranges [[0, 10, 0.1], [10, 51, 1.0]] + b: !join_ranges [[0, 5], [4]] + c: !join_ranges [[2, 17, 7]] + """ + yaml.add_constructor("!join_ranges", utils.join_ranges, Loader=yaml.SafeLoader) + + d = yaml.safe_load(yaml_str) + assert np.array_equal(d["a"], np.concatenate([np.arange(0, 10, 0.1), np.arange(10, 51, 1.0)])) + assert np.array_equal(d["b"], np.asarray([0, 1, 2, 3, 4, 0, 1, 2, 3])) + assert np.array_equal(d["c"], np.asarray([2, 9, 16])) + + +def test_arange_constructor(): + yaml_str = """ + a: !arange [0, 10, 0.1] + b: !arange [0, 5] + c: !arange [2, 17, 7] + """ + yaml.add_constructor("!arange", utils.arange_constructor, Loader=yaml.SafeLoader) + + d = yaml.safe_load(yaml_str) + assert np.array_equal(d["a"], np.arange(0, 10, 0.1)) + assert np.array_equal(d["b"], np.asarray([0, 1, 2, 3, 4])) + assert np.array_equal(d["c"], np.asarray([2, 9, 16])) + + +def test_load_sites(): + sites_file = Path(__name__).parent.parent / "static" / "conus_raobs.txt" + sites = utils.load_sites(sites_file) + assert len(sites) == 91 + + +def test_load_sites_dne(): + sites_file = Path("foo.txt") + with raises(FileNotFoundError): + utils.load_sites(sites_file) + + +def test_load_sites_str(): + sites_file = Path(__name__).parent.parent / "static" / "conus_raobs.txt" + sites = utils.load_sites(str(sites_file)) + assert len(sites) == 91 + + +def test_load_specs(): + specs_file = Path(__name__).parent.resolve() / "adb_graphics" / "default_specs.yml" + specs = utils.load_specs(specs_file) + assert specs["file"] == specs_file + + +def test_load_specs_dne(): + specs_file = Path("foo.txt") + with raises(FileNotFoundError) as e: + utils.load_specs(specs_file) + assert str(specs_file) in str(e.value) + + +def test_load_specs_str(): + specs_file = Path(__name__).parent.resolve() / "adb_graphics" / "default_specs.yml" + specs = utils.load_specs(str(specs_file)) + assert specs["file"] == specs_file + + +def test_load_yaml(tmp_path): + yaml_str = """ + a: !float '{{ c[1] - 2 }}' + b: !join_ranges [[0, 
5], [4]] + c: !arange [2, 17, 7] + """ + cfg = tmp_path / "config.yaml" + cfg.write_text(yaml_str) + d = utils.load_yaml(cfg) + d.dereference() + assert d["a"] == 7 + assert np.array_equal(d["b"], np.asarray([0, 1, 2, 3, 4, 0, 1, 2, 3])) + assert np.array_equal(d["c"], np.asarray([2, 9, 16])) + + +@mark.parametrize( + ("lev", "expected"), + [ + ("max", ("", "")), + ("mup", ("", "")), + ("sfc", ("", "")), + ("mx02", (2, "mx")), + ("06km", (6, "km")), + ("100mb", (100, "mb")), + ("320m", (320, "m")), + ("6000ft", (6000, "ft")), + ], +) +def test_numeric_level(expected, lev): + assert utils.numeric_level(lev) == expected + + +@mark.parametrize("age", [0, 1, -1]) +def test_old_enough(age, tmp_path): + path = tmp_path / "foo.txt" + path.touch() + old_enough = utils.old_enough(age, path) + if age < 1: + assert old_enough + else: + assert not old_enough + + +def test_path_exists(): + path = Path(__name__).parent.resolve() + assert utils.path_exists(path) == path + + +def test_path_exists_dne(): + path = Path("foo.txt") + with raises(FileNotFoundError) as e: + utils.path_exists(path) + assert str(path) in str(e.value) + + +def test_path_exists_str(): + path = Path(__name__).parent.resolve() + assert utils.path_exists(str(path)) == path + + +@mark.parametrize( + "spec", + [ + {"level": 1}, + {"topLevel": 200}, + {"model": {"bottomLevel": 1}}, + {"Surface": 29}, + ], +) +def test_set_level_nlevel(spec): + orig = deepcopy(spec) + utils.set_level(level="200mb", model="model", spec={"cfgrib": spec}) + assert spec.get("level") == orig.get("level") + + +@mark.parametrize( + ("level", "expected"), + [ + ("100mb", 100), + ("600m", 600), + ("10m", 10), + ], +) +def test_set_level_nlevel_no_level_info(expected, level): + spec: dict = {} + utils.set_level(level=level, model="model", spec={"cfgrib": spec}) + assert spec.get("level") == expected + + +def test_set_level_nlevel_no_level_info_model(): + spec: dict = {"model": {}} + utils.set_level(level="250mb", model="model", spec={"cfgrib": spec}) + assert spec["model"]["level"] == 250 + + +def test_set_level_nonlevel(): + spec = {"typeOfLevel": "foo"} + utils.set_level(level="max", model="model", spec={"cfgrib": spec}) + assert spec.get("level") is None + + +def test_timer_returns_original_value(capsys): + @utils.timer + def add(a, b): + return a + b + + result = add(2, 3) + captured = capsys.readouterr() + assert result == 5 + assert "add Elapsed time:" in captured.out + assert "seconds" in captured.out + + +def test_timer_preserves_function_name_and_docstring(): + @utils.timer + def foo(): + """Original docstring.""" + return 42 + + assert foo.__name__ == "foo" + assert foo.__doc__ == "Original docstring." 
+ + +def test_timer_measures_expected_elapsed_time(capsys): + @utils.timer + def slow_func(): + time.sleep(0.01) + return "done" + + result = slow_func() + captured = capsys.readouterr() + assert result == "done" + # It should print something like: "slow_func Elapsed time: 0.0101 seconds" + assert "slow_func Elapsed time:" in captured.out + + +def test_timer_with_mocked_perf_counter(capsys): + """Make timing deterministic using mocks.""" + with patch("time.perf_counter", side_effect=[10.0, 12.5]): + + @utils.timer + def example(): + return "ok" + + result = example() + captured = capsys.readouterr() + + assert result == "ok" + assert "example Elapsed time: 2.5000 seconds" in captured.out + + +def test_to_datetime(): + assert utils.to_datetime("2025103112") == datetime(2025, 10, 31, 12, 0, 0) + + +def test_uniq_wgrib2_list(): + wgrib2_list_path = Path(__name__).parent.resolve() / "tests" / "data" / "wgrib2_submsg1.txt" + fields_list = wgrib2_list_path.read_text().split("\n") + uniq_list = utils.uniq_wgrib2_list(fields_list) + assert len(uniq_list) < len(fields_list) + assert len(uniq_list) == 1711