diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md index a500a942b..aae7f1f55 100644 --- a/CONTRIBUTORS.md +++ b/CONTRIBUTORS.md @@ -1,15 +1,17 @@ # Contributors -| GitHub user | Real Name | Affiliation | Date | -| --------------- | ---------------- | ----------- | ---------- | -| andrewcoughtrie | Andrew Coughtrie | Met Office | 2025.12.12 | -| james-bruten-mo | James Bruten | Met Office | 2025-12-09 | -| jedbakerMO | Jed Baker | Met Office | 2025-12-29 | -| jennyhickson | Jenny Hickson | Met Office | 2025-12-10 | -| mo-marqh | Mark Hedley | Met Office | 2025-12-11 | -| mike-hobson | Mike Hobson | Met Office | 2025-12-17 | -| MatthewHambley | Matthew Hambley | Met Office | 2025-12-15 | -| yaswant | Yaswant Pradhan | Met Office | 2025-12-16 | -| stevemullerworth | Steve Mullerworth | Met Office | 2026-01-08 | -| harry-shepherd | Harry Shepherd | Met Office | 2026-01-08 | -| EdHone | Ed Hone | Met Office | 2026-01-09 | -| tom-j-h | Tom Hill | Met Office | 2026-01-19 | + +| GitHub user | Real Name | Affiliation | Date | +| ---------------- | ----------------- | --------------------- | ---------- | +| andrewcoughtrie | Andrew Coughtrie | Met Office | 2025.12.12 | +| james-bruten-mo | James Bruten | Met Office | 2025-12-09 | +| jedbakerMO | Jed Baker | Met Office | 2025-12-29 | +| jennyhickson | Jenny Hickson | Met Office | 2025-12-10 | +| mo-marqh | Mark Hedley | Met Office | 2025-12-11 | +| mike-hobson | Mike Hobson | Met Office | 2025-12-17 | +| MatthewHambley | Matthew Hambley | Met Office | 2025-12-15 | +| yaswant | Yaswant Pradhan | Met Office | 2025-12-16 | +| stevemullerworth | Steve Mullerworth | Met Office | 2026-01-08 | +| harry-shepherd | Harry Shepherd | Met Office | 2026-01-08 | +| EdHone | Ed Hone | Met Office | 2026-01-09 | +| tom-j-h | Tom Hill | Met Office | 2026-01-19 | +| hiker | Joerg Henrichs | Bureau of Meteorology | 2026-01-22 | diff --git a/applications/skeleton/fab_skeleton.py b/applications/skeleton/fab_skeleton.py new file mode 100755 index 
000000000..11df842c1 --- /dev/null +++ b/applications/skeleton/fab_skeleton.py @@ -0,0 +1,74 @@ +#!/usr/bin/env python3 + +############################################################################## +# (c) Crown copyright Met Office. All rights reserved. +# The file LICENCE, distributed with this code, contains details of the terms +# under which the code may be used. +############################################################################## +# Author: J. Henrichs, Bureau of Meteorology +# Author: J. Lyu, Bureau of Meteorology + +""" +A FAB build script for applications/skeleton. It relies on +the LFRicBase class contained in the infrastructure directory. +""" + +import logging +from pathlib import Path +import sys + +from fab.steps.grab.folder import grab_folder + +# We need to import the base class: +sys.path.insert(0, str(Path(__file__).parents[2] / "infrastructure" / + "build" / "fab")) + +from lfric_base import LFRicBase # noqa: E402 + + +class FabSkeleton(LFRicBase): + """ + A Fab-based build script for skeleton. It relies on the LFRicBase class + to implement the actual functionality, and only provides the required + source files. + + :param name: The name of the application. + """ + + def __init__(self, name: str) -> None: + super().__init__(name=name) + # Store the root of this apps for later + this_file = Path(__file__).resolve() + self._this_root = this_file.parent + + def grab_files_step(self) -> None: + """ + Grabs the required source files and optimisation scripts. + """ + super().grab_files_step() + dirs = ['applications/skeleton/source/'] + + # pylint: disable=redefined-builtin + for dir in dirs: + grab_folder(self.config, src=self.lfric_core_root / dir, + dst_label='') + + # Copy the optimisation scripts into a separate directory + grab_folder(self.config, src=self._this_root / "optimisation", + dst_label='optimisation') + + def get_rose_meta(self) -> Path: + """ + :returns: the rose-meta.conf path. 
+ """ + return (self._this_root / 'rose-meta' / 'lfric-skeleton' / 'HEAD' / + 'rose-meta.conf') + + +# ----------------------------------------------------------------------------- +if __name__ == '__main__': + + logger = logging.getLogger('fab') + logger.setLevel(logging.DEBUG) + fab_skeleton = FabSkeleton(name="skeleton") + fab_skeleton.build() diff --git a/infrastructure/build/fab/README.md b/infrastructure/build/fab/README.md new file mode 100644 index 000000000..5c22299be --- /dev/null +++ b/infrastructure/build/fab/README.md @@ -0,0 +1,54 @@ +# LFRic Core Fab Build Scripts + +Make sure you have Fab version 2.0.1 or later installed (in addition to all +LFRic core requirements of course). + +## Setting up Site- and Platform-specific Settings +Site- and platform-specific settings are contained in +```$LFRIC_CORE/infrastructure/build/site-specific/${SITE}-${PLATFORM}``` +The default settings are in ```.../site-specific/default``` (and at this +stage each other site-specific setup inherits the values set in the default, +and then adds or modifies settings). The Fab build system provides various +callbacks to the ```config.py``` file in the corresponding directory (details +are in the [Fab documentation](https://metoffice.github.io/fab/fab_base/config.html). + +If there is no existing site-specific setup, it is recommended to copy an existing +configuration file (e.g. from ```nci_gadi/config.py```). This act as a template +to indicate where you can specify linker information, select a default compiler +suite etc. + +The default setup contains compiler flags for Cray, GNU, Intel-classic (ifort), +Intel-LLVM (ifx), and NVIDIA. For modularity's sake (and to keep the file length +shorter), the default configuration will get the settings from the corresponding +```setup_...py``` script. There is no need for a site to replicate this structure, +existing ```config.py``` scripts show how this can be done. 
+ + +## Building the Skeleton Apps + +In order to build the skeleton apps, change into the directory +```$LFRIC_CORE/applications/skeleton```, +and use the following command: + +``` +./fab_skeleton.py --nprocs 4 --site nci --platform gadi --suite intel-classic +``` +Select an appropriate number of processes to run in parallel, and your site and platform. +If you don't have a default compiler suite in your site-specific setup (or +want to use a non-default suite), use the ``--suite`` option. Once the process is finished, +you should have a binary in the directory +```./fab-workspace/skeleton-full-debug-COMPILER``` (where ```COMPILER``` is the compiler +used, e.g. ```mpif90-gfortran```). + +Using ```./fab_skeleton.py -h``` will show a help message with all supported command line +options (and their default value). If a default value is listed using an environment +variables (```(default: $SITE or 'default')```), the corresponding environment variable +is used if no command line option has been specified. + +A different compilation profile can be specified using ```--profile``` option. Note +that the available compilation profiles can vary from site to site (see +[Fab documentation](https://metoffice.github.io/fab/fab_base/config.html) for details). + +If Fab has issues finding a compiler, you can use the Fab debug option +```--available-compilers```, which will list all compilers and linkers Fab has +identified as being available. diff --git a/infrastructure/build/fab/configurator.py b/infrastructure/build/fab/configurator.py new file mode 100755 index 000000000..1c40f383a --- /dev/null +++ b/infrastructure/build/fab/configurator.py @@ -0,0 +1,92 @@ +############################################################################## +# (c) Crown copyright Met Office. All rights reserved. +# The file LICENCE, distributed with this code, contains details of the terms +# under which the code may be used. 
+############################################################################## +# Author J. Henrichs, Bureau of Meteorology +# Author J. Lyu, Bureau of Meteorology + +""" +This file defines the configurator script sequence for LFRic. +""" + +import logging +from pathlib import Path +from typing import cast, Optional + +from fab.api import BuildConfig, find_source_files, Category +from fab.tools.shell import Shell + +from rose_picker_tool import RosePicker + +logger = logging.getLogger('fab') + + +def configurator(config: BuildConfig, + lfric_core_source: Path, + rose_meta_conf: Path, + rose_picker: RosePicker, + include_paths: Optional[list[Path]] = None, + config_dir: Optional[Path] = None) -> None: + """ + This method implements the LFRic configurator tool. + + :param config: the Fab build config instance + :param lfric_core_source: the path to the LFRic core directory + :param rose_meta_conf: the path to the rose-meta configuration file + :param rose_picker: the rose picker tool + :param include_paths: additional include paths (each path will be added, + as well as the path with /'rose-meta') + :param config_dir: the directory for the generated configuration files + """ + + tools = lfric_core_source / 'infrastructure' / 'build' / 'tools' + config_dir = config_dir or config.build_output / 'configuration' + config_dir.mkdir(parents=True, exist_ok=True) + + # rose picker + # ----------- + # creates rose-meta.json and config_namelists.txt in + # gungho/build + logger.info('rose_picker') + + include_dirs = [lfric_core_source, lfric_core_source / 'rose-meta'] + if include_paths: + for path in include_paths: + include_dirs.extend([path, path / 'rose-meta']) + + parameters = [rose_meta_conf, '-directory', config_dir] + for incl_dir in include_dirs: + parameters.extend(['-include_dirs', incl_dir]) + + rose_picker.execute(parameters=parameters) + rose_meta = config_dir / 'rose-meta.json' + + shell = config.tool_box.get_tool(Category.SHELL) + shell = cast(Shell, shell) + 
+ # build_config_loaders + # -------------------- + # builds a bunch of f90s from the json + logger.info('GenerateNamelist') + shell.exec(f"{tools / 'GenerateNamelist'} -verbose {rose_meta} " + f"-directory {config_dir}") + + # create configuration_mod.f90 in source root + # ------------------------------------------- + logger.info('GenerateLoader') + with open(config_dir / 'config_namelists.txt', encoding="utf8") as f_in: + names = [name.strip() for name in f_in.readlines()] + + configuration_mod_fpath = config_dir / 'configuration_mod.f90' + shell.exec(f"{tools / 'GenerateLoader'} {configuration_mod_fpath} " + f"{' '.join(names)}") + + # create feign_config_mod.f90 in source root + # ------------------------------------------ + logger.info('GenerateFeigns') + feign_config_mod_fpath = config_dir / 'feign_config_mod.f90' + shell.exec(f"{tools / 'GenerateFeigns'} {rose_meta} " + f"-output {feign_config_mod_fpath}") + + find_source_files(config, source_root=config_dir) diff --git a/infrastructure/build/fab/lfric_base.py b/infrastructure/build/fab/lfric_base.py new file mode 100755 index 000000000..a438f8f14 --- /dev/null +++ b/infrastructure/build/fab/lfric_base.py @@ -0,0 +1,435 @@ +############################################################################## +# (c) Crown copyright Met Office. All rights reserved. +# The file LICENCE, distributed with this code, contains details of the terms +# under which the code may be used. +############################################################################## +# Author: J. Henrichs, Bureau of Meteorology +# Author: J. Lyu, Bureau of Meteorology + +""" +This is an OO basic interface to FAB. It allows the typical LFRic +applications to only modify very few settings to have a working FAB build +script. 
+""" + +import argparse +import os +from pathlib import Path +import sys +from typing import List, Optional, Iterable, Union + +from fab.api import (ArtefactSet, BuildConfig, Exclude, grab_folder, Include, + input_to_output_fpath, preprocess_x90, psyclone, + SuffixFilter) +from fab.fab_base.fab_base import FabBase + +from configurator import configurator +from rose_picker_tool import get_rose_picker +from templaterator import Templaterator + + +class LFRicBase(FabBase): + ''' + This is the base class for all LFRic FAB scripts. + + :param name: the name to be used for the workspace. Note that + the name of the compiler will be added to it. + :param root_symbol: the symbol (or list of symbols) of the main + programs. Defaults to the parameter `name` if not specified. + + ''' + # pylint: disable=too-many-instance-attributes + def __init__(self, name: str, + root_symbol: Optional[Union[List[str], str]] = None + ): + + # List of all precision preprocessor symbols and their default. + # Used to add corresponding command line options, and then to define + # the preprocessor definitions. + self._all_precisions = [("RDEF_PRECISION", "64"), + ("R_SOLVER_PRECISION", "32"), + ("R_TRAN_PRECISION", "64"), + ("R_BL_PRECISION", "64")] + + super().__init__(name) + + this_file = Path(__file__) + # The root directory of the LFRic Core + self._lfric_core_root = this_file.parents[3] + + # If the user wants to overwrite the default root symbol (which + # is `name`): + if root_symbol: + self.set_root_symbol(root_symbol) + + self._psyclone_config = (self.config.source_root / 'psyclone_config' / + 'psyclone.cfg') + # Many PSyclone scripts use module(s) from this directory. Additional + # paths might need to be added later. 
+ self._add_python_paths = [str(self.lfric_core_root / "infrastructure" / + "build" / "psyclone")] + + def define_command_line_options( + self, + parser: Optional[argparse.ArgumentParser] = None + ) -> argparse.ArgumentParser: + ''' + This adds LFRic specific command line options to the base class + define_command_line_option. Currently, --rose_picker and + precision-related options are added. + + :param parser: optional a pre-defined argument parser. + + :returns: the argument parser with the LFRic specific options added. + ''' + parser = super().define_command_line_options() + + parser.add_argument( + '--rose_picker', '-rp', type=str, default="system", + help="Version of rose_picker. Use 'system' to use an installed " + "version.") + + parser.add_argument( + '--no-xios', action="store_true", default=False, + help="Disable compilation with XIOS.") + + # Precision related command line arguments + # ---------------------------------------- + group = parser.add_argument_group( + title="Precisions", + description="Arguments related to setting the floating " + "point precision.") + + group.add_argument( + '--precision-default', type=str, default=None, + choices=['32', '64'], help="Default precision for reals.") + + # We need to distinguish if a user specified a value (even if it is + # the default), or not. Use the following action for argparse: + class StoreWithFlag(argparse.Action): + """ + Helper class to add a `XX_specified` entry for command line + options that the user has explicitly specified. + """ + def __call__(self, parser, namespace, values, option_string=None): + setattr(namespace, self.dest, values) + setattr(namespace, f"{self.dest}_specified", True) + + for prec_name, default in self._all_precisions: + lower_name = prec_name.lower() + group.add_argument( + f'--{lower_name}', type=str, choices=['32', '64'], + default=default, action=StoreWithFlag, + help=f"Precision for '{prec_name}'. 
Default will be " + f"overwritten by ${prec_name} or --precision-default " + f"in this order.") + + return parser + + @property + def lfric_core_root(self) -> Path: + ''' + :returns: the root directory of the LFRic core repository. + ''' + return self._lfric_core_root + + def setup_site_specific_location(self): + ''' + This method adds the required directories for site-specific + configurations to the Python search path. We want to add the + directory where this lfric_base class is located, and not the + directory in which the application script is (which is what + baf base would set up). + ''' + this_dir = Path(__file__).parent + sys.path.insert(0, str(this_dir)) + # We need to add the 'site_specific' directory to the path, so + # each config can import from 'default' (instead of having to + # use 'site_specific.default', which would hard-code the name + # `site_specific` in more scripts). + sys.path.insert(0, str(this_dir / "site_specific")) + + def define_preprocessor_flags_step(self) -> None: + ''' + This method overwrites the base class define_preprocessor_flags. + It uses add_preprocessor_flags to set up preprocessing flags for LFRic + applications. 
This includes: + - various floating point precision related directives + - Use of XIOS (if not disabled using --no-xios command line option) + - Disabling MPI (if disabled using --no-mpi) + ''' + preprocessor_flags: List[str] = [] + + # Take the value of --precision-default (or None if not specified): + generic_default = self.args.precision_default + + # Check all required precision defines + for prec_name, prec_default in self._all_precisions: + # Check if a value was specified on the command line: + if getattr(self.args, f"{prec_name.lower()}_specified", False): + value = getattr(self.args, prec_name.lower()) + preprocessor_flags.append(f"-D{prec_name}={value}") + continue + # Check for environment variable which can overwrite the default: + env_precision = os.environ.get(prec_name) + if env_precision: + preprocessor_flags.append(f"-D{prec_name}={env_precision}") + continue + + # No command line option for the current precision name. + # Check if a default was set (--precision-default) + if generic_default: + preprocessor_flags.append(f"-D{prec_name}=" + f"{generic_default}") + else: + # Otherwise, use the default for this precision + preprocessor_flags.append(f"-D{prec_name}={prec_default}") + + # core/components/lfric-xios/build/import.mk + if not self.args.no_xios: + preprocessor_flags.append('-DUSE_XIOS') + + if not self.config.mpi: + preprocessor_flags.append("-DNO_MPI") + + self.add_preprocessor_flags(preprocessor_flags) + + def get_linker_flags(self) -> List[str]: + ''' + This method overwrites the base class get_liner_flags. It passes the + libraries that LFRic uses to the linker. Currently, these libraries + include yaxt, xios, netcdf and hdf5. + + :returns: list of flags for the linker. + ''' + libs = ['yaxt', 'xios', 'netcdf', 'hdf5'] + return libs + super().get_linker_flags() + + def grab_files_step(self) -> None: + ''' + This method overwrites the base class grab_files_step. 
It includes all + the LFRic core directories that are commonly required for building + LFRic applications. It also grabs the psydata directory for profiling, + if required. + ''' + dirs = ['infrastructure/source/', + 'components/driver/source/', + 'components/inventory/source/', + 'components/science/source/', + 'components/lfric-xios/source/', + ] + + # pylint: disable=redefined-builtin + for dir in dirs: + grab_folder(self.config, src=self.lfric_core_root / dir, + dst_label='') + + # Copy the PSyclone Config file into a separate directory + dir = "etc" + grab_folder(self.config, src=self.lfric_core_root / dir, + dst_label='psyclone_config') + + def find_source_files_step( + self, + path_filters: Optional[Iterable[Union[Exclude, Include]]] = None + ) -> None: + ''' + This method overwrites the base class find_source_files_step. + It first calls the configurator_step to set up the configurator. + Then it finds all the source files in the LFRic core directories, + excluding the unit tests. Finally, it calls the templaterator_step. + + :param path_filters: optional list of path filters to be passed to + Fab find_source_files, default is None. + :type path_filters: Optional[Iterable[Exclude, Include]] + ''' + self.configurator_step() + + path_filter_list = list(path_filters) if path_filters else [] + path_filter_list.append(Exclude('unit-test', '/test/')) + super().find_source_files_step(path_filters=path_filter_list) + + self.templaterator_step(self.config) + + def configurator_step( + self, + include_paths: Optional[list[Path]] = None) -> None: + ''' + This method first gets the rose meta data information by calling + get_rose_meta. If the rose meta data is available, it then get the + rose picker tool by calling the get_rose_picker. Finally, it runs + the LFRic configurator with the LFRic core and apps sources by calling + configurator. 
+ + :param include_paths: optional additional include paths + ''' + rose_meta = self.get_rose_meta() + if rose_meta: + # Get the right version of rose-picker, depending on + # command line option (defaulting to v2.0.0) + # TODO: Ideally we would just put this into the toolbox, + # but atm we can't put several tools of one category in + # (so ToolBox will need to support more than one MISC tool) + rp = get_rose_picker(self.args.rose_picker) + # Ideally we would want to get all source files created in + # the build directory, but then we need to know the list of + # files to add them to the list of files to process. Instead, + # we create the files in the source directory, and find them + # there later. + include_paths = include_paths or [] + configurator(self.config, lfric_core_source=self.lfric_core_root, + rose_meta_conf=rose_meta, + include_paths=include_paths, + rose_picker=rp) + + def templaterator_step(self, config: BuildConfig) -> None: + ''' + This method runs the LFRic templaterator Fab tool. 
+ + :param config: the Fab build configuration + :type config: :py:class:`fab.BuildConfig` + ''' + base_dir = self.lfric_core_root / "infrastructure" / "build" / "tools" + + templaterator = Templaterator(base_dir/"Templaterator") + config.artefact_store["template_files"] = set() + t90_filter = SuffixFilter(ArtefactSet.INITIAL_SOURCE_FILES, + [".t90", ".T90"]) + template_files = t90_filter(config.artefact_store) + # Don't bother with parallelising this, atm there is only one file: + for template_file in template_files: + out_dir = input_to_output_fpath(config=config, + input_path=template_file).parent + out_dir.mkdir(parents=True, exist_ok=True) + templ_r32 = {"kind": "real32", "type": "real"} + templ_r64 = {"kind": "real64", "type": "real"} + templ_i32 = {"kind": "int32", "type": "integer"} + for key_values in [templ_r32, templ_r64, templ_i32]: + out_file = out_dir / f"field_{key_values['kind']}_mod.f90" + templaterator.process(template_file, out_file, + key_values=key_values) + config.artefact_store.add(ArtefactSet.FORTRAN_COMPILER_FILES, + out_file) + + def get_rose_meta(self) -> Optional[Path]: + ''' + This method returns the path to the rose meta data config file. + Currently, it returns none. It's up to the LFRic applications to + overwrite if required. + ''' + return None + + def analyse_step( + self, + ignore_dependencies: Optional[Iterable[str]] = None, + find_programs: bool = False + ) -> None: + ''' + The method overwrites the base class analyse_step. + For LFRic, it first runs the preprocess_x90_step and then runs + psyclone_step. Finally, it calls Fab's analyse for dependency + analysis, ignoring the third party modules that are commonly + used by LFRic. 
+ ''' + if ignore_dependencies is None: + ignore_dependencies = [] + # core/infrastructure/build/import.mk + ignore_dep_list = list(ignore_dependencies) + ignore_dep_list += ['netcdf', 'mpi', 'mpi_f08', 'yaxt'] + # From core/components/lfric-xios/build/import.mk + ignore_dep_list += ['xios', 'icontext', 'mod_wait'] + + self.preprocess_x90_step() + self.psyclone_step(ignore_dependencies=ignore_dep_list) + super().analyse_step( + ignore_dependencies=ignore_dep_list, + find_programs=find_programs) + + def preprocess_x90_step(self) -> None: + """ + Invokes the Fab preprocess step for all X90 files. + """ + # TODO: Fab does not support path-specific flags for X90 files. + preprocess_x90(self.config, + common_flags=self.preprocess_flags_common) + + def psyclone_step( + self, + ignore_dependencies: Optional[Iterable[str]] = None, + additional_parameters: Optional[list[str]] = None + ) -> None: + ''' + This method runs Fab's psyclone. It first sets the additional psyclone + command line arguments by calling get_psyclone_config to get the + PSyclone configuration file and by calling + `get_additional_psyclone_options` to get additional psyclone command + line set by the user, e.g. for profiling, if any. Finally, Fab's + psyclone is called with the Fab build configuration, the kernel root + directory, the transformation script got through calling + `get_transformation_script`, the api, and the additional psyclone + command line arguments. + + :param ignore_dependencies: + :param additional_parameters: optional additional parameter for the + PSyclone. 
+ ''' + psyclone_cli_args = self.get_psyclone_config() + psyclone_cli_args.extend(self.get_additional_psyclone_options()) + if additional_parameters: + psyclone_cli_args.extend(additional_parameters) + + # To avoid impacting other code, store the original search path + old_sys_path = sys.path[:] + sys.path.extend(self._add_python_paths) + psyclone(self.config, kernel_roots=[(self.config.build_output / + "kernel")], + transformation_script=self.get_transformation_script, + api="dynamo0.3", + cli_args=psyclone_cli_args, + ignore_dependencies=ignore_dependencies) + sys.path = old_sys_path + + def get_psyclone_config(self) -> List[str]: + ''' + :returns: the command line options to pick the right + PSyclone config file. + ''' + return ["--config", str(self._psyclone_config)] + + def get_additional_psyclone_options(self) -> List[str]: + ''' + A placeholder for additional PSyclone comand line options. + ''' + return [] + + def get_transformation_script(self, fpath: Path, + config: BuildConfig) -> Optional[Path]: + ''' + This method returns the path to the transformation script that PSyclone + will use for each x90 file. It first checks if there is a specific + transformation script for the x90 file. If not, it will see whether a + global transformation script can be used. + + :param fpath: the path to the file being processed. + :param config: the FAB BuildConfig instance. + :returns: the transformation script to be used by PSyclone. 
+ ''' + # Newer LFRic versions have a psykal directory + optimisation_path = (config.source_root / "optimisation" / + f"{self.site}-{self.platform}" / "psykal") + relative_path = None + for base_path in [config.source_root, config.build_output]: + try: + relative_path = fpath.relative_to(base_path) + except ValueError: + pass + if relative_path: + local_transformation_script = (optimisation_path / + (relative_path.with_suffix('.py'))) + if local_transformation_script.exists(): + return local_transformation_script + + global_transformation_script = optimisation_path / 'global.py' + if global_transformation_script.exists(): + return global_transformation_script + return None diff --git a/infrastructure/build/fab/rose_picker_tool.py b/infrastructure/build/fab/rose_picker_tool.py new file mode 100755 index 000000000..202af1c07 --- /dev/null +++ b/infrastructure/build/fab/rose_picker_tool.py @@ -0,0 +1,120 @@ +############################################################################## +# (c) Crown copyright Met Office. All rights reserved. +# The file LICENCE, distributed with this code, contains details of the terms +# under which the code may be used. +############################################################################## +# Author J. Henrichs, Bureau of Meteorology +# Author J. Lyu, Bureau of Meteorology + +""" +This module contains a function that returns a working version of a +rose_picker tool. It can either be a version installed in the system, +or otherwise a checked-out version in the fab-workspace will be used. +If required, a version of rose_picker will be checked out. +""" + +import logging +import os +from pathlib import Path +import shutil +from typing import cast, List, Union + +from fab.api import Category, Tool, ToolRepository +from fab.tools.versioning import Fcm +from fab.util import get_fab_workspace + +logger = logging.getLogger('fab') + + +class RosePicker(Tool): + '''This implements rose_picker as a Fab tool. 
It supports dynamically + adding the required PYTHONPATH to the environment in case that rose_picker + is not installed, but downloaded. + + :param Path path: the path to the rose picker binary. + ''' + def __init__(self, path: Path): + super().__init__("rose_picker", exec_name=str(path)) + # This is the required PYTHONPATH for running rose_picker + # when it is installed from the repository: + self._pythonpath = path.parents[1] / "lib" / "python" + + def check_available(self) -> bool: + ''' + :returns bool: whether rose_picker works by running + `rose_picker -help`. + ''' + try: + self.run(additional_parameters="-help") + except RuntimeError: + return False + + return True + + def execute(self, parameters: List[Union[Path, str]]) -> None: + ''' + This wrapper adds the required PYTHONPATH, and passes all + parameters through to the tool's run function. + + :param additional_parameter: A list of parameters for rose picker. + ''' + env = os.environ.copy() + env["PYTHONPATH"] = (f"{env.get('PYTHONPATH', '')}:" + f"{self._pythonpath}") + + self.run(additional_parameters=parameters, env=env) + + +# ============================================================================= +def get_rose_picker(tag: str = "v2.0.0") -> RosePicker: + ''' + Returns a Fab RosePicker tool. It can either be a version installed + in the system, which is requested by setting tag to `system`, or a + newly installed version via an FCM checkout. If there is already a + checked-out version, it will be used (i.e. no repeated downloads are + done). + + :param tag: Either the tag in the repository to use, + or 'system' to indicate to use a version installed in the system. 
+ + :returns RosePicker: a Fab RosePicker tool instance + ''' + + if tag.lower() == "system": + # 'system' means to use a rose_picker installed in the system + which_rose_picker = shutil.which("rose_picker") + if not which_rose_picker: + raise RuntimeError("Cannot find system rose_picker tool.") + return RosePicker(Path(which_rose_picker)) + + # Otherwise use rose_picker from the default Fab workspace. It will + # create a instance of the class above, which will add its path to + # PYTHONPATH when executing a rose_picker command. + + gpl_utils = get_fab_workspace() / f"gpl-utils-{tag}" / "source" + rp_path = gpl_utils / "bin" / "rose_picker" + rp = RosePicker(rp_path) + + # If the tool is not available (the class will run `rose_picker -help` + # to verify this ), install it + if not rp.is_available: + fcm = ToolRepository().get_default(Category.FCM) + fcm = cast(Fcm, fcm) + # TODO: atm we are using fcm for the checkout, because using FCM + # keywords is more portable. We cannot use a Fab config (since this + # function is called from within a Fab build), so that means the + # gpl-utils-* directories in the Fab workspace directories do not + # have the normal directory layout. + logger.info(f"Installing rose_picker tag '{tag}'.") + fcm.checkout(src=f'fcm:lfric_gpl_utils.x/tags/{tag}', + dst=gpl_utils) + + # We need to create a new instance, since `is_available` is + # cached (I.e. it's always false in the previous instance) + rp = RosePicker(rp_path) + + if not rp.is_available: + msg = f"Cannot run rose_picker tag '{tag}'." 
class Config:
    '''
    This class is the default Configuration object for Baf builds.
    It provides several callbacks which will be called from the build
    scripts to allow site-specific customisations.
    '''

    def __init__(self) -> None:
        # The parsed command line options. This stays ``None`` until
        # `handle_command_line_options` is called by the build script.
        self._args = None

    @property
    def args(self) -> "argparse.Namespace":
        '''
        :returns argparse.Namespace: the command line options specified
            by the user. Note that this is ``None`` until
            `handle_command_line_options` has been called.
        '''
        return self._args

    def get_valid_profiles(self) -> List[str]:
        '''
        Determines the list of all allowed compiler profiles. The first
        entry in this list is the default profile to be used. This method
        can be overwritten by site configs to add or modify the supported
        profiles.

        :returns List[str]: list of all supported compiler profiles.
        '''
        return ["full-debug", "fast-debug", "production", "unit-tests"]

    def update_toolbox(self, build_config: "BuildConfig") -> None:
        '''
        Set the default compiler flags for the various compilers
        that are supported.

        :param build_config: the Fab build configuration instance
        '''
        # First create the default compiler profiles for all available
        # compilers. While we have a tool box with exactly one compiler
        # in it, compiler wrappers will require more than one compiler
        # to be initialised - so we just initialise all of them (including
        # the linker):
        tr = ToolRepository()
        for compiler in (tr[Category.C_COMPILER] +
                         tr[Category.FORTRAN_COMPILER] +
                         tr[Category.LINKER]):
            # Define a base profile, which contains the common
            # compilation flags. This 'base' is not accessible to
            # the user, so it's not part of the profile list. Also,
            # make it inherit from the default profile '', so that
            # a user does not have to specify the "base" profile.
            # Note that we set this even if a compiler is not available.
            # This is required in case that compilers are not in PATH,
            # so e.g. mpif90-ifort works, but ifort cannot be found.
            # We still need to be able to set and query flags for ifort.
            compiler.define_profile("base", inherit_from="")
            for profile in self.get_valid_profiles():
                compiler.define_profile(profile, inherit_from="base")

        self.setup_intel_classic(build_config)
        self.setup_intel_llvm(build_config)
        self.setup_gnu(build_config)
        self.setup_nvidia(build_config)
        self.setup_cray(build_config)

    def handle_command_line_options(self, args: "argparse.Namespace") -> None:
        '''
        Additional callback function executed once all command line
        options have been added. This is for example used to add
        Vernier profiling flags, which are site-specific.

        :param argparse.Namespace args: the command line options added in
            the site configs
        '''
        # Keep a copy of the args, so they can be used when
        # initialising compilers
        self._args = args

    def setup_cray(self, build_config: "BuildConfig") -> None:
        '''
        This method sets up the Cray compiler and linker flags.
        For now call an external function, since it is expected that
        this configuration can be very lengthy (once we support
        compiler modes).

        :param build_config: the Fab build configuration instance
        :type build_config: :py:class:`fab.BuildConfig`
        '''
        setup_cray(build_config, self.args)

    def setup_gnu(self, build_config: "BuildConfig") -> None:
        '''
        This method sets up the Gnu compiler and linker flags.
        For now call an external function, since it is expected that
        this configuration can be very lengthy (once we support
        compiler modes).

        :param build_config: the Fab build configuration instance
        :type build_config: :py:class:`fab.BuildConfig`
        '''
        setup_gnu(build_config, self.args)

    def setup_intel_classic(self, build_config: "BuildConfig") -> None:
        '''
        This method sets up the Intel classic compiler and linker flags.
        For now call an external function, since it is expected that
        this configuration can be very lengthy (once we support
        compiler modes).

        :param build_config: the Fab build configuration instance
        :type build_config: :py:class:`fab.BuildConfig`
        '''
        setup_intel_classic(build_config, self.args)

    def setup_intel_llvm(self, build_config: "BuildConfig") -> None:
        '''
        This method sets up the Intel LLVM compiler and linker flags.
        For now call an external function, since it is expected that
        this configuration can be very lengthy (once we support
        compiler modes).

        :param build_config: the Fab build configuration instance
        :type build_config: :py:class:`fab.BuildConfig`
        '''
        setup_intel_llvm(build_config, self.args)

    def setup_nvidia(self, build_config: "BuildConfig") -> None:
        '''
        This method sets up the Nvidia compiler and linker flags.
        For now call an external function, since it is expected that
        this configuration can be very lengthy (once we support
        compiler modes).

        :param build_config: the Fab build configuration instance
        :type build_config: :py:class:`fab.BuildConfig`
        '''
        setup_nvidia(build_config, self.args)

    def get_path_flags(self, build_config: "BuildConfig") -> "List[AddFlags]":
        '''
        Returns the path-specific flags to be used.
        TODO FAB #313: Ideally we have only one kind of flag, but as a quick
        work around we provide this method.

        :param build_config: the Fab build configuration instance
        :returns: list of path-specific flags (empty by default).
        '''
        return []
+ "-hnocaf", # Required for linking with C++ + ] + + # Handle accelerator options: + if args.openacc or args.openmp: + host = args.host.lower() + else: + # Neither openacc nor openmp specified + host = "" + + if args.openacc: + if host == "gpu": + flags.extend(["-h acc"]) + else: + # CPU + flags.extend(["-h acc"]) + elif args.openmp: + if host == "gpu": + flags.extend([]) + else: + # OpenMP on CPU, that's already handled by Fab + pass + + ftn.add_flags(flags, "base") + + # Full debug + # ========== + ftn.add_flags(["-Ktrap=fp", # floating point checking + "-R", "bcdps", # bounds, array shape, collapse, + # pointer, string checking + "-O0"], # No optimisation + "full-debug") + if ftn.get_version() >= (15, 0): + ftn.add_flags(["-G0"], "full-debug") + else: + ftn.add_flags(["-Gfast"], "full-debug") + + # Fast debug + # ========== + ftn.add_flags(["-O2", "-hflex_mp=strict"], "fast-debug") + if ftn.get_version() >= (15, 0): + ftn.add_flags(["-G2"], "fast-debug") + else: + ftn.add_flags(["-Gfast"], "fast-debug") + + # Production + # ========== + ftn.add_flags(["-O3", "-hipa3", "-m", "3"], "production") + + # Set up the linker + # ================= + linker = tr.get_tool(Category.LINKER, f"linker-{ftn.name}") + linker = cast(Linker, linker) + + # ATM we don't use a shell when running a tool, and as such + # we can't directly use "$()" as parameter. 
So query these values using + # Fab's shell tool (doesn't really matter which shell we get, so just + # ask for the default): + shell = tr.get_default(Category.SHELL) + + try: + # We must remove the trailing new line, and create a list: + nc_flibs = shell.run(additional_parameters=["-c", "nf-config --flibs"], + capture_output=True).strip().split() + except RuntimeError: + nc_flibs = [] + + linker.add_lib_flags("netcdf", nc_flibs) + linker.add_lib_flags("yaxt", ["-lyaxt", "-lyaxt_c"]) + linker.add_lib_flags("xios", ["-lxios"]) + linker.add_lib_flags("hdf5", ["-lhdf5"]) + linker.add_lib_flags("shumlib", ["-lshum"]) + linker.add_lib_flags("vernier", ["-lvernier_f", "-lvernier_c", + "-lvernier"]) + + linker.add_post_lib_flags(["-lcraystdc++"]) + + # Using the GNU compiler on Crays for now needs the additional + # flag -fallow-argument-mismatch to compile mpi_mod.f90 + ftn = tr.get_tool(Category.FORTRAN_COMPILER, "crayftn-gfortran") + ftn.add_flags("-fallow-argument-mismatch") diff --git a/infrastructure/build/fab/site_specific/default/setup_gnu.py b/infrastructure/build/fab/site_specific/default/setup_gnu.py new file mode 100644 index 000000000..c00c99b91 --- /dev/null +++ b/infrastructure/build/fab/site_specific/default/setup_gnu.py @@ -0,0 +1,113 @@ +#!/usr/bin/env python3 + +''' +This file contains a function that sets the default flags for all +GNU based compilers and linkers in the ToolRepository. + +This function gets called from the default site-specific config file +''' + +import argparse +from typing import cast + +from fab.api import BuildConfig, Category, Linker, ToolRepository + + +def setup_gnu(build_config: BuildConfig, args: argparse.Namespace) -> None: + # pylint: disable=unused-argument + ''' + Defines the default flags for all GNU compilers and linkers. + + :param build_config: the Fab build config instance from which + required parameters can be taken. 
def setup_gnu(build_config: BuildConfig, args: argparse.Namespace) -> None:
    # pylint: disable=unused-argument
    '''
    Defines the default flags for all GNU compilers and linkers.

    :param build_config: the Fab build config instance from which
        required parameters can be taken.
    :param args: all command line options
    :raises RuntimeError: if the detected gfortran is older than 4.9.
    '''

    tr = ToolRepository()
    gfortran = tr.get_tool(Category.FORTRAN_COMPILER, "gfortran")

    if not gfortran.is_available:
        # gfortran itself may not be in PATH (e.g. in spack
        # environments); fall back to the MPI wrapper in that case.
        gfortran = tr.get_tool(Category.FORTRAN_COMPILER, "mpif90-gfortran")
        if not gfortran.is_available:
            return

    if gfortran.get_version() < (4, 9):
        raise RuntimeError(f"GFortran is too old to build dynamo. "
                           f"Must be at least 4.9.0, it is "
                           f"'{gfortran.get_version_string()}'.")

    # The base flags
    # ==============

    # TODO: It should use -Werror=conversion, but:
    # Most lfric_atm dependencies contain code with implicit lossy
    # conversions.
    # This should be restricted to only the files/directories
    # that need it, but this needs Fab updates.

    gfortran.add_flags(
        ['-ffree-line-length-none', '-Wall', '-g',
         '-Werror=character-truncation',
         '-Werror=unused-value',
         '-Werror=tabs',
         '-std=f2008',
         '-fdefault-real-8',
         '-fdefault-double-8',
         ],
        "base")

    # TODO - Remove the -fallow-argument-mismatch flag when MPICH no longer
    # fails to build as a result of its mismatched arguments (see
    # ticket summary for #2549 for reasoning).
    if gfortran.get_version() >= (10, 0):
        # Passed as a list for consistency with all other add_flags calls.
        gfortran.add_flags(["-fallow-argument-mismatch"], "base")

    # Runtime checking and variable-initialisation flags, shared by the
    # full-debug and unit-tests profiles below:
    runtime = ["-fcheck=all", "-ffpe-trap=invalid,zero,overflow"]
    init = ["-finit-integer=31173", "-finit-real=snan",
            "-finit-logical=true", "-finit-character=85"]
    # Full debug
    # ==========
    gfortran.add_flags(runtime + ["-O0"] + init, "full-debug")

    # Fast debug
    # ==========
    gfortran.add_flags(runtime + ["-Og"], "fast-debug")

    # Production
    # ==========
    gfortran.add_flags(["-Ofast"], "production")

    # unit-tests
    # ==========
    gfortran.add_flags(runtime + ["-O0"] + init, "unit-tests")

    # Set up the linker
    # =================
    # This will implicitly affect all gfortran based linkers, e.g.
    # linker-mpif90-gfortran will use these flags as well.
    linker = tr.get_tool(Category.LINKER, f"linker-{gfortran.name}")
    linker = cast(Linker, linker)

    # ATM we don't use a shell when running a tool, and as such
    # we can't directly use "$()" as parameter. So query these values using
    # Fab's shell tool (doesn't really matter which shell we get, so just
    # ask for the default):
    shell = tr.get_default(Category.SHELL)

    try:
        # We must remove the trailing new line, and create a list:
        nc_flibs = shell.run(additional_parameters=["-c", "nf-config --flibs"],
                             capture_output=True).strip().split()
    except RuntimeError:
        # nf-config not available - fall back to an empty flag list.
        nc_flibs = []

    linker.add_lib_flags("netcdf", nc_flibs)
    linker.add_lib_flags("yaxt", ["-lyaxt", "-lyaxt_c"])
    linker.add_lib_flags("xios", ["-lxios"])
    linker.add_lib_flags("hdf5", ["-lhdf5"])
    linker.add_lib_flags("shumlib", ["-lshum"])
    linker.add_lib_flags("vernier", ["-lvernier_f", "-lvernier_c",
                                     "-lvernier"])

    # Always link with C++ libs
    linker.add_post_lib_flags(["-lstdc++"], "base")
def setup_intel_classic(build_config: BuildConfig,
                        args: argparse.Namespace) -> None:
    # pylint: disable=unused-argument, too-many-locals
    '''
    Defines the default flags for all Intel classic compilers and linkers.

    :param build_config: the Fab build config instance from which
        required parameters can be taken.
    :param args: all command line options
    '''

    tr = ToolRepository()
    ifort = tr.get_tool(Category.FORTRAN_COMPILER, "ifort")
    ifort = cast(Compiler, ifort)

    if not ifort.is_available:
        # This can happen if ifort is not in path (in spack environments).
        # To support this common use case, see if mpif90-ifort is available,
        # and initialise this otherwise.
        ifort = tr.get_tool(Category.FORTRAN_COMPILER, "mpif90-ifort")
        ifort = cast(Compiler, ifort)
        if not ifort.is_available:
            # Since some flags depends on version, the code below requires
            # that the intel compiler actually works.
            return

    # The base flags
    # ==============
    # The following flags will be applied to all modes:
    ifort.add_flags(["-stand", "f08"], "base")
    ifort.add_flags(["-g", "-traceback"], "base")
    # With -warn errors we get externals that are too long. While this
    # is a (usually safe) warning, the long externals then causes the
    # build to abort. So for now we cannot use `-warn errors`
    ifort.add_flags(["-warn", "all"], "base")

    # By default turning interface warnings on causes "genmod" files to be
    # created. This adds unnecessary files to the build so we disable that
    # behaviour.
    ifort.add_flags(["-gen-interfaces", "nosource"], "base")

    # The "-assume realloc-lhs" switch causes Intel Fortran prior to v17 to
    # actually implement the Fortran2003 standard. At version 17 it becomes
    # the default behaviour.
    if ifort.get_version() < (17, 0):
        ifort.add_flags(["-assume", "realloc-lhs"], "base")

    # Full debug
    # ==========
    # ifort.mk: bad interaction between array shape checking and
    # the matmul" intrinsic in at least some iterations of v19.
    # Disable the shape check for that version range only:
    if (19, 0, 0) <= ifort.get_version() < (19, 1, 0):
        runtime_flags = ["-check", "all,noshape", "-fpe0"]
    else:
        runtime_flags = ["-check", "all", "-fpe0"]
    ifort.add_flags(runtime_flags, "full-debug")
    ifort.add_flags(["-O0", "-ftrapuv"], "full-debug")

    # Fast debug
    # ==========
    ifort.add_flags(["-O2", "-fp-model=strict"], "fast-debug")

    # Production
    # ==========
    ifort.add_flags(["-O3", "-xhost"], "production")

    # Set up the linker
    # =================
    # This will implicitly affect all ifort based linkers, e.g.
    # linker-mpif90-ifort will use these flags as well.
    linker = tr.get_tool(Category.LINKER, f"linker-{ifort.name}")
    linker = cast(Linker, linker)

    # ATM we don't use a shell when running a tool, and as such
    # we can't directly use "$()" as parameter. So query these values using
    # Fab's shell tool (doesn't really matter which shell we get, so just
    # ask for the default):
    shell = tr.get_default(Category.SHELL)
    try:
        # We must remove the trailing new line, and create a list:
        nc_flibs = shell.run(additional_parameters=["-c", "nf-config --flibs"],
                             capture_output=True).strip().split()
    except RuntimeError:
        # nf-config not available - fall back to an empty flag list.
        nc_flibs = []

    linker.add_lib_flags("netcdf", nc_flibs)
    linker.add_lib_flags("yaxt", ["-lyaxt", "-lyaxt_c"])
    linker.add_lib_flags("xios", ["-lxios"])
    linker.add_lib_flags("hdf5", ["-lhdf5"])
    linker.add_lib_flags("shumlib", ["-lshum"])
    linker.add_lib_flags("vernier", ["-lvernier_f", "-lvernier_c",
                                     "-lvernier"])

    # Always link with C++ libs
    # NOTE(review): unlike setup_gnu, no "base" profile is passed here -
    # confirm whether this post-lib flag should be profile-specific.
    linker.add_post_lib_flags(["-lstdc++"])
def setup_intel_llvm(build_config: BuildConfig,
                     args: argparse.Namespace) -> None:
    # pylint: disable=unused-argument, too-many-locals
    '''
    Defines the default flags for all Intel llvm compilers.

    :param build_config: the Fab build config instance from which
        required parameters can be taken.
    :param args: all command line options
    '''

    tr = ToolRepository()
    ifx = tr.get_tool(Category.FORTRAN_COMPILER, "ifx")
    ifx = cast(Compiler, ifx)

    if not ifx.is_available:
        # ifx may not be in PATH (e.g. in spack environments) - fall
        # back to the MPI wrapper in that case:
        ifx = tr.get_tool(Category.FORTRAN_COMPILER, "mpif90-ifx")
        ifx = cast(Compiler, ifx)
        if not ifx.is_available:
            return

    # The base flags
    # ==============
    # The following flags will be applied to all modes:
    ifx.add_flags(["-stand", "f08"], "base")
    ifx.add_flags(["-g", "-traceback"], "base")
    # With -warn errors we get externals that are too long. While this
    # is a (usually safe) warning, the long externals then causes the
    # build to abort. So for now we cannot use `-warn errors`
    ifx.add_flags(["-warn", "all"], "base")

    # By default turning interface warnings on causes "genmod" files to be
    # created. This adds unnecessary files to the build so we disable that
    # behaviour.
    ifx.add_flags(["-gen-interfaces", "nosource"], "base")

    # Full debug
    # ==========
    ifx.add_flags(["-check", "all", "-fpe0"], "full-debug")
    ifx.add_flags(["-O0", "-ftrapuv"], "full-debug")

    # Fast debug
    # ==========
    ifx.add_flags(["-O2", "-fp-model=strict"], "fast-debug")

    # Production
    # ==========
    ifx.add_flags(["-O3", "-xhost"], "production")

    # Set up the linker
    # =================
    # This will implicitly affect all ifx based linkers, e.g.
    # linker-mpif90-ifx will use these flags as well.
    linker = tr.get_tool(Category.LINKER, f"linker-{ifx.name}")
    linker = cast(Linker, linker)  # Make mypy happy
    # ATM we don't use a shell when running a tool, and as such
    # we can't directly use "$()" as parameter. So query these values using
    # Fab's shell tool (doesn't really matter which shell we get, so just
    # ask for the default):
    shell = tr.get_default(Category.SHELL)
    try:
        # We must remove the trailing new line, and create a list:
        nc_flibs = shell.run(additional_parameters=["-c", "nf-config --flibs"],
                             capture_output=True).strip().split()
    except RuntimeError:
        # nf-config not available - fall back to an empty flag list.
        nc_flibs = []

    linker.add_lib_flags("netcdf", nc_flibs)
    linker.add_lib_flags("yaxt", ["-lyaxt", "-lyaxt_c"])
    linker.add_lib_flags("xios", ["-lxios"])
    linker.add_lib_flags("hdf5", ["-lhdf5"])
    linker.add_lib_flags("shumlib", ["-lshum"])
    linker.add_lib_flags("vernier", ["-lvernier_f", "-lvernier_c",
                                     "-lvernier"])

    # Always link with C++ libs
    # NOTE(review): unlike setup_gnu, no "base" profile is passed here -
    # confirm whether this post-lib flag should be profile-specific.
    linker.add_post_lib_flags(["-lstdc++"])
def setup_nvidia(build_config: BuildConfig, args: argparse.Namespace) -> None:
    # pylint: disable=unused-argument
    '''
    Defines the default flags for nvfortran.

    :param build_config: the Fab build config instance from which
        required parameters can be taken.
    :param args: all command line options
    '''

    tr = ToolRepository()
    nvfortran = tr.get_tool(Category.FORTRAN_COMPILER, "nvfortran")
    nvfortran = cast(Compiler, nvfortran)

    if not nvfortran.is_available:
        # nvfortran may not be in PATH (e.g. in spack environments) -
        # fall back to the MPI wrapper in that case:
        nvfortran = tr.get_tool(Category.FORTRAN_COMPILER, "mpif90-nvfortran")
        nvfortran = cast(Compiler, nvfortran)
        if not nvfortran.is_available:
            return

    # The base flags
    # ==============
    flags = ["-Mextend",  # 132 characters line length
             "-g", "-traceback",
             "-r8",       # Default 8 bytes reals
             "-O0",       # No optimisations
             ]

    # Extra libraries the linker will always append (extended below
    # depending on the accelerator options):
    lib_flags = ["-c++libs"]

    # Handle accelerator options:
    if args.openacc or args.openmp:
        host = args.host.lower()
    else:
        # Neither openacc nor openmp specified
        host = ""

    if args.openacc:
        if host == "gpu":
            flags.extend(["-acc=gpu", "-gpu=managed"])
            lib_flags.extend(["-aclibs", "-cuda"])
        else:
            # CPU
            flags.extend(["-acc=cpu"])
    elif args.openmp:
        if host == "gpu":
            flags.extend(["-mp=gpu", "-gpu=managed"])
            lib_flags.append("-cuda")
        else:
            # OpenMP on CPU, that's already handled by Fab
            pass

    nvfortran.add_flags(flags, "base")

    # Full debug
    # ==========
    # NOTE(review): "-fp-model=strict" looks like an Intel-style option;
    # confirm that nvfortran accepts it (the NVHPC spelling is usually
    # "-Kieee").
    nvfortran.add_flags(["-O0", "-fp-model=strict"], "full-debug")

    # Fast debug
    # ==========
    nvfortran.add_flags(["-O2", "-fp-model=strict"], "fast-debug")

    # Production
    # ==========
    # NOTE(review): the base flags already contain "-O0"; presumably the
    # later "-O4" takes precedence on the command line - confirm.
    nvfortran.add_flags(["-O4"], "production")

    # Set up the linker
    # =================
    # This will implicitly affect all nvfortran based linkers, e.g.
    # linker-mpif90-nvfortran will use these flags as well.
    linker = tr.get_tool(Category.LINKER, f"linker-{nvfortran.name}")
    linker = cast(Linker, linker)

    # ATM we don't use a shell when running a tool, and as such
    # we can't directly use "$()" as parameter. So query these values using
    # Fab's shell tool (doesn't really matter which shell we get, so just
    # ask for the default):
    shell = tr.get_default(Category.SHELL)
    try:
        # We must remove the trailing new line, and create a list:
        nc_flibs = shell.run(additional_parameters=["-c", "nf-config --flibs"],
                             capture_output=True).strip().split()
    except RuntimeError:
        # nf-config not available - fall back to an empty flag list.
        nc_flibs = []

    linker.add_lib_flags("netcdf", nc_flibs)
    linker.add_lib_flags("yaxt", ["-lyaxt", "-lyaxt_c"])
    linker.add_lib_flags("xios", ["-lxios"])
    linker.add_lib_flags("hdf5", ["-lhdf5"])
    linker.add_lib_flags("shumlib", ["-lshum"])
    linker.add_lib_flags("vernier", ["-lvernier_f", "-lvernier_c",
                                     "-lvernier"])

    # Always link with C++ libs
    linker.add_post_lib_flags(lib_flags)
class Config(DefaultConfig):
    '''This config class sets specific flags for METO-EX1A
    '''

    def __init__(self):
        super().__init__()
        tr = ToolRepository()
        # Set cray as default compiler suite
        # It has crayftn-ftn as Fortran compiler
        # It also has craycc-cc as C compiler
        tr.set_default_compiler_suite("cray")

    def setup_cray(self, build_config: BuildConfig):
        '''First call the base class to get all default options.
        See the file ../default/setup_cray.py for the current
        default.
        Very likely, linker options need to be changed:

        :param build_config: the Fab build configuration instance
        '''
        super().setup_cray(build_config)
        tr = ToolRepository()

        # Update the linker. This is what the default sets up
        # (except NetCDF, which is normally defined using nf-config)
        linker = tr.get_tool(Category.LINKER, "linker-crayftn-ftn")
        linker = cast(Linker, linker)  # make mypy happy

        # Don't know whether Cray uses nf-config. So hard-code
        # these flags for now until the transition to pkg-config
        # NOTE(review): "-lnetcdf" appears twice in this list - confirm
        # whether the duplicate is intentional.
        linker.add_lib_flags("netcdf", ["-lnetcdff", "-lnetcdf",
                                        "-lnetcdf", "-lm"])
class Config(DefaultConfig):
    '''This config class sets specific flags for NCAS-EX (archer2)
    '''

    def __init__(self):
        super().__init__()
        tr = ToolRepository()
        # On archer2, gnu is the default compiler suite.
        tr.set_default_compiler_suite("gnu")

    def setup_cray(self, build_config: BuildConfig):
        '''First call the base class to get all default options.
        See the file ../default/setup_cray.py for the current
        default.
        Very likely, linker options need to be changed:

        :param build_config: the Fab build configuration instance
        '''
        super().setup_cray(build_config)
        tr = ToolRepository()
        ftn = tr.get_tool(Category.FORTRAN_COMPILER, "gfortran")
        # Any gfortran on Cray's EX need this flag in order to
        # compile mpi_mod:
        ftn.add_flags(["-fallow-argument-mismatch"])

        # Update the linker. This is what the default sets up
        # (except NetCDF, which is defined using nf-config, and
        # should likely work the way it is):
        linker = tr.get_tool(Category.LINKER, "linker-gfortran")
        linker = cast(Linker, linker)  # make mypy happy

        # Cray's don't have nf-config. Till we have figured out the
        # proper solution, hard-code some flags that might work
        # with a plain gfortran build in a spack environment:
        # NOTE(review): "-lnetcdf" appears twice in this list - confirm
        # whether the duplicate is intentional.
        linker.add_lib_flags("netcdf", ["-lnetcdff", "-lnetcdf",
                                        "-lnetcdf", "-lm"])
        # That's pretty much the default:
        linker.add_lib_flags("yaxt", ["-lyaxt", "-lyaxt_c"])
        linker.add_lib_flags("xios", ["-lxios"])
        linker.add_lib_flags("hdf5", ["-lhdf5"])
        linker.add_lib_flags("shumlib", ["-lshum"])
        linker.add_lib_flags("vernier", ["-lvernier_f", "-lvernier_c",
                                         "-lvernier"])
class Config(DefaultConfig):
    '''
    For NCI, make intel the default, and add the Tau wrapper.
    '''

    def __init__(self):
        super().__init__()
        tr = ToolRepository()
        tr.set_default_compiler_suite("intel-classic")

        # ATM we don't use a shell when running a tool, and as such
        # we can't directly use "$()" as parameter. So query these values
        # using Fab's shell tool (doesn't really matter which shell we
        # get, so just ask for the default):
        shell = tr.get_default(Category.SHELL)
        try:
            # We must remove the trailing new line, and create a list:
            nc_flibs = shell.run(
                additional_parameters=["-c", "nf-config --flibs"],
                capture_output=True).strip().split()
        except RuntimeError:
            # Be consistent with the default site config: if nf-config is
            # not available, fall back to an empty flag list instead of
            # aborting the whole configuration with an unhandled error.
            nc_flibs = []
        linker = tr.get_tool(Category.LINKER, "linker-tau-ifort")

        # Setup all linker flags:
        linker.add_lib_flags("netcdf", nc_flibs)
        linker.add_lib_flags("yaxt", ["-lyaxt", "-lyaxt_c"])
        linker.add_lib_flags("xios", ["-lxios"])
        linker.add_lib_flags("hdf5", ["-lhdf5"])
        linker.add_lib_flags("shumlib", ["-lshum"])

        # Always link with C++ libs
        linker.add_post_lib_flags(["-lstdc++"])
class Config(DefaultConfig):
    '''
    This config class sets specific flags for NIWA's XC-50
    '''

    def __init__(self):
        super().__init__()
        tr = ToolRepository()
        tr.set_default_compiler_suite("intel-classic")

    def setup_cray(self, build_config: BuildConfig) -> None:
        '''
        First call the base class to get all default options.
        See the file ../default/setup_cray.py for the current
        default. Then the NIWA's XC-50 specific flags are added.
        The linker is also updated.

        :param build_config: the Fab build config instance from which
            required parameters can be taken.
        :type build_config: :py:class:`fab.BuildConfig`
        '''
        super().setup_cray(build_config)
        tr = ToolRepository()
        # NOTE(review): "crayftn-ifort" mixes the Cray wrapper name with
        # ifort - confirm this is the intended tool name.
        ftn = tr.get_tool(Category.FORTRAN_COMPILER, "crayftn-ifort")
        # Add any flags you want to have:
        ftn.add_flags([f"-I{os.environ['EBROOTXIOS']}/inc"])

        # Update the linker. This is what the default sets up
        # (except NetCDF, which is defined using nf-config, and
        # should likely work the way it is):
        linker = tr.get_tool(Category.LINKER, "linker-crayftn-ftn")
        linker = cast(Linker, linker)  # make mypy happy

        # The first parameter specifies the internal name for libraries,
        # followed by a list of linker options. If you should need additional
        # library paths, you could e.g. use:
        #   linker.add_lib_flags("yaxt", ["-L", "/my/path/to/yaxt", "-lyaxt",
        #                                 "-lyaxt_c"])
        # Make sure to not use a space as ONE parameter ("-L /my/lib"),
        # you have to specify them as two separate list elements

        linker.add_lib_flags("yaxt", ["-lyaxt", "-lyaxt_c"])
        linker.add_lib_flags("xios", ["-lxios"])
        linker.add_lib_flags("hdf5", ["-lhdf5"])
        linker.add_lib_flags("shumlib", ["-lshum"])
        linker.add_lib_flags("vernier", ["-lvernier_f", "-lvernier_c",
                                         "-lvernier"])
class Templaterator(Tool):
    '''This implements the LFRic templaterator as a Fab tool.
    It can check whether the templaterator is available and
    creates command line options for it to run.

    :param Path exec_name: the path to the templaterator binary.
    '''
    def __init__(self, exec_name: Path):
        # Remove suffix as a name
        super().__init__(exec_name.stem,
                         exec_name=exec_name)

    def check_available(self) -> bool:
        '''
        :returns bool: whether the templaterator works, verified by
            running it with the `-h` flag.
        '''
        try:
            super().run(additional_parameters="-h")
        except RuntimeError:
            return False

        return True

    def process(self, input_template: Path,
                output_file: Path,
                key_values: Dict[str, str]) -> None:
        """
        This wrapper runs the Templaterator, which replaces the
        given keys in the input template with the value in the
        `key_values` dictionary. The new file is written to the
        specified output file.

        :param input_template: the path to the input template.
        :param output_file: the output file path.
        :param key_values: the keys and values for the keys to
            define as a dictionary.
        """
        params: List[Union[str, Path]]
        params = [input_template, "-o", output_file]
        # Tools are executed without a shell, so every command line
        # token must be its own list element: passing "-s key=value" as
        # a single element would hand the tool ONE argument containing
        # a space, which it would not parse as the option plus its
        # value. Hence "-s" and "key=value" are separate elements.
        for key, value in key_values.items():
            params.extend(["-s", f"{key}={value}"])

        super().run(additional_parameters=params)
##############################################################################
# (c) Crown copyright Met Office. All rights reserved.
# The file LICENCE, distributed with this code, contains details of the terms
# under which the code may be used.
##############################################################################
# Author J. Henrichs, Bureau of Meteorology

"""
This module tests the configurator.
"""

from unittest.mock import MagicMock

import pytest

from configurator import configurator
from fab.tools.category import Category
from fab.tools.tool_box import ToolBox
from fab.build_config import BuildConfig


@pytest.fixture
def mock_shell():
    """
    A simple shell mock to check that all expected calls are executed.
    """
    shell = MagicMock()
    shell.exec = MagicMock()
    # Set the category so that the shell can be added to the ToolBox.
    shell.category = Category.SHELL
    return shell


def test_configurator_runs_expected_sequence(mock_shell, tmp_path):
    """
    Check that the expected series of calls is executed.
    """

    # Create a tool box and add the mocked shell that is used
    # to test the expected calls.
    tb = ToolBox()
    tb.add_tool(mock_shell)
    config = BuildConfig("Stub config", tb,
                         fab_workspace=tmp_path / 'fab')

    # Create a rose_picker mock:
    rose_picker = MagicMock()
    rose_picker.execute = MagicMock()

    # Setup source directories and rose-meta config file
    lfric_core = tmp_path / "lfric_core"
    lfric_apps = tmp_path / "lfric_apps"
    rose_meta_conf = tmp_path / "rose-meta.conf"

    # Simulate rose-meta.json and config_namelists.txt creation
    config_dir = tmp_path / "build_output" / "configuration"
    config_dir.mkdir(parents=True)
    rose_meta = config_dir / "rose-meta.json"
    rose_meta.write_text("{}", encoding="utf8")
    config_namelist = config_dir / "config_namelists.txt"
    config_namelist.write_text("namelist1\nnamelist2\n", encoding="utf8")

    # Run configurator
    with pytest.warns(match="_metric_send_conn not set, cannot send metrics"):
        configurator(
            config=config,
            lfric_core_source=lfric_core,
            rose_meta_conf=rose_meta_conf,
            rose_picker=rose_picker,
            include_paths=[lfric_apps],
            config_dir=config_dir
        )

    tools_dir = lfric_core / "infrastructure" / "build" / "tools"

    # Check rose_picker was called with the expected arguments:
    rose_picker.execute.assert_called_once()
    kwargs = rose_picker.execute.call_args_list[0].kwargs
    assert kwargs["parameters"] == [
        rose_meta_conf,
        '-directory', config_dir,
        '-include_dirs', lfric_core,
        '-include_dirs', lfric_core / "rose-meta",
        '-include_dirs', lfric_apps,
        '-include_dirs', lfric_apps / "rose-meta"
    ]

    # Check shell.exec was called with expected commands
    expected_calls = [
        ((f"{tools_dir / 'GenerateNamelist'} "
          f"-verbose {config_dir / 'rose-meta.json'} "
          f"-directory {config_dir}"),),
        ((f"{tools_dir / 'GenerateLoader'} "
          f"{config_dir / 'configuration_mod.f90'} "
          f"namelist1 namelist2"),),
        ((f"{tools_dir / 'GenerateFeigns'} {config_dir / 'rose-meta.json'} "
          f"-output {config_dir / 'feign_config_mod.f90'}"),)
    ]

    actual_calls = [call.args for call in mock_shell.exec.call_args_list]
    assert actual_calls == expected_calls
+ """ + return [] + + +@pytest.fixture(name="stub_fortran_compiler", scope='function') +def stub_fortran_compiler_init() -> FortranCompiler: + """ + Provides a minimal Fortran compiler. + """ + compiler = FortranCompiler('some Fortran compiler', 'sfc', 'stub', + r'([\d.]+)', openmp_flag='-omp', + module_folder_flag='-mods') + return compiler + + +@pytest.fixture(name="stub_c_compiler", scope='function') +def stub_c_compiler_init() -> CCompiler: + """ + Provides a minimal C compiler. + """ + compiler = CCompiler("some C compiler", "scc", "stub", + version_regex=r"([\d.]+)", openmp_flag='-omp') + return compiler + + +@pytest.fixture(name="stub_linker", scope='function') +def stub_linker_init(stub_c_compiler) -> Linker: + """ + Provides a minimal linker. + """ + linker = Linker(stub_c_compiler, None, 'sln') + return linker + + +@pytest.fixture(scope="function", autouse=True) +def setup_site_specific_config_environment(tmp_path): + """ + This sets up the environment for the mocked site_specific config class + (MockSiteConfig) to be used by tests of LFRicBase class methods without + errors. This fixture is automatically executed for any test in this file. 
+ """ + # Creates mock module with __file__ attribute + mock_site_module = mock.MagicMock() + mock_site_module.Config = MockSiteConfig + mock_site_module.__file__ = str(tmp_path / "site_specific" / + "default" / "config.py") + + # Mocks site-specific imports + sys.modules['site_specific'] = mock.MagicMock() + sys.modules['site_specific.default'] = mock.MagicMock() + sys.modules['site_specific.default.config'] = mock_site_module + + # Clears environment variables + with mock.patch.dict(os.environ, clear=True): + yield + + # Cleanups + for module in ['site_specific', 'site_specific.default', + 'site_specific.default.config']: + if module in sys.modules: + del sys.modules[module] + + +@pytest.fixture(scope="function", autouse=True) +def setup_tool_repository(stub_fortran_compiler, stub_c_compiler, + stub_linker): + ''' + This sets up a ToolRepository that allows the LFRicBase class + to proceed without raising errors. This fixture is automatically + executed for any test in this file. + ''' + # pylint: disable=protected-access + # Make sure we always get a new ToolRepository to not be affected by + # other tests: + ToolRepository._singleton = None + + # Remove all compiler and linker, so we get results independent + # of the software available on the platform this test is running + tr = ToolRepository() + for category in [Category.C_COMPILER, Category.FORTRAN_COMPILER, + Category.LINKER]: + tr[category] = [] + + # Add compilers and linkers, and mark them all as available, + # as well as supporting MPI and OpenMP + for tool in [stub_c_compiler, stub_fortran_compiler, stub_linker]: + tool._mpi = True + tool._openmp_flag = "-some-openmp-flag" + tool._is_available = True + tool._version = (1, 2, 3) + tr.add_tool(tool) + + # Remove environment variables that could affect tests + with mock.patch.dict(os.environ, clear=True): + yield + + # Reset tool repository for other tests + ToolRepository._singleton = None + + +def test_constructor(monkeypatch) -> None: + ''' + Tests 
constructor. + ''' + monkeypatch.setattr(sys, "argv", ["lfric_base.py"]) + lfric_base = LFRicBase(name="test_name") + + # Check root symbol defaults to name if not specified + assert lfric_base.root_symbol == ["test_name"] + + # Check root symbol can be specified + lfric_base = LFRicBase(name="test_name", + root_symbol="root1") + assert lfric_base.root_symbol == ["root1"] + + # Check root symbol list + lfric_base = LFRicBase(name="test_name", + root_symbol=["root1", "root2"]) + assert lfric_base.root_symbol == ["root1", "root2"] + + +def test_get_directory(monkeypatch, tmp_path) -> None: + ''' + Tests the correct setup of lfric_core_root and lfric_apps_root. + ''' + + # Create mock directory structure + mock_core = tmp_path / "core" + mock_core.mkdir(parents=True) + + # Create mock LFRic base file location + mock_base_dir = mock_core / "infrastructure" / "build" / "fab" + mock_base_dir.mkdir(parents=True) + mock_base_file = mock_base_dir / "lfric_base.py" + mock_base_file.write_text("", encoding='utf-8') + + # Mock __file__ attribute + monkeypatch.setattr('lfric_base.__file__', str(mock_base_file)) + + mock_apps = tmp_path / "apps" + mock_apps.mkdir() + deps_file = mock_apps / "dependencies.sh" + deps_file.write_text("", encoding='utf-8') + + mock_caller = mock_apps / "some_app" / "build.py" + mock_caller.parent.mkdir(parents=True) + mock_caller.write_text("", encoding='utf-8') + + # Create mock frame objects with proper structure + def create_frame(filename): + frame = mock.Mock() + frame.f_globals = {'__file__': filename} + return frame + + def create_frame_info(filename): + return (create_frame(filename), filename, None, None, None, None, + None, None) + + # Mock inspect.stack() to return our test callers with proper + # frame info structure + mock_stack = [ + create_frame_info(str(mock_base_file)), # First call in base dir + create_frame_info(str(mock_caller)) # Second call in apps + ] + monkeypatch.setattr('inspect.stack', lambda: mock_stack) + 
def test_command_line_options(monkeypatch) -> None:
    '''
    Tests the LFRic-specific command line options.
    '''
    monkeypatch.setattr(sys, "argv",
                        ["lfric_base.py",
                         "--rose_picker", "custom",
                         "--precision-default", "32"])

    base = LFRicBase(name="test")

    assert base.args.rose_picker == "custom"
    assert base.args.precision_default == "32"


def test_precision_definition_without_default(monkeypatch) -> None:
    '''
    Tests specification of precision if no default precision is
    specified on the command line (--precision-default). Tests all
    other ways a precision can be specified: default command line,
    explicit command line, environment variable, and the per
    R_*PRECISION default.
    '''
    monkeypatch.setattr(sys, "argv", ["lfric_base.py",
                                      "--rdef_precision", "32"])
    monkeypatch.setattr(os, 'environ', {"R_BL_PRECISION": "64"})

    base = LFRicBase(name="test")
    base.define_preprocessor_flags_step()
    flags = base.preprocess_flags_common

    # Explicitly set on the command line:
    assert '-DRDEF_PRECISION=32' in flags
    # Built-in default of this precision:
    assert '-DR_SOLVER_PRECISION=32' in flags
    # Built-in default of this precision:
    assert '-DR_TRAN_PRECISION=64' in flags
    # Taken from the environment variable:
    assert '-DR_BL_PRECISION=64' in flags


def test_precision_definition_with_default(monkeypatch) -> None:
    '''
    Tests specification of precision when a default is given. Tests all
    ways a precision can be specified: default command line, explicit
    command line, environment variable, and the per R_*PRECISION
    default.
    '''
    monkeypatch.setattr(sys, "argv", ["lfric_base.py",
                                      "--precision-default", "32",
                                      "--rdef_precision", "64"])
    monkeypatch.setattr(os, 'environ', {"R_BL_PRECISION": "64"})

    base = LFRicBase(name="test")
    base.define_preprocessor_flags_step()
    flags = base.preprocess_flags_common

    # Explicitly set on the command line:
    assert '-DRDEF_PRECISION=64' in flags
    # From the command-line default applied to all precisions:
    assert '-DR_SOLVER_PRECISION=32' in flags
    assert '-DR_TRAN_PRECISION=32' in flags
    # Taken from the environment variable:
    assert '-DR_BL_PRECISION=64' in flags


@pytest.mark.parametrize('no_xios', [True, False])
@pytest.mark.parametrize('mpi', [True, False])
def test_preprocessor_flags(monkeypatch, no_xios, mpi) -> None:
    """
    Tests the setting of preprocessor flags for all XIOS/MPI
    combinations, and also that we get the expected defaults for the
    precision variables.
    """
    argv = ["fab_script", "--no-openmp"]
    if no_xios:
        argv.append("--no-xios")
    if not mpi:
        argv.append("--no-mpi")
    monkeypatch.setattr(sys, "argv", argv)

    # Mark the stub compiler as supporting MPI or not, as required:
    fc = ToolRepository().get_tool(Category.FORTRAN_COMPILER, "sfc")
    monkeypatch.setattr(fc, "_mpi", mpi)

    base = LFRicBase(name="test")
    base.define_preprocessor_flags_step()

    expected = {
        '-DRDEF_PRECISION=64',
        '-DR_SOLVER_PRECISION=32',
        '-DR_TRAN_PRECISION=64',
        '-DR_BL_PRECISION=64',
    }
    if not no_xios:
        expected.add("-DUSE_XIOS")
    if not mpi:
        expected.add("-DNO_MPI")
    assert set(base.preprocess_flags_common) == expected
+ ''' + monkeypatch.setattr(sys, "argv", ["lfric_base.py"]) + lfric_base = LFRicBase(name="test") + + old_path = sys.path.copy() + lfric_base.setup_site_specific_location() + + # Check paths added correctly + base_dir = Path(inspect.getfile(LFRicBase)).parent + assert str(base_dir) in sys.path + assert str(base_dir / "site_specific") in sys.path + + # Restore path + sys.path = old_path + + +def test_get_linker_flags(monkeypatch) -> None: + ''' + Tests linker flags include required libraries. + ''' + monkeypatch.setattr(sys, "argv", ["lfric_base.py"]) + + lfric_base = LFRicBase(name="test") + flags = lfric_base.get_linker_flags() + + expected_libs = ['yaxt', 'xios', 'netcdf', 'hdf5'] + assert set(flags) == set(expected_libs) + + +def test_grab_files_step(monkeypatch) -> None: + ''' + Tests grabbing required source files + ''' + monkeypatch.setattr(sys, "argv", ["lfric_base.py"]) + + # Create mock objects + mock_grab = mock.MagicMock() + mock_core = Path("/mock/core") + + # Setup mocks + monkeypatch.setattr('lfric_base.grab_folder', mock_grab) + + lfric_base = LFRicBase(name="test") + monkeypatch.setattr(lfric_base, '_lfric_core_root', mock_core) + + # Call method under test + lfric_base.grab_files_step() + + # Verify grab_folder called for all required directories + expected_calls = [ + # Source directories + mock.call(lfric_base.config, + src=mock_core/'infrastructure'/'source', + dst_label=''), + mock.call(lfric_base.config, + src=mock_core/'components'/'driver'/'source', + dst_label=''), + mock.call(lfric_base.config, + src=mock_core/'components'/'inventory'/'source', + dst_label=''), + mock.call(lfric_base.config, + src=mock_core/'components'/'science'/'source', + dst_label=''), + mock.call(lfric_base.config, + src=mock_core/'components'/'lfric-xios'/'source', + dst_label=''), + # PSyclone config directory + mock.call(lfric_base.config, + src=mock_core/'etc', + dst_label='psyclone_config') + ] + + # Check both number of calls and call arguments + assert 
mock_grab.call_count == len(expected_calls) + mock_grab.assert_has_calls(expected_calls, any_order=True) + + +def test_find_source_files_step(monkeypatch) -> None: + ''' + Tests finding and filtering source files + ''' + monkeypatch.setattr(sys, "argv", ["lfric_base.py"]) + + # Create mocks + with (mock.patch('lfric_base.FabBase.find_source_files_step') as find_step, + mock.patch('lfric_base.LFRicBase.templaterator_step') as temp_step, + mock.patch('lfric_base.LFRicBase.configurator_step') as conf_step, + mock.patch('lfric_base.Exclude') as mock_exclude): + lfric_base = LFRicBase(name="test") + lfric_base.find_source_files_step() + + # Verify exclusion filter added and super called + mock_exclude.assert_called_once_with('unit-test', '/test/') + find_step.assert_called_once() + # Verify configurator and templaterator called + conf_step.assert_called_once() + temp_step.assert_called_once_with(lfric_base.config) + + +def test_configurator_step(monkeypatch) -> None: + ''' + Tests the configurator setup and execution. + ''' + monkeypatch.setattr(sys, "argv", ["lfric_base.py"]) + + # Create mock objects + mock_config = mock.MagicMock() + mock_picker = mock.MagicMock(return_value="rose_picker_tool") + mock_meta = mock.MagicMock(return_value="rose_meta.conf") + + # Set up mocks using monkeypatch + monkeypatch.setattr('lfric_base.configurator', mock_config) + monkeypatch.setattr('lfric_base.get_rose_picker', mock_picker) + + lfric_base = LFRicBase(name="test") + monkeypatch.setattr(lfric_base, 'get_rose_meta', mock_meta) + + lfric_base.configurator_step() + + # Verify configurator called with correct arguments + mock_config.assert_called_once_with( + lfric_base.config, + lfric_core_source=lfric_base.lfric_core_root, + rose_meta_conf="rose_meta.conf", + include_paths=[], + rose_picker="rose_picker_tool" + ) + + +def test_templaterator_step(monkeypatch, tmp_path) -> None: + ''' + Tests the templaterator step processes template files correctly. 
+ ''' + monkeypatch.setattr(sys, "argv", ["lfric_base.py"]) + + # Create mock template file + template_file = tmp_path / "field.t90" + template_file.write_text("template content", encoding='utf-8') + + # Create mock templaterator + mock_templaterator = mock.MagicMock() + mock_templaterator_instance = mock.MagicMock() + mock_templaterator.return_value = mock_templaterator_instance + monkeypatch.setattr('lfric_base.Templaterator', mock_templaterator) + + # Mock input_to_output_fpath + mock_output_path = tmp_path / "build" / "output" + mock_output_path.mkdir(parents=True) + monkeypatch.setattr('lfric_base.input_to_output_fpath', + lambda config, input_path: (mock_output_path / + input_path.name)) + + # Mock SuffixFilter to return our template file + mock_filter = mock.MagicMock() + mock_filter.return_value = {template_file} + monkeypatch.setattr('lfric_base.SuffixFilter', lambda *args: mock_filter) + + # Create mock config with proper artefact store + mock_artefact_store = mock.MagicMock() + mock_artefact_store.__getitem__.return_value = set() + + config = mock.MagicMock() + config.artefact_store = mock_artefact_store + config.build_output = tmp_path + + # Create LFRicBase instance + lfric_base = LFRicBase(name="test") + monkeypatch.setattr(lfric_base, '_lfric_core_root', tmp_path) + + # Run templaterator step + lfric_base.templaterator_step(config) + + # Verify templaterator initialization + mock_templaterator.assert_called_once_with(tmp_path / "infrastructure" / + "build" / "tools" / + "Templaterator") + + # Verify template processing + expected_calls = [] + templates = [ + {"kind": "real32", "type": "real"}, + {"kind": "real64", "type": "real"}, + {"kind": "int32", "type": "integer"} + ] + + for template in templates: + out_file = mock_output_path / f"field_{template['kind']}_mod.f90" + expected_calls.append( + mock.call(template_file, out_file, key_values=template) + ) + + assert mock_templaterator_instance.process.call_count == 3 + 
def test_get_rose_meta(monkeypatch) -> None:
    '''
    Tests getting the rose meta configuration: the base class has none,
    so it must return None.
    '''
    monkeypatch.setattr(sys, "argv", ["lfric_base.py"])

    lfric_base = LFRicBase(name="test")
    assert lfric_base.get_rose_meta() is None


def test_analyse_step(monkeypatch) -> None:
    '''
    Tests the analysis step configuration and execution, both with the
    default and with custom ignore_dependencies.
    '''

    # Test case 1: No ignore_dependencies argument specified
    monkeypatch.setattr(sys, "argv", ["lfric_base.py"])

    # Create mocks
    mock_analyse = mock.MagicMock()
    mock_preprocess = mock.MagicMock()
    mock_psyclone = mock.MagicMock()

    # Setup mocks
    monkeypatch.setattr('fab.fab_base.fab_base.FabBase.analyse_step',
                        mock_analyse)

    lfric_base = LFRicBase(name="test")

    # Mock instance methods
    monkeypatch.setattr(lfric_base, 'preprocess_x90_step', mock_preprocess)
    monkeypatch.setattr(lfric_base, 'psyclone_step', mock_psyclone)

    # The PSyclone step will modify sys.path (to allow import of
    # psyclone_tools by PSyclone scripts). Make sure sys.path is unchanged:
    old_sys_path = sys.path[:]
    # Call analyse_step (which calls PSyclone)
    lfric_base.analyse_step()
    assert sys.path == old_sys_path

    # Verify method calls
    mock_preprocess.assert_called_once()
    mock_psyclone.assert_called_once()

    # Verify analyse called with correct default ignore_dependencies
    expected_ignore = ['netcdf', 'mpi', 'mpi_f08', 'yaxt',
                       'xios', 'icontext', 'mod_wait']
    mock_analyse.assert_called_once_with(
        ignore_dependencies=expected_ignore,
        find_programs=False
    )

    # Test case 2: Custom ignore_dependencies arguments specified
    custom_ignore = ['custom_dep1', 'custom_dep2']
    mock_analyse.reset_mock()
    mock_preprocess.reset_mock()
    mock_psyclone.reset_mock()

    lfric_base = LFRicBase(name="test")
    monkeypatch.setattr(lfric_base, 'preprocess_x90_step', mock_preprocess)
    monkeypatch.setattr(lfric_base, 'psyclone_step', mock_psyclone)

    # Call analyse_step
    lfric_base.analyse_step(ignore_dependencies=custom_ignore)

    # Verify methods still called
    mock_preprocess.assert_called_once()
    mock_psyclone.assert_called_once()

    # Verify analyse called with custom_ignore added to ignore list
    expected_ignore = ['custom_dep1', 'custom_dep2', 'netcdf', 'mpi',
                       'mpi_f08', 'yaxt', 'xios', 'icontext',
                       'mod_wait']
    mock_analyse.assert_called_once_with(
        ignore_dependencies=expected_ignore,
        find_programs=False
    )


def test_preprocess_x90_step(monkeypatch) -> None:
    '''
    Tests that preprocessing of X90 files passes the accumulated
    preprocessor flags through to preprocess_x90.
    '''
    monkeypatch.setattr(sys, "argv", ["lfric_base.py"])

    mock_preproc = mock.MagicMock()
    monkeypatch.setattr('lfric_base.preprocess_x90', mock_preproc)

    lfric_base = LFRicBase(name="test")
    lfric_base.add_preprocessor_flags(["-flag1", "-flag2"])
    lfric_base.preprocess_x90_step()

    mock_preproc.assert_called_once_with(
        lfric_base.config,
        common_flags=["-flag1", "-flag2"]
    )


def test_psyclone_step(monkeypatch) -> None:
    '''
    Tests that the PSyclone step invokes psyclone with the config
    options, the additional options, and any extra parameters.
    '''
    monkeypatch.setattr(sys, "argv", ["lfric_base.py"])

    # Create mock objects
    mock_psy = mock.MagicMock()
    mock_config_opts = ["--config", "/mock/psyclone.cfg"]
    mock_additional_opts: List[str] = []

    # Set up monkeypatch for module level import
    monkeypatch.setattr('lfric_base.psyclone', mock_psy)

    lfric_base = LFRicBase(name="test")

    # Patch instance methods. Return a copy to avoid that
    # PSyclone modifies these lists when it modifies the returned list.
    monkeypatch.setattr(lfric_base, 'get_psyclone_config',
                        lambda: mock_config_opts[:])
    monkeypatch.setattr(lfric_base, 'get_additional_psyclone_options',
                        lambda: mock_additional_opts[:])

    # Call method under test
    lfric_base.psyclone_step(additional_parameters=["-additional"])

    # Verify psyclone called with correct arguments
    mock_psy.assert_called_once_with(
        lfric_base.config,
        kernel_roots=[(lfric_base.config.build_output / "kernel")],
        transformation_script=lfric_base.get_transformation_script,
        api="dynamo0.3",
        cli_args=mock_config_opts + mock_additional_opts + ["-additional"],
        ignore_dependencies=None
    )


def test_get_psyclone_config(monkeypatch) -> None:
    '''
    Tests that the PSyclone config option points at the grabbed
    psyclone_config/psyclone.cfg file.
    '''
    monkeypatch.setattr(sys, "argv", ["lfric_base.py"])

    lfric_base = LFRicBase(name="test")
    config_args = lfric_base.get_psyclone_config()

    assert config_args == ["--config",
                           str(lfric_base.config.source_root /
                               'psyclone_config/psyclone.cfg')]


def test_get_additional_psyclone_options(monkeypatch) -> None:
    '''
    Tests getting additional PSyclone options (for profiling); the
    base class provides none.
    '''
    monkeypatch.setattr(sys, "argv", ["lfric_base.py"])

    lfric_base = LFRicBase(name="test")
    assert not lfric_base.get_additional_psyclone_options()
+ ''' + monkeypatch.setattr(sys, "argv", ["lfric_base.py"]) + + # Create LFRicBase instance with mocked site/platform + lfric_base = LFRicBase(name="test") + + # Create mock config + config = mock.MagicMock() + config.source_root = tmp_path + config.build_output = tmp_path / "build" + config.build_output.mkdir() + + # Create x90 test source file + source_path = tmp_path / "some/path" + source_path.mkdir(parents=True) + test_file = source_path / "file.x90" + test_file.touch() + + # Test case 1: x90 file not in source or build directories + outside_file = tmp_path.parent / "outside.x90" + assert lfric_base.get_transformation_script(outside_file, config) is None + + # Test case 2: No optimisation directory, no transformation script + assert lfric_base.get_transformation_script(test_file, config) is None + + # Test case 3: No PSykal but optimisation directory + optimisation_folder_path = (tmp_path / "optimisation" / "default-default" / + "psykal") + global_script = optimisation_folder_path / "global.py" + global_script.parent.mkdir(parents=True) + global_script.touch() + + # No file-specific transformation script, use global script + other_file = tmp_path / "other/path/test.x90" + other_file.parent.mkdir(parents=True) + other_file.touch() + assert (lfric_base.get_transformation_script(other_file, config) == + global_script) + + # Test case 4: Psykal directory exists + psykal_path = tmp_path / "optimisation/default-default/psykal" + + # Create specific transformation script in psykal dir + specific_script = psykal_path / "some/path/file.py" + specific_script.parent.mkdir(parents=True) + specific_script.touch() + + # Use specific script in psykal directory + assert lfric_base.get_transformation_script(test_file, config) == \ + specific_script diff --git a/infrastructure/build/fab/test/rose_picker_tool_test.py b/infrastructure/build/fab/test/rose_picker_tool_test.py new file mode 100644 index 000000000..f0993174c --- /dev/null +++ 
##############################################################################
# (c) Crown copyright Met Office. All rights reserved.
# The file LICENCE, distributed with this code, contains details of the terms
# under which the code may be used.
##############################################################################
# Author J. Henrichs, Bureau of Meteorology

"""
This module tests rose_picker_tool.
"""

import os
from pathlib import Path
from unittest.mock import patch, MagicMock, PropertyMock

import pytest

from fab.tools.category import Category
from fab.tools.tool import Tool
from rose_picker_tool import get_rose_picker, RosePicker


def test_get_rose_picker_system_found() -> None:
    """
    Test that a system-wide installed rose_picker works as expected.
    """
    with patch("shutil.which", return_value="/usr/bin/rose_picker"):
        rp = get_rose_picker("system")
        assert isinstance(rp, RosePicker)
        assert rp.exec_path == Path("/usr/bin/rose_picker")


def test_get_rose_picker_system_not_found() -> None:
    """
    Test error if a system rose_picker is requested, but does not exist.
    """
    with patch("shutil.which", return_value=None):
        with pytest.raises(RuntimeError) as err:
            get_rose_picker("system")
    assert "Cannot find system rose_picker tool." == str(err.value)


def test_get_rose_picker_local_checkout(tmp_path) -> None:
    """
    Tests that we will invoke rose picker from a local checkout
    (mocked, so we don't need an actual checkout).
    """
    tag = "v2.0.0"
    fake_workspace = tmp_path / "fab-workspace"
    gpl_utils = fake_workspace / f"gpl-utils-{tag}" / "source"
    rose_picker_bin = gpl_utils / "bin"
    rose_picker_path = rose_picker_bin / "rose_picker"

    # First availability check fails (forcing the checkout), the second
    # one succeeds. Note: side_effect must be passed as a keyword; the
    # first positional argument of PropertyMock is the spec.
    pm = PropertyMock(side_effect=[False, True])
    # Patch get_fab_workspace to return our tmp_path
    with patch("rose_picker_tool.get_fab_workspace",
               return_value=fake_workspace), \
            patch("rose_picker_tool.ToolRepository") as mock_repo_class, \
            patch.object(Tool, "is_available", pm):

        mock_fcm = MagicMock()
        mock_repo = MagicMock()
        mock_repo.get_default.return_value = mock_fcm
        mock_repo_class.return_value = mock_repo

        rp = get_rose_picker(tag)

        # Ensure checkout was called
        mock_fcm.checkout.assert_called_once_with(
            src=f"fcm:lfric_gpl_utils.x/tags/{tag}",
            dst=gpl_utils
        )

        # Ensure the returned object is a RosePicker with correct path
        assert isinstance(rp, RosePicker)
        assert Path(rp.exec_path) == rose_picker_path


def test_get_rose_picker_local_checkout_fails() -> None:
    """
    This function tests the behaviour if a local checkout fails,
    i.e. rose_picker cannot be executed. This test patches the
    ToolRepository (so that FCM is not actually called), and makes
    sure RosePicker is always not available:
    """

    tag = "v2.0.0"

    # Make sure rose_picker will always return to be not available:
    with patch("rose_picker_tool.ToolRepository.get_default") as mock_repo, \
            patch.object(RosePicker, "check_available", return_value=False), \
            pytest.raises(RuntimeError) as err:
        get_rose_picker(tag)

    assert f"Cannot run rose_picker tag '{tag}'." == str(err.value)
    # Also make sure that we indeed got FCM :)
    mock_repo.assert_called_with(Category.FCM)


def test_get_rose_picker_check_available() -> None:
    """
    Test RosePicker's check_available.
    """
    rose_picker = RosePicker(Path("/usr/bin/rose_picker"))
    with patch.object(RosePicker, "run", return_value=True) as mock_run:
        assert rose_picker.check_available()
        mock_run.assert_called_once_with(additional_parameters="-help")

    with patch.object(RosePicker, "run", side_effect=RuntimeError) as mock_run:
        assert not rose_picker.check_available()
        mock_run.assert_called_once_with(additional_parameters="-help")


def test_get_rose_picker_execute() -> None:
    """
    Test RosePicker's execute.
    """
    rose_picker = RosePicker(Path("/usr/bin/rose_picker"))
    with patch.object(RosePicker, "run", return_value=0) as mock_run, \
            patch.object(os, "environ", {}):
        rose_picker.execute(["arg"])
        # Rose picker prepends the existing python path, separated by ":".
        # Since python path is not set, there will be a leading ":"
        mock_run.assert_called_once_with(additional_parameters=["arg"],
                                         env={'PYTHONPATH':
                                              ':/usr/lib/python'})
+""" + +from pathlib import Path +from unittest.mock import patch + +import pytest + +from templaterator import Templaterator # adjust import as needed + + +@pytest.fixture(name="templaterator") +def templaterator_setup(tmp_path: Path) -> Templaterator: + """ + :returns: A dummy Templaterator object (in tmp_path, + which does not exist, but it's all we need for these tests since + all tests are mocked. + """ + return Templaterator(tmp_path / "Templaterator.py") + + +def test_init(templaterator: Templaterator, + tmp_path: Path) -> None: + """ + Test the constructor. + """ + assert templaterator.name == "Templaterator" + assert templaterator.exec_path == tmp_path / "Templaterator.py" + assert templaterator.exec_name == "Templaterator.py" + + +def test_check_available(templaterator: Templaterator) -> None: + """ + Test the check_available function. + """ + with patch("fab.tools.tool.Tool.run", return_value=0) as mock_run: + assert templaterator.check_available() is True + mock_run.assert_called_once_with(additional_parameters="-h") + + with patch("fab.tools.tool.Tool.run", + side_effect=RuntimeError()) as mock_run: + assert templaterator.check_available() is False + mock_run.assert_called_once_with(additional_parameters="-h") + + +def test_process_call(templaterator: Templaterator, + tmp_path: Path) -> None: + """ + Test that execution passes on the right parameter to the + Templaterator script. + """ + input_template = tmp_path / "input.txt" + output_file = tmp_path / "output.txt" + key_values = {"A": "1", "B": "2"} + + with patch("fab.tools.tool.Tool.run", return_value=0) as mock_run: + templaterator.process(input_template, output_file, key_values) + + expected_params = [ + input_template, + "-o", + output_file, + "-s A=1", + "-s B=2", + ] + + mock_run.assert_called_once_with(additional_parameters=expected_params)