diff --git a/.gitignore b/.gitignore
index e92da48887..7e286433ca 100644
--- a/.gitignore
+++ b/.gitignore
@@ -176,6 +176,7 @@ ush/global_cycle_driver.sh
 ush/gsi_satbias2ioda_all.sh
 ush/jediinc2fv3.py
 ush/imsfv3_scf2ioda.py
+ush/ghcn_snod2ioda.py
 ush/bufr_snocvr_snomad.py
 ush/atparse.bash
 ush/bufr2ioda_insitu*
diff --git a/dev/parm/config/gfs/config.esnowanl.j2 b/dev/parm/config/gfs/config.esnowanl.j2
index 786a450858..086b3620e9 100644
--- a/dev/parm/config/gfs/config.esnowanl.j2
+++ b/dev/parm/config/gfs/config.esnowanl.j2
@@ -8,15 +8,19 @@ echo "BEGIN: config.esnowanl"
 # Get task specific resources
 source "${EXPDIR}/config.resources" esnowanl
 
-export TASK_CONFIG_YAML="${PARMgfs}/gdas/snow/snow_ens_config.yaml.j2"
-export OBS_LIST_YAML="${PARMgfs}/gdas/snow/snow_obs_list.yaml.j2"
-
 # Name of the executable that applies increment to bkg and its namelist template
 export APPLY_INCR_EXE="${EXECgfs}/gdas_apply_incr.x"
 export ENS_APPLY_INCR_NML_TMPL="${PARMgfs}/gdas/snow/ens_apply_incr_nml.j2"
+export TASK_CONFIG_YAML="${PARMgfs}/gdas/snow/snow_ens_config.yaml.j2"
+export OBS_LIST_YAML="${PARMgfs}/gdas/snow/snow_obs_list.yaml.j2"
+export ims_scf_obs_suffix="asc" # asc-ascii; nc-netcdf
+export fail_on_missing_snowobs=False # False: just warn; True: fail & exit
+
 export PREP_SNOCVR_SNOMAD_YAML="${PARMgfs}/gdas/snow/prep/prep_snocvr_snomad.yaml.j2"
 export OBSBUILDER="${USHgfs}/bufr_snocvr_snomad.py"
+export PREP_GHCN_YAML="${PARMgfs}/gdas/snow/prep/prep_ghcn.yaml.j2"
+export GHCN2IODACONV="${USHgfs}/ghcn_snod2ioda.py"
 
 export io_layout_x="{{ IO_LAYOUT_X }}"
 export io_layout_y="{{ IO_LAYOUT_Y }}"
 
diff --git a/dev/parm/config/gfs/config.snowanl.j2 b/dev/parm/config/gfs/config.snowanl.j2
index b88cb42976..a76e584246 100644
--- a/dev/parm/config/gfs/config.snowanl.j2
+++ b/dev/parm/config/gfs/config.snowanl.j2
@@ -14,9 +14,13 @@
 export APPLY_INCR_NML_TMPL="${PARMgfs}/gdas/snow/apply_incr_nml.j2"
 export TASK_CONFIG_YAML="${PARMgfs}/gdas/snow/snow_det_config.yaml.j2"
 export OBS_LIST_YAML="${PARMgfs}/gdas/snow/snow_obs_list.yaml.j2"
+export ims_scf_obs_suffix="asc" # asc-ascii; nc-netcdf
+export fail_on_missing_snowobs=False # False: just warn; True: fail & exit
 
 export PREP_SNOCVR_SNOMAD_YAML="${PARMgfs}/gdas/snow/prep/prep_snocvr_snomad.yaml.j2"
 export OBSBUILDER="${USHgfs}/bufr_snocvr_snomad.py"
+export PREP_GHCN_YAML="${PARMgfs}/gdas/snow/prep/prep_ghcn.yaml.j2"
+export GHCN2IODACONV="${USHgfs}/ghcn_snod2ioda.py"
 
 export io_layout_x="{{ IO_LAYOUT_X }}"
 export io_layout_y="{{ IO_LAYOUT_Y }}"
 
diff --git a/dev/scripts/exglobal_analysis_stats.py b/dev/scripts/exglobal_analysis_stats.py
index 3497a8afc0..8f7eb79b50 100755
--- a/dev/scripts/exglobal_analysis_stats.py
+++ b/dev/scripts/exglobal_analysis_stats.py
@@ -31,11 +31,15 @@
     else:
         config.STAT_ANALYSES.append('atmos_gsi')
 
+    # GCDAS uses offline GDAS, remove atmos analysis
+    if config.RUN == 'gcdas':
+        config.STAT_ANALYSES = [anl for anl in config.STAT_ANALYSES if 'atmos' not in anl]
+
     # Instantiate the analysis stats task
     AnlStats = AnalysisStats(config)
 
     # Initialize JEDI variational analysis
-    if not config.DO_JEDIATMVAR:
+    if 'atmos_gsi' in config.STAT_ANALYSES:
         AnlStats.convert_gsi_diags()
     AnlStats.initialize()
     for anl in config.STAT_ANALYSES:
diff --git a/dev/scripts/exglobal_snow_analysis.py b/dev/scripts/exglobal_snow_analysis.py
index 9923187d11..dbd443114a 100755
--- a/dev/scripts/exglobal_snow_analysis.py
+++ b/dev/scripts/exglobal_snow_analysis.py
@@ -31,6 +31,10 @@
     if snow_anl.task_config.DO_IMS_SCF:
         snow_anl.execute('scf_to_ioda')
 
+    # Process GHCN (if applicable)
+    if snow_anl.task_config.DO_GHCN:
+        snow_anl.prepare_GHCN()
+
     # Execute JEDI snow analysis
     snow_anl.execute('snowanlvar')
 
diff --git a/dev/scripts/exglobal_snowens_analysis.py b/dev/scripts/exglobal_snowens_analysis.py
index 4ab13d86b9..aa4b44b5f0 100755
--- a/dev/scripts/exglobal_snowens_analysis.py
+++ b/dev/scripts/exglobal_snowens_analysis.py
@@ -37,6 +37,10 @@
     if snow_ens_anl.task_config.DO_IMS_SCF:
         snow_ens_anl.execute('scf_to_ioda')
 
+    # Process GHCN (if applicable)
+    if snow_ens_anl.task_config.DO_GHCN:
+        snow_ens_anl.prepare_GHCN()
+
     # Execute JEDI snow analysis
     snow_ens_anl.execute('snowanlvar')
 
diff --git a/sorc/gdas.cd b/sorc/gdas.cd
index dd911ba37f..3e0593d7de 160000
--- a/sorc/gdas.cd
+++ b/sorc/gdas.cd
@@ -1 +1 @@
-Subproject commit dd911ba37f5c98a03e5e7cf89699d280e79f538b
+Subproject commit 3e0593d7de31944b3aef5330c36f2ddfecba3014
diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh
index e995387ef7..76f2a5de5a 100755
--- a/sorc/link_workflow.sh
+++ b/sorc/link_workflow.sh
@@ -275,7 +275,7 @@ if [[ -d "${HOMEgfs}/sorc/gdas.cd/build" ]]; then
   cd "${HOMEgfs}/ush" || exit 1
   ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/gsi_satbias2ioda_all.sh" .
   ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/snow/bufr_snocvr_snomad.py" .
-  ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/build/bin/imsfv3_scf2ioda.py" .
+  ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/ush/snow/ghcn_snod2ioda.py" .
 fi
 
 #------------------------------
@@ -439,7 +439,6 @@ fi
 # GDASApp executables
 if [[ -d "${HOMEgfs}/sorc/gdas.cd/install" ]]; then
   cp -f "${HOMEgfs}/sorc/gdas.cd/install/bin"/gdas* ./
-  cp -f "${HOMEgfs}/sorc/gdas.cd/install/bin/calcfIMS.exe" ./gdas_calcfIMS.x
   cp -f "${HOMEgfs}/sorc/gdas.cd/install/bin/apply_incr.exe" ./gdas_apply_incr.x
 fi
 
diff --git a/ush/python/pygfs/task/analysis_stats.py b/ush/python/pygfs/task/analysis_stats.py
index 44c4f9cfca..7b6415657e 100644
--- a/ush/python/pygfs/task/analysis_stats.py
+++ b/ush/python/pygfs/task/analysis_stats.py
@@ -3,6 +3,7 @@
 import os
 import glob
 import gsincdiag_to_ioda.proc_gsi_ncdiag as gsid
+import gsincdiag_to_ioda.combine_obsspace as gsios
 import gzip
 import tarfile
 from logging import getLogger
@@ -11,7 +12,6 @@
 from wxflow import (AttrDict,
                     FileHandler,
-                    add_to_datetime, to_timedelta,
                     to_YMDH,
                     parse_j2yaml, logit)
 from pygfs.jedi import Jedi
 
@@ -198,16 +198,19 @@ def convert_gsi_diags(self) -> None:
         FileHandler({'mkdir': [diag_ioda_dir_ges_path, diag_ioda_dir_anl_path, output_dir_path]}).sync()
         diag_tar_copy_list = []
         for diag in diag_tars:
-            input_tar_basename = f"{self.task_config.APREFIX}{diag}"
+            input_tar_basename = f"{self.task_config.APREFIX}{diag}.tar"
             input_tar = os.path.join(self.task_config.COMIN_ATMOS_ANALYSIS,
-                                     f"{input_tar_basename}.tar")
+                                     input_tar_basename)
             dest = os.path.join(diag_dir_path, input_tar_basename)
             if os.path.exists(input_tar):
+                logger.info(f"{input_tar} exists. Preparing to copy it to {dest}")
                 diag_tar_copy_list.append([input_tar, dest])
+            else:
+                logger.warning(f"{input_tar} does not exist to copy. Skipping ...")
         FileHandler({'copy_opt': diag_tar_copy_list}).sync()
 
         # Untar and gunzip diag files
-        gsi_diag_tars = glob.glob(os.path.join(diag_dir_path, f"{self.task_config.APREFIX}*stat"))
+        gsi_diag_tars = glob.glob(os.path.join(diag_dir_path, f"{self.task_config.APREFIX}*stat.tar"))
         for diag_tar in gsi_diag_tars:
             logger.info(f"Untarring {diag_tar}")
             with tarfile.open(diag_tar, "r") as tar:
@@ -234,12 +237,14 @@ def convert_gsi_diags(self) -> None:
         FileHandler({'copy_opt': copy_ges_diags}).sync()
 
         # Convert GSI diag files to ioda files using gsincdiag2ioda converter scripts
+        logger.info("Converting GSI guess diag files to IODA files")
         gsid.proc_gsi_ncdiag(ObsDir=diag_ioda_dir_ges_path, DiagDir=diag_dir_ges_path)
+        logger.info("Converting GSI analysis diag files to IODA files")
         gsid.proc_gsi_ncdiag(ObsDir=diag_ioda_dir_anl_path, DiagDir=diag_dir_anl_path)
 
         # now we need to combine the two sets of ioda files into one file
         # by adding certain groups from the anl file to the ges file
-        ges_ioda_files = glob.glob(os.path.join(diag_ioda_dir_ges_path, '*nc4'))
+        ges_ioda_files = glob.glob(os.path.join(diag_ioda_dir_ges_path, '*nc'))
         for ges_ioda_file in ges_ioda_files:
             anl_ioda_file = ges_ioda_file.replace('_ges_', '_anl_').replace(diag_ioda_dir_ges_path, diag_ioda_dir_anl_path)
             if os.path.exists(anl_ioda_file):
@@ -250,21 +255,25 @@ def convert_gsi_diags(self) -> None:
                 logger.warning(f"{anl_ioda_file} does not exist to combine with {ges_ioda_file}")
                 logger.warning("Skipping this file ...")
 
+        # now, for conventional data, we need to combine certain obspaces
+        logger.info("Combining conventional GSI IODA files by obspace")
+        conv_obsspaces = ['sondes', 'aircraft', 'sfcship', 'sfc']
+        for obspace in conv_obsspaces:
+            logger.info(f"Combining conventional GSI IODA files for obspace {obspace}")
+            FileList = glob.glob(os.path.join(output_dir_path, f"{obspace}_*_gsi_*.nc"))
+            timestamp = self.task_config.current_cycle.strftime('%Y%m%d%H')
+            combined_outfile = os.path.join(output_dir_path, f"{obspace}_gsi_{timestamp}.nc")
+            gsios.combine_obsspace(FileList, combined_outfile, False)
+
         # Tar up the ioda files
         iodastatzipfile = os.path.join(self.task_config.DATA, 'atmos_gsi', 'atmos_gsi_ioda',
                                        f"{self.task_config.APREFIX}atmos_gsi_analysis.ioda_hofx.tar.gz")
         logger.info(f"Compressing GSI IODA files to {iodastatzipfile}")
         # get list of iodastat files to put in tarball
-        iodastatfiles = glob.glob(os.path.join(output_dir_path, '*nc4'))
+        iodastatfiles = glob.glob(os.path.join(output_dir_path, '*nc'))
         logger.info(f"Gathering {len(iodastatfiles)} GSI IODA files to {iodastatzipfile}")
         with tarfile.open(iodastatzipfile, "w|gz") as archive:
             for targetfile in iodastatfiles:
-                # gzip the file before adding to tar
-                with open(targetfile, 'rb') as f_in:
-                    with gzip.open(f"{targetfile}.gz", 'wb') as f_out:
-                        f_out.writelines(f_in)
-                os.remove(targetfile)
-                targetfile = f"{targetfile}.gz"
                 archive.add(targetfile, arcname=os.path.basename(targetfile))
         logger.info(f"Finished compressing GSI IODA files to {iodastatzipfile}")
         # copy to COMOUT
diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py
index 5109070f2b..9c31421bcd 100644
--- a/ush/python/pygfs/task/snow_analysis.py
+++ b/ush/python/pygfs/task/snow_analysis.py
@@ -48,14 +48,46 @@ def __init__(self, config: Dict[str, Any]):
         super().__init__(config)
 
         _res = int(self.task_config['CASE'][1:])
+        _fail_on_missing = str(self.task_config.fail_on_missing_snowobs[0]).lower() == "true" \
+            if isinstance(self.task_config.fail_on_missing_snowobs, list) \
+            else bool(self.task_config.fail_on_missing_snowobs)
 
         # if 00z, do SCF preprocessing
-        _ims_file = os.path.join(self.task_config.COMIN_OBS, f'{self.task_config.OPREFIX}imssnow96.asc')
+        _ims_file = os.path.join(
+            self.task_config.COMIN_OBS,
+            f'{self.task_config.OPREFIX}imssnow96.{self.task_config.ims_scf_obs_suffix}'
+        )
         logger.info(f"Checking for IMS file: {_ims_file}")
-        if self.task_config.cyc == 0 and os.path.exists(_ims_file):
-            _DO_IMS_SCF = True
+        _DO_IMS_SCF = False
+        if self.task_config.cyc == 0:
+            if os.path.exists(_ims_file):
+                _DO_IMS_SCF = True
+            else:
+                if _fail_on_missing:
+                    raise FileNotFoundError(
+                        f"IMS obs file required but not found: {_ims_file}"
+                    )
+                else:
+                    logger.warning(f"IMS obs file missing: {_ims_file}")
+        else:
+            logger.info("Not 00z cycle — Skipping IMS preprocessing.")
+
+        # if 00z, do GHCN preprocessing
+        _ghcn_file = os.path.join(self.task_config.COMIN_OBS, f'{self.task_config.OPREFIX}ghcn_snow.csv')
+        logger.info(f"Checking for GHCN csv file: {_ghcn_file}")
+        _DO_GHCN = False
+        if self.task_config.cyc == 0:
+            if os.path.exists(_ghcn_file):
+                _DO_GHCN = True
+            else:
+                if _fail_on_missing:
+                    raise FileNotFoundError(
+                        f"GHCN obs file required but not found: {_ghcn_file}"
+                    )
+                else:
+                    logger.warning(f"GHCN obs file missing: {_ghcn_file}")
         else:
-            _DO_IMS_SCF = False
+            logger.info("Not 00z cycle — Skipping GHCN preprocessing.")
 
         # Extend task_config with variables repeatedly used across this class
         self.task_config.update(AttrDict(
@@ -68,6 +100,7 @@ def __init__(self, config: Dict[str, Any]):
             'snow_prepobs_path': os.path.join(self.task_config.DATA, 'prep'),
             'ims_file': _ims_file,
             'DO_IMS_SCF': _DO_IMS_SCF, # Boolean to decide if IMS snow cover processing is done
+            'DO_GHCN': _DO_GHCN, # Boolean to decide if GHCN processing is done
         }
         ))
 
@@ -222,6 +255,74 @@ def prepare_SNOCVR_SNOMAD(self) -> None:
             logger.info(f"Copy {output_file} successfully generated")
             FileHandler(prep_snocvr_snomad_config.netcdf).sync()
 
+    @logit(logger)
+    def prepare_GHCN(self) -> None:
+        """Prepare the GHCN data for a global snow analysis
+
+        This method will prepare GHCN data for a global snow analysis using JEDI.
+        This includes:
+        - creating GHCN snowdepth data in IODA format.
+
+        Parameters
+        ----------
+        Analysis: parent class for GDAS task
+
+        Returns
+        ----------
+        None
+        """
+
+        # Read and render the prep_ghcn.yaml.j2
+        logger.info(f"Reading {self.task_config.PREP_GHCN_YAML}")
+        prep_ghcn_config = parse_j2yaml(self.task_config.PREP_GHCN_YAML, self.task_config)
+        logger.debug(f"{self.task_config.PREP_GHCN_YAML}:\n{pformat(prep_ghcn_config)}")
+
+        # Define these locations in gdas/snow/prep/prep_ghcn.yaml.j2
+        logger.info("Copying GHCN obs to DATA")
+        FileHandler(prep_ghcn_config.stage).sync()
+
+        # Execute ioda converter to create the GHCN obs data in IODA format
+        logger.info("Create GHCN obs data in IODA format")
+
+        csv_file = f'{self.task_config.OPREFIX}ghcn_snow.csv'
+        station_file = f'ghcnd-stations.txt'
+        output_file = f'{self.task_config.OPREFIX}ghcn_snow.nc'
+        if os.path.exists(f"{os.path.join(self.task_config.DATA, output_file)}"):
+            rm_p(output_file)
+        if not os.path.isfile(csv_file):
+            logger.warning(f"WARNING: GHCN obs file not found.")
+            return
+
+        logger.info("Link GHCN2IODACONV into DATA/")
+        exe_src = self.task_config.GHCN2IODACONV
+        exe_dest = os.path.join(self.task_config.DATA, os.path.basename(exe_src))
+        if os.path.exists(exe_dest):
+            rm_p(exe_dest)
+        os.symlink(exe_src, exe_dest)
+
+        exe = Executable(exe_dest)
+        exe.add_default_arg(["-i", f"{os.path.join(self.task_config.DATA, csv_file)}"])
+        exe.add_default_arg(["-o", f"{os.path.join(self.task_config.DATA, output_file)}"])
+        exe.add_default_arg(["-f", f"{os.path.join(self.task_config.DATA, station_file)}"])
+        exe.add_default_arg(["-d", f"{to_YMDH(self.task_config.current_cycle)}"])
+        try:
+            logger.debug(f"Executing {exe}")
+            exe()
+        except OSError:
+            logger.exception(f"Failed to execute {exe}")
+            raise
+        except Exception as err:
+            logger.exception(f"An error occurred during execution of {exe}")
+            raise WorkflowException(f"An error occurred during execution of {exe}") from err
+
+        # Ensure the IODA snow depth GHCN file is produced by the IODA converter
+        # If so, copy to DATA/prep/
+        if not os.path.isfile(f"{os.path.join(self.task_config.DATA, output_file)}"):
+            logger.warning(f"{output_file} not produced - continuing without it.")
+        else:
+            logger.info(f"Copy {output_file} successfully generated")
+            FileHandler(prep_ghcn_config.ghcn2ioda).sync()
+
     @logit(logger)
     def add_increments(self) -> None:
         """Executes the program "apply_incr.exe" to create analysis "sfc_data" files by adding increments to backgrounds
diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py
index c876d94f29..5f289aff01 100644
--- a/ush/python/pygfs/task/snowens_analysis.py
+++ b/ush/python/pygfs/task/snowens_analysis.py
@@ -51,14 +51,46 @@ def __init__(self, config: Dict[str, Any]):
         super().__init__(config)
 
         _res = int(self.task_config['CASE_ENS'][1:])
+        _fail_on_missing = str(self.task_config.fail_on_missing_snowobs[0]).lower() == "true" \
+            if isinstance(self.task_config.fail_on_missing_snowobs, list) \
+            else bool(self.task_config.fail_on_missing_snowobs)
 
         # if 00z, do SCF preprocessing
-        _ims_file = os.path.join(self.task_config.COMIN_OBS, f'{self.task_config.OPREFIX}imssnow96.asc')
+        _ims_file = os.path.join(
+            self.task_config.COMIN_OBS,
+            f'{self.task_config.OPREFIX}imssnow96.{self.task_config.ims_scf_obs_suffix}'
+        )
         logger.info(f"Checking for IMS file: {_ims_file}")
-        if self.task_config.cyc == 0 and os.path.exists(_ims_file):
-            _DO_IMS_SCF = True
+        _DO_IMS_SCF = False
+        if self.task_config.cyc == 0:
+            if os.path.exists(_ims_file):
+                _DO_IMS_SCF = True
+            else:
+                if _fail_on_missing:
+                    raise FileNotFoundError(
+                        f"IMS obs file required but not found: {_ims_file}"
+                    )
+                else:
+                    logger.warning(f"IMS obs file missing: {_ims_file}")
+        else:
+            logger.info("Not 00z cycle — Skipping IMS preprocessing.")
+
+        # if 00z, do GHCN preprocessing
+        _ghcn_file = os.path.join(self.task_config.COMIN_OBS, f'{self.task_config.OPREFIX}ghcn_snow.csv')
+        logger.info(f"Checking for GHCN csv file: {_ghcn_file}")
+        _DO_GHCN = False
+        if self.task_config.cyc == 0:
+            if os.path.exists(_ghcn_file):
+                _DO_GHCN = True
+            else:
+                if _fail_on_missing:
+                    raise FileNotFoundError(
+                        f"GHCN obs file required but not found: {_ghcn_file}"
+                    )
+                else:
+                    logger.warning(f"GHCN obs file missing: {_ghcn_file}")
         else:
-            _DO_IMS_SCF = False
+            logger.info("Not 00z cycle — Skipping GHCN preprocessing.")
 
         # Extend task_config with variables repeatedly used across this class
         self.task_config.update(AttrDict(
@@ -69,8 +101,10 @@ def __init__(self, config: Dict[str, Any]):
             'npz': self.task_config.LEVS - 1,
             'CASE': self.task_config.CASE_ENS,
             'snow_bkg_path': os.path.join('.', 'bkg', 'ensmean/'),
+            'snow_prepobs_path': os.path.join(self.task_config.DATA, 'prep'),
             'ims_file': _ims_file,
             'DO_IMS_SCF': _DO_IMS_SCF, # Boolean to decide if IMS snow cover processing is done
+            'DO_GHCN': _DO_GHCN, # Boolean to decide if GHCN processing is done
         }
         ))
 
@@ -226,6 +260,71 @@ def prepare_SNOCVR_SNOMAD(self) -> None:
             logger.info(f"Copy {output_file} successfully generated")
             FileHandler(prep_snocvr_snomad_config.netcdf).sync()
 
+    @logit(logger)
+    def prepare_GHCN(self) -> None:
+        """Prepare the GHCN data for a global snow analysis
+        This method will prepare GHCN data for a global snow analysis using JEDI.
+        This includes:
+        - creating GHCN snowdepth data in IODA format.
+        Parameters
+        ----------
+        Analysis: parent class for GDAS task
+        Returns
+        ----------
+        None
+        """
+
+        # Read and render the prep_ghcn.yaml.j2
+        logger.info(f"Reading {self.task_config.PREP_GHCN_YAML}")
+        prep_ghcn_config = parse_j2yaml(self.task_config.PREP_GHCN_YAML, self.task_config)
+        logger.debug(f"{self.task_config.PREP_GHCN_YAML}:\n{pformat(prep_ghcn_config)}")
+
+        # Define these locations in gdas/snow/prep/prep_ghcn.yaml.j2
+        logger.info("Copying GHCN obs to DATA")
+        FileHandler(prep_ghcn_config.stage).sync()
+
+        # Execute ioda converter to create the GHCN obs data in IODA format
+        logger.info("Create GHCN obs data in IODA format")
+
+        csv_file = f'{self.task_config.OPREFIX}ghcn_snow.csv'
+        station_file = f'ghcnd-stations.txt'
+        output_file = f'{self.task_config.OPREFIX}ghcn_snow.nc'
+        if os.path.exists(f"{os.path.join(self.task_config.DATA, output_file)}"):
+            rm_p(output_file)
+        if not os.path.isfile(csv_file):
+            logger.warning(f"WARNING: GHCN obs file not found.")
+            return
+
+        logger.info("Link GHCN2IODACONV into DATA/")
+        exe_src = self.task_config.GHCN2IODACONV
+        exe_dest = os.path.join(self.task_config.DATA, os.path.basename(exe_src))
+        if os.path.exists(exe_dest):
+            rm_p(exe_dest)
+        os.symlink(exe_src, exe_dest)
+
+        exe = Executable(exe_dest)
+        exe.add_default_arg(["-i", f"{os.path.join(self.task_config.DATA, csv_file)}"])
+        exe.add_default_arg(["-o", f"{os.path.join(self.task_config.DATA, output_file)}"])
+        exe.add_default_arg(["-f", f"{os.path.join(self.task_config.DATA, station_file)}"])
+        exe.add_default_arg(["-d", f"{to_YMDH(self.task_config.current_cycle)}"])
+        try:
+            logger.debug(f"Executing {exe}")
+            exe()
+        except OSError:
+            logger.exception(f"Failed to execute {exe}")
+            raise
+        except Exception as err:
+            logger.exception(f"An error occurred during execution of {exe}")
+            raise WorkflowException(f"An error occurred during execution of {exe}") from err
+
+        # Ensure the IODA snow depth GHCN file is produced by the IODA converter
+        # If so, copy to DATA/prep/
+        if not os.path.isfile(f"{os.path.join(self.task_config.DATA, output_file)}"):
+            logger.warning(f"{output_file} not produced - continuing without it.")
+        else:
+            logger.info(f"Copy {output_file} successfully generated")
+            FileHandler(prep_ghcn_config.ghcn2ioda).sync()
+
     @logit(logger)
     def add_increments(self) -> None:
         """Executes the program "apply_incr.exe" to create analysis "sfc_data" files by adding increments to backgrounds