From 5d0421e84aeff502e83dad4f8de94e75c3d99e64 Mon Sep 17 00:00:00 2001 From: Sarith Mahanama Date: Tue, 16 Jul 2024 13:19:32 -0400 Subject: [PATCH 01/40] (1) Added the milan modulefile to support/lisf-557ww-7.5 (2) Updated Python 3.11 (3) Center-justified the colorbar in plot_utils.py (4) Added group submission capability to s2s-run.sh to optimize computer resources --- env/discover/lisf_7.5_intel_2021.4.0_s2s | 5 +- env/discover/lisf_7.5_intel_2023.2.1_s2s | 168 ++++++++++++ lis/utils/usaf/s2s/s2s_app/s2s_api.py | 9 +- lis/utils/usaf/s2s/s2s_app/s2s_run.sh | 243 ++++++++++++++---- .../s2s/s2s_modules/s2splots/plot_utils.py | 2 +- .../usaf/s2s/s2s_modules/shared/utils.py | 9 +- 6 files changed, 380 insertions(+), 56 deletions(-) create mode 100644 env/discover/lisf_7.5_intel_2023.2.1_s2s diff --git a/env/discover/lisf_7.5_intel_2021.4.0_s2s b/env/discover/lisf_7.5_intel_2021.4.0_s2s index 5f203e1aa..e43890c85 100644 --- a/env/discover/lisf_7.5_intel_2021.4.0_s2s +++ b/env/discover/lisf_7.5_intel_2021.4.0_s2s @@ -171,4 +171,7 @@ prepend-path LD_LIBRARY_PATH "$def_lis_petsc/lib" prepend-path PATH "$def_lis_netcdf/bin:$def_lis_eccodes/bin" # EMK Miniconda3 environment for S2S -prepend-path PATH "/discover/nobackup/projects/usaf_lis/emkemp/miniconda3/20221027/bin" +prepend-path PATH "/discover/nobackup/projects/usaf_lis/emkemp/miniconda3/20240712_py311_sles15_hpc11/bin" +setenv ESMFMKFILE /discover/nobackup/projects/usaf_lis/emkemp/miniconda3/20240712_py311_sles15_hpc11/lib/esmf.mk +setenv PROJ_LIB /discover/nobackup/projects/usaf_lis/emkemp/miniconda3/20240712_py311_sles15_hpc11/share/proj/ + diff --git a/env/discover/lisf_7.5_intel_2023.2.1_s2s b/env/discover/lisf_7.5_intel_2023.2.1_s2s new file mode 100644 index 000000000..6260e359a --- /dev/null +++ b/env/discover/lisf_7.5_intel_2023.2.1_s2s @@ -0,0 +1,168 @@ +#%Module1.0################################################################### + +proc ModulesHelp { } { + puts stderr "\t[module-info name] - loads the 
LISF_7_5_INTEL_2023_2_1 env" + puts stderr "" + puts stderr "This is for use on NCCS' discover system running SLES 15.4." + puts stderr "" + puts stderr "\tThe following env variables are set:" + puts stderr "\t\tDEV_ENV" + puts stderr "\t\tLIS_ARCH" + puts stderr "\t\tLIS_SPMD" + puts stderr "\t\tLIS_FC" + puts stderr "\t\tLIS_CC" + puts stderr "\t\tLIS_RPC" + puts stderr "\t\tLIS_OPENJPEG" + puts stderr "\t\tLIS_ECCODES" + puts stderr "\t\tLIS_NETCDF" + puts stderr "\t\tLIS_HDF4" + puts stderr "\t\tLIS_HDFEOS" + puts stderr "\t\tLIS_HDF5" + puts stderr "\t\tLIS_MODESMF" + puts stderr "\t\tLIS_LIBESMF" + puts stderr "\t\tLIS_MINPACK" + puts stderr "\t\tLIS_CRTM" + puts stderr "\t\tLIS_CRTM_PROF" + puts stderr "\t\tLIS_CMEM" + puts stderr "\t\tLIS_LAPACK" + puts stderr "\t\tLIS_PETSC" + puts stderr "\t\tLDT_ARCH" + puts stderr "\t\tLDT_FC" + puts stderr "\t\tLDT_CC" + puts stderr "\t\tLDT_RPC" + puts stderr "\t\tLDT_OPENJPEG" + puts stderr "\t\tLDT_ECCODES" + puts stderr "\t\tLDT_NETCDF" + puts stderr "\t\tLDT_HDF4" + puts stderr "\t\tLDT_HDFEOS" + puts stderr "\t\tLDT_HDF5" + puts stderr "\t\tLDT_MODESMF" + puts stderr "\t\tLDT_LIBESMF" + puts stderr "\t\tLDT_GDAL" + puts stderr "\t\tLDT_FORTRANGIS" + puts stderr "\t\tLDT_LIBGEOTIFF" + puts stderr "\t\tLVT_ARCH" + puts stderr "\t\tLVT_FC" + puts stderr "\t\tLVT_CC" + puts stderr "\t\tLVT_RPC" + puts stderr "\t\tLVT_OPENJPEG" + puts stderr "\t\tLVT_ECCODES" + puts stderr "\t\tLVT_NETCDF" + puts stderr "\t\tLVT_HDF4" + puts stderr "\t\tLVT_HDFEOS" + puts stderr "\t\tLVT_HDF5" + puts stderr "\t\tLVT_MODESMF" + puts stderr "\t\tLVT_LIBESMF" + puts stderr "\t\tLVT_GDAL" + puts stderr "\t\tLVT_FORTRANGIS" + puts stderr "" + puts stderr "\tThe following modules are loaded:" + puts stderr "\t\tcomp/gcc/13.2.0" + puts stderr "\t\tcomp/intel/2023.2.1" + puts stderr "\t\tmpi/impi/2021.11" + puts stderr "\t\tgit/2.42.0" + puts stderr "" +} + +conflict comp mpi + +module-whatis "loads the [module-info name] environment" + +set 
modname [module-info name] +set modmode [module-info mode] + +module load comp/gcc/13.2.0 +module load comp/intel/2023.2.1 +module load mpi/impi/2021.11 +module load git/2.42.0 + +set def_lis_rpc /usr/lib64/libtirpc.so +set def_lis_hdf5 /discover/nobackup/projects/lis/libs/sles-15.4/hdf5/1.14.2_intel-2023.2.1 +set def_lis_netcdf /discover/nobackup/projects/lis/libs/sles-15.4/netcdf/4.9.2_intel-2023.2.1 +set def_lis_openjpeg /discover/nobackup/projects/lis/libs/sles-15.4/openjpeg/2.3.0-150000.3.13.1.x86_64/usr +set def_lis_eccodes /discover/nobackup/projects/lis/libs/sles-15.4/eccodes/2.32.0_intel-2023.2.1 +set def_lis_hdf4 /discover/nobackup/projects/lis/libs/sles-15.4/hdf4/4.2.16-2_intel-2023.2.1 +set def_lis_hdfeos /discover/nobackup/projects/lis/libs/sles-15.4/hdfeos2/3.0_intel-2023.2.1 +set def_lis_modesmf /discover/nobackup/projects/lis/libs/sles-15.4/esmf/8.5.0_intel-2023.2.1_impi-2021.11/mod/modO/Linux.intel.64.intelmpi.default +set def_lis_libesmf /discover/nobackup/projects/lis/libs/sles-15.4/esmf/8.5.0_intel-2023.2.1_impi-2021.11/lib/libO/Linux.intel.64.intelmpi.default +set def_lvt_proj /discover/nobackup/projects/lis/libs/sles-15.4/proj/9.3.0_intel-2023.2.1 +set def_ldt_libgeotiff /discover/nobackup/projects/lis/libs/sles-15.4/geotiff/1.7.1_intel-2023.2.1 +set def_lvt_gdal /discover/nobackup/projects/lis/libs/sles-15.4/gdal/3.7.2_intel-2023.2.1 +set def_lvt_fortrangis /discover/nobackup/projects/lis/libs/sles-15.4/fortrangis/3.0-1_intel-2023.2.1 +set def_lis_petsc /discover/nobackup/projects/lis/libs/sles-15.4/petsc/3.20.0_intel-2023.2.1_impi-2021.11 +set def_lis_minpack /discover/nobackup/projects/lis/libs/minpack/intel_11_1_038 +set def_lis_crtm /discover/nobackup/projects/lis/libs/JCSDA_CRTM/REL-2.0.2.Surface-rev_intel_18_0_3_222 +set def_lis_crtm_prof /discover/nobackup/projects/lis/libs/CRTM_Profile_Utility/intel_18_0_3_222 +set def_lis_cmem /discover/nobackup/projects/lis/libs/LIS-MEM/intel_18_0_3_222 +set def_lis_lapack 
/discover/nobackup/projects/lis/libs/lapack/3.6.0_intel_14_0_3_174 + +setenv DEV_ENV LISF_7_5_INTEL_2023_2_1 +setenv LIS_ARCH linux_ifc +setenv LIS_SPMD parallel +setenv LIS_FC mpiifort +setenv LIS_CC mpicc +setenv LIS_RPC $def_lis_rpc +setenv LIS_OPENJPEG $def_lis_openjpeg +setenv LIS_ECCODES $def_lis_eccodes +setenv LIS_NETCDF $def_lis_netcdf +setenv LIS_HDF4 $def_lis_hdf4 +setenv LIS_HDFEOS $def_lis_hdfeos +setenv LIS_HDF5 $def_lis_hdf5 +setenv LIS_MODESMF $def_lis_modesmf +setenv LIS_LIBESMF $def_lis_libesmf +setenv LIS_MINPACK $def_lis_minpack +setenv LIS_CRTM $def_lis_crtm +setenv LIS_CRTM_PROF $def_lis_crtm_prof +setenv LIS_CMEM $def_lis_cmem +setenv LIS_LAPACK $def_lis_lapack +setenv LIS_PETSC $def_lis_petsc + +setenv LDT_ARCH linux_ifc +setenv LDT_FC mpiifort +setenv LDT_CC mpicc +setenv LDT_RPC $def_lis_rpc +setenv LDT_OPENJPEG $def_lis_openjpeg +setenv LDT_ECCODES $def_lis_eccodes +setenv LDT_NETCDF $def_lis_netcdf +setenv LDT_HDF4 $def_lis_hdf4 +setenv LDT_HDFEOS $def_lis_hdfeos +setenv LDT_HDF5 $def_lis_hdf5 +setenv LDT_MODESMF $def_lis_modesmf +setenv LDT_LIBESMF $def_lis_libesmf +setenv LDT_GDAL $def_lvt_gdal +setenv LDT_FORTRANGIS $def_lvt_fortrangis +setenv LDT_LIBGEOTIFF $def_ldt_libgeotiff + + +setenv LVT_ARCH linux_ifc +setenv LVT_FC mpiifort +setenv LVT_CC mpicc +setenv LVT_RPC $def_lis_rpc +setenv LVT_OPENJPEG $def_lis_openjpeg +setenv LVT_ECCODES $def_lis_eccodes +setenv LVT_NETCDF $def_lis_netcdf +setenv LVT_HDF4 $def_lis_hdf4 +setenv LVT_HDFEOS $def_lis_hdfeos +setenv LVT_HDF5 $def_lis_hdf5 +setenv LVT_MODESMF $def_lis_modesmf +setenv LVT_LIBESMF $def_lis_libesmf +setenv LVT_GDAL $def_lvt_gdal +setenv LVT_FORTRANGIS $def_lvt_fortrangis + + +prepend-path LD_LIBRARY_PATH "$def_lis_openjpeg/lib" +prepend-path LD_LIBRARY_PATH "$def_ldt_libgeotiff/lib" +prepend-path LD_LIBRARY_PATH "$def_lvt_proj/lib" +prepend-path LD_LIBRARY_PATH "$def_lvt_gdal/lib" +prepend-path LD_LIBRARY_PATH "$def_lis_hdf4/lib" +prepend-path LD_LIBRARY_PATH 
"$def_lis_hdf5/lib" +prepend-path LD_LIBRARY_PATH "$def_lis_libesmf" +prepend-path LD_LIBRARY_PATH "$def_lis_netcdf/lib" +prepend-path LD_LIBRARY_PATH "$def_lis_eccodes/lib" +prepend-path LD_LIBRARY_PATH "$def_lis_petsc/lib" +prepend-path PATH "$def_lis_netcdf/bin:$def_lis_eccodes/bin" + +# EMK Miniconda3 environment for S2S +prepend-path PATH "/discover/nobackup/projects/usaf_lis/emkemp/miniconda3/20240712_py311_sles15_hpc11/bin" +setenv ESMFMKFILE /discover/nobackup/projects/usaf_lis/emkemp/miniconda3/20240712_py311_sles15_hpc11/lib/esmf.mk +setenv PROJ_LIB /discover/nobackup/projects/usaf_lis/emkemp/miniconda3/20240712_py311_sles15_hpc11/share/proj/ diff --git a/lis/utils/usaf/s2s/s2s_app/s2s_api.py b/lis/utils/usaf/s2s/s2s_app/s2s_api.py index 632b5677b..caccae135 100644 --- a/lis/utils/usaf/s2s/s2s_app/s2s_api.py +++ b/lis/utils/usaf/s2s/s2s_app/s2s_api.py @@ -25,6 +25,7 @@ PARSER.add_argument('-f', '--JOBFILE', required=False, help='job file name') PARSER.add_argument('-t', '--NTASKS', required=False, help='NTASKS') PARSER.add_argument('-c', '--CONFIGFILE', required=False, help='config file name') +PARSER.add_argument('-C', '--command_list', required=False, help='list of commands for group jobs') PARSER.add_argument('-H', '--HOURS', required=False, help='time HOURS') PARSER.add_argument('-j', '--JOBNAME', required=False, help='job-name') PARSER.add_argument('-w', '--CWD', required=False, help='current working directory') @@ -85,4 +86,10 @@ HOURS = ARGS.HOURS JOBNAME = ARGS.JOBNAME CWD = ARGS.CWD - utils.job_script(CONFIGFILE, JOBFILE, JOBNAME, NTASKS, str(HOURS), CWD) + if ARGS.command_list is None: + utils.job_script(CONFIGFILE, JOBFILE, JOBNAME, NTASKS, str(HOURS), CWD) + else: + with open(ARGS.command_list, 'r') as file: + commands = [line.strip() for line in file if line.strip()] + + utils.job_script(CONFIGFILE, JOBFILE, JOBNAME, NTASKS, str(HOURS), CWD, command_list=commands) diff --git a/lis/utils/usaf/s2s/s2s_app/s2s_run.sh 
b/lis/utils/usaf/s2s/s2s_app/s2s_run.sh index 86eb46d4c..4812205bd 100755 --- a/lis/utils/usaf/s2s/s2s_app/s2s_run.sh +++ b/lis/utils/usaf/s2s/s2s_app/s2s_run.sh @@ -19,6 +19,7 @@ # SHARED FUNCTIONS ###################################################################### SOURCE_ONLY='N' +GROUP_JOBS='Y' submit_job(){ if [[ $1 == "" ]] || [[ $1 == ":" ]]; then submit_ID="`sbatch $2 | cut -d' ' -f4`" @@ -549,21 +550,38 @@ bcsd_fcst(){ # hindcast does not run bcsd01 and bcsd03 since they have been preprocessed. # Task 1: Generate and rescale 6-hourly files to 25 KM (forecast_task_01.py) # -------------------------------------------------------------------------- + cmdfile="bcsd01.file" jobname=bcsd01 python $LISHDIR/s2s_modules/bcsd_fcst/forecast_task_01.py -s $YYYY -m $mmm -c $BWD/$CFILE -w ${CWD} -t 1 -H 2 -j $jobname - job_list="$jobname*.j" bcsd01_ID= for jfile in $job_list do - thisID=$(submit_job "" "${jfile}") - bcsd01_ID=`echo $bcsd01_ID`' '$thisID + if [ $GROUP_JOBS == "Y" ]; then + job_comm=`grep python ${jfile} | cut -d'|' -f1` + echo "$job_comm" >> "$cmdfile" + else + thisID=$(submit_job "" "${jfile}") + bcsd01_ID=`echo $bcsd01_ID`' '$thisID + fi done bcsd01_ID=`echo $bcsd01_ID | sed "s| |:|g"` - + if [ $GROUP_JOBS == "Y" ]; then + bcsd01_ID= + /bin/rm bcsd01_*.j + split -l 6 $cmdfile part_ + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_01_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C "part_aa" + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_02_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C "part_ab" + /bin/rm ${cmdfile} "part_aa" "part_ab" + bcsd01_ID=$(submit_job "" "${jobname}_01_run.j") + thisID=$(submit_job "" "${jobname}_02_run.j") + bcsd01_ID=`echo $bcsd01_ID`' '$thisID + bcsd01_ID=`echo $bcsd01_ID | sed "s| |:|g"` + fi # Task 3: Rescale and reorganize NMME Data (forecast_task_03.py) # -------------------------------------------------------------- jobname=bcsd03 + cmdfile="bcsd03.file" python 
$LISHDIR/s2s_modules/bcsd_fcst/forecast_task_03.py -s $YYYY -m $MM -c $BWD/$CFILE -w ${CWD} -t 1 -H 2 -j $jobname unset job_list @@ -571,15 +589,28 @@ bcsd_fcst(){ bcsd03_ID= for jfile in $job_list do - thisID=$(submit_job "" "${jfile}") - bcsd03_ID=`echo $bcsd03_ID`' '$thisID + if [ $GROUP_JOBS == "Y" ]; then + job_comm=`grep python ${jfile} | cut -d'|' -f1` + echo "$job_comm" >> "$cmdfile" + else + thisID=$(submit_job "" "${jfile}") + bcsd03_ID=`echo $bcsd03_ID`' '$thisID + fi done bcsd03_ID=`echo $bcsd03_ID | sed "s| |:|g"` + if [ $GROUP_JOBS == "Y" ]; then + /bin/rm bcsd03*.j + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C ${cmdfile} + /bin/rm ${cmdfile} + bcsd03_ID=$(submit_job "" "${jobname}_run.j") + fi fi # Task 4: Monthly "BC" step applied to CFSv2 (forecast_task_04.py, after 1 and 3) # ------------------------------------------------------------------------------- jobname=bcsd04 + cmdfile="bcsd04-05.file" + /bin/rm bcsd04-05* python $LISHDIR/s2s_modules/bcsd_fcst/forecast_task_04.py -s $YYYY -e $YYYY -m $mmm -n $MM -c $BWD/$CFILE -w ${CWD} -t 1 -H 3 -j $jobname unset job_list @@ -587,12 +618,17 @@ bcsd_fcst(){ bcsd04_ID= for jfile in $job_list do - if [ $DATATYPE == "forecast" ]; then - thisID=$(submit_job "$bcsd01_ID:$bcsd03_ID" "${jfile}") + if [ $GROUP_JOBS == "Y" ]; then + job_comm=`grep python ${jfile} | cut -d'|' -f1` + echo "$job_comm" >> "$cmdfile" else - thisID=$(submit_job "" "${jfile}") - fi - bcsd04_ID=`echo $bcsd04_ID`' '$thisID + if [ $DATATYPE == "forecast" ]; then + thisID=$(submit_job "$bcsd01_ID:$bcsd03_ID" "${jfile}") + else + thisID=$(submit_job "" "${jfile}") + fi + bcsd04_ID=`echo $bcsd04_ID`' '$thisID + fi done bcsd04_ID=`echo $bcsd04_ID | sed "s| |:|g"` @@ -609,18 +645,34 @@ bcsd_fcst(){ bcsd05_ID= for jfile in $job_list do - if [ $DATATYPE == "forecast" ]; then - thisID=$(submit_job "$bcsd01_ID:$bcsd03_ID" "${jfile}") + if [ $GROUP_JOBS == "Y" ]; then + job_comm=`grep 
python ${jfile} | cut -d'|' -f1` + echo "$job_comm" >> "$cmdfile" else - thisID=$(submit_job "" "${jfile}") + if [ $DATATYPE == "forecast" ]; then + thisID=$(submit_job "$bcsd01_ID:$bcsd03_ID" "${jfile}") + else + thisID=$(submit_job "" "${jfile}") + fi + bcsd05_ID=`echo $bcsd05_ID`' '$thisID fi - bcsd05_ID=`echo $bcsd05_ID`' '$thisID done bcsd05_ID=`echo $bcsd05_ID | sed "s| |:|g"` + if [ $GROUP_JOBS == "Y" ]; then + jobname=bcsd04-05 + /bin/rm bcsd04*.j + /bin/rm bcsd05*.j + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C ${cmdfile} + /bin/rm ${cmdfile} + bcsd04_ID=$(submit_job "$bcsd01_ID:$bcsd03_ID" "${jobname}_run.j") + bcsd05_ID= + fi # Task 6: CFSv2 Temporal Disaggregation (forecast_task_06.py: after 4 and 5) # -------------------------------------------------------------------------- jobname=bcsd06 + cmdfile="bcsd06.file" + /bin/rm bcsd06* python $LISHDIR/s2s_modules/bcsd_fcst/forecast_task_06.py -s $YYYY -e $YYYY -m $mmm -n $MM -c $BWD/$CFILE -w ${CWD} -p ${E2ESDIR} -t 1 -H 2 -j $jobname unset job_list @@ -628,10 +680,21 @@ bcsd_fcst(){ bcsd06_ID= for jfile in $job_list do - thisID=$(submit_job "$bcsd04_ID:$bcsd05_ID" "${jfile}") - bcsd06_ID=`echo $bcsd06_ID`' '$thisID + if [ $GROUP_JOBS == "Y" ]; then + job_comm=`grep python ${jfile} | cut -d'|' -f1` + echo "$job_comm" >> "$cmdfile" + else + thisID=$(submit_job "$bcsd04_ID:$bcsd05_ID" "${jfile}") + bcsd06_ID=`echo $bcsd06_ID`' '$thisID + fi done - bcsd06_ID=`echo $bcsd06_ID | sed "s| |:|g"` + bcsd06_ID=`echo $bcsd06_ID | sed "s| |:|g"` + if [ $GROUP_JOBS == "Y" ]; then + /bin/rm bcsd06*.j + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C ${cmdfile} + /bin/rm ${cmdfile} + bcsd06_ID=$(submit_job "$bcsd04_ID" "${jobname}_run.j") + fi # Task 7: Generate symbolic links to sub-daily CFSv2 BC forecasts for NMME # temporal disaggregation due to an uneven number of ensembles between the datasets 
@@ -644,6 +707,8 @@ bcsd_fcst(){ # Task 8: NMME Temporal Disaggregation (forecast_task_08.py: after 6, 7) # ---------------------------------------------------------------------------- jobname=bcsd08 + cmdfile="bcsd08.file" + /bin/rm bcsd08* for model in $MODELS do python $LISHDIR/s2s_modules/bcsd_fcst/forecast_task_08.py -s $YYYY -e $YYYY -m $mmm -n $MM -c $BWD/$CFILE -w ${CWD} -p ${E2ESDIR} -t 1 -H 3 -M $model -j $jobname @@ -654,18 +719,36 @@ bcsd_fcst(){ bcsd08_ID= for jfile in $job_list do - thisID=$(submit_job "$bcsd06_ID" "${jfile}") - bcsd08_ID=`echo $bcsd08_ID`' '$thisID + if [ $GROUP_JOBS == "Y" ]; then + job_comm=`grep python ${jfile} | cut -d'|' -f1` + echo "$job_comm" >> "$cmdfile" + else + thisID=$(submit_job "$bcsd06_ID" "${jfile}") + bcsd08_ID=`echo $bcsd08_ID`' '$thisID + fi done bcsd08_ID=`echo $bcsd08_ID | sed "s| |:|g"` + if [ $GROUP_JOBS == "Y" ]; then + /bin/rm bcsd08*.j + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C ${cmdfile} + /bin/rm ${cmdfile} + bcsd08_ID=$(submit_job "$bcsd06_ID" "${jobname}_run.j") + fi # Task 9: Combine the CFSv2 forcing fields into final format for LIS to read # (forecast_task_09.py: after 8) # --------------------------------------------------------------------------- + jobname=bcsd09 + cmdfile="bcsd09-10.file" + /bin/rm bcsd09* python $LISHDIR/s2s_modules/bcsd_fcst/forecast_task_09.py -s $YYYY -e $YYYY -m $mmm -n $MM -M CFSv2 -c $BWD/$CFILE -w ${CWD} -p ${E2ESDIR} -j $jobname -t 1 -H 4 - - bcsd09_ID=$(submit_job "$bcsd08_ID" "${jobname}_run.j") + if [ $GROUP_JOBS == "Y" ]; then + job_comm=`grep python ${jobname}_run.j | cut -d'|' -f1` + echo "$job_comm" >> "$cmdfile" + else + bcsd09_ID=$(submit_job "$bcsd08_ID" "${jobname}_run.j") + fi # Task 10: Combine the NMME forcing fields into final format for LIS to read # and symbolically link to the reusable CFSv2 met forcings @@ -680,18 +763,38 @@ bcsd_fcst(){ bcsd10_ID= for jfile in $job_list do - 
thisID=$(submit_job "$bcsd08_ID" "${jfile}") - bcsd10_ID=`echo $bcsd10_ID`' '$thisID + if [ $GROUP_JOBS == "Y" ]; then + job_comm=`grep python ${jfile} | cut -d'|' -f1` + echo "$job_comm" >> "$cmdfile" + else + thisID=$(submit_job "$bcsd08_ID" "${jfile}") + bcsd10_ID=`echo $bcsd10_ID`' '$thisID + fi done bcsd10_ID=`echo $bcsd10_ID | sed "s| |:|g"` + if [ $GROUP_JOBS == "Y" ]; then + jobname=bcsd09-10 + /bin/rm bcsd09*.j + /bin/rm bcsd10*.j + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 4 -j ${jobname}_ -w ${CWD} -C ${cmdfile} + /bin/rm ${cmdfile} + bcsd09_ID=$(submit_job "$bcsd08_ID" "${jobname}_run.j") + bcsd10_ID= + fi # Task 11: Copy 9th forecast lead file as 10th forecast lead for LIS runs # (forecast_task_11.py: after 9 and 10) # --------------------------------------------------------------------------- jobname=bcsd11 + cmdfile="bcsd11-12.file" # NOTE : Task 11 Job scripts are written by forecast_task_09.py to execute: # python $LISHDIR/s2s_modules/bcsd_fcst/forecast_task_11.py -s $YYYY -m $mmm -n $MM -c $BWD/$CFILE -w ${CWD} - bcsd11_ID=$(submit_job "$bcsd09_ID:$bcsd10_ID" "${jobname}_run.j") + if [ $GROUP_JOBS == "Y" ]; then + job_comm=`grep python ${jobname}_run.j | cut -d'|' -f1` + echo "$job_comm" >> "$cmdfile" + else + bcsd11_ID=$(submit_job "$bcsd09_ID:$bcsd10_ID" "${jobname}_run.j") + fi # Task 12: Task to introduce an all-zero variable V10M due to the way wind # is handled in the USAF forcing @@ -700,7 +803,21 @@ bcsd_fcst(){ jobname=bcsd12 # NOTE : Task 12 Job scripts are written by forecast_task_09.py to execute: # python $LISHDIR/s2s_modules/bcsd_fcst/forecast_task_12.py -s $YYYY -m $mmm -n $MM -c $BWD/$CFILE -w ${CWD} - bcsd12_ID=$(submit_job "$bcsd09_ID:$bcsd10_ID" "${jobname}_run.j") + if [ $GROUP_JOBS == "Y" ]; then + job_comm=`grep python ${jobname}_run.j | cut -d'|' -f1` + echo "$job_comm" >> "$cmdfile" + else + bcsd12_ID=$(submit_job "$bcsd09_ID:$bcsd10_ID" "${jobname}_run.j") + fi + if [ $GROUP_JOBS == "Y" 
]; then + jobname=bcsd11-12 + /bin/rm bcsd11*.j + /bin/rm bcsd12*.j + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C ${cmdfile} + /bin/rm ${cmdfile} + bcsd11_ID=$(submit_job "$bcsd09_ID" "${jobname}_run.j") + bcsd12_ID= + fi cd ${BWD} } @@ -762,7 +879,11 @@ lis_fcst(){ do if [ $nFiles -gt 1 ]; then if [ $FileNo -eq 1 ]; then - thisID=$(submit_job "$bcsd11_ID:$bcsd12_ID" "$jfile") + if [ $GROUP_JOBS == "Y" ]; then + thisID=$(submit_job "$bcsd11_ID" "$jfile") + else + thisID=$(submit_job "$bcsd11_ID:$bcsd12_ID" "$jfile") + fi lisfcst_ID=`echo $lisfcst_ID`' '$thisID prevID=$thisID else @@ -771,7 +892,11 @@ lis_fcst(){ prevID=$thisID fi else - thisID=$(submit_job "$bcsd11_ID:$bcsd12_ID" "$jfile") + if [ $GROUP_JOBS == "Y" ]; then + thisID=$(submit_job "$bcsd11_ID" "$jfile") + else + thisID=$(submit_job "$bcsd11_ID:$bcsd12_ID" "$jfile") + fi lisfcst_ID=`echo $lisfcst_ID`' '$thisID fi ((FileNo++)) @@ -791,6 +916,7 @@ s2spost(){ ####################################################################### jobname=s2spost + cmdfile="s2spost.file" echo " " >> $JOB_SCHEDULE echo "(5) S2S post-process " >> $JOB_SCHEDULE @@ -826,10 +952,21 @@ s2spost(){ s2spost_ID= for jfile in $job_list do - thisID=$(submit_job "$lisfcst_ID" "$jfile") - s2spost_ID=`echo $s2spost_ID`' '$thisID + if [ $GROUP_JOBS == "Y" ]; then + job_comm=`grep python ${jfile} | cut -d'|' -f1` + echo "$job_comm" >> "$cmdfile" + else + thisID=$(submit_job "$lisfcst_ID" "$jfile") + s2spost_ID=`echo $s2spost_ID`' '$thisID + fi done - s2spost_ID=`echo $s2spost_ID | sed "s| |:|g"` + s2spost_ID=`echo $s2spost_ID | sed "s| |:|g"` + if [ $GROUP_JOBS == "Y" ]; then + /bin/rm s2spost_*.j + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C ${cmdfile} + /bin/rm ${cmdfile} + s2spost_ID=$(submit_job "$lisfcst_ID" "${jobname}_run.j") + fi cd ${BWD} } @@ -842,6 +979,7 @@ s2smetrics(){ 
####################################################################### jobname=s2smetric + cmdfile="s2smetric.file" echo " " >> $JOB_SCHEDULE echo "(6) S2S metric " >> $JOB_SCHEDULE @@ -866,10 +1004,21 @@ s2smetrics(){ s2smetric_ID= for jfile in $job_list do - thisID=$(submit_job "$s2spost_ID" "$jfile") - s2smetric_ID=`echo $s2smetric_ID`' '$thisID + if [ $GROUP_JOBS == "Y" ]; then + job_comm=`grep python ${jfile} | cut -d'|' -f1` + echo "$job_comm" >> "$cmdfile" + else + thisID=$(submit_job "$s2spost_ID" "$jfile") + s2smetric_ID=`echo $s2smetric_ID`' '$thisID + fi done s2smetric_ID=`echo $s2smetric_ID | sed "s| |:|g"` + if [ $GROUP_JOBS == "Y" ]; then + /bin/rm s2smetric_*.j + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C ${cmdfile} + /bin/rm ${cmdfile} + s2smetric_ID=$(submit_job "$s2spost_ID" "${jobname}_run.j") + fi # write tiff file python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_tiff_run.j -t 1 -H 2 -j ${jobname}_tiff_ -w ${CWD} @@ -889,6 +1038,7 @@ s2splots(){ ####################################################################### jobname=s2splots + cmdfile="s2splots.file" echo " " >> $JOB_SCHEDULE echo "(7) S2S plots " >> $JOB_SCHEDULE @@ -900,26 +1050,17 @@ s2splots(){ CWD=`pwd` /bin/ln -s ${E2ESDIR}/s2splots/ /bin/ln -s ${E2ESDIR}/s2smetric/ - - python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 6 -j ${jobname}_ -w ${CWD} - COMMAND="python ${LISHDIR}/s2s_modules/s2splots/plot_s2smetrics.py -y ${YYYY} -m ${MM} -w ${CWD} -c $BWD/$CFILE" - sed -i "s|COMMAND|${COMMAND}|g" s2splots_run.j - - PLINE=`grep -n plot_s2smetrics.py s2splots_run.j | cut -d':' -f1` - ((PLINE++)) - SEC_COMMAND="python ${LISHDIR}/s2s_modules/s2splots/plot_hybas.py -y ${YYYY} -m ${mon} -w ${CWD} -c $BWD/$CFILE" - sed -i "${PLINE}i ${SEC_COMMAND}" s2splots_run.j - ((PLINE++)) - THIRD_COMMAND="python ${LISHDIR}/s2s_modules/s2splots/plot_mena.py -y ${YYYY} -m ${MM} -w ${CWD} -c 
$BWD/$CFILE" - sed -i "${PLINE}i ${THIRD_COMMAND}" s2splots_run.j - ((PLINE++)) - FOURTH_COMMAND="python ${LISHDIR}/s2s_modules/s2splots/plot_anom_verify.py -y ${YYYY} -m ${mon} -w ${CWD} -c $BWD/$CFILE -l 1" - sed -i "${PLINE}i ${FOURTH_COMMAND}" s2splots_run.j - ((PLINE++)) - FIFTH_COMMAND="python ${LISHDIR}/s2s_modules/s2splots/plot_anom_verify.py -y ${YYYY} -m ${mon} -w ${CWD} -c $BWD/$CFILE -l 2" - sed -i "${PLINE}i ${FIFTH_COMMAND}" s2splots_run.j + echo "python ${LISHDIR}/s2s_modules/s2splots/plot_s2smetrics.py -y ${YYYY} -m ${MM} -w ${CWD} -c $BWD/$CFILE" >> "$cmdfile" + echo "python ${LISHDIR}/s2s_modules/s2splots/plot_hybas.py -y ${YYYY} -m ${mon} -w ${CWD} -c $BWD/$CFILE" >> "$cmdfile" + echo "python ${LISHDIR}/s2s_modules/s2splots/plot_mena.py -y ${YYYY} -m ${MM} -w ${CWD} -c $BWD/$CFILE" >> "$cmdfile" + echo "python ${LISHDIR}/s2s_modules/s2splots/plot_anom_verify.py -y ${YYYY} -m ${mon} -w ${CWD} -c $BWD/$CFILE -l 1" >> "$cmdfile" + echo "python ${LISHDIR}/s2s_modules/s2splots/plot_anom_verify.py -y ${YYYY} -m ${mon} -w ${CWD} -c $BWD/$CFILE -l 2" >> "$cmdfile" + + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 6 -j ${jobname}_ -w ${CWD} -C ${cmdfile} + /bin/rm ${cmdfile} s2splots_ID=$(submit_job "$s2smetric_tiff_ID" "${jobname}_run.j") + } ####################################################################### diff --git a/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py b/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py index 12ed6db9d..766d5a89e 100644 --- a/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py +++ b/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py @@ -53,7 +53,7 @@ ADD_RIVERS = True RESOL = '50m' # use data at this scale FIGWIDTH = 25 -cbar_axes = [0.15, 0.04, 0.7, 0.02] +cbar_axes = [0.1, 0.02, 0.8, 0.02] mpl.use('pdf') mpl.style.use('bmh') diff --git a/lis/utils/usaf/s2s/s2s_modules/shared/utils.py b/lis/utils/usaf/s2s/s2s_modules/shared/utils.py index c8741965c..bddb6ca2f 100644 --- 
a/lis/utils/usaf/s2s/s2s_modules/shared/utils.py +++ b/lis/utils/usaf/s2s/s2s_modules/shared/utils.py @@ -63,7 +63,10 @@ def job_script(s2s_configfile, jobfile, job_name, ntasks, hours, cwd, in_command _f.write('#######################################################################' + '\n') _f.write('\n') _f.write('#SBATCH --account=' + sponsor_code + '\n') - _f.write('#SBATCH --ntasks=' + ntasks + '\n') + if command_list is None: + _f.write('#SBATCH --ntasks=' + ntasks + '\n') + else: + _f.write('#SBATCH --nodes=1' + '\n') _f.write('#SBATCH --time=' + hours + ':00:00' + '\n') if 'discover' in platform.node() or 'borg' in platform.node(): _f.write('#SBATCH --constraint=' + cfg['SETUP']['CONSTRAINT'] + '\n') @@ -99,7 +102,9 @@ def job_script(s2s_configfile, jobfile, job_name, ntasks, hours, cwd, in_command _f.write( sec_command + '\n') else: for this_command in command_list: - _f.write( this_command + '\n') + _f.write("nohup " + this_command + ' &' + '\n') + _f.write("wait " + '\n') + #print(len(command_list)) _f.write('\n') _f.write('echo "[INFO] Completed ' + job_name + '!"' + '\n') _f.write('\n') From b87e8e77dfa641626acf42fc3de82101a350e309 Mon Sep 17 00:00:00 2001 From: Sarith Mahanama Date: Wed, 17 Jul 2024 09:48:35 -0400 Subject: [PATCH 02/40] further modified to improve memory management --- lis/utils/usaf/s2s/s2s_app/s2s_api.py | 8 +++--- lis/utils/usaf/s2s/s2s_app/s2s_run.sh | 27 ++++++++++++------- .../usaf/s2s/s2s_modules/shared/utils.py | 26 +++++++++--------- 3 files changed, 36 insertions(+), 25 deletions(-) diff --git a/lis/utils/usaf/s2s/s2s_app/s2s_api.py b/lis/utils/usaf/s2s/s2s_app/s2s_api.py index caccae135..605cfaf6b 100644 --- a/lis/utils/usaf/s2s/s2s_app/s2s_api.py +++ b/lis/utils/usaf/s2s/s2s_app/s2s_api.py @@ -25,7 +25,7 @@ PARSER.add_argument('-f', '--JOBFILE', required=False, help='job file name') PARSER.add_argument('-t', '--NTASKS', required=False, help='NTASKS') PARSER.add_argument('-c', '--CONFIGFILE', required=False, help='config 
file name') -PARSER.add_argument('-C', '--command_list', required=False, help='list of commands for group jobs') +PARSER.add_argument('-C', '--group_jobs', required=False, help='list of commands for group jobs') PARSER.add_argument('-H', '--HOURS', required=False, help='time HOURS') PARSER.add_argument('-j', '--JOBNAME', required=False, help='job-name') PARSER.add_argument('-w', '--CWD', required=False, help='current working directory') @@ -86,10 +86,10 @@ HOURS = ARGS.HOURS JOBNAME = ARGS.JOBNAME CWD = ARGS.CWD - if ARGS.command_list is None: + if ARGS.group_jobs is None: utils.job_script(CONFIGFILE, JOBFILE, JOBNAME, NTASKS, str(HOURS), CWD) else: - with open(ARGS.command_list, 'r') as file: + with open(ARGS.group_jobs, 'r') as file: commands = [line.strip() for line in file if line.strip()] - utils.job_script(CONFIGFILE, JOBFILE, JOBNAME, NTASKS, str(HOURS), CWD, command_list=commands) + utils.job_script(CONFIGFILE, JOBFILE, JOBNAME, NTASKS, str(HOURS), CWD, group_jobs=commands) diff --git a/lis/utils/usaf/s2s/s2s_app/s2s_run.sh b/lis/utils/usaf/s2s/s2s_app/s2s_run.sh index 4812205bd..b7733ef54 100755 --- a/lis/utils/usaf/s2s/s2s_app/s2s_run.sh +++ b/lis/utils/usaf/s2s/s2s_app/s2s_run.sh @@ -569,13 +569,16 @@ bcsd_fcst(){ if [ $GROUP_JOBS == "Y" ]; then bcsd01_ID= /bin/rm bcsd01_*.j - split -l 6 $cmdfile part_ + split -l 4 $cmdfile part_ python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_01_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C "part_aa" python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_02_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C "part_ab" - /bin/rm ${cmdfile} "part_aa" "part_ab" + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_03_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C "part_ac" + /bin/rm ${cmdfile} "part_aa" "part_ab" "part_ac" bcsd01_ID=$(submit_job "" "${jobname}_01_run.j") thisID=$(submit_job "" "${jobname}_02_run.j") bcsd01_ID=`echo $bcsd01_ID`' '$thisID + thisID=$(submit_job "" 
"${jobname}_03_run.j") + bcsd01_ID=`echo $bcsd01_ID`' '$thisID bcsd01_ID=`echo $bcsd01_ID | sed "s| |:|g"` fi # Task 3: Rescale and reorganize NMME Data (forecast_task_03.py) @@ -599,6 +602,7 @@ bcsd_fcst(){ done bcsd03_ID=`echo $bcsd03_ID | sed "s| |:|g"` if [ $GROUP_JOBS == "Y" ]; then + bcsd03_ID= /bin/rm bcsd03*.j python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C ${cmdfile} /bin/rm ${cmdfile} @@ -609,8 +613,7 @@ bcsd_fcst(){ # Task 4: Monthly "BC" step applied to CFSv2 (forecast_task_04.py, after 1 and 3) # ------------------------------------------------------------------------------- jobname=bcsd04 - cmdfile="bcsd04-05.file" - /bin/rm bcsd04-05* + cmdfile="bcsd04.file" python $LISHDIR/s2s_modules/bcsd_fcst/forecast_task_04.py -s $YYYY -e $YYYY -m $mmm -n $MM -c $BWD/$CFILE -w ${CWD} -t 1 -H 3 -j $jobname unset job_list @@ -631,10 +634,18 @@ bcsd_fcst(){ fi done bcsd04_ID=`echo $bcsd04_ID | sed "s| |:|g"` + if [ $GROUP_JOBS == "Y" ]; then + bcsd04_ID= + /bin/rm bcsd04*.j + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 4 -j ${jobname}_ -w ${CWD} -C ${cmdfile} + /bin/rm ${cmdfile} + bcsd04_ID=$(submit_job "$bcsd01_ID:$bcsd03_ID" "${jobname}_run.j") + fi # Task 5: Monthly "BC" step applied to NMME (forecast_task_05.py: after 1 and 3) # ------------------------------------------------------------------------------ jobname=bcsd05 + cmdfile="bcsd05.file" for model in $MODELS do python $LISHDIR/s2s_modules/bcsd_fcst/forecast_task_05.py -s $YYYY -e $YYYY -m $mmm -n $MM -c $BWD/$CFILE -w ${CWD} -t 1 -H 3 -M $model -j $jobname @@ -659,13 +670,11 @@ bcsd_fcst(){ done bcsd05_ID=`echo $bcsd05_ID | sed "s| |:|g"` if [ $GROUP_JOBS == "Y" ]; then - jobname=bcsd04-05 - /bin/rm bcsd04*.j + bcsd05_ID= /bin/rm bcsd05*.j python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C ${cmdfile} /bin/rm ${cmdfile} - bcsd04_ID=$(submit_job 
"$bcsd01_ID:$bcsd03_ID" "${jobname}_run.j") - bcsd05_ID= + bcsd05_ID=$(submit_job "$bcsd01_ID:$bcsd03_ID" "${jobname}_run.j") fi # Task 6: CFSv2 Temporal Disaggregation (forecast_task_06.py: after 4 and 5) @@ -693,7 +702,7 @@ bcsd_fcst(){ /bin/rm bcsd06*.j python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C ${cmdfile} /bin/rm ${cmdfile} - bcsd06_ID=$(submit_job "$bcsd04_ID" "${jobname}_run.j") + bcsd06_ID=$(submit_job "$bcsd04_ID:$bcsd05_ID" "${jobname}_run.j") fi # Task 7: Generate symbolic links to sub-daily CFSv2 BC forecasts for NMME diff --git a/lis/utils/usaf/s2s/s2s_modules/shared/utils.py b/lis/utils/usaf/s2s/s2s_modules/shared/utils.py index bddb6ca2f..c7c095342 100644 --- a/lis/utils/usaf/s2s/s2s_modules/shared/utils.py +++ b/lis/utils/usaf/s2s/s2s_modules/shared/utils.py @@ -35,7 +35,7 @@ #pylint: disable=consider-using-f-string, too-many-statements, too-many-locals, too-many-arguments def job_script(s2s_configfile, jobfile, job_name, ntasks, hours, cwd, in_command = None, - command2 = None, command_list = None): + command2 = None, command_list = None, group_jobs=None): ''' writes SLURM job script ''' if in_command is None: this_command = 'COMMAND' @@ -63,10 +63,7 @@ def job_script(s2s_configfile, jobfile, job_name, ntasks, hours, cwd, in_command _f.write('#######################################################################' + '\n') _f.write('\n') _f.write('#SBATCH --account=' + sponsor_code + '\n') - if command_list is None: - _f.write('#SBATCH --ntasks=' + ntasks + '\n') - else: - _f.write('#SBATCH --nodes=1' + '\n') + _f.write('#SBATCH --ntasks=' + ntasks + '\n') _f.write('#SBATCH --time=' + hours + ':00:00' + '\n') if 'discover' in platform.node() or 'borg' in platform.node(): _f.write('#SBATCH --constraint=' + cfg['SETUP']['CONSTRAINT'] + '\n') @@ -97,14 +94,19 @@ def job_script(s2s_configfile, jobfile, job_name, ntasks, hours, cwd, in_command _f.write('\n') _f.write('cd ' + cwd + '\n') - if 
command_list is None: - _f.write( this_command + ' || exit 1' + '\n') - _f.write( sec_command + '\n') + + if command_list is None and group_jobs is None: + _f.write(f"{this_command} || exit 1\n") + _f.write(f"{sec_command}\n") else: - for this_command in command_list: - _f.write("nohup " + this_command + ' &' + '\n') - _f.write("wait " + '\n') - #print(len(command_list)) + if group_jobs: + for cmd in group_jobs: + _f.write(f"nohup {cmd} &\n") + _f.write("wait\n") + if command_list: + for cmd in command_list: + _f.write(f"{cmd}\n") + _f.write('\n') _f.write('echo "[INFO] Completed ' + job_name + '!"' + '\n') _f.write('\n') From b17542f7907b78742bea16c9025d8c627fa2777d Mon Sep 17 00:00:00 2001 From: Sarith Mahanama Date: Wed, 17 Jul 2024 12:46:19 -0400 Subject: [PATCH 03/40] increased wall times --- lis/utils/usaf/s2s/s2s_app/s2s_run.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lis/utils/usaf/s2s/s2s_app/s2s_run.sh b/lis/utils/usaf/s2s/s2s_app/s2s_run.sh index b7733ef54..5efb3d8e9 100755 --- a/lis/utils/usaf/s2s/s2s_app/s2s_run.sh +++ b/lis/utils/usaf/s2s/s2s_app/s2s_run.sh @@ -672,7 +672,7 @@ bcsd_fcst(){ if [ $GROUP_JOBS == "Y" ]; then bcsd05_ID= /bin/rm bcsd05*.j - python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C ${cmdfile} + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 4 -j ${jobname}_ -w ${CWD} -C ${cmdfile} /bin/rm ${cmdfile} bcsd05_ID=$(submit_job "$bcsd01_ID:$bcsd03_ID" "${jobname}_run.j") fi @@ -972,7 +972,7 @@ s2spost(){ s2spost_ID=`echo $s2spost_ID | sed "s| |:|g"` if [ $GROUP_JOBS == "Y" ]; then /bin/rm s2spost_*.j - python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C ${cmdfile} + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 4 -j ${jobname}_ -w ${CWD} -C ${cmdfile} /bin/rm ${cmdfile} s2spost_ID=$(submit_job "$lisfcst_ID" "${jobname}_run.j") fi From 
fd7a24598a1c8bee341d284560a58b1f2014d72a Mon Sep 17 00:00:00 2001 From: Sarith Mahanama Date: Wed, 17 Jul 2024 14:01:51 -0400 Subject: [PATCH 04/40] increased walltime for few other jobs for milan --- lis/utils/usaf/s2s/s2s_app/s2s_run.sh | 6 +++--- lis/utils/usaf/s2s/s2s_modules/shared/utils.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/lis/utils/usaf/s2s/s2s_app/s2s_run.sh b/lis/utils/usaf/s2s/s2s_app/s2s_run.sh index 5efb3d8e9..961c9c43e 100755 --- a/lis/utils/usaf/s2s/s2s_app/s2s_run.sh +++ b/lis/utils/usaf/s2s/s2s_app/s2s_run.sh @@ -700,7 +700,7 @@ bcsd_fcst(){ bcsd06_ID=`echo $bcsd06_ID | sed "s| |:|g"` if [ $GROUP_JOBS == "Y" ]; then /bin/rm bcsd06*.j - python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C ${cmdfile} + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 4 -j ${jobname}_ -w ${CWD} -C ${cmdfile} /bin/rm ${cmdfile} bcsd06_ID=$(submit_job "$bcsd04_ID:$bcsd05_ID" "${jobname}_run.j") fi @@ -739,7 +739,7 @@ bcsd_fcst(){ bcsd08_ID=`echo $bcsd08_ID | sed "s| |:|g"` if [ $GROUP_JOBS == "Y" ]; then /bin/rm bcsd08*.j - python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C ${cmdfile} + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 4 -j ${jobname}_ -w ${CWD} -C ${cmdfile} /bin/rm ${cmdfile} bcsd08_ID=$(submit_job "$bcsd06_ID" "${jobname}_run.j") fi @@ -822,7 +822,7 @@ bcsd_fcst(){ jobname=bcsd11-12 /bin/rm bcsd11*.j /bin/rm bcsd12*.j - python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C ${cmdfile} + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 4 -j ${jobname}_ -w ${CWD} -C ${cmdfile} /bin/rm ${cmdfile} bcsd11_ID=$(submit_job "$bcsd09_ID" "${jobname}_run.j") bcsd12_ID= diff --git a/lis/utils/usaf/s2s/s2s_modules/shared/utils.py b/lis/utils/usaf/s2s/s2s_modules/shared/utils.py 
index c7c095342..4a2f23a08 100644 --- a/lis/utils/usaf/s2s/s2s_modules/shared/utils.py +++ b/lis/utils/usaf/s2s/s2s_modules/shared/utils.py @@ -102,7 +102,7 @@ def job_script(s2s_configfile, jobfile, job_name, ntasks, hours, cwd, in_command if group_jobs: for cmd in group_jobs: _f.write(f"nohup {cmd} &\n") - _f.write("wait\n") + _f.write("wait\n") if command_list: for cmd in command_list: _f.write(f"{cmd}\n") From aee953579089226c6f68048236ef65f20a987005 Mon Sep 17 00:00:00 2001 From: Eric Kemp Date: Wed, 17 Jul 2024 17:12:16 -0400 Subject: [PATCH 05/40] Start of ESPC-D support. Not usable yet. --- ldt/USAFSI/USAFSI_espcdMod.F90 | 721 +++++++++++++++++++++++++++++++++ ldt/USAFSI/USAFSI_run.F90 | 1 + 2 files changed, 722 insertions(+) create mode 100644 ldt/USAFSI/USAFSI_espcdMod.F90 diff --git a/ldt/USAFSI/USAFSI_espcdMod.F90 b/ldt/USAFSI/USAFSI_espcdMod.F90 new file mode 100644 index 000000000..6ca4c4cae --- /dev/null +++ b/ldt/USAFSI/USAFSI_espcdMod.F90 @@ -0,0 +1,721 @@ +!-----------------------BEGIN NOTICE -- DO NOT EDIT----------------------- +! NASA Goddard Space Flight Center +! Land Information System Framework (LISF) +! Version 7.4 +! +! Copyright (c) 2022 United States Government as represented by the +! Administrator of the National Aeronautics and Space Administration. +! All Rights Reserved. +!-------------------------END NOTICE -- DO NOT EDIT----------------------- +! +! MODULE: USAFSI_espcdMod +! +! REVISION HISTORY: +! 17 Jul 2024 Eric Kemp First version. (Based on USAF_gofsMod.F90) +! +! DESCRIPTION: +! Source code for reading US Navy ESPC-D data. +!------------------------------------------------------------------------- + +#include "LDT_misc.h" +#include "LDT_NetCDF_inc.h" + +module USAFSI_espcdMod + + ! Defaults + implicit none + private + + ! Public routines + public :: process_espcd_sst + public :: process_espcd_cice + +contains + + ! 
Find ESPCD CICE file on file system + subroutine find_espcd_cice_file(rootdir, region, & + yyyy, mm, dd, hh, fh, filename) + + ! Imports + use LDT_logMod, only: LDT_logunit + use LDT_timeMgrMod, only: LDT_get_julhr, LDT_julhr_date + + ! Defaults + implicit none + + ! Arguments + character(len=*), intent(in) :: rootdir + character*3, intent(in) :: region + integer, intent(inout) :: yyyy + integer, intent(inout) :: mm + integer, intent(inout) :: dd + integer, intent(inout) :: hh + integer, intent(inout) :: fh + character*255, intent(out) :: filename + + ! Locals + integer :: julhr, julhr_orig + logical :: file_exists + + ! Build the file name. Note that all ESPC-D CICE runs start at 12Z. + call LDT_get_julhr(yyyy, mm, dd, 12, 0, 0, julhr) + if (hh >= 12) then + julhr_orig = julhr + fh = 0 + else + julhr_orig = julhr - 24 ! Must use previous day's run + fh = 12 + end if + call LDT_julhr_date(julhr_orig, yyyy, mm, dd, hh) + call construct_espcd_cice_filename(rootdir, region, & + yyyy, mm, dd, hh, fh, filename) + + write(LDT_logunit,*) & + '------------------------------------------------------------------' + write(LDT_logunit,*)'[INFO] *** SEARCHING FOR ESPC-D CICE FOR ',& + trim(region),' REGION ***' + inquire(file=trim(filename), exist=file_exists) + if (file_exists) then + write(LDT_logunit,*)'[INFO] Will use ', trim(filename) + return + end if + + ! At this point, we are rolling back to earlier CICE file + ! Start looping for earlier files + julhr = julhr_orig + do + write(LDT_logunit,*)'[WARN] Cannot find ', trim(filename) + fh = fh + 24 + julhr = julhr - 24 + if ( (julhr_orig - julhr) > 24*5) then + write(LDT_logunit,*)& + '[WARN] *** GIVING UP ON ESPC-D CICE FOR ', & + trim(region),' ***' + write(LDT_logunit,*) & + '[WARN] *** NO ESPC-D CICE DATA FOR ',trim(region), & + ' AVAILABLE!!! 
***' + filename = 'NONE' + return + end if + call LDT_julhr_date(julhr, yyyy, mm, dd, hh) + + call construct_espcd_cice_filename(rootdir, region, & + yyyy, mm, dd, hh, fh, filename) + inquire(file=trim(filename), exist=file_exists) + if (file_exists) then + write(LDT_logunit,*)'[INFO] Will use ', trim(filename) + return + end if + end do + + end subroutine find_espcd_cice_file + + ! Builds path to ESPC-D CICE netcdf file + subroutine construct_espcd_cice_filename(rootdir, region, & + yyyy, mm, dd, hh, fh, filename) + + ! Defaults + implicit none + + ! Arguments + character(len=*), intent(in) :: rootdir + character*3, intent(in) :: region + integer, intent(in) :: yyyy + integer, intent(in) :: mm + integer, intent(in) :: dd + integer, intent(in) :: hh + integer, intent(in) :: fh + character*255, intent(out) :: filename + + ! Local variables + character*10 :: yyyymmddhh + character*5 :: thhhh + + write(yyyymmddhh,'(i4.4,i2.2,i2.2,i2.2)') yyyy, mm, dd, hh + write(thhhh,'(a1,i4.4)') 't', fh + + filename = trim(rootdir) // '/espc-d_031_cice_' // trim(region) & + // 'u0.04_' // yyyymmddhh // '_' // thhhh // '.nc' + + end subroutine construct_espcd_cice_filename + + ! Find ESPC-D SST file on file system + subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, fh, & + filename) + + ! Imports + use LDT_logMod, only: LDT_logunit + use LDT_timeMgrMod, only: LDT_get_julhr, LDT_julhr_date + + ! Defaults + implicit none + + ! Arguments + character(len=*), intent(in) :: rootdir + integer, intent(inout) :: yyyy + integer, intent(inout) :: mm + integer, intent(inout) :: dd + integer, intent(inout) :: hh + integer, intent(inout) :: fh + character*255, intent(inout) :: filename + + ! Locals + integer :: julhr, julhr_orig + logical :: file_exists + + ! Build the file name. Note that all ESPC-D SST runs start at 00Z. 
+ if (hh < 6) then + fh = 0 + else if (hh < 12) then + fh = 6 + else if (hh < 18) then + fh = 12 + else + fh = 18 + end if + hh = 0 + call construct_espcd_sst_filename(rootdir, & + yyyy, mm, dd, hh, fh, filename) + + ! Check if file exists + write(LDT_logunit,*) & + '------------------------------------------------------------------' + write(LDT_logunit,*) & + '[INFO] *** SEARCHING FOR ESPC-D SST ***' + inquire(file=trim(filename), exist=file_exists) + if (file_exists) then + write(LDT_logunit,*)'[INFO] Will use ',trim(filename) + return + end if + + ! At this point, we are rolling back to earlier SST file + call LDT_get_julhr(yyyy, mm, dd, hh, 0, 0, julhr) + julhr_orig = julhr + + ! Start looping for earlier files + do + write(LDT_logunit,*)'[WARN] Cannot find ',trim(filename) + fh = fh - 6 + if (fh < 0) then + fh = 24 + julhr = julhr - 24 ! Roll back to previous 00Z cycle + ! Give up after 5 days + if ( (julhr_orig - julhr) > 24*5) then + write(LDT_logunit,*)"[WARN] *** GIVING UP ON ESPC-D SST! ***" + write(LDT_logunit,*) & + "[WARN] *** NO ESPC-D SST AVAILABLE!!! ***" + filename = "NONE" + return + end if + call LDT_julhr_date(julhr, yyyy, mm, dd, hh) + end if + + call construct_espcd_sst_filename(rootdir, & + yyyy, mm, dd, hh, fh, filename) + inquire(file=trim(filename), exist=file_exists) + if (file_exists) then + write(LDT_logunit,*)'[INFO] Will use ',trim(filename) + return + end if + end do + + end subroutine find_espcd_sst_file + + ! Builds path to ESPC-D SST netcdf file + subroutine construct_espcd_sst_filename(rootdir, & + yyyy, mm, dd, hh, fh, filename) + + ! Defaults + implicit none + + ! Arguments + character(len=*), intent(in) :: rootdir + integer, intent(in) :: yyyy + integer, intent(in) :: mm + integer, intent(in) :: dd + integer, intent(in) :: hh + integer, intent(in) :: fh + character*255, intent(out) :: filename + + ! 
Locals + character*10 :: yyyymmddhh + character*5 :: thhhh + + write(yyyymmddhh,'(i4.4,i2.2,i2.2,i2.2)') yyyy, mm, dd, hh + write(thhhh,'(a1,i4.4)') 't', fh + + filename = trim(rootdir) // "/espc-d_hycom_sfc_u_" // yyyymmddhh // & + "_" // thhhh // ".nc" + + end subroutine construct_espcd_sst_filename + + +#if (defined USE_NETCDF3 || defined USE_NETCDF4) + ! Read ESPC-D sea surface temperature and reproject to LDT grid + subroutine process_espcd_sst(rootdir, nc, nr, landmask, sst, & + yyyy, mm, dd, hh, fh, ierr) + + ! Imports + use LDT_coreMod, only: LDT_rc, LDT_domain + use LDT_logMod, only: LDT_verify, ldt_logunit + use netcdf + + ! Defaults + implicit none + + ! Arguments + character(len=*), intent(in) :: rootdir + integer, intent(in) :: nc + integer, intent(in) :: nr + real, intent(in) :: landmask(nc,nr) + real, intent(inout) :: sst(nc,nr) + integer, intent(inout) :: yyyy + integer, intent(inout) :: mm + integer, intent(inout) :: dd + integer, intent(inout) :: hh + integer, intent(inout) :: fh + integer, intent(out) :: ierr + + ! Locals + integer, parameter :: nlat = 8001 + integer, parameter :: nlon = 9000 + character*255 :: filename + integer :: ncid, water_temp_varid + real, allocatable :: water_temp(:,:,:,:) + real, allocatable :: water_temp_1d(:) + real, allocatable :: sst_1d(:) + integer :: c, r, c1, r1 + logical*1, allocatable :: lb(:) + logical*1, allocatable :: lo(:) + real :: griddesci(50) + real, allocatable :: n11(:) + integer :: gindex + real :: rlat + + ! External subroutines + external :: upscaleByAveraging_input + external :: upscaleByAveraging + + ! Find a valid file on the file system + call find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, fh, filename) + if (trim(filename) == "NONE") then + ierr = 1 + return + end if + + ! Open the file + call LDT_verify(nf90_open(path=trim(filename), & + mode=nf90_nowrite, & + ncid=ncid), & + "[ERR] Error in nf90_open for " // trim(filename)) + + write(ldt_logunit,*)'[INFO] Reading ', trim(filename) + + ! 
Get the varid for water_temp + call LDT_verify(nf90_inq_varid(ncid, "water_temp", & + water_temp_varid), & + "[ERR] Error in nf90_inq_varid for water_temp") + + ! Allocate the water_temp array + allocate(water_temp(nlon, nlat, 1, 1)) + + ! Pull from the ESPC-D file + call LDT_verify(nf90_get_var(ncid, water_temp_varid, water_temp), & + "[ERR] Error in nf90_get_var for water_temp") + + ! Close the file + call LDT_verify(nf90_close(ncid), & + "[ERR] Error in nf90_close for "// trim(filename)) + + ! We need to interpolate to the LDT grid. First, copy to 1D array + allocate(water_temp_1d(nlon*nlat*1*1)) + water_temp_1d = -9999.0 + allocate(lb(nlon*nlat*1*1)) + lb = .false. + do r = 1, nlat + do c = 1, nlon + if (water_temp(c,r,1,1) .eq. -30000) cycle ! Missing value + ! Convert from Celsius to Kelvin, taking into account the scale + ! factor and offset. Also, wrap the data so it starts at 180W + if (c .gt. 4500) then + c1 = c - 4500 + r1 = r + else + c1 = c + 4500 + r1 = r + end if + water_temp_1d(c1 + (r1-1)*nlon) = & + (0.001*water_temp(c,r,1,1)) + 20.0 + 273.15 + lb(c1 + (r1-1)*nlon) = .true. + end do ! c + end do ! r + deallocate(water_temp) + + ! Set up interpolation weights + gridDesci = 0 + gridDesci(1) = 0 ! Lat/lon projection + gridDesci(2) = nlon ! Number of columns + gridDesci(3) = nlat ! Number of rows + gridDesci(4) = -80.0 ! Lower-left latitude (deg) + gridDesci(5) = -180.0 ! Lower-left longitude (deg) + gridDesci(6) = 128 ! Not used + gridDesci(7) = 80.0 ! Upper-right latitude (deg) + gridDesci(8) = 180.0 ! Upper-right longitude(deg) + gridDesci(9) = 0.0400390625 ! Delta longitude (deg) + gridDesci(10) = 0.01999664306640625 ! Delta latitude (deg) + gridDesci(20) = 64 ! E-W ordering + allocate(n11(nlon*nlat)) + call upscaleByAveraging_input(gridDesci, LDT_rc%gridDesc, & + nlon*nlat, nc*nr, n11) + + ! Now interpolate + allocate(sst_1d(nc*nr)) + sst_1d = -9999. + allocate(lo(nc*nr)) + lo = .false. 
+ call upscaleByAveraging(nlon*nlat, nc*nr, -9999., & + n11, lb, water_temp_1d, lo, sst_1d) + + ! Since SST is missing north of 80N, we need to set water points in + ! this region to a reasonable value. We follow the typical + ! UKMET SURF value of 271.35K. + sst = -1 + do r = 1, nr + do c = 1, nc + ! Skip land points + if (landmask(c,r) >= 0.5) cycle + + gindex = c + (r-1)*nc + rlat = LDT_domain(1)%lat(gindex) + if (rlat >= 80.) then + sst(c,r) = 271.35 + else + if (sst_1d(gindex) > 0) then + sst(c,r) = sst_1d(gindex) + end if + end if + end do ! c + end do ! r + + ! Clean up + deallocate(water_temp_1d) + deallocate(lb) + deallocate(lo) + deallocate(sst_1d) + deallocate(n11) + + ! The end + ierr = 0 + end subroutine process_espcd_sst + +#else + + ! Dummy version with no netCDF support + subroutine process_espcd_sst(rootdir, nc, nr, landmask, sst, & + yyyy, mm, dd, hh, fh, vierr) + + ! Imports + use LDT_logMod, only: LDT_logunit, LDT_endrun + + ! Defaults + implicit none + + ! Arguments + character(len=*), intent(in) :: rootdir + integer, intent(in) :: nc + integer, intent(in) :: nr + real, intent(in) :: landmask(nc,nr) + real, intent(inout) :: sst(nc,nr) + integer, intent(inout) :: yyyy + integer, intent(inout) :: mm + integer, intent(inout) :: dd + integer, intent(inout) :: hh + integer, intent(inout) :: fh + integer, intent(out) :: ierr + + write(LDT_logunit,*) & + '[ERR] LDT was compiled without netCDF support!' + write(LDT_logunit,*) "[ERR] Recompile and try again!" + ierr = 1 + call LDT_endrun() + end subroutine process_espcd_sst + +#endif + + ! Read ESPC-D sea ice and reproject to LDT grid + subroutine process_espcd_cice(rootdir, nc, nr, landmask, icecon, & + yyyy, mm, dd, hh, fh, ierr) + + ! Imports + use LDT_coreMod, only: LDT_domain + + ! Defaults + implicit none + + ! 
Arguments + character(len=*), intent(in) :: rootdir + integer, intent(in) :: nc + integer, intent(in) :: nr + real, intent(in) :: landmask(nc,nr) + real, intent(inout) :: icecon(nc,nr) + integer, intent(inout) :: yyyy + integer, intent(inout) :: mm + integer, intent(inout) :: dd + integer, intent(inout) :: hh + integer, intent(inout) :: fh + integer, intent(out) :: ierr + + ! Locals + real, allocatable :: icecon_arc(:,:) + real, allocatable :: icecon_ant(:,:) + integer :: c, r + integer :: gindex + real :: rlat + + ! First handle Arctic region + call process_espcd_cice_region('ARC', rootdir, nc, nr, landmask, & + yyyy, mm, dd, hh, fh, icecon_arc, ierr) + if (ierr .ne. 0) then + if (allocated(icecon_arc)) deallocate(icecon_arc) + return + end if + + ! Next handle Antarctic region + call process_espcd_cice_region('ANT', rootdir, nc, nr, landmask, & + yyyy, mm, dd, hh, fh, icecon_ant, ierr) + if (ierr .ne. 0) then + if (allocated(icecon_arc)) deallocate(icecon_arc) + if (allocated(icecon_ant)) deallocate(icecon_ant) + return + end if + + ! Merge the two regions together + icecon = -1 + do r = 1, nr + do c = 1, nc + ! Skip land points + if (landmask(c,r) > 0.5) cycle + + gindex = c + (r-1)*nc + rlat = LDT_domain(1)%lat(gindex) + if (rlat >= 0) then + icecon(c,r) = icecon_arc(c,r) + else + icecon(c,r) = icecon_ant(c,r) + end if + + end do ! c + end do ! r + + ! Clean up + deallocate(icecon_arc) + deallocate(icecon_ant) + ierr = 0 + end subroutine process_espcd_cice + +#if (defined USE_NETCDF3 || defined USE_NETCDF4) + + ! Process a particular region of ESPC-D CICE data (Arctic or Antarctic + subroutine process_espcd_cice_region(region, rootdir, nc, nr, & + landmask, yyyy, mm, dd, hh, fh, icecon, ierr) + + ! Imports + use LDT_coreMod, only: LDT_rc + use LDT_logMod, only: LDT_logunit, LDT_endrun, LDT_verify + use netcdf + + ! 
Arguments + character*3, intent(in) :: region + character(len=*), intent(in) :: rootdir + integer, intent(in) :: nc + integer, intent(in) :: nr + real, intent(in) :: landmask(nc,nr) + integer, intent(inout) :: yyyy + integer, intent(inout) :: mm + integer, intent(inout) :: dd + integer, intent(inout) :: hh + integer, intent(inout) :: fh + real, allocatable, intent(out) :: icecon(:,:) + integer, intent(out) :: ierr + + ! Locals + integer, parameter :: nlat_arc = 2501 + integer, parameter :: nlat_ant = 1549 + integer, parameter :: nlon = 9000 + character*255 :: filename + integer :: ncid, aice_varid + real, allocatable :: aice(:,:,:) + real, allocatable :: aice_1d(:) + real, allocatable :: icecon_1d(:) + integer :: c, r + logical*1, allocatable :: lb(:) + logical*1, allocatable :: lo(:) + real :: griddesci(50) + real, allocatable :: n11(:) + integer :: gindex, nlat + + ! External subroutines + external :: upscaleByAveraging_input + external :: upscaleByAveraging + + ! Sanity check the region + if (region .eq. 'ARC') then + nlat = nlat_arc + else if (region .eq. 'ANT') then + nlat = nlat_ant + else + write(LDT_logunit,*)'[ERR] Invalid ESPC-D region for cice: ' & + // region + write(LDT_logunit,*)'[ERR] Must be either ARC or ANT' + ierr = 1 + call LDT_endrun() + end if + + ! Find a valid file on the file system + call find_espcd_cice_file(rootdir, region, yyyy, mm, dd, hh, fh, & + filename) + if (trim(filename) == "NONE") then + ierr = 1 + return + end if + + ! Open the file + call LDT_verify(nf90_open(path=trim(filename), & + mode=nf90_nowrite, & + ncid=ncid), & + "[ERR] Error in nf90_open for " // trim(filename)) + write(ldt_logunit,*)'[INFO] Reading ', trim(filename) + + ! Get the varid for aice + call LDT_verify(nf90_inq_varid(ncid, "aice", aice_varid), & + "[ERR] Error in nf90_inq_varid for aice") + + ! Allocate the aice array + allocate(aice(nlon, nlat, 1)) + + ! 
Pull from the ESPC-D file + call LDT_verify(nf90_get_var(ncid, aice_varid, aice), & + "[ERR] Error in nf90_get_var for aice") + + ! Close the file + call LDT_verify(nf90_close(ncid), & + "[ERR] Error in nf90_close for "// trim(filename)) + + ! We need to interpolate to the LDT grid. First, copy to 1D array + allocate(aice_1d(nlon*nlat*1)) + aice_1d = -9999 + allocate(lb(nlon*nlat*1)) + lb = .false. + do r = 1, nlat + do c = 1, nlon + if (aice(c,r,1) .eq. -30000) cycle + + ! Take into account the scale factor, and convert to % + aice_1d(c + (r-1)*nlon) = & + aice(c,r,1)*0.0001*100 + lb(c + (r-1)*nlon) = .true. + end do ! c + end do ! r + deallocate(aice) + + ! Set up interpolation weights + if (region .eq. 'ARC') then + gridDesci = 0 + gridDesci(1) = 0 ! Lat/lon projection + gridDesci(2) = nlon ! Number of columns + gridDesci(3) = nlat ! Number of rows + gridDesci(4) = 40. ! Lower-left latitude (deg N) + gridDesci(5) = -180.0 ! Lower-left longitude (deg E) + gridDesci(6) = 128 ! Not used + gridDesci(7) = 90.0 ! Upper-right latitude (deg N) + gridDesci(8) = 179.9599609375 ! Upper-right longitude (deg E) + gridDesci(9) = 0.039978027344005795 ! delta-lon (deg) + gridDesci(10) = 0.0200004577637 ! delta-lat (deg) + gridDesci(20) = 64 ! East-west ordering + else if (region .eq. 'ANT') then + gridDesci = 0 + gridDesci(1) = 0 ! Lat/lon projection + gridDesci(2) = nlon ! Number of columns + gridDesci(3) = nlat ! Number of rows + gridDesci(4) = -80.4800033569336 ! Lower-left latitude (deg N) + gridDesci(5) = -180.0 ! Lower-left longitude (deg E) + gridDesci(6) = 128 ! Not used + gridDesci(7) = -49.5200004577637 ! Upper-right latitude (deg N) + gridDesci(8) = 179.9599609375 ! Upper-right longitude (deg E) + gridDesci(9) = 0.039978027344005795 ! Delta-lon (deg) + gridDesci(10) = 0.020004272460894867 ! Delta-lat (deg) + gridDesci(20) = 64 ! 
East-west ordering + end if + allocate(n11(nlon*nlat)) + + call upscaleByAveraging_input(gridDesci, LDT_rc%gridDesc, & + nlon*nlat, nc*nr, n11) + + ! Now interpolate + allocate(icecon_1d(nc*nr)) + icecon_1d = -9999 + allocate(lo(nc*nr)) + lo = .false. + call upscaleByAveraging(nlon*nlat, nc*nr, -9999., & + n11, lb, aice_1d, lo, icecon_1d) + + ! Just copy the non-missing values to the output array. This should + ! prevent overwriting of data outside of the ESPC-D polar region. + allocate(icecon(nc,nr)) + icecon = 0.0 + do r = 1, nr + do c = 1, nc + ! Skip land points + if (landmask(c,r) >= 0.5) cycle + + gindex = c + (r-1)*nc + if (icecon_1d(gindex) .ne. -9999) then + icecon(c,r) = icecon_1d(gindex) + end if + end do ! c + end do ! r + + ! Clean up + deallocate(aice_1d) + deallocate(lb) + deallocate(lo) + deallocate(icecon_1d) + deallocate(n11) + + ! The end + ierr = 0 + end subroutine process_espcd_cice_region + +#else + ! Dummy version + subroutine process_espcd_cice_region(region, rootdir, nc, nr, & + landmask, icecon, yyyy, mm, dd, hh, fh, ierr) + + ! Imports + use LDT_logMod, only: LDT_logunit, LDT_endrun + + ! Arguments + character*3, intent(in) :: region + character(len=*), intent(in) :: rootdir + integer, intent(in) :: nc + integer, intent(in) :: nr + real, intent(in) :: landmask(nc,nr) + real, intent(inout) :: icecon(nc,nr) + integer, intent(inout) :: yyyy + integer, intent(inout) :: mm + integer, intent(inout) :: dd + integer, intent(inout) :: hh + integer, intent(inout) :: fh + integer, intent(out) :: ierr + + write(LDT_logunit,*) & + '[ERR] LDT was compiled without netCDF support!' + write(LDT_logunit,*) "[ERR] Recompile and try again!" 
+ ierr = 1 + call LDT_endrun() + + end subroutine process_espcd_cice_region + +#endif + +end module USAFSI_espcdMod diff --git a/ldt/USAFSI/USAFSI_run.F90 b/ldt/USAFSI/USAFSI_run.F90 index 770f57932..315da2e2a 100644 --- a/ldt/USAFSI/USAFSI_run.F90 +++ b/ldt/USAFSI/USAFSI_run.F90 @@ -86,6 +86,7 @@ subroutine USAFSI_run(n) #endif use USAFSI_analysisMod use USAFSI_arraysMod, only: USAFSI_arrays + use USAFSI_espcdMod use USAFSI_galwemMod, only: USAFSI_get_galwem_t2m use USAFSI_gofsMod use USAFSI_lisMod, only: read_gr2_t2 From 5a534a3981154fd1c0ccea73a56fae0363141081 Mon Sep 17 00:00:00 2001 From: Sarith Mahanama Date: Thu, 18 Jul 2024 06:27:57 -0400 Subject: [PATCH 06/40] some minor modifications --- lis/utils/usaf/s2s/s2s_app/s2s_run.sh | 6 +++--- lis/utils/usaf/s2s/s2s_modules/shared/utils.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/lis/utils/usaf/s2s/s2s_app/s2s_run.sh b/lis/utils/usaf/s2s/s2s_app/s2s_run.sh index 961c9c43e..51af68399 100755 --- a/lis/utils/usaf/s2s/s2s_app/s2s_run.sh +++ b/lis/utils/usaf/s2s/s2s_app/s2s_run.sh @@ -570,9 +570,9 @@ bcsd_fcst(){ bcsd01_ID= /bin/rm bcsd01_*.j split -l 4 $cmdfile part_ - python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_01_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C "part_aa" - python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_02_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C "part_ab" - python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_03_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C "part_ac" + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_01_run.j -t 1 -H 3 -j ${jobname}_01_ -w ${CWD} -C "part_aa" + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_02_run.j -t 1 -H 3 -j ${jobname}_02_ -w ${CWD} -C "part_ab" + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_03_run.j -t 1 -H 3 -j ${jobname}_03_ -w ${CWD} -C "part_ac" /bin/rm ${cmdfile} "part_aa" "part_ab" "part_ac" bcsd01_ID=$(submit_job "" 
"${jobname}_01_run.j") thisID=$(submit_job "" "${jobname}_02_run.j") diff --git a/lis/utils/usaf/s2s/s2s_modules/shared/utils.py b/lis/utils/usaf/s2s/s2s_modules/shared/utils.py index 4a2f23a08..f820bddb4 100644 --- a/lis/utils/usaf/s2s/s2s_modules/shared/utils.py +++ b/lis/utils/usaf/s2s/s2s_modules/shared/utils.py @@ -134,7 +134,7 @@ def read_out (this_no, nfiles, ofile, job_file): if (not re.search(pattern_not1, line)) and (not re.search(pattern_not2, line)): _l2 = [int(x) for x in line.split(":")[1:4]] if re.search(pattern_sbu, line): - sbu = np.float (line.split(":")[1]) + sbu = float (line.split(":")[1]) file.close() print ('{:>3}/{:>3} {:<35}{:>2}h {:>2}m {:>2}s'.format (this_no,nfiles,job_file,_l2[0],_l2[1],_l2[2])) From 64d8fb6a8cbe8f3bd54f691ce93d8c66a31fd2a7 Mon Sep 17 00:00:00 2001 From: Eric Kemp Date: Thu, 18 Jul 2024 09:38:07 -0400 Subject: [PATCH 07/40] Integrated GSPC-D logic into USAFSI. Not tested yet. --- ldt/USAFSI/LDT_usafsiMod.F90 | 68 +++++++++++++++++++++++++------ ldt/USAFSI/USAFSI_analysisMod.F90 | 12 +++--- ldt/USAFSI/USAFSI_arraysMod.F90 | 2 +- ldt/USAFSI/USAFSI_run.F90 | 65 +++++++++++++++++------------ 4 files changed, 103 insertions(+), 44 deletions(-) diff --git a/ldt/USAFSI/LDT_usafsiMod.F90 b/ldt/USAFSI/LDT_usafsiMod.F90 index 57451c4ea..daf758d0e 100644 --- a/ldt/USAFSI/LDT_usafsiMod.F90 +++ b/ldt/USAFSI/LDT_usafsiMod.F90 @@ -77,8 +77,11 @@ module LDT_usafsiMod ! Other new settings real :: fill_climo + character*255 :: source_of_ocean_data ! EMK 20240718 character*255 :: gofs_sst_dir character*255 :: gofs_cice_dir + character*255 :: espcd_sst_dir ! EMK 20240718 + character*255 :: espcd_cice_dir ! EMK 20240718 character*255 :: lis_grib2_dir character*20 :: security_class character*20 :: data_category @@ -106,7 +109,7 @@ subroutine LDT_usafsiInit() ! Imports use ESMF use LDT_coreMod, only: LDT_config - use LDT_logMod, only: LDT_verify + use LDT_logMod, only: LDT_verify, LDT_logunit, LDT_endrun ! 
Defaults implicit none @@ -497,23 +500,64 @@ subroutine LDT_usafsiInit() rc=rc) call LDT_verify(rc, trim(cfg_entry)//" not specified") - ! Get gofs_sst_dir - cfg_entry = "USAFSI GOFS SST data directory:" + ! EMK 20240718...Specify source of ocean data. + cfg_entry = "USAFSI Source of ocean data:" call ESMF_ConfigFindLabel(LDT_config, trim(cfg_entry), rc=rc) call LDT_verify(rc, trim(cfg_entry)//" not specified") call ESMF_ConfigGetAttribute(LDT_config, & - usafsi_settings%gofs_sst_dir, & + usafsi_settings%source_of_ocean_data, & rc=rc) call LDT_verify(rc, trim(cfg_entry)//" not specified") + if (usafsi_settings%source_of_ocean_data .ne. "GOFS" .and. & + usafsi_settings%source_of_ocean_data .ne. "ESPC-D") then + write(LDT_logunit,*)'[ERR] Unrecognized source of ocean data' + write(LDT_logunit,*)'[ERR] Must be GOFS or ESPC-D' + write(LDT_logunit,*) & + "[ERR] Update entry for 'USAFSI Source of ocean data:'" + write(LDT_logunit,*)'[ERR] LDT will halt.' + call LDT_endrun() + end if - ! Get gofs_cice_dir - cfg_entry = "USAFSI GOFS CICE data directory:" - call ESMF_ConfigFindLabel(LDT_config, trim(cfg_entry), rc=rc) - call LDT_verify(rc, trim(cfg_entry)//" not specified") - call ESMF_ConfigGetAttribute(LDT_config, & - usafsi_settings%gofs_cice_dir, & - rc=rc) - call LDT_verify(rc, trim(cfg_entry)//" not specified") + if (usafsi_settings%source_of_ocean_data == "GOFS") then + ! Get gofs_sst_dir + cfg_entry = "USAFSI GOFS SST data directory:" + call ESMF_ConfigFindLabel(LDT_config, trim(cfg_entry), rc=rc) + call LDT_verify(rc, trim(cfg_entry)//" not specified") + call ESMF_ConfigGetAttribute(LDT_config, & + usafsi_settings%gofs_sst_dir, & + rc=rc) + call LDT_verify(rc, trim(cfg_entry)//" not specified") + + ! 
Get gofs_cice_dir + cfg_entry = "USAFSI GOFS CICE data directory:" + call ESMF_ConfigFindLabel(LDT_config, trim(cfg_entry), rc=rc) + call LDT_verify(rc, trim(cfg_entry)//" not specified") + call ESMF_ConfigGetAttribute(LDT_config, & + usafsi_settings%gofs_cice_dir, & + rc=rc) + call LDT_verify(rc, trim(cfg_entry)//" not specified") + + else if (usafsi_settings%source_of_ocean_data == "ESPC-D") then + + ! Get espcd_sst_dir + cfg_entry = "USAFSI ESPC-D SST data directory:" + call ESMF_ConfigFindLabel(LDT_config, trim(cfg_entry), rc=rc) + call LDT_verify(rc, trim(cfg_entry)//" not specified") + call ESMF_ConfigGetAttribute(LDT_config, & + usafsi_settings%espcd_sst_dir, & + rc=rc) + call LDT_verify(rc, trim(cfg_entry)//" not specified") + + ! Get espcd_cice_dir + cfg_entry = "USAFSI ESPC-D CICE data directory:" + call ESMF_ConfigFindLabel(LDT_config, trim(cfg_entry), rc=rc) + call LDT_verify(rc, trim(cfg_entry)//" not specified") + call ESMF_ConfigGetAttribute(LDT_config, & + usafsi_settings%espcd_cice_dir, & + rc=rc) + call LDT_verify(rc, trim(cfg_entry)//" not specified") + + end if ! Get lis_grib2_dir cfg_entry = "USAFSI LIS GRIB2 data directory:" diff --git a/ldt/USAFSI/USAFSI_analysisMod.F90 b/ldt/USAFSI/USAFSI_analysisMod.F90 index 89fdcb6ca..091df5166 100644 --- a/ldt/USAFSI/USAFSI_analysisMod.F90 +++ b/ldt/USAFSI/USAFSI_analysisMod.F90 @@ -44,7 +44,7 @@ module USAFSI_analysisMod public :: run_snow_analysis_noglacier ! EMK public :: run_snow_analysis_glacier ! EMK public :: run_seaice_analysis_ssmis ! EMK - public :: run_seaice_analysis_gofs ! EMK + public :: run_seaice_analysis_navy ! EMK public :: getclimo ! Yeosang Yoon ! Internal constant @@ -3357,8 +3357,8 @@ subroutine run_seaice_analysis_ssmis(month,runcycle,nc,nr,landmask) end subroutine run_seaice_analysis_ssmis - ! Update sea ice based on remapped US Navy GOFS data - subroutine run_seaice_analysis_gofs(month, runcycle, nc, nr, landmask) + ! 
Update sea ice based on remapped US Navy GOFS/ESPC-D data + subroutine run_seaice_analysis_navy(month, runcycle, nc, nr, landmask) ! Imports use LDT_usafsiMod, only: usafsi_settings @@ -3402,10 +3402,10 @@ subroutine run_seaice_analysis_gofs(month, runcycle, nc, nr, landmask) ! Use the GOFS data if available. Otherwise, try to fall back ! on prior analysis subject to certain constraints. - if (USAFSI_arrays%gofs_icecon(c,r) >= 0) then + if (USAFSI_arrays%navy_icecon(c,r) >= 0) then ! We have valid GOFS data USAFSI_arrays%icecon(c,r) = & - nint(USAFSI_arrays%gofs_icecon(c,r)) + nint(USAFSI_arrays%navy_icecon(c,r)) if (USAFSI_arrays%icecon(c,r) > usafsi_settings%minice) then USAFSI_arrays%icemask(c,r) = icepnt else @@ -3482,7 +3482,7 @@ subroutine run_seaice_analysis_gofs(month, runcycle, nc, nr, landmask) end do ! c end do ! r - end subroutine run_seaice_analysis_gofs + end subroutine run_seaice_analysis_navy ! Private subroutine subroutine summer (obelev, hemi, oblat, month, towarm) diff --git a/ldt/USAFSI/USAFSI_arraysMod.F90 b/ldt/USAFSI/USAFSI_arraysMod.F90 index 59cdffed1..cc2b73fba 100644 --- a/ldt/USAFSI/USAFSI_arraysMod.F90 +++ b/ldt/USAFSI/USAFSI_arraysMod.F90 @@ -65,7 +65,7 @@ module USAFSI_arraysMod real, allocatable :: snofrac ( : , : ) ! FRACTIONAL SNOW DATA ON USAFSI GRID real, allocatable :: ssmis_depth ( : , : ) ! SNOW DEPTH FROM SSMIS EDRS real, allocatable :: sst ( : , : ) ! NAVY SEA SURFACE TEMPERATURES (KELVIN) - real, allocatable :: gofs_icecon(:,:) + real, allocatable :: navy_icecon(:,:) end type USAFSI_arrays_t type(USAFSI_arrays_t), public :: USAFSI_arrays diff --git a/ldt/USAFSI/USAFSI_run.F90 b/ldt/USAFSI/USAFSI_run.F90 index 315da2e2a..3953fb499 100644 --- a/ldt/USAFSI/USAFSI_run.F90 +++ b/ldt/USAFSI/USAFSI_run.F90 @@ -147,7 +147,7 @@ subroutine USAFSI_run(n) integer :: maxsobs integer :: yyyy, mm, dd, hh, fh integer :: ierr - logical :: found_gofs_cice + logical :: found_navy_cice logical :: just_12z ! 
PMW snow depth retrievals, Yeosang Yoon @@ -231,7 +231,7 @@ subroutine USAFSI_run(n) allocate (USAFSI_arrays%ssmis_icecon (nc, nr)) allocate (USAFSI_arrays%sst (nc, nr)) allocate (USAFSI_arrays%viirsmap (nc, nr)) - allocate (USAFSI_arrays%gofs_icecon(nc,nr)) + allocate (USAFSI_arrays%navy_icecon(nc,nr)) ! RETRIEVE STATIC DATA SETS. write (LDT_logunit,*) '[INFO] CALLING GETGEO TO GET STATIC FIELDS' @@ -334,17 +334,25 @@ subroutine USAFSI_run(n) end if ! RETRIEVE NAVY SEA SURFACE TEMPERATURE (SST) DATA. - ! First try the US Navy 0.08 deg GOFS data + ! EMK 20240718...Try GOFS or ESPC-D read (date10(1: 4), '(i4)', err=4200) yyyy read (date10(5: 6), '(i2)', err=4200) mm read (date10(7: 8), '(i2)', err=4200) dd read (date10(9:10), '(i2)', err=4200) hh fh = 0 ! Dummy value - write (LDT_logunit,*) & - '[INFO] CALLING PROCESS_GOFS_SST TO GET SEA SURFACE TEMPERATURES' - call process_gofs_sst(usafsi_settings%gofs_sst_dir, & - nc, nr, landmask, usafSI_arrays%sst, & - yyyy, mm, dd, hh, fh, ierr) + if (usafsi_settings%source_of_ocean_data == "ESPC-D") then + write (LDT_logunit,*) & + '[INFO] CALLING PROCESS_ESPCD_SST TO GET SEA SURFACE TEMPERATURES' + call process_espcd_sst(usafsi_settings%espcd_sst_dir, & + nc, nr, landmask, usafSI_arrays%sst, & + yyyy, mm, dd, hh, fh, ierr) + else if (usafsi_settings%source_of_ocean_data == "GOFS") then + write (LDT_logunit,*) & + '[INFO] CALLING PROCESS_GOFS_SST TO GET SEA SURFACE TEMPERATURES' + call process_gofs_sst(usafsi_settings%gofs_sst_dir, & + nc, nr, landmask, usafSI_arrays%sst, & + yyyy, mm, dd, hh, fh, ierr) + end if if (ierr .ne. 0) then ! Fall back on legacy GETSST for 0.25 deg data. write (LDT_logunit,*) & @@ -397,23 +405,29 @@ subroutine USAFSI_run(n) deallocate(staelv) deallocate(stadep) - ! Try to get the GOFS sea ice data - write(LDT_logunit,*) & - '[INFO] CALLING PROCESS_GOFS_CICE TO GET GOFS SEA ICE DATA' + ! EMK 20240718...Choose between ESPC-D and GOFS. 
read (date10(1: 4), '(i4)', err=4200) yyyy read (date10(5: 6), '(i2)', err=4200) mm read (date10(7: 8), '(i2)', err=4200) dd read (date10(9:10), '(i2)', err=4200) hh fh = 0 ! Dummy value - call process_gofs_cice(usafsi_settings%gofs_cice_dir, & - nc, nr, landmask, USAFSI_arrays%gofs_icecon, & - yyyy, mm, dd, hh, fh, ierr) - if (ierr == 0) then - found_gofs_cice = .true. - else - found_gofs_cice = .false. + found_navy_cice = .false. + if (usafsi_settings%source_of_ocean_data == "ESPC-D") then + write(LDT_logunit,*) & + '[INFO] CALLING PROCESS_ESPCD_CICE TO GET GOFS SEA ICE DATA' + call process_espcd_cice(usafsi_settings%espcd_cice_dir, & + nc, nr, landmask, USAFSI_arrays%navy_icecon, & + yyyy, mm, dd, hh, fh, ierr) + if (ierr == 0) found_navy_cice = .true. + else if (usafsi_settings%source_of_ocean_data == "GOFS") then + ! Try to get the GOFS sea ice data + write(LDT_logunit,*) & + '[INFO] CALLING PROCESS_GOFS_CICE TO GET GOFS SEA ICE DATA' + call process_gofs_cice(usafsi_settings%gofs_cice_dir, & + nc, nr, landmask, USAFSI_arrays%navy_icecon, & + yyyy, mm, dd, hh, fh, ierr) + if (ierr == 0) found_navy_cice = .true. end if - !---------------------------------------------------------------------kyh20201118 ! Estimates TB-based snow depth if (usafsi_settings%TB_option == 1) then !SSMIS @@ -459,12 +473,13 @@ subroutine USAFSI_run(n) '[INFO] CALLING RUN_SNOW_ANALYSIS_GLACIER' call run_snow_analysis_glacier(runcycle, nc, nr, landmask, landice) - ! FIXME...Try using GOFS data first, and if unsuccessful, then run - ! the old SSMIS analysis. - if (found_gofs_cice) then + ! FIXME...Try using ESPC-D or GOFS data first, and if + ! unsuccessful, then run the old SSMIS analysis. 
+ if (found_navy_cice) then write(LDT_logunit,*) & - '[INFO] CALLING RUN_SEAICE_ANALYSIS_GOFS' - call run_seaice_analysis_gofs(month, runcycle, nc, nr, landmask) + '[INFO] CALLING RUN_SEAICE_ANALYSIS_NAVY' + call run_seaice_analysis_navy(month, runcycle, nc, nr, & + landmask) else write(LDT_logunit,*) & '[INFO] CALLING RUN_SEAICE_ANALYSIS_SSMIS' @@ -509,7 +524,7 @@ subroutine USAFSI_run(n) deallocate (usafsi_arrays%ssmis_icecon) deallocate (usafsi_arrays%sst) deallocate (usafsi_arrays%viirsmap) - deallocate (usafsi_arrays%gofs_icecon) + deallocate (usafsi_arrays%navy_icecon) deallocate(landmask) deallocate(elevations) deallocate(landice) From daaf8e3c1e16f65c39d311281e3b3242b7de2964 Mon Sep 17 00:00:00 2001 From: David Mocko Date: Thu, 18 Jul 2024 10:03:17 -0400 Subject: [PATCH 08/40] Change start date of SMOPS ASCAT v4 data to match our archive. This pull request changes the start data of the v4 (version 4) of the SMOPS ASCAT data. The new start date is 18Z 4 July 2024, which matches the start date of the v4 data in our archive. 
Resolves: #1581 --- lis/dataassim/obs/SMOPS_ASCATsm/SMOPS_ASCATsm_Mod.F90 | 2 +- lis/dataassim/obs/SMOPS_ASCATsm/read_SMOPS_ASCATsm.F90 | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/lis/dataassim/obs/SMOPS_ASCATsm/SMOPS_ASCATsm_Mod.F90 b/lis/dataassim/obs/SMOPS_ASCATsm/SMOPS_ASCATsm_Mod.F90 index 7ccd94c6d..d236f5697 100755 --- a/lis/dataassim/obs/SMOPS_ASCATsm/SMOPS_ASCATsm_Mod.F90 +++ b/lis/dataassim/obs/SMOPS_ASCATsm/SMOPS_ASCATsm_Mod.F90 @@ -603,7 +603,7 @@ subroutine SMOPS_ASCATsm_setup(k, OBS_State, OBS_Pert_State) call LIS_date2time(SMOPS_ASCATsm_struc(n)%version3_time,updoy,upgmt,& yr1,mo1,da1,hr1,mn1,ss1) - yr1 = 2024; mo1 = 4; da1 = 25; hr1 = 0; mn1 = 0; ss1 = 0 + yr1 = 2024; mo1 = 7; da1 = 4; hr1 = 18; mn1 = 0; ss1 = 0 call LIS_date2time(SMOPS_ASCATsm_struc(n)%version4_time,updoy,upgmt,& yr1,mo1,da1,hr1,mn1,ss1) diff --git a/lis/dataassim/obs/SMOPS_ASCATsm/read_SMOPS_ASCATsm.F90 b/lis/dataassim/obs/SMOPS_ASCATsm/read_SMOPS_ASCATsm.F90 index f4b4cdf26..ee7423edb 100755 --- a/lis/dataassim/obs/SMOPS_ASCATsm/read_SMOPS_ASCATsm.F90 +++ b/lis/dataassim/obs/SMOPS_ASCATsm/read_SMOPS_ASCATsm.F90 @@ -1125,9 +1125,9 @@ subroutine create_SMOPS_ASCATsm_filename(ndir, useRT, yr, mo,da, hr, conv, filen call ESMF_TimeSet(Ver4_blended_time, & yy = 2024, & - mm = 4, & - dd = 25, & - h = 0, & + mm = 7, & + dd = 4, & + h = 18, & m = 0, & s = 0, & calendar = LIS_calendar, & From fa76145303f54785185bde3be0adadeb0ec89e01 Mon Sep 17 00:00:00 2001 From: Sarith Mahanama Date: Thu, 18 Jul 2024 10:17:27 -0400 Subject: [PATCH 09/40] bcsd01 now 4 jobs --- lis/utils/usaf/s2s/s2s_app/s2s_run.sh | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/lis/utils/usaf/s2s/s2s_app/s2s_run.sh b/lis/utils/usaf/s2s/s2s_app/s2s_run.sh index 51af68399..b1356c363 100755 --- a/lis/utils/usaf/s2s/s2s_app/s2s_run.sh +++ b/lis/utils/usaf/s2s/s2s_app/s2s_run.sh @@ -569,16 +569,19 @@ bcsd_fcst(){ if [ $GROUP_JOBS == "Y" ]; then bcsd01_ID= /bin/rm bcsd01_*.j 
- split -l 4 $cmdfile part_ + split -l 3 $cmdfile part_ python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_01_run.j -t 1 -H 3 -j ${jobname}_01_ -w ${CWD} -C "part_aa" python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_02_run.j -t 1 -H 3 -j ${jobname}_02_ -w ${CWD} -C "part_ab" python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_03_run.j -t 1 -H 3 -j ${jobname}_03_ -w ${CWD} -C "part_ac" - /bin/rm ${cmdfile} "part_aa" "part_ab" "part_ac" + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_04_run.j -t 1 -H 3 -j ${jobname}_04_ -w ${CWD} -C "part_ad" + /bin/rm ${cmdfile} "part_aa" "part_ab" "part_ac" "part_ad" bcsd01_ID=$(submit_job "" "${jobname}_01_run.j") thisID=$(submit_job "" "${jobname}_02_run.j") bcsd01_ID=`echo $bcsd01_ID`' '$thisID thisID=$(submit_job "" "${jobname}_03_run.j") bcsd01_ID=`echo $bcsd01_ID`' '$thisID + thisID=$(submit_job "" "${jobname}_04_run.j") + bcsd01_ID=`echo $bcsd01_ID`' '$thisID bcsd01_ID=`echo $bcsd01_ID | sed "s| |:|g"` fi # Task 3: Rescale and reorganize NMME Data (forecast_task_03.py) From 3aeb10744c44ef22afc24f28c8526d83423e09c7 Mon Sep 17 00:00:00 2001 From: Sarith Mahanama Date: Thu, 18 Jul 2024 11:59:17 -0400 Subject: [PATCH 10/40] cas/sky needed an extra job --- lis/utils/usaf/s2s/s2s_app/s2s_run.sh | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/lis/utils/usaf/s2s/s2s_app/s2s_run.sh b/lis/utils/usaf/s2s/s2s_app/s2s_run.sh index b1356c363..f052ab8d0 100755 --- a/lis/utils/usaf/s2s/s2s_app/s2s_run.sh +++ b/lis/utils/usaf/s2s/s2s_app/s2s_run.sh @@ -640,9 +640,14 @@ bcsd_fcst(){ if [ $GROUP_JOBS == "Y" ]; then bcsd04_ID= /bin/rm bcsd04*.j - python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 4 -j ${jobname}_ -w ${CWD} -C ${cmdfile} - /bin/rm ${cmdfile} - bcsd04_ID=$(submit_job "$bcsd01_ID:$bcsd03_ID" "${jobname}_run.j") + split -l 4 $cmdfile part_ + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_01_run.j -t 1 
-H 3 -j ${jobname}_01_ -w ${CWD} -C "part_aa" + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_02_run.j -t 1 -H 3 -j ${jobname}_02_ -w ${CWD} -C "part_ab" + /bin/rm ${cmdfile} "part_aa" "part_ab" + bcsd04_ID=$(submit_job "$bcsd01_ID:$bcsd03_ID" "${jobname}_01_run.j") + thisID=$(submit_job "$bcsd01_ID:$bcsd03_ID" "${jobname}_02_run.j") + bcsd04_ID=`echo $bcsd04_ID`' '$thisID + bcsd04_ID=`echo $bcsd04_ID | sed "s| |:|g"` fi # Task 5: Monthly "BC" step applied to NMME (forecast_task_05.py: after 1 and 3) From 9867427f0d483d578542842b9a2dce7ebf35d3d1 Mon Sep 17 00:00:00 2001 From: Eric Kemp Date: Thu, 18 Jul 2024 16:13:44 -0400 Subject: [PATCH 11/40] Tweaked new config entries. --- ldt/USAFSI/LDT_usafsiMod.F90 | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ldt/USAFSI/LDT_usafsiMod.F90 b/ldt/USAFSI/LDT_usafsiMod.F90 index daf758d0e..c0545e7f0 100644 --- a/ldt/USAFSI/LDT_usafsiMod.F90 +++ b/ldt/USAFSI/LDT_usafsiMod.F90 @@ -501,7 +501,7 @@ subroutine LDT_usafsiInit() call LDT_verify(rc, trim(cfg_entry)//" not specified") ! EMK 20240718...Specify source of ocean data. - cfg_entry = "USAFSI Source of ocean data:" + cfg_entry = "USAFSI source of ocean data:" call ESMF_ConfigFindLabel(LDT_config, trim(cfg_entry), rc=rc) call LDT_verify(rc, trim(cfg_entry)//" not specified") call ESMF_ConfigGetAttribute(LDT_config, & @@ -513,7 +513,7 @@ subroutine LDT_usafsiInit() write(LDT_logunit,*)'[ERR] Unrecognized source of ocean data' write(LDT_logunit,*)'[ERR] Must be GOFS or ESPC-D' write(LDT_logunit,*) & - "[ERR] Update entry for 'USAFSI Source of ocean data:'" + "[ERR] Update entry for 'USAFSI source of ocean data:'" write(LDT_logunit,*)'[ERR] LDT will halt.' call LDT_endrun() end if From 094c0a26874bf64f2df446b9602a17a1073cfad0 Mon Sep 17 00:00:00 2001 From: Eric Kemp Date: Thu, 18 Jul 2024 16:14:23 -0400 Subject: [PATCH 12/40] Bug fixes to logic for fetching ESPC-D files. 
--- ldt/USAFSI/USAFSI_espcdMod.F90 | 49 +++++++++++++++++++--------------- 1 file changed, 28 insertions(+), 21 deletions(-) diff --git a/ldt/USAFSI/USAFSI_espcdMod.F90 b/ldt/USAFSI/USAFSI_espcdMod.F90 index 6ca4c4cae..ab1a30b88 100644 --- a/ldt/USAFSI/USAFSI_espcdMod.F90 +++ b/ldt/USAFSI/USAFSI_espcdMod.F90 @@ -46,29 +46,32 @@ subroutine find_espcd_cice_file(rootdir, region, & ! Arguments character(len=*), intent(in) :: rootdir character*3, intent(in) :: region - integer, intent(inout) :: yyyy - integer, intent(inout) :: mm - integer, intent(inout) :: dd - integer, intent(inout) :: hh - integer, intent(inout) :: fh + integer, intent(in) :: yyyy + integer, intent(in) :: mm + integer, intent(in) :: dd + integer, intent(in) :: hh + integer, intent(in) :: fh character*255, intent(out) :: filename ! Locals integer :: julhr, julhr_orig logical :: file_exists + integer :: yyyy_local, mm_local, dd_local, hh_local + integer :: fh_local ! Build the file name. Note that all ESPC-D CICE runs start at 12Z. call LDT_get_julhr(yyyy, mm, dd, 12, 0, 0, julhr) if (hh >= 12) then julhr_orig = julhr - fh = 0 + fh_local = 0 else julhr_orig = julhr - 24 ! 
Must use previous day's run - fh = 12 + fh_local = 12 end if - call LDT_julhr_date(julhr_orig, yyyy, mm, dd, hh) + call LDT_julhr_date(julhr_orig, yyyy_local, mm_local, dd_local, & + hh_local) call construct_espcd_cice_filename(rootdir, region, & - yyyy, mm, dd, hh, fh, filename) + yyyy_local, mm_local, dd_local, hh_local, fh_local, filename) write(LDT_logunit,*) & '------------------------------------------------------------------' @@ -85,7 +88,7 @@ subroutine find_espcd_cice_file(rootdir, region, & julhr = julhr_orig do write(LDT_logunit,*)'[WARN] Cannot find ', trim(filename) - fh = fh + 24 + fh_local = fh_local + 24 julhr = julhr - 24 if ( (julhr_orig - julhr) > 24*5) then write(LDT_logunit,*)& @@ -97,10 +100,11 @@ subroutine find_espcd_cice_file(rootdir, region, & filename = 'NONE' return end if - call LDT_julhr_date(julhr, yyyy, mm, dd, hh) + call LDT_julhr_date(julhr, yyyy_local, mm_local, dd_local, & + hh_local) call construct_espcd_cice_filename(rootdir, region, & - yyyy, mm, dd, hh, fh, filename) + yyyy_local, mm_local, dd_local, hh_local, fh_local, filename) inquire(file=trim(filename), exist=file_exists) if (file_exists) then write(LDT_logunit,*)'[INFO] Will use ', trim(filename) @@ -468,21 +472,24 @@ subroutine process_espcd_cice(rootdir, nc, nr, landmask, icecon, & ! Locals real, allocatable :: icecon_arc(:,:) real, allocatable :: icecon_ant(:,:) + integer :: fh_internal integer :: c, r integer :: gindex real :: rlat ! First handle Arctic region - call process_espcd_cice_region('ARC', rootdir, nc, nr, landmask, & - yyyy, mm, dd, hh, fh, icecon_arc, ierr) + fh_internal = fh + call process_espcd_cice_region('arc', rootdir, nc, nr, landmask, & + yyyy, mm, dd, hh, fh_internal, icecon_arc, ierr) if (ierr .ne. 0) then if (allocated(icecon_arc)) deallocate(icecon_arc) return end if ! 
Next handle Antarctic region - call process_espcd_cice_region('ANT', rootdir, nc, nr, landmask, & - yyyy, mm, dd, hh, fh, icecon_ant, ierr) + fh_internal = fh + call process_espcd_cice_region('ant', rootdir, nc, nr, landmask, & + yyyy, mm, dd, hh, fh_internal, icecon_ant, ierr) if (ierr .ne. 0) then if (allocated(icecon_arc)) deallocate(icecon_arc) if (allocated(icecon_ant)) deallocate(icecon_ant) @@ -559,14 +566,14 @@ subroutine process_espcd_cice_region(region, rootdir, nc, nr, & external :: upscaleByAveraging ! Sanity check the region - if (region .eq. 'ARC') then + if (region .eq. 'arc') then nlat = nlat_arc - else if (region .eq. 'ANT') then + else if (region .eq. 'ant') then nlat = nlat_ant else write(LDT_logunit,*)'[ERR] Invalid ESPC-D region for cice: ' & // region - write(LDT_logunit,*)'[ERR] Must be either ARC or ANT' + write(LDT_logunit,*)'[ERR] Must be either arc or ant' ierr = 1 call LDT_endrun() end if @@ -619,7 +626,7 @@ subroutine process_espcd_cice_region(region, rootdir, nc, nr, & deallocate(aice) ! Set up interpolation weights - if (region .eq. 'ARC') then + if (region .eq. 'arc') then gridDesci = 0 gridDesci(1) = 0 ! Lat/lon projection gridDesci(2) = nlon ! Number of columns @@ -632,7 +639,7 @@ subroutine process_espcd_cice_region(region, rootdir, nc, nr, & gridDesci(9) = 0.039978027344005795 ! delta-lon (deg) gridDesci(10) = 0.0200004577637 ! delta-lat (deg) gridDesci(20) = 64 ! East-west ordering - else if (region .eq. 'ANT') then + else if (region .eq. 'ant') then gridDesci = 0 gridDesci(1) = 0 ! Lat/lon projection gridDesci(2) = nlon ! Number of columns From f297dbd07216eb40468ad38eeb8eb581c4960430 Mon Sep 17 00:00:00 2001 From: Eric Kemp Date: Fri, 19 Jul 2024 10:59:01 -0400 Subject: [PATCH 13/40] Initialized variable. 
--- ldt/USAFSI/USAFSI_analysisMod.F90 | 1 + 1 file changed, 1 insertion(+) diff --git a/ldt/USAFSI/USAFSI_analysisMod.F90 b/ldt/USAFSI/USAFSI_analysisMod.F90 index 091df5166..04e2f725b 100644 --- a/ldt/USAFSI/USAFSI_analysisMod.F90 +++ b/ldt/USAFSI/USAFSI_analysisMod.F90 @@ -2250,6 +2250,7 @@ subroutine getsst (date10, stmpdir, sstdir) found = .false. limit = 3 tries = 1 + grstat = 0 call date10_julhr (date10, julsst, program_name, routine_name) From 502af1da0cf36d1eab73b3ef89f9e89b2c08e8b3 Mon Sep 17 00:00:00 2001 From: Eric Kemp Date: Fri, 19 Jul 2024 10:59:50 -0400 Subject: [PATCH 14/40] Revamped logic for selecting ESPC-D files. --- ldt/USAFSI/USAFSI_espcdMod.F90 | 97 ++++++++++++++++++++-------------- 1 file changed, 57 insertions(+), 40 deletions(-) diff --git a/ldt/USAFSI/USAFSI_espcdMod.F90 b/ldt/USAFSI/USAFSI_espcdMod.F90 index ab1a30b88..ac0bd9ccf 100644 --- a/ldt/USAFSI/USAFSI_espcdMod.F90 +++ b/ldt/USAFSI/USAFSI_espcdMod.F90 @@ -37,7 +37,7 @@ subroutine find_espcd_cice_file(rootdir, region, & yyyy, mm, dd, hh, fh, filename) ! Imports - use LDT_logMod, only: LDT_logunit + use LDT_logMod, only: LDT_logunit, LDT_endrun use LDT_timeMgrMod, only: LDT_get_julhr, LDT_julhr_date ! Defaults @@ -60,15 +60,23 @@ subroutine find_espcd_cice_file(rootdir, region, & integer :: fh_local ! Build the file name. Note that all ESPC-D CICE runs start at 12Z. - call LDT_get_julhr(yyyy, mm, dd, 12, 0, 0, julhr) - if (hh >= 12) then - julhr_orig = julhr + call LDT_get_julhr(yyyy, mm, dd, hh, 0, 0, julhr) + if (hh == 12) then fh_local = 0 - else - julhr_orig = julhr - 24 ! Must use previous day's run + else if (hh == 18) then + fh_local = 6 + else if (hh == 00) then fh_local = 12 + else if (hh == 06) then + fh_local = 18 + else + write(LDT_logunit,*)'[ERR] Bad USAFSI hour ', hh + write(LDT_logunit,*)'[ERR] Must be 00, 06, 12, or 18' + write(LDT_logunit,*)'[ERR] LDT will exit...' 
+ call LDT_endrun() end if - call LDT_julhr_date(julhr_orig, yyyy_local, mm_local, dd_local, & + julhr = julhr - fh_local + call LDT_julhr_date(julhr, yyyy_local, mm_local, dd_local, & hh_local) call construct_espcd_cice_filename(rootdir, region, & yyyy_local, mm_local, dd_local, hh_local, fh_local, filename) @@ -84,12 +92,14 @@ subroutine find_espcd_cice_file(rootdir, region, & end if ! At this point, we are rolling back to earlier CICE file + julhr_orig = julhr + ! Start looping for earlier files - julhr = julhr_orig do write(LDT_logunit,*)'[WARN] Cannot find ', trim(filename) fh_local = fh_local + 24 - julhr = julhr - 24 + julhr = julhr - 24 ! Roll back to previous 12Z cycle + ! Give up after 5 days if ( (julhr_orig - julhr) > 24*5) then write(LDT_logunit,*)& '[WARN] *** GIVING UP ON ESPC-D CICE FOR ', & @@ -148,7 +158,7 @@ subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, fh, & filename) ! Imports - use LDT_logMod, only: LDT_logunit + use LDT_logMod, only: LDT_logunit, LDT_endrun use LDT_timeMgrMod, only: LDT_get_julhr, LDT_julhr_date ! Defaults @@ -156,30 +166,40 @@ subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, fh, & ! Arguments character(len=*), intent(in) :: rootdir - integer, intent(inout) :: yyyy - integer, intent(inout) :: mm - integer, intent(inout) :: dd - integer, intent(inout) :: hh - integer, intent(inout) :: fh + integer, intent(in) :: yyyy + integer, intent(in) :: mm + integer, intent(in) :: dd + integer, intent(in) :: hh + integer, intent(in) :: fh character*255, intent(inout) :: filename ! Locals integer :: julhr, julhr_orig + integer :: yyyy_local, mm_local, dd_local, hh_local, fh_local logical :: file_exists - ! Build the file name. Note that all ESPC-D SST runs start at 00Z. - if (hh < 6) then - fh = 0 - else if (hh < 12) then - fh = 6 - else if (hh < 18) then - fh = 12 + ! Build the file name. Note that all ESPC-D SST runs start at 12Z. 
+ call LDT_get_julhr(yyyy, mm, dd, hh, 0, 0, & + julhr) + if (hh == 12) then + fh_local = 0 + else if (hh == 18) then + fh_local = 6 + else if (hh == 00) then + fh_local = 12 + else if (hh == 06) then + fh_local = 18 else - fh = 18 + write(LDT_logunit,*)'[ERR] Bad USAFSI hour ', hh + write(LDT_logunit,*)'[ERR] Must be 00, 06, 12, or 18' + write(LDT_logunit,*)'[ERR] LDT will exit...' + call LDT_endrun() end if - hh = 0 + julhr = julhr - fh_local + call LDT_julhr_date(julhr, yyyy_local, mm_local, dd_local, & + hh_local) call construct_espcd_sst_filename(rootdir, & - yyyy, mm, dd, hh, fh, filename) + yyyy_local, mm_local, dd_local, hh_local, fh_local, filename) ! Check if file exists write(LDT_logunit,*) & @@ -193,29 +213,26 @@ subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, fh, & end if ! At this point, we are rolling back to earlier SST file - call LDT_get_julhr(yyyy, mm, dd, hh, 0, 0, julhr) julhr_orig = julhr ! Start looping for earlier files do write(LDT_logunit,*)'[WARN] Cannot find ',trim(filename) - fh = fh - 6 - if (fh < 0) then - fh = 24 - julhr = julhr - 24 ! Roll back to previous 00Z cycle - ! Give up after 5 days - if ( (julhr_orig - julhr) > 24*5) then - write(LDT_logunit,*)"[WARN] *** GIVING UP ON ESPC-D SST! ***" - write(LDT_logunit,*) & - "[WARN] *** NO ESPC-D SST AVAILABLE!!! ***" - filename = "NONE" - return - end if - call LDT_julhr_date(julhr, yyyy, mm, dd, hh) + fh_local = fh_local + 24 + julhr = julhr - 24 ! Roll back to previous 12Z cycle + ! Give up after 5 days + if ( (julhr_orig - julhr) > 24*5) then + write(LDT_logunit,*)"[WARN] *** GIVING UP ON ESPC-D SST! ***" + write(LDT_logunit,*) & + "[WARN] *** NO ESPC-D SST AVAILABLE!!! 
***" + filename = "NONE" + return end if + call LDT_julhr_date(julhr, yyyy_local, mm_local, dd_local, & + hh_local) call construct_espcd_sst_filename(rootdir, & - yyyy, mm, dd, hh, fh, filename) + yyyy_local, mm_local, dd_local, hh_local, fh_local, filename) inquire(file=trim(filename), exist=file_exists) if (file_exists) then write(LDT_logunit,*)'[INFO] Will use ',trim(filename) From b6e1aadb7509f4a13844a26c2c918c802d5b6e5b Mon Sep 17 00:00:00 2001 From: Eric Kemp Date: Fri, 19 Jul 2024 12:19:59 -0400 Subject: [PATCH 15/40] Added alert files for missing ESPC-D data. --- ldt/USAFSI/USAFSI_espcdMod.F90 | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/ldt/USAFSI/USAFSI_espcdMod.F90 b/ldt/USAFSI/USAFSI_espcdMod.F90 index ac0bd9ccf..af873553f 100644 --- a/ldt/USAFSI/USAFSI_espcdMod.F90 +++ b/ldt/USAFSI/USAFSI_espcdMod.F90 @@ -39,6 +39,8 @@ subroutine find_espcd_cice_file(rootdir, region, & ! Imports use LDT_logMod, only: LDT_logunit, LDT_endrun use LDT_timeMgrMod, only: LDT_get_julhr, LDT_julhr_date + use USAFSI_paramsMod, only: program_name, msglns + use USAFSI_utilMod, only: error_message ! Defaults implicit none @@ -58,6 +60,11 @@ subroutine find_espcd_cice_file(rootdir, region, & logical :: file_exists integer :: yyyy_local, mm_local, dd_local, hh_local integer :: fh_local + character*255 :: message (msglns) + character*12 :: routine_name + + message = '' + routine_name = 'find_espcd_cice_file' ! Build the file name. Note that all ESPC-D CICE runs start at 12Z. call LDT_get_julhr(yyyy, mm, dd, hh, 0, 0, julhr) @@ -89,6 +96,10 @@ subroutine find_espcd_cice_file(rootdir, region, & if (file_exists) then write(LDT_logunit,*)'[INFO] Will use ', trim(filename) return + else + message(1) = '[WARN] CANNOT FIND FILE' + message(2) = '[WARN] PATH = ' // trim(filename) + call error_message(program_name, routine_name, message) end if ! 
At this point, we are rolling back to earlier CICE file @@ -119,6 +130,10 @@ subroutine find_espcd_cice_file(rootdir, region, & if (file_exists) then write(LDT_logunit,*)'[INFO] Will use ', trim(filename) return + else + message(1) = '[WARN] CANNOT FIND FILE' + message(2) = '[WARN] PATH = ' // trim(filename) + call error_message(program_name, routine_name, message) end if end do @@ -160,6 +175,8 @@ subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, fh, & ! Imports use LDT_logMod, only: LDT_logunit, LDT_endrun use LDT_timeMgrMod, only: LDT_get_julhr, LDT_julhr_date + use USAFSI_paramsMod, only: program_name, msglns + use USAFSI_utilMod, only: error_message ! Defaults implicit none @@ -177,6 +194,11 @@ subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, fh, & integer :: julhr, julhr_orig integer :: yyyy_local, mm_local, dd_local, hh_local, fh_local logical :: file_exists + character*255 :: message (msglns) + character*12 :: routine_name + + message = '' + routine_name = 'find_espcd_sst_file' ! Build the file name. Note that all ESPC-D SST runs start at 12Z. call LDT_get_julhr(yyyy, mm, dd, hh, 0, 0, & @@ -210,6 +232,10 @@ subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, fh, & if (file_exists) then write(LDT_logunit,*)'[INFO] Will use ',trim(filename) return + else + message(1) = '[WARN] CANNOT FIND FILE' + message(2) = '[WARN] PATH = ' // trim(filename) + call error_message(program_name, routine_name, message) end if ! 
At this point, we are rolling back to earlier SST file @@ -237,6 +263,10 @@ subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, fh, & if (file_exists) then write(LDT_logunit,*)'[INFO] Will use ',trim(filename) return + else + message(1) = '[WARN] CANNOT FIND FILE' + message(2) = '[WARN] PATH = ' // trim(filename) + call error_message(program_name, routine_name, message) end if end do From d513bfedb9f00eede8bf5af26ef1529aacf859cd Mon Sep 17 00:00:00 2001 From: Eric Kemp Date: Fri, 19 Jul 2024 15:32:47 -0400 Subject: [PATCH 16/40] Bug fix to ESPC-D CICE time selection. --- ldt/USAFSI/USAFSI_espcdMod.F90 | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/ldt/USAFSI/USAFSI_espcdMod.F90 b/ldt/USAFSI/USAFSI_espcdMod.F90 index af873553f..d6f31e4b0 100644 --- a/ldt/USAFSI/USAFSI_espcdMod.F90 +++ b/ldt/USAFSI/USAFSI_espcdMod.F90 @@ -67,22 +67,26 @@ subroutine find_espcd_cice_file(rootdir, region, & routine_name = 'find_espcd_cice_file' ! Build the file name. Note that all ESPC-D CICE runs start at 12Z. + ! NOTE: CICE output is 12-hrly. call LDT_get_julhr(yyyy, mm, dd, hh, 0, 0, julhr) if (hh == 12) then fh_local = 0 else if (hh == 18) then - fh_local = 6 + fh_local = 0 + julhr = julhr - 6 else if (hh == 00) then fh_local = 12 + julhr = julhr - 12 else if (hh == 06) then - fh_local = 18 + fh_local = 12 + julhr = julhr - 18 else write(LDT_logunit,*)'[ERR] Bad USAFSI hour ', hh write(LDT_logunit,*)'[ERR] Must be 00, 06, 12, or 18' write(LDT_logunit,*)'[ERR] LDT will exit...' call LDT_endrun() end if - julhr = julhr - fh_local + call LDT_julhr_date(julhr, yyyy_local, mm_local, dd_local, & hh_local) call construct_espcd_cice_filename(rootdir, region, & From c2b73be4b48a02d6564ec398e45387a07f0fab4f Mon Sep 17 00:00:00 2001 From: Eric Kemp Date: Fri, 19 Jul 2024 16:02:56 -0400 Subject: [PATCH 17/40] Updated comments. 
--- ldt/USAFSI/USAFSI_run.F90 | 1 + 1 file changed, 1 insertion(+) diff --git a/ldt/USAFSI/USAFSI_run.F90 b/ldt/USAFSI/USAFSI_run.F90 index 3953fb499..49d4cbcf4 100644 --- a/ldt/USAFSI/USAFSI_run.F90 +++ b/ldt/USAFSI/USAFSI_run.F90 @@ -71,6 +71,7 @@ subroutine USAFSI_run(n) !** 13 Jan 22 Added support for FNMOC SST GRIB1 file.........Eric Kemp/NASA GSFC/SSAI !** 27 Jun 23 Removed LDT_endrun for normal termination, to avoid error ! code 1.........................................Eric Kemp/SSAI + !** 19 Jul 24 Added ESPC-D support...........................Eric Kemp/SSAI !***************************************************************************************** !***************************************************************************************** From 27d771575d2615a5509cc14d6d1d5bf9612db37c Mon Sep 17 00:00:00 2001 From: Eric Kemp Date: Fri, 19 Jul 2024 16:06:47 -0400 Subject: [PATCH 18/40] Added comments. --- ldt/USAFSI/USAFSI_analysisMod.F90 | 4 ++++ ldt/USAFSI/USAFSI_arraysMod.F90 | 3 ++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/ldt/USAFSI/USAFSI_analysisMod.F90 b/ldt/USAFSI/USAFSI_analysisMod.F90 index 04e2f725b..aa27d1ad7 100644 --- a/ldt/USAFSI/USAFSI_analysisMod.F90 +++ b/ldt/USAFSI/USAFSI_analysisMod.F90 @@ -17,6 +17,10 @@ ! 02 Nov 2020 Eric Kemp Removed blacklist code at request of 557WW. ! 22 Jan 2021 Yeosang Yoon Add subroutine for new 0.1 deg snow climatology ! 13 Jan 2022 Eric Kemp Added support for GRIB1 FNMOC SST file. +! 19 Jul 2024 Eric Kemp Renamed run_seaice_analysis_gofs to +! run_seaice_analysis_navy to reflect use of +! ESPC-D or GOFS data. Also fixed uninitialized +! variable. ! ! DESCRIPTION: ! Source code for Air Force snow depth analysis. 
diff --git a/ldt/USAFSI/USAFSI_arraysMod.F90 b/ldt/USAFSI/USAFSI_arraysMod.F90 index cc2b73fba..3a0d3b71f 100644 --- a/ldt/USAFSI/USAFSI_arraysMod.F90 +++ b/ldt/USAFSI/USAFSI_arraysMod.F90 @@ -32,7 +32,8 @@ !** 22 Mar 19 Ported to LDT...Eric Kemp, NASA GSFC/SSAI !** 09 May 19 Renamed LDTSI...Eric Kemp, NASA GSFC/SSAI !** 13 Dec 19 Renamed USAFSI...Eric Kemp, NASA GSFC/SSAI -!** +!** 19 Jul 24 Renamed gofs_icecon array to navy_icecon array to reflect +! source as either ESPC-D or GOFS. !***************************************************************************************** !***************************************************************************************** From cee02b81254be4f243571e4e1a11b4a724531ca7 Mon Sep 17 00:00:00 2001 From: Sarith Mahanama Date: Mon, 22 Jul 2024 19:06:31 -0400 Subject: [PATCH 19/40] added packable and using srun --- lis/utils/usaf/s2s/s2s_app/s2s_api.py | 1 + lis/utils/usaf/s2s/s2s_modules/shared/utils.py | 8 ++++---- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/lis/utils/usaf/s2s/s2s_app/s2s_api.py b/lis/utils/usaf/s2s/s2s_app/s2s_api.py index 605cfaf6b..65e8b6b93 100644 --- a/lis/utils/usaf/s2s/s2s_app/s2s_api.py +++ b/lis/utils/usaf/s2s/s2s_app/s2s_api.py @@ -91,5 +91,6 @@ else: with open(ARGS.group_jobs, 'r') as file: commands = [line.strip() for line in file if line.strip()] + NTASKS = len(commands) utils.job_script(CONFIGFILE, JOBFILE, JOBNAME, NTASKS, str(HOURS), CWD, group_jobs=commands) diff --git a/lis/utils/usaf/s2s/s2s_modules/shared/utils.py b/lis/utils/usaf/s2s/s2s_modules/shared/utils.py index f820bddb4..378b57c7b 100644 --- a/lis/utils/usaf/s2s/s2s_modules/shared/utils.py +++ b/lis/utils/usaf/s2s/s2s_modules/shared/utils.py @@ -23,7 +23,6 @@ #------------------------------------------------------------------------------ """ - import glob import os import platform @@ -63,10 +62,12 @@ def job_script(s2s_configfile, jobfile, job_name, ntasks, hours, cwd, in_command 
_f.write('#######################################################################' + '\n') _f.write('\n') _f.write('#SBATCH --account=' + sponsor_code + '\n') - _f.write('#SBATCH --ntasks=' + ntasks + '\n') + _f.write('#SBATCH --ntasks=' + str(ntasks) + '\n') _f.write('#SBATCH --time=' + hours + ':00:00' + '\n') if 'discover' in platform.node() or 'borg' in platform.node(): _f.write('#SBATCH --constraint=' + cfg['SETUP']['CONSTRAINT'] + '\n') + if cfg['SETUP']['CONSTRAINT'] == 'mil': + _f.write('#SBATCH --partition=packable' + '\n') else: # _f.write('#SBATCH --cluster-constraint=green' + '\n') _f.write('#SBATCH --cluster-constraint=' + cfg['SETUP']['CONSTRAINT'] + '\n') @@ -94,14 +95,13 @@ def job_script(s2s_configfile, jobfile, job_name, ntasks, hours, cwd, in_command _f.write('\n') _f.write('cd ' + cwd + '\n') - if command_list is None and group_jobs is None: _f.write(f"{this_command} || exit 1\n") _f.write(f"{sec_command}\n") else: if group_jobs: for cmd in group_jobs: - _f.write(f"nohup {cmd} &\n") + _f.write(f"srun --exclusive --ntasks 1 {cmd} &\n") _f.write("wait\n") if command_list: for cmd in command_list: From fb6126f49a4e2f89838e769c6e96a3b557315393 Mon Sep 17 00:00:00 2001 From: Sarith Mahanama Date: Wed, 24 Jul 2024 14:08:39 -0400 Subject: [PATCH 20/40] updates for job grouping --- lis/utils/usaf/s2s/s2s_app/s2s_run.sh | 46 +++++++++++-------- .../s2s_modules/bcsd_fcst/forecast_task_01.py | 4 +- .../s2s_modules/bcsd_fcst/forecast_task_03.py | 2 +- .../s2s_modules/bcsd_fcst/forecast_task_04.py | 2 +- .../s2s_modules/bcsd_fcst/forecast_task_05.py | 2 +- .../s2s_modules/bcsd_fcst/forecast_task_06.py | 4 +- .../s2s_modules/bcsd_fcst/forecast_task_08.py | 2 +- .../s2s_modules/bcsd_fcst/forecast_task_09.py | 8 ++-- .../s2smetric/postprocess_nmme_job.py | 8 ++-- .../s2s_modules/s2spost/run_s2spost_1month.py | 4 +- .../s2spost/run_s2spost_9months.py | 2 +- .../usaf/s2s/s2s_modules/shared/utils.py | 11 ++++- 12 files changed, 56 insertions(+), 39 deletions(-) 
diff --git a/lis/utils/usaf/s2s/s2s_app/s2s_run.sh b/lis/utils/usaf/s2s/s2s_app/s2s_run.sh index f052ab8d0..feb507bf7 100755 --- a/lis/utils/usaf/s2s/s2s_app/s2s_run.sh +++ b/lis/utils/usaf/s2s/s2s_app/s2s_run.sh @@ -574,7 +574,7 @@ bcsd_fcst(){ python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_02_run.j -t 1 -H 3 -j ${jobname}_02_ -w ${CWD} -C "part_ab" python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_03_run.j -t 1 -H 3 -j ${jobname}_03_ -w ${CWD} -C "part_ac" python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_04_run.j -t 1 -H 3 -j ${jobname}_04_ -w ${CWD} -C "part_ad" - /bin/rm ${cmdfile} "part_aa" "part_ab" "part_ac" "part_ad" + /bin/rm ${cmdfile} "part_aa" "part_ab" "part_ac" "part_ad" bcsd01_ID=$(submit_job "" "${jobname}_01_run.j") thisID=$(submit_job "" "${jobname}_02_run.j") bcsd01_ID=`echo $bcsd01_ID`' '$thisID @@ -641,9 +641,9 @@ bcsd_fcst(){ bcsd04_ID= /bin/rm bcsd04*.j split -l 4 $cmdfile part_ - python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_01_run.j -t 1 -H 3 -j ${jobname}_01_ -w ${CWD} -C "part_aa" - python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_02_run.j -t 1 -H 3 -j ${jobname}_02_ -w ${CWD} -C "part_ab" - /bin/rm ${cmdfile} "part_aa" "part_ab" + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_01_run.j -t 1 -H 4 -j ${jobname}_01_ -w ${CWD} -C "part_aa" + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_02_run.j -t 1 -H 4 -j ${jobname}_02_ -w ${CWD} -C "part_ab" + /bin/rm ${cmdfile} "part_aa" "part_ab" bcsd04_ID=$(submit_job "$bcsd01_ID:$bcsd03_ID" "${jobname}_01_run.j") thisID=$(submit_job "$bcsd01_ID:$bcsd03_ID" "${jobname}_02_run.j") bcsd04_ID=`echo $bcsd04_ID`' '$thisID @@ -680,9 +680,14 @@ bcsd_fcst(){ if [ $GROUP_JOBS == "Y" ]; then bcsd05_ID= /bin/rm bcsd05*.j - python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 4 -j ${jobname}_ -w ${CWD} -C ${cmdfile} - /bin/rm ${cmdfile} - bcsd05_ID=$(submit_job 
"$bcsd01_ID:$bcsd03_ID" "${jobname}_run.j") + split -l 3 $cmdfile part_ + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_01_run.j -t 1 -H 4 -j ${jobname}_01_ -w ${CWD} -C "part_aa" + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_02_run.j -t 1 -H 4 -j ${jobname}_02_ -w ${CWD} -C "part_ab" + /bin/rm ${cmdfile} "part_aa" "part_ab" + bcsd05_ID=$(submit_job "$bcsd01_ID:$bcsd03_ID" "${jobname}_01_run.j") + thisID=$(submit_job "$bcsd01_ID:$bcsd03_ID" "${jobname}_02_run.j") + bcsd05_ID=`echo $bcsd05_ID`' '$thisID + bcsd05_ID=`echo $bcsd05_ID | sed "s| |:|g"` fi # Task 6: CFSv2 Temporal Disaggregation (forecast_task_06.py: after 4 and 5) @@ -694,7 +699,6 @@ bcsd_fcst(){ unset job_list job_list=`ls $jobname*.j` - bcsd06_ID= for jfile in $job_list do if [ $GROUP_JOBS == "Y" ]; then @@ -707,6 +711,7 @@ bcsd_fcst(){ done bcsd06_ID=`echo $bcsd06_ID | sed "s| |:|g"` if [ $GROUP_JOBS == "Y" ]; then + bcsd06_ID= /bin/rm bcsd06*.j python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 4 -j ${jobname}_ -w ${CWD} -C ${cmdfile} /bin/rm ${cmdfile} @@ -980,9 +985,14 @@ s2spost(){ s2spost_ID=`echo $s2spost_ID | sed "s| |:|g"` if [ $GROUP_JOBS == "Y" ]; then /bin/rm s2spost_*.j - python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 4 -j ${jobname}_ -w ${CWD} -C ${cmdfile} - /bin/rm ${cmdfile} - s2spost_ID=$(submit_job "$lisfcst_ID" "${jobname}_run.j") + split -l 27 $cmdfile part_ + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_01_run.j -t 1 -H 4 -j ${jobname}_01_ -w ${CWD} -C "part_aa" + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_02_run.j -t 1 -H 4 -j ${jobname}_02_ -w ${CWD} -C "part_ab" + /bin/rm ${cmdfile} "part_aa" "part_ab" + s2spost_ID=$(submit_job "$lisfcst_ID" "${jobname}_01_run.j") + thisID=$(submit_job "$lisfcst_ID" "${jobname}_02_run.j") + s2spost_ID=`echo $s2spost_ID`' '$thisID + s2spost_ID=`echo $s2spost_ID | sed "s| |:|g"` fi cd ${BWD} } @@ -1032,14 
+1042,14 @@ s2smetrics(){ s2smetric_ID=`echo $s2smetric_ID | sed "s| |:|g"` if [ $GROUP_JOBS == "Y" ]; then /bin/rm s2smetric_*.j - python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C ${cmdfile} + python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 3 -j ${jobname}_ -w ${CWD} -C $cmdfile /bin/rm ${cmdfile} s2smetric_ID=$(submit_job "$s2spost_ID" "${jobname}_run.j") fi # write tiff file python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_tiff_run.j -t 1 -H 2 -j ${jobname}_tiff_ -w ${CWD} - COMMAND="python $LISHDIR/s2s_modules/s2smetric/postprocess_nmme_job.py -y ${YYYY} -m ${MM} -w ${CWD} -c $BWD/$CFILE" + COMMAND="srun --exclusive --ntasks 1 python $LISHDIR/s2s_modules/s2smetric/postprocess_nmme_job.py -y ${YYYY} -m ${MM} -w ${CWD} -c $BWD/$CFILE" sed -i "s|COMMAND|${COMMAND}|g" ${jobname}_tiff_run.j s2smetric_tiff_ID=$(submit_job "$s2smetric_ID" "${jobname}_tiff_run.j") @@ -1068,11 +1078,11 @@ s2splots(){ /bin/ln -s ${E2ESDIR}/s2splots/ /bin/ln -s ${E2ESDIR}/s2smetric/ - echo "python ${LISHDIR}/s2s_modules/s2splots/plot_s2smetrics.py -y ${YYYY} -m ${MM} -w ${CWD} -c $BWD/$CFIL" >> "$cmdfile" - echo "python ${LISHDIR}/s2s_modules/s2splots/plot_hybas.py -y ${YYYY} -m ${mon} -w ${CWD} -c $BWD/$CFILE" >> "$cmdfile" - echo "python ${LISHDIR}/s2s_modules/s2splots/plot_mena.py -y ${YYYY} -m ${MM} -w ${CWD} -c $BWD/$CFILE" >> "$cmdfile" - echo "python ${LISHDIR}/s2s_modules/s2splots/plot_anom_verify.py -y ${YYYY} -m ${mon} -w ${CWD} -c $BWD/$CFILE -l 1" >> "$cmdfile" - echo "ython ${LISHDIR}/s2s_modules/s2splots/plot_anom_verify.py -y ${YYYY} -m ${mon} -w ${CWD} -c $BWD/$CFILE -l 2" >> "$cmdfile" + echo "srun --exclusive --ntasks 1 python ${LISHDIR}/s2s_modules/s2splots/plot_s2smetrics.py -y ${YYYY} -m ${MM} -w ${CWD} -c $BWD/$CFIL" >> "$cmdfile" + echo "srun --exclusive --ntasks 1 python ${LISHDIR}/s2s_modules/s2splots/plot_hybas.py -y ${YYYY} -m ${mon} -w ${CWD} -c $BWD/$CFILE" 
>> "$cmdfile" + echo "srun --exclusive --ntasks 1 python ${LISHDIR}/s2s_modules/s2splots/plot_mena.py -y ${YYYY} -m ${MM} -w ${CWD} -c $BWD/$CFILE" >> "$cmdfile" + echo "srun --exclusive --ntasks 1 python ${LISHDIR}/s2s_modules/s2splots/plot_anom_verify.py -y ${YYYY} -m ${mon} -w ${CWD} -c $BWD/$CFILE -l 1" >> "$cmdfile" + echo "srun --exclusive --ntasks 1 python ${LISHDIR}/s2s_modules/s2splots/plot_anom_verify.py -y ${YYYY} -m ${mon} -w ${CWD} -c $BWD/$CFILE -l 2" >> "$cmdfile" python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 6 -j ${jobname}_ -w ${CWD} -C ${cmdfile} /bin/rm ${cmdfile} diff --git a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_01.py b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_01.py index 4b31b8e29..f058fd2c8 100644 --- a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_01.py +++ b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_01.py @@ -133,7 +133,7 @@ def _driver(): if eyear is not None: cmd_list = [] for cyear in range(syear,eyear+1): - cmd = "python" + cmd = "srun --exclusive --ntasks 1 python" cmd += f" {srcdir}/process_forecast_data.py" cmd += f" {cyear:04d}" cmd += f" {ens_num:02d}" @@ -149,7 +149,7 @@ def _driver(): hours, cwd, command_list=cmd_list) else: - cmd = "python" + cmd = "srun --exclusive --ntasks 1 python" cmd += f" {srcdir}/process_forecast_data.py" cmd += f" {syear:04d}" cmd += f" {ens_num:02d}" diff --git a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_03.py b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_03.py index b6b9995ef..032069c48 100644 --- a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_03.py +++ b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_03.py @@ -93,7 +93,7 @@ def _driver(): for nmme_model in config['EXP']['NMME_models']: ensemble_size = ensemble_sizes[nmme_model] - cmd = "python" + cmd = "srun --exclusive --ntasks 1 python" cmd += f" {srcdir}/nmme_reorg_f.py" cmd += f" {month_num}" cmd += f" {current_year}" diff 
--git a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_04.py b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_04.py index f22c5e5d8..e8c11c357 100644 --- a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_04.py +++ b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_04.py @@ -128,7 +128,7 @@ def _driver(): fcst_var = fcst_var_list[var_num] unit = unit_list[var_num] #print(f"{var_num} {fcst_var}") - cmd = "python" + cmd = "srun --exclusive --ntasks 1 python" cmd += f" {srcdir}/bias_correction_modulefast.py" cmd += f" {obs_var}" cmd += f" {fcst_var}" diff --git a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_05.py b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_05.py index 9af39fd04..4605f179d 100644 --- a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_05.py +++ b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_05.py @@ -133,7 +133,7 @@ def _driver(): ens_num = ensemble_sizes[nmme_model] for year in range(int(fcst_syr), (int(fcst_eyr) + 1)): - cmd = "python" + cmd = "srun --exclusive --ntasks 1 python" cmd += f" {srcdir}/bias_correction_nmme_modulefast.py" cmd += f" {obs_var}" cmd += f" {fcst_var}" diff --git a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_06.py b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_06.py index 32a225787..d0ef5f84d 100644 --- a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_06.py +++ b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_06.py @@ -132,7 +132,7 @@ def _driver(): for var_num, var_value in enumerate(obs_var_list): if var_num == 1: var_type = "PRCP" - cmd2 = "python" + cmd2 = "srun --exclusive --ntasks 1 python" cmd2 += f" {srcdir2}/forecast_task_07.py" cmd2 += f" -s {year}" cmd2 += f" -m {month_abbr}" @@ -146,7 +146,7 @@ def _driver(): fcst_var = fcst_var_list[var_num] unit = unit_list[var_num] - cmd = "python" + cmd = "srun --exclusive --ntasks 1 python" cmd += f" {srcdir}/temporal_disaggregation_6hourly_module.py" cmd += f" {obs_var}" cmd 
+= f" {fcst_var}" diff --git a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_08.py b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_08.py index 97e343a89..0a67c8072 100644 --- a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_08.py +++ b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_08.py @@ -130,7 +130,7 @@ def _driver(): print("[INFO] Processing temporal disaggregation of CFSv2 variables") for year in range(int(fcst_syr), (int(fcst_eyr) + 1)): - cmd = "python" + cmd = "srun --exclusive --ntasks 1 python" cmd += f" {srcdir}/temporal_disaggregation_nmme_6hourly_module.py" cmd += f" {obs_var}" cmd += f" {fcst_var}" diff --git a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_09.py b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_09.py index 276779d90..a0d00a6f3 100644 --- a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_09.py +++ b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_09.py @@ -104,7 +104,7 @@ def driver(): print("[INFO] Combining subdaily BC CFSv2 non-precip variables") for year in range(int(fcst_syr), (int(fcst_eyr) + 1)): - cmd = "python" + cmd = "srun --exclusive --ntasks 1 python" cmd += f" {srcdir}/combine_sub_daily_downscaled_forcings.py" cmd += f" {year}" cmd += f" {month_num}" @@ -121,7 +121,7 @@ def driver(): # Now write task 10 scripts for nmme_model in config['EXP']['NMME_models']: - cmd = "python" + cmd = "srun --exclusive --ntasks 1 python" cmd += f" {srcdir2}/forecast_task_10.py" cmd += f" -c {config_file}" cmd += f" -s {year}" @@ -135,7 +135,7 @@ def driver(): # Now write task 11 scripts - cmd = "python" + cmd = "srun --exclusive --ntasks 1 python" cmd += f" {srcdir2}/forecast_task_11.py" cmd += f" -s {year}" cmd += f" -m {month_abbr}" @@ -148,7 +148,7 @@ def driver(): # Now write task 12 scripts - cmd = "python" + cmd = "srun --exclusive --ntasks 1 python" cmd += f" {srcdir2}/forecast_task_12.py" cmd += f" -s {year}" cmd += f" -m {month_abbr}" diff --git 
a/lis/utils/usaf/s2s/s2s_modules/s2smetric/postprocess_nmme_job.py b/lis/utils/usaf/s2s/s2s_modules/s2smetric/postprocess_nmme_job.py index f6f7287fe..dbbbed343 100644 --- a/lis/utils/usaf/s2s/s2s_modules/s2smetric/postprocess_nmme_job.py +++ b/lis/utils/usaf/s2s/s2s_modules/s2smetric/postprocess_nmme_job.py @@ -69,7 +69,7 @@ def _run_convert_s2s_anom_cf(config, currentdate, baseoutdir): metricfile += f"/{nmme_model}_{metric_var}" metricfile += f"_{anom_type.upper()}_init_monthly_" metricfile += f"{month:02d}_{year:04d}.nc" - cmd = f"python {rundir}/convert_s2s_anom_cf.py" + cmd = f"srun --exclusive --ntasks 1 python {rundir}/convert_s2s_anom_cf.py" cmd += f" {metricfile} {cfoutdir}" print(cmd) if subprocess.call(cmd, shell=True) != 0: @@ -103,7 +103,7 @@ def _run_merge_s2s_anom_cf(config, currentdate, configfile, baseoutdir): rundir = config['SETUP']['LISFDIR'] + '/lis/utils/usaf/s2s/s2s_modules/s2smetric/' nmme_models = config["EXP"]["NMME_models"] for nmme_model in nmme_models: - cmd = "python" + cmd = "srun --exclusive --ntasks 1 python" cmd += f" {rundir}/merge_s2s_anom_cf.py" cmd += f" {input_dir} {output_dir}" cmd += f" {startdate.year:04d}{startdate.month:02d}{startdate.day:02d}" @@ -126,7 +126,7 @@ def _run_make_s2s_median_metric_geotiff(config, configfile, baseoutdir): for metric1 in metrics: metric = metric1.replace('-', '_') - cmd = "python" + cmd = "srun --exclusive --ntasks 1 python" cmd += f" {rundir}" cmd += "/make_s2s_median_metric_geotiff.py" cmd += f" {input_dir} {metric} {configfile}" @@ -173,7 +173,7 @@ def _driver(): '/lis/utils/usaf/s2s/s2s_modules/s2smetric/metrics_library/' for anom_type in ["anom", "sanom"]: py_script = "convert_dyn_fcst_to_" + anom_type + ".py" - cmd = "python" + cmd = "srun --exclusive --ntasks 1 python" cmd += f" {pylibdir}{py_script}" cmd += f" {currentdate.month:02d}" cmd += f" {currentdate.year:04d}" diff --git a/lis/utils/usaf/s2s/s2s_modules/s2spost/run_s2spost_1month.py 
b/lis/utils/usaf/s2s/s2s_modules/s2spost/run_s2spost_1month.py index f503aee49..3b2c1df44 100644 --- a/lis/utils/usaf/s2s/s2s_modules/s2spost/run_s2spost_1month.py +++ b/lis/utils/usaf/s2s/s2s_modules/s2spost/run_s2spost_1month.py @@ -150,7 +150,7 @@ def _loop_daily(config, configfile, topdatadir, fcstdate, startdate, model_forci curdate = firstdate while curdate <= enddate: - cmd = f"python {scriptdir}/daily_s2spost_nc.py {configfile}" + cmd = f"srun --exclusive --ntasks 1 python {scriptdir}/daily_s2spost_nc.py {configfile}" for model in ["SURFACEMODEL", "ROUTING"]: cmd += f" lis_fcst/{model_forcing}/{model}/" cmd += f"{curdate.year:04d}{curdate.month:02d}" @@ -193,7 +193,7 @@ def _proc_month(config, configfile, topdatadir, fcstdate, startdate, model_forci enddate = datetime.datetime(year=startdate.year, month=(startdate.month + 1), day=1) - cmd = f"python {scriptdir}/monthly_s2spost_nc.py {configfile} " + cmd = f"srun --exclusive --ntasks 1 python {scriptdir}/monthly_s2spost_nc.py {configfile} " workdir = f"{topdatadir}/cf_{model_forcing}_" workdir += f"{startdate.year:04d}{startdate.month:02d}" diff --git a/lis/utils/usaf/s2s/s2s_modules/s2spost/run_s2spost_9months.py b/lis/utils/usaf/s2s/s2s_modules/s2spost/run_s2spost_9months.py index 81f3da990..4625e6e1b 100644 --- a/lis/utils/usaf/s2s/s2s_modules/s2spost/run_s2spost_9months.py +++ b/lis/utils/usaf/s2s/s2s_modules/s2spost/run_s2spost_9months.py @@ -94,7 +94,7 @@ def _submit_batch_jobs(args): txt = "[INFO] Submitting batch job for" txt += f" cf_{model_forcing}_{curdate.year:04d}{curdate.month:02d}" print(txt) - cmd = "python" + cmd = "srun --exclusive --ntasks 1 python" cmd += f" {scriptdir}/run_s2spost_1month.py" cmd += f" {configfile} {topdatadir}" cmd += f" {fcstdate.year:04d}{fcstdate.month:02d}" diff --git a/lis/utils/usaf/s2s/s2s_modules/shared/utils.py b/lis/utils/usaf/s2s/s2s_modules/shared/utils.py index 378b57c7b..777142ece 100644 --- a/lis/utils/usaf/s2s/s2s_modules/shared/utils.py +++ 
b/lis/utils/usaf/s2s/s2s_modules/shared/utils.py @@ -29,6 +29,7 @@ import re import datetime import numpy as np +import math from netCDF4 import Dataset as nc4 #pylint: disable=no-name-in-module import yaml #pylint: disable=consider-using-f-string, too-many-statements, too-many-locals, too-many-arguments @@ -62,8 +63,14 @@ def job_script(s2s_configfile, jobfile, job_name, ntasks, hours, cwd, in_command _f.write('#######################################################################' + '\n') _f.write('\n') _f.write('#SBATCH --account=' + sponsor_code + '\n') - _f.write('#SBATCH --ntasks=' + str(ntasks) + '\n') + _f.write('#SBATCH --nodes=1' + '\n') + _f.write('#SBATCH --ntasks-per-node=' + str(ntasks) + '\n') _f.write('#SBATCH --time=' + hours + ':00:00' + '\n') + if group_jobs: + mpc = min(math.ceil(240 / ntasks), 80) + _f.write('#SBATCH --mem-per-cpu=' + str(mpc) + 'GB' + '\n') + else: + _f.write('#SBATCH --mem-per-cpu=40GB' + '\n') if 'discover' in platform.node() or 'borg' in platform.node(): _f.write('#SBATCH --constraint=' + cfg['SETUP']['CONSTRAINT'] + '\n') if cfg['SETUP']['CONSTRAINT'] == 'mil': @@ -101,7 +108,7 @@ def job_script(s2s_configfile, jobfile, job_name, ntasks, hours, cwd, in_command else: if group_jobs: for cmd in group_jobs: - _f.write(f"srun --exclusive --ntasks 1 {cmd} &\n") + _f.write(f"{cmd} &\n") _f.write("wait\n") if command_list: for cmd in command_list: From 687619cafed8a447d0e4eb50a54936af3ccc4c64 Mon Sep 17 00:00:00 2001 From: Sarith Mahanama Date: Wed, 24 Jul 2024 20:26:28 -0400 Subject: [PATCH 21/40] fixed a bug that I had entered this afternoon in the previous chjeck in --- .../usaf/s2s/s2s_modules/s2smetric/postprocess_nmme_job.py | 6 +++--- .../usaf/s2s/s2s_modules/s2spost/run_s2spost_1month.py | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/lis/utils/usaf/s2s/s2s_modules/s2smetric/postprocess_nmme_job.py b/lis/utils/usaf/s2s/s2s_modules/s2smetric/postprocess_nmme_job.py index dbbbed343..d5556b604 100644 
--- a/lis/utils/usaf/s2s/s2s_modules/s2smetric/postprocess_nmme_job.py +++ b/lis/utils/usaf/s2s/s2s_modules/s2smetric/postprocess_nmme_job.py @@ -69,7 +69,7 @@ def _run_convert_s2s_anom_cf(config, currentdate, baseoutdir): metricfile += f"/{nmme_model}_{metric_var}" metricfile += f"_{anom_type.upper()}_init_monthly_" metricfile += f"{month:02d}_{year:04d}.nc" - cmd = f"srun --exclusive --ntasks 1 python {rundir}/convert_s2s_anom_cf.py" + cmd = f"python {rundir}/convert_s2s_anom_cf.py" cmd += f" {metricfile} {cfoutdir}" print(cmd) if subprocess.call(cmd, shell=True) != 0: @@ -103,7 +103,7 @@ def _run_merge_s2s_anom_cf(config, currentdate, configfile, baseoutdir): rundir = config['SETUP']['LISFDIR'] + '/lis/utils/usaf/s2s/s2s_modules/s2smetric/' nmme_models = config["EXP"]["NMME_models"] for nmme_model in nmme_models: - cmd = "srun --exclusive --ntasks 1 python" + cmd = "python" cmd += f" {rundir}/merge_s2s_anom_cf.py" cmd += f" {input_dir} {output_dir}" cmd += f" {startdate.year:04d}{startdate.month:02d}{startdate.day:02d}" @@ -126,7 +126,7 @@ def _run_make_s2s_median_metric_geotiff(config, configfile, baseoutdir): for metric1 in metrics: metric = metric1.replace('-', '_') - cmd = "srun --exclusive --ntasks 1 python" + cmd = "python" cmd += f" {rundir}" cmd += "/make_s2s_median_metric_geotiff.py" cmd += f" {input_dir} {metric} {configfile}" diff --git a/lis/utils/usaf/s2s/s2s_modules/s2spost/run_s2spost_1month.py b/lis/utils/usaf/s2s/s2s_modules/s2spost/run_s2spost_1month.py index 3b2c1df44..f503aee49 100644 --- a/lis/utils/usaf/s2s/s2s_modules/s2spost/run_s2spost_1month.py +++ b/lis/utils/usaf/s2s/s2s_modules/s2spost/run_s2spost_1month.py @@ -150,7 +150,7 @@ def _loop_daily(config, configfile, topdatadir, fcstdate, startdate, model_forci curdate = firstdate while curdate <= enddate: - cmd = f"srun --exclusive --ntasks 1 python {scriptdir}/daily_s2spost_nc.py {configfile}" + cmd = f"python {scriptdir}/daily_s2spost_nc.py {configfile}" for model in ["SURFACEMODEL", 
"ROUTING"]: cmd += f" lis_fcst/{model_forcing}/{model}/" cmd += f"{curdate.year:04d}{curdate.month:02d}" @@ -193,7 +193,7 @@ def _proc_month(config, configfile, topdatadir, fcstdate, startdate, model_forci enddate = datetime.datetime(year=startdate.year, month=(startdate.month + 1), day=1) - cmd = f"srun --exclusive --ntasks 1 python {scriptdir}/monthly_s2spost_nc.py {configfile} " + cmd = f"python {scriptdir}/monthly_s2spost_nc.py {configfile} " workdir = f"{topdatadir}/cf_{model_forcing}_" workdir += f"{startdate.year:04d}{startdate.month:02d}" From 51f3e70b1277d2761895f3209203c15ec4007b7b Mon Sep 17 00:00:00 2001 From: Sarith Mahanama Date: Wed, 24 Jul 2024 21:34:40 -0400 Subject: [PATCH 22/40] riverted some of the changes I made this afternoon --- .../usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_01.py | 4 ++-- .../usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_03.py | 2 +- .../usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_04.py | 2 +- .../usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_05.py | 2 +- .../usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_06.py | 4 ++-- .../usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_08.py | 2 +- .../usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_09.py | 8 ++++---- .../s2s/s2s_modules/s2smetric/postprocess_nmme_job.py | 2 +- .../usaf/s2s/s2s_modules/s2spost/run_s2spost_9months.py | 2 +- lis/utils/usaf/s2s/s2s_modules/shared/utils.py | 2 +- 10 files changed, 15 insertions(+), 15 deletions(-) diff --git a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_01.py b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_01.py index f058fd2c8..4b31b8e29 100644 --- a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_01.py +++ b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_01.py @@ -133,7 +133,7 @@ def _driver(): if eyear is not None: cmd_list = [] for cyear in range(syear,eyear+1): - cmd = "srun --exclusive --ntasks 1 python" + cmd = "python" cmd += f" {srcdir}/process_forecast_data.py" cmd += f" {cyear:04d}" cmd += f" 
{ens_num:02d}" @@ -149,7 +149,7 @@ def _driver(): hours, cwd, command_list=cmd_list) else: - cmd = "srun --exclusive --ntasks 1 python" + cmd = "python" cmd += f" {srcdir}/process_forecast_data.py" cmd += f" {syear:04d}" cmd += f" {ens_num:02d}" diff --git a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_03.py b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_03.py index 032069c48..b6b9995ef 100644 --- a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_03.py +++ b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_03.py @@ -93,7 +93,7 @@ def _driver(): for nmme_model in config['EXP']['NMME_models']: ensemble_size = ensemble_sizes[nmme_model] - cmd = "srun --exclusive --ntasks 1 python" + cmd = "python" cmd += f" {srcdir}/nmme_reorg_f.py" cmd += f" {month_num}" cmd += f" {current_year}" diff --git a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_04.py b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_04.py index e8c11c357..f22c5e5d8 100644 --- a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_04.py +++ b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_04.py @@ -128,7 +128,7 @@ def _driver(): fcst_var = fcst_var_list[var_num] unit = unit_list[var_num] #print(f"{var_num} {fcst_var}") - cmd = "srun --exclusive --ntasks 1 python" + cmd = "python" cmd += f" {srcdir}/bias_correction_modulefast.py" cmd += f" {obs_var}" cmd += f" {fcst_var}" diff --git a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_05.py b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_05.py index 4605f179d..9af39fd04 100644 --- a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_05.py +++ b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_05.py @@ -133,7 +133,7 @@ def _driver(): ens_num = ensemble_sizes[nmme_model] for year in range(int(fcst_syr), (int(fcst_eyr) + 1)): - cmd = "srun --exclusive --ntasks 1 python" + cmd = "python" cmd += f" {srcdir}/bias_correction_nmme_modulefast.py" cmd += f" {obs_var}" cmd += f" 
{fcst_var}" diff --git a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_06.py b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_06.py index d0ef5f84d..32a225787 100644 --- a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_06.py +++ b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_06.py @@ -132,7 +132,7 @@ def _driver(): for var_num, var_value in enumerate(obs_var_list): if var_num == 1: var_type = "PRCP" - cmd2 = "srun --exclusive --ntasks 1 python" + cmd2 = "python" cmd2 += f" {srcdir2}/forecast_task_07.py" cmd2 += f" -s {year}" cmd2 += f" -m {month_abbr}" @@ -146,7 +146,7 @@ def _driver(): fcst_var = fcst_var_list[var_num] unit = unit_list[var_num] - cmd = "srun --exclusive --ntasks 1 python" + cmd = "python" cmd += f" {srcdir}/temporal_disaggregation_6hourly_module.py" cmd += f" {obs_var}" cmd += f" {fcst_var}" diff --git a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_08.py b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_08.py index 0a67c8072..97e343a89 100644 --- a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_08.py +++ b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_08.py @@ -130,7 +130,7 @@ def _driver(): print("[INFO] Processing temporal disaggregation of CFSv2 variables") for year in range(int(fcst_syr), (int(fcst_eyr) + 1)): - cmd = "srun --exclusive --ntasks 1 python" + cmd = "python" cmd += f" {srcdir}/temporal_disaggregation_nmme_6hourly_module.py" cmd += f" {obs_var}" cmd += f" {fcst_var}" diff --git a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_09.py b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_09.py index a0d00a6f3..276779d90 100644 --- a/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_09.py +++ b/lis/utils/usaf/s2s/s2s_modules/bcsd_fcst/forecast_task_09.py @@ -104,7 +104,7 @@ def driver(): print("[INFO] Combining subdaily BC CFSv2 non-precip variables") for year in range(int(fcst_syr), (int(fcst_eyr) + 1)): - cmd = "srun --exclusive --ntasks 1 python" + 
cmd = "python" cmd += f" {srcdir}/combine_sub_daily_downscaled_forcings.py" cmd += f" {year}" cmd += f" {month_num}" @@ -121,7 +121,7 @@ def driver(): # Now write task 10 scripts for nmme_model in config['EXP']['NMME_models']: - cmd = "srun --exclusive --ntasks 1 python" + cmd = "python" cmd += f" {srcdir2}/forecast_task_10.py" cmd += f" -c {config_file}" cmd += f" -s {year}" @@ -135,7 +135,7 @@ def driver(): # Now write task 11 scripts - cmd = "srun --exclusive --ntasks 1 python" + cmd = "python" cmd += f" {srcdir2}/forecast_task_11.py" cmd += f" -s {year}" cmd += f" -m {month_abbr}" @@ -148,7 +148,7 @@ def driver(): # Now write task 12 scripts - cmd = "srun --exclusive --ntasks 1 python" + cmd = "python" cmd += f" {srcdir2}/forecast_task_12.py" cmd += f" -s {year}" cmd += f" -m {month_abbr}" diff --git a/lis/utils/usaf/s2s/s2s_modules/s2smetric/postprocess_nmme_job.py b/lis/utils/usaf/s2s/s2s_modules/s2smetric/postprocess_nmme_job.py index d5556b604..f6f7287fe 100644 --- a/lis/utils/usaf/s2s/s2s_modules/s2smetric/postprocess_nmme_job.py +++ b/lis/utils/usaf/s2s/s2s_modules/s2smetric/postprocess_nmme_job.py @@ -173,7 +173,7 @@ def _driver(): '/lis/utils/usaf/s2s/s2s_modules/s2smetric/metrics_library/' for anom_type in ["anom", "sanom"]: py_script = "convert_dyn_fcst_to_" + anom_type + ".py" - cmd = "srun --exclusive --ntasks 1 python" + cmd = "python" cmd += f" {pylibdir}{py_script}" cmd += f" {currentdate.month:02d}" cmd += f" {currentdate.year:04d}" diff --git a/lis/utils/usaf/s2s/s2s_modules/s2spost/run_s2spost_9months.py b/lis/utils/usaf/s2s/s2s_modules/s2spost/run_s2spost_9months.py index 4625e6e1b..81f3da990 100644 --- a/lis/utils/usaf/s2s/s2s_modules/s2spost/run_s2spost_9months.py +++ b/lis/utils/usaf/s2s/s2s_modules/s2spost/run_s2spost_9months.py @@ -94,7 +94,7 @@ def _submit_batch_jobs(args): txt = "[INFO] Submitting batch job for" txt += f" cf_{model_forcing}_{curdate.year:04d}{curdate.month:02d}" print(txt) - cmd = "srun --exclusive --ntasks 1 python" + 
cmd = "python" cmd += f" {scriptdir}/run_s2spost_1month.py" cmd += f" {configfile} {topdatadir}" cmd += f" {fcstdate.year:04d}{fcstdate.month:02d}" diff --git a/lis/utils/usaf/s2s/s2s_modules/shared/utils.py b/lis/utils/usaf/s2s/s2s_modules/shared/utils.py index 777142ece..902ca1b38 100644 --- a/lis/utils/usaf/s2s/s2s_modules/shared/utils.py +++ b/lis/utils/usaf/s2s/s2s_modules/shared/utils.py @@ -108,7 +108,7 @@ def job_script(s2s_configfile, jobfile, job_name, ntasks, hours, cwd, in_command else: if group_jobs: for cmd in group_jobs: - _f.write(f"{cmd} &\n") + _f.write(f"srun --exclusive --ntasks 1 {cmd} &\n") _f.write("wait\n") if command_list: for cmd in command_list: From a12ecc06c2c3acbf22030ccd1e1dd055181cd3b3 Mon Sep 17 00:00:00 2001 From: Sarith Mahanama Date: Thu, 25 Jul 2024 09:36:54 -0400 Subject: [PATCH 23/40] a bug fix to the previous check in --- lis/utils/usaf/s2s/s2s_app/s2s_run.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/lis/utils/usaf/s2s/s2s_app/s2s_run.sh b/lis/utils/usaf/s2s/s2s_app/s2s_run.sh index feb507bf7..8710cf830 100755 --- a/lis/utils/usaf/s2s/s2s_app/s2s_run.sh +++ b/lis/utils/usaf/s2s/s2s_app/s2s_run.sh @@ -1078,11 +1078,11 @@ s2splots(){ /bin/ln -s ${E2ESDIR}/s2splots/ /bin/ln -s ${E2ESDIR}/s2smetric/ - echo "srun --exclusive --ntasks 1 python ${LISHDIR}/s2s_modules/s2splots/plot_s2smetrics.py -y ${YYYY} -m ${MM} -w ${CWD} -c $BWD/$CFIL" >> "$cmdfile" - echo "srun --exclusive --ntasks 1 python ${LISHDIR}/s2s_modules/s2splots/plot_hybas.py -y ${YYYY} -m ${mon} -w ${CWD} -c $BWD/$CFILE" >> "$cmdfile" - echo "srun --exclusive --ntasks 1 python ${LISHDIR}/s2s_modules/s2splots/plot_mena.py -y ${YYYY} -m ${MM} -w ${CWD} -c $BWD/$CFILE" >> "$cmdfile" - echo "srun --exclusive --ntasks 1 python ${LISHDIR}/s2s_modules/s2splots/plot_anom_verify.py -y ${YYYY} -m ${mon} -w ${CWD} -c $BWD/$CFILE -l 1" >> "$cmdfile" - echo "srun --exclusive --ntasks 1 python ${LISHDIR}/s2s_modules/s2splots/plot_anom_verify.py -y 
${YYYY} -m ${mon} -w ${CWD} -c $BWD/$CFILE -l 2" >> "$cmdfile" + echo "python ${LISHDIR}/s2s_modules/s2splots/plot_s2smetrics.py -y ${YYYY} -m ${MM} -w ${CWD} -c $BWD/$CFIL" >> "$cmdfile" + echo "python ${LISHDIR}/s2s_modules/s2splots/plot_hybas.py -y ${YYYY} -m ${mon} -w ${CWD} -c $BWD/$CFILE" >> "$cmdfile" + echo "python ${LISHDIR}/s2s_modules/s2splots/plot_mena.py -y ${YYYY} -m ${MM} -w ${CWD} -c $BWD/$CFILE" >> "$cmdfile" + echo "python ${LISHDIR}/s2s_modules/s2splots/plot_anom_verify.py -y ${YYYY} -m ${mon} -w ${CWD} -c $BWD/$CFILE -l 1" >> "$cmdfile" + echo "python ${LISHDIR}/s2s_modules/s2splots/plot_anom_verify.py -y ${YYYY} -m ${mon} -w ${CWD} -c $BWD/$CFILE -l 2" >> "$cmdfile" python $LISHDIR/s2s_app/s2s_api.py -c $BWD/$CFILE -f ${jobname}_run.j -t 1 -H 6 -j ${jobname}_ -w ${CWD} -C ${cmdfile} /bin/rm ${cmdfile} From 100fce941136b8c9b136930ca980acc996ac19b8 Mon Sep 17 00:00:00 2001 From: Sarith Mahanama Date: Thu, 25 Jul 2024 17:09:09 -0400 Subject: [PATCH 24/40] a typo was fixed in s2splots --- lis/utils/usaf/s2s/s2s_app/s2s_run.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lis/utils/usaf/s2s/s2s_app/s2s_run.sh b/lis/utils/usaf/s2s/s2s_app/s2s_run.sh index 8710cf830..d1c3e822d 100755 --- a/lis/utils/usaf/s2s/s2s_app/s2s_run.sh +++ b/lis/utils/usaf/s2s/s2s_app/s2s_run.sh @@ -1078,7 +1078,7 @@ s2splots(){ /bin/ln -s ${E2ESDIR}/s2splots/ /bin/ln -s ${E2ESDIR}/s2smetric/ - echo "python ${LISHDIR}/s2s_modules/s2splots/plot_s2smetrics.py -y ${YYYY} -m ${MM} -w ${CWD} -c $BWD/$CFIL" >> "$cmdfile" + echo "python ${LISHDIR}/s2s_modules/s2splots/plot_s2smetrics.py -y ${YYYY} -m ${MM} -w ${CWD} -c $BWD/$CFILE" >> "$cmdfile" echo "python ${LISHDIR}/s2s_modules/s2splots/plot_hybas.py -y ${YYYY} -m ${mon} -w ${CWD} -c $BWD/$CFILE" >> "$cmdfile" echo "python ${LISHDIR}/s2s_modules/s2splots/plot_mena.py -y ${YYYY} -m ${MM} -w ${CWD} -c $BWD/$CFILE" >> "$cmdfile" echo "python ${LISHDIR}/s2s_modules/s2splots/plot_anom_verify.py -y ${YYYY} -m ${mon} 
-w ${CWD} -c $BWD/$CFILE -l 1" >> "$cmdfile" From 2135b10d2e3a0d7a39a70c8ac31cb7782c52f4a0 Mon Sep 17 00:00:00 2001 From: Sarith Mahanama Date: Fri, 26 Jul 2024 11:24:27 -0400 Subject: [PATCH 25/40] updated print_walltime to update to deal with multiple start, end, elapse times of all srun commands --- lis/utils/usaf/s2s/s2s_app/s2s_run.sh | 72 +++++++++++++++++++++++++-- 1 file changed, 67 insertions(+), 5 deletions(-) diff --git a/lis/utils/usaf/s2s/s2s_app/s2s_run.sh b/lis/utils/usaf/s2s/s2s_app/s2s_run.sh index d1c3e822d..ca681f2f5 100755 --- a/lis/utils/usaf/s2s/s2s_app/s2s_run.sh +++ b/lis/utils/usaf/s2s/s2s_app/s2s_run.sh @@ -91,6 +91,57 @@ print_walltimes(){ echo " " echo " JOB FILE WALLTIME (HH:MM:SS)" echo " " + + datetime_to_seconds() { + date -d "$1" +%s + } + + seconds_to_datetime() { + date -d "@$1" '+%Y-%m-%dT%H:%M:%S' + } + compute_elapse (){ + local JOB_ID=$1 + + local job_times + job_times=$(sacct -j $JOB_ID --format=Start,End -P | tail -n +2) + + local start_times + local end_times + start_times=($(echo "$job_times" | cut -d'|' -f1)) + end_times=($(echo "$job_times" | cut -d'|' -f2)) + + local min_start + local max_end + min_start=$(datetime_to_seconds "${start_times[0]}") + max_end=$(datetime_to_seconds "${end_times[0]}") + + + for start in "${start_times[@]}"; do + local start_sec + start_sec=$(datetime_to_seconds "$start") + if [[ $start_sec -lt $min_start ]]; then + min_start=$start_sec + fi + done + + for end in "${end_times[@]}"; do + local end_sec + end_sec=$(datetime_to_seconds "$end") + if [[ $end_sec -gt $max_end ]]; then + max_end=$end_sec + fi + done + + local elapsed_seconds + elapsed_seconds=$((max_end - min_start)) + + local min_start_datetime + local max_end_datetime + min_start_datetime=$(seconds_to_datetime "$min_start") + max_end_datetime=$(seconds_to_datetime "$max_end") + + echo "$min_start_datetime|$max_end_datetime|$elapsed_seconds" + } jobids=(`grep '.j' ${SCRDIR}/SLURM_JOB_SCHEDULE | tr -s ' ' | cut -d' ' -f1`) 
jobfiles=(`grep '.j' ${SCRDIR}/SLURM_JOB_SCHEDULE | tr -s ' ' | cut -d' ' -f2`) @@ -109,8 +160,12 @@ print_walltimes(){ start_job=`echo $times | cut -d' ' -f1` end_job=`echo $times | cut -d' ' -f2` if [ $end_job != 'Unknown' ] && [ $start_job != 'Unknown' ] && [ $start_job != 'None' ] && [ $end_job != 'None' ]; then - elapse=`echo $times | cut -d' ' -f3` - ehms=`echo $elapse| cut -d':' -f1`'h '`echo $elapse| cut -d':' -f2`'m '`echo $elapse| cut -d':' -f3`'s' + result=$(compute_elapse "${jobids[$cjobs]}") + IFS='|' read -r start_job end_job elapsed_seconds <<< "$result" + hours=$((elapsed_seconds / 3600)) + minutes=$(( (elapsed_seconds % 3600) / 60)) + seconds=$((elapsed_seconds % 60)) + ehms=`echo $hours`'h '`echo $minutes`'m '`echo $seconds`'s' printf "${fmt}" $((cjobs+1))/$tLen ${jobfiles[$cjobs]} $ehms if [ ${cjobs} -eq 0 ]; then strart_time=$start_job @@ -121,10 +176,17 @@ print_walltimes(){ fi fi done - tdays=`date -u -d @$(($(date -d "$end_job" '+%s') - $(date -d "$strart_time" '+%s'))) | cut -d' ' -f4` - hms=`date -u -d @$(($(date -d "$end_job" '+%s') - $(date -d "$strart_time" '+%s'))) | cut -d' ' -f5` + min_start=$(datetime_to_seconds "${strart_time}") + max_end=$(datetime_to_seconds "${end_job}") + elapsed_seconds=$((max_end - min_start)) + + days=$((elapsed_seconds / 86400)) + hours=$(( (elapsed_seconds % 86400) / 3600 )) + minutes=$(( (elapsed_seconds % 3600) / 60 )) + seconds=$((elapsed_seconds % 60)) + ehms=`echo $days`'d '`echo $hours`'h '`echo $minutes`'m '`echo $seconds`'s' echo ' ' - echo 'ELAPSED TIME : ' $(($tdays-1))'d' `echo $hms| cut -d':' -f1`'h '`echo $hms| cut -d':' -f2`'m '`echo $hms| cut -d':' -f3`'s' + echo 'ELAPSED TIME : ' $ehms } if [ "${1}" == "--source-only" ]; then From bd9d0063eebcb560a24b94fe932ef87f0862a3eb Mon Sep 17 00:00:00 2001 From: Sarith Mahanama Date: Fri, 26 Jul 2024 12:43:14 -0400 Subject: [PATCH 26/40] changed to check if CONSTRAINT contains mil --- lis/utils/usaf/s2s/s2s_modules/shared/utils.py | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/lis/utils/usaf/s2s/s2s_modules/shared/utils.py b/lis/utils/usaf/s2s/s2s_modules/shared/utils.py index 902ca1b38..a894b0231 100644 --- a/lis/utils/usaf/s2s/s2s_modules/shared/utils.py +++ b/lis/utils/usaf/s2s/s2s_modules/shared/utils.py @@ -73,7 +73,7 @@ def job_script(s2s_configfile, jobfile, job_name, ntasks, hours, cwd, in_command _f.write('#SBATCH --mem-per-cpu=40GB' + '\n') if 'discover' in platform.node() or 'borg' in platform.node(): _f.write('#SBATCH --constraint=' + cfg['SETUP']['CONSTRAINT'] + '\n') - if cfg['SETUP']['CONSTRAINT'] == 'mil': + if 'mil' in cfg['SETUP']['CONSTRAINT']: _f.write('#SBATCH --partition=packable' + '\n') else: # _f.write('#SBATCH --cluster-constraint=green' + '\n') From 21ea62749fdee733446449f1a51c880534348039 Mon Sep 17 00:00:00 2001 From: Eric Kemp Date: Tue, 30 Jul 2024 16:26:34 -0400 Subject: [PATCH 27/40] Improved fault tolerance, removed unneeded variables and arguments. --- ldt/USAFSI/USAFSI_espcdMod.F90 | 274 ++++++++++++++++++++++++++++++--- ldt/USAFSI/USAFSI_run.F90 | 7 +- 2 files changed, 251 insertions(+), 30 deletions(-) diff --git a/ldt/USAFSI/USAFSI_espcdMod.F90 b/ldt/USAFSI/USAFSI_espcdMod.F90 index d6f31e4b0..36b3bedb6 100644 --- a/ldt/USAFSI/USAFSI_espcdMod.F90 +++ b/ldt/USAFSI/USAFSI_espcdMod.F90 @@ -34,9 +34,10 @@ module USAFSI_espcdMod ! Find ESPCD CICE file on file system subroutine find_espcd_cice_file(rootdir, region, & - yyyy, mm, dd, hh, fh, filename) + yyyy, mm, dd, hh, filename) ! Imports + use netcdf use LDT_logMod, only: LDT_logunit, LDT_endrun use LDT_timeMgrMod, only: LDT_get_julhr, LDT_julhr_date use USAFSI_paramsMod, only: program_name, msglns @@ -52,7 +53,6 @@ subroutine find_espcd_cice_file(rootdir, region, & integer, intent(in) :: mm integer, intent(in) :: dd integer, intent(in) :: hh - integer, intent(in) :: fh character*255, intent(out) :: filename ! 
Locals @@ -62,6 +62,17 @@ subroutine find_espcd_cice_file(rootdir, region, & integer :: fh_local character*255 :: message (msglns) character*12 :: routine_name + integer :: nlat + integer, parameter :: nlat_arc = 2501 + integer, parameter :: nlat_ant = 1549 + integer, parameter :: nlon = 9000 + integer :: ncid, aice_varid + real, allocatable :: aice(:,:,:) + integer, allocatable :: dimids(:), lens(:) + integer :: ndims + logical :: good + integer :: ierr + integer :: i message = '' routine_name = 'find_espcd_cice_file' @@ -131,14 +142,122 @@ subroutine find_espcd_cice_file(rootdir, region, & call construct_espcd_cice_filename(rootdir, region, & yyyy_local, mm_local, dd_local, hh_local, fh_local, filename) inquire(file=trim(filename), exist=file_exists) - if (file_exists) then - write(LDT_logunit,*)'[INFO] Will use ', trim(filename) - return - else + if (.not. file_exists) then message(1) = '[WARN] CANNOT FIND FILE' message(2) = '[WARN] PATH = ' // trim(filename) call error_message(program_name, routine_name, message) + cycle + end if + + ! Try opening the file. + ierr = nf90_open(path=trim(filename), & + mode=nf90_nowrite, & + ncid=ncid) + if (ierr .ne. nf90_noerr) then + message(1) = '[WARN] CANNOT OPEN FILE' + message(2) = '[WARN] PATH = ' // trim(filename) + call error_message(program_name, routine_name, message) + cycle + end if + + ! See if aice is in the file. + ierr = nf90_inq_varid(ncid, "aice", aice_varid) + if (ierr .ne. nf90_noerr) then + message(1) = '[WARN] CANNOT FIND aice IN FILE' + message(2) = '[WARN] PATH = ' // trim(filename) + call error_message(program_name, routine_name, message) + ierr = nf90_close(ncid) + cycle + end if + + ! Check the dimensions + ierr = nf90_inquire_variable(ncid, aice_varid, ndims=ndims) + if (ierr .ne. 
nf90_noerr) then + message(1) = '[WARN] CANNOT GET DIMENSIONS FOR aice IN FILE' + message(2) = '[WARN] PATH = ' // trim(filename) + call error_message(program_name, routine_name, message) + ierr = nf90_close(ncid) + cycle + end if + allocate(dimids(ndims)) + ierr = nf90_inquire_variable(ncid, aice_varid, dimids=dimids) + if (ierr .ne. nf90_noerr) then + message(1) = '[WARN] CANNOT GET DIMENSIONS FOR aice IN FILE' + message(2) = '[WARN] PATH = ' // trim(filename) + call error_message(program_name, routine_name, message) + deallocate(dimids) + ierr = nf90_close(ncid) + cycle end if + allocate(lens(ndims)) + good = .true. + do i = 1, ndims + ierr = nf90_inquire_dimension(ncid, dimids(i), len=lens(i)) + if (ierr .ne. nf90_noerr) then + message(1) = '[WARN] CANNOT GET DIMENSIONS FOR aice IN FILE' + message(2) = '[WARN] PATH = ' // trim(filename) + call error_message(program_name, routine_name, message) + deallocate(dimids) + deallocate(lens) + ierr = nf90_close(ncid) + good = .false. + exit + end if + end do + if (.not. good) cycle + + deallocate(dimids) + + ! Sanity check the dimensions + if (region .eq. 'arc') then + if (lens(3) .ne. 1 .or. & + lens(2) .ne. nlat_arc .or. & + lens(1) .ne. nlon) then + message(1) = '[WARN] BAD DIMENSIONS FOR aice IN FILE' + message(2) = '[WARN] PATH = ' // trim(filename) + call error_message(program_name, routine_name, message) + deallocate(lens) + ierr = nf90_close(ncid) + cycle + end if + ! Good dimensions + nlat = nlat_arc + else if (region .eq. 'ant') then + if (lens(3) .ne. 1 .or. & + lens(2) .ne. nlat_ant .or. & + lens(1) .ne. nlon) then + message(1) = '[WARN] BAD DIMENSIONS FOR aice IN FILE' + message(2) = '[WARN] PATH = ' // trim(filename) + call error_message(program_name, routine_name, message) + deallocate(lens) + ierr = nf90_close(ncid) + cycle + end if + ! Good dimensions + nlat = nlat_ant + end if + + ! Allocate the aice array + deallocate(lens) ! Don't need this anymore + allocate(aice(nlon, nlat, 1)) + + ! 
Try reading the file + ierr = nf90_get_var(ncid, aice_varid, aice) + if (ierr .ne. nf90_noerr) then + message(1) = '[WARN] CANNOT READ aice IN FILE' + message(2) = '[WARN] PATH = ' // trim(filename) + call error_message(program_name, routine_name, message) + deallocate(aice) + ierr = nf90_close(ncid) + cycle + end if + + ! We have a winner. + deallocate(aice) + ierr = nf90_close(ncid) + write(LDT_logunit,*)'[INFO] Will use ', trim(filename) + return + end do end subroutine find_espcd_cice_file @@ -173,10 +292,11 @@ subroutine construct_espcd_cice_filename(rootdir, region, & end subroutine construct_espcd_cice_filename ! Find ESPC-D SST file on file system - subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, fh, & + subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, & filename) ! Imports + use netcdf use LDT_logMod, only: LDT_logunit, LDT_endrun use LDT_timeMgrMod, only: LDT_get_julhr, LDT_julhr_date use USAFSI_paramsMod, only: program_name, msglns @@ -191,7 +311,6 @@ subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, fh, & integer, intent(in) :: mm integer, intent(in) :: dd integer, intent(in) :: hh - integer, intent(in) :: fh character*255, intent(inout) :: filename ! 
Locals @@ -200,6 +319,15 @@ subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, fh, & logical :: file_exists character*255 :: message (msglns) character*12 :: routine_name + integer, parameter :: nlat = 8001 + integer, parameter :: nlon = 9000 + integer :: ncid, water_temp_varid + real, allocatable :: water_temp(:,:,:,:) + integer :: ndims + integer, allocatable :: dimids(:), lens(:) + logical :: good + integer :: ierr + integer :: i message = '' routine_name = 'find_espcd_sst_file' @@ -264,14 +392,116 @@ subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, fh, & call construct_espcd_sst_filename(rootdir, & yyyy_local, mm_local, dd_local, hh_local, fh_local, filename) inquire(file=trim(filename), exist=file_exists) - if (file_exists) then - write(LDT_logunit,*)'[INFO] Will use ',trim(filename) - return - else + if (.not. file_exists) then message(1) = '[WARN] CANNOT FIND FILE' message(2) = '[WARN] PATH = ' // trim(filename) call error_message(program_name, routine_name, message) + cycle + end if + + ! Try opening the file + ierr = nf90_open(path=trim(filename), & + mode=nf90_nowrite, & + ncid=ncid) + if (ierr .ne. nf90_noerr) then + message(1) = '[WARN] CANNOT OPEN FILE' + message(2) = '[WARN] PATH = ' // trim(filename) + call error_message(program_name, routine_name, message) + cycle + end if + + ! See if water_temp is in the file. + ierr = nf90_inq_varid(ncid, "water_temp", & + water_temp_varid) + if (ierr .ne. nf90_noerr) then + message(1) = '[WARN] CANNOT FIND water_temp IN FILE' + message(2) = '[WARN] PATH = ' // trim(filename) + call error_message(program_name, routine_name, message) + ierr = nf90_close(ncid) + cycle + end if + + ! Check the dimensions + ierr = nf90_inquire_variable(ncid, water_temp_varid, ndims=ndims) + if (ierr .ne. 
nf90_noerr) then + message(1) = & + '[WARN] CANNOT GET DIMENSIONS FOR water_temp IN FILE' + message(2) = '[WARN] PATH = ' // trim(filename) + call error_message(program_name, routine_name, message) + ierr = nf90_close(ncid) + cycle + end if + if (ndims .ne. 4) then + message(1) = & + '[WARN] BAD DIMENSIONS FOR water_temp IN FILE' + message(2) = '[WARN] PATH = ' // trim(filename) + call error_message(program_name, routine_name, message) + ierr = nf90_close(ncid) + cycle + end if + allocate(dimids(ndims)) + allocate(lens(ndims)) + ierr = nf90_inquire_variable(ncid, water_temp_varid, dimids=dimids) + if (ierr .ne. nf90_noerr) then + message(1) = & + '[WARN] CANNOT GET DIMENSIONS FOR water_temp IN FILE' + message(2) = '[WARN] PATH = ' // trim(filename) + call error_message(program_name, routine_name, message) + deallocate(dimids) + ierr = nf90_close(ncid) + cycle end if + good = .true. + do i = 1, ndims + ierr = nf90_inquire_dimension(ncid, dimids(i), & + len=lens(i)) + if (ierr .ne. nf90_noerr) then + message(1) = & + '[WARN] CANNOT GET DIMENSIONS FOR water_temp IN FILE' + message(2) = '[WARN] PATH = ' // trim(filename) + call error_message(program_name, routine_name, message) + deallocate(dimids) + deallocate(lens) + ierr = nf90_close(ncid) + good = .false. + exit + end if + end do + if (.not. good) cycle + deallocate(dimids) + + if (lens(1) .ne. nlon .or. & + lens(2) .ne. nlat .or. & + lens(3) .ne. 1 .or. & + lens(4) .ne. 1) then + message(1) = & + '[WARN] CANNOT GET DIMENSIONS FOR water_temp IN FILE' + message(2) = '[WARN] PATH = ' // trim(filename) + call error_message(program_name, routine_name, message) + deallocate(lens) + ierr = nf90_close(ncid) + cycle + end if + deallocate(lens) + + ! Allocate a subset of water temp + allocate(water_temp(nlon, nlat, 1, 1)) + ierr = nf90_get_var(ncid, water_temp_varid, water_temp) + if (ierr .ne. 
nf90_noerr) then + message(1) = & + '[WARN] CANNOT READ water_temp IN FILE' + message(2) = '[WARN] PATH = ' // trim(filename) + call error_message(program_name, routine_name, message) + deallocate(water_temp) + ierr = nf90_close(ncid) + cycle + end if + + ! We have a winner + deallocate(water_temp) + write(LDT_logunit,*)'[INFO] Will use ',trim(filename) + return + end do end subroutine find_espcd_sst_file @@ -308,7 +538,7 @@ end subroutine construct_espcd_sst_filename #if (defined USE_NETCDF3 || defined USE_NETCDF4) ! Read ESPC-D sea surface temperature and reproject to LDT grid subroutine process_espcd_sst(rootdir, nc, nr, landmask, sst, & - yyyy, mm, dd, hh, fh, ierr) + yyyy, mm, dd, hh, ierr) ! Imports use LDT_coreMod, only: LDT_rc, LDT_domain @@ -328,7 +558,6 @@ subroutine process_espcd_sst(rootdir, nc, nr, landmask, sst, & integer, intent(inout) :: mm integer, intent(inout) :: dd integer, intent(inout) :: hh - integer, intent(inout) :: fh integer, intent(out) :: ierr ! Locals @@ -352,7 +581,7 @@ subroutine process_espcd_sst(rootdir, nc, nr, landmask, sst, & external :: upscaleByAveraging ! Find a valid file on the file system - call find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, fh, filename) + call find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, filename) if (trim(filename) == "NONE") then ierr = 1 return @@ -499,7 +728,7 @@ end subroutine process_espcd_sst ! Read ESPC-D sea ice and reproject to LDT grid subroutine process_espcd_cice(rootdir, nc, nr, landmask, icecon, & - yyyy, mm, dd, hh, fh, ierr) + yyyy, mm, dd, hh, ierr) ! Imports use LDT_coreMod, only: LDT_domain @@ -517,30 +746,26 @@ subroutine process_espcd_cice(rootdir, nc, nr, landmask, icecon, & integer, intent(inout) :: mm integer, intent(inout) :: dd integer, intent(inout) :: hh - integer, intent(inout) :: fh integer, intent(out) :: ierr ! Locals real, allocatable :: icecon_arc(:,:) real, allocatable :: icecon_ant(:,:) - integer :: fh_internal integer :: c, r integer :: gindex real :: rlat ! 
First handle Arctic region - fh_internal = fh call process_espcd_cice_region('arc', rootdir, nc, nr, landmask, & - yyyy, mm, dd, hh, fh_internal, icecon_arc, ierr) + yyyy, mm, dd, hh, icecon_arc, ierr) if (ierr .ne. 0) then if (allocated(icecon_arc)) deallocate(icecon_arc) return end if ! Next handle Antarctic region - fh_internal = fh call process_espcd_cice_region('ant', rootdir, nc, nr, landmask, & - yyyy, mm, dd, hh, fh_internal, icecon_ant, ierr) + yyyy, mm, dd, hh, icecon_ant, ierr) if (ierr .ne. 0) then if (allocated(icecon_arc)) deallocate(icecon_arc) if (allocated(icecon_ant)) deallocate(icecon_ant) @@ -575,7 +800,7 @@ end subroutine process_espcd_cice ! Process a particular region of ESPC-D CICE data (Arctic or Antarctic subroutine process_espcd_cice_region(region, rootdir, nc, nr, & - landmask, yyyy, mm, dd, hh, fh, icecon, ierr) + landmask, yyyy, mm, dd, hh, icecon, ierr) ! Imports use LDT_coreMod, only: LDT_rc @@ -592,7 +817,6 @@ subroutine process_espcd_cice_region(region, rootdir, nc, nr, & integer, intent(inout) :: mm integer, intent(inout) :: dd integer, intent(inout) :: hh - integer, intent(inout) :: fh real, allocatable, intent(out) :: icecon(:,:) integer, intent(out) :: ierr @@ -630,7 +854,7 @@ subroutine process_espcd_cice_region(region, rootdir, nc, nr, & end if ! Find a valid file on the file system - call find_espcd_cice_file(rootdir, region, yyyy, mm, dd, hh, fh, & + call find_espcd_cice_file(rootdir, region, yyyy, mm, dd, hh, & filename) if (trim(filename) == "NONE") then ierr = 1 diff --git a/ldt/USAFSI/USAFSI_run.F90 b/ldt/USAFSI/USAFSI_run.F90 index 49d4cbcf4..9040a101e 100644 --- a/ldt/USAFSI/USAFSI_run.F90 +++ b/ldt/USAFSI/USAFSI_run.F90 @@ -140,8 +140,6 @@ subroutine USAFSI_run(n) real, allocatable :: elevations(:,:) real, allocatable :: landice(:,:) integer :: j - character*120 :: line - integer :: icount integer :: c, r real :: arctlatr real, allocatable :: climo_tmp(:,:) @@ -153,7 +151,6 @@ subroutine USAFSI_run(n) ! 
PMW snow depth retrievals, Yeosang Yoon character*255 :: TB_raw_dir ! Brightness temperature raw file directory path !kyh20201118 - integer :: ssmis_option ! option for snow depth retrieval algorithm maxsobs = usafsi_settings%maxsobs @@ -346,7 +343,7 @@ subroutine USAFSI_run(n) '[INFO] CALLING PROCESS_ESPCD_SST TO GET SEA SURFACE TEMPERATURES' call process_espcd_sst(usafsi_settings%espcd_sst_dir, & nc, nr, landmask, usafSI_arrays%sst, & - yyyy, mm, dd, hh, fh, ierr) + yyyy, mm, dd, hh, ierr) else if (usafsi_settings%source_of_ocean_data == "GOFS") then write (LDT_logunit,*) & '[INFO] CALLING PROCESS_GOFS_SST TO GET SEA SURFACE TEMPERATURES' @@ -418,7 +415,7 @@ subroutine USAFSI_run(n) '[INFO] CALLING PROCESS_ESPCD_CICE TO GET GOFS SEA ICE DATA' call process_espcd_cice(usafsi_settings%espcd_cice_dir, & nc, nr, landmask, USAFSI_arrays%navy_icecon, & - yyyy, mm, dd, hh, fh, ierr) + yyyy, mm, dd, hh, ierr) if (ierr == 0) found_navy_cice = .true. else if (usafsi_settings%source_of_ocean_data == "GOFS") then ! Try to get the GOFS sea ice data From afa7f16518cd6f42f2f57b3dcf58efaa5da58a24 Mon Sep 17 00:00:00 2001 From: Eric Kemp Date: Wed, 31 Jul 2024 13:43:53 -0400 Subject: [PATCH 28/40] Further fault tolerance for SST data. --- ldt/USAFSI/USAFSI_espcdMod.F90 | 87 +++++++++++----------------------- 1 file changed, 27 insertions(+), 60 deletions(-) diff --git a/ldt/USAFSI/USAFSI_espcdMod.F90 b/ldt/USAFSI/USAFSI_espcdMod.F90 index 36b3bedb6..2754c4796 100644 --- a/ldt/USAFSI/USAFSI_espcdMod.F90 +++ b/ldt/USAFSI/USAFSI_espcdMod.F90 @@ -293,7 +293,7 @@ end subroutine construct_espcd_cice_filename ! Find ESPC-D SST file on file system subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, & - filename) + filename, water_temp, nlat, nlon) ! 
Imports use netcdf @@ -312,6 +312,9 @@ subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, & integer, intent(in) :: dd integer, intent(in) :: hh character*255, intent(inout) :: filename + real, allocatable, intent(inout) :: water_temp(:,:,:,:) + integer, intent(out) :: nlat + integer, intent(out) :: nlon ! Locals integer :: julhr, julhr_orig @@ -319,16 +322,17 @@ subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, & logical :: file_exists character*255 :: message (msglns) character*12 :: routine_name - integer, parameter :: nlat = 8001 - integer, parameter :: nlon = 9000 integer :: ncid, water_temp_varid - real, allocatable :: water_temp(:,:,:,:) integer :: ndims integer, allocatable :: dimids(:), lens(:) logical :: good + logical :: first_time integer :: ierr integer :: i + nlat = 8001 + nlon = 9000 + message = '' routine_name = 'find_espcd_sst_file' @@ -350,34 +354,17 @@ subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, & call LDT_endrun() end if julhr = julhr - fh_local - call LDT_julhr_date(julhr, yyyy_local, mm_local, dd_local, & - hh_local) - call construct_espcd_sst_filename(rootdir, & - yyyy_local, mm_local, dd_local, hh_local, fh_local, filename) - - ! Check if file exists - write(LDT_logunit,*) & - '------------------------------------------------------------------' - write(LDT_logunit,*) & - '[INFO] *** SEARCHING FOR ESPC-D SST ***' - inquire(file=trim(filename), exist=file_exists) - if (file_exists) then - write(LDT_logunit,*)'[INFO] Will use ',trim(filename) - return - else - message(1) = '[WARN] CANNOT FIND FILE' - message(2) = '[WARN] PATH = ' // trim(filename) - call error_message(program_name, routine_name, message) - end if - - ! At this point, we are rolling back to earlier SST file julhr_orig = julhr - ! Start looping for earlier files + ! Loop through possible files + first_time = .true. do - write(LDT_logunit,*)'[WARN] Cannot find ',trim(filename) - fh_local = fh_local + 24 - julhr = julhr - 24 ! 
Roll back to previous 12Z cycle + if (.not. first_time) then + fh_local = fh_local + 24 + julhr = julhr - 24 ! Roll back to previous 12Z cycle + end if + first_time = .false. + ! Give up after 5 days if ( (julhr_orig - julhr) > 24*5) then write(LDT_logunit,*)"[WARN] *** GIVING UP ON ESPC-D SST! ***" @@ -386,11 +373,13 @@ subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, & filename = "NONE" return end if + call LDT_julhr_date(julhr, yyyy_local, mm_local, dd_local, & hh_local) - call construct_espcd_sst_filename(rootdir, & yyyy_local, mm_local, dd_local, hh_local, fh_local, filename) + + ! See if file exists inquire(file=trim(filename), exist=file_exists) if (.not. file_exists) then message(1) = '[WARN] CANNOT FIND FILE' @@ -440,7 +429,6 @@ subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, & cycle end if allocate(dimids(ndims)) - allocate(lens(ndims)) ierr = nf90_inquire_variable(ncid, water_temp_varid, dimids=dimids) if (ierr .ne. nf90_noerr) then message(1) = & @@ -452,6 +440,7 @@ subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, & cycle end if good = .true. + allocate(lens(ndims)) do i = 1, ndims ierr = nf90_inquire_dimension(ncid, dimids(i), & len=lens(i)) @@ -486,6 +475,8 @@ subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, & ! Allocate a subset of water temp allocate(water_temp(nlon, nlat, 1, 1)) + water_temp = 0 + ierr = nf90_get_var(ncid, water_temp_varid, water_temp) if (ierr .ne. nf90_noerr) then message(1) = & @@ -498,7 +489,6 @@ subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, & end if ! We have a winner - deallocate(water_temp) write(LDT_logunit,*)'[INFO] Will use ',trim(filename) return @@ -542,7 +532,7 @@ subroutine process_espcd_sst(rootdir, nc, nr, landmask, sst, & ! Imports use LDT_coreMod, only: LDT_rc, LDT_domain - use LDT_logMod, only: LDT_verify, ldt_logunit + use LDT_logMod, only: LDT_verify, ldt_logunit, LDT_endrun use netcdf ! 
Defaults @@ -561,8 +551,8 @@ subroutine process_espcd_sst(rootdir, nc, nr, landmask, sst, & integer, intent(out) :: ierr ! Locals - integer, parameter :: nlat = 8001 - integer, parameter :: nlon = 9000 + integer :: nlat + integer :: nlon character*255 :: filename integer :: ncid, water_temp_varid real, allocatable :: water_temp(:,:,:,:) @@ -581,36 +571,13 @@ subroutine process_espcd_sst(rootdir, nc, nr, landmask, sst, & external :: upscaleByAveraging ! Find a valid file on the file system - call find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, filename) + call find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, filename, & + water_temp, nlat, nlon) if (trim(filename) == "NONE") then ierr = 1 return end if - ! Open the file - call LDT_verify(nf90_open(path=trim(filename), & - mode=nf90_nowrite, & - ncid=ncid), & - "[ERR] Error in nf90_open for " // trim(filename)) - - write(ldt_logunit,*)'[INFO] Reading ', trim(filename) - - ! Get the varid for water_temp - call LDT_verify(nf90_inq_varid(ncid, "water_temp", & - water_temp_varid), & - "[ERR] Error in nf90_inq_varid for water_temp") - - ! Allocate the water_temp array - allocate(water_temp(nlon, nlat, 1, 1)) - - ! Pull from the ESPC-D file - call LDT_verify(nf90_get_var(ncid, water_temp_varid, water_temp), & - "[ERR] Error in nf90_get_var for water_temp") - - ! Close the file - call LDT_verify(nf90_close(ncid), & - "[ERR] Error in nf90_close for "// trim(filename)) - ! We need to interpolate to the LDT grid. First, copy to 1D array allocate(water_temp_1d(nlon*nlat*1*1)) water_temp_1d = -9999.0 From 97de95878706b4c8681c5dbd230976ad7256ef25 Mon Sep 17 00:00:00 2001 From: Eric Kemp Date: Wed, 31 Jul 2024 17:46:08 -0400 Subject: [PATCH 29/40] More fault tolerance changes. 
--- ldt/USAFSI/USAFSI_espcdMod.F90 | 83 +++++++++------------------------- 1 file changed, 22 insertions(+), 61 deletions(-) diff --git a/ldt/USAFSI/USAFSI_espcdMod.F90 b/ldt/USAFSI/USAFSI_espcdMod.F90 index 2754c4796..50088bfc4 100644 --- a/ldt/USAFSI/USAFSI_espcdMod.F90 +++ b/ldt/USAFSI/USAFSI_espcdMod.F90 @@ -34,7 +34,8 @@ module USAFSI_espcdMod ! Find ESPCD CICE file on file system subroutine find_espcd_cice_file(rootdir, region, & - yyyy, mm, dd, hh, filename) + yyyy, mm, dd, hh, filename, & + aice, nlon, nlat) ! Imports use netcdf @@ -54,6 +55,9 @@ subroutine find_espcd_cice_file(rootdir, region, & integer, intent(in) :: dd integer, intent(in) :: hh character*255, intent(out) :: filename + real, allocatable, intent(inout) :: aice(:,:,:) + integer, intent(out) :: nlon + integer, intent(out) :: nlat ! Locals integer :: julhr, julhr_orig @@ -62,18 +66,18 @@ subroutine find_espcd_cice_file(rootdir, region, & integer :: fh_local character*255 :: message (msglns) character*12 :: routine_name - integer :: nlat integer, parameter :: nlat_arc = 2501 integer, parameter :: nlat_ant = 1549 - integer, parameter :: nlon = 9000 integer :: ncid, aice_varid - real, allocatable :: aice(:,:,:) integer, allocatable :: dimids(:), lens(:) integer :: ndims logical :: good + logical :: first_time integer :: ierr integer :: i + nlon = 9000 + message = '' routine_name = 'find_espcd_cice_file' @@ -98,33 +102,18 @@ subroutine find_espcd_cice_file(rootdir, region, & call LDT_endrun() end if - call LDT_julhr_date(julhr, yyyy_local, mm_local, dd_local, & - hh_local) - call construct_espcd_cice_filename(rootdir, region, & - yyyy_local, mm_local, dd_local, hh_local, fh_local, filename) - - write(LDT_logunit,*) & - '------------------------------------------------------------------' - write(LDT_logunit,*)'[INFO] *** SEARCHING FOR ESPC-D CICE FOR ',& - trim(region),' REGION ***' - inquire(file=trim(filename), exist=file_exists) - if (file_exists) then - write(LDT_logunit,*)'[INFO] Will use ', 
trim(filename) - return - else - message(1) = '[WARN] CANNOT FIND FILE' - message(2) = '[WARN] PATH = ' // trim(filename) - call error_message(program_name, routine_name, message) - end if - - ! At this point, we are rolling back to earlier CICE file julhr_orig = julhr ! Start looping for earlier files + first_time = .true. do - write(LDT_logunit,*)'[WARN] Cannot find ', trim(filename) - fh_local = fh_local + 24 - julhr = julhr - 24 ! Roll back to previous 12Z cycle + + if (.not. first_time) then + fh_local = fh_local + 24 + julhr = julhr - 24 ! Roll back to previous 12Z cycle + end if + first_time = .false. + ! Give up after 5 days if ( (julhr_orig - julhr) > 24*5) then write(LDT_logunit,*)& @@ -253,7 +242,6 @@ subroutine find_espcd_cice_file(rootdir, region, & end if ! We have a winner. - deallocate(aice) ierr = nf90_close(ncid) write(LDT_logunit,*)'[INFO] Will use ', trim(filename) return @@ -532,7 +520,7 @@ subroutine process_espcd_sst(rootdir, nc, nr, landmask, sst, & ! Imports use LDT_coreMod, only: LDT_rc, LDT_domain - use LDT_logMod, only: LDT_verify, ldt_logunit, LDT_endrun + use LDT_logMod, only: LDT_verify, LDT_endrun use netcdf ! Defaults @@ -554,7 +542,6 @@ subroutine process_espcd_sst(rootdir, nc, nr, landmask, sst, & integer :: nlat integer :: nlon character*255 :: filename - integer :: ncid, water_temp_varid real, allocatable :: water_temp(:,:,:,:) real, allocatable :: water_temp_1d(:) real, allocatable :: sst_1d(:) @@ -790,9 +777,8 @@ subroutine process_espcd_cice_region(region, rootdir, nc, nr, & ! Locals integer, parameter :: nlat_arc = 2501 integer, parameter :: nlat_ant = 1549 - integer, parameter :: nlon = 9000 + integer :: nlon = 9000 character*255 :: filename - integer :: ncid, aice_varid real, allocatable :: aice(:,:,:) real, allocatable :: aice_1d(:) real, allocatable :: icecon_1d(:) @@ -808,11 +794,8 @@ subroutine process_espcd_cice_region(region, rootdir, nc, nr, & external :: upscaleByAveraging ! 
Sanity check the region - if (region .eq. 'arc') then - nlat = nlat_arc - else if (region .eq. 'ant') then - nlat = nlat_ant - else + if (region .ne. 'arc' .and. & + region .ne. 'ant') then write(LDT_logunit,*)'[ERR] Invalid ESPC-D region for cice: ' & // region write(LDT_logunit,*)'[ERR] Must be either arc or ant' @@ -822,35 +805,13 @@ subroutine process_espcd_cice_region(region, rootdir, nc, nr, & ! Find a valid file on the file system call find_espcd_cice_file(rootdir, region, yyyy, mm, dd, hh, & - filename) + filename, & + aice, nlon, nlat) if (trim(filename) == "NONE") then ierr = 1 return end if - ! Open the file - call LDT_verify(nf90_open(path=trim(filename), & - mode=nf90_nowrite, & - ncid=ncid), & - "[ERR] Error in nf90_open for " // trim(filename)) - write(ldt_logunit,*)'[INFO] Reading ', trim(filename) - - ! Get the varid for aice - call LDT_verify(nf90_inq_varid(ncid, "aice", aice_varid), & - "[ERR] Error in nf90_inq_varid for aice") - - ! Allocate the aice array - allocate(aice(nlon, nlat, 1)) - - ! Pull from the ESPC-D file - call LDT_verify(nf90_get_var(ncid, aice_varid, aice), & - "[ERR] Error in nf90_get_var for aice") - - ! Close the file - call LDT_verify(nf90_close(ncid), & - "[ERR] Error in nf90_close for "// trim(filename)) - - ! We need to interpolate to the LDT grid. 
First, copy to 1D array allocate(aice_1d(nlon*nlat*1)) aice_1d = -9999 allocate(lb(nlon*nlat*1)) From e19b973bf9a56db7d03841622ce96b63dad9859a Mon Sep 17 00:00:00 2001 From: Sarith Mahanama Date: Mon, 12 Aug 2024 10:16:40 -0400 Subject: [PATCH 30/40] 1) deleted a few blank lines and unnecessary /bin/rm lines in s2s_run.sh 2) added 2 new color pallettes to plot_utils.py 3) added Mollewide projections to plot_utils.py 4) added the option to plot 2 different colorbars for 2 different set of plots within a single page in plot_utils.py --- lis/utils/usaf/s2s/s2s_app/s2s_run.sh | 7 +- .../s2s/s2s_modules/s2splots/plot_utils.py | 118 ++++++++++++++++-- 2 files changed, 112 insertions(+), 13 deletions(-) diff --git a/lis/utils/usaf/s2s/s2s_app/s2s_run.sh b/lis/utils/usaf/s2s/s2s_app/s2s_run.sh index ca681f2f5..c592af411 100755 --- a/lis/utils/usaf/s2s/s2s_app/s2s_run.sh +++ b/lis/utils/usaf/s2s/s2s_app/s2s_run.sh @@ -99,6 +99,7 @@ print_walltimes(){ seconds_to_datetime() { date -d "@$1" '+%Y-%m-%dT%H:%M:%S' } + compute_elapse (){ local JOB_ID=$1 @@ -115,7 +116,6 @@ print_walltimes(){ min_start=$(datetime_to_seconds "${start_times[0]}") max_end=$(datetime_to_seconds "${end_times[0]}") - for start in "${start_times[@]}"; do local start_sec start_sec=$(datetime_to_seconds "$start") @@ -756,7 +756,6 @@ bcsd_fcst(){ # -------------------------------------------------------------------------- jobname=bcsd06 cmdfile="bcsd06.file" - /bin/rm bcsd06* python $LISHDIR/s2s_modules/bcsd_fcst/forecast_task_06.py -s $YYYY -e $YYYY -m $mmm -n $MM -c $BWD/$CFILE -w ${CWD} -p ${E2ESDIR} -t 1 -H 2 -j $jobname unset job_list @@ -792,7 +791,7 @@ bcsd_fcst(){ # ---------------------------------------------------------------------------- jobname=bcsd08 cmdfile="bcsd08.file" - /bin/rm bcsd08* + for model in $MODELS do python $LISHDIR/s2s_modules/bcsd_fcst/forecast_task_08.py -s $YYYY -e $YYYY -m $mmm -n $MM -c $BWD/$CFILE -w ${CWD} -p ${E2ESDIR} -t 1 -H 3 -M $model -j $jobname @@ -825,7 +824,7 @@ 
bcsd_fcst(){ jobname=bcsd09 cmdfile="bcsd09-10.file" - /bin/rm bcsd09* + python $LISHDIR/s2s_modules/bcsd_fcst/forecast_task_09.py -s $YYYY -e $YYYY -m $mmm -n $MM -M CFSv2 -c $BWD/$CFILE -w ${CWD} -p ${E2ESDIR} -j $jobname -t 1 -H 4 if [ $GROUP_JOBS == "Y" ]; then job_comm=`grep python ${jobname}_run.j | cut -d'|' -f1` diff --git a/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py b/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py index 766d5a89e..3885b267a 100644 --- a/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py +++ b/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py @@ -234,9 +234,10 @@ def load_table (table_key): [ 0,195,255], [ 0,227,255], [ 0,255,255], + [ 55,255,199], [255,255,255], [255,255,255], - [190,255, 67], + [120,255,135], [240,255, 15], [255,219, 0], [255,187, 0], @@ -425,6 +426,52 @@ def load_table (table_key): [255, 215, 0], [ 0, 0, 0], [119, 119, 119]], + 'clim_reanaly': [[255,240,242], + [237,216,240], + [217,191,237], + [198,166,232], + [179,141,228], + [179,141,228], + [130,114,207], + [91,85,187], + [51,56,166], + [7,26,145], + [55,87,180], + [99,145,216], + [142,201,250], + [165,212,251], + [189,224,253], + [212,235,254], + [235,245,255], + [255,255,255], + [255,255,255], + [255,237,217], + [255,210,163], + [255,183,108], + [255,156,51], + [255,129,0], + [211,84,0], + [166,38,0], + [121,0,0], + [155,0,0], + [189,0,0], + [222,0,0], + [255,0,0], + [255,112,96], + [255,172,156], + [255,225,205], + [255,240,241]], + 'CBR_wet': [[255, 255, 255], + [247, 252, 240], + [224, 243, 219], + [204, 235, 197], + [168, 221, 181], + [123, 204, 196], + [ 78, 179, 211], + [ 43, 140, 190], + [ 8, 104, 172], + [ 8, 64, 129], + [ 0, 32, 62]], 'cb_9step':[[255, 0, 0], [255, 128, 0], [255, 255, 0], @@ -625,8 +672,19 @@ def get_image(self, tile): def contours (_x, _y, nrows, ncols, var, color_palette, titles, domain, figure, \ under_over, min_val=None, max_val=None, fscale=None, levels=None, \ - stitle=None, clabel=None, 
cartopy_datadir=None): + stitle=None, clabel=None, cartopy_datadir=None, colorbar2=None, \ + projection=None): ''' plot contour maps''' + ''' + added 2nd colorbar option + colorbar2 = {levels: [], + begin : int (0-indexed) + color_palette: '' + cax_axes: [[], []] + orientation: [, ]' + clabel: ''} + + ''' cartopy_dir(cartopy_datadir) if fscale is None: fscale = FONT_SCALE @@ -643,8 +701,12 @@ def contours (_x, _y, nrows, ncols, var, color_palette, titles, domain, figure, if levels is None: if min_val is None: - min_val = np.floor(np.nanmin(var [var>0.])) + min_val = np.floor(np.nanmin(var)) max_val = np.ceil(np.nanmax(var)) + abs_max = max(abs(min_val), abs(max_val)) + min_val = -1.*abs_max + max_val = abs_max + levels = np.linspace(min_val, max_val, len(style_color)) cmap.set_under(under_over[0]) cmap.set_over(under_over[1]) @@ -652,15 +714,41 @@ def contours (_x, _y, nrows, ncols, var, color_palette, titles, domain, figure, nplots = len(titles) fig = plt.figure(figsize= figure_size(FIGWIDTH, domain, nrows, ncols)) gs_ = gridspec.GridSpec(nrows, ncols, wspace=0.1, hspace=0.1) - cax = fig.add_axes(cbar_axes) + + if colorbar2 is None: + cax = fig.add_axes(cbar_axes) + orientation = 'horizontal' + else: + cax_one = fig.add_axes(colorbar2['cax_axes'][0]) + cax_two = fig.add_axes(colorbar2['cax_axes'][1]) # plot maps for count_plot in range(nplots): - ax_ = fig.add_subplot(gs_[count_plot], projection=ccrs.PlateCarree()) + if colorbar2 is not None: + ''' invoke the 2nd colorbar if required ''' + if count_plot == colorbar2['begin']: + style_color = load_table(colorbar2['color_palette']) + color_arr = [] + for color in style_color: + rgb = [float(value) / 255 for value in color] + color_arr.append(rgb) + cmap = colors.LinearSegmentedColormap.from_list('my_palette', color_arr, N=256) + levels = colorbar2['levels'] + clabel = colorbar2['clabel'] + + if projection is None: + ax_ = fig.add_subplot(gs_[count_plot], projection=ccrs.PlateCarree()) + else: + ax_ = 
fig.add_subplot(gs_[count_plot], projection=ccrs.Mollweide()) + cs_ = plt.pcolormesh(_x, _y, var[count_plot,], norm=colors.BoundaryNorm(levels,ncolors=cmap.N, clip=False), cmap=cmap,zorder=3, alpha=0.8) - gl_ = ax_.gridlines(draw_labels=True) + + if projection is None: + gl_ = ax_.gridlines(draw_labels=True) + else: + ax_.gridlines(draw_labels=True, dms=True, x_inline=False, y_inline=False) gl_.top_labels = False gl_.bottom_labels = False gl_.left_labels = False @@ -685,13 +773,25 @@ def contours (_x, _y, nrows, ncols, var, color_palette, titles, domain, figure, if (domain[3] - domain[2]) < 180.: ax_.add_feature(cfeature.STATES, linestyle=':',linewidth=0.9, edgecolor='black', facecolor='none') + if colorbar2 is not None: + if count_plot < colorbar2['begin']: + cax = cax_one + orientation = colorbar2['orientation'][0] + else: + cax = cax_two + orientation = colorbar2['orientation'][1] + if under_over[0] == "white" and under_over[1] == "white": - cbar = fig.colorbar(cs_, cax=cax, orientation='horizontal', ticks=levels) + cbar = fig.colorbar(cs_, cax=cax, orientation=orientation, ticks=levels) + else: + cbar = fig.colorbar(cs_, cax=cax, orientation=orientation, ticks=levels,extend=EXTEND) + if orientation == 'horizontal': + cbar.ax.tick_params(labelsize=fscale*20, labelrotation=90) else: - cbar = fig.colorbar(cs_, cax=cax, orientation='horizontal', ticks=levels,extend=EXTEND) - cbar.ax.tick_params(labelsize=fscale*20, labelrotation=90) + cbar.ax.tick_params(labelsize=fscale*10, labelrotation=0) if clabel is not None: cbar.set_label(clabel, fontsize=fscale*30) + plt.savefig(figure, dpi=150, format='png', bbox_inches='tight') plt.close() From 7cf37e442b6c709023a8dea0879ef1e0ff8b1e15 Mon Sep 17 00:00:00 2001 From: Sarith Mahanama Date: Mon, 12 Aug 2024 13:14:22 -0400 Subject: [PATCH 31/40] Kristi's modifications pertaining to HPC-11 --- lis/utils/usaf/s2s/s2s_modules/shared/utils.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) mode change 100644 => 
100755 lis/utils/usaf/s2s/s2s_modules/shared/utils.py diff --git a/lis/utils/usaf/s2s/s2s_modules/shared/utils.py b/lis/utils/usaf/s2s/s2s_modules/shared/utils.py old mode 100644 new mode 100755 index a894b0231..0cbcbb07a --- a/lis/utils/usaf/s2s/s2s_modules/shared/utils.py +++ b/lis/utils/usaf/s2s/s2s_modules/shared/utils.py @@ -66,17 +66,17 @@ def job_script(s2s_configfile, jobfile, job_name, ntasks, hours, cwd, in_command _f.write('#SBATCH --nodes=1' + '\n') _f.write('#SBATCH --ntasks-per-node=' + str(ntasks) + '\n') _f.write('#SBATCH --time=' + hours + ':00:00' + '\n') - if group_jobs: - mpc = min(math.ceil(240 / ntasks), 80) - _f.write('#SBATCH --mem-per-cpu=' + str(mpc) + 'GB' + '\n') - else: - _f.write('#SBATCH --mem-per-cpu=40GB' + '\n') if 'discover' in platform.node() or 'borg' in platform.node(): _f.write('#SBATCH --constraint=' + cfg['SETUP']['CONSTRAINT'] + '\n') if 'mil' in cfg['SETUP']['CONSTRAINT']: _f.write('#SBATCH --partition=packable' + '\n') + if group_jobs: + mpc = min(math.ceil(240 / ntasks), 80) + _f.write('#SBATCH --mem-per-cpu=' + str(mpc) + 'GB' + '\n') + else: + _f.write('#SBATCH --mem-per-cpu=40GB' + '\n') + else: -# _f.write('#SBATCH --cluster-constraint=green' + '\n') _f.write('#SBATCH --cluster-constraint=' + cfg['SETUP']['CONSTRAINT'] + '\n') _f.write('#SBATCH --partition=batch' + '\n') _f.write('#SBATCH --exclusive' + '\n') From e54c885853dc6fadc47701094fa24b934359cca3 Mon Sep 17 00:00:00 2001 From: Sarith Mahanama Date: Mon, 12 Aug 2024 13:17:34 -0400 Subject: [PATCH 32/40] added mono_YOB color pallete --- lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py b/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py index 3885b267a..a03b8c61d 100644 --- a/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py +++ b/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py @@ -572,6 +572,16 @@ def load_table (table_key): 
rgb_list.append([rgb[0],rgb[1], rgb[2]]) tables['mono_green'] = rgb_list + num_steps = 26 + start_yellow = np.array([255, 255, 153])# Yellow + end_orange = np.array([255, 165, 0]) # Orange + end_brown = np.array([51, 25, 0]) # Brown + gradient_yellow_to_orange = np.linspace(start_yellow, end_orange, num_steps // 2, dtype=int) + gradient_orange_to_brown = np.linspace(end_orange, end_brown, num_steps // 2, dtype=int) + gradient_yellow_to_brown = np.concatenate((gradient_yellow_to_orange, gradient_orange_to_brown)) + rgb_list = gradient_yellow_to_brown.tolist() + tables['mono_YOB'] = rgb_list + if table_key[-1] == '_': ct_ = tables[table_key[:-1]] ct_.reverse() From ba2934dd40090c8caaad82963e27f38bd2df8b84 Mon Sep 17 00:00:00 2001 From: Sarith Mahanama Date: Tue, 13 Aug 2024 10:07:49 -0400 Subject: [PATCH 33/40] fixed a bug in Mollewide, added two new scripts to compute/plot weekly anomalies (for now they are not part of s2s_run.sh workflow) --- .../metrics_library/compute_weekly_anom.py | 142 ++++++++++++++++++ .../s2s/s2s_modules/s2splots/plot_utils.py | 5 +- .../s2s_modules/s2splots/plot_weekly_anom.py | 68 +++++++++ 3 files changed, 213 insertions(+), 2 deletions(-) create mode 100644 lis/utils/usaf/s2s/s2s_modules/s2smetric/metrics_library/compute_weekly_anom.py create mode 100644 lis/utils/usaf/s2s/s2s_modules/s2splots/plot_weekly_anom.py diff --git a/lis/utils/usaf/s2s/s2s_modules/s2smetric/metrics_library/compute_weekly_anom.py b/lis/utils/usaf/s2s/s2s_modules/s2smetric/metrics_library/compute_weekly_anom.py new file mode 100644 index 000000000..ecfd1a119 --- /dev/null +++ b/lis/utils/usaf/s2s/s2s_modules/s2smetric/metrics_library/compute_weekly_anom.py @@ -0,0 +1,142 @@ +from datetime import datetime, date +import glob +import os +import sys +import yaml +from dateutil.relativedelta import relativedelta +import numpy as np +import xarray as xr + +# pylint: disable=import-error +from metricslib import sel_var, compute_anomaly, compute_sanomaly +# pylint: 
enable=import-error +# pylint: disable=consider-using-f-string + +FCST_INIT_MON = int(sys.argv[1]) +TARGET_YEAR = int(sys.argv[2]) +NMME_MODEL = sys.argv[3] +CONFIGFILE = sys.argv[4] +BASEOUTDIR = sys.argv[5] +ANOM = sys.argv[6] + +# Load CONFIG file +with open(CONFIGFILE, 'r', encoding="utf-8") as file: + CONFIG = yaml.safe_load(file) +HYD_MODEL = CONFIG["EXP"]["lsmdir"] +DOMAIN_NAME = CONFIG["EXP"]["DOMAIN"] +CLIM_SYR = int(CONFIG["BCSD"]["clim_start_year"]) +CLIM_EYR = int(CONFIG["BCSD"]["clim_end_year"]) +BASEDIR = BASEOUTDIR + "/DYN_" + ANOM + "/" +METRIC_VARS = CONFIG["POST"]["metric_vars"] +HINDCASTS = CONFIG["SETUP"]["E2ESDIR"] + '/hindcast/s2spost/' + '{:02d}/'.format(FCST_INIT_MON) +FORECASTS = "./s2spost/" + +OUTDIR = BASEDIR + '/' + HYD_MODEL +if not os.path.exists(OUTDIR): + os.makedirs(OUTDIR, exist_ok=True) + +OUTFILE_TEMPLATE = '{}/{}_{}_{}_init_weekly_{:02d}_{:04d}.nc' +TARGET_INFILE_TEMPLATE = \ + '{}/{:04d}{:02d}/{}/PS.557WW_SC.U_DI.C_GP.LIS-S2S-{}_GR.C0P25DEG_AR.{}_' \ + 'PA.ALL_DD.{:04d}{:02d}01_DT.0000_FD.{}_DT.0000_DF.NC' + +CLIM_INFILE_TEMPLATE = \ + '{}/????{:02d}/{}/PS.557WW_SC.U_DI.C_GP.LIS-S2S-{}_GR.C0P25DEG_AR.{}_' \ + 'PA.ALL_DD.*{:02d}01_DT.0000_FD.*{:02d}{:02d}_DT.0000_DF.NC' + +LEAD_WEEKS = 6 +#for var_name in METRIC_VARS: +for var_name in ['AirT']: + OUTFILE = OUTFILE_TEMPLATE.format(OUTDIR, NMME_MODEL, \ + var_name, ANOM, FCST_INIT_MON, TARGET_YEAR) + + CURRENTDATE = date(TARGET_YEAR, FCST_INIT_MON, 2) + for lead in range(LEAD_WEEKS): + fcast_list = [] + clim_list = [] + print(f"[INFO] Computing {var_name} forecast anomaly for week {lead}") + for count_days in range(7): + # processing climatology + INFILE = glob.glob(CLIM_INFILE_TEMPLATE.format(HINDCASTS, \ + FCST_INIT_MON, NMME_MODEL, \ + NMME_MODEL.upper(), DOMAIN_NAME, \ + FCST_INIT_MON, \ + CURRENTDATE.month, CURRENTDATE.day)) + + + day_xr = xr.open_mfdataset(INFILE, combine='by_coords') + sel_cim_data = day_xr.sel(time= \ + (day_xr.coords['time.year'] >= \ + CLIM_SYR) & 
(day_xr.coords['time.year'] <= \ + CLIM_EYR)) + clim_list.append(sel_var(sel_cim_data, var_name, HYD_MODEL).mean(dim = ['time','ensemble'], skipna = True)) + + # reading forecast + INFILE = TARGET_INFILE_TEMPLATE.format(FORECASTS, \ + TARGET_YEAR, FCST_INIT_MON, NMME_MODEL, \ + NMME_MODEL.upper(), DOMAIN_NAME, \ + TARGET_YEAR, FCST_INIT_MON, \ + CURRENTDATE.strftime("%Y%m%d")) + print(f"[INFO] {INFILE}") + fcast_list.append(INFILE) + CURRENTDATE += relativedelta(days=1) + + weekly_clim = xr.concat(clim_list, dim='day').mean(dim='day') + fcst_xr = xr.open_mfdataset(fcast_list, combine='by_coords') + fcst_da = sel_var(fcst_xr, var_name, HYD_MODEL).mean(dim = ['time'], skipna = True) + + # Step-3 loop through each grid cell and convert data into anomaly + # Defining array to store anomaly data + lat_count, lon_count, ens_count = \ + len(fcst_xr.coords['lat']), \ + len(fcst_xr.coords['lon']), \ + len(fcst_xr.coords['ensemble']) + + # 4 members in anomaly output and so on. + if lead == 0: + all_anom = np.ones((ens_count, LEAD_WEEKS, lat_count, lon_count))*-9999 + + print('[INFO] Converting data into anomaly') + if (not np.array_equal(weekly_clim.lat.values, fcst_da.lat.values)) or \ + (not np.array_equal(weekly_clim.lon.values, fcst_da.lon.values)): + weekly_clim = weekly_clim.assign_coords({"lon": fcst_da.lon.values, + "lat": fcst_da.lat.values}) + + if ANOM == 'ANOM': + this_anom = xr.apply_ufunc( + compute_anomaly, + fcst_da.chunk({"lat": "auto", "lon": "auto"}).compute(), + weekly_clim.chunk({"lat": "auto", "lon": "auto"}).compute(), + input_core_dims=[['ensemble',],[]], + exclude_dims=set(('ensemble',)), + output_core_dims=[['ensemble',]], + vectorize=True, # loop over non-core dims + dask="forbidden", + output_dtypes=[np.float64]) + + for ens in range(ens_count): + all_anom[ens, lead, :, :] = this_anom [:,:,ens] + + + ### Step-4 Writing output file + all_anom = np.ma.masked_array(all_anom, mask=(all_anom == -9999)) + + ## Creating an latitude and longitude array 
based on locations of corners + lats = np.arange(fcst_xr.attrs['SOUTH_WEST_CORNER_LAT'], \ + fcst_xr.attrs['SOUTH_WEST_CORNER_LAT'] + \ + (lat_count*0.25), 0.25) + lons = np.arange(fcst_xr.attrs['SOUTH_WEST_CORNER_LON'], \ + fcst_xr.attrs['SOUTH_WEST_CORNER_LON'] + \ + (lon_count*0.25), 0.25) + anom_xr = xr.Dataset() + anom_xr['anom'] = (('ens', 'time', 'latitude', 'longitude'), all_anom) + anom_xr.coords['latitude'] = (('latitude'), lats) + anom_xr.coords['longitude'] = (('longitude'), lons) + anom_xr.coords['time'] = (('time'), np.arange(0, LEAD_WEEKS, dtype=int)) + anom_xr.coords['ens'] = (('ens'), np.arange(0, ens_count, dtype=int)) + print(f"[INFO] Writing {OUTFILE}") + anom_xr.to_netcdf(OUTFILE, format="NETCDF4", + encoding = {'anom': {"zlib":True, "complevel":6, "shuffle":True, "missing_value": -9999., "_FillValue": -9999.}}) + + + diff --git a/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py b/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py index a03b8c61d..f3edae0f5 100644 --- a/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py +++ b/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py @@ -753,12 +753,13 @@ def contours (_x, _y, nrows, ncols, var, color_palette, titles, domain, figure, cs_ = plt.pcolormesh(_x, _y, var[count_plot,], norm=colors.BoundaryNorm(levels,ncolors=cmap.N, clip=False), - cmap=cmap,zorder=3, alpha=0.8) + cmap=cmap,zorder=3, alpha=0.8, + transform=ccrs.PlateCarree()) if projection is None: gl_ = ax_.gridlines(draw_labels=True) else: - ax_.gridlines(draw_labels=True, dms=True, x_inline=False, y_inline=False) + gl_ = ax_.gridlines(draw_labels=True, dms=True, x_inline=False, y_inline=False) gl_.top_labels = False gl_.bottom_labels = False gl_.left_labels = False diff --git a/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_weekly_anom.py b/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_weekly_anom.py new file mode 100644 index 000000000..ab806d5b8 --- /dev/null +++ b/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_weekly_anom.py @@ 
-0,0 +1,68 @@ +import os +import calendar +from datetime import datetime, date +from dateutil.relativedelta import relativedelta +import argparse +import xarray as xr +import numpy as np +import yaml +# pylint: disable=import-error +import plot_utils +# pylint: enable=import-error + +parser = argparse.ArgumentParser() +parser.add_argument('-y', '--fcst_year', required=True, help='forecast start year') +parser.add_argument('-m', '--fcst_mon', required=True, help= 'forecast end year') +parser.add_argument('-c', '--configfile', required=True, help='config file name') +parser.add_argument('-w', '--cwd', required=True, help='current working directory') + +args = parser.parse_args() +configfile = args.configfile +fcst_year = int(args.fcst_year) +fcst_mon = int(args.fcst_mon) +cwd = args.cwd + +# load config file +with open(configfile, 'r', encoding="utf-8") as file: + config = yaml.safe_load(file) + +plotdir_template = cwd + '/s2splots/{:04d}{:02d}/' + config["EXP"]["lsmdir"] + '/' +plotdir = plotdir_template.format(fcst_year, fcst_mon) +if not os.path.exists(plotdir): + os.makedirs(plotdir) + +infile_template = '{}/{}_ANOM_init_weekly_{:02d}_{:04d}.nc' +figure_template = '{}/NMME_{}_weekly_anom_{}-{}.png' + +lead_week = [0, 1, 2, 3, 4, 5] + +data_dir_template = cwd + '/s2smetric/{:04d}{:02d}/DYN_ANOM/' + \ + config["EXP"]["lsmdir"] + '/' +data_dir = data_dir_template.format(fcst_year, fcst_mon) + +nrows = 1 +ncols = 1 +clabel = 'Anomaly (K)' +cartopy_dir = config['SETUP']['supplementarydir'] + '/s2splots/share/cartopy/' + +#for var_name in config_["POST"]["metric_vars"]: +for var_name in ['AirT']: + infile = infile_template.format(data_dir, '*_' + var_name, fcst_mon, fcst_year) + print("Reading infile {}".format(infile)) + anom = xr.open_mfdataset(infile, concat_dim='ens', combine='nested') + median_anom = np.median(anom.anom.values, axis=0) + + BEGDATE = date(fcst_year, fcst_mon, 1) + for lead in lead_week: + ENDDATE = BEGDATE + relativedelta(days=6) + titles = 
[var_name + ' '+ BEGDATE.strftime("%Y%m%d") + '-' + ENDDATE.strftime("%Y%m%d")] + plot_arr = median_anom[lead, ] + print(np.nanmin(plot_arr), np.nanmax(plot_arr)) + figure = figure_template.format(plotdir, var_name, BEGDATE.strftime("%Y%m%d"), ENDDATE.strftime("%Y%m%d")) + print(BEGDATE.strftime("%Y%m%d"),' ' , ENDDATE.strftime("%Y%m%d")) + plot_utils.contours (anom.longitude.values, anom.latitude.values, nrows, + ncols, np.expand_dims(plot_arr, axis=0), 'clim_reanaly', titles, [-90, 90, -180., 180.], + figure, ["white", "white"], + fscale=0.8, clabel=clabel, levels=np.arange(-32,32+1,2), + cartopy_datadir=cartopy_dir, projection="mol") + BEGDATE += relativedelta(days=7) From 5fe59b27ab730d75b5a575d4089f18fd446a8301 Mon Sep 17 00:00:00 2001 From: Sarith Mahanama Date: Tue, 13 Aug 2024 15:44:05 -0400 Subject: [PATCH 34/40] added polar steorographic projection --- .../s2s/s2s_modules/s2splots/plot_utils.py | 33 ++++++++++++++++--- 1 file changed, 29 insertions(+), 4 deletions(-) diff --git a/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py b/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py index f3edae0f5..7e7f4f7e4 100644 --- a/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py +++ b/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py @@ -38,6 +38,7 @@ import cartopy.feature as cfeature import cartopy.io.img_tiles as cimgt import shapely.geometry as sgeom +import cartopy.mpl.ticker as cticker import requests import dask import PIL @@ -722,7 +723,11 @@ def contours (_x, _y, nrows, ncols, var, color_palette, titles, domain, figure, cmap.set_over(under_over[1]) nplots = len(titles) - fig = plt.figure(figsize= figure_size(FIGWIDTH, domain, nrows, ncols)) + if projection[0] == 'polar': + fig = plt.figure(figsize= (FIGWIDTH * ncols, FIGWIDTH * nrows)) + else: + fig = plt.figure(figsize= figure_size(FIGWIDTH, domain, nrows, ncols)) + gs_ = gridspec.GridSpec(nrows, ncols, wspace=0.1, hspace=0.1) if colorbar2 is None: @@ -748,9 +753,12 @@ def contours (_x, _y, 
nrows, ncols, var, color_palette, titles, domain, figure, if projection is None: ax_ = fig.add_subplot(gs_[count_plot], projection=ccrs.PlateCarree()) - else: + elif projection[0] == "mol": ax_ = fig.add_subplot(gs_[count_plot], projection=ccrs.Mollweide()) - + elif projection[0] == 'polar': + ax_ = fig.add_subplot(gs_[count_plot], projection=ccrs.Stereographic(central_latitude=projection[1])) + ax_.set_extent([domain[2], domain[3], domain[0], domain[1]], crs=ccrs.PlateCarree()) + cs_ = plt.pcolormesh(_x, _y, var[count_plot,], norm=colors.BoundaryNorm(levels,ncolors=cmap.N, clip=False), cmap=cmap,zorder=3, alpha=0.8, @@ -759,12 +767,28 @@ def contours (_x, _y, nrows, ncols, var, color_palette, titles, domain, figure, if projection is None: gl_ = ax_.gridlines(draw_labels=True) else: - gl_ = ax_.gridlines(draw_labels=True, dms=True, x_inline=False, y_inline=False) + gl_ = ax_.gridlines(draw_labels=True, dms=True, x_inline=False, y_inline=False, zorder=4) + + if projection[0] == 'polar': + gl_.xformatter = cticker.LongitudeFormatter() + gl_.yformatter = cticker.LatitudeFormatter() + gl_.linewidth = 1 + gl_.color = 'gray' + gl_.linestyle = '--' + gl_.top_labels = False gl_.bottom_labels = False gl_.left_labels = False gl_.right_labels = False + if projection[0] == 'polar': + gl_.top_labels = False + gl_.bottom_labels = True + gl_.left_labels = True + gl_.right_labels = True + gl_.xformatter = cticker.LongitudeFormatter() + gl_.yformatter = cticker.LatitudeFormatter() + plt.title(titles[count_plot], fontsize=fscale*FONT_SIZE2) if np.mod (count_plot, ncols) == 0: @@ -784,6 +808,7 @@ def contours (_x, _y, nrows, ncols, var, color_palette, titles, domain, figure, if (domain[3] - domain[2]) < 180.: ax_.add_feature(cfeature.STATES, linestyle=':',linewidth=0.9, edgecolor='black', facecolor='none') + if colorbar2 is not None: if count_plot < colorbar2['begin']: cax = cax_one From 0aaf002a35f3770956c4b8d313a8e402aebb9039 Mon Sep 17 00:00:00 2001 From: Sarith Mahanama Date: 
Wed, 14 Aug 2024 10:51:31 -0400 Subject: [PATCH 35/40] a bug fix in polar stereographic plotting --- .../s2s/s2s_modules/s2splots/plot_utils.py | 34 ++++++++++--------- 1 file changed, 18 insertions(+), 16 deletions(-) diff --git a/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py b/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py index 7e7f4f7e4..60387151e 100644 --- a/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py +++ b/lis/utils/usaf/s2s/s2s_modules/s2splots/plot_utils.py @@ -723,7 +723,9 @@ def contours (_x, _y, nrows, ncols, var, color_palette, titles, domain, figure, cmap.set_over(under_over[1]) nplots = len(titles) - if projection[0] == 'polar': + if projection is None: + fig = plt.figure(figsize= figure_size(FIGWIDTH, domain, nrows, ncols)) + elif projection[0] == 'polar': fig = plt.figure(figsize= (FIGWIDTH * ncols, FIGWIDTH * nrows)) else: fig = plt.figure(figsize= figure_size(FIGWIDTH, domain, nrows, ncols)) @@ -768,27 +770,27 @@ def contours (_x, _y, nrows, ncols, var, color_palette, titles, domain, figure, gl_ = ax_.gridlines(draw_labels=True) else: gl_ = ax_.gridlines(draw_labels=True, dms=True, x_inline=False, y_inline=False, zorder=4) - - if projection[0] == 'polar': - gl_.xformatter = cticker.LongitudeFormatter() - gl_.yformatter = cticker.LatitudeFormatter() - gl_.linewidth = 1 - gl_.color = 'gray' - gl_.linestyle = '--' + if projection[0] == 'polar': + gl_.xformatter = cticker.LongitudeFormatter() + gl_.yformatter = cticker.LatitudeFormatter() + gl_.linewidth = 1 + gl_.color = 'gray' + gl_.linestyle = '--' gl_.top_labels = False gl_.bottom_labels = False gl_.left_labels = False gl_.right_labels = False - if projection[0] == 'polar': - gl_.top_labels = False - gl_.bottom_labels = True - gl_.left_labels = True - gl_.right_labels = True - gl_.xformatter = cticker.LongitudeFormatter() - gl_.yformatter = cticker.LatitudeFormatter() - + if projection is not None: + if projection[0] == 'polar': + gl_.top_labels = False + 
gl_.bottom_labels = True + gl_.left_labels = True + gl_.right_labels = True + gl_.xformatter = cticker.LongitudeFormatter() + gl_.yformatter = cticker.LatitudeFormatter() + plt.title(titles[count_plot], fontsize=fscale*FONT_SIZE2) if np.mod (count_plot, ncols) == 0: From 200ef186b0b507da986730ec9262e50aa3a2bf90 Mon Sep 17 00:00:00 2001 From: Eric Kemp Date: Thu, 22 Aug 2024 17:14:01 -0400 Subject: [PATCH 36/40] Improved netCDF processing wrapping. --- ldt/USAFSI/USAFSI_espcdMod.F90 | 65 ++++++++++++++++++++++++++++++++++ 1 file changed, 65 insertions(+) diff --git a/ldt/USAFSI/USAFSI_espcdMod.F90 b/ldt/USAFSI/USAFSI_espcdMod.F90 index 50088bfc4..fc420a0a4 100644 --- a/ldt/USAFSI/USAFSI_espcdMod.F90 +++ b/ldt/USAFSI/USAFSI_espcdMod.F90 @@ -33,6 +33,7 @@ module USAFSI_espcdMod contains ! Find ESPCD CICE file on file system +#if (defined USE_NETCDF3 || defined USE_NETCDF4) subroutine find_espcd_cice_file(rootdir, region, & yyyy, mm, dd, hh, filename, & aice, nlon, nlat) @@ -250,6 +251,39 @@ subroutine find_espcd_cice_file(rootdir, region, & end subroutine find_espcd_cice_file +#else + + ! Dummy version w/o netCDF + subroutine find_espcd_cice_file(rootdir, region, & + yyyy, mm, dd, hh, filename, & + aice, nlon, nlat) + + ! Imports + use LDT_logMod, only: LDT_logunit, LDT_endrun + + ! Defaults + implicit none + + ! Arguments + character(len=*), intent(in) :: rootdir + character*3, intent(in) :: region + integer, intent(in) :: yyyy + integer, intent(in) :: mm + integer, intent(in) :: dd + integer, intent(in) :: hh + character*255, intent(out) :: filename + real, allocatable, intent(inout) :: aice(:,:,:) + integer, intent(out) :: nlon + integer, intent(out) :: nlat + + write(LDT_logunit,*) & + '[ERR] LDT was compiled without netCDF support!' + write(LDT_logunit,*) "[ERR] Recompile and try again!" + call LDT_endrun() + + end subroutine find_espcd_cice_file +#endif + ! 
Builds path to ESPC-D CICE netcdf file subroutine construct_espcd_cice_filename(rootdir, region, & yyyy, mm, dd, hh, fh, filename) @@ -280,6 +314,7 @@ subroutine construct_espcd_cice_filename(rootdir, region, & end subroutine construct_espcd_cice_filename ! Find ESPC-D SST file on file system +#if (defined USE_NETCDF3 || defined USE_NETCDF4) subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, & filename, water_temp, nlat, nlon) @@ -484,6 +519,36 @@ subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, & end subroutine find_espcd_sst_file +#else + ! Dummy version w/o netCDF + subroutine find_espcd_sst_file(rootdir, yyyy, mm, dd, hh, & + filename, water_temp, nlat, nlon) + + ! Imports + use LDT_logMod, only: LDT_logunit, LDT_endrun + + ! Defaults + implicit none + + ! Arguments + character(len=*), intent(in) :: rootdir + integer, intent(in) :: yyyy + integer, intent(in) :: mm + integer, intent(in) :: dd + integer, intent(in) :: hh + character*255, intent(inout) :: filename + real, allocatable, intent(inout) :: water_temp(:,:,:,:) + integer, intent(out) :: nlat + integer, intent(out) :: nlon + + write(LDT_logunit,*) & + '[ERR] LDT was compiled without netCDF support!' + write(LDT_logunit,*) "[ERR] Recompile and try again!" + call LDT_endrun() + + end subroutine find_espcd_sst_file +#endif + ! Builds path to ESPC-D SST netcdf file subroutine construct_espcd_sst_filename(rootdir, & yyyy, mm, dd, hh, fh, filename) From 6bb4fea61c82e8ec70f5cf7e020510fc3de87b82 Mon Sep 17 00:00:00 2001 From: Eric Kemp Date: Mon, 26 Aug 2024 10:22:39 -0400 Subject: [PATCH 37/40] Updated sample USAFSI config settings for ESPC-D entries. --- ldt/USAFSI/ldt.config.sample | 3 +++ 1 file changed, 3 insertions(+) diff --git a/ldt/USAFSI/ldt.config.sample b/ldt/USAFSI/ldt.config.sample index 863af756a..de409652d 100644 --- a/ldt/USAFSI/ldt.config.sample +++ b/ldt/USAFSI/ldt.config.sample @@ -98,8 +98,11 @@ USAFSI elevQC difference threshold (m): 400. 
USAFSI skewed backQC snow depth threshold (m): 0.4 # Other new settings USAFSI bogus climatology snow depth value (m): 0.2 +USAFSI source of ocean data: "ESPC-D" USAFSI GOFS SST data directory: ./USAFSIIN/GOFS USAFSI GOFS CICE data directory: ./USAFSIIN/GOFS +USAFSI ESPC-D SST data directory: ./USAFSIIN/ESPC-D +USAFSI ESPC-D CICE data directory: ./USAFSIIN/ESPC-D USAFSI LIS GRIB2 data directory: ./USAFSIIN/LIS_T2 USAFSI LIS GRIB2 security class: U USAFSI LIS GRIB2 data category: C From ae48b55551467e3e54c3c45bb112a1e4b0691898 Mon Sep 17 00:00:00 2001 From: "James V. Geiger" Date: Mon, 26 Aug 2024 20:48:01 -0400 Subject: [PATCH 38/40] Add ESPC-D related documentation --- ldt/configs/ldt.config.adoc | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/ldt/configs/ldt.config.adoc b/ldt/configs/ldt.config.adoc index bea6392f3..105844a3f 100644 --- a/ldt/configs/ldt.config.adoc +++ b/ldt/configs/ldt.config.adoc @@ -4588,10 +4588,24 @@ USAFSI skewed backQC snow depth threshold (m): 0.4 `USAFSI bogus climatology snow depth value (m):` bogus value for climatology if not available +`USAFSI source of ocean data:` specifies the source of the USAFSI ocean data. Acceptable values are: + +[cols="<,<",] +|=== +|Value |Description + +|GOFS | Global Ocean Forecast System +|ESPC-D | Earth System Prediction Capability - Deterministic +|=== + `USAFSI GOFS SST data directory:` directory with GOFS sea surface temperature netCDF files `USAFSI GOFS CICE data directory:` directory with GOFS sea ice netCDF files +`USAFSI ESPC-D SST data directory:` directory with ESPC-D sea surface temperature netCDF files + +`USAFSI ESPC-D CICE data directory:` directory with ESPC-D sea ice netCDF files + `USAFSI LIS GRIB2 data directory:` directory with LIS GRIB2 2-m temperature analyses `USAFSI LIS GRIB2 security class:` used for constructing LIS GRIB2 filename @@ -4613,8 +4627,9 @@ USAFSI skewed backQC snow depth threshold (m): 0.4 .Example _ldt.config_ entry .... 
USAFSI bogus climatology snow depth value (m): 0.2 -USAFSI GOFS SST data directory: ./SNODEPIN//GOFS -USAFSI GOFS CICE data directory: ./SNODEPIN//GOFS +USAFSI source of ocean data: GOFS +USAFSI GOFS SST data directory: ./SNODEPIN/GOFS +USAFSI GOFS CICE data directory: ./SNODEPIN/GOFS USAFSI LIS GRIB2 data directory: /discover/nobackup/emkemp/AFWA/data/LIS_GRIB2 USAFSI LIS GRIB2 security class: U USAFSI LIS GRIB2 data category: C From f45450cae2fdf87d0acc9c533f2cc5ccf876e021 Mon Sep 17 00:00:00 2001 From: "James V. Geiger" Date: Tue, 27 Aug 2024 09:27:34 -0400 Subject: [PATCH 39/40] Add sample config to documentation --- ldt/configs/ldt.config.adoc | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/ldt/configs/ldt.config.adoc b/ldt/configs/ldt.config.adoc index 105844a3f..a2ff78577 100644 --- a/ldt/configs/ldt.config.adoc +++ b/ldt/configs/ldt.config.adoc @@ -4627,14 +4627,16 @@ USAFSI skewed backQC snow depth threshold (m): 0.4 .Example _ldt.config_ entry .... 
USAFSI bogus climatology snow depth value (m): 0.2 -USAFSI source of ocean data: GOFS -USAFSI GOFS SST data directory: ./SNODEPIN/GOFS -USAFSI GOFS CICE data directory: ./SNODEPIN/GOFS -USAFSI LIS GRIB2 data directory: /discover/nobackup/emkemp/AFWA/data/LIS_GRIB2 -USAFSI LIS GRIB2 security class: U -USAFSI LIS GRIB2 data category: C +USAFSI source of ocean data: "ESPC-D" +USAFSI GOFS SST data directory: ./USAFSIIN/GOFS +USAFSI GOFS CICE data directory: ./USAFSIIN/GOFS +USAFSI ESPC-D SST data directory: ./USAFSIIN/ESPC-D +USAFSI ESPC-D CICE data directory: ./USAFSIIN/ESPC-D +USAFSI LIS GRIB2 data directory: ./USAFSIIN/LIS_T2 +USAFSI LIS GRIB2 security class: U +USAFSI LIS GRIB2 data category: C USAFSI LIS GRIB2 data resolution: C0P09DEG -USAFSI LIS GRIB2 area of data: GLOBAL +USAFSI LIS GRIB2 area of data: GLOBAL USAFSI GALWEM root directory: USAF_FORCING USAFSI GALWEM subdirectory: GALWEM USAFSI GALWEM use timestamp directories: 1 From 3bc09e483c5e24bce43d15233dc5fffd918dd2f1 Mon Sep 17 00:00:00 2001 From: "James V. 
Geiger" Date: Tue, 27 Aug 2024 10:55:20 -0400 Subject: [PATCH 40/40] Update revision history for LDT Users' Guide for LISF 557WW 7.5.18 release --- docs/LDT_users_guide/LDT_users_guide.adoc | 4 ++-- docs/LDT_users_guide/revision_table.adoc | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/LDT_users_guide/LDT_users_guide.adoc b/docs/LDT_users_guide/LDT_users_guide.adoc index eff676390..6acd26e44 100644 --- a/docs/LDT_users_guide/LDT_users_guide.adoc +++ b/docs/LDT_users_guide/LDT_users_guide.adoc @@ -1,6 +1,6 @@ = Land Data Toolkit (LDT): LDT {lisfrevision} Users`' Guide -:revnumber: 2.3 -:revdate: 05 Jul 2023 +:revnumber: 2.5 +:revdate: 27 Aug 2024 :doctype: book :sectnums: :toc: diff --git a/docs/LDT_users_guide/revision_table.adoc b/docs/LDT_users_guide/revision_table.adoc index 4a75992f0..fbc8744bd 100644 --- a/docs/LDT_users_guide/revision_table.adoc +++ b/docs/LDT_users_guide/revision_table.adoc @@ -2,6 +2,7 @@ |==== | Revision | Summary of Changes | Date +| 2.5 | LISF 557WW 7.5.18 release | Aug 27, 2024 | 2.3 | LISF 557WW 7.5.9 release | Jul 05, 2023 | 2.2 | LISF 557WW 7.5.8 release | Jun 30, 2023 | 2.1 | LISF 557WW 7.5.0 release | Nov 30, 2022