diff --git a/guide/images/WRF_tutorial_domain.png b/guide/images/WRF_tutorial_domain.png new file mode 100644 index 0000000000..f324bff661 Binary files /dev/null and b/guide/images/WRF_tutorial_domain.png differ diff --git a/guide/images/WRF_tutorial_linkobs1.png b/guide/images/WRF_tutorial_linkobs1.png index c2d0fbae5e..a397d448f6 100644 Binary files a/guide/images/WRF_tutorial_linkobs1.png and b/guide/images/WRF_tutorial_linkobs1.png differ diff --git a/guide/images/WRF_tutorial_linkobs2.png b/guide/images/WRF_tutorial_linkobs2.png index fca2003e2b..74223a0614 100644 Binary files a/guide/images/WRF_tutorial_linkobs2.png and b/guide/images/WRF_tutorial_linkobs2.png differ diff --git a/guide/images/WRF_tutorial_ncview1.png b/guide/images/WRF_tutorial_ncview1.png index a89c1b4905..bdd927be74 100644 Binary files a/guide/images/WRF_tutorial_ncview1.png and b/guide/images/WRF_tutorial_ncview1.png differ diff --git a/guide/images/WRF_tutorial_ncview2.png b/guide/images/WRF_tutorial_ncview2.png index 1898991e43..2a2ae6eb64 100644 Binary files a/guide/images/WRF_tutorial_ncview2.png and b/guide/images/WRF_tutorial_ncview2.png differ diff --git a/guide/images/WRF_tutorial_oneline1.png b/guide/images/WRF_tutorial_oneline1.png index a0e6b980c8..e5ce11f7b7 100644 Binary files a/guide/images/WRF_tutorial_oneline1.png and b/guide/images/WRF_tutorial_oneline1.png differ diff --git a/guide/images/WRF_tutorial_oneline2.png b/guide/images/WRF_tutorial_oneline2.png index a648ab7bd8..55dfbd1e94 100644 Binary files a/guide/images/WRF_tutorial_oneline2.png and b/guide/images/WRF_tutorial_oneline2.png differ diff --git a/guide/images/WRF_tutorial_profile1.png b/guide/images/WRF_tutorial_profile1.png index aef98f2436..18519fc3b8 100644 Binary files a/guide/images/WRF_tutorial_profile1.png and b/guide/images/WRF_tutorial_profile1.png differ diff --git a/guide/images/WRF_tutorial_profile2.png b/guide/images/WRF_tutorial_profile2.png index bf040cee53..d7cc196a0b 100644 Binary files 
a/guide/images/WRF_tutorial_profile2.png and b/guide/images/WRF_tutorial_profile2.png differ diff --git a/guide/images/WRF_tutorial_radar1.png b/guide/images/WRF_tutorial_radar1.png new file mode 100644 index 0000000000..bff7616d92 Binary files /dev/null and b/guide/images/WRF_tutorial_radar1.png differ diff --git a/guide/images/WRF_tutorial_radar2.png b/guide/images/WRF_tutorial_radar2.png new file mode 100644 index 0000000000..442ecd677f Binary files /dev/null and b/guide/images/WRF_tutorial_radar2.png differ diff --git a/guide/images/WRF_tutorial_radiosonde_obs1.png b/guide/images/WRF_tutorial_radiosonde_obs1.png new file mode 100644 index 0000000000..a7cf3425da Binary files /dev/null and b/guide/images/WRF_tutorial_radiosonde_obs1.png differ diff --git a/guide/images/WRF_tutorial_radiosonde_obs2.png b/guide/images/WRF_tutorial_radiosonde_obs2.png new file mode 100644 index 0000000000..f13750c2f3 Binary files /dev/null and b/guide/images/WRF_tutorial_radiosonde_obs2.png differ diff --git a/guide/images/WRF_tutorial_surface_obs.png b/guide/images/WRF_tutorial_surface_obs.png deleted file mode 100644 index 107b549c7e..0000000000 Binary files a/guide/images/WRF_tutorial_surface_obs.png and /dev/null differ diff --git a/models/wrf/shell_scripts/add_bank_perts.ncl b/models/wrf/shell_scripts/add_bank_perts.ncl index 964cb61f2d..e283a06190 100644 --- a/models/wrf/shell_scripts/add_bank_perts.ncl +++ b/models/wrf/shell_scripts/add_bank_perts.ncl @@ -1,6 +1,6 @@ ; Simple utility to add perturbation fields from the ; perturbation bank files to a provided wrfinput file. 
-; consistent with use in the advance_model.csh script +; consistent with use in the advance_model.sh script ; where we are substituting this call for running ; da_wrfvar.exe, so the wrfinput file to perturb ; should be called 'wrfinput_next', and is the mean @@ -16,15 +16,23 @@ begin ens_mem_num = ""+MEM_NUM pert_bank_path = PERTS_DIR -; perturbation scaling: +;perturbation scaling: default tutorial settings scale_T = 1.3 scale_U = 1.3 scale_V = 1.3 scale_Q = 1.3 scale_M = 0.6 +; Shuts off perturbations, only used for forecast mode +;perturbation scaling: +; scale_T = 0.0 +; scale_U = 0.0 +; scale_V = 0.0 +; scale_Q = 0.0 +; scale_M = 0.0 + ; for random pert, pick from larger bank - bank_size = 100 + bank_size = 60 con = bank_size / 32766.0 ; 32766.0 forces a 0.0 to 1.0 range ; get a seed wallClock = stringtoint(systemfunc("date -u +%N")) @@ -45,7 +53,7 @@ begin ;For WRFv4 or later prognostic temp variable is THM - pert_fields = (/"U", "V", "T", "QVAPOR","MU"/) + pert_fields = (/"U", "V", "THM", "QVAPOR","MU"/) wrf_fields = (/"U", "V", "THM", "QVAPOR","MU"/) pert_scale = (/scale_U,scale_V,scale_T,scale_Q,scale_M/) nperts = dimsizes(pert_fields) diff --git a/models/wrf/shell_scripts/assim_advance.sh b/models/wrf/shell_scripts/assim_advance.sh new file mode 100755 index 0000000000..58c8a6680f --- /dev/null +++ b/models/wrf/shell_scripts/assim_advance.sh @@ -0,0 +1,107 @@ +#!/usr/bin/env bash +# +# DART software - Copyright UCAR. This open source software is provided +# by UCAR, "as is", without charge, subject to all terms of use at +# http://www.image.ucar.edu/DAReS/DART/DART_download +# +# datea, emember, paramfile are command-line arguments - OR - +# are set by a string editor (sed) command. 
+ +set -uo pipefail + +datea="${1:-}" +emember="${2:-}" +paramfile="${3:-}" + +if [[ -z "${datea}" || -z "${emember}" || -z "${paramfile}" ]]; then + echo "usage: $0 " + exit 2 +fi + +source "${paramfile}" + +domains="${NUM_DOMAINS}" + +start_time="$(date +%s)" +echo "host is $(hostname)" +echo "assim_advance.sh is running in $(pwd)" + +cd "${RUN_DIR}" + +read -r -a gdate < <(echo "${datea} 0 -g" | "${RUN_DIR}/advance_time") + +if (( ASSIM_INT_MINUTES <= 0 )); then + read -r -a gdatef < <(echo "${datea} ${ASSIM_INT_HOURS} -g" | "${RUN_DIR}/advance_time") +else + read -r -a gdatef < <(echo "${datea} ${ASSIM_INT_MINUTES}m -g" | "${RUN_DIR}/advance_time") +fi + +yyyy="${datea:0:4}" +mm="${datea:4:2}" +dd="${datea:6:2}" +hh="${datea:8:2}" +nn="00" +ss="00" + +# Copy files to appropriate location +echo "${start_time}" > "${RUN_DIR}/start_member_${emember}" + +# Go into member directory and generate the needed wrf.info file +cd "${RUN_DIR}/advance_temp${emember}" + +icnum="$(printf "%04d" "${emember}")" +if [[ -e "${RUN_DIR}/advance_temp${emember}/wrf.info" ]]; then + ${REMOVE} "${RUN_DIR}/advance_temp${emember}/wrf.info" +fi +: > wrf.info + +if [[ "${SUPER_PLATFORM}" == "LSF queuing system" ]]; then + + cat > "${RUN_DIR}/advance_temp${emember}/wrf.info" < "${RUN_DIR}/advance_temp${emember}/wrf.info" < "${RUN_DIR}/filter_control${icnum}" + +dn=1 +while (( dn <= domains )); do + dchar="$(printf "%02d" "${dn}")" + echo "filter_restart_d${dchar}.${icnum}" >> "${RUN_DIR}/filter_control${icnum}" + echo "prior_d${dchar}.${icnum}" >> "${RUN_DIR}/filter_control${icnum}" + (( dn++ )) +done # loop through domains + +# integrate the model forward in time +"${RUN_DIR}/new_advance_model.sh" "${emember}" "${domains}" "filter_control${icnum}" "${paramfile}" +${REMOVE} "${RUN_DIR}/filter_control${icnum}" + +end_time="$(date +%s)" +length_time=$(( end_time - start_time )) +echo "duration = ${length_time}" + +exit 0 + diff --git a/models/wrf/shell_scripts/assimilate.sh 
b/models/wrf/shell_scripts/assimilate.sh new file mode 100755 index 0000000000..42c2bad705 --- /dev/null +++ b/models/wrf/shell_scripts/assimilate.sh @@ -0,0 +1,59 @@ +#!/bin/bash +# +# DART software - Copyright UCAR. This open source software is provided +# by UCAR, "as is", without charge, subject to all terms of use at +# http://www.image.ucar.edu/DAReS/DART/DART_download + +# datea and paramfile are command-line arguments - OR - +# are set by a string editor (sed) command. + +set -uo pipefail + +datea="$1" +paramfile="$2" + +# shellcheck disable=SC1090 +source "$paramfile" + +start_time="$(date +%s)" +echo "host is $(hostname)" + +cd "${RUN_DIR}" +echo "${start_time}" > "${RUN_DIR}/filter_started" + +# Make sure the previous results are not hanging around +if [[ -e "${RUN_DIR}/obs_seq.final" ]]; then + ${REMOVE} "${RUN_DIR}/obs_seq.final" +fi +if [[ -e "${RUN_DIR}/filter_done" ]]; then + ${REMOVE} "${RUN_DIR}/filter_done" +fi + +# run data assimilation system +if [[ "${SUPER_PLATFORM}" == "LSF queuing system" ]]; then + + export TARGET_CPU_LIST=-1 + export FORT_BUFFERED=true + mpirun.lsf ./filter || exit 1 + +elif [[ "${SUPER_PLATFORM}" == "derecho" ]]; then + + export MPI_SHEPHERD=FALSE + export TMPDIR=/dev/shm + + ulimit -s unlimited + + mpiexec -n 256 -ppn 128 ./filter || exit 1 + +fi + +if [[ -e "${RUN_DIR}/obs_seq.final" ]]; then + touch "${RUN_DIR}/filter_done" +fi + +end_time="$(date +%s)" +length_time=$(( end_time - start_time )) +echo "duration = $length_time" + +exit 0 + diff --git a/models/wrf/shell_scripts/diagnostics_obs.sh b/models/wrf/shell_scripts/diagnostics_obs.sh new file mode 100755 index 0000000000..d37148aba9 --- /dev/null +++ b/models/wrf/shell_scripts/diagnostics_obs.sh @@ -0,0 +1,138 @@ +#!/bin/bash +# +# DART software - Copyright UCAR. 
This open source software is provided +# by UCAR, "as is", without charge, subject to all terms of use at +# http://www.image.ucar.edu/DAReS/DART/DART_download + +# diagnostics_obs.sh - shell script that computes observation +# specific diagnostics. +# +# $1 - analysis date +# $2 - parameter file +# + +set -uo pipefail + +datea="${1:?usage: diagnostics_obs.sh YYYYMMDDHH paramfile}" +paramfile="${2:?usage: diagnostics_obs.sh YYYYMMDDHH paramfile}" + +source "${paramfile}" + +cd "${OBS_DIAG_DIR}" +${COPY} "${RUN_DIR}/input.nml" input.nml + +read -r -a gdate < <(echo "${datea} 0 -g" | "${DART_DIR}/models/wrf/work/advance_time") +yyyy2="${datea:0:4}" +mm2="${datea:4:2}" +dd2="${datea:6:2}" +hh2="${datea:8:2}" + +# Determine appropriate dates for observation diagnostics +datef="$(echo "${datea} -${ASSIM_INT_HOURS}" | "${DART_DIR}/models/wrf/work/advance_time")" +# Forcing the obs_diag_output.nc diagnostic to be the last analysis time only (not cumulative) +yyyy1="${datea:0:4}" +mm1="${datea:4:2}" +dd1="${datea:6:2}" +hh1="${datea:8:2}" + +half_bin=$(( ASSIM_INT_HOURS / 2 )) +datefbs="$(echo "${datef} -${half_bin}" | "${DART_DIR}/models/wrf/work/advance_time")" +fbs_yyyy1="${datefbs:0:4}" +fbs_mm1="${datefbs:4:2}" +fbs_dd1="${datefbs:6:2}" +fbs_hh1="${datefbs:8:2}" + +datefbe="$(echo "${datef} ${half_bin}" | "${DART_DIR}/models/wrf/work/advance_time")" +fbe_yyyy1="${datefbe:0:4}" +fbe_mm1="${datefbe:4:2}" +fbe_dd1="${datefbe:6:2}" +fbe_hh1="${datefbe:8:2}" + +datelbe="$(echo "${datea} ${half_bin}" | "${DART_DIR}/models/wrf/work/advance_time")" +lbe_yyyy1="${datelbe:0:4}" +lbe_mm1="${datelbe:4:2}" +lbe_dd1="${datelbe:6:2}" +lbe_hh1="${datelbe:8:2}" + +while [[ "${datef}" -le "${datea}" ]]; do + if [[ -e "${OUTPUT_DIR}/${datef}/obs_seq.final" ]]; then + ${LINK} "${OUTPUT_DIR}/${datef}/obs_seq.final" "obs_seq.final_${datef}" + fi + datef="$(echo "${datef} ${ASSIM_INT_HOURS}" | "${DART_DIR}/models/wrf/work/advance_time")" +done + +# Create flist (absolute paths) +readlink -f 
obs_seq.final_* > flist + +cat > script.sed < input.nml + +# Create the state-space diagnostic summary +"${DART_DIR}/models/wrf/work/obs_diag" +${MOVE} obs_diag_output.nc "${OUTPUT_DIR}/${datea}/." +${MOVE} "$(ls -1 observation_locations.*.dat | tail -1)" "${OUTPUT_DIR}/${datea}/observation_locations.dat" + +# Create a netCDF file with the original observation data (may not have some of the unusual metadata) +"${DART_DIR}/models/wrf/work/obs_seq_to_netcdf" +${MOVE} obs_epoch* "${OUTPUT_DIR}/${datea}/" +${REMOVE} ./*.txt obs_seq.final_* flist observation_locations.*.dat + +# Prune the obs_seq.final and store result keeps first 5 copies? why not set num_output_obs = 0 +# is it the time subsetting that is of interest? +${LINK} "${OUTPUT_DIR}/${datea}/obs_seq.final" . +"${DART_DIR}/models/wrf/work/obs_sequence_tool" +${MOVE} obs_seq.final_reduced "${OUTPUT_DIR}/${datea}/." +${REMOVE} obs_seq.final + +# Process the mean analysis increment +cd "${OUTPUT_DIR}/${datea}" +${COPY} "${SHELL_SCRIPTS_DIR}/mean_increment.ncl" . +dn=1 +while (( dn <= ${NUM_DOMAINS} )); do + dchar="$(echo "${dn} + 100" | bc | cut -b2-3)" + analysis_in="analysis_increment_d${dchar}.nc" + mean_out="mean_increments_d${dchar}.nc" + + ncl "fname=\"${analysis_in}\"" "fout=\"${mean_out}\"" "${OUTPUT_DIR}/${datea}/mean_increment.ncl" > nclrun_d${dchar}.out + + (( dn++ )) +done # loop through domains + +touch "${OUTPUT_DIR}/${datea}/obs_diags_done" + +exit 0 + diff --git a/models/wrf/shell_scripts/driver.sh b/models/wrf/shell_scripts/driver.sh new file mode 100755 index 0000000000..2c4c8d1ca6 --- /dev/null +++ b/models/wrf/shell_scripts/driver.sh @@ -0,0 +1,714 @@ +#!/bin/bash +# +# DART software - Copyright UCAR. 
This open source software is provided +# by UCAR, "as is", without charge, subject to all terms of use at +# http://www.image.ucar.edu/DAReS/DART/DART_download + +# driver.sh - script that performs assimilation cycling +# +# run script as: ./driver.sh 2024051906 param.sh >& run.out & +# +# This provides an input argument of the first +# analysis time in yyyymmddhh format. +######################################################################## +# Set the correct values here +paramfile="$(readlink -f "${2}")" # Get absolute path for param.sh from command line arg +datefnl=2024051912 # target date YYYYMMDDHH # set this appropriately #%%%# +######################################################################## + +source "$paramfile" +uname -a +cd "${RUN_DIR}" + +# First determine the appropriate analysis date + +if (( $# > 0 )); then + datea="${1}" # starting date + export restore=1 # set the restore variable + echo 'starting a restore' +else + echo "please enter a date: yyyymmddhh" + exit 1 +fi + +touch "${RUN_DIR}/cycle_started_${datea}" + +while true; do + + if [[ ! 
-d "${OUTPUT_DIR}/${datea}" && "${restore}" == "1" ]]; then + ${REMOVE} "${RUN_DIR}/ABORT_RETRO" + echo 'exiting because output directory does not exist and this is a restore' + exit 0 + fi + + datep="$(echo "${datea} -${ASSIM_INT_HOURS}" | "${DART_DIR}/models/wrf/work/advance_time")" + read -r -a gdate < <(echo "${datea} 0 -g" | "${DART_DIR}/models/wrf/work/advance_time") + read -r -a gdatef < <(echo "${datea} ${ASSIM_INT_HOURS} -g" | "${DART_DIR}/models/wrf/work/advance_time") + wdate="$(echo "${datea} 0 -w" | "${DART_DIR}/models/wrf/work/advance_time")" + hh="${datea:8:2}" + + echo 'ready to check inputs' + domains="${NUM_DOMAINS}" # from the param file + + # Check to make sure all input data exists + dn=1 + while (( dn <= domains )); do + dchar="$(echo "${dn} + 100" | bc | cut -b2-3)" + + for infile in \ + "wrfinput_d${dchar}_${gdate[0]}_${gdate[1]}_mean" \ + "wrfinput_d${dchar}_${gdatef[0]}_${gdatef[1]}_mean" \ + "wrfbdy_d01_${gdatef[0]}_${gdatef[1]}_mean" \ + "obs_seq.out" + do + if [[ ! -e "${OUTPUT_DIR}/${datea}/${infile}" ]]; then + echo "${OUTPUT_DIR}/${datea}/${infile} is missing! Stopping the system" + touch ABORT_RETRO + exit 2 + fi + done + + (( dn++ )) + done # loop through domains + + # Clear the advance_temp directory, write in new template file, and + # overwrite variables with the compact prior netcdf files + # + # NOTE that multiple domains might be present, but only looking for domain 1 + + if [[ "${SUPER_PLATFORM}" == "LSF queuing system" ]]; then + ic_queue="caldera" + logfile="${RUN_DIR}/ic_gen.log" + sub_command=( bsub -q "${ic_queue}" -W 00:05 -o "${logfile}" -n 1 -P "${COMPUTER_CHARGE_ACCOUNT}" ) + elif [[ "${SUPER_PLATFORM}" == "derecho" ]]; then + ic_queue="main" + # NOTE: qsub flags here match the original intent; users may adjust for their environment. 
+ sub_command=( qsub -l "select=1:ncpus=128:mpiprocs=128:mem=5GB" -l "walltime=00:03:00" -q "${ic_queue}" -A "${COMPUTER_CHARGE_ACCOUNT}" -j oe -k eod -N icgen ) + else + echo "Unknown SUPER_PLATFORM='${SUPER_PLATFORM}'" + exit 2 + fi + + echo "this platform is $SUPER_PLATFORM and the job submission command is ${sub_command[*]}" + + dn=1 + while (( dn <= domains )); do + dchar="$(echo "${dn} + 100" | bc | cut -b2-3)" + n=1 + while (( n <= NUM_ENS )); do + ensstring="$(echo "${n} + 10000" | bc | cut -b2-5)" + if [[ -e "${OUTPUT_DIR}/${datep}/PRIORS/prior_d${dchar}.${ensstring}" ]]; then + + if (( dn == 1 )) && [[ -d "${RUN_DIR}/advance_temp${n}" ]]; then + ${REMOVE} "${RUN_DIR}/advance_temp${n}" + fi + + mkdir -p "${RUN_DIR}/advance_temp${n}" + ${LINK} "${OUTPUT_DIR}/${datea}/wrfinput_d${dchar}_${gdate[0]}_${gdate[1]}_mean" \ + "${RUN_DIR}/advance_temp${n}/wrfinput_d${dchar}" + else + echo "${OUTPUT_DIR}/${datep}/PRIORS/prior_d${dchar}.${ensstring} is missing! Stopping the system" + touch ABORT_RETRO + exit 3 + fi + (( n++ )) + done # loop through ensemble members + (( dn++ )) + done # loop through domains + + # Fire off a bunch of small jobs to create the initial conditions for the short model forecast. + # the prep_ic.sh script creates a file "${RUN_DIR}/ic_d${dchar}_${n}_ready" to signal a + # successful completion. + # NOTE : Submit commands here are system specific and work for this tutorial, users may want/need to change + # for their system and/or production. 
+ + n=1 + while (( n <= NUM_ENS )); do + if [[ "${SUPER_PLATFORM}" == "derecho" ]]; then # can't pass along arguments in the same way + "${sub_command[@]}" -v "mem_num=${n},date=${datep},domain=${domains},paramf=${paramfile}" "${SHELL_SCRIPTS_DIR}/prep_ic.sh" + else + # LSF: pass args directly + "${sub_command[@]}" "${SHELL_SCRIPTS_DIR}/prep_ic.sh" "${n}" "${datep}" "${dn}" "${paramfile}" + fi + (( n++ )) + done # loop through ensemble members + + # If any of the queued jobs has not completed in 5 minutes, run them manually + # cleanup any failed stuffs + # NOTE : No automated cleanup for queued jobs. User may want to add system specific monitoring. + dn=1 + while (( dn <= domains )); do + dchar="$(echo "${dn} + 100" | bc | cut -b2-3)" + n=1 + loop=1 + while (( n <= NUM_ENS )); do + if [[ -e "${RUN_DIR}/ic_d${dchar}_${n}_ready" ]]; then + ${REMOVE} "${RUN_DIR}/ic_d${dchar}_${n}_ready" + (( n++ )) + loop=1 + else + echo "waiting for ic member $n in domain $dn" + sleep 5 + (( loop++ )) + if (( loop > 60 )); then # wait 5 minutes for the ic file to be ready, else run manually + echo "gave up on ic member $n - redo" + "${SHELL_SCRIPTS_DIR}/prep_ic.sh" "${n}" "${datep}" "${dn}" "${paramfile}" + # If manual execution of script, shouldn't queued job be killed? 
+ fi + fi + done + (( dn++ )) + done # loop through domains + + mkdir -p "${OUTPUT_DIR}/${datea}/logs" + ${MOVE} icgen.o* "${OUTPUT_DIR}/${datea}/logs/" 2>/dev/null || true + + # Get wrfinput source information + dn=1 + while (( dn <= domains )); do + dchar="$(echo "${dn} + 100" | bc | cut -b2-3)" + ${COPY} "${OUTPUT_DIR}/${datea}/wrfinput_d${dchar}_${gdate[0]}_${gdate[1]}_mean" "wrfinput_d${dchar}" + (( dn++ )) + done + + # Copy the inflation files from the previous time and for all domains + # The ADAPTIVE_INFLATION variable is set in scripts/param.sh and should + # be consistent with DART's input.nml inflation setting (inf_flavor) + + if [[ "${ADAPTIVE_INFLATION}" == "1" ]]; then + # Create the home for inflation and future state space diagnostic files + mkdir -p "${RUN_DIR}/Inflation_input" "${RUN_DIR}/Output" + + dn=1 + while (( dn <= domains )); do + dchar="$(echo "${dn} + 100" | bc | cut -b2-3)" + + # Single domain (no dchar appendix) + if [[ -e "${OUTPUT_DIR}/${datep}/Inflation_input/input_priorinf_mean.nc" ]]; then + + ${LINK} "${OUTPUT_DIR}/${datep}/Inflation_input/input_priorinf_mean.nc" "${RUN_DIR}/." + ${LINK} "${OUTPUT_DIR}/${datep}/Inflation_input/input_postinf_mean.nc" "${RUN_DIR}/." + + ${LINK} "${OUTPUT_DIR}/${datep}/Inflation_input/input_priorinf_sd.nc" "${RUN_DIR}/." + ${LINK} "${OUTPUT_DIR}/${datep}/Inflation_input/input_postinf_sd.nc" "${RUN_DIR}/." + + + # Multiple domains (dchar appendix) + elif [[ -e "${OUTPUT_DIR}/${datep}/Inflation_input/input_priorinf_mean_d${dchar}.nc" ]]; then + + ${LINK} "${OUTPUT_DIR}/${datep}/Inflation_input/input_priorinf_mean_d${dchar}.nc" "${RUN_DIR}/." + ${LINK} "${OUTPUT_DIR}/${datep}/Inflation_input/input_postinf_mean_d${dchar}.nc" "${RUN_DIR}/." + + ${LINK} "${OUTPUT_DIR}/${datep}/Inflation_input/input_priorinf_sd_d${dchar}.nc" "${RUN_DIR}/." + ${LINK} "${OUTPUT_DIR}/${datep}/Inflation_input/input_postinf_sd_d${dchar}.nc" "${RUN_DIR}/." 
+ + else + + echo "${OUTPUT_DIR}/${datep}/Inflation_input/input_priorinf_mean**.nc file does not exist. Stopping" + echo "If first assimilation cycle make sure fill_inflation_restart was used to generate mean and sd inflation files" + touch ABORT_RETRO + exit 3 + + fi + (( dn++ )) + done # Loop through domains + + fi # ADAPTIVE_INFLATION file check + + ${LINK} "${OUTPUT_DIR}/${datea}/obs_seq.out" . + ${REMOVE} "${RUN_DIR}/WRF" + ${REMOVE} "${RUN_DIR}/prev_cycle_done" + ${LINK} "${OUTPUT_DIR}/${datea}" "${RUN_DIR}/WRF" + + # Run filter to generate the analysis + ${REMOVE} script.sed 2>/dev/null || true + + assimilate_job="${RUN_DIR}/assimilate.sh" + + if [[ "${SUPER_PLATFORM}" == "LSF queuing system" ]]; then + + cat > "${assimilate_job}" < "${assimilate_job}" < filter_thresh )); then + + # If the job needs to be aborted ... we need to qdel the hanging job + + echo "Time exceeded the maximum allowable time. Exiting." + touch ABORT_RETRO + ${REMOVE} filter_started + exit 5 + + fi + + fi + sleep 10 + + done + + echo "filter is done, cleaning up" + + ${MOVE} icgen.o* "${OUTPUT_DIR}/${datea}/logs/" 2>/dev/null || true + ${REMOVE} "${RUN_DIR}/filter_started" \ + "${RUN_DIR}/filter_done" \ + "${RUN_DIR}/obs_seq.out" \ + "${RUN_DIR}/postassim_priorinf"* \ + "${RUN_DIR}/preassim_priorinf"* 2>/dev/null || true + + if [[ -e assimilate.sh ]]; then + ${REMOVE} "${RUN_DIR}/assimilate.sh" + fi + + echo "Listing contents of rundir before archiving at $(date)" + ls -l *.nc blown* dart_log* filter_* input.nml obs_seq* Output/inf_ic* 2>/dev/null || true + mkdir -p "${OUTPUT_DIR}/${datea}/Inflation_input" "${OUTPUT_DIR}/${datea}/WRFIN" "${OUTPUT_DIR}/${datea}/PRIORS" "${OUTPUT_DIR}/${datea}/logs" + + extract_str="" + if declare -p increment_vars_a &>/dev/null; then + for v in "${increment_vars_a[@]}"; do + if [[ -z "${extract_str}" ]]; then + extract_str="${v}" + else + extract_str="${extract_str},${v}" + fi + done + fi + + # Create an analysis increment file that has valid static 
data. + # First, create the difference of a subset of variables + # Second, create a netCDF file with just the static data + # Third, append the static data onto the difference. + dn=1 + while (( dn <= domains )); do + dchar="$(echo "${dn} + 100" | bc | cut -b2-3)" + + if (( domains == 1 )); then + ncdiff -F -O -v "${extract_str}" "postassim_mean.nc" "preassim_mean.nc" "analysis_increment.nc" + ncks -F -O -x -v "${extract_str}" "postassim_mean.nc" "static_data.nc" + ncks -A "static_data.nc" "analysis_increment.nc" + else + ncdiff -F -O -v "${extract_str}" "postassim_mean_d${dchar}.nc" "preassim_mean_d${dchar}.nc" "analysis_increment_d${dchar}.nc" + ncks -F -O -x -v "${extract_str}" "postassim_mean_d${dchar}.nc" "static_data_d${dchar}.nc" + ncks -A "static_data_d${dchar}.nc" "analysis_increment_d${dchar}.nc" + fi + + # Move diagnostic and obs_seq.final data to storage directories + if (( dn == 1 )) && [[ -e obs_seq.final ]]; then + ${MOVE} obs_seq.final "${OUTPUT_DIR}/${datea}/." + if [[ $? -ne 0 ]]; then + echo "failed moving ${RUN_DIR}/obs_seq.final" + touch BOMBED + fi + else + if (( dn == 1 )); then + echo "${OUTPUT_DIR}/obs_seq.final does not exist and should." + echo "Stopping driver.sh" + exit 0 + + fi + fi + + if (( domains == 1 )); then + files=( + "postassim_mean.nc" "preassim_mean.nc" + "postassim_sd.nc" "preassim_sd.nc" + "analysis_increment.nc" + "output_mean.nc" "output_sd.nc" + ) + else + files=( + "postassim_mean_d${dchar}.nc" "preassim_mean_d${dchar}.nc" + "postassim_sd_d${dchar}.nc" "preassim_sd_d${dchar}.nc" + "analysis_increment_d${dchar}.nc" + "output_mean_d${dchar}.nc" "output_sd_d${dchar}.nc" + ) + fi + + for FILE in "${files[@]}"; do + if [[ -s "${FILE}" ]]; then + ${MOVE} "${FILE}" "${OUTPUT_DIR}/${datea}/." || { + echo "failed moving ${RUN_DIR}/${FILE}" + touch BOMBED + } + else + echo "${RUN_DIR}/${FILE} does not exist (or is empty) and should." 
+ ls -l + touch BOMBED + fi + done + + (( dn++ )) + done # loop through domains + + echo "past the analysis file moves" + + # Move inflation files to storage directories + # The output inflation file is used as the input for the next cycle, + # so rename the current inflation output for the next cycle input. + cd "${RUN_DIR}" + + if [[ "${ADAPTIVE_INFLATION}" == "1" ]]; then + + dn=1 + while (( dn <= domains )); do + dchar="$(echo "${dn} + 100" | bc | cut -b2-3)" + + if ((domains == 1)); then + old_file=( "input_postinf_mean.nc" "input_postinf_sd.nc" "input_priorinf_mean.nc" "input_priorinf_sd.nc" ) + new_file=( "output_postinf_mean.nc" "output_postinf_sd.nc" "output_priorinf_mean.nc" "output_priorinf_sd.nc" ) + else + old_file=( "input_postinf_mean_d${dchar}.nc" "input_postinf_sd_d${dchar}.nc" "input_priorinf_mean_d${dchar}.nc" "input_priorinf_sd_d${dchar}.nc" ) + new_file=( "output_postinf_mean_d${dchar}.nc" "output_postinf_sd_d${dchar}.nc" "output_priorinf_mean_d${dchar}.nc" "output_priorinf_sd_d${dchar}.nc" ) + fi + + nfiles="${#new_file[@]}" + i=0 + while (( i < nfiles )); do + if [[ -e "${new_file[$i]}" && -s "${new_file[$i]}" ]]; then + ${MOVE} "${new_file[$i]}" "${OUTPUT_DIR}/${datea}/Inflation_input/${old_file[$i]}" + if [[ $? -ne 0 ]]; then + echo "failed moving ${RUN_DIR}/Output/${new_file[$i]}" + touch BOMBED + fi + fi + (( i++ )) + done + (( dn++ )) + done # loop through domains + + echo "past the inflation file moves" + fi # adaptive_inflation file moves + + # Submit jobs to integrate ensemble members to next analysis time ... + # BEFORE calculating the observation-space diagnostics for the existing cycle. 
+ + echo "ready to integrate ensemble members" + + # Removing old start_member and done_member diagnostics + if [[ -e "${RUN_DIR}/start_member_1" ]]; then + ${REMOVE} "${RUN_DIR}/start_member_"* "${RUN_DIR}/done_member_"* 2>/dev/null || true + fi + + n=1 + while (( n <= NUM_ENS )); do + + if [[ "${SUPER_PLATFORM}" == "LSF queuing system" ]]; then + + jobfile="assim_advance_mem${n}.sh" + cat > "${jobfile}" < "${jobfile}" <& "${RUN_DIR}/obs_diag.log" & + + # --------------------------------------------------------------------------- + # Check to see if all of the ensemble members have advanced + # --------------------------------------------------------------------------- + + advance_thresh_min="$(echo "${ADVANCE_TIME}" | cut -b3-4)" + advance_thresh_hr="$(echo "${ADVANCE_TIME}" | cut -b1-1)" + advance_thresh=$(( 10#${advance_thresh_min} * 60 + 10#${advance_thresh_hr} * 3600 )) + + n=1 + while (( n <= NUM_ENS )); do + + ensstring="$(echo "${n} + 10000" | bc | cut -b2-5)" + keep_trying=true + max_retry=1 + + while [[ "${keep_trying}" == "true" ]]; do + + # Wait for the script to start + while [[ ! 
-e "${RUN_DIR}/start_member_${n}" ]]; do + + if [[ "${SUPER_PLATFORM}" == "LSF queuing system" ]]; then + + if [[ "$(bjobs -w | grep -c "assim_advance_${n}" || true)" -eq 0 ]]; then + echo "assim_advance_${n} is missing from the queue" + if [[ -n "${reservation:-}" ]]; then + echo "MEMBER ${n} USING RESERVATION," "$(/contrib/lsf/get_my_rsvid)" + bsub -U "$(/contrib/lsf/get_my_rsvid)" < "assim_advance_mem${n}.sh" + else + bsub < "assim_advance_mem${n}.sh" + fi + fi + + elif [[ "${SUPER_PLATFORM}" == "derecho" ]]; then + + if [[ "$(qstat -wa | grep -c "assim_advance_${n}" || true)" -eq 0 ]]; then + + echo "Warning, detected that assim_advance_${n} is missing from the queue" + echo "If this warning leads to missing output from ensemble ${n}" + echo "consider enabling the qsub command within keep_trying while statement in driver.sh" + + #qsub assim_advance_mem${n}.sh + fi + + fi + sleep 5 + + done + + start_time="$(head -1 "start_member_${n}")" + echo "Member $n has started. Start time $start_time" + + # Wait for the output file + while true; do + + current_time="$(date +%s)" + length_time=$(( current_time - start_time )) + + if [[ -e "${RUN_DIR}/done_member_${n}" ]]; then + + # If the output file already exists, move on + keep_trying=false + break + + elif (( length_time > advance_thresh )); then + + # If WRF member has failed 2 resubmission attempts, immediately stop driver.sh + if (( max_retry > 2 )); then + + echo "Stopping the driver.sh script! The WRF ensemble member ${n}" + echo "has exceeded the maximum resubmission attempts (2) without completing." + echo "This typically means the WRF integration has failed." + echo "Check your BASE_DIR/rundir/advance_temp${n} directory and locate" + echo "the WRF rsl.out.0000 or rsl.error.0000 log files for further information." + echo "If applicable, check the DART analysis_increment.nc from previous assimilation step" + exit 7 + + fi + + # The WRF job did not complete. 
Resubmit to queue + ${REMOVE} "start_member_${n}" + echo "Did not find the member done file, WRF run did not complete" + echo "Attempting resubmission $max_retry" + (( max_retry++ )) + + if [[ "${SUPER_PLATFORM}" == "LSF queuing system" ]]; then + + if [[ -n "${reservation:-}" ]]; then + echo "MEMBER ${n} USING RESERVATION," "$(/contrib/lsf/get_my_rsvid)" + bsub -U "$(/contrib/lsf/get_my_rsvid)" < "assim_advance_mem${n}.sh" + else + bsub < "assim_advance_mem${n}.sh" + fi + + elif [[ "${SUPER_PLATFORM}" == "derecho" ]]; then + + qsub "assim_advance_mem${n}.sh" + sleep 5 + fi + break + + fi + sleep 15 # this might need to be longer, though I moved the done flag lower in the + # advance_model.sh to hopefully avoid the file moves below failing + done + + done + + # Move output data to correct location + dn=1 + while (( dn <= domains )); do + dchar="$(echo "${dn} + 100" | bc | cut -b2-3)" + echo "moving ${n} ${ensstring} for domain ${dn}" + + if (( dn == 1 )); then + ${MOVE} "${RUN_DIR}/assim_advance_${n}.o"* "${OUTPUT_DIR}/${datea}/logs/." 2>/dev/null || true + ${MOVE} "WRFOUT/wrf.out_${gdatef[0]}_${gdatef[1]}_${n}" "${OUTPUT_DIR}/${datea}/logs/." 2>/dev/null || true + ${REMOVE} "start_member_${n}" "done_member_${n}" + if [[ -e "assim_advance_mem${n}.sh" ]]; then + ${REMOVE} "assim_advance_mem${n}.sh" + fi + pert="$(cat "${RUN_DIR}/advance_temp${n}/mem${n}_pert_bank_num" 2>/dev/null || true)" + echo "Member $n uses perturbation bank ensemble member $pert" >> "${OUTPUT_DIR}/${datea}/pert_bank_members.txt" + fi + + ${MOVE} "WRFIN/wrfinput_d${dchar}_${n}.gz" "${OUTPUT_DIR}/${datea}/WRFIN/." + ${MOVE} "${RUN_DIR}/prior_d${dchar}.${ensstring}" "${OUTPUT_DIR}/${datea}/PRIORS/." + ${REMOVE} "filter_restart_d${dchar}.${ensstring}" + + (( dn++ )) + done #loop through domains + + (( n++ )) + done + + # --------------------------------------------------------------------------- + # All ensemble members should have advanced by now. 
+ # --------------------------------------------------------------------------- + + if [[ -e obs_prep.log ]]; then + ${REMOVE} obs_prep.log + fi + + # Clean everything up and finish + + # Move DART-specific data to storage directory + ${COPY} input.nml "${OUTPUT_DIR}/${datea}/." + ${MOVE} "${RUN_DIR}/dart_log.out" "${RUN_DIR}/dart_log.nml" "${RUN_DIR}/"*.log "${OUTPUT_DIR}/${datea}/logs/." 2>/dev/null || true + + # Remove temporary files from both the run directory and old storage directories + ${REMOVE} "${OUTPUT_DIR}/${datep}/wrfinput_d"*"_mean" "${RUN_DIR}/wrfinput_d"* "${RUN_DIR}/WRF" 2>/dev/null || true + + # Prep data for archive + cd "${OUTPUT_DIR}/${datea}" + gzip -f wrfinput_d*_"${gdate[0]}"_"${gdate[1]}"_mean wrfinput_d*_"${gdatef[0]}"_"${gdatef[1]}"_mean wrfbdy_d*_mean + tar -cvf retro.tar obs_seq.out wrfin*.gz wrfbdy_d*.gz + tar -rvf dart_data.tar obs_seq.out obs_seq.final wrfinput_d*.gz wrfbdy_d*.gz \ + Inflation_input/* logs/* input.nml + ${REMOVE} wrfinput_d*_"${gdate[0]}"_"${gdate[1]}"_mean.gz wrfbdy_d*.gz + gunzip -f wrfinput_d*_"${gdatef[0]}"_"${gdatef[1]}"_mean.gz + + cd "${RUN_DIR}" + ${MOVE} "${RUN_DIR}/assim"*".o"* "${OUTPUT_DIR}/${datea}/logs/." 2>/dev/null || true + ${REMOVE} "${RUN_DIR}/input_priorinf_"* 2>/dev/null || true + ${REMOVE} "${RUN_DIR}/static_data"* 2>/dev/null || true + touch prev_cycle_done + touch "${RUN_DIR}/cycle_finished_${datea}" + if [[ -e "cycle_started_${datea}" ]]; then + rm -f "${RUN_DIR}/cycle_started_${datea}" + fi + + # If doing a reanalysis, increment the time if not done. 
Otherwise, let the script exit + if [[ "${restore}" == "1" ]]; then + if [[ "${datea}" == "${datefnl}" ]]; then + echo "Reached the final date " + echo "Script exiting normally" + exit 0 + fi + datea="$(echo "${datea} ${ASSIM_INT_HOURS}" | "${DART_DIR}/models/wrf/work/advance_time")" + else + echo "Script exiting normally cycle ${datea}" + exit 0 + fi + +done + diff --git a/models/wrf/shell_scripts/first_advance.sh b/models/wrf/shell_scripts/first_advance.sh new file mode 100755 index 0000000000..397bc7839a --- /dev/null +++ b/models/wrf/shell_scripts/first_advance.sh @@ -0,0 +1,103 @@ +#!/bin/bash +# +# DART software - Copyright UCAR. This open source software is provided +# by UCAR, "as is", without charge, subject to all terms of use at +# http://www.image.ucar.edu/DAReS/DART/DART_download + +set -uo pipefail + +datea="$1" +emember="$2" +paramfile="$3" + +source "$paramfile" + +start_time="$(date +%s)" +echo "host is $(hostname)" + +domains="$NUM_DOMAINS" + +cd "${RUN_DIR}" + +read -r -a gdate < <(echo "$datea 0 -g" | "${RUN_DIR}/advance_time") +read -r -a gdatef < <(echo "$datea $ASSIM_INT_HOURS -g" | "${RUN_DIR}/advance_time") +yyyy="${datea:0:4}" +mm="${datea:4:2}" +dd="${datea:6:2}" +hh="${datea:8:2}" +nn="00" +ss="00" + +echo "$start_time" > "${RUN_DIR}/start_member_${emember}" + +# Enter the member directory and generate the wrf.info file to prepare WRF integration +cd "${RUN_DIR}/advance_temp${emember}" + +icnum="$(echo "$emember + 10000" | bc | cut -b2-5)" +if [[ -e "${RUN_DIR}/advance_temp${emember}/wrf.info" ]]; then + ${REMOVE} "${RUN_DIR}/advance_temp${emember}/wrf.info" +fi + +touch wrf.info + +if [[ "${SUPER_PLATFORM}" == "LSF queuing system" ]]; then + + cat > "${RUN_DIR}/advance_temp${emember}/wrf.info" << EOF + ${gdatef[1]} ${gdatef[0]} + ${gdate[1]} ${gdate[0]} + $yyyy $mm $dd $hh $nn $ss + 1 + mpirun.lsf ./wrf.exe +EOF + +elif [[ "${SUPER_PLATFORM}" == "derecho" ]]; then + + export MPI_SHEPHERD=false + + cat > 
"${RUN_DIR}/advance_temp${emember}/wrf.info" << EOF + ${gdatef[1]} ${gdatef[0]} + ${gdate[1]} ${gdate[0]} + $yyyy $mm $dd $hh $nn $ss + $domains + mpiexec -n 4 -ppn 4 ./wrf.exe +EOF + +fi + +cd "${RUN_DIR}" + +# The filter_control file accounts for multiple domains and it appends +# input (filter_restart) and output (prior) in consecutive pairs for each domain.. +# This is consistent with the filter_control setup within assim_advance.sh +# for assimilation steps after first_advance. + +echo "$emember" > "${RUN_DIR}/filter_control${icnum}" + +dn=1 +while (( dn <= domains )); do + dchar="$(echo "$dn + 100" | bc | cut -b2-3)" + echo "filter_restart_d${dchar}.${icnum}" >> "${RUN_DIR}/filter_control${icnum}" + echo "prior_d${dchar}.${icnum}" >> "${RUN_DIR}/filter_control${icnum}" + (( dn++ )) +done # loop through domains + +# Call new_advance_model.sh to integrate WRF forward in time +"${RUN_DIR}/new_advance_model.sh" "${emember}" "${domains}" "filter_control${icnum}" "$paramfile" +${REMOVE} "${RUN_DIR}/filter_control${icnum}" + +# Move the WRF forecast (prior) to the appropriate directory +mkdir -p "${OUTPUT_DIR}/${datea}/PRIORS" + +dn=1 +while (( dn <= domains )); do + dchar="$(echo "$dn + 100" | bc | cut -b2-3)" + mv "${RUN_DIR}/prior_d${dchar}.${icnum}" "${OUTPUT_DIR}/${datea}/PRIORS/prior_d${dchar}.${icnum}" + (( dn++ )) +done # loop through domains + +end_time="$(date +%s)" +length_time=$(( end_time - start_time )) +echo "duration = $length_time" + +exit 0 + diff --git a/models/wrf/shell_scripts/gen_pert_bank.sh b/models/wrf/shell_scripts/gen_pert_bank.sh new file mode 100755 index 0000000000..d3440f1305 --- /dev/null +++ b/models/wrf/shell_scripts/gen_pert_bank.sh @@ -0,0 +1,153 @@ +#!/bin/bash +# +# DART software - Copyright UCAR. 
This open source software is provided +# by UCAR, "as is", without charge, subject to all terms of use at +# http://www.image.ucar.edu/DAReS/DART/DART_download + +# Script to save a 'perturbation bank' generated from the WRFDA CV3 option +# +# provide the following: +# namelist.input +# wrfinput_d01 +# ensemble size +# list of perturbed variables +# wrfda executable and be.dat + +set -uo pipefail + +datea=2024051900 # need to start from a known valid date matching the wrfinput_d01 date +paramfile="/glade/derecho/scratch/bmraczka/WRFv4.5_nested_bash/scripts/param.sh" + +echo "Sourcing parameter file" +source "$paramfile" + +# This has all wrf and wrfda executables and support files +wrfda_dir="${RUN_DIR}/WRF_RUN" # set this appropriately +work_dir="${PERTS_DIR}/work" # set this appropriately +# Put the final eperturbation files here for later use +save_dir="${PERTS_DIR}/work/boundary_perts" # set this appropriately + +# These scale variables are not used directly -- default is to use hard coded values within +# wrfvar section of namelist.input.3dvar file +IC_PERT_SCALE=0.1 +IC_HORIZ_SCALE=0.8 +IC_VERT_SCALE=0.8 + + +# Number of perturbations to generate, suggest 3-4X the ensemble size. +# Recommended to test single member first to confirm functionality and desired performance. 
+ +num_ens=60 + + +# Get wrfinput_d01 file directly from the 'mean' file generated from real.exe during gen_retro_icbc.sh +# set wrfin_dir = ${work_dir}/wrfin +ASSIM_INT_HOURS=6 + +module load nco + +mkdir -p "${save_dir}" +cd "$work_dir" || exit 1 +cp "${TEMPLATE_DIR}/input.nml.template" input.nml + +# get a wrfdate and parse +read -r -a gdate < <(echo "$datea 0h -g" | "${DART_DIR}/models/wrf/work/advance_time") +read -r -a gdatef < <(echo "$datea ${ASSIM_INT_HOURS}h -g" | "${DART_DIR}/models/wrf/work/advance_time") +wdate=$(echo "$datea 0h -w" | "${DART_DIR}/models/wrf/work/advance_time") + +yyyy="${datea:0:4}" +mm="${datea:4:2}" +dd="${datea:6:2}" +hh="${datea:8:2}" + +for ((n=1; n<=num_ens; n++)); do + + mkdir -p "${work_dir}/mem_${n}" + cd "${work_dir}/mem_${n}" || exit 1 + cp "${wrfda_dir}"/* "${work_dir}/mem_${n}/" + + ln -sf "${OUTPUT_DIR}/${datea}/wrfinput_d01_${gdate[0]}_${gdate[1]}_mean" "${work_dir}/mem_${n}/fg" + + seed_array2=$((n*10)) + + cat > script.sed << EOF +/run_hours/c\ +run_hours = 0, + /run_minutes/c\ + run_minutes = 0, + /run_seconds/c\ + run_seconds = 0, + /start_year/c\ + start_year = 1*${yyyy}, + /start_month/c\ + start_month = 1*${mm}, + /start_day/c\ + start_day = 1*${dd}, + /start_hour/c\ + start_hour = 1*${hh}, + /start_minute/c\ + start_minute = 1*00, + /start_second/c\ + start_second = 1*00, + /end_year/c\ + end_year = 1*${yyyy}, + /end_month/c\ + end_month = 1*${mm}, + /end_day/c\ + end_day = 1*${dd}, + /end_hour/c\ + end_hour = 1*${hh}, + /end_minute/c\ + end_minute = 1*00, + /end_second/c\ + end_second = 1*00, + /analysis_date/c\ + analysis_date = \'${wdate}.0000\', + s/PERT_SCALING/${IC_PERT_SCALE}/ + s/HORIZ_SCALE/${IC_HORIZ_SCALE}/ + s/VERT_SCALE/${IC_VERT_SCALE}/ + /seed_array1/c\ + seed_array1 = ${datea}, +/seed_array2/c\ +seed_array2 = ${seed_array2} / +EOF + + # namelist.input.3dvar must be set for single parent domain to work with gen_pert_bank.sh, + # contains all namelist options wrfvar1-14 + + sed -f script.sed 
"${TEMPLATE_DIR}/namelist.input.3dvar" > "${work_dir}/mem_${n}/namelist.input" + + # Create PBS script + cat > "${work_dir}/mem_${n}/gen_pert_${n}.sh" << EOF +#!/bin/sh +#================================================================= +#PBS -N gen_pert_bank_mem${n} +#PBS -j oe +#PBS -A ${COMPUTER_CHARGE_ACCOUNT} +#PBS -l walltime=0:05:00 +#PBS -q ${ADVANCE_QUEUE} +#PBS -l job_priority=${ADVANCE_PRIORITY} +#PBS -o gen_pert_bank_mem${n}.out +#PBS -l select=1:ncpus=4:mpiprocs=4 +#PBS -k eod +#PBS -V +#================================================================= + +cd "${work_dir}/mem_${n}" || exit 3 + +mpiexec -n 1 -ppn 1 ./da_wrfvar.exe >& output.wrfvar +mv wrfvar_output wrfinput_d01 + +# Extract only the fields that are updated by wrfvar, then diff to generate the pert file for this member +ncks -h -F -A -a -v U,V,THM,QVAPOR,MU fg orig_data.nc +ncks -h -F -A -a -v U,V,THM,QVAPOR,MU wrfinput_d01 pert_data.nc +ncdiff pert_data.nc orig_data.nc "pert_bank_mem_${n}.nc" +mv "pert_bank_mem_${n}.nc" "${save_dir}/pert_bank_mem_${n}.nc" +EOF + + qsub "${work_dir}/mem_${n}/gen_pert_${n}.sh" + +done + +exit 0 + diff --git a/models/wrf/shell_scripts/gen_retro_icbc.sh b/models/wrf/shell_scripts/gen_retro_icbc.sh new file mode 100755 index 0000000000..613998baf6 --- /dev/null +++ b/models/wrf/shell_scripts/gen_retro_icbc.sh @@ -0,0 +1,298 @@ +#!/bin/bash +# +# DART software - Copyright UCAR. 
This open source software is provided +# by UCAR, "as is", without charge, subject to all terms of use at +# http://www.image.ucar.edu/DAReS/DART/DART_download + +# DART / WRF gen_retro_icbc – Bash version for PBS (Derecho) +# +# Original behavior: +# - Loops over times from datea to datefnl +# - Runs WPS (geogrid/ungrib/metgrid) +# - Runs real.exe twice (first to make wrfinput+wrfbdy, then to make second-time wrfinput) +# - Writes: +# output/${date}/wrfbdy_d01_{gdayf}_{gsecf}_mean +# output/${date}/wrfinput_d01_{gday}_{gsec}_mean +# output/${date}/wrfinput_d02_{gday}_{gsec}_mean + + + + +#================================================================== +#BSUB -J gen_retro_icbc +#BSUB -o gen_retro_icbc.%J.log +#BSUB -P 25000077 +#BSUB -W 0:38 +#BSUB -q regular +#BSUB -n 64 +#BSUB -x +#BSUB -R "span[ptile=64]" + +#PBS -N gen_retro_icbc +#PBS -A 25000077 +#PBS -l walltime=00:38:00 +#PBS -q regular +#PBS -o gen_retro_icbc.out +#PBS -j oe +#PBS -k eod +#PBS -l select=5:ncpus=60:mpiprocs=60 +#PBS -V + +set -uo pipefail + +echo "gen_retro_icbc.sh is running in $(pwd)" + +############################################################################### +# User-configurable dates and param file +############################################################################### + +datea=2024051812 # initial cycle time (YYYYMMDDHH) +datefnl=2024052018 # final cycle time (YYYYMMDDHH) +paramfile="/glade/derecho/scratch/bmraczka/WRFv4.5_nested_bash/scripts/param.sh" + +echo "Sourcing parameter file: $paramfile" +source "$paramfile" + +# Backups if REMOVE/MOVE/LINK not set in param.sh +: "${REMOVE:=rm -rf}" +: "${MOVE:=mv -f}" +: "${COPY:=cp -p}" +: "${LINK:=ln -sf}" + +DART_ADVANCE_TIME="${DART_DIR}/models/wrf/work/advance_time" + +############################################################################### +# One-time ICBC directory prep +############################################################################### + +cd "$ICBC_DIR" + +$REMOVE geo_*.nc namelist.wps 
namelist.input geogrid_done +mkdir -p geogrid +$LINK "${WPS_SRC_DIR}/geogrid/GEOGRID.TBL" "${ICBC_DIR}/geogrid/GEOGRID.TBL" + +mkdir -p "${ICBC_DIR}/metgrid" +$LINK "${WPS_SRC_DIR}/metgrid/METGRID.TBL" "${ICBC_DIR}/metgrid/METGRID.TBL" + +############################################################################### +# Helper: run real.exe via PBS job that calls your real.sh +############################################################################### + +run_real_via_pbs() { + local pbs_script="$ICBC_DIR/run_real.pbs" + + cat > "$pbs_script" << EOF +#!/bin/bash +#PBS -N run_real +#PBS -A ${COMPUTER_CHARGE_ACCOUNT} +#PBS -l walltime=00:05:00 +#PBS -q ${ADVANCE_QUEUE} +#PBS -l job_priority=${ADVANCE_PRIORITY} +#PBS -o run_real.out +#PBS -j oe +#PBS -k eod +#PBS -l select=1:ncpus=4:mpiprocs=4 +#PBS -V + +cd "$ICBC_DIR" +"${SHELL_SCRIPTS_DIR}/real.sh" "$paramfile" +EOF + + jobid=$(qsub "$pbs_script") + echo "Submitted real.exe PBS job: $jobid" + + # Wait for the marker file from real.sh + while [[ ! 
-e "$ICBC_DIR/real_done" ]]; do + sleep 15 + done + rm -f "$ICBC_DIR/real_done" + + # Accumulate log + if [[ -e "$ICBC_DIR/rsl.out.0000" ]]; then + cat "$ICBC_DIR/rsl.out.0000" >> "$ICBC_DIR/out.real.exe" + fi +} + +############################################################################### +# Main cycle loop +############################################################################### + +while :; do + echo " " + echo "Entering gen_retro_icbc.sh for datea = $datea" + + # Ensure output directory exists for this cycle + mkdir -p "${OUTPUT_DIR}/${datea}" + + cd "$ICBC_DIR" + + # Link DART input.nml + $LINK "${RUN_DIR}/input.nml" input.nml + + # Clean old GRIB files + $REMOVE gfs*pgrb2* *grib2 || true + + # Compute WPS start and end dates + start_date=$(echo "$datea 0 -w" | "$DART_ADVANCE_TIME") + end_date=$(echo "$datea 6 -w" | "$DART_ADVANCE_TIME") + echo "start_date = $start_date" + echo "end_date = $end_date" + + # Build namelist.wps via sed script + cat > script.sed << EOF +/start_date/c\ + start_date = ${start_date},${start_date} +/end_date/c\ + end_date = ${end_date},${end_date} +EOF + + if [[ "$GRIB_SRC" != "GFS" ]]; then + echo "ERROR: GRIB_SRC=$GRIB_SRC not supported in this script (expects GFS)." + exit 2 + fi + + # GRIB file names (GFS 0.25) + gribfile_a="${GRIB_DATA_DIR}/gfs.0p25.${datea}.f000.grib2" + gribfile_b="${GRIB_DATA_DIR}/gfs.0p25.${datea}.f006.grib2" + + if [[ ! -r "$gribfile_a" || ! -r "$gribfile_b" ]]; then + echo "ERROR: GRIB input files not found:" + echo " $gribfile_a" + echo " $gribfile_b" + exit 2 + fi + + $LINK "$gribfile_a" GRIBFILE.AAA + $LINK "$gribfile_b" GRIBFILE.AAB + + sed -f script.sed "${TEMPLATE_DIR}/namelist.wps.template" > namelist.wps + $LINK "${WPS_SRC_DIR}/ungrib/Variable_Tables/Vtable.${GRIB_SRC}" Vtable + + # Run geogrid once + if [[ ! 
-e "${ICBC_DIR}/geogrid_done" ]]; then + echo "Executing geogrid.exe" + "${WPS_SRC_DIR}/geogrid.exe" >& output.geogrid.exe + touch "${ICBC_DIR}/geogrid_done" + fi + + echo "Executing ungrib.exe" + $REMOVE output.ungrib.exe."${GRIB_SRC}" || true + "${WPS_SRC_DIR}/ungrib.exe" >& output.ungrib.exe."${GRIB_SRC}" + + echo "Executing metgrid.exe" + $REMOVE output.metgrid.exe || true + "${WPS_SRC_DIR}/metgrid.exe" >& output.metgrid.exe + + # Compute end of assimilation window + datef=$(echo "$datea $ASSIM_INT_HOURS" | "$DART_ADVANCE_TIME") + # Gregorian version for wrfbdy naming + read -r gdayf gsecf _rest < <(echo "$datef 0 -g" | "$DART_ADVANCE_TIME") + hh=${datea:8:2} + + ########################################################################### + # Run real.exe twice: first (datea → datef), then (datef → datef) + ########################################################################### + + for n in 1 2; do + echo + echo "RUNNING REAL, STEP $n" + echo + + if [[ "$n" -eq 1 ]]; then + date1="$datea" + date2="$datef" + fcst_hours="$ASSIM_INT_HOURS" + else + date1="$datef" + date2="$datef" + fcst_hours=0 + fi + + yyyy1=${date1:0:4} + mm1=${date1:4:2} + dd1=${date1:6:2} + hh1=${date1:8:2} + + yyyy2=${date2:0:4} + mm2=${date2:4:2} + dd2=${date2:6:2} + hh2=${date2:8:2} + + # Build namelist.input from template namelist.input.meso + cat > script.sed << EOF +/run_hours/c\ + run_hours = ${fcst_hours}, +/run_minutes/c\ + run_minutes = 0, +/run_seconds/c\ + run_seconds = 0, +/start_year/c\ + start_year = ${yyyy1}, ${yyyy1}, +/start_month/c\ + start_month = ${mm1}, ${mm1}, +/start_day/c\ + start_day = ${dd1}, ${dd1}, +/start_hour/c\ + start_hour = ${hh1}, ${hh1}, +/start_minute/c\ + start_minute = 00, 00, +/start_second/c\ + start_second = 00, 00, +/end_year/c\ + end_year = ${yyyy2}, ${yyyy2}, +/end_month/c\ + end_month = ${mm2}, ${mm2}, +/end_day/c\ + end_day = ${dd2}, ${dd2}, +/end_hour/c\ + end_hour = ${hh2}, ${hh2}, +/end_minute/c\ + end_minute = 00, 00, +/end_second/c\ + 
end_second = 00, 00, +EOF + + sed -f script.sed "${TEMPLATE_DIR}/namelist.input.meso" > namelist.input + + # Clean flags & logs + $REMOVE real_done rsl.* script.sed || true + : > out.real.exe + + # Submit PBS job that runs real.exe via real.sh + run_real_via_pbs + + # On return, move wrfinput/wrfbdy to appropriate locations + read -r gday gsec _rest < <(echo "$date1 0 -g" | "$DART_ADVANCE_TIME") + + # Move wrfinput_d01/d02 + if [[ -e wrfinput_d01 ]]; then + $MOVE wrfinput_d01 "${OUTPUT_DIR}/${datea}/wrfinput_d01_${gday}_${gsec}_mean" + fi + if [[ -e wrfinput_d02 ]]; then + $MOVE wrfinput_d02 "${OUTPUT_DIR}/${datea}/wrfinput_d02_${gday}_${gsec}_mean" + fi + + # For first real.exe call, also move wrfbdy_d01 + if [[ "$n" -eq 1 && -e wrfbdy_d01 ]]; then + $MOVE wrfbdy_d01 "${OUTPUT_DIR}/${datea}/wrfbdy_d01_${gdayf}_${gsecf}_mean" + fi + + done # end n=1,2 + + ########################################################################### + # Move to next time, or exit if final time reached + ########################################################################### + if [[ "$datea" == "$datefnl" ]]; then + echo "Reached final date $datefnl – script exiting normally." + exit 0 + fi + + # Advance datea by ASSIM_INT_HOURS + datea=$(echo "$datea $ASSIM_INT_HOURS" | "$DART_ADVANCE_TIME" | awk '{print $1}') + echo " " + echo "Starting next time: $datea" + +done + +exit 0 + diff --git a/models/wrf/shell_scripts/init_ensemble_var.sh b/models/wrf/shell_scripts/init_ensemble_var.sh new file mode 100755 index 0000000000..6b8ef04ce6 --- /dev/null +++ b/models/wrf/shell_scripts/init_ensemble_var.sh @@ -0,0 +1,199 @@ +#!/bin/bash +# +# DART software - Copyright UCAR. This open source software is provided +# by UCAR, "as is", without charge, subject to all terms of use at +# http://www.image.ucar.edu/DAReS/DART/DART_download + +# init_ensemble_var.sh - script that creates perturbed initial +# conditions from the WRF-VAR system. 
+# (perts are drawn from the perturbation bank) + +set -euo pipefail + +initial_date="$1" +paramfile="$(readlink -f "$2")" # Get absolute path for param.sh from command line arg +source "$paramfile" + +cd "${RUN_DIR}" + +# Generate the i/o lists in rundir automatically when initializing the ensemble +# Required to run filter during assimilation step +num_ens="${NUM_ENS}" # set from param file +domains="${NUM_DOMAINS}" +dn=1 + +while (( dn <= domains )); do + dchar="$(echo "$dn + 100" | bc | cut -b2-3)" + input_file_name="input_list_d${dchar}.txt" + input_file_path="./advance_temp" + output_file_name="output_list_d${dchar}.txt" + n=1 + + [[ -e "$input_file_name" ]] && rm -f "$input_file_name" + [[ -e "$output_file_name" ]] && rm -f "$output_file_name" + + while (( n <= num_ens )); do + + ensstring="$(printf %04d "$n")" + in_file_name="${input_file_path}${n}/wrfinput_d${dchar}" + out_file_name="filter_restart_d${dchar}.${ensstring}" + + echo "$in_file_name" >> "$input_file_name" + echo "$out_file_name" >> "$output_file_name" + + (( n++ )) + done # loop through ensemble members + (( dn++ )) +done # loop through domains + + +read -r -a gdate < <(echo "$initial_date 0h -g" | "${DART_DIR}/models/wrf/work/advance_time") +read -r -a gdatef < <(echo "$initial_date ${ASSIM_INT_HOURS}h -g" | "${DART_DIR}/models/wrf/work/advance_time") +wdate="$(echo "$initial_date 0h -w" | "${DART_DIR}/models/wrf/work/advance_time")" +yyyy="${initial_date:0:4}" +mm="${initial_date:4:2}" +dd="${initial_date:6:2}" +hh="${initial_date:8:2}" +nn="00" +ss="00" + +${COPY} "${TEMPLATE_DIR}/namelist.input.meso" namelist.input +${REMOVE} "${RUN_DIR}/WRF" +${LINK} "${OUTPUT_DIR}/${initial_date}" WRF + +n=1 +while (( n <= NUM_ENS )); do + + echo " QUEUEING ENSEMBLE MEMBER $n at $(date)" + + mkdir -p "${RUN_DIR}/advance_temp${n}" + + ${LINK} "${RUN_DIR}/WRF_RUN/"* "${RUN_DIR}/advance_temp${n}/." 
+ ${LINK} "${RUN_DIR}/input.nml" "${RUN_DIR}/advance_temp${n}/input.nml"
+
+ ${REMOVE} script.sed
+ # Build a sed script stamping the cycle date into the member's WRF namelist.
+ cat > script.sed << EOF
+/start_year/c\
+start_year = ${yyyy},
+/start_month/c\
+start_month = ${mm},
+/start_day/c\
+start_day = ${dd},
+/start_hour/c\
+start_hour = ${hh},
+/start_minute/c\
+start_minute = ${nn},
+/start_second/c\
+start_second = ${ss},
+/end_year/c\
+end_year = ${yyyy},
+/end_month/c\
+end_month = ${mm},
+/end_day/c\
+end_day = ${dd},
+/end_hour/c\
+end_hour = ${hh},
+/end_minute/c\
+end_minute = ${nn},
+/end_second/c\
+end_second = ${ss},
+/max_dom/c\
+max_dom = ${NUM_DOMAINS},
+EOF
+
+ sed -f script.sed "${RUN_DIR}/namelist.input" > "${RUN_DIR}/advance_temp${n}/namelist.input"
+
+ ${COPY} "${OUTPUT_DIR}/${initial_date}/wrfinput_d01_${gdate[0]}_${gdate[1]}_mean" \
+ "${RUN_DIR}/advance_temp${n}/wrfvar_output.nc"
+ sleep 3
+ ${COPY} "${RUN_DIR}/add_bank_perts.ncl" "${RUN_DIR}/advance_temp${n}/."
+
+ cmd3="ncl 'MEM_NUM=${n}' 'PERTS_DIR=\"${PERTS_DIR}/work/boundary_perts\"' ${RUN_DIR}/advance_temp${n}/add_bank_perts.ncl"
+ ${REMOVE} "${RUN_DIR}/advance_temp${n}/nclrun3.out"
+ cat > "${RUN_DIR}/advance_temp${n}/nclrun3.out" << EOF
+${cmd3}
+EOF
+
+ # NOTE: the heredoc below is intentionally unquoted so outer-scope values
+ # (RUN_DIR, n, yyyy, gdate, MOVE/COPY/LINK/REMOVE, ...) are baked into the
+ # job script when it is WRITTEN. Anything that must be evaluated when the
+ # job RUNS (the per-domain loop variables dn/dchar, \$(date), \$(pwd)) is
+ # escaped with a backslash.
+ cat > "${RUN_DIR}/rt_assim_init_${n}.sh" << EOF
+#!/bin/bash
+#=================================================================
+#PBS -N first_advance_${n}
+#PBS -j oe
+#PBS -A ${COMPUTER_CHARGE_ACCOUNT}
+#PBS -l walltime=${ADVANCE_TIME}
+#PBS -q ${ADVANCE_QUEUE}
+#PBS -l job_priority=${ADVANCE_PRIORITY}
+#PBS -m ae
+#PBS -M ${EMAIL}
+#PBS -k eod
+#PBS -l select=${ADVANCE_NODES}:ncpus=${ADVANCE_PROCS}:mpiprocs=${ADVANCE_MPI}
+#=================================================================
+
+set -uo pipefail
+
+echo "rt_assim_init_${n}.sh is running in \$(pwd)"
+
+cd "${RUN_DIR}/advance_temp${n}"
+
+if [[ -e wrfvar_output.nc ]]; then
+ echo "Running nclrun3.out to create wrfinput_d01 for member ${n} at \$(date)"
+
+ chmod +x nclrun3.out
+ # Send stderr to add_perts.err: the success test below inspects that file,
+ # so it must actually receive the NCL error output (previously stderr was
+ # merged into add_perts.out and the failure branch could never fire).
+ ./nclrun3.out > add_perts.out 2> add_perts.err
+
+ if [[ ! -s add_perts.err ]]; then
+ echo "Perts added to member ${n}"
+ else
+ echo "ERROR! Non-zero status returned from add_bank_perts.ncl. Check ${RUN_DIR}/advance_temp${n}/add_perts.err."
+ cat add_perts.err
+ exit 1
+ fi
+
+ ${MOVE} wrfvar_output.nc wrfinput_d01
+
+ # For nested domain setups only, downscale perturbations to inner domains
+ # For single domains, next section is skipped, no downscaling applied
+
+ dn="${domains}"
+ while (( dn > 1 )); do
+
+ # Prep domain files for ndown.exe
+ # input files of wrfout_d01_[time] (parent domain), wrfndi_d02 (nested domain)
+ # output files of wrfinput_d02 and wrfbdy_d02 -- ndown.exe always uses the
+ # d02 naming for its input/output regardless of the actual domain number.
+ # dn/dchar are escaped so they are computed per-iteration at RUN time,
+ # not frozen to stale outer-loop values when this script is generated.
+
+ dchar="\$(echo "\$dn + 100" | bc | cut -b2-3)"
+
+ ${LINK} wrfinput_d01 wrfout_d01_${yyyy}-${mm}-${dd}_${hh}:00:00
+
+ ${COPY} "${OUTPUT_DIR}/${initial_date}/wrfinput_d\${dchar}_${gdate[0]}_${gdate[1]}_mean" \
+ "${RUN_DIR}/advance_temp${n}/wrfndi_d02"
+
+ echo "Running ndown.exe to downscale perturbed wrfinput_d01 onto wrfinput_d\${dchar} for member ${n}"
+
+ # Downscale parent domain to nested domain (wrfinput_d{??})
+ mpiexec -n 4 -ppn 4 ./ndown.exe > ndown_d\${dchar}.out
+
+ ${MOVE} wrfinput_d02 wrfinput_d\${dchar}
+ ${MOVE} wrfbdy_d02 wrfbdy_d\${dchar}
+ ${REMOVE} wrfndi_d02
+ (( dn-- ))
+ done # loop through domains
+
+
+fi
+
+cd "${RUN_DIR}"
+
+echo "Running first_advance.sh for member ${n} at \$(date)"
+"${SHELL_SCRIPTS_DIR}/first_advance.sh" "${initial_date}" "${n}" "${paramfile}"
+EOF
+
+ chmod +x "${RUN_DIR}/rt_assim_init_${n}.sh"
+ qsub "${RUN_DIR}/rt_assim_init_${n}.sh"
+
+ (( n++ ))
+
+done
+
+exit 0
+
diff --git a/models/wrf/shell_scripts/mean_increment.ncl b/models/wrf/shell_scripts/mean_increment.ncl
index c12b8e7af6..e1dd444fbd 100644
--- a/models/wrf/shell_scripts/mean_increment.ncl
+++ b/models/wrf/shell_scripts/mean_increment.ncl
@@ -1,14 +1,33 @@
-; find the mean state space increment, output the fields to a single mean file
+; Find the mean state space increment, output the fields to a single mean file
 ; that can be used to make plots
-; G. Romine 2011-12 -; Updating for 1 domain only B. Raczka 2024-08 + +; Incoming arguments are: +; 1) fname (analysis_increment.nc) +; 2) fout (mean_increment.nc) + +; This script is called from the diagnostic_obs.sh script as: +; ncl fname=analysis_increment_{d??}.nc fout=mean_increments_{d??}.nc mean_increment.ncl + begin -; get the list of files to read in - fname = "analysis_increment.nc" - flist = systemfunc("ls ../*/" + fname) - nfils = dimsizes(flist) -; if we only want say the last 7 days, then grab only the last 28 + if (.not. isvar("fname")) then + print("ERROR in mean_increment.ncl: fname not provided.") + exit + end if + if (.not. isvar("fout")) then + print("ERROR in mean_increment.ncl: fout not provided.") + exit + end if + + flist = systemfunc("ls -1 ../*/" + fname + " 2>/dev/null") + nfils = dimsizes(flist) + + if (nfils .le. 0) then + print("ERROR in mean_increment.ncl: no files found matching ../*/" + fname) + exit + end if + +; If we only want say the last 7 days, then grab only the last 28 ; here we practice with 3 days anl_days = 7 ntimes = anl_days*4 @@ -27,14 +46,14 @@ begin npulls = dimsizes(pull_2D_field_names) ; Below will dump out the data to a file for replotting later - cnew = addfile("mean_increments"+".nc","c") -; work through 2D fields + cnew = addfile(fout,"c") +; Work through 2D fields do i=0,npulls-1 print(" Extracting 2d variable "+pull_2D_field_names(i)) do fil_num=0,nfils-1 ; print(" reading file "+flist(fil_num)) -; dimensions are ncljoin, Time, south_north, west_east -; copy zero is the ensemble mean +; Dimensions are ncljoin, Time, south_north, west_east +; Copy zero is the ensemble mean pull_var = fil[fil_num]->$pull_2D_field_names(i)$(:,:,:,:) dims = dimsizes(pull_var) if (fil_num .eq. 
0) then ; first iteration, make var diff --git a/models/wrf/shell_scripts/new_advance_model.sh b/models/wrf/shell_scripts/new_advance_model.sh new file mode 100755 index 0000000000..c48249e6a7 --- /dev/null +++ b/models/wrf/shell_scripts/new_advance_model.sh @@ -0,0 +1,830 @@ +#!/bin/bash +# +# DART software - Copyright UCAR. This open source software is provided +# by UCAR, "as is", without charge, subject to all terms of use at +# http://www.image.ucar.edu/DAReS/DART/DART_download + +# Shell script to run the WRF model from DART input. +# where the model advance is executed as a separate process. +# +# This script performs the following: +# 1. Creates a temporary directory to run a WRF realization (see options) +# 2. Copies or links the necessary files into the temporary directory +# 3. Converts DART state vectors to wrf input +# 4. Updates LBCs (optionally draws perturbations from WRF-Var random covariances) +# 5. Writes a WRF namelist from a template +# 6. Runs WRF +# 7. Checks for incomplete runs +# 8. Converts wrf output to DART state vectors + +# NOTES: +# 1. This version executes da_wrfvar.exe in serial (no mpirun) +# 2. If the ensemble mean assim_model_state_ic_mean is present in the +# $CENTRALDIR, it is converted to a WRF netCDF format. +# It is then used in update_wrf_bc to calculate the deviation from the mean. +# This deviation from the mean is then added at the end of the interval to +# calculate new boundary tendencies. The magnitude of the perturbation added +# at the end of the interval is controlled by infl. The purpose is to increase +# time correlation at the lateral boundaries. + + +#------------------------------------------------------- +# Dependencies (user responsibility) +#------------------------------------------------------- +# REQUIRED: +# 1. advance_time (from DART), located in your $CENTRALDIR +# 2. one of either (da_wrfvar.exe and pert_wrf_bc) or update_wrf_bc if you +# want to run real-data cases with specified LBCs. 
Elaborated below. +# 3. directory $CENTRALDIR/WRF_RUN containing all the WRF run-time files +# (typically files with data for the physics: LANDUSE.TBL, RRTM_DATA, etc +# but also anything else you want to link into the wrf-run directory. If +# using WRF-Var then be.dat should be in there too. +# 4. wrf.exe, located in your $CENTRALDIR +# 5. A wrfinput_d01 file in your $CENTRALDIR. +# 6. namelist.input in your $CENTRALDIR for use as a template. This file +# should include the WRF-Var namelists if you are using WRF-Var (v3.1++ required). +# +# OPTIONAL: +# ####EITHER 1 or 2 is required for specified LBC runs +# 1. da_wrfvar.exe (version 3.1 or later) and pert_wrf_bc in your $CENTRALDIR/WRF_RUN. +# In this case you also need be.dat (the be.dat.cv3 file from the WRF-Var +# distribution) in your $CENTRALDIR/WRF_RUN, and WRF-Var namelists in +# your $CENTRALDIR/namelist.input +# 2. update_wrf_bc in your $CENTRALDIR for using pre-existing LBC files. Pre-existing LBC files should live in $CENTRALDIR/WRF + +# File naming conventions: +# mean wrfinput - wrfinput_d0X_${gday}_${gsec}_mean +# mean wrfbdy - wrfbdy_d01_${gday}_${gsec}_mean +# wrfbdy members - wrfbdy_d01_${gday}_${gsec}_${member} + + +echo "new_advance_model.sh is running in $(pwd)" + +# Arguments are the process number of caller, the number of state copies +# belonging to that process, and the name of the filter_control_file for +# that process +process="$1" +num_domains="$2" +control_file="$3" +num_states=1 # Forcing option of only one model advance per execution +paramfile="$4" # Need this to load modules/environment +source "$paramfile" + +# Setting to vals > 0 saves wrfout files, +# will save all member output files <= to this value +save_ensemble_member=3 +delete_temp_dir=false + +# Set this to true if you want to maintain complete individual wrfinput/output +# for each member (to carry through non-updated fields) +individual_members=true + +# Next line ensures that the last cycle leaves everything in the 
temp dirs +if [[ "$individual_members" == true ]]; then delete_temp_dir=false; fi + +myname="$0" +CENTRALDIR="$(pwd)" +echo "$CENTRALDIR" +WRFOUTDIR="${CENTRALDIR}/WRFOUT" +REMOVE='/bin/rm -rf' +COPY='/bin/cp -p' +MOVE='/bin/mv -f' +LN='/bin/ln -sf' + + +export TARGET_CPU_LIST=-1 + +# If process 0 go ahead and check for dependencies here +if [[ "$process" -eq 0 ]]; then + + if [[ ! -x "${CENTRALDIR}/advance_time" ]]; then + echo "ABORT: advance_model.sh could not find required executable dependency ${CENTRALDIR}/advance_time" + exit 1 + fi + + if [[ ! -d "WRF_RUN" ]]; then + echo "ABORT: advance_model.sh could not find required data directory ${CENTRALDIR}/WRF_RUN, which contains all the WRF run-time input files" + exit 1 + fi + + if [[ ! -x "${CENTRALDIR}/WRF_RUN/da_wrfvar.exe" ]]; then + echo + echo "WARNING: advance_model.sh could not find optional executable dependency ${CENTRALDIR}/WRF_RUN/da_wrfvar.exe" + echo + if [[ ! -x "update_wrf_bc" ]]; then + # if the boundary conditions are specified, we need update_wrf_bc. otherwise, it's ok if it isn't found. + SPEC_BC="$(grep specified "${CENTRALDIR}/namelist.input" | grep true | wc -l)" + if (( SPEC_BC > 0 )); then + echo "ABORT: advance_model.sh could not find required executable dependency ${CENTRALDIR}/update_wrf_bc" + exit 1 + fi + fi + + else + + echo + echo "WARNING: da_wrfvar.exe found, using it to update LBCs on the fly" + echo + if [[ ! -x "${CENTRALDIR}/pert_wrf_bc" ]]; then + echo "ABORT: advance_model.sh could not find required executable dependency ${CENTRALDIR}/pert_wrf_bc" + exit 1 + fi + if [[ ! -r "${CENTRALDIR}/WRF_RUN/be.dat" ]]; then + echo "ABORT: advance_model.sh could not find required readable dependency ${CENTRALDIR}/WRF_RUN/be.dat" + exit 1 + fi + if [[ ! 
-e "${CENTRALDIR}/bc_pert_scale" ]]; then
+ echo "WARNING: using default VAR covariance scales"
+ fi
+
+ fi
+
+fi # process 0 dependency checking
+
+# Set this flag here for all processes so we don't have to keep checking
+if [[ -x "${CENTRALDIR}/WRF_RUN/da_wrfvar.exe" ]]; then
+ USE_WRFVAR=1
+ echo "use wrfvar set"
+else
+ USE_WRFVAR=0
+fi
+# Set this flag here if the radar additive noise script is found
+if [[ -e "${CENTRALDIR}/add_noise.csh" ]]; then
+ USE_NOISE=1
+else
+ USE_NOISE=0
+fi
+if [[ -e "${CENTRALDIR}/replace_wrf_fields" ]]; then
+ USE_REPLACE=1
+else
+ USE_REPLACE=0
+fi
+
+sleep 5
+
+# Each parallel task may need to advance more than one ensemble member.
+# This control file has the actual ensemble number, the input filename,
+# and the output filename for each advance. Be prepared to loop and
+# do the rest of the script more than once.
+
+# NOTE: USE_WRFVAR is determined above from the presence of
+# WRF_RUN/da_wrfvar.exe; it must not be unconditionally overridden here,
+# otherwise the update_wrf_bc fallback (used when da_wrfvar.exe is absent)
+# could never be taken.
+state_copy=1
+ensemble_member_line=1
+linein=2
+lineout=3
+
+# Note: The input and output file information not required, leaving in as placeholder
+# Leaving in place if wrf_to_dart/dart_to_wrf functionality required in future.
+
+# Code identifies input and output file from control file from multiple domains
+# Works with both first_advance.sh and assim_advance.sh scripting
+# Assumes input (filter_restart) and output (prior) files are appended to control_file
+# in consecutive pairs ordered by domain
+
+while (( state_copy <= num_states )); do # We don't expect advance model to run more than one member anymore. Reuse num_states for # domains?
+ +ensemble_member="$(head -n "$ensemble_member_line" "${CENTRALDIR}/${control_file}" | tail -n 1)" + +dn=1 +while (( dn <= num_domains )); do + eval "input_file${dn}=\"\$(head -n $linein ${CENTRALDIR}/${control_file} | tail -n 1)\"" + eval "output_file${dn}=\"\$(head -n $lineout ${CENTRALDIR}/${control_file} | tail -n 1)\"" + + (( dn++ )) + linein=$(( linein + 2 )) + lineout=$(( lineout + 2 )) +done # loop through domains + +infl="0.0" + +# Create a new temp directory for each member unless requested to keep and it exists already +temp_dir="advance_temp${ensemble_member}" +cd "$temp_dir" + +# Link WRF-runtime files (required) and be.dat (if using WRF-Var) +$LN "${CENTRALDIR}/WRF_RUN/"* . + +# Copy DART namelist if necessary +if [[ ! -e input.nml ]]; then + $COPY "${CENTRALDIR}/input.nml" . +fi + +# Append LSM data from previous cycle +if [[ -e "${CENTRALDIR}/append_lsm_data" ]]; then + $LN "${CENTRALDIR}/LSM/lsm_data_${ensemble_member}.nc" lsm_data.nc + "${CENTRALDIR}/append_lsm_data" + $REMOVE lsm_data.nc +fi + +# nfile is required when using MPICH to run wrf.exe +# nfile is machine specific. Not needed on all platforms + +hostname > nfile +hostname >> nfile + +# Specialized code for moving domains (e.g. Tropical Cyclones) +# MULTIPLE_DOMAINS - need a more general instrument here +if [[ -e "${CENTRALDIR}/moving_domain_info" ]]; then + + MY_NUM_DOMAINS="$(head -n 1 "${CENTRALDIR}/moving_domain_info" | tail -n 1)" + $MOVE input.nml input.nml-- + cat > script.sed << EOF +/num_domains/c\ +num_domains = ${MY_NUM_DOMAINS}, +EOF + sed -f script.sed input.nml-- > input.nml + $REMOVE input.nml-- + +fi + +# DMODS - we don't have this option right now, and don't need to convert a file +# +# # if a mean state ic file exists convert it to a wrfinput_mean netcdf file +# if ( -e ${CENTRALDIR}/assim_model_state_ic_mean ) then +# ${LN} ${CENTRALDIR}/assim_model_state_ic_mean dart_wrf_vector +# ${CENTRALDIR}/dart_to_wrf >&! 
out.dart_to_wrf_mean +# ${COPY} wrfinput_d01 wrfinput_mean +# ${REMOVE} wrf.info dart_wrf_vector +# endif + +# Execution of dart_to_wrf not required. Leaving as placeholder +# ICs for this wrf run; Convert DART file to wrfinput netcdf file +# ${MOVE} ${CENTRALDIR}/${input_file} dart_wrf_vector +# ${CENTRALDIR}/dart_to_wrf >&! out.dart_to_wrf +# ${REMOVE} dart_wrf_vector + +stuff_vars=("${increment_vars_a[@]}") + +# Currently hard coded to overwrite only increment_vars_a to +# all domains. No custom increment_vars_a and increment_vars_b +stuff_str='' # these are variables we want to cycle +num_vars="${#stuff_vars[@]}" + +echo "num_vars variable is ${num_vars}" + +i=0 +while (( i < num_vars-1 )); do + stuff_str+="${stuff_vars[$i]}," + (( i++ )) +done +stuff_str+="${stuff_vars[$((num_vars-1))]}" +echo "stuff var ${stuff_str}" + +echo "stuff_str variable is: ${stuff_str}" + +dn=1 +while (( dn <= num_domains )); do + + dchar="$(echo "$dn + 100" | bc | cut -b2-3)" + icnum="$(echo "$ensemble_member + 10000" | bc | cut -b2-5)" + + this_file="filter_restart_d${dchar}.${icnum}" + + if [[ -e "../${this_file}" ]]; then + ncks -A -v "${stuff_str}" "../${this_file}" "wrfinput_d${dchar}" + else + echo "WARNING: ../${this_file} is the posterior from filter and does not exist" + echo "WARNING: this is expected for the first cycle ONLY when only forecast is run" + fi + + (( dn++ )) # Cycle through domains +done + +# Move and remove unnecessary domains +if [[ -e "${CENTRALDIR}/moving_domain_info" ]]; then + + REMOVE_STRING="$(cat "${CENTRALDIR}/remove_domain_info")" + if [[ -n "${REMOVE_STRING}" ]]; then + $REMOVE ${REMOVE_STRING} + fi + + n=1 + NUMBER_FILE_MOVE="$(cat "${CENTRALDIR}/rename_domain_info" | wc -l)" + while (( n <= NUMBER_FILE_MOVE )); do + $MOVE $(head -n "${n}" "${CENTRALDIR}/rename_domain_info" | tail -n 1) + (( n++ )) + done + +fi + +# DMODS - note the wrf.info file was pre-generated, not from dart_to_wrf +read -r targsecs targdays < <(head -n 1 wrf.info) 
+targkey="$(echo "$targdays * 86400 + $targsecs" | bc)" + +read -r wrfsecs wrfdays < <(head -n 2 wrf.info | tail -n 1) +wrfkey="$(echo "$wrfdays * 86400 + $wrfsecs" | bc)" + +echo "wrf.info is read" +echo "$USE_WRFVAR" + +# Find all BC's file available and sort them with "keys". BC's are required +# for real WRF simulations +# NOTE: this needs a fix for the idealized wrf case in which there are no +# boundary files (also same for global wrf). Right now some of the +# commands below give errors, which are ok to ignore for the idealized case. + + +# Check if BCs are "specified" (in which case wrfbdy files are req'd) +# and we need to set up a key list to manage target times +SPEC_BC="$(grep specified "${CENTRALDIR}/namelist.input" | grep true | wc -l)" + +if (( SPEC_BC > 0 )); then + + if (( USE_WRFVAR )); then + mapfile -t bdyfiles < <(ls "${CENTRALDIR}/WRF/wrfbdy_d01_"*_mean) + else + mapfile -t bdyfiles < <(ls "${CENTRALDIR}/WRF/wrfbdy_d01_"*"_${ensemble_member}" | grep -v mean) + fi + echo "${bdyfiles[*]}" + + keylist=() + for f in "${bdyfiles[@]}"; do + day="$(echo "$f" | awk -F_ '{print $(NF-2)}')" + sec="$(echo "$f" | awk -F_ '{print $(NF-1)}')" + key="$(echo "$day * 86400 + $sec" | bc)" + keylist+=("$key") + done + + # numeric sort + read -r -a keys < <(printf "%s\n" "${keylist[@]}" | sort -n | tr '\n' ' ') +else # idealized WRF with non-specified BCs + + keys=("$targkey") + +fi + +read -r START_YEAR START_MONTH START_DAY START_HOUR START_MIN START_SEC < <(head -n 3 wrf.info | tail -n 1) + +START_STRING="${START_YEAR}-${START_MONTH}-${START_DAY}_${START_HOUR}:${START_MIN}:${START_SEC}" +datea="${START_YEAR}${START_MONTH}${START_DAY}${START_HOUR}" + +MY_NUM_DOMAINS="$(head -n 4 wrf.info | tail -n 1)" +ADV_MOD_COMMAND="$(head -n 5 wrf.info | tail -n 1)" + +# Code for dealing with TC nests +if [[ -e "${CENTRALDIR}/fixed_domain_info" ]]; then + + MY_NUM_DOMAINS="$(head -n 1 "${CENTRALDIR}/fixed_domain_info" | tail -n 1)" + +elif [[ -e 
"${CENTRALDIR}/moving_domain_info" ]]; then + + MY_NUM_DOMAINS="$(head -n 2 "${CENTRALDIR}/moving_domain_info" | tail -n 1)" + $MOVE input.nml input.nml-- + cat > script.sed << EOF +/num_domains/c\ +num_domains = ${MY_NUM_DOMAINS}, +EOF + sed -f script.sed input.nml-- > input.nml + $REMOVE input.nml-- + +fi + +# Find the next BC's file available. + +ifile=0 +while (( keys[ifile] <= wrfkey )); do + if (( ifile < ${#bdyfiles[@]}-1 )); then + (( ifile++ )) + else + echo No boundary file available to move beyond + echo "$START_STRING" + exit 1 + fi +done + +# Radar additive noise option. If shell script is available +# in the centraldir, it will be called here. +if (( USE_NOISE )); then + "${CENTRALDIR}/add_noise.sh" "$wrfsecs" "$wrfdays" "$state_copy" "$ensemble_member" "$temp_dir" "$CENTRALDIR" +fi + +# Run the replace_wrf_fields utility to update the static fields +if (( USE_REPLACE )); then + echo ../wrfinput_d01 wrfinput_d01 | "${CENTRALDIR}/replace_wrf_fields" +fi + + +############################################################### +# Advance the model with new BC until target time is reached. # +############################################################### + +while (( wrfkey < targkey )); do + + iday="$(echo "${keys[$ifile]} / 86400" | bc)" + isec="$(echo "${keys[$ifile]} % 86400" | bc)" + + # Copy the boundary condition file to the temp directory if needed. 
+ # Note: BC's only exist for parent domain (d01) + if (( SPEC_BC > 0 )); then + + if (( USE_WRFVAR )); then + $COPY "${CENTRALDIR}/WRF/wrfbdy_d01_${iday}_${isec}_mean" wrfbdy_d01 + else + $COPY "${CENTRALDIR}/WRF/wrfbdy_d01_${iday}_${isec}_${ensemble_member}" wrfbdy_d01 + fi + + fi + + if (( targkey > keys[ifile] )); then + INTERVAL_SS="$(echo "${keys[$ifile]} - $wrfkey" | bc)" + else + INTERVAL_SS="$(echo "$targkey - $wrfkey" | bc)" + fi + + INTERVAL_MIN=$(( INTERVAL_SS / 60 )) + + END_STRING="$(echo "${START_STRING} ${INTERVAL_SS}s -w" | "${CENTRALDIR}/advance_time")" + END_YEAR="$(echo "$END_STRING" | cut -c1-4)" + END_MONTH="$(echo "$END_STRING" | cut -c6-7)" + END_DAY="$(echo "$END_STRING" | cut -c9-10)" + END_HOUR="$(echo "$END_STRING" | cut -c12-13)" + END_MIN="$(echo "$END_STRING" | cut -c15-16)" + END_SEC="$(echo "$END_STRING" | cut -c18-19)" + + # Update boundary conditions. + # WARNING: da_wrfvar.exe will only work correctly if running WRF V3.1 or later! + # If it is found in the central dir, use it to regnerate perturbed boundary files + # Otherwise, do the original call to update_wrf_bc + if (( USE_WRFVAR )); then + + # Set the covariance perturbation scales using file or default values + if [[ -e "${CENTRALDIR}/bc_pert_scale" ]]; then + pscale="$(head -n 1 "${CENTRALDIR}/bc_pert_scale" | tail -n 1)" + hscale="$(head -n 2 "${CENTRALDIR}/bc_pert_scale" | tail -n 1)" + vscale="$(head -n 3 "${CENTRALDIR}/bc_pert_scale" | tail -n 1)" + else + pscale="0.25" + hscale="1.0" + vscale="1.5" + fi + iseed2=$(( ensemble_member * 10 )) + + $REMOVE script.sed + # Note: For WRFDA perturbation code, max_domain = 1, even for nested domain setups + cat > script.sed << EOF +/analysis_date/c\ +analysis_date = '${END_STRING}.0000', +/as1/c\ +as1 = ${pscale}, ${hscale}, ${vscale}, +/as2/c\ +as2 = ${pscale}, ${hscale}, ${vscale}, +/as3/c\ +as3 = ${pscale}, ${hscale}, ${vscale}, +/as4/c\ +as4 = ${pscale}, ${hscale}, ${vscale}, +/as5/c\ +as5 = ${pscale}, ${hscale}, ${vscale}, 
+/seed_array1/c\ +seed_array1 = 1${END_MONTH}${END_DAY}${END_HOUR}, +/seed_array2/c\ +seed_array2 = ${iseed2}, +/start_year/c\ +start_year = ${END_YEAR}, +/start_month/c\ +start_month = ${END_MONTH}, +/start_day/c\ +start_day = ${END_DAY}, +/start_hour/c\ +start_hour = ${END_HOUR}, +/start_minute/c\ +start_minute = ${END_MIN}, +/start_second/c\ +start_second = ${END_SEC}, +/end_year/c\ +end_year = ${END_YEAR}, +/end_month/c\ +end_month = ${END_MONTH}, +/end_day/c\ +end_day = ${END_DAY}, +/end_hour/c\ +end_hour = ${END_HOUR}, +/end_minute/c\ +end_minute = ${END_MIN}, +/end_second/c\ +end_second = ${END_SEC}, +/max_dom/c\ +max_dom = 1, +EOF +# The EOF on the line above MUST REMAIN in column 1. + + sed -f script.sed "${CENTRALDIR}/namelist.input" > namelist.input + + # Only need parent domain (d01) here, even for nested domain setups + $LN "${CENTRALDIR}/WRF/wrfinput_d01_${targdays}_${targsecs}_mean" ./fg +################################ +# Instead of running wrfda, just add static pertubations from the pert bank (gen_pert_bank.sh) +# Note the static perturbation path is defined in the ncl script + cp fg wrfvar_output + cp "${CENTRALDIR}/add_bank_perts.ncl" . + cmd3="ncl 'MEM_NUM=${ensemble_member}' 'PERTS_DIR=\"${PERTS_DIR}/work/boundary_perts\"' ${CENTRALDIR}/advance_temp${ensemble_member}/add_bank_perts.ncl" + $REMOVE nclrun3.out + + cat > nclrun3.out << EOF +${cmd3} +EOF + chmod +x nclrun3.out + ./nclrun3.out > add_perts.out 2>&1 + + if [[ ! -s add_perts.err ]]; then + echo "Perts added to member ${ensemble_member}" + else + echo "Error! Non-zero status returned from add_bank_perts.ncl. Check ${RUN_DIR}/advance_temp${ensemble_member}/add_perts.err." 
+ cat add_perts.err + exit 1 + fi +################################ + cp namelist.input namelist.input.3dvar + if [[ -e rsl.out.0000 ]]; then cat rsl.out.0000 >> out.wrfvar; fi + + $MOVE wrfvar_output wrfinput_next + $LN wrfinput_d01 wrfinput_this + $LN wrfbdy_d01 wrfbdy_this + + # If wrfinput_mean file found, rename it + if [[ -e wrfinput_mean ]]; then + $MOVE wrfinput_mean wrfinput_this_mean + $MOVE fg wrfinput_next_mean + fi + + "${CENTRALDIR}/pert_wrf_bc" > out.pert_wrf_bc 2>&1 + $REMOVE wrfinput_this wrfinput_next wrfbdy_this + if [[ -e wrfinput_this_mean ]]; then $REMOVE wrfinput_this_mean wrfinput_next_mean; fi + + else # Update boundary conditions from existing wrfbdy files + + echo "$infl" | "${CENTRALDIR}/update_wrf_bc" > out.update_wrf_bc 2>&1 + + fi + + $REMOVE script.sed namelist.input + cat > script.sed << EOF +/run_hours/c\ +run_hours = 0, +/run_minutes/c\ +run_minutes = 0, +/run_seconds/c\ +run_seconds = ${INTERVAL_SS}, +/start_year/c\ +start_year = ${MY_NUM_DOMAINS}*${START_YEAR}, +/start_month/c\ +start_month = ${MY_NUM_DOMAINS}*${START_MONTH}, +/start_day/c\ +start_day = ${MY_NUM_DOMAINS}*${START_DAY}, +/start_hour/c\ +start_hour = ${MY_NUM_DOMAINS}*${START_HOUR}, +/start_minute/c\ +start_minute = ${MY_NUM_DOMAINS}*${START_MIN}, +/start_second/c\ +start_second = ${MY_NUM_DOMAINS}*${START_SEC}, +/end_year/c\ +end_year = ${MY_NUM_DOMAINS}*${END_YEAR}, +/end_month/c\ +end_month = ${MY_NUM_DOMAINS}*${END_MONTH}, +/end_day/c\ +end_day = ${MY_NUM_DOMAINS}*${END_DAY}, +/end_hour/c\ +end_hour = ${MY_NUM_DOMAINS}*${END_HOUR}, +/end_minute/c\ +end_minute = ${MY_NUM_DOMAINS}*${END_MIN}, +/end_second/c\ +end_second = ${MY_NUM_DOMAINS}*${END_SEC}, +/history_interval/c\ +history_interval = ${MY_NUM_DOMAINS}*${INTERVAL_MIN}, +/frames_per_outfile/c\ +frames_per_outfile = ${MY_NUM_DOMAINS}*1, +/max_dom/c\ +max_dom = ${MY_NUM_DOMAINS}, +EOF +# The EOF on the line above MUST REMAIN in column 1. 
+ + if [[ -e "${CENTRALDIR}/fixed_domain_info" ]]; then + + nx_string="$(head -n 2 "${CENTRALDIR}/fixed_domain_info" | tail -n 1)" + ny_string="$(head -n 3 "${CENTRALDIR}/fixed_domain_info" | tail -n 1)" + i_start_str="$(head -n 4 "${CENTRALDIR}/fixed_domain_info" | tail -n 1)" + j_start_str="$(head -n 5 "${CENTRALDIR}/fixed_domain_info" | tail -n 1)" + + cat >> script.sed << EOF +/e_we/c\ +e_we = ${nx_string}, +/e_sn/c\ +e_sn = ${ny_string}, +/i_parent_start/c\ +i_parent_start = ${i_start_str}, +/j_parent_start/c\ +j_parent_start = ${j_start_str}, +EOF + + elif [[ -e "${CENTRALDIR}/moving_domain_info" ]]; then + + nx_string="$(head -n 3 "${CENTRALDIR}/moving_domain_info" | tail -n 1)" + ny_string="$(head -n 4 "${CENTRALDIR}/moving_domain_info" | tail -n 1)" + i_start_str="$(head -n 5 "${CENTRALDIR}/moving_domain_info" | tail -n 1)" + j_start_str="$(head -n 6 "${CENTRALDIR}/moving_domain_info" | tail -n 1)" + input_file="$(head -n 7 "${CENTRALDIR}/moving_domain_info" | tail -n 1)" + num_move_str="$(head -n 8 "${CENTRALDIR}/moving_domain_info" | tail -n 1)" + id_move_str="$(head -n 9 "${CENTRALDIR}/moving_domain_info" | tail -n 1)" + move_time_str="$(head -n 10 "${CENTRALDIR}/moving_domain_info" | tail -n 1)" + x_move_string="$(head -n 11 "${CENTRALDIR}/moving_domain_info" | tail -n 1)" + y_move_string="$(head -n 12 "${CENTRALDIR}/moving_domain_info" | tail -n 1)" + + cat >> script.sed << EOF +/e_we/c\ +e_we = ${nx_string}, +/e_sn/c\ +e_sn = ${ny_string}, +/i_parent_start/c\ +i_parent_start = ${i_start_str}, +/j_parent_start/c\ +j_parent_start = ${j_start_str}, +/input_from_file/c\ +input_from_file = ${input_file}, +/num_moves/c\ +num_moves = ${num_move_str}, +/move_id/c\ +move_id = ${id_move_str} +/move_interval/c\ +move_interval = ${move_time_str} +/move_cd_x/c\ +move_cd_x = ${x_move_string} +/move_cd_y/c\ +move_cd_y = ${y_move_string} +EOF + + fi + + if (( ensemble_member <= 1 )); then + echo " /auxhist1_interval/c\\" >> script.sed + echo " auxhist1_interval = 0, 
3, 3" >> script.sed + fi + + sed -f script.sed "${CENTRALDIR}/namelist.input" > namelist.input + + #------------------------------------------------------------- + # + # HERE IS A GOOD PLACE TO GRAB FIELDS FROM OTHER SOURCES + # AND STUFF THEM INTO YOUR wrfinput_d0? FILES + # + #------------------------------------------------------------ + + if [[ -e rsl.out.integration ]]; then $REMOVE rsl.*; fi + + # RUNNING WRF FORECAST HERE !! + export MPI_SHEPHERD=FALSE + eval "${ADV_MOD_COMMAND}" >> rsl.out.integration 2>&1 + + if [[ -e rsl.out.0000 ]]; then cat rsl.out.0000 >> rsl.out.integration; fi + $COPY rsl.out.integration "${WRFOUTDIR}/wrf.out_${targdays}_${targsecs}_${ensemble_member}" + + SUCCESS="$(grep "wrf: SUCCESS COMPLETE WRF" rsl.out.integration | cat | wc -l)" + if (( SUCCESS == 0 )); then + echo "$ensemble_member" > "${CENTRALDIR}/blown_${targdays}_${targsecs}.out" + echo "Model failure! Check file ${CENTRALDIR}/blown_${targdays}_${targsecs}.out" + echo "for a list of failed ensemble_members, and check here for the individual output files:" + echo " ${CENTRALDIR}/wrf.out_${targdays}_${targsecs}_${ensemble_member} " + exit 255 + fi + + if [[ -e "${CENTRALDIR}/append_precip_to_diag" ]]; then + dn=1 ; which ncks >/dev/null 2>&1 + while (( dn <= num_domains )); do + ncks -h -O -F -v RAINC,RAINNC "wrfout_d0${dn}_${END_STRING}" wrf_precip.nc + $MOVE wrf_precip.nc "${CENTRALDIR}/wrf_precip_d0${dn}_${END_STRING}_${ensemble_member}" + (( dn++ )) + done + fi + +# zip up the wrfin file + dn=1 + while (( dn <= num_domains )); do + $MOVE "wrfinput_d0${dn}" "wrfinput_d0${dn}_${ensemble_member}" + gzip "wrfinput_d0${dn}_${ensemble_member}" & + # Wait for zip operation to complete + while [[ -e "wrfinput_d0${dn}_${ensemble_member}" ]]; do + sleep 3 + touch "${CENTRALDIR}/HAD_TO_WAIT" + done + (( dn++ )) + done + +# Forecast date + dn=1 + while (( dn <= num_domains )); do + if (( ensemble_member <= save_ensemble_member )); then + $COPY "wrfout_d0${dn}_${END_STRING}" 
"${WRFOUTDIR}/wrfout_d0${dn}_${END_STRING}_${ensemble_member}" + fi + $MOVE "wrfinput_d0${dn}_${ensemble_member}.gz" "../WRFIN/wrfinput_d0${dn}_${ensemble_member}.gz" + $MOVE "wrfout_d0${dn}_${END_STRING}" "wrfinput_d0${dn}" + (( dn++ )) + done + + $REMOVE wrfout* + + START_YEAR="$END_YEAR" + START_MONTH="$END_MONTH" + START_DAY="$END_DAY" + START_HOUR="$END_HOUR" + START_MIN="$END_MIN" + START_SEC="$END_SEC" + wrfkey="${keys[$ifile]}" + (( ifile++ )) + + done + + ############################################## + # At this point, the target time is reached. # + ############################################## + # Withdraw LSM data to use in next cycle This is remnant from the Lanai days, we now pull soil state + # together with everything else + if [[ -e "${CENTRALDIR}/fixed_domain_info" ]]; then MY_NUM_DOMAINS=1; fi + if [[ -e "${CENTRALDIR}/append_lsm_data" ]]; then + dn=1 + while (( dn <= num_domains )); do + ncks -h -F -A -a -v TSLB,SMOIS,SH2O,TSK "wrfinput_d0${dn}" lsm_data.nc + ncrename -h -v TSLB,TSLB_d0${dn} -v SMOIS,SMOIS_d0${dn} -v SH2O,SH2O_d0${dn} -v TSK,TSK_d0${dn} \ + -d west_east,west_east_d0${dn} -d south_north,south_north_d0${dn} \ + -d soil_layers_stag,soil_layers_stag_d0${dn} lsm_data.nc + (( dn++ )) + done + $REMOVE "${CENTRALDIR}/LSM/lsm_data_${ensemble_member}.nc" + $MOVE lsm_data.nc "${CENTRALDIR}/LSM/lsm_data_${ensemble_member}.nc" + fi + + if [[ -e "${CENTRALDIR}/fixed_domain_info" || -e "${CENTRALDIR}/moving_domain_info" ]]; then + ln -sf "${CENTRALDIR}/wrfinput_d01" wrfinput_d01_base + "${CENTRALDIR}/recalc_wrf_base" > out.recalc_wrf_base 2>&1 + fi + +# Execution of wrf_to_dart not required. Leaving as placeholder +# Create new input to DART (taken from "wrfinput") +# ${CENTRALDIR}/wrf_to_dart >&! 
out.wrf_to_dart +# ${MOVE} dart_wrf_vector ${CENTRALDIR}/${output_file} + +# Extract the cycle variables + num_vars="${#extract_vars_a[@]}" + extract_str_a='' + i=0 + while (( i < num_vars-1 )); do + extract_str_a+="${extract_vars_a[$i]}," + (( i++ )) + done + extract_str_a+="${extract_vars_a[$((num_vars-1))]}" + echo "${extract_str_a}" + + num_vars="${#extract_vars_b[@]}" + extract_str_b='' + i=0 + while (( i < num_vars-1 )); do + extract_str_b+="${extract_vars_b[$i]}," + (( i++ )) + done + extract_str_b+="${extract_vars_b[$((num_vars-1))]}" + echo "${extract_str_b}" + + +# Loop through all wrf domain files that are present + dn=1 + while (( dn <= num_domains )); do + dchar="$(echo "$dn + 100" | bc | cut -b2-3)" + icnum="$(echo "$ensemble_member + 10000" | bc | cut -b2-5)" + outfile="prior_d${dchar}.${icnum}" + if (( dn == 1 )); then + ncks -O -v "${extract_str_a}" "wrfinput_d${dchar}" "../${outfile}" + else + ncks -O -v "${extract_str_b}" "wrfinput_d${dchar}" "../${outfile}" + fi + (( dn++ )) + echo "should have made $outfile" + done + + if [[ -e "${CENTRALDIR}/moving_domain_info" && "$ensemble_member" -eq 1 ]]; then + dn=2 + while (( dn <= num_domains )); do + $COPY "wrfinput_d0${dn}" "${CENTRALDIR}/wrfinput_d0${dn}_new" + (( dn++ )) + done + fi + + + touch "${CENTRALDIR}/done_member_${ensemble_member}" + + cd "$CENTRALDIR" + + # Delete the temp directory for each member if desired + if [[ "$delete_temp_dir" == true ]]; then $REMOVE "${temp_dir}"; fi + echo "Ensemble Member $ensemble_member completed" + + # Repeat the entire process for any other ensemble member that + # needs to be advanced by this task. 
Don't expect this to ever be run + (( state_copy++ )) + ensemble_member_line=$(( ensemble_member_line + 3 )) + +done + +# Remove the filter_control file to signal completion +$REMOVE "$control_file" +if (( SUCCESS == 1 )); then + echo " done_member_$ensemble_member" +fi +exit 0 + diff --git a/models/wrf/shell_scripts/param.sh b/models/wrf/shell_scripts/param.sh new file mode 100755 index 0000000000..45ce62baf6 --- /dev/null +++ b/models/wrf/shell_scripts/param.sh @@ -0,0 +1,142 @@ +#!/bin/bash + +# DART software - Copyright UCAR. +# This open source software is provided "as is" without charge. + +# ADAPTIVE_INFLATION is disconnected from input.nml +# ASSIM_INT_HOURS is implicit in (ALL) the scripts except assim_advance.sh +# ASSIM_INT_MINUTES support needs to be added to param.sh + +# ----------------------------------------------------------- +# Environment setup (example for NCAR Derecho) +# ----------------------------------------------------------- +module load nco +module load ncl/6.6.2 +set -uo pipefail +# ----------------------------------------------------------- +# Assimilation parameters +# ----------------------------------------------------------- +NUM_ENS=20 +ASSIM_INT_MINUTES=0 # 0 means use ASSIM_INT_HOURS +ASSIM_INT_HOURS=6 # ignored if ASSIM_INT_MINUTES > 0 +IC_PERT_SCALE=0.25 +# Set to 1 to enable adaptive inflation +# For pure forecast mode turn off adaptive inflation (set = 0) +ADAPTIVE_INFLATION=1 +NUM_DOMAINS=2 +# ----------------------------------------------------------- +# Directory structure +# IMPORTANT: scripts rely on these relative names +# ----------------------------------------------------------- +BASE_DIR=/glade/derecho/scratch/bmraczka/WRFv4.5_kansas +RUN_DIR="${BASE_DIR}/rundir" +TEMPLATE_DIR="${BASE_DIR}/template" +OBSPROC_DIR="${BASE_DIR}/obsproc" +OUTPUT_DIR="${BASE_DIR}/output" +ICBC_DIR="${BASE_DIR}/icbc" +POST_STAGE_DIR="${BASE_DIR}/post" +OBS_DIAG_DIR="${BASE_DIR}/obs_diag" +PERTS_DIR="${BASE_DIR}/perts" +# 
----------------------------------------------------------- +# Component paths +# ----------------------------------------------------------- +SHELL_SCRIPTS_DIR="${BASE_DIR}/scripts" +DART_DIR=/glade/work/bmraczka/DART +WRF_DM_SRC_DIR=/glade/work/bmraczka/WRF/WRFv4.5_git +WPS_SRC_DIR=/glade/work/bmraczka/WRF/WPSv4.5_git +VAR_SRC_DIR=/glade/work/bmraczka/WRF/WRFDAv4.5_git +if [[ ${NUM_DOMAINS} -gt 1 ]]; then + echo + DART_DOM_DIR="${DART_DIR}/models/wrf/tutorial/template_nest" + echo "NUM_DOMAINS = ${NUM_DOMAINS}" + echo "Assigning input.nml.template for multiple WRF domains" + echo +else + echo + DART_DOM_DIR="${DART_DIR}/models/wrf/tutorial/template" + echo "NUM_DOMAINS = ${NUM_DOMAINS}" + echo "Assigning input.nml.template for single WRF domain" +fi + +# ----------------------------------------------------------- +# Template / IC file sources +# ----------------------------------------------------------- +GEO_FILES_DIR=/glade/u/home/wrfhelp/WPS_GEOG +GRIB_DATA_DIR="${ICBC_DIR}/grib_data" +GRIB_SRC='GFS' + +# ----------------------------------------------------------- +# Variable lists for extraction/cycling +# ----------------------------------------------------------- + +extract_vars_a=( U V PH THM MU QVAPOR QCLOUD QRAIN QICE QSNOW QGRAUP QNICE QNRAIN \ + U10 V10 T2 Q2 PSFC TSLB SMOIS TSK RAINC RAINNC GRAUPELNC ) + +extract_vars_b=( U V W PH THM MU QVAPOR QCLOUD QRAIN QICE QSNOW QGRAUP QNICE QNRAIN \ + U10 V10 T2 Q2 PSFC TSLB SMOIS TSK RAINC RAINNC GRAUPELNC ) + +cycle_vars_a=( U V PH THM MU QVAPOR QCLOUD QRAIN QICE QSNOW QGRAUP QNICE QNRAIN \ + U10 V10 T2 Q2 PSFC TSLB SMOIS TSK ) + +cycle_vars_b=( U V PH THM MU QVAPOR QCLOUD QRAIN QICE QSNOW QGRAUP QNICE QNRAIN \ + U10 V10 T2 Q2 PSFC TSLB SMOIS TSK ) + +increment_vars_a=( U V PH THM MU QVAPOR QCLOUD QRAIN QICE QSNOW QGRAUP QNICE QNRAIN U10 V10 T2 Q2 PSFC ) +increment_vars_b=( U V PH THM MU QVAPOR QCLOUD QRAIN QICE QSNOW QGRAUP QNICE QNRAIN U10 V10 T2 Q2 PSFC ) + +# 
----------------------------------------------------------- +# Diagnostics +# ----------------------------------------------------------- +# OBS_VERIF_HRS=$(( ASSIM_INT_HOURS / 2 )) + +# ----------------------------------------------------------- +# Queueing / HPC system settings +# ----------------------------------------------------------- +SUPER_PLATFORM="derecho" +COMPUTER_CHARGE_ACCOUNT=P86850054 +EMAIL="bmraczka@ucar.edu" + +if [[ "$SUPER_PLATFORM" == "derecho" ]]; then + + # PBS queue example + FILTER_QUEUE="main" + FILTER_PRIORITY="premium" + FILTER_TIME="0:35:00" + FILTER_NODES=2 + FILTER_PROCS=128 + FILTER_MPI=128 + + ADVANCE_QUEUE="main" + ADVANCE_PRIORITY="premium" + ADVANCE_TIME="0:20:00" + ADVANCE_NODES=1 + ADVANCE_PROCS=128 + ADVANCE_MPI=128 + +else + + # LSF/SLURM example values + FILTER_QUEUE="regular" + FILTER_TIME="0:25" + FILTER_CORES=512 + + ADVANCE_QUEUE="regular" + ADVANCE_TIME="0:18" + ADVANCE_CORES=64 + + FILTER_PTILE=16 + ADVANCE_PTILE=16 +fi + +# ----------------------------------------------------------- +# System commands +# ----------------------------------------------------------- +export REMOVE='rm -rf' +export COPY='cp -pfr' +export MOVE='mv -f' +export LINK='ln -fs' +export WGET='/usr/bin/wget' +export LIST='ls' + +return + diff --git a/models/wrf/shell_scripts/prep_ic.sh b/models/wrf/shell_scripts/prep_ic.sh new file mode 100755 index 0000000000..1abf78d952 --- /dev/null +++ b/models/wrf/shell_scripts/prep_ic.sh @@ -0,0 +1,71 @@ +#!/bin/bash +# +# DART software - Copyright UCAR. 
This open source software is provided +# by UCAR, "as is", without charge, subject to all terms of use at +# http://www.image.ucar.edu/DAReS/DART/DART_download + +set -uo pipefail + +if [[ $# -gt 0 ]]; then + n="$1" # pass in the ensemble member number + datep="$2" # needed for correct path to file + domains="$3" + paramfile="$4" +else # values come from environment variables + n="${mem_num}" + datep="${date}" + domains="${domain}" + paramfile="${paramf}" +fi + +source "$paramfile" + +echo "prep_ic.sh using n=$n datep=$datep domains=$domains paramfile=$paramfile" +echo "domain 1 using cycle_vars_a, any other nested domains using cycle_vars_b" + +dn=1 +while (( dn <= domains )); do + dchar="$(echo "$dn + 100" | bc | cut -b2-3)" + + if (( dn == 1 )); then + + # cycle_vars_a defined in paramfile (bash array) + num_vars="${#cycle_vars_a[@]}" + cycle_str='' # these are variables we want to cycle + i=0 + while (( i < num_vars-1 )); do + cycle_str+="${cycle_vars_a[$i]}," + (( i++ )) + done + cycle_str+="${cycle_vars_a[$((num_vars-1))]}" + echo "${cycle_str}" + + else # larger (nested) domains can use a different list of cycled variables (e.g. 
radar) + + # cycle_vars_b defined in paramfile (bash array) + num_vars="${#cycle_vars_b[@]}" + cycle_str='' # these are variables we want to cycle + i=0 + while (( i < num_vars-1 )); do + cycle_str+="${cycle_vars_b[$i]}," + (( i++ )) + done + cycle_str+="${cycle_vars_b[$((num_vars-1))]}" + echo "${cycle_str}" + + fi + + ensstring="$(printf "%04d" "$n")" + dchar="$(printf "%02d" "$dn")" + + ncks -A -v "${cycle_str}" \ + "${OUTPUT_DIR}/${datep}/PRIORS/prior_d${dchar}.${ensstring}" \ + "${RUN_DIR}/advance_temp${n}/wrfinput_d${dchar}" + + touch "${RUN_DIR}/ic_d${dchar}_${n}_ready" + + (( dn++ )) +done # loop through domains + +exit 0 + diff --git a/models/wrf/shell_scripts/real.sh b/models/wrf/shell_scripts/real.sh new file mode 100755 index 0000000000..514d71650e --- /dev/null +++ b/models/wrf/shell_scripts/real.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +paramfile="$1" + +# Load parameter file +source "$paramfile" + +# Move to ICBC directory +cd "$ICBC_DIR" || { echo "ERROR: cannot cd to $ICBC_DIR"; exit 1; } + +# Run real.exe with MPI +#mpiexec -n 128 -ppn 128 "${RUN_DIR}/WRF_RUN/real.exe" +mpiexec -n 4 -ppn 4 "${RUN_DIR}/WRF_RUN/real.exe" + +# Unconditionally set real_done, matching the last two lines of original script +touch "${ICBC_DIR}/real_done" + +exit 0 + diff --git a/models/wrf/shell_scripts/setup.sh b/models/wrf/shell_scripts/setup.sh new file mode 100755 index 0000000000..d19f7f8c18 --- /dev/null +++ b/models/wrf/shell_scripts/setup.sh @@ -0,0 +1,114 @@ +#!/bin/bash + +# DART software - Copyright UCAR. This open source software is provided +# by UCAR, "as is", without charge, subject to all terms of use at +# http://www.image.ucar.edu/DAReS/DART/DART_download + +# This script sets up the proper directory structure for running the WRF/DART tutorial. +# It also places the required DART, WRF, and WRFDA executables and files in the proper location. 
+# +# >> ./setup.sh param.sh + +set -uo pipefail + +myname="$0" +paramfile="$1" + +if [[ $# -eq 1 ]]; then + echo + echo "$myname starting at $(date)" + echo "parameter filename is $paramfile" + echo +else + echo + echo "usage: $(basename "$myname") " + echo + exit 1 +fi + +# Load parameter file +source "$paramfile" +COPY="cp -rL" + +# Create directories +dirs=( + "$RUN_DIR" "$TEMPLATE_DIR" "$OBSPROC_DIR" "$OUTPUT_DIR" "$ICBC_DIR" + "$POST_STAGE_DIR" "$OBS_DIAG_DIR" "$PERTS_DIR" "$SHELL_SCRIPTS_DIR" + "$RUN_DIR/WRFIN" "$RUN_DIR/WRFOUT" "$RUN_DIR/WRF_RUN" +) + + +for dir in "${dirs[@]}"; do + if [[ ! -d "$dir" ]]; then + mkdir -p "$dir" + fi + if [[ ! -d "$dir" ]]; then + echo "ERROR: unable to make directory '$dir'" + exit 2 + fi +done + +# Put DART executables in proper place +executables=( + advance_time filter pert_wrf_bc obs_diag obs_sequence_tool + obs_seq_to_netcdf fill_inflation_restart wrf_dart_obs_preprocess +) + +for exe in "${executables[@]}"; do + $COPY "${DART_DIR}/models/wrf/work/$exe" "${RUN_DIR}/$exe" + if [[ ! -e "${RUN_DIR}/$exe" ]]; then + echo "ERROR: ${DART_DIR}/models/wrf/work/$exe not copied to ${RUN_DIR}" + echo "ERROR: Check DART build." + exit 3 + fi +done + +SEC_DIR="assimilation_code/programs/gen_sampling_err_table/work" +$COPY "${DART_DIR}/${SEC_DIR}/sampling_error_correction_table.nc" "${RUN_DIR}" +if [[ ! -e "${RUN_DIR}/sampling_error_correction_table.nc" ]]; then + echo "ERROR: ${DART_DIR}/${SEC_DIR}/sampling_error_correction_table.nc not copied to ..." + echo "ERROR: ${RUN_DIR}/sampling_error_correction_table.nc" + echo "ERROR: Check DART directory." + exit 4 +fi + +# Put WRF executables and supporting files in proper place +$COPY "${WRF_DM_SRC_DIR}/run/"* "${RUN_DIR}/WRF_RUN" +if [[ ! -e "${RUN_DIR}/WRF_RUN/wrf.exe" || ! 
-e "${RUN_DIR}/WRF_RUN/real.exe" ]]; then + echo "ERROR: real.exe or wrf.exe not copied into ${RUN_DIR}/WRF_RUN" + echo "ERROR: Check WRF build" + exit 5 +fi + +# WRF namelist.input gets replaced +${REMOVE} "${RUN_DIR}/WRF_RUN/namelist.input" || exit 4 + +# Put WRFDA executables and supporting files in proper place +$COPY "${VAR_SRC_DIR}/var/build/da_wrfvar.exe" "${RUN_DIR}/WRF_RUN/da_wrfvar.exe" +if [[ ! -e "${RUN_DIR}/WRF_RUN/da_wrfvar.exe" ]]; then + echo "ERROR: ${VAR_SRC_DIR}/var/build/da_wrfvar.exe not copied to ${RUN_DIR}/WRF_RUN/" + echo "ERROR: Check WRFDA build." + exit 6 +fi + +$COPY "${VAR_SRC_DIR}/var/run/be.dat.cv3" "${RUN_DIR}/WRF_RUN/be.dat" +if [[ ! -e "${RUN_DIR}/WRF_RUN/be.dat" ]]; then + echo "ERROR: ${VAR_SRC_DIR}/var/run/be.dat.cv3 not found; cannot be copied to ${RUN_DIR}/WRF_RUN/" + echo "ERROR: Check WRFDA build." + exit 7 +fi + +# Put scripts in proper place +$COPY "${SHELL_SCRIPTS_DIR}/add_bank_perts.ncl" "${RUN_DIR}" || exit 8 +$COPY "${SHELL_SCRIPTS_DIR}/new_advance_model.sh" "${RUN_DIR}" || exit 9 + +# Edit input.nml.template +sed "s/ens_size.*/ens_size = $NUM_ENS,/g" \ + "${DART_DOM_DIR}/input.nml.template" \ + > "${RUN_DIR}/input.nml" || exit 8 + +echo "$myname complete at $(date)" +echo + +exit 0 + diff --git a/models/wrf/shell_scripts_csh/add_bank_perts.ncl b/models/wrf/shell_scripts_csh/add_bank_perts.ncl new file mode 100644 index 0000000000..964cb61f2d --- /dev/null +++ b/models/wrf/shell_scripts_csh/add_bank_perts.ncl @@ -0,0 +1,66 @@ +; Simple utility to add perturbation fields from the +; perturbation bank files to a provided wrfinput file. +; consistent with use in the advance_model.csh script +; where we are substituting this call for running +; da_wrfvar.exe, so the wrfinput file to perturb +; should be called 'wrfinput_next', and is the mean +; state for the target lateral boundaries. 
+ +begin + + err = NhlGetErrorObjectId() + setvalues err + "errFileName" : "add_perts.err" + end setvalues + + ens_mem_num = ""+MEM_NUM + pert_bank_path = PERTS_DIR + +; perturbation scaling: + scale_T = 1.3 + scale_U = 1.3 + scale_V = 1.3 + scale_Q = 1.3 + scale_M = 0.6 + +; for random pert, pick from larger bank + bank_size = 100 + con = bank_size / 32766.0 ; 32766.0 forces a 0.0 to 1.0 range +; get a seed + wallClock = stringtoint(systemfunc("date -u +%N")) + srand(wallClock) + ens_mem_num = floattoint(con * rand()) + tmpvar = (/1,toint(ens_mem_num)/) + ens_mem_num = max(tmpvar) + delete(tmpvar) + tmpvar = (/bank_size,toint(ens_mem_num)/) + ens_mem_num = min(tmpvar) + delete(tmpvar) +; open selected file + pert_bank_file = "pert_bank_mem_"+ens_mem_num+".nc" + wrf_file = "wrfvar_output"+".nc" + + asciiwrite("mem"+MEM_NUM+"_pert_bank_num",ens_mem_num) + print ("bank member number "+ens_mem_num) + + +;For WRFv4 or later prognostic temp variable is THM + pert_fields = (/"U", "V", "T", "QVAPOR","MU"/) + wrf_fields = (/"U", "V", "THM", "QVAPOR","MU"/) + pert_scale = (/scale_U,scale_V,scale_T,scale_Q,scale_M/) + nperts = dimsizes(pert_fields) + pert_in = addfile(pert_bank_path+"/"+pert_bank_file,"r") + wrf_in = addfile(wrf_file,"w") + do n=0,nperts-1 + temp_w = wrf_in->$wrf_fields(n)$ + temp_p = pert_in->$pert_fields(n)$ + temp_c = temp_w+(temp_p * pert_scale(n)) + wrf_in->$wrf_fields(n)$ = temp_c + delete(temp_w) + delete(temp_p) + delete(temp_c) + end do + delete(pert_in) + delete(wrf_in) + print("perts added") +end diff --git a/models/wrf/shell_scripts/assim_advance.csh b/models/wrf/shell_scripts_csh/assim_advance.csh similarity index 80% rename from models/wrf/shell_scripts/assim_advance.csh rename to models/wrf/shell_scripts_csh/assim_advance.csh index b9fa8e5e44..dc4aeeca5a 100755 --- a/models/wrf/shell_scripts/assim_advance.csh +++ b/models/wrf/shell_scripts_csh/assim_advance.csh @@ -68,16 +68,26 @@ ${gdatef[2]} ${gdatef[1]} ${gdate[2]} ${gdate[1]} $yyyy $mm $dd 
$hh $nn $ss $domains - mpiexec -n 128 -ppn 128 ./wrf.exe + mpiexec -n 4 -ppn 4 ./wrf.exe EOF endif cd $RUN_DIR +# filter_control accounts for multiple domains +# Appends input (filter_restart) and output (prior) in consecutive pairs +# Should be consistent with filter_control setup for first_advance.csh +# during intial forecast step + echo $emember >! ${RUN_DIR}/filter_control${icnum} -echo filter_restart_d01.${icnum} >> ${RUN_DIR}/filter_control${icnum} -echo prior_d01.${icnum} >> ${RUN_DIR}/filter_control${icnum} +set dn = 1 +while ( $dn <= $domains ) + set dchar = `echo $dn + 100 | bc | cut -b2-3` + echo filter_restart_d${dchar}.${icnum} >> ${RUN_DIR}/filter_control${icnum} + echo prior_d${dchar}.${icnum} >> ${RUN_DIR}/filter_control${icnum} + @ dn ++ +end # loop through domains # integrate the model forward in time ${RUN_DIR}/new_advance_model.csh ${emember} $domains filter_control${icnum} $paramfile diff --git a/models/wrf/shell_scripts/assimilate.csh b/models/wrf/shell_scripts_csh/assimilate.csh similarity index 100% rename from models/wrf/shell_scripts/assimilate.csh rename to models/wrf/shell_scripts_csh/assimilate.csh diff --git a/models/wrf/shell_scripts/diagnostics_obs.csh b/models/wrf/shell_scripts_csh/diagnostics_obs.csh similarity index 100% rename from models/wrf/shell_scripts/diagnostics_obs.csh rename to models/wrf/shell_scripts_csh/diagnostics_obs.csh diff --git a/models/wrf/shell_scripts/driver.csh b/models/wrf/shell_scripts_csh/driver.csh similarity index 85% rename from models/wrf/shell_scripts/driver.csh rename to models/wrf/shell_scripts_csh/driver.csh index 55d0f655a7..c9704b6189 100755 --- a/models/wrf/shell_scripts/driver.csh +++ b/models/wrf/shell_scripts_csh/driver.csh @@ -20,13 +20,12 @@ ######################################################################## # Set the correct values here set paramfile = `readlink -f ${2}` # Get absolute path for param.csh from command line arg -set datefnl = 2017042712 # target date YYYYMMDDHH # set 
this appropriately #%%%# +set datefnl = 2024051912 # target date YYYYMMDDHH # set this appropriately #%%%# ######################################################################## # Likely do not need to change anything below ######################################################################## source $paramfile - echo `uname -a` cd ${RUN_DIR} @@ -59,10 +58,13 @@ while ( 1 == 1 ) echo 'ready to check inputs' set domains = $NUM_DOMAINS # from the param file + # Check to make sure all input data exists - if ( $domains == 1 ) then - foreach infile ( wrfinput_d01_${gdate[1]}_${gdate[2]}_mean \ - wrfinput_d01_${gdatef[1]}_${gdatef[2]}_mean \ + set dn = 1 + while ( $dn <= $domains ) + set dchar = `echo $dn + 100 | bc | cut -b2-3` + foreach infile ( wrfinput_d${dchar}_${gdate[1]}_${gdate[2]}_mean \ + wrfinput_d${dchar}_${gdatef[1]}_${gdatef[2]}_mean \ wrfbdy_d01_${gdatef[1]}_${gdatef[2]}_mean obs_seq.out ) if ( ! -e ${OUTPUT_DIR}/${datea}/${infile} ) then @@ -71,7 +73,8 @@ while ( 1 == 1 ) exit 2 endif end - endif + @ dn++ + end # loop through domains # Clear the advance_temp directory, write in new template file, and # overwrite variables with the compact prior netcdf files @@ -159,7 +162,6 @@ while ( 1 == 1 ) ${MOVE} icgen.o* ${OUTPUT_DIR}/${datea}/logs/ # Get wrfinput source information - ${COPY} ${OUTPUT_DIR}/${datea}/wrfinput_d01_${gdate[1]}_${gdate[2]}_mean wrfinput_d01 set dn = 1 while ( $dn <= $domains ) @@ -169,36 +171,38 @@ while ( 1 == 1 ) end - # Copy the inflation files from the previous time, update for domains - #TJH ADAPTIVE_INFLATION comes from scripts/param.csh but is disjoint from input.nml + # Copy the inflation files from the previous time and for all domains + # The ADAPTIVE_INFLATION variable is set in scripts/param.csh and should + # be consistent with DART's input.nml inflation setting (inf_flavor) if ( $ADAPTIVE_INFLATION == 1 ) then # Create the home for inflation and future state space diagnostic files - # Should try to check each file here, 
but shortcutting for prior (most common) and link them all mkdir -p ${RUN_DIR}/{Inflation_input,Output} - if ( $domains == 1) then - if ( -e ${OUTPUT_DIR}/${datep}/Inflation_input/input_priorinf_mean.nc ) then - ${LINK} ${OUTPUT_DIR}/${datep}/Inflation_input/input_priorinf*.nc ${RUN_DIR}/. - ${LINK} ${OUTPUT_DIR}/${datep}/Inflation_input/input_postinf*.nc ${RUN_DIR}/. + set dn = 1 + while ( $dn <= $domains ) + set dchar = `echo $dn + 100 | bc | cut -b2-3` + + if ( -e ${OUTPUT_DIR}/${datep}/Inflation_input/input_priorinf_mean_d${dchar}.nc ) then + + ${LINK} ${OUTPUT_DIR}/${datep}/Inflation_input/input_priorinf_mean_d${dchar}.nc ${RUN_DIR}/. + ${LINK} ${OUTPUT_DIR}/${datep}/Inflation_input/input_postinf_mean_d${dchar}.nc ${RUN_DIR}/. + + ${LINK} ${OUTPUT_DIR}/${datep}/Inflation_input/input_priorinf_sd_d${dchar}.nc ${RUN_DIR}/. + ${LINK} ${OUTPUT_DIR}/${datep}/Inflation_input/input_postinf_sd_d${dchar}.nc ${RUN_DIR}/. else - echo "${OUTPUT_DIR}/${datep}/Inflation_input/input_priorinf_mean.nc file does not exist. Stopping" + echo "${OUTPUT_DIR}/${datep}/Inflation_input/input_priorinf_mean_d${dchar}.nc file does not exist. Stopping" + echo "If first assimilation cycle make sure fill_inflation_restart was used to generate mean and sd inflation files" touch ABORT_RETRO exit 3 endif - - else # multiple domains so multiple inflation files for each domain - # TJH this should error out much earlier - echo "This script doesn't support multiple domains. Stopping" - touch ABORT_RETRO - exit 4 - - endif # number of domains check + @dn ++ + end # Loop through domains endif # ADAPTIVE_INFLATION file check @@ -322,14 +326,34 @@ while ( 1 == 1 ) # First, create the difference of a subset of variables # Second, create a netCDF file with just the static data # Third, append the static data onto the difference. 
- ncdiff -F -O -v $extract_str postassim_mean.nc preassim_mean.nc analysis_increment.nc - ncks -F -O -x -v ${extract_str} postassim_mean.nc static_data.nc - ncks -A static_data.nc analysis_increment.nc + + + set dn = 1 + while ( $dn <= $domains ) + set dchar = `echo $dn + 100 | bc | cut -b2-3` + ncdiff -F -O -v $extract_str postassim_mean_d${dchar}.nc preassim_mean_d${dchar}.nc analysis_increment_d${dchar}.nc + ncks -F -O -x -v ${extract_str} postassim_mean_d${dchar}.nc static_data_d${dchar}.nc + ncks -A static_data_d${dchar}.nc analysis_increment_d${dchar}.nc # Move diagnostic and obs_seq.final data to storage directories + # + if (dn == 1 && -e obs_seq.final) then + ${MOVE} obs_seq.final ${OUTPUT_DIR}/${datea}/. + if ( ! $status == 0 ) then + echo "failed moving ${RUN_DIR}/obs_seq.final" + touch BOMBED @ dn++ + end # loop through domains + endif + else + echo "${OUTPUT_DIR}/obs_seq.final does not exist and should." + ls -l + touch BOMBED + endif + + + foreach FILE ( postassim_mean_d${dchar}.nc preassim_mean_d${dchar}.nc postassim_sd_d${dchar}.nc preassim_sd_d${dchar}.nc \ + obs_seq.final analysis_increment_d${dchar}.nc output_mean_d${dchar}.nc output_sd_d${dchar}.nc ) - foreach FILE ( postassim_mean.nc preassim_mean.nc postassim_sd.nc preassim_sd.nc \ - obs_seq.final analysis_increment.nc output_mean.nc output_sd.nc ) if ( -e $FILE && ! -z $FILE ) then ${MOVE} $FILE ${OUTPUT_DIR}/${datea}/. if ( ! $status == 0 ) then @@ -342,16 +366,23 @@ while ( 1 == 1 ) touch BOMBED endif end + @ dn++ + end # loop through domains echo "past the analysis file moves" # Move inflation files to storage directories # The output inflation file is used as the input for the next cycle, - # so rename the file 'on the fly'. - cd ${RUN_DIR} # TJH is this necessary? + # so rename the current inflation output for the next cycle input. 
+ cd ${RUN_DIR} + if ( $ADAPTIVE_INFLATION == 1 ) then - set old_file = ( input_postinf_mean.nc input_postinf_sd.nc input_priorinf_mean.nc input_priorinf_sd.nc ) - set new_file = ( output_postinf_mean.nc output_postinf_sd.nc output_priorinf_mean.nc output_priorinf_sd.nc ) + + set dn = 1 + while ( $dn <= $domains ) + set dchar = `echo $dn + 100 | bc | cut -b2-3` + set old_file = ( input_postinf_mean_d${dchar}.nc input_postinf_sd_d${dchar}.nc input_priorinf_mean_d${dchar}.nc input_priorinf_sd_d${dchar}.nc ) + set new_file = ( output_postinf_mean_d${dchar}.nc output_postinf_sd_d${dchar}.nc output_priorinf_mean_d${dchar}.nc output_priorinf_sd_d${dchar}.nc ) set i = 1 set nfiles = $#new_file while ($i <= $nfiles) @@ -364,6 +395,9 @@ while ( 1 == 1 ) endif @ i++ end + @ dn++ + end # loop through domains + echo "past the inflation file moves" endif # adaptive_inflation file moves @@ -549,18 +583,28 @@ while ( 1 == 1 ) end # Move output data to correct location - echo "moving ${n} ${ensstring}" - ${MOVE} ${RUN_DIR}/assim_advance_${n}.o* ${OUTPUT_DIR}/${datea}/logs/. - ${MOVE} WRFOUT/wrf.out_${gdatef[1]}_${gdatef[2]}_${n} ${OUTPUT_DIR}/${datea}/logs/. - ${MOVE} WRFIN/wrfinput_d01_${n}.gz ${OUTPUT_DIR}/${datea}/WRFIN/. - ${MOVE} ${RUN_DIR}/prior_d01.${ensstring} ${OUTPUT_DIR}/${datea}/PRIORS/. - ${REMOVE} start_member_${n} done_member_${n} filter_restart_d01.${ensstring} - if ( -e assim_advance_mem${n}.csh ) ${REMOVE} assim_advance_mem${n}.csh - set pert = `cat ${RUN_DIR}/advance_temp${n}/mem${n}_pert_bank_num` - echo "Member $n uses perturbation bank ensemble member $pert" >> ${OUTPUT_DIR}/${datea}/pert_bank_members.txt + set dn = 1 + while ( $dn <= $domains ) + set dchar = `echo $dn + 100 | bc | cut -b2-3` + echo "moving ${n} ${ensstring} for domain ${dn}" + + if ( $dn == 1 ) then + ${MOVE} ${RUN_DIR}/assim_advance_${n}.o* ${OUTPUT_DIR}/${datea}/logs/. + ${MOVE} WRFOUT/wrf.out_${gdatef[1]}_${gdatef[2]}_${n} ${OUTPUT_DIR}/${datea}/logs/. 
+ ${REMOVE} start_member_${n} done_member_${n} + if ( -e assim_advance_mem${n}.csh ) ${REMOVE} assim_advance_mem${n}.csh + set pert = `cat ${RUN_DIR}/advance_temp${n}/mem${n}_pert_bank_num` + echo "Member $n uses perturbation bank ensemble member $pert" >> ${OUTPUT_DIR}/${datea}/pert_bank_members.txt + endif + + ${MOVE} WRFIN/wrfinput_d${dchar}_${n}.gz ${OUTPUT_DIR}/${datea}/WRFIN/. + ${MOVE} ${RUN_DIR}/prior_d${dchar}.${ensstring} ${OUTPUT_DIR}/${datea}/PRIORS/. + ${REMOVE} filter_restart_d${dchar}.${ensstring} + + @ dn ++ + end #loop through domains @ n++ - end # --------------------------------------------------------------------------- @@ -583,18 +627,17 @@ while ( 1 == 1 ) gzip -f wrfinput_d*_${gdate[1]}_${gdate[2]}_mean wrfinput_d*_${gdatef[1]}_${gdatef[2]}_mean wrfbdy_d*_mean tar -cvf retro.tar obs_seq.out wrfin*.gz wrfbdy_d*.gz tar -rvf dart_data.tar obs_seq.out obs_seq.final wrfinput_d*.gz wrfbdy_d*.gz \ - Inflation_input/* logs/* *.dat input.nml + Inflation_input/* logs/* input.nml ${REMOVE} wrfinput_d*_${gdate[1]}_${gdate[2]}_mean.gz wrfbdy_d*.gz gunzip -f wrfinput_d*_${gdatef[1]}_${gdatef[2]}_mean.gz cd $RUN_DIR ${MOVE} ${RUN_DIR}/assim*.o* ${OUTPUT_DIR}/${datea}/logs/. - ${MOVE} ${RUN_DIR}/*log ${OUTPUT_DIR}/${datea}/logs/. ${REMOVE} ${RUN_DIR}/input_priorinf_* ${REMOVE} ${RUN_DIR}/static_data* touch prev_cycle_done touch $RUN_DIR/cycle_finished_${datea} - rm $RUN_DIR/cycle_started_${datea} + if ( -e cycle_started_${datea} ) rm $RUN_DIR/cycle_started_${datea} # If doing a reanalysis, increment the time if not done. 
Otherwise, let the script exit if ( $restore == 1 ) then diff --git a/models/wrf/shell_scripts/first_advance.csh b/models/wrf/shell_scripts_csh/first_advance.csh similarity index 73% rename from models/wrf/shell_scripts/first_advance.csh rename to models/wrf/shell_scripts_csh/first_advance.csh index 32ec373706..a32377a845 100755 --- a/models/wrf/shell_scripts/first_advance.csh +++ b/models/wrf/shell_scripts_csh/first_advance.csh @@ -57,24 +57,36 @@ else if ( $SUPER_PLATFORM == 'derecho' ) then ${gdate[2]} ${gdate[1]} $yyyy $mm $dd $hh $nn $ss $domains - mpiexec -n 128 -ppn 128 ./wrf.exe + mpiexec -n 4 -ppn 4 ./wrf.exe EOF endif cd $RUN_DIR +# filter_control accounts for multiple domains +# Appends input (filter_restart) and output (prior) in consecutive pairs +# Should be consistent with filter_control setup for assim_advance.csh +# for future assimilation steps + echo $emember >! ${RUN_DIR}/filter_control${icnum} -echo filter_restart_d01.${icnum} >> ${RUN_DIR}/filter_control${icnum} -echo prior_d01.${icnum} >> ${RUN_DIR}/filter_control${icnum} + +set dn = 1 +while ( $dn <= $domains ) + set dchar = `echo $dn + 100 | bc | cut -b2-3` + echo filter_restart_d${dchar}.${icnum} >> ${RUN_DIR}/filter_control${icnum} + echo prior_d${dchar}.${icnum} >> ${RUN_DIR}/filter_control${icnum} + @ dn++ +end # loop through domains # integrate the model forward in time -${RUN_DIR}/new_advance_model.csh ${emember} 1 filter_control${icnum} $paramfile +${RUN_DIR}/new_advance_model.csh ${emember} ${domains} filter_control${icnum} $paramfile ${REMOVE} ${RUN_DIR}/filter_control${icnum} # move the output to the appropriate directory mkdir -p ${OUTPUT_DIR}/${datea}/PRIORS mv $RUN_DIR/prior_d01.${icnum} ${OUTPUT_DIR}/${datea}/PRIORS/prior_d01.${icnum} +mv $RUN_DIR/prior_d02.${icnum} ${OUTPUT_DIR}/${datea}/PRIORS/prior_d02.${icnum} set end_time = `date +%s` @ length_time = $end_time - $start_time diff --git a/models/wrf/shell_scripts/gen_pert_bank.csh 
b/models/wrf/shell_scripts_csh/gen_pert_bank.csh similarity index 63% rename from models/wrf/shell_scripts/gen_pert_bank.csh rename to models/wrf/shell_scripts_csh/gen_pert_bank.csh index d53b88b3f2..39c99198b3 100755 --- a/models/wrf/shell_scripts/gen_pert_bank.csh +++ b/models/wrf/shell_scripts_csh/gen_pert_bank.csh @@ -13,34 +13,51 @@ # list of perturbed variables # wrfda executable and be.dat -set datea = 2017042700 # need to start from a known valid date matching the wrfinput_d01 date + +set datea = 2024051900 # need to start from a known valid date matching the wrfinput_d01 date +set paramfile = /glade/derecho/scratch/bmraczka/WRFv4.5_nested/scripts/param.csh + +echo "Sourcing parameter file" +source $paramfile +mkdir -p ${PERTS_DIR}/work/boundary_perts # this has all wrf and wrfda executables and support files -set wrfda_dir = /glade/scratch/romine/pert_hrrr/wrfda # set this appropriately #%%%# +# /glade/work/bmraczka/WRF/WRFDAv4.5_git +set wrfda_dir = ${RUN_DIR}/WRF_RUN # set this appropriately #%%%# -set work_dir = /glade/scratch/romine/pert_hwt2018 # set this appropriately #%%%# +set work_dir = ${PERTS_DIR}/work # set this appropriately #%%%# # put the final eperturbation files here for later use -set save_dir = /glade/p/nmmm0001/romine/hwt2018/boundary_perts # set this appropriately #%%%# +set save_dir = ${PERTS_DIR}/work/boundary_perts # set this appropriately #%%%# -set DART_DIR = /glade/p/work/romine/c_codes/DART_manhattan # set this appropriately #%%%# +#This is set already in param.csh +# set DART_DIR = /glade/p/work/romine/c_codes/DART_manhattan # set this appropriately #%%%# # where the template namelist is for wrfvar -set template_dir = /glade/scratch/romine/pert_hwt2018/template # set this appropriately #%%%# -set IC_PERT_SCALE = 0.009 +# This is set already in param.csh +#set template_dir = /glade/scratch/romine/pert_hwt2018/template # set this appropriately #%%%# + + +# BMR --> Need to optimize for new domain +# These scale variables are not 
passed in -- hard coded to template file +set IC_PERT_SCALE = 0.1 set IC_HORIZ_SCALE = 0.8 set IC_VERT_SCALE = 0.8 -set num_ens = 150 # number of perturbations to generate, must be at least ensemble size, suggest 3-4X. SUGGEST testing + +set num_ens = 150 +#set num_ens = 150 # number of perturbations to generate, must be at least ensemble size, suggest 3-4X. SUGGEST testing # a single member until you are sure the script works, and are happy with the settings. -set wrfin_dir = ${work_dir}/wrfin + + +# Get wrfinput_d01 file directly from the 'mean' file generated from real.exe +# set wrfin_dir = ${work_dir}/wrfin set ASSIM_INT_HOURS = 6 module load nco -# mkdir ${work_dir} cd ${work_dir} -cp ${template_dir}/input.nml.template input.nml +cp ${TEMPLATE_DIR}/input.nml.template input.nml # get a wrfdate and parse set gdate = (`echo $datea 0h -g | ${DART_DIR}/models/wrf/work/advance_time`) @@ -57,7 +74,15 @@ while ( $n <= $num_ens ) mkdir ${work_dir}/mem_${n} cd ${work_dir}/mem_${n} cp ${wrfda_dir}/* ${work_dir}/mem_${n}/. - ln -sf ${wrfin_dir}/wrfinput_d01 ${work_dir}/mem_${n}/fg + + # Use the wrfinput_d01 file generated from real.exe during gen_retro_icbc.csh + + + ln -sf ${OUTPUT_DIR}/${datea}/wrfinput_d01_${gdate[1]}_${gdate[2]}_mean ${work_dir}/mem_${n}/fg + + # Old method + #ln -sf ${wrfin_dir}/wrfinput_d01 ${work_dir}/mem_${n}/fg + # prep the namelist to run wrfvar @ seed_array2 = $n * 10 cat >! script.sed << EOF @@ -101,7 +126,9 @@ while ( $n <= $num_ens ) /seed_array2/c\ seed_array2 = $seed_array2 / EOF - sed -f script.sed ${template_dir}/namelist.input.3dvar >! ${work_dir}/mem_${n}/namelist.input + + # namelist.input.3dvar --> single parent domain only, contains all & wrfvar1-14 + sed -f script.sed ${TEMPLATE_DIR}/namelist.input.3dvar >! 
${work_dir}/mem_${n}/namelist.input # make a run file for wrfvar cat >> ${work_dir}/mem_${n}/gen_pert_${n}.csh << EOF @@ -109,24 +136,26 @@ EOF #================================================================= #PBS -N gen_pert_bank_mem${n} #PBS -j oe -#PBS -A COMPUTER_CHARGE_ACCOUNT +#PBS -A ${COMPUTER_CHARGE_ACCOUNT} #PBS -l walltime=0:05:00 -#PBS -q regular -#PBS -m a -#PBS -M USERNAME@X.X # set this appropriately #%%%# -#PBS -l select=4:ncpus=32:mpiprocs=16 +#PBS -q ${ADVANCE_QUEUE} +#PBS -l job_priority=${ADVANCE_PRIORITY} +#PBS -o gen_pert_bank_mem${n}.out +#PBS -l select=1:ncpus=4:mpiprocs=4 #PBS -k eod +#PBS -V #================================================================= cd ${work_dir}/mem_${n} -mpiexec_mpt dplace -s 1 ./da_wrfvar.exe >& output.wrfvar +mpiexec -n 1 -ppn 1 ./da_wrfvar.exe >& output.wrfvar mv wrfvar_output wrfinput_d01 # extract only the fields that are updated by wrfvar, then diff to generate the pert file for this member -ncks -h -F -A -a -v U,V,T,QVAPOR,MU fg orig_data.nc -ncks -h -F -A -a -v U,V,T,QVAPOR,MU wrfinput_d01 pert_data.nc + +ncks -h -F -A -a -v U,V,THM,QVAPOR,MU fg orig_data.nc +ncks -h -F -A -a -v U,V,THM,QVAPOR,MU wrfinput_d01 pert_data.nc ncdiff pert_data.nc orig_data.nc pert_bank_mem_${n}.nc mv pert_bank_mem_${n}.nc ${save_dir}/pert_bank_mem_${n}.nc EOF diff --git a/models/wrf/shell_scripts/gen_retro_icbc.csh b/models/wrf/shell_scripts_csh/gen_retro_icbc.csh similarity index 80% rename from models/wrf/shell_scripts/gen_retro_icbc.csh rename to models/wrf/shell_scripts_csh/gen_retro_icbc.csh index 20fb3202d4..0216d38ea4 100755 --- a/models/wrf/shell_scripts/gen_retro_icbc.csh +++ b/models/wrf/shell_scripts_csh/gen_retro_icbc.csh @@ -29,25 +29,31 @@ echo "gen_retro_icbc.csh is running in `pwd`" ######################################################################## # # gen_retro_icbc.csh - shell script that generates the -# necessary wrfinput_d01 and -# wrfbdy_d01 files for running +# necessary wrfinput_d01, 
wrfinput_d02 +# and wrfbdy_d01 files for running # a real-time analysis system. # # created May 2009, Ryan Torn, U. Albany -# +# update Oct 2025, Brett Raczka NCAR # This creates output/${date}/wrfbdy_d01_{days}_{seconds}_mean # output/${date}/wrfinput_d01_{days}_{time_step1}_mean # output/${date}/wrfinput_d01_{days}_{time_step2}_mean +# output/${date}/wrfinput_d02_{days}_{time_step1}_mean +# output/${date}/wrfinput_d02_{days}_{time_step2}_mean ######################################################################## -set datea = 2017042700 -set datefnl = 2017042712 # set this appropriately #%%%# -set paramfile = /glade/derecho/scratch/USERNAME/WORK_DIR/scripts/param.csh # set this appropriately #%%%# +set datea = 2024051812 +set datefnl = 2024052018 # set this appropriately #%%%# +set paramfile = /glade/derecho/scratch/bmraczka/WRFv4.5_nested/scripts/param.csh # set this appropriately #%%%# +echo "Sourcing parameter file" source $paramfile -# The geo_*.nc files should already be in the ${ICBC_DIR}/*/ directories. -# ${LINK} ${GEO_FILES_DIR}/geo_*.nc . +cd ${ICBC_DIR} +${REMOVE} geo_*.nc namelist.wps namelist.input geogrid_done +mkdir -p geogrid +${LINK} ${WPS_SRC_DIR}/geogrid/GEOGRID.TBL ${ICBC_DIR}/geogrid/GEOGRID.TBL +#${LINK} ${WPS_SRC_DIR}/geogrid/geo_*.nc . mkdir -p ${ICBC_DIR}/metgrid ${LINK} ${WPS_SRC_DIR}/metgrid/METGRID.TBL ${ICBC_DIR}/metgrid/METGRID.TBL @@ -70,9 +76,9 @@ while ( 1 == 1 ) ${REMOVE} namelist.wps cat >! script.sed << EOF /start_date/c\ - start_date = 2*'${start_date}', + start_date = ${start_date},${start_date} /end_date/c\ - end_date = 2*'${end_date}', + end_date = ${end_date},${end_date} EOF # build grib file names - may need to change for other data sources. 
@@ -85,21 +91,31 @@ EOF exit 2 endif - set gribfile_a = ${GRIB_DATA_DIR}/${datea}/gfs_ds084.1/gfs.0p25.${datea}.f000.grib2 - set gribfile_b = ${GRIB_DATA_DIR}/${datea}/gfs_ds084.1/gfs.0p25.${datea}.f006.grib2 + # set gribfile_a = ${GRIB_DATA_DIR}/${datea}/gfs_ds084.1/gfs.0p25.${datea}.f000.grib2 + # set gribfile_b = ${GRIB_DATA_DIR}/${datea}/gfs_ds084.1/gfs.0p25.${datea}.f006.grib2 + set gribfile_a = ${GRIB_DATA_DIR}/gfs.0p25.${datea}.f000.grib2 + set gribfile_b = ${GRIB_DATA_DIR}/gfs.0p25.${datea}.f006.grib2 + ${LINK} $gribfile_a GRIBFILE.AAA ${LINK} $gribfile_b GRIBFILE.AAB - sed -f script.sed ${TEMPLATE_DIR}/namelist.wps.template >! namelist.wps ${LINK} ${WPS_SRC_DIR}/ungrib/Variable_Tables/Vtable.${GRIB_SRC} Vtable + if ( ! -e ${ICBC_DIR}/geogrid_done ) then + echo "Executing geogrid.exe" + ${WPS_SRC_DIR}/geogrid.exe >& output.geogrid.exe + touch geogrid_done + endif + + echo "Executing ungrib.exe" ${REMOVE} output.ungrib.exe.${GRIB_SRC} ${WPS_SRC_DIR}/ungrib.exe >& output.ungrib.exe.${GRIB_SRC} - + + echo "Executing metgrid.exe" ${REMOVE} output.metgrid.exe ${WPS_SRC_DIR}/metgrid.exe >& output.metgrid.exe - ${LINK} ${WPS_SRC_DIR}/met_em.d01.* . + # ${LINK} ${WPS_SRC_DIR}/met_em.d01.* . set datef = `echo $datea $ASSIM_INT_HOURS | ${DART_DIR}/models/wrf/work/advance_time` set gdatef = (`echo $datef 0 -g | ${DART_DIR}/models/wrf/work/advance_time`) set hh = `echo $datea | cut -b9-10` @@ -131,7 +147,7 @@ EOF set dd2 = `echo $date2 | cut -c 7-8` set hh2 = `echo $date2 | cut -c 9-10` - ${REMOVE} namelist.input script.sed + ${REMOVE} script.sed cat >! script.sed << EOF /run_hours/c\ run_hours = ${fcst_hours}, @@ -170,7 +186,7 @@ EOF #${RUN_DIR}/WRF_RUN/real.serial.exe >& out.real.exe #if ( -e rsl.out.0000 ) cat rsl.out.0000 >> out.real.exe - rm script.sed real_done rsl.* + ${REMOVE} script.sed real_done rsl.* echo "2i\" >! 
script.sed echo "#======================================\" >> script.sed echo "#PBS -N run_real\" >> script.sed @@ -181,7 +197,8 @@ EOF echo "#PBS -o run_real.out\" >> script.sed echo "#PBS -j oe\" >> script.sed echo "#PBS -k eod\" >> script.sed - echo "#PBS -l select=1:ncpus=128:mpiprocs=128\" >> script.sed + #echo "#PBS -l select=1:ncpus=128:mpiprocs=128\" >> script.sed + echo "#PBS -l select=1:ncpus=4:mpiprocs=4\" >> script.sed echo "#PBS -V\" >> script.sed echo "#======================================\" >> script.sed echo "\" >> script.sed @@ -200,6 +217,7 @@ EOF # move output files to storage set gdate = (`echo $date1 0 -g | ${DART_DIR}/models/wrf/work/advance_time`) ${MOVE} wrfinput_d01 ${OUTPUT_DIR}/${datea}/wrfinput_d01_${gdate[1]}_${gdate[2]}_mean + ${MOVE} wrfinput_d02 ${OUTPUT_DIR}/${datea}/wrfinput_d02_${gdate[1]}_${gdate[2]}_mean if ( $n == 1 ) ${MOVE} wrfbdy_d01 ${OUTPUT_DIR}/${datea}/wrfbdy_d01_${gdatef[1]}_${gdatef[2]}_mean @ n++ diff --git a/models/wrf/shell_scripts/init_ensemble_var.csh b/models/wrf/shell_scripts_csh/init_ensemble_var.csh similarity index 58% rename from models/wrf/shell_scripts/init_ensemble_var.csh rename to models/wrf/shell_scripts_csh/init_ensemble_var.csh index a6af8845a4..99aedabdff 100755 --- a/models/wrf/shell_scripts/init_ensemble_var.csh +++ b/models/wrf/shell_scripts_csh/init_ensemble_var.csh @@ -17,12 +17,17 @@ source $paramfile cd ${RUN_DIR} -# KRF Generate the i/o lists in rundir automatically when initializing the ensemble -set num_ens = ${NUM_ENS} -set input_file_name = "input_list_d01.txt" -set input_file_path = "./advance_temp" -set output_file_name = "output_list_d01.txt" - +# Generate the i/o lists in rundir automatically when initializing the ensemble +# Required to run filter during assimilation step +set num_ens = $NUM_ENS # set from param file +set domains = $NUM_DOMAINS +set dn = 1 + +while ( $dn <= $domains ) + set dchar = `echo $dn + 100 | bc | cut -b2-3` + set input_file_name = 
"input_list_d${dchar}.txt" + set input_file_path = "./advance_temp" + set output_file_name = "output_list_d${dchar}.txt" set n = 1 if ( -e $input_file_name ) rm $input_file_name @@ -31,15 +36,17 @@ if ( -e $output_file_name ) rm $output_file_name while ($n <= $num_ens) set ensstring = `printf %04d $n` - set in_file_name = ${input_file_path}${n}"/wrfinput_d01" - set out_file_name = "filter_restart_d01."$ensstring + set in_file_name = ${input_file_path}${n}"/wrfinput_d${dchar}" + set out_file_name = "filter_restart_d${dchar}."$ensstring - echo $in_file_name >> $input_file_name - echo $out_file_name >> $output_file_name + echo $in_file_name >> $input_file_name + echo $out_file_name >> $output_file_name + + @ n ++ + end # loop through ensemble members + @ dn ++ +end # loop through domains - @ n++ -end -### set gdate = (`echo $initial_date 0h -g | ${DART_DIR}/models/wrf/work/advance_time`) set gdatef = (`echo $initial_date ${ASSIM_INT_HOURS}h -g | ${DART_DIR}/models/wrf/work/advance_time`) @@ -48,6 +55,8 @@ set yyyy = `echo $initial_date | cut -b1-4` set mm = `echo $initial_date | cut -b5-6` set dd = `echo $initial_date | cut -b7-8` set hh = `echo $initial_date | cut -b9-10` +set nn = "00" +set ss = "00" ${COPY} ${TEMPLATE_DIR}/namelist.input.meso namelist.input ${REMOVE} ${RUN_DIR}/WRF @@ -63,12 +72,45 @@ while ( $n <= $NUM_ENS ) ${LINK} ${RUN_DIR}/WRF_RUN/* ${RUN_DIR}/advance_temp${n}/. ${LINK} ${RUN_DIR}/input.nml ${RUN_DIR}/advance_temp${n}/input.nml + ${REMOVE} script.sed + cat >! 
script.sed << EOF + /start_year/c\ + start_year = ${yyyy}, + /start_month/c\ + start_month = ${mm}, + /start_day/c\ + start_day = ${dd}, + /start_hour/c\ + start_hour = ${hh}, + /start_minute/c\ + start_minute = ${nn}, + /start_second/c\ + start_second = ${ss}, + /end_year/c\ + end_year = ${yyyy}, + /end_month/c\ + end_month = ${mm}, + /end_day/c\ + end_day = ${dd}, + /end_hour/c\ + end_hour = ${hh}, + /end_minute/c\ + end_minute = ${nn}, + /end_second/c\ + end_second = ${ss}, + /max_dom/c\ + max_dom = ${NUM_DOMAINS}, +EOF + + + sed -f script.sed ${RUN_DIR}/namelist.input >! ${RUN_DIR}/advance_temp${n}/namelist.input + ${COPY} ${OUTPUT_DIR}/${initial_date}/wrfinput_d01_${gdate[1]}_${gdate[2]}_mean \ ${RUN_DIR}/advance_temp${n}/wrfvar_output.nc sleep 3 ${COPY} ${RUN_DIR}/add_bank_perts.ncl ${RUN_DIR}/advance_temp${n}/. - set cmd3 = "ncl 'MEM_NUM=${n}' 'PERTS_DIR="\""${PERTS_DIR}"\""' ${RUN_DIR}/advance_temp${n}/add_bank_perts.ncl" + set cmd3 = "ncl 'MEM_NUM=${n}' 'PERTS_DIR="\""${PERTS_DIR}/work/boundary_perts"\""' ${RUN_DIR}/advance_temp${n}/add_bank_perts.ncl" ${REMOVE} ${RUN_DIR}/advance_temp${n}/nclrun3.out cat >! 
${RUN_DIR}/advance_temp${n}/nclrun3.out << EOF $cmd3 @@ -109,6 +151,18 @@ EOF endif ${MOVE} wrfvar_output.nc wrfinput_d01 + + # Prep domain files for ndown.exe + ${LINK} wrfinput_d01 wrfout_d01_${yyyy}-${mm}-${dd}_${hh}:00:00 + + ${COPY} ${OUTPUT_DIR}/${initial_date}/wrfinput_d02_${gdate[1]}_${gdate[2]}_mean \ + ${RUN_DIR}/advance_temp${n}/wrfndi_d02 + + echo "Running ndown.exe to downscale perturbed wrfinput_d01 onto wrfinput_d02 for member $n at `date`" + + # Downscale parent domain to nested domain (wrfinput_d02) + mpiexec -n 4 -ppn 4 ./ndown.exe > ndown.out + endif cd $RUN_DIR diff --git a/models/wrf/shell_scripts_csh/mean_increment.ncl b/models/wrf/shell_scripts_csh/mean_increment.ncl new file mode 100644 index 0000000000..1798e231bb --- /dev/null +++ b/models/wrf/shell_scripts_csh/mean_increment.ncl @@ -0,0 +1,99 @@ +; find the mean state space increment, output the fields to a single mean file +; that can be used to make plots +; G. Romine 2011-12 +; Run for parent domain (d01) only B. Raczka 2024-08 +begin + +; get the list of files to read in + fname = "analysis_increment_d01.nc" + flist = systemfunc("ls ../*/" + fname) + nfils = dimsizes(flist) +; if we only want say the last 7 days, then grab only the last 28 +; here we practice with 3 days + anl_days = 7 + ntimes = anl_days*4 + if (nfils .gt. 
ntimes) then + tempf = flist(nfils-ntimes:nfils-1) + delete(flist) + flist = tempf + nfils = ntimes + delete(tempf) + end if + fil = addfiles(flist, "r") + ListSetType(fil, "join") + + pull_2D_field_names = (/"T2", "Q2", "U10", "V10", "PSFC"/) + pull_3D_field_names = (/"U", "V", "THM", "QVAPOR"/) + npulls = dimsizes(pull_2D_field_names) + +; Below will dump out the data to a file for replotting later + cnew = addfile("mean_increments_d01"+".nc","c") +; work through 2D fields + do i=0,npulls-1 + print(" Extracting 2d variable "+pull_2D_field_names(i)) + do fil_num=0,nfils-1 +; print(" reading file "+flist(fil_num)) +; dimensions are ncljoin, Time, south_north, west_east +; copy zero is the ensemble mean + pull_var = fil[fil_num]->$pull_2D_field_names(i)$(:,:,:,:) + dims = dimsizes(pull_var) + if (fil_num .eq. 0) then ; first iteration, make var + alltimes_var = new ( (/nfils,dims(2),dims(3)/), typeof(pull_var) ) + end if +; printVarSummary(pull_var) + alltimes_var(fil_num,:,:) = pull_var(0,0,:,:) +; printVarSummary(alltimes_var) + delete(pull_var) + end do +; average over time (first dimension) + mean_alltimes_var = dim_avg_n(alltimes_var,0) +; standard deviation over time (first dimension) + stdv_alltimes_var = dim_stddev_n(alltimes_var,0) +; write to new file + varname ="mean_"+pull_2D_field_names(i) + cnew->$varname$ = mean_alltimes_var + delete(varname) + varname ="stdv_"+pull_2D_field_names(i) + cnew->$varname$ = stdv_alltimes_var + delete(varname) + delete(alltimes_var) + delete(mean_alltimes_var) + delete(stdv_alltimes_var) + delete(dims) + end do + +; work through 3D fields + npulls = dimsizes(pull_3D_field_names) + do i=0,npulls-1 + print(" Extracting 3d variable "+pull_3D_field_names(i)) + do fil_num=0,nfils-1 +; print(" reading file "+flist(fil_num)) +; dimensions are ncljoin, Time, level, south_north, west_east +; copy zero is the ensemble mean + pull_var = fil[fil_num]->$pull_3D_field_names(i)$(:,:,:,:,:) + dims = dimsizes(pull_var) + if (fil_num .eq. 
0) then ; first iteration, make var + alltimes_var = new ( (/nfils,dims(2),dims(3),dims(4)/), typeof(pull_var) ) + end if +; printVarSummary(pull_var) + alltimes_var(fil_num,:,:,:) = pull_var(0,0,:,:,:) + delete(pull_var) + end do +; average over time (first dimension) + mean_alltimes_var = dim_avg_n(alltimes_var,0) +; standard deviation over time (first dimension) + stdv_alltimes_var = dim_stddev_n(alltimes_var,0) +; write to new file + varname ="mean_"+pull_3D_field_names(i) + cnew->$varname$ = mean_alltimes_var + delete(varname) + varname ="stdv_"+pull_3D_field_names(i) + cnew->$varname$ = stdv_alltimes_var + delete(varname) + delete(alltimes_var) + delete(mean_alltimes_var) + delete(stdv_alltimes_var) + delete(dims) + end do + +end diff --git a/models/wrf/shell_scripts/new_advance_model.csh b/models/wrf/shell_scripts_csh/new_advance_model.csh similarity index 91% rename from models/wrf/shell_scripts/new_advance_model.csh rename to models/wrf/shell_scripts_csh/new_advance_model.csh index 51646ac794..8833609ad6 100755 --- a/models/wrf/shell_scripts/new_advance_model.csh +++ b/models/wrf/shell_scripts_csh/new_advance_model.csh @@ -78,8 +78,6 @@ set control_file = $3 set num_states = 1 # forcing option of only one model advance per execution set paramfile = $4 # Need this to load modules/environment source $paramfile -# MULTIPLE DOMAINS - pass along the # of domains here? We just default a value of 1 for the second variable, process is the ensemble member # - # Setting to vals > 0 saves wrfout files, # will save all member output files <= to this value @@ -160,7 +158,6 @@ if ( -x ${CENTRALDIR}/WRF_RUN/da_wrfvar.exe ) then else set USE_WRFVAR = 0 endif - # set this flag here if the radar additive noise script is found if ( -e ${CENTRALDIR}/add_noise.csh ) then set USE_NOISE = 1 @@ -180,20 +177,33 @@ sleep 5 # This control file has the actual ensemble number, the input filename, # and the output filename for each advance. 
Be prepared to loop and # do the rest of the script more than once. + set USE_WRFVAR = 1 set state_copy = 1 set ensemble_member_line = 1 -set input_file_line = 2 # MUTIPLE DOMAINS - this doesn't work for multiple domains, need something more general, maybe just a header name -set output_file_line = 3 - -# MULTIPLE DOMAINS - need a way to tell this shell script if there are multiple wrf domains in the analysis - -while($state_copy <= $num_states) # MULTIPLE DOMAINS - we don't expect advance model to run more than one member anymore. Reuse num_states for # domains? - - set ensemble_member = `head -n $ensemble_member_line ${CENTRALDIR}/${control_file} | tail -n 1` - set input_file = `head -n $input_file_line ${CENTRALDIR}/${control_file} | tail -n 1` - set output_file = `head -n $output_file_line ${CENTRALDIR}/${control_file} | tail -n 1` - +set linein = 2 +set lineout = 3 + +# Note: The input and output file information not required, leaving in as placeholder +# Leaving in place if wrf_to_dart/dart_to_wrf functionality required in future. + +# Code identifies input and output file from control file from multiple domains +# Works with both first_advance.csh and assim_advance.csh scripting +# Assumes input (filter_restart) and output (prior) files are appended to control_file +# in consecutive pairs ordered by domain + +while($state_copy <= $num_states) # We don't expect advance model to run more than one member anymore. Reuse num_states for # domains? 
+set ensemble_member = `head -n $ensemble_member_line ${CENTRALDIR}/${control_file} | tail -n 1` +set dn = 1 +while ( $dn <= $num_domains ) + + set input_file${dn} = `head -n $linein ${CENTRALDIR}/${control_file} | tail -n 1` + set output_file${dn} = `head -n $lineout ${CENTRALDIR}/${control_file} | tail -n 1` + + @ dn ++ + @ linein = $linein + 2 + @ lineout = $lineout + 2 +end # loop through domains set infl = 0.0 # create a new temp directory for each member unless requested to keep and it exists already @@ -238,14 +248,12 @@ while($state_copy <= $num_states) # MULTIPLE DOMAINS - we don't expect advan hostname >! nfile hostname >>! nfile - # Add number of domains information # MULTIPLE_DOMAINS - need a more general instrument here if ( -e ${CENTRALDIR}/moving_domain_info ) then set MY_NUM_DOMAINS = `head -n 1 ${CENTRALDIR}/moving_domain_info | tail -n 1` ${MOVE} input.nml input.nml-- -# sed /num_domains/c\ " num_domains = ${MY_NUM_DOMAINS}," input.nml-- >! input.nml cat >! script.sed << EOF /num_domains/c\ num_domains = ${MY_NUM_DOMAINS}, @@ -264,15 +272,17 @@ EOF # ${COPY} wrfinput_d01 wrfinput_mean # ${REMOVE} wrf.info dart_wrf_vector # endif -# -# # ICs for this wrf run; Convert DART file to wrfinput netcdf file + +# Execution of dart_to_wrf not required. Leaving as placeholder +# ICs for this wrf run; Convert DART file to wrfinput netcdf file # ${MOVE} ${CENTRALDIR}/${input_file} dart_wrf_vector # ${CENTRALDIR}/dart_to_wrf >&! out.dart_to_wrf # ${REMOVE} dart_wrf_vector set stuff_vars = $increment_vars_a -# may want multiple lists here, e.g. do we want w from the analysis? +# Currently hard coded to overwrite only increment_vars_a to +# all domains. 
No custom increment_vars_a and increment_vars_b set stuff_str = '' # these are variables we want to cycle set i = 1 @@ -288,22 +298,21 @@ EOF while ( $dn <= $num_domains ) set dchar = `echo $dn + 100 | bc | cut -b2-3` - set dchar = `printf %02d $dn` set icnum = `echo $ensemble_member + 10000 | bc | cut -b2-5` - set icnum = `printf %04d $ensemble_member` set this_file = filter_restart_d${dchar}.${icnum} if ( -e ../${this_file} ) then ncks -A -v ${stuff_str} ../${this_file} wrfinput_d${dchar} else - echo "WARNING: ../${this_file} does not exist ..." - echo "WARNING: this is expected for the first cycle ONLY!" + echo "WARNING: ../${this_file} is the posterior from filter and does not exist" + echo "WARNING: this is expected for the first cycle ONLY when only forecast is run" endif @ dn ++ # end - # Move and remove unnecessary domains MULTIPLE DOMAINS - this problably needs to be removed to avoid confusion + + # Move and remove unnecessary domains if ( -e ${CENTRALDIR}/moving_domain_info ) then set REMOVE_STRING = `cat ${CENTRALDIR}/remove_domain_info` @@ -318,11 +327,6 @@ EOF endif - # The program dart_to_wrf has created the file wrf.info. - # Time information is extracted from wrf.info. - # (bc in the following few lines is the calculator program, - # not boundary conditions.) - # DMODS - note the wrf.info file was pre-generated, not from dart_to_wrf set secday = `head -n 1 wrf.info` set targsecs = $secday[1] @@ -336,13 +340,15 @@ EOF echo "wrf.info is read" echo $USE_WRFVAR - # Find all BC's file available and sort them with "keys". + + # Find all BC's file available and sort them with "keys". BC's are required + # for real WRF simulations # NOTE: this needs a fix for the idealized wrf case in which there are no # boundary files (also same for global wrf). right now some of the # commands below give errors, which are ok to ignore in the idealized case # but it is not good form to generate spurious error messages. 
- # check if LBCs are "specified" (in which case wrfbdy files are req'd) + # check if BCs are "specified" (in which case wrfbdy files are req'd) # and we need to set up a key list to manage target times set SPEC_BC = `grep specified ${CENTRALDIR}/namelist.input | grep true | wc -l` @@ -438,6 +444,7 @@ EOF set isec = `echo "$keys[$ifile] % 86400" | bc` # Copy the boundary condition file to the temp directory if needed. + # Note: BC's only exist for parent domain (d01) if ( $SPEC_BC > 0 ) then if ( $USE_WRFVAR ) then @@ -485,6 +492,7 @@ EOF @ iseed2 = $ensemble_member * 10 ${REMOVE} script.sed + # Note: For WRFDA perturbation code, max_domain = 1, even for nested domain setups cat >! script.sed << EOF /analysis_date/c\ analysis_date = \'${END_STRING}.0000\', @@ -533,6 +541,7 @@ EOF sed -f script.sed ${CENTRALDIR}/namelist.input >! namelist.input + # Only need parent domain (d01) here, even for nested domain setups ${LN} ${CENTRALDIR}/WRF/wrfinput_d01_${targdays}_${targsecs}_mean ./fg ################################ ## instead of running wrfda, just add static pertubations from the pert bank @@ -541,8 +550,9 @@ EOF # mpiexec_mpt dplace -s 1 ${CENTRALDIR}/WRF_RUN/da_wrfvar.exe >>&! out.wrfvar cp fg wrfvar_output cp ${CENTRALDIR}/add_bank_perts.ncl . - set cmd3 = "ncl 'MEM_NUM=${ensemble_member}' 'PERTS_DIR="\""${PERTS_DIR}"\""' ${CENTRALDIR}/advance_temp${ensemble_member}/add_bank_perts.ncl" + set cmd3 = "ncl 'MEM_NUM=${ensemble_member}' 'PERTS_DIR="\""${PERTS_DIR}/work/boundary_perts"\""' ${CENTRALDIR}/advance_temp${ensemble_member}/add_bank_perts.ncl" ${REMOVE} nclrun3.out + cat >! nclrun3.out << EOF $cmd3 EOF @@ -693,7 +703,7 @@ EOF # clean out any old rsl files if ( -e rsl.out.integration ) ${REMOVE} rsl.* - # run WRF here + # RUNNING WRF HERE !! setenv MPI_SHEPHERD FALSE ${ADV_MOD_COMMAND} >>&! 
rsl.out.integration @@ -734,6 +744,11 @@ EOF while ( $dn <= $num_domains ) ${MOVE} wrfinput_d0${dn} wrfinput_d0${dn}_${ensemble_member} gzip wrfinput_d0${dn}_${ensemble_member} & + # Wait for zip operation to complete + while ( -e wrfinput_d0${dn}_${ensemble_member} ) + sleep 3 + touch ${CENTRALDIR}/HAD_TO_WAIT + end @ dn ++ end @@ -741,13 +756,12 @@ EOF set dn = 1 while ( $dn <= $num_domains ) if ( $ensemble_member <= $save_ensemble_member ) ${COPY} wrfout_d0${dn}_${END_STRING} ${WRFOUTDIR}/wrfout_d0${dn}_${END_STRING}_${ensemble_member} -# if the wrfinput file zip operation is finished, wrfinput_d0${dn}_$ensemble_member should no -# longer be in the directory -# test for this, and wait if the zip operation is not yet finished - while ( -e wrfinput_d0${dn}_${ensemble_member} ) - sleep 3 - touch ${CENTRALDIR}/HAD_TO_WAIT - end + ## if the wrfinput file zip operation is finished, wrfinput_d0${dn}_$ensemble_member should no + ## longer be in the directory + # while ( -e wrfinput_d0${dn}_${ensemble_member} ) + # sleep 3 + # touch ${CENTRALDIR}/HAD_TO_WAIT + # end ${MOVE} wrfinput_d0${dn}_${ensemble_member}.gz ../WRFIN/wrfinput_d0${dn}_${ensemble_member}.gz ${MOVE} wrfout_d0${dn}_${END_STRING} wrfinput_d0${dn} @ dn ++ @@ -789,10 +803,13 @@ EOF ln -sf ${CENTRALDIR}/wrfinput_d01 wrfinput_d01_base ${CENTRALDIR}/recalc_wrf_base >&! out.recalc_wrf_base endif -# extract the cycle variables -# # create new input to DART (taken from "wrfinput") -# ${CENTRALDIR}/wrf_to_dart >&! out.wrf_to_dart -# ${MOVE} dart_wrf_vector ${CENTRALDIR}/${output_file} + +# Execution of wrf_to_dart not required. Leaving as placeholder +# Create new input to DART (taken from "wrfinput") +# ${CENTRALDIR}/wrf_to_dart >&! 
out.wrf_to_dart +# ${MOVE} dart_wrf_vector ${CENTRALDIR}/${output_file} + +# Extract the cycle variables set num_vars = $#extract_vars_a set extract_str_a = '' set i = 1 @@ -814,21 +831,21 @@ EOF echo ${extract_str_b} -# MULTIPLE DOMAINS - loop through wrf files that are present +# Loop through all wrf domain files that are present set dn = 1 while ( $dn <= $num_domains ) set dchar = `echo $dn + 100 | bc | cut -b2-3` set icnum = `echo $ensemble_member + 10000 | bc | cut -b2-5` set outfile = prior_d${dchar}.${icnum} if ( $dn == 1) then - ncks -O -v ${extract_str_a} wrfinput_d${dchar} ../$outfile # MULTIPLE DOMAINS - output file is incomplete filename? + ncks -O -v ${extract_str_a} wrfinput_d${dchar} ../$outfile else - ncks -O -v ${extract_str_b} wrfinput_d${dchar} ../$outfile # MULTIPLE DOMAINS - output file is incomplete filename? + ncks -O -v ${extract_str_b} wrfinput_d${dchar} ../$outfile endif @ dn ++ echo "should have made $outfile" end -# MULTIPLE DOMAINS - may need to remove below to avoid confusion + if ( -e ${CENTRALDIR}/moving_domain_info && $ensemble_member == 1 ) then set dn = 2 while ( $dn <= $num_domains ) @@ -842,7 +859,7 @@ EOF cd $CENTRALDIR - # delete the temp directory for each member if desired + # Delete the temp directory for each member if desired if ( $delete_temp_dir == true ) ${REMOVE} ${temp_dir} echo "Ensemble Member $ensemble_member completed" diff --git a/models/wrf/shell_scripts/param.csh b/models/wrf/shell_scripts_csh/param.csh similarity index 75% rename from models/wrf/shell_scripts/param.csh rename to models/wrf/shell_scripts_csh/param.csh index e1dc1aff96..fad49d602a 100755 --- a/models/wrf/shell_scripts/param.csh +++ b/models/wrf/shell_scripts_csh/param.csh @@ -14,17 +14,17 @@ module load nco # set this appropriately #%%%# module load ncl/6.6.2 # set this appropriately #%%%# # Set the assimilation parameters -set NUM_ENS = 50 +set NUM_ENS = 3 set ASSIM_INT_MINUTES = 0 # 0 means use ASSIM_INT_HOURS set ASSIM_INT_HOURS = 6 # 
ignored if ASSIM_INT_MINUTES > 0 set IC_PERT_SCALE = 0.25 set ADAPTIVE_INFLATION = 1 # set to 1 if using adaptive inflation to tell the scripts to look for the files -set NUM_DOMAINS = 1 +set NUM_DOMAINS = 2 # Directories where things are run # IMPORTANT : Scripts provided rely on this directory structure and names relative to BASE_DIR. # Do not change, otherwise tutorial will fail. -set BASE_DIR = /glade/derecho/scratch/USER/WORK_DIR # set this appropriately #%%%# +set BASE_DIR = /glade/derecho/scratch/bmraczka/WRFv4.5_nested # set this appropriately #%%%# set RUN_DIR = ${BASE_DIR}/rundir set TEMPLATE_DIR = ${BASE_DIR}/template set OBSPROC_DIR = ${BASE_DIR}/obsproc @@ -36,33 +36,36 @@ set PERTS_DIR = ${BASE_DIR}/perts # Assign path to DART, WRF, WPS and WRFDA build set SHELL_SCRIPTS_DIR = ${BASE_DIR}/scripts -set DART_DIR = /glade/work/USER/DART # set this appropriately #%%%# -set WRF_DM_SRC_DIR = /glade/work/USER/WRFV3 # set this appropriately #%%%# -set WPS_SRC_DIR = /glade/work/USER/WPS # set this appropriately #%%%# -set VAR_SRC_DIR = /glade/work/USER/WRFDA # set this appropriately #%%%# +set DART_DIR = /glade/work/bmraczka/DART # set this appropriately #%%%# +set WRF_DM_SRC_DIR = /glade/work/bmraczka/WRF/WRFv4.5_git # set this appropriately #%%%# +set WPS_SRC_DIR = /glade/work/bmraczka/WRF/WPSv4.5_git # set this appropriately #%%%# +set VAR_SRC_DIR = /glade/work/bmraczka/WRF/WRFDAv4.5_git # set this appropriately #%%%# # for generating wrf template files set GEO_FILES_DIR = /glade/u/home/wrfhelp/WPS_GEOG # set this appropriately #%%%# -set GRIB_DATA_DIR = ${ICBC_DIR}/grib_data # set this appropriately #%%%# +#set GRIB_DATA_DIR = ${ICBC_DIR}/grib_data # set this appropriately #%%%# +set GRIB_DATA_DIR = /glade/work/bmraczka/WRF_nest_tutorial/GFS # set this appropriately #%%%# set GRIB_SRC = 'GFS' # set this appropriately #%%%# # list of variables for extraction and cycling set extract_vars_a = ( U V PH THM MU QVAPOR QCLOUD QRAIN QICE QSNOW QGRAUP QNICE QNRAIN \ 
U10 V10 T2 Q2 PSFC TSLB SMOIS TSK RAINC RAINNC GRAUPELNC ) set extract_vars_b = ( U V W PH THM MU QVAPOR QCLOUD QRAIN QICE QSNOW QGRAUP QNICE QNRAIN \ - U10 V10 T2 Q2 PSFC TSLB SMOIS TSK RAINC RAINNC GRAUPELNC \ - REFL_10CM VT_DBZ_WT ) + U10 V10 T2 Q2 PSFC TSLB SMOIS TSK RAINC RAINNC GRAUPELNC ) set cycle_vars_a = ( U V PH THM MU QVAPOR QCLOUD QRAIN QICE QSNOW QGRAUP QNICE QNRAIN \ U10 V10 T2 Q2 PSFC TSLB SMOIS TSK ) +set cycle_vars_b = ( U V PH THM MU QVAPOR QCLOUD QRAIN QICE QSNOW QGRAUP QNICE QNRAIN \ + U10 V10 T2 Q2 PSFC TSLB SMOIS TSK ) set increment_vars_a = ( U V PH THM MU QVAPOR QCLOUD QRAIN QICE QSNOW QGRAUP QNICE QNRAIN U10 V10 T2 Q2 PSFC ) +set increment_vars_b = ( U V PH THM MU QVAPOR QCLOUD QRAIN QICE QSNOW QGRAUP QNICE QNRAIN U10 V10 T2 Q2 PSFC ) # Diagnostic parameters set OBS_VERIF_DAYS = 7 # Generic queuing system parameters set SUPER_PLATFORM = derecho -set COMPUTER_CHARGE_ACCOUNT = YOUR_ACCT # set this appropriately #%%%# -set EMAIL = YOUR_EMAIL # set this appropriately #%%%# +set COMPUTER_CHARGE_ACCOUNT = P86850054 # set this appropriately #%%%# +set EMAIL = bmraczka@ucar.edu # set this appropriately #%%%# if ( $SUPER_PLATFORM == 'derecho') then # Derecho values (uses 'PBS' queueing system) diff --git a/models/wrf/shell_scripts/prep_ic.csh b/models/wrf/shell_scripts_csh/prep_ic.csh similarity index 77% rename from models/wrf/shell_scripts/prep_ic.csh rename to models/wrf/shell_scripts_csh/prep_ic.csh index ec8c7d43c2..429376f11e 100755 --- a/models/wrf/shell_scripts/prep_ic.csh +++ b/models/wrf/shell_scripts_csh/prep_ic.csh @@ -7,17 +7,23 @@ if ( $#argv > 0 ) then set n = ${1} # pass in the ensemble member number set datep = ${2} # needed for correct path to file - set dn = ${3} + set domains = ${3} set paramfile = ${4} else # values come from environment variables #TJH If these are not set .... 
set n = $mem_num set datep = $date - set dn = $domain + set domains = $domain set paramfile = $paramf endif source $paramfile -echo "prep_ic.csh using n=$n datep=$datep dn=$dn paramfile=$paramf" +echo "prep_ic.csh using n=$n datep=$datep domains=$domains paramfile=$paramf" +echo "domain 1 using cycle_vars_a, any other nested domains using cycle_vars_b" + +set dn = 1 +while ( $dn <= $domains ) +set dchar = `echo $dn + 100 | bc | cut -b2-3` + if ( $dn == 1 ) then @@ -31,7 +37,7 @@ if ( $dn == 1 ) then set cycle_str = `echo ${cycle_str}$cycle_vars_a[$num_vars]` echo ${cycle_str} -else # larger domain numbers use a different list of cycled variables (includes radar) +else # larger (nested) domains can use a different list of cycled variables (e.g. radar) set num_vars = $#cycle_vars_b # defined in paramfile set cycle_str = '' # these are variables we want to cycle @@ -54,5 +60,9 @@ ncks -A -v ${cycle_str} \ touch ${RUN_DIR}/ic_d${dchar}_${n}_ready + @ dn++ +end # loop through domains + + exit 0 diff --git a/models/wrf/shell_scripts/real.csh b/models/wrf/shell_scripts_csh/real.csh similarity index 68% rename from models/wrf/shell_scripts/real.csh rename to models/wrf/shell_scripts_csh/real.csh index d1459b6168..132d02500d 100755 --- a/models/wrf/shell_scripts/real.csh +++ b/models/wrf/shell_scripts_csh/real.csh @@ -5,8 +5,8 @@ source $paramfile cd ${ICBC_DIR} - mpiexec -n 128 -ppn 128 ${RUN_DIR}/WRF_RUN/real.exe - +# mpiexec -n 128 -ppn 128 ${RUN_DIR}/WRF_RUN/real.exe + mpiexec -n 4 -ppn 4 ${RUN_DIR}/WRF_RUN/real.exe #if ( `grep "Successful completion of program real.exe" ./rsl.out.0000 | wc -l ` == 1 ) touch ${ICBC_DIR}/real_done touch ${ICBC_DIR}/real_done diff --git a/models/wrf/shell_scripts/setup.csh b/models/wrf/shell_scripts_csh/setup.csh similarity index 100% rename from models/wrf/shell_scripts/setup.csh rename to models/wrf/shell_scripts_csh/setup.csh diff --git a/models/wrf/tutorial/README.rst b/models/wrf/tutorial/README.rst index b40c5b380c..daa7f57d2f 
100644 --- a/models/wrf/tutorial/README.rst +++ b/models/wrf/tutorial/README.rst @@ -8,27 +8,33 @@ Introduction This document will describe how to get started with your own Weather Research and Forecasting (WRF) data assimilation experiments using DART -and only covers the WRF-specific aspects of coupling with DART. -It is not wise to try to run WRF-DART if you have no experience with -either WRF or DART. +and focuses on the WRF-specific aspects of coupling with DART. These +instructions provide a realistic nested (2-domain) WRFv4.5 example for a +severe storm event in the Great Plains during 2024. The tutorial provides +the user with NCEP prepbufr atmospheric observations and WRF +grib files to generate observation files and the inital WRF domain and +boundary conditions. It is recommended the user work through the tutorial +example completely and confirm the setup works on their own system. +At that time, the scripts can be used as a template to apply to your own +scientfic WRF-DART application. .. Important :: - This tutorial was designed to be compatible with WRF Version 4 and was - tested with WRFv4.5.2. This tutorial should not be used with DART - versions 11.4.0 and earlier because those older versions do not account - for different coordinate systems including the sigma hybrid coordinates as - described in `DART Issue #650 `__. + This tutorial was designed to be compatible with WRF Version 4 and later, and was + tested with WRFv4.5.2. It is mandatory to use the terrain following coordinate + system (hybrid_opt=0) and not the default sigma hybrid coordinates (hybrid_opt=1) + when using WRF-DART. Using the sigma hybrid coordinate can lead to adverse effects + when generating ensemble spread leading to poor forecast performance. For more + details see `DART Issue #650 `__. 
- Furthermore, older versions do not account for the prognostic temperature variable - switch from ``T`` (perturbation potential temperature) to ``THM``, (either perturbation - potential temperature or perturbation moist potential temperature) as described in - `DART issue #661 `__. The current implementation - of the code sets ``T=THM`` because within &dynamics section of ``namelist.input`` - ``use_theta_m=0``. For this reason, It is mandatory to include ``THM`` instead of - ``T`` as the ``TYPE_T`` within the wrf_state_variables namelist. - - Earlier version of WRF (v3.9) may run without errors with more recent versions of + It is also mandatory to include the prognostic temperature variable ``THM`` within + the DART state. This means that ``THM`` must be included alongside ``TYPE_T`` within + the wrf_state_variables namelist. The current implementation + of the code sets ``use_theta_m=0`` (&dynamics section of ``namelist.input``) such that + ``THM=perturbation potential tempature``. For more discussion on this topic see: + `DART issue #661 `__. + + Earlier versions of WRF (v3.9) may run without errors with more recent versions of DART (later than 11.4.0), but the assimilation performance will be deprecated. If you need to run with earlier versions of WRF, please review the changes required to switch from WRFv4 to WRFv3 as documented within @@ -47,58 +53,56 @@ The DART team is not responsible for and does not maintain the WRF code. For WRF `WRF User Forum `__ or the `WRF github page. `__ -If you are new to DART, we recommend that you become familiar with DART -by working through the :doc:`../../../theory/readme` and then +If you are new to DART, we recommend that you become familiar with EnKF +theory by working through the :doc:`../../../theory/readme` and then understanding the :ref:`DART getting started ` documentation. -This tutorial is **not** a toy simulation, but represents a realistic WRF-DART -assimilation for the continental United States. 
It uses a WRF -ensemble of 50 members that will be initialized from GFS initial -conditions at 2017/04/27 00:00 UTC. The data included in the tutorial lasts -until 2017/04/30 18:00 UTC. During this period, there was a strong rain and wind event -that affected a large portion of the United States, causing record -rains, localized flooding, and numerous tornadoes. For more information -on the physical account of this case, see -`weather.gov `__. - -By default, the tutorial case will only cover 12 hours of this event -starting at 2017/04/27 00:00 UTC. The WRF model will be “spun-up” for -six hours to generate a prior distribution. An assimilation of PREPBUFR -observations will then be performed at 06:00 UTC, at which time analysis -files will be generated to begin a new ensemble forecast. The WRF model -will be advanced for 6 hours and a final assimilation cycle will be -performed at 12:00 UTC. This process could then continue in order to -investigate the strong rain and wind event. On NSF NCAR's *Derecho*, -the tutorial requires at least 30 minutes of run time, and can take -much longer (1-2 hours) depending upon the PBS queue wait time. - -The goal of this tutorial is to demonstrate how WRF-DART works, and to provide an -understanding of the major steps within a data assimilation (DA) experiment. -However, you will need to do additional work before you can apply -WRF-DART to your own research application, as some of the steps involved -in this tutorial (in particular, the perturbation bank and the -observation sequence files) are provided for you in order to simplify -the process. We provide a diagnostic section at the end of the tutorial to -assess the skill/success of the assimilation. Be aware, an assimilation is -not successful just because it runs to completion. A successful assimilation -generally uses the vast majority of the observations provided and minimizes -the bias and RMSE between the posterior model state and the observations. 
- -Finally, if you are not running on the NSF NCAR Derecho (PBS) supercomputing system, you will -need to customize the assimilation scripts (located in /DART/models/wrf/shell_scripts/) to match the details of your particular system. -Specifically, you will need to edit the DART csh scripting to match your system settings -whether that be, for example, a PBS, SLURM or LSF HPC system. Although the DART team can -offer advice on how to customize the scripting to accomodate your HPC system, your -HPC system administrator is likely the best resource to resolve these issues. +May 2024 Great Plains Severe Storm Event +---------------------------------------- + +This tutorial examines a Derecho and HP Supercell storm event that affected +the Great Plains area on May 19th 2024. For more information on this event +see `weather.gov `__. + +The figures below provides snapshots of the local radar during the evolution of +the storm event. The left panel (05-19-2024 18:00 UTC) and middle panel (05-20-2024 00:00 UTC) +illustrate the timing of storm development, whereas the right panel shows the nested +domain configuration for WRF. The nested domain (d02) (0.1x0.1 degrees) is centered in Kansas, +whereas the parent domain (d01) (0.2x0.2 degrees) covers a signifcant portion of the Great Plains. + ++-------------------------+-------------------------+-------------------------+ +| |radar1| | |radar2| | |wrf_domain| | ++-------------------------+-------------------------+-------------------------+ + + +The tutorial uses a 20 member ensemble initialized from the GFS at +05-19-2024 00:00 UTC. It performs an ensemble spinup from 00 to 06 UTC +by applying perturbations to the GFS initial condition. It then assimilates +atmospheric observations at 06 and 12 UTC respectively. Finally, a forecast +is conducted (no observations assimilated) from 12 to 24 UTC. This sequence +of ensemble spinup, assimilation mode and forecast mode generally +follows published literature for atmospheric DA. 
Although we have strived +to maintain scientific realism in this tutorial, we have made an effort +to reduce the computational expense for reduced runtime by reducing +the ensemble size (20) and coarsening the WRF spatial resolution (0.1 and 0.2 degrees). +**For science applications we recommend at least using 40 ensemble members +which helps reduce sampling error and improves the assimlation performance.** + +On NSF NCAR's *Derecho*,the tutorial requires roughly 40 minutes of computational +run time, but can take longer depending upon the PBS queue wait time. + +The goals of this tutorial are to: 1) provide an understanding of the major steps +within a DA experiment, 2) port and test the WRF-DART scripts on the user's system +and 3) use the WRF-DART tutorial scripts as a template for the user's own +research application. .. Important :: - The tutorial scripting and instructions are based on the NSF NCAR supercomputer - Derecho, so you will need to edit the scripts and interpret the instructions for - other HPC systems. The scripting uses examples of a PBS queuing system (e.g. Derecho) - and LSF queuing system (e.g. decommissioned Yellowstone). You can use these as a - template for your own system. + The tutorial scripting and instructions are intended for the NSF NCAR supercomputer + Derecho. The user must modify the scripts and interpret the instructions for + other HPC systems. The scripting uses examples for a PBS (e.g. Derecho) + and LSF queuing system. These will need to be modified for other systems (e.g. SLURM). Step 1: Setup @@ -126,7 +130,7 @@ packages can be automatically loaded using the following commands: module load nco module load ncl/6.6.2 -These commands are provided by default with the param.csh script. More details +These commands are provided by default with the param.sh script. More details are provided below. 
There are multiple phases for the setup: building the DART executables, downloading the initial WRF boundary conditions, building (or using existing) WRF executables, and configuring and staging the scripting @@ -201,13 +205,13 @@ might need for an experiment with that model. integer, parameter :: r8 = r4 ! alias r8 to r4 3. Copy the tutorial DART namelist from - ``$DART_DIR/models/wrf/tutorial/template/input.nml.template`` to + ``$DART_DIR/models/wrf/tutorial/template_nest/input.nml.template`` to ``$DART_DIR/models/wrf/work/input.nml``. :: cd $DART_DIR/models/wrf - cp tutorial/template/input.nml.template work/input.nml + cp tutorial/template_nest/input.nml.template work/input.nml 4. Build the WRF-DART executables: @@ -217,7 +221,7 @@ might need for an experiment with that model. ./quickbuild.sh Many executables are built, the following executables are needed for the - tutorial and will be copied to the right place by the *setup.csh* script + tutorial and will be copied to the right place by the *setup.sh* script in a subsequent step: :: @@ -234,11 +238,10 @@ might need for an experiment with that model. Preparing the experiment directory. ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Approximately 100Gb of space is needed to run the tutorial. Create a -"work" directory someplace with a lot of free space. The rest of the -instructions assume you have an environment variable called *BASE_DIR* -that points to this directory. On Derecho it is convenient to use your -scratch directory for this purpose. +Create a "work" directory that can accomodate approximately 40 GB of space +to run the tutorial. The rest of the instructions assume you have an +environment variable called *BASE_DIR* that points to this directory. +On Derecho it is convenient to use your scratch directory for this purpose. 
===== ==================================================== shell command @@ -247,32 +250,28 @@ tcsh ``setenv BASE_DIR `` bash ``export BASE_DIR=`` ===== ==================================================== -1. The WRF boundary conditions and perturbations required to make a - viable ensemble are available in a 15 GB tar file. Put this file in - your ``$BASE_DIR``. Since this is a large file, we suggest using - 'wget' to download the file directly to your local system: +1. The grib files required to generate WRF initial and boundary conditions + and the observation files (obs_seq.out) have already been generated for you + within a 10GB tar file. Put this file in your ``$BASE_DIR``. + Download the file directly to your local system: :: cd $BASE_DIR - wget data.dart.ucar.edu/WRF/wrf_dart_tutorial_29Apr2024.tar.gz - tar -xzvf wrf_dart_tutorial_29Apr2024.tar.gz + wget data.dart.ucar.edu/wrfdart/tutorial/wrf_dart_nested_tutorial_15Jan2026.tar.gz + tar -xzvf wrf_dart_nested_tutorial_15Jan2026.tar.gz After untarring the file you should see the following directories: *icbc, output, perts,* and *template.* The directory names (case sensitive) are important, as the scripts rely on these local paths - and file names. Please note that the perturbation, surface and initial - condition files were derived from an earlier version (pre-4.0) of WRF/WPS/WRFDA - but still maintains compatibility with the (post-4.0, post-11.4.0) - WRF-DART versions recommended to run this WRF assimilation example. + and file names. Only the icbc and output folders contain files. 2. You will need template WRF namelists from the - ``$DART_DIR/models/wrf/tutorial/template`` directory: + ``$DART_DIR/models/wrf/tutorial/template_nest`` directory: :: - cp $DART_DIR/models/wrf/tutorial/template/namelist.input.meso $BASE_DIR/template/. - cp $DART_DIR/models/wrf/tutorial/template/namelist.wps.template $BASE_DIR/template/. + cp $DART_DIR/models/wrf/tutorial/template_nest/*.* $BASE_DIR/template/. 3. 
You will also need scripting to run a WRF/DART experiment. Copy the contents of ``$DART_DIR/models/wrf/shell_scripts`` to the ``$BASE_DIR/scripts`` directory. @@ -293,9 +292,10 @@ WRF Preprocessing System (WPS) and WRF Data Assimilation System (WRFDA). Importantly, DART is used to perform the ensemble DA for this tutorial, however, the WRFDA package is required to generate a set of perturbed initial ensemble member -files and also to generate perturbed boundary condition files. Since the -tutorial provides a perturbation bank for a specific case, it is not -required to actually *run da_wrfvar.exe* but it needs to be in the +files and also to generate perturbed boundary condition files. The **da_wrfvar.exe** +executable is required to generate a perturbation bank for the ensemble spinup step. +Importantly, DART performs ensemble DA using the **filter** executable, whereas the +WRFDA package is only used to generature perturbations. ``WRF_RUN`` directory for the tutorial. WRF and WRFDA should be built with the "dmpar" option, while WPS can be @@ -307,20 +307,20 @@ about building these packages. For consistency and to avoid errors, you should build WRF, WPS, WRFDA, and DART with the same compiler you use for NetCDF. Likewise MPI should use the same compiler. You will need the location of the WRF and WRFDA builds to customize the - *params.csh* script in the next step. If using gfortran to compile WRF on Derecho + *param.sh* script in the next step. If using gfortran to compile WRF on Derecho we recommend using option 34 (gnu dmpar) to configure WRF, option 1 (gnu serial) to configure WPS, and option 34 (gnu dmpar) to configure WRFDA. You will need the location - of the WRF, WPS,and WRFDA builds to customize the *params.csh* script in the next step. + of the WRF, WPS,and WRFDA builds to customize the *param.sh* script in the next step. Using the gfortan compiler on Derecho required custom flag settings to successfully compile the WRF, WPS and WRFDA executables. 
For more information please see NCAR/DART `github issue 627. `__ -Configure ``$BASE_DIR/scripts/param.csh`` with proper paths, info, etc. -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Configure ``$BASE_DIR/scripts/param.sh`` with proper paths and varaibles +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -This is a script that sets variables which will be read by other +The param.sh script sets variables which will be read by other WRF-DART scripts. There are some specific parameters for either the Derecho supercomputing system using the `PBS `__ queueing system or the @@ -330,10 +330,8 @@ script to set your queueing-system specific parameters. .. important:: - All variables that are marked - ``'set this appropriately #%%%#'`` need to be set. This list is intended - to provide some guidance on what needs to be set, but it is not an - exhaustive list. + Make sure all the variables within *param.sh* as described in the table below are set appropriately. + Remember, that Derecho HPC is used for the default settings. +-------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------+ | Script variable | Description | @@ -342,10 +340,18 @@ script to set your queueing-system specific parameters. +-------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------+ | module load ncl/6.6.2 | The ncl package. | +-------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------+ - | BASE_DIR | The directory containing icbc, output, perts, etc. | + | BASE_DIR | The main working directory. 
| +-------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------+ | DART_DIR | The DART directory. | +-------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------+ + | NUM_ENS | The total number of WRF ensemble members. The tutorial uses 20 for computational efficiency. | + +-------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------+ + | ASSIM_INT_HOURS | The frequency of assimilation steps, and temporal spacing between observations. This tutorial uses 6 hours. | + +-------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------+ + | ADAPTIVE_INFLATION | A DART tool used to adjust ensemble spread. Set to 1 (on) for assimilation mode and set to 0 (off) for forecast mode. | + +-------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------+ + | NUM_DOMAINS | The number of WRF domains. This tutorial uses a 2 domain setup (parent d01, nested d02). Scripting works for both single and multi-domains. | + +-------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------+ | WRF_DM_SRC_DIR | The directory of the WRF dmpar installation. | +-------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------+ | WPS_SRC_DIR | The directory of the WPS installation. 
| @@ -360,17 +366,17 @@ script to set your queueing-system specific parameters. +-------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------+ | COMPUTER_CHARGE_ACCOUNT | The project account for supercomputing charges. See your supercomputing project administrator for more information. | +-------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------+ - | EMAIL | The e-mail address used by the queueing system to send job summary information. This is optional. | + | EMAIL | An optional e-mail address used by the queueing system to send job summary information. | +-------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------+ -Run the *setup.csh* script to create the proper directory structure and -move executables to proper locations. +Now that ``param.sh`` is set properly, run the ``setup.sh`` script to create the proper directory structure and +to move the executables and support files to the proper locations. :: cd $BASE_DIR/scripts - ./setup.csh param.csh + ./setup.sh param.sh So far, your ``$BASE_DIR`` should contain the following directories: @@ -410,7 +416,7 @@ Your ``$BASE_DIR/rundir`` directory should contain the following: **scripts:** - *add_bank_perts.ncl* -- *new_advance_model.csh* +- *new_advance_model.sh* **support data:** @@ -429,20 +435,15 @@ Check to make sure your ``$BASE_DIR/rundir/WRF_RUN`` directory contains: .. note:: - Be aware that the *setup.csh* script is designed to remove + Be aware that the *setup.sh* script is designed to remove ``$BASE_DIR/rundir/WRF_RUN/namelist.input``. Subsequent scripting will modify ``$BASE_DIR/template/namlist.input.meso`` to create the ``namelist.input`` for the experiment. 
-For this tutorial, we are providing you with a specified WRF domain. To -make your own, you would need to define your own wps namelist and use -WPS to make your own geogrid files. See the WRF site for help with -building and running those tools as needed. You would also need to get -the appropriate grib files to generate initial and boundary condition -files for the full period you plan to cycle. In this tutorial we have -provided you with geogrid files, a small set of grib files, and a -namelist to generate series of analyses for several days covering a -North American region. +For this tutorial, we are providing you with the namelist settings for +a nested WRF domain which specifies the location, spatial resolution and +relative positioning of the parent and nested domain. These namelist settings +are used in conjunction with the grib files to generate the intial and boundary conditions. Let's now look inside the ``$BASE_DIR/scripts`` directory. You should find the following scripts: @@ -450,189 +451,238 @@ find the following scripts: +-----------------------+-------------------------------------------------------------------------------------------+ | Script name | Description | +=======================+===========================================================================================+ -| add_bank_perts.ncl | Adds perturbations to each member. | +| add_bank_perts.ncl | Applies perturbations to each WRF ensemble member to increase ensemble spread. | +-----------------------+-------------------------------------------------------------------------------------------+ -| assim_advance.csh | Advances 1 WRF ensemble member to the next analysis time. | +| assim_advance.sh | Advances each WRF ensemble member between each assimilation time. | +-----------------------+-------------------------------------------------------------------------------------------+ -| assimilate.csh | Runs filter ... i.e. the assimilation. 
| +| assimilate.sh | Runs filter at each assimilation time step. | +-----------------------+-------------------------------------------------------------------------------------------+ -| diagnostics_obs.csh | Computes observation-space diagnostics and the model-space mean analysis increment. | +| diagnostics_obs.sh | Computes observation-space diagnostics and the model-space mean analysis increment. | +-----------------------+-------------------------------------------------------------------------------------------+ -| driver.csh | Primary script for running the cycled analysis system. | +| driver.sh | Primary script for running the cycled analysis (DA) system. | +-----------------------+-------------------------------------------------------------------------------------------+ -| first_advance.csh | Advances 1 WRF ensemble member (on the first time). | +| first_advance.sh | Advances each WRF ensemble member during initial ensemble spinup. | +-----------------------+-------------------------------------------------------------------------------------------+ -| gen_pert_bank.csh | Saves the perturbations generated by WRFDA CV3. | +| gen_pert_bank.sh | Generates perturbations using WRFDA CV3. | +-----------------------+-------------------------------------------------------------------------------------------+ -| gen_retro_icbc.csh | Generates the wrfinput and wrfbdy files. | +| gen_retro_icbc.sh | Generates the wrfinput and wrfbdy mean files for each assimilation time. | +-----------------------+-------------------------------------------------------------------------------------------+ -| init_ensemble_var.csh | Creates the perturbed initial conditions from the WRF-VAR system. | +| init_ensemble_var.sh | Performs the initial ensemble spinup. | +-----------------------+-------------------------------------------------------------------------------------------+ | mean_increment.ncl | Computes the mean state-space increment, which can be used for plotting. 
| +-----------------------+-------------------------------------------------------------------------------------------+ -| new_advance_model.csh | advances the WRF model after running DART in a cycling context. | +| new_advance_model.sh | Advances the WRF model in between assimilation times. | +-----------------------+-------------------------------------------------------------------------------------------+ -| param.csh | Contains most of the key settings to run the WRF-DART system. | +| param.sh | Contains key variables and paths to run the WRF-DART system. | +-----------------------+-------------------------------------------------------------------------------------------+ -| prep_ic.csh | Prepares the initial conditions for a single ensemble member. | +| prep_ic.sh | Prepares the initial conditions for each WRF ensemble member. | +-----------------------+-------------------------------------------------------------------------------------------+ -| real.csh | Runs the WRF real.exe program. | +| real.sh | Runs the WRF real.exe program that advances WRF forward in time. | +-----------------------+-------------------------------------------------------------------------------------------+ -| setup.csh | Creates the proper directory structure and place executables/scripts in proper locations. | +| setup.sh | Creates the proper directory structure and puts executables/scripts in proper locations. | +-----------------------+-------------------------------------------------------------------------------------------+ -You will need to edit the following scripts to provide the paths to +You will need to edit the following scripts in the table below to provide the paths to where you are running the experiment, to connect up files, and to set -desired dates. Search for the string ``'set this appropriately #%%%#'`` +desired dates. Search for the string ``'set this appropriately'`` for locations that you need to edit. 
:: cd $BASE_DIR/scripts - grep -r 'set this appropriately #%%%#' . + grep -r 'set this appropriately' . -Other than *param.csh*, which was covered above, make the following +Other than ``param.sh``, which was covered above, make the following changes: -+--------------------+--------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| File name | Variable / value | Change description | -+====================+======================================+=========================================================================================================================================================================================================================================================+ -| driver.csh | datefnl = 2017042712 | Change to the final target date; here the final date is already set correctly for this tutorial. | -+--------------------+--------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| gen_retro_icbc.csh | datefnl = 2017042712 | Set to the final target date of the tutorial. However, it is possible (not necessary) to create WRF initial/boundary conditions to 2017043000. This is the latest date that files are included in the tutorial. 
| -+--------------------+--------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| gen_retro_icbc.csh | paramfile = | The full path to param.csh. Change this on the line after the comment. While these two files are in the same directory here, in general it is helpful to have one param.csh for each experiment. | -+--------------------+--------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| gen_pert_bank.csh | All changes | As the tutorial includes a perturbation bank, you will not need to run this script for the tutorial, so you will not need to change these values. However, you should set appropriate values when you are ready to generate your own perturbation bank. | -+--------------------+--------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+ - - -Next, move to the ``$BASE_DIR/perts`` directory. Here you will find 100 -perturbation files, called a "perturbation bank." For your own case, you -would need to create a perturbation bank of your own. A brief -description for running the script is available inside the comments of -that file. However, again, for this tutorial, this step has already been -run for you. 
The ``$BASE_DIR/icbc`` directory contains a *geo_em_d01.nc* -file (geo information for our test domain), and grib files that will be -used to generate the initial and boundary condition files. The -``$BASE_DIR/template`` directory should contain namelists for WRF, WPS, -and filter, along with a wrfinput file that matches what will be the -analysis domain. Finally, the ``$BASE_DIR/output`` directory contains -observations within each directory name. Template files will be placed -here once created (done below), and as we get into the cycling the -output will go in these directories. - - - - -Step 2: Initial conditions --------------------------- - -To get an initial set of ensemble files, depending on the size of your -ensemble and data available to you, you might have options to initialize -the ensemble from, say, a global ensemble set of states. Here, we -develop a set of flow dependent errors by starting with random -perturbations and conducting a short forecast. We will use the WRFDA -random CV option 3 to provide an initial set of random errors, and since -this is already available in the perturbation bank developed in the -setup, we can simply add these to a deterministic GFS state. Further, -lateral boundary uncertainty will come from adding a random perturbation -to the forecast (target) lateral boundary state, such that after the -integration the lateral boundaries have random errors. - -First, we need to generate a set of GFS states and boundary conditions -that will be used in the cycling. Use -``$BASE_DIR/scripts/gen_retro_icbc.csh`` to create this set of files, -which will be added to a subdirectory corresponding to the date of the -run in the ``$BASE_DIR/output`` directory. Make sure -*gen_retro_icbc.csh* has the appropriate path to your *param.csh* -script. 
If the *param.csh* script also has the correct edits for paths -and you have the executables placed in the rundir, etc., then running -*gen_retro_icbc.csh* should execute a series of operations to extract -the grib data, run metgrid, and then twice execute *real.exe* to -generate a pair of WRF files and a boundary file for each analysis time. ++--------------------+---------------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------+ +| File name | Variable / value | Change description | ++====================+=============================================+=======================================================================================================================================+ +| driver.sh | datefnl = 2024051912 | Change to the final assimilation target date. In this example observations are assimilated at time steps 2024051906 and 2024051912. | ++--------------------+---------------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------+ +| gen_retro_icbc.sh | datea = 2024051900 | Set to the starting time of the tutorial. This is the beginning time of the ensemble spinup. | ++--------------------+---------------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------+ +| gen_retro_icbc.sh | datefnl = 2024052000 | Set to the final time of the tutorial. This is the end of the forecast mode. | ++--------------------+---------------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------+ +| gen_retro_icbc.sh | paramfile = /full/path/to/param.sh | Script sources information from param.sh file. 
| ++--------------------+---------------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------+ +| gen_pert_bank.sh | datea = 2024051900 | Set to the starting time of the tutorial. | ++--------------------+---------------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------+ +| gen_pert_bank.sh | num_ens = 60 | Total number of perturbation members. Set to 3-4X that of model ensemble (20) | ++--------------------+---------------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------+ +| gen_pert_bank.sh | paramfile = /full/path/to/param.sh | Script sources information from param.sh file. | ++--------------------+---------------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------+ +| gen_pert_bank.sh | savedir = ${PERTS_DIR}/work/boundary_perts. | Location of perturbation bank. | ++--------------------+---------------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------+ +| add_bank_pert.ncl | bank_size = 60 | Recommended to set to same value as gen_pert_bank.sh num_ens value (60). Cannot be greater than total perturbations in bank. | ++--------------------+---------------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------+ + +The setup is now complete. 
The tarred tutorial file provides the grib files and +should be located within the ``$BASE_DIR/icbc`` directory that will be used to +generate the WRF initial and boundary condition files. +The ``$BASE_DIR/output`` directory contains the NCEP prepbufr observations (obs_seq.out) +within each assimilation time sub-directory. + +The ``$BASE_DIR/template`` directory should contain namelists for WRF, WPS, +and DART. + + + +Step 2: Create Initial and Boundary Conditions +---------------------------------------------- + +We use GFS data to generate the initial and boundary conditions +that will be used in the tutorial. The ``gen_retro_icbc.sh`` script +executes a series of operations to extract the grib data, runs +WPS executables geogrid, ungrib, metgrid, and then twice executes *real.exe* to generate +a pair of WRF files and a boundary file for each analysis time and +domain. These files are then added to a subdirectory corresponding to the +date within the ``$BASE_DIR/output`` directory. :: cd $BASE_DIR/scripts - ./gen_retro_icbc.csh + ./gen_retro_icbc.sh .. note:: - Ignore any ``rm: No match`` errors, as the script attempts to - delete output files if they already exist, and they will not for the - first run. - -Once the script completes, inside your ``$BASE_DIR/output/2017042700`` -directory you should see these files: +Once the script completes, you should confirm the following files +have been created within the ``$BASE_DIR/output/2024051900`` +directory: :: - wrfbdy_d01_152057_21600_mean - wrfinput_d01_152057_0_mean - wrfinput_d01_152057_21600_mean + wrfbdy_d01_154636_21600_mean + wrfinput_d01_154636_0_mean + wrfinput_d01_154636_21600_mean + wrfinput_d02_154636_0_mean + wrfinput_d02_154636_21600_mean + +These filenames are appended with the Gregorian dates used within DART. +Similar files (with different dates) should appear in all of the output +sub-directories between the *datea* and *datef* dates set in the ``gen_retro_icbc.sh`` +script. 
+ + -These filenames include the Gregorian dates for these files, which is -used by the dart software for time schedules. Similar files (with -different dates) should appear in all of the date directories between -the *datea* and *datef* dates set in the *gen_retro_icbc.csh* script. -All directories with later dates will also have an observation sequence -file *obs_seq.out* that contains observations to be assimilated at that -time. +Step 3: Generate Perturbation Bank +---------------------------------- -Next, we will execute the script to generate an initial ensemble of -states for the first analysis. For this we run the script -*init_ensemble_var.csh*, which takes two arguments: a date string and -the location of the *param.csh* script. +We use the WRFDA random CV option 3 to provide an initial set of random errors that +we refer to as the 'perturbation bank.' During the subsequent ensemble +spinup (Step 4) these perturbations are added to the deterministic, +single instance GFS state generated in Step 2. Furthermore, during the +subsequent assimilation cycling (Step 8), these perturbations are added to the forecast +(target) boundary state, such that boundaries include random errors introducing +uncertainty, which promotes ensemble spread to the WRF ensemble domain(s). + +The spatial pattern and magnitude of the perturbations are controlled through +the ``&wrfvar7`` ``cv_options``, ``as1``, ``as2``, ``as3`` and ``as4`` namelist settings included +within the ``namelist.input.3dvar`` template. These settings were customized for +this tutorial example. These will likely need to be modified for your own science +application. For more information please see the WRFDA documentation. :: cd $BASE_DIR/scripts - ./init_ensemble_var.csh 2017042700 param.csh + ./gen_pert_bank.sh + +.. note:: + +The script will generate a batch job for each perturbation (60 total). +The rule of thumb is to generate 3-4X as many perturbations as the +model ensemble (20). 
This is done to increase the probability each +ensemble member receives a unique perturbation. You should confirm +the following files have been created within the +``$PERTS_DIR/work/boundary_perts`` directory: + +:: + + pert_bank_mem_01.nc + pert_bank_mem_02.nc + .. + .. + pert_bank_mem_60.nc + + +Step 4: Perform Ensemble Spinup +------------------------------- + +Next, we generate an initial ensemble of WRF states to prepare for the +first assimilation (analysis) step. We run the script +``init_ensemble_var.sh``, which takes two arguments: a date string for the +starting time and the path to the ``param.sh`` script. + +The ``init_ensemble_var.sh`` script adds perturbations to the single instance +WRF domain (generated in Step 2) which generates an ensemble of WRF simulations. +Please note that the perturbations are added to the WRF state randomly, thus the results of the +tutorial should be similar each time it is run, but it will not be deterministic. +If there are multiple domains (like in this tutorial example) the code will automatically +apply the perturbations from the parent domain to the nested domains through +downscaling. This ensures that the location of perturbations are consistent across +the domain boundaries. Next, the model ensemble is then advanced (spun-up) from the +starting date (2024051900) to the first assimilation time (2024051906). To accomplish this, +the ``init_ensemble_var.sh`` script orchestrates a series of calls as shown below: + +1. add_bank_perts.ncl +2. first_advance.sh +3. new_advance_model.sh + +This series of scripts executes ``wrf.exe`` to advance the WRF model +using the following mpi run command within ``first_advance.sh`` as follows: + +:: -This script generates 50 small scripts and submits them to the batch -system. It assumes a PBS batch system and the 'qsub' command for -submitting jobs. If you have a different batch system, edit this script -and look near the end. 
You will need to modify the lines staring with -#PBS and change 'qsub' to the right command for your system. You might -also want to modify this script to test running a single member first — -just in case you have some debugging to do. + mpiexec -n 4 -ppn 4 ./wrf.exe -However, be warned that to successfully complete the tutorial, including -running the *driver.csh* script in Step 6, using a smaller ensemble -(e.g. < 20 members) can lead to spurious updates during the analysis step, -causing the WRF simulation to fail. +Please be aware that the mpi run command is customized for the Derecho environment. +In addition, the processor setup was customized for the tutorial WRF domain setup. Please refer +to the WRF documentation for more details on how to optimize the processor setup +for other WRF domains. This script submits 20 batch jobs to the queuing system. +It assumes a PBS batch system and the 'qsub' command for submitting jobs. If you +have a different batch system, you will need to modify the commands such as #PBS and 'qsub'. +For more information you should familiarize yourself with `running jobs on +Derecho `__ or your own HPC system. -When complete for the full ensemble, you should find 50 new files in the -directory ``output/2017042700/PRIORS`` with names like *prior_d01.0001*, -*prior_d01.0002*, etc... You may receive an e-mail to helpfully inform -you when each ensemble member has finished. +The ``init_ensemble_var.sh`` script requires two command-line arguments - +a date string for the starting time and the path to the ``param.sh`` script as +shown below: +:: + + cd $BASE_DIR/scripts + ./init_ensemble_var.sh 2024051900 param.sh + +When the scripts complete for all ensemble members, you should find 20 new files +for each domain (40 total files) in the directory ``output/2024051900/PRIORS`` +named *prior_d01.0001*, *prior_d02.0001*, etc. 
-Step 3: Prepare observations [Informational Only] + +Step 5: Prepare observations [Informational Only] ------------------------------------------------- .. Important:: - The observation sequence (obs_seq) files used in this tutorial are already provided - for you within the output directory. Proceed to step 5 if you wish to complete the + The observation sequence (obs_seq.out) files used in this tutorial are already provided + for you within the output directory. Proceed to step 7 if you wish to complete only the required tutorial steps. If you are interested in customizing a WRF-DART experiment - for your own application, steps 3 and 4 provide useful guidance. The obs_seq file used - in this tutorial is roughly based on the NCEP PREPBUFR data files which are + for your own application, steps 5 and 6 provide useful guidance. The obs_seq.out files + provided in this tutorial are generated from the NCEP PREPBUFR data files which are located at the `NSF NCAR Research Data Archive `__ - (ds090 or ds337). There are additional observation types used in - this tutorial (e.g. :doc:`MADIS <../../../observations/obs_converters/MADIS/MADIS>`) - besides the PREPBUFR data, and we **do not** provide instructions to - reconstruct the tutorial obs_seq files exactly. + (ds090 or ds337). Although we **do not** provide explicit instructions here to + reconstruct the tutorial obs_seq.out files, you can follow the links for the prepbufr + observation converter provided below. We used prepbufr data from the + A26943-202405prepqmB.tar file that includes the date range for this tutorial + (prepqm24051900.nr through prepqm24052100.nr). Observation processing is critical to the success of running DART and is covered in :ref:`Getting Started `. 
In brief, to add your own observations to WRF-DART you will need to understand the relationship between observation definitions and -observation sequences, observation types and observation quantities (see Step 4), and +observation sequences, observation types and observation quantities (see Step 6), and understand how observation converters extract observations from their native formats into the DART specific format. @@ -656,7 +706,7 @@ as described below: observation converter instructions are :doc:`here <../../../observations/obs_converters/MADIS/MADIS>`. -Step 4: Overview of forward operators [Informational Only] +Step 6: Overview of Forward Operators [Informational Only] -------------------------------------------------------------- This section is for informational purposes only and does not include any @@ -667,9 +717,11 @@ An introduction to important namelist variables that control the operation of th operator are located in the :ref:`WRF namelist documentation`. -The ``obs_seq.out`` file provided with the tutorial contains -30 different observation types. Here we examine an excerpt of that file, focusing -on a single temperature observation to describe the process: +The ``obs_seq.out`` files provided with the tutorial contains over +10 different observation types (e.g. RADIOSONDE, AIRCRAFT etc). +Here we examine a single temperature observation type. Please note +that METAR type observations are not used in this tutorial example, +but the file structure and concepts are exactly the same. :: @@ -875,20 +927,20 @@ k+1 WRF vertical levels followed by vertical interpolation). -Step 5: Creating the first set of adaptive inflation files ----------------------------------------------------------- +Step 7: Create the First Set of Inflation Files +----------------------------------------------- -In this section we describe how to create initial adaptive inflation +In this section we describe how to create the initial adaptive inflation files. 
These will be used by DART to control how the ensemble is -inflated during the first assimilation cycle. +inflated (increases spread) during the first assimilation cycle. It is convenient to create initial inflation files before you start an experiment. The initial inflation files may be created with -*fill_inflation_restart*, which was built by the *quickbuild.sh* step. +``fill_inflation_restart``, which was built by the ``quickbuild.sh`` step. A pair of inflation files is needed for each WRF domain. -Within the ``$BASE_DIR/rundir`` directory, the *input.nml* file has some -settings that control the behavior of *fill_inflation_restart*. Within +Within the ``$BASE_DIR/rundir`` directory, the ``input.nml`` file has +settings that control the behavior of ``fill_inflation_restart``. Within this file there is the section: :: @@ -902,12 +954,12 @@ this file there is the section: post_inf_mean = 1.00 post_inf_sd = 0.6 - input_state_files = 'wrfinput_d01' + input_state_files = 'wrfinput_d01','wrfinput_d02' single_file = .false. verbose = .false. / -These settings write a prior inflation file with a inflation mean of 1.0 +These settings create a prior inflation file with a inflation mean of 1.0 and a prior inflation standard deviation of 0.6. These are reasonable defaults to use. The *input_state_files* variable controls which file to use as a template. You can either modify this namelist value to point to @@ -922,100 +974,219 @@ the program needs one template for each domain. This is a comma-separated list of strings in single 'quotes'. After running the program, the inflation files must then be moved to the -directory expected by the *driver.csh* script. +directory expected by the ``driver.sh`` script. 
Run the following commands with the dates for this particular tutorial: :: cd $BASE_DIR/rundir - cp ../output/2017042700/wrfinput_d01_152057_0_mean ./wrfinput_d01 + cp ../output/2024051900/wrfinput_d01_154636_0_mean ./wrfinput_d01 + cp ../output/2024051900/wrfinput_d02_154636_0_mean ./wrfinput_d02 + cp ../output/2024051900/wrfinput_d01_154636_0_mean $DART_DIR/models/wrf/work/wrfinput_d01 + cp ../output/2024051900/wrfinput_d02_154636_0_mean $DART_DIR/models/wrf/work/wrfinput_d02 ./fill_inflation_restart - mkdir ../output/2017042700/Inflation_input - mv input_priorinf_*.nc ../output/2017042700/Inflation_input/ - -Once these files are in the right place, the scripting should take care -of renaming the output from the previous cycle as the input for the next -cycle. - - - - -Step 6: Cycled analysis system ------------------------------- - -While the DART system provides executables to perform individual tasks -necessary for ensemble data assimilation, for large models such as WRF -that are run on a supercomputer queueing system, an additional layer of -scripts is necessary to glue all of the pieces together. A set of -scripts is provided with the tutorial tarball to provide you a starting -point for your own WRF-DART system. You will need to edit these scripts, -perhaps extensively, to run them within your particular computing -environment. If you will run on NSF NCAR's Derecho environment, fewer edits -may be needed, but you should familiarize yourself with `running jobs on -Derecho `__ -if necessary. A single forecast/assimilation cycle of this tutorial can -take up to 10 minutes on Derecho - longer if debug options are enabled or -if there is a wait time during the queue submission. - -In this tutorial, we have previously edited the *param.csh* and other -scripts. Throughout the WRF-DART scripts, there are many options to -adjust cycling frequency, domains, ensemble size, etc., which are -available when adapting this set of scripts for your own research. 
To -become more famililar with this set of scripts and to eventually make -these scripts your own, we advise commenting out all the places the -script submits jobs while debugging, placing an 'exit' in the script at -each job submission step. This way you will be able to understand how -all of the pieces work together. - -However, for this tutorial, we will only show you how the major -components work. The next step in our process is the main *driver.csh* -script, which expects a starting date (YYYYMMDDHH) and the full path of -the resource file as command line arguments. In this example (which uses -csh/tcsh syntax), we are also capturing the run-time output into a file -named *run.out* and the entire command will be running in the -background: + mkdir ../output/2024051900/Inflation_input + mv input_priorinf_*.nc ../output/2024051900/Inflation_input/ + +Please note that the inflation files are manually generated and moved +during the first assimilation time step only. During all subsequent times the +inflation files are automatically generated and moved by the scripting. + + + + +Step 8: Perform the Assimilation (ASSIMILATION MODE) +----------------------------------------------------- + +We are now ready to assimilate observations. The ``driver.sh`` script +accomplishes this through a series of scripts that 1) assimilates +observations using the DART filter, 2) calculates observation space +diagnostics for that assimilation time step, and 3) advances the WRF +ensemble members to the next assimilation time step. It then repeats this +cycle for the remaining assimilation steps. The sequence of scripts that +are run is as follows: + +1. prep_ic.sh (Extracts the DART state from the WRF prior state) +2. assimilate.sh (Executes DART filter and produces obs_seq.final) +3. diagnostic_obs.sh (Calculates observation space diagnostics: analysis_increment.nc, mean_increment.nc) +4. assim_advance.sh +5. new_advance_model.sh (Advances WRF model to next assimilation time) +6.
add_bank_perts.ncl (Adds uncertainty to boundary conditions) + + +For each assimilation cycle there are two instances where a batch job +is submitted to Derecho which uses an mpi run command. The first instance +is during the execution of DART ``filter`` within ``assimilate.sh`` where +``filter`` is executed as follows: + +:: + + mpiexec -n 256 -ppn 128 ./filter || exit 1 + +The second instance is during the advancement of the WRF ensemble to reach +the next assimilation time step. This occurs within the ``assim_advance.sh`` +script as follows: + +:: + + mpiexec -n 4 -ppn 4 ./wrf.exe + +Remember that (similar to Step 4) these batch submission commands are specific to the Derecho +environment and the WRF domain. These will likely need to be modified to work for other +WRF-DART science applications. + + +An important reminder is that ``driver.sh`` is the most complex step of the tutorial. A single +forecast/assimilation cycle of this tutorial can take up to 10 minutes on Derecho +- longer if debug options are enabled or if there is a wait time during +the queue submission. + + +The main ``driver.sh`` script expects a starting date (YYYYMMDDHH) and +the ``param.sh`` file as command line arguments. The run time output is +redirected to a file named ``run.out`` as: :: cd $BASE_DIR/scripts - ./driver.csh 2017042706 param.csh >& run.out & + ./driver.sh 2024051906 param.sh >& run.out & + -*driver.csh* will - check that the input files are present (wrfinput -files, wrfbdy, observation sequence, and DART restart files), - create a -job script to run *filter* in ``$BASE_DIR/rundir``, - monitor that -expected output from *filter* is created, - submit jobs to advance the -ensemble to the next analysis time, - (simultaneously with the ensemble -advance) compute assimilation diagnostics - archive and clean up - and -continue to cycle until the final analysis time has been reached.
+You can monitor the progress of the ``driver.sh`` execution by periodically viewing +the ``run.out`` file. When the ``driver.sh`` is completed and successful the +``run.out`` file should print out: **Reached the final date, Script exiting normally**. +In addition, a successful run will produce ``obs_seq.final``, ``analysis_increment.nc`` and +``mean_increment.nc`` files for each assimilation time step located within +``${OUTPUT_DIR}/2024051906``, and ``${OUTPUT_DIR}/2024051912``. +If the script **does not** complete successfully based on the criteria just described, +and viewing the ``run.out`` file provides inconclusive troubleshooting guidance, you must +view the specific log files for the individual DART scripts located either in ``${RUNDIR}`` or the +``${RUNDIR}/advance_temp${ens}`` folders. These log files include: ``dart_log.out``, ``assimilate_${datea}.0*``, +``assim_advance_${ens}.o*``, and ``add_perts.out``. If there were problems during the WRF simulation +you can view the WRF ``rsl.out.0000`` and ``rsl.error.0000`` files within ``${RUNDIR}/advance_temp${ens}``. -Step 7: Diagnosing the assimilation results ------------------------------------------- +Step 9: Perform the Forecast (FORECAST MODE) +-------------------------------------------- -Once you have successfully completed steps 1-6, it is important to +The next step is to run the WRF model forecast to quantify the impact the +assimilation of observations had on the forecast skill. This is a common +step within the atmospheric DA literature. Here, we reuse the same ``driver.sh`` +script as described in Step 8 except we modify the namelist settings to switch +from assimilate to forecast mode. + +First modify ``input.nml`` within both ``${RUN_DIR}`` and ``${TEMPLATE_DIR}`` such that +the adaptive inflation is turned off by setting ``inf_flavor = 0``.
+In addition, set all observation types to be **evaluated** as shown below: + +:: + + &obs_kind_nml + evaluate_these_obs_types = 'RADIOSONDE_TEMPERATURE', + 'RADIOSONDE_U_WIND_COMPONENT', + 'RADIOSONDE_V_WIND_COMPONENT', + 'RADIOSONDE_SPECIFIC_HUMIDITY', + 'RADIOSONDE_SURFACE_ALTIMETER', + 'ACARS_U_WIND_COMPONENT', + 'ACARS_V_WIND_COMPONENT', + 'ACARS_TEMPERATURE', + 'ACARS_DEWPOINT', + 'SAT_U_WIND_COMPONENT', + 'SAT_V_WIND_COMPONENT', + 'GPSRO_REFRACTIVITY', + 'PROFILER_U_WIND_COMPONENT', + 'PROFILER_V_WIND_COMPONENT', + 'METAR_U_10_METER_WIND', + 'METAR_V_10_METER_WIND', + 'METAR_TEMPERATURE_2_METER', + 'METAR_DEWPOINT_2_METER', + 'METAR_ALTIMETER', + 'MARINE_SFC_U_WIND_COMPONENT', + 'MARINE_SFC_V_WIND_COMPONENT', + 'MARINE_SFC_TEMPERATURE', + 'MARINE_SFC_ALTIMETER', + 'MARINE_SFC_DEWPOINT', + 'LAND_SFC_TEMPERATURE', + 'LAND_SFC_U_WIND_COMPONENT', + 'LAND_SFC_V_WIND_COMPONENT', + 'LAND_SFC_ALTIMETER', + 'LAND_SFC_DEWPOINT', + assimilate_these_obs_types = '' + +Next, turn off the ensemble perturbation within ``add_bank_perts.ncl`` for both +the ``${RUNDIR}`` and ``${BASE_DIR}/scripts`` as follows: + +:: + + ; Shuts off perturbations, only used for forecast mode + ;perturbation scaling: + scale_T = 0.0 + scale_U = 0.0 + scale_V = 0.0 + scale_Q = 0.0 + scale_M = 0.0 + + +Finally, turn off the adaptive inflation within the ``param.sh`` file by setting +``ADAPTIVE_INFLATION = 0``. + + +Modify your ``driver.sh`` to run a forecast until 20240520 by setting ``datefnl = 2024052000``. +Then execute the following command: + +:: + + cd ${BASE_DIR}/scripts + ./driver.sh 2024051918 param.sh >& run.out & + + +To monitor the progress and success of the scripts follow the same guidance as described in Step 8. +Remember all the steps and output files produced during forecast mode are identical to assimilation mode. +The only difference is that the scripting will not update the WRF posterior state. We are performing +an extended forecast (free) simulation. 
+ + + +Step 10: Diagnose the Assimilation and Forecast Results +------------------------------------------------------- + +Once you have successfully completed steps 1-9, it is important to check the quality of the assimilation. In order to do this, DART provides -analysis system diagnostics in both state and observation space. +analysis system diagnostics in both state and observation space. Here we +provide instructions to diagnose performance based on a single assimilation +time step (2024051912). However, be aware that you can perform these +same diagnostics for any assimilation/forecast performed during the tutorial. +We leave that as an exercise to be performed on your own. + +.. Important:: + + If the tutorial is performed successfully your diagnostic plots should look similar + to the figures shown here, but they will not be identical because the + assimilation results are not deterministic. This is primarily because the + perturbations are randomly chosen to generate the ensemble spread. This + influences all subsequent steps in the tutorial and will lead to unique results. As a preliminary check, confirm that the analysis system actually updated -the WRF state. Locate the file in the ``$BASE_DIR/output/*`` directory called -``analysis_increment.nc`` which is the difference of the ensemble mean state +the WRF state. Locate the file in the ``$BASE_DIR/output/2024051912`` directory called +``analysis_increment_d01.nc`` which is the difference of the ensemble mean state between the background (prior) and the analysis (posterior) after running ``filter``. Use a tool, such as **ncview**, to look at this file as follows: :: - cd $BASE_DIR/output/datefnl + cd $BASE_DIR/output/2024051912 module load ncview - ncview analysis_increment.nc + ncview analysis_increment_d01.nc -The ``analysis_increment.nc`` file includes the following atmospheric variables: -``MU, PH, PSFC, QRAIN, QCLOUD, QGRAUP, QICE, QNICE, QSNOW, QVAPOR, THM`` and ``T2``. 
-The example figure below shows the increments for THM (perturbation potential temperature) -only. You can use **ncview** to advance through all 11 atmospheric pressure levels. +The ``analysis_increment_d01.nc`` file includes the following atmospheric variables: +such as ``MU, PH, PSFC, QRAIN, QCLOUD, QGRAUP, QICE, QNICE, QSNOW, QVAPOR, THM, U, V`` and ``T2``. +The example figure below shows the increments for QVAPOR (water vapor mixing ratio) +only. You can use **ncview** to advance through all atmospheric pressure levels, by clicking +on the "bottom_top" button within the ncview gui. You should see spatial patterns that look something like the meteorology of the day. +--------------------------+--------------------------------+ @@ -1024,7 +1195,7 @@ You should see spatial patterns that look something like the meteorology of the For more information on how the increments were calculated, we recommend -(but do not require to complete the tutorial) that you review the +that you review the :doc:`Diagnostics Section <../../../guide/checking-your-assimilation>` of the DART Documentation. There are seven sections within the diagnostics section including 1) Checking your initial assimilation, 2) Computing @@ -1035,82 +1206,16 @@ this says nothing about the quality of the assimilation. For example, how many of the observations were assimilated? Does the posterior state better represent the observed conditions of the atmosphere? These questions can be addressed with the tools described in the remainder of this section. -All of the diagnostic files (**obs_epoch*.nc** and **obs_diag_output.nc**) +All of the diagnostic files (**obs_epoch.nc** and **obs_diag_output.nc**) have already been generated from the tutorial. -(**driver.csh* executes **diagnostics_obs.csh**). Therefore you are ready +(**driver.sh** executes **diagnostics_obs.sh**). Therefore you are ready to start the next sections. 
-Visualizing the observation locations and acceptance rate +Visualize the observation locations and acceptance rate --------------------------------------------------------- -An important assimilation diagnostic is whether observations were accepted -or rejected. Observations can be rejected for many reasons, but the two most common -rejection modes in DART are: 1) **violation of the outlier threshold**, meaning the -observations were too far away from the prior model estimate of the observation or -2) **forward operator failure**, meaning the calculation to generate the expected -observation failed. A full list of rejection criteria are provided -:doc:`here. <../../../guide/dart-quality-control>` Regardless of the reason for -the failure, a successful simulation assimilates the vast majority of observations. -The tools below provide methods to visualize the spatial patterns, statistics and -failure mode for all observations. - -The observation diagnostics use the **obs_epoch*.nc** file as input. This file is -automatically generated by the **obs_diagnostic.csh** script within Step 6 of this -tutorial. - -The **obs_epoch*.nc** file is located in the output directory of each time step. -In some cases there could be multiple obs_epoch*.nc files, but in general, the user -should use the obs_epoch file appended with the largest numeric value as it -contains the most complete set of observations. The diagnostic scripts used here -are included within the DART package, and require a license of Matlab to run. The -commands shown below to run the diagnostics use NSF NCAR's Derecho, but a user could -also run on their local machine. - -First explore the obs_epoch*.nc file and identify the variety of observations included -in the assimilation including aircraft, surface, satelllite and radiosonde types. - - -:: - - ncdump -h $BASEDIR/output/datefnl/obs_epoch_029.nc - - .. - .. 
- RADIOSONDE_U_WIND_COMPONENT - RADIOSONDE_V_WIND_COMPONENT - RADIOSONDE_TEMPERATURE - RADIOSONDE_SPECIFIC_HUMIDITY - ACARS_U_WIND_COMPONENT - ACARS_V_WIND_COMPONENT - ACARS_TEMPERATURE - MARINE_SFC_U_WIND_COMPONENT - MARINE_SFC_V_WIND_COMPONENT - MARINE_SFC_TEMPERATURE - MARINE_SFC_SPECIFIC_HUMIDITY - LAND_SFC_U_WIND_COMPONENT - LAND_SFC_V_WIND_COMPONENT - LAND_SFC_TEMPERATURE - LAND_SFC_SPECIFIC_HUMIDITY - SAT_U_WIND_COMPONENT - SAT_V_WIND_COMPONENT - RADIOSONDE_SURFACE_ALTIMETER - MARINE_SFC_ALTIMETER - LAND_SFC_ALTIMETER - METAR_ALTIMETER - METAR_U_10_METER_WIND - METAR_V_10_METER_WIND - METAR_TEMPERATURE_2_METER - METAR_SPECIFIC_HUMIDITY_2_METER - METAR_DEWPOINT_2_METER - RADIOSONDE_DEWPOINT - LAND_SFC_DEWPOINT - RADIOSONDE_RELATIVE_HUMIDITY - LAND_SFC_RELATIVE_HUMIDITY - .. - .. - -The example below uses the **plot_obs_netcdf.m** script to visulaize +The example below uses the **plot_obs_netcdf.m** script to visualize the observation type: ``RADIOSONDE_TEMPERATURE`` which includes both horizontal and vertical coverage across North America. We recommend to view the script's contents with a text editor, paying special attention to the beginning of the file @@ -1129,9 +1234,9 @@ Within Matlab declare the following variables, then run the script :: - >> fname = '$BASEDIR/output/2017042712/obs_epoch_029.nc'; - >> ObsTypeString = 'RADIOSONDE_TEMPERATURE'; - >> region = [200 330 0 90 -Inf Inf]; + >> fname = '$BASEDIR/output/2024051912/obs_epoch_002.nc'; + >> ObsTypeString = 'RADIOSONDE_TEMPERATURE'; % 'ACARS_U_WIND_COMPONENT' + >> region = [250 270 30 45 -Inf Inf]; >> CopyString = 'NCEP BUFR observation'; >> QCString = 'DART quality control'; >> maxgoodQC = 2; @@ -1139,10 +1244,11 @@ Within Matlab declare the following variables, then run the script >> twoup = 1; % anything > 0 == 'true' >> plotdat = plot_obs_netcdf(fname, ObsTypeString, region, CopyString, QCString, maxgoodQC, verbose, twoup); -Below is an example of the figure produced by **plot_obs_netcdf.m**. 
-Note that the top panel includes both the 3-D location of all possible -``RADIOSONDE_TEMPERATURE`` observations, which are color-coded based upon -the temperature value. The bottom panel, on the other hand, provides only +Below are two examples of the figure produced by **plot_obs_netcdf.m** for +observations of RADIOSONDE_TEMPERATURE (left) and ACARS_U_WIND_COMPONENT (right) +respectively. Note that the top panel includes both the 3-D location of all possible +observations, which are color-coded based upon the temperature or wind value. +The bottom panel, on the other hand, provides only the location of the observations that were rejected by the assimilation. The color code indicates the reason for the rejection based on the :doc:`DART quality control (QC). <../../../guide/dart-quality-control>` @@ -1152,10 +1258,9 @@ Text is included within the figures that give more details regarding the rejected observations (bottom left of figure), and percentage of observations that were rejected (flagged, located within title of figure). - -+-------------------------------------------------------------+ -| |radiosonde_obs| | -+-------------------------------------------------------------+ ++-----------------------------------+------------------------------+ +| |radiosonde_obs1| | |radiosonde_obs2| | ++-----------------------------------+------------------------------+ .. Tip:: The user can manually adjust the appearance of the data by accessing the @@ -1164,19 +1269,25 @@ that were rejected (flagged, located within title of figure). desired orientation. -For the next figure (below) the same steps are taken as described -above, however, the observation type (``ObsTypeString``) is set to -``METAR_TEMPERATURE_2_METER``. Notice in this case the observations -are limited to near the land surface. 
This is because the vertical location -of this observation type was defined to be at the land surface -(VERTISSURFACE), as opposed to the ``RADIOSONDE_TEMPERATURE`` observation -in which the vertical location was defined as pressure (VERTISPRESSURE). The -vertical coordinate system is defined in the ``obs_seq.out`` file and -:ref:`documented here`. - -+-------------------------------------------------------------+ -| |surface_obs| | -+-------------------------------------------------------------+ +The **plot_obs_netcdf.m** also provides information for all the +observations available for assimilation at that time step. You can adjust +the ObsTypeString setting to examine one observation type at a time. + + +:: + + N = 1019 RADIOSONDE_U_WIND_COMPONENT (type 1) tween levels 470.00 and 99000.00 + N = 1019 RADIOSONDE_V_WIND_COMPONENT (type 2) tween levels 470.00 and 99000.00 + N = 751 RADIOSONDE_TEMPERATURE (type 5) tween levels 440.00 and 99000.00 + N = 312 RADIOSONDE_SPECIFIC_HUMIDITY (type 6) tween levels 30000.00 and 99000.00 + N = 1 AIRCRAFT_U_WIND_COMPONENT (type 12) tween levels 21660.00 and 21660.00 + N = 1 AIRCRAFT_V_WIND_COMPONENT (type 13) tween levels 21660.00 and 21660.00 + N = 3440 ACARS_U_WIND_COMPONENT (type 16) tween levels 17870.00 and 99780.00 + N = 3440 ACARS_V_WIND_COMPONENT (type 17) tween levels 17870.00 and 99780.00 + N = 3375 ACARS_TEMPERATURE (type 18) tween levels 17870.00 and 99780.00 + N = 7 RADIOSONDE_SURFACE_ALTIMETER (type 40) tween levels 195.00 and 1095.00 + N = 25 LAND_SFC_ALTIMETER (type 43) tween levels 141.00 and 2299.00 + Next we will demonstrate the use of the **link_obs.m** script which @@ -1192,9 +1303,9 @@ following within Matlab being sure to modify ``fname`` for your case: >> clear all >> close all - >> fname = '$BASEDIR/output/2017042712/obs_epoch_029.nc'; + >> fname = '$BASEDIR/output/2024051912/obs_epoch_002.nc'; >> ObsTypeString = 'RADIOSONDE_TEMPERATURE'; - >> region = [200 330 0 90 -Inf Inf]; + >> region = [250 270 30 
45 -Inf Inf]; >> ObsCopyString = 'NCEP BUFR observation'; >> CopyString = 'prior ensemble mean'; >> QCString = 'DART quality control'; @@ -1215,7 +1326,7 @@ following within Matlab being sure to modify ``fname`` for your case: you can access the brush tool through the menu bar (Tools > Brush). -Another useful application of the **link_obs.m** script is to visually identify +Another useful application of the **link_obs.m** script is to visualize the improvement of the model estimate of the observation through the 1:1 plot. One way to do this is to compare the prior and posterior model estimate of the either the ensemble mean or a single ensemble member. In the example figures below, @@ -1226,13 +1337,13 @@ estimate (left figure) compares less favorably to the observations as compared to the posterior member estimate (right figure). The improved alignment (blue circles closer to 1:1 line) between the posterior estimate and the observations indicates that the DART filter update provided an improved representation of the -observed atmospheric state. +observed atmospheric state. +-------------------------+-------------------------+ | |oneline1| | |oneline2| | +-------------------------+-------------------------+ -So far the example figures have provided primarily qualitative estimates +So far the example figures have provided primarily qualitative estimates of the assimilation performance. The next step demonstrates how to apply more quantitative measures to assess assimilation skill. @@ -1247,18 +1358,20 @@ observation acceptance and other statistics. Here we choose the ensemble ‘total spread’ statistic to plot alongside RMSE, however, you can choose other statistics including 'bias', 'ens_mean' and 'spread'. For a full list of statistics perform the command ``ncdump -v CopyMetaData obs_diag_output.nc``. +The figure below illustrates vertical profile statistics for observations +of RADIOSONDE_TEMPERATURE (left) and ACARS_U_WIND_COMPONENT (right).
:: - >> fname ='$BASEDIR/output/2017042712/obs_diag_output.nc'; - >> copy = 'totalspread'; - >> obsname = 'RADIOSONDE_TEMPERATURE'; + >> fname ='$BASEDIR/output/2024051912/obs_diag_output.nc'; + >> copy = 'totalspread'; + >> obsname = 'RADIOSONDE_TEMPERATURE'; %ACARS_U_WIND_COMPONENT >> plotdat = plot_rmse_xxx_profile(fname,copy,'obsname',obsname) -+-------------------------------------------------------------+ -| |profile1| | -+-------------------------------------------------------------+ ++--------------------------+----------------------------------+ +| |profile1| | |profile2| | ++--------------------------+----------------------------------+ Note in the figure above that the prior RMSE and total spread values (solid black and teal lines) are significantly greater than the posterior @@ -1278,64 +1391,30 @@ quite high (>90%). This high acceptance percentage is typical of a high-quality assimilation and consistent with the strong reduction in RMSE. -The same plot as above except for the observation type: -``RADIOSONE_SPECIFIC_HUMIDITY``. - -+-------------------------------------------------------------+ -| |profile2| | -+-------------------------------------------------------------+ +Remember, a full list of observations rejection criteria are provided +:doc:`here. <../../../guide/dart-quality-control>` Regardless of the reason for +the failure, a successful simulation assimilates the vast majority of observations +as shown in the figure above. -Although the plot_rmse_xxx_profile.m script is valuable for visualizing +Although the **plot_rmse_xxx_profile.m** script is valuable for visualizing vertical profiles of assimilation statistics, it doesn’t capture the temporal evolution. Temporal evolving statistics are valuable because the skill of an assimilation often begins poorly because of biases between the model and observations, which should improve with time. Also the quality of the assimilation may change because of changes in the quality of the observations. 
In these cases the **plot_rmse_xxx_evolution.m** script is used to illustrate temporal changes in -assimilation skill. To generate the figures below the following matlab commands were used: +assimilation skill. -:: +This time evolving diagnostic works best when all the assimilation time +steps are combined into one **obs_diag_output.nc** file, however the +**obs_diag_output.nc** files automatically generated during the tutorial are for +individual assimilation times. We leave it as an exercise on your own to +generate a custom **obs_diag_output.nc** that combines several different assimilation +time steps. Please use the instructions in the next section as a guide. - >> fname = '$BASEDIR/output/2017042712/obs_diag_output.nc'; - >> copy = 'totalspread'; - >> obsname = 'RADIOSONDE_TEMPERATURE'; - >> plotdat = plot_rmse_xxx_evolution(fname,copy,'obsname',obsname,'level',3); - -.. NOTE:: - The figures below only evaluate two different assimilation - cycles (hour 6 and hour 12 on 4/27/17), thus it is difficult to evaluate the - temporal progression of the assimilation statistics. This is given purely as an - example. Real world assimilations generally span for months and years thus - evaluating temporal evolution of statistics is more straightforward. The x-axis was - also manually adjusted in the figure below.
To do this - **plot_rmse_xxx_evolution.m** was edited such that the ``bincenters`` were replaced - with ``datenum`` values when defining ``axlims`` as: - - axlims = [datenum(2017,4,27,2,0,0) datenum(2017,4,27,14,0,0) plotdat.Yrange]; -+-------------------------------------------------------------+ -| |evolution1| | -+-------------------------------------------------------------+ - -The above figure is evaluated at model level 850hPa ('level',3), whereas -the figure below is generated in the same way except is evaluated at -300 hPa ('level',7) using: -plotdat = plot_rmse_xxx_evolution(fname,copy,'obsname',obsname,'level',7) - - -+-------------------------------------------------------------+ -| |evolution2| | -+-------------------------------------------------------------+ - - -.. Important:: - The example diagnostics provided here are only a subset of the diagnostics - available in the DART package. Please see the web-based diagnostic - :doc:`documentation. <../../../guide/matlab-observation-space>` or - `DART LAB and DART Tutorial `__ - for more details. @@ -1344,9 +1423,8 @@ Generating the obs_diag_output.nc and obs_epoch*.nc files manually **[OPTIONAL]* This step is optional because the WRF-DART Tutorial automatically generates the diagnostic files (obs_diag_output.nc and obs_epoch_*.nc). However, these -files were generated with pre-set options (e.g. spatial domain, temporal bin size etc.) -that you may wish to modify. Also, it is uncommon to generate these diagnostics -files automatically for a new assimilation application. Therefore this section +files were generated with pre-set options (e.g. spatial and temporal domains, and bin size etc.) +that you may wish to modify. Therefore this section describes the steps to generate the diagnostic files directly from the DART scripts by using the WRF Tutorial as an example. 
@@ -1358,15 +1436,15 @@ Generating the obs_epoch*.nc file cd $DARTROOT/models/wrf/work -Generate a list of all the **obs_seq.final** files created by the assimilation -step (filter step). This command creates a text list file. +Generate a list of all the **obs_seq.final** files for all steps +in the tutorial. This command creates a text list file. :: - ls /glade/scratch/bmraczka/WRF_DART_Tut4/output/2017*/obs_seq.final > obs_seq_tutorial.txt + ls ${BASE_DIR}/output/2024*/obs_seq.final > obs_seq_tutorial.txt -The DART exectuable **obs_seq_to_netcdf** is used to generate the obs_epoch -type files. Modify the ``obs_seq_to_netcdf`` and ``schedule`` namelist settings +The DART executable ``obs_seq_to_netcdf`` is used to generate the obs_epoch +type files. Modify the ``obs_seq_to_netcdf`` and ``schedule`` namelist settings (using a text editor like `vi`) with the **input.nml** file to specify the spatial domain and temporal binning. The values below are intended to include the entire time period of the assimilation. @@ -1400,6 +1478,8 @@ Finally, run the exectuable: ./obs_seq_to_netcdf +This should generate obs_epoch*.nc files. + Generating the obs_diag_output.nc file ----------------------------------------- @@ -1419,8 +1499,8 @@ file as described in the previous section. &obs_diag_nml obs_sequence_name = '', obs_sequence_list = 'obs_seq_tutorial.txt', - first_bin_center = 2017, 4, 27, 0, 0, 0 , - last_bin_center = 2017, 4, 27, 12, 0, 0 , + first_bin_center = 2024, 5, 19, 0, 0, 0 , + last_bin_center = 2024, 5, 19, 18, 0, 0 , bin_separation = 0, 0, 0, 6, 0, 0 , bin_width = 0, 0, 0, 6, 0, 0 , time_to_skip = 0, 0, 0, 0, 0, 0 , @@ -1441,12 +1521,10 @@ Finally, run the exectuable: ./obs_diag +This should produce an obs_diag_output.nc file. - -If you encounter difficulties setting up, running, or evaluating the -system performance, please consider using the `GitHub -Issue `__ facility or feel free to -contact us at dart(at)ucar(dot)edu. 
+If you encounter difficulties setting up, running, or evaluating your +system performance, please consider contacting DART support at dart(at)ucar(dot)edu. Additional materials from previous in-person tutorials ------------------------------------------------------ @@ -1474,6 +1552,18 @@ More Resources - :ref:`Preparing MATLAB` to use with DART. - `WRF model users page `__ +.. |radar1| image:: ../../../guide/images/WRF_tutorial_radar1.png + :height: 200px + :width: 100% + +.. |radar2| image:: ../../../guide/images/WRF_tutorial_radar2.png + :height: 200px + :width: 100% + +.. |wrf_domain| image:: ../../../guide/images/WRF_tutorial_domain.png + :height: 200px + :width: 100% + .. |ncview1| image:: ../../../guide/images/WRF_tutorial_ncview1.png :height: 300px :width: 100% @@ -1482,11 +1572,11 @@ More Resources :height: 300px :width: 100% -.. |radiosonde_obs| image:: ../../../guide/images/WRF_tutorial_radiosonde_obs.png +.. |radiosonde_obs1| image:: ../../../guide/images/WRF_tutorial_radiosonde_obs1.png :height: 300px :width: 100% -.. |surface_obs| image:: ../../../guide/images/WRF_tutorial_surface_obs.png +.. 
|radiosonde_obs2| image:: ../../../guide/images/WRF_tutorial_radiosonde_obs2.png :height: 300px :width: 100% diff --git a/models/wrf/tutorial/template/namelist.input.meso b/models/wrf/tutorial/template/namelist.input.meso index 841bd16635..7ff03e596d 100644 --- a/models/wrf/tutorial/template/namelist.input.meso +++ b/models/wrf/tutorial/template/namelist.input.meso @@ -111,6 +111,7 @@ epssm = 0.1 use_theta_m = 0 zadvect_implicit = 1 + hybrid_opt = 0 / &bdy_control diff --git a/models/wrf/tutorial/template_nest/input.nml.template b/models/wrf/tutorial/template_nest/input.nml.template new file mode 100644 index 0000000000..da29d87d57 --- /dev/null +++ b/models/wrf/tutorial/template_nest/input.nml.template @@ -0,0 +1,426 @@ +&probit_transform_nml + / + +&algorithm_info_nml + qceff_table_filename = '' + / + +&filter_nml + async = 2, + adv_ens_command = "./advance_model.csh", + ens_size = 20, + obs_sequence_in_name = "obs_seq.out", + obs_sequence_out_name = "obs_seq.final", + input_state_file_list = "input_list_d01.txt","input_list_d02.txt" + output_state_file_list = "output_list_d01.txt","output_list_d02.txt" + init_time_days = -1, + init_time_seconds = -1, + first_obs_days = -1, + first_obs_seconds = -1, + last_obs_days = -1, + last_obs_seconds = -1, + num_output_state_members = 20, + num_output_obs_members = 20, + output_interval = 1, + num_groups = 1, + output_forward_op_errors = .false., + output_timestamps = .false., + trace_execution = .false., + + stages_to_write = 'preassim', 'postassim', 'output' + output_members = .true. + output_mean = .true. + output_sd = .true. + write_all_stages_at_end = .false. 
+ + inf_flavor = 2, 0, + inf_initial_from_restart = .true., .false., + inf_sd_initial_from_restart = .true., .false., + inf_initial = 1.0, 1.12, + inf_lower_bound = 1.0, 1.0, + inf_upper_bound = 10000.0, 10000.0, + inf_damping = 0.9, 1.0, + inf_sd_initial = 0.6, 0.50, + inf_sd_lower_bound = 0.6, 0.10, + inf_sd_max_change = 1.05, 1.05, + / + +&quality_control_nml + input_qc_threshold = 4.0, + outlier_threshold = 3.0, + enable_special_outlier_code = .false. + / + +&ensemble_manager_nml + layout = 2, + tasks_per_node = 30 + / + +&fill_inflation_restart_nml + write_prior_inf = .true. + prior_inf_mean = 1.00 + prior_inf_sd = 0.6 + + write_post_inf = .false. + post_inf_mean = 1.00 + post_inf_sd = 0.6 + + input_state_files = 'wrfinput_d01','wrfinput_d02' + single_file = .false. + verbose = .false. + / + + +&assim_tools_nml + cutoff = 0.10, + sort_obs_inc = .false., + spread_restoration = .false., + sampling_error_correction = .true., + print_every_nth_obs = 1000, + adaptive_localization_threshold = 2000, + output_localization_diagnostics = .false., + localization_diagnostics_file = 'localization_diagnostics', + convert_all_state_verticals_first = .true. + convert_all_obs_verticals_first = .true. + / + +&cov_cutoff_nml + select_localization = 1 + / + +&closest_member_tool_nml + input_file_name = 'filter_ic_new', + output_file_name = 'closest_restart', + ens_size = 50, + single_restart_file_in = .false., + difference_method = 4, + / + +&assim_model_nml + write_binary_restart_files = .true. + / + +&location_nml + horiz_dist_only = .false., + vert_normalization_pressure = 700000.0, + vert_normalization_height = 80000.0, + vert_normalization_scale_height = 7.5, + vert_normalization_level = 2666.7, + approximate_distance = .false., + nlon = 221, + nlat = 122, + output_box_info = .false. 
+ / + +&model_nml + default_state_variables = .false., + wrf_state_variables = 'U','QTY_U_WIND_COMPONENT','TYPE_U','UPDATE','999', + 'V','QTY_V_WIND_COMPONENT','TYPE_V','UPDATE','999', + 'W','QTY_VERTICAL_VELOCITY','TYPE_W','UPDATE','999', + 'THM','QTY_POTENTIAL_TEMPERATURE','TYPE_T','UPDATE','999', + 'PH','QTY_GEOPOTENTIAL_HEIGHT','TYPE_GZ','UPDATE','999', + 'MU','QTY_PRESSURE','TYPE_MU','UPDATE','999', + 'QVAPOR','QTY_VAPOR_MIXING_RATIO','TYPE_QV','UPDATE','999', + 'QCLOUD','QTY_CLOUD_LIQUID_WATER','TYPE_QC','UPDATE','999', + 'QRAIN','QTY_RAINWATER_MIXING_RATIO','TYPE_QR','UPDATE','999', + 'QSNOW','QTY_SNOW_MIXING_RATIO','TYPE_QS','UPDATE','999', + 'QICE','QTY_CLOUD_ICE','TYPE_QI','UPDATE','999', + 'QGRAUP','QTY_GRAUPEL_MIXING_RATIO','TYPE_QG','UPDATE','999', + 'QNICE','QTY_ICE_NUMBER_CONCENTRATION','TYPE_QNICE','UPDATE','999', + 'QNRAIN','QTY_RAIN_NUMBER_CONCENTR','TYPE_QNRAIN','UPDATE','999', + 'U10','QTY_U_WIND_COMPONENT','TYPE_U10','UPDATE','999', + 'V10','QTY_V_WIND_COMPONENT','TYPE_V10','UPDATE','999', + 'T2','QTY_TEMPERATURE','TYPE_T2','UPDATE','999', + 'Q2','QTY_SPECIFIC_HUMIDITY','TYPE_Q2','UPDATE','999', + 'PSFC','QTY_PRESSURE','TYPE_PS','UPDATE','999', + + wrf_state_bounds = 'QVAPOR','0.0','NULL','CLAMP', + 'QCLOUD','0.0','NULL','CLAMP', + 'QRAIN','0.0','NULL','CLAMP', + 'QSNOW','0.0','NULL','CLAMP', + 'QICE','0.0','NULL','CLAMP', + 'QGRAUP','0.0','NULL','CLAMP', + 'QNICE','0.0','NULL','CLAMP', + 'QNRAIN','0.0','NULL','CLAMP', + + num_domains = 2, + calendar_type = 3, + assimilation_period_seconds = 21600, + vert_localization_coord = 4, + center_search_half_length = 400000.0, + circulation_pres_level = 80000.0, + circulation_radius = 72000.0, + center_spline_grid_scale = 4, + / + +&utilities_nml + TERMLEVEL = 1, + nmlfilename = 'dart_log.nml', + logfilename = 'dart_log.out', + module_details = .false., + print_debug = .true. + / + +&mpi_utilities_nml + reverse_task_layout = .false. 
+ / + +®_factor_nml + select_regression = 1, + input_reg_file = "time_mean_reg", + save_reg_diagnostics = .false., + reg_diagnostics_file = 'reg_diagnostics' + / + +&obs_sequence_nml + write_binary_obs_sequence = .false. + / + +&state_vector_io_nml + single_precision_output = .true., + / + +&preprocess_nml + overwrite_output = .true. + input_obs_qty_mod_file = '../../../assimilation_code/modules/observations/DEFAULT_obs_kind_mod.F90' + output_obs_qty_mod_file = '../../../assimilation_code/modules/observations/obs_kind_mod.f90' + input_obs_def_mod_file = '../../../observations/forward_operators/DEFAULT_obs_def_mod.F90' + output_obs_def_mod_file = '../../../observations/forward_operators/obs_def_mod.f90' + quantity_files = '../../../assimilation_code/modules/observations/atmosphere_quantities_mod.f90' + obs_type_files = '../../../observations/forward_operators/obs_def_reanalysis_bufr_mod.f90', + '../../../observations/forward_operators/obs_def_altimeter_mod.f90', + '../../../observations/forward_operators/obs_def_radar_mod.f90', + '../../../observations/forward_operators/obs_def_metar_mod.f90', + '../../../observations/forward_operators/obs_def_dew_point_mod.f90', + '../../../observations/forward_operators/obs_def_rel_humidity_mod.f90', + '../../../observations/forward_operators/obs_def_gps_mod.f90', + '../../../observations/forward_operators/obs_def_gts_mod.f90', + '../../../observations/forward_operators/obs_def_QuikSCAT_mod.f90', + '../../../observations/forward_operators/obs_def_vortex_mod.f90' + / + + +&obs_kind_nml + assimilate_these_obs_types = 'RADIOSONDE_TEMPERATURE', + 'RADIOSONDE_U_WIND_COMPONENT', + 'RADIOSONDE_V_WIND_COMPONENT', + 'RADIOSONDE_SPECIFIC_HUMIDITY', + 'RADIOSONDE_SURFACE_ALTIMETER', + 'ACARS_U_WIND_COMPONENT', + 'ACARS_V_WIND_COMPONENT', + 'ACARS_TEMPERATURE', + 'ACARS_DEWPOINT', + 'SAT_U_WIND_COMPONENT', + 'SAT_V_WIND_COMPONENT', + 'GPSRO_REFRACTIVITY', + 'PROFILER_U_WIND_COMPONENT', + 'PROFILER_V_WIND_COMPONENT', + 'METAR_U_10_METER_WIND', 
+ 'METAR_V_10_METER_WIND', + 'METAR_TEMPERATURE_2_METER', + 'METAR_DEWPOINT_2_METER', + 'METAR_ALTIMETER', + 'MARINE_SFC_U_WIND_COMPONENT', + 'MARINE_SFC_V_WIND_COMPONENT', + 'MARINE_SFC_TEMPERATURE', + 'MARINE_SFC_ALTIMETER', + 'MARINE_SFC_DEWPOINT', + evaluate_these_obs_types = 'LAND_SFC_TEMPERATURE', + 'LAND_SFC_U_WIND_COMPONENT', + 'LAND_SFC_V_WIND_COMPONENT', + 'LAND_SFC_ALTIMETER', + 'LAND_SFC_DEWPOINT', + / + +&obs_diag_nml + obs_sequence_name = 'obs_seq.out', + obs_sequence_list = '', + first_bin_center = 2011, 2, 3, 6, 0, 0 , + last_bin_center = 2011, 2, 3, 12, 0, 0 , + bin_separation = 0, 0, 0, 6, 0, 0 , + bin_width = 0, 0, 0, 6, 0, 0 , + time_to_skip = 0, 0, 0, 0, 0, 0 , + max_num_bins = 1000, + Nregions = 1, + lonlim1 = 0.0, 246.0, 255.4, 330.1, + lonlim2 = 360.0, 265.0, 268.5, 334.6, + latlim1 = 10.0, 30.0, 30.7, 21.3, + latlim2 = 65.0, 46.0, 40.6, 23.4, + reg_names = 'Full Domain','central-plains','southern-plains' + print_mismatched_locs = .false., + verbose = .true. + / + +&ncepobs_nml + year = 2010, + month = 06, + day = 00, + tot_days = 1, + max_num = 1000000, + ObsBase = 'temp_obs.', + select_obs = 0, + ADPUPA = .false., + AIRCAR = .false., + AIRCFT = .false., + SATEMP = .false., + SFCSHP = .true., + ADPSFC = .false., + SATWND = .false., + obs_U = .true., + obs_V = .true., + obs_T = .true., + obs_PS = .true., + obs_QV = .true., + daily_file = .true., + obs_time = .false., + lat1 = 10.00, + lat2 = 60.00, + lon1 = 210.0, + lon2 = 300.0 + / + +&prep_bufr_nml + obs_window = -1., + obs_window_upa = 1.0, + obs_window_air = 1.0, + obs_window_cw = 1.0, + obs_window_sfc = 0.8, + otype_use = 180.0, 182.0, 183.0, 194.0, 280.0, 282.0, 284.0, 294.0 + qctype_use = 0, 1, 2, 3, 4, 9, 15 + / + +&obs_def_gps_nml + / + +&convert_gpsro_bufr_nml + gpsro_bufr_file = 'nam.gpsro.bufr', + gpsro_bufr_filelist = '', + gpsro_out_file = 'obs_seq.gpsro', + gpsro_aux_file = 'convinfo.txt', + ray_htop = 20000.0, + ray_hbot = 3000.0 + overwrite_obs_error = .false., + 
convert_to_geopotential_height = .true., + obs_window_hr = 1.5, + debug = .false. + / + +&convert_cosmic_gps_nml + gpsro_netcdf_file = '', + gpsro_netcdf_filelist = 'flist', + gpsro_out_file = 'obs_seq.gpsro', + local_operator = .true., + obs_levels = 0.22, 0.55, 1.1, 1.8, 2.7, 3.7, 4.9, + 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, + ray_ds = 5000.0, + ray_htop = 13000.1, + / + +&wrf_obs_preproc_nml + obs_boundary = 5.0, + increase_bdy_error = .true., + maxobsfac = 2.5, + obsdistbdy = 15.0, + sfc_elevation_check = .true., + sfc_elevation_tol = 300.0, + obs_pressure_top = 10000.0, + obs_height_top = 20000.0, + include_sig_data = .true., + tc_sonde_radii = -1.0, + superob_aircraft = .true., + aircraft_horiz_int = 60.0, + aircraft_pres_int = 2500.0, + superob_sat_winds = .true., + sat_wind_horiz_int = 90.0, + sat_wind_pres_int = 2500.0, + overwrite_ncep_sfc_qc = .false., + overwrite_obs_time = .true., + overwrite_ncep_satwnd_qc = .true., + / + +&obs_sequence_tool_nml + filename_seq = 'obs_seq.out', + filename_seq_list = 'obs_list', + filename_out = 'obs_seq.final', + gregorian_cal = .true., + first_obs_days = -1, + first_obs_seconds = -1, + last_obs_days = -1, + last_obs_seconds = -1, + edit_copies = .true., + min_lat = -90.0 + max_lat = 90.0 + min_lon = 0.0 + max_lon = 360.0 + new_copy_index = 1, 2, 3, 4, 5, + obs_types = '' + keep_types = .true., + synonymous_copy_list = '', + synonymous_qc_list = '', + / + +&wrf_dart_to_fields_nml + include_slp = .true., + include_wind_components = .true., + include_height_on_pres = .true., + include_temperature = .true., + include_rel_humidity = .true., + include_surface_fields = .false., + include_sat_ir_temp = .false., + pres_levels = 70000., + / + + +&schedule_nml + calendar = 'Gregorian', + first_bin_start = _FBS_YY_, _FBS_MM_, _FBS_DD_, _FBS_HH_, 0, 0, + first_bin_end = _FBE_YY_, _FBE_MM_, _FBE_DD_, _FBE_HH_, 0, 0, + last_bin_end = _LBE_YY_, _LBE_MM_, _LBE_DD_, _LBE_HH_, 0, 0, + bin_interval_days = 0, + bin_interval_seconds 
= 21600, + max_num_bins = 1000, + print_table = .true. + / + +&obs_seq_to_netcdf_nml + obs_sequence_name = 'obs_seq.final' + obs_sequence_list = '', + lonlim1 = 160. + lonlim2 = 40. + latlim1 = 10. + latlim2 = 65. + / + +&replace_wrf_fields_nml + debug = .false., + fail_on_missing_field = .false., + fieldnames = "SNOWC", + "ALBBCK", + "TMN", + "SEAICE", + "SST", + "SNOWH", + "SNOW", + fieldlist_file = '', + / + +&obs_def_radar_mod_nml + apply_ref_limit_to_obs = .false., + reflectivity_limit_obs = -10.0, + lowest_reflectivity_obs = -10.0, + apply_ref_limit_to_fwd_op = .false., + reflectivity_limit_fwd_op = -10.0, + lowest_reflectivity_fwd_op = -10.0, + max_radial_vel_obs = 1000000, + allow_wet_graupel = .false., + microphysics_type = 3 , + allow_dbztowt_conv = .false., + dielectric_factor = 0.224, + n0_rain = 8.0e6, + n0_graupel = 4.0e6, + n0_snow = 3.0e6, + rho_rain = 1000.0, + rho_graupel = 400.0, + rho_snow = 100.0 + / + diff --git a/models/wrf/tutorial/template_nest/namelist.input.3dvar b/models/wrf/tutorial/template_nest/namelist.input.3dvar new file mode 100644 index 0000000000..ecde71710a --- /dev/null +++ b/models/wrf/tutorial/template_nest/namelist.input.3dvar @@ -0,0 +1,255 @@ + &time_control + run_days = 0, + run_hours = 0, + run_minutes = 0, + run_seconds = 0, + start_year = 2019, + start_month = 10, + start_day = 01, + start_hour = 00, + start_minute = 00, + start_second = 00, + end_year = 2019, + end_month = 10, + end_day = 01, + end_hour = 06, + end_minute = 00, + end_second = 00, + interval_seconds = 21600 + input_from_file = .true., + fine_input_stream = 0, + history_interval = 60, + frames_per_outfile = 1, + restart = .false., + restart_interval = 2881, + io_form_history = 2, + io_form_restart = 102, + io_form_input = 2, + io_form_boundary = 2, + io_form_auxhist2 = 2, + debug_level = 0 + diag_print = 0 +/ + &domains + time_step = 60, + time_step_fract_num = 0, + time_step_fract_den = 1, + max_dom = 1, + e_we = 60, + e_sn = 50, + e_vert = 51, + 
p_top_requested = 1500 + num_metgrid_levels = 34, + num_metgrid_soil_levels = 4, + dx = 25000, + dy = 25000, + grid_id = 1, + parent_id = 1, + i_parent_start = 1, + j_parent_start = 1, + parent_grid_ratio = 1, + parent_time_step_ratio = 1, + feedback = 0, + smooth_option = 0 + lagrange_order = 2 + interp_type = 2 + interp_theta = .false. + hypsometric_opt = 2 + extrap_type = 2 + t_extrap_type = 2 + use_surface = .true. + use_levels_below_ground = .true. + lowest_lev_from_sfc = .false., + force_sfc_in_vinterp = 1 + zap_close_levels = 500 + eta_levels = 1.0000, 0.9980, 0.9940, 0.9870, 0.9750, 0.9590, + 0.9390, 0.9160, 0.8920, 0.8650, 0.8350, 0.8020, 0.7660, + 0.7270, 0.6850, 0.6400, 0.5920, 0.5420, 0.4970, 0.4565, + 0.4205, 0.3877, 0.3582, 0.3317, 0.3078, 0.2863, 0.2670, + 0.2496, 0.2329, 0.2188, 0.2047, 0.1906, 0.1765, 0.1624, + 0.1483, 0.1342, 0.1201, 0.1060, 0.0919, 0.0778, 0.0657, + 0.0568, 0.0486, 0.0409, 0.0337, 0.0271, 0.0209, 0.0151, + 0.0097, 0.0047, 0.0000 + / + + + + &physics + mp_physics = 8, + ra_lw_physics = 4, + ra_sw_physics = 4, + radt = 10, + sf_sfclay_physics = 2, + sf_surface_physics = 2, + bl_pbl_physics = 2, + bldt = 0, + cu_physics = 14, + cudt = 0, + mp_zero_out = 2, + mp_zero_out_thresh = 1.e-10, + isfflx = 1, + ifsnow = 0, + icloud = 1, + surface_input_source = 1, + num_land_cat = 20, + num_soil_layers = 4, + o3input = 2, + aer_opt = 1, + sst_skin = 1, + / + + &dynamics + w_damping = 1, + diff_opt = 1, + km_opt = 4, + damp_opt = 3, + zdamp = 5000., + dampcoef = 0.2, + non_hydrostatic = .true., + moist_adv_opt = 2, + scalar_adv_opt = 2, + diff_6th_opt = 2, + diff_6th_factor = 0.25, + epssm = 0.1, + use_theta_m = 0 + zadvect_implicit = 1 + hybrid_opt = 0 + / + + &bdy_control + spec_bdy_width = 5, + spec_zone = 1, + relax_zone = 4, + specified = .true., + nested = .false., + / + + &namelist_quilt + nio_tasks_per_group = 2, + nio_groups = 1, + / + + &wrfvar1 + check_max_iv_print = .false., + write_increments = .false., + / + + &wrfvar2 + / + + 
&wrfvar3 + / + &wrfvar4 + use_synopobs = .false., + use_shipsobs = .false., + use_metarobs = .false., + use_soundobs = .false., + use_pilotobs = .false., + use_airepobs = .false., + use_geoamvobs = .false., + use_polaramvobs = .false., + use_bogusobs = .false., + use_buoyobs = .false., + use_profilerobs = .false., + use_satemobs = .false., + use_gpspwobs = .false., + use_gpsrefobs = .false., + use_qscatobs = .false., + use_radarobs = .false., + use_radar_rv = .false., + use_radar_rf = .false., + use_airsretobs = .false., + / + + &wrfvar5 + check_max_iv = .false., + put_rand_seed = .true., +/ + + &wrfvar6 + max_ext_its = 1 + ntmax = 200, + eps = 0.01, + / + + as1 = PERT_SCALING, HORIZ_SCALE, VERT_SCALE, + as2 = PERT_SCALING, HORIZ_SCALE, VERT_SCALE, + as3 = PERT_SCALING, HORIZ_SCALE, VERT_SCALE, + as4 = PERT_SCALING, HORIZ_SCALE, VERT_SCALE, + as5 = PERT_SCALING, HORIZ_SCALE, VERT_SCALE, + + + + &wrfvar7 + cv_options = 3, + as1 = 0.001, 0.5, 0.5, + as2 = 0.001, 0.5, 0.5, + as3 = 0.1, 0.7, 0.7, + as4 = 0.01, 0.5, 0.5, + as5 = 0.5, 0.5, 0.5, + rf_passes = 6, + var_scaling1 = 1.0, + var_scaling2 = 1.0, + var_scaling3 = 1.0, + var_scaling4 = 1.0, + var_scaling5 = 1.0, + len_scaling1 = 1.0, + len_scaling2 = 1.0, + len_scaling3 = 1.0, + len_scaling4 = 1.0, + len_scaling5 = 1.0, + je_factor = 1.0, + / + + &wrfvar8 + / + + &wrfvar9 + trace_use = .false., + trace_memory = .false., + trace_all_pes = .false., + trace_csv = .false., + use_html = .false., + / + &wrfvar10 + / + + &wrfvar11 + cv_options_hum = 1, + check_rh = 1, + set_omb_rand_fac = 1.0, + seed_array1 = 1, + seed_array2 = 1, + / + + &wrfvar12 + / + + &wrfvar13 + vert_corr = 2, + vertical_ip = 0, + vert_evalue = 1, + max_vert_var1 = 99.0, + max_vert_var2 = 99.0, + max_vert_var3 = 99.0, + max_vert_var4 = 99.0, + max_vert_var5 = 0.0, + / + + &wrfvar14 + / + + &wrfvar15 + num_pseudo = 0, + / + + &wrfvar16 + / + + &wrfvar17 + analysis_type = 'RANDOMCV', + / + + &wrfvar18 + analysis_date = 'yyyy-mm-dd_hh:mm:ss.0000', + / 
diff --git a/models/wrf/tutorial/template_nest/namelist.input.meso b/models/wrf/tutorial/template_nest/namelist.input.meso new file mode 100644 index 0000000000..c28cb02788 --- /dev/null +++ b/models/wrf/tutorial/template_nest/namelist.input.meso @@ -0,0 +1,250 @@ + &time_control + run_days = 0, + run_hours = 0, + run_minutes = 0, + run_seconds = 0, + start_year = 2019, 2019 + start_month = 10, 10 + start_day = 01, 01 + start_hour = 00, 00 + start_minute = 00, 00 + start_second = 00, 00 + end_year = 2019, 2019 + end_month = 10, 10 + end_day = 01, 01 + end_hour = 06, 06 + end_minute = 00, 00 + end_second = 00, 00 + interval_seconds = 21600 + input_from_file = .true.,.true., + fine_input_stream = 0, 0 + history_interval = 60, 60 + frames_per_outfile = 1, 1 + restart = .false., + restart_interval = 2881, + io_form_history = 2, + io_form_restart = 102, + io_form_input = 2, + io_form_boundary = 2, + io_form_auxhist2 = 2, + io_form_auxinput2 = 2, + debug_level = 0 + diag_print = 0 + / + + &domains + time_step = 60, + time_step_fract_num = 0, + time_step_fract_den = 1, + max_dom = 2, + e_we = 60, 31 + e_sn = 50, 21 + e_vert = 51, 51 + p_top_requested = 1500, + num_metgrid_levels = 34, + num_metgrid_soil_levels = 4, + dx = 25000, + dy = 25000, + grid_id = 1, 2 + parent_id = 1, 1 + i_parent_start = 1, 30 + j_parent_start = 1, 25 + parent_grid_ratio = 1, 2 + parent_time_step_ratio = 1, 2 + feedback = 0, + smooth_option = 0 + lagrange_order = 2 + interp_type = 2 + interp_theta = .false. + hypsometric_opt = 2 + extrap_type = 2 + t_extrap_type = 2 + use_surface = .true. + use_levels_below_ground = .true. 
+ lowest_lev_from_sfc = .false., + force_sfc_in_vinterp = 1 + zap_close_levels = 500 + eta_levels = 1.0000, 0.9980, 0.9940, 0.9870, 0.9750, 0.9590, + 0.9390, 0.9160, 0.8920, 0.8650, 0.8350, 0.8020, 0.7660, + 0.7270, 0.6850, 0.6400, 0.5920, 0.5420, 0.4970, 0.4565, + 0.4205, 0.3877, 0.3582, 0.3317, 0.3078, 0.2863, 0.2670, + 0.2496, 0.2329, 0.2188, 0.2047, 0.1906, 0.1765, 0.1624, + 0.1483, 0.1342, 0.1201, 0.1060, 0.0919, 0.0778, 0.0657, + 0.0568, 0.0486, 0.0409, 0.0337, 0.0271, 0.0209, 0.0151, + 0.0097, 0.0047, 0.0000 + / + + + + &physics + mp_physics = 8, 8 + ra_lw_physics = 4, 4 + ra_sw_physics = 4, 4 + radt = 10, 10 + sf_sfclay_physics = 2, 2 + sf_surface_physics = 2, 2 + bl_pbl_physics = 2, 2 + bldt = 0, 0 + cu_physics = 14, 14 + cudt = 0, 0 + mp_zero_out = 2, + mp_zero_out_thresh = 1.e-10, + isfflx = 1, + ifsnow = 0, + icloud = 1, + surface_input_source = 1, + num_land_cat = 20, + num_soil_layers = 4, + o3input = 2, + aer_opt = 1, + sst_skin = 1, + / + + &dynamics + w_damping = 1, + diff_opt = 1, 1 + km_opt = 4, 4 + damp_opt = 3, + zdamp = 5000., 5000., + dampcoef = 0.2, 0.01, + non_hydrostatic = .true., .true., + moist_adv_opt = 2, 2, + scalar_adv_opt = 2, 2, + diff_6th_opt = 2, 2, + diff_6th_factor = 0.25, 0.12, + epssm = 0.1, 0.1 + use_theta_m = 0 + zadvect_implicit = 1 + hybrid_opt = 0 + / + + &bdy_control + spec_bdy_width = 5, + spec_zone = 1, + relax_zone = 4, + specified = .true., .false., + nested = .false.,.true., + / + + &namelist_quilt + nio_tasks_per_group = 2, + nio_groups = 1, + / + + &wrfvar1 + check_max_iv_print = .false., + write_increments = .false., + / + + &wrfvar2 + / + + &wrfvar3 + / + + &wrfvar4 + use_synopobs = .false., + use_shipsobs = .false., + use_metarobs = .false., + use_soundobs = .false., + use_pilotobs = .false., + use_airepobs = .false., + use_geoamvobs = .false., + use_polaramvobs = .false., + use_bogusobs = .false., + use_buoyobs = .false., + use_profilerobs = .false., + use_satemobs = .false., + use_gpspwobs = .false., + 
use_gpsrefobs = .false., + use_qscatobs = .false., + use_radarobs = .false., + use_radar_rv = .false., + use_radar_rf = .false., + use_airsretobs = .false., + / + + &wrfvar5 + check_max_iv = .false., + put_rand_seed = .true., +/ + + &wrfvar6 + max_ext_its = 1 + ntmax = 200, + eps = 0.01, + / + + &wrfvar7 + cv_options = 3, + as1 = PERT_SCALING, 2.0, 1.5, + as2 = PERT_SCALING, 2.0, 1.5, + as3 = PERT_SCALING, 2.0, 1.5, + as4 = PERT_SCALING, 2.0, 1.5, + as5 = PERT_SCALING, 2.0, 1.5, + rf_passes = 6, + var_scaling1 = 1.0, + var_scaling2 = 1.0, + var_scaling3 = 1.0, + var_scaling4 = 1.0, + var_scaling5 = 1.0, + len_scaling1 = 1.0, + len_scaling2 = 1.0, + len_scaling3 = 1.0, + len_scaling4 = 1.0, + len_scaling5 = 1.0, + je_factor = 1.0, + / + + &wrfvar8 + / + + &wrfvar9 + trace_use = .false., + trace_memory = .false., + trace_all_pes = .false., + trace_csv = .false., + use_html = .false., + / + &wrfvar10 + / + + &wrfvar11 + cv_options_hum = 1, + check_rh = 1, + set_omb_rand_fac = 1.0, + seed_array1 = 1, + seed_array2 = 1, + / + + &wrfvar12 + / + + &wrfvar13 + vert_corr = 2, + vertical_ip = 0, + vert_evalue = 1, + max_vert_var1 = 99.0, + max_vert_var2 = 99.0, + max_vert_var3 = 99.0, + max_vert_var4 = 99.0, + max_vert_var5 = 0.0, + / + + &wrfvar14 + / + + &wrfvar15 + num_pseudo = 0, + / + + &wrfvar16 + / + + &wrfvar17 + analysis_type = 'RANDOMCV', + / + + &wrfvar18 + analysis_date = 'yyyy-mm-dd_hh:mm:ss.0000', + / diff --git a/models/wrf/tutorial/template_nest/namelist.wps.template b/models/wrf/tutorial/template_nest/namelist.wps.template new file mode 100644 index 0000000000..4197a39aef --- /dev/null +++ b/models/wrf/tutorial/template_nest/namelist.wps.template @@ -0,0 +1,53 @@ +&share + wrf_core = 'ARW', + max_dom = 2, + start_date = 'yyyy-mm-dd_hh:mm:ss','yyyy-mm-dd_hh:mm:ss', + end_date = 'yyyy-mm-dd_hh:mm:ss','yyyy-mm-dd_hh:mm:ss', + interval_seconds = 21600 + io_form_geogrid = 2, + debug_level = 0 + active_grid = .true., .true., +/ + +&geogrid + parent_id = 0, 1, + 
parent_grid_ratio = 1, 2, + i_parent_start = 1, 30, + j_parent_start = 1, 25, + e_we = 60, 31, + e_sn = 50, 21, + geog_data_res = 'modis_30s+30s','modis_30s+30s' + dx = 25000, + dy = 25000, + map_proj = 'lambert', + ref_lat = 37.0, + ref_lon = -100.0, + truelat1 = 32.0, + truelat2 = 42.0, + stand_lon = -100.0, + geog_data_path = '/glade/u/home/wrfhelp/WPS_GEOG/' +/ + +&ungrib + out_format = 'WPS', + prefix = 'FILE', +/ + +&metgrid + fg_name = './FILE' + io_form_metgrid = 2, +/ + +&mod_levs + press_pa = 201300 , 200100 , 100000 , + 95000 , 90000 , + 85000 , 80000 , + 75000 , 70000 , + 65000 , 60000 , + 55000 , 50000 , + 45000 , 40000 , + 35000 , 30000 , + 25000 , 20000 , + 15000 , 10000 , + 5000 , 1000 +/