diff --git a/.gitignore b/.gitignore index 861346a494..f5e33f4f61 100644 --- a/.gitignore +++ b/.gitignore @@ -200,6 +200,7 @@ ush/month_name.sh ush/imsfv3_scf2ioda.py ush/atparse.bash ush/run_bufr2ioda.py +ush/bufr2ioda_insitu* # version files versions/build.ver diff --git a/ci/Jenkinsfile b/ci/Jenkinsfile index 8ed4927c6b..2654adba29 100644 --- a/ci/Jenkinsfile +++ b/ci/Jenkinsfile @@ -78,7 +78,7 @@ pipeline { Machine = machine[0].toUpperCase() + machine.substring(1) echo "Getting Common Workspace for ${Machine}" ws("${custom_workspace[machine]}/${env.CHANGE_ID}") { - properties([parameters([[$class: 'NodeParameterDefinition', allowedSlaves: ['built-in', 'Hercules-EMC', 'Hera-EMC', 'Orion-EMC'], defaultSlaves: ['built-in'], name: '', nodeEligibility: [$class: 'AllNodeEligibility'], triggerIfResult: 'allCases']])]) + properties([parameters([[$class: 'NodeParameterDefinition', allowedSlaves: ['built-in', 'Hercules-EMC', 'Hera-EMC', 'Orion-EMC', 'Gaea'], defaultSlaves: ['built-in'], name: '', nodeEligibility: [$class: 'AllNodeEligibility'], triggerIfResult: 'allCases']])]) GH = sh(script: "which gh || echo '~/bin/gh'", returnStdout: true).trim() CUSTOM_WORKSPACE = "${WORKSPACE}" sh(script: "mkdir -p ${CUSTOM_WORKSPACE}/RUNTESTS;rm -Rf ${CUSTOM_WORKSPACE}/RUNTESTS/*") @@ -267,7 +267,7 @@ pipeline { } } } - + stage( '5. 
FINALIZE' ) { agent { label NodeName[machine].toLowerCase() } @@ -297,6 +297,6 @@ pipeline { } } } - } + } } } diff --git a/ci/cases/pr/C96_atmaerosnowDA.yaml b/ci/cases/pr/C96C48_hybatmaerosnowDA.yaml similarity index 94% rename from ci/cases/pr/C96_atmaerosnowDA.yaml rename to ci/cases/pr/C96C48_hybatmaerosnowDA.yaml index dda6103bca..7387e55b24 100644 --- a/ci/cases/pr/C96_atmaerosnowDA.yaml +++ b/ci/cases/pr/C96C48_hybatmaerosnowDA.yaml @@ -6,12 +6,13 @@ arguments: pslot: {{ 'pslot' | getenv }} app: ATMA resdetatmos: 96 + resensatmos: 48 comroot: {{ 'RUNTESTS' | getenv }}/COMROOT expdir: {{ 'RUNTESTS' | getenv }}/EXPDIR icsdir: {{ 'ICSDIR_ROOT' | getenv }}/C96C48/20240610 idate: 2021122012 edate: 2021122100 - nens: 0 + nens: 2 gfs_cyc: 1 start: cold yaml: {{ HOMEgfs }}/ci/cases/yamls/atmaerosnowDA_defaults_ci.yaml diff --git a/ci/cases/pr/C96C48_ufs_hybatmDA.yaml b/ci/cases/pr/C96C48_ufs_hybatmDA.yaml index 1f7179d8d1..f835b34593 100644 --- a/ci/cases/pr/C96C48_ufs_hybatmDA.yaml +++ b/ci/cases/pr/C96C48_ufs_hybatmDA.yaml @@ -22,4 +22,5 @@ skip_ci_on_hosts: - gaea - orion - hercules + - wcoss2 diff --git a/env/AWSPW.env b/env/AWSPW.env index 992281a1d7..7fe17d2492 100755 --- a/env/AWSPW.env +++ b/env/AWSPW.env @@ -27,7 +27,7 @@ if [[ -n "${ntasks:-}" && -n "${max_tasks_per_node:-}" && -n "${tasks_per_node:- NTHREADS1=${threads_per_task:-1} [[ ${NTHREADSmax} -gt ${max_threads_per_task} ]] && NTHREADSmax=${max_threads_per_task} [[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && NTHREADS1=${max_threads_per_task} - APRUN="${launcher} -n ${ntasks}" + export APRUN="${launcher} -n ${ntasks}" else echo "ERROR config.resources must be sourced before sourcing AWSPW.env" exit 2 @@ -43,6 +43,13 @@ if [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then export APRUN_UFS="${launcher} -n ${ufs_ntasks}" unset nnodes ufs_ntasks +elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || [[ "${step}" = "wavepostbndpnt" ]] || [[ 
"${step}" = "wavepostbndpntbll" ]] || [[ "${step}" = "wavepostpnt" ]]; then + + export CFP_MP="YES" + if [[ "${step}" = "waveprep" ]]; then export MP_PULSE=0 ; fi + export wavempexec=${launcher} + export wave_mpmd=${mpmd_opt} + elif [[ "${step}" = "post" ]]; then export NTHREADS_NP=${NTHREADS1} @@ -52,6 +59,15 @@ elif [[ "${step}" = "post" ]]; then [[ ${NTHREADS_DWN} -gt ${max_threads_per_task} ]] && export NTHREADS_DWN=${max_threads_per_task} export APRUN_DWN="${launcher} -n ${ntasks_dwn}" +elif [[ "${step}" = "atmos_products" ]]; then + + export USE_CFP="YES" # Use MPMD for downstream product generation on Hera + +elif [[ "${step}" = "oceanice_products" ]]; then + + export NTHREADS_OCNICEPOST=${NTHREADS1} + export APRUN_OCNICEPOST="${launcher} -n 1 --cpus-per-task=${NTHREADS_OCNICEPOST}" + elif [[ "${step}" = "ecen" ]]; then export NTHREADS_ECEN=${NTHREADSmax} diff --git a/env/GOOGLEPW.env b/env/GOOGLEPW.env new file mode 100755 index 0000000000..f5582ccd4d --- /dev/null +++ b/env/GOOGLEPW.env @@ -0,0 +1,55 @@ +#! /usr/bin/env bash + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input argument to set runtime environment variables!" 
+ exit 1 + +fi + +step=$1 + +export launcher="srun -l --export=ALL" +export mpmd_opt="--multi-prog --output=mpmd.%j.%t.out" + +# Configure MPI environment +export OMP_STACKSIZE=2048000 +export NTHSTACK=1024000000 + +ulimit -s unlimited +ulimit -a + +# Calculate common variables +# Check first if the dependent variables are set +if [[ -n "${ntasks:-}" && -n "${max_tasks_per_node:-}" && -n "${tasks_per_node:-}" ]]; then + max_threads_per_task=$((max_tasks_per_node / tasks_per_node)) + NTHREADSmax=${threads_per_task:-${max_threads_per_task}} + NTHREADS1=${threads_per_task:-1} + [[ ${NTHREADSmax} -gt ${max_threads_per_task} ]] && NTHREADSmax=${max_threads_per_task} + [[ ${NTHREADS1} -gt ${max_threads_per_task} ]] && NTHREADS1=${max_threads_per_task} + APRUN="${launcher} -n ${ntasks}" +else + echo "ERROR config.resources must be sourced before sourcing GOOGLEPW.env" + exit 2 +fi + +if [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then + + export launcher="srun --mpi=pmi2 -l" + + (( nnodes = (ntasks+tasks_per_node-1)/tasks_per_node )) + (( ufs_ntasks = nnodes*tasks_per_node )) + # With ESMF threading, the model wants to use the full node + export APRUN_UFS="${launcher} -n ${ufs_ntasks}" + unset nnodes ufs_ntasks + +elif [[ "${step}" = "post" ]]; then + + export NTHREADS_NP=${NTHREADS1} + export APRUN_NP="${APRUN}" + + export NTHREADS_DWN=${threads_per_task_dwn:-1} + [[ ${NTHREADS_DWN} -gt ${max_threads_per_task} ]] && export NTHREADS_DWN=${max_threads_per_task} + export APRUN_DWN="${launcher} -n ${ntasks_dwn}" + +fi diff --git a/env/HERA.env b/env/HERA.env index 3f0e7c9f36..697cf21965 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -80,7 +80,7 @@ elif [[ "${step}" = "atmensanlletkf" ]]; then elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} - export APRUN_ATMENSANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" + export APRUN_ATMENSANLFV3INC="${APRUN} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" elif [[ 
"${step}" = "aeroanlrun" ]]; then @@ -106,6 +106,13 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n 6" +elif [[ "${step}" = "esnowrecen" ]]; then + + export NTHREADS_ESNOWRECEN=${NTHREADSmax} + export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" + + export APRUN_APPLY_INCR="${launcher} -n 6" + elif [[ "${step}" = "marinebmat" ]]; then export APRUNCFP="${launcher} -n \$ncmd --multi-prog" diff --git a/env/HERCULES.env b/env/HERCULES.env index 83fa1aadd1..83d934c91a 100755 --- a/env/HERCULES.env +++ b/env/HERCULES.env @@ -105,6 +105,14 @@ case ${step} in export APRUN_APPLY_INCR="${launcher} -n 6" ;; + "esnowrecen") + + export NTHREADS_ESNOWRECEN=${NTHREADSmax} + export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" + + export APRUN_APPLY_INCR="${launcher} -n 6" + ;; + "marinebmat") export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" diff --git a/env/JET.env b/env/JET.env index 810a8cd501..473539ded1 100755 --- a/env/JET.env +++ b/env/JET.env @@ -89,6 +89,13 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n 6" +elif [[ "${step}" = "esnowrecen" ]]; then + + export NTHREADS_ESNOWRECEN=${NTHREADSmax} + export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" + + export APRUN_APPLY_INCR="${launcher} -n 6" + elif [[ "${step}" = "atmanlfv3inc" ]]; then export NTHREADS_ATMANLFV3INC=${NTHREADSmax} diff --git a/env/ORION.env b/env/ORION.env index bbbfb59182..65a8871cdd 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -97,6 +97,13 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n 6" +elif [[ "${step}" = "esnowrecen" ]]; then + + export NTHREADS_ESNOWRECEN=${NTHREADSmax} + export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" + + export APRUN_APPLY_INCR="${launcher} -n 6" + elif [[ "${step}" = "atmanlfv3inc" ]]; then export NTHREADS_ATMANLFV3INC=${NTHREADSmax} diff --git a/env/S4.env b/env/S4.env index 
840ca65898..d0985e44ca 100755 --- a/env/S4.env +++ b/env/S4.env @@ -68,7 +68,7 @@ elif [[ "${step}" = "atmensanlletkf" ]]; then elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${NTHREADSmax} - export APRUN_ATMENSANLFV3INC="${APRUN}" + export APRUN_ATMENSANLFV3INC="${APRUN}" elif [[ "${step}" = "aeroanlrun" ]]; then @@ -89,6 +89,13 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n 6" +elif [[ "${step}" = "esnowrecen" ]]; then + + export NTHREADS_ESNOWRECEN=${NTHREADSmax} + export APRUN_ESNOWRECEN="${APRUN} --cpus-per-task=${NTHREADS_ESNOWRECEN}" + + export APRUN_APPLY_INCR="${launcher} -n 6" + elif [[ "${step}" = "atmanlfv3inc" ]]; then export NTHREADS_ATMANLFV3INC=${NTHREADSmax} diff --git a/env/WCOSS2.env b/env/WCOSS2.env index 18caf1bc03..cf9feeca83 100755 --- a/env/WCOSS2.env +++ b/env/WCOSS2.env @@ -82,6 +82,13 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n 6" +elif [[ "${step}" = "esnowrecen" ]]; then + + export NTHREADS_ESNOWRECEN=${NTHREADSmax} + export APRUN_ESNOWRECEN="${APRUN}" + + export APRUN_APPLY_INCR="${launcher} -n 6" + elif [[ "${step}" = "atmanlfv3inc" ]]; then export NTHREADS_ATMANLFV3INC=${NTHREADSmax} diff --git a/jobs/JGDAS_ENKF_SNOW_RECENTER b/jobs/JGDAS_ENKF_SNOW_RECENTER new file mode 100755 index 0000000000..05d46cffc2 --- /dev/null +++ b/jobs/JGDAS_ENKF_SNOW_RECENTER @@ -0,0 +1,59 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "esnowrecen" -c "base esnowrecen" + +############################################## +# Set variables used in the script +############################################## +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDUMP="gdas" +export GDUMP + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ + COMIN_OBS:COM_OBS_TMPL \ + COMOUT_ATMOS_ANALYSIS:COM_ATMOS_ANALYSIS_TMPL \ + COMOUT_CONF:COM_CONF_TMPL +MEMDIR="ensstat" YMD=${PDY} HH=${cyc} declare_from_tmpl \ + COMOUT_SNOW_ANALYSIS:COM_SNOW_ANALYSIS_TMPL + +mkdir -p "${COMOUT_SNOW_ANALYSIS}" "${COMOUT_CONF}" + +for imem in $(seq 1 "${NMEM_ENS}"); do + memchar="mem$(printf %03i "${imem}")" + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ + COMOUT_SNOW_ANALYSIS:COM_SNOW_ANALYSIS_TMPL + mkdir -p "${COMOUT_SNOW_ANALYSIS}" +done + +############################################################### +# Run relevant script + +EXSCRIPT=${SNOWANLPY:-${SCRgfs}/exgdas_enkf_snow_recenter.py} +${EXSCRIPT} +status=$? 
+(( status != 0 )) && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || exit 1 +[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}" + +exit 0 diff --git a/jobs/JGLOBAL_PREP_SNOW_OBS b/jobs/JGLOBAL_PREP_SNOW_OBS index f5ea3fc122..0e3557697d 100755 --- a/jobs/JGLOBAL_PREP_SNOW_OBS +++ b/jobs/JGLOBAL_PREP_SNOW_OBS @@ -41,4 +41,10 @@ if [[ -e "${pgmout}" ]] ; then cat "${pgmout}" fi +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || exit 1 +[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" + exit 0 diff --git a/jobs/JGLOBAL_SNOW_ANALYSIS b/jobs/JGLOBAL_SNOW_ANALYSIS index b7d8c37060..e0f24fa624 100755 --- a/jobs/JGLOBAL_SNOW_ANALYSIS +++ b/jobs/JGLOBAL_SNOW_ANALYSIS @@ -44,4 +44,10 @@ if [[ -e "${pgmout}" ]] ; then cat "${pgmout}" fi +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || exit 1 +[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" + exit 0 diff --git a/jobs/rocoto/esnowrecen.sh b/jobs/rocoto/esnowrecen.sh new file mode 100755 index 0000000000..f8c4f8f7fc --- /dev/null +++ b/jobs/rocoto/esnowrecen.sh @@ -0,0 +1,18 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? 
+[[ ${status} -ne 0 ]] && exit "${status}" + +export job="esnowrecen" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGDAS_ENKF_SNOW_RECENTER" +status=$? +exit "${status}" diff --git a/modulefiles/module_gwsetup.gaea.lua b/modulefiles/module_gwsetup.gaea.lua index 8b9f70e4a0..0bcc689bad 100644 --- a/modulefiles/module_gwsetup.gaea.lua +++ b/modulefiles/module_gwsetup.gaea.lua @@ -15,5 +15,6 @@ load(pathJoin("python", python_ver)) load("py-jinja2") load("py-pyyaml") load("py-numpy") +load("git-lfs") whatis("Description: GFS run setup environment") diff --git a/parm/config/gefs/config.base b/parm/config/gefs/config.base index a92349facd..189b7ba446 100644 --- a/parm/config/gefs/config.base +++ b/parm/config/gefs/config.base @@ -345,4 +345,10 @@ export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arc # Number of regional collectives to create soundings for export NUM_SND_COLLECTIVES=${NUM_SND_COLLECTIVES:-9} +# The tracker, genesis, and METplus jobs are not supported on AWS yet +# TODO: we should place these in workflow/hosts/awspw.yaml as part of AWS setup, not for general. +if [[ "${machine}" == "AWSPW" ]]; then + export DO_WAVE="NO" +fi + echo "END: config.base" diff --git a/parm/config/gefs/config.fcst b/parm/config/gefs/config.fcst index 407e48496e..efdedb24f4 100644 --- a/parm/config/gefs/config.fcst +++ b/parm/config/gefs/config.fcst @@ -193,6 +193,7 @@ case ${imp_physics} in export dt_inner=$((DELTIM/2)) export sedi_semi=.true. if [[ "${sedi_semi}" == .true. 
]]; then export dt_inner=${DELTIM} ; fi + if [[ dt_inner -gt 300 ]]; then export dt_inner=300 ; fi export decfl=10 export hord_mt_nh_nonmono=5 diff --git a/parm/config/gefs/config.resources b/parm/config/gefs/config.resources index 297bc08c05..5667e5efa4 100644 --- a/parm/config/gefs/config.resources +++ b/parm/config/gefs/config.resources @@ -41,7 +41,7 @@ case ${machine} in ;; "AWSPW") export PARTITION_BATCH="compute" - max_tasks_per_node=40 + max_tasks_per_node=36 ;; *) echo "FATAL ERROR: Unknown machine encountered by ${BASH_SOURCE[0]}" diff --git a/parm/config/gefs/config.resources.AWSPW b/parm/config/gefs/config.resources.AWSPW new file mode 100644 index 0000000000..a735c7622d --- /dev/null +++ b/parm/config/gefs/config.resources.AWSPW @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +# AWS-specific job resources + +export is_exclusive="True" +unset memory + +# shellcheck disable=SC2312 +for mem_var in $(env | grep '^memory_' | cut -d= -f1); do + unset "${mem_var}" +done diff --git a/parm/config/gfs/config.esnowrecen b/parm/config/gfs/config.esnowrecen new file mode 100644 index 0000000000..adb039559a --- /dev/null +++ b/parm/config/gfs/config.esnowrecen @@ -0,0 +1,29 @@ +#! 
/usr/bin/env bash + +########## config.esnowrecen ########## +# configuration common to snow ensemble analysis tasks + +echo "BEGIN: config.esnowrecen" + +# Get task specific resources +source "${EXPDIR}/config.resources" esnowrecen + +export JCB_BASE_YAML="${PARMgfs}/gdas/snow/jcb-base.yaml.j2" +export JCB_ALGO_YAML="${PARMgfs}/gdas/snow/jcb-fv3jedi_land_ensrecenter.yaml.j2" + +export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" +export SNOW_ENS_STAGE_TMPL="${PARMgfs}/gdas/snow_stage_ens_update.yaml.j2" +export SNOW_OROG_STAGE_TMPL="${PARMgfs}/gdas/snow_stage_orog.yaml.j2" +export SNOW_ENS_FINALIZE_TMPL="${PARMgfs}/gdas/snow_finalize_ens_update.yaml.j2" + +# Name of the executable that applies increment to bkg and its namelist template +export APPLY_INCR_EXE="${EXECgfs}/apply_incr.exe" +export ENS_APPLY_INCR_NML_TMPL="${PARMgfs}/gdas/snow/letkfoi/ens_apply_incr_nml.j2" + +export io_layout_x=@IO_LAYOUT_X@ +export io_layout_y=@IO_LAYOUT_Y@ + +export JEDIEXE=${EXECgfs}/gdasapp_land_ensrecenter.x +export FREGRID=${EXECgfs}/fregrid.x + +echo "END: config.esnowrecen" diff --git a/parm/config/gfs/config.fcst b/parm/config/gfs/config.fcst index 2743ea0745..da336ff73b 100644 --- a/parm/config/gfs/config.fcst +++ b/parm/config/gfs/config.fcst @@ -209,6 +209,7 @@ case ${imp_physics} in export dt_inner=$((DELTIM/2)) export sedi_semi=.true. if [[ "${sedi_semi}" == .true. 
]]; then export dt_inner=${DELTIM} ; fi + if [[ dt_inner -gt 300 ]]; then export dt_inner=300; fi export decfl=10 export hord_mt_nh_nonmono=5 diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 719b31342a..978dca6d51 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -15,7 +15,7 @@ if (( $# != 1 )); then echo "prep prepsnowobs prepatmiodaobs" echo "atmanlinit atmanlvar atmanlfv3inc atmanlfinal" echo "atmensanlinit atmensanlletkf atmensanlfv3inc atmensanlfinal" - echo "snowanl" + echo "snowanl esnowrecen" echo "prepobsaero aeroanlinit aeroanlrun aeroanlfinal" echo "anal sfcanl analcalc analdiag fcst echgres" echo "upp atmos_products" @@ -120,6 +120,14 @@ case ${machine} in # shellcheck disable=SC2034 mem_node_max="" ;; + "GOOGLEPW") + export PARTITION_BATCH="compute" + npe_node_max=30 + max_tasks_per_node=30 + # TODO Supply a max mem/node value for GOOGLE + # shellcheck disable=SC2034 + mem_node_max="" + ;; "CONTAINER") max_tasks_per_node=1 # TODO Supply a max mem/node value for a container @@ -348,6 +356,35 @@ case ${step} in tasks_per_node=$(( max_tasks_per_node / threads_per_task )) ;; + "esnowrecen") + # below lines are for creating JEDI YAML + case ${CASE} in + "C768") + layout_x=6 + layout_y=6 + ;; + "C384") + layout_x=5 + layout_y=5 + ;; + "C192" | "C96" | "C48") + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resources not defined for job ${step} at resolution ${CASE}" + exit 4 + esac + + export layout_x + export layout_y + + walltime="00:15:00" + ntasks=$(( layout_x * layout_y * 6 )) + threads_per_task=1 + tasks_per_node=$(( max_tasks_per_node / threads_per_task )) + ;; + "prepobsaero") walltime="00:30:00" ntasks=1 diff --git a/parm/config/gfs/config.resources.AWSPW b/parm/config/gfs/config.resources.AWSPW index 2bb5f35e76..a735c7622d 100644 --- a/parm/config/gfs/config.resources.AWSPW +++ b/parm/config/gfs/config.resources.AWSPW @@ -3,7 +3,7 @@ # AWS-specific job resources 
export is_exclusive="True" -export memory=None +unset memory # shellcheck disable=SC2312 for mem_var in $(env | grep '^memory_' | cut -d= -f1); do diff --git a/parm/config/gfs/config.resources.GOOGLEPW b/parm/config/gfs/config.resources.GOOGLEPW new file mode 100644 index 0000000000..21e54013c7 --- /dev/null +++ b/parm/config/gfs/config.resources.GOOGLEPW @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +# GOOGLE-specific job resources + +export is_exclusive="True" +unset memory + +# shellcheck disable=SC2312 +for mem_var in $(env | grep '^memory_' | cut -d= -f1); do + unset "${mem_var}" +done diff --git a/parm/gdas/snow_finalize_ens_update.yaml.j2 b/parm/gdas/snow_finalize_ens_update.yaml.j2 new file mode 100644 index 0000000000..a2a5763ab8 --- /dev/null +++ b/parm/gdas/snow_finalize_ens_update.yaml.j2 @@ -0,0 +1,43 @@ +copy: +###################################### +# copy analyses to directories +###################################### +{% for mem in range(1, NMEM_ENS + 1) %} + # define variables + # Declare a dict of search and replace terms to run on each template + {% set tmpl_dict = {'${ROTDIR}':ROTDIR, + '${RUN}':RUN, + '${YMD}':current_cycle | to_YMD , + '${HH}':current_cycle | strftime("%H"), + '${MEMDIR}':"mem" + '%03d' % mem} %} + + {% for tile in range(1, ntiles+1) %} +- ["{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ COM_SNOW_ANALYSIS_TMPL | replace_tmpl(tmpl_dict) }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] + {% endfor %} + {% if DOIAU == True %} + # if using IAU, also need analyses copied at the beginning of the window + {% for tile in range(1, ntiles+1) %} +- ["{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ COM_SNOW_ANALYSIS_TMPL | replace_tmpl(tmpl_dict) }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] + {% endfor %} + {% endif %} +{% endfor %} +###################################### +# copy ensemble 
mean increment to COM +###################################### +# define variables +# Declare a dict of search and replace terms to run on each template +{% set tmpl_dict = {'${ROTDIR}':ROTDIR, + '${RUN}':RUN, + '${YMD}':current_cycle | to_YMD , + '${HH}':current_cycle | strftime("%H"), + '${MEMDIR}':"ensstat"} %} + +{% for tile in range(1, ntiles+1) %} +- ["{{ DATA }}/inc/ensmean/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ COM_SNOW_ANALYSIS_TMPL | replace_tmpl(tmpl_dict) }}/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] +{% endfor %} +{% if DOIAU == True %} + # if using IAU, also need increment copied at the beginning of the window + {% for tile in range(1, ntiles+1) %} +- ["{{ DATA }}/inc/ensmean/snowinc.{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ COM_SNOW_ANALYSIS_TMPL | replace_tmpl(tmpl_dict) }}/snowinc.{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] + {% endfor %} +{% endif %} diff --git a/parm/gdas/snow_stage_ens_update.yaml.j2 b/parm/gdas/snow_stage_ens_update.yaml.j2 new file mode 100644 index 0000000000..4ad5499751 --- /dev/null +++ b/parm/gdas/snow_stage_ens_update.yaml.j2 @@ -0,0 +1,76 @@ +###################################### +# set some variables +###################################### +{% if DOIAU == True %} + {% set bkg_time = SNOW_WINDOW_BEGIN | to_fv3time %} +{% else %} + {% set bkg_time = current_cycle | to_fv3time %} +{% endif %} +###################################### +# create working directories +###################################### +mkdir: +- "{{ DATA }}/bkg/det" +- "{{ DATA }}/bkg/det_ensres" +- "{{ DATA }}/inc/det" +- "{{ DATA }}/inc/det_ensres" +- "{{ DATA }}//inc/ensmean" +{% for mem in range(1, NMEM_ENS + 1) %} +- "{{ DATA }}/bkg/mem{{ '%03d' % mem }}" +- "{{ DATA }}/anl/mem{{ '%03d' % mem }}" +{% endfor %} +copy: +###################################### +# copy deterministic background files +###################################### +# 
define variables +# Declare a dict of search and replace terms to run on each template +{% set tmpl_dict = {'${ROTDIR}':ROTDIR, + '${RUN}':GDUMP, + '${YMD}':previous_cycle | to_YMD, + '${HH}':previous_cycle | strftime("%H"), + '${MEMDIR}':""} %} + +{% for tile in range(1, ntiles+1) %} +- ["{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/det/{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] +{% endfor %} +###################################### +# copy deterministic increment files +###################################### +# define variables +# Declare a dict of search and replace terms to run on each template +{% set tmpl_dict = {'${ROTDIR}':ROTDIR, + '${RUN}':GDUMP, + '${YMD}':current_cycle | to_YMD, + '${HH}':current_cycle | strftime("%H"), + '${MEMDIR}':""} %} + +{% for tile in range(1, ntiles+1) %} +- ["{{ COM_SNOW_ANALYSIS_TMPL | replace_tmpl(tmpl_dict) }}/snowinc.{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/inc/det/snowinc.{{ bkg_time }}.sfc_data.tile{{ tile }}.nc"] +{% endfor %} +###################################### +# copy ensemble background files +###################################### +{% for mem in range(1, NMEM_ENS + 1) %} + # define variables + # Declare a dict of search and replace terms to run on each template + {% set tmpl_dict = {'${ROTDIR}':ROTDIR, + '${RUN}':RUN, + '${YMD}':previous_cycle | to_YMD, + '${HH}':previous_cycle | strftime("%H"), + '${MEMDIR}':"mem" + '%03d' % mem} %} + + # we need to copy them to two places, one serves as the basis for the analysis + {% for tile in range(1, ntiles+1) %} +- ["{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/mem{{ '%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/anl/mem{{ 
'%03d' % mem }}/{{ current_cycle | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] + {% endfor %} + {% if DOIAU == True %} + # if using IAU, also need backgrounds copied at the beginning of the window + # we need to copy them to two places, one serves as the basis for the analysis + {% for tile in range(1, ntiles+1) %} +- ["{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/bkg/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] +- ["{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc", "{{ DATA }}/anl/mem{{ '%03d' % mem }}/{{ SNOW_WINDOW_BEGIN | to_fv3time }}.sfc_data.tile{{ tile }}.nc"] + {% endfor %} + {% endif %} +{% endfor %} diff --git a/parm/gdas/snow_stage_orog.yaml.j2 b/parm/gdas/snow_stage_orog.yaml.j2 new file mode 100644 index 0000000000..3cd7d5c327 --- /dev/null +++ b/parm/gdas/snow_stage_orog.yaml.j2 @@ -0,0 +1,12 @@ +mkdir: +- "{{ DATA }}/orog/det" +- "{{ DATA }}/orog/ens" +copy: +- ["{{ FIXorog }}/{{ CASE }}/{{ CASE }}_mosaic.nc", "{{ DATA }}/orog/det/{{ CASE }}_mosaic.nc"] +- ["{{ FIXorog }}/{{ CASE_ENS }}/{{ CASE_ENS }}_mosaic.nc", "{{ DATA }}/orog/ens/{{ CASE_ENS }}_mosaic.nc"] +{% for tile in range(1, ntiles+1) %} +- ["{{ FIXorog }}/{{ CASE }}/{{ CASE }}_grid.tile{{ tile }}.nc", "{{ DATA }}/orog/det/{{ CASE }}_grid.tile{{ tile }}.nc"] +- ["{{ FIXorog }}/{{ CASE_ENS }}/{{ CASE_ENS }}_grid.tile{{ tile }}.nc", "{{ DATA }}/orog/ens/{{ CASE_ENS }}_grid.tile{{ tile }}.nc"] +- ["{{ FIXorog }}/{{ CASE }}/{{ CASE }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc", "{{ DATA }}/orog/det/{{ CASE }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc" ] +- ["{{ FIXorog }}/{{ CASE_ENS }}/{{ CASE_ENS }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc", "{{ DATA }}/orog/ens/{{ CASE_ENS }}.mx{{ OCNRES }}_oro_data.tile{{ tile }}.nc" ] +{% endfor %} diff --git a/parm/ufs/fv3/diag_table 
b/parm/ufs/fv3/diag_table index dad8b6fac6..f44bfd82a4 100644 --- a/parm/ufs/fv3/diag_table +++ b/parm/ufs/fv3/diag_table @@ -77,6 +77,7 @@ #"gfs_dyn", "pfhy", "preshy", "fv3_history", "all", .false., "none", 2 #"gfs_dyn", "pfnh", "presnh", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "w", "dzdt", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "omga", "omga", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "ps", "pressfc", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "hs", "hgtsfc", "fv3_history", "all", .false., "none", 2 "gfs_phys", "refl_10cm", "refl_10cm", "fv3_history", "all", .false., "none", 2 diff --git a/parm/ufs/fv3/diag_table_da b/parm/ufs/fv3/diag_table_da index 5e7149663a..339b6a42a5 100644 --- a/parm/ufs/fv3/diag_table_da +++ b/parm/ufs/fv3/diag_table_da @@ -30,6 +30,7 @@ #"gfs_dyn", "pfhy", "preshy", "fv3_history", "all", .false., "none", 2 #"gfs_dyn", "pfnh", "presnh", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "w", "dzdt", "fv3_history", "all", .false., "none", 2 +"gfs_dyn", "omga", "omga", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "ps", "pressfc", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "hs", "hgtsfc", "fv3_history", "all", .false., "none", 2 diff --git a/scripts/exgdas_enkf_sfc.sh b/scripts/exgdas_enkf_sfc.sh index 2720dd5d5f..1944325317 100755 --- a/scripts/exgdas_enkf_sfc.sh +++ b/scripts/exgdas_enkf_sfc.sh @@ -68,16 +68,6 @@ export DELTSFC=${DELTSFC:-6} APRUN_ESFC=${APRUN_ESFC:-${APRUN:-""}} NTHREADS_ESFC=${NTHREADS_ESFC:-${NTHREADS:-1}} -################################################################################ -# Preprocessing -mkdata=NO -if [ ! -d $DATA ]; then - mkdata=YES - mkdir -p $DATA -fi -cd $DATA || exit 99 - - ################################################################################ # Update surface fields in the FV3 restart's using global_cycle. 
@@ -137,6 +127,7 @@ if [ $DOIAU = "YES" ]; then export TILE_NUM=$n + # Copy inputs from COMIN to DATA for imem in $(seq 1 $NMEM_ENS); do smem=$((imem + mem_offset)) if (( smem > NMEM_ENS_MAX )); then @@ -150,24 +141,31 @@ if [ $DOIAU = "YES" ]; then COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL MEMDIR=${gmemchar} RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} declare_from_tmpl \ - COM_ATMOS_RESTART_MEM_PREV:COM_ATMOS_RESTART_TMPL + COMIN_ATMOS_RESTART_MEM_PREV:COM_ATMOS_RESTART_TMPL MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ COM_ATMOS_ANALYSIS_MEM:COM_ATMOS_ANALYSIS_TMPL + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ + COMIN_SNOW_ANALYSIS_MEM:COM_SNOW_ANALYSIS_TMPL + + # determine where the input snow restart files come from + if [[ "${DO_JEDISNOWDA:-}" == "YES" ]]; then + sfcdata_dir="${COMIN_SNOW_ANALYSIS_MEM}" + else + sfcdata_dir="${COMIN_ATMOS_RESTART_MEM_PREV}" + fi + [[ ${TILE_NUM} -eq 1 ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}" - ${NCP} "${COM_ATMOS_RESTART_MEM_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \ - "${COM_ATMOS_RESTART_MEM}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" - ${NLN} "${COM_ATMOS_RESTART_MEM_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \ + ${NCP} "${sfcdata_dir}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \ "${DATA}/fnbgsi.${cmem}" - ${NLN} "${COM_ATMOS_RESTART_MEM}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" \ - "${DATA}/fnbgso.${cmem}" - ${NLN} "${FIXorog}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" - ${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" + ${NCP} "${DATA}/fnbgsi.${cmem}" "${DATA}/fnbgso.${cmem}" + ${NCP} "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" + ${NCP} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" if [[ ${GSI_SOILANAL} = "YES" ]]; then FHR=6 - ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}sfci00${FHR}.nc" \ + ${NCP} 
"${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}sfci00${FHR}.nc" \ "${DATA}/lnd_incr.${cmem}" fi done # ensembles @@ -175,6 +173,33 @@ if [ $DOIAU = "YES" ]; then CDATE="${PDY}${cyc}" ${CYCLESH} export err=$?; err_chk + # Copy outputs from DATA to COMOUT + for imem in $(seq 1 $NMEM_ENS); do + smem=$((imem + mem_offset)) + if (( smem > NMEM_ENS_MAX )); then + smem=$((smem - NMEM_ENS_MAX)) + fi + gmemchar="mem"$(printf %03i "$smem") + cmem=$(printf %03i $imem) + memchar="mem$cmem" + + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ + COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL + + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ + COM_ATMOS_ANALYSIS_MEM:COM_ATMOS_ANALYSIS_TMPL + + [[ ${TILE_NUM} -eq 1 ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}" + cpfs "${DATA}/fnbgso.${cmem}" "${COM_ATMOS_RESTART_MEM}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" + + + if [[ ${GSI_SOILANAL} = "YES" ]]; then + FHR=6 + ${NCP} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}sfci00${FHR}.nc" \ + "${DATA}/lnd_incr.${cmem}" + fi + done # ensembles + done fi @@ -184,6 +209,7 @@ if [ $DOSFCANL_ENKF = "YES" ]; then export TILE_NUM=$n + # Copy inputs from COMIN to DATA for imem in $(seq 1 $NMEM_ENS); do smem=$((imem + mem_offset)) if (( smem > NMEM_ENS_MAX )); then @@ -193,28 +219,49 @@ if [ $DOSFCANL_ENKF = "YES" ]; then cmem=$(printf %03i $imem) memchar="mem$cmem" - MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ - COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL + RUN="${GDUMP_ENS}" MEMDIR=${gmemchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ + COMIN_SNOW_ANALYSIS_MEM:COM_SNOW_ANALYSIS_TMPL RUN="${GDUMP_ENS}" MEMDIR=${gmemchar} YMD=${gPDY} HH=${gcyc} declare_from_tmpl \ - COM_ATMOS_RESTART_MEM_PREV:COM_ATMOS_RESTART_TMPL + COMIN_ATMOS_RESTART_MEM_PREV:COM_ATMOS_RESTART_TMPL - [[ ${TILE_NUM} -eq 1 ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}" + # determine where the input snow restart files come from + if [[ "${DO_JEDISNOWDA:-}" == "YES" ]]; then + 
sfcdata_dir="${COMIN_SNOW_ANALYSIS_MEM}" + else + sfcdata_dir="${COMIN_ATMOS_RESTART_MEM_PREV}" + fi - ${NCP} "${COM_ATMOS_RESTART_MEM_PREV}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \ - "${COM_ATMOS_RESTART_MEM}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" - ${NLN} "${COM_ATMOS_RESTART_MEM_PREV}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \ + ${NCP} "${sfcdata_dir}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \ "${DATA}/fnbgsi.${cmem}" - ${NLN} "${COM_ATMOS_RESTART_MEM}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" \ - "${DATA}/fnbgso.${cmem}" - ${NLN} "${FIXorog}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" - ${NLN} "${FIXorog}/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" + ${NCP} "${DATA}/fnbgsi.${cmem}" "${DATA}/fnbgso.${cmem}" + ${NCP} "${FIXgfs}/orog/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" + ${NCP} "${FIXgfs}/orog/${CASE}/${CASE}.mx${OCNRES}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" done CDATE="${PDY}${cyc}" ${CYCLESH} export err=$?; err_chk + # Copy outputs from DATA to COMOUT + for imem in $(seq 1 "${NMEM_ENS}"); do + smem=$((imem + mem_offset)) + if (( smem > NMEM_ENS_MAX )); then + smem=$((smem - NMEM_ENS_MAX)) + fi + gmemchar="mem"$(printf %03i "${smem}") + cmem=$(printf %03i "${imem}") + memchar="mem${cmem}" + + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} declare_from_tmpl \ + COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL + + [[ ! 
-d "${COM_ATMOS_RESTART_MEM}" ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}" + + cpfs "${DATA}/fnbgso.${cmem}" "${COM_ATMOS_RESTART_MEM}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" + + done + done fi @@ -222,8 +269,7 @@ fi ################################################################################ # Postprocessing -cd $pwd -[[ $mkdata = "YES" ]] && rm -rf $DATA +cd "${pwd}" || exit 1 -exit $err +exit ${err} diff --git a/scripts/exgdas_enkf_snow_recenter.py b/scripts/exgdas_enkf_snow_recenter.py new file mode 100755 index 0000000000..fcd501860c --- /dev/null +++ b/scripts/exgdas_enkf_snow_recenter.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python3 +# exgdas_enkf_snow_recenter.py +# This script creates an SnowEnsAnalysis class +# and will recenter the ensemble mean to the +# deterministic analysis and provide increments +# to create an ensemble of snow analyses +import os + +from wxflow import Logger, cast_strdict_as_dtypedict +from pygfs.task.snowens_analysis import SnowEnsAnalysis + +# Initialize root logger +logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the snow ensemble analysis task + anl = SnowEnsAnalysis(config) + anl.initialize() + anl.genWeights() + anl.genMask() + anl.regridDetBkg() + anl.regridDetInc() + anl.recenterEns() + anl.addEnsIncrements() + anl.finalize() diff --git a/sorc/gdas.cd b/sorc/gdas.cd index f3fa26d4d6..0431b26650 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit f3fa26d4d6693fcf451184d5ecabb86c1b4190ca +Subproject commit 0431b26650c5e5d4eb741304a05c841d3fda0ddc diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index ae30e7a645..92cc1d50b1 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -329,7 +329,7 @@ ${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_model.fd/tests/ufs_model.x" . 
[[ -s "upp.x" ]] && rm -f upp.x ${LINK_OR_COPY} "${HOMEgfs}/sorc/upp.fd/exec/upp.x" . -for ufs_utilsexe in emcsfc_ice_blend emcsfc_snow2mdl global_cycle; do +for ufs_utilsexe in emcsfc_ice_blend emcsfc_snow2mdl global_cycle fregrid; do [[ -s "${ufs_utilsexe}" ]] && rm -f "${ufs_utilsexe}" ${LINK_OR_COPY} "${HOMEgfs}/sorc/ufs_utils.fd/exec/${ufs_utilsexe}" . done @@ -376,6 +376,7 @@ if [[ -d "${HOMEgfs}/sorc/gdas.cd/build" ]]; then "gdas_incr_handler.x" \ "gdas_obsprovider2ioda.x" \ "gdas_socahybridweights.x" \ + "gdasapp_land_ensrecenter.x" \ "bufr2ioda.x" \ "calcfIMS.exe" \ "apply_incr.exe" ) diff --git a/sorc/ufs_model.fd b/sorc/ufs_model.fd index c12760125c..ee4f19a0a6 160000 --- a/sorc/ufs_model.fd +++ b/sorc/ufs_model.fd @@ -1 +1 @@ -Subproject commit c12760125ce7c5a85e8ced92d7f37c9ad6a59afe +Subproject commit ee4f19a0a630fc2245a313bfe20302b5a6b555aa diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh index d1a332716a..6255b95175 100755 --- a/ush/forecast_predet.sh +++ b/ush/forecast_predet.sh @@ -304,6 +304,7 @@ FV3_predet(){ phys_hydrostatic=".false." # enable heating in hydrostatic balance in non-hydrostatic simulation use_hydro_pressure=".false." # use hydrostatic pressure for physics make_nh=".true." # running in non-hydrostatic mode + pass_full_omega_to_physics_in_non_hydrostatic_mode=".true." else # hydrostatic options hydrostatic=".true." phys_hydrostatic=".false." 
# ignored when hydrostatic = T diff --git a/ush/parsing_namelists_FV3.sh b/ush/parsing_namelists_FV3.sh index 60f44a721a..617ecff719 100755 --- a/ush/parsing_namelists_FV3.sh +++ b/ush/parsing_namelists_FV3.sh @@ -134,6 +134,7 @@ cat > input.nml < "${nml_file}" < None: super().__init__(config) # Store location of GDASApp jinja2 templates self.gdasapp_j2tmpl_dir = os.path.join(self.task_config.PARMgfs, 'gdas') + # fix ocnres + self.task_config.OCNRES = f"{self.task_config.OCNRES :03d}" def initialize(self) -> None: super().initialize() diff --git a/ush/python/pygfs/task/snowens_analysis.py b/ush/python/pygfs/task/snowens_analysis.py new file mode 100644 index 0000000000..982f74130c --- /dev/null +++ b/ush/python/pygfs/task/snowens_analysis.py @@ -0,0 +1,430 @@ +#!/usr/bin/env python3 + +import os +from logging import getLogger +from typing import Dict, List, Any +import netCDF4 as nc +import numpy as np + +from wxflow import (AttrDict, + FileHandler, + to_fv3time, to_timedelta, add_to_datetime, + rm_p, chdir, + parse_j2yaml, save_as_yaml, + Jinja, + logit, + Executable, + WorkflowException) +from pygfs.task.analysis import Analysis + +logger = getLogger(__name__.split('.')[-1]) + + +class SnowEnsAnalysis(Analysis): + """ + Class for global ensemble snow analysis tasks + """ + + @logit(logger, name="SnowEnsAnalysis") + def __init__(self, config): + super().__init__(config) + + _res_det = int(self.task_config['CASE'][1:]) + _res_ens = int(self.task_config['CASE_ENS'][1:]) + _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config['assim_freq']}H") / 2) + _recenter_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config['cyc']:02d}z.land_recenter.yaml") + + # Create a local dictionary that is repeatedly used across this class + local_dict = AttrDict( + { + 'npx_ges': _res_ens + 1, + 'npy_ges': _res_ens + 1, + 'npz_ges': self.task_config.LEVS - 1, + 'npz': self.task_config.LEVS - 1, + 
'SNOW_WINDOW_BEGIN': _window_begin, + 'SNOW_WINDOW_LENGTH': f"PT{self.task_config['assim_freq']}H", + 'ATM_WINDOW_BEGIN': _window_begin, + 'ATM_WINDOW_LENGTH': f"PT{self.task_config['assim_freq']}H", + 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + 'jedi_yaml': _recenter_yaml, + } + ) + bkg_time = _window_begin if self.task_config.DOIAU else self.task_config.current_cycle + local_dict['bkg_time'] = bkg_time + + # task_config is everything that this task should need + self.task_config = AttrDict(**self.task_config, **local_dict) + + @logit(logger) + def initialize(self) -> None: + """Initialize method for snow ensemble analysis + This method: + + + Parameters + ---------- + self : Analysis + Instance of the SnowEnsAnalysis object + """ + + super().initialize() + + # stage background and increment files + logger.info(f"Staging files from {self.task_config.SNOW_ENS_STAGE_TMPL}") + snow_stage_list = parse_j2yaml(self.task_config.SNOW_ENS_STAGE_TMPL, self.task_config) + FileHandler(snow_stage_list).sync() + + # stage orography files + logger.info(f"Staging orography files specified in {self.task_config.SNOW_OROG_STAGE_TMPL}") + snow_orog_stage_list = parse_j2yaml(self.task_config.SNOW_OROG_STAGE_TMPL, self.task_config) + FileHandler(snow_orog_stage_list).sync() + + # stage fix files for fv3-jedi + logger.info(f"Staging JEDI fix files from {self.task_config.JEDI_FIX_YAML}") + jedi_fix_list = parse_j2yaml(self.task_config.JEDI_FIX_YAML, self.task_config) + FileHandler(jedi_fix_list).sync() + + # write land ensemble recentering YAML + save_as_yaml(self.task_config.jedi_config, self.task_config.jedi_yaml) + logger.info(f"Wrote recentering YAML to: {self.task_config.jedi_yaml}") + + # link recentering executable + # placeholder, currently already done by the analysis parent class + + # copy fregrid executable + fregrid_copy = {'copy': [[os.path.join(self.task_config.EXECgfs, 
'fregrid'), os.path.join(self.task_config.DATA, 'fregrid.x')]]} + FileHandler(fregrid_copy).sync() + + @logit(logger) + def genWeights(self) -> None: + """Create a modified land_frac file for use by fregrid + to interpolate the snow background from det to ensres + + Parameters + ---------- + self : Analysis + Instance of the SnowEnsAnalysis object + """ + + chdir(self.task_config.DATA) + + # loop through tiles + for tile in range(1, self.task_config.ntiles + 1): + # open the restart and get the vegetation type + rst = nc.Dataset(f"./bkg/det/{to_fv3time(self.task_config.bkg_time)}.sfc_data.tile{tile}.nc") + vtype = rst.variables['vtype'][:] + rst.close() + # open the oro data and get the land fraction + oro = nc.Dataset(f"./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_oro_data.tile{tile}.nc") + land_frac = oro.variables['land_frac'][:] + oro.close() + # create an output file + ncfile = nc.Dataset(f"./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_interp_weight.tile{tile}.nc", mode='w', format='NETCDF4') + case_int = int(self.task_config.CASE[1:]) + lon = ncfile.createDimension('lon', case_int) + lat = ncfile.createDimension('lat', case_int) + lsm_frac_out = ncfile.createVariable('lsm_frac', np.float32, ('lon', 'lat')) + # set the land fraction to 0 on glaciers to not interpolate that snow + glacier = 15 + land_frac[np.where(vtype[0, ...] 
== glacier)] = 0 + lsm_frac_out[:] = land_frac + # write out and close the file + ncfile.close() + + @logit(logger) + def genMask(self) -> None: + """Create a mask for use by JEDI + to mask out snow increments on non-LSM gridpoints + + Parameters + ---------- + self : Analysis + Instance of the SnowEnsAnalysis object + """ + + chdir(self.task_config.DATA) + + # loop through tiles + for tile in range(1, self.task_config.ntiles + 1): + # open the restart and get the vegetation type + rst = nc.Dataset(f"./bkg/mem001/{to_fv3time(self.task_config.bkg_time)}.sfc_data.tile{tile}.nc", mode="r+") + vtype = rst.variables['vtype'][:] + slmsk = rst.variables['slmsk'][:] + # slmsk(Time, yaxis_1, xaxis_1) + # set the mask to 3 on glaciers + glacier = 15 + slmsk[np.where(vtype == glacier)] = 3 + # write out and close the file + rst.variables['slmsk'][:] = slmsk + rst.close() + + @logit(logger) + def regridDetBkg(self) -> None: + """Run fregrid to regrid the deterministic snow background + to the ensemble resolution + + Parameters + ---------- + self : Analysis + Instance of the SnowEnsAnalysis object + """ + + chdir(self.task_config.DATA) + + arg_list = [ + "--input_mosaic", f"./orog/det/{self.task_config.CASE}_mosaic.nc", + "--input_dir", f"./bkg/det/", + "--input_file", f"{to_fv3time(self.task_config.bkg_time)}.sfc_data", + "--scalar_field", f"snodl", + "--output_dir", f"./bkg/det_ensres/", + "--output_file", f"{to_fv3time(self.task_config.bkg_time)}.sfc_data", + "--output_mosaic", f"./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc", + "--interp_method", f"conserve_order1", + "--weight_file", f"./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_interp_weight", + "--weight_field", f"lsm_frac", + "--remap_file", f"./remap", + ] + fregrid_exe = os.path.join(self.task_config.DATA, 'fregrid.x') + exec_cmd = Executable(fregrid_exe) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd(*arg_list) + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + 
except Exception: + raise WorkflowException(f"An error occurred during execution of {exec_cmd}") + + @logit(logger) + def regridDetInc(self) -> None: + """Run fregrid to regrid the deterministic snow increment + to the ensemble resolution + + Parameters + ---------- + self : Analysis + Instance of the SnowEnsAnalysis object + """ + + chdir(self.task_config.DATA) + + arg_list = [ + "--input_mosaic", f"./orog/det/{self.task_config.CASE}_mosaic.nc", + "--input_dir", f"./inc/det/", + "--input_file", f"snowinc.{to_fv3time(self.task_config.bkg_time)}.sfc_data", + "--scalar_field", f"snodl", + "--output_dir", f"./inc/det_ensres/", + "--output_file", f"snowinc.{to_fv3time(self.task_config.bkg_time)}.sfc_data", + "--output_mosaic", f"./orog/ens/{self.task_config.CASE_ENS}_mosaic.nc", + "--interp_method", f"conserve_order1", + "--weight_file", f"./orog/det/{self.task_config.CASE}.mx{self.task_config.OCNRES}_interp_weight", + "--weight_field", f"lsm_frac", + "--remap_file", f"./remap", + ] + fregrid_exe = os.path.join(self.task_config.DATA, 'fregrid.x') + exec_cmd = Executable(fregrid_exe) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd(*arg_list) + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occurred during execution of {exec_cmd}") + + @logit(logger) + def recenterEns(self) -> None: + """Run recentering code to create an ensemble of snow increments + based on the deterministic increment, and the difference + between the deterministic and ensemble mean forecast + + Parameters + ---------- + self : Analysis + Instance of the SnowEnsAnalysis object + """ + logger.info("Running recentering code") + exec_cmd = Executable(self.task_config.APRUN_ESNOWRECEN) + exec_name = os.path.join(self.task_config.DATA, 'gdasapp_land_ensrecenter.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg(self.task_config.jedi_yaml) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + 
except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occurred during execution of {exec_cmd}") + + @logit(logger) + def finalize(self) -> None: + """Performs closing actions of the snow ensemble analysis task + This method: + - copies the ensemble snow analyses to the proper locations + - copies the ensemble mean increment to COM + + Parameters + ---------- + self : Analysis + Instance of the SnowEnsAnalysis object + """ + # save files to COM + logger.info(f"Copying files described in {self.task_config.SNOW_ENS_FINALIZE_TMPL}") + snow_final_list = parse_j2yaml(self.task_config.SNOW_ENS_FINALIZE_TMPL, self.task_config) + FileHandler(snow_final_list).sync() + + @logit(logger) + def addEnsIncrements(self) -> None: + """Loop through all ensemble members and apply increment to create + a surface analysis for snow + + Parameters + ---------- + self : Analysis + Instance of the SnowEnsAnalysis object + """ + + bkg_times = [] + # no matter what, we want to process the center of the window + bkg_times.append(self.task_config.current_cycle) + # if DOIAU, we need to copy the increment to be valid at the center of the window + # and compute the analysis there to restart the model + if self.task_config.DOIAU: + logger.info("Copying increments to beginning of window") + template_in = f'snowinc.{to_fv3time(self.task_config.SNOW_WINDOW_BEGIN)}.sfc_data.tile{{tilenum}}.nc' + template_out = f'snowinc.{to_fv3time(self.task_config.current_cycle)}.sfc_data.tile{{tilenum}}.nc' + inclist = [] + for itile in range(1, 7): + filename_in = template_in.format(tilenum=itile) + filename_out = template_out.format(tilenum=itile) + src = os.path.join(self.task_config.DATA, 'inc', 'ensmean', filename_in) + dest = os.path.join(self.task_config.DATA, 'inc', 'ensmean', filename_out) + inclist.append([src, dest]) + FileHandler({'copy': inclist}).sync() + # if running with IAU, we also need an analysis at the beginning of the window + 
bkg_times.append(self.task_config.SNOW_WINDOW_BEGIN) + + for bkg_time in bkg_times: + for mem in range(1, self.task_config.NMEM_ENS + 1): + # for now, just looping serially, should parallelize this eventually + logger.info(f"Now applying increment to member mem{mem:03}") + logger.info(f'{os.path.join(self.task_config.DATA, "anl", f"mem{mem:03}")}') + memdict = AttrDict( + { + 'HOMEgfs': self.task_config.HOMEgfs, + 'DATA': os.path.join(self.task_config.DATA, "anl", f"mem{mem:03}"), + 'DATAROOT': self.task_config.DATA, + 'current_cycle': bkg_time, + 'CASE_ENS': self.task_config.CASE_ENS, + 'OCNRES': self.task_config.OCNRES, + 'ntiles': self.task_config.ntiles, + 'ENS_APPLY_INCR_NML_TMPL': self.task_config.ENS_APPLY_INCR_NML_TMPL, + 'APPLY_INCR_EXE': self.task_config.APPLY_INCR_EXE, + 'APRUN_APPLY_INCR': self.task_config.APRUN_APPLY_INCR, + 'MYMEM': f"{mem:03}", + } + ) + self.add_increments(memdict) + + @staticmethod + @logit(logger) + def get_bkg_dict(config: Dict) -> Dict[str, List[str]]: + """Compile a dictionary of model background files to copy + + This method constructs a dictionary of FV3 RESTART files (coupler, sfc_data) + that are needed for global snow DA and returns said dictionary for use by the FileHandler class. 
+ + Parameters + ---------- + config: Dict + Dictionary of key-value pairs needed in this method + Should contain the following keys: + COMIN_ATMOS_RESTART_PREV + DATA + current_cycle + ntiles + + Returns + ---------- + bkg_dict: Dict + a dictionary containing the list of model background files to copy for FileHandler + """ + + bkg_dict = { + 'mkdir': [], + 'copy': [], + } + return bkg_dict + + @staticmethod + @logit(logger) + def add_increments(config: Dict) -> None: + """Executes the program "apply_incr.exe" to create analysis "sfc_data" files by adding increments to backgrounds + + Parameters + ---------- + config: Dict + Dictionary of key-value pairs needed in this method + Should contain the following keys: + HOMEgfs + DATA + DATAROOT + current_cycle + CASE_ENS + OCNRES + ntiles + ENS_APPLY_INCR_NML_TMPL + APPLY_INCR_EXE + APRUN_APPLY_INCR + + Raises + ------ + OSError + Failure due to OS issues + WorkflowException + All other exceptions + """ + os.chdir(config.DATA) + + logger.info("Create namelist for APPLY_INCR_EXE") + nml_template = config.ENS_APPLY_INCR_NML_TMPL + nml_data = Jinja(nml_template, config).render + logger.debug(f"apply_incr_nml:\n{nml_data}") + + nml_file = os.path.join(config.DATA, "apply_incr_nml") + with open(nml_file, "w") as fho: + fho.write(nml_data) + + logger.info("Link APPLY_INCR_EXE into DATA/") + exe_src = config.APPLY_INCR_EXE + exe_dest = os.path.join(config.DATA, os.path.basename(exe_src)) + if os.path.exists(exe_dest): + rm_p(exe_dest) + os.symlink(exe_src, exe_dest) + + # execute APPLY_INCR_EXE to create analysis files + exe = Executable(config.APRUN_APPLY_INCR) + exe.add_default_arg(os.path.join(config.DATA, os.path.basename(exe_src))) + logger.info(f"Executing {exe}") + try: + exe() + except OSError: + raise OSError(f"Failed to execute {exe}") + except Exception: + raise WorkflowException(f"An error occurred during execution of {exe}") + + def get_obs_dict(self) -> Dict[str, Any]: + obs_dict = { + 'mkdir': [], + 'copy': [], + } + 
return obs_dict + + def get_bias_dict(self) -> Dict[str, Any]: + bias_dict = { + 'mkdir': [], + 'copy': [], + } + return bias_dict diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index bde3be6600..7a1736d85a 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -113,6 +113,8 @@ def _get_app_configs(self): if self.do_jedisnowda: configs += ['prepsnowobs', 'snowanl'] + if self.do_hybvar: + configs += ['esnowrecen'] if self.do_mos: configs += ['mos_stn_prep', 'mos_grd_prep', 'mos_ext_stn_prep', 'mos_ext_grd_prep', @@ -167,6 +169,8 @@ def get_task_names(self): else: hybrid_tasks += ['eobs', 'eupd', 'echgres'] hybrid_tasks += ['ediag'] if self.lobsdiag_forenkf else ['eomg'] + if self.do_jedisnowda: + hybrid_tasks += ['esnowrecen'] hybrid_after_eupd_tasks += ['stage_ic', 'ecen', 'esfc', 'efcs', 'epos', 'earc', 'cleanup'] # Collect all "gdas" cycle tasks @@ -297,6 +301,7 @@ def get_task_names(self): if self.do_hybvar and 'gfs' in self.eupd_runs: enkfgfs_tasks = hybrid_tasks + hybrid_after_eupd_tasks enkfgfs_tasks.remove("echgres") + enkfgfs_tasks.remove("esnowrecen") tasks['enkfgfs'] = enkfgfs_tasks return tasks diff --git a/workflow/hosts.py b/workflow/hosts.py index 6244cf564e..34ea067ade 100644 --- a/workflow/hosts.py +++ b/workflow/hosts.py @@ -17,7 +17,7 @@ class Host: SUPPORTED_HOSTS = ['HERA', 'ORION', 'JET', 'HERCULES', 'WCOSS2', 'S4', 'CONTAINER', 'GAEA', - 'AWSPW', 'AZUREPW'] + 'AWSPW', 'AZUREPW', 'GOOGLEPW'] def __init__(self, host=None): diff --git a/workflow/hosts/googlepw.yaml b/workflow/hosts/googlepw.yaml new file mode 100644 index 0000000000..38180dd750 --- /dev/null +++ b/workflow/hosts/googlepw.yaml @@ -0,0 +1,26 @@ +BASE_GIT: '' #TODO: This does not yet exist. +DMPDIR: '' # TODO: This does not yet exist. +PACKAGEROOT: '' #TODO: This does not yet exist. +COMINsyn: '' #TODO: This does not yet exist. 
+HOMEDIR: '/contrib/${USER}' +STMP: '/lustre/${USER}/stmp/' +PTMP: '/lustre/${USER}/ptmp/' +NOSCRUB: '${HOMEDIR}' +ACCOUNT: '${USER}' +SCHEDULER: slurm +QUEUE: batch +QUEUE_SERVICE: batch +PARTITION_BATCH: compute +PARTITION_SERVICE: compute +RESERVATION: '' +CLUSTERS: '' +CHGRP_RSTPROD: 'YES' +CHGRP_CMD: 'chgrp rstprod' # TODO: This is not yet supported. +HPSSARCH: 'NO' +HPSS_PROJECT: emc-global #TODO: See `ATARDIR` below. +BASE_IC: '/bucket/global-workflow-shared-data/ICSDIR/prototype_ICs' +LOCALARCH: 'NO' +ATARDIR: '' # TODO: This will not yet work from GOOGLE. +MAKE_NSSTBUFR: 'NO' +MAKE_ACFTBUFR: 'NO' +SUPPORTED_RESOLUTIONS: ['C48', 'C96'] # TODO: Test and support all cubed-sphere resolutions. diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 3439e537ad..f60ac9a549 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -576,6 +576,33 @@ def snowanl(self): task = rocoto.create_task(task_dict) return task + def esnowrecen(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}prepsnowobs'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}snowanl'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'metatask', 'name': f'{self.run}epmn', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('esnowrecen') + task_name = f'{self.run}esnowrecen' + task_dict = {'task_name': task_name, + 'resources': resources, + 'dependency': dependencies, + 'envars': self.envars, + 'cycledef': self.run.replace('enkf', ''), + 'command': f'{self.HOMEgfs}/jobs/rocoto/esnowrecen.sh', + 'job_name': f'{self.pslot}_{task_name}_@H', + 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', + 'maxtries': '&MAXTRIES;' + } + + task = rocoto.create_task(task_dict) + 
return task + def prepoceanobs(self): ocean_hist_path = self._template_to_rocoto_cycstring(self._base["COM_OCEAN_HISTORY_TMPL"], {'RUN': 'gdas'}) @@ -2582,6 +2609,9 @@ def esfc(self): else: dep_dict = {'type': 'task', 'name': f'{self.run}eupd'} deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_jedisnowda: + dep_dict = {'type': 'task', 'name': f'{self.run}esnowrecen'} + deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) resources = self.get_resource('esfc') diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index a55fc9bd2a..907b8ea804 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -20,7 +20,7 @@ class Tasks: 'eobs', 'eomg', 'epos', 'esfc', 'eupd', 'atmensanlinit', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal', 'aeroanlinit', 'aeroanlrun', 'aeroanlfinal', - 'prepsnowobs', 'snowanl', + 'prepsnowobs', 'snowanl', 'esnowrecen', 'fcst', 'atmanlupp', 'atmanlprod', 'atmupp', 'goesupp', 'atmos_prod', 'ocean_prod', 'ice_prod', diff --git a/workflow/rocoto/workflow_xml.py b/workflow/rocoto/workflow_xml.py index 92aebb1818..3ad7c4bd91 100644 --- a/workflow/rocoto/workflow_xml.py +++ b/workflow/rocoto/workflow_xml.py @@ -163,8 +163,7 @@ def _write_crontab(self, crontab_file: str = None, cronint: int = 5) -> None: # AWS need 'SHELL', and 'BASH_ENV' defined, or, the crontab job won't start. if os.environ.get('PW_CSP', None) in ['aws', 'azure', 'google']: strings.extend([f'SHELL="/bin/bash"', - f'BASH_ENV="/etc/bashrc"' - ]) + f'BASH_ENV="/etc/bashrc"']) strings.extend([f'{cronintstr} {rocotorunstr}', '#################################################################', ''])