diff --git a/.gitignore b/.gitignore
index 2935804ac4..c9e384aae0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -43,8 +43,12 @@ fix/wave
# Ignore parm file symlinks
#--------------------------
-parm/config/config.base
-parm/gldas
+parm/gdas/aero
+parm/gdas/atm
+parm/gdas/io
+parm/gdas/ioda
+parm/gdas/snow
+parm/gdas/soca
parm/monitor
parm/post/AEROSOL_LUTS.dat
parm/post/nam_micro_lookup.dat
diff --git a/.gitmodules b/.gitmodules
index 3eb26fb0fe..5c9e569243 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,28 +1,28 @@
[submodule "sorc/ufs_model.fd"]
- path = sorc/ufs_model.fd
- url = https://github.com/ufs-community/ufs-weather-model
- ignore = dirty
+ path = sorc/ufs_model.fd
+ url = https://github.com/ufs-community/ufs-weather-model
+ ignore = dirty
[submodule "sorc/wxflow"]
- path = sorc/wxflow
- url = https://github.com/NOAA-EMC/wxflow
+ path = sorc/wxflow
+ url = https://github.com/NOAA-EMC/wxflow
[submodule "sorc/gfs_utils.fd"]
- path = sorc/gfs_utils.fd
- url = https://github.com/NOAA-EMC/gfs-utils
+ path = sorc/gfs_utils.fd
+ url = https://github.com/NOAA-EMC/gfs-utils
[submodule "sorc/ufs_utils.fd"]
- path = sorc/ufs_utils.fd
- url = https://github.com/ufs-community/UFS_UTILS.git
+ path = sorc/ufs_utils.fd
+ url = https://github.com/ufs-community/UFS_UTILS.git
[submodule "sorc/verif-global.fd"]
- path = sorc/verif-global.fd
- url = https://github.com/NOAA-EMC/EMC_verif-global.git
+ path = sorc/verif-global.fd
+ url = https://github.com/NOAA-EMC/EMC_verif-global.git
[submodule "sorc/gsi_enkf.fd"]
- path = sorc/gsi_enkf.fd
- url = https://github.com/NOAA-EMC/GSI.git
+ path = sorc/gsi_enkf.fd
+ url = https://github.com/NOAA-EMC/GSI.git
[submodule "sorc/gdas.cd"]
- path = sorc/gdas.cd
- url = https://github.com/NOAA-EMC/GDASApp.git
+ path = sorc/gdas.cd
+ url = https://github.com/NOAA-EMC/GDASApp.git
[submodule "sorc/gsi_utils.fd"]
- path = sorc/gsi_utils.fd
- url = https://github.com/NOAA-EMC/GSI-Utils.git
+ path = sorc/gsi_utils.fd
+ url = https://github.com/NOAA-EMC/GSI-Utils.git
[submodule "sorc/gsi_monitor.fd"]
- path = sorc/gsi_monitor.fd
- url = https://github.com/NOAA-EMC/GSI-Monitor.git
+ path = sorc/gsi_monitor.fd
+ url = https://github.com/NOAA-EMC/GSI-Monitor.git
diff --git a/Jenkinsfile b/ci/Jenkinsfile
similarity index 81%
rename from Jenkinsfile
rename to ci/Jenkinsfile
index 52175c2b50..34535ed608 100644
--- a/Jenkinsfile
+++ b/ci/Jenkinsfile
@@ -50,7 +50,7 @@ pipeline {
pullRequest.removeLabel("CI-${Machine}-Ready")
}
}
- pullRequest.comment("Building and running on ${Machine} in directory ${HOME}")
+ echo "Building and running on ${Machine} in directory ${HOME}"
}
}
}
@@ -76,7 +76,6 @@ pipeline {
ws(HOMEgfs) {
if (fileExists("${HOMEgfs}/sorc/BUILT_semaphor")) { // if the system is already built, skip the build in the case of re-runs
sh(script: "cat ${HOMEgfs}/sorc/BUILT_semaphor", returnStdout: true).trim() // TODO: and user configurable control to manage build semphore
- pullRequest.comment("Cloned PR already built (or build skipped) on ${machine} in directory ${HOMEgfs}
Still doing a checkout to get the latest changes")
checkout scm
dir('sorc') {
sh(script: './link_workflow.sh')
@@ -93,10 +92,14 @@ pipeline {
}
}
if (env.CHANGE_ID && system == 'gfs') {
- if (pullRequest.labels.any { value -> value.matches("CI-${Machine}-Building") }) {
- pullRequest.removeLabel("CI-${Machine}-Building")
- }
- pullRequest.addLabel("CI-${Machine}-Running")
+ try {
+ if (pullRequest.labels.any { value -> value.matches("CI-${Machine}-Building") }) {
+ pullRequest.removeLabel("CI-${Machine}-Building")
+ }
+ pullRequest.addLabel("CI-${Machine}-Running")
+ } catch (Exception e) {
+                        echo "Failed to update label from Building to Running: ${e.getMessage()}"
+ }
}
if (system == 'gfs') {
caseList = sh(script: "${HOMEgfs}/ci/scripts/utils/get_host_case_list.py ${machine}", returnStdout: true).trim().split()
@@ -146,11 +149,9 @@ pipeline {
HOMEgfs = "${HOME}/gfs" // common HOMEgfs is used to launch the scripts that run the experiments
ws(HOMEgfs) {
pslot = sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh get_pslot ${HOME}/RUNTESTS ${Case}", returnStdout: true).trim()
- pullRequest.comment("**Running** experiment: ${Case} on ${Machine}
With the experiment in directory:
`${HOME}/RUNTESTS/${pslot}`")
try {
sh(script: "${HOMEgfs}/ci/scripts/run-check_ci.sh ${HOME} ${pslot}")
} catch (Exception e) {
- pullRequest.comment("**FAILURE** running experiment: ${Case} on ${Machine}")
sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh cancel_all_batch_jobs ${HOME}/RUNTESTS")
ws(HOME) {
if (fileExists('RUNTESTS/error.logs')) {
@@ -165,7 +166,6 @@ pipeline {
error("Failed to run experiments ${Case} on ${Machine}")
}
}
- pullRequest.comment("**SUCCESS** running experiment: ${Case} on ${Machine}")
}
}
}
@@ -179,10 +179,14 @@ pipeline {
always {
script {
if(env.CHANGE_ID) {
- for (label in pullRequest.labels) {
- if (label.contains("${Machine}")) {
- pullRequest.removeLabel(label)
+ try {
+ for (label in pullRequest.labels) {
+ if (label.contains("${Machine}")) {
+ pullRequest.removeLabel(label)
+ }
}
+ } catch (Exception e) {
+ echo "Failed to remove labels: ${e.getMessage()}"
}
}
}
@@ -190,18 +194,26 @@ pipeline {
success {
script {
if(env.CHANGE_ID) {
- pullRequest.addLabel("CI-${Machine}-Passed")
- def timestamp = new Date().format('MM dd HH:mm:ss', TimeZone.getTimeZone('America/New_York'))
- pullRequest.comment("**CI SUCCESS** ${Machine} at ${timestamp}\n\nBuilt and ran in directory `${HOME}`")
+ try {
+ pullRequest.addLabel("CI-${Machine}-Passed")
+ def timestamp = new Date().format('MM dd HH:mm:ss', TimeZone.getTimeZone('America/New_York'))
+ pullRequest.comment("**CI SUCCESS** ${Machine} at ${timestamp}\n\nBuilt and ran in directory `${HOME}`")
+ } catch (Exception e) {
+ echo "Failed to add success label or comment: ${e.getMessage()}"
+ }
}
}
}
failure {
script {
if(env.CHANGE_ID) {
- pullRequest.addLabel("CI-${Machine}-Failed")
- def timestamp = new Date().format('MM dd HH:mm:ss', TimeZone.getTimeZone('America/New_York'))
- pullRequest.comment("**CI FAILED** ${Machine} at ${timestamp}
Built and ran in directory `${HOME}`")
+ try {
+ pullRequest.addLabel("CI-${Machine}-Failed")
+ def timestamp = new Date().format('MM dd HH:mm:ss', TimeZone.getTimeZone('America/New_York'))
+ pullRequest.comment("**CI FAILED** ${Machine} at ${timestamp}
Built and ran in directory `${HOME}`")
+ } catch (Exception e) {
+ echo "Failed to add failure label or comment: ${e.getMessage()}"
+ }
}
}
}
diff --git a/parm/config/gefs/config.base.emc.dyn b/parm/config/gefs/config.base
similarity index 97%
rename from parm/config/gefs/config.base.emc.dyn
rename to parm/config/gefs/config.base
index 0062aed163..69d4ed94a4 100644
--- a/parm/config/gefs/config.base.emc.dyn
+++ b/parm/config/gefs/config.base
@@ -110,6 +110,7 @@ export RUN="gefs" # RUN is defined in the job-card (ecf); CDUMP is used at EMC
# Get all the COM path templates
source "${EXPDIR}/config.com"
+# shellcheck disable=SC2016
export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'}
export LOGSCRIPT=${LOGSCRIPT:-""}
#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"}
@@ -210,7 +211,7 @@ export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4:
export FHMIN_GFS=0
export FHMIN=${FHMIN_GFS}
export FHMAX_GFS=@FHMAX_GFS@
-export FHOUT_GFS=6
+export FHOUT_GFS=6
export FHMAX_HF_GFS=0
export FHOUT_HF_GFS=1
export FHOUT_OCNICE_GFS=6
@@ -259,6 +260,10 @@ export NMEM_ENS=@NMEM_ENS@
export ENSMEM=${ENSMEM:-"000"}
export MEMDIR="mem${ENSMEM}"
+# initialize ocean ensemble members with perturbations
+# if true, only occurs for members greater than zero
+export OCN_ENS_PERTURB_FILES=false
+
export DOIAU="NO" # While we are not doing IAU, we may want to warm start w/ IAU in the future
# Check if cycle is cold starting
if [[ "${EXP_WARM_START}" = ".false." ]]; then
diff --git a/parm/config/gefs/config.stage_ic b/parm/config/gefs/config.stage_ic
index e2bb0af2b8..b332ee1826 100644
--- a/parm/config/gefs/config.stage_ic
+++ b/parm/config/gefs/config.stage_ic
@@ -8,6 +8,12 @@ echo "BEGIN: config.stage_ic"
source "${EXPDIR}/config.resources" stage_ic
case "${CASE}" in
+ "C384")
+ export CPL_ATMIC=""
+ export CPL_ICEIC=""
+ export CPL_OCNIC=""
+ export CPL_WAVIC=""
+ ;;
"C48")
export CPL_ATMIC="gefs_test"
export CPL_ICEIC="gefs_test"
diff --git a/parm/config/gfs/config.base.emc.dyn b/parm/config/gfs/config.base
similarity index 99%
rename from parm/config/gfs/config.base.emc.dyn
rename to parm/config/gfs/config.base
index 32284929c9..205153313e 100644
--- a/parm/config/gfs/config.base.emc.dyn
+++ b/parm/config/gfs/config.base
@@ -132,6 +132,7 @@ export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUM
# Get all the COM path templates
source "${EXPDIR}/config.com"
+# shellcheck disable=SC2016
export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'}
export LOGSCRIPT=${LOGSCRIPT:-""}
#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"}
@@ -247,7 +248,7 @@ export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4:
# GFS output and frequency
export FHMIN_GFS=0
export FHMAX_GFS=@FHMAX_GFS@
-export FHOUT_GFS=3
+export FHOUT_GFS=3
export FHMAX_HF_GFS=0
export FHOUT_HF_GFS=1
export FHOUT_OCNICE_GFS=6
diff --git a/parm/config/gfs/config.prepsnowobs b/parm/config/gfs/config.prepsnowobs
index 64eb8ba896..e2bfdd1905 100644
--- a/parm/config/gfs/config.prepsnowobs
+++ b/parm/config/gfs/config.prepsnowobs
@@ -8,11 +8,14 @@ echo "BEGIN: config.prepsnowobs"
# Get task specific resources
. "${EXPDIR}/config.resources" prepsnowobs
-export GTS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/snow/prep/prep_gts.yaml"
+export GTS_OBS_LIST="${HOMEgfs}/parm/gdas/snow/prep/prep_gts.yaml.j2"
+export IMS_OBS_LIST="${HOMEgfs}/parm/gdas/snow/prep/prep_ims.yaml.j2"
+
export BUFR2IODAX="${HOMEgfs}/exec/bufr2ioda.x"
-export FIMS_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/snow/prep/fims.nml.j2"
-export IMS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/snow/prep/prep_ims.yaml"
+
export CALCFIMSEXE="${HOMEgfs}/exec/calcfIMS.exe"
+export FIMS_NML_TMPL="${HOMEgfs}/parm/gdas/snow/prep/fims.nml.j2"
+
export IMS2IODACONV="${HOMEgfs}/ush/imsfv3_scf2ioda.py"
echo "END: config.prepsnowobs"
diff --git a/parm/config/gfs/config.snowanl b/parm/config/gfs/config.snowanl
index 3303ce402b..d8554570d3 100644
--- a/parm/config/gfs/config.snowanl
+++ b/parm/config/gfs/config.snowanl
@@ -6,19 +6,13 @@
echo "BEGIN: config.snowanl"
# Get task specific resources
-. "${EXPDIR}/config.resources" snowanl
+source "${EXPDIR}/config.resources" snowanl
-obs_list_name=gdas_snow_gts_only.yaml
-if [[ "${cyc}" = "18" ]]; then
- obs_list_name=gdas_snow_prototype.yaml
-fi
-
-export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/snow/obs/config/
-export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/snow/obs/lists/${obs_list_name}
+export OBS_LIST="${HOMEgfs}/parm/gdas/snow/obs/lists/gdas_snow.yaml.j2"
# Name of the JEDI executable and its yaml template
export JEDIEXE="${HOMEgfs}/exec/fv3jedi_letkf.x"
-export JEDIYAML="${HOMEgfs}/sorc/gdas.cd/parm/snow/letkfoi/letkfoi.yaml"
+export JEDIYAML="${HOMEgfs}/parm/gdas/snow/letkfoi/letkfoi.yaml.j2"
# Ensemble member properties
export SNOWDEPTHVAR="snodl"
@@ -26,7 +20,7 @@ export BESTDDEV="30." # Background Error Std. Dev. for LETKFOI
# Name of the executable that applies increment to bkg and its namelist template
export APPLY_INCR_EXE="${HOMEgfs}/exec/apply_incr.exe"
-export APPLY_INCR_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/snow/letkfoi/apply_incr_nml.j2"
+export APPLY_INCR_NML_TMPL="${HOMEgfs}/parm/gdas/snow/letkfoi/apply_incr_nml.j2"
export io_layout_x=@IO_LAYOUT_X@
export io_layout_y=@IO_LAYOUT_Y@
diff --git a/parm/gdas/snow_jedi_fix.yaml b/parm/gdas/snow_jedi_fix.yaml
deleted file mode 100644
index 3d1ca79f33..0000000000
--- a/parm/gdas/snow_jedi_fix.yaml
+++ /dev/null
@@ -1,7 +0,0 @@
-mkdir:
-- $(DATA)/fv3jedi
-copy:
-- [$(HOMEgfs)/fix/gdas/fv3jedi/fv3files/akbk$(npz).nc4, $(DATA)/fv3jedi/akbk.nc4]
-- [$(HOMEgfs)/fix/gdas/fv3jedi/fv3files/fmsmpp.nml, $(DATA)/fv3jedi/fmsmpp.nml]
-- [$(HOMEgfs)/fix/gdas/fv3jedi/fv3files/field_table_gfdl, $(DATA)/fv3jedi/field_table]
-- [$(HOMEgfs)/sorc/gdas.cd/parm/io/fv3jedi_fieldmetadata_restart.yaml, $(DATA)/fv3jedi/fv3jedi_fieldmetadata_restart.yaml]
diff --git a/parm/gdas/snow_jedi_fix.yaml.j2 b/parm/gdas/snow_jedi_fix.yaml.j2
new file mode 100644
index 0000000000..4d820a82ba
--- /dev/null
+++ b/parm/gdas/snow_jedi_fix.yaml.j2
@@ -0,0 +1,7 @@
+mkdir:
+- '{{ DATA }}/fv3jedi'
+copy:
+- ['{{ HOMEgfs }}/fix/gdas/fv3jedi/fv3files/akbk{{ npz }}.nc4', '{{ DATA }}/fv3jedi/akbk.nc4']
+- ['{{ HOMEgfs }}/fix/gdas/fv3jedi/fv3files/fmsmpp.nml', '{{ DATA }}/fv3jedi/fmsmpp.nml']
+- ['{{ HOMEgfs }}/fix/gdas/fv3jedi/fv3files/field_table_gfdl', '{{ DATA }}/fv3jedi/field_table']
+- ['{{ HOMEgfs }}/sorc/gdas.cd/parm/io/fv3jedi_fieldmetadata_restart.yaml', '{{ DATA }}/fv3jedi/fv3jedi_fieldmetadata_restart.yaml']
diff --git a/scripts/exglobal_stage_ic.sh b/scripts/exglobal_stage_ic.sh
index 58b37f3114..c12eaa84e0 100755
--- a/scripts/exglobal_stage_ic.sh
+++ b/scripts/exglobal_stage_ic.sh
@@ -104,6 +104,17 @@ for MEMDIR in "${MEMDIR_ARRAY[@]}"; do
;;
esac
+ # Ocean Perturbation Files
+    # Force base-10 with a leading zero so the test works even if the string is empty (avoids octal errors for members like 008)
+    if (( 10#0${MEMDIR:3} > 0 )) && [[ "${OCN_ENS_PERTURB_FILES:-false}" == "true" ]]; then
+ src="${BASE_CPLIC}/${CPL_OCNIC:-}/${PDY}${cyc}/${MEMDIR}/ocean/${PDY}.${cyc}0000.mom6_increment.nc"
+ tgt="${COM_OCEAN_RESTART_PREV}/${PDY}.${cyc}0000.mom6_increment.nc"
+ ${NCP} "${src}" "${tgt}"
+ rc=${?}
+ ((rc != 0)) && error_message "${src}" "${tgt}" "${rc}"
+ err=$((err + rc))
+ fi
+
# TODO: Do mediator restarts exists in a ATMW configuration?
# TODO: No mediator is presumably involved in an ATMA configuration
if [[ ${EXP_WARM_START:-".false."} = ".true." ]]; then
diff --git a/sorc/build_all.sh b/sorc/build_all.sh
index 261c245142..3de13b7b60 100755
--- a/sorc/build_all.sh
+++ b/sorc/build_all.sh
@@ -16,9 +16,11 @@ function _usage() {
Builds all of the global-workflow components by calling the individual build
scripts in sequence.
-Usage: ${BASH_SOURCE[0]} [-a UFS_app][-c build_config][-h][-j n][-v][-w]
+Usage: ${BASH_SOURCE[0]} [-a UFS_app][-c build_config][-d][-h][-j n][-v][-w]
-a UFS_app:
Build a specific UFS app instead of the default
+ -d:
+ Build in debug mode
-g:
Build GSI
-h:
@@ -29,26 +31,29 @@ Usage: ${BASH_SOURCE[0]} [-a UFS_app][-c build_config][-h][-j n][-v][-w]
Build UFS-DA
-v:
Execute all build scripts with -v option to turn on verbose where supported
- -w:
- Use unstructured wave grid
+ -w:
+ Use unstructured wave grid
EOF
exit 1
}
-script_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")" &> /dev/null && pwd)
-cd "${script_dir}" || exit 1
+# shellcheck disable=SC2155
+readonly HOMEgfs=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}" )" )/.." && pwd -P)
+cd "${HOMEgfs}/sorc" || exit 1
_build_ufs_opt=""
_build_ufsda="NO"
_build_gsi="NO"
+_build_debug=""
_verbose_opt=""
_wave_unst=""
_build_job_max=20
# Reset option counter in case this script is sourced
OPTIND=1
-while getopts ":a:ghj:uvw" option; do
+while getopts ":a:dghj:uvw" option; do
case "${option}" in
a) _build_ufs_opt+="-a ${OPTARG} ";;
+ d) _build_debug="-d" ;;
g) _build_gsi="YES" ;;
h) _usage;;
j) _build_job_max="${OPTARG} ";;
@@ -68,24 +73,24 @@ done
shift $((OPTIND-1))
-logs_dir="${script_dir}/logs"
+logs_dir="${HOMEgfs}/sorc/logs"
if [[ ! -d "${logs_dir}" ]]; then
echo "Creating logs folder"
- mkdir "${logs_dir}" || exit 1
+ mkdir -p "${logs_dir}" || exit 1
fi
# Check final exec folder exists
-if [[ ! -d "../exec" ]]; then
- echo "Creating ../exec folder"
- mkdir ../exec
+if [[ ! -d "${HOMEgfs}/exec" ]]; then
+ echo "Creating ${HOMEgfs}/exec folder"
+ mkdir -p "${HOMEgfs}/exec"
fi
#------------------------------------
# GET MACHINE
#------------------------------------
export COMPILER="intel"
-source gfs_utils.fd/ush/detect_machine.sh
-source gfs_utils.fd/ush/module-setup.sh
+source "${HOMEgfs}/ush/detect_machine.sh"
+source "${HOMEgfs}/ush/module-setup.sh"
if [[ -z "${MACHINE_ID}" ]]; then
echo "FATAL: Unable to determine target machine"
exit 1
@@ -117,19 +122,19 @@ declare -A build_opts
big_jobs=0
build_jobs["ufs"]=8
big_jobs=$((big_jobs+1))
-build_opts["ufs"]="${_wave_unst} ${_verbose_opt} ${_build_ufs_opt}"
+build_opts["ufs"]="${_wave_unst} ${_verbose_opt} ${_build_ufs_opt} ${_build_debug}"
build_jobs["upp"]=2
-build_opts["upp"]=""
+build_opts["upp"]="${_build_debug}"
build_jobs["ufs_utils"]=2
-build_opts["ufs_utils"]="${_verbose_opt}"
+build_opts["ufs_utils"]="${_verbose_opt} ${_build_debug}"
build_jobs["gfs_utils"]=1
-build_opts["gfs_utils"]="${_verbose_opt}"
+build_opts["gfs_utils"]="${_verbose_opt} ${_build_debug}"
build_jobs["ww3prepost"]=2
-build_opts["ww3prepost"]="${_wave_unst} ${_verbose_opt} ${_build_ufs_opt}"
+build_opts["ww3prepost"]="${_wave_unst} ${_verbose_opt} ${_build_ufs_opt} ${_build_debug}"
# Optional DA builds
if [[ "${_build_ufsda}" == "YES" ]]; then
@@ -138,21 +143,21 @@ if [[ "${_build_ufsda}" == "YES" ]]; then
else
build_jobs["gdas"]=8
big_jobs=$((big_jobs+1))
- build_opts["gdas"]="${_verbose_opt}"
+ build_opts["gdas"]="${_verbose_opt} ${_build_debug}"
fi
fi
if [[ "${_build_gsi}" == "YES" ]]; then
build_jobs["gsi_enkf"]=8
- build_opts["gsi_enkf"]="${_verbose_opt}"
+ build_opts["gsi_enkf"]="${_verbose_opt} ${_build_debug}"
fi
if [[ "${_build_gsi}" == "YES" || "${_build_ufsda}" == "YES" ]] ; then
build_jobs["gsi_utils"]=1
- build_opts["gsi_utils"]="${_verbose_opt}"
+ build_opts["gsi_utils"]="${_verbose_opt} ${_build_debug}"
if [[ "${MACHINE_ID}" == "hercules" ]]; then
echo "NOTE: The GSI Monitor is not supported on Hercules. Disabling build."
else
build_jobs["gsi_monitor"]=1
- build_opts["gsi_monitor"]="${_verbose_opt}"
+ build_opts["gsi_monitor"]="${_verbose_opt} ${_build_debug}"
fi
fi
@@ -196,13 +201,10 @@ while [[ ${builds_started} -lt ${#build_jobs[@]} ]]; do
if [[ -n "${build_jobs[${build}]+0}" && -z "${build_ids[${build}]+0}" ]]; then
# Do we have enough processors to run it?
if [[ ${_build_job_max} -ge $(( build_jobs[build] + procs_in_use )) ]]; then
- if [[ "${build}" != "upp" ]]; then
- "./build_${build}.sh" -j "${build_jobs[${build}]}" "${build_opts[${build}]:-}" > \
- "${logs_dir}/build_${build}.log" 2>&1 &
- else
- "./build_${build}.sh" "${build_opts[${build}]}" > \
- "${logs_dir}/build_${build}.log" 2>&1 &
- fi
+ # double-quoting build_opts here will not work since it is a string of options
+ #shellcheck disable=SC2086
+ "./build_${build}.sh" ${build_opts[${build}]:-} -j "${build_jobs[${build}]}" > \
+ "${logs_dir}/build_${build}.log" 2>&1 &
build_ids["${build}"]=$!
echo "Starting build_${build}.sh"
procs_in_use=$(( procs_in_use + build_jobs[${build}] ))
diff --git a/sorc/build_gdas.sh b/sorc/build_gdas.sh
index b1a17c33dd..43c503ab4d 100755
--- a/sorc/build_gdas.sh
+++ b/sorc/build_gdas.sh
@@ -2,11 +2,12 @@
set -eux
OPTIND=1
+_opts="-f " # forces a clean build
while getopts ":j:dv" option; do
case "${option}" in
- d) export BUILD_TYPE="DEBUG";;
- j) export BUILD_JOBS=${OPTARG};;
- v) export BUILD_VERBOSE="YES";;
+ d) _opts+="-c -DCMAKE_BUILD_TYPE=Debug " ;;
+ j) BUILD_JOBS=${OPTARG};;
+ v) _opts+="-v ";;
:)
echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
usage
@@ -19,12 +20,10 @@ while getopts ":j:dv" option; do
done
shift $((OPTIND-1))
-# TODO: GDASApp does not presently handle BUILD_TYPE
-
-BUILD_TYPE=${BUILD_TYPE:-"Release"} \
-BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \
+# double quoting opts will not work since it is a string of options
+# shellcheck disable=SC2086
BUILD_JOBS="${BUILD_JOBS:-8}" \
WORKFLOW_BUILD="ON" \
-./gdas.cd/build.sh
+./gdas.cd/build.sh ${_opts}
exit
diff --git a/sorc/build_gfs_utils.sh b/sorc/build_gfs_utils.sh
index 09bd4a9656..e53f71ddcd 100755
--- a/sorc/build_gfs_utils.sh
+++ b/sorc/build_gfs_utils.sh
@@ -18,14 +18,12 @@ EOF
exit 1
}
-cwd=$(pwd)
-
OPTIND=1
while getopts ":j:dvh" option; do
case "${option}" in
- d) export BUILD_TYPE="DEBUG";;
- v) export BUILD_VERBOSE="YES";;
- j) export BUILD_JOBS="${OPTARG}";;
+ d) BUILD_TYPE="Debug";;
+ v) BUILD_VERBOSE="YES";;
+ j) BUILD_JOBS="${OPTARG}";;
h)
usage
;;
@@ -44,6 +42,6 @@ shift $((OPTIND-1))
BUILD_TYPE=${BUILD_TYPE:-"Release"} \
BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \
BUILD_JOBS=${BUILD_JOBS:-8} \
-"${cwd}/gfs_utils.fd/ush/build.sh"
+"./gfs_utils.fd/ush/build.sh"
exit
diff --git a/sorc/build_gsi_enkf.sh b/sorc/build_gsi_enkf.sh
index 9ba278e3ec..ba24cefa81 100755
--- a/sorc/build_gsi_enkf.sh
+++ b/sorc/build_gsi_enkf.sh
@@ -4,9 +4,9 @@ set -eux
OPTIND=1
while getopts ":j:dv" option; do
case "${option}" in
- d) export BUILD_TYPE="DEBUG";;
- j) export BUILD_JOBS="${OPTARG}";;
- v) export BUILD_VERBOSE="YES";;
+ d) BUILD_TYPE="Debug";;
+ j) BUILD_JOBS="${OPTARG}";;
+ v) BUILD_VERBOSE="YES";;
:)
echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
usage
diff --git a/sorc/build_gsi_monitor.sh b/sorc/build_gsi_monitor.sh
index 3de1262aac..31add1882a 100755
--- a/sorc/build_gsi_monitor.sh
+++ b/sorc/build_gsi_monitor.sh
@@ -1,14 +1,12 @@
#! /usr/bin/env bash
set -eux
-cwd=$(pwd)
-
OPTIND=1
while getopts ":j:dv" option; do
case "${option}" in
- d) export BUILD_TYPE="DEBUG";;
- j) export BUILD_JOBS="${OPTARG}";;
- v) export BUILD_VERBOSE="YES";;
+ d) BUILD_TYPE="Debug";;
+ j) BUILD_JOBS="${OPTARG}";;
+ v) BUILD_VERBOSE="YES";;
:)
echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
usage
@@ -24,6 +22,6 @@ shift $((OPTIND-1))
BUILD_TYPE=${BUILD_TYPE:-"Release"} \
BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \
BUILD_JOBS=${BUILD_JOBS:-8} \
-"${cwd}/gsi_monitor.fd/ush/build.sh"
+"./gsi_monitor.fd/ush/build.sh"
exit
diff --git a/sorc/build_gsi_utils.sh b/sorc/build_gsi_utils.sh
index 81eab0f628..58c64e6e4a 100755
--- a/sorc/build_gsi_utils.sh
+++ b/sorc/build_gsi_utils.sh
@@ -1,14 +1,12 @@
#! /usr/bin/env bash
set -eux
-cwd=$(pwd)
-
OPTIND=1
while getopts ":j:dv" option; do
case "${option}" in
- d) export BUILD_TYPE="DEBUG";;
- j) export BUILD_JOBS="${OPTARG}";;
- v) export BUILD_VERBOSE="YES";;
+ d) BUILD_TYPE="Debug";;
+ j) BUILD_JOBS="${OPTARG}";;
+ v) BUILD_VERBOSE="YES";;
:)
echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
usage
@@ -25,6 +23,6 @@ BUILD_TYPE=${BUILD_TYPE:-"Release"} \
BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \
BUILD_JOBS=${BUILD_JOBS:-8} \
UTIL_OPTS="-DBUILD_UTIL_ENKF_GFS=ON -DBUILD_UTIL_NCIO=ON" \
-"${cwd}/gsi_utils.fd/ush/build.sh"
+"./gsi_utils.fd/ush/build.sh"
exit
diff --git a/sorc/build_ufs.sh b/sorc/build_ufs.sh
index 24ee8c5f13..d18dbef9fa 100755
--- a/sorc/build_ufs.sh
+++ b/sorc/build_ufs.sh
@@ -9,7 +9,7 @@ CCPP_SUITES="FV3_GFS_v17_p8_ugwpv1,FV3_GFS_v17_coupled_p8_ugwpv1" # TODO: does
while getopts ":da:j:vw" option; do
case "${option}" in
- d) BUILD_TYPE="DEBUG";;
+ d) BUILD_TYPE="Debug";;
a) APP="${OPTARG}";;
j) BUILD_JOBS="${OPTARG}";;
v) export BUILD_VERBOSE="YES";;
@@ -30,7 +30,7 @@ source "./tests/module-setup.sh"
MAKE_OPT="-DAPP=${APP} -D32BIT=ON -DCCPP_SUITES=${CCPP_SUITES}"
[[ ${PDLIB:-"OFF"} = "ON" ]] && MAKE_OPT+=" -DPDLIB=ON"
-[[ ${BUILD_TYPE:-"Release"} = "DEBUG" ]] && MAKE_OPT+=" -DDEBUG=ON"
+[[ ${BUILD_TYPE:-"Release"} = "Debug" ]] && MAKE_OPT+=" -DDEBUG=ON"
COMPILE_NR=0
CLEAN_BEFORE=YES
CLEAN_AFTER=NO
diff --git a/sorc/build_ufs_utils.sh b/sorc/build_ufs_utils.sh
index e78ca3c180..63ec56cb41 100755
--- a/sorc/build_ufs_utils.sh
+++ b/sorc/build_ufs_utils.sh
@@ -4,8 +4,9 @@ set -eux
OPTIND=1
while getopts ":j:dv" option; do
case "${option}" in
- j) export BUILD_JOBS="${OPTARG}";;
- v) export BUILD_VERBOSE="YES";;
+ d) BUILD_TYPE="Debug" ;;
+ j) BUILD_JOBS="${OPTARG}";;
+ v) BUILD_VERBOSE="YES";;
:)
echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
usage
@@ -18,13 +19,11 @@ while getopts ":j:dv" option; do
done
shift $((OPTIND-1))
-script_dir=$(dirname "${BASH_SOURCE[0]}")
-cd "${script_dir}/ufs_utils.fd" || exit 1
-
CMAKE_OPTS="-DGFS=ON" \
+BUILD_TYPE=${BUILD_TYPE:-"Release"} \
BUILD_JOBS=${BUILD_JOBS:-8} \
BUILD_VERBOSE=${BUILD_VERBOSE:-} \
-./build_all.sh
+./ufs_utils.fd/build_all.sh
exit
diff --git a/sorc/build_upp.sh b/sorc/build_upp.sh
index 8a2e1f6fcd..e217e171db 100755
--- a/sorc/build_upp.sh
+++ b/sorc/build_upp.sh
@@ -7,23 +7,23 @@ cd "${script_dir}" || exit 1
OPTIND=1
_opts=""
while getopts ":dj:v" option; do
- case "${option}" in
- d) _opts+="-d ";;
- j) export BUILD_JOBS="${OPTARG}" ;;
- v) _opts+="-v ";;
- :)
- echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
- ;;
- *)
- echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}"
- ;;
- esac
+ case "${option}" in
+ d) _opts+="-d " ;;
+ j) BUILD_JOBS="${OPTARG}" ;;
+ v) _opts+="-v ";;
+ :)
+ echo "[${BASH_SOURCE[0]}]: ${option} requires an argument"
+ ;;
+ *)
+ echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}"
+ ;;
+ esac
done
shift $((OPTIND-1))
# Check final exec folder exists
if [[ ! -d "../exec" ]]; then
- mkdir ../exec
+ mkdir -p ../exec
fi
cd ufs_model.fd/FV3/upp/tests
diff --git a/sorc/build_ww3prepost.sh b/sorc/build_ww3prepost.sh
index 19cdba98da..5b527a1641 100755
--- a/sorc/build_ww3prepost.sh
+++ b/sorc/build_ww3prepost.sh
@@ -6,12 +6,12 @@ cd "${script_dir}" || exit 1
# Default settings
APP="S2SWA"
-PDLIB="OFF"
+PDLIB="OFF"
while getopts ":j:a:dvw" option; do
case "${option}" in
a) APP="${OPTARG}";;
- d) BUILD_TYPE="DEBUG";;
+ d) BUILD_TYPE="Debug";;
j) BUILD_JOBS="${OPTARG}";;
v) export BUILD_VERBOSE="YES";;
w) PDLIB="ON";;
@@ -27,15 +27,15 @@ while getopts ":j:a:dvw" option; do
done
# Determine which switch to use
-if [[ "${APP}" == "ATMW" ]]; then
+if [[ "${APP}" == "ATMW" ]]; then
ww3switch="model/esmf/switch"
-else
- if [[ "${PDLIB}" == "ON" ]]; then
+else
+ if [[ "${PDLIB}" == "ON" ]]; then
ww3switch="model/bin/switch_meshcap_pdlib"
- else
+ else
ww3switch="model/bin/switch_meshcap"
- fi
-fi
+ fi
+fi
# Check final exec folder exists
if [[ ! -d "../exec" ]]; then
@@ -86,15 +86,16 @@ sed -e "s/DIST/SHRD/g"\
"${path_build}/tempswitch" > "${path_build}/switch"
rm "${path_build}/tempswitch"
-echo "Switch file is ${buildswitch} with switches:"
+echo "Switch file is ${buildswitch} with switches:"
cat "${buildswitch}"
#define cmake build options
MAKE_OPT="-DCMAKE_INSTALL_PREFIX=install"
-[[ ${BUILD_TYPE:-"Release"} = "DEBUG" ]] && MAKE_OPT+=" -DDEBUG=ON"
+[[ ${BUILD_TYPE:-"Release"} = "Debug" ]] && MAKE_OPT+=" -DCMAKE_BUILD_TYPE=Debug"
#Build executables:
-cmake "${WW3_DIR}" -DSWITCH="${buildswitch}" "${MAKE_OPT}"
+# shellcheck disable=SC2086
+cmake "${WW3_DIR}" -DSWITCH="${buildswitch}" ${MAKE_OPT}
rc=$?
if (( rc != 0 )); then
echo "Fatal error in cmake."
diff --git a/sorc/gdas.cd b/sorc/gdas.cd
index 10614c9855..37a28d114c 160000
--- a/sorc/gdas.cd
+++ b/sorc/gdas.cd
@@ -1 +1 @@
-Subproject commit 10614c9855042b436bb8c37c7e2faeead01259cb
+Subproject commit 37a28d114c9be6dccff890b33d742b03c22f07c1
diff --git a/sorc/gsi_monitor.fd b/sorc/gsi_monitor.fd
index fb39e83880..4e0f72b8f6 160000
--- a/sorc/gsi_monitor.fd
+++ b/sorc/gsi_monitor.fd
@@ -1 +1 @@
-Subproject commit fb39e83880d44d433bed9af856bc5178bf63d64c
+Subproject commit 4e0f72b8f6117d278c1414de8cb265cb2f41145d
diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh
index be8606a6b5..2b54f3ea10 100755
--- a/sorc/link_workflow.sh
+++ b/sorc/link_workflow.sh
@@ -199,6 +199,18 @@ if [[ -d "${HOMEgfs}/sorc/gdas.cd" ]]; then
done
fi
+#------------------------------
+#--add GDASApp parm directory
+#------------------------------
+if [[ -d "${HOMEgfs}/sorc/gdas.cd" ]]; then
+ cd "${HOMEgfs}/parm/gdas" || exit 1
+ declare -a gdasapp_comps=("aero" "atm" "io" "ioda" "snow" "soca")
+ for comp in "${gdasapp_comps[@]}"; do
+ [[ -d "${comp}" ]] && rm -rf "${comp}"
+ ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/parm/${comp}" .
+ done
+fi
+
#------------------------------
#--add GDASApp files
#------------------------------
diff --git a/sorc/wxflow b/sorc/wxflow
index 528f5abb49..dd9ca24a5b 160000
--- a/sorc/wxflow
+++ b/sorc/wxflow
@@ -1 +1 @@
-Subproject commit 528f5abb49e80751f83ebd6eb0a87bc70012bb24
+Subproject commit dd9ca24a5bb14b75acde685e5fa23b300fd47770
diff --git a/ush/forecast_det.sh b/ush/forecast_det.sh
index 198df6505f..a1c153776b 100755
--- a/ush/forecast_det.sh
+++ b/ush/forecast_det.sh
@@ -45,7 +45,8 @@ FV3_det(){
RERUN=${RERUN:-"NO"}
# Get a list of all YYYYMMDD.HH0000.coupler.res files from the atmos restart directory
- mapfile -t file_array < <(find "${COM_ATMOS_RESTART:-/dev/null}" -name "????????.??0000.coupler.res")
+ # shellcheck disable=SC2312
+ mapfile -t file_array < <(find "${COM_ATMOS_RESTART:-/dev/null}" -name "????????.??0000.coupler.res" | sort)
if [[ ( "${RUN}" = "gfs" || "${RUN}" = "gefs" ) \
&& "${#file_array[@]}" -gt 0 ]]; then
diff --git a/ush/module-setup.sh b/ush/module-setup.sh
index e204bae8a2..008ef7ee97 100755
--- a/ush/module-setup.sh
+++ b/ush/module-setup.sh
@@ -1,7 +1,7 @@
#!/bin/bash
set -u
-source "${HOMEgfs}/ush/detect_machine.sh"
+source "${HOMEgfs}/ush/detect_machine.sh"
if [[ ${MACHINE_ID} = jet* ]] ; then
# We are on NOAA Jet
@@ -125,7 +125,7 @@ elif [[ ${MACHINE_ID} = "noaacloud" ]]; then
export SPACK_ROOT=/contrib/global-workflow/spack-stack/spack
export PATH=${PATH}:${SPACK_ROOT}/bin
. "${SPACK_ROOT}"/share/spack/setup-env.sh
-
+
else
echo WARNING: UNKNOWN PLATFORM 1>&2
fi
diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py
index b562eeee4e..5709bc130e 100644
--- a/ush/python/pygfs/task/analysis.py
+++ b/ush/python/pygfs/task/analysis.py
@@ -25,6 +25,8 @@ class Analysis(Task):
def __init__(self, config: Dict[str, Any]) -> None:
super().__init__(config)
self.config.ntiles = 6
+ # Store location of GDASApp jinja2 templates
+ self.gdasapp_j2tmpl_dir = os.path.join(self.config.HOMEgfs, 'parm/gdas')
def initialize(self) -> None:
super().initialize()
@@ -56,7 +58,7 @@ def get_obs_dict(self) -> Dict[str, Any]:
a dictionary containing the list of observation files to copy for FileHandler
"""
logger.debug(f"OBS_LIST: {self.task_config['OBS_LIST']}")
- obs_list_config = parse_j2yaml(self.task_config["OBS_LIST"], self.task_config)
+ obs_list_config = parse_j2yaml(self.task_config["OBS_LIST"], self.task_config, searchpath=self.gdasapp_j2tmpl_dir)
logger.debug(f"obs_list_config: {obs_list_config}")
# get observers from master dictionary
observers = obs_list_config['observers']
@@ -88,7 +90,7 @@ def get_bias_dict(self) -> Dict[str, Any]:
a dictionary containing the list of observation bias files to copy for FileHandler
"""
logger.debug(f"OBS_LIST: {self.task_config['OBS_LIST']}")
- obs_list_config = parse_j2yaml(self.task_config["OBS_LIST"], self.task_config)
+ obs_list_config = parse_j2yaml(self.task_config["OBS_LIST"], self.task_config, searchpath=self.gdasapp_j2tmpl_dir)
logger.debug(f"obs_list_config: {obs_list_config}")
# get observers from master dictionary
observers = obs_list_config['observers']
diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py
index 01c69dbc7b..9eee8314c3 100644
--- a/ush/python/pygfs/task/snow_analysis.py
+++ b/ush/python/pygfs/task/snow_analysis.py
@@ -11,7 +11,7 @@
FileHandler,
to_fv3time, to_YMD, to_YMDH, to_timedelta, add_to_datetime,
rm_p,
- parse_j2yaml, parse_yamltmpl, save_as_yaml,
+ parse_j2yaml, save_as_yaml,
Jinja,
logit,
Executable,
@@ -99,7 +99,7 @@ def prepare_GTS(self) -> None:
def _gtsbufr2iodax(exe, yaml_file):
if not os.path.isfile(yaml_file):
- logger.exception(f"{yaml_file} not found")
+ logger.exception(f"FATAL ERROR: {yaml_file} not found")
raise FileNotFoundError(yaml_file)
logger.info(f"Executing {exe}")
@@ -260,9 +260,9 @@ def initialize(self) -> None:
FileHandler({'mkdir': dirlist}).sync()
# stage fix files
- jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'snow_jedi_fix.yaml')
+ jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'gdas', 'snow_jedi_fix.yaml.j2')
logger.info(f"Staging JEDI fix files from {jedi_fix_list_path}")
- jedi_fix_list = parse_yamltmpl(jedi_fix_list_path, self.task_config)
+ jedi_fix_list = parse_j2yaml(jedi_fix_list_path, self.task_config)
FileHandler(jedi_fix_list).sync()
# stage backgrounds
@@ -271,10 +271,9 @@ def initialize(self) -> None:
# generate letkfoi YAML file
logger.info(f"Generate JEDI LETKF YAML file: {self.task_config.jedi_yaml}")
- letkfoi_yaml = parse_j2yaml(self.task_config.JEDIYAML, self.task_config)
+ letkfoi_yaml = parse_j2yaml(self.task_config.JEDIYAML, self.task_config, searchpath=self.gdasapp_j2tmpl_dir)
save_as_yaml(letkfoi_yaml, self.task_config.jedi_yaml)
logger.info(f"Wrote letkfoi YAML to: {self.task_config.jedi_yaml}")
-
# need output dir for diags and anl
logger.info("Create empty output [anl, diags] directories to receive output from executable")
newdirs = [
diff --git a/workflow/setup_expt.py b/workflow/setup_expt.py
index 3eeb584f46..90e890910f 100755
--- a/workflow/setup_expt.py
+++ b/workflow/setup_expt.py
@@ -252,12 +252,6 @@ def fill_EXPDIR(inputs):
expdir = os.path.join(inputs.expdir, inputs.pslot)
configs = glob.glob(f'{configdir}/config.*')
- exclude_configs = ['base', 'base.emc.dyn', 'base.nco.static', 'fv3.nco.static']
- for exclude in exclude_configs:
- try:
- configs.remove(f'{configdir}/config.{exclude}')
- except ValueError:
- pass
if len(configs) == 0:
raise IOError(f'no config files found in {configdir}')
for config in configs:
@@ -295,7 +289,8 @@ def _update_defaults(dict_in: dict) -> dict:
def edit_baseconfig(host, inputs, yaml_dict):
"""
- Parses and populates the templated `config.base.emc.dyn` to `config.base`
+    Parses and populates the templated `HOMEgfs/parm/config/<system>/config.base`
+    to `EXPDIR/pslot/config.base`
"""
tmpl_dict = {
@@ -347,7 +342,7 @@ def edit_baseconfig(host, inputs, yaml_dict):
except KeyError:
pass
- base_input = f'{inputs.configdir}/config.base.emc.dyn'
+ base_input = f'{inputs.configdir}/config.base'
base_output = f'{inputs.expdir}/{inputs.pslot}/config.base'
edit_config(base_input, base_output, tmpl_dict)