diff --git a/ci/Jenkinsfile b/ci/Jenkinsfile
index ae86e33c66..8ed4927c6b 100644
--- a/ci/Jenkinsfile
+++ b/ci/Jenkinsfile
@@ -1,7 +1,8 @@
def Machine = 'none'
def machine = 'none'
def CUSTOM_WORKSPACE = 'none'
-def caseList = ''
+def cases = []
+def GH = 'none'
// Location of the custom workspaces for each machine in the CI system. They are persitent for each iteration of the PR.
def NodeName = [hera: 'Hera-EMC', orion: 'Orion-EMC', hercules: 'Hercules-EMC', gaea: 'Gaea']
def custom_workspace = [hera: '/scratch1/NCEPDEV/global/CI', orion: '/work2/noaa/stmp/CI/ORION', hercules: '/work2/noaa/stmp/CI/HERCULES', gaea: '/gpfs/f5/epic/proj-shared/global/CI']
@@ -78,6 +79,7 @@ pipeline {
echo "Getting Common Workspace for ${Machine}"
ws("${custom_workspace[machine]}/${env.CHANGE_ID}") {
properties([parameters([[$class: 'NodeParameterDefinition', allowedSlaves: ['built-in', 'Hercules-EMC', 'Hera-EMC', 'Orion-EMC'], defaultSlaves: ['built-in'], name: '', nodeEligibility: [$class: 'AllNodeEligibility'], triggerIfResult: 'allCases']])])
+ GH = sh(script: "which gh || echo '~/bin/gh'", returnStdout: true).trim()
CUSTOM_WORKSPACE = "${WORKSPACE}"
sh(script: "mkdir -p ${CUSTOM_WORKSPACE}/RUNTESTS;rm -Rf ${CUSTOM_WORKSPACE}/RUNTESTS/*")
sh(script: """${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --add-label "CI-${Machine}-Building" --remove-label "CI-${Machine}-Ready" """)
@@ -97,7 +99,7 @@ pipeline {
}
}
stages {
- stage('build system') {
+ stage('Building') {
steps {
catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
script {
@@ -116,7 +118,7 @@ pipeline {
checkout scm
} catch (Exception e) {
if (env.CHANGE_ID) {
- sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Checkout **Failed** on ${Machine}: ${e.getMessage()}" """)
+ sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Checkout **Failed** on ${Machine} in Build# ${env.BUILD_NUMBER}: ${e.getMessage()}" """)
}
STATUS = 'Failed'
error("Failed to checkout: ${e.getMessage()}")
@@ -149,7 +151,7 @@ pipeline {
try {
sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_BUILD_${env.CHANGE_ID}")
gist_url=sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist PR_BUILD_${env.CHANGE_ID}", returnStdout: true).trim()
- sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Build **FAILED** on **${Machine}** with error logs:\n\\`\\`\\`\n${error_logs_message}\\`\\`\\`\n\nFollow link here to view the contents of the above file(s): [(link)](${gist_url})" """)
+ sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Build **FAILED** on **${Machine}** in Build# ${env.BUILD_NUMBER} with error logs:\n\\`\\`\\`\n${error_logs_message}\\`\\`\\`\n\nFollow link here to view the contents of the above file(s): [(link)](${gist_url})" """)
} catch (Exception error_comment) {
echo "Failed to comment on PR: ${error_comment.getMessage()}"
}
@@ -169,7 +171,7 @@ pipeline {
}
}
if (system == 'gfs') {
- caseList = sh(script: "${HOMEgfs}/ci/scripts/utils/get_host_case_list.py ${machine}", returnStdout: true).trim().split()
+ cases = sh(script: "${HOMEgfs}/ci/scripts/utils/get_host_case_list.py ${machine}", returnStdout: true).trim().split()
}
}
}
@@ -184,98 +186,88 @@ pipeline {
when {
expression { STATUS != 'Failed' }
}
- matrix {
- agent { label NodeName[machine].toLowerCase() }
- axes {
- axis {
- name 'Case'
- // TODO add dynamic list of cases from env vars (needs addtional plugins)
- values 'C48C48_ufs_hybatmDA', 'C48_ATM', 'C48_S2SW', 'C48_S2SWA_gefs', 'C48mx500_3DVarAOWCDA', 'C96C48_hybatmDA', 'C96_atm3DVar', 'C96_atmaerosnowDA'
- }
- }
- stages {
-
- stage('Create Experiments') {
- when {
- expression { return caseList.contains(Case) }
- }
- steps {
- catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
- script {
- sh(script: "sed -n '/{.*}/!p' ${CUSTOM_WORKSPACE}/gfs/ci/cases/pr/${Case}.yaml > ${CUSTOM_WORKSPACE}/gfs/ci/cases/pr/${Case}.yaml.tmp")
- def yaml_case = readYaml file: "${CUSTOM_WORKSPACE}/gfs/ci/cases/pr/${Case}.yaml.tmp"
- system = yaml_case.experiment.system
- def HOMEgfs = "${CUSTOM_WORKSPACE}/${system}" // local HOMEgfs is used to populate the XML on per system basis
- env.RUNTESTS = "${CUSTOM_WORKSPACE}/RUNTESTS"
- try {
- error_output = sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh create_experiment ${HOMEgfs}/ci/cases/pr/${Case}.yaml", returnStdout: true).trim()
- } catch (Exception error_create) {
- sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "${Case} **FAILED** to create experment on ${Machine}\n with the error:\n\\`\\`\\`\n${error_output}\\`\\`\\`" """)
- error("Case ${Case} failed to create experment directory")
- }
+ agent { label NodeName[machine].toLowerCase() }
+ steps {
+ script {
+ def parallelStages = cases.collectEntries { caseName ->
+ ["${caseName}": {
+ stage("Create ${caseName}") {
+ catchError(buildResult: 'UNSTABLE', stageResult: 'FAILURE') {
+ script {
+ sh(script: "sed -n '/{.*}/!p' ${CUSTOM_WORKSPACE}/gfs/ci/cases/pr/${caseName}.yaml > ${CUSTOM_WORKSPACE}/gfs/ci/cases/pr/${caseName}.yaml.tmp")
+ def yaml_case = readYaml file: "${CUSTOM_WORKSPACE}/gfs/ci/cases/pr/${caseName}.yaml.tmp"
+ system = yaml_case.experiment.system
+ def HOMEgfs = "${CUSTOM_WORKSPACE}/${system}" // local HOMEgfs is used to populate the XML on per system basis
+ env.RUNTESTS = "${CUSTOM_WORKSPACE}/RUNTESTS"
+ try {
+ error_output = sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh create_experiment ${HOMEgfs}/ci/cases/pr/${caseName}.yaml", returnStdout: true).trim()
+ } catch (Exception error_create) {
+                                            sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "${caseName} **FAILED** to create experiment on ${Machine} in BUILD# ${env.BUILD_NUMBER}\n with the error:\n\\`\\`\\`\n${error_output}\\`\\`\\`" """)
+                                            error("Case ${caseName} failed to create experiment directory")
+ }
+ }
}
}
- }
- }
- stage('Run Experiments') {
- when {
- expression { return caseList.contains(Case) }
- }
- steps {
- script {
- HOMEgfs = "${CUSTOM_WORKSPACE}/gfs" // common HOMEgfs is used to launch the scripts that run the experiments
- def pslot = sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh get_pslot ${CUSTOM_WORKSPACE}/RUNTESTS ${Case}", returnStdout: true).trim()
- def error_file = "${CUSTOM_WORKSPACE}/RUNTESTS/${pslot}_error.logs"
- sh(script: " rm -f ${error_file}")
- try {
- sh(script: "${HOMEgfs}/ci/scripts/run-check_ci.sh ${CUSTOM_WORKSPACE} ${pslot} ${system}")
- } catch (Exception error_experment) {
- sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh cancel_batch_jobs ${pslot}")
- ws(CUSTOM_WORKSPACE) {
- def error_logs = ""
- def error_logs_message = ""
- if (fileExists(error_file)) {
- def fileContent = readFile error_file
- def lines = fileContent.readLines()
- for (line in lines) {
- echo "archiving: ${line}"
- if (fileExists("${CUSTOM_WORKSPACE}/${line}") && readFile("${CUSTOM_WORKSPACE}/${line}").length() > 0) {
- try {
- archiveArtifacts artifacts: "${line}", fingerprint: true
- error_logs = error_logs + "${CUSTOM_WORKSPACE}/${line} "
- error_logs_message = error_logs_message + "${CUSTOM_WORKSPACE}/${line}\n"
- } catch (Exception error_arch) {
- echo "Failed to archive error log ${line}: ${error_arch.getMessage()}"
+ stage("Running ${caseName}") {
+ catchError(buildResult: 'FAILURE', stageResult: 'FAILURE') {
+ script {
+ HOMEgfs = "${CUSTOM_WORKSPACE}/gfs" // common HOMEgfs is used to launch the scripts that run the experiments
+ def pslot = sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh get_pslot ${CUSTOM_WORKSPACE}/RUNTESTS ${caseName}", returnStdout: true).trim()
+ def error_file = "${CUSTOM_WORKSPACE}/RUNTESTS/${pslot}_error.logs"
+ sh(script: " rm -f ${error_file}")
+ try {
+ sh(script: "${HOMEgfs}/ci/scripts/run-check_ci.sh ${CUSTOM_WORKSPACE} ${pslot} ${system}")
+ } catch (Exception error_experment) {
+ sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh cancel_batch_jobs ${pslot}")
+ ws(CUSTOM_WORKSPACE) {
+ def error_logs = ""
+ def error_logs_message = ""
+ if (fileExists(error_file)) {
+ def fileContent = readFile error_file
+ def lines = fileContent.readLines()
+ for (line in lines) {
+ echo "archiving: ${line}"
+ if (fileExists("${CUSTOM_WORKSPACE}/${line}") && readFile("${CUSTOM_WORKSPACE}/${line}").length() > 0) {
+ try {
+ archiveArtifacts artifacts: "${line}", fingerprint: true
+ error_logs = error_logs + "${CUSTOM_WORKSPACE}/${line} "
+ error_logs_message = error_logs_message + "${CUSTOM_WORKSPACE}/${line}\n"
+ } catch (Exception error_arch) {
+ echo "Failed to archive error log ${line}: ${error_arch.getMessage()}"
+ }
+ }
+ }
+ try {
+ gist_url = sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist PR_${env.CHANGE_ID}", returnStdout: true).trim()
+ sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Experiment ${caseName} **FAILED** on ${Machine} in Build# ${env.BUILD_NUMBER} with error logs:\n\\`\\`\\`\n${error_logs_message}\\`\\`\\`\n\nFollow link here to view the contents of the above file(s): [(link)](${gist_url})" """)
+ sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_${env.CHANGE_ID}")
+ } catch (Exception error_comment) {
+ echo "Failed to comment on PR: ${error_comment.getMessage()}"
+ }
+ } else {
+ echo "No error logs found for failed cases in $CUSTOM_WORKSPACE/RUNTESTS/${pslot}_error.logs"
+ }
+ STATUS = 'Failed'
+ try {
+ sh(script: """${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --remove-label "CI-${Machine}-Running" --add-label "CI-${Machine}-${STATUS}" """, returnStatus: true)
+ sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Experiment ${caseName} **FAILED** on ${Machine} in Build# ${env.BUILD_NUMBER} in\n\\`${CUSTOM_WORKSPACE}/RUNTESTS/EXPDIR/${pslot}\\`" """)
+ } catch (Exception e) {
+ echo "Failed to update label from Running to ${STATUS}: ${e.getMessage()}"
+ }
+ error("Failed to run experiments ${caseName} on ${Machine}")
}
}
- }
- try {
- gist_url = sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --gist PR_${env.CHANGE_ID}", returnStdout: true).trim()
- sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Experiment ${Case} **FAILED** on ${Machine} with error logs:\n\\`\\`\\`\n${error_logs_message}\\`\\`\\`\n\nFollow link here to view the contents of the above file(s): [(link)](${gist_url})" """)
- sh(script: "${HOMEgfs}/ci/scripts/utils/publish_logs.py --file ${error_logs} --repo PR_${env.CHANGE_ID}")
- } catch (Exception error_comment) {
- echo "Failed to comment on PR: ${error_comment.getMessage()}"
- }
- } else {
- echo "No error logs found for failed cases in $CUSTOM_WORKSPACE/RUNTESTS/${pslot}_error.logs"
}
- STATUS = 'Failed'
- try {
- sh(script: """${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --remove-label "CI-${Machine}-Running" --add-label "CI-${Machine}-${STATUS}" """, returnStatus: true)
- sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Experiment ${Case} **FAILED** on ${Machine} in\n\\`${CUSTOM_WORKSPACE}/RUNTESTS/EXPDIR/${pslot}\\`" """)
- } catch (Exception e) {
- echo "Failed to update label from Running to ${STATUS}: ${e.getMessage()}"
- }
- echo "Failed to run experiments ${Case} on ${Machine}"
}
- }
}
- }
+ }]
}
+ parallel parallelStages + [failFast: true]
}
}
}
+
stage( '5. FINALIZE' ) {
agent { label NodeName[machine].toLowerCase() }
@@ -291,7 +283,7 @@ pipeline {
""", returnStatus: true)
sh(script: """${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --add-label "CI-${Machine}-${STATUS}" """, returnStatus: true)
if (fileExists("${CUSTOM_WORKSPACE}/RUNTESTS/ci-run_check.log")) {
- sh(script: """echo "**CI ${STATUS}** ${Machine} at
Built and ran in directory \\`${CUSTOM_WORKSPACE}\\`\n\\`\\`\\`\n" | cat - ${CUSTOM_WORKSPACE}/RUNTESTS/ci-run_check.log > temp && mv temp ${CUSTOM_WORKSPACE}/RUNTESTS/ci-run_check.log""", returnStatus: true)
+ sh(script: """echo "**CI ${STATUS}** on ${Machine} in Build# ${env.BUILD_NUMBER}
Built and ran in directory \\`${CUSTOM_WORKSPACE}\\`\n\\`\\`\\`\n" | cat - ${CUSTOM_WORKSPACE}/RUNTESTS/ci-run_check.log > temp && mv temp ${CUSTOM_WORKSPACE}/RUNTESTS/ci-run_check.log""", returnStatus: true)
sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body-file ${CUSTOM_WORKSPACE}/RUNTESTS/ci-run_check.log """, returnStatus: true)
}
if (STATUS == 'Passed') {
diff --git a/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml b/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml
index fd056cf895..c8365e12a0 100644
--- a/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml
+++ b/ci/cases/pr/C48mx500_3DVarAOWCDA.yaml
@@ -19,5 +19,6 @@ arguments:
skip_ci_on_hosts:
- wcoss2
+ - gaea
- orion
- hercules
diff --git a/ci/cases/pr/C96C48_ufs_hybatmDA.yaml b/ci/cases/pr/C96C48_ufs_hybatmDA.yaml
index d1556dc1d0..b5634642f3 100644
--- a/ci/cases/pr/C96C48_ufs_hybatmDA.yaml
+++ b/ci/cases/pr/C96C48_ufs_hybatmDA.yaml
@@ -19,6 +19,7 @@ arguments:
skip_ci_on_hosts:
- hera
+ - gaea
- orion
- hercules
diff --git a/ci/cases/pr/C96_atm3DVar_extended.yaml b/ci/cases/pr/C96_atm3DVar_extended.yaml
index 994d3ef3a0..a1ebab7b44 100644
--- a/ci/cases/pr/C96_atm3DVar_extended.yaml
+++ b/ci/cases/pr/C96_atm3DVar_extended.yaml
@@ -18,5 +18,6 @@ arguments:
skip_ci_on_hosts:
- hera
+ - gaea
- orion
- hercules
diff --git a/ci/cases/pr/C96_atmaerosnowDA.yaml b/ci/cases/pr/C96_atmaerosnowDA.yaml
index 7e22955a37..6eceffa27c 100644
--- a/ci/cases/pr/C96_atmaerosnowDA.yaml
+++ b/ci/cases/pr/C96_atmaerosnowDA.yaml
@@ -18,4 +18,5 @@ arguments:
skip_ci_on_hosts:
- orion
+ - gaea
- hercules
diff --git a/ci/cases/yamls/gfs_extended_ci.yaml b/ci/cases/yamls/gfs_extended_ci.yaml
index 42ee612f3a..8caa942eed 100644
--- a/ci/cases/yamls/gfs_extended_ci.yaml
+++ b/ci/cases/yamls/gfs_extended_ci.yaml
@@ -9,5 +9,6 @@ base:
DO_AWIPS: "NO"
DO_NPOESS: "YES"
DO_GENESIS_FSU: "NO"
+  FCST_BREAKPOINTS: "192"
FHMAX_GFS: 384
FHMAX_HF_GFS: 120
diff --git a/ci/platforms/config.gaea b/ci/platforms/config.gaea
new file mode 100644
index 0000000000..cce109d494
--- /dev/null
+++ b/ci/platforms/config.gaea
@@ -0,0 +1,8 @@
+#!/usr/bin/bash
+
+export GFS_CI_ROOT=/gpfs/f5/epic/proj-shared/global/GFS_CI_ROOT
+export ICSDIR_ROOT=/gpfs/f5/epic/proj-shared/global/glopara/data/ICSDIR
+export STMP="/gpfs/f5/epic/scratch/${USER}"
+export SLURM_ACCOUNT=ufs-ard
+export max_concurrent_cases=5
+export max_concurrent_pr=4
diff --git a/ci/scripts/check_ci.sh b/ci/scripts/check_ci.sh
index 24c5e242c3..825d8f5e8b 100755
--- a/ci/scripts/check_ci.sh
+++ b/ci/scripts/check_ci.sh
@@ -21,7 +21,7 @@ REPO_URL=${REPO_URL:-"git@github.com:NOAA-EMC/global-workflow.git"}
source "${HOMEgfs}/ush/detect_machine.sh"
case ${MACHINE_ID} in
- hera | orion | hercules | wcoss2)
+ hera | orion | hercules | wcoss2 | gaea)
echo "Running Automated Testing on ${MACHINE_ID}"
source "${HOMEgfs}/ci/platforms/config.${MACHINE_ID}"
;;
diff --git a/ci/scripts/driver.sh b/ci/scripts/driver.sh
index 8a99817325..acf54381b8 100755
--- a/ci/scripts/driver.sh
+++ b/ci/scripts/driver.sh
@@ -30,7 +30,7 @@ export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]'
source "${ROOT_DIR}/ush/detect_machine.sh"
case ${MACHINE_ID} in
- hera | orion | hercules | wcoss2)
+ hera | orion | hercules | wcoss2 | gaea)
echo "Running Automated Testing on ${MACHINE_ID}"
source "${ROOT_DIR}/ci/platforms/config.${MACHINE_ID}"
;;
diff --git a/ci/scripts/driver_weekly.sh b/ci/scripts/driver_weekly.sh
index 6cd2493769..3193cc98ed 100755
--- a/ci/scripts/driver_weekly.sh
+++ b/ci/scripts/driver_weekly.sh
@@ -38,7 +38,7 @@ export PS4='+ $(basename ${BASH_SOURCE[0]})[${LINENO}]'
source "${ROOT_DIR}/ush/detect_machine.sh"
case ${MACHINE_ID} in
- hera | orion | hercules | wcoss2)
+ hera | orion | hercules | wcoss2 | gaea)
echo "Running Automated Testing on ${MACHINE_ID}"
source "${ROOT_DIR}/ci/platforms/config.${MACHINE_ID}"
;;
diff --git a/ci/scripts/run_ci.sh b/ci/scripts/run_ci.sh
index f109aa83d4..2da5fa2681 100755
--- a/ci/scripts/run_ci.sh
+++ b/ci/scripts/run_ci.sh
@@ -20,7 +20,7 @@ export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]'
source "${HOMEgfs}/ush/detect_machine.sh"
case ${MACHINE_ID} in
- hera | orion | hercules | wcoss2)
+ hera | orion | hercules | wcoss2 | gaea)
echo "Running Automated Testing on ${MACHINE_ID}"
source "${HOMEgfs}/ci/platforms/config.${MACHINE_ID}"
;;
diff --git a/ci/scripts/utils/launch_java_agent.sh b/ci/scripts/utils/launch_java_agent.sh
index 81dbe002b6..183e671b9d 100755
--- a/ci/scripts/utils/launch_java_agent.sh
+++ b/ci/scripts/utils/launch_java_agent.sh
@@ -74,7 +74,7 @@ host=$(hostname)
source "${HOMEgfs}/ush/detect_machine.sh"
case ${MACHINE_ID} in
- hera | orion | hercules | wcoss2)
+ hera | orion | hercules | wcoss2 | gaea)
echo "Launch Jenkins Java Controler on ${MACHINE_ID}";;
*)
echo "Unsupported platform. Exiting with error."
diff --git a/env/AWSPW.env b/env/AWSPW.env
index 867b9220ba..992281a1d7 100755
--- a/env/AWSPW.env
+++ b/env/AWSPW.env
@@ -9,8 +9,8 @@ fi
step=$1
-export launcher="mpiexec.hydra"
-export mpmd_opt=""
+export launcher="srun -l --export=ALL"
+export mpmd_opt="--multi-prog --output=mpmd.%j.%t.out"
# Configure MPI environment
export OMP_STACKSIZE=2048000
@@ -35,6 +35,8 @@ fi
if [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then
+ export launcher="srun --mpi=pmi2 -l"
+
(( nnodes = (ntasks+tasks_per_node-1)/tasks_per_node ))
(( ufs_ntasks = nnodes*tasks_per_node ))
# With ESMF threading, the model wants to use the full node
diff --git a/jobs/JGLOBAL_FORECAST b/jobs/JGLOBAL_FORECAST
index 9998470618..e64a91d21c 100755
--- a/jobs/JGLOBAL_FORECAST
+++ b/jobs/JGLOBAL_FORECAST
@@ -116,6 +116,17 @@ fi
# Remove the Temporary working directory
##########################################
cd "${DATAROOT}" || true
-[[ "${KEEPDATA}" == "NO" ]] && rm -rf "${DATA}" "${DATArestart}" # do not remove DATAjob. It contains DATAoutput
+# do not remove DATAjob. It contains DATAoutput
+if [[ "${KEEPDATA}" == "NO" ]]; then
+ rm -rf "${DATA}"
+
+ # Determine if this is the last segment
+ commas="${FCST_SEGMENTS//[^,]}"
+ n_segs=${#commas}
+ if (( n_segs - 1 == ${FCST_SEGMENT:-0} )); then
+ # Only delete temporary restarts if it is the last segment
+ rm -rf "${DATArestart}"
+ fi
+fi
exit 0
diff --git a/modulefiles/module_base.noaacloud.lua b/modulefiles/module_base.noaacloud.lua
new file mode 100644
index 0000000000..7997b618e4
--- /dev/null
+++ b/modulefiles/module_base.noaacloud.lua
@@ -0,0 +1,49 @@
+help([[
+Load environment to run GFS on noaacloud
+]])
+
+local spack_mod_path=(os.getenv("spack_mod_path") or "None")
+prepend_path("MODULEPATH", spack_mod_path)
+
+load(pathJoin("stack-intel", (os.getenv("stack_intel_ver") or "None")))
+load(pathJoin("stack-intel-oneapi-mpi", (os.getenv("stack_impi_ver") or "None")))
+load(pathJoin("python", (os.getenv("python_ver") or "None")))
+
+load(pathJoin("jasper", (os.getenv("jasper_ver") or "None")))
+load(pathJoin("libpng", (os.getenv("libpng_ver") or "None")))
+load(pathJoin("cdo", (os.getenv("cdo_ver") or "None")))
+--load(pathJoin("R", (os.getenv("R_ver") or "None")))
+
+load(pathJoin("hdf5", (os.getenv("hdf5_ver") or "None")))
+load(pathJoin("netcdf-c", (os.getenv("netcdf_c_ver") or "None")))
+load(pathJoin("netcdf-fortran", (os.getenv("netcdf_fortran_ver") or "None")))
+
+load(pathJoin("nco", (os.getenv("nco_ver") or "None")))
+load(pathJoin("prod_util", (os.getenv("prod_util_ver") or "None")))
+load(pathJoin("grib-util", (os.getenv("grib_util_ver") or "None")))
+load(pathJoin("g2tmpl", (os.getenv("g2tmpl_ver") or "None")))
+load(pathJoin("gsi-ncdiag", (os.getenv("gsi_ncdiag_ver") or "None")))
+load(pathJoin("crtm", (os.getenv("crtm_ver") or "None")))
+load(pathJoin("bufr", (os.getenv("bufr_ver") or "None")))
+load(pathJoin("wgrib2", (os.getenv("wgrib2_ver") or "None")))
+load(pathJoin("py-f90nml", (os.getenv("py_f90nml_ver") or "None")))
+load(pathJoin("py-netcdf4", (os.getenv("py_netcdf4_ver") or "None")))
+load(pathJoin("py-pyyaml", (os.getenv("py_pyyaml_ver") or "None")))
+load(pathJoin("py-jinja2", (os.getenv("py_jinja2_ver") or "None")))
+load(pathJoin("py-pandas", (os.getenv("py_pandas_ver") or "None")))
+load(pathJoin("py-python-dateutil", (os.getenv("py_python_dateutil_ver") or "None")))
+--load(pathJoin("met", (os.getenv("met_ver") or "None")))
+--load(pathJoin("metplus", (os.getenv("metplus_ver") or "None")))
+load(pathJoin("py-xarray", (os.getenv("py_xarray_ver") or "None")))
+
+setenv("WGRIB2","wgrib2")
+setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None"))
+
+--prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/prepobs/v" .. (os.getenv("prepobs_run_ver") or "None"), "modulefiles"))
+--prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles"))
+--load(pathJoin("prepobs", (os.getenv("prepobs_run_ver") or "None")))
+
+--prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/Fit2Obs/v" .. (os.getenv("fit2obs_ver") or "None"), "modulefiles"))
+--load(pathJoin("fit2obs", (os.getenv("fit2obs_ver") or "None")))
+
+whatis("Description: GFS run environment")
diff --git a/modulefiles/module_gwci.noaacloud.lua b/modulefiles/module_gwci.noaacloud.lua
new file mode 100644
index 0000000000..c3142cd60d
--- /dev/null
+++ b/modulefiles/module_gwci.noaacloud.lua
@@ -0,0 +1,15 @@
+help([[
+Load environment to run GFS workflow setup scripts on noaacloud
+]])
+
+prepend_path("MODULEPATH", "/contrib/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core")
+
+load(pathJoin("stack-intel", "2021.3.0"))
+load(pathJoin("stack-intel-oneapi-mpi", "2021.3.0"))
+
+load(pathJoin("netcdf-c", "4.9.2"))
+load(pathJoin("netcdf-fortran", "4.6.1"))
+load(pathJoin("nccmp","1.9.0.1"))
+load(pathJoin("wgrib2", "2.0.8"))
+
+whatis("Description: GFS run setup CI environment")
diff --git a/modulefiles/module_gwsetup.noaacloud.lua b/modulefiles/module_gwsetup.noaacloud.lua
new file mode 100644
index 0000000000..f3845e8d72
--- /dev/null
+++ b/modulefiles/module_gwsetup.noaacloud.lua
@@ -0,0 +1,20 @@
+help([[
+Load environment to run GFS workflow setup scripts on noaacloud
+]])
+
+load(pathJoin("rocoto"))
+
+prepend_path("MODULEPATH", "/contrib/spack-stack/spack-stack-1.6.0/envs/unified-env/install/modulefiles/Core")
+
+local stack_intel_ver=os.getenv("stack_intel_ver") or "2021.3.0"
+local python_ver=os.getenv("python_ver") or "3.10.3"
+
+load(pathJoin("stack-intel", stack_intel_ver))
+load(pathJoin("python", python_ver))
+load("py-jinja2")
+load("py-pyyaml")
+load("py-numpy")
+local git_ver=os.getenv("git_ver") or "1.8.3.1"
+load(pathJoin("git", git_ver))
+
+whatis("Description: GFS run setup environment")
diff --git a/parm/archive/enkf.yaml.j2 b/parm/archive/enkf.yaml.j2
index bc5ef03cb8..92ed0095af 100644
--- a/parm/archive/enkf.yaml.j2
+++ b/parm/archive/enkf.yaml.j2
@@ -11,7 +11,7 @@ enkf:
{% endfor %}
- "logs/{{ cycle_YMDH }}/{{ RUN }}echgres.log"
- "logs/{{ cycle_YMDH }}/{{ RUN }}esfc.log"
- {% for grp in range(iaufhrs | length) %}
+ {% for grp in range(IAUFHRS | length) %}
- "logs/{{ cycle_YMDH }}/{{ RUN }}ecen{{ '%03d' % grp }}.log"
{% endfor %}
@@ -68,7 +68,7 @@ enkf:
{% if DOIAU %}
# IAU increments/analyses
- {% for fhr in iaufhrs if fhr != 6 %}
+ {% for fhr in IAUFHRS if fhr != 6 %}
{% if do_calc_increment %}
# Store analyses instead of increments
- "{{ COMIN_ATMOS_ANALYSIS_ENSSTAT | relpath(ROTDIR) }}/{{ head }}atma{{ '%03d' % fhr }}.ensmean.nc"
diff --git a/parm/archive/enkf_restarta_grp.yaml.j2 b/parm/archive/enkf_restarta_grp.yaml.j2
index 41e03edc92..13c49d4239 100644
--- a/parm/archive/enkf_restarta_grp.yaml.j2
+++ b/parm/archive/enkf_restarta_grp.yaml.j2
@@ -36,14 +36,14 @@ enkf_restarta_grp:
{% endif %}
# Member increments
- {% for iaufhr in iaufhrs if iaufhr != 6 %}
+ {% for iaufhr in IAUFHRS if iaufhr != 6 %}
{% set iaufhr = iaufhr %}
{% if do_calc_increment %}
- "{{ COMIN_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}atma{{ '%03d' % iaufhr }}.nc"
{% else %}
- "{{ COMIN_ATMOS_ANALYSIS_MEM | relpath(ROTDIR) }}/{{ head }}ratmi{{ '%03d' % iaufhr }}.nc"
{% endif %}
- {% endfor %} # iaufhr in iaufhrs
+ {% endfor %} # iaufhr in IAUFHRS
# Conventional data
{% if not lobsdiag_forenkf and not DO_JEDIATMENS %}
diff --git a/parm/archive/gdas.yaml.j2 b/parm/archive/gdas.yaml.j2
index ce5054a82f..db92141ede 100644
--- a/parm/archive/gdas.yaml.j2
+++ b/parm/archive/gdas.yaml.j2
@@ -49,7 +49,7 @@ gdas:
- "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmanl.ensres.nc"
{% if DOIAU %}
# Ensemble IAU analysis residuals
- {% for fhr in iaufhrs if fhr != 6 %}
+ {% for fhr in IAUFHRS if fhr != 6 %}
- "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atma{{ '%03d' % fhr }}.ensres.nc"
{% endfor %}
{% endif %}
@@ -108,7 +108,7 @@ gdas:
{% endif %} # End of cycled data
# Forecast and post logs
- - "logs/{{ cycle_YMDH }}/{{ RUN }}fcst.log"
+ - "logs/{{ cycle_YMDH }}/{{ RUN }}fcst_seg0.log"
{% for fhr in range(0, FHMAX + 1, 3) %}
{% set fhr3 = '%03d' % fhr %}
diff --git a/parm/archive/gdas_restarta.yaml.j2 b/parm/archive/gdas_restarta.yaml.j2
index 4c0522fed7..9d86292065 100644
--- a/parm/archive/gdas_restarta.yaml.j2
+++ b/parm/archive/gdas_restarta.yaml.j2
@@ -6,7 +6,7 @@ gdas_restarta:
# Deterministic analysis increments
- "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atminc.nc"
# IAU increments
- {% for iaufhr in iaufhrs if iaufhr != 6 %}
+ {% for iaufhr in IAUFHRS if iaufhr != 6 %}
- "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmi{{ "%03d" % iaufhr }}.nc"
{% endfor %}
diff --git a/parm/archive/gfs_netcdfa.yaml.j2 b/parm/archive/gfs_netcdfa.yaml.j2
index 8c0d4a813f..5a51f86148 100644
--- a/parm/archive/gfs_netcdfa.yaml.j2
+++ b/parm/archive/gfs_netcdfa.yaml.j2
@@ -6,7 +6,7 @@ gfs_netcdfa:
- "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmanl.nc"
- "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}sfcanl.nc"
- "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atminc.nc"
- {% for iauhr in iaufhrs if iauhr != 6 %}
+ {% for iauhr in IAUFHRS if iauhr != 6 %}
- "{{ COMIN_ATMOS_ANALYSIS | relpath(ROTDIR) }}/{{ head }}atmi{{ "%03d" % iauhr }}.nc"
{% endfor %}
optional:
diff --git a/parm/archive/master_enkf.yaml.j2 b/parm/archive/master_enkf.yaml.j2
index 3ebd52dbad..bb8b36c3e0 100644
--- a/parm/archive/master_enkf.yaml.j2
+++ b/parm/archive/master_enkf.yaml.j2
@@ -4,28 +4,6 @@
{% set cycle_YMDH = current_cycle | to_YMDH %}
{% set head = RUN + ".t" + cycle_HH + "z." %}
-# Split IAUFHRS into a list; typically either "3,6,9" or 6 (integer)
-{% if IAUFHRS is string %}
- # "3,6,9"
- {% set iaufhrs = [] %}
- {% for iaufhr in IAUFHRS.split(",") %}
- {% do iaufhrs.append(iaufhr | int) %}
- {% endfor %}
-{% else %}
- # 6 (integer)
- {% set iaufhrs = [IAUFHRS] %}
-{% endif %}
-
-# Repeat for IAUFHRS_ENKF
-{% if IAUFHRS_ENKF is string %}
- {% set iaufhrs_enkf = [] %}
- {% for iaufhr in IAUFHRS_ENKF.split(",") %}
- {% do iaufhrs_enkf.append(iaufhr | int) %}
- {% endfor %}
-{% else %}
- {% set iaufhrs_enkf = [IAUFHRS_ENKF] %}
-{% endif %}
-
# Determine which data to archive
datasets:
{% if ENSGRP == 0 %}
diff --git a/parm/archive/master_gdas.yaml.j2 b/parm/archive/master_gdas.yaml.j2
index 30a2175653..11e83d387b 100644
--- a/parm/archive/master_gdas.yaml.j2
+++ b/parm/archive/master_gdas.yaml.j2
@@ -3,16 +3,6 @@
{% set cycle_YMDH = current_cycle | to_YMDH %}
{% set head = "gdas.t" + cycle_HH + "z." %}
-# Split IAUFHRS into a list; typically either "3,6,9" or 6 (integer)
-{% if IAUFHRS is string %}
- {% set iaufhrs = [] %}
- {% for iaufhr in IAUFHRS.split(",") %}
- {% do iaufhrs.append(iaufhr | int) %}
- {% endfor %}
-{% else %}
- {% set iaufhrs = [IAUFHRS] %}
-{% endif %}
-
datasets:
# Always archive atmosphere forecast/analysis data
{% filter indent(width=4) %}
diff --git a/parm/archive/master_gfs.yaml.j2 b/parm/archive/master_gfs.yaml.j2
index b789598fac..ab9a00c95e 100644
--- a/parm/archive/master_gfs.yaml.j2
+++ b/parm/archive/master_gfs.yaml.j2
@@ -3,18 +3,6 @@
{% set cycle_YMD = current_cycle | to_YMD %}
{% set cycle_YMDH = current_cycle | to_YMDH %}
-# Split IAUFHRS into a list; typically either "3,6,9" or 6 (integer)
-{% if IAUFHRS is string %}
- # "3,6,9"
- {% set iaufhrs = [] %}
- {% for iaufhr in IAUFHRS.split(",") %}
- {% do iaufhrs.append(iaufhr | int) %}
- {% endfor %}
-{% else %}
- # 6 (integer)
- {% set iaufhrs = [IAUFHRS] %}
-{% endif %}
-
# Determine which data to archive
datasets:
# Always archive atmosphere forecast/analysis data
diff --git a/parm/config/gefs/config.base b/parm/config/gefs/config.base
index 735743b568..fad9e3421a 100644
--- a/parm/config/gefs/config.base
+++ b/parm/config/gefs/config.base
@@ -229,8 +229,11 @@ export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4:
# GFS output and frequency
export FHMIN_GFS=0
-export FHMIN=${FHMIN_GFS}
-export FHMAX_GFS=@FHMAX_GFS@
+export FHMAX_GFS="@FHMAX_GFS@"
+# Intermediate times to stop forecast when running in segments
+breakpnts="@FCST_BREAKPOINTS@"
+export FCST_SEGMENTS="${FHMIN_GFS},${breakpnts:+${breakpnts},}${FHMAX_GFS}"
+
export FHOUT_GFS=6
export FHMAX_HF_GFS=@FHMAX_HF_GFS@
export FHOUT_HF_GFS=1
diff --git a/parm/config/gefs/config.extractvars b/parm/config/gefs/config.extractvars
index 706fe18450..cc93fcf5e0 100644
--- a/parm/config/gefs/config.extractvars
+++ b/parm/config/gefs/config.extractvars
@@ -9,12 +9,12 @@ echo "BEGIN: config.extractvars"
export COMPRSCMD=${COMPRSCMD:-bzip2}
-export compress_ocn=0 #1: Compress extracted ocean product, 0: Do not compress extracted ocean product
-export compress_ice=0 #1: Compress extracted ice product, 0: Do not compress extracted ice product
+export compress_ocn=1 #1: Compress extracted ocean product, 0: Do not compress extracted ocean product
+export compress_ice=1 #1: Compress extracted ice product, 0: Do not compress extracted ice product
-export ocnres="5p00" # Resolution of ocean products
-export iceres="5p00" # Resolution of ice products
-export wavres="5p00" # Resolution of wave products
+export ocnres="1p00" # Resolution of ocean products
+export iceres="native" # Resolution of ice products
+export wavres="0p25" # Resolution of wave products
export depthvar_name="z_l" # Name of depth variable in NetCDF ocean products
export zmin="0." # Minimum depth to extract from NetCDF ocean products
diff --git a/parm/config/gefs/config.fcst b/parm/config/gefs/config.fcst
index e66fc15f87..407e48496e 100644
--- a/parm/config/gefs/config.fcst
+++ b/parm/config/gefs/config.fcst
@@ -30,14 +30,19 @@ string="--fv3 ${CASE}"
# shellcheck disable=SC2086
source "${EXPDIR}/config.ufs" ${string}
-# shellcheck disable=SC2153
-export FHMAX=${FHMAX_GFS}
+# Convert comma-separated string into bash array
+IFS=', ' read -ra segments <<< "${FCST_SEGMENTS}"
+# Determine MIN and MAX based on the forecast segment
+export FHMIN=${segments[${FCST_SEGMENT}]}
+export FHMAX=${segments[${FCST_SEGMENT}+1]}
+# Cap other FHMAX variables at FHMAX for the segment
+export FHMAX_HF=$(( FHMAX_HF_GFS > FHMAX ? FHMAX : FHMAX_HF_GFS ))
+export FHMAX_WAV=$(( FHMAX_WAV > FHMAX ? FHMAX : FHMAX_WAV ))
# shellcheck disable=SC2153
export FHOUT=${FHOUT_GFS}
-export FHMAX_HF=${FHMAX_HF_GFS}
export FHOUT_HF=${FHOUT_HF_GFS}
export FHOUT_OCN=${FHOUT_OCN_GFS}
-export FHOUT_ICE=${FHOUT_ICE_GFS}
+export FHOUT_ICE=${FHOUT_ICE_GFS}
# Get task specific resources
source "${EXPDIR}/config.resources" fcst
@@ -242,7 +247,11 @@ export FSICS="0"
#---------------------------------------------------------------------
# Write more variables to output
-export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table"
+if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then
+ export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table_replay"
+else
+ export DIAG_TABLE="${PARMgfs}/ufs/fv3/diag_table"
+fi
# Write gfs restart files to rerun fcst from any break point
export restart_interval=${restart_interval_gfs:-12}
diff --git a/parm/config/gefs/config.resources b/parm/config/gefs/config.resources
index 8c3ba88940..297bc08c05 100644
--- a/parm/config/gefs/config.resources
+++ b/parm/config/gefs/config.resources
@@ -272,7 +272,7 @@ case ${step} in
export walltime_gefs="00:30:00"
export ntasks_gefs=1
export threads_per_task_gefs=1
- export tasks_per_node_gefs="${ntasks}"
+ export tasks_per_node_gefs="${ntasks_gefs}"
export walltime_gfs="${walltime_gefs}"
export ntasks_gfs="${ntasks_gefs}"
export threads_per_tasks_gfs="${threads_per_task_gefs}"
diff --git a/parm/config/gefs/yaml/defaults.yaml b/parm/config/gefs/yaml/defaults.yaml
index d2b486e7ca..e4666d1aba 100644
--- a/parm/config/gefs/yaml/defaults.yaml
+++ b/parm/config/gefs/yaml/defaults.yaml
@@ -11,5 +11,6 @@ base:
DO_EXTRACTVARS: "NO"
FHMAX_GFS: 120
FHMAX_HF_GFS: 0
+ FCST_BREAKPOINTS: "48"
REPLAY_ICS: "NO"
USE_OCN_PERTURB_FILES: "false"
diff --git a/parm/config/gfs/config.aeroanl b/parm/config/gfs/config.aeroanl
index 24a5e92644..a1b7e1d44b 100644
--- a/parm/config/gfs/config.aeroanl
+++ b/parm/config/gfs/config.aeroanl
@@ -24,7 +24,7 @@ if [[ "${DOIAU}" == "YES" ]]; then
export aero_bkg_times="3,6,9"
export JEDIYAML="${PARMgfs}/gdas/aero/variational/3dvar_fgat_gfs_aero.yaml.j2"
else
- export aero_bkg_times="6"
+ export aero_bkg_times="6," # Trailing comma is necessary so this is treated as a list
export JEDIYAML="${PARMgfs}/gdas/aero/variational/3dvar_gfs_aero.yaml.j2"
fi
diff --git a/parm/config/gfs/config.base b/parm/config/gfs/config.base
index 56005199aa..e6a626cfe3 100644
--- a/parm/config/gfs/config.base
+++ b/parm/config/gfs/config.base
@@ -285,7 +285,10 @@ export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4:
# GFS output and frequency
export FHMIN_GFS=0
-export FHMAX_GFS=@FHMAX_GFS@
+export FHMAX_GFS="@FHMAX_GFS@"
+# Intermediate times to stop forecast when running in segments
+breakpnts="@FCST_BREAKPOINTS@"
+export FCST_SEGMENTS="${FHMIN_GFS},${breakpnts:+${breakpnts},}${FHMAX_GFS}"
export FHOUT_GFS=3 # 3 for ops
export FHMAX_HF_GFS=@FHMAX_HF_GFS@
export FHOUT_HF_GFS=1
@@ -384,10 +387,10 @@ fi
# if 3DVAR and IAU
if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then
- export IAUFHRS="6"
+ export IAUFHRS="6,"
export IAU_FHROT="3"
export IAU_FILTER_INCREMENTS=".true."
- export IAUFHRS_ENKF="6"
+ export IAUFHRS_ENKF="6,"
fi
# Generate post-processing ensemble spread files
@@ -397,10 +400,10 @@ export ENKF_SPREAD="YES"
if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then
export IAU_OFFSET=0
export IAU_FHROT=0
- export IAUFHRS="6"
+ export IAUFHRS="6,"
fi
-if [[ "${DOIAU_ENKF}" = "NO" ]]; then export IAUFHRS_ENKF="6"; fi
+if [[ "${DOIAU_ENKF}" = "NO" ]]; then export IAUFHRS_ENKF="6,"; fi
# Determine restart intervals
# For IAU, write restarts at beginning of window also
@@ -480,4 +483,12 @@ export OFFSET_START_HOUR=0
# Number of regional collectives to create soundings for
export NUM_SND_COLLECTIVES=${NUM_SND_COLLECTIVES:-9}
+# The tracker, genesis, and METplus jobs are not supported on AWS yet
+# TODO: we should place these in workflow/hosts/awspw.yaml as part of AWS setup, not for general.
+if [[ "${machine}" == "AWSPW" ]]; then
+ export DO_TRACKER="NO"
+ export DO_GENESIS="NO"
+ export DO_METP="NO"
+fi
+
echo "END: config.base"
diff --git a/parm/config/gfs/config.fcst b/parm/config/gfs/config.fcst
index 4982b8f6e6..2743ea0745 100644
--- a/parm/config/gfs/config.fcst
+++ b/parm/config/gfs/config.fcst
@@ -33,11 +33,16 @@ source "${EXPDIR}/config.ufs" ${string}
# Forecast length for GFS forecast
case ${RUN} in
*gfs)
- # shellcheck disable=SC2153
- export FHMAX=${FHMAX_GFS}
+ # Convert comma-separated string into bash array
+ IFS=', ' read -ra segments <<< "${FCST_SEGMENTS}"
+ # Determine MIN and MAX based on the forecast segment
+ export FHMIN=${segments[${FCST_SEGMENT}]}
+ export FHMAX=${segments[${FCST_SEGMENT}+1]}
+ # Cap other FHMAX variables at FHMAX for the segment
+ export FHMAX_HF=$(( FHMAX_HF_GFS > FHMAX ? FHMAX : FHMAX_HF_GFS ))
+ export FHMAX_WAV=$(( FHMAX_WAV > FHMAX ? FHMAX : FHMAX_WAV ))
# shellcheck disable=SC2153
export FHOUT=${FHOUT_GFS}
- export FHMAX_HF=${FHMAX_HF_GFS}
export FHOUT_HF=${FHOUT_HF_GFS}
export FHOUT_OCN=${FHOUT_OCN_GFS}
export FHOUT_ICE=${FHOUT_ICE_GFS}
diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources
index a596629e76..cec2aef238 100644
--- a/parm/config/gfs/config.resources
+++ b/parm/config/gfs/config.resources
@@ -106,7 +106,8 @@ case ${machine} in
;;
"AWSPW")
export PARTITION_BATCH="compute"
- max_tasks_per_node=40
+ npe_node_max=36
+ max_tasks_per_node=36
# TODO Supply a max mem/node value for AWS
# shellcheck disable=SC2034
mem_node_max=""
diff --git a/parm/config/gfs/config.resources.AWSPW b/parm/config/gfs/config.resources.AWSPW
new file mode 100644
index 0000000000..8649713bb7
--- /dev/null
+++ b/parm/config/gfs/config.resources.AWSPW
@@ -0,0 +1,10 @@
+#! /usr/bin/env bash
+
+# AWS-specific job resources
+
+export is_exclusive="True"
+
+# shellcheck disable=SC2312
+for mem_var in $(env | grep '^memory_' | cut -d= -f1); do
+ unset "${mem_var}"
+done
diff --git a/parm/config/gfs/yaml/defaults.yaml b/parm/config/gfs/yaml/defaults.yaml
index da4d587dff..24729ac43e 100644
--- a/parm/config/gfs/yaml/defaults.yaml
+++ b/parm/config/gfs/yaml/defaults.yaml
@@ -16,6 +16,7 @@ base:
DO_METP: "YES"
FHMAX_GFS: 120
FHMAX_HF_GFS: 0
+ FCST_BREAKPOINTS: ""
DO_VRFY_OCEANDA: "NO"
GSI_SOILANAL: "NO"
EUPD_CYC: "gdas"
diff --git a/parm/post/oceanice_products_gefs.yaml b/parm/post/oceanice_products_gefs.yaml
index 74c0f0653b..fea88df2bb 100644
--- a/parm/post/oceanice_products_gefs.yaml
+++ b/parm/post/oceanice_products_gefs.yaml
@@ -39,14 +39,15 @@ ocean:
- ["{{ COM_OCEAN_HISTORY }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.{{ interval }}hr_avg.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/ocean.nc"]
data_out:
mkdir:
- - "{{ COM_OCEAN_NETCDF }}"
+ - "{{ COM_OCEAN_NETCDF }}/native"
{% for grid in product_grids %}
+ - "{{ COM_OCEAN_NETCDF }}/{{ grid }}"
- "{{ COM_OCEAN_GRIB }}/{{ grid }}"
{% endfor %}
copy:
- - ["{{ DATA }}/ocean_subset.nc", "{{ COM_OCEAN_NETCDF }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.native.f{{ '%03d' % forecast_hour }}.nc"]
+ - ["{{ DATA }}/ocean_subset.nc", "{{ COM_OCEAN_NETCDF }}/native/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.native.f{{ '%03d' % forecast_hour }}.nc"]
{% for grid in product_grids %}
- - ["{{ DATA }}/ocean.{{ grid }}.nc", "{{ COM_OCEAN_NETCDF }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.nc"]
+ - ["{{ DATA }}/ocean.{{ grid }}.nc", "{{ COM_OCEAN_NETCDF }}/{{ grid }}/{{ RUN }}.ocean.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.nc"]
{% endfor %}
ice:
@@ -62,12 +63,13 @@ ice:
- ["{{ COM_ICE_HISTORY }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.{{ interval }}hr_avg.f{{ '%03d' % forecast_hour }}.nc", "{{ DATA }}/ice.nc"]
data_out:
mkdir:
- - "{{ COM_ICE_NETCDF }}"
+ - "{{ COM_ICE_NETCDF }}/native"
{% for grid in product_grids %}
+ - "{{ COM_ICE_NETCDF }}/{{ grid }}"
- "{{ COM_ICE_GRIB }}/{{ grid }}"
{% endfor %}
copy:
- - ["{{ DATA }}/ice_subset.nc", "{{ COM_ICE_NETCDF }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.native.f{{ '%03d' % forecast_hour }}.nc"]
+ - ["{{ DATA }}/ice_subset.nc", "{{ COM_ICE_NETCDF }}/native/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.native.f{{ '%03d' % forecast_hour }}.nc"]
{% for grid in product_grids %}
- - ["{{ DATA }}/ice.{{ grid }}.nc", "{{ COM_ICE_NETCDF }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.nc"]
+ - ["{{ DATA }}/ice.{{ grid }}.nc", "{{ COM_ICE_NETCDF }}/{{ grid }}/{{ RUN }}.ice.t{{ current_cycle | strftime('%H') }}z.{{ grid }}.f{{ '%03d' % forecast_hour }}.nc"]
{% endfor %}
diff --git a/parm/ufs/fv3/diag_table_replay b/parm/ufs/fv3/diag_table_replay
new file mode 100644
index 0000000000..01f2cf9794
--- /dev/null
+++ b/parm/ufs/fv3/diag_table_replay
@@ -0,0 +1,337 @@
+"fv3_history", 0, "hours", 1, "hours", "time"
+"fv3_history2d", 0, "hours", 1, "hours", "time"
+"@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", @[FHOUT_OCN], "hours", 1, "hours", "time", @[FHOUT_OCN], "hours", "@[SYEAR] @[SMONTH] @[SDAY] @[CHOUR_offset] 0 0", @[FHOUT_OCN], "hours"
+"@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", @[FHOUT_OCN], "hours", 1, "hours", "time", @[FHOUT_OCN], "hours", "@[SYEAR1] @[SMONTH1] @[SDAY1] @[CHOUR1] 0 0"
+
+##############
+# Ocean fields first lead time
+##############
+# static fields
+"ocean_model", "geolon", "geolon", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "geolat", "geolat", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "geolon_c", "geolon_c", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "geolat_c", "geolat_c", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "geolon_u", "geolon_u", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "geolat_u", "geolat_u", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "geolon_v", "geolon_v", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "geolat_v", "geolat_v", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+#"ocean_model", "depth_ocean", "depth_ocean", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+#"ocean_model", "wet", "wet", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "wet_c", "wet_c", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "wet_u", "wet_u", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "wet_v", "wet_v", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "sin_rot", "sin_rot", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+"ocean_model", "cos_rot", "cos_rot", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+
+# ocean output TSUV and others
+"ocean_model", "SSH", "SSH", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+"ocean_model", "SST", "SST", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+"ocean_model", "SSS", "SSS", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+"ocean_model", "speed", "speed", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+"ocean_model", "SSU", "SSU", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+"ocean_model", "SSV", "SSV", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+"ocean_model", "frazil", "frazil", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+"ocean_model", "ePBL_h_ML", "ePBL", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+"ocean_model", "MLD_003", "MLD_003", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+"ocean_model", "MLD_0125", "MLD_0125", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+"ocean_model", "tob", "tob", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+
+# Z-Space Fields Provided for CMIP6 (CMOR Names):
+"ocean_model_z", "uo", "uo", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+"ocean_model_z", "vo", "vo", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+"ocean_model_z", "so", "so", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+"ocean_model_z", "temp", "temp", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+
+# forcing
+"ocean_model", "taux", "taux", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+"ocean_model", "tauy", "tauy", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+"ocean_model", "latent", "latent", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+"ocean_model", "sensible", "sensible", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+"ocean_model", "SW", "SW", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+"ocean_model", "LW", "LW", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+"ocean_model", "evap", "evap", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+"ocean_model", "lprec", "lprec", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+"ocean_model", "lrunoff", "lrunoff", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+#"ocean_model", "frunoff", "frunoff", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+"ocean_model", "fprec", "fprec", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+"ocean_model", "LwLatSens", "LwLatSens", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+"ocean_model", "Heat_PmE", "Heat_PmE", "@[MOM6_OUTPUT_DIR]/ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2
+
+##############
+# Ocean fields second lead time and after
+#############
+# static fields
+ocean_model, "geolon", "geolon", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+ocean_model, "geolat", "geolat", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+ocean_model, "geolon_c", "geolon_c", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+ocean_model, "geolat_c", "geolat_c", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+ocean_model, "geolon_u", "geolon_u", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+ocean_model, "geolat_u", "geolat_u", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+ocean_model, "geolon_v", "geolon_v", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+ocean_model, "geolat_v", "geolat_v", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+#"ocean_model", "depth_ocean", "depth_ocean", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+#"ocean_model", "wet", "wet", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+ocean_model, "wet_c", "wet_c", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+ocean_model, "wet_u", "wet_u", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+ocean_model, "wet_v", "wet_v", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+ocean_model, "sin_rot", "sin_rot", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+ocean_model, "cos_rot", "cos_rot", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .false., "none", 2
+
+# ocean output TSUV and others
+ocean_model, "SSH", "SSH", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+ocean_model, "SST", "SST", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+ocean_model, "SSS", "SSS", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+ocean_model, "speed", "speed", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+ocean_model, "SSU", "SSU", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+ocean_model, "SSV", "SSV", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+ocean_model, "frazil", "frazil", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+ocean_model, "ePBL_h_ML", "ePBL", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+ocean_model, "MLD_003", "MLD_003", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+ocean_model, "MLD_0125", "MLD_0125", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+ocean_model, "tob", "tob", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+
+# Z-Space Fields Provided for CMIP6 (CMOR Names):
+ocean_model_z, "uo", "uo", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+ocean_model_z, "vo", "vo", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+ocean_model_z, "so", "so", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+ocean_model_z, "temp", "temp", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr", "all", .true., "none", 2
+
+# forcing
+ocean_model, "taux", "taux", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2
+ocean_model, "tauy", "tauy", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2
+ocean_model, "latent", "latent", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2
+ocean_model, "sensible", "sensible", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2
+ocean_model, "SW", "SW", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2
+ocean_model, "LW", "LW", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2
+ocean_model, "evap", "evap", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2
+ocean_model, "lprec", "lprec", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2
+ocean_model, "lrunoff", "lrunoff", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2
+#"ocean_model", "frunoff", "frunoff", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2
+ocean_model, "fprec", "fprec", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2
+ocean_model, "LwLatSens", "LwLatSens", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2
+ocean_model, "Heat_PmE", "Heat_PmE", "@[MOM6_OUTPUT_DIR]/ocn_lead1%4yr%2mo%2dy%2hr","all",.true.,"none",2
+
+###################
+# Atmosphere fields
+###################
+"gfs_dyn", "ucomp", "ugrd", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "vcomp", "vgrd", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "sphum", "spfh", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "temp", "tmp", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "liq_wat", "clwmr", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "ice_wat", "icmr", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "snowwat", "snmr", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "rainwat", "rwmr", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "graupel", "grle", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "ice_nc", "nccice", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "rain_nc", "nconrd", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "o3mr", "o3mr", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "cld_amt", "cld_amt", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "delp", "dpres", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "delz", "delz", "fv3_history", "all", .false., "none", 2
+#"gfs_dyn", "pfhy", "preshy", "fv3_history", "all", .false., "none", 2
+#"gfs_dyn", "pfnh", "presnh", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "w", "dzdt", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "ps", "pressfc", "fv3_history", "all", .false., "none", 2
+"gfs_dyn", "hs", "hgtsfc", "fv3_history", "all", .false., "none", 2
+"gfs_phys", "refl_10cm", "refl_10cm", "fv3_history", "all", .false., "none", 2
+
+"gfs_phys", "cldfra", "cldfra", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "frzr", "frzr", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "frzrb", "frzrb", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "frozr", "frozr", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "frozrb", "frozrb", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "tsnowp", "tsnowp", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "tsnowpb", "tsnowpb", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "rhonewsn", "rhonewsn", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "ALBDO_ave", "albdo_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "cnvprcp_ave", "cprat_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "cnvprcpb_ave", "cpratb_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "totprcp_ave", "prate_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "totprcpb_ave", "prateb_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "DLWRF", "dlwrf_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "DLWRFI", "dlwrf", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "ULWRF", "ulwrf_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "ULWRFI", "ulwrf", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "DSWRF", "dswrf_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "DSWRFI", "dswrf", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "USWRF", "uswrf_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "USWRFI", "uswrf", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "DSWRFtoa", "dswrf_avetoa", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "USWRFtoa", "uswrf_avetoa", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "ULWRFtoa", "ulwrf_avetoa", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "gflux_ave", "gflux_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "hpbl", "hpbl", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "lhtfl_ave", "lhtfl_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "shtfl_ave", "shtfl_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "pwat", "pwat", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "soilm", "soilm", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "TCDC_aveclm", "tcdc_aveclm", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "TCDC_avebndcl", "tcdc_avebndcl", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "TCDC_avelcl", "tcdc_avelcl", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "TCDC_avemcl", "tcdc_avemcl", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "TCDC_avehcl", "tcdc_avehcl", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "TCDCcnvcl", "tcdccnvcl", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "PRES_avelct", "pres_avelct", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "PRES_avelcb", "pres_avelcb", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "PRES_avemct", "pres_avemct", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "PRES_avemcb", "pres_avemcb", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "PRES_avehct", "pres_avehct", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "PRES_avehcb", "pres_avehcb", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "PREScnvclt", "prescnvclt", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "PREScnvclb", "prescnvclb", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "TEMP_avehct", "tmp_avehct", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "TEMP_avemct", "tmp_avemct", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "TEMP_avelct", "tmp_avelct", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "u-gwd_ave", "u-gwd_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "v-gwd_ave", "v-gwd_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "dusfc", "uflx_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "dvsfc", "vflx_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "acond", "acond", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "cduvb_ave", "cduvb_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "cpofp", "cpofp", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "duvb_ave", "duvb_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "csdlf_ave", "csdlf", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "csusf_ave", "csusf", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "csusf_avetoa", "csusftoa", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "csdsf_ave", "csdsf", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "csulf_ave", "csulf", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "csulf_avetoa", "csulftoa", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "cwork_ave", "cwork_aveclm", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "evbs_ave", "evbs_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "evcw_ave", "evcw_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "fldcp", "fldcp", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "hgt_hyblev1", "hgt_hyblev1", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "spfh_hyblev1", "spfh_hyblev1", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "ugrd_hyblev1", "ugrd_hyblev1", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "vgrd_hyblev1", "vgrd_hyblev1", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "tmp_hyblev1", "tmp_hyblev1", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "gfluxi", "gflux", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "lhtfl", "lhtfl", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "shtfl", "shtfl", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "pevpr", "pevpr", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "pevpr_ave", "pevpr_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "sbsno_ave", "sbsno_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "sfexc", "sfexc", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "snohf", "snohf", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "snowc_ave", "snowc_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "spfhmax2m", "spfhmax_max2m", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "spfhmin2m", "spfhmin_min2m", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "tmpmax2m", "tmax_max2m", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "tmpmin2m", "tmin_min2m", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "ssrun_acc", "ssrun_acc", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "sunsd_acc", "sunsd_acc", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "watr_acc", "watr_acc", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "wilt", "wilt", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "vbdsf_ave", "vbdsf_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "vddsf_ave", "vddsf_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "nbdsf_ave", "nbdsf_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "nddsf_ave", "nddsf_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "trans_ave", "trans_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "psurf", "pressfc", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "u10m", "ugrd10m", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "v10m", "vgrd10m", "fv3_history2d", "all", .false., "none", 2
+
+"gfs_phys", "pahi", "pahi", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "pah_ave", "pah_ave", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "ecan_acc", "ecan_acc", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "etran_acc", "etran_acc", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "edir_acc", "edir_acc", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "wa_acc", "wa_acc", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "lfrac", "lfrac", "fv3_history2d", "all", .false., "none", 2
+
+"gfs_sfc", "crain", "crain", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "tprcp", "tprcp", "fv3_history2d", "all", .false., "none", 2
+"gfs_phys", "rainc", "cnvprcp", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "hgtsfc", "orog", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "weasd", "weasd", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "f10m", "f10m", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "q2m", "spfh2m", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "t2m", "tmp2m", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "tsfc", "tmpsfc", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "vtype", "vtype", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "stype", "sotyp", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "slmsksfc", "land", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "vfracsfc", "veg", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "zorlsfc", "sfcr", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "uustar", "fricv", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilt1", "soilt1", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilt2", "soilt2", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilt3", "soilt3", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilt4", "soilt4", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilw1", "soilw1", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilw2", "soilw2", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilw3", "soilw3", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "soilw4", "soilw4", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "slc_1", "soill1", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "slc_2", "soill2", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "slc_3", "soill3", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "slc_4", "soill4", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "slope", "sltyp", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "alnsf", "alnsf", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "alnwf", "alnwf", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "alvsf", "alvsf", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "alvwf", "alvwf", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "canopy", "cnwat", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "facsf", "facsf", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "facwf", "facwf", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "ffhh", "ffhh", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "ffmm", "ffmm", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "fice", "icec", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "hice", "icetk", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "snoalb", "snoalb", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "shdmax", "shdmax", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "shdmin", "shdmin", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "snowd", "snod", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "tg3", "tg3", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "tisfc", "tisfc", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "tref", "tref", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "z_c", "zc", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "c_0", "c0", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "c_d", "cd", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "w_0", "w0", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "w_d", "wd", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "xt", "xt", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "xz", "xz", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "dt_cool", "dtcool", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "xs", "xs", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "xu", "xu", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "xv", "xv", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "xtts", "xtts", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "xzts", "xzts", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "d_conv", "dconv", "fv3_history2d", "all", .false., "none", 2
+"gfs_sfc", "qrain", "qrain", "fv3_history2d", "all", .false., "none", 2
+
+#=============================================================================================
+#
+#====> This file can be used with diag_manager/v2.0a (or higher) <====
+#
+#
+# FORMATS FOR FILE ENTRIES (not all input values are used)
+# ------------------------
+#
+#"file_name", output_freq, "output_units", format, "time_units", "long_name",
+#
+#
+#output_freq: > 0 output frequency in "output_units"
+# = 0 output frequency every time step
+# =-1 output frequency at end of run
+#
+#output_units = units used for output frequency
+# (years, months, days, minutes, hours, seconds)
+#
+#time_units = units used to label the time axis
+# (days, minutes, hours, seconds)
+#
+#
+# FORMAT FOR FIELD ENTRIES (not all input values are used)
+# ------------------------
+#
+#"module_name", "field_name", "output_name", "file_name", "time_sampling", time_avg, "other_opts", packing
+#
+#time_avg = .true. or .false.
+#
+#packing = 1 double precision
+# = 2 float
+# = 4 packed 16-bit integers
+# = 8 packed 1-byte (not tested?)
diff --git a/sorc/build_all.sh b/sorc/build_all.sh
index 28f52fd306..b6c4e6cc1c 100755
--- a/sorc/build_all.sh
+++ b/sorc/build_all.sh
@@ -145,7 +145,7 @@ build_opts["ww3prepost"]="${_wave_opt} ${_verbose_opt} ${_build_ufs_opt} ${_buil
# Optional DA builds
if [[ "${_build_ufsda}" == "YES" ]]; then
- if [[ "${MACHINE_ID}" != "orion" && "${MACHINE_ID}" != "hera" && "${MACHINE_ID}" != "hercules" && "${MACHINE_ID}" != "wcoss2" ]]; then
+ if [[ "${MACHINE_ID}" != "orion" && "${MACHINE_ID}" != "hera" && "${MACHINE_ID}" != "hercules" && "${MACHINE_ID}" != "wcoss2" && "${MACHINE_ID}" != "noaacloud" ]]; then
echo "NOTE: The GDAS App is not supported on ${MACHINE_ID}. Disabling build."
else
build_jobs["gdas"]=8
diff --git a/sorc/build_ufs.sh b/sorc/build_ufs.sh
index 7e84eaebc2..44c8c7a2ad 100755
--- a/sorc/build_ufs.sh
+++ b/sorc/build_ufs.sh
@@ -41,30 +41,9 @@ COMPILE_NR=0
CLEAN_BEFORE=YES
CLEAN_AFTER=NO
-if [[ "${MACHINE_ID}" != "noaacloud" ]]; then
- BUILD_JOBS=${BUILD_JOBS:-8} ./tests/compile.sh "${MACHINE_ID}" "${MAKE_OPT}" "${COMPILE_NR}" "intel" "${CLEAN_BEFORE}" "${CLEAN_AFTER}"
- mv "./tests/fv3_${COMPILE_NR}.exe" ./tests/ufs_model.x
- mv "./tests/modules.fv3_${COMPILE_NR}.lua" ./tests/modules.ufs_model.lua
- cp "./modulefiles/ufs_common.lua" ./tests/ufs_common.lua
-else
-
- if [[ "${PW_CSP:-}" == "aws" ]]; then
- set +x
- # TODO: This will need to be addressed further when the EPIC stacks are available/supported.
- module use /contrib/spack-stack/envs/ufswm/install/modulefiles/Core
- module load stack-intel
- module load stack-intel-oneapi-mpi
- module load ufs-weather-model-env/1.0.0
- # TODO: It is still uncertain why this is the only module that is
- # missing; check the spack build as this needed to be added manually.
- module load w3emc/2.9.2 # TODO: This has similar issues for the EPIC stack.
- module list
- set -x
- fi
-
- export CMAKE_FLAGS="${MAKE_OPT}"
- BUILD_JOBS=${BUILD_JOBS:-8} ./build.sh
- mv "${cwd}/ufs_model.fd/build/ufs_model" "${cwd}/ufs_model.fd/tests/ufs_model.x"
-fi
+BUILD_JOBS=${BUILD_JOBS:-8} ./tests/compile.sh "${MACHINE_ID}" "${MAKE_OPT}" "${COMPILE_NR}" "intel" "${CLEAN_BEFORE}" "${CLEAN_AFTER}"
+mv "./tests/fv3_${COMPILE_NR}.exe" ./tests/ufs_model.x
+mv "./tests/modules.fv3_${COMPILE_NR}.lua" ./tests/modules.ufs_model.lua
+cp "./modulefiles/ufs_common.lua" ./tests/ufs_common.lua
exit 0
diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh
index be912292fe..ae30e7a645 100755
--- a/sorc/link_workflow.sh
+++ b/sorc/link_workflow.sh
@@ -76,6 +76,7 @@ case "${machine}" in
"jet") FIX_DIR="/lfs4/HFIP/hfv3gfs/glopara/git/fv3gfs/fix" ;;
"s4") FIX_DIR="/data/prod/glopara/fix" ;;
"gaea") FIX_DIR="/gpfs/f5/ufs-ard/world-shared/global/glopara/data/fix" ;;
+ "noaacloud") FIX_DIR="/contrib/global-workflow-shared-data/fix" ;;
*)
echo "FATAL: Unknown target machine ${machine}, couldn't set FIX_DIR"
exit 1
diff --git a/sorc/wxflow b/sorc/wxflow
index d314e06510..e1ef697430 160000
--- a/sorc/wxflow
+++ b/sorc/wxflow
@@ -1 +1 @@
-Subproject commit d314e065101041a4d45e5a11ec19cd2dc5f38c67
+Subproject commit e1ef697430c09d2b1a0560f21f11c7a32ed5f3e2
diff --git a/ush/atmos_extractvars.sh b/ush/atmos_extractvars.sh
index 70e86b2f4e..5fea8497c6 100755
--- a/ush/atmos_extractvars.sh
+++ b/ush/atmos_extractvars.sh
@@ -31,7 +31,16 @@ for outtype in "f2d" "f3d"; do
outdirpre="${subdata}/${outtype}"
[[ -d "${outdirpre}" ]] || mkdir -p "${outdirpre}"
- nh=${FHMIN}
+ if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then
+ if [[ "${outtype}" == "f2d" ]]; then
+ nh=${OFFSET_START_HOUR}
+ elif [[ "${outtype}" == "f3d" ]]; then
+ nh=${FHOUT_GFS}
+ fi
+ else
+ nh=${FHMIN}
+ fi
+
while (( nh <= FHMAX_GFS )); do
fnh=$(printf "%3.3d" "${nh}")
@@ -45,11 +54,15 @@ for outtype in "f2d" "f3d"; do
outres="1p00"
fi
- if (( nh <= FHMAX_HF_GFS )); then
- outfreq=${FHOUT_HF_GFS}
- else
- outfreq=${FHOUT_GFS}
- fi
+ if [[ "${outtype}" == "f2d" ]]; then
+ if (( nh < FHMAX_HF_GFS )); then
+ outfreq=${FHOUT_HF_GFS}
+ else
+ outfreq=${FHOUT_GFS}
+ fi
+ elif [[ "${outtype}" == "f3d" ]]; then
+ outfreq=${FHOUT_GFS}
+ fi
com_var="COMIN_ATMOS_GRIB_${outres}"
infile1="${!com_var}/${RUN}.t${cyc}z.pgrb2.${outres}.f${fnh}"
diff --git a/ush/calcanl_gfs.py b/ush/calcanl_gfs.py
index 5d97d25dfd..9dc6ff9fa6 100755
--- a/ush/calcanl_gfs.py
+++ b/ush/calcanl_gfs.py
@@ -11,6 +11,7 @@
import gsi_utils
from collections import OrderedDict
import datetime
+from wxflow import cast_as_dtype
python2fortran_bool = {True: '.true.', False: '.false.'}
@@ -358,7 +359,7 @@ def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix,
ExecAnl = os.getenv('CALCANLEXEC', './calc_analysis.x')
ExecChgresInc = os.getenv('CHGRESINCEXEC', './interp_inc.x')
NEMSGet = os.getenv('NEMSIOGET', 'nemsio_get')
- IAUHrs = list(map(int, os.getenv('IAUFHRS', '6').split(',')))
+ IAUHrs = cast_as_dtype(os.getenv('IAUFHRS', '6,'))
Run = os.getenv('RUN', 'gdas')
JEDI = gsi_utils.isTrue(os.getenv('DO_JEDIATMVAR', 'YES'))
diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh
index e659d2ce80..8af9054972 100755
--- a/ush/forecast_postdet.sh
+++ b/ush/forecast_postdet.sh
@@ -206,11 +206,24 @@ EOF
for fhr in ${FV3_OUTPUT_FH}; do
FH3=$(printf %03i "${fhr}")
FH2=$(printf %02i "${fhr}")
- ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.atmf${FH3}.nc" "atmf${FH3}.nc"
- ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${FH3}.nc" "sfcf${FH3}.nc"
- ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.atm.logf${FH3}.txt" "log.atm.f${FH3}"
- ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.cubed_sphere_grid_atmf${FH3}.nc" "cubed_sphere_grid_atmf${FH3}.nc"
- ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.cubed_sphere_grid_sfcf${FH3}.nc" "cubed_sphere_grid_sfcf${FH3}.nc"
+ # When replaying, the time format written by the model into the filename is HHH-MM-SS
+ # because first fhr is a decimal number
+ if [[ ${REPLAY_ICS:-NO} == "YES" ]] && (( fhr >= OFFSET_START_HOUR )); then
+ local hhmmss_substring=${FV3_OUTPUT_FH_hhmmss/" ${FH3}-"*/} # Extract substring that contains all lead times up to the one space before target lead HHH-MM-SS
+ local hhmmss_substring_len=$(( ${#hhmmss_substring} + 1 )) # Get the size of the substring and add 1 to account for space
+ local f_hhmmss=${FV3_OUTPUT_FH_hhmmss:${hhmmss_substring_len}:9} # extract HHH-MM-SS for target lead time
+ ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.atmf${FH3}.nc" "atmf${f_hhmmss}.nc"
+ ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${FH3}.nc" "sfcf${f_hhmmss}.nc"
+ ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.atm.logf${FH3}.txt" "log.atm.f${f_hhmmss}"
+ ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.cubed_sphere_grid_atmf${FH3}.nc" "cubed_sphere_grid_atmf${f_hhmmss}.nc"
+ ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.cubed_sphere_grid_sfcf${FH3}.nc" "cubed_sphere_grid_sfcf${f_hhmmss}.nc"
+ else
+ ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.atmf${FH3}.nc" "atmf${FH3}.nc"
+ ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${FH3}.nc" "sfcf${FH3}.nc"
+ ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.atm.logf${FH3}.txt" "log.atm.f${FH3}"
+ ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.cubed_sphere_grid_atmf${FH3}.nc" "cubed_sphere_grid_atmf${FH3}.nc"
+ ${NLN} "${COMOUT_ATMOS_HISTORY}/${RUN}.t${cyc}z.cubed_sphere_grid_sfcf${FH3}.nc" "cubed_sphere_grid_sfcf${FH3}.nc"
+ fi
if [[ "${WRITE_DOPOST}" == ".true." ]]; then
${NLN} "${COMOUT_ATMOS_MASTER}/${RUN}.t${cyc}z.master.grb2f${FH3}" "GFSPRS.GrbF${FH2}"
${NLN} "${COMOUT_ATMOS_MASTER}/${RUN}.t${cyc}z.sfluxgrbf${FH3}.grib2" "GFSFLX.GrbF${FH2}"
@@ -454,10 +467,19 @@ MOM6_postdet() {
(( midpoint = last_fhr + interval/2 ))
vdate=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${fhr} hours" +%Y%m%d%H)
- vdate_mid=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${midpoint} hours" +%Y%m%d%H)
+ # If OFFSET_START_HOUR is greater than 0, it should be added to the midpoint for the first lead time
+ if (( OFFSET_START_HOUR > 0 )) && (( fhr == FHOUT_OCN ));then
+ vdate_mid=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + $(( midpoint + OFFSET_START_HOUR )) hours" +%Y%m%d%H)
+ else
+ vdate_mid=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${midpoint} hours" +%Y%m%d%H)
+ fi
# Native model output uses window midpoint in the filename, but we are mapping that to the end of the period for COM
- source_file="ocn_${vdate_mid:0:4}_${vdate_mid:4:2}_${vdate_mid:6:2}_${vdate_mid:8:2}.nc"
+ if (( OFFSET_START_HOUR > 0 )) && (( fhr == FHOUT_OCN ));then
+ source_file="ocn_lead1_${vdate_mid:0:4}_${vdate_mid:4:2}_${vdate_mid:6:2}_${vdate_mid:8:2}.nc"
+ else
+ source_file="ocn_${vdate_mid:0:4}_${vdate_mid:4:2}_${vdate_mid:6:2}_${vdate_mid:8:2}.nc"
+ fi
dest_file="${RUN}.ocean.t${cyc}z.${interval}hr_avg.f${fhr3}.nc"
${NLN} "${COMOUT_OCEAN_HISTORY}/${dest_file}" "${DATA}/MOM6_OUTPUT/${source_file}"
diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh
index ebf7cfd282..d1a332716a 100755
--- a/ush/forecast_predet.sh
+++ b/ush/forecast_predet.sh
@@ -77,6 +77,7 @@ common_predet(){
CDATE=${CDATE:-"${PDY}${cyc}"}
ENSMEM=${ENSMEM:-000}
+ MEMBER=$(( 10#${ENSMEM:-"-1"} )) # -1: control, 0: ensemble mean, >0: ensemble member $MEMBER
# Define significant cycles
half_window=$(( assim_freq / 2 ))
@@ -145,6 +146,7 @@ FV3_predet(){
fi
# Convert output settings into an explicit list for FV3
+ # Create an FV3 fhr list to be used in the filenames
FV3_OUTPUT_FH=""
local fhr=${FHMIN}
if (( FHOUT_HF > 0 && FHMAX_HF > 0 )); then
@@ -153,8 +155,36 @@ FV3_predet(){
fi
FV3_OUTPUT_FH="${FV3_OUTPUT_FH} $(seq -s ' ' "${fhr}" "${FHOUT}" "${FHMAX}")"
+ # Create an FV3 fhr list to be used in the namelist
+ # The FV3 fhr list for the namelist and the FV3 fhr list for the filenames
+ # are only different when REPLAY_ICS is set to YES
+ if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then
+ local FV3_OUTPUT_FH_s
+ FV3_OUTPUT_FH_NML="$(echo "scale=5; ${OFFSET_START_HOUR}+(${DELTIM}/3600)" | bc -l)"
+ FV3_OUTPUT_FH_s=$(( OFFSET_START_HOUR * 3600 + DELTIM ))
+ local fhr=${FHOUT}
+ if (( FHOUT_HF > 0 && FHMAX_HF > 0 )); then
+ FV3_OUTPUT_FH_NML="${FV3_OUTPUT_FH_NML} $(seq -s ' ' "$(( OFFSET_START_HOUR + FHOUT_HF ))" "${FHOUT_HF}" "${FHMAX_HF}")"
+ FV3_OUTPUT_FH_s="${FV3_OUTPUT_FH_s} $(seq -s ' ' "$(( OFFSET_START_HOUR * 3600 + FHOUT_HF * 3600 ))" "$(( FHOUT_HF * 3600 ))" "$(( FHMAX_HF * 3600 ))")"
+ fhr=${FHMAX_HF}
+ fi
+ FV3_OUTPUT_FH_NML="${FV3_OUTPUT_FH_NML} $(seq -s ' ' "${fhr}" "${FHOUT}" "${FHMAX}")"
+ FV3_OUTPUT_FH_s="${FV3_OUTPUT_FH_s} $(seq -s ' ' "$(( fhr * 3600 ))" "$(( FHOUT * 3600 ))" "$(( FHMAX * 3600 ))")"
+ local hh mm ss s_total
+ FV3_OUTPUT_FH_hhmmss=""
+ for s_total in ${FV3_OUTPUT_FH_s}; do
+ # Convert seconds to HHH-MM-SS
+ (( ss = s_total, mm = ss / 60, ss %= 60, hh = mm / 60, mm %= 60 )) || true
+ FV3_OUTPUT_FH_hhmmss="${FV3_OUTPUT_FH_hhmmss} $(printf "%03d-%02d-%02d" "${hh}" "${mm}" "${ss}")"
+ done
+ # Create a string from an array
+ else # If non-replay ICs are being used
+ # The FV3 fhr list for the namelist and the FV3 fhr list for the filenames
+ # are identical when REPLAY_ICS is set to NO
+ FV3_OUTPUT_FH_NML="${FV3_OUTPUT_FH}"
+ fi
+
# Other options
- MEMBER=$(( 10#${ENSMEM:-"-1"} )) # -1: control, 0: ensemble mean, >0: ensemble member $MEMBER
PREFIX_ATMINC=${PREFIX_ATMINC:-""} # allow ensemble to use recentered increment
# IAU options
diff --git a/ush/load_fv3gfs_modules.sh b/ush/load_fv3gfs_modules.sh
index 5f6afb7e35..ff6f64cece 100755
--- a/ush/load_fv3gfs_modules.sh
+++ b/ush/load_fv3gfs_modules.sh
@@ -20,7 +20,7 @@ source "${HOMEgfs}/versions/run.ver"
module use "${HOMEgfs}/modulefiles"
case "${MACHINE_ID}" in
- "wcoss2" | "hera" | "orion" | "hercules" | "gaea" | "jet" | "s4")
+ "wcoss2" | "hera" | "orion" | "hercules" | "gaea" | "jet" | "s4" | "noaacloud")
module load "module_base.${MACHINE_ID}"
;;
*)
diff --git a/ush/load_ufswm_modules.sh b/ush/load_ufswm_modules.sh
index 6477a8ff39..f00358095d 100755
--- a/ush/load_ufswm_modules.sh
+++ b/ush/load_ufswm_modules.sh
@@ -11,40 +11,21 @@ ulimit_s=$( ulimit -S -s )
source "${HOMEgfs}/ush/detect_machine.sh"
source "${HOMEgfs}/ush/module-setup.sh"
-if [[ "${MACHINE_ID}" != "noaacloud" ]]; then
- module use "${HOMEgfs}/sorc/ufs_model.fd/modulefiles"
- module load "ufs_${MACHINE_ID}.intel"
- module load prod_util
- if [[ "${MACHINE_ID}" = "wcoss2" ]]; then
- module load cray-pals
- module load cfp
- module load libjpeg
- module load craype-network-ucx
- module load cray-mpich-ucx
- else
- export UTILROOT=${prod_util_ROOT}
- fi
- module load wgrib2
- export WGRIB2=wgrib2
-fi
-if [[ "${MACHINE_ID}" == "noaacloud" ]]; then
- if [[ "${PW_CSP:-}" = "aws" ]]; then
- # TODO: This can be cleaned-up; most of this is a hack for now.
- module use "/contrib/spack-stack/envs/ufswm/install/modulefiles/Core"
- module load "stack-intel"
- module load "stack-intel-oneapi-mpi"
- module use -a "/contrib/spack-stack/miniconda/modulefiles/miniconda/"
- module load "py39_4.12.0"
- module load "ufs-weather-model-env/1.0.0"
- export NETCDF="/contrib/spack-stack/miniconda/apps/miniconda/py39_4.12.0"
- # TODO: Are there plans for EPIC to maintain this package or should GW provide support?
- export UTILROOT="/contrib/global-workflow/NCEPLIBS-prod_util"
- export PATH="${PATH}:/contrib/global-workflow/bin"
- ndate_path="$(command -v ndate)"
- export NDATE="${ndate_path}"
- fi
+module use "${HOMEgfs}/sorc/ufs_model.fd/modulefiles"
+module load "ufs_${MACHINE_ID}.intel"
+module load prod_util
+if [[ "${MACHINE_ID}" = "wcoss2" ]]; then
+ module load cray-pals
+ module load cfp
+ module load libjpeg
+ module load craype-network-ucx
+ module load cray-mpich-ucx
+else
+ export UTILROOT=${prod_util_ROOT}
fi
+module load wgrib2
+export WGRIB2=wgrib2
module list
unset MACHINE_ID
diff --git a/ush/module-setup.sh b/ush/module-setup.sh
index b4ec3edafa..398562652d 100755
--- a/ush/module-setup.sh
+++ b/ush/module-setup.sh
@@ -92,10 +92,8 @@ elif [[ ${MACHINE_ID} = discover* ]]; then
# TODO: This can likely be made more general once other cloud
# platforms come online.
elif [[ ${MACHINE_ID} = "noaacloud" ]]; then
-
- export SPACK_ROOT=/contrib/global-workflow/spack-stack/spack
- export PATH=${PATH}:${SPACK_ROOT}/bin
- . "${SPACK_ROOT}"/share/spack/setup-env.sh
+ # We are on NOAA Cloud
+ module purge
else
echo WARNING: UNKNOWN PLATFORM 1>&2
diff --git a/ush/ocnice_extractvars.sh b/ush/ocnice_extractvars.sh
index f0660bb6ec..51276172b9 100755
--- a/ush/ocnice_extractvars.sh
+++ b/ush/ocnice_extractvars.sh
@@ -25,11 +25,11 @@ for (( nh = FHMIN_GFS; nh <= FHMAX_GFS; nh = nh + fhout_ocnice )); do
fnh=$(printf "%3.3d" "${nh}")
if [[ ${component_name} == "ocn" ]]; then
- infile=${COMIN_OCEAN_NETCDF}/${RUN}.ocean.t${cyc}z.${datares}.f${fnh}.nc
+ infile=${COMIN_OCEAN_NETCDF}/${datares}/${RUN}.ocean.t${cyc}z.${datares}.f${fnh}.nc
# For ocean products, add an argument to extract a subset of levels
otherargs=(-d "${depthvar_name},""${zmin},""${zmax}")
elif [[ ${component_name} == "ice" ]]; then
- infile=${COMIN_ICE_NETCDF}/${RUN}.ice.t${cyc}z.${datares}.f${fnh}.nc
+ infile=${COMIN_ICE_NETCDF}/${datares}/${RUN}.ice.t${cyc}z.${datares}.f${fnh}.nc
otherargs=()
fi
outfile=${subdata}/${RUN}.${component_name}.t${cyc}z.${datares}.f${fnh}.nc
diff --git a/ush/parsing_model_configure_FV3.sh b/ush/parsing_model_configure_FV3.sh
index d2dd434fff..7e8e065d26 100755
--- a/ush/parsing_model_configure_FV3.sh
+++ b/ush/parsing_model_configure_FV3.sh
@@ -48,7 +48,7 @@ local JCHUNK3D=$((2*restile))
local KCHUNK3D=1
local IMO=${LONB_IMO}
local JMO=${LATB_JMO}
-local OUTPUT_FH=${FV3_OUTPUT_FH}
+local OUTPUT_FH=${FV3_OUTPUT_FH_NML}
local IAU_OFFSET=${IAU_OFFSET:-0}
# Ensure the template exists
diff --git a/ush/parsing_namelists_FV3.sh b/ush/parsing_namelists_FV3.sh
index 6101c2f5e1..60f44a721a 100755
--- a/ush/parsing_namelists_FV3.sh
+++ b/ush/parsing_namelists_FV3.sh
@@ -42,6 +42,18 @@ local SDAY=${current_cycle:6:2}
local CHOUR=${current_cycle:8:2}
local MOM6_OUTPUT_DIR="./MOM6_OUTPUT"
+if [[ "${REPLAY_ICS:-NO}" == "YES" ]]; then
+ local current_cycle_p1
+ current_cycle_p1=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${FHOUT_OCN} hours" +%Y%m%d%H)
+ local current_cycle_offset
+ current_cycle_offset=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${OFFSET_START_HOUR} hours" +%Y%m%d%H)
+ local SYEAR1=${current_cycle_p1:0:4}
+ local SMONTH1=${current_cycle_p1:4:2}
+ local SDAY1=${current_cycle_p1:6:2}
+ local CHOUR1=${current_cycle_p1:8:2}
+ local CHOUR_offset=${current_cycle_offset:8:2}
+fi
+
atparse < "${template}" >> "diag_table"
diff --git a/ush/python/pygfs/task/aero_analysis.py b/ush/python/pygfs/task/aero_analysis.py
index 69a992d7d4..ccc5fb601a 100644
--- a/ush/python/pygfs/task/aero_analysis.py
+++ b/ush/python/pygfs/task/aero_analysis.py
@@ -46,7 +46,7 @@ def __init__(self, config):
'npz_anl': self.task_config['LEVS'] - 1,
'AERO_WINDOW_BEGIN': _window_begin,
'AERO_WINDOW_LENGTH': f"PT{self.task_config['assim_freq']}H",
- 'aero_bkg_fhr': map(int, str(self.task_config['aero_bkg_times']).split(',')),
+ 'aero_bkg_fhr': self.task_config['aero_bkg_times'],
'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.",
'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.",
'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.",
diff --git a/ush/python/pygfs/task/aero_prepobs.py b/ush/python/pygfs/task/aero_prepobs.py
index d8396fe3ca..be58fa43a5 100644
--- a/ush/python/pygfs/task/aero_prepobs.py
+++ b/ush/python/pygfs/task/aero_prepobs.py
@@ -31,7 +31,7 @@ def __init__(self, config: Dict[str, Any]) -> None:
{
'window_begin': _window_begin,
'window_end': _window_end,
- 'sensors': str(self.task_config['SENSORS']).split(','),
+ 'sensors': self.task_config['SENSORS'],
'data_dir': self.task_config['VIIRS_DATA_DIR'],
'input_files': '',
'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.",
diff --git a/versions/build.noaacloud.ver b/versions/build.noaacloud.ver
new file mode 100644
index 0000000000..ba47313675
--- /dev/null
+++ b/versions/build.noaacloud.ver
@@ -0,0 +1,5 @@
+export stack_intel_ver=2021.3.0
+export stack_impi_ver=2021.3.0
+export spack_env=gsi-addon-env
+source "${HOMEgfs:-}/versions/build.spack.ver"
+export spack_mod_path="/contrib/spack-stack/spack-stack-${spack_stack_ver}/envs/gsi-addon-env/install/modulefiles/Core"
diff --git a/versions/run.noaacloud.ver b/versions/run.noaacloud.ver
new file mode 100644
index 0000000000..4c9ac3cd42
--- /dev/null
+++ b/versions/run.noaacloud.ver
@@ -0,0 +1,8 @@
+export stack_intel_ver=2021.3.0
+export stack_impi_ver=2021.3.0
+export spack_env=gsi-addon-env
+
+source "${HOMEgfs:-}/versions/run.spack.ver"
+export spack_mod_path="/contrib/spack-stack/spack-stack-${spack_stack_ver}/envs/gsi-addon-env/install/modulefiles/Core"
+
+export cdo_ver=2.2.0
diff --git a/workflow/applications/applications.py b/workflow/applications/applications.py
index 97a77c2c21..8c1f69735e 100644
--- a/workflow/applications/applications.py
+++ b/workflow/applications/applications.py
@@ -75,6 +75,10 @@ def __init__(self, conf: Configuration) -> None:
self.do_hpssarch = _base.get('HPSSARCH', False)
self.nens = _base.get('NMEM_ENS', 0)
+ self.fcst_segments = _base.get('FCST_SEGMENTS', None)
+
+ if not AppConfig.is_monotonic(self.fcst_segments):
+ raise ValueError(f'Forecast segments do not increase monotonically: {",".join(self.fcst_segments)}')
self.wave_runs = None
if self.do_wave:
@@ -208,3 +212,26 @@ def get_gfs_interval(gfs_cyc: int) -> timedelta:
return to_timedelta(gfs_internal_map[str(gfs_cyc)])
except KeyError:
raise KeyError(f'Invalid gfs_cyc = {gfs_cyc}')
+
+ @staticmethod
+ def is_monotonic(test_list: List, check_decreasing: bool = False) -> bool:
+ """
+ Determine if an array is monotonically increasing or decreasing
+
+ TODO: Move this into wxflow somewhere
+
+ Inputs
+ test_list: List
+ A list of comparable values to check
+ check_decreasing: bool [default: False]
+ Check whether list is monotonically decreasing
+
+ Returns
+ bool: Whether the list is monotonically increasing (if check_decreasing
+ is False) or decreasing (if check_decreasing is True)
+
+ """
+ if check_decreasing:
+ return all(x > y for x, y in zip(test_list, test_list[1:]))
+ else:
+ return all(x < y for x, y in zip(test_list, test_list[1:]))
diff --git a/workflow/hosts.py b/workflow/hosts.py
index cd0cfe0083..eced460fd1 100644
--- a/workflow/hosts.py
+++ b/workflow/hosts.py
@@ -52,7 +52,7 @@ def detect(cls):
elif container is not None:
machine = 'CONTAINER'
elif pw_csp is not None:
- if pw_csp.lower() not in ['azure', 'aws', 'gcp']:
+ if pw_csp.lower() not in ['azure', 'aws', 'google']:
raise ValueError(
f'NOAA cloud service provider "{pw_csp}" is not supported.')
machine = f"{pw_csp.upper()}PW"
diff --git a/workflow/hosts/awspw.yaml b/workflow/hosts/awspw.yaml
index 046dafcfa7..f925f54008 100644
--- a/workflow/hosts/awspw.yaml
+++ b/workflow/hosts/awspw.yaml
@@ -1,12 +1,12 @@
-BASE_GIT: '/scratch1/NCEPDEV/global/glopara/git' #TODO: This does not yet exist.
-DMPDIR: '/scratch1/NCEPDEV/global/glopara/dump' # TODO: This does not yet exist.
-PACKAGEROOT: '/scratch1/NCEPDEV/global/glopara/nwpara' #TODO: This does not yet exist.
-COMINsyn: '/scratch1/NCEPDEV/global/glopara/com/gfs/prod/syndat' #TODO: This does not yet exist.
+BASE_GIT: '' #TODO: This does not yet exist.
+DMPDIR: '' # TODO: This does not yet exist.
+PACKAGEROOT: '' #TODO: This does not yet exist.
+COMINsyn: '' #TODO: This does not yet exist.
HOMEDIR: '/contrib/${USER}'
-STMP: '/lustre/${USER}/stmp2/'
-PTMP: '/lustre/${USER}/stmp4/'
-NOSCRUB: ${HOMEDIR}
-ACCOUNT: hwufscpldcld
+STMP: '/lustre/${USER}/stmp/'
+PTMP: '/lustre/${USER}/ptmp/'
+NOSCRUB: '${HOMEDIR}'
+ACCOUNT: '${USER}'
SCHEDULER: slurm
QUEUE: batch
QUEUE_SERVICE: batch
@@ -16,10 +16,11 @@ RESERVATION: ''
CLUSTERS: ''
CHGRP_RSTPROD: 'YES'
CHGRP_CMD: 'chgrp rstprod' # TODO: This is not yet supported.
-HPSSARCH: 'YES'
+HPSSARCH: 'NO'
HPSS_PROJECT: emc-global #TODO: See `ATARDIR` below.
+BASE_CPLIC: '/bucket/global-workflow-shared-data/ICSDIR/prototype_ICs'
LOCALARCH: 'NO'
-ATARDIR: '/NCEPDEV/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}' # TODO: This will not yet work from AWS.
+ATARDIR: '' # TODO: This will not yet work from AWS.
MAKE_NSSTBUFR: 'NO'
MAKE_ACFTBUFR: 'NO'
SUPPORTED_RESOLUTIONS: ['C48', 'C96'] # TODO: Test and support all cubed-sphere resolutions.
diff --git a/workflow/rocoto/gefs_tasks.py b/workflow/rocoto/gefs_tasks.py
index e78ac96d83..f0f73d1173 100644
--- a/workflow/rocoto/gefs_tasks.py
+++ b/workflow/rocoto/gefs_tasks.py
@@ -138,19 +138,34 @@ def fcst(self):
dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies)
+ num_fcst_segments = len(self.app_config.fcst_segments) - 1
+
+ fcst_vars = self.envars.copy()
+ fcst_envars_dict = {'FCST_SEGMENT': '#seg#'}
+ for key, value in fcst_envars_dict.items():
+ fcst_vars.append(rocoto.create_envar(name=key, value=str(value)))
+
resources = self.get_resource('fcst')
- task_name = f'fcst_mem000'
+ task_name = f'fcst_mem000_seg#seg#'
task_dict = {'task_name': task_name,
'resources': resources,
'dependency': dependencies,
- 'envars': self.envars,
+ 'envars': fcst_vars,
'cycledef': 'gefs',
'command': f'{self.HOMEgfs}/jobs/rocoto/fcst.sh',
'job_name': f'{self.pslot}_{task_name}_@H',
'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
'maxtries': '&MAXTRIES;'
}
- task = rocoto.create_task(task_dict)
+
+ seg_var_dict = {'seg': ' '.join([f"{seg}" for seg in range(0, num_fcst_segments)])}
+ metatask_dict = {'task_name': f'fcst_mem000',
+ 'is_serial': True,
+ 'var_dict': seg_var_dict,
+ 'task_dict': task_dict
+ }
+
+ task = rocoto.create_task(metatask_dict)
return task
@@ -169,36 +184,60 @@ def efcs(self):
dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies)
- efcsenvars = self.envars.copy()
- efcsenvars_dict = {'ENSMEM': '#member#',
- 'MEMDIR': 'mem#member#'
- }
- for key, value in efcsenvars_dict.items():
- efcsenvars.append(rocoto.create_envar(name=key, value=str(value)))
-
+ num_fcst_segments = len(self.app_config.fcst_segments) - 1
resources = self.get_resource('efcs')
- task_name = f'fcst_mem#member#'
- task_dict = {'task_name': task_name,
- 'resources': resources,
- 'dependency': dependencies,
- 'envars': efcsenvars,
- 'cycledef': 'gefs',
- 'command': f'{self.HOMEgfs}/jobs/rocoto/fcst.sh',
- 'job_name': f'{self.pslot}_{task_name}_@H',
- 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
- 'maxtries': '&MAXTRIES;'
- }
-
- member_var_dict = {'member': ' '.join([f"{mem:03d}" for mem in range(1, self.nmem + 1)])}
- metatask_dict = {'task_name': 'fcst_ens',
- 'var_dict': member_var_dict,
- 'task_dict': task_dict
+ # Kludge to work around bug in rocoto with serial metatasks nested
+ # in a parallel one (see christopherwharrop/rocoto#109). For now,
+ # loop over member to create a separate metatask for each instead
+ # of a metatask of a metatask.
+ #
+ tasks = []
+ for member in [f"{mem:03d}" for mem in range(1, self.nmem + 1)]:
+
+ efcsenvars = self.envars.copy()
+ efcsenvars_dict = {'ENSMEM': f'{member}',
+ 'MEMDIR': f'mem{member}',
+ 'FCST_SEGMENT': '#seg#'
+ }
+ for key, value in efcsenvars_dict.items():
+ efcsenvars.append(rocoto.create_envar(name=key, value=str(value)))
+
+ task_name = f'fcst_mem{member}_seg#seg#'
+ task_dict = {'task_name': task_name,
+ 'resources': resources,
+ 'dependency': dependencies,
+ 'envars': efcsenvars,
+ 'cycledef': 'gefs',
+ 'command': f'{self.HOMEgfs}/jobs/rocoto/fcst.sh',
+ 'job_name': f'{self.pslot}_{task_name}_@H',
+ 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log',
+ 'maxtries': '&MAXTRIES;'
}
- task = rocoto.create_task(metatask_dict)
+ seg_var_dict = {'seg': ' '.join([f"{seg}" for seg in range(0, num_fcst_segments)])}
+ seg_metatask_dict = {'task_name': f'fcst_mem{member}',
+ 'is_serial': True,
+ 'var_dict': seg_var_dict,
+ 'task_dict': task_dict
+ }
- return task
+ tasks.append(rocoto.create_task(seg_metatask_dict))
+
+ return '\n'.join(tasks)
+
+ # Keeping this in hopes the kludge is no longer necessary at some point
+ #
+ # member_var_dict = {'member': ' '.join([f"{mem:03d}" for mem in range(1, self.nmem + 1)])}
+ # mem_metatask_dict = {'task_name': 'fcst_ens',
+ # 'is_serial': False,
+ # 'var_dict': member_var_dict,
+ # 'task_dict': seg_metatask_dict
+ # }
+
+ # task = rocoto.create_task(mem_metatask_dict)
+
+ # return task
def atmos_prod(self):
return self._atmosoceaniceprod('atmos')
@@ -236,7 +275,7 @@ def _atmosoceaniceprod(self, component: str):
if component in ['ocean']:
dep_dict = {'type': 'data', 'data': data, 'age': 120}
deps.append(rocoto.add_dependency(dep_dict))
- dep_dict = {'type': 'task', 'name': 'fcst_mem#member#'}
+ dep_dict = {'type': 'metatask', 'name': 'fcst_mem#member#'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps, dep_condition='or')
elif component in ['ice']:
@@ -384,7 +423,7 @@ def wavepostsbs(self):
def wavepostbndpnt(self):
deps = []
- dep_dict = {'type': 'task', 'name': f'fcst_mem#member#'}
+ dep_dict = {'type': 'metatask', 'name': f'fcst_mem#member#'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -429,7 +468,7 @@ def wavepostbndpntbll(self):
dep_dict = {'type': 'data', 'data': data}
deps.append(rocoto.add_dependency(dep_dict))
- dep_dict = {'type': 'task', 'name': f'fcst_mem#member#'}
+ dep_dict = {'type': 'metatask', 'name': f'fcst_mem#member#'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep_condition='or', dep=deps)
@@ -465,7 +504,7 @@ def wavepostbndpntbll(self):
def wavepostpnt(self):
deps = []
- dep_dict = {'type': 'task', 'name': f'fcst_mem#member#'}
+ dep_dict = {'type': 'metatask', 'name': f'fcst_mem#member#'}
deps.append(rocoto.add_dependency(dep_dict))
if self.app_config.do_wave_bnd:
dep_dict = {'type': 'task', 'name': f'wave_post_bndpnt_bull_mem#member#'}
diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py
index 960a7548ab..9d9b28fb17 100644
--- a/workflow/rocoto/gfs_tasks.py
+++ b/workflow/rocoto/gfs_tasks.py
@@ -688,7 +688,7 @@ def ocnanalprep(self):
deps.append(rocoto.add_dependency(dep_dict))
dep_dict = {'type': 'task', 'name': f'{self.run}marinebmat'}
deps.append(rocoto.add_dependency(dep_dict))
- dep_dict = {'type': 'task', 'name': 'gdasfcst', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
+ dep_dict = {'type': 'metatask', 'name': 'gdasfcst', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep_condition='and', dep=deps)
@@ -880,12 +880,22 @@ def _fcst_forecast_only(self):
dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies)
+ if self.run in ['gfs']:
+ num_fcst_segments = len(self.app_config.fcst_segments) - 1
+ else:
+ num_fcst_segments = 1
+
+ fcst_vars = self.envars.copy()
+ fcst_envars_dict = {'FCST_SEGMENT': '#seg#'}
+ for key, value in fcst_envars_dict.items():
+ fcst_vars.append(rocoto.create_envar(name=key, value=str(value)))
+
resources = self.get_resource('fcst')
- task_name = f'{self.run}fcst'
+ task_name = f'{self.run}fcst_seg#seg#'
task_dict = {'task_name': task_name,
'resources': resources,
'dependency': dependencies,
- 'envars': self.envars,
+ 'envars': fcst_vars,
'cycledef': self.run.replace('enkf', ''),
'command': f'{self.HOMEgfs}/jobs/rocoto/fcst.sh',
'job_name': f'{self.pslot}_{task_name}_@H',
@@ -893,7 +903,14 @@ def _fcst_forecast_only(self):
'maxtries': '&MAXTRIES;'
}
- task = rocoto.create_task(task_dict)
+ seg_var_dict = {'seg': ' '.join([f"{seg}" for seg in range(0, num_fcst_segments)])}
+ metatask_dict = {'task_name': f'{self.run}fcst',
+ 'is_serial': True,
+ 'var_dict': seg_var_dict,
+ 'task_dict': task_dict
+ }
+
+ task = rocoto.create_task(metatask_dict)
return task
@@ -929,12 +946,22 @@ def _fcst_cycled(self):
cycledef = 'gdas_half,gdas' if self.run in ['gdas'] else self.run
+ if self.run in ['gfs']:
+ num_fcst_segments = len(self.app_config.fcst_segments) - 1
+ else:
+ num_fcst_segments = 1
+
+ fcst_vars = self.envars.copy()
+ fcst_envars_dict = {'FCST_SEGMENT': '#seg#'}
+ for key, value in fcst_envars_dict.items():
+ fcst_vars.append(rocoto.create_envar(name=key, value=str(value)))
+
resources = self.get_resource('fcst')
- task_name = f'{self.run}fcst'
+ task_name = f'{self.run}fcst_seg#seg#'
task_dict = {'task_name': task_name,
'resources': resources,
'dependency': dependencies,
- 'envars': self.envars,
+ 'envars': fcst_vars,
'cycledef': cycledef,
'command': f'{self.HOMEgfs}/jobs/rocoto/fcst.sh',
'job_name': f'{self.pslot}_{task_name}_@H',
@@ -942,7 +969,14 @@ def _fcst_cycled(self):
'maxtries': '&MAXTRIES;'
}
- task = rocoto.create_task(task_dict)
+ seg_var_dict = {'seg': ' '.join([f"{seg}" for seg in range(0, num_fcst_segments)])}
+ metatask_dict = {'task_name': f'{self.run}fcst',
+ 'is_serial': True,
+ 'var_dict': seg_var_dict,
+ 'task_dict': task_dict
+ }
+
+ task = rocoto.create_task(metatask_dict)
return task
@@ -1104,7 +1138,7 @@ def _atmosoceaniceprod(self, component: str):
data = f'{history_path}/{history_file_tmpl}'
dep_dict = {'type': 'data', 'data': data, 'age': 120}
deps.append(rocoto.add_dependency(dep_dict))
- dep_dict = {'type': 'task', 'name': f'{self.run}fcst'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.run}fcst'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps, dep_condition='or')
@@ -1169,7 +1203,7 @@ def wavepostsbs(self):
def wavepostbndpnt(self):
deps = []
- dep_dict = {'type': 'task', 'name': f'{self.run}fcst'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.run}fcst'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1221,7 +1255,7 @@ def wavepostbndpntbll(self):
def wavepostpnt(self):
deps = []
- dep_dict = {'type': 'task', 'name': f'{self.run}fcst'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.run}fcst'}
deps.append(rocoto.add_dependency(dep_dict))
if self.app_config.do_wave_bnd:
dep_dict = {'type': 'task', 'name': f'{self.run}wavepostbndpntbll'}
@@ -1318,7 +1352,7 @@ def waveawipsgridded(self):
def postsnd(self):
deps = []
- dep_dict = {'type': 'task', 'name': f'{self.run}fcst'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.run}fcst'}
deps.append(rocoto.add_dependency(dep_dict))
dependencies = rocoto.create_dependency(dep=deps)
@@ -1824,8 +1858,9 @@ def metp(self):
}
metatask_dict = {'task_name': f'{self.run}metp',
+ 'is_serial': True,
'task_dict': task_dict,
- 'var_dict': var_dict
+ 'var_dict': var_dict,
}
task = rocoto.create_task(metatask_dict)
@@ -2524,7 +2559,7 @@ def ecen(self):
def _get_ecengroups():
if self._base.get('DOIAU_ENKF', False):
- fhrs = list(self._base.get('IAUFHRS', '6').split(','))
+ fhrs = self._base.get('IAUFHRS', '[6]')
necengrp = self._configs['ecen']['NECENGRP']
ngrps = necengrp if len(fhrs) > necengrp else len(fhrs)
@@ -2666,7 +2701,7 @@ def echgres(self):
self._is_this_a_gdas_task(self.run, 'echgres')
deps = []
- dep_dict = {'type': 'task', 'name': f'{self.run.replace("enkf","")}fcst'}
+ dep_dict = {'type': 'metatask', 'name': f'{self.run.replace("enkf","")}fcst'}
deps.append(rocoto.add_dependency(dep_dict))
dep_dict = {'type': 'task', 'name': f'{self.run}fcst_mem001'}
deps.append(rocoto.add_dependency(dep_dict))
diff --git a/workflow/rocoto/rocoto.py b/workflow/rocoto/rocoto.py
index 0abb56cafb..2a20820da8 100644
--- a/workflow/rocoto/rocoto.py
+++ b/workflow/rocoto/rocoto.py
@@ -56,9 +56,10 @@ def create_task(task_dict: Dict[str, Any]) -> List[str]:
else:
# There is a nested task_dict, so this is a metatask
metataskname = f"{task_dict.get('task_name', 'demometatask')}"
+ metataskmode = 'serial' if task_dict.get('is_serial', False) else 'parallel'
var_dict = task_dict.get('var_dict', None)
- strings = [f'<metatask name="{metataskname}">\n',
+ strings = [f'<metatask name="{metataskname}" mode="{metataskmode}">\n',
'\n']
if var_dict is None:
diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py
index 353d2aa943..64952498d4 100644
--- a/workflow/rocoto/tasks.py
+++ b/workflow/rocoto/tasks.py
@@ -209,7 +209,10 @@ def get_resource(self, task_name):
else:
native += ':shared'
elif scheduler in ['slurm']:
- native = '--export=NONE'
+ if task_config.get('is_exclusive', False):
+ native = '--exclusive'
+ else:
+ native = '--export=NONE'
if task_config['RESERVATION'] != "":
native += '' if task_name in Tasks.SERVICE_TASKS else ' --reservation=' + task_config['RESERVATION']
if task_config.get('CLUSTERS', "") not in ["", '@CLUSTERS@']:
diff --git a/workflow/rocoto/workflow_xml.py b/workflow/rocoto/workflow_xml.py
index 11b2cdfc45..ca54f3a5bb 100644
--- a/workflow/rocoto/workflow_xml.py
+++ b/workflow/rocoto/workflow_xml.py
@@ -157,10 +157,16 @@ def _write_crontab(self, crontab_file: str = None, cronint: int = 5) -> None:
strings = ['',
f'#################### {pslot} ####################',
- f'MAILTO="{replyto}"',
- f'{cronintstr} {rocotorunstr}',
- '#################################################################',
- '']
+ f'MAILTO="{replyto}"'
+ ]
+ # AWS need 'SHELL', and 'BASH_ENV' defined, or, the crontab job won't start.
+ if os.environ.get('PW_CSP', None) in ['aws', 'azure', 'google']:
+ strings.extend([f'SHELL="/bin/bash"',
+ f'BASH_ENV="/etc/bashrc"'
+ ])
+ strings.extend([f'{cronintstr} {rocotorunstr}',
+ '#################################################################',
+ ''])
if crontab_file is None:
crontab_file = f"{expdir}/{pslot}.crontab"