diff --git a/ci/Jenkinsfile b/ci/Jenkinsfile
index c6aa0887c76..956bd692dd5 100644
--- a/ci/Jenkinsfile
+++ b/ci/Jenkinsfile
@@ -1,9 +1,10 @@
def Machine = 'none'
def machine = 'none'
-def HOME = 'none'
+def CUSTOM_WORKSPACE = 'none'
def caseList = ''
// Location of the custom workspaces for each machine in the CI system. They are persistent for each iteration of the PR.
-def custom_workspace = [hera: '/scratch1/NCEPDEV/global/CI', orion: '/work2/noaa/stmp/CI/ORION', hercules: '/work2/noaa/stmp/CI/HERCULES']
+def NodeName = [hera: 'Hera-EMC', orion: 'Orion-EMC', hercules: 'Hercules-EMC', gaea: 'Gaea']
+def custom_workspace = [hera: '/scratch1/NCEPDEV/global/CI', orion: '/work2/noaa/stmp/CI/ORION', hercules: '/work2/noaa/stmp/CI/HERCULES', gaea: '/gpfs/f5/epic/proj-shared/global/CI']
def repo_url = 'git@github.com:NOAA-EMC/global-workflow.git'
def STATUS = 'Passed'
@@ -40,9 +41,9 @@ pipeline {
echo "This is parent job so getting list of nodes matching labels:"
for (label in pullRequest.labels) {
if (label.matches("CI-(.*?)-Ready")) {
- def Machine_name = label.split('-')[1].toString()
+ def machine_name = label.split('-')[1].toString().toLowerCase()
jenkins.model.Jenkins.get().computers.each { c ->
- if (c.node.selfLabel.name == "${Machine_name}-EMC") {
+ if (c.node.selfLabel.name == NodeName[machine_name]) {
run_nodes.add(c.node.selfLabel.name)
}
}
@@ -70,25 +71,25 @@ pipeline {
}
stage('2. Get Common Workspace') {
- agent { label "${machine}-emc" }
+ agent { label NodeName[machine].toLowerCase() }
steps {
script {
Machine = machine[0].toUpperCase() + machine.substring(1)
echo "Getting Common Workspace for ${Machine}"
ws("${custom_workspace[machine]}/${env.CHANGE_ID}") {
properties([parameters([[$class: 'NodeParameterDefinition', allowedSlaves: ['built-in', 'Hercules-EMC', 'Hera-EMC', 'Orion-EMC'], defaultSlaves: ['built-in'], name: '', nodeEligibility: [$class: 'AllNodeEligibility'], triggerIfResult: 'allCases']])])
- HOME = "${WORKSPACE}"
- sh(script: "mkdir -p ${HOME}/RUNTESTS;rm -Rf ${HOME}/RUNTESTS/*")
+ CUSTOM_WORKSPACE = "${WORKSPACE}"
+ sh(script: "mkdir -p ${CUSTOM_WORKSPACE}/RUNTESTS;rm -Rf ${CUSTOM_WORKSPACE}/RUNTESTS/*")
sh(script: """${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --add-label "CI-${Machine}-Building" --remove-label "CI-${Machine}-Ready" """)
}
- echo "Building and running on ${Machine} in directory ${HOME}"
+ echo "Building and running on ${Machine} in directory ${CUSTOM_WORKSPACE}"
}
}
}
stage('3. Build System') {
matrix {
- agent { label "${machine}-emc" }
+ agent { label NodeName[machine].toLowerCase() }
//options {
// throttle(['global_matrix_build'])
//}
@@ -102,7 +103,7 @@ pipeline {
stage('build system') {
steps {
script {
- def HOMEgfs = "${HOME}/${system}" // local HOMEgfs is used to build the system on per system basis under the common workspace HOME
+            def HOMEgfs = "${CUSTOM_WORKSPACE}/${system}" // local HOMEgfs is used to build the system on a per-system basis under the custom workspace for each build system
sh(script: "mkdir -p ${HOMEgfs}")
ws(HOMEgfs) {
if (fileExists("${HOMEgfs}/sorc/BUILT_semaphor")) { // if the system is already built, skip the build in the case of re-runs
@@ -112,7 +113,16 @@ pipeline {
sh(script: './link_workflow.sh')
}
} else {
- checkout scm
+ try {
+ echo "Checking out the code for ${system} on ${Machine} using scm in ${HOMEgfs}"
+ checkout scm
+ } catch (Exception e) {
+ if (env.CHANGE_ID) {
+ sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Checkout **Failed** on ${Machine}: ${e.getMessage()}" """)
+ }
+ echo "Failed to checkout: ${e.getMessage()}"
+ STATUS = 'Failed'
+ }
def gist_url = ""
def error_logs = ""
def error_logs_message = ""
@@ -173,7 +183,7 @@ pipeline {
stage('4. Run Tests') {
failFast false
matrix {
- agent { label "${machine}-emc" }
+ agent { label NodeName[machine].toLowerCase() }
axes {
axis {
name 'Case'
@@ -189,11 +199,11 @@ pipeline {
}
steps {
script {
- sh(script: "sed -n '/{.*}/!p' ${HOME}/gfs/ci/cases/pr/${Case}.yaml > ${HOME}/gfs/ci/cases/pr/${Case}.yaml.tmp")
- def yaml_case = readYaml file: "${HOME}/gfs/ci/cases/pr/${Case}.yaml.tmp"
+ sh(script: "sed -n '/{.*}/!p' ${CUSTOM_WORKSPACE}/gfs/ci/cases/pr/${Case}.yaml > ${CUSTOM_WORKSPACE}/gfs/ci/cases/pr/${Case}.yaml.tmp")
+ def yaml_case = readYaml file: "${CUSTOM_WORKSPACE}/gfs/ci/cases/pr/${Case}.yaml.tmp"
system = yaml_case.experiment.system
- def HOMEgfs = "${HOME}/${system}" // local HOMEgfs is used to populate the XML on per system basis
- env.RUNTESTS = "${HOME}/RUNTESTS"
+          def HOMEgfs = "${CUSTOM_WORKSPACE}/${system}" // local HOMEgfs is used to populate the XML on a per-system basis
+ env.RUNTESTS = "${CUSTOM_WORKSPACE}/RUNTESTS"
sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh create_experiment ${HOMEgfs}/ci/cases/pr/${Case}.yaml")
}
}
@@ -206,15 +216,15 @@ pipeline {
failFast false
steps {
script {
- HOMEgfs = "${HOME}/gfs" // common HOMEgfs is used to launch the scripts that run the experiments
- def pslot = sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh get_pslot ${HOME}/RUNTESTS ${Case}", returnStdout: true).trim()
- def error_file = "${HOME}/RUNTESTS/${pslot}_error.logs"
+ HOMEgfs = "${CUSTOM_WORKSPACE}/gfs" // common HOMEgfs is used to launch the scripts that run the experiments
+ def pslot = sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh get_pslot ${CUSTOM_WORKSPACE}/RUNTESTS ${Case}", returnStdout: true).trim()
+ def error_file = "${CUSTOM_WORKSPACE}/RUNTESTS/${pslot}_error.logs"
sh(script: " rm -f ${error_file}")
try {
- sh(script: "${HOMEgfs}/ci/scripts/run-check_ci.sh ${HOME} ${pslot} ${system}")
+ sh(script: "${HOMEgfs}/ci/scripts/run-check_ci.sh ${CUSTOM_WORKSPACE} ${pslot} ${system}")
} catch (Exception error_experment) {
sh(script: "${HOMEgfs}/ci/scripts/utils/ci_utils_wrapper.sh cancel_batch_jobs ${pslot}")
- ws(HOME) {
+ ws(CUSTOM_WORKSPACE) {
def error_logs = ""
def error_logs_message = ""
if (fileExists(error_file)) {
@@ -222,11 +232,11 @@ pipeline {
def lines = fileContent.readLines()
for (line in lines) {
echo "archiving: ${line}"
- if (fileExists("${HOME}/${line}") && readFile("${HOME}/${line}").length() > 0) {
+ if (fileExists("${CUSTOM_WORKSPACE}/${line}") && readFile("${CUSTOM_WORKSPACE}/${line}").length() > 0) {
try {
archiveArtifacts artifacts: "${line}", fingerprint: true
- error_logs = error_logs + "${HOME}/${line} "
- error_logs_message = error_logs_message + "${HOME}/${line}\n"
+ error_logs = error_logs + "${CUSTOM_WORKSPACE}/${line} "
+ error_logs_message = error_logs_message + "${CUSTOM_WORKSPACE}/${line}\n"
} catch (Exception error_arch) {
echo "Failed to archive error log ${line}: ${error_arch.getMessage()}"
}
@@ -240,12 +250,12 @@ pipeline {
echo "Failed to comment on PR: ${error_comment.getMessage()}"
}
} else {
- echo "No error logs found for failed cases in $HOME/RUNTESTS/${pslot}_error.logs"
+              echo "No error logs found for failed cases in ${CUSTOM_WORKSPACE}/RUNTESTS/${pslot}_error.logs"
}
STATUS = 'Failed'
try {
sh(script: """${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --remove-label "CI-${Machine}-Running" --add-label "CI-${Machine}-${STATUS}" """, returnStatus: true)
- sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Experiment ${Case} **FAILED** on ${Machine} in\n\\`${HOME}/RUNTESTS/${pslot}\\`" """)
+ sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "Experiment ${Case} **FAILED** on ${Machine} in\n\\`${CUSTOM_WORKSPACE}/RUNTESTS/${pslot}\\`" """)
} catch (Exception e) {
echo "Failed to update label from Running to ${STATUS}: ${e.getMessage()}"
}
@@ -259,19 +269,30 @@ pipeline {
}
}
stage( '5. FINALIZE' ) {
- when {
- expression {
- STATUS == 'Passed'
- }
- }
- agent { label "${machine}-emc" }
+ agent { label NodeName[machine].toLowerCase() }
steps {
script {
- try {
- sh(script: """${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --remove-label "CI-${Machine}-Running" --remove-label "CI-${Machine}-Building" --add-label "CI-${Machine}-${STATUS}" """, returnStatus: true)
- sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body "**CI ${STATUS}** ${Machine} at
-Built and ran in directory \\`${HOME}\\`" """, returnStatus: true)
- } catch (Exception e) {
- echo "Failed to update label from Running to ${STATUS}: ${e.getMessage()}"
+ sh(script: """
+                    labels=\$(${GH} pr view ${env.CHANGE_ID} --repo ${repo_url} --json labels --jq '.labels[].name')
+ for label in \$labels; do
+ if [[ "\$label" == *"${Machine}"* ]]; then
+                            ${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --remove-label "\$label"
+ fi
+ done
+ """, returnStatus: true)
+ sh(script: """${GH} pr edit ${env.CHANGE_ID} --repo ${repo_url} --add-label "CI-${Machine}-${STATUS}" """, returnStatus: true)
+ if (fileExists("${CUSTOM_WORKSPACE}/RUNTESTS/ci-run_check.log")) {
+ sh(script: """echo "**CI ${STATUS}** ${Machine} at
+Built and ran in directory \\`${CUSTOM_WORKSPACE}\\`\n\\`\\`\\`\n" | cat - ${CUSTOM_WORKSPACE}/RUNTESTS/ci-run_check.log > temp && mv temp ${CUSTOM_WORKSPACE}/RUNTESTS/ci-run_check.log""", returnStatus: true)
+ sh(script: """${GH} pr comment ${env.CHANGE_ID} --repo ${repo_url} --body-file ${CUSTOM_WORKSPACE}/RUNTESTS/ci-run_check.log """, returnStatus: true)
+ }
+ if (STATUS == 'Passed') {
+ try {
+ sh(script: "rm -Rf ${CUSTOM_WORKSPACE}/*")
+ } catch (Exception e) {
+ echo "Failed to remove custom work directory ${CUSTOM_WORKSPACE} on ${Machine}: ${e.getMessage()}"
+ }
+ } else {
+            echo "Failed to build and run Global-workflow in ${CUSTOM_WORKSPACE} on ${Machine}"
}
}
}
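Note on stage 5: the finalize stage now strips every existing label that names the machine before applying the final status label, so stale CI-<machine>-* labels cannot accumulate across re-runs. A minimal standalone sketch of that gh-CLI pattern, with illustrative placeholder values for the PR number, repo, machine, and status:

    #!/usr/bin/env bash
    # Hedged sketch of the label-cleanup idiom from stage 5; PR, REPO,
    # MACHINE, and STATUS are illustrative placeholders.
    PR=1234
    REPO="NOAA-EMC/global-workflow"
    MACHINE="Hera"
    STATUS="Passed"

    # List current label names one per line, then drop any naming this machine
    labels=$(gh pr view "${PR}" --repo "${REPO}" --json labels --jq '.labels[].name')
    for label in ${labels}; do
      if [[ "${label}" == *"${MACHINE}"* ]]; then
        gh pr edit "${PR}" --repo "${REPO}" --remove-label "${label}"
      fi
    done

    # Apply the single authoritative status label for this machine
    gh pr edit "${PR}" --repo "${REPO}" --add-label "CI-${MACHINE}-${STATUS}"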
diff --git a/ci/cases/pr/C96_atmaerosnowDA.yaml b/ci/cases/pr/C96_atmaerosnowDA.yaml
index edde37cbf73..7e22955a370 100644
--- a/ci/cases/pr/C96_atmaerosnowDA.yaml
+++ b/ci/cases/pr/C96_atmaerosnowDA.yaml
@@ -19,4 +19,3 @@ arguments:
skip_ci_on_hosts:
- orion
- hercules
- - wcoss2
diff --git a/ci/scripts/check_ci.sh b/ci/scripts/check_ci.sh
index cd907d34aa8..04dd92f4a69 100755
--- a/ci/scripts/check_ci.sh
+++ b/ci/scripts/check_ci.sh
@@ -175,7 +175,7 @@ for pr in ${pr_list}; do
DATE=$(date +'%D %r')
echo "Experiment ${pslot} **SUCCESS** on ${MACHINE_ID^} at ${DATE}" >> "${output_ci_single}"
echo "Experiment ${pslot} *** SUCCESS *** at ${DATE}" >> "${output_ci}"
- "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci_single}"
+ # "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${output_ci_single}"
fi
done
done
diff --git a/ci/scripts/run-check_ci.sh b/ci/scripts/run-check_ci.sh
index 5c891fc4bdc..5c49a21c4ba 100755
--- a/ci/scripts/run-check_ci.sh
+++ b/ci/scripts/run-check_ci.sh
@@ -101,9 +101,7 @@ while true; do
if [[ "${ROCOTO_STATE}" == "DONE" ]]; then
{
- echo "Experiment ${pslot} Completed ${CYCLES_DONE} Cycles at $(date)" || true
- echo "with ${SUCCEEDED} successfully completed jobs" || true
- echo "Experiment ${pslot} Completed: *SUCCESS*"
+ echo "Experiment ${pslot} Completed ${CYCLES_DONE} Cycles: *SUCCESS* at $(date)" || true
} | tee -a "${run_check_logfile}"
rc=0
break
diff --git a/env/HERCULES.env b/env/HERCULES.env
index 77e57e066d1..79424f8639a 100755
--- a/env/HERCULES.env
+++ b/env/HERCULES.env
@@ -132,6 +132,16 @@ case ${step} in
[[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max}
export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalrun} --cpus-per-task=${NTHREADS_OCNANAL}"
;;
+"ocnanalecen")
+
+ export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
+
+ nth_max=$((npe_node_max / npe_node_ocnanalecen))
+
+ export NTHREADS_OCNANALECEN=${nth_ocnanalecen:-${nth_max}}
+ [[ ${NTHREADS_OCNANALECEN} -gt ${nth_max} ]] && export NTHREADS_OCNANALECEN=${nth_max}
+ export APRUN_OCNANALECEN="${launcher} -n ${npe_ocnanalecen} --cpus-per-task=${NTHREADS_OCNANALECEN}"
+;;
"ocnanalchkpt")
export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}"
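The new ocnanalecen block follows the same resource derivation as the neighboring steps in this file: threads per task default to whatever fills a node (cores per node divided by tasks per node) and are capped at that maximum before the launcher line is assembled. A small sketch of the arithmetic, with illustrative values in place of the configured ones:

    # Illustrative values; in HERCULES.env these come from the resource configs
    npe_node_max=80            # cores available per node
    npe_node_ocnanalecen=20    # MPI tasks placed on each node for this step
    npe_ocnanalecen=40         # total MPI tasks for this step

    nth_max=$((npe_node_max / npe_node_ocnanalecen))     # 4 threads/task fill a node
    NTHREADS_OCNANALECEN=${nth_ocnanalecen:-${nth_max}}  # honor an override if one is set
    [[ ${NTHREADS_OCNANALECEN} -gt ${nth_max} ]] && NTHREADS_OCNANALECEN=${nth_max}
    echo "srun -n ${npe_ocnanalecen} --cpus-per-task=${NTHREADS_OCNANALECEN}"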
diff --git a/jobs/JGLOBAL_WAVE_INIT b/jobs/JGLOBAL_WAVE_INIT
index 7cadfe9f879..3a0a8b43a85 100755
--- a/jobs/JGLOBAL_WAVE_INIT
+++ b/jobs/JGLOBAL_WAVE_INIT
@@ -10,9 +10,10 @@ export errchk=${errchk:-err_chk}
export MP_PULSE=0
# Set COM Paths
-YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_WAVE_PREP
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+ COMOUT_WAVE_PREP:COM_WAVE_PREP_TMPL
-mkdir -m 775 -p ${COM_WAVE_PREP}
+if [[ ! -d "${COMOUT_WAVE_PREP}" ]]; then mkdir -p "${COMOUT_WAVE_PREP}"; fi
# Set mpi serial command
export wavempexec=${wavempexec:-"mpirun -n"}
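The wave jobs in this PR all migrate from the generic COM_* variables to explicit COMIN_*/COMOUT_* names via the VAR:TEMPLATE form of declare_from_tmpl, which makes the direction of data flow visible at the call site. A hedged sketch of the idiom as it is used here (the behavior of declare_from_tmpl is summarized from these call sites, not from its full interface):

    # declare_from_tmpl (wxflow) expands a COM template into an exported
    # variable; YMD/HH scope the expansion to the current cycle.  The same
    # template can feed both an input name and an output name:
    YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
        COMIN_WAVE_PREP:COM_WAVE_PREP_TMPL \
        COMOUT_WAVE_PREP:COM_WAVE_PREP_TMPL

    # Only output directories are created; inputs are expected to exist
    if [[ ! -d "${COMOUT_WAVE_PREP}" ]]; then mkdir -p "${COMOUT_WAVE_PREP}"; fi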
diff --git a/jobs/JGLOBAL_WAVE_POST_BNDPNT b/jobs/JGLOBAL_WAVE_POST_BNDPNT
index d1c92278954..808ba7d9f3f 100755
--- a/jobs/JGLOBAL_WAVE_POST_BNDPNT
+++ b/jobs/JGLOBAL_WAVE_POST_BNDPNT
@@ -9,9 +9,12 @@ export errchk=${errchk:-err_chk}
export MP_PULSE=0
# Set COM Paths and GETGES environment
-YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_WAVE_PREP COM_WAVE_HISTORY COM_WAVE_STATION
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+ COMIN_WAVE_PREP:COM_WAVE_PREP_TMPL \
+ COMIN_WAVE_HISTORY:COM_WAVE_HISTORY_TMPL \
+ COMOUT_WAVE_STATION:COM_WAVE_STATION_TMPL
-if [[ ! -d ${COM_WAVE_STATION} ]]; then mkdir -p "${COM_WAVE_STATION}"; fi
+if [[ ! -d "${COMOUT_WAVE_STATION}" ]]; then mkdir -p "${COMOUT_WAVE_STATION}"; fi
# Set wave model ID tag to include member number
# if ensemble; waveMEMB var empty in deterministic
diff --git a/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL b/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL
index ea4bb30cfba..c85b1cb5f3c 100755
--- a/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL
+++ b/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL
@@ -13,9 +13,12 @@ export CDATE=${PDY}${cyc}
export MP_PULSE=0
# Set COM Paths and GETGES environment
-YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_WAVE_PREP COM_WAVE_HISTORY COM_WAVE_STATION
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+ COMIN_WAVE_PREP:COM_WAVE_PREP_TMPL \
+ COMIN_WAVE_HISTORY:COM_WAVE_HISTORY_TMPL \
+ COMOUT_WAVE_STATION:COM_WAVE_STATION_TMPL
-if [[ ! -d ${COM_WAVE_STATION} ]]; then mkdir -p "${COM_WAVE_STATION}"; fi
+if [[ ! -d "${COMOUT_WAVE_STATION}" ]]; then mkdir -p "${COMOUT_WAVE_STATION}"; fi
# Set wave model ID tag to include member number
# if ensemble; waveMEMB var empty in deterministic
diff --git a/jobs/JGLOBAL_WAVE_POST_PNT b/jobs/JGLOBAL_WAVE_POST_PNT
index a946ae537d1..769159be617 100755
--- a/jobs/JGLOBAL_WAVE_POST_PNT
+++ b/jobs/JGLOBAL_WAVE_POST_PNT
@@ -9,9 +9,12 @@ export errchk=${errchk:-err_chk}
export MP_PULSE=0
# Set COM Paths and GETGES environment
-YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_WAVE_PREP COM_WAVE_HISTORY COM_WAVE_STATION
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+ COMIN_WAVE_PREP:COM_WAVE_PREP_TMPL \
+ COMIN_WAVE_HISTORY:COM_WAVE_HISTORY_TMPL \
+ COMOUT_WAVE_STATION:COM_WAVE_STATION_TMPL
-if [[ ! -d ${COM_WAVE_STATION} ]]; then mkdir -p "${COM_WAVE_STATION}"; fi
+if [[ ! -d "${COMOUT_WAVE_STATION}" ]]; then mkdir -p "${COMOUT_WAVE_STATION}"; fi
# Set wave model ID tag to include member number
# if ensemble; waveMEMB var empty in deterministic
diff --git a/jobs/JGLOBAL_WAVE_POST_SBS b/jobs/JGLOBAL_WAVE_POST_SBS
index 89d8013ea17..53ac4b20833 100755
--- a/jobs/JGLOBAL_WAVE_POST_SBS
+++ b/jobs/JGLOBAL_WAVE_POST_SBS
@@ -9,9 +9,15 @@ export errchk=${errchk:-err_chk}
export MP_PULSE=0
# Set COM Paths and GETGES environment
-YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_WAVE_PREP COM_WAVE_HISTORY COM_WAVE_GRID
-
-mkdir -p "${COM_WAVE_GRID}"
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+ COMIN_WAVE_PREP:COM_WAVE_PREP_TMPL \
+ COMIN_WAVE_HISTORY:COM_WAVE_HISTORY_TMPL \
+ COMOUT_WAVE_PREP:COM_WAVE_PREP_TMPL \
+ COMOUT_WAVE_GRID:COM_WAVE_GRID_TMPL
+
+for out_dir in "${COMOUT_WAVE_PREP}" "${COMOUT_WAVE_GRID}"; do
+ if [[ ! -d "${out_dir}" ]]; then mkdir -p "${out_dir}"; fi
+done
# Set wave model ID tag to include member number
diff --git a/jobs/JGLOBAL_WAVE_PRDGEN_BULLS b/jobs/JGLOBAL_WAVE_PRDGEN_BULLS
index 86ad17e8b09..ebecf716afc 100755
--- a/jobs/JGLOBAL_WAVE_PRDGEN_BULLS
+++ b/jobs/JGLOBAL_WAVE_PRDGEN_BULLS
@@ -13,9 +13,11 @@ export SENDDBN_NTC=${SENDDBN_NTC:-YES}
export SENDDBN=${SENDDBN:-NO}
export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn}
-YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_WAVE_STATION COM_WAVE_WMO
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+ COMIN_WAVE_STATION:COM_WAVE_STATION_TMPL \
+ COMOUT_WAVE_WMO:COM_WAVE_WMO_TMPL
-if [[ ! -d ${COM_WAVE_WMO} ]]; then mkdir -p "${COM_WAVE_WMO}"; fi
+if [[ ! -d "${COMOUT_WAVE_WMO}" ]]; then mkdir -p "${COMOUT_WAVE_WMO}"; fi
###################################
# Execute the Script
diff --git a/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED b/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED
index 5a4250fb57c..208b36c5352 100755
--- a/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED
+++ b/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED
@@ -13,11 +13,12 @@ export SENDDBN_NTC=${SENDDBN_NTC:-YES}
export SENDDBN=${SENDDBN:-NO}
export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn}
-YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_WAVE_GRID COM_WAVE_WMO
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+ COMIN_WAVE_GRID:COM_WAVE_GRID_TMPL \
+ COMOUT_WAVE_WMO:COM_WAVE_WMO_TMPL
-if [[ ! -d ${COM_WAVE_WMO} ]]; then mkdir -p "${COM_WAVE_WMO}"; fi
+if [[ ! -d "${COMOUT_WAVE_WMO}" ]]; then mkdir -p "${COMOUT_WAVE_WMO}"; fi
-mkdir -p "${COM_WAVE_WMO}"
###################################
# Execute the Script
diff --git a/jobs/JGLOBAL_WAVE_PREP b/jobs/JGLOBAL_WAVE_PREP
index 866f3cb3189..65928b870de 100755
--- a/jobs/JGLOBAL_WAVE_PREP
+++ b/jobs/JGLOBAL_WAVE_PREP
@@ -17,9 +17,12 @@ export MP_PULSE=0
export CDO=${CDO_ROOT}/bin/cdo
# Set COM Paths and GETGES environment
-YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OBS COM_WAVE_PREP
-declare_from_tmpl -rx COM_RTOFS
-[[ ! -d ${COM_WAVE_PREP} ]] && mkdir -m 775 -p "${COM_WAVE_PREP}"
+YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \
+ COMIN_OBS:COM_OBS_TMPL \
+ COMIN_WAVE_PREP:COM_WAVE_PREP_TMPL \
+ COMOUT_WAVE_PREP:COM_WAVE_PREP_TMPL \
+ COMIN_RTOFS:COM_RTOFS_TMPL
+if [[ ! -d "${COMOUT_WAVE_PREP}" ]]; then mkdir -p "${COMOUT_WAVE_PREP}"; fi
# Execute the Script
${SCRgfs}/exgfs_wave_prep.sh
diff --git a/jobs/rocoto/aeroanlfinal.sh b/jobs/rocoto/aeroanlfinal.sh
index 16bb6887fd7..39dea71810f 100755
--- a/jobs/rocoto/aeroanlfinal.sh
+++ b/jobs/rocoto/aeroanlfinal.sh
@@ -11,11 +11,6 @@ status=$?
export job="aeroanlfinal"
export jobid="${job}.$$"
-###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
-export PYTHONPATH
###############################################################
# Execute the JJOB
"${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE"
diff --git a/jobs/rocoto/aeroanlinit.sh b/jobs/rocoto/aeroanlinit.sh
index 9aaf255782c..7a1cf885c10 100755
--- a/jobs/rocoto/aeroanlinit.sh
+++ b/jobs/rocoto/aeroanlinit.sh
@@ -11,12 +11,6 @@ status=$?
export job="aeroanlinit"
export jobid="${job}.$$"
-###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
-export PYTHONPATH
-
###############################################################
# Execute the JJOB
"${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE"
diff --git a/jobs/rocoto/aeroanlrun.sh b/jobs/rocoto/aeroanlrun.sh
index bcd86e3fbf6..529bb2d7d1f 100755
--- a/jobs/rocoto/aeroanlrun.sh
+++ b/jobs/rocoto/aeroanlrun.sh
@@ -11,12 +11,6 @@ status=$?
export job="aeroanlrun"
export jobid="${job}.$$"
-###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
-export PYTHONPATH
-
###############################################################
# Execute the JJOB
"${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_RUN"
diff --git a/jobs/rocoto/atmanlfinal.sh b/jobs/rocoto/atmanlfinal.sh
index 3d3c3ba9e65..a12894ed1e7 100755
--- a/jobs/rocoto/atmanlfinal.sh
+++ b/jobs/rocoto/atmanlfinal.sh
@@ -11,11 +11,6 @@ status=$?
export job="atmanlfinal"
export jobid="${job}.$$"
-###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
-export PYTHONPATH
###############################################################
# Execute the JJOB
"${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE"
diff --git a/jobs/rocoto/atmanlfv3inc.sh b/jobs/rocoto/atmanlfv3inc.sh
index effc18cee50..5261c15f090 100755
--- a/jobs/rocoto/atmanlfv3inc.sh
+++ b/jobs/rocoto/atmanlfv3inc.sh
@@ -11,12 +11,6 @@ status=$?
export job="atmanlfv3inc"
export jobid="${job}.$$"
-###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
-export PYTHONPATH
-
###############################################################
# Execute the JJOB
"${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT"
diff --git a/jobs/rocoto/atmanlinit.sh b/jobs/rocoto/atmanlinit.sh
index 13c7d8710b9..53292005908 100755
--- a/jobs/rocoto/atmanlinit.sh
+++ b/jobs/rocoto/atmanlinit.sh
@@ -11,12 +11,6 @@ status=$?
export job="atmanlinit"
export jobid="${job}.$$"
-###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
-export PYTHONPATH
-
###############################################################
# Execute the JJOB
"${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE"
diff --git a/jobs/rocoto/atmanlvar.sh b/jobs/rocoto/atmanlvar.sh
index 812e3c706a6..7df7f59dd10 100755
--- a/jobs/rocoto/atmanlvar.sh
+++ b/jobs/rocoto/atmanlvar.sh
@@ -11,12 +11,6 @@ status=$?
export job="atmanlvar"
export jobid="${job}.$$"
-###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
-export PYTHONPATH
-
###############################################################
# Execute the JJOB
"${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL"
diff --git a/jobs/rocoto/atmensanlfinal.sh b/jobs/rocoto/atmensanlfinal.sh
index 5ffaa92754c..fc29bdd9af0 100755
--- a/jobs/rocoto/atmensanlfinal.sh
+++ b/jobs/rocoto/atmensanlfinal.sh
@@ -11,11 +11,6 @@ status=$?
export job="atmensanlfinal"
export jobid="${job}.$$"
-###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
-export PYTHONPATH
###############################################################
# Execute the JJOB
"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE"
diff --git a/jobs/rocoto/atmensanlfv3inc.sh b/jobs/rocoto/atmensanlfv3inc.sh
index bb44ddc3a0d..7f57e8d6188 100755
--- a/jobs/rocoto/atmensanlfv3inc.sh
+++ b/jobs/rocoto/atmensanlfv3inc.sh
@@ -11,12 +11,6 @@ status=$?
export job="atmensanlfv3inc"
export jobid="${job}.$$"
-###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
-export PYTHONPATH
-
###############################################################
# Execute the JJOB
"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT"
diff --git a/jobs/rocoto/atmensanlinit.sh b/jobs/rocoto/atmensanlinit.sh
index 2c2204548ac..1cd8129df63 100755
--- a/jobs/rocoto/atmensanlinit.sh
+++ b/jobs/rocoto/atmensanlinit.sh
@@ -11,12 +11,6 @@ status=$?
export job="atmensanlinit"
export jobid="${job}.$$"
-###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
-export PYTHONPATH
-
###############################################################
# Execute the JJOB
"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE"
diff --git a/jobs/rocoto/atmensanlletkf.sh b/jobs/rocoto/atmensanlletkf.sh
index b4a1a73a804..0ca86bfb43c 100755
--- a/jobs/rocoto/atmensanlletkf.sh
+++ b/jobs/rocoto/atmensanlletkf.sh
@@ -11,12 +11,6 @@ status=$?
export job="atmensanlletkf"
export jobid="${job}.$$"
-###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
-export PYTHONPATH
-
###############################################################
# Execute the JJOB
"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF"
diff --git a/jobs/rocoto/oceanice_products.sh b/jobs/rocoto/oceanice_products.sh
index eb704fb35f3..2a3b617d054 100755
--- a/jobs/rocoto/oceanice_products.sh
+++ b/jobs/rocoto/oceanice_products.sh
@@ -12,12 +12,6 @@ source "${HOMEgfs}/ush/preamble.sh"
status=$?
if (( status != 0 )); then exit "${status}"; fi
-###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
-export PYTHONPATH
-
export job="oceanice_products"
export jobid="${job}.$$"
diff --git a/jobs/rocoto/prepatmiodaobs.sh b/jobs/rocoto/prepatmiodaobs.sh
index 0e69eda5c9d..26629a514f5 100755
--- a/jobs/rocoto/prepatmiodaobs.sh
+++ b/jobs/rocoto/prepatmiodaobs.sh
@@ -12,11 +12,10 @@ export job="prepatmobs"
export jobid="${job}.$$"
###############################################################
-# setup python path for workflow and ioda utilities
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
+# setup python path for ioda utilities
# shellcheck disable=SC2311
pyiodaPATH="${HOMEgfs}/sorc/gdas.cd/build/lib/python$(detect_py_ver)/"
-PYTHONPATH="${pyiodaPATH}:${wxflowPATH}:${PYTHONPATH}"
+PYTHONPATH="${pyiodaPATH}:${PYTHONPATH}"
export PYTHONPATH
###############################################################
diff --git a/jobs/rocoto/prepobsaero.sh b/jobs/rocoto/prepobsaero.sh
index 89da7547e8d..5d65ff8a02d 100755
--- a/jobs/rocoto/prepobsaero.sh
+++ b/jobs/rocoto/prepobsaero.sh
@@ -11,12 +11,6 @@ status=$?
export job="prepobsaero"
export jobid="${job}.$$"
-###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
-export PYTHONPATH
-
###############################################################
# Execute the JJOB
"${HOMEgfs}/jobs/JGLOBAL_PREP_OBS_AERO"
diff --git a/jobs/rocoto/prepsnowobs.sh b/jobs/rocoto/prepsnowobs.sh
index cff082bab2c..3f23bc16a55 100755
--- a/jobs/rocoto/prepsnowobs.sh
+++ b/jobs/rocoto/prepsnowobs.sh
@@ -12,12 +12,11 @@ export job="prepsnowobs"
export jobid="${job}.$$"
###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
+# setup python path for ioda utilities
# shellcheck disable=SC2311
pyiodaPATH="${HOMEgfs}/sorc/gdas.cd/build/lib/python$(detect_py_ver)/"
gdasappPATH="${HOMEgfs}/sorc/gdas.cd/sorc/iodaconv/src:${pyiodaPATH}"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}:${gdasappPATH}"
+PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${gdasappPATH}"
export PYTHONPATH
###############################################################
diff --git a/jobs/rocoto/snowanl.sh b/jobs/rocoto/snowanl.sh
index 627dd860f49..97df7a46c71 100755
--- a/jobs/rocoto/snowanl.sh
+++ b/jobs/rocoto/snowanl.sh
@@ -11,12 +11,6 @@ status=$?
export job="snowanl"
export jobid="${job}.$$"
-###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
-export PYTHONPATH
-
###############################################################
# Execute the JJOB
"${HOMEgfs}/jobs/JGLOBAL_SNOW_ANALYSIS"
diff --git a/jobs/rocoto/upp.sh b/jobs/rocoto/upp.sh
index da0180472d4..c3f128ab022 100755
--- a/jobs/rocoto/upp.sh
+++ b/jobs/rocoto/upp.sh
@@ -29,18 +29,18 @@ if [[ "${MACHINE_ID}" = "wcoss2" ]]; then
module load python/3.8.6
module load crtm/2.4.0 # TODO: This is only needed when UPP_RUN=goes. Is there a better way to handle this?
set_trace
+
+ # Add wxflow to PYTHONPATH
+ wxflowPATH="${HOMEgfs}/ush/python"
+ PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/ush:${wxflowPATH}"
+ export PYTHONPATH
+
else
. "${HOMEgfs}/ush/load_fv3gfs_modules.sh"
status=$?
if (( status != 0 )); then exit "${status}"; fi
fi
-###############################################################
-# setup python path for workflow utilities and tasks
-wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src"
-PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}"
-export PYTHONPATH
-
export job="upp"
export jobid="${job}.$$"
diff --git a/parm/config/gefs/config.base b/parm/config/gefs/config.base
index 16e0fefaba3..9808b96579e 100644
--- a/parm/config/gefs/config.base
+++ b/parm/config/gefs/config.base
@@ -99,7 +99,6 @@ export EXPDIR="@EXPDIR@/${PSLOT}"
export ROTDIR="@COMROOT@/${PSLOT}"
export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops
-export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead
export ARCDIR="${NOSCRUB}/archive/${PSLOT}"
export ATARDIR="@ATARDIR@"
diff --git a/parm/config/gefs/config.ufs b/parm/config/gefs/config.ufs
index 8beb0652f7e..584e4769a81 100644
--- a/parm/config/gefs/config.ufs
+++ b/parm/config/gefs/config.ufs
@@ -306,9 +306,13 @@ if [[ "${skip_mom6}" == "false" ]]; then
CHLCLIM="seawifs_1998-2006_smoothed_2X.nc"
MOM6_RESTART_SETTING='r'
MOM6_RIVER_RUNOFF='False'
+      if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
+ MOM6_DIAG_MISVAL="-1e34"
+ else
+ MOM6_DIAG_MISVAL="0.0"
+ fi
eps_imesh="4.0e-1"
MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_25L.nc"
- MOM6_DIAG_MISVAL="0.0"
MOM6_ALLOW_LANDMASK_CHANGES='False'
TOPOEDITS=""
;;
@@ -324,13 +328,13 @@ if [[ "${skip_mom6}" == "false" ]]; then
MOM6_RESTART_SETTING='r'
MOM6_RIVER_RUNOFF='False'
eps_imesh="2.5e-1"
- TOPOEDITS="topo_edits_011818.nc"
- if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then
- MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
- MOM6_DIAG_MISVAL="0.0"
- else
+ TOPOEDITS="ufs.topo_edits_011818.nc"
+      if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc"
MOM6_DIAG_MISVAL="-1e34"
+ else
+ MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
+ MOM6_DIAG_MISVAL="0.0"
fi
MOM6_ALLOW_LANDMASK_CHANGES='True'
;;
@@ -346,12 +350,12 @@ if [[ "${skip_mom6}" == "false" ]]; then
MOM6_RESTART_SETTING='n'
MOM6_RIVER_RUNOFF='True'
eps_imesh="1.0e-1"
- if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then
- MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
- MOM6_DIAG_MISVAL="0.0"
- else
+      if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc"
MOM6_DIAG_MISVAL="-1e34"
+ else
+ MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
+ MOM6_DIAG_MISVAL="0.0"
fi
MOM6_ALLOW_LANDMASK_CHANGES='False'
TOPOEDITS=""
@@ -368,12 +372,12 @@ if [[ "${skip_mom6}" == "false" ]]; then
MOM6_RIVER_RUNOFF='True'
MOM6_RESTART_SETTING="r"
eps_imesh="1.0e-1"
- if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then
- MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
- MOM6_DIAG_MISVAL="0.0"
- else
+      if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc"
MOM6_DIAG_MISVAL="-1e34"
+ else
+ MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
+ MOM6_DIAG_MISVAL="0.0"
fi
MOM6_ALLOW_LANDMASK_CHANGES='False'
TOPOEDITS=""
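Each ocean-resolution block above now keys the MOM6 diagnostic settings on RUN instead of DO_JEDIOCNVAR: forecast runs (gfs/gefs) take the 40-level interpolated diagnostic grid with a -1e34 missing value, while all other runs (the DA-oriented ones) keep the 75-level oceanda grid with 0.0. The shared pattern, condensed from the repeated hunks:

    # Pattern repeated in every MOM6 resolution block of config.ufs
    if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
      MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc"
      MOM6_DIAG_MISVAL="-1e34"
    else
      MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
      MOM6_DIAG_MISVAL="0.0"
    fi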
diff --git a/parm/config/gfs/config.atmanl b/parm/config/gfs/config.atmanl
index 7879b8b6838..9a06088eccf 100644
--- a/parm/config/gfs/config.atmanl
+++ b/parm/config/gfs/config.atmanl
@@ -15,14 +15,17 @@ export INTERP_METHOD='barycentric'
if [[ ${DOHYBVAR} = "YES" ]]; then
# shellcheck disable=SC2153
export CASE_ANL=${CASE_ENS}
- export BERROR_YAML="background_error_hybrid_${STATICB_TYPE}_${LOCALIZATION_TYPE}"
+ export BERROR_YAML="atmosphere_background_error_hybrid_${STATICB_TYPE}_${LOCALIZATION_TYPE}"
else
export CASE_ANL=${CASE}
- export BERROR_YAML="background_error_static_${STATICB_TYPE}"
+ export BERROR_YAML="atmosphere_background_error_static_${STATICB_TYPE}"
fi
export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2"
export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2"
+export VAR_BKG_STAGING_YAML="${PARMgfs}/gdas/staging/atm_var_bkg.yaml.j2"
+export BERROR_STAGING_YAML="${PARMgfs}/gdas/staging/atm_berror_${STATICB_TYPE}.yaml.j2"
+export FV3ENS_STAGING_YAML="${PARMgfs}/gdas/staging/atm_var_fv3ens.yaml.j2"
export layout_x_atmanl=@LAYOUT_X_ATMANL@
export layout_y_atmanl=@LAYOUT_Y_ATMANL@
diff --git a/parm/config/gfs/config.atmensanl b/parm/config/gfs/config.atmensanl
index c03583659dc..ddd3d88659d 100644
--- a/parm/config/gfs/config.atmensanl
+++ b/parm/config/gfs/config.atmensanl
@@ -12,6 +12,7 @@ export INTERP_METHOD='barycentric'
export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2"
export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2"
+export LGETKF_BKG_STAGING_YAML="${PARMgfs}/gdas/staging/atm_lgetkf_bkg.yaml.j2"
export layout_x_atmensanl=@LAYOUT_X_ATMENSANL@
export layout_y_atmensanl=@LAYOUT_Y_ATMENSANL@
diff --git a/parm/config/gfs/config.base b/parm/config/gfs/config.base
index 282a567178c..f78c7fb4003 100644
--- a/parm/config/gfs/config.base
+++ b/parm/config/gfs/config.base
@@ -129,7 +129,6 @@ if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; the
export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel
fi
export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops
-export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead
export ARCDIR="${NOSCRUB}/archive/${PSLOT}"
export ATARDIR="@ATARDIR@"
@@ -296,7 +295,7 @@ export FHOUT_GFS=3 # 3 for ops
export FHMAX_HF_GFS=@FHMAX_HF_GFS@
export FHOUT_HF_GFS=1
export FHOUT_OCN_GFS=6
-export FHOUT_ICE_GFS=6
+export FHOUT_ICE_GFS=6
export FHMIN_WAV=0
export FHOUT_WAV=3
export FHMAX_HF_WAV=120
diff --git a/parm/config/gfs/config.ufs b/parm/config/gfs/config.ufs
index b87435cfefc..f6302268268 100644
--- a/parm/config/gfs/config.ufs
+++ b/parm/config/gfs/config.ufs
@@ -413,9 +413,13 @@ if [[ "${skip_mom6}" == "false" ]]; then
CHLCLIM="seawifs_1998-2006_smoothed_2X.nc"
MOM6_RESTART_SETTING='r'
MOM6_RIVER_RUNOFF='False'
+      if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
+ MOM6_DIAG_MISVAL="-1e34"
+ else
+ MOM6_DIAG_MISVAL="0.0"
+ fi
eps_imesh="4.0e-1"
MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_25L.nc"
- MOM6_DIAG_MISVAL="0.0"
MOM6_ALLOW_LANDMASK_CHANGES='False'
TOPOEDITS=""
;;
@@ -432,12 +436,12 @@ if [[ "${skip_mom6}" == "false" ]]; then
MOM6_RIVER_RUNOFF='False'
eps_imesh="2.5e-1"
TOPOEDITS="ufs.topo_edits_011818.nc"
- if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then
- MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
- MOM6_DIAG_MISVAL="0.0"
- else
+      if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc"
MOM6_DIAG_MISVAL="-1e34"
+ else
+ MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
+ MOM6_DIAG_MISVAL="0.0"
fi
MOM6_ALLOW_LANDMASK_CHANGES='True'
;;
@@ -453,12 +457,12 @@ if [[ "${skip_mom6}" == "false" ]]; then
MOM6_RESTART_SETTING='n'
MOM6_RIVER_RUNOFF='True'
eps_imesh="1.0e-1"
- if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then
- MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
- MOM6_DIAG_MISVAL="0.0"
- else
+      if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc"
MOM6_DIAG_MISVAL="-1e34"
+ else
+ MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
+ MOM6_DIAG_MISVAL="0.0"
fi
MOM6_ALLOW_LANDMASK_CHANGES='False'
TOPOEDITS=""
@@ -475,12 +479,12 @@ if [[ "${skip_mom6}" == "false" ]]; then
MOM6_RIVER_RUNOFF='True'
MOM6_RESTART_SETTING="r"
eps_imesh="1.0e-1"
- if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then
- MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
- MOM6_DIAG_MISVAL="0.0"
- else
+      if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then
MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc"
MOM6_DIAG_MISVAL="-1e34"
+ else
+ MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc"
+ MOM6_DIAG_MISVAL="0.0"
fi
MOM6_ALLOW_LANDMASK_CHANGES='False'
TOPOEDITS=""
diff --git a/parm/gdas/staging/atm_berror_gsibec.yaml.j2 b/parm/gdas/staging/atm_berror_gsibec.yaml.j2
new file mode 100644
index 00000000000..e6c5e416094
--- /dev/null
+++ b/parm/gdas/staging/atm_berror_gsibec.yaml.j2
@@ -0,0 +1,8 @@
+{% set fname_list = ['gfs_gsi_global.nml', 'gsi-coeffs-gfs-global.nc4'] %}
+
+mkdir:
+- '{{ DATA }}/berror'
+copy:
+{% for fname in fname_list %}
+- ['{{ HOMEgfs }}/fix/gdas/gsibec/{{ CASE_ANL }}/{{ fname }}', '{{ DATA }}/berror']
+{% endfor %}
diff --git a/parm/gdas/staging/atm_lgetkf_bkg.yaml.j2 b/parm/gdas/staging/atm_lgetkf_bkg.yaml.j2
new file mode 100644
index 00000000000..eda3dad5a7c
--- /dev/null
+++ b/parm/gdas/staging/atm_lgetkf_bkg.yaml.j2
@@ -0,0 +1,32 @@
+{% set ftype_list = ['fv_core.res', 'fv_srf_wnd.res', 'fv_tracer.res', 'phy_data', 'sfc_data'] %}
+{% set time_list = [current_cycle] %}
+
+mkdir:
+{% for imem in range(1,NMEM_ENS+1) %}
+ {% set memchar = 'mem%03d' | format(imem) %}
+ {% set tmpl_dict = ({ '${ROTDIR}': ROTDIR,
+ '${RUN}': RUN,
+ '${YMD}': current_cycle | to_YMD,
+ '${HH}': current_cycle | strftime('%H'),
+ '${MEMDIR}': memchar }) %}
+- '{{ DATA }}/bkg/{{ memchar }}'
+- '{{ DATA }}/anl/{{ memchar }}'
+- '{{ COM_ATMOS_ANALYSIS_TMPL | replace_tmpl(tmpl_dict) }}'
+{% endfor %}
+copy:
+{% for time in time_list %}
+ {% for imem in range(1,NMEM_ENS+1) %}
+ {% set memchar = 'mem%03d' | format(imem) %}
+ {% set tmpl_dict = ({ '${ROTDIR}': ROTDIR,
+ '${RUN}': 'enkfgdas',
+ '${YMD}': previous_cycle | to_YMD,
+ '${HH}': previous_cycle | strftime('%H'),
+ '${MEMDIR}': memchar }) %}
+- ['{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ time | to_fv3time }}.coupler.res', '{{ DATA }}/bkg/{{ memchar }}/']
+ {% for ftype in ftype_list %}
+ {% for itile in range(1,7) %}
+- ['{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ time | to_fv3time }}.{{ ftype }}.tile{{ itile }}.nc', '{{ DATA }}/bkg/{{ memchar }}/']
+ {% endfor %}
+ {% endfor %}
+ {% endfor %}
+{% endfor %}
diff --git a/parm/gdas/staging/atm_var_bkg.yaml.j2 b/parm/gdas/staging/atm_var_bkg.yaml.j2
new file mode 100644
index 00000000000..37af8336494
--- /dev/null
+++ b/parm/gdas/staging/atm_var_bkg.yaml.j2
@@ -0,0 +1,14 @@
+{% set ftype_list = ['fv_core.res', 'fv_srf_wnd.res', 'fv_tracer.res', 'phy_data', 'sfc_data'] %}
+{% set time_list = [current_cycle] %}
+
+mkdir:
+- '{{ DATA }}/bkg'
+copy:
+{% for time in time_list %}
+- ['{{ COM_ATMOS_RESTART_PREV }}/{{ time | to_fv3time }}.coupler.res', '{{ DATA }}/bkg/']
+ {% for ftype in ftype_list %}
+ {% for itile in range(1,ntiles+1) %}
+- ['{{ COM_ATMOS_RESTART_PREV }}/{{ time | to_fv3time }}.{{ ftype }}.tile{{ itile }}.nc', '{{ DATA }}/bkg/']
+ {% endfor %}
+ {% endfor %}
+{% endfor %}
diff --git a/parm/gdas/staging/atm_var_fv3ens.yaml.j2 b/parm/gdas/staging/atm_var_fv3ens.yaml.j2
new file mode 100644
index 00000000000..e499c86d574
--- /dev/null
+++ b/parm/gdas/staging/atm_var_fv3ens.yaml.j2
@@ -0,0 +1,24 @@
+{% set ftype_list = ['fv_core.res', 'fv_srf_wnd.res', 'fv_tracer.res', 'phy_data', 'sfc_data'] %}
+{% set time_list = [current_cycle] %}
+
+mkdir:
+{% for imem in range(1,NMEM_ENS+1) %}
+- '{{ DATA }}/ens/{{ 'mem%03d' | format(imem) }}'
+{% endfor %}
+copy:
+{% for time in time_list %}
+ {% for imem in range(1,NMEM_ENS+1) %}
+ {% set memchar = 'mem%03d' | format(imem) %}
+ {% set tmpl_dict = ({ '${ROTDIR}': ROTDIR,
+ '${RUN}': 'enkfgdas',
+ '${YMD}': previous_cycle | to_YMD,
+ '${HH}': previous_cycle | strftime('%H'),
+ '${MEMDIR}': memchar }) %}
+- ['{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ time | to_fv3time }}.coupler.res', '{{ DATA }}/ens/{{ memchar }}/']
+ {% for ftype in ftype_list %}
+ {% for itile in range(1,ntiles+1) %}
+- ['{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ time | to_fv3time }}.{{ ftype }}.tile{{ itile }}.nc', '{{ DATA }}/ens/{{ memchar }}/']
+ {% endfor %}
+ {% endfor %}
+ {% endfor %}
+{% endfor %}
diff --git a/scripts/exgfs_wave_init.sh b/scripts/exgfs_wave_init.sh
index 9e3ca0b4972..17e6cec0428 100755
--- a/scripts/exgfs_wave_init.sh
+++ b/scripts/exgfs_wave_init.sh
@@ -83,15 +83,15 @@ source "${USHgfs}/preamble.sh"
grdALL=$(printf "%s\n" "${array[@]}" | sort -u | tr '\n' ' ')
for grdID in ${grdALL}; do
- if [[ -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]]; then
+ if [[ -f "${COMOUT_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]]; then
set +x
- echo " Mod def file for ${grdID} found in ${COM_WAVE_PREP}. copying ...."
+ echo " Mod def file for ${grdID} found in ${COMOUT_WAVE_PREP}. copying ...."
set_trace
- cp "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" "mod_def.${grdID}"
+ cp "${COMOUT_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" "mod_def.${grdID}"
else
set +x
- echo " Mod def file for ${grdID} not found in ${COM_WAVE_PREP}. Setting up to generate ..."
+ echo " Mod def file for ${grdID} not found in ${COMOUT_WAVE_PREP}. Setting up to generate ..."
echo ' '
set_trace
if [ -f ${FIXgfs}/wave/ww3_grid.inp.$grdID ]
@@ -125,7 +125,6 @@ source "${USHgfs}/preamble.sh"
fi
#TO DO: how do we say "it's unstructured, and therefore need to have error check here"
- [[ ! -d "${COM_WAVE_PREP}" ]] && mkdir -m 775 -p "${COM_WAVE_PREP}"
if [ ${CFP_MP:-"NO"} = "YES" ]; then
echo "$nmoddef ${USHgfs}/wave_grid_moddef.sh $grdID > $grdID.out 2>&1" >> cmdfile
else
@@ -190,7 +189,7 @@ source "${USHgfs}/preamble.sh"
# 1.a.3 File check
for grdID in ${grdALL}; do
- if [[ -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]]; then
+ if [[ -f "${COMOUT_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]]; then
set +x
echo ' '
echo "      mod_def.$grdID successfully created/copied "
@@ -213,10 +212,10 @@ source "${USHgfs}/preamble.sh"
# Copy to other members if needed
if (( NMEM_ENS > 0 )); then
for mem in $(seq -f "%03g" 1 "${NMEM_ENS}"); do
- MEMDIR="mem${mem}" YMD=${PDY} HH=${cyc} declare_from_tmpl COM_WAVE_PREP_MEM:COM_WAVE_PREP_TMPL
- mkdir -p "${COM_WAVE_PREP_MEM}"
+ MEMDIR="mem${mem}" YMD=${PDY} HH=${cyc} declare_from_tmpl COMOUT_WAVE_PREP_MEM:COM_WAVE_PREP_TMPL
+ mkdir -p "${COMOUT_WAVE_PREP_MEM}"
for grdID in ${grdALL}; do
- ${NLN} "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" "${COM_WAVE_PREP_MEM}/${RUN}wave.mod_def.${grdID}"
+ ${NLN} "${COMOUT_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" "${COMOUT_WAVE_PREP_MEM}/${RUN}wave.mod_def.${grdID}"
done
done
fi
diff --git a/scripts/exgfs_wave_post_gridded_sbs.sh b/scripts/exgfs_wave_post_gridded_sbs.sh
index 02aa8c456d1..b0cca34bd10 100755
--- a/scripts/exgfs_wave_post_gridded_sbs.sh
+++ b/scripts/exgfs_wave_post_gridded_sbs.sh
@@ -20,6 +20,8 @@
# 2020-06-10 J-Henrique Alves: Porting to R&D machine Hera
# 2020-07-31 Jessica Meixner: Removing points, now gridded data only
#
+# COM inputs:
+#  - ${COMIN_WAVE_PREP}/${RUN}wave.mod_def.${grdID}
+#  - ${COMIN_WAVE_HISTORY}/${WAV_MOD_TAG}.out_grd.${wavGRD}.${YMD}.${HMS}
+#
# $Id$
#
# Attributes:
@@ -103,12 +105,12 @@ source "${USHgfs}/preamble.sh"
# 1.a.1 Copy model definition files
for grdID in ${waveGRD} ${wavepostGRD} ${waveinterpGRD}; do
- if [[ -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]]; then
+ if [[ -f "${COMIN_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]]; then
set +x
- echo " Mod def file for ${grdID} found in ${COM_WAVE_PREP}. copying ...."
+ echo " Mod def file for ${grdID} found in ${COMIN_WAVE_PREP}. copying ...."
set_trace
- cp -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" "mod_def.${grdID}"
+ cp -f "${COMIN_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" "mod_def.${grdID}"
fi
done
@@ -257,9 +259,8 @@ source "${USHgfs}/preamble.sh"
if [ $fhr = $fhrg ]
then
-
for wavGRD in ${waveGRD}; do
- gfile="${COM_WAVE_HISTORY}/${WAV_MOD_TAG}.out_grd.${wavGRD}.${YMD}.${HMS}"
+ gfile="${COMIN_WAVE_HISTORY}/${WAV_MOD_TAG}.out_grd.${wavGRD}.${YMD}.${HMS}"
if ! wait_for_file "${gfile}" "${sleep_interval}" "${iwaitmax}"; then
echo " FATAL ERROR : NO RAW FIELD OUTPUT FILE out_grd.${grdID}"
echo "${WAV_MOD_TAG} post ${grdID} ${PDY} ${cycle} : field output missing."
@@ -405,7 +406,7 @@ source "${USHgfs}/preamble.sh"
ENSTAG=""
if [ ${waveMEMB} ]; then ENSTAG=".${membTAG}${waveMEMB}" ; fi
gribchk="${RUN}wave.${cycle}${ENSTAG}.${GRDNAME}.${GRDRES}.f${FH3}.grib2"
- if [ ! -s ${COM_WAVE_GRID}/${gribchk} ]; then
+ if [ ! -s ${COMOUT_WAVE_GRID}/${gribchk} ]; then
set +x
echo ' '
echo '********************************************'
diff --git a/scripts/exgfs_wave_post_pnt.sh b/scripts/exgfs_wave_post_pnt.sh
index 93bdbeaf32e..b251661ee6e 100755
--- a/scripts/exgfs_wave_post_pnt.sh
+++ b/scripts/exgfs_wave_post_pnt.sh
@@ -22,6 +22,10 @@
# 2020-07-30 Jessica Meixner: Points only - no gridded data
# 2020-09-29 Jessica Meixner: optimized by changing loop structures
#
+# COM inputs:
+# - ${COMIN_WAVE_PREP}/${RUN}wave.mod_def.${grdID}
+# - ${COMIN_WAVE_HISTORY}/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${PDY}.${HMS}
+#
# $Id$
#
# Attributes:
@@ -117,12 +121,12 @@ source "${USHgfs}/preamble.sh"
# Copy model definition files
iloop=0
for grdID in ${waveuoutpGRD}; do
- if [[ -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]]; then
+ if [[ -f "${COMIN_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]]; then
set +x
- echo " Mod def file for ${grdID} found in ${COM_WAVE_PREP}. copying ...."
+ echo " Mod def file for ${grdID} found in ${COMIN_WAVE_PREP}. copying ...."
set_trace
- cp -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" "mod_def.${grdID}"
+ cp -f "${COMIN_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" "mod_def.${grdID}"
iloop=$((iloop + 1))
fi
done
@@ -247,11 +251,10 @@ source "${USHgfs}/preamble.sh"
-e "s/FORMAT/F/g" \
ww3_outp_spec.inp.tmpl > ww3_outp.inp
- ${NLN} mod_def.${waveuoutpGRD} mod_def.ww3
- HH=$(date --utc -d "${PDY:0:8} ${cyc} + ${FHMIN_WAV} hours" +%H)
- HMS="${HH}0000"
- if [[ -f "${COM_WAVE_HISTORY}/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${PDY}.${HMS}" ]]; then
- ${NLN} "${COM_WAVE_HISTORY}/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${PDY}.${HMS}" \
+    ${NLN} mod_def.${waveuoutpGRD} mod_def.ww3
+ HMS="${cyc}0000"
+ if [[ -f "${COMIN_WAVE_HISTORY}/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${PDY}.${HMS}" ]]; then
+ ${NLN} "${COMIN_WAVE_HISTORY}/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${PDY}.${HMS}" \
"./out_pnt.${waveuoutpGRD}"
else
echo '*************************************************** '
@@ -372,7 +375,7 @@ source "${USHgfs}/preamble.sh"
export BULLDATA=${DATA}/output_$YMDHMS
cp $DATA/mod_def.${waveuoutpGRD} mod_def.${waveuoutpGRD}
- pfile="${COM_WAVE_HISTORY}/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS}"
+ pfile="${COMIN_WAVE_HISTORY}/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS}"
if [ -f ${pfile} ]
then
${NLN} ${pfile} ./out_pnt.${waveuoutpGRD}
@@ -696,6 +699,6 @@ source "${USHgfs}/preamble.sh"
# 4. Ending output
-exit $exit_code
+exit "${exit_code}"
# End of MWW3 point postprocessor script ----------------------------------------- #
diff --git a/scripts/exgfs_wave_prdgen_bulls.sh b/scripts/exgfs_wave_prdgen_bulls.sh
index 2bf90cdf2bc..5f5b2c531e5 100755
--- a/scripts/exgfs_wave_prdgen_bulls.sh
+++ b/scripts/exgfs_wave_prdgen_bulls.sh
@@ -8,6 +8,10 @@
# Remarks : #
# - Supplemental error output is witten to the gfswave_prdgbulls.log file. #
# #
+# COM inputs: #
+# - ${COMIN_WAVE_STATION}/${RUNwave}.${cycle}.cbull_tar #
+# COM outputs: #
+# - ${COMOUT_WAVE_WMO}/awipsbull.${cycle}.${RUNwave} #
# #
# Origination : 05/02/2007 #
# Last update : 08/20/2020 #
@@ -52,11 +56,11 @@ source "${USHgfs}/preamble.sh"
# 1. Get necessary files
set +x
- echo " Copying bulletins from ${COM_WAVE_STATION}"
+ echo " Copying bulletins from ${COMIN_WAVE_STATION}"
set_trace
# 1.a Link the input file and untar it
- BullIn="${COM_WAVE_STATION}/${RUNwave}.${cycle}.cbull_tar"
+ BullIn="${COMIN_WAVE_STATION}/${RUNwave}.${cycle}.cbull_tar"
if [ -f $BullIn ]; then
cp $BullIn cbull.tar
else
@@ -170,7 +174,7 @@ source "${USHgfs}/preamble.sh"
set_trace
formbul.pl -d "${headr}" -f "${fname}" -j "${job}" -m "${RUNwave}" \
- -p "${COM_WAVE_WMO}" -s "NO" -o "${oname}" > formbul.out 2>&1
+ -p "${COMOUT_WAVE_WMO}" -s "NO" -o "${oname}" > formbul.out 2>&1
OK=$?
if [ "$OK" != '0' ] || [ ! -f $oname ]; then
@@ -196,15 +200,15 @@ source "${USHgfs}/preamble.sh"
# 3. Send output files to the proper destination
set_trace
-cp "awipsbull.${cycle}.${RUNwave}" "${COM_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}"
+cp "awipsbull.${cycle}.${RUNwave}" "${COMOUT_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}"
if [ "$SENDDBN_NTC" = YES ]; then
make_ntc_bull.pl "WMOBH" "NONE" "KWBC" "NONE" "${DATA}/awipsbull.${cycle}.${RUNwave}" \
- "${COM_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}"
+ "${COMOUT_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}"
else
if [ "${envir}" = "para" ] || [ "${envir}" = "test" ] || [ "${envir}" = "dev" ]; then
echo "Making NTC bulletin for parallel environment, but do not alert."
(export SENDDBN=NO; make_ntc_bull.pl "WMOBH" "NONE" "KWBC" "NONE" \
- "${DATA}/awipsbull.${cycle}.${RUNwave}" "${COM_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}")
+ "${DATA}/awipsbull.${cycle}.${RUNwave}" "${COMOUT_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}")
fi
fi
diff --git a/scripts/exgfs_wave_prdgen_gridded.sh b/scripts/exgfs_wave_prdgen_gridded.sh
index c896423ac13..9111c81273a 100755
--- a/scripts/exgfs_wave_prdgen_gridded.sh
+++ b/scripts/exgfs_wave_prdgen_gridded.sh
@@ -8,6 +8,11 @@
# Remarks : #
# - Supplemental error output is witten to the wave.log file. #
# #
+# COM inputs: #
+# - ${COMIN_WAVE_GRID}/${RUNwave}.${cycle}.${grdID}.f${fhr}.grib2 #
+# #
+# COM outputs: #
+# - ${COMOUT_WAVE_WMO}/grib2.${cycle}.f${fhr}.awipsww3_${grdOut} #
# #
# Origination : 05/02/2007 #
# Last update : 10/08/2020 #
@@ -104,7 +109,6 @@ grids=${grids:-ak_10m at_10m ep_10m wc_10m glo_30m}
echo "$RUNwave $grdID ${fhr} prdgen $date $cycle : GRIB file missing." >> $wavelog
err=1;export err;${errchk} || exit ${err}
fi
-
GRIBOUT=$RUNwave.$cycle.$grdID.f${fhr}.clipped.grib2
iparam=1
@@ -216,16 +220,16 @@ grids=${grids:-ak_10m at_10m ep_10m wc_10m glo_30m}
#set_trace
#set +x
echo " Saving $AWIPSGRB.$grdOut.f${fhr} as grib2.$cycle.awipsww3_${grdID}.f${fhr}"
- echo " in ${COM_WAVE_WMO}"
+ echo " in ${COMOUT_WAVE_WMO}"
#set_trace
- cp "${AWIPSGRB}.${grdID}.f${fhr}" "${COM_WAVE_WMO}/grib2.${cycle}.f${fhr}.awipsww3_${grdOut}"
+ cp "${AWIPSGRB}.${grdID}.f${fhr}" "${COMOUT_WAVE_WMO}/grib2.${cycle}.f${fhr}.awipsww3_${grdOut}"
#set +x
if [ "$SENDDBN" = 'YES' ]
then
echo " Sending $AWIPSGRB.$grdID.f${fhr} to DBRUN."
- "${DBNROOT}/bin/dbn_alert" GRIB_LOW "${RUN}" "${job}" "${COM_WAVE_WMO}/grib2.${cycle}.f${fhr}.awipsww3_${grdOut}"
+ "${DBNROOT}/bin/dbn_alert" GRIB_LOW "${RUN}" "${job}" "${COMOUT_WAVE_WMO}/grib2.${cycle}.f${fhr}.awipsww3_${grdOut}"
fi
rm -f $AWIPSGRB.$grdID.f${fhr} tocgrib2.out
done # For grids
diff --git a/scripts/exgfs_wave_prep.sh b/scripts/exgfs_wave_prep.sh
index 1fbe7dd767e..f83ead2c226 100755
--- a/scripts/exgfs_wave_prep.sh
+++ b/scripts/exgfs_wave_prep.sh
@@ -17,6 +17,13 @@
# Remarks : #
# - For non-fatal errors output is witten to the wave.log file. #
# #
+# COM inputs: #
+# - ${COMIN_WAVE_PREP}/${RUN}wave.mod_def.${grdID} #
+# - ${COMIN_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f#HHH_prog.nc #
+# #
+# COM outputs: #
+# - ${COMOUT_WAVE_PREP}/${RUN}wave.${WAVECUR_FID}.$cycle.cur #
+# #
# Update record : #
# #
# - Origination: 01-Mar-2007 #
@@ -162,12 +169,12 @@ source "${USHgfs}/preamble.sh"
for grdID in $grdINP $waveGRD
do
- if [ -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]
+ if [ -f "${COMIN_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]
then
set +x
- echo " Mod def file for $grdID found in ${COM_WAVE_PREP}. copying ...."
+ echo " Mod def file for $grdID found in ${COMIN_WAVE_PREP}. copying ...."
set_trace
- cp ${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID} mod_def.$grdID
+ cp ${COMIN_WAVE_PREP}/${RUN}wave.mod_def.${grdID} mod_def.$grdID
else
set +x
@@ -322,19 +329,19 @@ source "${USHgfs}/preamble.sh"
ymdh_rtofs=$ymdh_beg
if [ "$FHMAX_WAV_CUR" -le 72 ]; then
- rtofsfile1="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f024_prog.nc"
- rtofsfile2="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f048_prog.nc"
- rtofsfile3="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f072_prog.nc"
+ rtofsfile1="${COMIN_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f024_prog.nc"
+ rtofsfile2="${COMIN_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f048_prog.nc"
+ rtofsfile3="${COMIN_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f072_prog.nc"
if [ ! -f $rtofsfile1 ] || [ ! -f $rtofsfile2 ] || [ ! -f $rtofsfile3 ]; then
#Needed current files are not available, so use RTOFS from previous day
export RPDY=$($NDATE -24 ${RPDY}00 | cut -c1-8)
fi
else
- rtofsfile1="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f096_prog.nc"
- rtofsfile2="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f120_prog.nc"
- rtofsfile3="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f144_prog.nc"
- rtofsfile4="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f168_prog.nc"
- rtofsfile5="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f192_prog.nc"
+ rtofsfile1="${COMIN_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f096_prog.nc"
+ rtofsfile2="${COMIN_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f120_prog.nc"
+ rtofsfile3="${COMIN_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f144_prog.nc"
+ rtofsfile4="${COMIN_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f168_prog.nc"
+ rtofsfile5="${COMIN_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f192_prog.nc"
if [ ! -f $rtofsfile1 ] || [ ! -f $rtofsfile2 ] || [ ! -f $rtofsfile3 ] ||
[ ! -f $rtofsfile4 ] || [ ! -f $rtofsfile5 ]; then
#Needed current files are not available, so use RTOFS from previous day
@@ -360,8 +367,8 @@ source "${USHgfs}/preamble.sh"
fhr_rtofs=$(${NHOUR} ${ymdh_rtofs} ${RPDY}00)
fh3_rtofs=$(printf "%03d" "${fhr_rtofs#0}")
- curfile1h=${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_${fext}${fh3_rtofs}_prog.nc
- curfile3h=${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_${fext}${fh3_rtofs}_prog.nc
+ curfile1h=${COMIN_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_${fext}${fh3_rtofs}_prog.nc
+ curfile3h=${COMIN_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_${fext}${fh3_rtofs}_prog.nc
if [ -s ${curfile1h} ] && [ "${FLGHF}" = "T" ] ; then
curfile=${curfile1h}
@@ -465,7 +472,7 @@ source "${USHgfs}/preamble.sh"
cat $file >> cur.${WAVECUR_FID}
done
- cp -f cur.${WAVECUR_FID} ${COM_WAVE_PREP}/${RUN}wave.${WAVECUR_FID}.$cycle.cur
+ cp -f cur.${WAVECUR_FID} ${COMOUT_WAVE_PREP}/${RUN}wave.${WAVECUR_FID}.$cycle.cur
else
echo ' '
diff --git a/scripts/exglobal_cleanup.sh b/scripts/exglobal_cleanup.sh
index 7c3dfafbad0..dcf1baef313 100755
--- a/scripts/exglobal_cleanup.sh
+++ b/scripts/exglobal_cleanup.sh
@@ -2,6 +2,9 @@
source "${USHgfs}/preamble.sh"
+###############################################################
+echo "Begin Cleanup ${DATAROOT}!"
+
# Remove DATAoutput from the forecast model run
# TODO: Handle this better
DATAfcst="${DATAROOT}/${RUN}fcst.${PDY:-}${cyc}"
@@ -9,6 +12,19 @@ if [[ -d "${DATAfcst}" ]]; then rm -rf "${DATAfcst}"; fi
#DATAefcs="${DATAROOT}/${RUN}efcs???${PDY:-}${cyc}"
rm -rf "${DATAROOT}/${RUN}efcs"*"${PDY:-}${cyc}"
+# Search and delete files/directories from DATAROOT/ older than ${purge_every_days} days
+# purge_every_days should be a positive integer
+#purge_every_days=3
+
+# Find and delete files older than ${purge_every_days} days
+#find "${DATAROOT}/"* -type f -mtime "+${purge_every_days}" -exec rm -f {} \;
+
+# Find and delete directories older than ${purge_every_days} days
+#find "${DATAROOT}/"* -type d -mtime "+${purge_every_days}" -exec rm -rf {} \;
+
+echo "Cleanup ${DATAROOT} completed!"
+###############################################################
+
###############################################################
# Clean up previous cycles; various depths
# PRIOR CYCLE: Leave the prior cycle alone
@@ -67,7 +83,7 @@ for (( current_date=first_date; current_date <= last_date; \
# shellcheck disable=SC2312
if [[ $(tail -n 1 "${rocotolog}") =~ "This cycle is complete: Success" ]]; then
YMD="${current_PDY}" HH="${current_cyc}" declare_from_tmpl \
- COMOUT_TOP:COM_TOP_TMPL
+ COMOUT_TOP:COM_TOP_TMPL
if [[ -d "${COMOUT_TOP}" ]]; then
IFS=", " read -r -a exclude_list <<< "${exclude_string:-}"
remove_files "${COMOUT_TOP}" "${exclude_list[@]:-}"
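The purge block added to exglobal_cleanup.sh ships commented out. If enabled, it would behave roughly as below; the retention value is illustrative, and note that find's -mtime +N matches items whose age exceeds N whole 24-hour periods, so +3 spares anything younger than four days:

    purge_every_days=3   # illustrative retention period, in days

    # Delete regular files untouched for more than ${purge_every_days} days
    find "${DATAROOT}/"* -type f -mtime "+${purge_every_days}" -exec rm -f {} \;

    # Then remove stale directories the same way
    find "${DATAROOT}/"* -type d -mtime "+${purge_every_days}" -exec rm -rf {} \;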
diff --git a/scripts/exglobal_prep_snow_obs.py b/scripts/exglobal_prep_snow_obs.py
index 5107d9c935f..d4998a7d847 100755
--- a/scripts/exglobal_prep_snow_obs.py
+++ b/scripts/exglobal_prep_snow_obs.py
@@ -21,5 +21,5 @@
# Instantiate the snow prepare task
SnowAnl = SnowAnalysis(config)
SnowAnl.prepare_GTS()
- if f"{ SnowAnl.runtime_config.cyc }" == '18':
+ if f"{ SnowAnl.task_config.cyc }" == '18':
SnowAnl.prepare_IMS()
diff --git a/sorc/gdas.cd b/sorc/gdas.cd
index 368c9c5db9b..e3644a98c36 160000
--- a/sorc/gdas.cd
+++ b/sorc/gdas.cd
@@ -1 +1 @@
-Subproject commit 368c9c5db9b5ea62e72937b6d1b0f753adb9be40
+Subproject commit e3644a98c362d7321f9e3081a4e55947885ed2bf
diff --git a/sorc/wxflow b/sorc/wxflow
index 8406beeea41..5dad7dd61ce 160000
--- a/sorc/wxflow
+++ b/sorc/wxflow
@@ -1 +1 @@
-Subproject commit 8406beeea410118cdfbd8300895b2b2878eadba6
+Subproject commit 5dad7dd61cebd9b3f2b163b3b06bb75eae1860a9
diff --git a/ush/detect_machine.sh b/ush/detect_machine.sh
index 683ee0db7fc..cfd0fa97e27 100755
--- a/ush/detect_machine.sh
+++ b/ush/detect_machine.sh
@@ -75,8 +75,8 @@ elif [[ -d /scratch1 ]]; then
MACHINE_ID=hera
elif [[ -d /work ]]; then
# We are on MSU Orion or Hercules
- if [[ -d /apps/other ]]; then
- # We are on Hercules
+ mount=$(findmnt -n -o SOURCE /home)
+ if [[ ${mount} =~ "hercules" ]]; then
MACHINE_ID=hercules
else
MACHINE_ID=orion
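
Reviewer note: probing /apps/other proved fragile for telling Orion and Hercules apart; the new test inspects the filesystem source of the /home mount instead. A rough Python analogue using the same findmnt invocation (the variables are hypothetical, not part of the workflow):

    import subprocess

    # findmnt -n -o SOURCE /home: print only the SOURCE column, no header.
    mount = subprocess.run(["findmnt", "-n", "-o", "SOURCE", "/home"],
                           capture_output=True, text=True, check=True).stdout
    machine_id = "hercules" if "hercules" in mount else "orion"
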
diff --git a/ush/load_fv3gfs_modules.sh b/ush/load_fv3gfs_modules.sh
index ae0e381db43..5f6afb7e35f 100755
--- a/ush/load_fv3gfs_modules.sh
+++ b/ush/load_fv3gfs_modules.sh
@@ -30,6 +30,11 @@ esac
module list
+# Add wxflow to PYTHONPATH
+wxflowPATH="${HOMEgfs}/ush/python"
+PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/ush:${wxflowPATH}"
+export PYTHONPATH
+
# Restore stack soft limit:
ulimit -S -s "${ulimit_s}"
unset ulimit_s
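
Reviewer note: the ${PYTHONPATH:+${PYTHONPATH}:} expansion prepends the existing PYTHONPATH plus a colon only when the variable is already set and non-empty, so the export never begins with a stray colon. A small Python illustration of the same conditional join (names are illustrative):

    import os

    home_gfs = os.environ.get("HOMEgfs", "/path/to/HOMEgfs")  # set by the workflow
    wxflow_path = f"{home_gfs}/ush/python"
    existing = os.environ.get("PYTHONPATH", "")
    # Join with ':' only if something was already there, as ${VAR:+...} does.
    os.environ["PYTHONPATH"] = (f"{existing}:" if existing else "") \
        + f"{home_gfs}/ush:{wxflow_path}"
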
diff --git a/ush/load_ufsda_modules.sh b/ush/load_ufsda_modules.sh
index d7aa08e1aee..8117d3f3592 100755
--- a/ush/load_ufsda_modules.sh
+++ b/ush/load_ufsda_modules.sh
@@ -51,6 +51,11 @@ esac
module list
pip list
+# Add wxflow to PYTHONPATH
+wxflowPATH="${HOMEgfs}/ush/python"
+PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/ush:${wxflowPATH}"
+export PYTHONPATH
+
# Restore stack soft limit:
ulimit -S -s "${ulimit_s}"
unset ulimit_s
diff --git a/ush/python/pygfs/task/aero_analysis.py b/ush/python/pygfs/task/aero_analysis.py
index 16d27350905..69a992d7d47 100644
--- a/ush/python/pygfs/task/aero_analysis.py
+++ b/ush/python/pygfs/task/aero_analysis.py
@@ -29,33 +29,33 @@ class AerosolAnalysis(Analysis):
def __init__(self, config):
super().__init__(config)
- _res = int(self.config['CASE'][1:])
- _res_anl = int(self.config['CASE_ANL'][1:])
- _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config['assim_freq']}H") / 2)
- _jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config['cyc']:02d}z.aerovar.yaml")
+ _res = int(self.task_config['CASE'][1:])
+ _res_anl = int(self.task_config['CASE_ANL'][1:])
+ _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config['assim_freq']}H") / 2)
+ _jedi_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config['cyc']:02d}z.aerovar.yaml")
# Create a local dictionary that is repeatedly used across this class
local_dict = AttrDict(
{
'npx_ges': _res + 1,
'npy_ges': _res + 1,
- 'npz_ges': self.config.LEVS - 1,
- 'npz': self.config.LEVS - 1,
+ 'npz_ges': self.task_config.LEVS - 1,
+ 'npz': self.task_config.LEVS - 1,
'npx_anl': _res_anl + 1,
'npy_anl': _res_anl + 1,
- 'npz_anl': self.config['LEVS'] - 1,
+ 'npz_anl': self.task_config['LEVS'] - 1,
'AERO_WINDOW_BEGIN': _window_begin,
- 'AERO_WINDOW_LENGTH': f"PT{self.config['assim_freq']}H",
- 'aero_bkg_fhr': map(int, str(self.config['aero_bkg_times']).split(',')),
- 'OPREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN
- 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN
- 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.",
+ 'AERO_WINDOW_LENGTH': f"PT{self.task_config['assim_freq']}H",
+ 'aero_bkg_fhr': map(int, str(self.task_config['aero_bkg_times']).split(',')),
+ 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.",
+ 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.",
+ 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.",
'jedi_yaml': _jedi_yaml,
}
)
- # task_config is everything that this task should need
- self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict)
+ # Extend task_config with local_dict
+ self.task_config = AttrDict(**self.task_config, **local_dict)
@logit(logger)
def initialize(self: Analysis) -> None:
@@ -157,8 +157,8 @@ def finalize(self: Analysis) -> None:
archive.add(diaggzip, arcname=os.path.basename(diaggzip))
# copy full YAML from executable to ROTDIR
- src = os.path.join(self.task_config['DATA'], f"{self.task_config['CDUMP']}.t{self.runtime_config['cyc']:02d}z.aerovar.yaml")
- dest = os.path.join(self.task_config.COM_CHEM_ANALYSIS, f"{self.task_config['CDUMP']}.t{self.runtime_config['cyc']:02d}z.aerovar.yaml")
+ src = os.path.join(self.task_config['DATA'], f"{self.task_config['RUN']}.t{self.task_config['cyc']:02d}z.aerovar.yaml")
+ dest = os.path.join(self.task_config.COM_CHEM_ANALYSIS, f"{self.task_config['RUN']}.t{self.task_config['cyc']:02d}z.aerovar.yaml")
yaml_copy = {
'mkdir': [self.task_config.COM_CHEM_ANALYSIS],
'copy': [[src, dest]]
diff --git a/ush/python/pygfs/task/aero_emissions.py b/ush/python/pygfs/task/aero_emissions.py
index 17d2f528e43..5f2d4c68403 100644
--- a/ush/python/pygfs/task/aero_emissions.py
+++ b/ush/python/pygfs/task/aero_emissions.py
@@ -42,7 +42,9 @@ def __init__(self, config: Dict[str, Any]) -> None:
localdict = AttrDict(
{'variable_used_repeatedly': local_variable}
)
- self.task_config = AttrDict(**self.config, **self.runtime_config, **localdict)
+
+ # Extend task_config with localdict
+ self.task_config = AttrDict(**self.task_config, **localdict)
@staticmethod
@logit(logger)
diff --git a/ush/python/pygfs/task/aero_prepobs.py b/ush/python/pygfs/task/aero_prepobs.py
index f2344241a92..d8396fe3cad 100644
--- a/ush/python/pygfs/task/aero_prepobs.py
+++ b/ush/python/pygfs/task/aero_prepobs.py
@@ -24,23 +24,23 @@ class AerosolObsPrep(Task):
def __init__(self, config: Dict[str, Any]) -> None:
super().__init__(config)
- _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config['assim_freq']}H") / 2)
- _window_end = add_to_datetime(self.runtime_config.current_cycle, +to_timedelta(f"{self.config['assim_freq']}H") / 2)
+ _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config['assim_freq']}H") / 2)
+ _window_end = add_to_datetime(self.task_config.current_cycle, +to_timedelta(f"{self.task_config['assim_freq']}H") / 2)
local_dict = AttrDict(
{
'window_begin': _window_begin,
'window_end': _window_end,
- 'sensors': str(self.config['SENSORS']).split(','),
- 'data_dir': self.config['VIIRS_DATA_DIR'],
+ 'sensors': str(self.task_config['SENSORS']).split(','),
+ 'data_dir': self.task_config['VIIRS_DATA_DIR'],
'input_files': '',
- 'OPREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z.",
- 'APREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z."
+ 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.",
+ 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z."
}
)
# task_config is everything that this task should need
- self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict)
+ self.task_config = AttrDict(**self.task_config, **local_dict)
@logit(logger)
def initialize(self) -> None:
@@ -64,8 +64,8 @@ def initialize(self) -> None:
self.task_config.prepaero_config = self.get_obsproc_config(sensor)
# generate converter YAML file
- template = f"{self.runtime_config.CDUMP}.t{self.runtime_config['cyc']:02d}z.prepaero_viirs_{sensor}.yaml"
- _prepaero_yaml = os.path.join(self.runtime_config.DATA, template)
+ template = f"{self.task_config.RUN}.t{self.task_config['cyc']:02d}z.prepaero_viirs_{sensor}.yaml"
+ _prepaero_yaml = os.path.join(self.task_config.DATA, template)
self.task_config.prepaero_yaml.append(_prepaero_yaml)
logger.debug(f"Generate PrepAeroObs YAML file: {_prepaero_yaml}")
save_as_yaml(self.task_config.prepaero_config, _prepaero_yaml)
diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py
index b668ac39804..e407cf17656 100644
--- a/ush/python/pygfs/task/analysis.py
+++ b/ush/python/pygfs/task/analysis.py
@@ -27,7 +27,7 @@ class Analysis(Task):
def __init__(self, config: Dict[str, Any]) -> None:
super().__init__(config)
# Store location of GDASApp jinja2 templates
- self.gdasapp_j2tmpl_dir = os.path.join(self.config.PARMgfs, 'gdas')
+ self.gdasapp_j2tmpl_dir = os.path.join(self.task_config.PARMgfs, 'gdas')
def initialize(self) -> None:
super().initialize()
@@ -54,7 +54,7 @@ def get_jedi_config(self, algorithm: Optional[str] = None) -> Dict[str, Any]:
----------
algorithm (optional) : str
Name of the algorithm to use in the JEDI configuration. Will override the algorithm
- set in the self.config.JCB_<>_YAML file
+ set in the self.task_config.JCB_<>_YAML file
Returns
----------
@@ -120,7 +120,7 @@ def get_obs_dict(self) -> Dict[str, Any]:
basename = os.path.basename(obfile)
copylist.append([os.path.join(self.task_config['COM_OBS'], basename), obfile])
obs_dict = {
- 'mkdir': [os.path.join(self.runtime_config['DATA'], 'obs')],
+ 'mkdir': [os.path.join(self.task_config['DATA'], 'obs')],
'copy': copylist
}
return obs_dict
@@ -161,7 +161,7 @@ def get_bias_dict(self) -> Dict[str, Any]:
# TODO: Why is this specific to ATMOS?
bias_dict = {
- 'mkdir': [os.path.join(self.runtime_config.DATA, 'bc')],
+ 'mkdir': [os.path.join(self.task_config.DATA, 'bc')],
'copy': copylist
}
return bias_dict
@@ -180,7 +180,7 @@ def add_fv3_increments(self, inc_file_tmpl: str, bkg_file_tmpl: str, incvars: Li
List of increment variables to add to the background
"""
- for itile in range(1, self.config.ntiles + 1):
+ for itile in range(1, self.task_config.ntiles + 1):
inc_path = inc_file_tmpl.format(tilenum=itile)
bkg_path = bkg_file_tmpl.format(tilenum=itile)
with Dataset(inc_path, mode='r') as incfile, Dataset(bkg_path, mode='a') as rstfile:
@@ -194,44 +194,6 @@ def add_fv3_increments(self, inc_file_tmpl: str, bkg_file_tmpl: str, incvars: Li
except (AttributeError, RuntimeError):
pass # checksum is missing, move on
- @logit(logger)
- def get_bkg_dict(self, task_config: Dict[str, Any]) -> Dict[str, List[str]]:
- """Compile a dictionary of model background files to copy
-
- This method is a placeholder for now... will be possibly made generic at a later date
-
- Parameters
- ----------
- task_config: Dict
- a dictionary containing all of the configuration needed for the task
-
- Returns
- ----------
- bkg_dict: Dict
- a dictionary containing the list of model background files to copy for FileHandler
- """
- bkg_dict = {'foo': 'bar'}
- return bkg_dict
-
- @logit(logger)
- def get_berror_dict(self, config: Dict[str, Any]) -> Dict[str, List[str]]:
- """Compile a dictionary of background error files to copy
-
- This method is a placeholder for now... will be possibly made generic at a later date
-
- Parameters
- ----------
- config: Dict
- a dictionary containing all of the configuration needed
-
- Returns
- ----------
- berror_dict: Dict
- a dictionary containing the list of background error files to copy for FileHandler
- """
- berror_dict = {'foo': 'bar'}
- return berror_dict
-
@logit(logger)
def link_jediexe(self) -> None:
"""Compile a dictionary of background error files to copy
@@ -258,68 +220,6 @@ def link_jediexe(self) -> None:
return exe_dest
- @staticmethod
- @logit(logger)
- def get_fv3ens_dict(config: Dict[str, Any]) -> Dict[str, Any]:
- """Compile a dictionary of ensemble member restarts to copy
-
- This method constructs a dictionary of ensemble FV3 restart files (coupler, core, tracer)
- that are needed for global atmens DA and returns said dictionary for use by the FileHandler class.
-
- Parameters
- ----------
- config: Dict
- a dictionary containing all of the configuration needed
-
- Returns
- ----------
- ens_dict: Dict
- a dictionary containing the list of ensemble member restart files to copy for FileHandler
- """
- # NOTE for now this is FV3 restart files and just assumed to be fh006
-
- # define template
- template_res = config.COM_ATMOS_RESTART_TMPL
- prev_cycle = config.previous_cycle
- tmpl_res_dict = {
- 'ROTDIR': config.ROTDIR,
- 'RUN': config.RUN,
- 'YMD': to_YMD(prev_cycle),
- 'HH': prev_cycle.strftime('%H'),
- 'MEMDIR': None
- }
-
- # construct ensemble member file list
- dirlist = []
- enslist = []
- for imem in range(1, config.NMEM_ENS + 1):
- memchar = f"mem{imem:03d}"
-
- # create directory path for ensemble member restart
- dirlist.append(os.path.join(config.DATA, config.dirname, f'mem{imem:03d}'))
-
- # get FV3 restart files, this will be a lot simpler when using history files
- tmpl_res_dict['MEMDIR'] = memchar
- rst_dir = Template.substitute_structure(template_res, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_res_dict.get)
- run_dir = os.path.join(config.DATA, config.dirname, memchar)
-
- # atmens DA needs coupler
- basename = f'{to_fv3time(config.current_cycle)}.coupler.res'
- enslist.append([os.path.join(rst_dir, basename), os.path.join(config.DATA, config.dirname, memchar, basename)])
-
- # atmens DA needs core, srf_wnd, tracer, phy_data, sfc_data
- for ftype in ['fv_core.res', 'fv_srf_wnd.res', 'fv_tracer.res', 'phy_data', 'sfc_data']:
- template = f'{to_fv3time(config.current_cycle)}.{ftype}.tile{{tilenum}}.nc'
- for itile in range(1, config.ntiles + 1):
- basename = template.format(tilenum=itile)
- enslist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)])
-
- ens_dict = {
- 'mkdir': dirlist,
- 'copy': enslist,
- }
- return ens_dict
-
@staticmethod
@logit(logger)
def tgz_diags(statfile: str, diagdir: str) -> None:
diff --git a/ush/python/pygfs/task/archive.py b/ush/python/pygfs/task/archive.py
index d0722552e15..953a856192e 100644
--- a/ush/python/pygfs/task/archive.py
+++ b/ush/python/pygfs/task/archive.py
@@ -35,12 +35,13 @@ def __init__(self, config: Dict[str, Any]) -> None:
"""
super().__init__(config)
- rotdir = self.config.ROTDIR + os.sep
+ rotdir = self.task_config.ROTDIR + os.sep
# Find all absolute paths in the environment and get their relative paths from ${ROTDIR}
path_dict = self._gen_relative_paths(rotdir)
- self.task_config = AttrDict(**self.config, **self.runtime_config, **path_dict)
+ # Extend task_config with path_dict
+ self.task_config = AttrDict(**self.task_config, **path_dict)
@logit(logger)
def configure(self, arch_dict: Dict[str, Any]) -> (Dict[str, Any], List[Dict[str, Any]]):
@@ -297,7 +298,7 @@ def _create_tarball(target: str, fileset: List) -> None:
@logit(logger)
def _gen_relative_paths(self, root_path: str) -> Dict:
- """Generate a dict of paths in self.config relative to root_path
+ """Generate a dict of paths in self.task_config relative to root_path
Parameters
----------
@@ -314,7 +315,7 @@ def _gen_relative_paths(self, root_path: str) -> Dict:
"""
rel_path_dict = {}
- for key, value in self.config.items():
+ for key, value in self.task_config.items():
if isinstance(value, str):
if root_path in value:
rel_path = value.replace(root_path, "")
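
Reviewer note: _gen_relative_paths now walks self.task_config instead of self.config, but the mechanics are unchanged. A hedged sketch of the idea (key handling simplified; the real method may name or store results differently):

    def gen_relative_paths(task_config: dict, root_path: str) -> dict:
        """For every string value containing root_path (e.g. ${ROTDIR}/),
        record the same path relative to it."""
        rel_path_dict = {}
        for key, value in task_config.items():
            if isinstance(value, str) and root_path in value:
                rel_path_dict[key] = value.replace(root_path, "")
        return rel_path_dict
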
diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py
index 95545c57a44..4e9d37335c5 100644
--- a/ush/python/pygfs/task/atm_analysis.py
+++ b/ush/python/pygfs/task/atm_analysis.py
@@ -28,35 +28,35 @@ class AtmAnalysis(Analysis):
def __init__(self, config):
super().__init__(config)
- _res = int(self.config.CASE[1:])
- _res_anl = int(self.config.CASE_ANL[1:])
- _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config.assim_freq}H") / 2)
- _jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmvar.yaml")
+ _res = int(self.task_config.CASE[1:])
+ _res_anl = int(self.task_config.CASE_ANL[1:])
+ _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config.assim_freq}H") / 2)
+ _jedi_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmvar.yaml")
# Create a local dictionary that is repeatedly used across this class
local_dict = AttrDict(
{
'npx_ges': _res + 1,
'npy_ges': _res + 1,
- 'npz_ges': self.config.LEVS - 1,
- 'npz': self.config.LEVS - 1,
+ 'npz_ges': self.task_config.LEVS - 1,
+ 'npz': self.task_config.LEVS - 1,
'npx_anl': _res_anl + 1,
'npy_anl': _res_anl + 1,
- 'npz_anl': self.config.LEVS - 1,
+ 'npz_anl': self.task_config.LEVS - 1,
'ATM_WINDOW_BEGIN': _window_begin,
- 'ATM_WINDOW_LENGTH': f"PT{self.config.assim_freq}H",
- 'OPREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN
- 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN
- 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.",
+ 'ATM_WINDOW_LENGTH': f"PT{self.task_config.assim_freq}H",
+ 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.",
+ 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.",
+ 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.",
'jedi_yaml': _jedi_yaml,
- 'atm_obsdatain_path': f"{self.runtime_config.DATA}/obs/",
- 'atm_obsdataout_path': f"{self.runtime_config.DATA}/diags/",
+ 'atm_obsdatain_path': f"{self.task_config.DATA}/obs/",
+ 'atm_obsdataout_path': f"{self.task_config.DATA}/diags/",
'BKG_TSTEP': "PT1H" # Placeholder for 4D applications
}
)
- # task_config is everything that this task should need
- self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict)
+ # Extend task_config with local_dict
+ self.task_config = AttrDict(**self.task_config, **local_dict)
@logit(logger)
def initialize(self: Analysis) -> None:
@@ -85,22 +85,22 @@ def initialize(self: Analysis) -> None:
# stage static background error files, otherwise it will assume ID matrix
logger.info(f"Stage files for STATICB_TYPE {self.task_config.STATICB_TYPE}")
- FileHandler(self.get_berror_dict(self.task_config)).sync()
+ if self.task_config.STATICB_TYPE != 'identity':
+ berror_staging_dict = parse_j2yaml(self.task_config.BERROR_STAGING_YAML, self.task_config)
+ else:
+ berror_staging_dict = {}
+ FileHandler(berror_staging_dict).sync()
# stage ensemble files for use in hybrid background error
if self.task_config.DOHYBVAR:
logger.debug(f"Stage ensemble files for DOHYBVAR {self.task_config.DOHYBVAR}")
- localconf = AttrDict()
- keys = ['COM_ATMOS_RESTART_TMPL', 'previous_cycle', 'ROTDIR', 'RUN',
- 'NMEM_ENS', 'DATA', 'current_cycle', 'ntiles']
- for key in keys:
- localconf[key] = self.task_config[key]
- localconf.RUN = 'enkfgdas'
- localconf.dirname = 'ens'
- FileHandler(self.get_fv3ens_dict(localconf)).sync()
+ fv3ens_staging_dict = parse_j2yaml(self.task_config.FV3ENS_STAGING_YAML, self.task_config)
+ FileHandler(fv3ens_staging_dict).sync()
# stage backgrounds
- FileHandler(self.get_bkg_dict(AttrDict(self.task_config))).sync()
+ logger.info(f"Staging background files from {self.task_config.VAR_BKG_STAGING_YAML}")
+ bkg_staging_dict = parse_j2yaml(self.task_config.VAR_BKG_STAGING_YAML, self.task_config)
+ FileHandler(bkg_staging_dict).sync()
# generate variational YAML file
logger.debug(f"Generate variational YAML file: {self.task_config.jedi_yaml}")
@@ -140,7 +140,7 @@ def variational(self: Analysis) -> None:
@logit(logger)
def init_fv3_increment(self: Analysis) -> None:
# Setup JEDI YAML file
- self.task_config.jedi_yaml = os.path.join(self.runtime_config.DATA,
+ self.task_config.jedi_yaml = os.path.join(self.task_config.DATA,
f"{self.task_config.JCB_ALGO}.yaml")
save_as_yaml(self.get_jedi_config(self.task_config.JCB_ALGO), self.task_config.jedi_yaml)
@@ -198,8 +198,8 @@ def finalize(self: Analysis) -> None:
# copy full YAML from executable to ROTDIR
logger.info(f"Copying {self.task_config.jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS}")
- src = os.path.join(self.task_config.DATA, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmvar.yaml")
- dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmvar.yaml")
+ src = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmvar.yaml")
+ dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmvar.yaml")
logger.debug(f"Copying {src} to {dest}")
yaml_copy = {
'mkdir': [self.task_config.COM_ATMOS_ANALYSIS],
@@ -244,7 +244,7 @@ def finalize(self: Analysis) -> None:
cdate = to_fv3time(self.task_config.current_cycle)
cdate_inc = cdate.replace('.', '_')
src = os.path.join(self.task_config.DATA, 'anl', f"atminc.{cdate_inc}z.nc4")
- dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f'{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atminc.nc')
+ dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f'{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.nc')
logger.debug(f"Copying {src} to {dest}")
inc_copy = {
'copy': [[src, dest]]
@@ -253,189 +253,3 @@ def finalize(self: Analysis) -> None:
def clean(self):
super().clean()
-
- @logit(logger)
- def get_bkg_dict(self, task_config: Dict[str, Any]) -> Dict[str, List[str]]:
- """Compile a dictionary of model background files to copy
-
- This method constructs a dictionary of FV3 restart files (coupler, core, tracer)
- that are needed for global atm DA and returns said dictionary for use by the FileHandler class.
-
- Parameters
- ----------
- task_config: Dict
- a dictionary containing all of the configuration needed for the task
-
- Returns
- ----------
- bkg_dict: Dict
- a dictionary containing the list of model background files to copy for FileHandler
- """
- # NOTE for now this is FV3 restart files and just assumed to be fh006
-
- # get FV3 restart files, this will be a lot simpler when using history files
- rst_dir = os.path.join(task_config.COM_ATMOS_RESTART_PREV) # for now, option later?
- run_dir = os.path.join(task_config.DATA, 'bkg')
-
- # Start accumulating list of background files to copy
- bkglist = []
-
- # atm DA needs coupler
- basename = f'{to_fv3time(task_config.current_cycle)}.coupler.res'
- bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)])
-
- # atm DA needs core, srf_wnd, tracer, phy_data, sfc_data
- for ftype in ['core', 'srf_wnd', 'tracer']:
- template = f'{to_fv3time(self.task_config.current_cycle)}.fv_{ftype}.res.tile{{tilenum}}.nc'
- for itile in range(1, task_config.ntiles + 1):
- basename = template.format(tilenum=itile)
- bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)])
-
- for ftype in ['phy_data', 'sfc_data']:
- template = f'{to_fv3time(self.task_config.current_cycle)}.{ftype}.tile{{tilenum}}.nc'
- for itile in range(1, task_config.ntiles + 1):
- basename = template.format(tilenum=itile)
- bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)])
-
- bkg_dict = {
- 'mkdir': [run_dir],
- 'copy': bkglist,
- }
- return bkg_dict
-
- @logit(logger)
- def get_berror_dict(self, config: Dict[str, Any]) -> Dict[str, List[str]]:
- """Compile a dictionary of background error files to copy
-
- This method will construct a dictionary of either bump of gsibec background
- error files for global atm DA and return said dictionary for use by the
- FileHandler class.
-
- Parameters
- ----------
- config: Dict
- a dictionary containing all of the configuration needed
-
- Returns
- ----------
- berror_dict: Dict
- a dictionary containing the list of atm background error files to copy for FileHandler
- """
- SUPPORTED_BERROR_STATIC_MAP = {'identity': self._get_berror_dict_identity,
- 'bump': self._get_berror_dict_bump,
- 'gsibec': self._get_berror_dict_gsibec}
-
- try:
- berror_dict = SUPPORTED_BERROR_STATIC_MAP[config.STATICB_TYPE](config)
- except KeyError:
- raise KeyError(f"{config.STATICB_TYPE} is not a supported background error type.\n" +
- f"Currently supported background error types are:\n" +
- f'{" | ".join(SUPPORTED_BERROR_STATIC_MAP.keys())}')
-
- return berror_dict
-
- @staticmethod
- @logit(logger)
- def _get_berror_dict_identity(config: Dict[str, Any]) -> Dict[str, List[str]]:
- """Identity BE does not need any files for staging.
-
- This is a private method and should not be accessed directly.
-
- Parameters
- ----------
- config: Dict
- a dictionary containing all of the configuration needed
- Returns
- ----------
- berror_dict: Dict
- Empty dictionary [identity BE needs not files to stage]
- """
- logger.info(f"Identity background error does not use staged files. Return empty dictionary")
- return {}
-
- @staticmethod
- @logit(logger)
- def _get_berror_dict_bump(config: Dict[str, Any]) -> Dict[str, List[str]]:
- """Compile a dictionary of atm bump background error files to copy
-
- This method will construct a dictionary of atm bump background error
- files for global atm DA and return said dictionary to the parent
-
- This is a private method and should not be accessed directly.
-
- Parameters
- ----------
- config: Dict
- a dictionary containing all of the configuration needed
-
- Returns
- ----------
- berror_dict: Dict
- a dictionary of atm bump background error files to copy for FileHandler
- """
- # BUMP atm static-B needs nicas, cor_rh, cor_rv and stddev files.
- b_dir = config.BERROR_DATA_DIR
- b_datestr = to_fv3time(config.BERROR_DATE)
- berror_list = []
- for ftype in ['cor_rh', 'cor_rv', 'stddev']:
- coupler = f'{b_datestr}.{ftype}.coupler.res'
- berror_list.append([
- os.path.join(b_dir, coupler), os.path.join(config.DATA, 'berror', coupler)
- ])
-
- template = '{b_datestr}.{ftype}.fv_tracer.res.tile{{tilenum}}.nc'
- for itile in range(1, config.ntiles + 1):
- tracer = template.format(tilenum=itile)
- berror_list.append([
- os.path.join(b_dir, tracer), os.path.join(config.DATA, 'berror', tracer)
- ])
-
- nproc = config.ntiles * config.layout_x * config.layout_y
- for nn in range(1, nproc + 1):
- berror_list.append([
- os.path.join(b_dir, f'nicas_aero_nicas_local_{nproc:06}-{nn:06}.nc'),
- os.path.join(config.DATA, 'berror', f'nicas_aero_nicas_local_{nproc:06}-{nn:06}.nc')
- ])
-
- # create dictionary of background error files to stage
- berror_dict = {
- 'mkdir': [os.path.join(config.DATA, 'berror')],
- 'copy': berror_list,
- }
- return berror_dict
-
- @staticmethod
- @logit(logger)
- def _get_berror_dict_gsibec(config: Dict[str, Any]) -> Dict[str, List[str]]:
- """Compile a dictionary of atm gsibec background error files to copy
-
- This method will construct a dictionary of atm gsibec background error
- files for global atm DA and return said dictionary to the parent
-
- This is a private method and should not be accessed directly.
-
- Parameters
- ----------
- config: Dict
- a dictionary containing all of the configuration needed
-
- Returns
- ----------
- berror_dict: Dict
- a dictionary of atm gsibec background error files to copy for FileHandler
- """
- # GSI atm static-B needs namelist and coefficient files.
- b_dir = os.path.join(config.HOMEgfs, 'fix', 'gdas', 'gsibec', config.CASE_ANL)
- berror_list = []
- for ftype in ['gfs_gsi_global.nml', 'gsi-coeffs-gfs-global.nc4']:
- berror_list.append([
- os.path.join(b_dir, ftype),
- os.path.join(config.DATA, 'berror', ftype)
- ])
-
- # create dictionary of background error files to stage
- berror_dict = {
- 'mkdir': [os.path.join(config.DATA, 'berror')],
- 'copy': berror_list,
- }
- return berror_dict
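
Reviewer note: the net effect in this file is that roughly 190 lines of hand-rolled staging dictionaries (get_bkg_dict, get_berror_dict, and the bump/gsibec helpers) are replaced by Jinja2-templated staging YAMLs rendered against task_config. A condensed sketch of the new call pattern from initialize() above, assuming parse_j2yaml and FileHandler are exported at wxflow's top level:

    from wxflow import FileHandler, parse_j2yaml  # assumed import path

    def stage_backgrounds(task_config) -> None:
        # Render the staging YAML (a dict of 'mkdir'/'copy' directives) with
        # task_config as the Jinja2 context, then create/copy everything.
        bkg_staging_dict = parse_j2yaml(task_config.VAR_BKG_STAGING_YAML, task_config)
        FileHandler(bkg_staging_dict).sync()

        # Static B is staged only when it is not the identity operator.
        if task_config.STATICB_TYPE != 'identity':
            FileHandler(parse_j2yaml(task_config.BERROR_STAGING_YAML, task_config)).sync()
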
diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py
index 37ac6137364..bd5112050e8 100644
--- a/ush/python/pygfs/task/atmens_analysis.py
+++ b/ush/python/pygfs/task/atmens_analysis.py
@@ -29,22 +29,22 @@ class AtmEnsAnalysis(Analysis):
def __init__(self, config):
super().__init__(config)
- _res = int(self.config.CASE_ENS[1:])
- _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config.assim_freq}H") / 2)
- _jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmens.yaml")
+ _res = int(self.task_config.CASE_ENS[1:])
+ _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config.assim_freq}H") / 2)
+ _jedi_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmens.yaml")
# Create a local dictionary that is repeatedly used across this class
local_dict = AttrDict(
{
'npx_ges': _res + 1,
'npy_ges': _res + 1,
- 'npz_ges': self.config.LEVS - 1,
- 'npz': self.config.LEVS - 1,
+ 'npz_ges': self.task_config.LEVS - 1,
+ 'npz': self.task_config.LEVS - 1,
'ATM_WINDOW_BEGIN': _window_begin,
- 'ATM_WINDOW_LENGTH': f"PT{self.config.assim_freq}H",
- 'OPREFIX': f"{self.config.EUPD_CYC}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN
- 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN
- 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.",
+ 'ATM_WINDOW_LENGTH': f"PT{self.task_config.assim_freq}H",
+ 'OPREFIX': f"{self.task_config.EUPD_CYC}.t{self.task_config.cyc:02d}z.",
+ 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.",
+ 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.",
'jedi_yaml': _jedi_yaml,
'atm_obsdatain_path': f"./obs/",
'atm_obsdataout_path': f"./diags/",
@@ -52,8 +52,8 @@ def __init__(self, config):
}
)
- # task_config is everything that this task should need
- self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict)
+ # Extend task_config with local_dict
+ self.task_config = AttrDict(**self.task_config, **local_dict)
@logit(logger)
def initialize(self: Analysis) -> None:
@@ -77,27 +77,6 @@ def initialize(self: Analysis) -> None:
"""
super().initialize()
- # Make member directories in DATA for background and in DATA and ROTDIR for analysis files
- # create template dictionary for output member analysis directories
- template_inc = self.task_config.COM_ATMOS_ANALYSIS_TMPL
- tmpl_inc_dict = {
- 'ROTDIR': self.task_config.ROTDIR,
- 'RUN': self.task_config.RUN,
- 'YMD': to_YMD(self.task_config.current_cycle),
- 'HH': self.task_config.current_cycle.strftime('%H')
- }
- dirlist = []
- for imem in range(1, self.task_config.NMEM_ENS + 1):
- dirlist.append(os.path.join(self.task_config.DATA, 'bkg', f'mem{imem:03d}'))
- dirlist.append(os.path.join(self.task_config.DATA, 'anl', f'mem{imem:03d}'))
-
- # create output directory path for member analysis
- tmpl_inc_dict['MEMDIR'] = f"mem{imem:03d}"
- incdir = Template.substitute_structure(template_inc, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_inc_dict.get)
- dirlist.append(incdir)
-
- FileHandler({'mkdir': dirlist}).sync()
-
# stage CRTM fix files
logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}")
crtm_fix_list = parse_j2yaml(self.task_config.CRTM_FIX_YAML, self.task_config)
@@ -110,13 +89,8 @@ def initialize(self: Analysis) -> None:
# stage backgrounds
logger.info(f"Stage ensemble member background files")
- localconf = AttrDict()
- keys = ['COM_ATMOS_RESTART_TMPL', 'previous_cycle', 'ROTDIR', 'RUN',
- 'NMEM_ENS', 'DATA', 'current_cycle', 'ntiles']
- for key in keys:
- localconf[key] = self.task_config[key]
- localconf.dirname = 'bkg'
- FileHandler(self.get_fv3ens_dict(localconf)).sync()
+ bkg_staging_dict = parse_j2yaml(self.task_config.LGETKF_BKG_STAGING_YAML, self.task_config)
+ FileHandler(bkg_staging_dict).sync()
# generate ensemble da YAML file
logger.debug(f"Generate ensemble da YAML file: {self.task_config.jedi_yaml}")
@@ -171,7 +145,7 @@ def letkf(self: Analysis) -> None:
@logit(logger)
def init_fv3_increment(self: Analysis) -> None:
# Setup JEDI YAML file
- self.task_config.jedi_yaml = os.path.join(self.runtime_config.DATA,
+ self.task_config.jedi_yaml = os.path.join(self.task_config.DATA,
f"{self.task_config.JCB_ALGO}.yaml")
save_as_yaml(self.get_jedi_config(self.task_config.JCB_ALGO), self.task_config.jedi_yaml)
@@ -235,8 +209,8 @@ def finalize(self: Analysis) -> None:
# copy full YAML from executable to ROTDIR
logger.info(f"Copying {self.task_config.jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS_ENS}")
- src = os.path.join(self.task_config.DATA, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmens.yaml")
- dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS_ENS, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmens.yaml")
+ src = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmens.yaml")
+ dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS_ENS, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmens.yaml")
logger.debug(f"Copying {src} to {dest}")
yaml_copy = {
'mkdir': [self.task_config.COM_ATMOS_ANALYSIS_ENS],
@@ -265,7 +239,7 @@ def finalize(self: Analysis) -> None:
tmpl_inc_dict['MEMDIR'] = memchar
incdir = Template.substitute_structure(template_inc, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_inc_dict.get)
src = os.path.join(self.task_config.DATA, 'anl', memchar, f"atminc.{cdate_inc}z.nc4")
- dest = os.path.join(incdir, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atminc.nc")
+ dest = os.path.join(incdir, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.nc")
# copy increment
logger.debug(f"Copying {src} to {dest}")
diff --git a/ush/python/pygfs/task/oceanice_products.py b/ush/python/pygfs/task/oceanice_products.py
index 690aac95422..98b57ae8017 100644
--- a/ush/python/pygfs/task/oceanice_products.py
+++ b/ush/python/pygfs/task/oceanice_products.py
@@ -49,39 +49,40 @@ def __init__(self, config: Dict[str, Any]) -> None:
"""
super().__init__(config)
- if self.config.COMPONENT not in self.VALID_COMPONENTS:
- raise NotImplementedError(f'{self.config.COMPONENT} is not a valid model component.\n' +
+ if self.task_config.COMPONENT not in self.VALID_COMPONENTS:
+ raise NotImplementedError(f'{self.task_config.COMPONENT} is not a valid model component.\n' +
'Valid model components are:\n' +
f'{", ".join(self.VALID_COMPONENTS)}')
- model_grid = f"mx{self.config[self.COMPONENT_RES_MAP[self.config.COMPONENT]]:03d}"
+ model_grid = f"mx{self.task_config[self.COMPONENT_RES_MAP[self.task_config.COMPONENT]]:03d}"
- valid_datetime = add_to_datetime(self.runtime_config.current_cycle, to_timedelta(f"{self.config.FORECAST_HOUR}H"))
+ valid_datetime = add_to_datetime(self.task_config.current_cycle, to_timedelta(f"{self.task_config.FORECAST_HOUR}H"))
- if self.config.COMPONENT == 'ice':
- offset = int(self.runtime_config.current_cycle.strftime("%H")) % self.config.FHOUT_ICE_GFS
+ if self.task_config.COMPONENT == 'ice':
+ offset = int(self.task_config.current_cycle.strftime("%H")) % self.task_config.FHOUT_ICE_GFS
# For CICE cases where offset is not 0, forecast_hour needs to be adjusted based on the offset.
# TODO: Consider FHMIN when calculating offset.
if offset != 0:
- forecast_hour = self.config.FORECAST_HOUR - int(self.runtime_config.current_cycle.strftime("%H"))
+ forecast_hour = self.task_config.FORECAST_HOUR - int(self.task_config.current_cycle.strftime("%H"))
# For the first forecast hour, the interval may be different from the intervals of subsequent forecast hours
- if forecast_hour <= self.config.FHOUT_ICE_GFS:
- interval = self.config.FHOUT_ICE_GFS - int(self.runtime_config.current_cycle.strftime("%H"))
+ if forecast_hour <= self.task_config.FHOUT_ICE_GFS:
+ interval = self.task_config.FHOUT_ICE_GFS - int(self.task_config.current_cycle.strftime("%H"))
else:
- interval = self.config.FHOUT_ICE_GFS
+ interval = self.task_config.FHOUT_ICE_GFS
else:
- forecast_hour = self.config.FORECAST_HOUR
- interval = self.config.FHOUT_ICE_GFS
- if self.config.COMPONENT == 'ocean':
- forecast_hour = self.config.FORECAST_HOUR
- interval = self.config.FHOUT_OCN_GFS
+ forecast_hour = self.task_config.FORECAST_HOUR
+ interval = self.task_config.FHOUT_ICE_GFS
+ if self.task_config.COMPONENT == 'ocean':
+ forecast_hour = self.task_config.FORECAST_HOUR
+ interval = self.task_config.FHOUT_OCN_GFS
# TODO: This is a bit of a hack, but it works for now
# FIXME: find a better way to provide the averaging period
avg_period = f"{forecast_hour-interval:03d}-{forecast_hour:03d}"
+ # Extend task_config with localdict
localdict = AttrDict(
- {'component': self.config.COMPONENT,
+ {'component': self.task_config.COMPONENT,
'forecast_hour': forecast_hour,
'valid_datetime': valid_datetime,
'avg_period': avg_period,
@@ -89,11 +90,11 @@ def __init__(self, config: Dict[str, Any]) -> None:
'interval': interval,
'product_grids': self.VALID_PRODUCT_GRIDS[model_grid]}
)
- self.task_config = AttrDict(**self.config, **self.runtime_config, **localdict)
+ self.task_config = AttrDict(**self.task_config, **localdict)
# Read the oceanice_products.yaml file for common configuration
- logger.info(f"Read the ocean ice products configuration yaml file {self.config.OCEANICEPRODUCTS_CONFIG}")
- self.task_config.oceanice_yaml = parse_j2yaml(self.config.OCEANICEPRODUCTS_CONFIG, self.task_config)
+ logger.info(f"Read the ocean ice products configuration yaml file {self.task_config.OCEANICEPRODUCTS_CONFIG}")
+ self.task_config.oceanice_yaml = parse_j2yaml(self.task_config.OCEANICEPRODUCTS_CONFIG, self.task_config)
logger.debug(f"oceanice_yaml:\n{pformat(self.task_config.oceanice_yaml)}")
@staticmethod
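
Reviewer note: the CICE branch above adjusts both the forecast hour and the averaging interval when the cycle hour is not aligned with FHOUT_ICE_GFS. A pure-arithmetic sketch of that logic with one worked case:

    def ice_fh_and_interval(forecast_hour: int, cyc: int, fhout_ice: int):
        """Mirror of the offset handling above (ice component only)."""
        offset = cyc % fhout_ice
        if offset != 0:
            fh = forecast_hour - cyc
            # The first output interval may be shorter than the regular cadence.
            interval = fhout_ice - cyc if fh <= fhout_ice else fhout_ice
            return fh, interval
        return forecast_hour, fhout_ice

    # e.g. cyc=3, FHOUT_ICE_GFS=6, FORECAST_HOUR=9 -> (6, 3): the first ice
    # average spans 3 hours, subsequent ones the full 6.
    assert ice_fh_and_interval(9, 3, 6) == (6, 3)
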
diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py
index 9a5c7fcab01..9656b00a8ee 100644
--- a/ush/python/pygfs/task/snow_analysis.py
+++ b/ush/python/pygfs/task/snow_analysis.py
@@ -32,27 +32,27 @@ class SnowAnalysis(Analysis):
def __init__(self, config):
super().__init__(config)
- _res = int(self.config['CASE'][1:])
- _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config['assim_freq']}H") / 2)
- _letkfoi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.RUN}.t{self.runtime_config['cyc']:02d}z.letkfoi.yaml")
+ _res = int(self.task_config['CASE'][1:])
+ _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config['assim_freq']}H") / 2)
+ _letkfoi_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config['cyc']:02d}z.letkfoi.yaml")
# Create a local dictionary that is repeatedly used across this class
local_dict = AttrDict(
{
'npx_ges': _res + 1,
'npy_ges': _res + 1,
- 'npz_ges': self.config.LEVS - 1,
- 'npz': self.config.LEVS - 1,
+ 'npz_ges': self.task_config.LEVS - 1,
+ 'npz': self.task_config.LEVS - 1,
'SNOW_WINDOW_BEGIN': _window_begin,
- 'SNOW_WINDOW_LENGTH': f"PT{self.config['assim_freq']}H",
- 'OPREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z.",
- 'APREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z.",
+ 'SNOW_WINDOW_LENGTH': f"PT{self.task_config['assim_freq']}H",
+ 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.",
+ 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.",
'jedi_yaml': _letkfoi_yaml
}
)
- # task_config is everything that this task should need
- self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict)
+ # Extend task_config with local_dict
+ self.task_config = AttrDict(**self.task_config, **local_dict)
@logit(logger)
def prepare_GTS(self) -> None:
@@ -114,7 +114,7 @@ def _gtsbufr2iodax(exe, yaml_file):
# 1. generate bufr2ioda YAML files
# 2. execute bufr2ioda.x
for name in prep_gts_config.bufr2ioda.keys():
- gts_yaml = os.path.join(self.runtime_config.DATA, f"bufr_{name}_snow.yaml")
+ gts_yaml = os.path.join(self.task_config.DATA, f"bufr_{name}_snow.yaml")
logger.info(f"Generate BUFR2IODA YAML file: {gts_yaml}")
temp_yaml = parse_j2yaml(prep_gts_config.bufr2ioda[name], localconf)
save_as_yaml(temp_yaml, gts_yaml)
diff --git a/ush/python/pygfs/task/upp.py b/ush/python/pygfs/task/upp.py
index 7db50e1582c..7e42e07c64a 100644
--- a/ush/python/pygfs/task/upp.py
+++ b/ush/python/pygfs/task/upp.py
@@ -46,26 +46,27 @@ def __init__(self, config: Dict[str, Any]) -> None:
"""
super().__init__(config)
- if self.config.UPP_RUN not in self.VALID_UPP_RUN:
- raise NotImplementedError(f'{self.config.UPP_RUN} is not a valid UPP run type.\n' +
+ if self.task_config.UPP_RUN not in self.VALID_UPP_RUN:
+ raise NotImplementedError(f'{self.task_config.UPP_RUN} is not a valid UPP run type.\n' +
'Valid UPP_RUN values are:\n' +
f'{", ".join(self.VALID_UPP_RUN)}')
- valid_datetime = add_to_datetime(self.runtime_config.current_cycle, to_timedelta(f"{self.config.FORECAST_HOUR}H"))
+ valid_datetime = add_to_datetime(self.task_config.current_cycle, to_timedelta(f"{self.task_config.FORECAST_HOUR}H"))
+ # Extend task_config with localdict
localdict = AttrDict(
- {'upp_run': self.config.UPP_RUN,
- 'forecast_hour': self.config.FORECAST_HOUR,
+ {'upp_run': self.task_config.UPP_RUN,
+ 'forecast_hour': self.task_config.FORECAST_HOUR,
'valid_datetime': valid_datetime,
'atmos_filename': f"atm_{valid_datetime.strftime('%Y%m%d%H%M%S')}.nc",
'flux_filename': f"sfc_{valid_datetime.strftime('%Y%m%d%H%M%S')}.nc"
}
)
- self.task_config = AttrDict(**self.config, **self.runtime_config, **localdict)
+ self.task_config = AttrDict(**self.task_config, **localdict)
# Read the upp.yaml file for common configuration
- logger.info(f"Read the UPP configuration yaml file {self.config.UPP_CONFIG}")
- self.task_config.upp_yaml = parse_j2yaml(self.config.UPP_CONFIG, self.task_config)
+ logger.info(f"Read the UPP configuration yaml file {self.task_config.UPP_CONFIG}")
+ self.task_config.upp_yaml = parse_j2yaml(self.task_config.UPP_CONFIG, self.task_config)
logger.debug(f"upp_yaml:\n{pformat(self.task_config.upp_yaml)}")
@staticmethod
diff --git a/ush/wave_grib2_sbs.sh b/ush/wave_grib2_sbs.sh
index 431387cccd4..99f89f3f37d 100755
--- a/ush/wave_grib2_sbs.sh
+++ b/ush/wave_grib2_sbs.sh
@@ -72,7 +72,7 @@ if [[ -n ${waveMEMB} ]]; then ENSTAG=".${membTAG}${waveMEMB}" ; fi
outfile="${WAV_MOD_TAG}.${cycle}${ENSTAG}.${grdnam}.${grdres}.f${FH3}.grib2"
# Only create file if not present in COM
-if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then
+if [[ ! -s "${COMOUT_WAVE_GRID}/${outfile}.idx" ]]; then
set +x
echo ' '
@@ -83,7 +83,7 @@ if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then
set_trace
if [[ -z "${PDY}" ]] || [[ -z ${cyc} ]] || [[ -z "${cycle}" ]] || [[ -z "${EXECgfs}" ]] || \
- [[ -z "${COM_WAVE_GRID}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${gribflags}" ]] || \
+ [[ -z "${COMOUT_WAVE_GRID}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${gribflags}" ]] || \
[[ -z "${GRIDNR}" ]] || [[ -z "${MODNR}" ]] || \
[[ -z "${SENDDBN}" ]]; then
set +x
@@ -157,11 +157,11 @@ if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then
fi
if (( fhr > 0 )); then
- ${WGRIB2} gribfile -set_date "${PDY}${cyc}" -set_ftime "${fhr} hour fcst" -grib "${COM_WAVE_GRID}/${outfile}"
+ ${WGRIB2} gribfile -set_date "${PDY}${cyc}" -set_ftime "${fhr} hour fcst" -grib "${COMOUT_WAVE_GRID}/${outfile}"
err=$?
else
${WGRIB2} gribfile -set_date "${PDY}${cyc}" -set_ftime "${fhr} hour fcst" \
- -set table_1.4 1 -set table_1.2 1 -grib "${COM_WAVE_GRID}/${outfile}"
+ -set table_1.4 1 -set table_1.2 1 -grib "${COMOUT_WAVE_GRID}/${outfile}"
err=$?
fi
@@ -177,7 +177,7 @@ if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then
fi
# Create index
- ${WGRIB2} -s "${COM_WAVE_GRID}/${outfile}" > "${COM_WAVE_GRID}/${outfile}.idx"
+ ${WGRIB2} -s "${COMOUT_WAVE_GRID}/${outfile}" > "${COMOUT_WAVE_GRID}/${outfile}.idx"
# Create grib2 subgrid is this is the source grid
if [[ "${grdID}" = "${WAV_SUBGRBSRC}" ]]; then
@@ -186,14 +186,14 @@ if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then
subgrbnam=$(echo ${!subgrb} | cut -d " " -f 21)
subgrbres=$(echo ${!subgrb} | cut -d " " -f 22)
subfnam="${WAV_MOD_TAG}.${cycle}${ENSTAG}.${subgrbnam}.${subgrbres}.f${FH3}.grib2"
- ${COPYGB2} -g "${subgrbref}" -i0 -x "${COM_WAVE_GRID}/${outfile}" "${COM_WAVE_GRID}/${subfnam}"
- ${WGRIB2} -s "${COM_WAVE_GRID}/${subfnam}" > "${COM_WAVE_GRID}/${subfnam}.idx"
+ ${COPYGB2} -g "${subgrbref}" -i0 -x "${COMOUT_WAVE_GRID}/${outfile}" "${COMOUT_WAVE_GRID}/${subfnam}"
+ ${WGRIB2} -s "${COMOUT_WAVE_GRID}/${subfnam}" > "${COMOUT_WAVE_GRID}/${subfnam}.idx"
done
fi
# 1.e Save in /com
- if [[ ! -s "${COM_WAVE_GRID}/${outfile}" ]]; then
+ if [[ ! -s "${COMOUT_WAVE_GRID}/${outfile}" ]]; then
set +x
echo ' '
echo '********************************************* '
@@ -205,7 +205,7 @@ if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then
set_trace
exit 4
fi
- if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then
+ if [[ ! -s "${COMOUT_WAVE_GRID}/${outfile}.idx" ]]; then
set +x
echo ' '
echo '*************************************************** '
@@ -220,11 +220,11 @@ if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then
if [[ "${SENDDBN}" = 'YES' ]] && [[ ${outfile} != *global.0p50* ]]; then
set +x
- echo " Alerting GRIB file as ${COM_WAVE_GRID}/${outfile}"
- echo " Alerting GRIB index file as ${COM_WAVE_GRID}/${outfile}.idx"
+ echo " Alerting GRIB file as ${COMOUT_WAVE_GRID}/${outfile}"
+ echo " Alerting GRIB index file as ${COMOUT_WAVE_GRID}/${outfile}.idx"
set_trace
- "${DBNROOT}/bin/dbn_alert" MODEL "${alertName}_WAVE_GB2" "${job}" "${COM_WAVE_GRID}/${outfile}"
- "${DBNROOT}/bin/dbn_alert" MODEL "${alertName}_WAVE_GB2_WIDX" "${job}" "${COM_WAVE_GRID}/${outfile}.idx"
+ "${DBNROOT}/bin/dbn_alert" MODEL "${alertName}_WAVE_GB2" "${job}" "${COMOUT_WAVE_GRID}/${outfile}"
+ "${DBNROOT}/bin/dbn_alert" MODEL "${alertName}_WAVE_GB2_WIDX" "${job}" "${COMOUT_WAVE_GRID}/${outfile}.idx"
else
echo "${outfile} is global.0p50 or SENDDBN is NO, no alert sent"
fi
@@ -245,7 +245,7 @@ if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then
else
set +x
echo ' '
- echo " File ${COM_WAVE_GRID}/${outfile} found, skipping generation process"
+ echo " File ${COMOUT_WAVE_GRID}/${outfile} found, skipping generation process"
echo ' '
set_trace
fi
diff --git a/ush/wave_grid_interp_sbs.sh b/ush/wave_grid_interp_sbs.sh
index e6f0a1a1aae..31b7808c168 100755
--- a/ush/wave_grid_interp_sbs.sh
+++ b/ush/wave_grid_interp_sbs.sh
@@ -66,7 +66,7 @@ source "${USHgfs}/preamble.sh"
set_trace
if [[ -z "${PDY}" ]] || [[ -z "${cyc}" ]] || [[ -z "${cycle}" ]] || [[ -z "${EXECgfs}" ]] || \
- [[ -z "${COM_WAVE_PREP}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${SENDDBN}" ]] || \
+ [[ -z "${COMOUT_WAVE_PREP}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${SENDDBN}" ]] || \
[ -z "${waveGRD}" ]
then
set +x
@@ -75,7 +75,7 @@ source "${USHgfs}/preamble.sh"
echo '*** EXPORTED VARIABLES IN postprocessor NOT SET ***'
echo '***************************************************'
echo ' '
- echo "${PDY}${cyc} ${cycle} ${EXECgfs} ${COM_WAVE_PREP} ${WAV_MOD_TAG} ${SENDDBN} ${waveGRD}"
+ echo "${PDY}${cyc} ${cycle} ${EXECgfs} ${COMOUT_WAVE_PREP} ${WAV_MOD_TAG} ${SENDDBN} ${waveGRD}"
set_trace
exit 1
fi
@@ -171,9 +171,9 @@ source "${USHgfs}/preamble.sh"
# 1.c Save in /com
set +x
- echo " Saving GRID file as ${COM_WAVE_PREP}/${WAV_MOD_TAG}.out_grd.${grdID}.${PDY}${cyc}"
+ echo " Saving GRID file as ${COMOUT_WAVE_PREP}/${WAV_MOD_TAG}.out_grd.${grdID}.${PDY}${cyc}"
set_trace
- cp "${DATA}/output_${ymdh}0000/out_grd.${grdID}" "${COM_WAVE_PREP}/${WAV_MOD_TAG}.out_grd.${grdID}.${PDY}${cyc}"
+ cp "${DATA}/output_${ymdh}0000/out_grd.${grdID}" "${COMOUT_WAVE_PREP}/${WAV_MOD_TAG}.out_grd.${grdID}.${PDY}${cyc}"
# if [ "$SENDDBN" = 'YES' ]
# then
diff --git a/ush/wave_grid_moddef.sh b/ush/wave_grid_moddef.sh
index 2deb98ce9ca..1e8c44054a7 100755
--- a/ush/wave_grid_moddef.sh
+++ b/ush/wave_grid_moddef.sh
@@ -109,7 +109,7 @@ source "${USHgfs}/preamble.sh"
if [[ -f mod_def.ww3 ]]
then
- cp mod_def.ww3 "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}"
+ cp mod_def.ww3 "${COMOUT_WAVE_PREP}/${RUN}wave.mod_def.${grdID}"
mv mod_def.ww3 "../mod_def.${grdID}"
else
set +x
diff --git a/ush/wave_prnc_ice.sh b/ush/wave_prnc_ice.sh
index 5e6ba82731e..be089c30bd3 100755
--- a/ush/wave_prnc_ice.sh
+++ b/ush/wave_prnc_ice.sh
@@ -55,8 +55,8 @@ source "${USHgfs}/preamble.sh"
echo "Making ice fields."
if [[ -z "${YMDH}" ]] || [[ -z "${cycle}" ]] || \
- [[ -z "${COM_WAVE_PREP}" ]] || [[ -z "${FIXgfs}" ]] || [[ -z "${EXECgfs}" ]] || \
- [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${WAVEICE_FID}" ]] || [[ -z "${COM_OBS}" ]]; then
+ [[ -z "${COMOUT_WAVE_PREP}" ]] || [[ -z "${FIXgfs}" ]] || [[ -z "${EXECgfs}" ]] || \
+ [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${WAVEICE_FID}" ]] || [[ -z "${COMIN_OBS}" ]]; then
set +x
echo ' '
@@ -77,7 +77,7 @@ source "${USHgfs}/preamble.sh"
# 1. Get the necessary files
# 1.a Copy the ice data file
- file=${COM_OBS}/${WAVICEFILE}
+ file=${COMIN_OBS}/${WAVICEFILE}
if [ -f $file ]
then
@@ -178,9 +178,9 @@ source "${USHgfs}/preamble.sh"
fi
set +x
- echo " Saving ice.ww3 as ${COM_WAVE_PREP}/${icefile}"
+ echo " Saving ice.ww3 as ${COMOUT_WAVE_PREP}/${icefile}"
set_trace
- cp ice.ww3 "${COM_WAVE_PREP}/${icefile}"
+ cp ice.ww3 "${COMOUT_WAVE_PREP}/${icefile}"
rm -f ice.ww3
# --------------------------------------------------------------------------- #
diff --git a/ush/wave_tar.sh b/ush/wave_tar.sh
index e01ef61f154..f82849854ff 100755
--- a/ush/wave_tar.sh
+++ b/ush/wave_tar.sh
@@ -76,7 +76,7 @@ source "${USHgfs}/preamble.sh"
# 0.c Define directories and the search path.
# The tested variables should be exported by the postprocessor script.
- if [[ -z "${cycle}" ]] || [[ -z "${COM_WAVE_STATION}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || \
+ if [[ -z "${cycle}" ]] || [[ -z "${COMOUT_WAVE_STATION}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || \
[[ -z "${SENDDBN}" ]] || [[ -z "${STA_DIR}" ]]; then
set +x
echo ' '
@@ -179,10 +179,10 @@ source "${USHgfs}/preamble.sh"
set +x
echo ' '
- echo " Moving tar file ${file_name} to ${COM_WAVE_STATION} ..."
+ echo " Moving tar file ${file_name} to ${COMOUT_WAVE_STATION} ..."
set_trace
- cp "${file_name}" "${COM_WAVE_STATION}/."
+ cp "${file_name}" "${COMOUT_WAVE_STATION}/."
exit=$?
@@ -202,11 +202,11 @@ source "${USHgfs}/preamble.sh"
then
set +x
echo ' '
- echo " Alerting TAR file as ${COM_WAVE_STATION}/${file_name}"
+ echo " Alerting TAR file as ${COMOUT_WAVE_STATION}/${file_name}"
echo ' '
set_trace
"${DBNROOT}/bin/dbn_alert MODEL" "${alertName}_WAVE_TAR" "${job}" \
- "${COM_WAVE_STATION}/${file_name}"
+ "${COMOUT_WAVE_STATION}/${file_name}"
fi
# --------------------------------------------------------------------------- #
diff --git a/workflow/hosts.py b/workflow/hosts.py
index 2334a3ac356..cd0cfe00830 100644
--- a/workflow/hosts.py
+++ b/workflow/hosts.py
@@ -1,6 +1,7 @@
#!/usr/bin/env python3
import os
+import socket
from pathlib import Path
from wxflow import YAMLFile
@@ -39,10 +40,7 @@ def detect(cls):
if os.path.exists('/scratch1/NCEPDEV'):
machine = 'HERA'
elif os.path.exists('/work/noaa'):
- if os.path.exists('/apps/other'):
- machine = 'HERCULES'
- else:
- machine = 'ORION'
+ machine = socket.gethostname().split("-", 1)[0].upper()
elif os.path.exists('/lfs4/HFIP'):
machine = 'JET'
elif os.path.exists('/lfs/f1'):
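
Reviewer note: like detect_machine.sh, hosts.py drops the /apps/other probe; on the MSU systems the hostname is assumed to lead with the cluster name, so everything before the first '-' identifies the machine (hostnames below are illustrative):

    # 'hercules-login-1' -> 'HERCULES'; 'orion-login-3' -> 'ORION'
    for host in ("hercules-login-1", "orion-login-3"):
        print(host.split("-", 1)[0].upper())
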