From c49e4eee1a2ca818b3ecdcb9ea41c3f3e91d585b Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Fri, 28 Jun 2024 14:56:19 -0400 Subject: [PATCH 1/8] Revert PR 2681 (#2739) This PR: - reverts #2681 in part - keeps some changes for `RUN`. - is a hotfix - should be merged ASAP after consensus w/ @guillaumevernieres @CatherineThomas-NOAA @WalterKolczynski-NOAA --- parm/config/gefs/config.ufs | 12 +++++++++++- parm/config/gfs/config.ufs | 12 +++++++++++- 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/parm/config/gefs/config.ufs b/parm/config/gefs/config.ufs index 2da6e7a2f0..584e4769a8 100644 --- a/parm/config/gefs/config.ufs +++ b/parm/config/gefs/config.ufs @@ -294,7 +294,6 @@ if [[ "${skip_mom6}" == "false" ]]; then export cplflx=".true." model_list="${model_list}.ocean" nthreads_mom6=1 - MOM6_DIAG_MISVAL="-1e34" case "${mom6_res}" in "500") ntasks_mom6=8 @@ -307,6 +306,11 @@ if [[ "${skip_mom6}" == "false" ]]; then CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" MOM6_RESTART_SETTING='r' MOM6_RIVER_RUNOFF='False' + if [[ ${RUN} == "gfs" || "${RUN}" == "gefs" ]]; then + MOM6_DIAG_MISVAL="-1e34" + else + MOM6_DIAG_MISVAL="0.0" + fi eps_imesh="4.0e-1" MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_25L.nc" MOM6_ALLOW_LANDMASK_CHANGES='False' @@ -327,8 +331,10 @@ if [[ "${skip_mom6}" == "false" ]]; then TOPOEDITS="ufs.topo_edits_011818.nc" if [[ ${RUN} == "gfs" || "${RUN}" == "gefs" ]]; then MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" else MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" fi MOM6_ALLOW_LANDMASK_CHANGES='True' ;; @@ -346,8 +352,10 @@ if [[ "${skip_mom6}" == "false" ]]; then eps_imesh="1.0e-1" if [[ ${RUN} == "gfs" || "${RUN}" == "gefs" ]]; then MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" else MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" fi MOM6_ALLOW_LANDMASK_CHANGES='False' TOPOEDITS="" @@ -366,8 +374,10 @@ if [[ "${skip_mom6}" == "false" ]]; then eps_imesh="1.0e-1" if [[ ${RUN} == "gfs" || "${RUN}" == "gefs" ]]; then MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" else MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" fi MOM6_ALLOW_LANDMASK_CHANGES='False' TOPOEDITS="" diff --git a/parm/config/gfs/config.ufs b/parm/config/gfs/config.ufs index 5a57a27007..f630226826 100644 --- a/parm/config/gfs/config.ufs +++ b/parm/config/gfs/config.ufs @@ -401,7 +401,6 @@ if [[ "${skip_mom6}" == "false" ]]; then export cplflx=".true." 
model_list="${model_list}.ocean" nthreads_mom6=1 - MOM6_DIAG_MISVAL="-1e34" case "${mom6_res}" in "500") ntasks_mom6=8 @@ -414,6 +413,11 @@ if [[ "${skip_mom6}" == "false" ]]; then CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" MOM6_RESTART_SETTING='r' MOM6_RIVER_RUNOFF='False' + if [[ ${RUN} == "gfs" || "${RUN}" == "gefs" ]]; then + MOM6_DIAG_MISVAL="-1e34" + else + MOM6_DIAG_MISVAL="0.0" + fi eps_imesh="4.0e-1" MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_25L.nc" MOM6_ALLOW_LANDMASK_CHANGES='False' @@ -434,8 +438,10 @@ if [[ "${skip_mom6}" == "false" ]]; then TOPOEDITS="ufs.topo_edits_011818.nc" if [[ ${RUN} == "gfs" || "${RUN}" == "gefs" ]]; then MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" else MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" fi MOM6_ALLOW_LANDMASK_CHANGES='True' ;; @@ -453,8 +459,10 @@ if [[ "${skip_mom6}" == "false" ]]; then eps_imesh="1.0e-1" if [[ ${RUN} == "gfs" || "${RUN}" == "gefs" ]]; then MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" else MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" fi MOM6_ALLOW_LANDMASK_CHANGES='False' TOPOEDITS="" @@ -473,8 +481,10 @@ if [[ "${skip_mom6}" == "false" ]]; then eps_imesh="1.0e-1" if [[ ${RUN} == "gfs" || "${RUN}" == "gefs" ]]; then MOM6_DIAG_COORD_DEF_Z_FILE="interpolate_zgrid_40L.nc" + MOM6_DIAG_MISVAL="-1e34" else MOM6_DIAG_COORD_DEF_Z_FILE="oceanda_zgrid_75L.nc" + MOM6_DIAG_MISVAL="0.0" fi MOM6_ALLOW_LANDMASK_CHANGES='False' TOPOEDITS="" From de8706702ead0630beb54d868f83aa2cb23f8f79 Mon Sep 17 00:00:00 2001 From: RussTreadon-NOAA <26926959+RussTreadon-NOAA@users.noreply.github.com> Date: Mon, 1 Jul 2024 09:29:14 -0400 Subject: [PATCH 2/8] Update for JCB policies and stage DA job files with Jinja2-templates (#2700) This PR updates the `gdas.cd` hash to bring in new JCB conventions. Resolves #2699 From #2654 This PR will move much of the staging code that take place in the python initialization subroutines of the variational and ensemble DA jobs into Jinja2-templated YAML files to be passed into the wxflow file handler. Much of the staging has already been done this way, but this PR simply expands that strategy. The old Python routines that were doing this staging are now removed. This is part of a broader refactoring of the pygfs tasking. wxflow PR [#30](https://github.com/NOAA-EMC/wxflow/pull/30) is a companion to this PR. 
Co-authored-by: danholdaway Co-authored-by: DavidNew-NOAA Co-authored-by: DavidNew-NOAA <134300700+DavidNew-NOAA@users.noreply.github.com> Co-authored-by: Dan Holdaway <27729500+danholdaway@users.noreply.github.com> --- env/HERCULES.env | 10 + jobs/rocoto/aeroanlfinal.sh | 5 - jobs/rocoto/aeroanlinit.sh | 6 - jobs/rocoto/aeroanlrun.sh | 6 - jobs/rocoto/atmanlfinal.sh | 5 - jobs/rocoto/atmanlfv3inc.sh | 6 - jobs/rocoto/atmanlinit.sh | 6 - jobs/rocoto/atmanlvar.sh | 6 - jobs/rocoto/atmensanlfinal.sh | 5 - jobs/rocoto/atmensanlfv3inc.sh | 6 - jobs/rocoto/atmensanlinit.sh | 6 - jobs/rocoto/atmensanlletkf.sh | 6 - jobs/rocoto/oceanice_products.sh | 6 - jobs/rocoto/prepatmiodaobs.sh | 5 +- jobs/rocoto/prepobsaero.sh | 6 - jobs/rocoto/prepsnowobs.sh | 5 +- jobs/rocoto/snowanl.sh | 6 - jobs/rocoto/upp.sh | 12 +- parm/config/gfs/config.atmanl | 7 +- parm/config/gfs/config.atmensanl | 1 + parm/gdas/staging/atm_berror_gsibec.yaml.j2 | 8 + parm/gdas/staging/atm_lgetkf_bkg.yaml.j2 | 32 +++ parm/gdas/staging/atm_var_bkg.yaml.j2 | 14 ++ parm/gdas/staging/atm_var_fv3ens.yaml.j2 | 24 ++ scripts/exglobal_cleanup.sh | 6 +- scripts/exglobal_prep_snow_obs.py | 2 +- sorc/gdas.cd | 2 +- sorc/wxflow | 2 +- ush/detect_machine.sh | 4 +- ush/load_fv3gfs_modules.sh | 5 + ush/load_ufsda_modules.sh | 5 + ush/python/pygfs/task/aero_analysis.py | 32 +-- ush/python/pygfs/task/aero_emissions.py | 4 +- ush/python/pygfs/task/aero_prepobs.py | 18 +- ush/python/pygfs/task/analysis.py | 110 +-------- ush/python/pygfs/task/archive.py | 9 +- ush/python/pygfs/task/atm_analysis.py | 244 +++----------------- ush/python/pygfs/task/atmens_analysis.py | 60 ++--- ush/python/pygfs/task/oceanice_products.py | 39 ++-- ush/python/pygfs/task/snow_analysis.py | 22 +- ush/python/pygfs/task/upp.py | 17 +- workflow/hosts.py | 6 +- 42 files changed, 248 insertions(+), 538 deletions(-) create mode 100644 parm/gdas/staging/atm_berror_gsibec.yaml.j2 create mode 100644 parm/gdas/staging/atm_lgetkf_bkg.yaml.j2 create mode 100644 parm/gdas/staging/atm_var_bkg.yaml.j2 create mode 100644 parm/gdas/staging/atm_var_fv3ens.yaml.j2 diff --git a/env/HERCULES.env b/env/HERCULES.env index 77e57e066d..79424f8639 100755 --- a/env/HERCULES.env +++ b/env/HERCULES.env @@ -132,6 +132,16 @@ case ${step} in [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalrun} --cpus-per-task=${NTHREADS_OCNANAL}" ;; +"ocnanalecen") + + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + + nth_max=$((npe_node_max / npe_node_ocnanalecen)) + + export NTHREADS_OCNANALECEN=${nth_ocnanalecen:-${nth_max}} + [[ ${NTHREADS_OCNANALECEN} -gt ${nth_max} ]] && export NTHREADS_OCNANALECEN=${nth_max} + export APRUN_OCNANALECEN="${launcher} -n ${npe_ocnanalecen} --cpus-per-task=${NTHREADS_OCNANALECEN}" +;; "ocnanalchkpt") export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" diff --git a/jobs/rocoto/aeroanlfinal.sh b/jobs/rocoto/aeroanlfinal.sh index 16bb6887fd..39dea71810 100755 --- a/jobs/rocoto/aeroanlfinal.sh +++ b/jobs/rocoto/aeroanlfinal.sh @@ -11,11 +11,6 @@ status=$? 
export job="aeroanlfinal" export jobid="${job}.$$" -############################################################### -# setup python path for workflow utilities and tasks -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" -export PYTHONPATH ############################################################### # Execute the JJOB "${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE" diff --git a/jobs/rocoto/aeroanlinit.sh b/jobs/rocoto/aeroanlinit.sh index 9aaf255782..7a1cf885c1 100755 --- a/jobs/rocoto/aeroanlinit.sh +++ b/jobs/rocoto/aeroanlinit.sh @@ -11,12 +11,6 @@ status=$? export job="aeroanlinit" export jobid="${job}.$$" -############################################################### -# setup python path for workflow utilities and tasks -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" -export PYTHONPATH - ############################################################### # Execute the JJOB "${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE" diff --git a/jobs/rocoto/aeroanlrun.sh b/jobs/rocoto/aeroanlrun.sh index bcd86e3fbf..529bb2d7d1 100755 --- a/jobs/rocoto/aeroanlrun.sh +++ b/jobs/rocoto/aeroanlrun.sh @@ -11,12 +11,6 @@ status=$? export job="aeroanlrun" export jobid="${job}.$$" -############################################################### -# setup python path for workflow utilities and tasks -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" -export PYTHONPATH - ############################################################### # Execute the JJOB "${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_RUN" diff --git a/jobs/rocoto/atmanlfinal.sh b/jobs/rocoto/atmanlfinal.sh index 3d3c3ba9e6..a12894ed1e 100755 --- a/jobs/rocoto/atmanlfinal.sh +++ b/jobs/rocoto/atmanlfinal.sh @@ -11,11 +11,6 @@ status=$? export job="atmanlfinal" export jobid="${job}.$$" -############################################################### -# setup python path for workflow utilities and tasks -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" -export PYTHONPATH ############################################################### # Execute the JJOB "${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE" diff --git a/jobs/rocoto/atmanlfv3inc.sh b/jobs/rocoto/atmanlfv3inc.sh index effc18cee5..5261c15f09 100755 --- a/jobs/rocoto/atmanlfv3inc.sh +++ b/jobs/rocoto/atmanlfv3inc.sh @@ -11,12 +11,6 @@ status=$? export job="atmanlfv3inc" export jobid="${job}.$$" -############################################################### -# setup python path for workflow utilities and tasks -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" -export PYTHONPATH - ############################################################### # Execute the JJOB "${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FV3_INCREMENT" diff --git a/jobs/rocoto/atmanlinit.sh b/jobs/rocoto/atmanlinit.sh index 13c7d8710b..5329200590 100755 --- a/jobs/rocoto/atmanlinit.sh +++ b/jobs/rocoto/atmanlinit.sh @@ -11,12 +11,6 @@ status=$? 
export job="atmanlinit" export jobid="${job}.$$" -############################################################### -# setup python path for workflow utilities and tasks -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" -export PYTHONPATH - ############################################################### # Execute the JJOB "${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE" diff --git a/jobs/rocoto/atmanlvar.sh b/jobs/rocoto/atmanlvar.sh index 812e3c706a..7df7f59dd1 100755 --- a/jobs/rocoto/atmanlvar.sh +++ b/jobs/rocoto/atmanlvar.sh @@ -11,12 +11,6 @@ status=$? export job="atmanlvar" export jobid="${job}.$$" -############################################################### -# setup python path for workflow utilities and tasks -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" -export PYTHONPATH - ############################################################### # Execute the JJOB "${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_VARIATIONAL" diff --git a/jobs/rocoto/atmensanlfinal.sh b/jobs/rocoto/atmensanlfinal.sh index 5ffaa92754..fc29bdd9af 100755 --- a/jobs/rocoto/atmensanlfinal.sh +++ b/jobs/rocoto/atmensanlfinal.sh @@ -11,11 +11,6 @@ status=$? export job="atmensanlfinal" export jobid="${job}.$$" -############################################################### -# setup python path for workflow utilities and tasks -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" -export PYTHONPATH ############################################################### # Execute the JJOB "${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE" diff --git a/jobs/rocoto/atmensanlfv3inc.sh b/jobs/rocoto/atmensanlfv3inc.sh index bb44ddc3a0..7f57e8d618 100755 --- a/jobs/rocoto/atmensanlfv3inc.sh +++ b/jobs/rocoto/atmensanlfv3inc.sh @@ -11,12 +11,6 @@ status=$? export job="atmensanlfv3inc" export jobid="${job}.$$" -############################################################### -# setup python path for workflow utilities and tasks -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" -export PYTHONPATH - ############################################################### # Execute the JJOB "${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FV3_INCREMENT" diff --git a/jobs/rocoto/atmensanlinit.sh b/jobs/rocoto/atmensanlinit.sh index 2c2204548a..1cd8129df6 100755 --- a/jobs/rocoto/atmensanlinit.sh +++ b/jobs/rocoto/atmensanlinit.sh @@ -11,12 +11,6 @@ status=$? export job="atmensanlinit" export jobid="${job}.$$" -############################################################### -# setup python path for workflow utilities and tasks -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" -export PYTHONPATH - ############################################################### # Execute the JJOB "${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE" diff --git a/jobs/rocoto/atmensanlletkf.sh b/jobs/rocoto/atmensanlletkf.sh index b4a1a73a80..0ca86bfb43 100755 --- a/jobs/rocoto/atmensanlletkf.sh +++ b/jobs/rocoto/atmensanlletkf.sh @@ -11,12 +11,6 @@ status=$? 
export job="atmensanlletkf" export jobid="${job}.$$" -############################################################### -# setup python path for workflow utilities and tasks -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" -export PYTHONPATH - ############################################################### # Execute the JJOB "${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_LETKF" diff --git a/jobs/rocoto/oceanice_products.sh b/jobs/rocoto/oceanice_products.sh index eb704fb35f..2a3b617d05 100755 --- a/jobs/rocoto/oceanice_products.sh +++ b/jobs/rocoto/oceanice_products.sh @@ -12,12 +12,6 @@ source "${HOMEgfs}/ush/preamble.sh" status=$? if (( status != 0 )); then exit "${status}"; fi -############################################################### -# setup python path for workflow utilities and tasks -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" -export PYTHONPATH - export job="oceanice_products" export jobid="${job}.$$" diff --git a/jobs/rocoto/prepatmiodaobs.sh b/jobs/rocoto/prepatmiodaobs.sh index 0e69eda5c9..26629a514f 100755 --- a/jobs/rocoto/prepatmiodaobs.sh +++ b/jobs/rocoto/prepatmiodaobs.sh @@ -12,11 +12,10 @@ export job="prepatmobs" export jobid="${job}.$$" ############################################################### -# setup python path for workflow and ioda utilities -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +# setup python path for ioda utilities # shellcheck disable=SC2311 pyiodaPATH="${HOMEgfs}/sorc/gdas.cd/build/lib/python$(detect_py_ver)/" -PYTHONPATH="${pyiodaPATH}:${wxflowPATH}:${PYTHONPATH}" +PYTHONPATH="${pyiodaPATH}:${PYTHONPATH}" export PYTHONPATH ############################################################### diff --git a/jobs/rocoto/prepobsaero.sh b/jobs/rocoto/prepobsaero.sh index 89da7547e8..5d65ff8a02 100755 --- a/jobs/rocoto/prepobsaero.sh +++ b/jobs/rocoto/prepobsaero.sh @@ -11,12 +11,6 @@ status=$? export job="prepobsaero" export jobid="${job}.$$" -############################################################### -# setup python path for workflow utilities and tasks -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" -export PYTHONPATH - ############################################################### # Execute the JJOB "${HOMEgfs}/jobs/JGLOBAL_PREP_OBS_AERO" diff --git a/jobs/rocoto/prepsnowobs.sh b/jobs/rocoto/prepsnowobs.sh index cff082bab2..3f23bc16a5 100755 --- a/jobs/rocoto/prepsnowobs.sh +++ b/jobs/rocoto/prepsnowobs.sh @@ -12,12 +12,11 @@ export job="prepsnowobs" export jobid="${job}.$$" ############################################################### -# setup python path for workflow utilities and tasks -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +# setup python path for ioda utilities # shellcheck disable=SC2311 pyiodaPATH="${HOMEgfs}/sorc/gdas.cd/build/lib/python$(detect_py_ver)/" gdasappPATH="${HOMEgfs}/sorc/gdas.cd/sorc/iodaconv/src:${pyiodaPATH}" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}:${gdasappPATH}" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}:${gdasappPATH}" export PYTHONPATH ############################################################### diff --git a/jobs/rocoto/snowanl.sh b/jobs/rocoto/snowanl.sh index 627dd860f4..97df7a46c7 100755 --- a/jobs/rocoto/snowanl.sh +++ b/jobs/rocoto/snowanl.sh @@ -11,12 +11,6 @@ status=$? 
export job="snowanl" export jobid="${job}.$$" -############################################################### -# setup python path for workflow utilities and tasks -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" -export PYTHONPATH - ############################################################### # Execute the JJOB "${HOMEgfs}/jobs/JGLOBAL_SNOW_ANALYSIS" diff --git a/jobs/rocoto/upp.sh b/jobs/rocoto/upp.sh index da0180472d..c3f128ab02 100755 --- a/jobs/rocoto/upp.sh +++ b/jobs/rocoto/upp.sh @@ -29,18 +29,18 @@ if [[ "${MACHINE_ID}" = "wcoss2" ]]; then module load python/3.8.6 module load crtm/2.4.0 # TODO: This is only needed when UPP_RUN=goes. Is there a better way to handle this? set_trace + + # Add wxflow to PYTHONPATH + wxflowPATH="${HOMEgfs}/ush/python" + PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/ush:${wxflowPATH}" + export PYTHONPATH + else . "${HOMEgfs}/ush/load_fv3gfs_modules.sh" status=$? if (( status != 0 )); then exit "${status}"; fi fi -############################################################### -# setup python path for workflow utilities and tasks -wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" -export PYTHONPATH - export job="upp" export jobid="${job}.$$" diff --git a/parm/config/gfs/config.atmanl b/parm/config/gfs/config.atmanl index 7879b8b683..9a06088ecc 100644 --- a/parm/config/gfs/config.atmanl +++ b/parm/config/gfs/config.atmanl @@ -15,14 +15,17 @@ export INTERP_METHOD='barycentric' if [[ ${DOHYBVAR} = "YES" ]]; then # shellcheck disable=SC2153 export CASE_ANL=${CASE_ENS} - export BERROR_YAML="background_error_hybrid_${STATICB_TYPE}_${LOCALIZATION_TYPE}" + export BERROR_YAML="atmosphere_background_error_hybrid_${STATICB_TYPE}_${LOCALIZATION_TYPE}" else export CASE_ANL=${CASE} - export BERROR_YAML="background_error_static_${STATICB_TYPE}" + export BERROR_YAML="atmosphere_background_error_static_${STATICB_TYPE}" fi export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2" export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" +export VAR_BKG_STAGING_YAML="${PARMgfs}/gdas/staging/atm_var_bkg.yaml.j2" +export BERROR_STAGING_YAML="${PARMgfs}/gdas/staging/atm_berror_${STATICB_TYPE}.yaml.j2" +export FV3ENS_STAGING_YAML="${PARMgfs}/gdas/staging/atm_var_fv3ens.yaml.j2" export layout_x_atmanl=@LAYOUT_X_ATMANL@ export layout_y_atmanl=@LAYOUT_Y_ATMANL@ diff --git a/parm/config/gfs/config.atmensanl b/parm/config/gfs/config.atmensanl index c03583659d..ddd3d88659 100644 --- a/parm/config/gfs/config.atmensanl +++ b/parm/config/gfs/config.atmensanl @@ -12,6 +12,7 @@ export INTERP_METHOD='barycentric' export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2" export JEDI_FIX_YAML="${PARMgfs}/gdas/atm_jedi_fix.yaml.j2" +export LGETKF_BKG_STAGING_YAML="${PARMgfs}/gdas/staging/atm_lgetkf_bkg.yaml.j2" export layout_x_atmensanl=@LAYOUT_X_ATMENSANL@ export layout_y_atmensanl=@LAYOUT_Y_ATMENSANL@ diff --git a/parm/gdas/staging/atm_berror_gsibec.yaml.j2 b/parm/gdas/staging/atm_berror_gsibec.yaml.j2 new file mode 100644 index 0000000000..e6c5e41609 --- /dev/null +++ b/parm/gdas/staging/atm_berror_gsibec.yaml.j2 @@ -0,0 +1,8 @@ +{% set fname_list = ['gfs_gsi_global.nml', 'gsi-coeffs-gfs-global.nc4'] %} + +mkdir: +- '{{ DATA }}/berror' +copy: +{% for fname in fname_list %} +- ['{{ HOMEgfs }}/fix/gdas/gsibec/{{ CASE_ANL }}/{{ fname }}', '{{ DATA }}/berror'] +{% endfor %} diff --git 
a/parm/gdas/staging/atm_lgetkf_bkg.yaml.j2 b/parm/gdas/staging/atm_lgetkf_bkg.yaml.j2 new file mode 100644 index 0000000000..eda3dad5a7 --- /dev/null +++ b/parm/gdas/staging/atm_lgetkf_bkg.yaml.j2 @@ -0,0 +1,32 @@ +{% set ftype_list = ['fv_core.res', 'fv_srf_wnd.res', 'fv_tracer.res', 'phy_data', 'sfc_data'] %} +{% set time_list = [current_cycle] %} + +mkdir: +{% for imem in range(1,NMEM_ENS+1) %} + {% set memchar = 'mem%03d' | format(imem) %} + {% set tmpl_dict = ({ '${ROTDIR}': ROTDIR, + '${RUN}': RUN, + '${YMD}': current_cycle | to_YMD, + '${HH}': current_cycle | strftime('%H'), + '${MEMDIR}': memchar }) %} +- '{{ DATA }}/bkg/{{ memchar }}' +- '{{ DATA }}/anl/{{ memchar }}' +- '{{ COM_ATMOS_ANALYSIS_TMPL | replace_tmpl(tmpl_dict) }}' +{% endfor %} +copy: +{% for time in time_list %} + {% for imem in range(1,NMEM_ENS+1) %} + {% set memchar = 'mem%03d' | format(imem) %} + {% set tmpl_dict = ({ '${ROTDIR}': ROTDIR, + '${RUN}': 'enkfgdas', + '${YMD}': previous_cycle | to_YMD, + '${HH}': previous_cycle | strftime('%H'), + '${MEMDIR}': memchar }) %} +- ['{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ time | to_fv3time }}.coupler.res', '{{ DATA }}/bkg/{{ memchar }}/'] + {% for ftype in ftype_list %} + {% for itile in range(1,7) %} +- ['{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ time | to_fv3time }}.{{ ftype }}.tile{{ itile }}.nc', '{{ DATA }}/bkg/{{ memchar }}/'] + {% endfor %} + {% endfor %} + {% endfor %} +{% endfor %} diff --git a/parm/gdas/staging/atm_var_bkg.yaml.j2 b/parm/gdas/staging/atm_var_bkg.yaml.j2 new file mode 100644 index 0000000000..37af833649 --- /dev/null +++ b/parm/gdas/staging/atm_var_bkg.yaml.j2 @@ -0,0 +1,14 @@ +{% set ftype_list = ['fv_core.res', 'fv_srf_wnd.res', 'fv_tracer.res', 'phy_data', 'sfc_data'] %} +{% set time_list = [current_cycle] %} + +mkdir: +- '{{ DATA }}/bkg' +copy: +{% for time in time_list %} +- ['{{ COM_ATMOS_RESTART_PREV }}/{{ time | to_fv3time }}.coupler.res', '{{ DATA }}/bkg/'] + {% for ftype in ftype_list %} + {% for itile in range(1,ntiles+1) %} +- ['{{ COM_ATMOS_RESTART_PREV }}/{{ time | to_fv3time }}.{{ ftype }}.tile{{ itile }}.nc', '{{ DATA }}/bkg/'] + {% endfor %} + {% endfor %} +{% endfor %} diff --git a/parm/gdas/staging/atm_var_fv3ens.yaml.j2 b/parm/gdas/staging/atm_var_fv3ens.yaml.j2 new file mode 100644 index 0000000000..e499c86d57 --- /dev/null +++ b/parm/gdas/staging/atm_var_fv3ens.yaml.j2 @@ -0,0 +1,24 @@ +{% set ftype_list = ['fv_core.res', 'fv_srf_wnd.res', 'fv_tracer.res', 'phy_data', 'sfc_data'] %} +{% set time_list = [current_cycle] %} + +mkdir: +{% for imem in range(1,NMEM_ENS+1) %} +- '{{ DATA }}/ens/{{ 'mem%03d' | format(imem) }}' +{% endfor %} +copy: +{% for time in time_list %} + {% for imem in range(1,NMEM_ENS+1) %} + {% set memchar = 'mem%03d' | format(imem) %} + {% set tmpl_dict = ({ '${ROTDIR}': ROTDIR, + '${RUN}': 'enkfgdas', + '${YMD}': previous_cycle | to_YMD, + '${HH}': previous_cycle | strftime('%H'), + '${MEMDIR}': memchar }) %} +- ['{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ time | to_fv3time }}.coupler.res', '{{ DATA }}/ens/{{ memchar }}/'] + {% for ftype in ftype_list %} + {% for itile in range(1,ntiles+1) %} +- ['{{ COM_ATMOS_RESTART_TMPL | replace_tmpl(tmpl_dict) }}/{{ time | to_fv3time }}.{{ ftype }}.tile{{ itile }}.nc', '{{ DATA }}/ens/{{ memchar }}/'] + {% endfor %} + {% endfor %} + {% endfor %} +{% endfor %} diff --git a/scripts/exglobal_cleanup.sh b/scripts/exglobal_cleanup.sh index 1150ca6d1d..dcf1baef31 100755 --- a/scripts/exglobal_cleanup.sh +++ 
b/scripts/exglobal_cleanup.sh @@ -14,13 +14,13 @@ rm -rf "${DATAROOT}/${RUN}efcs"*"${PDY:-}${cyc}" # Search and delete files/directories from DATAROOT/ older than ${purge_every_days} days # purge_every_days should be a positive integer -purge_every_days=3 +#purge_every_days=3 # Find and delete files older than ${purge_every_days} days -find "${DATAROOT}/"* -type f -mtime "+${purge_every_days}" -exec rm -f {} \; +#find "${DATAROOT}/"* -type f -mtime "+${purge_every_days}" -exec rm -f {} \; # Find and delete directories older than ${purge_every_days} days -find "${DATAROOT}/"* -type d -mtime "+${purge_every_days}" -exec rm -rf {} \; +#find "${DATAROOT}/"* -type d -mtime "+${purge_every_days}" -exec rm -rf {} \; echo "Cleanup ${DATAROOT} completed!" ############################################################### diff --git a/scripts/exglobal_prep_snow_obs.py b/scripts/exglobal_prep_snow_obs.py index 5107d9c935..d4998a7d84 100755 --- a/scripts/exglobal_prep_snow_obs.py +++ b/scripts/exglobal_prep_snow_obs.py @@ -21,5 +21,5 @@ # Instantiate the snow prepare task SnowAnl = SnowAnalysis(config) SnowAnl.prepare_GTS() - if f"{ SnowAnl.runtime_config.cyc }" == '18': + if f"{ SnowAnl.task_config.cyc }" == '18': SnowAnl.prepare_IMS() diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 368c9c5db9..e3644a98c3 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 368c9c5db9b5ea62e72937b6d1b0f753adb9be40 +Subproject commit e3644a98c362d7321f9e3081a4e55947885ed2bf diff --git a/sorc/wxflow b/sorc/wxflow index 8406beeea4..5dad7dd61c 160000 --- a/sorc/wxflow +++ b/sorc/wxflow @@ -1 +1 @@ -Subproject commit 8406beeea410118cdfbd8300895b2b2878eadba6 +Subproject commit 5dad7dd61cebd9b3f2b163b3b06bb75eae1860a9 diff --git a/ush/detect_machine.sh b/ush/detect_machine.sh index 683ee0db7f..cfd0fa97e2 100755 --- a/ush/detect_machine.sh +++ b/ush/detect_machine.sh @@ -75,8 +75,8 @@ elif [[ -d /scratch1 ]]; then MACHINE_ID=hera elif [[ -d /work ]]; then # We are on MSU Orion or Hercules - if [[ -d /apps/other ]]; then - # We are on Hercules + mount=$(findmnt -n -o SOURCE /home) + if [[ ${mount} =~ "hercules" ]]; then MACHINE_ID=hercules else MACHINE_ID=orion diff --git a/ush/load_fv3gfs_modules.sh b/ush/load_fv3gfs_modules.sh index ae0e381db4..5f6afb7e35 100755 --- a/ush/load_fv3gfs_modules.sh +++ b/ush/load_fv3gfs_modules.sh @@ -30,6 +30,11 @@ esac module list +# Add wxflow to PYTHONPATH +wxflowPATH="${HOMEgfs}/ush/python" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/ush:${wxflowPATH}" +export PYTHONPATH + # Restore stack soft limit: ulimit -S -s "${ulimit_s}" unset ulimit_s diff --git a/ush/load_ufsda_modules.sh b/ush/load_ufsda_modules.sh index d7aa08e1ae..8117d3f359 100755 --- a/ush/load_ufsda_modules.sh +++ b/ush/load_ufsda_modules.sh @@ -51,6 +51,11 @@ esac module list pip list +# Add wxflow to PYTHONPATH +wxflowPATH="${HOMEgfs}/ush/python" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${HOMEgfs}/ush:${wxflowPATH}" +export PYTHONPATH + # Restore stack soft limit: ulimit -S -s "${ulimit_s}" unset ulimit_s diff --git a/ush/python/pygfs/task/aero_analysis.py b/ush/python/pygfs/task/aero_analysis.py index 16d2735090..69a992d7d4 100644 --- a/ush/python/pygfs/task/aero_analysis.py +++ b/ush/python/pygfs/task/aero_analysis.py @@ -29,33 +29,33 @@ class AerosolAnalysis(Analysis): def __init__(self, config): super().__init__(config) - _res = int(self.config['CASE'][1:]) - _res_anl = int(self.config['CASE_ANL'][1:]) - _window_begin = add_to_datetime(self.runtime_config.current_cycle, 
-to_timedelta(f"{self.config['assim_freq']}H") / 2) - _jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config['cyc']:02d}z.aerovar.yaml") + _res = int(self.task_config['CASE'][1:]) + _res_anl = int(self.task_config['CASE_ANL'][1:]) + _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config['assim_freq']}H") / 2) + _jedi_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config['cyc']:02d}z.aerovar.yaml") # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( { 'npx_ges': _res + 1, 'npy_ges': _res + 1, - 'npz_ges': self.config.LEVS - 1, - 'npz': self.config.LEVS - 1, + 'npz_ges': self.task_config.LEVS - 1, + 'npz': self.task_config.LEVS - 1, 'npx_anl': _res_anl + 1, 'npy_anl': _res_anl + 1, - 'npz_anl': self.config['LEVS'] - 1, + 'npz_anl': self.task_config['LEVS'] - 1, 'AERO_WINDOW_BEGIN': _window_begin, - 'AERO_WINDOW_LENGTH': f"PT{self.config['assim_freq']}H", - 'aero_bkg_fhr': map(int, str(self.config['aero_bkg_times']).split(',')), - 'OPREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN - 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN - 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", + 'AERO_WINDOW_LENGTH': f"PT{self.task_config['assim_freq']}H", + 'aero_bkg_fhr': map(int, str(self.task_config['aero_bkg_times']).split(',')), + 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.", 'jedi_yaml': _jedi_yaml, } ) - # task_config is everything that this task should need - self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict) + # Extend task_config with local_dict + self.task_config = AttrDict(**self.task_config, **local_dict) @logit(logger) def initialize(self: Analysis) -> None: @@ -157,8 +157,8 @@ def finalize(self: Analysis) -> None: archive.add(diaggzip, arcname=os.path.basename(diaggzip)) # copy full YAML from executable to ROTDIR - src = os.path.join(self.task_config['DATA'], f"{self.task_config['CDUMP']}.t{self.runtime_config['cyc']:02d}z.aerovar.yaml") - dest = os.path.join(self.task_config.COM_CHEM_ANALYSIS, f"{self.task_config['CDUMP']}.t{self.runtime_config['cyc']:02d}z.aerovar.yaml") + src = os.path.join(self.task_config['DATA'], f"{self.task_config['RUN']}.t{self.task_config['cyc']:02d}z.aerovar.yaml") + dest = os.path.join(self.task_config.COM_CHEM_ANALYSIS, f"{self.task_config['RUN']}.t{self.task_config['cyc']:02d}z.aerovar.yaml") yaml_copy = { 'mkdir': [self.task_config.COM_CHEM_ANALYSIS], 'copy': [[src, dest]] diff --git a/ush/python/pygfs/task/aero_emissions.py b/ush/python/pygfs/task/aero_emissions.py index 17d2f528e4..5f2d4c6840 100644 --- a/ush/python/pygfs/task/aero_emissions.py +++ b/ush/python/pygfs/task/aero_emissions.py @@ -42,7 +42,9 @@ def __init__(self, config: Dict[str, Any]) -> None: localdict = AttrDict( {'variable_used_repeatedly': local_variable} ) - self.task_config = AttrDict(**self.config, **self.runtime_config, **localdict) + + # Extend task_config with localdict + self.task_config = AttrDict(**self.task_config, **localdict) @staticmethod @logit(logger) diff --git a/ush/python/pygfs/task/aero_prepobs.py b/ush/python/pygfs/task/aero_prepobs.py index f2344241a9..d8396fe3ca 100644 
--- a/ush/python/pygfs/task/aero_prepobs.py +++ b/ush/python/pygfs/task/aero_prepobs.py @@ -24,23 +24,23 @@ class AerosolObsPrep(Task): def __init__(self, config: Dict[str, Any]) -> None: super().__init__(config) - _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config['assim_freq']}H") / 2) - _window_end = add_to_datetime(self.runtime_config.current_cycle, +to_timedelta(f"{self.config['assim_freq']}H") / 2) + _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config['assim_freq']}H") / 2) + _window_end = add_to_datetime(self.task_config.current_cycle, +to_timedelta(f"{self.task_config['assim_freq']}H") / 2) local_dict = AttrDict( { 'window_begin': _window_begin, 'window_end': _window_end, - 'sensors': str(self.config['SENSORS']).split(','), - 'data_dir': self.config['VIIRS_DATA_DIR'], + 'sensors': str(self.task_config['SENSORS']).split(','), + 'data_dir': self.task_config['VIIRS_DATA_DIR'], 'input_files': '', - 'OPREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z.", - 'APREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z." + 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z." } ) # task_config is everything that this task should need - self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict) + self.task_config = AttrDict(**self.task_config, **local_dict) @logit(logger) def initialize(self) -> None: @@ -64,8 +64,8 @@ def initialize(self) -> None: self.task_config.prepaero_config = self.get_obsproc_config(sensor) # generate converter YAML file - template = f"{self.runtime_config.CDUMP}.t{self.runtime_config['cyc']:02d}z.prepaero_viirs_{sensor}.yaml" - _prepaero_yaml = os.path.join(self.runtime_config.DATA, template) + template = f"{self.task_config.RUN}.t{self.task_config['cyc']:02d}z.prepaero_viirs_{sensor}.yaml" + _prepaero_yaml = os.path.join(self.task_config.DATA, template) self.task_config.prepaero_yaml.append(_prepaero_yaml) logger.debug(f"Generate PrepAeroObs YAML file: {_prepaero_yaml}") save_as_yaml(self.task_config.prepaero_config, _prepaero_yaml) diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index b668ac3980..e407cf1765 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -27,7 +27,7 @@ class Analysis(Task): def __init__(self, config: Dict[str, Any]) -> None: super().__init__(config) # Store location of GDASApp jinja2 templates - self.gdasapp_j2tmpl_dir = os.path.join(self.config.PARMgfs, 'gdas') + self.gdasapp_j2tmpl_dir = os.path.join(self.task_config.PARMgfs, 'gdas') def initialize(self) -> None: super().initialize() @@ -54,7 +54,7 @@ def get_jedi_config(self, algorithm: Optional[str] = None) -> Dict[str, Any]: ---------- algorithm (optional) : str Name of the algorithm to use in the JEDI configuration. Will override the algorithm - set in the self.config.JCB_<>_YAML file + set in the self.task_config.JCB_<>_YAML file Returns ---------- @@ -120,7 +120,7 @@ def get_obs_dict(self) -> Dict[str, Any]: basename = os.path.basename(obfile) copylist.append([os.path.join(self.task_config['COM_OBS'], basename), obfile]) obs_dict = { - 'mkdir': [os.path.join(self.runtime_config['DATA'], 'obs')], + 'mkdir': [os.path.join(self.task_config['DATA'], 'obs')], 'copy': copylist } return obs_dict @@ -161,7 +161,7 @@ def get_bias_dict(self) -> Dict[str, Any]: # TODO: Why is this specific to ATMOS? 
bias_dict = { - 'mkdir': [os.path.join(self.runtime_config.DATA, 'bc')], + 'mkdir': [os.path.join(self.task_config.DATA, 'bc')], 'copy': copylist } return bias_dict @@ -180,7 +180,7 @@ def add_fv3_increments(self, inc_file_tmpl: str, bkg_file_tmpl: str, incvars: Li List of increment variables to add to the background """ - for itile in range(1, self.config.ntiles + 1): + for itile in range(1, self.task_config.ntiles + 1): inc_path = inc_file_tmpl.format(tilenum=itile) bkg_path = bkg_file_tmpl.format(tilenum=itile) with Dataset(inc_path, mode='r') as incfile, Dataset(bkg_path, mode='a') as rstfile: @@ -194,44 +194,6 @@ def add_fv3_increments(self, inc_file_tmpl: str, bkg_file_tmpl: str, incvars: Li except (AttributeError, RuntimeError): pass # checksum is missing, move on - @logit(logger) - def get_bkg_dict(self, task_config: Dict[str, Any]) -> Dict[str, List[str]]: - """Compile a dictionary of model background files to copy - - This method is a placeholder for now... will be possibly made generic at a later date - - Parameters - ---------- - task_config: Dict - a dictionary containing all of the configuration needed for the task - - Returns - ---------- - bkg_dict: Dict - a dictionary containing the list of model background files to copy for FileHandler - """ - bkg_dict = {'foo': 'bar'} - return bkg_dict - - @logit(logger) - def get_berror_dict(self, config: Dict[str, Any]) -> Dict[str, List[str]]: - """Compile a dictionary of background error files to copy - - This method is a placeholder for now... will be possibly made generic at a later date - - Parameters - ---------- - config: Dict - a dictionary containing all of the configuration needed - - Returns - ---------- - berror_dict: Dict - a dictionary containing the list of background error files to copy for FileHandler - """ - berror_dict = {'foo': 'bar'} - return berror_dict - @logit(logger) def link_jediexe(self) -> None: """Compile a dictionary of background error files to copy @@ -258,68 +220,6 @@ def link_jediexe(self) -> None: return exe_dest - @staticmethod - @logit(logger) - def get_fv3ens_dict(config: Dict[str, Any]) -> Dict[str, Any]: - """Compile a dictionary of ensemble member restarts to copy - - This method constructs a dictionary of ensemble FV3 restart files (coupler, core, tracer) - that are needed for global atmens DA and returns said dictionary for use by the FileHandler class. 
- - Parameters - ---------- - config: Dict - a dictionary containing all of the configuration needed - - Returns - ---------- - ens_dict: Dict - a dictionary containing the list of ensemble member restart files to copy for FileHandler - """ - # NOTE for now this is FV3 restart files and just assumed to be fh006 - - # define template - template_res = config.COM_ATMOS_RESTART_TMPL - prev_cycle = config.previous_cycle - tmpl_res_dict = { - 'ROTDIR': config.ROTDIR, - 'RUN': config.RUN, - 'YMD': to_YMD(prev_cycle), - 'HH': prev_cycle.strftime('%H'), - 'MEMDIR': None - } - - # construct ensemble member file list - dirlist = [] - enslist = [] - for imem in range(1, config.NMEM_ENS + 1): - memchar = f"mem{imem:03d}" - - # create directory path for ensemble member restart - dirlist.append(os.path.join(config.DATA, config.dirname, f'mem{imem:03d}')) - - # get FV3 restart files, this will be a lot simpler when using history files - tmpl_res_dict['MEMDIR'] = memchar - rst_dir = Template.substitute_structure(template_res, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_res_dict.get) - run_dir = os.path.join(config.DATA, config.dirname, memchar) - - # atmens DA needs coupler - basename = f'{to_fv3time(config.current_cycle)}.coupler.res' - enslist.append([os.path.join(rst_dir, basename), os.path.join(config.DATA, config.dirname, memchar, basename)]) - - # atmens DA needs core, srf_wnd, tracer, phy_data, sfc_data - for ftype in ['fv_core.res', 'fv_srf_wnd.res', 'fv_tracer.res', 'phy_data', 'sfc_data']: - template = f'{to_fv3time(config.current_cycle)}.{ftype}.tile{{tilenum}}.nc' - for itile in range(1, config.ntiles + 1): - basename = template.format(tilenum=itile) - enslist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) - - ens_dict = { - 'mkdir': dirlist, - 'copy': enslist, - } - return ens_dict - @staticmethod @logit(logger) def tgz_diags(statfile: str, diagdir: str) -> None: diff --git a/ush/python/pygfs/task/archive.py b/ush/python/pygfs/task/archive.py index d0722552e1..953a856192 100644 --- a/ush/python/pygfs/task/archive.py +++ b/ush/python/pygfs/task/archive.py @@ -35,12 +35,13 @@ def __init__(self, config: Dict[str, Any]) -> None: """ super().__init__(config) - rotdir = self.config.ROTDIR + os.sep + rotdir = self.task_config.ROTDIR + os.sep # Find all absolute paths in the environment and get their relative paths from ${ROTDIR} path_dict = self._gen_relative_paths(rotdir) - self.task_config = AttrDict(**self.config, **self.runtime_config, **path_dict) + # Extend task_config with path_dict + self.task_config = AttrDict(**self.task_config, **path_dict) @logit(logger) def configure(self, arch_dict: Dict[str, Any]) -> (Dict[str, Any], List[Dict[str, Any]]): @@ -297,7 +298,7 @@ def _create_tarball(target: str, fileset: List) -> None: @logit(logger) def _gen_relative_paths(self, root_path: str) -> Dict: - """Generate a dict of paths in self.config relative to root_path + """Generate a dict of paths in self.task_config relative to root_path Parameters ---------- @@ -314,7 +315,7 @@ def _gen_relative_paths(self, root_path: str) -> Dict: """ rel_path_dict = {} - for key, value in self.config.items(): + for key, value in self.task_config.items(): if isinstance(value, str): if root_path in value: rel_path = value.replace(root_path, "") diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 95545c57a4..4e9d37335c 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -28,35 +28,35 @@ class 
AtmAnalysis(Analysis): def __init__(self, config): super().__init__(config) - _res = int(self.config.CASE[1:]) - _res_anl = int(self.config.CASE_ANL[1:]) - _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config.assim_freq}H") / 2) - _jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmvar.yaml") + _res = int(self.task_config.CASE[1:]) + _res_anl = int(self.task_config.CASE_ANL[1:]) + _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config.assim_freq}H") / 2) + _jedi_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmvar.yaml") # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( { 'npx_ges': _res + 1, 'npy_ges': _res + 1, - 'npz_ges': self.config.LEVS - 1, - 'npz': self.config.LEVS - 1, + 'npz_ges': self.task_config.LEVS - 1, + 'npz': self.task_config.LEVS - 1, 'npx_anl': _res_anl + 1, 'npy_anl': _res_anl + 1, - 'npz_anl': self.config.LEVS - 1, + 'npz_anl': self.task_config.LEVS - 1, 'ATM_WINDOW_BEGIN': _window_begin, - 'ATM_WINDOW_LENGTH': f"PT{self.config.assim_freq}H", - 'OPREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN - 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN - 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", + 'ATM_WINDOW_LENGTH': f"PT{self.task_config.assim_freq}H", + 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.", 'jedi_yaml': _jedi_yaml, - 'atm_obsdatain_path': f"{self.runtime_config.DATA}/obs/", - 'atm_obsdataout_path': f"{self.runtime_config.DATA}/diags/", + 'atm_obsdatain_path': f"{self.task_config.DATA}/obs/", + 'atm_obsdataout_path': f"{self.task_config.DATA}/diags/", 'BKG_TSTEP': "PT1H" # Placeholder for 4D applications } ) - # task_config is everything that this task should need - self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict) + # Extend task_config with local_dict + self.task_config = AttrDict(**self.task_config, **local_dict) @logit(logger) def initialize(self: Analysis) -> None: @@ -85,22 +85,22 @@ def initialize(self: Analysis) -> None: # stage static background error files, otherwise it will assume ID matrix logger.info(f"Stage files for STATICB_TYPE {self.task_config.STATICB_TYPE}") - FileHandler(self.get_berror_dict(self.task_config)).sync() + if self.task_config.STATICB_TYPE != 'identity': + berror_staging_dict = parse_j2yaml(self.task_config.BERROR_STAGING_YAML, self.task_config) + else: + berror_staging_dict = {} + FileHandler(berror_staging_dict).sync() # stage ensemble files for use in hybrid background error if self.task_config.DOHYBVAR: logger.debug(f"Stage ensemble files for DOHYBVAR {self.task_config.DOHYBVAR}") - localconf = AttrDict() - keys = ['COM_ATMOS_RESTART_TMPL', 'previous_cycle', 'ROTDIR', 'RUN', - 'NMEM_ENS', 'DATA', 'current_cycle', 'ntiles'] - for key in keys: - localconf[key] = self.task_config[key] - localconf.RUN = 'enkfgdas' - localconf.dirname = 'ens' - FileHandler(self.get_fv3ens_dict(localconf)).sync() + fv3ens_staging_dict = parse_j2yaml(self.task_config.FV3ENS_STAGING_YAML, self.task_config) + FileHandler(fv3ens_staging_dict).sync() # stage backgrounds - 
FileHandler(self.get_bkg_dict(AttrDict(self.task_config))).sync() + logger.info(f"Staging background files from {self.task_config.VAR_BKG_STAGING_YAML}") + bkg_staging_dict = parse_j2yaml(self.task_config.VAR_BKG_STAGING_YAML, self.task_config) + FileHandler(bkg_staging_dict).sync() # generate variational YAML file logger.debug(f"Generate variational YAML file: {self.task_config.jedi_yaml}") @@ -140,7 +140,7 @@ def variational(self: Analysis) -> None: @logit(logger) def init_fv3_increment(self: Analysis) -> None: # Setup JEDI YAML file - self.task_config.jedi_yaml = os.path.join(self.runtime_config.DATA, + self.task_config.jedi_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.JCB_ALGO}.yaml") save_as_yaml(self.get_jedi_config(self.task_config.JCB_ALGO), self.task_config.jedi_yaml) @@ -198,8 +198,8 @@ def finalize(self: Analysis) -> None: # copy full YAML from executable to ROTDIR logger.info(f"Copying {self.task_config.jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS}") - src = os.path.join(self.task_config.DATA, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmvar.yaml") - dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmvar.yaml") + src = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmvar.yaml") + dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmvar.yaml") logger.debug(f"Copying {src} to {dest}") yaml_copy = { 'mkdir': [self.task_config.COM_ATMOS_ANALYSIS], @@ -244,7 +244,7 @@ def finalize(self: Analysis) -> None: cdate = to_fv3time(self.task_config.current_cycle) cdate_inc = cdate.replace('.', '_') src = os.path.join(self.task_config.DATA, 'anl', f"atminc.{cdate_inc}z.nc4") - dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f'{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atminc.nc') + dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f'{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.nc') logger.debug(f"Copying {src} to {dest}") inc_copy = { 'copy': [[src, dest]] @@ -253,189 +253,3 @@ def finalize(self: Analysis) -> None: def clean(self): super().clean() - - @logit(logger) - def get_bkg_dict(self, task_config: Dict[str, Any]) -> Dict[str, List[str]]: - """Compile a dictionary of model background files to copy - - This method constructs a dictionary of FV3 restart files (coupler, core, tracer) - that are needed for global atm DA and returns said dictionary for use by the FileHandler class. - - Parameters - ---------- - task_config: Dict - a dictionary containing all of the configuration needed for the task - - Returns - ---------- - bkg_dict: Dict - a dictionary containing the list of model background files to copy for FileHandler - """ - # NOTE for now this is FV3 restart files and just assumed to be fh006 - - # get FV3 restart files, this will be a lot simpler when using history files - rst_dir = os.path.join(task_config.COM_ATMOS_RESTART_PREV) # for now, option later? 
- run_dir = os.path.join(task_config.DATA, 'bkg') - - # Start accumulating list of background files to copy - bkglist = [] - - # atm DA needs coupler - basename = f'{to_fv3time(task_config.current_cycle)}.coupler.res' - bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) - - # atm DA needs core, srf_wnd, tracer, phy_data, sfc_data - for ftype in ['core', 'srf_wnd', 'tracer']: - template = f'{to_fv3time(self.task_config.current_cycle)}.fv_{ftype}.res.tile{{tilenum}}.nc' - for itile in range(1, task_config.ntiles + 1): - basename = template.format(tilenum=itile) - bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) - - for ftype in ['phy_data', 'sfc_data']: - template = f'{to_fv3time(self.task_config.current_cycle)}.{ftype}.tile{{tilenum}}.nc' - for itile in range(1, task_config.ntiles + 1): - basename = template.format(tilenum=itile) - bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) - - bkg_dict = { - 'mkdir': [run_dir], - 'copy': bkglist, - } - return bkg_dict - - @logit(logger) - def get_berror_dict(self, config: Dict[str, Any]) -> Dict[str, List[str]]: - """Compile a dictionary of background error files to copy - - This method will construct a dictionary of either bump of gsibec background - error files for global atm DA and return said dictionary for use by the - FileHandler class. - - Parameters - ---------- - config: Dict - a dictionary containing all of the configuration needed - - Returns - ---------- - berror_dict: Dict - a dictionary containing the list of atm background error files to copy for FileHandler - """ - SUPPORTED_BERROR_STATIC_MAP = {'identity': self._get_berror_dict_identity, - 'bump': self._get_berror_dict_bump, - 'gsibec': self._get_berror_dict_gsibec} - - try: - berror_dict = SUPPORTED_BERROR_STATIC_MAP[config.STATICB_TYPE](config) - except KeyError: - raise KeyError(f"{config.STATICB_TYPE} is not a supported background error type.\n" + - f"Currently supported background error types are:\n" + - f'{" | ".join(SUPPORTED_BERROR_STATIC_MAP.keys())}') - - return berror_dict - - @staticmethod - @logit(logger) - def _get_berror_dict_identity(config: Dict[str, Any]) -> Dict[str, List[str]]: - """Identity BE does not need any files for staging. - - This is a private method and should not be accessed directly. - - Parameters - ---------- - config: Dict - a dictionary containing all of the configuration needed - Returns - ---------- - berror_dict: Dict - Empty dictionary [identity BE needs not files to stage] - """ - logger.info(f"Identity background error does not use staged files. Return empty dictionary") - return {} - - @staticmethod - @logit(logger) - def _get_berror_dict_bump(config: Dict[str, Any]) -> Dict[str, List[str]]: - """Compile a dictionary of atm bump background error files to copy - - This method will construct a dictionary of atm bump background error - files for global atm DA and return said dictionary to the parent - - This is a private method and should not be accessed directly. - - Parameters - ---------- - config: Dict - a dictionary containing all of the configuration needed - - Returns - ---------- - berror_dict: Dict - a dictionary of atm bump background error files to copy for FileHandler - """ - # BUMP atm static-B needs nicas, cor_rh, cor_rv and stddev files. 
- b_dir = config.BERROR_DATA_DIR - b_datestr = to_fv3time(config.BERROR_DATE) - berror_list = [] - for ftype in ['cor_rh', 'cor_rv', 'stddev']: - coupler = f'{b_datestr}.{ftype}.coupler.res' - berror_list.append([ - os.path.join(b_dir, coupler), os.path.join(config.DATA, 'berror', coupler) - ]) - - template = '{b_datestr}.{ftype}.fv_tracer.res.tile{{tilenum}}.nc' - for itile in range(1, config.ntiles + 1): - tracer = template.format(tilenum=itile) - berror_list.append([ - os.path.join(b_dir, tracer), os.path.join(config.DATA, 'berror', tracer) - ]) - - nproc = config.ntiles * config.layout_x * config.layout_y - for nn in range(1, nproc + 1): - berror_list.append([ - os.path.join(b_dir, f'nicas_aero_nicas_local_{nproc:06}-{nn:06}.nc'), - os.path.join(config.DATA, 'berror', f'nicas_aero_nicas_local_{nproc:06}-{nn:06}.nc') - ]) - - # create dictionary of background error files to stage - berror_dict = { - 'mkdir': [os.path.join(config.DATA, 'berror')], - 'copy': berror_list, - } - return berror_dict - - @staticmethod - @logit(logger) - def _get_berror_dict_gsibec(config: Dict[str, Any]) -> Dict[str, List[str]]: - """Compile a dictionary of atm gsibec background error files to copy - - This method will construct a dictionary of atm gsibec background error - files for global atm DA and return said dictionary to the parent - - This is a private method and should not be accessed directly. - - Parameters - ---------- - config: Dict - a dictionary containing all of the configuration needed - - Returns - ---------- - berror_dict: Dict - a dictionary of atm gsibec background error files to copy for FileHandler - """ - # GSI atm static-B needs namelist and coefficient files. - b_dir = os.path.join(config.HOMEgfs, 'fix', 'gdas', 'gsibec', config.CASE_ANL) - berror_list = [] - for ftype in ['gfs_gsi_global.nml', 'gsi-coeffs-gfs-global.nc4']: - berror_list.append([ - os.path.join(b_dir, ftype), - os.path.join(config.DATA, 'berror', ftype) - ]) - - # create dictionary of background error files to stage - berror_dict = { - 'mkdir': [os.path.join(config.DATA, 'berror')], - 'copy': berror_list, - } - return berror_dict diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index 37ac613736..bd5112050e 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -29,22 +29,22 @@ class AtmEnsAnalysis(Analysis): def __init__(self, config): super().__init__(config) - _res = int(self.config.CASE_ENS[1:]) - _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config.assim_freq}H") / 2) - _jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmens.yaml") + _res = int(self.task_config.CASE_ENS[1:]) + _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config.assim_freq}H") / 2) + _jedi_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmens.yaml") # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( { 'npx_ges': _res + 1, 'npy_ges': _res + 1, - 'npz_ges': self.config.LEVS - 1, - 'npz': self.config.LEVS - 1, + 'npz_ges': self.task_config.LEVS - 1, + 'npz': self.task_config.LEVS - 1, 'ATM_WINDOW_BEGIN': _window_begin, - 'ATM_WINDOW_LENGTH': f"PT{self.config.assim_freq}H", - 'OPREFIX': f"{self.config.EUPD_CYC}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN - 'APREFIX': 
f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN - 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", + 'ATM_WINDOW_LENGTH': f"PT{self.task_config.assim_freq}H", + 'OPREFIX': f"{self.task_config.EUPD_CYC}.t{self.task_config.cyc:02d}z.", + 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + 'GPREFIX': f"gdas.t{self.task_config.previous_cycle.hour:02d}z.", 'jedi_yaml': _jedi_yaml, 'atm_obsdatain_path': f"./obs/", 'atm_obsdataout_path': f"./diags/", @@ -52,8 +52,8 @@ def __init__(self, config): } ) - # task_config is everything that this task should need - self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict) + # Extend task_config with local_dict + self.task_config = AttrDict(**self.task_config, **local_dict) @logit(logger) def initialize(self: Analysis) -> None: @@ -77,27 +77,6 @@ def initialize(self: Analysis) -> None: """ super().initialize() - # Make member directories in DATA for background and in DATA and ROTDIR for analysis files - # create template dictionary for output member analysis directories - template_inc = self.task_config.COM_ATMOS_ANALYSIS_TMPL - tmpl_inc_dict = { - 'ROTDIR': self.task_config.ROTDIR, - 'RUN': self.task_config.RUN, - 'YMD': to_YMD(self.task_config.current_cycle), - 'HH': self.task_config.current_cycle.strftime('%H') - } - dirlist = [] - for imem in range(1, self.task_config.NMEM_ENS + 1): - dirlist.append(os.path.join(self.task_config.DATA, 'bkg', f'mem{imem:03d}')) - dirlist.append(os.path.join(self.task_config.DATA, 'anl', f'mem{imem:03d}')) - - # create output directory path for member analysis - tmpl_inc_dict['MEMDIR'] = f"mem{imem:03d}" - incdir = Template.substitute_structure(template_inc, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_inc_dict.get) - dirlist.append(incdir) - - FileHandler({'mkdir': dirlist}).sync() - # stage CRTM fix files logger.info(f"Staging CRTM fix files from {self.task_config.CRTM_FIX_YAML}") crtm_fix_list = parse_j2yaml(self.task_config.CRTM_FIX_YAML, self.task_config) @@ -110,13 +89,8 @@ def initialize(self: Analysis) -> None: # stage backgrounds logger.info(f"Stage ensemble member background files") - localconf = AttrDict() - keys = ['COM_ATMOS_RESTART_TMPL', 'previous_cycle', 'ROTDIR', 'RUN', - 'NMEM_ENS', 'DATA', 'current_cycle', 'ntiles'] - for key in keys: - localconf[key] = self.task_config[key] - localconf.dirname = 'bkg' - FileHandler(self.get_fv3ens_dict(localconf)).sync() + bkg_staging_dict = parse_j2yaml(self.task_config.LGETKF_BKG_STAGING_YAML, self.task_config) + FileHandler(bkg_staging_dict).sync() # generate ensemble da YAML file logger.debug(f"Generate ensemble da YAML file: {self.task_config.jedi_yaml}") @@ -171,7 +145,7 @@ def letkf(self: Analysis) -> None: @logit(logger) def init_fv3_increment(self: Analysis) -> None: # Setup JEDI YAML file - self.task_config.jedi_yaml = os.path.join(self.runtime_config.DATA, + self.task_config.jedi_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.JCB_ALGO}.yaml") save_as_yaml(self.get_jedi_config(self.task_config.JCB_ALGO), self.task_config.jedi_yaml) @@ -235,8 +209,8 @@ def finalize(self: Analysis) -> None: # copy full YAML from executable to ROTDIR logger.info(f"Copying {self.task_config.jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS_ENS}") - src = os.path.join(self.task_config.DATA, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmens.yaml") - dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS_ENS, 
f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmens.yaml") + src = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmens.yaml") + dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS_ENS, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atmens.yaml") logger.debug(f"Copying {src} to {dest}") yaml_copy = { 'mkdir': [self.task_config.COM_ATMOS_ANALYSIS_ENS], @@ -265,7 +239,7 @@ def finalize(self: Analysis) -> None: tmpl_inc_dict['MEMDIR'] = memchar incdir = Template.substitute_structure(template_inc, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_inc_dict.get) src = os.path.join(self.task_config.DATA, 'anl', memchar, f"atminc.{cdate_inc}z.nc4") - dest = os.path.join(incdir, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atminc.nc") + dest = os.path.join(incdir, f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.atminc.nc") # copy increment logger.debug(f"Copying {src} to {dest}") diff --git a/ush/python/pygfs/task/oceanice_products.py b/ush/python/pygfs/task/oceanice_products.py index 690aac9542..98b57ae801 100644 --- a/ush/python/pygfs/task/oceanice_products.py +++ b/ush/python/pygfs/task/oceanice_products.py @@ -49,39 +49,40 @@ def __init__(self, config: Dict[str, Any]) -> None: """ super().__init__(config) - if self.config.COMPONENT not in self.VALID_COMPONENTS: - raise NotImplementedError(f'{self.config.COMPONENT} is not a valid model component.\n' + + if self.task_config.COMPONENT not in self.VALID_COMPONENTS: + raise NotImplementedError(f'{self.task_config.COMPONENT} is not a valid model component.\n' + 'Valid model components are:\n' + f'{", ".join(self.VALID_COMPONENTS)}') - model_grid = f"mx{self.config[self.COMPONENT_RES_MAP[self.config.COMPONENT]]:03d}" + model_grid = f"mx{self.task_config[self.COMPONENT_RES_MAP[self.task_config.COMPONENT]]:03d}" - valid_datetime = add_to_datetime(self.runtime_config.current_cycle, to_timedelta(f"{self.config.FORECAST_HOUR}H")) + valid_datetime = add_to_datetime(self.task_config.current_cycle, to_timedelta(f"{self.task_config.FORECAST_HOUR}H")) - if self.config.COMPONENT == 'ice': - offset = int(self.runtime_config.current_cycle.strftime("%H")) % self.config.FHOUT_ICE_GFS + if self.task_config.COMPONENT == 'ice': + offset = int(self.task_config.current_cycle.strftime("%H")) % self.task_config.FHOUT_ICE_GFS # For CICE cases where offset is not 0, forecast_hour needs to be adjusted based on the offset. # TODO: Consider FHMIN when calculating offset. 
if offset != 0: - forecast_hour = self.config.FORECAST_HOUR - int(self.runtime_config.current_cycle.strftime("%H")) + forecast_hour = self.task_config.FORECAST_HOUR - int(self.task_config.current_cycle.strftime("%H")) # For the first forecast hour, the interval may be different from the intervals of subsequent forecast hours - if forecast_hour <= self.config.FHOUT_ICE_GFS: - interval = self.config.FHOUT_ICE_GFS - int(self.runtime_config.current_cycle.strftime("%H")) + if forecast_hour <= self.task_config.FHOUT_ICE_GFS: + interval = self.task_config.FHOUT_ICE_GFS - int(self.task_config.current_cycle.strftime("%H")) else: - interval = self.config.FHOUT_ICE_GFS + interval = self.task_config.FHOUT_ICE_GFS else: - forecast_hour = self.config.FORECAST_HOUR - interval = self.config.FHOUT_ICE_GFS - if self.config.COMPONENT == 'ocean': - forecast_hour = self.config.FORECAST_HOUR - interval = self.config.FHOUT_OCN_GFS + forecast_hour = self.task_config.FORECAST_HOUR + interval = self.task_config.FHOUT_ICE_GFS + if self.task_config.COMPONENT == 'ocean': + forecast_hour = self.task_config.FORECAST_HOUR + interval = self.task_config.FHOUT_OCN_GFS # TODO: This is a bit of a hack, but it works for now # FIXME: find a better way to provide the averaging period avg_period = f"{forecast_hour-interval:03d}-{forecast_hour:03d}" + # Extend task_config with localdict localdict = AttrDict( - {'component': self.config.COMPONENT, + {'component': self.task_config.COMPONENT, 'forecast_hour': forecast_hour, 'valid_datetime': valid_datetime, 'avg_period': avg_period, @@ -89,11 +90,11 @@ def __init__(self, config: Dict[str, Any]) -> None: 'interval': interval, 'product_grids': self.VALID_PRODUCT_GRIDS[model_grid]} ) - self.task_config = AttrDict(**self.config, **self.runtime_config, **localdict) + self.task_config = AttrDict(**self.task_config, **localdict) # Read the oceanice_products.yaml file for common configuration - logger.info(f"Read the ocean ice products configuration yaml file {self.config.OCEANICEPRODUCTS_CONFIG}") - self.task_config.oceanice_yaml = parse_j2yaml(self.config.OCEANICEPRODUCTS_CONFIG, self.task_config) + logger.info(f"Read the ocean ice products configuration yaml file {self.task_config.OCEANICEPRODUCTS_CONFIG}") + self.task_config.oceanice_yaml = parse_j2yaml(self.task_config.OCEANICEPRODUCTS_CONFIG, self.task_config) logger.debug(f"oceanice_yaml:\n{pformat(self.task_config.oceanice_yaml)}") @staticmethod diff --git a/ush/python/pygfs/task/snow_analysis.py b/ush/python/pygfs/task/snow_analysis.py index 9a5c7fcab0..9656b00a8e 100644 --- a/ush/python/pygfs/task/snow_analysis.py +++ b/ush/python/pygfs/task/snow_analysis.py @@ -32,27 +32,27 @@ class SnowAnalysis(Analysis): def __init__(self, config): super().__init__(config) - _res = int(self.config['CASE'][1:]) - _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config['assim_freq']}H") / 2) - _letkfoi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.RUN}.t{self.runtime_config['cyc']:02d}z.letkfoi.yaml") + _res = int(self.task_config['CASE'][1:]) + _window_begin = add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config['assim_freq']}H") / 2) + _letkfoi_yaml = os.path.join(self.task_config.DATA, f"{self.task_config.RUN}.t{self.task_config['cyc']:02d}z.letkfoi.yaml") # Create a local dictionary that is repeatedly used across this class local_dict = AttrDict( { 'npx_ges': _res + 1, 'npy_ges': _res + 1, - 'npz_ges': self.config.LEVS - 1, - 'npz': self.config.LEVS - 
1, + 'npz_ges': self.task_config.LEVS - 1, + 'npz': self.task_config.LEVS - 1, 'SNOW_WINDOW_BEGIN': _window_begin, - 'SNOW_WINDOW_LENGTH': f"PT{self.config['assim_freq']}H", - 'OPREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z.", - 'APREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z.", + 'SNOW_WINDOW_LENGTH': f"PT{self.task_config['assim_freq']}H", + 'OPREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", 'jedi_yaml': _letkfoi_yaml } ) - # task_config is everything that this task should need - self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict) + # Extend task_config with local_dict + self.task_config = AttrDict(**self.task_config, **local_dict) @logit(logger) def prepare_GTS(self) -> None: @@ -114,7 +114,7 @@ def _gtsbufr2iodax(exe, yaml_file): # 1. generate bufr2ioda YAML files # 2. execute bufr2ioda.x for name in prep_gts_config.bufr2ioda.keys(): - gts_yaml = os.path.join(self.runtime_config.DATA, f"bufr_{name}_snow.yaml") + gts_yaml = os.path.join(self.task_config.DATA, f"bufr_{name}_snow.yaml") logger.info(f"Generate BUFR2IODA YAML file: {gts_yaml}") temp_yaml = parse_j2yaml(prep_gts_config.bufr2ioda[name], localconf) save_as_yaml(temp_yaml, gts_yaml) diff --git a/ush/python/pygfs/task/upp.py b/ush/python/pygfs/task/upp.py index 7db50e1582..7e42e07c64 100644 --- a/ush/python/pygfs/task/upp.py +++ b/ush/python/pygfs/task/upp.py @@ -46,26 +46,27 @@ def __init__(self, config: Dict[str, Any]) -> None: """ super().__init__(config) - if self.config.UPP_RUN not in self.VALID_UPP_RUN: - raise NotImplementedError(f'{self.config.UPP_RUN} is not a valid UPP run type.\n' + + if self.task_config.UPP_RUN not in self.VALID_UPP_RUN: + raise NotImplementedError(f'{self.task_config.UPP_RUN} is not a valid UPP run type.\n' + 'Valid UPP_RUN values are:\n' + f'{", ".join(self.VALID_UPP_RUN)}') - valid_datetime = add_to_datetime(self.runtime_config.current_cycle, to_timedelta(f"{self.config.FORECAST_HOUR}H")) + valid_datetime = add_to_datetime(self.task_config.current_cycle, to_timedelta(f"{self.task_config.FORECAST_HOUR}H")) + # Extend task_config with localdict localdict = AttrDict( - {'upp_run': self.config.UPP_RUN, - 'forecast_hour': self.config.FORECAST_HOUR, + {'upp_run': self.task_config.UPP_RUN, + 'forecast_hour': self.task_config.FORECAST_HOUR, 'valid_datetime': valid_datetime, 'atmos_filename': f"atm_{valid_datetime.strftime('%Y%m%d%H%M%S')}.nc", 'flux_filename': f"sfc_{valid_datetime.strftime('%Y%m%d%H%M%S')}.nc" } ) - self.task_config = AttrDict(**self.config, **self.runtime_config, **localdict) + self.task_config = AttrDict(**self.task_config, **localdict) # Read the upp.yaml file for common configuration - logger.info(f"Read the UPP configuration yaml file {self.config.UPP_CONFIG}") - self.task_config.upp_yaml = parse_j2yaml(self.config.UPP_CONFIG, self.task_config) + logger.info(f"Read the UPP configuration yaml file {self.task_config.UPP_CONFIG}") + self.task_config.upp_yaml = parse_j2yaml(self.task_config.UPP_CONFIG, self.task_config) logger.debug(f"upp_yaml:\n{pformat(self.task_config.upp_yaml)}") @staticmethod diff --git a/workflow/hosts.py b/workflow/hosts.py index 2334a3ac35..cd0cfe0083 100644 --- a/workflow/hosts.py +++ b/workflow/hosts.py @@ -1,6 +1,7 @@ #!/usr/bin/env python3 import os +import socket from pathlib import Path from wxflow import YAMLFile @@ -39,10 +40,7 @@ def detect(cls): if os.path.exists('/scratch1/NCEPDEV'): machine = 
'HERA' elif os.path.exists('/work/noaa'): - if os.path.exists('/apps/other'): - machine = 'HERCULES' - else: - machine = 'ORION' + machine = socket.gethostname().split("-", 1)[0].upper() elif os.path.exists('/lfs4/HFIP'): machine = 'JET' elif os.path.exists('/lfs/f1'): From 11943e36ba12b3df49c51942da780698fab02d38 Mon Sep 17 00:00:00 2001 From: DavidBurrows-NCO <82525974+DavidBurrows-NCO@users.noreply.github.com> Date: Tue, 2 Jul 2024 12:58:10 -0400 Subject: [PATCH 3/8] Fix xml file setup and complete C48 ATM and S2SW runs for CI on Gaea (#2701) This PR sets up the ability on Gaea for auto generation of a clean xml file, i.e., an xml file that does not need any alterations before running rocoto. Refs #2572 Refs #2664 --- env/GAEA.env | 40 +++++++++++++++++++++++---- parm/config/gfs/config.base | 1 + parm/config/gfs/config.resources.GAEA | 5 ++++ sorc/link_workflow.sh | 2 +- workflow/hosts/gaea.yaml | 21 ++++++++------ workflow/rocoto/tasks.py | 2 ++ 6 files changed, 55 insertions(+), 16 deletions(-) diff --git a/env/GAEA.env b/env/GAEA.env index 5509a29a3f..d72be6ba22 100755 --- a/env/GAEA.env +++ b/env/GAEA.env @@ -12,24 +12,52 @@ step=$1 export launcher="srun -l --export=ALL" export mpmd_opt="--multi-prog --output=mpmd.%j.%t.out" +export OMP_STACKSIZE=2048000 +export NTHSTACK=1024000000 + ulimit -s unlimited ulimit -a -if [[ "${step}" = "fcst" ]]; then +if [[ "${step}" = "waveinit" ]]; then + + export CFP_MP="YES" + if [[ "${step}" = "waveprep" ]]; then export MP_PULSE=0 ; fi + export wavempexec=${launcher} + export wave_mpmd=${mpmd_opt} - ppn="npe_node_${step}_${RUN}" - [[ -z "${!ppn+0}" ]] && ppn="npe_node_${step}" - nprocs="npe_${step}_${RUN}" - [[ -z ${!nprocs+0} ]] && nprocs="npe_${step}" +elif [[ "${step}" = "fcst" ]]; then + if [[ "${CDUMP}" =~ "gfs" ]]; then + nprocs="npe_${step}_gfs" + ppn="npe_node_${step}_gfs" || ppn="npe_node_${step}" + else + nprocs="npe_${step}" + ppn="npe_node_${step}" + fi (( nnodes = (${!nprocs}+${!ppn}-1)/${!ppn} )) (( ntasks = nnodes*${!ppn} )) # With ESMF threading, the model wants to use the full node export APRUN_UFS="${launcher} -n ${ntasks}" unset nprocs ppn nnodes ntasks + elif [[ "${step}" = "atmos_products" ]]; then - export USE_CFP="YES" # Use MPMD for downstream product generation + export USE_CFP="YES" # Use MPMD for downstream product generation on Hera + +elif [[ "${step}" = "oceanice_products" ]]; then + + nth_max=$((npe_node_max / npe_node_oceanice_products)) + + export NTHREADS_OCNICEPOST=${nth_oceanice_products:-1} + export APRUN_OCNICEPOST="${launcher} -n 1 --cpus-per-task=${NTHREADS_OCNICEPOST}" + +elif [[ "${step}" = "fit2obs" ]]; then + + nth_max=$((npe_node_max / npe_node_fit2obs)) + + export NTHREADS_FIT2OBS=${nth_fit2obs:-1} + [[ ${NTHREADS_FIT2OBS} -gt ${nth_max} ]] && export NTHREADS_FIT2OBS=${nth_max} + export MPIRUN="${launcher} -n ${npe_fit2obs} --cpus-per-task=${NTHREADS_FIT2OBS}" fi diff --git a/parm/config/gfs/config.base b/parm/config/gfs/config.base index 9fd494a9eb..f78c7fb400 100644 --- a/parm/config/gfs/config.base +++ b/parm/config/gfs/config.base @@ -19,6 +19,7 @@ export QUEUE_SERVICE="@QUEUE_SERVICE@" export PARTITION_BATCH="@PARTITION_BATCH@" export PARTITION_SERVICE="@PARTITION_SERVICE@" export RESERVATION="@RESERVATION@" +export CLUSTERS="@CLUSTERS@" # Project to use in mass store: export HPSS_PROJECT="@HPSS_PROJECT@" diff --git a/parm/config/gfs/config.resources.GAEA b/parm/config/gfs/config.resources.GAEA index 64990b299f..3f0934edc2 100644 --- a/parm/config/gfs/config.resources.GAEA +++ 
b/parm/config/gfs/config.resources.GAEA @@ -20,3 +20,8 @@ case ${step} in ;; esac + +# shellcheck disable=SC2312 +for mem_var in $(env | grep '^memory_' | cut -d= -f1); do + unset "${mem_var}" +done diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 4973ab8d7d..8694f856b5 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -75,7 +75,7 @@ case "${machine}" in "hercules") FIX_DIR="/work/noaa/global/glopara/fix" ;; "jet") FIX_DIR="/lfs4/HFIP/hfv3gfs/glopara/git/fv3gfs/fix" ;; "s4") FIX_DIR="/data/prod/glopara/fix" ;; - "gaea") FIX_DIR="/gpfs/f5/epic/proj-shared/global/glopara/data/fix" ;; + "gaea") FIX_DIR="/gpfs/f5/ufs-ard/world-shared/global/glopara/data/fix" ;; *) echo "FATAL: Unknown target machine ${machine}, couldn't set FIX_DIR" exit 1 diff --git a/workflow/hosts/gaea.yaml b/workflow/hosts/gaea.yaml index 7ca8420997..ff9877e77b 100644 --- a/workflow/hosts/gaea.yaml +++ b/workflow/hosts/gaea.yaml @@ -1,19 +1,22 @@ -BASE_GIT: '/gpfs/f5/epic/proj-shared/global/glopara/data/git' -DMPDIR: '/gpfs/f5/epic/proj-shared/global/glopara/data/dump' -BASE_CPLIC: '/gpfs/f5/epic/proj-shared/global/glopara/data/ICSDIR/prototype_ICs' -PACKAGEROOT: '/gpfs/f5/epic/proj-shared/global/glopara/data/nwpara' -COMROOT: '/gpfs/f5/epic/proj-shared/global/glopara/data/com' +BASE_GIT: '/gpfs/f5/ufs-ard/world-shared/global/glopara/data/git' +DMPDIR: '/gpfs/f5/ufs-ard/world-shared/global/glopara/data/dump' +BASE_CPLIC: '/gpfs/f5/ufs-ard/world-shared/global/glopara/data/ICSDIR/prototype_ICs' +PACKAGEROOT: '/gpfs/f5/ufs-ard/world-shared/global/glopara/data/nwpara' +COMROOT: '/gpfs/f5/ufs-ard/world-shared/global/glopara/data/com' COMINsyn: '${COMROOT}/gfs/prod/syndat' -HOMEDIR: '/gpfs/f5/epic/scratch/${USER}' -STMP: '/gpfs/f5/epic/scratch/${USER}' -PTMP: '/gpfs/f5/epic/scratch/${USER}' +HOMEDIR: '/gpfs/f5/ufs-ard/scratch/${USER}' +STMP: '/gpfs/f5/ufs-ard/scratch/${USER}' +PTMP: '/gpfs/f5/ufs-ard/scratch/${USER}' NOSCRUB: $HOMEDIR -ACCOUNT: epic +ACCOUNT: ufs-ard +ACCOUNT_SERVICE: ufs-ard SCHEDULER: slurm QUEUE: normal QUEUE_SERVICE: normal PARTITION_BATCH: batch PARTITION_SERVICE: batch +RESERVATION: '' +CLUSTERS: 'c5' CHGRP_RSTPROD: 'NO' CHGRP_CMD: 'chgrp rstprod' HPSSARCH: 'NO' diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index a126992cee..e18b45ef28 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -226,6 +226,8 @@ def get_resource(self, task_name): native = '--export=NONE' if task_config['RESERVATION'] != "": native += '' if task_name in Tasks.SERVICE_TASKS else ' --reservation=' + task_config['RESERVATION'] + if task_config['CLUSTERS'] != "": + native += ' --clusters=' + task_config['CLUSTERS'] queue = task_config['QUEUE_SERVICE'] if task_name in Tasks.SERVICE_TASKS else task_config['QUEUE'] From 8215ae654202186a4f753c3abe937b7b9b91a9c7 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Tue, 2 Jul 2024 16:22:11 -0400 Subject: [PATCH 4/8] Hotfix for clusters from #2701 (#2747) Fixes an issue created from #2701 that added `CLUSTERS` to the `gaea.yaml`. 
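In isolation, the guard this hotfix applies can be sketched as below; this is an illustrative snippet only, not part of the patch, and the helper name and sample `task_config` values are hypothetical:

```python
# Illustrative sketch of the CLUSTERS guard added to Tasks.get_resource() in
# workflow/rocoto/tasks.py. The helper name and sample inputs are hypothetical.
def build_native(task_config: dict) -> str:
    native = '--export=NONE'
    # Emit --clusters= only for a real value; skip both an empty string and the
    # unrendered '@CLUSTERS@' template placeholder.
    if task_config['CLUSTERS'] not in ["", '@CLUSTERS@']:
        native += ' --clusters=' + task_config['CLUSTERS']
    return native


print(build_native({'CLUSTERS': 'c5'}))          # --export=NONE --clusters=c5
print(build_native({'CLUSTERS': '@CLUSTERS@'}))  # --export=NONE
```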
--- workflow/rocoto/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index e18b45ef28..4f97ef1faa 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -226,7 +226,7 @@ def get_resource(self, task_name): native = '--export=NONE' if task_config['RESERVATION'] != "": native += '' if task_name in Tasks.SERVICE_TASKS else ' --reservation=' + task_config['RESERVATION'] - if task_config['CLUSTERS'] != "": + if task_config['CLUSTERS'] not in ["", '@CLUSTERS@']: native += ' --clusters=' + task_config['CLUSTERS'] queue = task_config['QUEUE_SERVICE'] if task_name in Tasks.SERVICE_TASKS else task_config['QUEUE'] From 7dc6651a3b92194d963675bdc0a9ec3c28499abf Mon Sep 17 00:00:00 2001 From: GwenChen-NOAA <95313292+GwenChen-NOAA@users.noreply.github.com> Date: Wed, 3 Jul 2024 09:56:08 -0400 Subject: [PATCH 5/8] Update gempak job to run one fcst hour per task (#2671) This PR updates gempak jobs (gfs, gdas, and goes) from processing all forecast hours at once to one forecast hour at a time. This will reduce the job runtime to less than 5 min, so restart capability is not needed. Resolves #1250 Ref #2666 #2667 --------- Co-authored-by: Walter.Kolczynski --- .../gdas/atmos/gempak/jgdas_atmos_gempak.ecf | 6 +- .../gfs/atmos/gempak/jgfs_atmos_gempak.ecf | 5 +- .../gempak/jgfs_atmos_pgrb2_spec_gempak.ecf | 5 +- jobs/JGDAS_ATMOS_GEMPAK | 40 ++--- jobs/JGFS_ATMOS_GEMPAK | 164 +++++++++-------- jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC | 50 +----- jobs/rocoto/gempak.sh | 5 +- jobs/rocoto/gempakgrb2spec.sh | 5 +- parm/config/gfs/config.base | 7 + parm/config/gfs/config.resources | 2 +- scripts/exgdas_atmos_nawips.sh | 118 +++++-------- scripts/exgfs_atmos_goes_nawips.sh | 103 +++++------ scripts/exgfs_atmos_grib2_special_npoess.sh | 9 +- scripts/exgfs_atmos_nawips.sh | 166 ++++++++---------- workflow/rocoto/gfs_tasks.py | 55 ++++-- 15 files changed, 344 insertions(+), 396 deletions(-) diff --git a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf index 0bc2d76455..754d921f95 100755 --- a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf +++ b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf @@ -1,5 +1,5 @@ #PBS -S /bin/bash -#PBS -N %RUN%_atmos_gempak_%CYC% +#PBS -N %RUN%_atmos_gempak_%FHR3%_%CYC% #PBS -j oe #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% @@ -31,9 +31,13 @@ module load gempak/${gempak_ver} module list +############################################################# +# environment settings +############################################################# export cyc=%CYC% export cycle=t%CYC%z export USE_CFP=YES +export FHR3=%FHR3% ############################################################ # CALL executable job script here diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf index 1994f238d1..e01fa35e57 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf @@ -1,9 +1,9 @@ #PBS -S /bin/bash -#PBS -N %RUN%_atmos_gempak_%CYC% +#PBS -N %RUN%_atmos_gempak_%FHR3%_%CYC% #PBS -j oe #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% -#PBS -l walltime=03:00:00 +#PBS -l walltime=00:30:00 #PBS -l select=1:ncpus=28:mpiprocs=28:mem=2GB #PBS -l place=vscatter:shared #PBS -l debug=true @@ -37,6 +37,7 @@ module list ############################################################# export cyc=%CYC% export cycle=t%CYC%z +export FHR3=%FHR3% 
############################################################ # CALL executable job script here diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf index 04b07c58d1..df53868b05 100755 --- a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf @@ -1,9 +1,9 @@ #PBS -S /bin/bash -#PBS -N %RUN%_atmos_pgrb2_spec_gempak_%CYC% +#PBS -N %RUN%_atmos_pgrb2_spec_gempak_%FHR3%_%CYC% #PBS -j oe #PBS -q %QUEUE% #PBS -A %PROJ%-%PROJENVIR% -#PBS -l walltime=00:30:00 +#PBS -l walltime=00:15:00 #PBS -l select=1:ncpus=1:mem=1GB #PBS -l place=vscatter:shared #PBS -l debug=true @@ -35,6 +35,7 @@ module list ############################################################# export cyc=%CYC% export cycle=t%CYC%z +export FHR3=%FHR3% ############################################################ # CALL executable job script here diff --git a/jobs/JGDAS_ATMOS_GEMPAK b/jobs/JGDAS_ATMOS_GEMPAK index 3a9c8e0a9c..f5c00b9c98 100755 --- a/jobs/JGDAS_ATMOS_GEMPAK +++ b/jobs/JGDAS_ATMOS_GEMPAK @@ -3,27 +3,17 @@ source "${HOMEgfs}/ush/preamble.sh" source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak" -c "base gempak" -# TODO (#1219) This j-job is not part of the rocoto suite - ############################################ # Set up model and cycle specific variables ############################################ - -export fend=09 -export finc=3 -export fstart=00 -export GRIB=pgrb2f +export model=${model:-gdas} +export GRIB=${GRIB:-pgrb2f} export EXT="" -export DBN_ALERT_TYPE=GDAS_GEMPAK +export DBN_ALERT_TYPE=${DBN_ALERT_TYPE:-GDAS_GEMPAK} export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} -################################### -# Specify NET and RUN Name and model -#################################### -export model=${model:-gdas} - ############################################## # Define COM directories ############################################## @@ -40,26 +30,18 @@ for grid in 0p25 1p00; do fi done -# TODO: These actions belong in an ex-script not a j-job -if [[ -f poescript ]]; then - rm -f poescript -fi +######################################################## +# Execute the script for one degree grib +######################################################## -{ - ######################################################## - # Execute the script. 
- echo "${SCRgfs}/exgdas_atmos_nawips.sh 1p00 009 GDAS_GEMPAK ${COM_ATMOS_GEMPAK_1p00}" - ######################################################## +"${SCRgfs}/exgdas_atmos_nawips.sh" "1p00" "${FHR3}" "GDAS_GEMPAK" "${COM_ATMOS_GEMPAK_1p00}" - ######################################################## - # Execute the script for quater-degree grib - echo "${SCRgfs}/exgdas_atmos_nawips.sh 0p25 009 GDAS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}" - ######################################################## -} > poescript +######################################################## +# Execute the script for quater-degree grib +######################################################## -cat poescript +"${SCRgfs}/exgdas_atmos_nawips.sh" "0p25" "${FHR3}" "GDAS_GEMPAK" "${COM_ATMOS_GEMPAK_0p25}" -"${HOMEgfs}/ush/run_mpmd.sh" poescript export err=$?; err_chk ############################################ diff --git a/jobs/JGFS_ATMOS_GEMPAK b/jobs/JGFS_ATMOS_GEMPAK index ab65cc3bcf..9988378fe5 100755 --- a/jobs/JGFS_ATMOS_GEMPAK +++ b/jobs/JGFS_ATMOS_GEMPAK @@ -6,27 +6,20 @@ source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak" -c "base gempak" ############################################ # Set up model and cycle specific variables ############################################ -export finc=${finc:-3} -export fstart=${fstart:-0} export model=${model:-gfs} export GRIB=${GRIB:-pgrb2f} export EXT="" export DBN_ALERT_TYPE=${DBN_ALERT_TYPE:-GFS_GEMPAK} +export SENDDBN=${SENDDBN:-NO} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + # For half-degree P Grib files export DO_HD_PGRB=${DO_HD_PGRB:-YES} -################################### -# Specify NET and RUN Name and model -#################################### -export model=${model:-gfs} - ############################################## # Define COM directories ############################################## -export SENDDBN=${SENDDBN:-NO} -export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} - for grid in 0p25 0p50 1p00; do GRID=${grid} YMD=${PDY} HH=${cyc} declare_from_tmpl -rx "COM_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_GRID_TMPL" done @@ -40,73 +33,100 @@ for grid in 1p00 0p50 0p25 40km 35km_atl 35km_pac; do fi done -# TODO: These actions belong in an ex-script not a j-job -if [[ -f poescript ]]; then - rm -f poescript -fi - +fhr=10#${FHR3} ocean_domain_max=180 if (( ocean_domain_max > FHMAX_GFS )); then ocean_domain_max=${FHMAX_GFS} fi -{ - ################################################################# - # Execute the script for the 384 hour 1 degree grib - ################################################################## - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 1p00 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_1p00}" - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 1p00 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_1p00}" - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 1p00 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_1p00}" - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 1p00 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_1p00}" - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 1p00 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_1p00}" - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 1p00 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_1p00}" - - ################################################################# - # Execute the script for the half-degree grib - ################################################################## - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 0p50 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p50}" - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 0p50 ${FHMAX_GFS} GFS_GEMPAK 
${COM_ATMOS_GEMPAK_0p50}" - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 0p50 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p50}" - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 0p50 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p50}" - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 0p50 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p50}" - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 0p50 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p50}" - - ################################################################# - # Execute the script for the quater-degree grib - #################################################################### - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}" - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}" - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}" - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}" - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}" - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}" - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}" - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}" - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}" - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}" - - #################################################################### - # Execute the script to create the 35km Pacific grids for OPC - ##################################################################### - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 35km_pac ${ocean_domain_max} GFS_GEMPAK_WWB ${COM_ATMOS_GEMPAK_35km_pac}" - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 35km_pac ${ocean_domain_max} GFS_GEMPAK_WWB ${COM_ATMOS_GEMPAK_35km_pac}" - - #################################################################### - # Execute the script to create the 35km Atlantic grids for OPC - ##################################################################### - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 35km_atl ${ocean_domain_max} GFS_GEMPAK_WWB ${COM_ATMOS_GEMPAK_35km_atl}" - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 35km_atl ${ocean_domain_max} GFS_GEMPAK_WWB ${COM_ATMOS_GEMPAK_35km_atl}" - - ##################################################################### - # Execute the script to create the 40km grids for HPC - ###################################################################### - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 40km ${ocean_domain_max} GFS_GEMPAK_WWB ${COM_ATMOS_GEMPAK_40km}" - echo "time ${SCRgfs}/exgfs_atmos_nawips.sh 40km ${ocean_domain_max} GFS_GEMPAK_WWB ${COM_ATMOS_GEMPAK_40km}" -} > poescript - -cat poescript - -"${HOMEgfs}/ush/run_mpmd.sh" poescript +################################################################# +# Execute the script for the 384 hour 1 degree grib +################################################################## +fhmin=0 +fhmax=240 +if (( fhr >= fhmin && fhr <= fhmax )); then + if ((fhr % 3 == 0)); then + "${SCRgfs}/exgfs_atmos_nawips.sh" "1p00" "${FHR3}" "GFS_GEMPAK" "${COM_ATMOS_GEMPAK_1p00}" + fi +fi + +fhmin=252 +fhmax=384 +if (( fhr >= fhmin && fhr <= fhmax )); then + if ((fhr % 12 == 0)); then + "${SCRgfs}/exgfs_atmos_nawips.sh" "1p00" "${FHR3}" 
"GFS_GEMPAK" "${COM_ATMOS_GEMPAK_1p00}" + fi +fi + +################################################################# +# Execute the script for the half-degree grib +################################################################## +fhmin=0 +fhmax=240 +if (( fhr >= fhmin && fhr <= fhmax )); then + if ((fhr % 3 == 0)); then + "${SCRgfs}/exgfs_atmos_nawips.sh" "0p50" "${FHR3}" "GFS_GEMPAK" "${COM_ATMOS_GEMPAK_0p50}" + fi +fi + +fhmin=246 +fhmax=276 +if (( fhr >= fhmin && fhr <= fhmax )); then + if ((fhr % 6 == 0)); then + "${SCRgfs}/exgfs_atmos_nawips.sh" "0p50" "${FHR3}" "GFS_GEMPAK" "${COM_ATMOS_GEMPAK_0p50}" + fi +fi + +fhmin=288 +fhmax=384 +if (( fhr >= fhmin && fhr <= fhmax )); then + if ((fhr % 12 == 0)); then + "${SCRgfs}/exgfs_atmos_nawips.sh" "0p50" "${FHR3}" "GFS_GEMPAK" "${COM_ATMOS_GEMPAK_0p50}" + fi +fi + +################################################################# +# Execute the script for the quater-degree grib +#################################################################### +fhmin=0 +fhmax=120 +if (( fhr >= fhmin && fhr <= fhmax )); then + if ((fhr % 1 == 0)); then + "${SCRgfs}/exgfs_atmos_nawips.sh" "0p25" "${FHR3}" "GFS_GEMPAK" "${COM_ATMOS_GEMPAK_0p25}" + fi +fi + +fhmin=123 +fhmax=240 +if (( fhr >= fhmin && fhr <= fhmax )); then + if ((fhr % 3 == 0)); then + "${SCRgfs}/exgfs_atmos_nawips.sh" "0p25" "${FHR3}" "GFS_GEMPAK" "${COM_ATMOS_GEMPAK_0p25}" + fi +fi + +fhmin=252 +fhmax=384 +if (( fhr >= fhmin && fhr <= fhmax )); then + if ((fhr % 12 == 0)); then + "${SCRgfs}/exgfs_atmos_nawips.sh" "0p25" "${FHR3}" "GFS_GEMPAK" "${COM_ATMOS_GEMPAK_0p25}" + fi +fi + +#################################################################### +# Execute the script to create the 35km and 40km grids +##################################################################### +fhmin=0 +fhmax="${ocean_domain_max}" +if (( fhr >= fhmin && fhr <= fhmax )); then + if ((fhr % 3 == 0)); then + "${SCRgfs}/exgfs_atmos_nawips.sh" "35km_pac" "${FHR3}" "GFS_GEMPAK_WWB" "${COM_ATMOS_GEMPAK_35km_pac}" + + "${SCRgfs}/exgfs_atmos_nawips.sh" "35km_atl" "${FHR3}" "GFS_GEMPAK_WWB" "${COM_ATMOS_GEMPAK_35km_atl}" + + "${SCRgfs}/exgfs_atmos_nawips.sh" "40km" "${FHR3}" "GFS_GEMPAK_WWB" "${COM_ATMOS_GEMPAK_40km}" + fi +fi + export err=$?; err_chk ############################################ diff --git a/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC b/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC index 582dde2f65..0be52c0d19 100755 --- a/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC +++ b/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC @@ -1,21 +1,17 @@ #! /usr/bin/env bash -############################################ -# GFS_PGRB2_SPEC_GEMPAK PRODUCT GENERATION -############################################ source "${HOMEgfs}/ush/preamble.sh" source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak_spec" -c "base" -################################### -# Specify NET and RUN Name and model -#################################### +############################################ +# Set up model and cycle specific variables +############################################ export COMPONENT="atmos" -export finc=3 -export model=gfs +export model=${model:-gfs} export EXT="" -# For half-degree P Grib files -#export DO_HD_PGRB=YES +export SENDDBN=${SENDDBN:-NO} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} ############################################## # Define COM directories @@ -24,9 +20,6 @@ YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_ATMOS_GOES GRID=0p25 YMD=${PDY} HH=${cyc} declare_from_tmpl -rx "COM_ATMOS_GEMPAK_0p25:COM_ATMOS_GEMPAK_TMPL" if [[ ! 
-d "${COM_ATMOS_GEMPAK_0p25}" ]]; then mkdir -m 775 -p "${COM_ATMOS_GEMPAK_0p25}"; fi -export SENDDBN="${SENDDBN:-NO}" -export DBNROOT="${DBNROOT:-${UTILROOT}/fakedbn}" - ################################################################# # Execute the script for the regular grib ################################################################# @@ -38,22 +31,12 @@ export DBN_ALERT_TYPE=GFS_GOESSIM_GEMPAK export RUN2=gfs_goessim export GRIB=goessimpgrb2.0p25.f export EXT="" -export fend=180 -if (( fend > FHMAX_GFS )); then - fend=${FHMAX_GFS} -fi -export finc=3 -export fstart=0 - -echo "RUNS the Program" -######################################################## -# Execute the script. -"${SCRgfs}/exgfs_atmos_goes_nawips.sh" +"${SCRgfs}/exgfs_atmos_goes_nawips.sh" "${FHR3}" ################################################################# # Execute the script for the 221 grib - +################################################################# export DATA_SPECIAL221="${DATA}/SPECIAL221" mkdir -p "${DATA_SPECIAL221}" cd "${DATA_SPECIAL221}" || exit 1 @@ -62,26 +45,12 @@ export DBN_ALERT_TYPE=GFS_GOESSIM221_GEMPAK export RUN2=gfs_goessim221 export GRIB=goessimpgrb2f export EXT=".grd221" -export fend=180 -if (( fend > FHMAX_GFS )); then - fend=${FHMAX_GFS} -fi -export finc=3 -export fstart=0 -echo "RUNS the Program" +"${SCRgfs}/exgfs_atmos_goes_nawips.sh" "${FHR3}" -######################################################## -# Execute the script. -"${SCRgfs}/exgfs_atmos_goes_nawips.sh" export err=$?; err_chk -######################################################## -echo "end of program" cd "${DATA}" || exit 1 -echo "######################################" -echo " SPECIAL.OUT " -echo "######################################" ############################################ # print exec I/O output @@ -96,4 +65,3 @@ fi if [[ "${KEEPDATA}" != "YES" ]] ; then rm -rf "${DATA}" fi - diff --git a/jobs/rocoto/gempak.sh b/jobs/rocoto/gempak.sh index 82ea1175d8..f5aea2379d 100755 --- a/jobs/rocoto/gempak.sh +++ b/jobs/rocoto/gempak.sh @@ -1,15 +1,14 @@ #! /usr/bin/env bash source "${HOMEgfs}/ush/preamble.sh" - -############################################################### -. "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" status=$? if (( status != 0 )); then exit "${status}"; fi export job="gempak" export jobid="${job}.$$" + # Execute the JJOB "${HOMEgfs}/jobs/J${RUN^^}_ATMOS_GEMPAK" diff --git a/jobs/rocoto/gempakgrb2spec.sh b/jobs/rocoto/gempakgrb2spec.sh index f76c33ecdb..ddcb84599e 100755 --- a/jobs/rocoto/gempakgrb2spec.sh +++ b/jobs/rocoto/gempakgrb2spec.sh @@ -1,15 +1,14 @@ #! /usr/bin/env bash source "${HOMEgfs}/ush/preamble.sh" - -############################################################### -. "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" status=$? if (( status != 0 )); then exit "${status}"; fi export job="gempakpgrb2spec" export jobid="${job}.$$" + # Execute the JJOB "${HOMEgfs}/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC" diff --git a/parm/config/gfs/config.base b/parm/config/gfs/config.base index f78c7fb400..9fd66bf0c7 100644 --- a/parm/config/gfs/config.base +++ b/parm/config/gfs/config.base @@ -313,6 +313,13 @@ if (( FHMAX_HF_GFS < 120 )); then export ILPOST=${FHOUT_GFS} fi +# Limit bounds of goes processing +export FHMAX_GOES=180 +export FHOUT_GOES=3 +if (( FHMAX_GOES > FHMAX_GFS )); then + export FHMAX_GOES=${FHMAX_GFS} +fi + # GFS restart interval in hours export restart_interval_gfs=12 # NOTE: Do not set this to zero. 
Instead set it to $FHMAX_GFS diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index 2e910d4eb4..5c3a100880 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -1217,7 +1217,7 @@ case ${step} in ;; "gempak") - export wtime_gempak="03:00:00" + export wtime_gempak="00:30:00" export npe_gempak_gdas=2 export npe_gempak_gfs=28 export npe_node_gempak_gdas=2 diff --git a/scripts/exgdas_atmos_nawips.sh b/scripts/exgdas_atmos_nawips.sh index ea350239c1..7feb3210ac 100755 --- a/scripts/exgdas_atmos_nawips.sh +++ b/scripts/exgdas_atmos_nawips.sh @@ -1,20 +1,14 @@ #! /usr/bin/env bash ################################################################### -# echo "----------------------------------------------------" # echo "exnawips - convert NCEP GRIB files into GEMPAK Grids" -# echo "----------------------------------------------------" -# echo "History: Mar 2000 - First implementation of this new script." -# echo "S Lilly: May 2008 - add logic to make sure that all of the " -# echo " data produced from the restricted ECMWF" -# echo " data on the CCS is properly protected." -##################################################################### +################################################################### source "${USHgfs}/preamble.sh" "${2}" cd "${DATA}" || exit 1 grid=$1 -fend=$2 +fhr3=$2 DBN_ALERT_TYPE=$3 destination=$4 @@ -22,6 +16,9 @@ DATA_RUN="${DATA}/${grid}" mkdir -p "${DATA_RUN}" cd "${DATA_RUN}" || exit 1 +# "Import" functions used in this script +source "${USHgfs}/product_functions.sh" + for table in g2varswmo2.tbl g2vcrdwmo2.tbl g2varsncep1.tbl g2vcrdncep1.tbl; do cp "${HOMEgfs}/gempak/fix/${table}" "${table}" || \ ( echo "FATAL ERROR: ${table} is missing" && exit 2 ) @@ -39,71 +36,50 @@ proj= output=T pdsext=no -sleep_interval=10 -max_tries=180 - -fhr=$(( 10#${fstart} )) -while (( fhr <= 10#${fend} )); do - fhr3=$(printf "%03d" "${fhr}") - - source_dirvar="COM_ATMOS_GRIB_${grid}" - GEMGRD="${RUN}_${grid}_${PDY}${cyc}f${fhr3}" - export GRIBIN="${!source_dirvar}/${model}.${cycle}.pgrb2.${grid}.f${fhr3}" - GRIBIN_chk="${GRIBIN}.idx" - - if ! wait_for_file "${GRIBIN_chk}" "${sleep_interval}" "${max_tries}"; then - echo "FATAL ERROR: after 1 hour of waiting for ${GRIBIN_chk} file at F${fhr3} to end." - export err=7 ; err_chk - exit "${err}" - fi - - cp "${GRIBIN}" "grib${fhr3}" - - export pgm="nagrib2 F${fhr3}" - startmsg - - ${NAGRIB} << EOF - GBFILE = grib${fhr3} - INDXFL = - GDOUTF = ${GEMGRD} - PROJ = ${proj} - GRDAREA = ${grdarea} - KXKY = ${kxky} - MAXGRD = ${maxgrd} - CPYFIL = ${cpyfil} - GAREA = ${garea} - OUTPUT = ${output} - GBTBLS = ${gbtbls} - GBDIAG = - PDSEXT = ${pdsext} - l - r + + +GEMGRD="${RUN}_${grid}_${PDY}${cyc}f${fhr3}" +source_dirvar="COM_ATMOS_GRIB_${grid}" +export GRIBIN="${!source_dirvar}/${model}.${cycle}.pgrb2.${grid}.f${fhr3}" +GRIBIN_chk="${GRIBIN}.idx" + +if [[ ! -r "${GRIBIN_chk}" ]]; then + echo "FATAL ERROR: GRIB index file ${GRIBIN_chk} not found!" + export err=7 ; err_chk + exit "${err}" +fi + +cp "${GRIBIN}" "grib${fhr3}" + +export pgm="nagrib2 F${fhr3}" +startmsg + +${NAGRIB} << EOF +GBFILE = grib${fhr3} +INDXFL = +GDOUTF = ${GEMGRD} +PROJ = ${proj} +GRDAREA = ${grdarea} +KXKY = ${kxky} +MAXGRD = ${maxgrd} +CPYFIL = ${cpyfil} +GAREA = ${garea} +OUTPUT = ${output} +GBTBLS = ${gbtbls} +GBDIAG = +PDSEXT = ${pdsext} +l +r EOF - export err=$?; err_chk - - cp "${GEMGRD}" "${destination}/${GEMGRD}" - export err=$? - if (( err != 0 )) ; then - echo "FATAL ERROR: ${GEMGRD} does not exist." 
- exit "${err}" - fi - - if [[ ${SENDDBN} = "YES" ]] ; then - "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \ - "${destination}/${GEMGRD}" - else - echo "##### DBN_ALERT_TYPE is: ${DBN_ALERT_TYPE} #####" - fi - - if (( fhr >= 240 )) ; then - fhr=$((fhr+12)) - else - fhr=$((fhr+finc)) - fi -done -"${GEMEXE}/gpend" -##################################################################### +export err=$?; err_chk +cpfs "${GEMGRD}" "${destination}/${GEMGRD}" +if [[ ${SENDDBN} = "YES" ]] ; then + "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \ + "${destination}/${GEMGRD}" +fi + +"${GEMEXE}/gpend" ############################### END OF SCRIPT ####################### diff --git a/scripts/exgfs_atmos_goes_nawips.sh b/scripts/exgfs_atmos_goes_nawips.sh index 2c725a6402..86b0eea795 100755 --- a/scripts/exgfs_atmos_goes_nawips.sh +++ b/scripts/exgfs_atmos_goes_nawips.sh @@ -1,26 +1,22 @@ #! /usr/bin/env bash ################################################################### -# echo "----------------------------------------------------" # echo "exnawips - convert NCEP GRIB files into GEMPAK Grids" -# echo "----------------------------------------------------" -# echo "History: Mar 2000 - First implementation of this new script." -# echo "S Lilly: May 2008 - add logic to make sure that all of the " -# echo " data produced from the restricted ECMWF" -# echo " data on the CCS is properly protected." -# echo "C. Magee: 10/2013 - swap X and Y for rtgssthr Atl and Pac." -##################################################################### +################################################################### source "${USHgfs}/preamble.sh" -cd "${DATA}" || exit 2 +cd "${DATA}" || exit 1 +fhr3=$1 + +# "Import" functions used in this script +source "${USHgfs}/product_functions.sh" for table in g2varswmo2.tbl g2vcrdwmo2.tbl g2varsncep1.tbl g2vcrdncep1.tbl; do cp "${HOMEgfs}/gempak/fix/${table}" "${table}" || \ ( echo "FATAL ERROR: ${table} is missing" && exit 2 ) done -# NAGRIB_TABLE="${HOMEgfs}/gempak/fix/nagrib.tbl" NAGRIB="${GEMEXE}/nagrib2" @@ -48,58 +44,49 @@ else fi pdsext=no -sleep_interval=20 -max_tries=180 -fhr=${fstart} -for (( fhr=fstart; fhr <= fend; fhr=fhr+finc )); do - fhr3=$(printf "%03d" "${fhr}") - GRIBIN="${COM_ATMOS_GOES}/${model}.${cycle}.${GRIB}${fhr3}${EXT}" - GEMGRD="${RUN2}_${PDY}${cyc}f${fhr3}" - - GRIBIN_chk="${GRIBIN}" - - if ! wait_for_file "${GRIBIN_chk}" "${sleep_interval}" "${max_tries}"; then - echo "FATAL ERROR: after 1 hour of waiting for ${GRIBIN_chk} file at F${fhr3} to end." - export err=7 ; err_chk - exit "${err}" - fi - - cp "${GRIBIN}" "grib${fhr3}" - - export pgm="nagrib_nc F${fhr3}" - - ${NAGRIB} << EOF - GBFILE = grib${fhr3} - INDXFL = - GDOUTF = ${GEMGRD} - PROJ = ${proj} - GRDAREA = ${grdarea} - KXKY = ${kxky} - MAXGRD = ${maxgrd} - CPYFIL = ${cpyfil} - GAREA = ${garea} - OUTPUT = ${output} - GBTBLS = ${gbtbls} - GBDIAG = - PDSEXT = ${pdsext} - l - r -EOF - export err=$?;err_chk - "${GEMEXE}/gpend" - cpfs "${GEMGRD}" "${COM_ATMOS_GEMPAK_0p25}/${GEMGRD}" - if [[ ${SENDDBN} == "YES" ]] ; then - "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \ - "${COM_ATMOS_GEMPAK_0p25}/${GEMGRD}" - else - echo "##### DBN_ALERT_TYPE is: ${DBN_ALERT_TYPE} #####" - fi +GEMGRD="${RUN2}_${PDY}${cyc}f${fhr3}" +GRIBIN="${COM_ATMOS_GOES}/${model}.${cycle}.${GRIB}${fhr3}${EXT}" +GRIBIN_chk="${GRIBIN}" + +if [[ ! -r "${GRIBIN_chk}" ]]; then + echo "FATAL ERROR: GRIB index file ${GRIBIN_chk} not found!" 
+ export err=7 ; err_chk + exit "${err}" +fi + +cp "${GRIBIN}" "grib${fhr3}" + +export pgm="nagrib_nc F${fhr3}" +startmsg + +${NAGRIB} << EOF +GBFILE = grib${fhr3} +INDXFL = +GDOUTF = ${GEMGRD} +PROJ = ${proj} +GRDAREA = ${grdarea} +KXKY = ${kxky} +MAXGRD = ${maxgrd} +CPYFIL = ${cpyfil} +GAREA = ${garea} +OUTPUT = ${output} +GBTBLS = ${gbtbls} +GBDIAG = +PDSEXT = ${pdsext} +l +r +EOF -done +export err=$?; err_chk -##################################################################### +cpfs "${GEMGRD}" "${COM_ATMOS_GEMPAK_0p25}/${GEMGRD}" +if [[ ${SENDDBN} == "YES" ]] ; then + "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \ + "${COM_ATMOS_GEMPAK_0p25}/${GEMGRD}" +fi +"${GEMEXE}/gpend" ############################### END OF SCRIPT ####################### diff --git a/scripts/exgfs_atmos_grib2_special_npoess.sh b/scripts/exgfs_atmos_grib2_special_npoess.sh index 8d182469ed..63f5518b54 100755 --- a/scripts/exgfs_atmos_grib2_special_npoess.sh +++ b/scripts/exgfs_atmos_grib2_special_npoess.sh @@ -133,12 +133,9 @@ done ################################################################ # Specify Forecast Hour Range F000 - F180 for GOESSIMPGRB files ################################################################ -export SHOUR=0 -export FHOUR=180 -export FHINC=3 -if (( FHOUR > FHMAX_GFS )); then - export FHOUR="${FHMAX_GFS}" -fi +export SHOUR=${FHMIN_GFS} +export FHOUR=${FHMAX_GOES} +export FHINC=${FHOUT_GOES} ################################# # Process GFS PGRB2_SPECIAL_POST diff --git a/scripts/exgfs_atmos_nawips.sh b/scripts/exgfs_atmos_nawips.sh index 25873473a8..9cf1969f65 100755 --- a/scripts/exgfs_atmos_nawips.sh +++ b/scripts/exgfs_atmos_nawips.sh @@ -1,14 +1,8 @@ #! /usr/bin/env bash ################################################################### -# echo "----------------------------------------------------" # echo "exnawips - convert NCEP GRIB files into GEMPAK Grids" -# echo "----------------------------------------------------" -# echo "History: Mar 2000 - First implementation of this new script." -# echo "S Lilly: May 2008 - add logic to make sure that all of the " -# echo " data produced from the restricted ECMWF" -# echo " data on the CCS is properly protected." 
-##################################################################### +################################################################### source "${USHgfs}/preamble.sh" "${2}" @@ -19,7 +13,7 @@ export ILPOST=${ILPOST:-1} cd "${DATA}" || exit 1 grid=$1 -fend=$2 +fhr3=$2 DBN_ALERT_TYPE=$3 destination=$4 @@ -30,9 +24,7 @@ cd "${DATA_RUN}" || exit 1 # "Import" functions used in this script source "${USHgfs}/product_functions.sh" -# NAGRIB="${GEMEXE}/nagrib2" -# cpyfil=gds garea=dset @@ -46,68 +38,65 @@ pdsext=no sleep_interval=10 max_tries=360 -fhr=$(( 10#${fstart} )) -while (( fhr <= 10#${fend} )) ; do - - fhr3=$(printf "%03d" "${fhr}") - - if mkdir "lock.${fhr3}" ; then - cd "lock.${fhr3}" || exit 1 - - for table in g2varswmo2.tbl g2vcrdwmo2.tbl g2varsncep1.tbl g2vcrdncep1.tbl; do - cp "${HOMEgfs}/gempak/fix/${table}" "${table}" || \ - ( echo "FATAL ERROR: ${table} is missing" && exit 2 ) - done - - GEMGRD="${RUN}_${grid}_${PDY}${cyc}f${fhr3}" - - # Set type of Interpolation for WGRIB2 - export opt1=' -set_grib_type same -new_grid_winds earth ' - export opt1uv=' -set_grib_type same -new_grid_winds grid ' - export opt21=' -new_grid_interpolation bilinear -if ' - export opt22=":(CSNOW|CRAIN|CFRZR|CICEP|ICSEV):" - export opt23=' -new_grid_interpolation neighbor -fi ' - export opt24=' -set_bitmap 1 -set_grib_max_bits 16 -if ' - export opt25=":(APCP|ACPCP|PRATE|CPRAT):" - export opt26=' -set_grib_max_bits 25 -fi -if ' - export opt27=":(APCP|ACPCP|PRATE|CPRAT|DZDT):" - export opt28=' -new_grid_interpolation budget -fi ' - - case ${grid} in - # TODO: Why aren't we interpolating from the 0p25 grids for 35-km and 40-km? - '0p50' | '0p25') grid_in=${grid};; - *) grid_in="1p00";; - esac - - source_var="COM_ATMOS_GRIB_${grid_in}" - export GRIBIN="${!source_var}/${model}.${cycle}.pgrb2.${grid_in}.f${fhr3}" - GRIBIN_chk="${!source_var}/${model}.${cycle}.pgrb2.${grid_in}.f${fhr3}.idx" - - if ! wait_for_file "${GRIBIN_chk}" "${sleep_interval}" "${max_tries}"; then - echo "FATAL ERROR: after 1 hour of waiting for ${GRIBIN_chk} file at F${fhr3} to end." 
- export err=7 ; err_chk - exit "${err}" - fi - - case "${grid}" in - 35km_pac) grid_spec='latlon 130.0:416:0.312 75.125:186:-0.312';; - 35km_atl) grid_spec='latlon 230.0:480:0.312 75.125:242:-0.312';; - 40km) grid_spec='lambert:265.0:25.0:25.0 226.541:185:40635.0 12.19:129:40635.0';; - *) grid_spec='';; - esac - - if [[ "${grid_spec}" != "" ]]; then - # shellcheck disable=SC2086,SC2248 - "${WGRIB2}" "${GRIBIN}" ${opt1uv} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} ${opt27} ${opt28} -new_grid ${grid_spec} "grib${fhr3}" - trim_rh "grib${fhr3}" - else - cp "${GRIBIN}" "grib${fhr3}" - fi - - export pgm="nagrib2 F${fhr3}" - startmsg - - ${NAGRIB} << EOF + + +mkdir -p "lock.${fhr3}" +cd "lock.${fhr3}" || exit 1 + +for table in g2varswmo2.tbl g2vcrdwmo2.tbl g2varsncep1.tbl g2vcrdncep1.tbl; do + cp "${HOMEgfs}/gempak/fix/${table}" "${table}" || \ + ( echo "FATAL ERROR: ${table} is missing" && exit 2 ) +done + +GEMGRD="${RUN}_${grid}_${PDY}${cyc}f${fhr3}" + +# Set type of Interpolation for WGRIB2 +export opt1=' -set_grib_type same -new_grid_winds earth ' +export opt1uv=' -set_grib_type same -new_grid_winds grid ' +export opt21=' -new_grid_interpolation bilinear -if ' +export opt22=":(CSNOW|CRAIN|CFRZR|CICEP|ICSEV):" +export opt23=' -new_grid_interpolation neighbor -fi ' +export opt24=' -set_bitmap 1 -set_grib_max_bits 16 -if ' +export opt25=":(APCP|ACPCP|PRATE|CPRAT):" +export opt26=' -set_grib_max_bits 25 -fi -if ' +export opt27=":(APCP|ACPCP|PRATE|CPRAT|DZDT):" +export opt28=' -new_grid_interpolation budget -fi ' + +case ${grid} in + # TODO: Why aren't we interpolating from the 0p25 grids for 35-km and 40-km? + '0p50' | '0p25') grid_in=${grid};; + *) grid_in="1p00";; +esac + +source_var="COM_ATMOS_GRIB_${grid_in}" +export GRIBIN="${!source_var}/${model}.${cycle}.pgrb2.${grid_in}.f${fhr3}" +GRIBIN_chk="${!source_var}/${model}.${cycle}.pgrb2.${grid_in}.f${fhr3}.idx" + +if ! wait_for_file "${GRIBIN_chk}" "${sleep_interval}" "${max_tries}"; then + echo "FATAL ERROR: after 1 hour of waiting for ${GRIBIN_chk} file at F${fhr3} to end." + export err=7 ; err_chk + exit "${err}" +fi + +case "${grid}" in + 35km_pac) grid_spec='latlon 130.0:416:0.312 75.125:186:-0.312';; + 35km_atl) grid_spec='latlon 230.0:480:0.312 75.125:242:-0.312';; + 40km) grid_spec='lambert:265.0:25.0:25.0 226.541:185:40635.0 12.19:129:40635.0';; + *) grid_spec='';; +esac + +if [[ "${grid_spec}" != "" ]]; then + # shellcheck disable=SC2086,SC2248 + "${WGRIB2}" "${GRIBIN}" ${opt1uv} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} ${opt27} ${opt28} -new_grid ${grid_spec} "grib${fhr3}" + trim_rh "grib${fhr3}" +else + cp "${GRIBIN}" "grib${fhr3}" +fi + +export pgm="nagrib2 F${fhr3}" +startmsg + +${NAGRIB} << EOF GBFILE = grib${fhr3} INDXFL = GDOUTF = ${GEMGRD} @@ -124,31 +113,16 @@ PDSEXT = ${pdsext} l r EOF - export err=$?;err_chk - - cpfs "${GEMGRD}" "${destination}/${GEMGRD}" - if [[ ${SENDDBN} == "YES" ]] ; then - "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \ - "${destination}/${GEMGRD}" - fi - cd "${DATA_RUN}" || exit 1 - else - if (( fhr >= 240 )) ; then - if (( fhr < 276 )) && [[ "${grid}" = "0p50" ]] ; then - fhr=$((fhr+6)) - else - fhr=$((fhr+12)) - fi - elif ((fhr < 120)) && [[ "${grid}" = "0p25" ]] ; then - fhr=$((fhr + ILPOST)) - else - fhr=$((ILPOST > finc ? 
fhr+ILPOST : fhr+finc )) - fi - fi -done -"${GEMEXE}/gpend" -##################################################################### +export err=$?;err_chk +cpfs "${GEMGRD}" "${destination}/${GEMGRD}" +if [[ ${SENDDBN} == "YES" ]] ; then + "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \ + "${destination}/${GEMGRD}" +fi +cd "${DATA_RUN}" || exit 1 + +"${GEMEXE}/gpend" ############################### END OF SCRIPT ####################### diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index 60a08549b6..55fa5a2475 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -1444,16 +1444,21 @@ def awips_20km_1p0deg(self): def gempak(self): deps = [] - dep_dict = {'type': 'metatask', 'name': f'{self.cdump}atmos_prod'} + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmos_prod_f#fhr#'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep=deps) + gempak_vars = self.envars.copy() + gempak_dict = {'FHR3': '#fhr#'} + for key, value in gempak_dict.items(): + gempak_vars.append(rocoto.create_envar(name=key, value=str(value))) + resources = self.get_resource('gempak') - task_name = f'{self.cdump}gempak' + task_name = f'{self.cdump}gempak_f#fhr#' task_dict = {'task_name': task_name, 'resources': resources, 'dependency': dependencies, - 'envars': self.envars, + 'envars': gempak_vars, 'cycledef': self.cdump.replace('enkf', ''), 'command': f'{self.HOMEgfs}/jobs/rocoto/gempak.sh', 'job_name': f'{self.pslot}_{task_name}_@H', @@ -1461,13 +1466,20 @@ def gempak(self): 'maxtries': '&MAXTRIES;' } - task = rocoto.create_task(task_dict) + fhrs = self._get_forecast_hours(self.cdump, self._configs['gempak']) + fhr_var_dict = {'fhr': ' '.join([f"{fhr:03d}" for fhr in fhrs])} + + fhr_metatask_dict = {'task_name': f'{self.cdump}gempak', + 'task_dict': task_dict, + 'var_dict': fhr_var_dict} + + task = rocoto.create_task(fhr_metatask_dict) return task def gempakmeta(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.cdump}gempak'} + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}gempak'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep=deps) @@ -1490,7 +1502,7 @@ def gempakmeta(self): def gempakmetancdc(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.cdump}gempak'} + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}gempak'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep=deps) @@ -1513,7 +1525,7 @@ def gempakmetancdc(self): def gempakncdcupapgif(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.cdump}gempak'} + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}gempak'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep=deps) @@ -1540,12 +1552,17 @@ def gempakpgrb2spec(self): deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep=deps) + gempak_vars = self.envars.copy() + gempak_dict = {'FHR3': '#fhr#'} + for key, value in gempak_dict.items(): + gempak_vars.append(rocoto.create_envar(name=key, value=str(value))) + resources = self.get_resource('gempak') - task_name = f'{self.cdump}gempakgrb2spec' + task_name = f'{self.cdump}gempakgrb2spec_f#fhr#' task_dict = {'task_name': task_name, 'resources': resources, 'dependency': dependencies, - 'envars': self.envars, + 'envars': gempak_vars, 'cycledef': self.cdump.replace('enkf', ''), 'command': f'{self.HOMEgfs}/jobs/rocoto/gempakgrb2spec.sh', 'job_name': 
f'{self.pslot}_{task_name}_@H', @@ -1553,7 +1570,23 @@ def gempakpgrb2spec(self): 'maxtries': '&MAXTRIES;' } - task = rocoto.create_task(task_dict) + # Override forecast lengths locally to be that of gempak goes job + local_config = self._configs['gempak'] + goes_times = { + 'FHMAX_HF_GFS': 0, + 'FHMAX_GFS': local_config['FHMAX_GOES'], + 'FHOUT_GFS': local_config['FHOUT_GOES'], + } + local_config.update(goes_times) + + fhrs = self._get_forecast_hours(self.cdump, local_config) + fhr_var_dict = {'fhr': ' '.join([f"{fhr:03d}" for fhr in fhrs])} + + fhr_metatask_dict = {'task_name': f'{self.cdump}gempakgrb2spec', + 'task_dict': task_dict, + 'var_dict': fhr_var_dict} + + task = rocoto.create_task(fhr_metatask_dict) return task @@ -2236,7 +2269,7 @@ def cleanup(self): dep_dict = {'type': 'task', 'name': f'{self.cdump}gempakncdcupapgif'} deps.append(rocoto.add_dependency(dep_dict)) if self.app_config.do_goes: - dep_dict = {'type': 'task', 'name': f'{self.cdump}gempakgrb2spec'} + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}gempakgrb2spec'} deps.append(rocoto.add_dependency(dep_dict)) dep_dict = {'type': 'task', 'name': f'{self.cdump}npoess_pgrb2_0p5deg'} deps.append(rocoto.add_dependency(dep_dict)) From 2bd106a013805ba4e16dbdc456d6731f8f36ec85 Mon Sep 17 00:00:00 2001 From: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com> Date: Wed, 3 Jul 2024 11:32:40 -0400 Subject: [PATCH 6/8] Hotfix for undefined CLUSTERS (#2748) Defines `CLUSTERS` as an empty string for all hosts except Gaea and uses the native `dict` `get` method to prevent grabbing an unset entry. --- workflow/hosts/awspw.yaml | 1 + workflow/hosts/container.yaml | 1 + workflow/hosts/hera.yaml | 1 + workflow/hosts/hercules.yaml | 1 + workflow/hosts/jet.yaml | 1 + workflow/hosts/orion.yaml | 1 + workflow/hosts/s4.yaml | 1 + workflow/hosts/wcoss2.yaml | 1 + workflow/rocoto/tasks.py | 2 +- 9 files changed, 9 insertions(+), 1 deletion(-) diff --git a/workflow/hosts/awspw.yaml b/workflow/hosts/awspw.yaml index d2223e799e..b7021a6e3f 100644 --- a/workflow/hosts/awspw.yaml +++ b/workflow/hosts/awspw.yaml @@ -14,6 +14,7 @@ QUEUE_SERVICE: batch PARTITION_BATCH: compute PARTITION_SERVICE: compute RESERVATION: '' +CLUSTERS: '' CHGRP_RSTPROD: 'YES' CHGRP_CMD: 'chgrp rstprod' # TODO: This is not yet supported. 
HPSSARCH: 'YES' diff --git a/workflow/hosts/container.yaml b/workflow/hosts/container.yaml index 5f4a66ac1f..907f69754e 100644 --- a/workflow/hosts/container.yaml +++ b/workflow/hosts/container.yaml @@ -14,6 +14,7 @@ QUEUE_SERVICE: '' PARTITION_BATCH: '' PARTITION_SERVICE: '' RESERVATION: '' +CLUSTERS: '' CHGRP_RSTPROD: 'YES' CHGRP_CMD: 'chgrp rstprod' HPSSARCH: 'NO' diff --git a/workflow/hosts/hera.yaml b/workflow/hosts/hera.yaml index 8cf7363605..76a7158f43 100644 --- a/workflow/hosts/hera.yaml +++ b/workflow/hosts/hera.yaml @@ -16,6 +16,7 @@ PARTITION_BATCH: hera PARTITION_SERVICE: service RESERVATION: '' CHGRP_RSTPROD: 'YES' +CLUSTERS: '' CHGRP_CMD: 'chgrp rstprod' HPSSARCH: 'YES' HPSS_PROJECT: emc-global diff --git a/workflow/hosts/hercules.yaml b/workflow/hosts/hercules.yaml index adebdfe23d..975558160f 100644 --- a/workflow/hosts/hercules.yaml +++ b/workflow/hosts/hercules.yaml @@ -16,6 +16,7 @@ PARTITION_BATCH: hercules PARTITION_SERVICE: service RESERVATION: '' CHGRP_RSTPROD: 'YES' +CLUSTERS: '' CHGRP_CMD: 'chgrp rstprod' HPSSARCH: 'NO' HPSS_PROJECT: emc-global diff --git a/workflow/hosts/jet.yaml b/workflow/hosts/jet.yaml index fd556fadc7..b526e073c3 100644 --- a/workflow/hosts/jet.yaml +++ b/workflow/hosts/jet.yaml @@ -16,6 +16,7 @@ PARTITION_BATCH: kjet PARTITION_SERVICE: service RESERVATION: '' CHGRP_RSTPROD: 'YES' +CLUSTERS: '' CHGRP_CMD: 'chgrp rstprod' HPSSARCH: 'YES' HPSS_PROJECT: emc-global diff --git a/workflow/hosts/orion.yaml b/workflow/hosts/orion.yaml index ba289df1e3..fe36c8e7ce 100644 --- a/workflow/hosts/orion.yaml +++ b/workflow/hosts/orion.yaml @@ -16,6 +16,7 @@ PARTITION_BATCH: orion PARTITION_SERVICE: service RESERVATION: '' CHGRP_RSTPROD: 'YES' +CLUSTERS: '' CHGRP_CMD: 'chgrp rstprod' HPSSARCH: 'NO' HPSS_PROJECT: emc-global diff --git a/workflow/hosts/s4.yaml b/workflow/hosts/s4.yaml index 543912cf23..37479fa13c 100644 --- a/workflow/hosts/s4.yaml +++ b/workflow/hosts/s4.yaml @@ -16,6 +16,7 @@ PARTITION_BATCH: s4 PARTITION_SERVICE: serial RESERVATION: '' CHGRP_RSTPROD: 'NO' +CLUSTERS: '' CHGRP_CMD: 'ls' HPSSARCH: 'NO' HPSS_PROJECT: emc-global diff --git a/workflow/hosts/wcoss2.yaml b/workflow/hosts/wcoss2.yaml index 4943495289..e3650e4710 100644 --- a/workflow/hosts/wcoss2.yaml +++ b/workflow/hosts/wcoss2.yaml @@ -16,6 +16,7 @@ PARTITION_BATCH: '' PARTITION_SERVICE: '' RESERVATION: '' CHGRP_RSTPROD: 'YES' +CLUSTERS: '' CHGRP_CMD: 'chgrp rstprod' HPSSARCH: 'NO' HPSS_PROJECT: emc-global diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index 4f97ef1faa..097d1adef5 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -226,7 +226,7 @@ def get_resource(self, task_name): native = '--export=NONE' if task_config['RESERVATION'] != "": native += '' if task_name in Tasks.SERVICE_TASKS else ' --reservation=' + task_config['RESERVATION'] - if task_config['CLUSTERS'] not in ["", '@CLUSTERS@']: + if task_config.get('CLUSTERS', "") not in ["", '@CLUSTERS@']: native += ' --clusters=' + task_config['CLUSTERS'] queue = task_config['QUEUE_SERVICE'] if task_name in Tasks.SERVICE_TASKS else task_config['QUEUE'] From d65d3d257b38225fac74e86b770f43e1f8ae2d5a Mon Sep 17 00:00:00 2001 From: Jessica Meixner Date: Wed, 3 Jul 2024 21:07:49 -0400 Subject: [PATCH 7/8] Update ufs model hash to 20240625 (#2729) Updates UFS weather model hash to hash from 2024-06-24 which has orion porting updates + a few namelist updates. 
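For reference, a bump like this is typically reproduced locally with something along the following lines; this is an illustrative sketch only, the project's actual update procedure may differ, and the target commit is simply the one recorded in the diff below:

```bash
# Illustrative only: point sorc/ufs_model.fd at the new upstream commit and
# record the updated gitlink in the superproject.
cd sorc/ufs_model.fd
git fetch origin
git checkout e784814dfce3fb01e82be6d3949f9811860041d7
cd ../..
git add sorc/ufs_model.fd
git commit -m "Update ufs model hash to 20240625"
```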
---
 sorc/ufs_model.fd             | 2 +-
 ush/parsing_namelists_CICE.sh | 7 +++++++
 ush/parsing_ufs_configure.sh  | 3 +++
 3 files changed, 11 insertions(+), 1 deletion(-)

diff --git a/sorc/ufs_model.fd b/sorc/ufs_model.fd
index 485ccdfc4a..e784814dfc 160000
--- a/sorc/ufs_model.fd
+++ b/sorc/ufs_model.fd
@@ -1 +1 @@
-Subproject commit 485ccdfc4a7ed6deeb02d82c2cebe51b37e892f5
+Subproject commit e784814dfce3fb01e82be6d3949f9811860041d7
diff --git a/ush/parsing_namelists_CICE.sh b/ush/parsing_namelists_CICE.sh
index aa495d1864..3822094c97 100755
--- a/ush/parsing_namelists_CICE.sh
+++ b/ush/parsing_namelists_CICE.sh
@@ -59,6 +59,7 @@ local CICE_RUNID="unknown"
 local CICE_USE_RESTART_TIME=${use_restart_time}
 local CICE_RESTART_DIR="./CICE_RESTART/"
 local CICE_RESTART_FILE="cice_model.res"
+local CICE_ICE_IC='cice_model.res.nc'
 local CICE_RESTART_DEFLATE=0
 local CICE_RESTART_CHUNK=0,0
 local CICE_RESTART_STRIDE=-99
@@ -117,6 +118,12 @@ local CICE_NPROC=${ntasks_cice6}
 local CICE_BLCKX=${block_size_x}
 local CICE_BLCKY=${block_size_y}
 local CICE_DECOMP=${processor_shape}
+# ice_prescribed_nml section
+local CICE_PRESCRIBED="false"
+local MESH_DICE="none"
+local stream_files_dice="none"
+
+
 # Ensure the template exists
 local template=${CICE_TEMPLATE:-"${PARMgfs}/ufs/ice_in.IN"}
diff --git a/ush/parsing_ufs_configure.sh b/ush/parsing_ufs_configure.sh
index 24ea80e56c..062b40591e 100755
--- a/ush/parsing_ufs_configure.sh
+++ b/ush/parsing_ufs_configure.sh
@@ -39,6 +39,8 @@ if [[ "${cpl}" = ".true." ]]; then
   local coupling_interval_slow_sec="${CPL_SLOW}"
 fi
 
+local WRITE_ENDOFRUN_RESTART=.false.
+
 if [[ "${cplflx}" = ".true." ]]; then
 
   local use_coldstart=${use_coldstart:-".false."}
@@ -56,6 +58,7 @@ if [[ "${cplflx}" = ".true." ]]; then
   local ATMTILESIZE="${CASE:1}"
   local ocean_albedo_limit=0.06
   local pio_rearranger=${pio_rearranger:-"box"}
+  local MED_history_n=1000000
 fi
 
 if [[ "${cplice}" = ".true." ]]; then

From 58fca1668aecd6fb1afd12a441256ad35900e075 Mon Sep 17 00:00:00 2001
From: Rahul Mahajan
Date: Fri, 5 Jul 2024 15:02:23 -0400
Subject: [PATCH 8/8] Update (partially) global-workflow for orion+rocky9 (#2741)

This PR:
- updates a few submodules (GSI, GSI-utils, GSI-monitor, UFS_utils, GFS-utils) to include recent updates to their modulefiles for the Orion Rocky 9 upgrade
- updates the modulefiles in global-workflow to load modules from Orion+Rocky9 paths
- updates modulefiles for `gwsetup` and `gwci` as well.
- removes NCL and GEMPAK from Orion. NCL is not used and GEMPAK is not installed.
- adds `parm/config/gfs/config.resources.ORION` to address GSI performance degradation after the Rocky 9 upgrade.

This PR:
- does not update the build for UPP. Standalone UPP is not available via ufs-weather-model as of #2729
- will need a follow-up update for `prepobs` and `fit2obs` updated locations when they are installed in `glopara` space on Orion.

# Type of change
- Maintenance (code refactor, clean-up, new CI test, etc.)

# Change characteristics
- Is this a breaking change (a change in existing functionality)? NO
- Does this change require a documentation update? NO

# How has this been tested?
This PR is not sufficient for Orion. This PR must be tested on other platforms (Hera, WCOSS2) as this PR updates submodules.
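Illustrative note (editor sketch, not part of this patch): the new
`parm/config/gfs/config.resources.ORION` file only takes effect if the generic
`config.resources` sources a host-specific override after setting its defaults.
A minimal sketch of such a hook is shown below; the `machine` variable (upper-case
host name) and `EXPDIR` location are assumptions for illustration, and the actual
hook point in this repository may differ.

    # At the end of config.resources (sketch): pick up host-specific overrides
    if [[ -f "${EXPDIR}/config.resources.${machine}" ]]; then
      source "${EXPDIR}/config.resources.${machine}"
    fi

With a hook of this shape, the `case ${step}` block in the new file below simply
extends the analysis wall-clock limits on Orion and leaves every other step at its
default resources.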
# Checklist
- [ ] Any dependent changes have been merged and published
- [ ] My code follows the style guidelines of this project
- [ ] I have performed a self-review of my own code
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] My changes generate no new warnings
- [ ] New and existing tests pass with my changes
- [ ] I have made corresponding changes to the documentation if necessary

---------

Co-authored-by: Kate Friedman
---
 modulefiles/module_base.orion.lua      |  9 ++++-----
 modulefiles/module_gwci.orion.lua      |  6 +++---
 modulefiles/module_gwsetup.orion.lua   |  6 +++---
 parm/config/gfs/config.resources.ORION | 17 +++++++++++++++++
 sorc/gfs_utils.fd                      |  2 +-
 sorc/gsi_enkf.fd                       |  2 +-
 sorc/gsi_monitor.fd                    |  2 +-
 sorc/gsi_utils.fd                      |  2 +-
 sorc/ufs_utils.fd                      |  2 +-
 ush/detect_machine.sh                  |  4 ++--
 versions/build.orion.ver               |  6 +++---
 versions/run.orion.ver                 | 12 +++---------
 12 files changed, 40 insertions(+), 30 deletions(-)
 create mode 100644 parm/config/gfs/config.resources.ORION

diff --git a/modulefiles/module_base.orion.lua b/modulefiles/module_base.orion.lua
index 4d747512db..74d6f0aee6 100644
--- a/modulefiles/module_base.orion.lua
+++ b/modulefiles/module_base.orion.lua
@@ -9,8 +9,6 @@ load(pathJoin("stack-intel", (os.getenv("stack_intel_ver") or "None")))
 load(pathJoin("stack-intel-oneapi-mpi", (os.getenv("stack_impi_ver") or "None")))
 load(pathJoin("python", (os.getenv("python_ver") or "None")))
 
-load(pathJoin("gempak", (os.getenv("gempak_ver") or "None")))
-load(pathJoin("ncl", (os.getenv("ncl_ver") or "None")))
 load(pathJoin("jasper", (os.getenv("jasper_ver") or "None")))
 load(pathJoin("libpng", (os.getenv("libpng_ver") or "None")))
 load(pathJoin("cdo", (os.getenv("cdo_ver") or "None")))
@@ -44,11 +42,12 @@ setenv("WGRIB2","wgrib2")
 setenv("WGRIB","wgrib")
 setenv("UTILROOT",(os.getenv("prod_util_ROOT") or "None"))
 
---prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/prepobs/v" .. (os.getenv("prepobs_run_ver") or "None"), "modulefiles"))
-prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles"))
+--prepend_path("MODULEPATH", "/work/noaa/global/glopara/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles")
+prepend_path("MODULEPATH", pathJoin("/work/noaa/global/kfriedma/glopara/git/prepobs/v" .. (os.getenv("prepobs_run_ver") or "None"), "modulefiles"))
 load(pathJoin("prepobs", (os.getenv("prepobs_run_ver") or "None")))
 
-prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/Fit2Obs/v" .. (os.getenv("fit2obs_ver") or "None"), "modulefiles"))
+--prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/Fit2Obs/v" .. (os.getenv("fit2obs_ver") or "None"), "modulefiles"))
+prepend_path("MODULEPATH", pathJoin("/work/noaa/global/kfriedma/glopara/git/Fit2Obs/v" .. (os.getenv("fit2obs_ver") or "None"), "modulefiles"))
 load(pathJoin("fit2obs", (os.getenv("fit2obs_ver") or "None")))
 
 whatis("Description: GFS run environment")
diff --git a/modulefiles/module_gwci.orion.lua b/modulefiles/module_gwci.orion.lua
index cef7acf308..e2213932d7 100644
--- a/modulefiles/module_gwci.orion.lua
+++ b/modulefiles/module_gwci.orion.lua
@@ -2,10 +2,10 @@ help([[
 Load environment to run GFS workflow ci scripts on Orion
 ]])
 
-prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.6.0/envs/gsi-addon-env/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.6.0/envs/gsi-addon-env-rocky9/install/modulefiles/Core")
 
-load(pathJoin("stack-intel", os.getenv("2022.0.2")))
-load(pathJoin("stack-intel-oneapi-mpi", os.getenv("2021.5.1")))
+load(pathJoin("stack-intel", os.getenv("2021.9.0")))
+load(pathJoin("stack-intel-oneapi-mpi", os.getenv("2021.9.0")))
 
 load(pathJoin("netcdf-c", os.getenv("4.9.2")))
 load(pathJoin("netcdf-fortran", os.getenv("4.6.1")))
diff --git a/modulefiles/module_gwsetup.orion.lua b/modulefiles/module_gwsetup.orion.lua
index 37cb511659..b8e2fc8a9f 100644
--- a/modulefiles/module_gwsetup.orion.lua
+++ b/modulefiles/module_gwsetup.orion.lua
@@ -4,11 +4,11 @@ Load environment to run GFS workflow ci scripts on Orion
 prepend_path("MODULEPATH", "/apps/modulefiles/core")
 load(pathJoin("contrib","0.1"))
-load(pathJoin("rocoto","1.3.3"))
+load(pathJoin("rocoto","1.3.7"))
 
-prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.6.0/envs/gsi-addon-env/install/modulefiles/Core")
+prepend_path("MODULEPATH", "/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.6.0/envs/gsi-addon-env-rocky9/install/modulefiles/Core")
 
-local stack_intel_ver=os.getenv("stack_intel_ver") or "2022.0.2"
+local stack_intel_ver=os.getenv("stack_intel_ver") or "2021.9.0"
 local python_ver=os.getenv("python_ver") or "3.11.6"
 
 load(pathJoin("stack-intel", stack_intel_ver))
diff --git a/parm/config/gfs/config.resources.ORION b/parm/config/gfs/config.resources.ORION
new file mode 100644
index 0000000000..e3e81b0182
--- /dev/null
+++ b/parm/config/gfs/config.resources.ORION
@@ -0,0 +1,17 @@
+#! /usr/bin/env bash
+
+# Orion-specific job resources
+
+case ${step} in
+  "anal")
+    # TODO:
+    # On Orion, after Rocky 9 upgrade, GSI performance is degraded.
+    # Remove this block once GSI issue is resolved
+    # https://github.com/NOAA-EMC/GSI/pull/764
+    # https://github.com/JCSDA/spack-stack/issues/1166
+    export wtime_anal_gdas="02:40:00"
+    export wtime_anal_gfs="02:00:00"
+    ;;
+  *)
+    ;;
+esac
diff --git a/sorc/gfs_utils.fd b/sorc/gfs_utils.fd
index 0cdc279526..02ce084c24 160000
--- a/sorc/gfs_utils.fd
+++ b/sorc/gfs_utils.fd
@@ -1 +1 @@
-Subproject commit 0cdc2795260fc1b59e86a873729433a470794a97
+Subproject commit 02ce084c244823e22661d493a50236b7d5eaf70a
diff --git a/sorc/gsi_enkf.fd b/sorc/gsi_enkf.fd
index 8e279f9c73..529bb796be 160000
--- a/sorc/gsi_enkf.fd
+++ b/sorc/gsi_enkf.fd
@@ -1 +1 @@
-Subproject commit 8e279f9c734097f673b07e80f385b2623d13ba4a
+Subproject commit 529bb796bea0e490f186729cd168a91c034bb12d
diff --git a/sorc/gsi_monitor.fd b/sorc/gsi_monitor.fd
index f9d6f5f744..e1f9f21af1 160000
--- a/sorc/gsi_monitor.fd
+++ b/sorc/gsi_monitor.fd
@@ -1 +1 @@
-Subproject commit f9d6f5f744462a449e70abed8c5860b1c4564ad8
+Subproject commit e1f9f21af16ce912fdc2cd75c5b27094a550a0c5
diff --git a/sorc/gsi_utils.fd b/sorc/gsi_utils.fd
index 4332814529..9382fd01c2 160000
--- a/sorc/gsi_utils.fd
+++ b/sorc/gsi_utils.fd
@@ -1 +1 @@
-Subproject commit 4332814529465ab8eb58e43a38227b952ebfca49
+Subproject commit 9382fd01c2a626c8934c3f553d420a45de2b4dec
diff --git a/sorc/ufs_utils.fd b/sorc/ufs_utils.fd
index 2794d413d0..3ef2e6bd72 160000
--- a/sorc/ufs_utils.fd
+++ b/sorc/ufs_utils.fd
@@ -1 +1 @@
-Subproject commit 2794d413d083b43d9ba37a15375d5c61b610d29e
+Subproject commit 3ef2e6bd725d2662fd6ee95897cb7bac222e5144
diff --git a/ush/detect_machine.sh b/ush/detect_machine.sh
index cfd0fa97e2..b049a6040e 100755
--- a/ush/detect_machine.sh
+++ b/ush/detect_machine.sh
@@ -45,7 +45,7 @@ case $(hostname -f) in
   *) MACHINE_ID=UNKNOWN ;; # Unknown platform
 esac
 
-if [[ ${MACHINE_ID} == "UNKNOWN" ]]; then 
+if [[ ${MACHINE_ID} == "UNKNOWN" ]]; then
   case ${PW_CSP:-} in
     "aws" | "google" | "azure") MACHINE_ID=noaacloud ;;
     *) PW_CSP="UNKNOWN"
@@ -75,7 +75,7 @@ elif [[ -d /scratch1 ]]; then
   MACHINE_ID=hera
 elif [[ -d /work ]]; then
   # We are on MSU Orion or Hercules
-  mount=$(findmnt -n -o SOURCE /home) 
+  mount=$(findmnt -n -o SOURCE /home)
   if [[ ${mount} =~ "hercules" ]]; then
     MACHINE_ID=hercules
   else
diff --git a/versions/build.orion.ver b/versions/build.orion.ver
index df7856110d..834ecfc166 100644
--- a/versions/build.orion.ver
+++ b/versions/build.orion.ver
@@ -1,5 +1,5 @@
-export stack_intel_ver=2022.0.2
-export stack_impi_ver=2021.5.1
-export spack_env=gsi-addon-env
+export stack_intel_ver=2021.9.0
+export stack_impi_ver=2021.9.0
+export spack_env=gsi-addon-env-rocky9
 source "${HOMEgfs:-}/versions/build.spack.ver"
 export spack_mod_path="/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core"
diff --git a/versions/run.orion.ver b/versions/run.orion.ver
index 2fdeae8888..112636fb20 100644
--- a/versions/run.orion.ver
+++ b/versions/run.orion.ver
@@ -1,12 +1,6 @@
-export stack_intel_ver=2022.0.2
-export stack_impi_ver=2021.5.1
-export spack_env=gsi-addon-env
-
-export ncl_ver=6.6.2
-export gempak_ver=7.5.1
+export stack_intel_ver=2021.9.0
+export stack_impi_ver=2021.9.0
+export spack_env=gsi-addon-env-rocky9
 source "${HOMEgfs:-}/versions/run.spack.ver"
 export spack_mod_path="/work/noaa/epic/role-epic/spack-stack/orion/spack-stack-${spack_stack_ver}/envs/${spack_env}/install/modulefiles/Core"
-
-#cdo is older on Orion
-export cdo_ver=2.0.5
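Illustrative note (editor sketch, not part of the patch set): the version variables
updated in `versions/*.orion.ver` above feed the spack-stack module search path.
Roughly, with `HOMEgfs` set to the workflow checkout, the chain looks like the sketch
below; the explicit `module use`/`module load` calls here are only for illustration,
since the workflow actually loads these through the Lua modulefiles shown earlier.

    # Sketch: how the Rocky 9 version variables feed the module environment
    source "${HOMEgfs}/versions/run.orion.ver"   # sets stack_intel_ver, spack_env, spack_mod_path
    module use "${spack_mod_path}"               # spack-stack-1.6.0 gsi-addon-env-rocky9 tree
    module load "stack-intel/${stack_intel_ver}"            # 2021.9.0
    module load "stack-intel-oneapi-mpi/${stack_impi_ver}"  # 2021.9.0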