From b405b7d3d11d384ce9fe3b9cd2180f315f7b38f2 Mon Sep 17 00:00:00 2001 From: Dan Holdaway <27729500+danholdaway@users.noreply.github.com> Date: Wed, 8 May 2024 20:52:48 -0400 Subject: [PATCH 1/2] Use JCB for assembling JEDI YAML files for atmospheric GDAS (#2477) Change the JEDI YAML assembly for the atmospheric GDAS to use the JEDI Configuration Builder (JCB) tool so that YAMLs can be made more portable and invoke the observation chronicle mechanism. Resolves #2476 Co-authored-by: danholdaway Co-authored-by: Walter Kolczynski - NOAA --- .gitignore | 7 +++++ .gitmodules | 4 +++ parm/config/gfs/config.atmanl | 10 ++++--- parm/config/gfs/config.atmanlfv3inc | 2 +- parm/config/gfs/config.atmensanl | 5 ++-- sorc/gdas.cd | 2 +- sorc/jcb | 1 + sorc/link_workflow.sh | 17 +++++++---- ush/python/pygfs/task/analysis.py | 38 ++++++++++++++++++++---- ush/python/pygfs/task/atm_analysis.py | 8 +++-- ush/python/pygfs/task/atmens_analysis.py | 3 ++ 11 files changed, 76 insertions(+), 21 deletions(-) create mode 160000 sorc/jcb diff --git a/.gitignore b/.gitignore index 943ad64e1a..04193fca0a 100644 --- a/.gitignore +++ b/.gitignore @@ -48,6 +48,8 @@ parm/gdas/io parm/gdas/ioda parm/gdas/snow parm/gdas/soca +parm/gdas/jcb-gdas +parm/gdas/jcb-algorithms parm/monitor parm/post/AEROSOL_LUTS.dat parm/post/nam_micro_lookup.dat @@ -195,3 +197,8 @@ versions/run.ver ush/python/wxflow workflow/wxflow ci/scripts/wxflow + +# jcb checkout and symlinks +ush/python/jcb +workflow/jcb +ci/scripts/jcb diff --git a/.gitmodules b/.gitmodules index 4851e232ee..ea1b5c06af 100644 --- a/.gitmodules +++ b/.gitmodules @@ -30,3 +30,7 @@ path = sorc/upp.fd url = https://github.com/NOAA-EMC/UPP.git ignore = dirty +[submodule "sorc/jcb"] + path = sorc/jcb + url = https://github.com/noaa-emc/jcb + fetchRecurseSubmodules = false diff --git a/parm/config/gfs/config.atmanl b/parm/config/gfs/config.atmanl index 5eb692b473..dd8ca80b11 100644 --- a/parm/config/gfs/config.atmanl +++ b/parm/config/gfs/config.atmanl @@ 
-5,18 +5,20 @@ echo "BEGIN: config.atmanl" -export OBS_LIST="${PARMgfs}/gdas/atm/obs/lists/gdas_prototype_3d.yaml.j2" -export JEDIYAML="${PARMgfs}/gdas/atm/variational/3dvar_drpcg.yaml.j2" +export JCB_BASE_YAML="${PARMgfs}/gdas/atm/jcb-base.yaml.j2" +export JCB_ALGO_YAML="${PARMgfs}/gdas/atm/jcb-prototype_3dvar.yaml.j2" + export STATICB_TYPE="gsibec" +export LOCALIZATION_TYPE="bump" export INTERP_METHOD='barycentric' if [[ ${DOHYBVAR} = "YES" ]]; then # shellcheck disable=SC2153 export CASE_ANL=${CASE_ENS} - export BERROR_YAML="${PARMgfs}/gdas/atm/berror/hybvar_${STATICB_TYPE}.yaml.j2" + export BERROR_YAML="background_error_hybrid_${STATICB_TYPE}_${LOCALIZATION_TYPE}" else export CASE_ANL=${CASE} - export BERROR_YAML="${PARMgfs}/gdas/atm/berror/staticb_${STATICB_TYPE}.yaml.j2" + export BERROR_YAML="background_error_static_${STATICB_TYPE}" fi export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2" diff --git a/parm/config/gfs/config.atmanlfv3inc b/parm/config/gfs/config.atmanlfv3inc index 14c11d3dd3..ab7efa3a60 100644 --- a/parm/config/gfs/config.atmanlfv3inc +++ b/parm/config/gfs/config.atmanlfv3inc @@ -8,7 +8,7 @@ echo "BEGIN: config.atmanlfv3inc" # Get task specific resources . 
"${EXPDIR}/config.resources" atmanlfv3inc -export JEDIYAML=${PARMgfs}/gdas/atm/utils/fv3jedi_fv3inc_variational.yaml.j2 +export JCB_ALGO=fv3jedi_fv3inc_variational export JEDIEXE=${EXECgfs}/fv3jedi_fv3inc.x echo "END: config.atmanlfv3inc" diff --git a/parm/config/gfs/config.atmensanl b/parm/config/gfs/config.atmensanl index 23eab7f7b9..3484cb670d 100644 --- a/parm/config/gfs/config.atmensanl +++ b/parm/config/gfs/config.atmensanl @@ -5,8 +5,9 @@ echo "BEGIN: config.atmensanl" -export OBS_LIST="${PARMgfs}/gdas/atm/obs/lists/lgetkf_prototype.yaml.j2" -export JEDIYAML="${PARMgfs}/gdas/atm/lgetkf/lgetkf.yaml.j2" +export JCB_BASE_YAML="${PARMgfs}/gdas/atm/jcb-base.yaml.j2" +export JCB_ALGO_YAML="${PARMgfs}/gdas/atm/jcb-prototype_lgetkf.yaml.j2" + export INTERP_METHOD='barycentric' export CRTM_FIX_YAML="${PARMgfs}/gdas/atm_crtm_coeff.yaml.j2" diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 70f1319139..2b2d417a96 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 70f13191391d0909e92da47dc7d17ddf1dc4c6c6 +Subproject commit 2b2d417a96528527d7d3e7eedaccf150dc075d92 diff --git a/sorc/jcb b/sorc/jcb new file mode 160000 index 0000000000..de75655d81 --- /dev/null +++ b/sorc/jcb @@ -0,0 +1 @@ +Subproject commit de75655d81ec2ee668d8d47bf4a43625c81dde7c diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 0041ce083b..c5d7243e8f 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -88,11 +88,17 @@ if [[ "${LINK_NEST:-OFF}" == "ON" ]] ; then source "${HOMEgfs}/versions/fix.nest.ver" fi -# Link wxflow in ush/python, workflow and ci/scripts +# Link python packages in ush/python +# TODO: This will be unnecessary when these are part of the virtualenv +packages=("wxflow" "jcb") +for package in "${packages[@]}"; do + cd "${HOMEgfs}/ush/python" || exit 1 + [[ -s "${package}" ]] && rm -f "${package}" + ${LINK} "${HOMEgfs}/sorc/${package}/src/${package}" . 
+done + +# Link wxflow in workflow and ci/scripts # TODO: This will be unnecessary when wxflow is part of the virtualenv -cd "${HOMEgfs}/ush/python" || exit 1 -[[ -s "wxflow" ]] && rm -f wxflow -${LINK} "${HOMEgfs}/sorc/wxflow/src/wxflow" . cd "${HOMEgfs}/workflow" || exit 1 [[ -s "wxflow" ]] && rm -f wxflow ${LINK} "${HOMEgfs}/sorc/wxflow/src/wxflow" . @@ -100,7 +106,6 @@ cd "${HOMEgfs}/ci/scripts" || exit 1 [[ -s "wxflow" ]] && rm -f wxflow ${LINK} "${HOMEgfs}/sorc/wxflow/src/wxflow" . - # Link fix directories if [[ -n "${FIX_DIR}" ]]; then if [[ ! -d "${HOMEgfs}/fix" ]]; then mkdir "${HOMEgfs}/fix" || exit 1; fi @@ -228,7 +233,7 @@ fi #------------------------------ if [[ -d "${HOMEgfs}/sorc/gdas.cd" ]]; then cd "${HOMEgfs}/parm/gdas" || exit 1 - declare -a gdasapp_comps=("aero" "atm" "io" "ioda" "snow" "soca") + declare -a gdasapp_comps=("aero" "atm" "io" "ioda" "snow" "soca" "jcb-gdas" "jcb-algorithms") for comp in "${gdasapp_comps[@]}"; do [[ -d "${comp}" ]] && rm -rf "${comp}" ${LINK_OR_COPY} "${HOMEgfs}/sorc/gdas.cd/parm/${comp}" . 
diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index 5a516a02c8..5464c25370 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -6,8 +6,9 @@ from logging import getLogger from pprint import pformat from netCDF4 import Dataset -from typing import List, Dict, Any, Union +from typing import List, Dict, Any, Union, Optional +from jcb import render from wxflow import (parse_j2yaml, FileHandler, rm_p, logit, Task, Executable, WorkflowException, to_fv3time, to_YMD, Template, TemplateConstants) @@ -46,11 +47,14 @@ def initialize(self) -> None: self.link_jediexe() @logit(logger) - def get_jedi_config(self) -> Dict[str, Any]: + def get_jedi_config(self, algorithm: Optional[str] = None) -> Dict[str, Any]: """Compile a dictionary of JEDI configuration from JEDIYAML template file Parameters ---------- + algorithm (optional) : str + Name of the algorithm to use in the JEDI configuration. Will override the algorithm + set in the self.config.JCB_<>_YAML file Returns ---------- @@ -60,7 +64,31 @@ def get_jedi_config(self) -> Dict[str, Any]: # generate JEDI YAML file logger.info(f"Generate JEDI YAML config: {self.task_config.jedi_yaml}") - jedi_config = parse_j2yaml(self.task_config.JEDIYAML, self.task_config, searchpath=self.gdasapp_j2tmpl_dir) + + if 'JCB_BASE_YAML' in self.task_config.keys(): + # Step 1: fill templates of the jcb base YAML file + jcb_config = parse_j2yaml(self.task_config.JCB_BASE_YAML, self.task_config) + + # Step 2: (optional) fill templates of algorithm override YAML and merge + if 'JCB_ALGO_YAML' in self.task_config.keys(): + jcb_algo_config = parse_j2yaml(self.task_config.JCB_ALGO_YAML, self.task_config) + jcb_config = {**jcb_config, **jcb_algo_config} + + # If algorithm is present override the algorithm in the JEDI config + if algorithm: + jcb_config['algorithm'] = algorithm + + # Step 3: generate the JEDI Yaml using JCB driving YAML + jedi_config = render(jcb_config) + elif 
'JEDIYAML' in self.task_config.keys(): + # Generate JEDI YAML file (without using JCB) + logger.info(f"Generate JEDI YAML config: {self.task_config.jedi_yaml}") + jedi_config = parse_j2yaml(self.task_config.JEDIYAML, self.task_config, + searchpath=self.gdasapp_j2tmpl_dir) + logger.debug(f"JEDI config:\n{pformat(jedi_config)}") + else: + raise KeyError(f"Task config must contain JCB_BASE_YAML or JEDIYAML") + logger.debug(f"JEDI config:\n{pformat(jedi_config)}") return jedi_config @@ -82,7 +110,7 @@ def get_obs_dict(self) -> Dict[str, Any]: a dictionary containing the list of observation files to copy for FileHandler """ - logger.info(f"Extracting a list of observation files from {self.task_config.JEDIYAML}") + logger.info(f"Extracting a list of observation files from Jedi config file") observations = find_value_in_nested_dict(self.task_config.jedi_config, 'observations') logger.debug(f"observations:\n{pformat(observations)}") @@ -116,7 +144,7 @@ def get_bias_dict(self) -> Dict[str, Any]: a dictionary containing the list of observation bias files to copy for FileHandler """ - logger.info(f"Extracting a list of bias correction files from {self.task_config.JEDIYAML}") + logger.info(f"Extracting a list of bias correction files from Jedi config file") observations = find_value_in_nested_dict(self.task_config.jedi_config, 'observations') logger.debug(f"observations:\n{pformat(observations)}") diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 47d291268e..95545c57a4 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -49,6 +49,9 @@ def __init__(self, config): 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", 'jedi_yaml': _jedi_yaml, + 'atm_obsdatain_path': f"{self.runtime_config.DATA}/obs/", + 'atm_obsdataout_path': 
f"{self.runtime_config.DATA}/diags/", + 'BKG_TSTEP': "PT1H" # Placeholder for 4D applications } ) @@ -137,8 +140,9 @@ def variational(self: Analysis) -> None: @logit(logger) def init_fv3_increment(self: Analysis) -> None: # Setup JEDI YAML file - self.task_config.jedi_yaml = os.path.join(self.runtime_config.DATA, os.path.basename(self.task_config.JEDIYAML)) - save_as_yaml(self.get_jedi_config(), self.task_config.jedi_yaml) + self.task_config.jedi_yaml = os.path.join(self.runtime_config.DATA, + f"{self.task_config.JCB_ALGO}.yaml") + save_as_yaml(self.get_jedi_config(self.task_config.JCB_ALGO), self.task_config.jedi_yaml) # Link JEDI executable to run directory self.task_config.jedi_exe = self.link_jediexe() diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index a1aecfe07c..5aaacc42e8 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -46,6 +46,9 @@ def __init__(self, config): 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", 'jedi_yaml': _jedi_yaml, + 'atm_obsdatain_path': f"./obs/", + 'atm_obsdataout_path': f"./diags/", + 'BKG_TSTEP': "PT1H" # Placeholder for 4D applications } ) From c7b3973014480a20dd8e24edaeb83a9e9e68159f Mon Sep 17 00:00:00 2001 From: Jessica Meixner Date: Thu, 9 May 2024 11:36:58 -0400 Subject: [PATCH 2/2] Updates for cold start half cycle, then continuing with IAU for WCDA (#2560) This PR allows us to run C384 S2S with IAU, but starting with the first half-cycle as a cold-start. This will be necessary for cycled testing as we build towards the full system for GFSv17. This updates the copying of the restarts for RUN=gdas for both ocean and ice copying what the atm model is doing. It also reduced the amount of restart files from 4 to 3. 
Other updates: * Add DOJEDI ocean triggers for archiving certain files update from: @CatherineThomas-NOAA * Adds COPY_FINAL_RESTARTS option to turn on/off copying the last restart file to COM. Defaults to off... * Defines model_start_date_current_cycle & model_start_date_next_cycle to help with knowing which IC to grab. Refs #2546 Co-authored-by: Rahul Mahajan --- jobs/JGLOBAL_FORECAST | 2 +- parm/config/gefs/config.fcst | 1 + parm/config/gfs/config.fcst | 1 + scripts/exglobal_archive.sh | 5 +- ush/forecast_det.sh | 13 +--- ush/forecast_postdet.sh | 142 ++++++++++++++--------------------- ush/forecast_predet.sh | 9 ++- ush/hpssarch_gen.sh | 15 ++-- 8 files changed, 85 insertions(+), 103 deletions(-) diff --git a/jobs/JGLOBAL_FORECAST b/jobs/JGLOBAL_FORECAST index 6c4200dd6e..8d91be8a57 100755 --- a/jobs/JGLOBAL_FORECAST +++ b/jobs/JGLOBAL_FORECAST @@ -99,6 +99,6 @@ fi # Remove the Temporary working directory ########################################## cd "${DATAROOT}" || true -[[ "${KEEPDATA}" == "NO" ]] && rm -rf "${DATA} ${DATArestart}" # do not remove DATAjob. It contains DATAoutput +[[ "${KEEPDATA}" == "NO" ]] && rm -rf "${DATA}" "${DATArestart}" # do not remove DATAjob. 
It contains DATAoutput exit 0 diff --git a/parm/config/gefs/config.fcst b/parm/config/gefs/config.fcst index 5c592556c8..9e5904d689 100644 --- a/parm/config/gefs/config.fcst +++ b/parm/config/gefs/config.fcst @@ -6,6 +6,7 @@ echo "BEGIN: config.fcst" export USE_ESMF_THREADING="YES" # Toggle to use ESMF-managed threading or traditional threading in UFSWM +export COPY_FINAL_RESTARTS="NO" # Toggle to copy restarts from the end of GFS/GEFS Run (GDAS is handled separately) # Turn off waves if not used for this CDUMP case ${WAVE_CDUMP} in diff --git a/parm/config/gfs/config.fcst b/parm/config/gfs/config.fcst index 63273e0fe4..81fda1942a 100644 --- a/parm/config/gfs/config.fcst +++ b/parm/config/gfs/config.fcst @@ -6,6 +6,7 @@ echo "BEGIN: config.fcst" export USE_ESMF_THREADING="YES" # Toggle to use ESMF-managed threading or traditional threading in UFSWM +export COPY_FINAL_RESTARTS="NO" # Toggle to copy restarts from the end of GFS/GEFS Run (GDAS is handled separately) # Turn off waves if not used for this CDUMP case ${WAVE_CDUMP} in diff --git a/scripts/exglobal_archive.sh b/scripts/exglobal_archive.sh index 5842c76b57..acb926d0e6 100755 --- a/scripts/exglobal_archive.sh +++ b/scripts/exglobal_archive.sh @@ -237,7 +237,10 @@ if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then #gdasocean if [ "${DO_OCN}" = "YES" ]; then - targrp_list="${targrp_list} gdasocean gdasocean_analysis" + targrp_list="${targrp_list} gdasocean" + if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + targrp_list="${targrp_list} gdasocean_analysis" + fi fi #gdasice diff --git a/ush/forecast_det.sh b/ush/forecast_det.sh index de2a47c921..e4b9ded3d3 100755 --- a/ush/forecast_det.sh +++ b/ush/forecast_det.sh @@ -6,15 +6,9 @@ UFS_det(){ echo "SUB ${FUNCNAME[0]}: Run type determination for UFS" # Determine if the current cycle is a warm start (based on the availability of restarts) - if [[ "${DOIAU:-}" == "YES" ]]; then - if [[ -f 
"${COM_ATMOS_RESTART_PREV}/${current_cycle_begin:0:8}.${current_cycle_begin:8:2}0000.coupler.res" ]]; then - warm_start=".true." - fi - else - if [[ -f "${COM_ATMOS_RESTART_PREV}/${current_cycle:0:8}.${current_cycle:8:2}0000.coupler.res" ]]; then - warm_start=".true." - fi - fi + if [[ -f "${COM_ATMOS_RESTART_PREV}/${model_start_date_current_cycle:0:8}.${model_start_date_current_cycle:8:2}0000.coupler.res" ]]; then + warm_start=".true." + fi # If restarts were not available, this is likely a cold start if [[ "${warm_start}" == ".false." ]]; then @@ -30,6 +24,7 @@ UFS_det(){ # Since warm start is false, we cannot do IAU DOIAU="NO" IAU_OFFSET=0 + model_start_date_current_cycle=${current_cycle} # It is still possible that a restart is available from a previous forecast attempt # So we have to continue checking for restarts diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 8ea556055d..9c8858ec3d 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -48,11 +48,7 @@ FV3_postdet() { restart_date="${RERUN_DATE}" restart_dir="${DATArestart}/FV3_RESTART" else # "${RERUN}" == "NO" - if [[ "${DOIAU}" == "YES" ]]; then - restart_date="${current_cycle_begin}" - else - restart_date="${current_cycle}" - fi + restart_date="${model_start_date_current_cycle}" restart_dir="${COM_ATMOS_RESTART_PREV}" fi @@ -92,11 +88,10 @@ FV3_postdet() { # Need a coupler.res that is consistent with the model start time if [[ "${DOIAU}" == "YES" ]]; then local model_start_time="${previous_cycle}" - local model_current_time="${current_cycle_begin}" else local model_start_time="${current_cycle}" - local model_current_time="${current_cycle}" fi + local model_current_time="${model_start_date_current_cycle}" rm -f "${DATA}/INPUT/coupler.res" cat >> "${DATA}/INPUT/coupler.res" << EOF 3 (Calendar: no_calendar=0, thirty_day_months=1, julian=2, gregorian=3, noleap=4) @@ -258,13 +253,15 @@ FV3_out() { # Copy the final restart files at the end of the forecast segment # The final 
restart written at the end of the forecast does not include the valid date # TODO: verify the above statement since RM found that it did! - echo "Copying FV3 restarts for 'RUN=${RUN}' at the end of the forecast segment: ${forecast_end_cycle}" - for fv3_restart_file in "${fv3_restart_files[@]}"; do - restart_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.${fv3_restart_file}" - ${NCP} "${DATArestart}/FV3_RESTART/${restart_file}" \ - "${COM_ATMOS_RESTART}/${restart_file}" - done - + # TODO: For other components, this is only for gfs/gefs - check to see if this should also have this + if [[ "${COPY_FINAL_RESTARTS}" == "YES" ]]; then + echo "Copying FV3 restarts for 'RUN=${RUN}' at the end of the forecast segment: ${forecast_end_cycle}" + for fv3_restart_file in "${fv3_restart_files[@]}"; do + restart_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.${fv3_restart_file}" + ${NCP} "${DATArestart}/FV3_RESTART/${restart_file}" \ + "${COM_ATMOS_RESTART}/${restart_file}" + done + fi echo "SUB ${FUNCNAME[0]}: Output data for FV3 copied" } @@ -281,11 +278,7 @@ WW3_postdet() { restart_date="${RERUN_DATE}" restart_dir="${DATArestart}/WW3_RESTART" else - if [[ "${DOIAU}" == "YES" ]]; then - restart_date="${current_cycle_begin}" - else - restart_date="${current_cycle}" - fi + restart_date="${model_start_date_current_cycle}" restart_dir="${COM_WAVE_RESTART_PREV}" fi echo "Copying WW3 restarts for 'RUN=${RUN}' at '${restart_date}' from '${restart_dir}'" @@ -384,11 +377,7 @@ MOM6_postdet() { restart_date="${RERUN_DATE}" else # "${RERUN}" == "NO" restart_dir="${COM_OCEAN_RESTART_PREV}" - if [[ "${DOIAU}" == "YES" ]]; then - restart_date="${current_cycle_begin}" - else - restart_date="${current_cycle}" - fi + restart_date="${model_start_date_current_cycle}" fi # Copy MOM6 ICs @@ -489,11 +478,11 @@ MOM6_out() { # Coarser than 1/2 degree has a single MOM restart local mom6_restart_files mom6_restart_file restart_file mom6_restart_files=(MOM.res.nc) - # 1/4 
degree resolution has 4 additional restarts + # 1/4 degree resolution has 3 additional restarts case "${OCNRES}" in "025") local nn - for (( nn = 1; nn <= 4; nn++ )); do + for (( nn = 1; nn <= 3; nn++ )); do mom6_restart_files+=("MOM.res_${nn}.nc") done ;; @@ -501,24 +490,22 @@ MOM6_out() { esac # Copy MOM6 restarts at the end of the forecast segment to COM for RUN=gfs|gefs - local restart_file - if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then - echo "Copying MOM6 restarts for 'RUN=${RUN}' at ${forecast_end_cycle}" - for mom6_restart_file in "${mom6_restart_files[@]}"; do - restart_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.${mom6_restart_file}" - ${NCP} "${DATArestart}/MOM6_RESTART/${restart_file}" \ - "${COM_OCEAN_RESTART}/${restart_file}" - done + if [[ "${COPY_FINAL_RESTARTS}" == "YES" ]]; then + local restart_file + if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then + echo "Copying MOM6 restarts for 'RUN=${RUN}' at ${forecast_end_cycle}" + for mom6_restart_file in "${mom6_restart_files[@]}"; do + restart_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.${mom6_restart_file}" + ${NCP} "${DATArestart}/MOM6_RESTART/${restart_file}" \ + "${COM_OCEAN_RESTART}/${restart_file}" + done + fi fi - # Copy restarts at the beginning/middle of the next assimilation cycle to COM for RUN=gdas|enkfgdas|enkfgfs + # Copy restarts for the next cycle for RUN=gdas|enkfgdas|enkfgfs if [[ "${RUN}" =~ "gdas" || "${RUN}" == "enkfgfs" ]]; then local restart_date - if [[ "${DOIAU}" == "YES" ]]; then # Copy restarts at the beginning of the next cycle from DATA to COM - restart_date="${next_cycle_begin}" - else # Copy restarts at the middle of the next cycle from DATA to COM - restart_date="${next_cycle}" - fi + restart_date="${model_start_date_next_cycle}" echo "Copying MOM6 restarts for 'RUN=${RUN}' at ${restart_date}" for mom6_restart_file in "${mom6_restart_files[@]}"; do 
restart_file="${restart_date:0:8}.${restart_date:8:2}0000.${mom6_restart_file}" @@ -526,7 +513,6 @@ MOM6_out() { "${COM_OCEAN_RESTART}/${restart_file}" done fi - } CICE_postdet() { @@ -539,11 +525,7 @@ CICE_postdet() { seconds=$(to_seconds "${restart_date:8:2}0000") # convert HHMMSS to seconds cice_restart_file="${DATArestart}/CICE_RESTART/cice_model.res.${restart_date:0:4}-${restart_date:4:2}-${restart_date:6:2}-${seconds}.nc" else # "${RERUN}" == "NO" - if [[ "${DOIAU}" == "YES" ]]; then - restart_date="${current_cycle_begin}" - else - restart_date="${current_cycle}" - fi + restart_date="${model_start_date_current_cycle}" cice_restart_file="${COM_ICE_RESTART_PREV}/${restart_date:0:8}.${restart_date:8:2}0000.cice_model.res.nc" fi @@ -554,8 +536,8 @@ CICE_postdet() { # Link iceh_ic file to COM. This is the initial condition file from CICE (f000) # TODO: Is this file needed in COM? Is this going to be used for generating any products? local vdate seconds vdatestr fhr fhr3 interval last_fhr - seconds=$(to_seconds "${current_cycle:8:2}0000") # convert HHMMSS to seconds - vdatestr="${current_cycle:0:4}-${current_cycle:4:2}-${current_cycle:6:2}-${seconds}" + seconds=$(to_seconds "${model_start_date_current_cycle:8:2}0000") # convert HHMMSS to seconds + vdatestr="${model_start_date_current_cycle:0:4}-${model_start_date_current_cycle:4:2}-${model_start_date_current_cycle:6:2}-${seconds}" ${NLN} "${COM_ICE_HISTORY}/${RUN}.ice.t${cyc}z.ic.nc" "${DATA}/CICE_OUTPUT/iceh_ic.${vdatestr}.nc" # Link CICE forecast output files from DATA/CICE_OUTPUT to COM @@ -601,24 +583,22 @@ CICE_out() { ${NCP} "${DATA}/ice_in" "${COM_CONF}/ufs.ice_in" # Copy CICE restarts at the end of the forecast segment to COM for RUN=gfs|gefs - local seconds source_file target_file - if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then - echo "Copying CICE restarts for 'RUN=${RUN}' at ${forecast_end_cycle}" - seconds=$(to_seconds "${forecast_end_cycle:8:2}0000") # convert HHMMSS to seconds - 
source_file="cice_model.res.${forecast_end_cycle:0:4}-${forecast_end_cycle:4:2}-${forecast_end_cycle:6:2}-${seconds}.nc" - target_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.cice_model.res.nc" - ${NCP} "${DATArestart}/CICE_RESTART/${source_file}" \ - "${COM_ICE_RESTART}/${target_file}" + if [[ "${COPY_FINAL_RESTARTS}" == "YES" ]]; then + local seconds source_file target_file + if [[ "${RUN}" == "gfs" || "${RUN}" == "gefs" ]]; then + echo "Copying CICE restarts for 'RUN=${RUN}' at ${forecast_end_cycle}" + seconds=$(to_seconds "${forecast_end_cycle:8:2}0000") # convert HHMMSS to seconds + source_file="cice_model.res.${forecast_end_cycle:0:4}-${forecast_end_cycle:4:2}-${forecast_end_cycle:6:2}-${seconds}.nc" + target_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.cice_model.res.nc" + ${NCP} "${DATArestart}/CICE_RESTART/${source_file}" \ + "${COM_ICE_RESTART}/${target_file}" + fi fi - # Copy restarts at the beginning/middle of the next assimilation cycle to COM for RUN=gdas|enkfgdas|enkfgfs + # Copy restarts for next cycle for RUN=gdas|enkfgdas|enkfgfs if [[ "${RUN}" =~ "gdas" || "${RUN}" == "enkfgfs" ]]; then local restart_date - if [[ "${DOIAU}" == "YES" ]]; then # Copy restarts at the beginning of the next cycle from DATA to COM - restart_date="${next_cycle_begin}" - else # Copy restarts at the middle of the next cycle from DATA to COM - restart_date="${next_cycle}" - fi + restart_date="${model_start_date_next_cycle}" echo "Copying CICE restarts for 'RUN=${RUN}' at ${restart_date}" seconds=$(to_seconds "${restart_date:8:2}0000") # convert HHMMSS to seconds source_file="cice_model.res.${restart_date:0:4}-${restart_date:4:2}-${restart_date:6:2}-${seconds}.nc" @@ -706,11 +686,7 @@ CMEPS_postdet() { seconds=$(to_seconds "${restart_date:8:2}0000") # convert HHMMSS to seconds cmeps_restart_file="${DATArestart}/CMEPS_RESTART/ufs.cpld.cpl.r.${restart_date:0:4}-${restart_date:4:2}-${restart_date:6:2}-${seconds}.nc" else # "${RERUN}" == "NO" - 
if [[ "${DOIAU}" == "YES" ]]; then - restart_date="${current_cycle_begin}" - else - restart_date="${current_cycle}" - fi + restart_date="${model_start_date_current_cycle}" cmeps_restart_file="${COM_MED_RESTART_PREV}/${restart_date:0:8}.${restart_date:8:2}0000.ufs.cpld.cpl.r.nc" fi @@ -740,26 +716,24 @@ CMEPS_out() { echo "SUB ${FUNCNAME[0]}: Copying output data for CMEPS mediator" # Copy mediator restarts at the end of the forecast segment to COM for RUN=gfs|gefs - echo "Copying mediator restarts for 'RUN=${RUN}' at ${forecast_end_cycle}" - local seconds source_file target_file - seconds=$(to_seconds "${forecast_end_cycle:8:2}"0000) - source_file="ufs.cpld.cpl.r.${forecast_end_cycle:0:4}-${forecast_end_cycle:4:2}-${forecast_end_cycle:6:2}-${seconds}.nc" - target_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.ufs.cpld.cpl.r.nc" - if [[ -f "${DATArestart}/CMEPS_RESTART/${source_file}" ]]; then - ${NCP} "${DATArestart}/CMEPS_RESTART/${source_file}" \ - "${COM_MED_RESTART}/${target_file}" - else - echo "Mediator restart '${DATArestart}/CMEPS_RESTART/${source_file}' not found." - fi + if [[ "${COPY_FINAL_RESTARTS}" == "YES" ]]; then + echo "Copying mediator restarts for 'RUN=${RUN}' at ${forecast_end_cycle}" + local seconds source_file target_file + seconds=$(to_seconds "${forecast_end_cycle:8:2}"0000) + source_file="ufs.cpld.cpl.r.${forecast_end_cycle:0:4}-${forecast_end_cycle:4:2}-${forecast_end_cycle:6:2}-${seconds}.nc" + target_file="${forecast_end_cycle:0:8}.${forecast_end_cycle:8:2}0000.ufs.cpld.cpl.r.nc" + if [[ -f "${DATArestart}/CMEPS_RESTART/${source_file}" ]]; then + ${NCP} "${DATArestart}/CMEPS_RESTART/${source_file}" \ + "${COM_MED_RESTART}/${target_file}" + else + echo "Mediator restart '${DATArestart}/CMEPS_RESTART/${source_file}' not found." 
+ fi + fi - # Copy restarts at the beginning/middle of the next assimilation cycle to COM for RUN=gdas|enkfgdas|enkfgfs + # Copy restarts for the next cycle to COM for RUN=gdas|enkfgdas|enkfgfs if [[ "${RUN}" =~ "gdas" || "${RUN}" == "enkfgfs" ]]; then local restart_date - if [[ "${DOIAU}" == "YES" ]]; then # Copy restarts at the beginning of the next cycle from DATA to COM - restart_date="${next_cycle_begin}" - else # Copy restarts at the middle of the next cycle from DATA to COM - restart_date="${next_cycle}" - fi + restart_date="${model_start_date_next_cycle}" echo "Copying mediator restarts for 'RUN=${RUN}' at ${restart_date}" seconds=$(to_seconds "${restart_date:8:2}"0000) source_file="ufs.cpld.cpl.r.${restart_date:0:4}-${restart_date:4:2}-${restart_date:6:2}-${seconds}.nc" diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh index c300067ce9..de414437b1 100755 --- a/ush/forecast_predet.sh +++ b/ush/forecast_predet.sh @@ -54,7 +54,14 @@ common_predet(){ current_cycle_begin=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} - ${half_window} hours" +%Y%m%d%H) current_cycle_end=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${half_window} hours" +%Y%m%d%H) next_cycle_begin=$(date --utc -d "${next_cycle:0:8} ${next_cycle:8:2} - ${half_window} hours" +%Y%m%d%H) - next_cycle_end=$(date --utc -d "${next_cycle:0:8} ${next_cycle:8:2} + ${half_window} hours" +%Y%m%d%H) + #Define model start date for current_cycle and next_cycle as the time the forecast will start + if [[ "${DOIAU:-}" == "YES" ]]; then + model_start_date_current_cycle="${current_cycle_begin}" + model_start_date_next_cycle="${next_cycle_begin}" + else + model_start_date_current_cycle=${current_cycle} + model_start_date_next_cycle=${next_cycle} + fi forecast_end_cycle=$(date --utc -d "${current_cycle:0:8} ${current_cycle:8:2} + ${FHMAX} hours" +%Y%m%d%H) FHMIN=${FHMIN:-0} diff --git a/ush/hpssarch_gen.sh b/ush/hpssarch_gen.sh index 1b4329c58f..101745da8e 100755 --- 
a/ush/hpssarch_gen.sh +++ b/ush/hpssarch_gen.sh @@ -560,13 +560,14 @@ if [[ ${type} == "gdas" ]]; then echo "${COM_MED_RESTART/${ROTDIR}\//}/*" } >> "${DATA}/gdasocean_restart.txt" - { - echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/${head}*" - echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/gdas.t??z.ocngrid.nc" - echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/diags" - echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/yaml" - } >> "${DATA}/gdasocean_analysis.txt" - + if [[ ${DO_JEDIOCNVAR} = "YES" ]]; then + { + echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/${head}*" + echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/gdas.t??z.ocngrid.nc" + echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/diags" + echo "${COM_OCEAN_ANALYSIS/${ROTDIR}\//}/yaml" + } >> "${DATA}/gdasocean_analysis.txt" + fi fi if [[ ${DO_ICE} = "YES" ]]; then