From 848659691fdbf47e7ccdbbb2ebf22a6e470633a2 Mon Sep 17 00:00:00 2001 From: Guillaume Vernieres Date: Wed, 24 Jul 2024 15:00:35 -0400 Subject: [PATCH 1/3] Refactoring of the marine B-matrix job (#2749) Refactor the functionality of B-matrix generation from the GDASApp Resolves #2743 --- ci/cases/gfsv17/ocnanal.yaml | 9 +- env/HERA.env | 12 +- env/HERCULES.env | 22 +- env/JET.env | 8 +- env/ORION.env | 8 +- env/S4.env | 6 +- jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT | 48 --- jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY | 47 --- jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP | 4 + jobs/JGLOBAL_ARCHIVE | 2 + jobs/JGLOBAL_MARINE_BMAT | 66 ++++ jobs/rocoto/{ocnanalbmat.sh => marinebmat.sh} | 5 +- parm/archive/gdas.yaml.j2 | 2 +- parm/archive/gdasocean_analysis.yaml.j2 | 27 +- parm/config/gfs/config.com | 2 + parm/config/gfs/config.marinebmat | 11 + parm/config/gfs/config.ocnanal | 16 +- parm/config/gfs/config.ocnanalbmat | 11 - parm/config/gfs/config.resources | 12 +- parm/config/gfs/yaml/defaults.yaml | 12 +- scripts/exglobal_marinebmat.py | 24 ++ scripts/exglobal_prep_snow_obs.py | 2 +- sorc/gdas.cd | 2 +- sorc/link_workflow.sh | 7 +- ush/python/pygfs/__init__.py | 2 + ush/python/pygfs/task/marine_bmat.py | 350 ++++++++++++++++++ ush/python/pygfs/task/marine_letkf.py | 2 +- ush/python/pygfs/utils/__init__.py | 0 ush/python/pygfs/utils/marine_da_utils.py | 99 +++++ versions/fix.ver | 1 + workflow/applications/gfs_cycled.py | 4 +- workflow/rocoto/gfs_tasks.py | 35 +- workflow/rocoto/tasks.py | 2 +- 33 files changed, 655 insertions(+), 205 deletions(-) delete mode 100755 jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT delete mode 100755 jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY create mode 100755 jobs/JGLOBAL_MARINE_BMAT rename jobs/rocoto/{ocnanalbmat.sh => marinebmat.sh} (79%) create mode 100644 parm/config/gfs/config.marinebmat delete mode 100644 parm/config/gfs/config.ocnanalbmat create mode 100755 scripts/exglobal_marinebmat.py create mode 100644 ush/python/pygfs/task/marine_bmat.py 
create mode 100644 ush/python/pygfs/utils/__init__.py create mode 100644 ush/python/pygfs/utils/marine_da_utils.py diff --git a/ci/cases/gfsv17/ocnanal.yaml b/ci/cases/gfsv17/ocnanal.yaml index 9024afcb31..a2d7363c18 100644 --- a/ci/cases/gfsv17/ocnanal.yaml +++ b/ci/cases/gfsv17/ocnanal.yaml @@ -14,17 +14,14 @@ base: DO_VERFRAD: "YES" DO_VRFY_OCEANDA: "NO" FHMAX_GFS: 240 + ACCOUNT: {{ 'HPC_ACCOUNT' | getenv }} ocnanal: - SOCA_INPUT_FIX_DIR: /scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/1440x1080x75/soca - CASE_ANL: 'C24' + SOCA_INPUT_FIX_DIR: {{ FIXgfs }}/gdas/soca/1440x1080x75/soca SOCA_OBS_LIST: {{ HOMEgfs }}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml SOCA_NINNER: 100 - SABER_BLOCKS_YAML: '' - NICAS_RESOL: 1 - NICAS_GRID_SIZE: 15000 prepoceanobs: - SOCA_OBS_LIST: {{ HOMEgfs }}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml + SOCA_OBS_LIST: {{ HOMEgfs }}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml OBSPREP_YAML: {{ HOMEgfs }}/sorc/gdas.cd/parm/soca/obsprep/obsprep_config.yaml DMPDIR: /scratch1/NCEPDEV/da/common/ diff --git a/env/HERA.env b/env/HERA.env index b743a19a62..98ac04ced3 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -77,7 +77,7 @@ elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${nth_atmensanlfv3inc:-${nth_max}} [[ ${NTHREADS_ATMENSANLFV3INC} -gt ${nth_max} ]] && export NTHREADS_ATMENSANLFV3INC=${nth_max} - export APRUN_ATMENSANLFV3INC="${launcher} -n ${npe_atmensanlfv3inc} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" + export APRUN_ATMENSANLFV3INC="${launcher} -n ${npe_atmensanlfv3inc} --cpus-per-task=${NTHREADS_ATMENSANLFV3INC}" elif [[ "${step}" = "aeroanlrun" ]]; then @@ -114,11 +114,17 @@ elif [[ "${step}" = "snowanl" ]]; then export APRUN_APPLY_INCR="${launcher} -n 6" -elif [[ "${step}" = "ocnanalbmat" ]]; then +elif [[ "${step}" = "marinebmat" ]]; then export APRUNCFP="${launcher} -n \$ncmd --multi-prog" - export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalbmat}" + export APRUN_MARINEBMAT="${launcher} -n 
${npe_marinebmat}" + +elif [[ "${step}" = "marinebmat" ]]; then + + export APRUNCFP="${launcher} -n \$ncmd --multi-prog" + + export APRUN_OCNANAL="${launcher} -n ${npe_marinebmat}" elif [[ "${step}" = "ocnanalrun" ]]; then diff --git a/env/HERCULES.env b/env/HERCULES.env index 79424f8639..f94bae73cc 100755 --- a/env/HERCULES.env +++ b/env/HERCULES.env @@ -112,25 +112,15 @@ case ${step} in export APRUN_APPLY_INCR="${launcher} -n 6" ;; - "ocnanalbmat") + "marinebmat") - export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - - nth_max=$((npe_node_max / npe_node_ocnanalbmat)) - - export NTHREADS_OCNANAL=${nth_ocnanalbmat:-${nth_max}} - [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} - export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalbmat} --cpus-per-task=${NTHREADS_OCNANAL}" + export APRUNCFP="${launcher} -n \$ncmd --multi-prog" + export APRUN_MARINEBMAT="${launcher} -n ${npe_marinebmat}" ;; "ocnanalrun") - - export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - - nth_max=$((npe_node_max / npe_node_ocnanalrun)) - - export NTHREADS_OCNANAL=${nth_ocnanalrun:-${nth_max}} - [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} - export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalrun} --cpus-per-task=${NTHREADS_OCNANAL}" + + export APRUNCFP="${launcher} -n \$ncmd --multi-prog" + export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalrun}" ;; "ocnanalecen") diff --git a/env/JET.env b/env/JET.env index bb9826f331..956762a921 100755 --- a/env/JET.env +++ b/env/JET.env @@ -102,15 +102,15 @@ elif [[ "${step}" = "atmanlfv3inc" ]]; then [[ ${NTHREADS_ATMANLFV3INC} -gt ${nth_max} ]] && export NTHREADS_ATMANLFV3INC=${nth_max} export APRUN_ATMANLFV3INC="${launcher} -n ${npe_atmanlfv3inc}" -elif [[ "${step}" = "ocnanalbmat" ]]; then +elif [[ "${step}" = "marinebmat" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - nth_max=$((npe_node_max / npe_node_ocnanalbmat)) + nth_max=$((npe_node_max / npe_node_marinebmat)) - export 
NTHREADS_OCNANAL=${nth_ocnanalbmat:-${nth_max}} + export NTHREADS_OCNANAL=${nth_marinebmat:-${nth_max}} [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} - export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalbmat}" + export APRUN_OCNANAL="${launcher} -n ${npe_marinebmat}" elif [[ "${step}" = "ocnanalrun" ]]; then diff --git a/env/ORION.env b/env/ORION.env index c203acae48..71ba7bbda3 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -110,15 +110,15 @@ elif [[ "${step}" = "atmanlfv3inc" ]]; then [[ ${NTHREADS_ATMANLFV3INC} -gt ${nth_max} ]] && export NTHREADS_ATMANLFV3INC=${nth_max} export APRUN_ATMANLFV3INC="${launcher} -n ${npe_atmanlfv3inc} --cpus-per-task=${NTHREADS_ATMANLFV3INC}" -elif [[ "${step}" = "ocnanalbmat" ]]; then +elif [[ "${step}" = "marinebmat" ]]; then export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - nth_max=$((npe_node_max / npe_node_ocnanalbmat)) + nth_max=$((npe_node_max / npe_node_marinebmat)) - export NTHREADS_OCNANAL=${nth_ocnanalbmat:-${nth_max}} + export NTHREADS_OCNANAL=${nth_marinebmat:-${nth_max}} [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} - export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalbmat} --cpus-per-task=${NTHREADS_OCNANAL}" + export APRUN_OCNANAL="${launcher} -n ${npe_marinebmat} --cpus-per-task=${NTHREADS_OCNANAL}" elif [[ "${step}" = "ocnanalrun" ]]; then diff --git a/env/S4.env b/env/S4.env index 190c7295f4..5e768d889d 100755 --- a/env/S4.env +++ b/env/S4.env @@ -65,7 +65,7 @@ elif [[ "${step}" = "atmensanlfv3inc" ]]; then export NTHREADS_ATMENSANLFV3INC=${nth_atmensanlfv3inc:-${nth_max}} [[ ${NTHREADS_ATMENSANLFV3INC} -gt ${nth_max} ]] && export NTHREADS_ATMENSANLFV3INC=${nth_max} - export APRUN_ATMENSANLFV3INC="${launcher} -n ${npe_atmensanlfv3inc}" + export APRUN_ATMENSANLFV3INC="${launcher} -n ${npe_atmensanlfv3inc}" elif [[ "${step}" = "aeroanlrun" ]]; then @@ -102,10 +102,10 @@ elif [[ "${step}" = "atmanlfv3inc" ]]; then [[ ${NTHREADS_ATMANLFV3INC} -gt 
${nth_max} ]] && export NTHREADS_ATMANLFV3INC=${nth_max} export APRUN_ATMANLFV3INC="${launcher} -n ${npe_atmanlfv3inc}" -elif [[ "${step}" = "ocnanalbmat" ]]; then +elif [[ "${step}" = "marinebmat" ]]; then echo "WARNING: ${step} is not enabled on S4!" -elif [[ "${step}" = "ocnanalrun" ]]; then +elif [[ "${step}" = "marinerun" ]]; then echo "WARNING: ${step} is not enabled on S4!" elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT deleted file mode 100755 index 90902ba3c3..0000000000 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT +++ /dev/null @@ -1,48 +0,0 @@ -#!/bin/bash -source "${HOMEgfs}/ush/preamble.sh" -export WIPE_DATA="NO" - -export DATA="${DATAROOT}/${RUN}ocnanal_${cyc}" -source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalbmat" -c "base ocnanal ocnanalbmat" - - -############################################## -# Set variables used in the script -############################################## - - -############################################## -# Begin JOB SPECIFIC work -############################################## - -YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OCEAN_ANALYSIS - -mkdir -p "${COM_OCEAN_ANALYSIS}" - -export COMOUT=${COM_OCEAN_ANALYSIS} - -############################################################### -# Run relevant script - -EXSCRIPT=${GDASOCNBMATSH:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_bmat.sh} -${EXSCRIPT} -status=$? 
-[[ ${status} -ne 0 ]] && exit "${status}" - -############################################## -# End JOB SPECIFIC work -############################################## - -############################################## -# Final processing -############################################## -if [[ -e "${pgmout}" ]] ; then - cat "${pgmout}" -fi - -########################################## -# Do not remove the Temporary working directory (do this in POST) -########################################## -cd "${DATAROOT}" || exit 1 - -exit 0 diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY deleted file mode 100755 index 3727ba9853..0000000000 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY +++ /dev/null @@ -1,47 +0,0 @@ -#!/bin/bash -source "${HOMEgfs}/ush/preamble.sh" -export WIPE_DATA="NO" -export DATA="${DATAROOT}/${RUN}ocnanal_${cyc}" -source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalrun" -c "base ocnanal ocnanalrun" - - -############################################## -# Set variables used in the script -############################################## - - -############################################## -# Begin JOB SPECIFIC work -############################################## - -YMD=${PDY} HH=${cyc} declare_from_tmpl -rx COM_OCEAN_ANALYSIS - -mkdir -p "${COM_OCEAN_ANALYSIS}" - -export COMOUT=${COM_OCEAN_ANALYSIS} - -############################################################### -# Run relevant script - -EXSCRIPT=${GDASOCNMBATVRFYSH:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_bmat_vrfy.sh} -${EXSCRIPT} -status=$? 
-[[ ${status} -ne 0 ]] && exit "${status}" - -############################################## -# End JOB SPECIFIC work -############################################## - -############################################## -# Final processing -############################################## -if [[ -e "${pgmout}" ]] ; then - cat "${pgmout}" -fi - -########################################## -# Do not remove the Temporary working directory (do this in POST) -########################################## -cd "${DATAROOT}" || exit 1 - -exit 0 diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP index 73c5c4db75..bf714939f5 100755 --- a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP @@ -28,6 +28,10 @@ RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ COM_ICE_HISTORY_PREV:COM_ICE_HISTORY_TMPL \ COM_ICE_RESTART_PREV:COM_ICE_RESTART_TMPL +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ + COMIN_OCEAN_BMATRIX:COM_OCEAN_BMATRIX_TMPL \ + COMIN_ICE_BMATRIX:COM_ICE_BMATRIX_TMPL + ############################################## # Begin JOB SPECIFIC work ############################################## diff --git a/jobs/JGLOBAL_ARCHIVE b/jobs/JGLOBAL_ARCHIVE index 28bd820de1..6e9d671285 100755 --- a/jobs/JGLOBAL_ARCHIVE +++ b/jobs/JGLOBAL_ARCHIVE @@ -35,6 +35,8 @@ YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ COMIN_OCEAN_GRIB:COM_OCEAN_GRIB_TMPL \ COMIN_OCEAN_NETCDF:COM_OCEAN_NETCDF_TMPL \ COMIN_OCEAN_ANALYSIS:COM_OCEAN_ANALYSIS_TMPL \ + COMIN_OCEAN_BMATRIX:COM_OCEAN_BMATRIX_TMPL \ + COMIN_ICE_BMATRIX:COM_ICE_BMATRIX_TMPL \ COMIN_WAVE_GRID:COM_WAVE_GRID_TMPL \ COMIN_WAVE_HISTORY:COM_WAVE_HISTORY_TMPL \ COMIN_WAVE_STATION:COM_WAVE_STATION_TMPL \ diff --git a/jobs/JGLOBAL_MARINE_BMAT b/jobs/JGLOBAL_MARINE_BMAT new file mode 100755 index 0000000000..3dacec9278 --- /dev/null +++ b/jobs/JGLOBAL_MARINE_BMAT @@ -0,0 +1,66 @@ +#!/bin/bash + +source "${HOMEgfs}/ush/preamble.sh" + +if (( 10#${ENSMEM:-0} > 0 )); then + export 
DATAjob="${DATAROOT}/${RUN}marinebmat.${PDY:-}${cyc}" + export DATA="${DATAjob}/${jobid}" + # Create the directory to hold ensemble perturbations + export DATAenspert="${DATAjob}/enspert" + if [[ ! -d "${DATAenspert}" ]]; then mkdir -p "${DATAenspert}"; fi +fi + +# source config.base, config.ocnanal and config.marinebmat +# and pass marinebmat to ${machine}.env +source "${HOMEgfs}/ush/jjob_header.sh" -e "marinebmat" -c "base ocnanal marinebmat" + +############################################## +# Set variables used in the script +############################################## +# shellcheck disable=SC2153 +GDATE=$(date --utc +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} +export GDUMP="gdas" +export GDUMP_ENS="enkf${GDUMP}" + +############################################## +# Begin JOB SPECIFIC work +############################################## + +# Generate COM variables from templates +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ + COMIN_OCEAN_HISTORY_PREV:COM_OCEAN_HISTORY_TMPL \ + COMIN_ICE_HISTORY_PREV:COM_ICE_HISTORY_TMPL + +RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} declare_from_tmpl -rx \ + COMIN_OCEAN_HISTORY_ENS_PREV:COM_OCEAN_HISTORY_TMPL \ + COMIN_ICE_HISTORY_ENS_PREV:COM_ICE_HISTORY_TMPL + +YMD=${PDY} HH=${cyc} declare_from_tmpl -rx \ + COMOUT_OCEAN_BMATRIX:COM_OCEAN_BMATRIX_TMPL \ + COMOUT_ICE_BMATRIX:COM_ICE_BMATRIX_TMPL + +mkdir -p "${COMOUT_OCEAN_BMATRIX}" +mkdir -p "${COMOUT_ICE_BMATRIX}" + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASMARINEBMATRUNPY:-${SCRgfs}/exglobal_marinebmat.py} +${EXSCRIPT} +status=$? 
+[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/rocoto/ocnanalbmat.sh b/jobs/rocoto/marinebmat.sh similarity index 79% rename from jobs/rocoto/ocnanalbmat.sh rename to jobs/rocoto/marinebmat.sh index e62db9115a..9b72e5e12c 100755 --- a/jobs/rocoto/ocnanalbmat.sh +++ b/jobs/rocoto/marinebmat.sh @@ -8,12 +8,11 @@ source "${HOMEgfs}/ush/preamble.sh" status=$? [[ "${status}" -ne 0 ]] && exit "${status}" -export job="ocnanalbmat" +export job="marinebmat" export jobid="${job}.$$" ############################################################### # Execute the JJOB -"${HOMEgfs}"/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT -echo "BMAT gets run here" +"${HOMEgfs}"/jobs/JGLOBAL_MARINE_BMAT status=$? 
exit "${status}" diff --git a/parm/archive/gdas.yaml.j2 b/parm/archive/gdas.yaml.j2 index b253d27268..fe6a794224 100644 --- a/parm/archive/gdas.yaml.j2 +++ b/parm/archive/gdas.yaml.j2 @@ -22,7 +22,7 @@ gdas: {% if DO_JEDIOCNVAR %} - "logs/{{ cycle_YMDH }}/{{ RUN }}prepoceanobs.log" - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalprep.log" - - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalbmat.log" + - "logs/{{ cycle_YMDH }}/{{ RUN }}marinebmat.log" - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalrun.log" - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalpost.log" - "logs/{{ cycle_YMDH }}/{{ RUN }}ocnanalchkpt.log" diff --git a/parm/archive/gdasocean_analysis.yaml.j2 b/parm/archive/gdasocean_analysis.yaml.j2 index d127ee0b75..b7c057eacf 100644 --- a/parm/archive/gdasocean_analysis.yaml.j2 +++ b/parm/archive/gdasocean_analysis.yaml.j2 @@ -3,23 +3,30 @@ gdasocean_analysis: name: "GDASOCEAN_ANALYSIS" target: "{{ ATARDIR }}/{{ cycle_YMDH }}/gdasocean_analysis.tar" required: + # analysis and analysis increments - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocninc.nc' - {% set ocngrid_cycle = '%02d' % (((cycle_HH | int) - 3) % 24) %} - - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/gdas.t{{ ocngrid_cycle }}z.ocngrid.nc' {% for domain in ["ocn", "ice"] %} - - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}{{domain}}.bkgerr_stddev.nc' - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}{{domain}}.incr.nc' - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}{{domain}}ana.nc' - {% if NMEM_ENS > 2 %} - - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}{{domain}}.recentering_error.nc' - {% endif %} {% endfor %} + + # static background error + - '{{ COMIN_OCEAN_BMATRIX | relpath(ROTDIR) }}/{{ head }}ocean.bkgerr_stddev.nc' + - '{{ COMIN_ICE_BMATRIX | relpath(ROTDIR) }}/{{ head }}ice.bkgerr_stddev.nc' + + # ensemble background error {% if NMEM_ENS > 2 %} - - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocn.ssh_steric_stddev.nc' - - '{{ COMIN_OCEAN_ANALYSIS 
| relpath(ROTDIR) }}/{{ head }}ocn.ssh_unbal_stddev.nc' - - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocn.ssh_total_stddev.nc' - - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocn.steric_explained_variance.nc' + - '{{ COMIN_ICE_BMATRIX | relpath(ROTDIR) }}/{{ head }}ice.ens_weights.nc' + - '{{ COMIN_OCEAN_BMATRIX | relpath(ROTDIR) }}/{{ head }}ocean.ens_weights.nc' + - '{{ COMIN_OCEAN_BMATRIX | relpath(ROTDIR) }}/{{ head }}ocean.recentering_error.nc' + {% for diag_type in ["ssh_steric_stddev", "ssh_unbal_stddev", "ssh_total_stddev", "steric_explained_variance"] %} + - '{{ COMIN_OCEAN_BMATRIX | relpath(ROTDIR) }}/{{ head }}ocean.{{ diag_type }}.nc' + {% endfor %} {% endif %} + + # obs space diags - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/{{ head }}ocn.*.stats.csv' - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/diags/*.nc4' + + # runtime configs - '{{ COMIN_OCEAN_ANALYSIS | relpath(ROTDIR) }}/yaml/*.yaml' diff --git a/parm/config/gfs/config.com b/parm/config/gfs/config.com index ec867e64ba..222ffdae95 100644 --- a/parm/config/gfs/config.com +++ b/parm/config/gfs/config.com @@ -82,11 +82,13 @@ declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_BMATRIX_TMPL=${COM_BASE}'/bmatrix/ocean' declare -rx COM_OCEAN_NETCDF_TMPL=${COM_BASE}'/products/ocean/netcdf' declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2' declare -rx COM_OCEAN_GRIB_GRID_TMPL=${COM_OCEAN_GRIB_TMPL}'/${GRID}' declare -rx COM_ICE_ANALYSIS_TMPL=${COM_BASE}'/analysis/ice' +declare -rx COM_ICE_BMATRIX_TMPL=${COM_BASE}'/bmatrix/ice' declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' declare -rx 
COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' diff --git a/parm/config/gfs/config.marinebmat b/parm/config/gfs/config.marinebmat new file mode 100644 index 0000000000..d88739dced --- /dev/null +++ b/parm/config/gfs/config.marinebmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.marinebmat ########## +# configuration for the marine B-matrix + +echo "BEGIN: config.marinebmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" marinebmat + +echo "END: config.marinebmat" diff --git a/parm/config/gfs/config.ocnanal b/parm/config/gfs/config.ocnanal index 367e570ec8..4d58f2dedf 100644 --- a/parm/config/gfs/config.ocnanal +++ b/parm/config/gfs/config.ocnanal @@ -6,21 +6,15 @@ echo "BEGIN: config.ocnanal" export OBS_YAML_DIR="${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config" -export OBS_LIST=@SOCA_OBS_LIST@ -export OBS_YAML="${OBS_LIST}" -export FV3JEDI_STAGE_YAML="${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml" +export OBS_LIST=@SOCA_OBS_LIST@ # TODO(GA): doesn't look necessary as is to have +export OBS_YAML="${OBS_LIST}" # OBS_LIST and OBS_YAML pick one or add logic export SOCA_INPUT_FIX_DIR=@SOCA_INPUT_FIX_DIR@ -export SOCA_VARS=tocn,socn,ssh -export SABER_BLOCKS_YAML=@SABER_BLOCKS_YAML@ export SOCA_NINNER=@SOCA_NINNER@ -export CASE_ANL=@CASE_ANL@ export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent -export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin -export SOCA_FIX_STAGE_YAML_TMPL="${PARMgfs}/gdas/soca/soca_fix_stage.yaml.j2" export SOCA_ENS_BKG_STAGE_YAML_TMPL="${PARMgfs}/gdas/soca/soca_ens_bkg_stage.yaml.j2" +export SOCA_FIX_YAML_TMPL="${PARMgfs}/gdas/soca/soca_fix_stage_${OCNRES}.yaml.j2" -# NICAS -export NICAS_RESOL=@NICAS_RESOL@ -export NICAS_GRID_SIZE=@NICAS_GRID_SIZE@ +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin # TODO(GA): remove once analysis "run" + # and "checkpoint" are refactored echo "END: config.ocnanal" diff --git a/parm/config/gfs/config.ocnanalbmat 
b/parm/config/gfs/config.ocnanalbmat deleted file mode 100644 index 024da5f51b..0000000000 --- a/parm/config/gfs/config.ocnanalbmat +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash - -########## config.ocnanalbmat ########## -# Ocn Analysis specific - -echo "BEGIN: config.ocnanalbmat" - -# Get task specific resources -. "${EXPDIR}/config.resources" ocnanalbmat - -echo "END: config.ocnanalbmat" diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources index e16524ecd3..2dd037d426 100644 --- a/parm/config/gfs/config.resources +++ b/parm/config/gfs/config.resources @@ -25,7 +25,7 @@ if (( $# != 1 )); then echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" echo "wavegempak waveawipsbulls waveawipsgridded" echo "postsnd awips gempak npoess" - echo "ocnanalprep prepoceanobs ocnanalbmat ocnanalrun ocnanalecen marineanalletkf ocnanalchkpt ocnanalpost ocnanalvrfy" + echo "ocnanalprep prepoceanobs marinebmat ocnanalrun ocnanalecen marineanalletkf ocnanalchkpt ocnanalpost ocnanalvrfy" exit 1 fi @@ -483,7 +483,7 @@ case ${step} in export memory_prepoceanobs="48GB" ;; - "ocnanalbmat") + "marinebmat") npes=16 case ${OCNRES} in "025") npes=480;; @@ -494,11 +494,11 @@ case ${step} in exit 4 esac - export wtime_ocnanalbmat="00:30:00" - export npe_ocnanalbmat=${npes} - export nth_ocnanalbmat=1 + export wtime_marinebmat="00:30:00" + export npe_marinebmat=${npes} + export nth_marinebmat=1 export is_exclusive=True - export npe_node_ocnanalbmat=$(( npe_node_max / nth_ocnanalbmat )) + export npe_node_marinebmat=$(( npe_node_max / nth_marinebmat )) ;; "ocnanalrun") diff --git a/parm/config/gfs/yaml/defaults.yaml b/parm/config/gfs/yaml/defaults.yaml index 2d662a9bcb..da4d587dff 100644 --- a/parm/config/gfs/yaml/defaults.yaml +++ b/parm/config/gfs/yaml/defaults.yaml @@ -22,8 +22,8 @@ base: FHMAX_ENKF_GFS: 12 atmanl: - JCB_ALGO_YAML: "${PARMgfs}/gdas/atm/jcb-prototype_3dvar.yaml.j2" - STATICB_TYPE: "gsibec" + JCB_ALGO_YAML: 
"${PARMgfs}/gdas/atm/jcb-prototype_3dvar.yaml.j2" + STATICB_TYPE: "gsibec" LAYOUT_X_ATMANL: 8 LAYOUT_Y_ATMANL: 8 IO_LAYOUT_X: 1 @@ -45,16 +45,12 @@ snowanl: IO_LAYOUT_Y: 1 ocnanal: - SOCA_INPUT_FIX_DIR: "/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25/soca" # TODO: These need to go to glopara fix space. - CASE_ANL: "C48" # TODO: Check in gdasapp if used anywhere for SOCA + SOCA_INPUT_FIX_DIR: "${FIXgfs}/gdas/soca/72x35x25/soca" SOCA_OBS_LIST: "${PARMgfs}/gdas/soca/obs/obs_list.yaml" # TODO: This is also repeated in oceanprepobs SOCA_NINNER: 100 - SABER_BLOCKS_YAML: "" - NICAS_RESOL: 1 - NICAS_GRID_SIZE: 15000 prepoceanobs: - SOCA_INPUT_FIX_DIR: "/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25/soca" # TODO: These need to go to glopara fix space. + SOCA_INPUT_FIX_DIR: "${FIXgfs}/gdas/soca/72x35x25/soca" SOCA_OBS_LIST: "${PARMgfs}/gdas/soca/obs/obs_list.yaml" # TODO: This is also repeated in ocnanal OBSPREP_YAML: "${PARMgfs}/gdas/soca/obsprep/obsprep_config.yaml" DMPDIR: "/scratch1/NCEPDEV/global/glopara/data/experimental_obs" diff --git a/scripts/exglobal_marinebmat.py b/scripts/exglobal_marinebmat.py new file mode 100755 index 0000000000..e285e646ac --- /dev/null +++ b/scripts/exglobal_marinebmat.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python3 +# exglobal_marine_bmat_run.py +# This script creates an marineBmat object +# and runs the execute method +# which executes all the steps necessary to create the global marine B-matrix +import os + +from wxflow import Logger, cast_strdict_as_dtypedict +from pygfs.task.marine_bmat import MarineBMat + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Create an instance of the MarineBMat task + marineBMat = MarineBMat(config) + marineBMat.initialize() + marineBMat.execute() + marineBMat.finalize() diff --git 
a/scripts/exglobal_prep_snow_obs.py b/scripts/exglobal_prep_snow_obs.py index d4998a7d84..a6a9070151 100755 --- a/scripts/exglobal_prep_snow_obs.py +++ b/scripts/exglobal_prep_snow_obs.py @@ -21,5 +21,5 @@ # Instantiate the snow prepare task SnowAnl = SnowAnalysis(config) SnowAnl.prepare_GTS() - if f"{ SnowAnl.task_config.cyc }" == '18': + if SnowAnl.task_config.cyc == 0: SnowAnl.prepare_IMS() diff --git a/sorc/gdas.cd b/sorc/gdas.cd index 01a7c4f433..52f41a298b 160000 --- a/sorc/gdas.cd +++ b/sorc/gdas.cd @@ -1 +1 @@ -Subproject commit 01a7c4f433346581dee172044e0cd3bd0fe8bd71 +Subproject commit 52f41a298b4c6b7bbf6f203b6579516819fbbf36 diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 8694f856b5..9722f5a2b8 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -71,8 +71,8 @@ ${LINK_OR_COPY} "${HOMEgfs}/versions/run.${machine}.ver" "${HOMEgfs}/versions/ru case "${machine}" in "wcoss2") FIX_DIR="/lfs/h2/emc/global/noscrub/emc.global/FIX/fix" ;; "hera") FIX_DIR="/scratch1/NCEPDEV/global/glopara/fix" ;; - "orion") FIX_DIR="/work/noaa/global/glopara/fix" ;; - "hercules") FIX_DIR="/work/noaa/global/glopara/fix" ;; + "orion") FIX_DIR="/work/noaa/global/kfriedma/glopara/fix" ;; + "hercules") FIX_DIR="/work/noaa/global/kfriedma/glopara/fix" ;; "jet") FIX_DIR="/lfs4/HFIP/hfv3gfs/glopara/git/fv3gfs/fix" ;; "s4") FIX_DIR="/data/prod/glopara/fix" ;; "gaea") FIX_DIR="/gpfs/f5/ufs-ard/world-shared/global/glopara/data/fix" ;; @@ -237,7 +237,7 @@ if [[ -d "${HOMEgfs}/sorc/gdas.cd" ]]; then cd "${HOMEgfs}/fix" || exit 1 [[ ! 
-d gdas ]] && mkdir -p gdas cd gdas || exit 1 - for gdas_sub in fv3jedi gsibec obs; do + for gdas_sub in fv3jedi gsibec obs soca; do if [[ -d "${gdas_sub}" ]]; then rm -rf "${gdas_sub}" fi @@ -368,6 +368,7 @@ if [[ -d "${HOMEgfs}/sorc/gdas.cd/build" ]]; then "gdas_soca_gridgen.x" \ "gdas_soca_error_covariance_toolbox.x" \ "gdas_soca_setcorscales.x" \ + "gdas_soca_diagb.x" \ "fv3jedi_plot_field.x" \ "fv3jedi_fv3inc.x" \ "gdas_ens_handler.x" \ diff --git a/ush/python/pygfs/__init__.py b/ush/python/pygfs/__init__.py index fa6b0b373e..c0b72bbc35 100644 --- a/ush/python/pygfs/__init__.py +++ b/ush/python/pygfs/__init__.py @@ -6,10 +6,12 @@ from .task.aero_analysis import AerosolAnalysis from .task.atm_analysis import AtmAnalysis from .task.atmens_analysis import AtmEnsAnalysis +from .task.marine_bmat import MarineBMat from .task.snow_analysis import SnowAnalysis from .task.upp import UPP from .task.oceanice_products import OceanIceProducts from .task.gfs_forecast import GFSForecast +from .utils import marine_da_utils __docformat__ = "restructuredtext" __version__ = "0.1.0" diff --git a/ush/python/pygfs/task/marine_bmat.py b/ush/python/pygfs/task/marine_bmat.py new file mode 100644 index 0000000000..9d64e621c9 --- /dev/null +++ b/ush/python/pygfs/task/marine_bmat.py @@ -0,0 +1,350 @@ +#!/usr/bin/env python3 + +import os +import glob +from logging import getLogger +import pygfs.utils.marine_da_utils as mdau + +from wxflow import (AttrDict, + FileHandler, + add_to_datetime, to_timedelta, + chdir, + parse_j2yaml, + logit, + Executable, + Task) + +logger = getLogger(__name__.split('.')[-1]) + + +class MarineBMat(Task): + """ + Class for global marine B-matrix tasks + """ + @logit(logger, name="MarineBMat") + def __init__(self, config): + super().__init__(config) + _home_gdas = os.path.join(self.task_config.HOMEgfs, 'sorc', 'gdas.cd') + _calc_scale_exec = os.path.join(self.task_config.HOMEgfs, 'ush', 'soca', 'calc_scales.py') + _window_begin = 
add_to_datetime(self.task_config.current_cycle, -to_timedelta(f"{self.task_config.assim_freq}H") / 2) + _window_end = add_to_datetime(self.task_config.current_cycle, to_timedelta(f"{self.task_config.assim_freq}H") / 2) + + # compute the relative path from self.task_config.DATA to self.task_config.DATAenspert + if self.task_config.NMEM_ENS > 0: + _enspert_relpath = os.path.relpath(self.task_config.DATAenspert, self.task_config.DATA) + else: + _enspert_relpath = None + + # Create a local dictionary that is repeatedly used across this class + local_dict = AttrDict( + { + 'HOMEgdas': _home_gdas, + 'MARINE_WINDOW_BEGIN': _window_begin, + 'MARINE_WINDOW_END': _window_end, + 'MARINE_WINDOW_MIDDLE': self.task_config.current_cycle, + 'BERROR_YAML_DIR': os.path.join(_home_gdas, 'parm', 'soca', 'berror'), + 'GRID_GEN_YAML': os.path.join(_home_gdas, 'parm', 'soca', 'gridgen', 'gridgen.yaml'), + 'MARINE_ENSDA_STAGE_BKG_YAML_TMPL': os.path.join(_home_gdas, 'parm', 'soca', 'ensda', 'stage_ens_mem.yaml.j2'), + 'MARINE_DET_STAGE_BKG_YAML_TMPL': os.path.join(_home_gdas, 'parm', 'soca', 'soca_det_bkg_stage.yaml.j2'), + 'ENSPERT_RELPATH': _enspert_relpath, + 'CALC_SCALE_EXEC': _calc_scale_exec, + 'APREFIX': f"{self.task_config.RUN}.t{self.task_config.cyc:02d}z.", + } + ) + + # Extend task_config with local_dict + self.task_config = AttrDict(**self.task_config, **local_dict) + + @logit(logger) + def initialize(self: Task) -> None: + """Initialize a global B-matrix + + This method will initialize a global B-Matrix. 
+ This includes: + - staging the deterministic backgrounds (middle of window) + - staging SOCA fix files + - staging static ensemble members (optional) + - staging ensemble members (optional) + - generating the YAML files for the JEDI and GDASApp executables + - creating output directories + """ + super().initialize() + + # stage fix files + logger.info(f"Staging SOCA fix files from {self.task_config.SOCA_INPUT_FIX_DIR}") + soca_fix_list = parse_j2yaml(self.task_config.SOCA_FIX_YAML_TMPL, self.task_config) + FileHandler(soca_fix_list).sync() + + # prepare the MOM6 input.nml + mdau.prep_input_nml(self.task_config) + + # stage backgrounds + # TODO(G): Check ocean backgrounds dates for consistency + bkg_list = parse_j2yaml(self.task_config.MARINE_DET_STAGE_BKG_YAML_TMPL, self.task_config) + FileHandler(bkg_list).sync() + for cice_fname in ['./INPUT/cice.res.nc', './bkg/ice.bkg.f006.nc', './bkg/ice.bkg.f009.nc']: + mdau.cice_hist2fms(cice_fname, cice_fname) + + # stage the grid generation yaml + FileHandler({'copy': [[self.task_config.GRID_GEN_YAML, + os.path.join(self.task_config.DATA, 'gridgen.yaml')]]}).sync() + + # generate the variance partitioning YAML file + logger.debug("Generate variance partitioning YAML file") + diagb_config = parse_j2yaml(path=os.path.join(self.task_config.BERROR_YAML_DIR, 'soca_diagb.yaml.j2'), + data=self.task_config) + diagb_config.save(os.path.join(self.task_config.DATA, 'soca_diagb.yaml')) + + # generate the vertical decorrelation scale YAML file + logger.debug("Generate the vertical correlation scale YAML file") + vtscales_config = parse_j2yaml(path=os.path.join(self.task_config.BERROR_YAML_DIR, 'soca_vtscales.yaml.j2'), + data=self.task_config) + vtscales_config.save(os.path.join(self.task_config.DATA, 'soca_vtscales.yaml')) + + # generate vertical diffusion scale YAML file + logger.debug("Generate vertical diffusion YAML file") + diffvz_config = parse_j2yaml(path=os.path.join(self.task_config.BERROR_YAML_DIR, 
'soca_parameters_diffusion_vt.yaml.j2'), + data=self.task_config) + diffvz_config.save(os.path.join(self.task_config.DATA, 'soca_parameters_diffusion_vt.yaml')) + + # generate the horizontal diffusion YAML files + if True:  # TODO(G): skip this section once we have optimized the scales + # stage the correlation scale configuration + logger.debug("Generate correlation scale YAML file") + FileHandler({'copy': [[os.path.join(self.task_config.BERROR_YAML_DIR, 'soca_setcorscales.yaml'), + os.path.join(self.task_config.DATA, 'soca_setcorscales.yaml')]]}).sync() + + # generate horizontal diffusion scale YAML file + logger.debug("Generate horizontal diffusion scale YAML file") + diffhz_config = parse_j2yaml(path=os.path.join(self.task_config.BERROR_YAML_DIR, 'soca_parameters_diffusion_hz.yaml.j2'), + data=self.task_config) + diffhz_config.save(os.path.join(self.task_config.DATA, 'soca_parameters_diffusion_hz.yaml')) + + # hybrid EnVAR case + if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: + # stage ensemble member files for use in hybrid background error + logger.debug(f"Stage ensemble members for the hybrid background error") + mdau.stage_ens_mem(self.task_config) + + # generate ensemble recentering/rebalancing YAML file + logger.debug("Generate ensemble recentering YAML file") + ensrecenter_config = parse_j2yaml(path=os.path.join(self.task_config.BERROR_YAML_DIR, 'soca_ensb.yaml.j2'), + data=self.task_config) + ensrecenter_config.save(os.path.join(self.task_config.DATA, 'soca_ensb.yaml')) + + # generate ensemble weights YAML file + logger.debug("Generate ensemble recentering YAML file: {self.task_config.abcd_yaml}") + hybridweights_config = parse_j2yaml(path=os.path.join(self.task_config.BERROR_YAML_DIR, 'soca_ensweights.yaml.j2'), + data=self.task_config) + hybridweights_config.save(os.path.join(self.task_config.DATA, 'soca_ensweights.yaml')) + + # need output dir for ensemble perturbations and static B-matrix + logger.debug("Create empty diagb 
directories to receive output from executables") + FileHandler({'mkdir': [os.path.join(self.task_config.DATA, 'diagb')]}).sync() + + @logit(logger) + def gridgen(self: Task) -> None: + # link gdas_soca_gridgen.x + mdau.link_executable(self.task_config, 'gdas_soca_gridgen.x') + exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT) + exec_name = os.path.join(self.task_config.DATA, 'gdas_soca_gridgen.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg('gridgen.yaml') + + mdau.run(exec_cmd) + + @logit(logger) + def variance_partitioning(self: Task) -> None: + # link the variance partitioning executable, gdas_soca_diagb.x + mdau.link_executable(self.task_config, 'gdas_soca_diagb.x') + exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT) + exec_name = os.path.join(self.task_config.DATA, 'gdas_soca_diagb.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg('soca_diagb.yaml') + + mdau.run(exec_cmd) + + @logit(logger) + def horizontal_diffusion(self: Task) -> None: + """Generate the horizontal diffusion coefficients + """ + # link the executable that computes the correlation scales, gdas_soca_setcorscales.x, + # and prepare the command to run it + mdau.link_executable(self.task_config, 'gdas_soca_setcorscales.x') + exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT) + exec_name = os.path.join(self.task_config.DATA, 'gdas_soca_setcorscales.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg('soca_setcorscales.yaml') + + # create a files containing the correlation scales + mdau.run(exec_cmd) + + # link the executable that computes the correlation scales, gdas_soca_error_covariance_toolbox.x, + # and prepare the command to run it + mdau.link_executable(self.task_config, 'gdas_soca_error_covariance_toolbox.x') + exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT) + exec_name = os.path.join(self.task_config.DATA, 'gdas_soca_error_covariance_toolbox.x') + exec_cmd.add_default_arg(exec_name) + 
exec_cmd.add_default_arg('soca_parameters_diffusion_hz.yaml') + + # compute the coefficients of the diffusion operator + mdau.run(exec_cmd) + + @logit(logger) + def vertical_diffusion(self: Task) -> None: + """Generate the vertical diffusion coefficients + """ + # compute the vertical correlation scales based on the MLD + FileHandler({'copy': [[os.path.join(self.task_config.CALC_SCALE_EXEC), + os.path.join(self.task_config.DATA, 'calc_scales.x')]]}).sync() + exec_cmd = Executable("python") + exec_name = os.path.join(self.task_config.DATA, 'calc_scales.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg('soca_vtscales.yaml') + mdau.run(exec_cmd) + + # link the executable that computes the correlation scales, gdas_soca_error_covariance_toolbox.x, + # and prepare the command to run it + mdau.link_executable(self.task_config, 'gdas_soca_error_covariance_toolbox.x') + exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT) + exec_name = os.path.join(self.task_config.DATA, 'gdas_soca_error_covariance_toolbox.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg('soca_parameters_diffusion_vt.yaml') + + # compute the coefficients of the diffusion operator + mdau.run(exec_cmd) + + @logit(logger) + def ensemble_perturbations(self: Task) -> None: + """Generate the 3D ensemble of perturbation for the 3DEnVAR + + This method will generate ensemble perturbations re-balanced w.r.t the + deterministic background. 
+ This includes: + - computing and storing the unbalanced ensemble perturbations' statistics + - recentering the ensemble members around the deterministic background and + accounting for the nonlinear steric recentering + - saving the recentered ensemble statistics + """ + mdau.link_executable(self.task_config, 'gdas_ens_handler.x') + exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT) + exec_name = os.path.join(self.task_config.DATA, 'gdas_ens_handler.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg('soca_ensb.yaml') + + # generate the ensemble perturbations + mdau.run(exec_cmd) + + @logit(logger) + def hybrid_weight(self: Task) -> None: + """Generate the hybrid weights for the 3DEnVAR + + This method will generate the 3D fields hybrid weights for the 3DEnVAR for each + variable. + TODO(G): Currently implemented for the specific case of the static ensemble members only + """ + mdau.link_executable(self.task_config, 'gdas_socahybridweights.x') + exec_cmd = Executable(self.task_config.APRUN_MARINEBMAT) + exec_name = os.path.join(self.task_config.DATA, 'gdas_socahybridweights.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg('soca_ensweights.yaml') + + # compute the ensemble weights + mdau.run(exec_cmd) + + @logit(logger) + def execute(self: Task) -> None: + """Generate the full B-matrix + + This method will generate the full B-matrix according to the configuration. 
+ """ + chdir(self.task_config.DATA) + self.gridgen() # TODO: This should be optional in case the geometry file was staged + self.variance_partitioning() + self.horizontal_diffusion() # TODO: Make this optional once we've converged on an acceptable set of scales + self.vertical_diffusion() + # hybrid EnVAR case + if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 2: + self.ensemble_perturbations() # TODO: refactor this from the old scripts + self.hybrid_weight() # TODO: refactor this from the old scripts + + @logit(logger) + def finalize(self: Task) -> None: + """Finalize the global B-matrix job + + This method will finalize the global B-matrix job. + This includes: + - copy the generated static, but cycle dependent background error files to the ROTDIR + - copy the generated YAML file from initialize to the ROTDIR + - keep the re-balanced ensemble perturbation files in DATAenspert + - ... + + """ + # Copy the soca grid if it was created + grid_file = os.path.join(self.task_config.DATA, 'soca_gridspec.nc') + if os.path.exists(grid_file): + logger.info(f"Copying the soca grid file to the ROTDIR") + FileHandler({'copy': [[grid_file, + os.path.join(self.task_config.COMOUT_OCEAN_BMATRIX, 'soca_gridspec.nc')]]}).sync() + + # Copy the diffusion coefficient files to the ROTDIR + logger.info(f"Copying the diffusion coefficient files to the ROTDIR") + diffusion_coeff_list = [] + for diff_type in ['hz', 'vt']: + src = os.path.join(self.task_config.DATA, f"{diff_type}_ocean.nc") + dest = os.path.join(self.task_config.COMOUT_OCEAN_BMATRIX, + f"{self.task_config.APREFIX}{diff_type}_ocean.nc") + diffusion_coeff_list.append([src, dest]) + + src = os.path.join(self.task_config.DATA, f"hz_ice.nc") + dest = os.path.join(self.task_config.COMOUT_ICE_BMATRIX, + f"{self.task_config.APREFIX}hz_ice.nc") + diffusion_coeff_list.append([src, dest]) + + FileHandler({'copy': diffusion_coeff_list}).sync() + + # Copy diag B files to ROTDIR + logger.info(f"Copying diag B files to 
the ROTDIR") + diagb_list = [] + window_end_iso = self.task_config.MARINE_WINDOW_END.strftime('%Y-%m-%dT%H:%M:%SZ') + + # ocean diag B + src = os.path.join(self.task_config.DATA, 'diagb', f"ocn.bkgerr_stddev.incr.{window_end_iso}.nc") + dst = os.path.join(self.task_config.COMOUT_OCEAN_BMATRIX, + f"{self.task_config.APREFIX}ocean.bkgerr_stddev.nc") + diagb_list.append([src, dst]) + + # ice diag B + src = os.path.join(self.task_config.DATA, 'diagb', f"ice.bkgerr_stddev.incr.{window_end_iso}.nc") + dst = os.path.join(self.task_config.COMOUT_ICE_BMATRIX, + f"{self.task_config.APREFIX}ice.bkgerr_stddev.nc") + diagb_list.append([src, dst]) + + FileHandler({'copy': diagb_list}).sync() + + # Copy the ensemble perturbation diagnostics to the ROTDIR + if self.task_config.DOHYBVAR == "YES" or self.task_config.NMEM_ENS > 3: + window_middle_iso = self.task_config.MARINE_WINDOW_MIDDLE.strftime('%Y-%m-%dT%H:%M:%SZ') + weight_list = [] + src = os.path.join(self.task_config.DATA, f"ocn.ens_weights.incr.{window_middle_iso}.nc") + dst = os.path.join(self.task_config.COMOUT_OCEAN_BMATRIX, + f"{self.task_config.APREFIX}ocean.ens_weights.nc") + weight_list.append([src, dst]) + + src = os.path.join(self.task_config.DATA, f"ice.ens_weights.incr.{window_middle_iso}.nc") + dst = os.path.join(self.task_config.COMOUT_ICE_BMATRIX, + f"{self.task_config.APREFIX}ice.ens_weights.nc") + weight_list.append([src, dst]) + + # TODO(G): missing ssh_steric_stddev, ssh_unbal_stddev, ssh_total_stddev and steric_explained_variance + + FileHandler({'copy': weight_list}).sync() + + # Copy the YAML files to the OCEAN ROTDIR + yamls = glob.glob(os.path.join(self.task_config.DATA, '*.yaml')) + yaml_list = [] + for yaml_file in yamls: + dest = os.path.join(self.task_config.COMOUT_OCEAN_BMATRIX, + f"{self.task_config.APREFIX}{os.path.basename(yaml_file)}") + yaml_list.append([yaml_file, dest]) + FileHandler({'copy': yaml_list}).sync() diff --git a/ush/python/pygfs/task/marine_letkf.py 
b/ush/python/pygfs/task/marine_letkf.py index 0fdd3d9aba..36c26d594b 100644 --- a/ush/python/pygfs/task/marine_letkf.py +++ b/ush/python/pygfs/task/marine_letkf.py @@ -72,7 +72,7 @@ def initialize(self): ensbkgconf.RUN = 'enkfgdas' soca_ens_bkg_stage_list = parse_j2yaml(self.task_config.SOCA_ENS_BKG_STAGE_YAML_TMPL, ensbkgconf) FileHandler(soca_ens_bkg_stage_list).sync() - soca_fix_stage_list = parse_j2yaml(self.task_config.SOCA_FIX_STAGE_YAML_TMPL, self.task_config) + soca_fix_stage_list = parse_j2yaml(self.task_config.SOCA_FIX_YAML_TMPL, self.task_config) FileHandler(soca_fix_stage_list).sync() letkf_stage_list = parse_j2yaml(self.task_config.MARINE_LETKF_STAGE_YAML_TMPL, self.task_config) FileHandler(letkf_stage_list).sync() diff --git a/ush/python/pygfs/utils/__init__.py b/ush/python/pygfs/utils/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/ush/python/pygfs/utils/marine_da_utils.py b/ush/python/pygfs/utils/marine_da_utils.py new file mode 100644 index 0000000000..016551878b --- /dev/null +++ b/ush/python/pygfs/utils/marine_da_utils.py @@ -0,0 +1,99 @@ +import f90nml +import os +from logging import getLogger +import xarray as xr + +from wxflow import (FileHandler, + logit, + WorkflowException, + AttrDict, + parse_j2yaml, + Executable, + jinja) + +logger = getLogger(__name__.split('.')[-1]) + + +@logit(logger) +def run(exec_cmd: Executable) -> None: + """Run the executable command + """ + logger.info(f"Executing {exec_cmd}") + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occurred during execution of {exec_cmd}") + + +@logit(logger) +def link_executable(task_config: AttrDict, exe_name: str) -> None: + """Link the executable to the DATA directory + """ + logger.info(f"Link executable {exe_name}") + logger.warn("WARNING: Linking is not permitted per EE2.") + exe_src = os.path.join(task_config.EXECgfs, 
exe_name) + exe_dest = os.path.join(task_config.DATA, exe_name) + if os.path.exists(exe_dest): + os.remove(exe_dest) + os.symlink(exe_src, exe_dest) + + +@logit(logger) +def prep_input_nml(task_config: AttrDict) -> None: + """Prepare the input.nml file + TODO: Use jinja2 instead of f90nml + """ + # stage input.nml + mom_input_nml_tmpl_src = os.path.join(task_config.HOMEgdas, 'parm', 'soca', 'fms', 'input.nml') + mom_input_nml_tmpl = os.path.join(task_config.DATA, 'mom_input.nml.tmpl') + FileHandler({'copy': [[mom_input_nml_tmpl_src, mom_input_nml_tmpl]]}).sync() + + # swap date and stacksize + domain_stack_size = task_config.DOMAIN_STACK_SIZE + ymdhms = [int(s) for s in task_config.MARINE_WINDOW_END.strftime('%Y,%m,%d,%H,%M,%S').split(',')] + with open(mom_input_nml_tmpl, 'r') as nml_file: + nml = f90nml.read(nml_file) + nml['ocean_solo_nml']['date_init'] = ymdhms + nml['fms_nml']['domains_stack_size'] = int(domain_stack_size) + nml.write('mom_input.nml') + + +@logit(logger) +def cice_hist2fms(input_filename: str, output_filename: str) -> None: + """ Reformat the CICE history file so it can be read by SOCA/FMS + Simple reformatting utility to allow soca/fms to read the CICE history files + """ + + # open the CICE history file + ds = xr.open_dataset(input_filename) + + if 'aicen' in ds.variables and 'hicen' in ds.variables and 'hsnon' in ds.variables: + logger.info(f"*** Already reformatted, skipping.") + return + + # rename the dimensions to xaxis_1 and yaxis_1 + ds = ds.rename({'ni': 'xaxis_1', 'nj': 'yaxis_1'}) + + # rename the variables + ds = ds.rename({'aice_h': 'aicen', 'hi_h': 'hicen', 'hs_h': 'hsnon'}) + + # Save the new netCDF file + ds.to_netcdf(output_filename, mode='w') + + +@logit(logger) +def stage_ens_mem(task_config: AttrDict) -> None: + """ Copy the ensemble members to the DATA directory + Copy the ensemble members to the DATA directory and reformat the CICE history files + """ + # Copy the ensemble members to the DATA directory + 
logger.info("---------------- Stage ensemble members") + ensbkgconf = AttrDict(task_config) + ensbkgconf.RUN = task_config.GDUMP_ENS + logger.debug(f"{jinja.Jinja(task_config.MARINE_ENSDA_STAGE_BKG_YAML_TMPL, ensbkgconf).render}") + letkf_stage_list = parse_j2yaml(task_config.MARINE_ENSDA_STAGE_BKG_YAML_TMPL, ensbkgconf) + logger.info(f"{letkf_stage_list}") + FileHandler(letkf_stage_list).sync() diff --git a/versions/fix.ver b/versions/fix.ver index 1d54572c0b..6e35e651cf 100644 --- a/versions/fix.ver +++ b/versions/fix.ver @@ -9,6 +9,7 @@ export cpl_ver=20230526 export datm_ver=20220805 export gdas_crtm_ver=20220805 export gdas_fv3jedi_ver=20220805 +export gdas_soca_ver=20240624 export gdas_gsibec_ver=20240416 export gdas_obs_ver=20240213 export glwu_ver=20220805 diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index 175ddb07bf..8771729e3a 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -43,7 +43,7 @@ def _get_app_configs(self): configs += ['anal', 'analdiag'] if self.do_jediocnvar: - configs += ['prepoceanobs', 'ocnanalprep', 'ocnanalbmat', 'ocnanalrun'] + configs += ['prepoceanobs', 'ocnanalprep', 'marinebmat', 'ocnanalrun'] if self.do_hybvar: configs += ['ocnanalecen'] configs += ['ocnanalchkpt', 'ocnanalpost'] @@ -143,7 +143,7 @@ def get_task_names(self): gdas_gfs_common_tasks_before_fcst += ['anal'] if self.do_jediocnvar: - gdas_gfs_common_tasks_before_fcst += ['prepoceanobs', 'ocnanalprep', 'ocnanalbmat', 'ocnanalrun'] + gdas_gfs_common_tasks_before_fcst += ['prepoceanobs', 'ocnanalprep', 'marinebmat', 'ocnanalrun'] if self.do_hybvar: gdas_gfs_common_tasks_before_fcst += ['ocnanalecen'] gdas_gfs_common_tasks_before_fcst += ['ocnanalchkpt', 'ocnanalpost'] diff --git a/workflow/rocoto/gfs_tasks.py b/workflow/rocoto/gfs_tasks.py index c14c3e2898..1f14751642 100644 --- a/workflow/rocoto/gfs_tasks.py +++ b/workflow/rocoto/gfs_tasks.py @@ -654,23 +654,24 @@ def prepoceanobs(self): 
return task - def ocnanalprep(self): + def marinebmat(self): + + ocean_hist_path = self._template_to_rocoto_cycstring(self._base["COM_OCEAN_HISTORY_TMPL"], {'RUN': 'gdas'}) deps = [] - dep_dict = {'type': 'task', 'name': f'{self.cdump}prepoceanobs'} - deps.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'task', 'name': 'gdasfcst', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + data = f'{ocean_hist_path}/gdas.ocean.t@Hz.inst.f009.nc' + dep_dict = {'type': 'data', 'data': data, 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + dependencies = rocoto.create_dependency(dep=deps) - resources = self.get_resource('ocnanalprep') - task_name = f'{self.cdump}ocnanalprep' + resources = self.get_resource('marinebmat') + task_name = f'{self.cdump}marinebmat' task_dict = {'task_name': task_name, 'resources': resources, 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.cdump.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/jobs/rocoto/ocnanalprep.sh', + 'command': f'{self.HOMEgfs}/jobs/rocoto/marinebmat.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -680,21 +681,25 @@ def ocnanalprep(self): return task - def ocnanalbmat(self): + def ocnanalprep(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalprep'} + dep_dict = {'type': 'task', 'name': f'{self.cdump}prepoceanobs'} deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) + dep_dict = {'type': 'task', 'name': f'{self.cdump}marinebmat'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': 'gdasfcst', 'offset': f"-{timedelta_to_HMS(self._base['cycle_interval'])}"} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = 
rocoto.create_dependency(dep_condition='and', dep=deps) - resources = self.get_resource('ocnanalbmat') - task_name = f'{self.cdump}ocnanalbmat' + resources = self.get_resource('ocnanalprep') + task_name = f'{self.cdump}ocnanalprep' task_dict = {'task_name': task_name, 'resources': resources, 'dependency': dependencies, 'envars': self.envars, 'cycledef': self.cdump.replace('enkf', ''), - 'command': f'{self.HOMEgfs}/jobs/rocoto/ocnanalbmat.sh', + 'command': f'{self.HOMEgfs}/jobs/rocoto/ocnanalprep.sh', 'job_name': f'{self.pslot}_{task_name}_@H', 'log': f'{self.rotdir}/logs/@Y@m@d@H/{task_name}.log', 'maxtries': '&MAXTRIES;' @@ -707,7 +712,7 @@ def ocnanalbmat(self): def ocnanalrun(self): deps = [] - dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalbmat'} + dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalprep'} deps.append(rocoto.add_dependency(dep_dict)) dependencies = rocoto.create_dependency(dep=deps) diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index 404203f02d..25a6b2bef1 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -15,7 +15,7 @@ class Tasks: 'prep', 'anal', 'sfcanl', 'analcalc', 'analdiag', 'arch', "cleanup", 'prepatmiodaobs', 'atmanlinit', 'atmanlvar', 'atmanlfv3inc', 'atmanlfinal', 'prepoceanobs', - 'ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalecen', 'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy', + 'ocnanalprep', 'marinebmat', 'ocnanalrun', 'ocnanalecen', 'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy', 'earc', 'ecen', 'echgres', 'ediag', 'efcs', 'eobs', 'eomg', 'epos', 'esfc', 'eupd', 'atmensanlinit', 'atmensanlletkf', 'atmensanlfv3inc', 'atmensanlfinal', From a7f6b32ed63efa0de21bfb0ce63364a5b22b9891 Mon Sep 17 00:00:00 2001 From: David Huber <69919478+DavidHuber-NOAA@users.noreply.github.com> Date: Thu, 25 Jul 2024 14:26:52 -0400 Subject: [PATCH 2/3] Temporarily disable METplus jobs (#2796) --- parm/config/gfs/config.base | 2 ++ 1 file changed, 2 insertions(+) diff --git 
a/parm/config/gfs/config.base b/parm/config/gfs/config.base index 9fd66bf0c7..fca3fd99ed 100644 --- a/parm/config/gfs/config.base +++ b/parm/config/gfs/config.base @@ -453,6 +453,8 @@ export binary_diag=".false." # Verification options export DO_METP="@DO_METP@" # Run METPLUS jobs - set METPLUS settings in config.metp +# TODO Reenable METplus jobs when issue #2790 is resolved +export DO_METP="NO" export DO_FIT2OBS="YES" # Run fit to observations package export DO_VRFY_OCEANDA="@DO_VRFY_OCEANDA@" # Run SOCA Ocean and Seaice DA verification tasks From f156a7894d639f177e3e2588f98eec1f6f59aa68 Mon Sep 17 00:00:00 2001 From: Jessica Meixner Date: Fri, 26 Jul 2024 14:18:32 -0500 Subject: [PATCH 3/3] HR4 GWD update (#2732) This update is a combination of the gravity wave drag (GWD) versions from the NOAA/GSL and NOAA/PSL --- parm/config/gefs/config.fcst | 3 ++- parm/config/gfs/config.fcst | 3 ++- parm/config/gfs/config.ufs | 2 +- sorc/ufs_model.fd | 2 +- ush/forecast_postdet.sh | 1 + ush/parsing_namelists_FV3.sh | 2 ++ ush/parsing_namelists_FV3_nest.sh | 2 ++ versions/fix.ver | 2 +- 8 files changed, 12 insertions(+), 5 deletions(-) diff --git a/parm/config/gefs/config.fcst b/parm/config/gefs/config.fcst index 103d6f091d..bf24c4e906 100644 --- a/parm/config/gefs/config.fcst +++ b/parm/config/gefs/config.fcst @@ -108,8 +108,9 @@ if (( gwd_opt == 2 )); then export do_ugwp_v0_orog_only=".false." export do_ugwp_v0_nst_only=".false." export do_gsl_drag_ls_bl=".true." - export do_gsl_drag_ss=".true." + export do_gsl_drag_ss=".false." export do_gsl_drag_tofd=".true." + export do_gwd_opt_psl=".true." export do_ugwp_v1_orog_only=".false." launch_level=$(echo "${LEVS}/2.35" |bc) export launch_level diff --git a/parm/config/gfs/config.fcst b/parm/config/gfs/config.fcst index 36064d5f2b..3323107ba7 100644 --- a/parm/config/gfs/config.fcst +++ b/parm/config/gfs/config.fcst @@ -120,8 +120,9 @@ if (( gwd_opt == 2 )); then export do_ugwp_v0_orog_only=".false." 
export do_ugwp_v0_nst_only=".false." export do_gsl_drag_ls_bl=".true." - export do_gsl_drag_ss=".true." + export do_gsl_drag_ss=".false." export do_gsl_drag_tofd=".true." + export do_gwd_opt_psl=".true." export do_ugwp_v1_orog_only=".false." launch_level=$(echo "${LEVS}/2.35" |bc) export launch_level diff --git a/parm/config/gfs/config.ufs b/parm/config/gfs/config.ufs index f630226826..148793927f 100644 --- a/parm/config/gfs/config.ufs +++ b/parm/config/gfs/config.ufs @@ -102,7 +102,7 @@ case "${fv3_res}" in export knob_ugwp_tauamp=6.0e-3 # setting for UGWPv1 non-stationary GWD export k_split=1 export n_split=4 - export tau=10.0 + export tau=8.0 export rf_cutoff=100.0 export fv_sg_adj=3600 export WRITE_GROUP_GDAS=1 diff --git a/sorc/ufs_model.fd b/sorc/ufs_model.fd index e784814dfc..c12760125c 160000 --- a/sorc/ufs_model.fd +++ b/sorc/ufs_model.fd @@ -1 +1 @@ -Subproject commit e784814dfce3fb01e82be6d3949f9811860041d7 +Subproject commit c12760125ce7c5a85e8ced92d7f37c9ad6a59afe diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index 993331d70b..7de31d6235 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -255,6 +255,7 @@ FV3_out() { # Determine the dates for restart files to be copied to COM local restart_date restart_dates restart_dates=() + # Copy restarts in the assimilation window for RUN=gdas|enkfgdas|enkfgfs if [[ "${RUN}" =~ "gdas" || "${RUN}" == "enkfgfs" ]]; then restart_date="${model_start_date_next_cycle}" diff --git a/ush/parsing_namelists_FV3.sh b/ush/parsing_namelists_FV3.sh index b88849d7e7..08746423c8 100755 --- a/ush/parsing_namelists_FV3.sh +++ b/ush/parsing_namelists_FV3.sh @@ -284,6 +284,7 @@ EOF do_gsl_drag_ls_bl = ${do_gsl_drag_ls_bl:-".true."} do_gsl_drag_ss = ${do_gsl_drag_ss:-".true."} do_gsl_drag_tofd = ${do_gsl_drag_tofd:-".true."} + do_gwd_opt_psl = ${do_gwd_opt_psl:-".false."} do_ugwp_v1_orog_only = ${do_ugwp_v1_orog_only:-".false."} min_lakeice = ${min_lakeice:-"0.15"} min_seaice = ${min_seaice:-"0.15"} 
@@ -369,6 +370,7 @@ cat >> input.nml <> "${nml_file}" <