From 6bdab4c47925d58a4f1e909912072ac4bf947727 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Wed, 21 Aug 2024 13:39:30 -0500 Subject: [PATCH 01/47] initial chgres_cube config --- scripts/chgres_cube.py | 85 ++++++++++++++++++++++++++++++++++++ ush/config_defaults.yaml | 62 +++++++++++++++++++++++++- ush/config_defaults_aqm.yaml | 14 +++++- 3 files changed, 159 insertions(+), 2 deletions(-) create mode 100644 scripts/chgres_cube.py diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py new file mode 100644 index 000000000..24d36331e --- /dev/null +++ b/scripts/chgres_cube.py @@ -0,0 +1,85 @@ +""" +The run script for chgres_cube +""" + +import datetime as dt +import os +import sys +from argparse import ArgumentParser +from copy import deepcopy +from pathlib import Path + +from uwtools.api.file import link as uwlink +from uwtools.api.chgres_cube import Chgres_Cube +from uwtools.api.config import get_yaml_config + + +parser = ArgumentParser( + description="Script that runs chgres_cube via uwtools API", +) +parser.add_argument( + "-c", + "--config-file", + metavar="PATH", + required=True, + help="Path to experiment config file.", + type=Path, +) +parser.add_argument( + "--cycle", + help="The cycle in ISO8601 format (e.g. 2024-07-15T18)", + required=True, + type=dt.datetime.fromisoformat, +) +parser.add_argument( + "--key-path", + help="Dot-separated path of keys leading through the config to the driver's YAML block", + metavar="KEY[.KEY...]", + required=True, +) +parser.add_argument( + "--member", + default="000", + help="The 3-digit ensemble member number.", +) +args = parser.parse_args() + +os.environ["member"] = args.member + +# Extract driver config from experiment config +chgres_cube_driver = Chgres_Cube( + config=args.config_file, + cycle=args.cycle, + key_path=[args.key_path], +) +rundir = Path(chgres_cube_driver.config["rundir"]) +print(f"Will run in {rundir}") +# Run chgres_cube +chgres_cube_driver.run() + +if not (rundir / "runscript.chgres_cube.done").is_file(): + print("Error occurred running chgres_cube. 
Please see component error logs.") + sys.exit(1) + +# Deliver output data +expt_config = get_yaml_config(args.config_file) +chgres_cube_config = expt_config[args.key_path] + +links = {} +for label in chgres_cube_config["output_file_labels"]: + # deepcopy here because desired_output_name is parameterized within the loop + expt_config_cp = get_yaml_config(deepcopy(expt_config.data)) + expt_config_cp.dereference( + context={ + "cycle": args.cycle, + "leadtime": args.leadtime, + "file_label": label, + **expt_config_cp, + } + ) + chgres_cube_block = expt_config_cp[args.key_path] + desired_output_fn = chgres_cube_block["desired_output_name"] + upp_output_fn = rundir / f"{label.upper()}.GrbF{int(args.leadtime.total_seconds() // 3600):02d}" + links[desired_output_fn] = str(upp_output_fn) + +uwlink(target_dir=rundir.parent, config=links) \ No newline at end of file diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 404c824ab..68f10f7cd 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -1651,6 +1651,41 @@ task_make_ics: FVCOM_DIR: "" FVCOM_FILE: "fvcom.nc" VCOORD_FILE: "{{ workflow.FIXam }}/global_hyblev.l65.txt" + #------------------------------------------------------------------------ + chgres_cube: + namelist: + update_values: + config: + fix_dir_target_grid: "{{ workflow.FIXlam }}" + mosaic_file_target_grid: "{{ workflow.FIXlam }}/{{ workflow.CRES }}{{ workflow.DOT_OR_USCORE }}mosaic.halo{{ constants.NH4 }}.nc" + orog_dir_target_grid: "{{ workflow.FIXlam }}" + orog_files_target_grid: "{{ workflow.CRES }}{{ workflow.DOT_OR_USCORE }}oro_data.tile{{ constants. TILE_RGNL }}.halo{{ constants.NH4 }}.nc" + vcoord_file_target_grid: "{{ workflow.FIXam }}/global_hyblev.l65.txt" + varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" + data_dir_input_grid: "{{ task_make_lbcs.chgres_cube.run_dir }}" + atm_files_input_grid: ${fn_atm} + sfc_files_input_grid': ${fn_sfc} + grib2_file_input_grid: \"${fn_grib2}\" + cycle_mon: !int "{{ cycle.strftime('%m') }}" + cycle_day: !int "{{ cycle.strftime('%d') }}" + cycle_hour: !int "{{ cycle.strftime('%H') }}" + convert_atm: true + convert_sfc: true + convert_nst: true + regional: 1 + halo_bndy: "{{ constants.NH4 }}" + halo_blend: "{{ global.HALO_BLEND }}" + input_type: "gaussian_nemsio" + external_model: "FV3GFS" + tracers_input: "[\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"]" + tracers: "[\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"]" + nsoill_out': 4 + vgtyp_from_climo: true + sotyp_from_climo: true + vgfrc_from_climo: true + minmax_vgfrc_from_climo: true + lai_from_climo: true + tg3_from_soil: false #---------------------------- # MAKE LBCS config parameters @@ -1675,7 +1710,32 @@ task_make_lbcs: KMP_AFFINITY_MAKE_LBCS: "scatter" OMP_NUM_THREADS_MAKE_LBCS: 1 OMP_STACKSIZE_MAKE_LBCS: "1024m" - VCOORD_FILE: "{{ workflow.FIXam }}/global_hyblev.l65.txt" + #------------------------------------------------------------------------ + chgres_cube: + namelist: + update_values: + config: + fix_dir_target_grid: "{{ workflow.FIXlam }}" + mosaic_file_target_grid: "{{ workflow.FIXlam }}/{{ workflow.CRES }}{{ workflow.DOT_OR_USCORE }}mosaic.halo{{ constants.NH4 }}.nc" + orog_dir_target_grid: "{{ workflow.FIXlam }}" + orog_files_target_grid: "{{ workflow.CRES }}{{ workflow.DOT_OR_USCORE }}oro_data.tile{{ constants. 
TILE_RGNL }}.halo{{ constants.NH4 }}.nc" + vcoord_file_target_grid: "{{ workflow.FIXam }}/global_hyblev.l65.txt" + varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" + data_dir_input_grid: "{{ task_make_lbcs.chgres_cube.run_dir }}" + atm_files_input_grid: ${fn_atm} + grib2_file_input_grid: \"${fn_grib2}\" + cycle_mon: !int "{{ cycle.strftime('%m') }}" + cycle_day: !int "{{ cycle.strftime('%d') }}" + cycle_hour: !int "{{ cycle.strftime('%H') }}" + convert_atm: true + regional: 2 + halo_bndy: "{{ constants.NH4 }}" + halo_blend: "{{ global.HALO_BLEND }}" + input_type: "gaussian_nemsio" + external_model: "FV3GFS" + tracers_input: "[\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"]" + tracers: "[\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"]" + #---------------------------- # IO_LAYOUT_Y FORECAST config parameters diff --git a/ush/config_defaults_aqm.yaml b/ush/config_defaults_aqm.yaml index 13c1a5bfa..cea75c426 100644 --- a/ush/config_defaults_aqm.yaml +++ b/ush/config_defaults_aqm.yaml @@ -1,4 +1,16 @@ +task_make_ics: + chgres_cube: + namelist: + update_values: + config: + input_type: "gaussian_netcdf" +task_make_lbcs: + chgres_cube: + namelist: + update_values: + config: + input_type: "gaussian_netcdf" task_run_post: upp: files_to_copy: @@ -8,4 +20,4 @@ task_run_post: nampgb: aqf_on: true output_file_labels: - - cmaq + - cmaq \ No newline at end of file From 46a322773a9efd6baa852d053324a6ffbaf75fd2 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Wed, 21 Aug 2024 14:02:35 -0500 Subject: [PATCH 02/47] update coldstart.yaml --- parm/wflow/coldstart.yaml | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/parm/wflow/coldstart.yaml b/parm/wflow/coldstart.yaml index 6fad0b8d8..56d8b7773 100644 --- a/parm/wflow/coldstart.yaml +++ b/parm/wflow/coldstart.yaml @@ -85,7 +85,11 @@ metatask_run_ensemble: mem: '{% if global.DO_ENSEMBLE %}{%- for m in range(1, global.NUM_ENS_MEMBERS+1) -%}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' task_make_ics_mem#mem#: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK; "make_ics" "&JOBSdir;/JREGIONAL_MAKE_ICS"' + command: !cycstr 'source &USHdir;/load_modules_wflow.sh hera ; python &SCRIPTSdir;/chgres_cube.py + -c &GLOBAL_VAR_DEFNS_FP; + --cycle @Y-@m-@dT@H:@M:@S + --key-path task_make_ics + --mem #mem#' envars: <<: *default_vars SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' @@ -124,7 +128,11 @@ metatask_run_ensemble: task_make_lbcs_mem#mem#: <<: *default_task - command: '&LOAD_MODULES_RUN_TASK; "make_lbcs" "&JOBSdir;/JREGIONAL_MAKE_LBCS"' + command: !cycstr 'source &USHdir;/load_modules_wflow.sh hera ; python &SCRIPTSdir;/chgres_cube.py + -c &GLOBAL_VAR_DEFNS_FP; + --cycle @Y-@m-@dT@H:@M:@S + --key-path task_make_lbcs + --mem #mem#' envars: <<: *default_vars SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' From 1af2517d662d7723d156ef674e85b9b46e863036 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Tue, 27 Aug 2024 09:33:12 -0500 Subject: [PATCH 03/47] second pass on passing files --- scripts/chgres_cube.py | 49 +++++++++++++++++++++------------------- ush/config_defaults.yaml | 13 +++++++---- uwtools | 1 + 3 files changed, 35 insertions(+), 28 deletions(-) create mode 160000 uwtools diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py index 24d36331e..0f0a760da 100644 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -6,10 +6,8 @@ import os import sys from argparse import ArgumentParser -from copy import deepcopy from pathlib 
import Path -from uwtools.api.file import link as uwlink from uwtools.api.chgres_cube import Chgres_Cube from uwtools.api.config import get_yaml_config @@ -54,8 +52,32 @@ ) rundir = Path(chgres_cube_driver.config["rundir"]) print(f"Will run in {rundir}") -# Run chgres_cube -chgres_cube_driver.run() + + +varsfilepath = chgres_cube_driver.config["task_make_ics"]["input_files_metadata_path"] +extrn_config_fns = get_sh_config(varsfilepath)[EXTRN_MDL_FNS] + +# make_ics +fn_atm = extrn_config_fns[0] +fn_sfc = extrn_config_fns[1] + +# Loop the run of chgres_cube for the forecast length +num_fhrs = chgres_cube_driver.config["workflow"]["FCST_LEN_HRS"] +bcgrp10 = 0 +bcgrpnum10 = 1 +for ii in range(bcgrp10, num_fhrs, bcgrpnum10): + i = ii + bcgrpnum10 + if i < num_fhrs: + print(f"group ${bcgrp10} processes member ${i}") + fn_atm = f"${{EXTRN_MDL_FNS[${i}]}}" + fn_sfc= "$EXTRN_MDL_FNS[1]" + + if ics_or_lbcs == "LBCS": + chgres_cube_driver.config["task_make_lbcs"]["chgres_cube"]["namelist"]["update_values"]["config"]["atm_files_input_grid"] = fn_atm + else ics_or_lbcs == "ICS": + chgres_cube_driver.config["task_make_ics"]["chgres_cube"]["namelist"]["update_values"]["config"]["atm_files_input_grid"] = fn_atm + chgres_cube_driver.config["task_make_ics"]["chgres_cube"]["namelist"]["update_values"]["config"]["sfc_files_input_grid"] = fn_sfc + chgres_cube_driver.run() if not (rundir / "runscript.chgres_cube.done").is_file(): print("Error occurred running chgres_cube. Please see component error logs.") @@ -64,22 +86,3 @@ # Deliver output data expt_config = get_yaml_config(args.config_file) chgres_cube_config = expt_config[args.key_path] - -links = {} -for label in chgres_cube_config["output_file_labels"]: - # deepcopy here because desired_output_name is parameterized within the loop - expt_config_cp = get_yaml_config(deepcopy(expt_config.data)) - expt_config_cp.dereference( - context={ - "cycle": args.cycle, - "leadtime": args.leadtime, - "file_label": label, - **expt_config_cp, - } - ) - chgres_cube_block = expt_config_cp[args.key_path] - desired_output_fn = chgres_cube_block["desired_output_name"] - upp_output_fn = rundir / f"{label.upper()}.GrbF{int(args.leadtime.total_seconds() // 3600):02d}" - links[desired_output_fn] = str(upp_output_fn) - -uwlink(target_dir=rundir.parent, config=links) \ No newline at end of file diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 68f10f7cd..070685599 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -1518,6 +1518,7 @@ task_get_extrn_ics: USE_USER_STAGED_EXTRN_FILES: false EXTRN_MDL_SOURCE_BASEDIR_ICS: "" EXTRN_MDL_FILES_ICS: "" + rundir: '{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}/for_ICS{{ "/mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}' #---------------------------- # EXTRN LBCS config parameters @@ -1562,6 +1563,7 @@ task_get_extrn_lbcs: LBC_SPEC_INTVL_HRS: 6 EXTRN_MDL_LBCS_OFFSET_HRS: '{{ 3 if task_get_extrn_lbcs.EXTRN_MDL_NAME_LBCS == "RAP" else 0 }}' FV3GFS_FILE_FMT_LBCS: "nemsio" + rundir: '{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}/for_LBCS{{ "/mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}' # #----------------------------------------------------------------------- # @@ -1652,6 +1654,7 @@ task_make_ics: FVCOM_FILE: "fvcom.nc" VCOORD_FILE: "{{ workflow.FIXam }}/global_hyblev.l65.txt" #------------------------------------------------------------------------ + input_files_metadata_path: "{{ task_get_extrn_ics.rundir }}/{{ workflow.EXTRN_MDL_VAR_DEFNS_FN }}" chgres_cube: namelist: update_values: @@ 
-1663,9 +1666,9 @@ task_make_ics: vcoord_file_target_grid: "{{ workflow.FIXam }}/global_hyblev.l65.txt" varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" data_dir_input_grid: "{{ task_make_lbcs.chgres_cube.run_dir }}" - atm_files_input_grid: ${fn_atm} - sfc_files_input_grid': ${fn_sfc} - grib2_file_input_grid: \"${fn_grib2}\" + atm_files_input_grid: "" + sfc_files_input_grid': "" + grib2_file_input_grid: "" cycle_mon: !int "{{ cycle.strftime('%m') }}" cycle_day: !int "{{ cycle.strftime('%d') }}" cycle_hour: !int "{{ cycle.strftime('%H') }}" @@ -1679,7 +1682,7 @@ task_make_ics: external_model: "FV3GFS" tracers_input: "[\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"]" tracers: "[\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"]" - nsoill_out': 4 + nsoill_out: 4 vgtyp_from_climo: true sotyp_from_climo: true vgfrc_from_climo: true @@ -1723,7 +1726,7 @@ task_make_lbcs: varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" data_dir_input_grid: "{{ task_make_lbcs.chgres_cube.run_dir }}" atm_files_input_grid: ${fn_atm} - grib2_file_input_grid: \"${fn_grib2}\" + grib2_file_input_grid: "" cycle_mon: !int "{{ cycle.strftime('%m') }}" cycle_day: !int "{{ cycle.strftime('%d') }}" cycle_hour: !int "{{ cycle.strftime('%H') }}" diff --git a/uwtools b/uwtools new file mode 160000 index 000000000..33766a90a --- /dev/null +++ b/uwtools @@ -0,0 +1 @@ +Subproject commit 33766a90a9b9743f6e18c7d39ad5f59701cbeacc From b7884ccca6551af38e75e73adfc6ea1956e5284d Mon Sep 17 00:00:00 2001 From: WeirAE Date: Wed, 4 Sep 2024 17:41:26 -0500 Subject: [PATCH 04/47] progress continues adding functionality --- scripts/chgres_cube.py | 128 +++++++++++++++++++++++++++----- ush/config_ccpp_suites.yaml | 6 ++ ush/config_defaults.yaml | 4 +- ush/config_external_models.yaml | 11 +++ 4 files changed, 127 insertions(+), 22 deletions(-) create mode 100644 ush/config_ccpp_suites.yaml create mode 100644 ush/config_external_models.yaml diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py index 0f0a760da..533338bf4 100644 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -6,10 +6,12 @@ import os import sys from argparse import ArgumentParser +from copy import deepcopy from pathlib import Path from uwtools.api.chgres_cube import Chgres_Cube -from uwtools.api.config import get_yaml_config +from uwtools.api.config import get_sh_config, get_yaml_config +from uwtools.api.file import link as uwlink parser = ArgumentParser( @@ -44,6 +46,30 @@ os.environ["member"] = args.member +# Print message indicating entry into script. +print(""" +======================================================================== +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" + +This is the ex-script for the task that generates lateral boundary con- +dition (LBC) files (in NetCDF format) for all LBC update hours (except +hour zero). 
+======================================================================== + """) + + +# fix CRES dereferencing +expt_config = get_yaml_config(args.config_file) +os.environ["CRES"] = expt_config["workflow"]["CRES"] +expt_config.dereference( + context={ + **os.environ, + **expt_config_cp, + } +) +chgres_cube_config = expt_config[args.key_path] + # Extract driver config from experiment config chgres_cube_driver = Chgres_Cube( config=args.config_file, @@ -53,36 +79,98 @@ rundir = Path(chgres_cube_driver.config["rundir"]) print(f"Will run in {rundir}") - +if args.key_path == "task_make_ics" varsfilepath = chgres_cube_driver.config["task_make_ics"]["input_files_metadata_path"] -extrn_config_fns = get_sh_config(varsfilepath)[EXTRN_MDL_FNS] +extrn_config_fns = get_sh_config(varsfilepath)["EXTRN_MDL_FNS"] +extrn_config_fhrs = get_sh_config(varsfilepath)["EXTRN_MDL_FHRS"] # make_ics fn_atm = extrn_config_fns[0] fn_sfc = extrn_config_fns[1] # Loop the run of chgres_cube for the forecast length -num_fhrs = chgres_cube_driver.config["workflow"]["FCST_LEN_HRS"] -bcgrp10 = 0 -bcgrpnum10 = 1 -for ii in range(bcgrp10, num_fhrs, bcgrpnum10): - i = ii + bcgrpnum10 - if i < num_fhrs: - print(f"group ${bcgrp10} processes member ${i}") - fn_atm = f"${{EXTRN_MDL_FNS[${i}]}}" - fn_sfc= "$EXTRN_MDL_FNS[1]" - - if ics_or_lbcs == "LBCS": - chgres_cube_driver.config["task_make_lbcs"]["chgres_cube"]["namelist"]["update_values"]["config"]["atm_files_input_grid"] = fn_atm - else ics_or_lbcs == "ICS": - chgres_cube_driver.config["task_make_ics"]["chgres_cube"]["namelist"]["update_values"]["config"]["atm_files_input_grid"] = fn_atm - chgres_cube_driver.config["task_make_ics"]["chgres_cube"]["namelist"]["update_values"]["config"]["sfc_files_input_grid"] = fn_sfc - chgres_cube_driver.run() +if len(extrn_config_fns) > 2: + fn_sfc= "" + num_fhrs = chgres_cube_driver.config["workflow"]["FCST_LEN_HRS"] + bcgrp10 = 0 + bcgrpnum10 = 1 + for ii in range(bcgrp10, num_fhrs, bcgrpnum10): + i = ii + bcgrpnum10 + if i < num_fhrs: + print(f"group ${bcgrp10} processes member ${i}") + fn_atm = f"${{EXTRN_MDL_FNS[${i}]}}" + + expt_config["task_make_lbcs"]["chgres_cube"]["namelist"]["update_values"]["config"]["atm_files_input_grid"] = fn_atm + # reinstantiate driver + chgres_cube_driver = Chgres_Cube( + config=expt_config, + cycle=args.cycle, + key_path=[args.key_path], + ) + chgres_cube_driver.run() +else: + chgres_cube_driver.run() +# error message if not (rundir / "runscript.chgres_cube.done").is_file(): - print("Error occurred running chgres_cube. Please see component error logs.") + print(""" +Call to executable (exec_fp) to generate lateral boundary conditions (LBCs) +file for the FV3-LAM for forecast hour fhr failed: + exec_fp = \"${exec_fp}\" + fhr = \"$fhr\" +The external model from which the LBCs files are to be generated is: + EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\" +The external model files that are inputs to the executable (exec_fp) are +located in the following directory: + extrn_mdl_staging_dir = \"${extrn_mdl_staging_dir}\" + """) sys.exit(1) # Deliver output data expt_config = get_yaml_config(args.config_file) chgres_cube_config = expt_config[args.key_path] + + +# Move initial condition, surface, control, and 0-th hour lateral bound- +# ary files to ICs_BCs directory. 
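Before the linking loop itself: the delivery idiom used just below comes from the uwtools file API — build a dict mapping each link name to the existing file it should resolve to, then hand the whole mapping to link() in one call, with the same target_dir/config signature imported at the top of this script. A minimal self-contained sketch of the pattern; the run directory and file names here are illustrative placeholders, not values from this patch:

    from pathlib import Path

    from uwtools.api.file import link as uwlink

    # Hypothetical run directory and boundary file, for illustration only.
    rundir = Path("/tmp/expt/2024071518/for_LBCS")
    links = {
        # link name created under target_dir -> existing file it points at
        "rrfs.t18z.gfs_bndy.tile7.f003.nc": str(rundir / "gfs.bndy.nc"),
    }
    uwlink(target_dir=rundir.parent, config=links)

Collecting every file into one mapping keeps delivery to a single call, so the loop below only has to compute names.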
+links = {} +for label in chgres_cube_config["output_file_labels"]: + # deepcopy here because desired_output_name is parameterized within the loop + expt_config_cp = get_yaml_config(deepcopy(expt_config.data)) + expt_config_cp.dereference( + context={ + "cycle": args.cycle, + "leadtime": args.leadtime, + "file_label": label, + **expt_config_cp, + } + ) + lbc_block = expt_config_cp[args.key_path] + lbc_input_fn = "gfs.bndy.nc" + lbc_spec_fhrs = extrn_config_fhrs[i] + lbc_offset_fhrs = chgres_cube_driver.config["task_get_extrn_lbcs"]["EXTRN_MDL_LBCS_OFFSET_HRS"] + nco_net = chgres_cube_driver.config["nco"]["NET_default"] + dot_ensmem = f".mem{ENSMEM_INDX}" + fcst_hhh = ( lbc_spec_fhrs - lbc_offset_fhrs ) + fcst_hhh_FV3LAM = print(f"fcst_hhh:03d") + + lbc_output_fn = rundir / f"{nco_net}.{args.cycle}{dot_ensmem}.gfs_bndy.tile7.f{fcst_hhh_FV3LAM}.nc" + + links[lbc_input_fn] = str(lbc_output_fn) + +uwlink(target_dir=rundir.parent, config=links) + +# Process FVCOM Data + + + +# Print message indicating successful completion of script. +print(""" +======================================================================== +Lateral boundary condition (LBC) files (in NetCDF format) generated suc- +cessfully for all LBC update hours (except hour zero)!!! + +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" +======================================================================== + """) \ No newline at end of file diff --git a/ush/config_ccpp_suites.yaml b/ush/config_ccpp_suites.yaml new file mode 100644 index 000000000..4fe3dff42 --- /dev/null +++ b/ush/config_ccpp_suites.yaml @@ -0,0 +1,6 @@ +task_make_ics: + chgres_cube: + namelist: + update_values: + config: + varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" \ No newline at end of file diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 070685599..226fc00c4 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -1660,9 +1660,9 @@ task_make_ics: update_values: config: fix_dir_target_grid: "{{ workflow.FIXlam }}" - mosaic_file_target_grid: "{{ workflow.FIXlam }}/{{ workflow.CRES }}{{ workflow.DOT_OR_USCORE }}mosaic.halo{{ constants.NH4 }}.nc" + mosaic_file_target_grid: "{{ workflow.FIXlam }}/{{ "CRES" | env }} {{ workflow.DOT_OR_USCORE }}mosaic.halo{{ constants.NH4 }}.nc" orog_dir_target_grid: "{{ workflow.FIXlam }}" - orog_files_target_grid: "{{ workflow.CRES }}{{ workflow.DOT_OR_USCORE }}oro_data.tile{{ constants. TILE_RGNL }}.halo{{ constants.NH4 }}.nc" + orog_files_target_grid: "{{ "CRES" | env }} {{ workflow.DOT_OR_USCORE }}oro_data.tile{{ constants. 
TILE_RGNL }}.halo{{ constants.NH4 }}.nc" vcoord_file_target_grid: "{{ workflow.FIXam }}/global_hyblev.l65.txt" varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" data_dir_input_grid: "{{ task_make_lbcs.chgres_cube.run_dir }}" diff --git a/ush/config_external_models.yaml b/ush/config_external_models.yaml new file mode 100644 index 000000000..960538aae --- /dev/null +++ b/ush/config_external_models.yaml @@ -0,0 +1,11 @@ +task_make_ics: + chgres_cube: + namelist: + update_values: + config: + atm_files_input_grid: "" + grib2_file_input_grid: "" + input_type: "gaussian_nemsio" + external_model: "FV3GFS" + tracers_input: "[\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"]" + tracers: "[\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"]" \ No newline at end of file From b1883013cc82870b4a60cec40c67ca9332bc7716 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Thu, 5 Sep 2024 12:52:31 -0500 Subject: [PATCH 05/47] update secondary YAML logic --- ush/config_ccpp_suites.yaml | 48 ++++++++++++++++++++++++++++++++- ush/config_external_models.yaml | 44 ++++++++++++++++++++++++++---- 2 files changed, 86 insertions(+), 6 deletions(-) diff --git a/ush/config_ccpp_suites.yaml b/ush/config_ccpp_suites.yaml index 4fe3dff42..579170c28 100644 --- a/ush/config_ccpp_suites.yaml +++ b/ush/config_ccpp_suites.yaml @@ -3,4 +3,50 @@ task_make_ics: namelist: update_values: config: - varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" \ No newline at end of file + varmap_file: > + {% if {{ workflow.CCPP_PHYS_SUITE }} in [ + "FV3_GFS_2017_gfdlmp", + "FV3_GFS_2017_gfdlmp_regional", + "FV3_GFS_v16", + "FV3_GFS_v15p2" + ] %} + "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" + {% elif {{ workflow.CCPP_PHYS_SUITE }} in [ + "FV3_RRFS_v1beta", + "FV3_GFS_v15_thompson_mynn_lam3km", + "FV3_GFS_v17_p8", + "FV3_WoFS_v0", + "FV3_HRRR", + "FV3_RAP" + ] and {{ task_get_extrn_ics.EXTRN_MDL_NAME_ICS }} in [ + "RAP", + "HRRR" + ] %} + "{{ user.PARMdir }}/ufs_utils/varmap_tables/GSDphys_var_map.txt" + {% endif %} +task_make_lbcs: + chgres_cube: + namelist: + update_values: + config: + varmap_file: > + {% if {{ workflow.CCPP_PHYS_SUITE }} in [ + "FV3_GFS_2017_gfdlmp", + "FV3_GFS_2017_gfdlmp_regional", + "FV3_GFS_v16", + "FV3_GFS_v15p2" + ] %} + "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" + {% elif {{ workflow.CCPP_PHYS_SUITE }} in [ + "FV3_RRFS_v1beta", + "FV3_GFS_v15_thompson_mynn_lam3km", + "FV3_GFS_v17_p8", + "FV3_WoFS_v0", + "FV3_HRRR", + "FV3_RAP" + ] and {{ task_get_extrn_lbcs.EXTRN_MDL_NAME_LBCS }} in [ + "RAP", + "HRRR" + ] %} + "{{ user.PARMdir }}/ufs_utils/varmap_tables/GSDphys_var_map.txt" + {% endif %} \ No newline at end of file diff --git a/ush/config_external_models.yaml b/ush/config_external_models.yaml index 960538aae..e3cdbeace 100644 --- a/ush/config_external_models.yaml +++ b/ush/config_external_models.yaml @@ -2,10 +2,44 @@ task_make_ics: chgres_cube: namelist: update_values: - config: + config: > + fn_atm: "" + fn_sfc: "" + vgtyp_from_climo: True + sotyp_from_climo: True + vgfrc_from_climo: True + minmax_vgfrc_from_climo: True + lai_from_climo: True + tg3_from_soil: '"true" if {{ task_get_extrn_ics.EXTRN_MDL_NAME_ICS }} == "GDAS" else "false"' + convert_nst: '"true" if {{ task_get_extrn_ics.EXTRN_MDL_NAME_ICS }} == "FV3GFS" && {{ task_get_extrn_ics.FV3GFS_FILE_FMT_ICS }} != "grib2" else "false" + external_model: {{ task_get_extrn_ics.EXTRN_MDL_NAME_ICS }} + input_type: '"grib2" if {{ 
task_get_extrn_ics.FV3GFS_FILE_FMT_ICS }} == "grib2" else "gaussian_{{ task_get_extrn_ics.FV3GFS_FILE_FMT_ICS }}"' + {% if {{ task_get_extrn_lbcs.EXTRN_MDL_NAME_LBCS }} in [FV3GFS, UFS-CASE-STUDY, GDAS] + and {{ task_get_extrn_lbcs.FV3GFS_FILE_FMT_LBCS }} != "grib2" %} + tracers_input: "[\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"]" + tracers: "[\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"]" + elif {{ task_get_extrn_lbcs.EXTRN_MDL_NAME_LBCS }} == "GSMGFS" + input_type: "gfs_gaussian_nemsio" + tracers_input: "[\"spfh\",\"clwmr\",\"o3mr\"]" + tracers: "[\"sphum\",\"liq_wat\",\"o3mr\"]" + {% endif %} + geogrid_file_input_grid: '"{{ platform.FIXgsm }}/geo_em.d01.nc_{{ task_get_extrn_ics.EXTRN_MDL_NAME_ICS }}X" if {{ task_get_extrn_ics.EXTRN_MDL_NAME_ICS }} in ["RAP", "HRRR"] else ""' + +task_make_lbcs: + chgres_cube: + namelist: + update_values: + config: > atm_files_input_grid: "" grib2_file_input_grid: "" - input_type: "gaussian_nemsio" - external_model: "FV3GFS" - tracers_input: "[\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"]" - tracers: "[\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"]" \ No newline at end of file + external_model: {{ task_get_extrn_lbcs.EXTRN_MDL_NAME_LBCS }} + input_type: '"grib2" if {{ task_get_extrn_lbcs.FV3GFS_FILE_FMT_LBCS }} = "grib2" else "gaussian_{{ task_get_extrn_lbcs.FV3GFS_FILE_FMT_LBCS }}"' + {% if {{ task_get_extrn_lbcs.EXTRN_MDL_NAME_LBCS }} in [FV3GFS, UFS-CASE-STUDY, GDAS] + and {{ task_get_extrn_lbcs.FV3GFS_FILE_FMT_LBCS }} != "grib2" %} + tracers_input: "[\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"]" + tracers: "[\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"]" + elif {{ task_get_extrn_lbcs.EXTRN_MDL_NAME_LBCS }} == "GSMGFS" + input_type: "gfs_gaussian_nemsio" + tracers_input: "[\"spfh\",\"clwmr\",\"o3mr\"]" + tracers: "[\"sphum\",\"liq_wat\",\"o3mr\"]" + {% endif %} \ No newline at end of file From 47530895de2d2e4e03e2cead4ce298181bdfff3f Mon Sep 17 00:00:00 2001 From: WeirAE Date: Tue, 17 Sep 2024 10:09:09 -0500 Subject: [PATCH 06/47] first version edits complete --- scripts/chgres_cube.py | 42 +---- ush/ccpp_suites_defaults.yaml | 54 ++++++ ush/config_ccpp_suites.yaml | 52 ------ ush/config_external_models.yaml | 45 ----- ush/external_model_defaults.yaml | 91 ++++++++++ ush/setup.py | 295 +++++++++++++++++++------------ 6 files changed, 327 insertions(+), 252 deletions(-) create mode 100644 ush/ccpp_suites_defaults.yaml delete mode 100644 ush/config_ccpp_suites.yaml delete mode 100644 ush/config_external_models.yaml create mode 100644 ush/external_model_defaults.yaml diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py index 533338bf4..3e0405d35 100644 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -46,18 +46,6 @@ os.environ["member"] = args.member -# Print message indicating entry into script. -print(""" -======================================================================== -Entering script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" - -This is the ex-script for the task that generates lateral boundary con- -dition (LBC) files (in NetCDF format) for all LBC update hours (except -hour zero). 
-======================================================================== - """) - # fix CRES dereferencing expt_config = get_yaml_config(args.config_file) @@ -79,6 +67,7 @@ rundir = Path(chgres_cube_driver.config["rundir"]) print(f"Will run in {rundir}") + if args.key_path == "task_make_ics" varsfilepath = chgres_cube_driver.config["task_make_ics"]["input_files_metadata_path"] extrn_config_fns = get_sh_config(varsfilepath)["EXTRN_MDL_FNS"] @@ -113,17 +102,7 @@ # error message if not (rundir / "runscript.chgres_cube.done").is_file(): - print(""" -Call to executable (exec_fp) to generate lateral boundary conditions (LBCs) -file for the FV3-LAM for forecast hour fhr failed: - exec_fp = \"${exec_fp}\" - fhr = \"$fhr\" -The external model from which the LBCs files are to be generated is: - EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\" -The external model files that are inputs to the executable (exec_fp) are -located in the following directory: - extrn_mdl_staging_dir = \"${extrn_mdl_staging_dir}\" - """) + print("Error occurred running chgres_cube. Please see component error logs.") sys.exit(1) # Deliver output data @@ -150,7 +129,7 @@ lbc_spec_fhrs = extrn_config_fhrs[i] lbc_offset_fhrs = chgres_cube_driver.config["task_get_extrn_lbcs"]["EXTRN_MDL_LBCS_OFFSET_HRS"] nco_net = chgres_cube_driver.config["nco"]["NET_default"] - dot_ensmem = f".mem{ENSMEM_INDX}" + dot_ensmem = f".mem{ args.member }" fcst_hhh = ( lbc_spec_fhrs - lbc_offset_fhrs ) fcst_hhh_FV3LAM = print(f"fcst_hhh:03d") @@ -159,18 +138,3 @@ links[lbc_input_fn] = str(lbc_output_fn) uwlink(target_dir=rundir.parent, config=links) - -# Process FVCOM Data - - - -# Print message indicating successful completion of script. -print(""" -======================================================================== -Lateral boundary condition (LBC) files (in NetCDF format) generated suc- -cessfully for all LBC update hours (except hour zero)!!! 
- -Exiting script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" -======================================================================== - """) \ No newline at end of file diff --git a/ush/ccpp_suites_defaults.yaml b/ush/ccpp_suites_defaults.yaml new file mode 100644 index 000000000..a5e247d7a --- /dev/null +++ b/ush/ccpp_suites_defaults.yaml @@ -0,0 +1,54 @@ +gsd_defaults: &gsd_defaults + chgres_cube: + namelist: + update_values: + config: + varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" + +FV3_RAP + task_make_ics: + chgres_cube: + <<: *gsd_defaults + task_make_lbcs: + chgres_cube: + <<: *gsd_defaults + +FV3_HRRR + task_make_ics: + chgres_cube: + <<: *gsd_defaults + task_make_lbcs: + chgres_cube: + <<: *gsd_defaults + +FV3_WoFS_v0 + task_make_ics: + chgres_cube: + <<: *gsd_defaults + task_make_lbcs: + chgres_cube: + <<: *gsd_defaults + +FV3_RRFS_v1beta + task_make_ics: + chgres_cube: + <<: *gsd_defaults + task_make_lbcs: + chgres_cube: + <<: *gsd_defaults + +FV3_GFS_v15_thompson_mynn_lam3km + task_make_ics: + chgres_cube: + <<: *gsd_defaults + task_make_lbcs: + chgres_cube: + <<: *gsd_defaults + +FV3_GFS_v17_p8 + task_make_ics: + chgres_cube: + <<: *gsd_defaults + task_make_lbcs: + chgres_cube: + <<: *gsd_defaults \ No newline at end of file diff --git a/ush/config_ccpp_suites.yaml b/ush/config_ccpp_suites.yaml deleted file mode 100644 index 579170c28..000000000 --- a/ush/config_ccpp_suites.yaml +++ /dev/null @@ -1,52 +0,0 @@ -task_make_ics: - chgres_cube: - namelist: - update_values: - config: - varmap_file: > - {% if {{ workflow.CCPP_PHYS_SUITE }} in [ - "FV3_GFS_2017_gfdlmp", - "FV3_GFS_2017_gfdlmp_regional", - "FV3_GFS_v16", - "FV3_GFS_v15p2" - ] %} - "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" - {% elif {{ workflow.CCPP_PHYS_SUITE }} in [ - "FV3_RRFS_v1beta", - "FV3_GFS_v15_thompson_mynn_lam3km", - "FV3_GFS_v17_p8", - "FV3_WoFS_v0", - "FV3_HRRR", - "FV3_RAP" - ] and {{ task_get_extrn_ics.EXTRN_MDL_NAME_ICS }} in [ - "RAP", - "HRRR" - ] %} - "{{ user.PARMdir }}/ufs_utils/varmap_tables/GSDphys_var_map.txt" - {% endif %} -task_make_lbcs: - chgres_cube: - namelist: - update_values: - config: - varmap_file: > - {% if {{ workflow.CCPP_PHYS_SUITE }} in [ - "FV3_GFS_2017_gfdlmp", - "FV3_GFS_2017_gfdlmp_regional", - "FV3_GFS_v16", - "FV3_GFS_v15p2" - ] %} - "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" - {% elif {{ workflow.CCPP_PHYS_SUITE }} in [ - "FV3_RRFS_v1beta", - "FV3_GFS_v15_thompson_mynn_lam3km", - "FV3_GFS_v17_p8", - "FV3_WoFS_v0", - "FV3_HRRR", - "FV3_RAP" - ] and {{ task_get_extrn_lbcs.EXTRN_MDL_NAME_LBCS }} in [ - "RAP", - "HRRR" - ] %} - "{{ user.PARMdir }}/ufs_utils/varmap_tables/GSDphys_var_map.txt" - {% endif %} \ No newline at end of file diff --git a/ush/config_external_models.yaml b/ush/config_external_models.yaml deleted file mode 100644 index e3cdbeace..000000000 --- a/ush/config_external_models.yaml +++ /dev/null @@ -1,45 +0,0 @@ -task_make_ics: - chgres_cube: - namelist: - update_values: - config: > - fn_atm: "" - fn_sfc: "" - vgtyp_from_climo: True - sotyp_from_climo: True - vgfrc_from_climo: True - minmax_vgfrc_from_climo: True - lai_from_climo: True - tg3_from_soil: '"true" if {{ task_get_extrn_ics.EXTRN_MDL_NAME_ICS }} == "GDAS" else "false"' - convert_nst: '"true" if {{ task_get_extrn_ics.EXTRN_MDL_NAME_ICS }} == "FV3GFS" && {{ task_get_extrn_ics.FV3GFS_FILE_FMT_ICS }} != "grib2" else "false" - external_model: {{ task_get_extrn_ics.EXTRN_MDL_NAME_ICS }} - input_type: '"grib2" if {{ 
task_get_extrn_ics.FV3GFS_FILE_FMT_ICS }} == "grib2" else "gaussian_{{ task_get_extrn_ics.FV3GFS_FILE_FMT_ICS }}"' - {% if {{ task_get_extrn_lbcs.EXTRN_MDL_NAME_LBCS }} in [FV3GFS, UFS-CASE-STUDY, GDAS] - and {{ task_get_extrn_lbcs.FV3GFS_FILE_FMT_LBCS }} != "grib2" %} - tracers_input: "[\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"]" - tracers: "[\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"]" - elif {{ task_get_extrn_lbcs.EXTRN_MDL_NAME_LBCS }} == "GSMGFS" - input_type: "gfs_gaussian_nemsio" - tracers_input: "[\"spfh\",\"clwmr\",\"o3mr\"]" - tracers: "[\"sphum\",\"liq_wat\",\"o3mr\"]" - {% endif %} - geogrid_file_input_grid: '"{{ platform.FIXgsm }}/geo_em.d01.nc_{{ task_get_extrn_ics.EXTRN_MDL_NAME_ICS }}X" if {{ task_get_extrn_ics.EXTRN_MDL_NAME_ICS }} in ["RAP", "HRRR"] else ""' - -task_make_lbcs: - chgres_cube: - namelist: - update_values: - config: > - atm_files_input_grid: "" - grib2_file_input_grid: "" - external_model: {{ task_get_extrn_lbcs.EXTRN_MDL_NAME_LBCS }} - input_type: '"grib2" if {{ task_get_extrn_lbcs.FV3GFS_FILE_FMT_LBCS }} = "grib2" else "gaussian_{{ task_get_extrn_lbcs.FV3GFS_FILE_FMT_LBCS }}"' - {% if {{ task_get_extrn_lbcs.EXTRN_MDL_NAME_LBCS }} in [FV3GFS, UFS-CASE-STUDY, GDAS] - and {{ task_get_extrn_lbcs.FV3GFS_FILE_FMT_LBCS }} != "grib2" %} - tracers_input: "[\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"]" - tracers: "[\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"]" - elif {{ task_get_extrn_lbcs.EXTRN_MDL_NAME_LBCS }} == "GSMGFS" - input_type: "gfs_gaussian_nemsio" - tracers_input: "[\"spfh\",\"clwmr\",\"o3mr\"]" - tracers: "[\"sphum\",\"liq_wat\",\"o3mr\"]" - {% endif %} \ No newline at end of file diff --git a/ush/external_model_defaults.yaml b/ush/external_model_defaults.yaml new file mode 100644 index 000000000..a42fac26a --- /dev/null +++ b/ush/external_model_defaults.yaml @@ -0,0 +1,91 @@ +grib2_defaults: &grib2_defaults + chgres_cube: + namelist: + update_values: + config: + input_type: "grib2" + convert_nst: False + tracers_input: "" + tracers: "" + +GSMGFS + task_make_ics: + chgres_cube: + input_type: "gfs_gaussian_nemsio" + external_model: "GSMGFS" + convert_nst: False + tracers_input: "[\"spfh\",\"clwmr\",\"o3mr\"]" + tracers: "[\"sphum\",\"liq_wat\",\"o3mr\"]" + task_make_lbcs: + chgres_cube: + input_type: "gfs_gaussian_nemsio" + external_model: "GSMGFS" + convert_nst: False + tracers_input: "[\"spfh\",\"clwmr\",\"o3mr\"]" + tracers: "[\"sphum\",\"liq_wat\",\"o3mr\"]" + +UFS-CASE-STUDY + task_make_ics: + chgres_cube: + external_model: "UFS-CASE-STUDY" + task_make_lbcs: + chgres_cube: + external_model: "UFS-CASE-STUDY" + +GDAS + task_make_ics: + chgres_cube: + external_model: "GFS" + tg3_from_soil: True + task_make_lbcs: + chgres_cube: + external_model: "GFS" + tg3_from_soil: True + +GEFS + task_make_ics: + chgres_cube: + external_model: "GFS" + <<: *grib2_defaults + task_make_lbcs: + chgres_cube: + external_model: "GFS" + <<: *grib2_defaults + +HRRR + task_make_ics: + chgres_cube: + external_model: "HRRR" + geogrid_file_input_grid: "{{ workflow.FIXgsm }}/geo_em.d01.nc_HRRRX" + tg3_from_soil: True + <<: *grib2_defaults + task_make_lbcs: + chgres_cube: + external_model: "HRRR" + geogrid_file_input_grid: "{{ workflow.FIXgsm }}/geo_em.d01.nc_HRRRX" + tg3_from_soil: True + <<: *grib2_defaults + +RAP + task_make_ics: + chgres_cube: + external_model: "RAP" + geogrid_file_input_grid: "{{ workflow.FIXgsm }}/geo_em.d01.nc_RAPX" + tg3_from_soil: True + <<: 
*grib2_defaults + task_make_lbcs: + chgres_cube: + external_model: "RAP" + geogrid_file_input_grid: "{{ workflow.FIXgsm }}/geo_em.d01.nc_RAPX" + tg3_from_soil: True + <<: *grib2_defaults + +NAM + task_make_ics: + chgres_cube: + external_model: "NAM" + <<: *grib2_defaults + task_make_lbcs: + chgres_cube: + external_model: "NAM" + <<: *grib2_defaults \ No newline at end of file diff --git a/ush/setup.py b/ush/setup.py index 7575b5a2b..6fbbce59a 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -47,6 +47,7 @@ from set_gridparams_GFDLgrid import set_gridparams_GFDLgrid from uwtools.api.config import get_yaml_config + def load_config_for_setup(ushdir, default_config, user_config): """Load in the default, machine, and user configuration files into Python dictionaries. Return the combined experiment dictionary. @@ -78,7 +79,9 @@ def load_config_for_setup(ushdir, default_config, user_config): try: cfg_u = load_config_file(user_config) - logging.debug(f"Read in the following values from YAML config file {user_config}:\n") + logging.debug( + f"Read in the following values from YAML config file {user_config}:\n" + ) logging.debug(cfg_u) except: errmsg = dedent( @@ -104,7 +107,9 @@ def load_config_for_setup(ushdir, default_config, user_config): errmsg = f"Invalid key(s) specified in {user_config}:\n" for entry in invalid: errmsg = errmsg + f"{entry} = {invalid[entry]}\n" - errmsg = errmsg + f"\nCheck {default_config} for allowed user-specified variables\n" + errmsg = ( + errmsg + f"\nCheck {default_config} for allowed user-specified variables\n" + ) raise Exception(errmsg) # Mandatory variables *must* be set in the user's config; the default value is invalid @@ -144,36 +149,36 @@ def load_config_for_setup(ushdir, default_config, user_config): # Load the constants file cfg_c = load_config_file(os.path.join(ushdir, "constants.yaml")) - # Load the rocoto workflow default file - cfg_wflow = load_config_file(os.path.join(ushdir, os.pardir, "parm", - "wflow", "default_workflow.yaml")) + cfg_wflow = load_config_file( + os.path.join(ushdir, os.pardir, "parm", "wflow", "default_workflow.yaml") + ) # Takes care of removing any potential "null" entries, i.e., # unsetting a default value from an anchored default_task update_dict(cfg_wflow, cfg_wflow) - # Take any user-specified taskgroups entry here. - taskgroups = cfg_u.get('rocoto', {}).get('tasks', {}).get('taskgroups') + taskgroups = cfg_u.get("rocoto", {}).get("tasks", {}).get("taskgroups") if taskgroups: - cfg_wflow['rocoto']['tasks']['taskgroups'] = taskgroups + cfg_wflow["rocoto"]["tasks"]["taskgroups"] = taskgroups # Extend yaml here on just the rocoto section to include the # appropriate groups of tasks extend_yaml(cfg_wflow) - # Put the entries expanded under taskgroups in tasks rocoto_tasks = cfg_wflow["rocoto"]["tasks"] - cfg_wflow["rocoto"]["tasks"] = yaml.load(rocoto_tasks.pop("taskgroups"),Loader=yaml.SafeLoader) + cfg_wflow["rocoto"]["tasks"] = yaml.load( + rocoto_tasks.pop("taskgroups"), Loader=yaml.SafeLoader + ) # Update wflow config from user one more time to make sure any of # the "null" settings are removed, i.e., tasks turned off. 
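Stepping back from this hunk for a moment: both new defaults files above lean on YAML anchors and merge keys (&gsd_defaults, <<: *grib2_defaults), and setup.py then selects one top-level block by suite or model name and merges it over the experiment config. A small sketch of how the merge key behaves under PyYAML's safe loader, with a flat mapping standing in for the real nested one:

    import yaml

    doc = """
    gsd_defaults: &gsd_defaults
      varmap_file: GSDphys_var_map.txt

    FV3_RAP:
      task_make_ics:
        chgres_cube:
          <<: *gsd_defaults
    """
    cfg = yaml.safe_load(doc)
    print(cfg["FV3_RAP"]["task_make_ics"]["chgres_cube"]["varmap_file"])
    # -> GSDphys_var_map.txt

Note that the merge splices the anchored mapping's own keys in at the point of the <<; since gsd_defaults in the file above itself begins at a chgres_cube: key and is merged underneath another chgres_cube:, the merged result nests chgres_cube twice — worth watching when anchoring a whole block rather than the innermost mapping.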
- update_dict(cfg_u.get('rocoto', {}), cfg_wflow["rocoto"]) + update_dict(cfg_u.get("rocoto", {}), cfg_wflow["rocoto"]) def add_jobname(tasks): - """ Add the jobname entry for all the tasks in the workflow """ + """Add the jobname entry for all the tasks in the workflow""" if not isinstance(tasks, dict): return @@ -182,13 +187,13 @@ def add_jobname(tasks): if task_type == "task": # Use the provided attribute if it is present, otherwise use # the name in the key - tasks[task]["jobname"] = \ - task_settings.get("attrs", {}).get("name") or \ - task.split("_", maxsplit=1)[1] + tasks[task]["jobname"] = ( + task_settings.get("attrs", {}).get("name") + or task.split("_", maxsplit=1)[1] + ) elif task_type == "metatask": add_jobname(task_settings) - # Add jobname entry to each remaining task add_jobname(cfg_wflow["rocoto"]["tasks"]) @@ -218,10 +223,23 @@ def add_jobname(tasks): update_dict(cfg_d, cfg_d) # Load one more if running Coupled AQM - if cfg_d['cpl_aqm_parm']['CPL_AQM']: + if cfg_d["cpl_aqm_parm"]["CPL_AQM"]: cfg_aqm = get_yaml_config("config_defaults_aqm.yaml") update_dict(cfg_aqm, cfg_d) + # Load CCPP suite-specific settings + ccpp_suite = cfg_d["workflow"]["CCPP_PHYS_SUITE"] + ccpp_cfg = get_yaml_config(Path(ushdir, "ccpp_suites_defaults.yaml")).get( + ccpp_suite, {} + ) + update_dict(ccpp_cfg, cfg_d) + + # Load external model-specific settings + extrn_mdl = cfg_d["workflow"]["EXTRN_MDL"] + extrn_cfg = get_yaml_config(Path(ushdir, "external_model_defaults.yaml")).get( + extrn_mdl, {} + ) + update_dict(extrn_cfg, cfg_d) # Set "Home" directory, the top-level ufs-srweather-app directory homedir = os.path.abspath(os.path.dirname(__file__) + os.sep + os.pardir) @@ -286,7 +304,6 @@ def add_jobname(tasks): def set_srw_paths(ushdir, expt_config): - """ Generate a dictionary of directories that describe the SRW structure, i.e., where SRW is installed, and the paths to @@ -427,7 +444,6 @@ def setup(USHdir, user_config_fn="config.yaml", debug: bool = False): fcst_len_hrs_max = {fcst_len_hrs_max}""" ) - # # ----------------------------------------------------------------------- # @@ -491,11 +507,11 @@ def setup(USHdir, user_config_fn="config.yaml", debug: bool = False): # ----------------------------------------------------------------------- # - rocoto_config = expt_config.get('rocoto', {}) + rocoto_config = expt_config.get("rocoto", {}) rocoto_tasks = rocoto_config.get("tasks") - run_make_grid = rocoto_tasks.get('task_make_grid') is not None - run_make_orog = rocoto_tasks.get('task_make_orog') is not None - run_make_sfc_climo = rocoto_tasks.get('task_make_sfc_climo') is not None + run_make_grid = rocoto_tasks.get("task_make_grid") is not None + run_make_orog = rocoto_tasks.get("task_make_orog") is not None + run_make_sfc_climo = rocoto_tasks.get("task_make_sfc_climo") is not None # Necessary tasks are turned on pregen_basedir = expt_config["platform"].get("DOMAIN_PREGEN_BASEDIR") @@ -523,7 +539,7 @@ def setup(USHdir, user_config_fn="config.yaml", debug: bool = False): ) def remove_tag(tasks, tag): - """ Remove the tag for all the tasks in the workflow """ + """Remove the tag for all the tasks in the workflow""" if not isinstance(tasks, dict): return @@ -539,10 +555,10 @@ def remove_tag(tasks, tag): if remove_memory: remove_tag(rocoto_tasks, "memory") - for part in ['PARTITION_HPSS', 'PARTITION_DEFAULT', 'PARTITION_FCST']: + for part in ["PARTITION_HPSS", "PARTITION_DEFAULT", "PARTITION_FCST"]: partition = expt_config["platform"].get(part) if not partition: - remove_tag(rocoto_tasks, 'partition') 
+ remove_tag(rocoto_tasks, "partition") # When not running subhourly post, remove those tasks, if they exist if not expt_config.get("task_run_post", {}).get("SUB_HOURLY_POST"): @@ -560,29 +576,37 @@ def remove_tag(tasks, tag): vx_metatasks_all = {} vx_fields_all["CCPA"] = ["APCP"] - vx_metatasks_all["CCPA"] = ["metatask_PcpCombine_obs", - "metatask_PcpCombine_fcst_APCP_all_accums_all_mems", - "metatask_GridStat_CCPA_all_accums_all_mems", - "metatask_GenEnsProd_EnsembleStat_CCPA", - "metatask_GridStat_CCPA_ensmeanprob_all_accums"] + vx_metatasks_all["CCPA"] = [ + "metatask_PcpCombine_obs", + "metatask_PcpCombine_fcst_APCP_all_accums_all_mems", + "metatask_GridStat_CCPA_all_accums_all_mems", + "metatask_GenEnsProd_EnsembleStat_CCPA", + "metatask_GridStat_CCPA_ensmeanprob_all_accums", + ] vx_fields_all["NOHRSC"] = ["ASNOW"] - vx_metatasks_all["NOHRSC"] = ["task_get_obs_nohrsc", - "metatask_PcpCombine_fcst_ASNOW_all_accums_all_mems", - "metatask_GridStat_NOHRSC_all_accums_all_mems", - "metatask_GenEnsProd_EnsembleStat_NOHRSC", - "metatask_GridStat_NOHRSC_ensmeanprob_all_accums"] + vx_metatasks_all["NOHRSC"] = [ + "task_get_obs_nohrsc", + "metatask_PcpCombine_fcst_ASNOW_all_accums_all_mems", + "metatask_GridStat_NOHRSC_all_accums_all_mems", + "metatask_GenEnsProd_EnsembleStat_NOHRSC", + "metatask_GridStat_NOHRSC_ensmeanprob_all_accums", + ] vx_fields_all["MRMS"] = ["REFC", "RETOP"] - vx_metatasks_all["MRMS"] = ["metatask_GridStat_MRMS_all_mems", - "metatask_GenEnsProd_EnsembleStat_MRMS", - "metatask_GridStat_MRMS_ensprob"] + vx_metatasks_all["MRMS"] = [ + "metatask_GridStat_MRMS_all_mems", + "metatask_GenEnsProd_EnsembleStat_MRMS", + "metatask_GridStat_MRMS_ensprob", + ] vx_fields_all["NDAS"] = ["ADPSFC", "ADPUPA"] - vx_metatasks_all["NDAS"] = ["task_run_MET_Pb2nc_obs", - "metatask_PointStat_NDAS_all_mems", - "metatask_GenEnsProd_EnsembleStat_NDAS", - "metatask_PointStat_NDAS_ensmeanprob"] + vx_metatasks_all["NDAS"] = [ + "task_run_MET_Pb2nc_obs", + "metatask_PointStat_NDAS_all_mems", + "metatask_GenEnsProd_EnsembleStat_NDAS", + "metatask_PointStat_NDAS_ensmeanprob", + ] # Get the vx fields specified in the experiment configuration. vx_fields_config = expt_config["verification"]["VX_FIELDS"] @@ -591,23 +615,27 @@ def remove_tag(tasks, tag): # for all observation types. if not vx_fields_config: metatask = "metatask_check_post_output_all_mems" - rocoto_config['tasks'].pop(metatask) + rocoto_config["tasks"].pop(metatask) # If for a given obstype no fields are specified, remove all vx metatasks # for that obstype. for obstype in vx_fields_all: - vx_fields_obstype = [field for field in vx_fields_config if field in vx_fields_all[obstype]] + vx_fields_obstype = [ + field for field in vx_fields_config if field in vx_fields_all[obstype] + ] if not vx_fields_obstype: for metatask in vx_metatasks_all[obstype]: - if metatask in rocoto_config['tasks']: - logging.info(dedent( - f""" + if metatask in rocoto_config["tasks"]: + logging.info( + dedent( + f""" Removing verification [meta]task "{metatask}" from workflow since no fields belonging to observation type "{obstype}" are specified for verification.""" - )) - rocoto_config['tasks'].pop(metatask) + ) + ) + rocoto_config["tasks"].pop(metatask) # # ----------------------------------------------------------------------- @@ -673,13 +701,13 @@ def get_location(xcs, fmt, expt_cfg): {data_key} = \"{basedir}\"''' ) - # Make sure the vertical coordinate file for both make_lbcs and # make_ics is the same. 
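One subtlety in the VCOORD_FILE comparison rewritten just below: Python's walrus operator binds more loosely than !=, so ics_vcoord := expt_config...get("VCOORD_FILE") != (...) assigns the boolean outcome of the comparison to ics_vcoord, not the path — in both the old and the reformatted version, the error text would therefore print True rather than the offending file. A two-case illustration of the precedence:

    x, y = "fileA", "fileB"

    if a := x != y:     # parsed as a := (x != y); a is True, not "fileA"
        print(a)        # -> True

    if (b := x) != y:   # parentheses bind the walrus first; b is "fileA"
        print(b)        # -> fileA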
- if ics_vcoord := expt_config.get("task_make_ics", {}).get("VCOORD_FILE") != \ - (lbcs_vcoord := expt_config.get("task_make_lbcs", {}).get("VCOORD_FILE")): - raise ValueError( - f""" + if ics_vcoord := expt_config.get("task_make_ics", {}).get("VCOORD_FILE") != ( + lbcs_vcoord := expt_config.get("task_make_lbcs", {}).get("VCOORD_FILE") + ): + raise ValueError( + f""" The VCOORD_FILE must be set to the same value for both the make_ics task and the make_lbcs task. They are currently set to: @@ -690,7 +718,7 @@ def get_location(xcs, fmt, expt_cfg): make_lbcs: VCOORD_FILE: {lbcs_vcoord} """ - ) + ) # # ----------------------------------------------------------------------- @@ -709,14 +737,16 @@ def get_location(xcs, fmt, expt_cfg): dt = fcst_config.get("DT_ATMOS") if dt: if dt > 40: - logger.warning(dedent( - f""" + logger.warning( + dedent( + f""" WARNING: CCPP suite {workflow_config["CCPP_PHYS_SUITE"]} requires short time step regardless of grid resolution. The user-specified value DT_ATMOS = {fcst_config.get("DT_ATMOS")} may result in CFL violations or other errors! """ - )) + ) + ) # Gather the pre-defined grid parameters, if needed if workflow_config.get("PREDEF_GRID_NAME"): @@ -737,14 +767,19 @@ def get_location(xcs, fmt, expt_cfg): continue # DT_ATMOS needs special treatment based on CCPP suite elif param == "DT_ATMOS": - if workflow_config["CCPP_PHYS_SUITE"] in hires_ccpp_suites and grid_params[param] > 40: - logger.warning(dedent( - f""" + if ( + workflow_config["CCPP_PHYS_SUITE"] in hires_ccpp_suites + and grid_params[param] > 40 + ): + logger.warning( + dedent( + f""" WARNING: CCPP suite {workflow_config["CCPP_PHYS_SUITE"]} requires short time step regardless of grid resolution; setting DT_ATMOS to 40.\n This value can be overwritten in the user config file. """ - )) + ) + ) fcst_config[param] = 40 else: fcst_config[param] = value @@ -773,19 +808,19 @@ def get_location(xcs, fmt, expt_cfg): if 24 / incr_cycl_freq != len(fcst_len_cycl): # Also allow for the possibility that the user is running # cycles for less than a day: - num_cycles = len(set_cycle_dates( - date_first_cycl, - date_last_cycl, - incr_cycl_freq)) + num_cycles = len( + set_cycle_dates(date_first_cycl, date_last_cycl, incr_cycl_freq) + ) if num_cycles != len(fcst_len_cycl): - logger.error(f""" The number of entries in FCST_LEN_CYCL does + logger.error( + f""" The number of entries in FCST_LEN_CYCL does not divide evenly into a 24 hour day or the number of cycles in your experiment! FCST_LEN_CYCL = {fcst_len_cycl} """ - ) - raise ValueError + ) + raise ValueError # Build cycledefs entries for the long forecasts # Short forecast cycles will be relevant to all intended @@ -799,7 +834,7 @@ def get_location(xcs, fmt, expt_cfg): # Find the entries that match the long forecast, and map them to # their time of day. 
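Concretely, the long-forecast mapping and the cycledef construction that follow work out like this with hypothetical inputs (a 48-hour forecast only at 18Z, cycles every 6 hours):

    import datetime as dt

    fcst_len_cycl = [6, 6, 6, 48]   # hypothetical per-cycle forecast lengths
    incr_cycl_freq = 6              # hours between cycles

    long_fcst_len = max(fcst_len_cycl)                   # 48
    long_cycles = [i * incr_cycl_freq
                   for i, x in enumerate(fcst_len_cycl)
                   if x == long_fcst_len]                # [18]

    date_first_cycl = dt.datetime(2024, 7, 15)
    date_last_cycl = dt.datetime(2024, 7, 18)
    fcst_cdef = []
    for hh in long_cycles:
        first = date_first_cycl.replace(hour=hh).strftime("%Y%m%d%H")
        last = date_last_cycl.replace(hour=hh).strftime("%Y%m%d%H")
        fcst_cdef.append(f"{first}00 {last}00 24:00:00")

    print(fcst_cdef)   # -> ['202407151800 202407181800 24:00:00']

Each entry is a rocoto cycledef triple (start, end, 24-hour step, with minutes appended), so the long forecast fires once per day at the matching hour.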
long_fcst_len = max(fcst_len_cycl) - long_indices = [i for i,x in enumerate(fcst_len_cycl) if x == long_fcst_len] + long_indices = [i for i, x in enumerate(fcst_len_cycl) if x == long_fcst_len] long_cycles = [i * incr_cycl_freq for i in long_indices] # add one forecast entry per cycle per day @@ -808,9 +843,9 @@ def get_location(xcs, fmt, expt_cfg): for hh in long_cycles: first = date_first_cycl.replace(hour=hh).strftime("%Y%m%d%H") last = date_last_cycl.replace(hour=hh).strftime("%Y%m%d%H") - fcst_cdef.append(f'{first}00 {last}00 24:00:00') + fcst_cdef.append(f"{first}00 {last}00 24:00:00") - rocoto_config['cycledefs']['long_forecast'] = fcst_cdef + rocoto_config["cycledefs"]["long_forecast"] = fcst_cdef # check the availability of restart intervals for restart capability of forecast do_fcst_restart = fcst_config.get("DO_FCST_RESTART") @@ -1032,7 +1067,6 @@ def get_location(xcs, fmt, expt_cfg): # ----------------------------------------------------------------------- # - # If using external CRTM fix files to allow post-processing of synthetic # satellite products from the UPP, make sure the CRTM fix file directory exists. if global_sect.get("USE_CRTM"): @@ -1092,8 +1126,9 @@ def get_location(xcs, fmt, expt_cfg): # Update the rocoto string for the fcst output location if # running an ensemble in nco mode if global_sect["DO_ENSEMBLE"]: - rocoto_config["entities"]["FCST_DIR"] = \ - "{{ nco.PTMP }}/{{ nco.envir_default }}/tmp/run_fcst_mem#mem#.{{ workflow.WORKFLOW_ID }}_@Y@m@d@H" + rocoto_config["entities"][ + "FCST_DIR" + ] = "{{ nco.PTMP }}/{{ nco.envir_default }}/tmp/run_fcst_mem#mem#.{{ workflow.WORKFLOW_ID }}_@Y@m@d@H" # create experiment dir mkdir_vrfy(f' -p "{exptdir}"') @@ -1164,13 +1199,14 @@ def get_location(xcs, fmt, expt_cfg): # ----------------------------------------------------------------------- # # Get list of all top-level tasks and metatasks in the workflow. - task_defs = rocoto_config.get('tasks') + task_defs = rocoto_config.get("tasks") all_tasks = [task for task in task_defs] # Get list of all valid top-level tasks and metatasks pertaining to ensemble # verification. ens_vx_task_defns = load_config_file( - os.path.join(USHdir, os.pardir, "parm", "wflow", "verify_ens.yaml")) + os.path.join(USHdir, os.pardir, "parm", "wflow", "verify_ens.yaml") + ) ens_vx_valid_tasks = [task for task in ens_vx_task_defns] # Get list of all valid top-level tasks and metatasks in the workflow that @@ -1183,14 +1219,24 @@ def get_location(xcs, fmt, expt_cfg): do_ensemble = global_sect["DO_ENSEMBLE"] if (not do_ensemble) and ens_vx_tasks: task_str = " " + "\n ".join(ens_vx_tasks) - msg = dedent(f""" + msg = dedent( + f""" Ensemble verification can not be run unless running in ensemble mode: DO_ENSEMBLE = \"{do_ensemble}\" Ensemble verification tasks: - """) - msg = "".join([msg, task_str, dedent(f""" + """ + ) + msg = "".join( + [ + msg, + task_str, + dedent( + f""" Please set DO_ENSEMBLE to True or remove ensemble vx tasks from the - workflow.""")]) + workflow.""" + ), + ] + ) raise Exception(msg) # @@ -1228,18 +1274,14 @@ def dict_find(user_dict, substring): run_make_ics = dict_find(rocoto_tasks, "task_make_ics") run_make_lbcs = dict_find(rocoto_tasks, "task_make_lbcs") run_run_fcst = dict_find(rocoto_tasks, "task_run_fcst") - run_any_coldstart_task = run_make_ics or \ - run_make_lbcs or \ - run_run_fcst + run_any_coldstart_task = run_make_ics or run_make_lbcs or run_run_fcst # Flags for creating symlinks to pre-generated grid, orography, and sfc_climo files. 
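The run_* flags computed above hinge on dict_find, whose body lies outside this hunk; what they need is a recursive substring search over the nested task keys. A plausible sketch assuming that behavior — not the repository's actual implementation:

    def dict_find(user_dict, substring):
        """Assumed behavior: True if any key, at any nesting level,
        contains substring. Illustrative sketch only."""
        if not isinstance(user_dict, dict):
            return False
        for key, value in user_dict.items():
            if substring in key:
                return True
            if dict_find(value, substring):
                return True
        return False

    rocoto_tasks = {"metatask_run_ensemble":
                    {"task_make_ics_mem#mem#": {"command": "..."}}}
    print(dict_find(rocoto_tasks, "task_make_ics"))   # -> True

Matching on substrings lets one call cover both plain tasks and per-member metatask expansions such as task_make_ics_mem000.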
# These consider dependencies of other tasks on each pre-processing task. create_symlinks_to_pregen_files = { - "GRID": (not run_make_grid) and \ - (run_make_orog or run_make_sfc_climo or run_any_coldstart_task), - "OROG": (not run_make_orog) and \ - (run_make_sfc_climo or run_any_coldstart_task), - "SFC_CLIMO": (not run_make_sfc_climo) and \ - (run_make_ics or run_make_lbcs), + "GRID": (not run_make_grid) + and (run_make_orog or run_make_sfc_climo or run_any_coldstart_task), + "OROG": (not run_make_orog) and (run_make_sfc_climo or run_any_coldstart_task), + "SFC_CLIMO": (not run_make_sfc_climo) and (run_make_ics or run_make_lbcs), } fixed_files = expt_config["fixed_files"] @@ -1327,7 +1369,7 @@ def dict_find(user_dict, substring): # if fcst_config["WRITE_DOPOST"]: # Turn off run_post - task_name = 'metatask_run_ens_post' + task_name = "metatask_run_ens_post" removed_task = task_defs.pop(task_name, None) if removed_task: logger.warning( @@ -1352,33 +1394,49 @@ def dict_find(user_dict, substring): ccpp_suite_xml = load_xml_file(workflow_config["CCPP_PHYS_SUITE_IN_CCPP_FP"]) # Need to track if we are using RUC LSM for the make_ics step - workflow_config["SDF_USES_RUC_LSM"] = has_tag_with_value(ccpp_suite_xml, "scheme", "lsm_ruc") + workflow_config["SDF_USES_RUC_LSM"] = has_tag_with_value( + ccpp_suite_xml, "scheme", "lsm_ruc" + ) # Thompson microphysics needs additional input files and namelist settings - workflow_config["SDF_USES_THOMPSON_MP"] = has_tag_with_value(ccpp_suite_xml, "scheme", "mp_thompson") + workflow_config["SDF_USES_THOMPSON_MP"] = has_tag_with_value( + ccpp_suite_xml, "scheme", "mp_thompson" + ) if workflow_config["SDF_USES_THOMPSON_MP"]: - - logging.debug(f'Selected CCPP suite ({workflow_config["CCPP_PHYS_SUITE"]}) uses Thompson MP') - logging.debug(f'Setting up links for additional fix files') + + logging.debug( + f'Selected CCPP suite ({workflow_config["CCPP_PHYS_SUITE"]}) uses Thompson MP' + ) + logging.debug(f"Setting up links for additional fix files") # If the model ICs or BCs are not from RAP or HRRR, they will not contain aerosol # climatology data needed by the Thompson scheme, so we need to provide a separate file - if (get_extrn_ics["EXTRN_MDL_NAME_ICS"] not in ["HRRR", "RAP"] or - get_extrn_lbcs["EXTRN_MDL_NAME_LBCS"] not in ["HRRR", "RAP"]): - fixed_files["THOMPSON_FIX_FILES"].append(workflow_config["THOMPSON_MP_CLIMO_FN"]) + if get_extrn_ics["EXTRN_MDL_NAME_ICS"] not in ["HRRR", "RAP"] or get_extrn_lbcs[ + "EXTRN_MDL_NAME_LBCS" + ] not in ["HRRR", "RAP"]: + fixed_files["THOMPSON_FIX_FILES"].append( + workflow_config["THOMPSON_MP_CLIMO_FN"] + ) # Add thompson-specific fix files to CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING and # FIXgsm_FILES_TO_COPY_TO_FIXam; see parm/fixed_files_mapping.yaml for more info on these variables - fixed_files["FIXgsm_FILES_TO_COPY_TO_FIXam"].extend(fixed_files["THOMPSON_FIX_FILES"]) + fixed_files["FIXgsm_FILES_TO_COPY_TO_FIXam"].extend( + fixed_files["THOMPSON_FIX_FILES"] + ) for fix_file in fixed_files["THOMPSON_FIX_FILES"]: - fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"].append(f"{fix_file} | {fix_file}") - - logging.debug(f'New fix file list:\n{fixed_files["FIXgsm_FILES_TO_COPY_TO_FIXam"]=}') - logging.debug(f'New fix file mapping:\n{fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"]=}') + fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"].append( + f"{fix_file} | {fix_file}" + ) + logging.debug( + f'New fix file list:\n{fixed_files["FIXgsm_FILES_TO_COPY_TO_FIXam"]=}' + ) + logging.debug( + f'New fix file 
mapping:\n{fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"]=}' + ) # # ----------------------------------------------------------------------- @@ -1427,7 +1485,6 @@ def dict_find(user_dict, substring): var_defns_cfg["workflow"][dates] = date_to_str(var_defns_cfg["workflow"][dates]) var_defns_cfg.dump(Path(global_var_defns_fp)) - # # ----------------------------------------------------------------------- # @@ -1442,28 +1499,33 @@ def dict_find(user_dict, substring): if v is None or v == "": continue vkey = "valid_vals_" + k - if (vkey in cfg_v): - if (type(v) == list): - if not(all(ele in cfg_v[vkey] for ele in v)): + if vkey in cfg_v: + if type(v) == list: + if not (all(ele in cfg_v[vkey] for ele in v)): raise Exception( - dedent(f""" + dedent( + f""" The variable {k} = {v} in the user's configuration has at least one invalid value. Possible values are: {k} = {cfg_v[vkey]}""" - )) + ) + ) else: if not (v in cfg_v[vkey]): raise Exception( - dedent(f""" + dedent( + f""" The variable {k} = {v} ({type(v)}) in the user's configuration does not have a valid value. Possible values are: {k} = {cfg_v[vkey]}""" - )) + ) + ) return expt_config + def clean_rocoto_dict(rocotodict): """Removes any invalid entries from rocotodict. Examples of invalid entries are: @@ -1477,7 +1539,9 @@ def clean_rocoto_dict(rocotodict): elif key.split("_", maxsplit=1)[0] in ["task"]: if not rocotodict[key].get("command"): popped = rocotodict.pop(key) - logging.warning(f"Invalid task {key} removed due to empty/unset run command") + logging.warning( + f"Invalid task {key} removed due to empty/unset run command" + ) logging.debug(f"Removed entry:\n{popped}") # Loop 2: search for metatasks with no tasks in them @@ -1487,7 +1551,7 @@ def clean_rocoto_dict(rocotodict): for key2 in list(rocotodict[key].keys()): if key2.split("_", maxsplit=1)[0] == "metatask": clean_rocoto_dict(rocotodict[key][key2]) - #After above recursion, any nested empty metatasks will have popped themselves + # After above recursion, any nested empty metatasks will have popped themselves if rocotodict[key].get(key2): valid = True elif key2.split("_", maxsplit=1)[0] == "task": @@ -1498,7 +1562,6 @@ def clean_rocoto_dict(rocotodict): logging.debug(f"Removed entry:\n{popped}") - # # ----------------------------------------------------------------------- # From 93ac3620446696acb7d325fb0c2043b176f4a9dd Mon Sep 17 00:00:00 2001 From: WeirAE Date: Wed, 18 Sep 2024 10:00:52 -0500 Subject: [PATCH 07/47] config_defaults.yaml formatting fixes --- ush/config_defaults.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 226fc00c4..530435288 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -1660,9 +1660,9 @@ task_make_ics: update_values: config: fix_dir_target_grid: "{{ workflow.FIXlam }}" - mosaic_file_target_grid: "{{ workflow.FIXlam }}/{{ "CRES" | env }} {{ workflow.DOT_OR_USCORE }}mosaic.halo{{ constants.NH4 }}.nc" + mosaic_file_target_grid: "{{ workflow.FIXlam }}/{{ 'CRES' | env }}{{ workflow.DOT_OR_USCORE }}mosaic.halo{{ constants.NH4 }}.nc" orog_dir_target_grid: "{{ workflow.FIXlam }}" - orog_files_target_grid: "{{ "CRES" | env }} {{ workflow.DOT_OR_USCORE }}oro_data.tile{{ constants. 
TILE_RGNL }}.halo{{ constants.NH4 }}.nc" + orog_files_target_grid: "{{ 'CRES' | env }}{{ workflow.DOT_OR_USCORE }}oro_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH4 }}.nc" vcoord_file_target_grid: "{{ workflow.FIXam }}/global_hyblev.l65.txt" varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" data_dir_input_grid: "{{ task_make_lbcs.chgres_cube.run_dir }}" @@ -1719,13 +1719,13 @@ task_make_lbcs: update_values: config: fix_dir_target_grid: "{{ workflow.FIXlam }}" - mosaic_file_target_grid: "{{ workflow.FIXlam }}/{{ workflow.CRES }}{{ workflow.DOT_OR_USCORE }}mosaic.halo{{ constants.NH4 }}.nc" + mosaic_file_target_grid: "{{ workflow.FIXlam }}/{{ 'CRES' | env }}{{ workflow.DOT_OR_USCORE }}mosaic.halo{{ constants.NH4 }}.nc" orog_dir_target_grid: "{{ workflow.FIXlam }}" - orog_files_target_grid: "{{ workflow.CRES }}{{ workflow.DOT_OR_USCORE }}oro_data.tile{{ constants. TILE_RGNL }}.halo{{ constants.NH4 }}.nc" + orog_files_target_grid: "{{ 'CRES' | env }}{{ workflow.DOT_OR_USCORE }}oro_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH4 }}.nc" vcoord_file_target_grid: "{{ workflow.FIXam }}/global_hyblev.l65.txt" varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" data_dir_input_grid: "{{ task_make_lbcs.chgres_cube.run_dir }}" - atm_files_input_grid: ${fn_atm} + atm_files_input_grid: "" grib2_file_input_grid: "" cycle_mon: !int "{{ cycle.strftime('%m') }}" cycle_day: !int "{{ cycle.strftime('%d') }}" From e437b3d29cc1baff2a05826d22d76e53fac4a4ad Mon Sep 17 00:00:00 2001 From: WeirAE Date: Wed, 18 Sep 2024 11:04:14 -0500 Subject: [PATCH 08/47] yaml fixes to complete build --- ush/ccpp_suites_defaults.yaml | 12 ++++++------ ush/config_defaults.yaml | 1 + ush/external_model_defaults.yaml | 14 +++++++------- ush/setup.py | 2 +- 4 files changed, 15 insertions(+), 14 deletions(-) diff --git a/ush/ccpp_suites_defaults.yaml b/ush/ccpp_suites_defaults.yaml index a5e247d7a..f0439bbc9 100644 --- a/ush/ccpp_suites_defaults.yaml +++ b/ush/ccpp_suites_defaults.yaml @@ -5,7 +5,7 @@ gsd_defaults: &gsd_defaults config: varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" -FV3_RAP +FV3_RAP: task_make_ics: chgres_cube: <<: *gsd_defaults @@ -13,7 +13,7 @@ FV3_RAP chgres_cube: <<: *gsd_defaults -FV3_HRRR +FV3_HRRR: task_make_ics: chgres_cube: <<: *gsd_defaults @@ -21,7 +21,7 @@ FV3_HRRR chgres_cube: <<: *gsd_defaults -FV3_WoFS_v0 +FV3_WoFS_v0: task_make_ics: chgres_cube: <<: *gsd_defaults @@ -29,7 +29,7 @@ FV3_WoFS_v0 chgres_cube: <<: *gsd_defaults -FV3_RRFS_v1beta +FV3_RRFS_v1beta: task_make_ics: chgres_cube: <<: *gsd_defaults @@ -37,7 +37,7 @@ FV3_RRFS_v1beta chgres_cube: <<: *gsd_defaults -FV3_GFS_v15_thompson_mynn_lam3km +FV3_GFS_v15_thompson_mynn_lam3km: task_make_ics: chgres_cube: <<: *gsd_defaults @@ -45,7 +45,7 @@ FV3_GFS_v15_thompson_mynn_lam3km chgres_cube: <<: *gsd_defaults -FV3_GFS_v17_p8 +FV3_GFS_v17_p8: task_make_ics: chgres_cube: <<: *gsd_defaults diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 530435288..11409faa3 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -1713,6 +1713,7 @@ task_make_lbcs: KMP_AFFINITY_MAKE_LBCS: "scatter" OMP_NUM_THREADS_MAKE_LBCS: 1 OMP_STACKSIZE_MAKE_LBCS: "1024m" + VCOORD_FILE: "{{ workflow.FIXam }}/global_hyblev.l65.txt" #------------------------------------------------------------------------ chgres_cube: namelist: diff --git a/ush/external_model_defaults.yaml b/ush/external_model_defaults.yaml index a42fac26a..c3308887e 100644 --- 
a/ush/external_model_defaults.yaml
+++ b/ush/external_model_defaults.yaml
@@ -8,7 +8,7 @@ grib2_defaults: &grib2_defaults
       tracers_input: ""
       tracers: ""
 
-GSMGFS
+GSMGFS:
   task_make_ics:
     chgres_cube:
       input_type: "gfs_gaussian_nemsio"
@@ -24,7 +24,7 @@ GSMGFS
       tracers_input: "[\"spfh\",\"clwmr\",\"o3mr\"]"
       tracers: "[\"sphum\",\"liq_wat\",\"o3mr\"]"
 
-UFS-CASE-STUDY
+UFS-CASE-STUDY:
   task_make_ics:
     chgres_cube:
       external_model: "UFS-CASE-STUDY"
@@ -32,7 +32,7 @@ UFS-CASE-STUDY
     chgres_cube:
       external_model: "UFS-CASE-STUDY"
 
-GDAS
+GDAS:
   task_make_ics:
     chgres_cube:
       external_model: "GFS"
@@ -42,7 +42,7 @@ GDAS
       external_model: "GFS"
       tg3_from_soil: True
 
-GEFS
+GEFS:
   task_make_ics:
     chgres_cube:
       external_model: "GFS"
@@ -52,7 +52,7 @@ GEFS
       external_model: "GFS"
       <<: *grib2_defaults
 
-HRRR
+HRRR:
   task_make_ics:
     chgres_cube:
       external_model: "HRRR"
@@ -66,7 +66,7 @@ HRRR
       tg3_from_soil: True
       <<: *grib2_defaults
 
-RAP
+RAP:
   task_make_ics:
     chgres_cube:
       external_model: "RAP"
@@ -80,7 +80,7 @@ RAP
       tg3_from_soil: True
       <<: *grib2_defaults
 
-NAM
+NAM:
   task_make_ics:
     chgres_cube:
       external_model: "NAM"
diff --git a/ush/setup.py b/ush/setup.py
index 6fbbce59a..f358effeb 100644
--- a/ush/setup.py
+++ b/ush/setup.py
@@ -235,7 +235,7 @@ def add_jobname(tasks):
     update_dict(ccpp_cfg, cfg_d)
 
     # Load external model-specific settings
-    extrn_mdl = cfg_d["workflow"]["EXTRN_MDL"]
+    extrn_mdl = cfg_d["task_get_extrn_ics"]["EXTRN_MDL_NAME_ICS"]
     extrn_cfg = get_yaml_config(Path(ushdir, "external_model_defaults.yaml")).get(
         extrn_mdl, {}
     )

From bee31be9fb0f8cc1ef0bd95bb9cf78b23ab11a93 Mon Sep 17 00:00:00 2001
From: WeirAE
Date: Wed, 18 Sep 2024 16:41:01 -0500
Subject: [PATCH 09/47] try some changes in chgres_cube.py

---
 scripts/chgres_cube.py | 73 ++++++++++++++++++++++--------------------
 1 file changed, 39 insertions(+), 34 deletions(-)

diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py
index 3e0405d35..b0abec011 100644
--- a/scripts/chgres_cube.py
+++ b/scripts/chgres_cube.py
@@ -46,40 +46,39 @@
 os.environ["member"] = args.member
 
-
-# fix CRES dereferencing
-expt_config = get_yaml_config(args.config_file)
-os.environ["CRES"] = expt_config["workflow"]["CRES"]
-expt_config.dereference(
-    context={
-        **os.environ,
-        **expt_config_cp,
-    }
-)
+expt_config = get_yaml_config(args.config_file)
 chgres_cube_config = expt_config[args.key_path]
+rundir = Path(chgres_cube_config["rundir"])
+print(f"Will run in {rundir}")
+
+CRES = expt_config["workflow"]["CRES"]
+os.environ["CRES"] = CRES
+
+
 # Extract driver config from experiment config
 chgres_cube_driver = Chgres_Cube(
     config=args.config_file,
     cycle=args.cycle,
     key_path=[args.key_path],
 )
-rundir = Path(chgres_cube_driver.config["rundir"])
-print(f"Will run in {rundir}")
+# update fn_atm and fn_sfc for ics task
+if args.key_path == "task_make_ics":
+    varsfilepath = chgres_cube_driver.config["task_make_ics"][
+        "input_files_metadata_path"
+    ]
+    extrn_config_fns = get_sh_config(varsfilepath)["EXTRN_MDL_FNS"]
+    extrn_config_fhrs = get_sh_config(varsfilepath)["EXTRN_MDL_FHRS"]
 
-if args.key_path == "task_make_ics"
-varsfilepath = chgres_cube_driver.config["task_make_ics"]["input_files_metadata_path"]
-extrn_config_fns = get_sh_config(varsfilepath)["EXTRN_MDL_FNS"]
-extrn_config_fhrs = get_sh_config(varsfilepath)["EXTRN_MDL_FHRS"]
+    fn_atm = extrn_config_fns[0]
+    fn_sfc = extrn_config_fns[1]
 
-# make_ics
-fn_atm = extrn_config_fns[0]
-fn_sfc = extrn_config_fns[1]
+    chgres_cube_driver.run()
 
-# Loop the run of chgres_cube for the forecast length
-if len(extrn_config_fns) > 2:
-    fn_sfc= ""
+# Loop the run of chgres_cube for the forecast length if lbcs
+else:
+    fn_sfc = ""
     num_fhrs = chgres_cube_driver.config["workflow"]["FCST_LEN_HRS"]
     bcgrp10 = 0
     bcgrpnum10 = 1
@@ -88,17 +87,18 @@
         if i < num_fhrs:
             print(f"group ${bcgrp10} processes member ${i}")
             fn_atm = f"${{EXTRN_MDL_FNS[${i}]}}"
-
-            expt_config["task_make_lbcs"]["chgres_cube"]["namelist"]["update_values"]["config"]["atm_files_input_grid"] = fn_atm
+
+            expt_config["task_make_lbcs"]["chgres_cube"]["namelist"]["update_values"][
+                "config"
+            ]["atm_files_input_grid"] = fn_atm
             # reinstantiate driver
             chgres_cube_driver = Chgres_Cube(
-            config=expt_config,
-            cycle=args.cycle,
-            key_path=[args.key_path],
-            )
+                config=expt_config,
+                cycle=args.cycle,
+                key_path=[args.key_path],
+            )
             chgres_cube_driver.run()
-else:
-    chgres_cube_driver.run()
+
 
 # error message
 if not (rundir / "runscript.chgres_cube.done").is_file():
@@ -127,14 +127,19 @@
         lbc_block = expt_config_cp[args.key_path]
         lbc_input_fn = "gfs.bndy.nc"
         lbc_spec_fhrs = extrn_config_fhrs[i]
-        lbc_offset_fhrs = chgres_cube_driver.config["task_get_extrn_lbcs"]["EXTRN_MDL_LBCS_OFFSET_HRS"]
+        lbc_offset_fhrs = chgres_cube_driver.config["task_get_extrn_lbcs"][
+            "EXTRN_MDL_LBCS_OFFSET_HRS"
+        ]
         nco_net = chgres_cube_driver.config["nco"]["NET_default"]
         dot_ensmem = f".mem{ args.member }"
-        fcst_hhh = ( lbc_spec_fhrs - lbc_offset_fhrs )
+        fcst_hhh = lbc_spec_fhrs - lbc_offset_fhrs
         fcst_hhh_FV3LAM = print(f"fcst_hhh:03d")
-        lbc_output_fn = rundir / f"{nco_net}.{args.cycle}{dot_ensmem}.gfs_bndy.tile7.f{fcst_hhh_FV3LAM}.nc"
-
+        lbc_output_fn = (
+            rundir
+            / f"{nco_net}.{args.cycle}{dot_ensmem}.gfs_bndy.tile7.f{fcst_hhh_FV3LAM}.nc"
+        )
+
         links[lbc_input_fn] = str(lbc_output_fn)
 
 uwlink(target_dir=rundir.parent, config=links)

From e661d1ccb596a2d60eb89c0df9b71eb1f9057a9b Mon Sep 17 00:00:00 2001
From: WeirAE
Date: Thu, 19 Sep 2024 09:29:41 -0500
Subject: [PATCH 10/47] add changes from PR 264

---
 tests/WE2E/utils.py                   | 338 ++++++++++++++++----------
 ush/create_aqm_rc_file.py             | 116 +++++----
 ush/create_diag_table_file.py         |  14 +-
 ush/create_model_configure_file.py    |  25 +-
 ush/create_ufs_configure_file.py      |  68 +++---
 ush/generate_FV3LAM_wflow.py          | 268 ++++++++++++--------
 ush/link_fix.py                       |  11 +-
 ush/set_fv3nml_sfc_climo_filenames.py |  24 +-
 8 files changed, 530 insertions(+), 334 deletions(-)

diff --git a/tests/WE2E/utils.py b/tests/WE2E/utils.py
index 0e6629ad1..67b5fda77 100755
--- a/tests/WE2E/utils.py
+++ b/tests/WE2E/utils.py
@@ -16,17 +16,16 @@
 
 sys.path.append("../../ush")
 
+from uwtools.api.config import get_yaml_config
+
 from calculate_cost import calculate_cost
-from python_utils import (
-    cfg_to_yaml_str,
-    flatten_dict,
-    load_config_file,
-    load_yaml_config
-)
+from python_utils import cfg_to_yaml_str, flatten_dict, load_config_file
 
 REPORT_WIDTH = 100
 EXPT_COLUMN_WIDTH = 65
 TASK_COLUMN_WIDTH = 40
+
+
 def print_WE2E_summary(expts_dict: dict, debug: bool = False):
     """Function that creates a summary for the specified experiment
 
@@ -41,38 +40,48 @@ def print_WE2E_summary(expts_dict: dict, debug: bool = False):
 
     # Create summary table as list of strings
     summary = []
-    summary.append('-'*REPORT_WIDTH)
-    summary.append(f'Experiment name {" "*(EXPT_COLUMN_WIDTH-17)} | Status | Core hours used ')
-    summary.append('-'*REPORT_WIDTH)
+    summary.append("-" * REPORT_WIDTH)
+    summary.append(
+        f'Experiment name {" "*(EXPT_COLUMN_WIDTH-17)} | Status | Core hours used '
+    )
+    summary.append("-" * REPORT_WIDTH)
    total_core_hours = 0
    statuses = []
    expt_details = []
    for expt in expts_dict:
statuses.append(expts_dict[expt]["status"]) ch = 0 - expt_details.append('') - expt_details.append('-'*REPORT_WIDTH) - expt_details.append(f'Detailed summary of experiment {expt}') + expt_details.append("") + expt_details.append("-" * REPORT_WIDTH) + expt_details.append(f"Detailed summary of experiment {expt}") expt_details.append(f"in directory {expts_dict[expt]['expt_dir']}") - expt_details.append(f'{" "*TASK_COLUMN_WIDTH}| Status | Walltime | Core hours used') - expt_details.append('-'*REPORT_WIDTH) + expt_details.append( + f'{" "*TASK_COLUMN_WIDTH}| Status | Walltime | Core hours used' + ) + expt_details.append("-" * REPORT_WIDTH) for task in expts_dict[expt]: # Skip non-task entries - if task in ["expt_dir","status","start_time","walltime"]: + if task in ["expt_dir", "status", "start_time", "walltime"]: continue status = expts_dict[expt][task]["status"] walltime = expts_dict[expt][task]["walltime"] - expt_details.append(f'{task[:TASK_COLUMN_WIDTH]:<{TASK_COLUMN_WIDTH}s} {status:<12s} {walltime:>10.1f}') + expt_details.append( + f"{task[:TASK_COLUMN_WIDTH]:<{TASK_COLUMN_WIDTH}s} {status:<12s} {walltime:>10.1f}" + ) if "core_hours" in expts_dict[expt][task]: task_ch = expts_dict[expt][task]["core_hours"] ch += task_ch - expt_details[-1] = f'{expt_details[-1]} {task_ch:>13.2f}' + expt_details[-1] = f"{expt_details[-1]} {task_ch:>13.2f}" else: - expt_details[-1] = f'{expt_details[-1]} -' - expt_details.append('-'*REPORT_WIDTH) - expt_details.append(f'Total {" "*(TASK_COLUMN_WIDTH - 6)} {statuses[-1]:<12s} {" "*11} {ch:>13.2f}') - summary.append(f'{expt[:EXPT_COLUMN_WIDTH]:<{EXPT_COLUMN_WIDTH}s} {statuses[-1]:<12s} {ch:>13.2f}') + expt_details[-1] = f"{expt_details[-1]} -" + expt_details.append("-" * REPORT_WIDTH) + expt_details.append( + f'Total {" "*(TASK_COLUMN_WIDTH - 6)} {statuses[-1]:<12s} {" "*11} {ch:>13.2f}' + ) + summary.append( + f"{expt[:EXPT_COLUMN_WIDTH]:<{EXPT_COLUMN_WIDTH}s} {statuses[-1]:<12s} {ch:>13.2f}" + ) total_core_hours += ch if "ERROR" in statuses: total_status = "ERROR" @@ -86,25 +95,30 @@ def print_WE2E_summary(expts_dict: dict, debug: bool = False): total_status = "COMPLETE" else: total_status = "UNKNOWN" - summary.append('-'*REPORT_WIDTH) - summary.append(f'Total {" "*(EXPT_COLUMN_WIDTH - 6)} {total_status:<12s} {total_core_hours:>13.2f}') + summary.append("-" * REPORT_WIDTH) + summary.append( + f'Total {" "*(EXPT_COLUMN_WIDTH - 6)} {total_status:<12s} {total_core_hours:>13.2f}' + ) # Print summary to screen for line in summary: print(line) # Print summary and details to file - summary_file = os.path.join(os.path.dirname(expts_dict[expt]["expt_dir"]), - f'WE2E_summary_{datetime.now().strftime("%Y%m%d%H%M%S")}.txt') + summary_file = os.path.join( + os.path.dirname(expts_dict[expt]["expt_dir"]), + f'WE2E_summary_{datetime.now().strftime("%Y%m%d%H%M%S")}.txt', + ) print(f"\nDetailed summary written to {summary_file}\n") - with open(summary_file, 'w', encoding="utf-8") as f: + with open(summary_file, "w", encoding="utf-8") as f: for line in summary: f.write(f"{line}\n") f.write("\nDetailed summary of each experiment:\n") for line in expt_details: f.write(f"{line}\n") + def create_expts_dict(expt_dir: str) -> dict: """ Function takes in a directory, searches that directory for subdirectories containing @@ -118,27 +132,28 @@ def create_expts_dict(expt_dir: str) -> dict: """ contents = sorted(os.listdir(expt_dir)) - expts_dict=dict() + expts_dict = dict() for item in contents: # Look for FV3LAM_wflow.xml to indicate directories with experiments in them fullpath = 
os.path.join(expt_dir, item) if not os.path.isdir(fullpath): continue - xmlfile = os.path.join(expt_dir, item, 'FV3LAM_wflow.xml') + xmlfile = os.path.join(expt_dir, item, "FV3LAM_wflow.xml") if os.path.isfile(xmlfile): expts_dict[item] = dict() - expts_dict[item].update({"expt_dir": os.path.join(expt_dir,item)}) + expts_dict[item].update({"expt_dir": os.path.join(expt_dir, item)}) expts_dict[item].update({"status": "CREATED"}) else: - logging.debug(f'Skipping directory {item}, experiment XML file not found') + logging.debug(f"Skipping directory {item}, experiment XML file not found") continue - #Update the experiment dictionary + # Update the experiment dictionary logging.debug(f"Reading status of experiment {item}") - update_expt_status(expts_dict[item],item,True,False,False) + update_expt_status(expts_dict[item], item, True, False, False) summary_file = f'WE2E_tests_{datetime.now().strftime("%Y%m%d%H%M%S")}.yaml' return summary_file, expts_dict + def calculate_core_hours(expts_dict: dict) -> dict: """ Function takes in an experiment dictionary, reads the var_defns file for necessary information, @@ -154,49 +169,60 @@ def calculate_core_hours(expts_dict: dict) -> dict: for expt in expts_dict: # Read variable definitions file - vardefs_file = os.path.join(expts_dict[expt]["expt_dir"],"var_defns.yaml") + vardefs_file = os.path.join(expts_dict[expt]["expt_dir"], "var_defns.yaml") if not os.path.isfile(vardefs_file): - logging.warning(f"\nWARNING: For experiment {expt}, variable definitions file") - logging.warning(f"{vardefs_file}\ndoes not exist!\n\nDropping experiment from summary") + logging.warning( + f"\nWARNING: For experiment {expt}, variable definitions file" + ) + logging.warning( + f"{vardefs_file}\ndoes not exist!\n\nDropping experiment from summary" + ) continue - logging.debug(f'Reading variable definitions file {vardefs_file}') - vardefs = load_yaml_config(vardefs_file) + logging.debug(f"Reading variable definitions file {vardefs_file}") + vardefs = get_yaml_config(vardefs_file) vdf = flatten_dict(vardefs) cores_per_node = vdf["NCORES_PER_NODE"] for task in expts_dict[expt]: # Skip non-task entries - if task in ["expt_dir","status","start_time","walltime"]: + if task in ["expt_dir", "status", "start_time", "walltime"]: continue # Cycle is last 12 characters, task name is rest (minus separating underscore) taskname = task[:-13] # Handle task names that have ensemble and/or fhr info appended with regex - taskname = re.sub('_mem\d{3}', '', taskname) - taskname = re.sub('_f\d{3}', '', taskname) - nnodes_var = f'NNODES_{taskname.upper()}' + taskname = re.sub("_mem\d{3}", "", taskname) + taskname = re.sub("_f\d{3}", "", taskname) + nnodes_var = f"NNODES_{taskname.upper()}" if nnodes_var in vdf: nnodes = vdf[nnodes_var] # Users are charged for full use of nodes, so core hours = CPN * nodes * time in hrs - core_hours = cores_per_node * nnodes * expts_dict[expt][task]['walltime'] / 3600 - expts_dict[expt][task]['exact_count'] = True + core_hours = ( + cores_per_node * nnodes * expts_dict[expt][task]["walltime"] / 3600 + ) + expts_dict[expt][task]["exact_count"] = True else: # If we can't find the number of nodes, assume full usage (may undercount) - core_hours = expts_dict[expt][task]['cores'] * \ - expts_dict[expt][task]['walltime'] / 3600 - expts_dict[expt][task]['exact_count'] = False - expts_dict[expt][task]['core_hours'] = round(core_hours,2) + core_hours = ( + expts_dict[expt][task]["cores"] + * expts_dict[expt][task]["walltime"] + / 3600 + ) + 
expts_dict[expt][task]["exact_count"] = False + expts_dict[expt][task]["core_hours"] = round(core_hours, 2) return expts_dict def write_monitor_file(monitor_file: str, expts_dict: dict): try: - with open(monitor_file,"w", encoding="utf-8") as f: + with open(monitor_file, "w", encoding="utf-8") as f: f.write("### WARNING ###\n") - f.write("### THIS FILE IS AUTO_GENERATED AND REGULARLY OVER-WRITTEN BY WORKFLOW SCRIPTS\n") + f.write( + "### THIS FILE IS AUTO_GENERATED AND REGULARLY OVER-WRITTEN BY WORKFLOW SCRIPTS\n" + ) f.write("### EDITS MAY RESULT IN MISBEHAVIOR OF EXPERIMENTS RUNNING\n") f.writelines(cfg_to_yaml_str(expts_dict)) except KeyboardInterrupt: logging.warning("\nRefusing to interrupt during file write; try again\n") - write_monitor_file(monitor_file,expts_dict) + write_monitor_file(monitor_file, expts_dict) except: logging.fatal("\n********************************\n") logging.fatal(f"WARNING WARNING WARNING\n") @@ -206,8 +232,13 @@ def write_monitor_file(monitor_file: str, expts_dict: dict): raise -def update_expt_status(expt: dict, name: str, refresh: bool = False, debug: bool = False, - submit: bool = True) -> dict: +def update_expt_status( + expt: dict, + name: str, + refresh: bool = False, + debug: bool = False, + submit: bool = True, +) -> dict: """ This function reads the dictionary showing the location of a given experiment, runs a `rocotorun` command to update the experiment (running new jobs and updating the status of @@ -260,8 +291,8 @@ def update_expt_status(expt: dict, name: str, refresh: bool = False, debug: bool dict: The updated experiment dictionary. """ - #If we are no longer tracking this experiment, return unchanged - if (expt["status"] in ['DEAD','ERROR','COMPLETE']) and not refresh: + # If we are no longer tracking this experiment, return unchanged + if (expt["status"] in ["DEAD", "ERROR", "COMPLETE"]) and not refresh: return expt # Update experiment, read rocoto database rocoto_db = f"{expt['expt_dir']}/FV3LAM_wflow.db" @@ -270,35 +301,54 @@ def update_expt_status(expt: dict, name: str, refresh: bool = False, debug: bool if refresh: logging.debug(f"Updating database for experiment {name}") if debug: - rocotorun_cmd = ["rocotorun", f"-w {rocoto_xml}", f"-d {rocoto_db}", "-v 10"] - p = subprocess.run(rocotorun_cmd, stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, text=True) + rocotorun_cmd = [ + "rocotorun", + f"-w {rocoto_xml}", + f"-d {rocoto_db}", + "-v 10", + ] + p = subprocess.run( + rocotorun_cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + text=True, + ) logging.debug(p.stdout) - #Run rocotorun again to get around rocotobqserver proliferation issue - p = subprocess.run(rocotorun_cmd, stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, text=True) + # Run rocotorun again to get around rocotobqserver proliferation issue + p = subprocess.run( + rocotorun_cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + text=True, + ) logging.debug(p.stdout) else: rocotorun_cmd = ["rocotorun", f"-w {rocoto_xml}", f"-d {rocoto_db}"] subprocess.run(rocotorun_cmd) - #Run rocotorun again to get around rocotobqserver proliferation issue + # Run rocotorun again to get around rocotobqserver proliferation issue subprocess.run(rocotorun_cmd) - logging.debug(f"Reading database for experiment {name}, updating experiment dictionary") + logging.debug( + f"Reading database for experiment {name}, updating experiment dictionary" + ) try: # This section of code queries the "job" table of the rocoto database, returning a list # of tuples containing the taskname, 
cycle, and state of each job respectively with closing(sqlite3.connect(rocoto_db)) as connection: with closing(connection.cursor()) as cur: - db = cur.execute('SELECT taskname,cycle,state,cores,duration from jobs').fetchall() + db = cur.execute( + "SELECT taskname,cycle,state,cores,duration from jobs" + ).fetchall() except: # Some platforms (including Hera) can have a problem with rocoto jobs not submitting # properly due to build-ups of background processes. This will resolve over time as # rocotorun continues to be called, so let's only treat this as an error if we are # past the first initial iteration of job submissions if not refresh: - logging.warning(f"Unable to read database {rocoto_db}\nCan not track experiment {name}") + logging.warning( + f"Unable to read database {rocoto_db}\nCan not track experiment {name}" + ) expt["status"] = "ERROR" return expt @@ -307,7 +357,7 @@ def update_expt_status(expt: dict, name: str, refresh: bool = False, debug: bool # For each entry from rocoto database, store that task's info under a dictionary key named # TASKNAME_CYCLE; Cycle comes from the database in Unix Time (seconds), so convert to # human-readable - cycle = datetime.utcfromtimestamp(task[1]).strftime('%Y%m%d%H%M') + cycle = datetime.utcfromtimestamp(task[1]).strftime("%Y%m%d%H%M") if f"{task[0]}_{cycle}" not in expt: expt[f"{task[0]}_{cycle}"] = dict() expt[f"{task[0]}_{cycle}"]["status"] = task[2] @@ -317,15 +367,17 @@ def update_expt_status(expt: dict, name: str, refresh: bool = False, debug: bool statuses = list() for task in expt: # Skip non-task entries - if task in ["expt_dir","status","start_time","walltime"]: + if task in ["expt_dir", "status", "start_time", "walltime"]: continue statuses.append(expt[task]["status"]) if "DEAD" in statuses: still_live = ["RUNNING", "SUBMITTING", "QUEUED", "FAILED"] if any(status in still_live for status in statuses): - logging.debug(f'DEAD job in experiment {name}; continuing to track until all jobs are '\ - 'complete') + logging.debug( + f"DEAD job in experiment {name}; continuing to track until all jobs are " + "complete" + ) expt["status"] = "DYING" else: expt["status"] = "DEAD" @@ -348,33 +400,41 @@ def update_expt_status(expt: dict, name: str, refresh: bool = False, debug: bool # rocotorun continues to be called, so let's only print this warning message if we # are past the first initial iteration of job submissions if not refresh: - logging.warning(dedent( - f"""WARNING:Tasks have not yet been submitted for experiment {name}; + logging.warning( + dedent( + f"""WARNING:Tasks have not yet been submitted for experiment {name}; it could be that your jobs are being throttled at the system level. If you continue to see this message, there may be an error with your experiment configuration, such as an incorrect queue or account number. You can use ctrl-c to pause this script and inspect log files. - """)) + """ + ) + ) else: logging.fatal("Some kind of horrible thing has happened") - raise ValueError(dedent( - f"""Some kind of horrible thing has happened to the experiment status + raise ValueError( + dedent( + f"""Some kind of horrible thing has happened to the experiment status for experiment {name} status is {expt["status"]} - all task statuses are {statuses}""")) + all task statuses are {statuses}""" + ) + ) # Final check for experiments where all tasks are "SUCCEEDED"; since the rocoto database does # not include info on jobs that have not been submitted yet, use rocotostat to check that # there are no un-submitted jobs remaining. 
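
The checks above reduce the per-task Rocoto states read from the database to a single experiment status. A minimal, self-contained sketch of that roll-up, where the precedence shown is an assumption distilled from update_expt_status rather than a drop-in replacement:

    # Hedged sketch: collapse per-task Rocoto states into one experiment
    # status, mirroring the DEAD/DYING handling in update_expt_status.
    def rollup_status(statuses):
        still_live = {"RUNNING", "SUBMITTING", "QUEUED", "FAILED"}
        if "DEAD" in statuses:
            # Keep tracking a dying experiment until no task is still live
            return "DYING" if still_live & set(statuses) else "DEAD"
        for state in ("ERROR", "RUNNING", "QUEUED", "SUCCEEDED"):
            if state in statuses:
                return state
        return "UNKNOWN"

    assert rollup_status(["SUCCEEDED", "RUNNING"]) == "RUNNING"
    assert rollup_status(["DEAD", "QUEUED"]) == "DYING"
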
- if expt["status"] in ["SUCCEEDED","STALLED","STUCK"]: - expt = compare_rocotostat(expt,name) + if expt["status"] in ["SUCCEEDED", "STALLED", "STUCK"]: + expt = compare_rocotostat(expt, name) return expt -def update_expt_status_parallel(expts_dict: dict, procs: int, refresh: bool = False, - debug: bool = False) -> dict: + +def update_expt_status_parallel( + expts_dict: dict, procs: int, refresh: bool = False, debug: bool = False +) -> dict: """ This function updates an entire set of experiments in parallel, drastically speeding up the process if given enough parallel processes. Given a dictionary of experiments, it will @@ -396,7 +456,7 @@ def update_expt_status_parallel(expts_dict: dict, procs: int, refresh: bool = Fa args = [] # Define a tuple of arguments to pass to starmap for expt in expts_dict: - args.append( (expts_dict[expt],expt,refresh,debug) ) + args.append((expts_dict[expt], expt, refresh, debug)) # call update_expt_status() in parallel with Pool(processes=procs) as pool: @@ -411,7 +471,6 @@ def update_expt_status_parallel(expts_dict: dict, procs: int, refresh: bool = Fa return expts_dict - def print_test_info(txtfile: str = "WE2E_test_info.txt") -> None: """Prints a pipe ( | ) delimited text file containing summaries of each test defined by a config file in test_configs/* @@ -420,14 +479,14 @@ def print_test_info(txtfile: str = "WE2E_test_info.txt") -> None: txtfile (str): File name for test details file """ - testfiles = glob.glob('test_configs/**/config*.yaml', recursive=True) + testfiles = glob.glob("test_configs/**/config*.yaml", recursive=True) testdict = dict() links = dict() for testfile in testfiles: # Calculate relative cost of test based on config settings using legacy script cost_array = calculate_cost(testfile) cost = cost_array[1] / cost_array[3] - #Decompose full file path into relevant bits + # Decompose full file path into relevant bits pathname, filename = os.path.split(testfile) testname = filename[7:-5] dirname = os.path.basename(os.path.normpath(pathname)) @@ -444,16 +503,20 @@ def print_test_info(txtfile: str = "WE2E_test_info.txt") -> None: testdict[testname] = load_config_file(testfile) testdict[testname]["directory"] = dirname testdict[testname]["cost"] = cost - #Calculate number of forecasts for a cycling run - if testdict[testname]['workflow']["DATE_FIRST_CYCL"] != \ - testdict[testname]['workflow']["DATE_LAST_CYCL"]: - begin = datetime.strptime(testdict[testname]['workflow']["DATE_FIRST_CYCL"], - '%Y%m%d%H') - end = datetime.strptime(testdict[testname]['workflow']["DATE_LAST_CYCL"], - '%Y%m%d%H') + # Calculate number of forecasts for a cycling run + if ( + testdict[testname]["workflow"]["DATE_FIRST_CYCL"] + != testdict[testname]["workflow"]["DATE_LAST_CYCL"] + ): + begin = datetime.strptime( + testdict[testname]["workflow"]["DATE_FIRST_CYCL"], "%Y%m%d%H" + ) + end = datetime.strptime( + testdict[testname]["workflow"]["DATE_LAST_CYCL"], "%Y%m%d%H" + ) diff = end - begin diffh = diff.total_seconds() // 3600 - nf = diffh // testdict[testname]['workflow']["INCR_CYCL_FREQ"] + nf = diffh // testdict[testname]["workflow"]["INCR_CYCL_FREQ"] testdict[testname]["num_fcsts"] = nf else: testdict[testname]["num_fcsts"] = 1 @@ -465,49 +528,59 @@ def print_test_info(txtfile: str = "WE2E_test_info.txt") -> None: testdict[link_name]["alternate_directory_name"] = alt_dirname # Print the file - with open(txtfile, 'w', encoding="utf-8") as f: + with open(txtfile, "w", encoding="utf-8") as f: # Field delimiter character - d = "\" | \"" + d = '" | "' txt_output = ['"Test 
Name'] - txt_output.append(f'(Subdirectory){d}Alternate Test Names') - txt_output.append(f'(Subdirectories){d}Test Purpose/Description{d}Relative Cost of Running Dynamics') - txt_output.append(f'(1 corresponds to running a 6-hour forecast on the RRFS_CONUS_25km predefined grid using the default time step){d}PREDEF_GRID_NAME{d}CCPP_PHYS_SUITE{d}EXTRN_MDL_NAME_ICS{d}EXTRN_MDL_NAME_LBCS{d}DATE_FIRST_CYCL{d}DATE_LAST_CYCL{d}INCR_CYCL_FREQ{d}FCST_LEN_HRS{d}DT_ATMOS{d}LBC_SPEC_INTVL_HRS{d}NUM_ENS_MEMBERS') + txt_output.append(f"(Subdirectory){d}Alternate Test Names") + txt_output.append( + f"(Subdirectories){d}Test Purpose/Description{d}Relative Cost of Running Dynamics" + ) + txt_output.append( + f"(1 corresponds to running a 6-hour forecast on the RRFS_CONUS_25km predefined grid using the default time step){d}PREDEF_GRID_NAME{d}CCPP_PHYS_SUITE{d}EXTRN_MDL_NAME_ICS{d}EXTRN_MDL_NAME_LBCS{d}DATE_FIRST_CYCL{d}DATE_LAST_CYCL{d}INCR_CYCL_FREQ{d}FCST_LEN_HRS{d}DT_ATMOS{d}LBC_SPEC_INTVL_HRS{d}NUM_ENS_MEMBERS" + ) for line in txt_output: f.write(f"{line}\n") for expt in testdict: - f.write(f"\"{expt}\n(") + f.write(f'"{expt}\n(') f.write(f"{testdict[expt]['directory']}){d}") if "alternate_name" in testdict[expt]: - f.write(f"{testdict[expt]['alternate_name']}\n"\ - f"({testdict[expt]['alternate_directory_name']}){d}") + f.write( + f"{testdict[expt]['alternate_name']}\n" + f"({testdict[expt]['alternate_directory_name']}){d}" + ) else: f.write(f"{d}\n") - desc = testdict[expt]['metadata']['description'].splitlines() + desc = testdict[expt]["metadata"]["description"].splitlines() for line in desc[:-1]: f.write(f" {line}\n") f.write(f" {desc[-1]}") - #Write test relative cost and number of test forecasts (for cycling runs) - f.write(f"{d}'{round(testdict[expt]['cost'],2)}{d}'{round(testdict[expt]['num_fcsts'])}") + # Write test relative cost and number of test forecasts (for cycling runs) + f.write( + f"{d}'{round(testdict[expt]['cost'],2)}{d}'{round(testdict[expt]['num_fcsts'])}" + ) # Bundle various variables with their corresponding sections for more compact coding - key_pairs = [ ('workflow', 'PREDEF_GRID_NAME'), - ('workflow', 'CCPP_PHYS_SUITE'), - ('task_get_extrn_ics', 'EXTRN_MDL_NAME_ICS'), - ('task_get_extrn_lbcs', 'EXTRN_MDL_NAME_LBCS'), - ('workflow', 'DATE_FIRST_CYCL'), - ('workflow', 'DATE_LAST_CYCL'), - ('workflow', 'INCR_CYCL_FREQ'), - ('workflow', 'FCST_LEN_HRS'), - ('task_run_fcst', 'DT_ATMOS'), - ('task_get_extrn_lbcs', 'LBC_SPEC_INTVL_HRS'), - ('global', 'NUM_ENS_MEMBERS') ] + key_pairs = [ + ("workflow", "PREDEF_GRID_NAME"), + ("workflow", "CCPP_PHYS_SUITE"), + ("task_get_extrn_ics", "EXTRN_MDL_NAME_ICS"), + ("task_get_extrn_lbcs", "EXTRN_MDL_NAME_LBCS"), + ("workflow", "DATE_FIRST_CYCL"), + ("workflow", "DATE_LAST_CYCL"), + ("workflow", "INCR_CYCL_FREQ"), + ("workflow", "FCST_LEN_HRS"), + ("task_run_fcst", "DT_ATMOS"), + ("task_get_extrn_lbcs", "LBC_SPEC_INTVL_HRS"), + ("global", "NUM_ENS_MEMBERS"), + ] for key1, key2 in key_pairs: f.write(f"{d}{testdict[expt].get(key1, {}).get(key2, '')}") f.write("\n") -def compare_rocotostat(expt_dict,name): +def compare_rocotostat(expt_dict, name): """Reads the dictionary showing the location of a given experiment, runs a `rocotostat` command to get the full set of tasks for the experiment, and compares the two to see if there are any unsubmitted tasks remaining. 
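
The hunk that follows reformats the rocotostat parsing loop. As a standalone illustration of the key construction it performs, here is a small sketch; the sample line and its field layout are assumptions based on the comment inside compare_rocotostat, not captured output:

    # Hedged sketch: rocotostat prints one line per job whose fields are
    # cycle, task, jobid, state, exit status, num tries, walltime.
    line = "202407151800  make_ics  12345  SUCCEEDED  0  1  42.0"
    cycle, task = line.split()[:2]
    key = f"{task}_{cycle}"
    assert key == "make_ics_202407151800"
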
@@ -517,27 +590,32 @@ def compare_rocotostat(expt_dict,name): rocoto_db = f"{expt_dict['expt_dir']}/FV3LAM_wflow.db" rocoto_xml = f"{expt_dict['expt_dir']}/FV3LAM_wflow.xml" rocotorun_cmd = ["rocotostat", f"-w {rocoto_xml}", f"-d {rocoto_db}", "-v 10"] - p = subprocess.run(rocotorun_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True) + p = subprocess.run( + rocotorun_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True + ) rsout = p.stdout # Parse each line of rocotostat output, extracting relevant information untracked_tasks = [] - for line in rsout.split('\n'): + for line in rsout.split("\n"): # Skip blank lines and dividing lines of '=====...' if not line: continue - if line[0] == '=': + if line[0] == "=": continue line_array = line.split() # Skip header lines - if line_array[0] == 'CYCLE' or line_array[0] == '/apps/rocoto/1.3.3/lib/workflowmgr/launchserver.rb:40:': + if ( + line_array[0] == "CYCLE" + or line_array[0] == "/apps/rocoto/1.3.3/lib/workflowmgr/launchserver.rb:40:" + ): continue # We should now just have lines describing jobs, in the form: # line_array = ['cycle','task','jobid','status','exit status','num tries','walltime'] # As defined in update_expt_status(), the "task names" in the dictionary are a combination # of the task name and cycle - taskname = f'{line_array[1]}_{line_array[0]}' + taskname = f"{line_array[1]}_{line_array[0]}" # If we're already tracking this task, continue if expt_dict.get(taskname): @@ -548,15 +626,17 @@ def compare_rocotostat(expt_dict,name): if untracked_tasks: # We want to give this a couple loops before reporting that it is "stuck" - if expt_dict['status'] == 'SUCCEEDED': - expt_dict['status'] = 'STALLED' - elif expt_dict['status'] == 'STALLED': - expt_dict['status'] = 'STUCK' - elif expt_dict['status'] == 'STUCK': + if expt_dict["status"] == "SUCCEEDED": + expt_dict["status"] = "STALLED" + elif expt_dict["status"] == "STALLED": + expt_dict["status"] = "STUCK" + elif expt_dict["status"] == "STUCK": msg = f"WARNING: For experiment {name}, there are jobs that are not being submitted:" for ut in untracked_tasks: msg += ut - msg = msg + f"""WARNING: For experiment {name}, + msg = ( + msg + + f"""WARNING: For experiment {name}, there are some jobs that are not being submitted. It could be that your jobs are being throttled at the system level, or some task dependencies have not been met. @@ -566,14 +646,18 @@ def compare_rocotostat(expt_dict,name): You can use ctrl-c to pause this script and inspect log files. 
""" + ) logging.warning(dedent(msg)) else: logging.fatal("Some kind of horrible thing has happened") - raise ValueError(dedent( - f"""Some kind of horrible thing has happened to the experiment status + raise ValueError( + dedent( + f"""Some kind of horrible thing has happened to the experiment status for experiment {name} status is {expt_dict["status"]} - untracked tasknames are {untracked_tasks}""")) + untracked tasknames are {untracked_tasks}""" + ) + ) else: expt_dict["status"] = "COMPLETE" diff --git a/ush/create_aqm_rc_file.py b/ush/create_aqm_rc_file.py index c37ed05d2..d7cb1c301 100644 --- a/ush/create_aqm_rc_file.py +++ b/ush/create_aqm_rc_file.py @@ -7,20 +7,21 @@ import os import sys from textwrap import dedent +from uwtools.api.config import get_yaml_config from uwtools.api.template import render from python_utils import ( cfg_to_yaml_str, flatten_dict, import_vars, - load_yaml_config, print_info_msg, print_input_args, str_to_type, ) + def create_aqm_rc_file(cdate, run_dir, init_concentrations): - """ Creates an aqm.rc file in the specified run directory + """Creates an aqm.rc file in the specified run directory Args: cdate: cycle date @@ -32,61 +33,66 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): print_input_args(locals()) - #import all environment variables + # import all environment variables import_vars() - #pylint: disable=undefined-variable + # pylint: disable=undefined-variable # - #----------------------------------------------------------------------- + # ----------------------------------------------------------------------- # # Create the aqm.rc file in the specified run directory. # - #----------------------------------------------------------------------- + # ----------------------------------------------------------------------- # - print_info_msg(f''' + print_info_msg( + f''' Creating the aqm.rc file (\"{AQM_RC_FN}\") in the specified run directory (run_dir): - run_dir = \"{run_dir}\"''', verbose=VERBOSE) + run_dir = \"{run_dir}\"''', + verbose=VERBOSE, + ) # # Set output file path # - aqm_rc_fp=os.path.join(run_dir, AQM_RC_FN) + aqm_rc_fp = os.path.join(run_dir, AQM_RC_FN) # # Extract from cdate the starting year, month, and day of the forecast. # - yyyymmdd=cdate.strftime('%Y%m%d') - mm=f"{cdate.month:02d}" # pylint: disable=invalid-name - hh=f"{cdate.hour:02d}" # pylint: disable=invalid-name + yyyymmdd = cdate.strftime("%Y%m%d") + mm = f"{cdate.month:02d}" # pylint: disable=invalid-name + hh = f"{cdate.hour:02d}" # pylint: disable=invalid-name # # Set parameters in the aqm.rc file. 
# - aqm_rc_bio_file_fp=os.path.join(FIXaqm,"bio", AQM_BIO_FILE) + aqm_rc_bio_file_fp = os.path.join(FIXaqm, "bio", AQM_BIO_FILE) # Fire config - aqm_rc_fire_file_fp=os.path.join( - COMIN, - f"{AQM_FIRE_FILE_PREFIX}_{yyyymmdd}_t{hh}z{AQM_FIRE_FILE_SUFFIX}" - ) + aqm_rc_fire_file_fp = os.path.join( + COMIN, f"{AQM_FIRE_FILE_PREFIX}_{yyyymmdd}_t{hh}z{AQM_FIRE_FILE_SUFFIX}" + ) # Dust config - aqm_rc_dust_file_fp=os.path.join( - FIXaqm,"dust", - f"{AQM_DUST_FILE_PREFIX}_{PREDEF_GRID_NAME}{AQM_DUST_FILE_SUFFIX}", - ) + aqm_rc_dust_file_fp = os.path.join( + FIXaqm, + "dust", + f"{AQM_DUST_FILE_PREFIX}_{PREDEF_GRID_NAME}{AQM_DUST_FILE_SUFFIX}", + ) # Canopy config - aqm_rc_canopy_file_fp=os.path.join( - FIXaqm,"canopy",PREDEF_GRID_NAME, + aqm_rc_canopy_file_fp = os.path.join( + FIXaqm, + "canopy", + PREDEF_GRID_NAME, f"{AQM_CANOPY_FILE_PREFIX}.{mm}{AQM_CANOPY_FILE_SUFFIX}", - ) + ) # - #----------------------------------------------------------------------- + # ----------------------------------------------------------------------- # # Create a multiline variable that consists of a yaml-compliant string # specifying the values that the jinja variables in the template # AQM_RC_TMPL_FN file should be set to. # - #----------------------------------------------------------------------- + # ----------------------------------------------------------------------- # settings = { "do_aqm_dust": DO_AQM_DUST, @@ -101,7 +107,7 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): "aqm_rc_dust_file_fp": aqm_rc_dust_file_fp, "aqm_rc_canopy_file_fp": aqm_rc_canopy_file_fp, "aqm_rc_product_fn": AQM_RC_PRODUCT_FN, - "aqm_rc_product_frequency": AQM_RC_PRODUCT_FREQUENCY + "aqm_rc_product_frequency": AQM_RC_PRODUCT_FREQUENCY, } settings_str = cfg_to_yaml_str(settings) @@ -116,49 +122,59 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): verbose=VERBOSE, ) # - #----------------------------------------------------------------------- + # ----------------------------------------------------------------------- # # Call a python script to generate the experiment's actual AQM_RC_FN # file from the template file. # - #----------------------------------------------------------------------- + # ----------------------------------------------------------------------- # render( - input_file = AQM_RC_TMPL_FP, - output_file = aqm_rc_fp, - values_src = settings, + input_file=AQM_RC_TMPL_FP, + output_file=aqm_rc_fp, + values_src=settings, ) return True + def parse_args(argv): - """ Parse command line arguments""" + """Parse command line arguments""" parser = argparse.ArgumentParser(description="Creates aqm.rc file.") - parser.add_argument("-r", "--run-dir", - dest="run_dir", - required=True, - help="Run directory.") + parser.add_argument( + "-r", "--run-dir", dest="run_dir", required=True, help="Run directory." 
+ ) - parser.add_argument("-c", "--cdate", - dest="cdate", - required=True, - help="Date string in YYYYMMDD format.") + parser.add_argument( + "-c", + "--cdate", + dest="cdate", + required=True, + help="Date string in YYYYMMDD format.", + ) - parser.add_argument("-i", "--init_concentrations", - dest="init_concentrations", - required=True, - help="Flag for initial concentrations.") + parser.add_argument( + "-i", + "--init_concentrations", + dest="init_concentrations", + required=True, + help="Flag for initial concentrations.", + ) - parser.add_argument("-p", "--path-to-defns", - dest="path_to_defns", - required=True, - help="Path to var_defns file.") + parser.add_argument( + "-p", + "--path-to-defns", + dest="path_to_defns", + required=True, + help="Path to var_defns file.", + ) return parser.parse_args(argv) + if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_yaml_config(args.path_to_defns) + cfg = get_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) import_vars(dictionary=cfg) create_aqm_rc_file( diff --git a/ush/create_diag_table_file.py b/ush/create_diag_table_file.py index 113953172..2591b11a1 100644 --- a/ush/create_diag_table_file.py +++ b/ush/create_diag_table_file.py @@ -8,13 +8,13 @@ import os import sys from textwrap import dedent +from uwtools.api.config import get_yaml_config from uwtools.api.template import render from python_utils import ( cfg_to_yaml_str, flatten_dict, import_vars, - load_yaml_config, print_info_msg, print_input_args, ) @@ -34,7 +34,7 @@ def create_diag_table_file(run_dir): # import all environment variables import_vars() - #pylint: disable=undefined-variable + # pylint: disable=undefined-variable # create a diagnostic table file within the specified run directory print_info_msg( f""" @@ -74,10 +74,10 @@ def create_diag_table_file(run_dir): ) render( - input_file = DIAG_TABLE_TMPL_FP, - output_file = diag_table_fp, - values_src = settings, - ) + input_file=DIAG_TABLE_TMPL_FP, + output_file=diag_table_fp, + values_src=settings, + ) return True @@ -102,7 +102,7 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_yaml_config(args.path_to_defns) + cfg = get_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) import_vars(dictionary=cfg) create_diag_table_file(args.run_dir) diff --git a/ush/create_model_configure_file.py b/ush/create_model_configure_file.py index 4caa0a1dc..d40b6dfac 100644 --- a/ush/create_model_configure_file.py +++ b/ush/create_model_configure_file.py @@ -7,13 +7,13 @@ import os import sys from textwrap import dedent +from uwtools.api.config import get_yaml_config from uwtools.api.template import render from python_utils import ( cfg_to_yaml_str, flatten_dict, import_vars, - load_yaml_config, lowercase, print_info_msg, print_input_args, @@ -22,9 +22,14 @@ def create_model_configure_file( - cdate, fcst_len_hrs, fhrot, run_dir, dt_atmos, sub_hourly_post=False, + cdate, + fcst_len_hrs, + fhrot, + run_dir, + dt_atmos, + sub_hourly_post=False, dt_subhourly_post_mnts=None, - ): #pylint: disable=too-many-arguments +): # pylint: disable=too-many-arguments """Creates a model configuration file in the specified run directory @@ -118,9 +123,7 @@ def create_model_configure_file( "dlat": "", } ) - elif ( - WRTCMP_output_grid in ("regional_latlon", "rotated_latlon") - ): + elif WRTCMP_output_grid in ("regional_latlon", "rotated_latlon"): settings.update( { "lon2": WRTCMP_lon_upr_rght, @@ -221,10 +224,10 @@ def create_model_configure_file( model_config_fp = os.path.join(run_dir, 
MODEL_CONFIG_FN) render( - input_file = MODEL_CONFIG_TMPL_FP, - output_file = model_config_fp, - values_src = settings - ) + input_file=MODEL_CONFIG_TMPL_FP, + output_file=model_config_fp, + values_src=settings, + ) return True @@ -295,7 +298,7 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_yaml_config(args.path_to_defns) + cfg = get_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) import_vars(dictionary=cfg) create_model_configure_file( diff --git a/ush/create_ufs_configure_file.py b/ush/create_ufs_configure_file.py index 3fd82f488..c62c61c52 100644 --- a/ush/create_ufs_configure_file.py +++ b/ush/create_ufs_configure_file.py @@ -9,19 +9,20 @@ import os import sys from textwrap import dedent +from uwtools.api.config import get_yaml_config from uwtools.api.template import render from python_utils import ( cfg_to_yaml_str, flatten_dict, import_vars, - load_yaml_config, print_info_msg, print_input_args, ) + def create_ufs_configure_file(run_dir): - """ Creates a ufs configuration file in the specified + """Creates a ufs configuration file in the specified run directory Args: @@ -32,40 +33,39 @@ def create_ufs_configure_file(run_dir): print_input_args(locals()) - #import all environment variables + # import all environment variables import_vars() # pylint: disable=undefined-variable # - #----------------------------------------------------------------------- + # ----------------------------------------------------------------------- # # Create a UFS configuration file in the specified run directory. # - #----------------------------------------------------------------------- + # ----------------------------------------------------------------------- # - print_info_msg(f''' + print_info_msg( + f''' Creating a ufs.configure file (\"{UFS_CONFIG_FN}\") in the specified run directory (run_dir): - run_dir = \"{run_dir}\"''', verbose=VERBOSE) + run_dir = \"{run_dir}\"''', + verbose=VERBOSE, + ) # # Set output file path # ufs_config_fp = os.path.join(run_dir, UFS_CONFIG_FN) # - #----------------------------------------------------------------------- + # ----------------------------------------------------------------------- # # Create a multiline variable that consists of a yaml-compliant string # specifying the values that the jinja variables in the template # model_configure file should be set to. # - #----------------------------------------------------------------------- + # ----------------------------------------------------------------------- # - settings = { - "dt_atmos": DT_ATMOS, - "print_esmf": PRINT_ESMF, - "cpl_aqm": CPL_AQM - } + settings = {"dt_atmos": DT_ATMOS, "print_esmf": PRINT_ESMF, "cpl_aqm": CPL_AQM} settings_str = cfg_to_yaml_str(settings) print_info_msg( @@ -79,41 +79,43 @@ def create_ufs_configure_file(run_dir): verbose=VERBOSE, ) # - #----------------------------------------------------------------------- + # ----------------------------------------------------------------------- # # Call a python script to generate the experiment's actual UFS_CONFIG_FN # file from the template file. 
# - #----------------------------------------------------------------------- + # ----------------------------------------------------------------------- # render( - input_file = UFS_CONFIG_TMPL_FP, - output_file = ufs_config_fp, - values_src = settings, - ) + input_file=UFS_CONFIG_TMPL_FP, + output_file=ufs_config_fp, + values_src=settings, + ) return True + def parse_args(argv): - """ Parse command line arguments""" - parser = argparse.ArgumentParser( - description='Creates UFS configuration file.' - ) + """Parse command line arguments""" + parser = argparse.ArgumentParser(description="Creates UFS configuration file.") - parser.add_argument("-r", "--run-dir", - dest="run_dir", - required=True, - help="Run directory.") + parser.add_argument( + "-r", "--run-dir", dest="run_dir", required=True, help="Run directory." + ) - parser.add_argument("-p", "--path-to-defns", - dest="path_to_defns", - required=True, - help="Path to var_defns file.") + parser.add_argument( + "-p", + "--path-to-defns", + dest="path_to_defns", + required=True, + help="Path to var_defns file.", + ) return parser.parse_args(argv) + if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_yaml_config(args.path_to_defns) + cfg = get_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) import_vars(dictionary=cfg) create_ufs_configure_file( diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index cad5126ce..a89ee2fab 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -39,11 +39,11 @@ from get_crontab_contents import add_crontab_line from check_python_version import check_python_version + # pylint: disable=too-many-locals,too-many-branches, too-many-statements def generate_FV3LAM_wflow( - ushdir, - logfile: str = "log.generate_FV3LAM_wflow", - debug: bool = False) -> str: + ushdir, logfile: str = "log.generate_FV3LAM_wflow", debug: bool = False +) -> str: """Function to setup a forecast experiment and create a workflow (according to the parameters specified in the config file) @@ -71,7 +71,7 @@ def generate_FV3LAM_wflow( # The setup function reads the user configuration file and fills in # non-user-specified values from config_defaults.yaml - expt_config = setup(ushdir,debug=debug) + expt_config = setup(ushdir, debug=debug) # # ----------------------------------------------------------------------- @@ -116,10 +116,10 @@ def generate_FV3LAM_wflow( # rocoto_yaml_fp = expt_config["workflow"]["ROCOTO_YAML_FP"] render( - input_file = template_xml_fp, - output_file = wflow_xml_fp, - values_src = rocoto_yaml_fp, - ) + input_file=template_xml_fp, + output_file=wflow_xml_fp, + values_src=rocoto_yaml_fp, + ) # # ----------------------------------------------------------------------- # @@ -140,22 +140,27 @@ def generate_FV3LAM_wflow( verbose=debug, ) - with open(wflow_launch_script_fp, "r", encoding='utf-8') as launch_script_file: + with open(wflow_launch_script_fp, "r", encoding="utf-8") as launch_script_file: launch_script_content = launch_script_file.read() # Stage an experiment-specific launch file in the experiment directory template = Template(launch_script_content) # The script needs several variables from the workflow and user sections - template_variables = {**expt_config["user"], **expt_config["workflow"], - "valid_vals_BOOLEAN": list_to_str(expt_config["constants"]["valid_vals_BOOLEAN"])} - launch_content = template.safe_substitute(template_variables) + template_variables = { + **expt_config["user"], + **expt_config["workflow"], + "valid_vals_BOOLEAN": 
list_to_str( + expt_config["constants"]["valid_vals_BOOLEAN"] + ), + } + launch_content = template.safe_substitute(template_variables) launch_fp = os.path.join(exptdir, wflow_launch_script_fn) - with open(launch_fp, "w", encoding='utf-8') as expt_launch_fn: + with open(launch_fp, "w", encoding="utf-8") as expt_launch_fn: expt_launch_fn.write(launch_content) - os.chmod(launch_fp, os.stat(launch_fp).st_mode|S_IXUSR) + os.chmod(launch_fp, os.stat(launch_fp).st_mode | S_IXUSR) # # ----------------------------------------------------------------------- @@ -175,9 +180,13 @@ def generate_FV3LAM_wflow( # pylint: disable=undefined-variable if USE_CRON_TO_RELAUNCH: - add_crontab_line(called_from_cron=False,machine=expt_config["user"]["MACHINE"], - crontab_line=expt_config["workflow"]["CRONTAB_LINE"], - exptdir=exptdir,debug=debug) + add_crontab_line( + called_from_cron=False, + machine=expt_config["user"]["MACHINE"], + crontab_line=expt_config["workflow"]["CRONTAB_LINE"], + exptdir=exptdir, + debug=debug, + ) # # Copy or symlink fix files @@ -362,86 +371,146 @@ def generate_FV3LAM_wflow( } fv_core_nml_dict = {} - fv_core_nml_dict.update({ - "target_lon": LON_CTR, - "target_lat": LAT_CTR, - "nrows_blend": HALO_BLEND, - # - # Question: - # For a ESGgrid type grid, what should stretch_fac be set to? This depends - # on how the FV3 code uses the stretch_fac parameter in the namelist file. - # Recall that for a ESGgrid, it gets set in the function set_gridparams_ESGgrid(.sh) - # to something like 0.9999, but is it ok to set it to that here in the - # FV3 namelist file? - # - "stretch_fac": STRETCH_FAC, - "npx": npx, - "npy": npy, - "layout": [LAYOUT_X, LAYOUT_Y], - "bc_update_interval": LBC_SPEC_INTVL_HRS, - }) + fv_core_nml_dict.update( + { + "target_lon": LON_CTR, + "target_lat": LAT_CTR, + "nrows_blend": HALO_BLEND, + # + # Question: + # For a ESGgrid type grid, what should stretch_fac be set to? This depends + # on how the FV3 code uses the stretch_fac parameter in the namelist file. + # Recall that for a ESGgrid, it gets set in the function set_gridparams_ESGgrid(.sh) + # to something like 0.9999, but is it ok to set it to that here in the + # FV3 namelist file? 
+ # + "stretch_fac": STRETCH_FAC, + "npx": npx, + "npy": npy, + "layout": [LAYOUT_X, LAYOUT_Y], + "bc_update_interval": LBC_SPEC_INTVL_HRS, + } + ) if CCPP_PHYS_SUITE == "FV3_GFS_v15p2": if CPL_AQM: - fv_core_nml_dict.update({ - "dnats": 5 - }) + fv_core_nml_dict.update({"dnats": 5}) else: - fv_core_nml_dict.update({ - "dnats": 1 - }) + fv_core_nml_dict.update({"dnats": 1}) elif CCPP_PHYS_SUITE == "FV3_GFS_v16": if CPL_AQM: - fv_core_nml_dict.update({ - "hord_tr": 8, - "dnats": 5, - "nord": 2 - }) + fv_core_nml_dict.update({"hord_tr": 8, "dnats": 5, "nord": 2}) else: - fv_core_nml_dict.update({ - "dnats": 1 - }) + fv_core_nml_dict.update({"dnats": 1}) elif CCPP_PHYS_SUITE == "FV3_GFS_v17_p8": if CPL_AQM: - fv_core_nml_dict.update({ - "dnats": 4 - }) + fv_core_nml_dict.update({"dnats": 4}) else: - fv_core_nml_dict.update({ - "dnats": 0 - }) + fv_core_nml_dict.update({"dnats": 0}) settings["fv_core_nml"] = fv_core_nml_dict gfs_physics_nml_dict = {} - gfs_physics_nml_dict.update({ - "kice": kice or None, - "lsoil": lsoil or None, - "print_diff_pgr": PRINT_DIFF_PGR, - }) + gfs_physics_nml_dict.update( + { + "kice": kice or None, + "lsoil": lsoil or None, + "print_diff_pgr": PRINT_DIFF_PGR, + } + ) if CPL_AQM: - gfs_physics_nml_dict.update({ - "cplaqm": True, - "cplocn2atm": False, - "fscav_aero": [ - "aacd:0.0", "acet:0.0", "acrolein:0.0", "acro_primary:0.0", "ald2:0.0", - "ald2_primary:0.0", "aldx:0.0", "benzene:0.0", "butadiene13:0.0", "cat1:0.0", - "cl2:0.0", "clno2:0.0", "co:0.0", "cres:0.0", "cron:0.0", - "ech4:0.0", "epox:0.0", "eth:0.0", "etha:0.0", "ethy:0.0", - "etoh:0.0", "facd:0.0", "fmcl:0.0", "form:0.0", "form_primary:0.0", - "gly:0.0", "glyd:0.0", "h2o2:0.0", "hcl:0.0", "hg:0.0", - "hgiigas:0.0", "hno3:0.0", "hocl:0.0", "hono:0.0", "hpld:0.0", - "intr:0.0", "iole:0.0", "isop:0.0", "ispd:0.0", "ispx:0.0", - "ket:0.0", "meoh:0.0", "mepx:0.0", "mgly:0.0", "n2o5:0.0", - "naph:0.0", "no:0.0", "no2:0.0", "no3:0.0", "ntr1:0.0", - "ntr2:0.0", "o3:0.0", "ole:0.0", "opan:0.0", "open:0.0", - "opo3:0.0", "pacd:0.0", "pan:0.0", "panx:0.0", "par:0.0", - "pcvoc:0.0", "pna:0.0", "prpa:0.0", "rooh:0.0", "sesq:0.0", - "so2:0.0", "soaalk:0.0", "sulf:0.0", "terp:0.0", "tol:0.0", - "tolu:0.0", "vivpo1:0.0", "vlvoo1:0.0", "vlvoo2:0.0", "vlvpo1:0.0", - "vsvoo1:0.0", "vsvoo2:0.0", "vsvoo3:0.0", "vsvpo1:0.0", "vsvpo2:0.0", - "vsvpo3:0.0", "xopn:0.0", "xylmn:0.0", "*:0.2" ] - }) + gfs_physics_nml_dict.update( + { + "cplaqm": True, + "cplocn2atm": False, + "fscav_aero": [ + "aacd:0.0", + "acet:0.0", + "acrolein:0.0", + "acro_primary:0.0", + "ald2:0.0", + "ald2_primary:0.0", + "aldx:0.0", + "benzene:0.0", + "butadiene13:0.0", + "cat1:0.0", + "cl2:0.0", + "clno2:0.0", + "co:0.0", + "cres:0.0", + "cron:0.0", + "ech4:0.0", + "epox:0.0", + "eth:0.0", + "etha:0.0", + "ethy:0.0", + "etoh:0.0", + "facd:0.0", + "fmcl:0.0", + "form:0.0", + "form_primary:0.0", + "gly:0.0", + "glyd:0.0", + "h2o2:0.0", + "hcl:0.0", + "hg:0.0", + "hgiigas:0.0", + "hno3:0.0", + "hocl:0.0", + "hono:0.0", + "hpld:0.0", + "intr:0.0", + "iole:0.0", + "isop:0.0", + "ispd:0.0", + "ispx:0.0", + "ket:0.0", + "meoh:0.0", + "mepx:0.0", + "mgly:0.0", + "n2o5:0.0", + "naph:0.0", + "no:0.0", + "no2:0.0", + "no3:0.0", + "ntr1:0.0", + "ntr2:0.0", + "o3:0.0", + "ole:0.0", + "opan:0.0", + "open:0.0", + "opo3:0.0", + "pacd:0.0", + "pan:0.0", + "panx:0.0", + "par:0.0", + "pcvoc:0.0", + "pna:0.0", + "prpa:0.0", + "rooh:0.0", + "sesq:0.0", + "so2:0.0", + "soaalk:0.0", + "sulf:0.0", + "terp:0.0", + "tol:0.0", + "tolu:0.0", + "vivpo1:0.0", + "vlvoo1:0.0", + 
"vlvoo2:0.0", + "vlvpo1:0.0", + "vsvoo1:0.0", + "vsvoo2:0.0", + "vsvoo3:0.0", + "vsvpo1:0.0", + "vsvpo2:0.0", + "vsvpo3:0.0", + "xopn:0.0", + "xylmn:0.0", + "*:0.2", + ], + } + ) settings["gfs_physics_nml"] = gfs_physics_nml_dict # @@ -513,8 +582,8 @@ def generate_FV3LAM_wflow( physics_cfg = get_yaml_config(FV3_NML_YAML_CONFIG_FP) base_namelist = get_nml_config(FV3_NML_BASE_SUITE_FP) - base_namelist.update_values(physics_cfg[CCPP_PHYS_SUITE]) - base_namelist.update_values(settings) + base_namelist.update_from(physics_cfg[CCPP_PHYS_SUITE]) + base_namelist.update_from(settings) for sect, values in base_namelist.copy().items(): if not values: del base_namelist[sect] @@ -535,7 +604,7 @@ def generate_FV3LAM_wflow( # the C-resolution of the grid), and this parameter is in most workflow # configurations is not known until the grid is created. # - if not expt_config['rocoto']['tasks'].get('task_make_grid'): + if not expt_config["rocoto"]["tasks"].get("task_make_grid"): set_fv3nml_sfc_climo_filenames(flatten_dict(expt_config), debug) @@ -644,11 +713,11 @@ def generate_FV3LAM_wflow( settings_str = cfg_to_yaml_str(settings) # - #----------------------------------------------------------------------- + # ----------------------------------------------------------------------- # # Generate namelist files with stochastic physics if needed # - #----------------------------------------------------------------------- + # ----------------------------------------------------------------------- # if any((DO_SPP, DO_SPPT, DO_SHUM, DO_SKEB, DO_LSM_SPP)): realize( @@ -657,7 +726,7 @@ def generate_FV3LAM_wflow( output_file=FV3_NML_STOCH_FP, output_format="nml", update_config=get_nml_config(settings), - ) + ) # # ----------------------------------------------------------------------- @@ -725,7 +794,9 @@ def generate_FV3LAM_wflow( return EXPTDIR -def setup_logging(logfile: str = "log.generate_FV3LAM_wflow", debug: bool = False) -> None: +def setup_logging( + logfile: str = "log.generate_FV3LAM_wflow", debug: bool = False +) -> None: """ Sets up logging, printing high-priority (INFO and higher) messages to screen, and printing all messages with detailed timing and routine info in the specified text file. @@ -736,7 +807,7 @@ def setup_logging(logfile: str = "log.generate_FV3LAM_wflow", debug: bool = Fals formatter = logging.Formatter("%(name)-22s %(levelname)-8s %(message)s") - fh = logging.FileHandler(logfile, mode='w') + fh = logging.FileHandler(logfile, mode="w") fh.setLevel(logging.DEBUG) fh.setFormatter(formatter) logging.getLogger().addHandler(fh) @@ -759,13 +830,18 @@ def setup_logging(logfile: str = "log.generate_FV3LAM_wflow", debug: bool = Fals if __name__ == "__main__": - #Parse arguments + # Parse arguments parser = argparse.ArgumentParser( - description="Script for setting up a forecast and creating a workflow"\ - "according to the parameters specified in the config file\n") + description="Script for setting up a forecast and creating a workflow" + "according to the parameters specified in the config file\n" + ) - parser.add_argument('-d', '--debug', action='store_true', - help='Script will be run in debug mode with more verbose output') + parser.add_argument( + "-d", + "--debug", + action="store_true", + help="Script will be run in debug mode with more verbose output", + ) pargs = parser.parse_args() USHdir = os.path.dirname(os.path.abspath(__file__)) @@ -775,7 +851,7 @@ def setup_logging(logfile: str = "log.generate_FV3LAM_wflow", debug: bool = Fals # experiment/workflow. 
try: expt_dir = generate_FV3LAM_wflow(USHdir, wflow_logfile, pargs.debug) - except: # pylint: disable=bare-except + except: # pylint: disable=bare-except logging.exception( dedent( f""" diff --git a/ush/link_fix.py b/ush/link_fix.py index f0d103d8e..3bc8b283d 100755 --- a/ush/link_fix.py +++ b/ush/link_fix.py @@ -6,6 +6,7 @@ import re import glob +from uwtools.api.config import get_yaml_config from python_utils import ( import_vars, print_input_args, @@ -18,7 +19,6 @@ cd_vrfy, mkdir_vrfy, find_pattern_in_str, - load_yaml_config, ) @@ -207,7 +207,12 @@ def link_fix( f"C*{dot_or_uscore}oro_data.tile{tile_rgnl}.halo{nh0}.nc", f"C*{dot_or_uscore}oro_data.tile{tile_rgnl}.halo{nh4}.nc", ] - if ccpp_phys_suite == "FV3_RAP" or ccpp_phys_suite == "FV3_HRRR" or ccpp_phys_suite == "FV3_GFS_v15_thompson_mynn_lam3km" or ccpp_phys_suite == "FV3_GFS_v17_p8": + if ( + ccpp_phys_suite == "FV3_RAP" + or ccpp_phys_suite == "FV3_HRRR" + or ccpp_phys_suite == "FV3_GFS_v15_thompson_mynn_lam3km" + or ccpp_phys_suite == "FV3_GFS_v17_p8" + ): fns += [ f"C*{dot_or_uscore}oro_data_ss.tile{tile_rgnl}.halo{nh0}.nc", f"C*{dot_or_uscore}oro_data_ls.tile{tile_rgnl}.halo{nh0}.nc", @@ -403,7 +408,7 @@ def parse_args(argv): if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_yaml_config(args.path_to_defns) + cfg = get_yaml_config(args.path_to_defns) link_fix( verbose=cfg["workflow"]["VERBOSE"], file_group=args.file_group, diff --git a/ush/set_fv3nml_sfc_climo_filenames.py b/ush/set_fv3nml_sfc_climo_filenames.py index 7251a5b0e..eb5b371bb 100644 --- a/ush/set_fv3nml_sfc_climo_filenames.py +++ b/ush/set_fv3nml_sfc_climo_filenames.py @@ -31,11 +31,12 @@ "FV3_NML_FP", "PARMdir", "RUN_ENVIR", - ] +] # pylint: disable=undefined-variable + def set_fv3nml_sfc_climo_filenames(config, debug=False): """ This function sets the values of the variables in @@ -54,7 +55,9 @@ def set_fv3nml_sfc_climo_filenames(config, debug=False): import_vars(dictionary=config, env_vars=NEEDED_VARS) - fixed_cfg = get_yaml_config(os.path.join(PARMdir, "fixed_files_mapping.yaml"))["fixed_files"] + fixed_cfg = get_yaml_config(os.path.join(PARMdir, "fixed_files_mapping.yaml"))[ + "fixed_files" + ] # The regular expression regex_search set below will be used to extract # from the elements of the array FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING @@ -80,7 +83,9 @@ def set_fv3nml_sfc_climo_filenames(config, debug=False): file_path = os.path.join(FIXlam, f"{CRES}.{sfc_climo_field_name}.{suffix}") if RUN_ENVIR != "nco": - file_path = os.path.relpath(os.path.realpath(file_path), start=dummy_run_dir) + file_path = os.path.relpath( + os.path.realpath(file_path), start=dummy_run_dir + ) namsfc_dict[nml_var_name] = file_path @@ -106,7 +111,8 @@ def set_fv3nml_sfc_climo_filenames(config, debug=False): output_file=FV3_NML_FP, output_format="nml", update_config=get_nml_config(settings), - ) + ) + def parse_args(argv): """Parse command line arguments""" @@ -119,14 +125,18 @@ def parse_args(argv): required=True, help="Path to var_defns file.", ) - parser.add_argument('-d', '--debug', action='store_true', - help='Script will be run in debug mode with more verbose output') + parser.add_argument( + "-d", + "--debug", + action="store_true", + help="Script will be run in debug mode with more verbose output", + ) return parser.parse_args(argv) if __name__ == "__main__": args = parse_args(sys.argv[1:]) - cfg = load_yaml_config(args.path_to_defns) + cfg = get_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) set_fv3nml_sfc_climo_filenames(cfg, args.debug) 
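
[Editorial note] The API migration in the patch above follows one consistent pattern: the old `python_utils.load_yaml_config` helper is replaced by `uwtools.api.config.get_yaml_config`, and namelist merging moves from `update_values` to `update_from` before the result is dumped back out as a Fortran namelist. A minimal standalone sketch of that pattern, using only calls that appear in the patch itself (the file names and the "FV3_GFS_v16" key below are hypothetical examples, not paths from the repository):

    from pathlib import Path

    from uwtools.api.config import get_nml_config, get_yaml_config

    # Hypothetical example files; in the workflow these come from the experiment config
    physics_cfg = get_yaml_config("FV3.input.yml")    # per-suite namelist overrides (YAML)
    base_namelist = get_nml_config("input.nml.FV3")   # base Fortran namelist as a config object

    # Merge the suite-specific block into the base namelist in place
    base_namelist.update_from(physics_cfg["FV3_GFS_v16"])

    # Drop empty sections and None-valued keys, as generate_FV3LAM_wflow does,
    # then write the merged result back out in namelist format
    for sect, values in base_namelist.copy().items():
        if not values:
            del base_namelist[sect]
            continue
        for key, val in values.copy().items():
            if val is None:
                del base_namelist[sect][key]
    base_namelist.dump(Path("input.nml"))
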
From ed58157ec10e53640db5e5d1678f6dd251e806a3 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Thu, 19 Sep 2024 11:30:48 -0500 Subject: [PATCH 11/47] revert unintended changes --- ush/create_model_configure_file.py | 229 +------- ush/create_ufs_configure_file.py | 880 +++++++++++++++++++++++++++-- ush/setup.py | 8 +- 3 files changed, 845 insertions(+), 272 deletions(-) diff --git a/ush/create_model_configure_file.py b/ush/create_model_configure_file.py index fc413b4d0..eb0b52917 100644 --- a/ush/create_model_configure_file.py +++ b/ush/create_model_configure_file.py @@ -1,8 +1,10 @@ #!/usr/bin/env python3 + """ -Create a model_configure file for the FV3 forecast model from a -template. +Function to create a UFS configuration file for the FV3 forecast +model(s) from a template. """ + import argparse import os import sys @@ -12,36 +14,20 @@ cfg_to_yaml_str, flatten_dict, import_vars, - lowercase, print_info_msg, print_input_args, - str_to_type, ) from uwtools.api.config import get_yaml_config from uwtools.api.template import render -def create_model_configure_file( - cdate, - fcst_len_hrs, - fhrot, - run_dir, - dt_atmos, - sub_hourly_post=False, - dt_subhourly_post_mnts=None, -): # pylint: disable=too-many-arguments - """Creates a model configuration file in the specified +def create_ufs_configure_file(run_dir): + """Creates a ufs configuration file in the specified run directory Args: - cdate: cycle date - fcst_len_hrs: forecast length in hours - fhrot: forecast hour at restart run_dir: run directory - sub_hourly_post - dt_subhourly_post_mnts - dt_atmos Returns: Boolean """ @@ -56,18 +42,22 @@ def create_model_configure_file( # # ----------------------------------------------------------------------- # - # Create a model configuration file in the specified run directory. + # Create a UFS configuration file in the specified run directory. # # ----------------------------------------------------------------------- # print_info_msg( - f""" - Creating a model configuration file ('{MODEL_CONFIG_FN}') in the specified + f''' + Creating a ufs.configure file (\"{UFS_CONFIG_FN}\") in the specified run directory (run_dir): - run_dir = '{run_dir}'""", + run_dir = \"{run_dir}\"''', verbose=VERBOSE, ) # + # Set output file path + # + ufs_config_fp = os.path.join(run_dir, UFS_CONFIG_FN) + # # ----------------------------------------------------------------------- # # Create a multiline variable that consists of a yaml-compliant string @@ -76,138 +66,13 @@ def create_model_configure_file( # # ----------------------------------------------------------------------- # - settings = { - "PE_MEMBER01": PE_MEMBER01, - "start_year": cdate.year, - "start_month": cdate.month, - "start_day": cdate.day, - "start_hour": cdate.hour, - "nhours_fcst": fcst_len_hrs, - "fhrot": fhrot, - "dt_atmos": DT_ATMOS, - "atmos_nthreads": OMP_NUM_THREADS_RUN_FCST, - "restart_interval": RESTART_INTERVAL, - "itasks": ITASKS, - "write_dopost": f".{lowercase(str(WRITE_DOPOST))}.", - "quilting": f".{lowercase(str(QUILTING))}.", - "output_grid": WRTCMP_output_grid, - } - # - # If the write-component is to be used, then specify a set of computational - # parameters and a set of grid parameters. The latter depends on the type - # (coordinate system) of the grid that the write-component will be using. 
- # - if QUILTING: - settings.update( - { - "write_groups": WRTCMP_write_groups, - "write_tasks_per_group": WRTCMP_write_tasks_per_group, - "cen_lon": WRTCMP_cen_lon, - "cen_lat": WRTCMP_cen_lat, - "lon1": WRTCMP_lon_lwr_left, - "lat1": WRTCMP_lat_lwr_left, - } - ) - - if WRTCMP_output_grid == "lambert_conformal": - settings.update( - { - "stdlat1": WRTCMP_stdlat1, - "stdlat2": WRTCMP_stdlat2, - "nx": WRTCMP_nx, - "ny": WRTCMP_ny, - "dx": WRTCMP_dx, - "dy": WRTCMP_dy, - "lon2": "", - "lat2": "", - "dlon": "", - "dlat": "", - } - ) - elif WRTCMP_output_grid in ("regional_latlon", "rotated_latlon"): - settings.update( - { - "lon2": WRTCMP_lon_upr_rght, - "lat2": WRTCMP_lat_upr_rght, - "dlon": WRTCMP_dlon, - "dlat": WRTCMP_dlat, - "stdlat1": "", - "stdlat2": "", - "nx": "", - "ny": "", - "dx": "", - "dy": "", - } - ) - # - # If not using the write-component (aka quilting), set those variables - # needed for quilting to None so that it gets rendered in the template appropriately. - # - else: - settings.update( - { - "write_groups": None, - "write_tasks_per_group": None, - "cen_lon": None, - "cen_lat": None, - "lon1": None, - "lat1": None, - "stdlat1": None, - "stdlat2": None, - "nx": None, - "ny": None, - "dx": None, - "dy": None, - "lon2": None, - "lat2": None, - "dlon": None, - "dlat": None, - } - ) - # - # If sub_hourly_post is set to "TRUE", then the forecast model must be - # directed to generate output files on a sub-hourly interval. Do this - # by specifying the output interval in the model configuration file - # (MODEL_CONFIG_FN) in units of number of forecat model time steps (nsout). - # nsout is calculated using the user-specified output time interval - # dt_subhourly_post_mnts (in units of minutes) and the forecast model's - # main time step dt_atmos (in units of seconds). Note that nsout is - # guaranteed to be an integer because the experiment generation scripts - # require that dt_subhourly_post_mnts (after conversion to seconds) be - # evenly divisible by dt_atmos. Also, in this case, the variable output_fh - # [which specifies the output interval in hours; - # see the jinja model_config template file] is set to 0, although this - # doesn't matter because any positive of nsout will override output_fh. - # - # If sub_hourly_post is set to "FALSE", then the workflow is hard-coded - # (in the jinja model_config template file) to direct the forecast model - # to output files every hour. This is done by setting (1) output_fh to 1 - # here, and (2) nsout to -1 here which turns off output by time step interval. - # - # Note that the approach used here of separating how hourly and subhourly - # output is handled should be changed/generalized/simplified such that - # the user should only need to specify the output time interval (there - # should be no need to specify a flag like sub_hourly_post); the workflow - # should then be able to direct the model to output files with that time - # interval and to direct the post-processor to process those files - # regardless of whether that output time interval is larger than, equal - # to, or smaller than one hour. 
- # - if sub_hourly_post: - nsout = (dt_subhourly_post_mnts * 60) // dt_atmos - output_fh = 0 - else: - output_fh = 1 - nsout = -1 - - settings.update({"output_fh": output_fh, "nsout": nsout}) - + settings = {"dt_atmos": DT_ATMOS, "print_esmf": PRINT_ESMF, "cpl_aqm": CPL_AQM} settings_str = cfg_to_yaml_str(settings) print_info_msg( dedent( f""" - The variable 'settings' specifying values to be used in the '{MODEL_CONFIG_FN}' + The variable \"settings\" specifying values to be used in the \"{UFS_CONFIG_FN}\" file has been set as follows:\n settings =\n\n""" ) @@ -217,16 +82,14 @@ def create_model_configure_file( # # ----------------------------------------------------------------------- # - # Call a python script to generate the experiment's actual MODEL_CONFIG_FN + # Call a python script to generate the experiment's actual UFS_CONFIG_FN # file from the template file. # # ----------------------------------------------------------------------- # - model_config_fp = os.path.join(run_dir, MODEL_CONFIG_FN) - render( - input_file=MODEL_CONFIG_TMPL_FP, - output_file=model_config_fp, + input_file=UFS_CONFIG_TMPL_FP, + output_file=ufs_config_fp, values_src=settings, ) return True @@ -234,58 +97,12 @@ def create_model_configure_file( def parse_args(argv): """Parse command line arguments""" - parser = argparse.ArgumentParser(description="Creates model configuration file.") + parser = argparse.ArgumentParser(description="Creates UFS configuration file.") parser.add_argument( "-r", "--run-dir", dest="run_dir", required=True, help="Run directory." ) - parser.add_argument( - "-c", - "--cdate", - dest="cdate", - required=True, - help="Date string in YYYYMMDD format.", - ) - - parser.add_argument( - "-f", - "--fcst_len_hrs", - dest="fcst_len_hrs", - required=True, - help="Forecast length in hours.", - ) - - parser.add_argument( - "-b", - "--fhrot", - dest="fhrot", - required=True, - help="Forecast hour at restart.", - ) - - parser.add_argument( - "-s", - "--sub-hourly-post", - dest="sub_hourly_post", - help="Set sub hourly post to either TRUE/FALSE by passing corresponding string.", - ) - - parser.add_argument( - "-d", - "--dt-subhourly-post-mnts", - dest="dt_subhourly_post_mnts", - help="Subhourly post minitues.", - ) - - parser.add_argument( - "-t", - "--dt-atmos", - dest="dt_atmos", - required=True, - help="Forecast model's main time step.", - ) - parser.add_argument( "-p", "--path-to-defns", @@ -302,10 +119,6 @@ def parse_args(argv): cfg = get_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) import_vars(dictionary=cfg) - create_model_configure_file( + create_ufs_configure_file( run_dir=args.run_dir, - cdate=str_to_type(args.cdate), - fcst_len_hrs=str_to_type(args.fcst_len_hrs), - fhrot=str_to_type(args.fhrot), - dt_atmos=str_to_type(args.dt_atmos), ) diff --git a/ush/create_ufs_configure_file.py b/ush/create_ufs_configure_file.py index 3df4c499f..c9eb5cc7e 100644 --- a/ush/create_ufs_configure_file.py +++ b/ush/create_ufs_configure_file.py @@ -1,123 +1,881 @@ #!/usr/bin/env python3 """ -Function to create a UFS configuration file for the FV3 forecast -model(s) from a template. +User interface to create an experiment directory consistent with the +user-defined config.yaml file. 
""" +# pylint: disable=invalid-name + import argparse +import logging import os import sys +from pathlib import Path +from stat import S_IXUSR +from string import Template from textwrap import dedent from python_utils import ( + list_to_str, + log_info, + import_vars, + export_vars, + cp_vrfy, + ln_vrfy, + mkdir_vrfy, + mv_vrfy, + check_for_preexist_dir_file, cfg_to_yaml_str, + find_pattern_in_str, flatten_dict, - import_vars, - print_info_msg, - print_input_args, ) -from uwtools.api.config import get_yaml_config +from check_python_version import check_python_version +from get_crontab_contents import add_crontab_line +from setup import setup +from set_fv3nml_sfc_climo_filenames import set_fv3nml_sfc_climo_filenames + +from uwtools.api.config import get_nml_config, get_yaml_config, realize from uwtools.api.template import render -def create_ufs_configure_file(run_dir): - """Creates a ufs configuration file in the specified - run directory + +# pylint: disable=too-many-locals,too-many-branches, too-many-statements +def generate_FV3LAM_wflow( + ushdir, logfile: str = "log.generate_FV3LAM_wflow", debug: bool = False +) -> str: + """Function to setup a forecast experiment and create a workflow + (according to the parameters specified in the config file) Args: - run_dir: run directory + ushdir (str) : The full path of the ush/ directory where this script is located + logfile (str) : The name of the file where logging is written + debug (bool): Enable extra output for debugging Returns: - Boolean + EXPTDIR (str) : The full path of the directory where this experiment has been generated """ - print_input_args(locals()) + # Set up logging to write to screen and logfile + setup_logging(logfile, debug) - # import all environment variables - import_vars() + # Check python version and presence of some non-standard packages + check_python_version() - # pylint: disable=undefined-variable + # Note start of workflow generation + log_info( + """ + ======================================================================== + Starting experiment generation... + ========================================================================""" + ) + + # The setup function reads the user configuration file and fills in + # non-user-specified values from config_defaults.yaml + expt_config = setup(ushdir, debug=debug) # # ----------------------------------------------------------------------- # - # Create a UFS configuration file in the specified run directory. + # Set the full path to the experiment's rocoto workflow xml file. This + # file will be placed at the top level of the experiment directory and + # then used by rocoto to run the workflow. # # ----------------------------------------------------------------------- # - print_info_msg( - f''' - Creating a ufs.configure file (\"{UFS_CONFIG_FN}\") in the specified - run directory (run_dir): - run_dir = \"{run_dir}\"''', - verbose=VERBOSE, + wflow_xml_fn = expt_config["workflow"]["WFLOW_XML_FN"] + wflow_xml_fp = os.path.join( + expt_config["workflow"]["EXPTDIR"], + wflow_xml_fn, ) # - # Set output file path + # ----------------------------------------------------------------------- # - ufs_config_fp = os.path.join(run_dir, UFS_CONFIG_FN) + # Create a multiline variable that consists of a yaml-compliant string + # specifying the values that the jinja variables in the template rocoto + # XML should be set to. These values are set either in the user-specified + # workflow configuration file (EXPT_CONFIG_FN) or in the setup() function + # called above. 
Then call the python script that generates the XML. # # ----------------------------------------------------------------------- # - # Create a multiline variable that consists of a yaml-compliant string - # specifying the values that the jinja variables in the template - # model_configure file should be set to. + if expt_config["platform"]["WORKFLOW_MANAGER"] == "rocoto": + + template_xml_fp = os.path.join( + expt_config["user"]["PARMdir"], + wflow_xml_fn, + ) + + log_info( + f""" + Creating rocoto workflow XML file (WFLOW_XML_FP): + WFLOW_XML_FP = '{wflow_xml_fp}'""" + ) + + # + # Call the python script to generate the experiment's XML file + # + rocoto_yaml_fp = expt_config["workflow"]["ROCOTO_YAML_FP"] + render( + input_file=template_xml_fp, + output_file=wflow_xml_fp, + values_src=rocoto_yaml_fp, + ) # # ----------------------------------------------------------------------- # - settings = {"dt_atmos": DT_ATMOS, "print_esmf": PRINT_ESMF, "cpl_aqm": CPL_AQM} - settings_str = cfg_to_yaml_str(settings) + # Create a symlink in the experiment directory that points to the workflow + # (re)launch script. + # + # ----------------------------------------------------------------------- + # + exptdir = expt_config["workflow"]["EXPTDIR"] + wflow_launch_script_fp = expt_config["workflow"]["WFLOW_LAUNCH_SCRIPT_FP"] + wflow_launch_script_fn = expt_config["workflow"]["WFLOW_LAUNCH_SCRIPT_FN"] + log_info( + f""" + Creating symlink in the experiment directory (EXPTDIR) that points to the + workflow launch script (WFLOW_LAUNCH_SCRIPT_FP): + EXPTDIR = '{exptdir}' + WFLOW_LAUNCH_SCRIPT_FP = '{wflow_launch_script_fp}'""", + verbose=debug, + ) + + with open(wflow_launch_script_fp, "r", encoding="utf-8") as launch_script_file: + launch_script_content = launch_script_file.read() + + # Stage an experiment-specific launch file in the experiment directory + template = Template(launch_script_content) + + # The script needs several variables from the workflow and user sections + template_variables = { + **expt_config["user"], + **expt_config["workflow"], + "valid_vals_BOOLEAN": list_to_str( + expt_config["constants"]["valid_vals_BOOLEAN"] + ), + } + launch_content = template.safe_substitute(template_variables) + + launch_fp = os.path.join(exptdir, wflow_launch_script_fn) + with open(launch_fp, "w", encoding="utf-8") as expt_launch_fn: + expt_launch_fn.write(launch_content) + + os.chmod(launch_fp, os.stat(launch_fp).st_mode | S_IXUSR) + + # + # ----------------------------------------------------------------------- + # + # If USE_CRON_TO_RELAUNCH is set to TRUE, add a line to the user's + # cron table to call the (re)launch script every + # CRON_RELAUNCH_INTVL_MNTS minutes. + # + # ----------------------------------------------------------------------- + # + # From here on out, going back to setting variables for everything + # in the flattened expt_config dictionary + # TODO: Reference all these variables in their respective + # dictionaries, instead. 
+ import_vars(dictionary=flatten_dict(expt_config)) + export_vars(source_dict=flatten_dict(expt_config)) + + # pylint: disable=undefined-variable + if USE_CRON_TO_RELAUNCH: + add_crontab_line( + called_from_cron=False, + machine=expt_config["user"]["MACHINE"], + crontab_line=expt_config["workflow"]["CRONTAB_LINE"], + exptdir=exptdir, + debug=debug, + ) + + # + # Copy or symlink fix files + # + if SYMLINK_FIX_FILES: + log_info( + f""" + Symlinking fixed files from system directory (FIXgsm) to a subdirectory (FIXam): + FIXgsm = '{FIXgsm}' + FIXam = '{FIXam}'""", + verbose=debug, + ) + + ln_vrfy(f"""-fsn '{FIXgsm}' '{FIXam}'""") + else: + + log_info( + f""" + Copying fixed files from system directory (FIXgsm) to a subdirectory (FIXam): + FIXgsm = '{FIXgsm}' + FIXam = '{FIXam}'""", + verbose=debug, + ) + + check_for_preexist_dir_file(FIXam, "delete") + mkdir_vrfy("-p", FIXam) + mkdir_vrfy("-p", os.path.join(FIXam, "fix_co2_proj")) - print_info_msg( - dedent( + num_files = len(FIXgsm_FILES_TO_COPY_TO_FIXam) + for i in range(num_files): + fn = f"{FIXgsm_FILES_TO_COPY_TO_FIXam[i]}" + cp_vrfy(os.path.join(FIXgsm, fn), os.path.join(FIXam, fn)) + # + # ----------------------------------------------------------------------- + # + # Copy MERRA2 aerosol climatology data. + # + # ----------------------------------------------------------------------- + # + if USE_MERRA_CLIMO: + log_info( f""" - The variable \"settings\" specifying values to be used in the \"{UFS_CONFIG_FN}\" - file has been set as follows:\n - settings =\n\n""" + Copying MERRA2 aerosol climatology data files from system directory + (FIXaer/FIXlut) to a subdirectory (FIXclim) in the experiment directory: + FIXaer = '{FIXaer}' + FIXlut = '{FIXlut}' + FIXclim = '{FIXclim}'""", + verbose=debug, ) - + settings_str, - verbose=VERBOSE, + + check_for_preexist_dir_file(FIXclim, "delete") + mkdir_vrfy("-p", FIXclim) + + if SYMLINK_FIX_FILES: + ln_vrfy("-fsn", os.path.join(FIXaer, "merra2.aerclim*.nc"), FIXclim) + ln_vrfy("-fsn", os.path.join(FIXlut, "optics*.dat"), FIXclim) + else: + cp_vrfy(os.path.join(FIXaer, "merra2.aerclim*.nc"), FIXclim) + cp_vrfy(os.path.join(FIXlut, "optics*.dat"), FIXclim) + # + # ----------------------------------------------------------------------- + # + # Copy templates of various input files to the experiment directory. + # + # ----------------------------------------------------------------------- + # + log_info( + """ + Copying templates of various input files to the experiment directory...""", + verbose=debug, + ) + + log_info( + """ + Copying the template data table file to the experiment directory...""", + verbose=debug, + ) + cp_vrfy(DATA_TABLE_TMPL_FP, DATA_TABLE_FP) + + log_info( + """ + Copying the template field table file to the experiment directory...""", + verbose=debug, + ) + cp_vrfy(FIELD_TABLE_TMPL_FP, FIELD_TABLE_FP) + + # + # Copy the CCPP physics suite definition file from its location in the + # clone of the FV3 code repository to the experiment directory (EXPT- + # DIR). + # + log_info( + """ + Copying the CCPP physics suite definition XML file from its location in + the forecast model directory structure to the experiment directory...""", + verbose=debug, + ) + cp_vrfy(CCPP_PHYS_SUITE_IN_CCPP_FP, CCPP_PHYS_SUITE_FP) + # + # Copy the field dictionary file from its location in the + # clone of the FV3 code repository to the experiment directory (EXPT- + # DIR). 
+ # + log_info( + """ + Copying the field dictionary file from its location in the + forecast model directory structure to the experiment + directory...""", + verbose=debug, ) + cp_vrfy(FIELD_DICT_IN_UWM_FP, FIELD_DICT_FP) # # ----------------------------------------------------------------------- # - # Call a python script to generate the experiment's actual UFS_CONFIG_FN - # file from the template file. + # Set parameters in the FV3-LAM namelist file. # # ----------------------------------------------------------------------- # - render( - input_file=UFS_CONFIG_TMPL_FP, - output_file=ufs_config_fp, - values_src=settings, + log_info( + f""" + Setting parameters in weather model's namelist file (FV3_NML_FP): + FV3_NML_FP = '{FV3_NML_FP}'""", + verbose=debug, ) - return True + # + # Set npx and npy, which are just NX plus 1 and NY plus 1, respectively. + # These need to be set in the FV3-LAM Fortran namelist file. They represent + # the number of cell vertices in the x and y directions on the regional + # grid. + # + npx = NX + 1 + npy = NY + 1 + # + # For the physics suites that use RUC LSM, set the parameter kice to 9, + # Otherwise, leave it unspecified (which means it gets set to the default + # value in the forecast model). + # + kice = None + if SDF_USES_RUC_LSM: + kice = 9 + # + # Set lsoil, which is the number of input soil levels provided in the + # chgres_cube output NetCDF file. This is the same as the parameter + # nsoill_out in the namelist file for chgres_cube. [On the other hand, + # the parameter lsoil_lsm (not set here but set in input.nml.FV3 and/or + # FV3.input.yml) is the number of soil levels that the LSM scheme in the + # forecast model will run with.] Here, we use the same approach to set + # lsoil as the one used to set nsoill_out in exregional_make_ics.sh. + # See that script for details. + # + # NOTE: + # May want to remove lsoil from FV3.input.yml (and maybe input.nml.FV3). + # Also, may want to set lsm here as well depending on SDF_USES_RUC_LSM. + # + lsoil = 4 + if EXTRN_MDL_NAME_ICS in ("HRRR", "RAP") and SDF_USES_RUC_LSM: + lsoil = 9 + if CCPP_PHYS_SUITE == "FV3_GFS_v15_thompson_mynn_lam3km": + lsoil = "" + # + # Create a multiline variable that consists of a yaml-compliant string + # specifying the values that the namelist variables that are physics- + # suite-independent need to be set to. Below, this variable will be + # passed to a python script that will in turn set the values of these + # variables in the namelist file. + # + # IMPORTANT: + # If we want a namelist variable to be removed from the namelist file, + # in the "settings" variable below, we need to set its value to the + # string "null". This is equivalent to setting its value to + # !!python/none + # in the base namelist file specified by FV3_NML_BASE_SUITE_FP or the + # suite-specific yaml settings file specified by FV3_NML_YAML_CONFIG_FP. + # + # It turns out that setting the variable to an empty string also works + # to remove it from the namelist! Which is better to use?? + # + settings = {} + settings["atmos_model_nml"] = { + "blocksize": BLOCKSIZE, + "ccpp_suite": CCPP_PHYS_SUITE, + } + fv_core_nml_dict = {} + fv_core_nml_dict.update( + { + "target_lon": LON_CTR, + "target_lat": LAT_CTR, + "nrows_blend": HALO_BLEND, + # + # Question: + # For a ESGgrid type grid, what should stretch_fac be set to? This depends + # on how the FV3 code uses the stretch_fac parameter in the namelist file. 
+ # Recall that for a ESGgrid, it gets set in the function set_gridparams_ESGgrid(.sh) + # to something like 0.9999, but is it ok to set it to that here in the + # FV3 namelist file? + # + "stretch_fac": STRETCH_FAC, + "npx": npx, + "npy": npy, + "layout": [LAYOUT_X, LAYOUT_Y], + "bc_update_interval": LBC_SPEC_INTVL_HRS, + } + ) + if CCPP_PHYS_SUITE == "FV3_GFS_v15p2": + if CPL_AQM: + fv_core_nml_dict.update({"dnats": 5}) + else: + fv_core_nml_dict.update({"dnats": 1}) + elif CCPP_PHYS_SUITE == "FV3_GFS_v16": + if CPL_AQM: + fv_core_nml_dict.update({"hord_tr": 8, "dnats": 5, "nord": 2}) + else: + fv_core_nml_dict.update({"dnats": 1}) + elif CCPP_PHYS_SUITE == "FV3_GFS_v17_p8": + if CPL_AQM: + fv_core_nml_dict.update({"dnats": 4}) + else: + fv_core_nml_dict.update({"dnats": 0}) -def parse_args(argv): - """Parse command line arguments""" - parser = argparse.ArgumentParser(description="Creates UFS configuration file.") + settings["fv_core_nml"] = fv_core_nml_dict - parser.add_argument( - "-r", "--run-dir", dest="run_dir", required=True, help="Run directory." + gfs_physics_nml_dict = {} + gfs_physics_nml_dict.update( + { + "kice": kice or None, + "lsoil": lsoil or None, + "print_diff_pgr": PRINT_DIFF_PGR, + } ) - parser.add_argument( - "-p", - "--path-to-defns", - dest="path_to_defns", - required=True, - help="Path to var_defns file.", + if CPL_AQM: + gfs_physics_nml_dict.update( + { + "cplaqm": True, + "cplocn2atm": False, + "fscav_aero": [ + "aacd:0.0", + "acet:0.0", + "acrolein:0.0", + "acro_primary:0.0", + "ald2:0.0", + "ald2_primary:0.0", + "aldx:0.0", + "benzene:0.0", + "butadiene13:0.0", + "cat1:0.0", + "cl2:0.0", + "clno2:0.0", + "co:0.0", + "cres:0.0", + "cron:0.0", + "ech4:0.0", + "epox:0.0", + "eth:0.0", + "etha:0.0", + "ethy:0.0", + "etoh:0.0", + "facd:0.0", + "fmcl:0.0", + "form:0.0", + "form_primary:0.0", + "gly:0.0", + "glyd:0.0", + "h2o2:0.0", + "hcl:0.0", + "hg:0.0", + "hgiigas:0.0", + "hno3:0.0", + "hocl:0.0", + "hono:0.0", + "hpld:0.0", + "intr:0.0", + "iole:0.0", + "isop:0.0", + "ispd:0.0", + "ispx:0.0", + "ket:0.0", + "meoh:0.0", + "mepx:0.0", + "mgly:0.0", + "n2o5:0.0", + "naph:0.0", + "no:0.0", + "no2:0.0", + "no3:0.0", + "ntr1:0.0", + "ntr2:0.0", + "o3:0.0", + "ole:0.0", + "opan:0.0", + "open:0.0", + "opo3:0.0", + "pacd:0.0", + "pan:0.0", + "panx:0.0", + "par:0.0", + "pcvoc:0.0", + "pna:0.0", + "prpa:0.0", + "rooh:0.0", + "sesq:0.0", + "so2:0.0", + "soaalk:0.0", + "sulf:0.0", + "terp:0.0", + "tol:0.0", + "tolu:0.0", + "vivpo1:0.0", + "vlvoo1:0.0", + "vlvoo2:0.0", + "vlvpo1:0.0", + "vsvoo1:0.0", + "vsvoo2:0.0", + "vsvoo3:0.0", + "vsvpo1:0.0", + "vsvpo2:0.0", + "vsvpo3:0.0", + "xopn:0.0", + "xylmn:0.0", + "*:0.2", + ], + } + ) + settings["gfs_physics_nml"] = gfs_physics_nml_dict + + # + # Add to "settings" the values of those namelist variables that specify + # the paths to fixed files in the FIXam directory. As above, these namelist + # variables are physcs-suite-independent. + # + # Note that the array FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING contains + # the mapping between the namelist variables and the names of the files + # in the FIXam directory. Here, we loop through this array and process + # each element to construct each line of "settings". 
+ # + dummy_run_dir = os.path.join(EXPTDIR, "any_cyc") + if DO_ENSEMBLE: + dummy_run_dir = os.path.join(dummy_run_dir, "any_ensmem") + + regex_search = "^[ ]*([^| ]+)[ ]*[|][ ]*([^| ]+)[ ]*$" + num_nml_vars = len(FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING) + namsfc_dict = {} + for i in range(num_nml_vars): + + mapping = f"{FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING[i]}" + tup = find_pattern_in_str(regex_search, mapping) + nml_var_name = tup[0] + FIXam_fn = tup[1] + + fp = '""' + if FIXam_fn: + fp = os.path.join(FIXam, FIXam_fn) + # + # If not in NCO mode, for portability and brevity, change fp so that it + # is a relative path (relative to any cycle directory immediately under + # the experiment directory). + # + if RUN_ENVIR != "nco": + fp = os.path.relpath(os.path.realpath(fp), start=dummy_run_dir) + # + # Add a line to the variable "settings" that specifies (in a yaml-compliant + # format) the name of the current namelist variable and the value it should + # be set to. + # + namsfc_dict[nml_var_name] = fp + # + # Add namsfc_dict to settings + # + settings["namsfc"] = namsfc_dict + # + # Use netCDF4 when running the North American 3-km domain due to file size. + # + if PREDEF_GRID_NAME == "RRFS_NA_3km": + settings["fms2_io_nml"] = {"netcdf_default_format": "netcdf4"} + + settings_str = cfg_to_yaml_str(settings) + + log_info( + """ + The variable 'settings' specifying values of the weather model's + namelist variables has been set as follows:\n""", + verbose=debug, ) + log_info("\nsettings =\n\n" + settings_str, verbose=debug) + # + # ----------------------------------------------------------------------- + # + # Create a new FV3 namelist file + # + # ----------------------------------------------------------------------- + # + + physics_cfg = get_yaml_config(FV3_NML_YAML_CONFIG_FP) + base_namelist = get_nml_config(FV3_NML_BASE_SUITE_FP) + base_namelist.update_from(physics_cfg[CCPP_PHYS_SUITE]) + base_namelist.update_from(settings) + for sect, values in base_namelist.copy().items(): + if not values: + del base_namelist[sect] + continue + for k, v in values.copy().items(): + if v is None: + del base_namelist[sect][k] + base_namelist.dump(Path(FV3_NML_FP)) + # + # If not running the TN_MAKE_GRID task (which implies the workflow will + # use pregenerated grid files), set the namelist variables specifying + # the paths to surface climatology files. These files are located in + # (or have symlinks that point to them) in the FIXlam directory. + # + # Note that if running the TN_MAKE_GRID task, this action usually cannot + # be performed here but must be performed in that task because the names + # of the surface climatology files depend on the CRES parameter (which is + # the C-resolution of the grid), and this parameter is in most workflow + # configurations is not known until the grid is created. + # + if not expt_config["rocoto"]["tasks"].get("task_make_grid"): + + set_fv3nml_sfc_climo_filenames(flatten_dict(expt_config), debug) + + # + # ----------------------------------------------------------------------- + # + # Add the relevant tendency-based stochastic physics namelist variables to + # "settings" when running with SPPT, SHUM, or SKEB turned on. If running + # with SPP or LSM SPP, set the "new_lscale" variable. Otherwise only + # include an empty "nam_stochy" stanza. 
+ # + # ----------------------------------------------------------------------- + # + settings = {} + settings["gfs_physics_nml"] = { + "do_shum": DO_SHUM, + "do_sppt": DO_SPPT, + "do_skeb": DO_SKEB, + "do_spp": DO_SPP, + "n_var_spp": N_VAR_SPP, + "n_var_lndp": N_VAR_LNDP, + "lndp_type": LNDP_TYPE, + "fhcyc": FHCYC_LSM_SPP_OR_NOT, + } + nam_stochy_dict = {} + if DO_SPPT: + nam_stochy_dict.update( + { + "iseed_sppt": ISEED_SPPT, + "new_lscale": NEW_LSCALE, + "sppt": SPPT_MAG, + "sppt_logit": SPPT_LOGIT, + "sppt_lscale": SPPT_LSCALE, + "sppt_sfclimit": SPPT_SFCLIMIT, + "sppt_tau": SPPT_TSCALE, + "spptint": SPPT_INT, + "use_zmtnblck": USE_ZMTNBLCK, + } + ) + + if DO_SHUM: + nam_stochy_dict.update( + { + "iseed_shum": ISEED_SHUM, + "new_lscale": NEW_LSCALE, + "shum": SHUM_MAG, + "shum_lscale": SHUM_LSCALE, + "shum_tau": SHUM_TSCALE, + "shumint": SHUM_INT, + } + ) + + if DO_SKEB: + nam_stochy_dict.update( + { + "iseed_skeb": ISEED_SKEB, + "new_lscale": NEW_LSCALE, + "skeb": SKEB_MAG, + "skeb_lscale": SKEB_LSCALE, + "skebnorm": SKEBNORM, + "skeb_tau": SKEB_TSCALE, + "skebint": SKEB_INT, + "skeb_vdof": SKEB_VDOF, + } + ) + + if DO_SPP or DO_LSM_SPP: + nam_stochy_dict.update({"new_lscale": NEW_LSCALE}) + + settings["nam_stochy"] = nam_stochy_dict + # + # Add the relevant SPP namelist variables to "settings" when running with + # SPP turned on. Otherwise only include an empty "nam_sppperts" stanza. + # + nam_sppperts_dict = {} + if DO_SPP: + nam_sppperts_dict = { + "iseed_spp": ISEED_SPP, + "spp_lscale": SPP_LSCALE, + "spp_prt_list": SPP_MAG_LIST, + "spp_sigtop1": SPP_SIGTOP1, + "spp_sigtop2": SPP_SIGTOP2, + "spp_stddev_cutoff": SPP_STDDEV_CUTOFF, + "spp_tau": SPP_TSCALE, + "spp_var_list": SPP_VAR_LIST, + } + + settings["nam_sppperts"] = nam_sppperts_dict + # + # Add the relevant LSM SPP namelist variables to "settings" when running with + # LSM SPP turned on. + # + nam_sfcperts_dict = {} + if DO_LSM_SPP: + nam_sfcperts_dict = { + "lndp_type": LNDP_TYPE, + "lndp_model_type": LNDP_MODEL_TYPE, + "lndp_tau": LSM_SPP_TSCALE, + "lndp_lscale": LSM_SPP_LSCALE, + "iseed_lndp": ISEED_LSM_SPP, + "lndp_var_list": LSM_SPP_VAR_LIST, + "lndp_prt_list": LSM_SPP_MAG_LIST, + } - return parser.parse_args(argv) + settings["nam_sfcperts"] = nam_sfcperts_dict + + settings_str = cfg_to_yaml_str(settings) + # + # ----------------------------------------------------------------------- + # + # Generate namelist files with stochastic physics if needed + # + # ----------------------------------------------------------------------- + # + if any((DO_SPP, DO_SPPT, DO_SHUM, DO_SKEB, DO_LSM_SPP)): + realize( + input_config=FV3_NML_FP, + input_format="nml", + output_file=FV3_NML_STOCH_FP, + output_format="nml", + update_config=get_nml_config(settings), + ) + + # + # ----------------------------------------------------------------------- + # + # To have a record of how this experiment/workflow was generated, copy + # the experiment/workflow configuration file to the experiment directo- + # ry. + # + # ----------------------------------------------------------------------- + # + cp_vrfy(os.path.join(ushdir, EXPT_CONFIG_FN), EXPTDIR) + + # + # ----------------------------------------------------------------------- + # + # For convenience, print out the commands that need to be issued on the + # command line in order to launch the workflow and to check its status. + # Also, print out the line that should be placed in the user's cron table + # in order for the workflow to be continually resubmitted. 
+    #
+    # -----------------------------------------------------------------------
+    #
+    if WORKFLOW_MANAGER == "rocoto":
+        wflow_db_fn = f"{os.path.splitext(WFLOW_XML_FN)[0]}.db"
+        rocotorun_cmd = f"rocotorun -w {WFLOW_XML_FN} -d {wflow_db_fn} -v 10"
+        rocotostat_cmd = f"rocotostat -w {WFLOW_XML_FN} -d {wflow_db_fn} -v 10"
+
+        # pylint: disable=line-too-long
+        log_info(
+            f"""
+            To launch the workflow, change location to the experiment directory
+            (EXPTDIR) and issue the rocotorun command, as follows:
+
+              > cd {EXPTDIR}
+              > {rocotorun_cmd}
+
+            To check on the status of the workflow, issue the rocotostat command
+            (also from the experiment directory):
+
+              > {rocotostat_cmd}
+
+            Note that:
+
+            1) The rocotorun command must be issued after the completion of each
+               task in the workflow in order for the workflow to submit the next
+               task(s) to the queue.
+
+            2) In order for the output of the rocotostat command to be up-to-date,
+               the rocotorun command must be issued immediately before issuing the
+               rocotostat command.
+
+            For automatic resubmission of the workflow (say every {CRON_RELAUNCH_INTVL_MNTS} minutes), the
+            following line can be added to the user's crontab (use 'crontab -e' to
+            edit the cron table):
+
+            */{CRON_RELAUNCH_INTVL_MNTS} * * * * cd {EXPTDIR} && ./launch_FV3LAM_wflow.sh called_from_cron="TRUE"
+            """
+        )
+        # pylint: enable=line-too-long
+
+    # If we got to this point everything was successful: move the log
+    # file to the experiment directory.
+    mv_vrfy(logfile, EXPTDIR)
+
+    return EXPTDIR
+
+
+def setup_logging(
+    logfile: str = "log.generate_FV3LAM_wflow", debug: bool = False
+) -> None:
+    """
+    Sets up logging, printing high-priority (INFO and higher) messages to screen, and printing all
+    messages with detailed timing and routine info in the specified text file.
+
+    If debug = True, print all messages to both screen and log file.
+    """
+    logging.getLogger().setLevel(logging.DEBUG)
+
+    formatter = logging.Formatter("%(name)-22s %(levelname)-8s %(message)s")
+
+    fh = logging.FileHandler(logfile, mode="w")
+    fh.setLevel(logging.DEBUG)
+    fh.setFormatter(formatter)
+    logging.getLogger().addHandler(fh)
+    logging.debug(f"Finished setting up debug file logging in {logfile}")
+
+    # If there are already multiple handlers, that means
+    # generate_FV3LAM_workflow was called from another function.
+    # In that case, do not change the console (print-to-screen) logging.
+    if len(logging.getLogger().handlers) > 1:
+        return
+
+    console = logging.StreamHandler()
+    if debug:
+        console.setLevel(logging.DEBUG)
+    else:
+        console.setLevel(logging.INFO)
+    logging.getLogger().addHandler(console)
+    logging.debug("Logging set up successfully")


if __name__ == "__main__":
+
+    # Parse arguments
+    parser = argparse.ArgumentParser(
+        description="Script for setting up a forecast and creating a workflow "
+        "according to the parameters specified in the config file\n"
+    )
+
+    parser.add_argument(
+        "-d",
+        "--debug",
+        action="store_true",
+        help="Script will be run in debug mode with more verbose output",
+    )
+    pargs = parser.parse_args()
+
+    USHdir = os.path.dirname(os.path.abspath(__file__))
+    wflow_logfile = f"{USHdir}/log.generate_FV3LAM_wflow"
+
+    # Call the generate_FV3LAM_wflow function defined above to generate the
+    # experiment/workflow.
+ try: + expt_dir = generate_FV3LAM_wflow(USHdir, wflow_logfile, pargs.debug) + except: # pylint: disable=bare-except + logging.exception( + dedent( + f""" + ********************************************************************* + FATAL ERROR: + Experiment generation failed. See the error message(s) printed below. + For more detailed information, check the log file from the workflow + generation script: {wflow_logfile} + *********************************************************************\n + """ + ) + ) + sys.exit(1) + + # pylint: disable=undefined-variable + # Note workflow generation completion + log_info( + f""" + ======================================================================== + + Experiment generation completed. The experiment directory is: + + EXPTDIR='{EXPTDIR}' + + ======================================================================== + """ ) diff --git a/ush/setup.py b/ush/setup.py index 1e3e3d876..8177af919 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -223,13 +223,15 @@ def add_jobname(tasks): update_dict(cfg_d, cfg_d) # Load one more if running Coupled AQM - if cfg_d['cpl_aqm_parm']['CPL_AQM']: + if cfg_d["cpl_aqm_parm"]["CPL_AQM"]: cfg_aqm = get_yaml_config(Path(ushdir, "config_defaults_aqm.yaml")) update_dict(cfg_aqm, cfg_d) # Load CCPP suite-specific settings - ccpp_suite = cfg_d['workflow']['CCPP_PHYS_SUITE'] - ccpp_cfg = get_yaml_config(Path(ushdir, "ccpp_suites_defaults.yaml")).get(ccpp_suite, {}) + ccpp_suite = cfg_d["workflow"]["CCPP_PHYS_SUITE"] + ccpp_cfg = get_yaml_config(Path(ushdir, "ccpp_suites_defaults.yaml")).get( + ccpp_suite, {} + ) update_dict(ccpp_cfg, cfg_d) # Load external model-specific settings From 8dde45fa7053ac47ed9452c41d5f44525de54351 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Thu, 19 Sep 2024 11:33:37 -0500 Subject: [PATCH 12/47] also revert utils.py --- tests/WE2E/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/WE2E/utils.py b/tests/WE2E/utils.py index adbdfd024..23e741271 100755 --- a/tests/WE2E/utils.py +++ b/tests/WE2E/utils.py @@ -182,7 +182,7 @@ def calculate_core_hours(expts_dict: dict) -> dict: f"{vardefs_file}\ndoes not exist!\n\nDropping experiment from summary" ) continue - logging.debug(f'Reading variable definitions file {vardefs_file}') + logging.debug(f"Reading variable definitions file {vardefs_file}") vardefs = get_yaml_config(vardefs_file) vdf = flatten_dict(vardefs) cores_per_node = vdf["NCORES_PER_NODE"] From 9f163426c52f3346a3fee7d9b23a3c4b607de149 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Thu, 19 Sep 2024 14:14:34 -0500 Subject: [PATCH 13/47] Revert remaining formatting --- tests/WE2E/utils.py | 329 ++++------- ush/create_aqm_rc_file.py | 113 ++-- ush/create_diag_table_file.py | 12 +- ush/create_model_configure_file.py | 232 +++++++- ush/create_ufs_configure_file.py | 897 +++-------------------------- ush/generate_FV3LAM_wflow.py | 266 +++------ ush/link_fix.py | 9 +- ush/setup.py | 290 ++++------ 8 files changed, 669 insertions(+), 1479 deletions(-) diff --git a/tests/WE2E/utils.py b/tests/WE2E/utils.py index 23e741271..2cb73b626 100755 --- a/tests/WE2E/utils.py +++ b/tests/WE2E/utils.py @@ -28,8 +28,6 @@ REPORT_WIDTH = 100 EXPT_COLUMN_WIDTH = 65 TASK_COLUMN_WIDTH = 40 - - def print_WE2E_summary(expts_dict: dict, debug: bool = False): """Function that creates a summary for the specified experiment @@ -44,48 +42,38 @@ def print_WE2E_summary(expts_dict: dict, debug: bool = False): # Create summary table as list of strings summary = [] - summary.append("-" * REPORT_WIDTH) - 
summary.append( - f'Experiment name {" "*(EXPT_COLUMN_WIDTH-17)} | Status | Core hours used ' - ) - summary.append("-" * REPORT_WIDTH) + summary.append('-'*REPORT_WIDTH) + summary.append(f'Experiment name {" "*(EXPT_COLUMN_WIDTH-17)} | Status | Core hours used ') + summary.append('-'*REPORT_WIDTH) total_core_hours = 0 statuses = [] expt_details = [] for expt in expts_dict: statuses.append(expts_dict[expt]["status"]) ch = 0 - expt_details.append("") - expt_details.append("-" * REPORT_WIDTH) - expt_details.append(f"Detailed summary of experiment {expt}") + expt_details.append('') + expt_details.append('-'*REPORT_WIDTH) + expt_details.append(f'Detailed summary of experiment {expt}') expt_details.append(f"in directory {expts_dict[expt]['expt_dir']}") - expt_details.append( - f'{" "*TASK_COLUMN_WIDTH}| Status | Walltime | Core hours used' - ) - expt_details.append("-" * REPORT_WIDTH) + expt_details.append(f'{" "*TASK_COLUMN_WIDTH}| Status | Walltime | Core hours used') + expt_details.append('-'*REPORT_WIDTH) for task in expts_dict[expt]: # Skip non-task entries - if task in ["expt_dir", "status", "start_time", "walltime"]: + if task in ["expt_dir","status","start_time","walltime"]: continue status = expts_dict[expt][task]["status"] walltime = expts_dict[expt][task]["walltime"] - expt_details.append( - f"{task[:TASK_COLUMN_WIDTH]:<{TASK_COLUMN_WIDTH}s} {status:<12s} {walltime:>10.1f}" - ) + expt_details.append(f'{task[:TASK_COLUMN_WIDTH]:<{TASK_COLUMN_WIDTH}s} {status:<12s} {walltime:>10.1f}') if "core_hours" in expts_dict[expt][task]: task_ch = expts_dict[expt][task]["core_hours"] ch += task_ch - expt_details[-1] = f"{expt_details[-1]} {task_ch:>13.2f}" + expt_details[-1] = f'{expt_details[-1]} {task_ch:>13.2f}' else: - expt_details[-1] = f"{expt_details[-1]} -" - expt_details.append("-" * REPORT_WIDTH) - expt_details.append( - f'Total {" "*(TASK_COLUMN_WIDTH - 6)} {statuses[-1]:<12s} {" "*11} {ch:>13.2f}' - ) - summary.append( - f"{expt[:EXPT_COLUMN_WIDTH]:<{EXPT_COLUMN_WIDTH}s} {statuses[-1]:<12s} {ch:>13.2f}" - ) + expt_details[-1] = f'{expt_details[-1]} -' + expt_details.append('-'*REPORT_WIDTH) + expt_details.append(f'Total {" "*(TASK_COLUMN_WIDTH - 6)} {statuses[-1]:<12s} {" "*11} {ch:>13.2f}') + summary.append(f'{expt[:EXPT_COLUMN_WIDTH]:<{EXPT_COLUMN_WIDTH}s} {statuses[-1]:<12s} {ch:>13.2f}') total_core_hours += ch if "ERROR" in statuses: total_status = "ERROR" @@ -99,30 +87,25 @@ def print_WE2E_summary(expts_dict: dict, debug: bool = False): total_status = "COMPLETE" else: total_status = "UNKNOWN" - summary.append("-" * REPORT_WIDTH) - summary.append( - f'Total {" "*(EXPT_COLUMN_WIDTH - 6)} {total_status:<12s} {total_core_hours:>13.2f}' - ) + summary.append('-'*REPORT_WIDTH) + summary.append(f'Total {" "*(EXPT_COLUMN_WIDTH - 6)} {total_status:<12s} {total_core_hours:>13.2f}') # Print summary to screen for line in summary: print(line) # Print summary and details to file - summary_file = os.path.join( - os.path.dirname(expts_dict[expt]["expt_dir"]), - f'WE2E_summary_{datetime.now().strftime("%Y%m%d%H%M%S")}.txt', - ) + summary_file = os.path.join(os.path.dirname(expts_dict[expt]["expt_dir"]), + f'WE2E_summary_{datetime.now().strftime("%Y%m%d%H%M%S")}.txt') print(f"\nDetailed summary written to {summary_file}\n") - with open(summary_file, "w", encoding="utf-8") as f: + with open(summary_file, 'w', encoding="utf-8") as f: for line in summary: f.write(f"{line}\n") f.write("\nDetailed summary of each experiment:\n") for line in expt_details: f.write(f"{line}\n") - def 
create_expts_dict(expt_dir: str) -> dict: """ Function takes in a directory, searches that directory for subdirectories containing @@ -136,28 +119,27 @@ def create_expts_dict(expt_dir: str) -> dict: """ contents = sorted(os.listdir(expt_dir)) - expts_dict = dict() + expts_dict=dict() for item in contents: # Look for FV3LAM_wflow.xml to indicate directories with experiments in them fullpath = os.path.join(expt_dir, item) if not os.path.isdir(fullpath): continue - xmlfile = os.path.join(expt_dir, item, "FV3LAM_wflow.xml") + xmlfile = os.path.join(expt_dir, item, 'FV3LAM_wflow.xml') if os.path.isfile(xmlfile): expts_dict[item] = dict() - expts_dict[item].update({"expt_dir": os.path.join(expt_dir, item)}) + expts_dict[item].update({"expt_dir": os.path.join(expt_dir,item)}) expts_dict[item].update({"status": "CREATED"}) else: - logging.debug(f"Skipping directory {item}, experiment XML file not found") + logging.debug(f'Skipping directory {item}, experiment XML file not found') continue - # Update the experiment dictionary + #Update the experiment dictionary logging.debug(f"Reading status of experiment {item}") - update_expt_status(expts_dict[item], item, True, False, False) + update_expt_status(expts_dict[item],item,True,False,False) summary_file = f'WE2E_tests_{datetime.now().strftime("%Y%m%d%H%M%S")}.yaml' return summary_file, expts_dict - def calculate_core_hours(expts_dict: dict) -> dict: """ Function takes in an experiment dictionary, reads the var_defns file for necessary information, @@ -173,60 +155,49 @@ def calculate_core_hours(expts_dict: dict) -> dict: for expt in expts_dict: # Read variable definitions file - vardefs_file = os.path.join(expts_dict[expt]["expt_dir"], "var_defns.yaml") + vardefs_file = os.path.join(expts_dict[expt]["expt_dir"],"var_defns.yaml") if not os.path.isfile(vardefs_file): - logging.warning( - f"\nWARNING: For experiment {expt}, variable definitions file" - ) - logging.warning( - f"{vardefs_file}\ndoes not exist!\n\nDropping experiment from summary" - ) + logging.warning(f"\nWARNING: For experiment {expt}, variable definitions file") + logging.warning(f"{vardefs_file}\ndoes not exist!\n\nDropping experiment from summary") continue - logging.debug(f"Reading variable definitions file {vardefs_file}") + logging.debug(f'Reading variable definitions file {vardefs_file}') vardefs = get_yaml_config(vardefs_file) vdf = flatten_dict(vardefs) cores_per_node = vdf["NCORES_PER_NODE"] for task in expts_dict[expt]: # Skip non-task entries - if task in ["expt_dir", "status", "start_time", "walltime"]: + if task in ["expt_dir","status","start_time","walltime"]: continue # Cycle is last 12 characters, task name is rest (minus separating underscore) taskname = task[:-13] # Handle task names that have ensemble and/or fhr info appended with regex - taskname = re.sub("_mem\d{3}", "", taskname) - taskname = re.sub("_f\d{3}", "", taskname) - nnodes_var = f"NNODES_{taskname.upper()}" + taskname = re.sub('_mem\d{3}', '', taskname) + taskname = re.sub('_f\d{3}', '', taskname) + nnodes_var = f'NNODES_{taskname.upper()}' if nnodes_var in vdf: nnodes = vdf[nnodes_var] # Users are charged for full use of nodes, so core hours = CPN * nodes * time in hrs - core_hours = ( - cores_per_node * nnodes * expts_dict[expt][task]["walltime"] / 3600 - ) - expts_dict[expt][task]["exact_count"] = True + core_hours = cores_per_node * nnodes * expts_dict[expt][task]['walltime'] / 3600 + expts_dict[expt][task]['exact_count'] = True else: # If we can't find the number of nodes, assume full usage (may 
undercount) - core_hours = ( - expts_dict[expt][task]["cores"] - * expts_dict[expt][task]["walltime"] - / 3600 - ) - expts_dict[expt][task]["exact_count"] = False - expts_dict[expt][task]["core_hours"] = round(core_hours, 2) + core_hours = expts_dict[expt][task]['cores'] * \ + expts_dict[expt][task]['walltime'] / 3600 + expts_dict[expt][task]['exact_count'] = False + expts_dict[expt][task]['core_hours'] = round(core_hours,2) return expts_dict def write_monitor_file(monitor_file: str, expts_dict: dict): try: - with open(monitor_file, "w", encoding="utf-8") as f: + with open(monitor_file,"w", encoding="utf-8") as f: f.write("### WARNING ###\n") - f.write( - "### THIS FILE IS AUTO_GENERATED AND REGULARLY OVER-WRITTEN BY WORKFLOW SCRIPTS\n" - ) + f.write("### THIS FILE IS AUTO_GENERATED AND REGULARLY OVER-WRITTEN BY WORKFLOW SCRIPTS\n") f.write("### EDITS MAY RESULT IN MISBEHAVIOR OF EXPERIMENTS RUNNING\n") f.writelines(cfg_to_yaml_str(expts_dict)) except KeyboardInterrupt: logging.warning("\nRefusing to interrupt during file write; try again\n") - write_monitor_file(monitor_file, expts_dict) + write_monitor_file(monitor_file,expts_dict) except: logging.fatal("\n********************************\n") logging.fatal(f"WARNING WARNING WARNING\n") @@ -236,13 +207,8 @@ def write_monitor_file(monitor_file: str, expts_dict: dict): raise -def update_expt_status( - expt: dict, - name: str, - refresh: bool = False, - debug: bool = False, - submit: bool = True, -) -> dict: +def update_expt_status(expt: dict, name: str, refresh: bool = False, debug: bool = False, + submit: bool = True) -> dict: """ This function reads the dictionary showing the location of a given experiment, runs a `rocotorun` command to update the experiment (running new jobs and updating the status of @@ -295,8 +261,8 @@ def update_expt_status( dict: The updated experiment dictionary. 
""" - # If we are no longer tracking this experiment, return unchanged - if (expt["status"] in ["DEAD", "ERROR", "COMPLETE"]) and not refresh: + #If we are no longer tracking this experiment, return unchanged + if (expt["status"] in ['DEAD','ERROR','COMPLETE']) and not refresh: return expt # Update experiment, read rocoto database rocoto_db = f"{expt['expt_dir']}/FV3LAM_wflow.db" @@ -305,54 +271,35 @@ def update_expt_status( if refresh: logging.debug(f"Updating database for experiment {name}") if debug: - rocotorun_cmd = [ - "rocotorun", - f"-w {rocoto_xml}", - f"-d {rocoto_db}", - "-v 10", - ] - p = subprocess.run( - rocotorun_cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - text=True, - ) + rocotorun_cmd = ["rocotorun", f"-w {rocoto_xml}", f"-d {rocoto_db}", "-v 10"] + p = subprocess.run(rocotorun_cmd, stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, text=True) logging.debug(p.stdout) - # Run rocotorun again to get around rocotobqserver proliferation issue - p = subprocess.run( - rocotorun_cmd, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - text=True, - ) + #Run rocotorun again to get around rocotobqserver proliferation issue + p = subprocess.run(rocotorun_cmd, stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, text=True) logging.debug(p.stdout) else: rocotorun_cmd = ["rocotorun", f"-w {rocoto_xml}", f"-d {rocoto_db}"] subprocess.run(rocotorun_cmd) - # Run rocotorun again to get around rocotobqserver proliferation issue + #Run rocotorun again to get around rocotobqserver proliferation issue subprocess.run(rocotorun_cmd) - logging.debug( - f"Reading database for experiment {name}, updating experiment dictionary" - ) + logging.debug(f"Reading database for experiment {name}, updating experiment dictionary") try: # This section of code queries the "job" table of the rocoto database, returning a list # of tuples containing the taskname, cycle, and state of each job respectively with closing(sqlite3.connect(rocoto_db)) as connection: with closing(connection.cursor()) as cur: - db = cur.execute( - "SELECT taskname,cycle,state,cores,duration from jobs" - ).fetchall() + db = cur.execute('SELECT taskname,cycle,state,cores,duration from jobs').fetchall() except: # Some platforms (including Hera) can have a problem with rocoto jobs not submitting # properly due to build-ups of background processes. 
This will resolve over time as # rocotorun continues to be called, so let's only treat this as an error if we are # past the first initial iteration of job submissions if not refresh: - logging.warning( - f"Unable to read database {rocoto_db}\nCan not track experiment {name}" - ) + logging.warning(f"Unable to read database {rocoto_db}\nCan not track experiment {name}") expt["status"] = "ERROR" return expt @@ -361,7 +308,7 @@ def update_expt_status( # For each entry from rocoto database, store that task's info under a dictionary key named # TASKNAME_CYCLE; Cycle comes from the database in Unix Time (seconds), so convert to # human-readable - cycle = datetime.utcfromtimestamp(task[1]).strftime("%Y%m%d%H%M") + cycle = datetime.utcfromtimestamp(task[1]).strftime('%Y%m%d%H%M') if f"{task[0]}_{cycle}" not in expt: expt[f"{task[0]}_{cycle}"] = dict() expt[f"{task[0]}_{cycle}"]["status"] = task[2] @@ -371,17 +318,15 @@ def update_expt_status( statuses = list() for task in expt: # Skip non-task entries - if task in ["expt_dir", "status", "start_time", "walltime"]: + if task in ["expt_dir","status","start_time","walltime"]: continue statuses.append(expt[task]["status"]) if "DEAD" in statuses: still_live = ["RUNNING", "SUBMITTING", "QUEUED", "FAILED"] if any(status in still_live for status in statuses): - logging.debug( - f"DEAD job in experiment {name}; continuing to track until all jobs are " - "complete" - ) + logging.debug(f'DEAD job in experiment {name}; continuing to track until all jobs are '\ + 'complete') expt["status"] = "DYING" else: expt["status"] = "DEAD" @@ -404,41 +349,33 @@ def update_expt_status( # rocotorun continues to be called, so let's only print this warning message if we # are past the first initial iteration of job submissions if not refresh: - logging.warning( - dedent( - f"""WARNING:Tasks have not yet been submitted for experiment {name}; + logging.warning(dedent( + f"""WARNING:Tasks have not yet been submitted for experiment {name}; it could be that your jobs are being throttled at the system level. If you continue to see this message, there may be an error with your experiment configuration, such as an incorrect queue or account number. You can use ctrl-c to pause this script and inspect log files. - """ - ) - ) + """)) else: logging.fatal("Some kind of horrible thing has happened") - raise ValueError( - dedent( - f"""Some kind of horrible thing has happened to the experiment status + raise ValueError(dedent( + f"""Some kind of horrible thing has happened to the experiment status for experiment {name} status is {expt["status"]} - all task statuses are {statuses}""" - ) - ) + all task statuses are {statuses}""")) # Final check for experiments where all tasks are "SUCCEEDED"; since the rocoto database does # not include info on jobs that have not been submitted yet, use rocotostat to check that # there are no un-submitted jobs remaining. - if expt["status"] in ["SUCCEEDED", "STALLED", "STUCK"]: - expt = compare_rocotostat(expt, name) + if expt["status"] in ["SUCCEEDED","STALLED","STUCK"]: + expt = compare_rocotostat(expt,name) return expt - -def update_expt_status_parallel( - expts_dict: dict, procs: int, refresh: bool = False, debug: bool = False -) -> dict: +def update_expt_status_parallel(expts_dict: dict, procs: int, refresh: bool = False, + debug: bool = False) -> dict: """ This function updates an entire set of experiments in parallel, drastically speeding up the process if given enough parallel processes. 
Given a dictionary of experiments, it will @@ -460,7 +397,7 @@ def update_expt_status_parallel( args = [] # Define a tuple of arguments to pass to starmap for expt in expts_dict: - args.append((expts_dict[expt], expt, refresh, debug)) + args.append( (expts_dict[expt],expt,refresh,debug) ) # call update_expt_status() in parallel with Pool(processes=procs) as pool: @@ -475,6 +412,7 @@ def update_expt_status_parallel( return expts_dict + def print_test_info(txtfile: str = "WE2E_test_info.txt") -> None: """Prints a pipe ( | ) delimited text file containing summaries of each test defined by a config file in test_configs/* @@ -483,14 +421,14 @@ def print_test_info(txtfile: str = "WE2E_test_info.txt") -> None: txtfile (str): File name for test details file """ - testfiles = glob.glob("test_configs/**/config*.yaml", recursive=True) + testfiles = glob.glob('test_configs/**/config*.yaml', recursive=True) testdict = dict() links = dict() for testfile in testfiles: # Calculate relative cost of test based on config settings using legacy script cost_array = calculate_cost(testfile) cost = cost_array[1] / cost_array[3] - # Decompose full file path into relevant bits + #Decompose full file path into relevant bits pathname, filename = os.path.split(testfile) testname = filename[7:-5] dirname = os.path.basename(os.path.normpath(pathname)) @@ -507,20 +445,16 @@ def print_test_info(txtfile: str = "WE2E_test_info.txt") -> None: testdict[testname] = load_config_file(testfile) testdict[testname]["directory"] = dirname testdict[testname]["cost"] = cost - # Calculate number of forecasts for a cycling run - if ( - testdict[testname]["workflow"]["DATE_FIRST_CYCL"] - != testdict[testname]["workflow"]["DATE_LAST_CYCL"] - ): - begin = datetime.strptime( - testdict[testname]["workflow"]["DATE_FIRST_CYCL"], "%Y%m%d%H" - ) - end = datetime.strptime( - testdict[testname]["workflow"]["DATE_LAST_CYCL"], "%Y%m%d%H" - ) + #Calculate number of forecasts for a cycling run + if testdict[testname]['workflow']["DATE_FIRST_CYCL"] != \ + testdict[testname]['workflow']["DATE_LAST_CYCL"]: + begin = datetime.strptime(testdict[testname]['workflow']["DATE_FIRST_CYCL"], + '%Y%m%d%H') + end = datetime.strptime(testdict[testname]['workflow']["DATE_LAST_CYCL"], + '%Y%m%d%H') diff = end - begin diffh = diff.total_seconds() // 3600 - nf = diffh // testdict[testname]["workflow"]["INCR_CYCL_FREQ"] + nf = diffh // testdict[testname]['workflow']["INCR_CYCL_FREQ"] testdict[testname]["num_fcsts"] = nf else: testdict[testname]["num_fcsts"] = 1 @@ -532,59 +466,49 @@ def print_test_info(txtfile: str = "WE2E_test_info.txt") -> None: testdict[link_name]["alternate_directory_name"] = alt_dirname # Print the file - with open(txtfile, "w", encoding="utf-8") as f: + with open(txtfile, 'w', encoding="utf-8") as f: # Field delimiter character - d = '" | "' + d = "\" | \"" txt_output = ['"Test Name'] - txt_output.append(f"(Subdirectory){d}Alternate Test Names") - txt_output.append( - f"(Subdirectories){d}Test Purpose/Description{d}Relative Cost of Running Dynamics" - ) - txt_output.append( - f"(1 corresponds to running a 6-hour forecast on the RRFS_CONUS_25km predefined grid using the default time step){d}PREDEF_GRID_NAME{d}CCPP_PHYS_SUITE{d}EXTRN_MDL_NAME_ICS{d}EXTRN_MDL_NAME_LBCS{d}DATE_FIRST_CYCL{d}DATE_LAST_CYCL{d}INCR_CYCL_FREQ{d}FCST_LEN_HRS{d}DT_ATMOS{d}LBC_SPEC_INTVL_HRS{d}NUM_ENS_MEMBERS" - ) + txt_output.append(f'(Subdirectory){d}Alternate Test Names') + txt_output.append(f'(Subdirectories){d}Test Purpose/Description{d}Relative Cost of Running 
Dynamics') + txt_output.append(f'(1 corresponds to running a 6-hour forecast on the RRFS_CONUS_25km predefined grid using the default time step){d}PREDEF_GRID_NAME{d}CCPP_PHYS_SUITE{d}EXTRN_MDL_NAME_ICS{d}EXTRN_MDL_NAME_LBCS{d}DATE_FIRST_CYCL{d}DATE_LAST_CYCL{d}INCR_CYCL_FREQ{d}FCST_LEN_HRS{d}DT_ATMOS{d}LBC_SPEC_INTVL_HRS{d}NUM_ENS_MEMBERS') for line in txt_output: f.write(f"{line}\n") for expt in testdict: - f.write(f'"{expt}\n(') + f.write(f"\"{expt}\n(") f.write(f"{testdict[expt]['directory']}){d}") if "alternate_name" in testdict[expt]: - f.write( - f"{testdict[expt]['alternate_name']}\n" - f"({testdict[expt]['alternate_directory_name']}){d}" - ) + f.write(f"{testdict[expt]['alternate_name']}\n"\ + f"({testdict[expt]['alternate_directory_name']}){d}") else: f.write(f"{d}\n") - desc = testdict[expt]["metadata"]["description"].splitlines() + desc = testdict[expt]['metadata']['description'].splitlines() for line in desc[:-1]: f.write(f" {line}\n") f.write(f" {desc[-1]}") - # Write test relative cost and number of test forecasts (for cycling runs) - f.write( - f"{d}'{round(testdict[expt]['cost'],2)}{d}'{round(testdict[expt]['num_fcsts'])}" - ) + #Write test relative cost and number of test forecasts (for cycling runs) + f.write(f"{d}'{round(testdict[expt]['cost'],2)}{d}'{round(testdict[expt]['num_fcsts'])}") # Bundle various variables with their corresponding sections for more compact coding - key_pairs = [ - ("workflow", "PREDEF_GRID_NAME"), - ("workflow", "CCPP_PHYS_SUITE"), - ("task_get_extrn_ics", "EXTRN_MDL_NAME_ICS"), - ("task_get_extrn_lbcs", "EXTRN_MDL_NAME_LBCS"), - ("workflow", "DATE_FIRST_CYCL"), - ("workflow", "DATE_LAST_CYCL"), - ("workflow", "INCR_CYCL_FREQ"), - ("workflow", "FCST_LEN_HRS"), - ("task_run_fcst", "DT_ATMOS"), - ("task_get_extrn_lbcs", "LBC_SPEC_INTVL_HRS"), - ("global", "NUM_ENS_MEMBERS"), - ] + key_pairs = [ ('workflow', 'PREDEF_GRID_NAME'), + ('workflow', 'CCPP_PHYS_SUITE'), + ('task_get_extrn_ics', 'EXTRN_MDL_NAME_ICS'), + ('task_get_extrn_lbcs', 'EXTRN_MDL_NAME_LBCS'), + ('workflow', 'DATE_FIRST_CYCL'), + ('workflow', 'DATE_LAST_CYCL'), + ('workflow', 'INCR_CYCL_FREQ'), + ('workflow', 'FCST_LEN_HRS'), + ('task_run_fcst', 'DT_ATMOS'), + ('task_get_extrn_lbcs', 'LBC_SPEC_INTVL_HRS'), + ('global', 'NUM_ENS_MEMBERS') ] for key1, key2 in key_pairs: f.write(f"{d}{testdict[expt].get(key1, {}).get(key2, '')}") f.write("\n") -def compare_rocotostat(expt_dict, name): +def compare_rocotostat(expt_dict,name): """Reads the dictionary showing the location of a given experiment, runs a `rocotostat` command to get the full set of tasks for the experiment, and compares the two to see if there are any unsubmitted tasks remaining. @@ -594,32 +518,27 @@ def compare_rocotostat(expt_dict, name): rocoto_db = f"{expt_dict['expt_dir']}/FV3LAM_wflow.db" rocoto_xml = f"{expt_dict['expt_dir']}/FV3LAM_wflow.xml" rocotorun_cmd = ["rocotostat", f"-w {rocoto_xml}", f"-d {rocoto_db}", "-v 10"] - p = subprocess.run( - rocotorun_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True - ) + p = subprocess.run(rocotorun_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True) rsout = p.stdout # Parse each line of rocotostat output, extracting relevant information untracked_tasks = [] - for line in rsout.split("\n"): + for line in rsout.split('\n'): # Skip blank lines and dividing lines of '=====...' 
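# For context on the filtering below: rocotostat prints a header row, runs of
# '=' divider lines, and one row per (cycle, task) pair. A self-contained
# sketch of the same skip rules (the sample text is hypothetical):
def tasknames_from_rocotostat(rsout: str) -> list:
    names = []
    for line in rsout.split("\n"):
        if not line or line[0] == "=":
            continue
        cols = line.split()
        if cols[0] == "CYCLE":
            continue
        # Keys combine task name and cycle, matching update_expt_status()
        names.append(f"{cols[1]}_{cols[0]}")
    return names

sample = "CYCLE TASK JOBID STATE\n=====\n202401011200 make_grid 1 SUCCEEDED"
assert tasknames_from_rocotostat(sample) == ["make_grid_202401011200"]
# The loop continues below with the same blank-line and divider checks: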
if not line: continue - if line[0] == "=": + if line[0] == '=': continue line_array = line.split() # Skip header lines - if ( - line_array[0] == "CYCLE" - or line_array[0] == "/apps/rocoto/1.3.3/lib/workflowmgr/launchserver.rb:40:" - ): + if line_array[0] == 'CYCLE' or line_array[0] == '/apps/rocoto/1.3.3/lib/workflowmgr/launchserver.rb:40:': continue # We should now just have lines describing jobs, in the form: # line_array = ['cycle','task','jobid','status','exit status','num tries','walltime'] # As defined in update_expt_status(), the "task names" in the dictionary are a combination # of the task name and cycle - taskname = f"{line_array[1]}_{line_array[0]}" + taskname = f'{line_array[1]}_{line_array[0]}' # If we're already tracking this task, continue if expt_dict.get(taskname): @@ -630,17 +549,15 @@ def compare_rocotostat(expt_dict, name): if untracked_tasks: # We want to give this a couple loops before reporting that it is "stuck" - if expt_dict["status"] == "SUCCEEDED": - expt_dict["status"] = "STALLED" - elif expt_dict["status"] == "STALLED": - expt_dict["status"] = "STUCK" - elif expt_dict["status"] == "STUCK": + if expt_dict['status'] == 'SUCCEEDED': + expt_dict['status'] = 'STALLED' + elif expt_dict['status'] == 'STALLED': + expt_dict['status'] = 'STUCK' + elif expt_dict['status'] == 'STUCK': msg = f"WARNING: For experiment {name}, there are jobs that are not being submitted:" for ut in untracked_tasks: msg += ut - msg = ( - msg - + f"""WARNING: For experiment {name}, + msg = msg + f"""WARNING: For experiment {name}, there are some jobs that are not being submitted. It could be that your jobs are being throttled at the system level, or some task dependencies have not been met. @@ -650,19 +567,15 @@ def compare_rocotostat(expt_dict, name): You can use ctrl-c to pause this script and inspect log files. """ - ) logging.warning(dedent(msg)) else: logging.fatal("Some kind of horrible thing has happened") - raise ValueError( - dedent( - f"""Some kind of horrible thing has happened to the experiment status + raise ValueError(dedent( + f"""Some kind of horrible thing has happened to the experiment status for experiment {name} status is {expt_dict["status"]} - untracked tasknames are {untracked_tasks}""" - ) - ) + untracked tasknames are {untracked_tasks}""")) else: expt_dict["status"] = "COMPLETE" - return expt_dict + return expt_dict \ No newline at end of file diff --git a/ush/create_aqm_rc_file.py b/ush/create_aqm_rc_file.py index e740c85e4..8c3576983 100644 --- a/ush/create_aqm_rc_file.py +++ b/ush/create_aqm_rc_file.py @@ -22,7 +22,7 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): - """Creates an aqm.rc file in the specified run directory + """ Creates an aqm.rc file in the specified run directory Args: cdate: cycle date @@ -34,66 +34,61 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): print_input_args(locals()) - # import all environment variables + #import all environment variables import_vars() - # pylint: disable=undefined-variable + #pylint: disable=undefined-variable # - # ----------------------------------------------------------------------- + #----------------------------------------------------------------------- # # Create the aqm.rc file in the specified run directory. 
# - # ----------------------------------------------------------------------- + #----------------------------------------------------------------------- # - print_info_msg( - f''' + print_info_msg(f''' Creating the aqm.rc file (\"{AQM_RC_FN}\") in the specified run directory (run_dir): - run_dir = \"{run_dir}\"''', - verbose=VERBOSE, - ) + run_dir = \"{run_dir}\"''', verbose=VERBOSE) # # Set output file path # - aqm_rc_fp = os.path.join(run_dir, AQM_RC_FN) + aqm_rc_fp=os.path.join(run_dir, AQM_RC_FN) # # Extract from cdate the starting year, month, and day of the forecast. # - yyyymmdd = cdate.strftime("%Y%m%d") - mm = f"{cdate.month:02d}" # pylint: disable=invalid-name - hh = f"{cdate.hour:02d}" # pylint: disable=invalid-name + yyyymmdd=cdate.strftime('%Y%m%d') + mm=f"{cdate.month:02d}" # pylint: disable=invalid-name + hh=f"{cdate.hour:02d}" # pylint: disable=invalid-name # # Set parameters in the aqm.rc file. # - aqm_rc_bio_file_fp = os.path.join(FIXaqm, "bio", AQM_BIO_FILE) + aqm_rc_bio_file_fp=os.path.join(FIXaqm,"bio", AQM_BIO_FILE) # Fire config - aqm_rc_fire_file_fp = os.path.join( - COMIN, f"{AQM_FIRE_FILE_PREFIX}_{yyyymmdd}_t{hh}z{AQM_FIRE_FILE_SUFFIX}" - ) + aqm_rc_fire_file_fp=os.path.join( + COMIN, + f"{AQM_FIRE_FILE_PREFIX}_{yyyymmdd}_t{hh}z{AQM_FIRE_FILE_SUFFIX}" + ) # Dust config - aqm_rc_dust_file_fp = os.path.join( - FIXaqm, - "dust", - f"{AQM_DUST_FILE_PREFIX}_{PREDEF_GRID_NAME}{AQM_DUST_FILE_SUFFIX}", - ) + aqm_rc_dust_file_fp=os.path.join( + FIXaqm,"dust", + f"{AQM_DUST_FILE_PREFIX}_{PREDEF_GRID_NAME}{AQM_DUST_FILE_SUFFIX}", + ) # Canopy config - aqm_rc_canopy_file_fp = os.path.join( - FIXaqm, - "canopy", - PREDEF_GRID_NAME, + aqm_rc_canopy_file_fp=os.path.join( + FIXaqm,"canopy",PREDEF_GRID_NAME, f"{AQM_CANOPY_FILE_PREFIX}.{mm}{AQM_CANOPY_FILE_SUFFIX}", - ) + ) # - # ----------------------------------------------------------------------- + #----------------------------------------------------------------------- # # Create a multiline variable that consists of a yaml-compliant string # specifying the values that the jinja variables in the template # AQM_RC_TMPL_FN file should be set to. # - # ----------------------------------------------------------------------- + #----------------------------------------------------------------------- # settings = { "do_aqm_dust": DO_AQM_DUST, @@ -108,7 +103,7 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): "aqm_rc_dust_file_fp": aqm_rc_dust_file_fp, "aqm_rc_canopy_file_fp": aqm_rc_canopy_file_fp, "aqm_rc_product_fn": AQM_RC_PRODUCT_FN, - "aqm_rc_product_frequency": AQM_RC_PRODUCT_FREQUENCY, + "aqm_rc_product_frequency": AQM_RC_PRODUCT_FREQUENCY } settings_str = cfg_to_yaml_str(settings) @@ -123,56 +118,46 @@ def create_aqm_rc_file(cdate, run_dir, init_concentrations): verbose=VERBOSE, ) # - # ----------------------------------------------------------------------- + #----------------------------------------------------------------------- # # Call a python script to generate the experiment's actual AQM_RC_FN # file from the template file. 
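# In isolation, that step is a single uwtools render call: Jinja variables in
# the template are filled from the settings mapping built above. A minimal
# sketch with hypothetical file names:
from uwtools.api.template import render

render(
    input_file="aqm.rc.jinja",          # hypothetical template path
    output_file="aqm.rc",
    values_src={"do_aqm_dust": True},   # subset of the settings mapping
)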
# - # ----------------------------------------------------------------------- + #----------------------------------------------------------------------- # render( - input_file=AQM_RC_TMPL_FP, - output_file=aqm_rc_fp, - values_src=settings, + input_file = AQM_RC_TMPL_FP, + output_file = aqm_rc_fp, + values_src = settings, ) return True - def parse_args(argv): - """Parse command line arguments""" + """ Parse command line arguments""" parser = argparse.ArgumentParser(description="Creates aqm.rc file.") - parser.add_argument( - "-r", "--run-dir", dest="run_dir", required=True, help="Run directory." - ) + parser.add_argument("-r", "--run-dir", + dest="run_dir", + required=True, + help="Run directory.") - parser.add_argument( - "-c", - "--cdate", - dest="cdate", - required=True, - help="Date string in YYYYMMDD format.", - ) + parser.add_argument("-c", "--cdate", + dest="cdate", + required=True, + help="Date string in YYYYMMDD format.") - parser.add_argument( - "-i", - "--init_concentrations", - dest="init_concentrations", - required=True, - help="Flag for initial concentrations.", - ) + parser.add_argument("-i", "--init_concentrations", + dest="init_concentrations", + required=True, + help="Flag for initial concentrations.") - parser.add_argument( - "-p", - "--path-to-defns", - dest="path_to_defns", - required=True, - help="Path to var_defns file.", - ) + parser.add_argument("-p", "--path-to-defns", + dest="path_to_defns", + required=True, + help="Path to var_defns file.") return parser.parse_args(argv) - if __name__ == "__main__": args = parse_args(sys.argv[1:]) cfg = get_yaml_config(args.path_to_defns) @@ -182,4 +167,4 @@ def parse_args(argv): run_dir=args.run_dir, cdate=str_to_type(args.cdate), init_concentrations=str_to_type(args.init_concentrations), - ) + ) \ No newline at end of file diff --git a/ush/create_diag_table_file.py b/ush/create_diag_table_file.py index 0030faf08..8ca4c5bc5 100644 --- a/ush/create_diag_table_file.py +++ b/ush/create_diag_table_file.py @@ -35,7 +35,7 @@ def create_diag_table_file(run_dir): # import all environment variables import_vars() - # pylint: disable=undefined-variable + #pylint: disable=undefined-variable # create a diagnostic table file within the specified run directory print_info_msg( f""" @@ -75,10 +75,10 @@ def create_diag_table_file(run_dir): ) render( - input_file=DIAG_TABLE_TMPL_FP, - output_file=diag_table_fp, - values_src=settings, - ) + input_file = DIAG_TABLE_TMPL_FP, + output_file = diag_table_fp, + values_src = settings, + ) return True @@ -106,4 +106,4 @@ def parse_args(argv): cfg = get_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) import_vars(dictionary=cfg) - create_diag_table_file(args.run_dir) + create_diag_table_file(args.run_dir) \ No newline at end of file diff --git a/ush/create_model_configure_file.py b/ush/create_model_configure_file.py index eb0b52917..126040dc2 100644 --- a/ush/create_model_configure_file.py +++ b/ush/create_model_configure_file.py @@ -1,10 +1,8 @@ #!/usr/bin/env python3 - """ -Function to create a UFS configuration file for the FV3 forecast -model(s) from a template. +Create a model_configure file for the FV3 forecast model from a +template. 
""" - import argparse import os import sys @@ -14,20 +12,31 @@ cfg_to_yaml_str, flatten_dict, import_vars, + lowercase, print_info_msg, print_input_args, + str_to_type, ) from uwtools.api.config import get_yaml_config from uwtools.api.template import render -def create_ufs_configure_file(run_dir): - """Creates a ufs configuration file in the specified +def create_model_configure_file( + cdate, fcst_len_hrs, fhrot, run_dir, dt_atmos, sub_hourly_post=False, + dt_subhourly_post_mnts=None, + ): #pylint: disable=too-many-arguments + """Creates a model configuration file in the specified run directory Args: + cdate: cycle date + fcst_len_hrs: forecast length in hours + fhrot: forecast hour at restart run_dir: run directory + sub_hourly_post + dt_subhourly_post_mnts + dt_atmos Returns: Boolean """ @@ -42,22 +51,18 @@ def create_ufs_configure_file(run_dir): # # ----------------------------------------------------------------------- # - # Create a UFS configuration file in the specified run directory. + # Create a model configuration file in the specified run directory. # # ----------------------------------------------------------------------- # print_info_msg( - f''' - Creating a ufs.configure file (\"{UFS_CONFIG_FN}\") in the specified + f""" + Creating a model configuration file ('{MODEL_CONFIG_FN}') in the specified run directory (run_dir): - run_dir = \"{run_dir}\"''', + run_dir = '{run_dir}'""", verbose=VERBOSE, ) # - # Set output file path - # - ufs_config_fp = os.path.join(run_dir, UFS_CONFIG_FN) - # # ----------------------------------------------------------------------- # # Create a multiline variable that consists of a yaml-compliant string @@ -66,13 +71,140 @@ def create_ufs_configure_file(run_dir): # # ----------------------------------------------------------------------- # - settings = {"dt_atmos": DT_ATMOS, "print_esmf": PRINT_ESMF, "cpl_aqm": CPL_AQM} + settings = { + "PE_MEMBER01": PE_MEMBER01, + "start_year": cdate.year, + "start_month": cdate.month, + "start_day": cdate.day, + "start_hour": cdate.hour, + "nhours_fcst": fcst_len_hrs, + "fhrot": fhrot, + "dt_atmos": DT_ATMOS, + "atmos_nthreads": OMP_NUM_THREADS_RUN_FCST, + "restart_interval": RESTART_INTERVAL, + "itasks": ITASKS, + "write_dopost": f".{lowercase(str(WRITE_DOPOST))}.", + "quilting": f".{lowercase(str(QUILTING))}.", + "output_grid": WRTCMP_output_grid, + } + # + # If the write-component is to be used, then specify a set of computational + # parameters and a set of grid parameters. The latter depends on the type + # (coordinate system) of the grid that the write-component will be using. 
+    #
+    if QUILTING:
+        settings.update(
+            {
+                "write_groups": WRTCMP_write_groups,
+                "write_tasks_per_group": WRTCMP_write_tasks_per_group,
+                "cen_lon": WRTCMP_cen_lon,
+                "cen_lat": WRTCMP_cen_lat,
+                "lon1": WRTCMP_lon_lwr_left,
+                "lat1": WRTCMP_lat_lwr_left,
+            }
+        )
+
+        if WRTCMP_output_grid == "lambert_conformal":
+            settings.update(
+                {
+                    "stdlat1": WRTCMP_stdlat1,
+                    "stdlat2": WRTCMP_stdlat2,
+                    "nx": WRTCMP_nx,
+                    "ny": WRTCMP_ny,
+                    "dx": WRTCMP_dx,
+                    "dy": WRTCMP_dy,
+                    "lon2": "",
+                    "lat2": "",
+                    "dlon": "",
+                    "dlat": "",
+                }
+            )
+        elif (
+            WRTCMP_output_grid in ("regional_latlon", "rotated_latlon")
+        ):
+            settings.update(
+                {
+                    "lon2": WRTCMP_lon_upr_rght,
+                    "lat2": WRTCMP_lat_upr_rght,
+                    "dlon": WRTCMP_dlon,
+                    "dlat": WRTCMP_dlat,
+                    "stdlat1": "",
+                    "stdlat2": "",
+                    "nx": "",
+                    "ny": "",
+                    "dx": "",
+                    "dy": "",
+                }
+            )
+    #
+    # If not using the write-component (aka quilting), set those variables
+    # needed for quilting to None so that they get rendered in the template appropriately.
+    #
+    else:
+        settings.update(
+            {
+                "write_groups": None,
+                "write_tasks_per_group": None,
+                "cen_lon": None,
+                "cen_lat": None,
+                "lon1": None,
+                "lat1": None,
+                "stdlat1": None,
+                "stdlat2": None,
+                "nx": None,
+                "ny": None,
+                "dx": None,
+                "dy": None,
+                "lon2": None,
+                "lat2": None,
+                "dlon": None,
+                "dlat": None,
+            }
+        )
+    #
+    # If sub_hourly_post is set to "TRUE", then the forecast model must be
+    # directed to generate output files on a sub-hourly interval. Do this
+    # by specifying the output interval in the model configuration file
+    # (MODEL_CONFIG_FN) in units of number of forecast model time steps (nsout).
+    # nsout is calculated using the user-specified output time interval
+    # dt_subhourly_post_mnts (in units of minutes) and the forecast model's
+    # main time step dt_atmos (in units of seconds). Note that nsout is
+    # guaranteed to be an integer because the experiment generation scripts
+    # require that dt_subhourly_post_mnts (after conversion to seconds) be
+    # evenly divisible by dt_atmos. Also, in this case, the variable output_fh
+    # [which specifies the output interval in hours;
+    # see the jinja model_config template file] is set to 0, although this
+    # doesn't matter because any positive value of nsout will override output_fh.
+    #
+    # If sub_hourly_post is set to "FALSE", then the workflow is hard-coded
+    # (in the jinja model_config template file) to direct the forecast model
+    # to output files every hour. This is done by setting (1) output_fh to 1
+    # here, and (2) nsout to -1 here, which turns off output by time step interval.
+    #
+    # Note that the approach used here of separating how hourly and subhourly
+    # output is handled should be changed/generalized/simplified such that
+    # the user should only need to specify the output time interval (there
+    # should be no need to specify a flag like sub_hourly_post); the workflow
+    # should then be able to direct the model to output files with that time
+    # interval and to direct the post-processor to process those files
+    # regardless of whether that output time interval is larger than, equal
+    # to, or smaller than one hour.
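# Worked example of the arithmetic described above: a 20-minute sub-hourly
# interval with dt_atmos = 60 s gives nsout = (20 * 60) // 60 = 20 time steps
# between outputs, and output_fh = 0 since a positive nsout takes precedence:
def output_cadence(sub_hourly_post: bool, dt_subhourly_post_mnts: int, dt_atmos: int):
    """Return (output_fh, nsout) under the rules documented above."""
    if sub_hourly_post:
        return 0, (dt_subhourly_post_mnts * 60) // dt_atmos
    return 1, -1  # hourly output; time-step-based output disabled

assert output_cadence(True, 20, 60) == (0, 20)
assert output_cadence(False, 0, 60) == (1, -1)
# The added code below implements exactly this rule: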
+    #
+    if sub_hourly_post:
+        nsout = (dt_subhourly_post_mnts * 60) // dt_atmos
+        output_fh = 0
+    else:
+        output_fh = 1
+        nsout = -1
+
+    settings.update({"output_fh": output_fh, "nsout": nsout})
+
     settings_str = cfg_to_yaml_str(settings)

     print_info_msg(
         dedent(
             f"""
-            The variable \"settings\" specifying values to be used in the \"{UFS_CONFIG_FN}\"
+            The variable 'settings' specifying values to be used in the '{MODEL_CONFIG_FN}'
             file has been set as follows:\n
             settings =\n\n"""
         )
@@ -82,27 +214,75 @@
     #
     # -----------------------------------------------------------------------
     #
-    # Call a python script to generate the experiment's actual UFS_CONFIG_FN
+    # Call a python script to generate the experiment's actual MODEL_CONFIG_FN
     # file from the template file.
     #
     # -----------------------------------------------------------------------
     #
+    model_config_fp = os.path.join(run_dir, MODEL_CONFIG_FN)
+
     render(
-        input_file=UFS_CONFIG_TMPL_FP,
-        output_file=ufs_config_fp,
-        values_src=settings,
-    )
+        input_file = MODEL_CONFIG_TMPL_FP,
+        output_file = model_config_fp,
+        values_src = settings
+        )
     return True


 def parse_args(argv):
     """Parse command line arguments"""
-    parser = argparse.ArgumentParser(description="Creates UFS configuration file.")
+    parser = argparse.ArgumentParser(description="Creates model configuration file.")

     parser.add_argument(
         "-r", "--run-dir", dest="run_dir", required=True, help="Run directory."
     )

+    parser.add_argument(
+        "-c",
+        "--cdate",
+        dest="cdate",
+        required=True,
+        help="Date string in YYYYMMDD format.",
+    )
+
+    parser.add_argument(
+        "-f",
+        "--fcst_len_hrs",
+        dest="fcst_len_hrs",
+        required=True,
+        help="Forecast length in hours.",
+    )
+
+    parser.add_argument(
+        "-b",
+        "--fhrot",
+        dest="fhrot",
+        required=True,
+        help="Forecast hour at restart.",
+    )
+
+    parser.add_argument(
+        "-s",
+        "--sub-hourly-post",
+        dest="sub_hourly_post",
+        help="Set sub-hourly post to TRUE or FALSE by passing the corresponding string.",
+    )
+
+    parser.add_argument(
+        "-d",
+        "--dt-subhourly-post-mnts",
+        dest="dt_subhourly_post_mnts",
+        help="Subhourly post minutes.",
+    )
+
+    parser.add_argument(
+        "-t",
+        "--dt-atmos",
+        dest="dt_atmos",
+        required=True,
+        help="Forecast model's main time step.",
+    )
+
     parser.add_argument(
         "-p",
         "--path-to-defns",
@@ -119,6 +299,10 @@
     cfg = get_yaml_config(args.path_to_defns)
     cfg = flatten_dict(cfg)
     import_vars(dictionary=cfg)
-    create_ufs_configure_file(
+    create_model_configure_file(
         run_dir=args.run_dir,
-    )
+        cdate=str_to_type(args.cdate),
+        fcst_len_hrs=str_to_type(args.fcst_len_hrs),
+        fhrot=str_to_type(args.fhrot),
+        dt_atmos=str_to_type(args.dt_atmos),
+        sub_hourly_post=str_to_type(args.sub_hourly_post),
+        dt_subhourly_post_mnts=str_to_type(args.dt_subhourly_post_mnts),
+    )
\ No newline at end of file
diff --git a/ush/create_ufs_configure_file.py b/ush/create_ufs_configure_file.py
index c9eb5cc7e..c03628005 100644
--- a/ush/create_ufs_configure_file.py
+++ b/ush/create_ufs_configure_file.py
@@ -1,881 +1,122 @@
 #!/usr/bin/env python3
 """
-User interface to create an experiment directory consistent with the
-user-defined config.yaml file.
+Function to create a UFS configuration file for the FV3 forecast
+model(s) from a template.
""" -# pylint: disable=invalid-name - import argparse -import logging import os import sys -from pathlib import Path -from stat import S_IXUSR -from string import Template from textwrap import dedent from python_utils import ( - list_to_str, - log_info, - import_vars, - export_vars, - cp_vrfy, - ln_vrfy, - mkdir_vrfy, - mv_vrfy, - check_for_preexist_dir_file, cfg_to_yaml_str, - find_pattern_in_str, flatten_dict, + import_vars, + print_info_msg, + print_input_args, ) -from check_python_version import check_python_version -from get_crontab_contents import add_crontab_line -from setup import setup -from set_fv3nml_sfc_climo_filenames import set_fv3nml_sfc_climo_filenames - -from uwtools.api.config import get_nml_config, get_yaml_config, realize +from uwtools.api.config import get_yaml_config from uwtools.api.template import render - -# pylint: disable=too-many-locals,too-many-branches, too-many-statements -def generate_FV3LAM_wflow( - ushdir, logfile: str = "log.generate_FV3LAM_wflow", debug: bool = False -) -> str: - """Function to setup a forecast experiment and create a workflow - (according to the parameters specified in the config file) +def create_ufs_configure_file(run_dir): + """ Creates a ufs configuration file in the specified + run directory Args: - ushdir (str) : The full path of the ush/ directory where this script is located - logfile (str) : The name of the file where logging is written - debug (bool): Enable extra output for debugging + run_dir: run directory Returns: - EXPTDIR (str) : The full path of the directory where this experiment has been generated + Boolean """ - # Set up logging to write to screen and logfile - setup_logging(logfile, debug) - - # Check python version and presence of some non-standard packages - check_python_version() + print_input_args(locals()) - # Note start of workflow generation - log_info( - """ - ======================================================================== - Starting experiment generation... - ========================================================================""" - ) - - # The setup function reads the user configuration file and fills in - # non-user-specified values from config_defaults.yaml - expt_config = setup(ushdir, debug=debug) - - # - # ----------------------------------------------------------------------- - # - # Set the full path to the experiment's rocoto workflow xml file. This - # file will be placed at the top level of the experiment directory and - # then used by rocoto to run the workflow. - # - # ----------------------------------------------------------------------- - # - wflow_xml_fn = expt_config["workflow"]["WFLOW_XML_FN"] - wflow_xml_fp = os.path.join( - expt_config["workflow"]["EXPTDIR"], - wflow_xml_fn, - ) - # - # ----------------------------------------------------------------------- - # - # Create a multiline variable that consists of a yaml-compliant string - # specifying the values that the jinja variables in the template rocoto - # XML should be set to. These values are set either in the user-specified - # workflow configuration file (EXPT_CONFIG_FN) or in the setup() function - # called above. Then call the python script that generates the XML. 
- # - # ----------------------------------------------------------------------- - # - if expt_config["platform"]["WORKFLOW_MANAGER"] == "rocoto": - - template_xml_fp = os.path.join( - expt_config["user"]["PARMdir"], - wflow_xml_fn, - ) - - log_info( - f""" - Creating rocoto workflow XML file (WFLOW_XML_FP): - WFLOW_XML_FP = '{wflow_xml_fp}'""" - ) - - # - # Call the python script to generate the experiment's XML file - # - rocoto_yaml_fp = expt_config["workflow"]["ROCOTO_YAML_FP"] - render( - input_file=template_xml_fp, - output_file=wflow_xml_fp, - values_src=rocoto_yaml_fp, - ) - # - # ----------------------------------------------------------------------- - # - # Create a symlink in the experiment directory that points to the workflow - # (re)launch script. - # - # ----------------------------------------------------------------------- - # - exptdir = expt_config["workflow"]["EXPTDIR"] - wflow_launch_script_fp = expt_config["workflow"]["WFLOW_LAUNCH_SCRIPT_FP"] - wflow_launch_script_fn = expt_config["workflow"]["WFLOW_LAUNCH_SCRIPT_FN"] - log_info( - f""" - Creating symlink in the experiment directory (EXPTDIR) that points to the - workflow launch script (WFLOW_LAUNCH_SCRIPT_FP): - EXPTDIR = '{exptdir}' - WFLOW_LAUNCH_SCRIPT_FP = '{wflow_launch_script_fp}'""", - verbose=debug, - ) - - with open(wflow_launch_script_fp, "r", encoding="utf-8") as launch_script_file: - launch_script_content = launch_script_file.read() - - # Stage an experiment-specific launch file in the experiment directory - template = Template(launch_script_content) - - # The script needs several variables from the workflow and user sections - template_variables = { - **expt_config["user"], - **expt_config["workflow"], - "valid_vals_BOOLEAN": list_to_str( - expt_config["constants"]["valid_vals_BOOLEAN"] - ), - } - launch_content = template.safe_substitute(template_variables) - - launch_fp = os.path.join(exptdir, wflow_launch_script_fn) - with open(launch_fp, "w", encoding="utf-8") as expt_launch_fn: - expt_launch_fn.write(launch_content) - - os.chmod(launch_fp, os.stat(launch_fp).st_mode | S_IXUSR) - - # - # ----------------------------------------------------------------------- - # - # If USE_CRON_TO_RELAUNCH is set to TRUE, add a line to the user's - # cron table to call the (re)launch script every - # CRON_RELAUNCH_INTVL_MNTS minutes. - # - # ----------------------------------------------------------------------- - # - # From here on out, going back to setting variables for everything - # in the flattened expt_config dictionary - # TODO: Reference all these variables in their respective - # dictionaries, instead. 
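# For context: flatten_dict collapses the nested experiment config so that
# import_vars can expose each leaf value as a module-level global, a pattern
# used throughout these scripts. A minimal stand-in with the same behavior
# (the sample dictionary is hypothetical, not the real experiment config):
def flatten(nested: dict, flat=None) -> dict:
    flat = {} if flat is None else flat
    for key, val in nested.items():
        if isinstance(val, dict):
            flatten(val, flat)  # recurse into sub-sections
        else:
            flat[key] = val
    return flat

assert flatten({"workflow": {"EXPTDIR": "/tmp/expt"}}) == {"EXPTDIR": "/tmp/expt"}
# The deleted lines below performed this flatten-then-import step on the real config: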
- import_vars(dictionary=flatten_dict(expt_config)) - export_vars(source_dict=flatten_dict(expt_config)) + #import all environment variables + import_vars() # pylint: disable=undefined-variable - if USE_CRON_TO_RELAUNCH: - add_crontab_line( - called_from_cron=False, - machine=expt_config["user"]["MACHINE"], - crontab_line=expt_config["workflow"]["CRONTAB_LINE"], - exptdir=exptdir, - debug=debug, - ) # - # Copy or symlink fix files + #----------------------------------------------------------------------- # - if SYMLINK_FIX_FILES: - log_info( - f""" - Symlinking fixed files from system directory (FIXgsm) to a subdirectory (FIXam): - FIXgsm = '{FIXgsm}' - FIXam = '{FIXam}'""", - verbose=debug, - ) - - ln_vrfy(f"""-fsn '{FIXgsm}' '{FIXam}'""") - else: - - log_info( - f""" - Copying fixed files from system directory (FIXgsm) to a subdirectory (FIXam): - FIXgsm = '{FIXgsm}' - FIXam = '{FIXam}'""", - verbose=debug, - ) - - check_for_preexist_dir_file(FIXam, "delete") - mkdir_vrfy("-p", FIXam) - mkdir_vrfy("-p", os.path.join(FIXam, "fix_co2_proj")) - - num_files = len(FIXgsm_FILES_TO_COPY_TO_FIXam) - for i in range(num_files): - fn = f"{FIXgsm_FILES_TO_COPY_TO_FIXam[i]}" - cp_vrfy(os.path.join(FIXgsm, fn), os.path.join(FIXam, fn)) - # - # ----------------------------------------------------------------------- - # - # Copy MERRA2 aerosol climatology data. - # - # ----------------------------------------------------------------------- - # - if USE_MERRA_CLIMO: - log_info( - f""" - Copying MERRA2 aerosol climatology data files from system directory - (FIXaer/FIXlut) to a subdirectory (FIXclim) in the experiment directory: - FIXaer = '{FIXaer}' - FIXlut = '{FIXlut}' - FIXclim = '{FIXclim}'""", - verbose=debug, - ) - - check_for_preexist_dir_file(FIXclim, "delete") - mkdir_vrfy("-p", FIXclim) - - if SYMLINK_FIX_FILES: - ln_vrfy("-fsn", os.path.join(FIXaer, "merra2.aerclim*.nc"), FIXclim) - ln_vrfy("-fsn", os.path.join(FIXlut, "optics*.dat"), FIXclim) - else: - cp_vrfy(os.path.join(FIXaer, "merra2.aerclim*.nc"), FIXclim) - cp_vrfy(os.path.join(FIXlut, "optics*.dat"), FIXclim) - # - # ----------------------------------------------------------------------- - # - # Copy templates of various input files to the experiment directory. - # - # ----------------------------------------------------------------------- - # - log_info( - """ - Copying templates of various input files to the experiment directory...""", - verbose=debug, - ) - - log_info( - """ - Copying the template data table file to the experiment directory...""", - verbose=debug, - ) - cp_vrfy(DATA_TABLE_TMPL_FP, DATA_TABLE_FP) - - log_info( - """ - Copying the template field table file to the experiment directory...""", - verbose=debug, - ) - cp_vrfy(FIELD_TABLE_TMPL_FP, FIELD_TABLE_FP) - - # - # Copy the CCPP physics suite definition file from its location in the - # clone of the FV3 code repository to the experiment directory (EXPT- - # DIR). - # - log_info( - """ - Copying the CCPP physics suite definition XML file from its location in - the forecast model directory structure to the experiment directory...""", - verbose=debug, - ) - cp_vrfy(CCPP_PHYS_SUITE_IN_CCPP_FP, CCPP_PHYS_SUITE_FP) - # - # Copy the field dictionary file from its location in the - # clone of the FV3 code repository to the experiment directory (EXPT- - # DIR). 
- # - log_info( - """ - Copying the field dictionary file from its location in the - forecast model directory structure to the experiment - directory...""", - verbose=debug, - ) - cp_vrfy(FIELD_DICT_IN_UWM_FP, FIELD_DICT_FP) - # - # ----------------------------------------------------------------------- - # - # Set parameters in the FV3-LAM namelist file. - # - # ----------------------------------------------------------------------- + # Create a UFS configuration file in the specified run directory. # - log_info( - f""" - Setting parameters in weather model's namelist file (FV3_NML_FP): - FV3_NML_FP = '{FV3_NML_FP}'""", - verbose=debug, - ) - # - # Set npx and npy, which are just NX plus 1 and NY plus 1, respectively. - # These need to be set in the FV3-LAM Fortran namelist file. They represent - # the number of cell vertices in the x and y directions on the regional - # grid. - # - npx = NX + 1 - npy = NY + 1 + #----------------------------------------------------------------------- # - # For the physics suites that use RUC LSM, set the parameter kice to 9, - # Otherwise, leave it unspecified (which means it gets set to the default - # value in the forecast model). + print_info_msg(f''' + Creating a ufs.configure file (\"{UFS_CONFIG_FN}\") in the specified + run directory (run_dir): + run_dir = \"{run_dir}\"''', verbose=VERBOSE) # - kice = None - if SDF_USES_RUC_LSM: - kice = 9 + # Set output file path # - # Set lsoil, which is the number of input soil levels provided in the - # chgres_cube output NetCDF file. This is the same as the parameter - # nsoill_out in the namelist file for chgres_cube. [On the other hand, - # the parameter lsoil_lsm (not set here but set in input.nml.FV3 and/or - # FV3.input.yml) is the number of soil levels that the LSM scheme in the - # forecast model will run with.] Here, we use the same approach to set - # lsoil as the one used to set nsoill_out in exregional_make_ics.sh. - # See that script for details. + ufs_config_fp = os.path.join(run_dir, UFS_CONFIG_FN) # - # NOTE: - # May want to remove lsoil from FV3.input.yml (and maybe input.nml.FV3). - # Also, may want to set lsm here as well depending on SDF_USES_RUC_LSM. - # - lsoil = 4 - if EXTRN_MDL_NAME_ICS in ("HRRR", "RAP") and SDF_USES_RUC_LSM: - lsoil = 9 - if CCPP_PHYS_SUITE == "FV3_GFS_v15_thompson_mynn_lam3km": - lsoil = "" + #----------------------------------------------------------------------- # # Create a multiline variable that consists of a yaml-compliant string - # specifying the values that the namelist variables that are physics- - # suite-independent need to be set to. Below, this variable will be - # passed to a python script that will in turn set the values of these - # variables in the namelist file. - # - # IMPORTANT: - # If we want a namelist variable to be removed from the namelist file, - # in the "settings" variable below, we need to set its value to the - # string "null". This is equivalent to setting its value to - # !!python/none - # in the base namelist file specified by FV3_NML_BASE_SUITE_FP or the - # suite-specific yaml settings file specified by FV3_NML_YAML_CONFIG_FP. + # specifying the values that the jinja variables in the template + # model_configure file should be set to. # - # It turns out that setting the variable to an empty string also works - # to remove it from the namelist! Which is better to use?? 
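# Illustration of the removal convention described above: a namelist entry
# whose value is None (YAML null) is meant to be dropped before the namelist
# is written, as the pruning loop later in this function does. The section
# and keys here are hypothetical:
settings_example = {
    "gfs_physics_nml": {
        "kice": 9,      # keep: explicit value
        "lsoil": None,  # remove: pruned before the file is dumped
    }
}
pruned = {
    sect: {k: v for k, v in values.items() if v is not None}
    for sect, values in settings_example.items()
}
assert pruned == {"gfs_physics_nml": {"kice": 9}}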
+ #----------------------------------------------------------------------- # - settings = {} - settings["atmos_model_nml"] = { - "blocksize": BLOCKSIZE, - "ccpp_suite": CCPP_PHYS_SUITE, + settings = { + "dt_atmos": DT_ATMOS, + "print_esmf": PRINT_ESMF, + "cpl_aqm": CPL_AQM } - - fv_core_nml_dict = {} - fv_core_nml_dict.update( - { - "target_lon": LON_CTR, - "target_lat": LAT_CTR, - "nrows_blend": HALO_BLEND, - # - # Question: - # For a ESGgrid type grid, what should stretch_fac be set to? This depends - # on how the FV3 code uses the stretch_fac parameter in the namelist file. - # Recall that for a ESGgrid, it gets set in the function set_gridparams_ESGgrid(.sh) - # to something like 0.9999, but is it ok to set it to that here in the - # FV3 namelist file? - # - "stretch_fac": STRETCH_FAC, - "npx": npx, - "npy": npy, - "layout": [LAYOUT_X, LAYOUT_Y], - "bc_update_interval": LBC_SPEC_INTVL_HRS, - } - ) - if CCPP_PHYS_SUITE == "FV3_GFS_v15p2": - if CPL_AQM: - fv_core_nml_dict.update({"dnats": 5}) - else: - fv_core_nml_dict.update({"dnats": 1}) - elif CCPP_PHYS_SUITE == "FV3_GFS_v16": - if CPL_AQM: - fv_core_nml_dict.update({"hord_tr": 8, "dnats": 5, "nord": 2}) - else: - fv_core_nml_dict.update({"dnats": 1}) - elif CCPP_PHYS_SUITE == "FV3_GFS_v17_p8": - if CPL_AQM: - fv_core_nml_dict.update({"dnats": 4}) - else: - fv_core_nml_dict.update({"dnats": 0}) - - settings["fv_core_nml"] = fv_core_nml_dict - - gfs_physics_nml_dict = {} - gfs_physics_nml_dict.update( - { - "kice": kice or None, - "lsoil": lsoil or None, - "print_diff_pgr": PRINT_DIFF_PGR, - } - ) - - if CPL_AQM: - gfs_physics_nml_dict.update( - { - "cplaqm": True, - "cplocn2atm": False, - "fscav_aero": [ - "aacd:0.0", - "acet:0.0", - "acrolein:0.0", - "acro_primary:0.0", - "ald2:0.0", - "ald2_primary:0.0", - "aldx:0.0", - "benzene:0.0", - "butadiene13:0.0", - "cat1:0.0", - "cl2:0.0", - "clno2:0.0", - "co:0.0", - "cres:0.0", - "cron:0.0", - "ech4:0.0", - "epox:0.0", - "eth:0.0", - "etha:0.0", - "ethy:0.0", - "etoh:0.0", - "facd:0.0", - "fmcl:0.0", - "form:0.0", - "form_primary:0.0", - "gly:0.0", - "glyd:0.0", - "h2o2:0.0", - "hcl:0.0", - "hg:0.0", - "hgiigas:0.0", - "hno3:0.0", - "hocl:0.0", - "hono:0.0", - "hpld:0.0", - "intr:0.0", - "iole:0.0", - "isop:0.0", - "ispd:0.0", - "ispx:0.0", - "ket:0.0", - "meoh:0.0", - "mepx:0.0", - "mgly:0.0", - "n2o5:0.0", - "naph:0.0", - "no:0.0", - "no2:0.0", - "no3:0.0", - "ntr1:0.0", - "ntr2:0.0", - "o3:0.0", - "ole:0.0", - "opan:0.0", - "open:0.0", - "opo3:0.0", - "pacd:0.0", - "pan:0.0", - "panx:0.0", - "par:0.0", - "pcvoc:0.0", - "pna:0.0", - "prpa:0.0", - "rooh:0.0", - "sesq:0.0", - "so2:0.0", - "soaalk:0.0", - "sulf:0.0", - "terp:0.0", - "tol:0.0", - "tolu:0.0", - "vivpo1:0.0", - "vlvoo1:0.0", - "vlvoo2:0.0", - "vlvpo1:0.0", - "vsvoo1:0.0", - "vsvoo2:0.0", - "vsvoo3:0.0", - "vsvpo1:0.0", - "vsvpo2:0.0", - "vsvpo3:0.0", - "xopn:0.0", - "xylmn:0.0", - "*:0.2", - ], - } - ) - settings["gfs_physics_nml"] = gfs_physics_nml_dict - - # - # Add to "settings" the values of those namelist variables that specify - # the paths to fixed files in the FIXam directory. As above, these namelist - # variables are physcs-suite-independent. - # - # Note that the array FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING contains - # the mapping between the namelist variables and the names of the files - # in the FIXam directory. Here, we loop through this array and process - # each element to construct each line of "settings". 
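# The FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING entries are "name | file" pairs;
# a stand-alone sketch of the split performed by the loop below (the sample
# entry is hypothetical):
import re

mapping = "FNGLAC | global_glacier.2x2.grb"
regex_search = "^[ ]*([^| ]+)[ ]*[|][ ]*([^| ]+)[ ]*$"
nml_var_name, fixam_fn = re.findall(regex_search, mapping)[0]
assert (nml_var_name, fixam_fn) == ("FNGLAC", "global_glacier.2x2.grb")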
- # - dummy_run_dir = os.path.join(EXPTDIR, "any_cyc") - if DO_ENSEMBLE: - dummy_run_dir = os.path.join(dummy_run_dir, "any_ensmem") - - regex_search = "^[ ]*([^| ]+)[ ]*[|][ ]*([^| ]+)[ ]*$" - num_nml_vars = len(FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING) - namsfc_dict = {} - for i in range(num_nml_vars): - - mapping = f"{FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING[i]}" - tup = find_pattern_in_str(regex_search, mapping) - nml_var_name = tup[0] - FIXam_fn = tup[1] - - fp = '""' - if FIXam_fn: - fp = os.path.join(FIXam, FIXam_fn) - # - # If not in NCO mode, for portability and brevity, change fp so that it - # is a relative path (relative to any cycle directory immediately under - # the experiment directory). - # - if RUN_ENVIR != "nco": - fp = os.path.relpath(os.path.realpath(fp), start=dummy_run_dir) - # - # Add a line to the variable "settings" that specifies (in a yaml-compliant - # format) the name of the current namelist variable and the value it should - # be set to. - # - namsfc_dict[nml_var_name] = fp - # - # Add namsfc_dict to settings - # - settings["namsfc"] = namsfc_dict - # - # Use netCDF4 when running the North American 3-km domain due to file size. - # - if PREDEF_GRID_NAME == "RRFS_NA_3km": - settings["fms2_io_nml"] = {"netcdf_default_format": "netcdf4"} - settings_str = cfg_to_yaml_str(settings) - log_info( - """ - The variable 'settings' specifying values of the weather model's - namelist variables has been set as follows:\n""", - verbose=debug, - ) - log_info("\nsettings =\n\n" + settings_str, verbose=debug) - # - # ----------------------------------------------------------------------- - # - # Create a new FV3 namelist file - # - # ----------------------------------------------------------------------- - # - - physics_cfg = get_yaml_config(FV3_NML_YAML_CONFIG_FP) - base_namelist = get_nml_config(FV3_NML_BASE_SUITE_FP) - base_namelist.update_from(physics_cfg[CCPP_PHYS_SUITE]) - base_namelist.update_from(settings) - for sect, values in base_namelist.copy().items(): - if not values: - del base_namelist[sect] - continue - for k, v in values.copy().items(): - if v is None: - del base_namelist[sect][k] - base_namelist.dump(Path(FV3_NML_FP)) - # - # If not running the TN_MAKE_GRID task (which implies the workflow will - # use pregenerated grid files), set the namelist variables specifying - # the paths to surface climatology files. These files are located in - # (or have symlinks that point to them) in the FIXlam directory. - # - # Note that if running the TN_MAKE_GRID task, this action usually cannot - # be performed here but must be performed in that task because the names - # of the surface climatology files depend on the CRES parameter (which is - # the C-resolution of the grid), and this parameter is in most workflow - # configurations is not known until the grid is created. - # - if not expt_config["rocoto"]["tasks"].get("task_make_grid"): - - set_fv3nml_sfc_climo_filenames(flatten_dict(expt_config), debug) - - # - # ----------------------------------------------------------------------- - # - # Add the relevant tendency-based stochastic physics namelist variables to - # "settings" when running with SPPT, SHUM, or SKEB turned on. If running - # with SPP or LSM SPP, set the "new_lscale" variable. Otherwise only - # include an empty "nam_stochy" stanza. 
- # - # ----------------------------------------------------------------------- - # - settings = {} - settings["gfs_physics_nml"] = { - "do_shum": DO_SHUM, - "do_sppt": DO_SPPT, - "do_skeb": DO_SKEB, - "do_spp": DO_SPP, - "n_var_spp": N_VAR_SPP, - "n_var_lndp": N_VAR_LNDP, - "lndp_type": LNDP_TYPE, - "fhcyc": FHCYC_LSM_SPP_OR_NOT, - } - nam_stochy_dict = {} - if DO_SPPT: - nam_stochy_dict.update( - { - "iseed_sppt": ISEED_SPPT, - "new_lscale": NEW_LSCALE, - "sppt": SPPT_MAG, - "sppt_logit": SPPT_LOGIT, - "sppt_lscale": SPPT_LSCALE, - "sppt_sfclimit": SPPT_SFCLIMIT, - "sppt_tau": SPPT_TSCALE, - "spptint": SPPT_INT, - "use_zmtnblck": USE_ZMTNBLCK, - } - ) - - if DO_SHUM: - nam_stochy_dict.update( - { - "iseed_shum": ISEED_SHUM, - "new_lscale": NEW_LSCALE, - "shum": SHUM_MAG, - "shum_lscale": SHUM_LSCALE, - "shum_tau": SHUM_TSCALE, - "shumint": SHUM_INT, - } - ) - - if DO_SKEB: - nam_stochy_dict.update( - { - "iseed_skeb": ISEED_SKEB, - "new_lscale": NEW_LSCALE, - "skeb": SKEB_MAG, - "skeb_lscale": SKEB_LSCALE, - "skebnorm": SKEBNORM, - "skeb_tau": SKEB_TSCALE, - "skebint": SKEB_INT, - "skeb_vdof": SKEB_VDOF, - } - ) - - if DO_SPP or DO_LSM_SPP: - nam_stochy_dict.update({"new_lscale": NEW_LSCALE}) - - settings["nam_stochy"] = nam_stochy_dict - # - # Add the relevant SPP namelist variables to "settings" when running with - # SPP turned on. Otherwise only include an empty "nam_sppperts" stanza. - # - nam_sppperts_dict = {} - if DO_SPP: - nam_sppperts_dict = { - "iseed_spp": ISEED_SPP, - "spp_lscale": SPP_LSCALE, - "spp_prt_list": SPP_MAG_LIST, - "spp_sigtop1": SPP_SIGTOP1, - "spp_sigtop2": SPP_SIGTOP2, - "spp_stddev_cutoff": SPP_STDDEV_CUTOFF, - "spp_tau": SPP_TSCALE, - "spp_var_list": SPP_VAR_LIST, - } - - settings["nam_sppperts"] = nam_sppperts_dict - # - # Add the relevant LSM SPP namelist variables to "settings" when running with - # LSM SPP turned on. - # - nam_sfcperts_dict = {} - if DO_LSM_SPP: - nam_sfcperts_dict = { - "lndp_type": LNDP_TYPE, - "lndp_model_type": LNDP_MODEL_TYPE, - "lndp_tau": LSM_SPP_TSCALE, - "lndp_lscale": LSM_SPP_LSCALE, - "iseed_lndp": ISEED_LSM_SPP, - "lndp_var_list": LSM_SPP_VAR_LIST, - "lndp_prt_list": LSM_SPP_MAG_LIST, - } - - settings["nam_sfcperts"] = nam_sfcperts_dict - - settings_str = cfg_to_yaml_str(settings) - # - # ----------------------------------------------------------------------- - # - # Generate namelist files with stochastic physics if needed - # - # ----------------------------------------------------------------------- - # - if any((DO_SPP, DO_SPPT, DO_SHUM, DO_SKEB, DO_LSM_SPP)): - realize( - input_config=FV3_NML_FP, - input_format="nml", - output_file=FV3_NML_STOCH_FP, - output_format="nml", - update_config=get_nml_config(settings), + print_info_msg( + dedent( + f""" + The variable \"settings\" specifying values to be used in the \"{UFS_CONFIG_FN}\" + file has been set as follows:\n + settings =\n\n""" ) - - # - # ----------------------------------------------------------------------- - # - # To have a record of how this experiment/workflow was generated, copy - # the experiment/workflow configuration file to the experiment directo- - # ry. 
- # - # ----------------------------------------------------------------------- - # - cp_vrfy(os.path.join(ushdir, EXPT_CONFIG_FN), EXPTDIR) - + + settings_str, + verbose=VERBOSE, + ) # - # ----------------------------------------------------------------------- + #----------------------------------------------------------------------- # - # For convenience, print out the commands that need to be issued on the - # command line in order to launch the workflow and to check its status. - # Also, print out the line that should be placed in the user's cron table - # in order for the workflow to be continually resubmitted. + # Call a python script to generate the experiment's actual UFS_CONFIG_FN + # file from the template file. # - # ----------------------------------------------------------------------- + #----------------------------------------------------------------------- # - if WORKFLOW_MANAGER == "rocoto": - wflow_db_fn = f"{os.path.splitext(WFLOW_XML_FN)[0]}.db" - rocotorun_cmd = f"rocotorun -w {WFLOW_XML_FN} -d {wflow_db_fn} -v 10" - rocotostat_cmd = f"rocotostat -w {WFLOW_XML_FN} -d {wflow_db_fn} -v 10" - - # pylint: disable=line-too-long - log_info( - f""" - To launch the workflow, change location to the experiment directory - (EXPTDIR) and issue the rocotrun command, as follows: - - > cd {EXPTDIR} - > {rocotorun_cmd} - - To check on the status of the workflow, issue the rocotostat command - (also from the experiment directory): - - > {rocotostat_cmd} - - Note that: - - 1) The rocotorun command must be issued after the completion of each - task in the workflow in order for the workflow to submit the next - task(s) to the queue. - - 2) In order for the output of the rocotostat command to be up-to-date, - the rocotorun command must be issued immediately before issuing the - rocotostat command. - - For automatic resubmission of the workflow (say every {CRON_RELAUNCH_INTVL_MNTS} minutes), the - following line can be added to the user's crontab (use 'crontab -e' to - edit the cron table): - - */{CRON_RELAUNCH_INTVL_MNTS} * * * * cd {EXPTDIR} && ./launch_FV3LAM_wflow.sh called_from_cron="TRUE" - """ + render( + input_file = UFS_CONFIG_TMPL_FP, + output_file = ufs_config_fp, + values_src = settings, ) - # pylint: enable=line-too-long + return True - # If we got to this point everything was successful: move the log - # file to the experiment directory. - mv_vrfy(logfile, EXPTDIR) - - return EXPTDIR - - -def setup_logging( - logfile: str = "log.generate_FV3LAM_wflow", debug: bool = False -) -> None: - """ - Sets up logging, printing high-priority (INFO and higher) messages to screen, and printing all - messages with detailed timing and routine info in the specified text file. - - If debug = True, print all messages to both screen and log file. - """ - logging.getLogger().setLevel(logging.DEBUG) - - formatter = logging.Formatter("%(name)-22s %(levelname)-8s %(message)s") - - fh = logging.FileHandler(logfile, mode="w") - fh.setLevel(logging.DEBUG) - fh.setFormatter(formatter) - logging.getLogger().addHandler(fh) - logging.debug(f"Finished setting up debug file logging in {logfile}") - - # If there are already multiple handlers, that means - # generate_FV3LAM_workflow was called from another function. - # In that case, do not change the console (print-to-screen) logging. 
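# Stand-alone equivalent of the two-handler scheme set up above: everything
# goes to the log file, while only INFO and higher reach the screen (the log
# file name here is hypothetical):
import logging

logger = logging.getLogger("wflow_sketch")
logger.setLevel(logging.DEBUG)
file_handler = logging.FileHandler("log.sketch", mode="w")
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(logging.Formatter("%(name)-22s %(levelname)-8s %(message)s"))
logger.addHandler(file_handler)
console = logging.StreamHandler()
console.setLevel(logging.INFO)
logger.addHandler(console)
# The guard below skips console setup when a handler is already installed: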
- if len(logging.getLogger().handlers) > 1: - return - - console = logging.StreamHandler() - if debug: - console.setLevel(logging.DEBUG) - else: - console.setLevel(logging.INFO) - logging.getLogger().addHandler(console) - logging.debug("Logging set up successfully") - - -if __name__ == "__main__": - - # Parse arguments +def parse_args(argv): + """ Parse command line arguments""" parser = argparse.ArgumentParser( - description="Script for setting up a forecast and creating a workflow" - "according to the parameters specified in the config file\n" + description='Creates UFS configuration file.' ) - parser.add_argument( - "-d", - "--debug", - action="store_true", - help="Script will be run in debug mode with more verbose output", - ) - pargs = parser.parse_args() + parser.add_argument("-r", "--run-dir", + dest="run_dir", + required=True, + help="Run directory.") - USHdir = os.path.dirname(os.path.abspath(__file__)) - wflow_logfile = f"{USHdir}/log.generate_FV3LAM_wflow" + parser.add_argument("-p", "--path-to-defns", + dest="path_to_defns", + required=True, + help="Path to var_defns file.") - # Call the generate_FV3LAM_wflow function defined above to generate the - # experiment/workflow. - try: - expt_dir = generate_FV3LAM_wflow(USHdir, wflow_logfile, pargs.debug) - except: # pylint: disable=bare-except - logging.exception( - dedent( - f""" - ********************************************************************* - FATAL ERROR: - Experiment generation failed. See the error message(s) printed below. - For more detailed information, check the log file from the workflow - generation script: {wflow_logfile} - *********************************************************************\n - """ - ) - ) - sys.exit(1) + return parser.parse_args(argv) - # pylint: disable=undefined-variable - # Note workflow generation completion - log_info( - f""" - ======================================================================== - - Experiment generation completed. 
The experiment directory is: - - EXPTDIR='{EXPTDIR}' - - ======================================================================== - """ - ) +if __name__ == "__main__": + args = parse_args(sys.argv[1:]) + cfg = get_yaml_config(args.path_to_defns) + cfg = flatten_dict(cfg) + import_vars(dictionary=cfg) + create_ufs_configure_file( + run_dir=args.run_dir, + ) \ No newline at end of file diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index b8510d228..ae3431878 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -40,11 +40,11 @@ from uwtools.api.template import render - # pylint: disable=too-many-locals,too-many-branches, too-many-statements def generate_FV3LAM_wflow( - ushdir, logfile: str = "log.generate_FV3LAM_wflow", debug: bool = False -) -> str: + ushdir, + logfile: str = "log.generate_FV3LAM_wflow", + debug: bool = False) -> str: """Function to setup a forecast experiment and create a workflow (according to the parameters specified in the config file) @@ -72,7 +72,7 @@ def generate_FV3LAM_wflow( # The setup function reads the user configuration file and fills in # non-user-specified values from config_defaults.yaml - expt_config = setup(ushdir, debug=debug) + expt_config = setup(ushdir,debug=debug) # # ----------------------------------------------------------------------- @@ -117,10 +117,10 @@ def generate_FV3LAM_wflow( # rocoto_yaml_fp = expt_config["workflow"]["ROCOTO_YAML_FP"] render( - input_file=template_xml_fp, - output_file=wflow_xml_fp, - values_src=rocoto_yaml_fp, - ) + input_file = template_xml_fp, + output_file = wflow_xml_fp, + values_src = rocoto_yaml_fp, + ) # # ----------------------------------------------------------------------- # @@ -141,27 +141,22 @@ def generate_FV3LAM_wflow( verbose=debug, ) - with open(wflow_launch_script_fp, "r", encoding="utf-8") as launch_script_file: + with open(wflow_launch_script_fp, "r", encoding='utf-8') as launch_script_file: launch_script_content = launch_script_file.read() # Stage an experiment-specific launch file in the experiment directory template = Template(launch_script_content) # The script needs several variables from the workflow and user sections - template_variables = { - **expt_config["user"], - **expt_config["workflow"], - "valid_vals_BOOLEAN": list_to_str( - expt_config["constants"]["valid_vals_BOOLEAN"] - ), - } - launch_content = template.safe_substitute(template_variables) + template_variables = {**expt_config["user"], **expt_config["workflow"], + "valid_vals_BOOLEAN": list_to_str(expt_config["constants"]["valid_vals_BOOLEAN"])} + launch_content = template.safe_substitute(template_variables) launch_fp = os.path.join(exptdir, wflow_launch_script_fn) - with open(launch_fp, "w", encoding="utf-8") as expt_launch_fn: + with open(launch_fp, "w", encoding='utf-8') as expt_launch_fn: expt_launch_fn.write(launch_content) - os.chmod(launch_fp, os.stat(launch_fp).st_mode | S_IXUSR) + os.chmod(launch_fp, os.stat(launch_fp).st_mode|S_IXUSR) # # ----------------------------------------------------------------------- @@ -181,13 +176,9 @@ def generate_FV3LAM_wflow( # pylint: disable=undefined-variable if USE_CRON_TO_RELAUNCH: - add_crontab_line( - called_from_cron=False, - machine=expt_config["user"]["MACHINE"], - crontab_line=expt_config["workflow"]["CRONTAB_LINE"], - exptdir=exptdir, - debug=debug, - ) + add_crontab_line(called_from_cron=False,machine=expt_config["user"]["MACHINE"], + crontab_line=expt_config["workflow"]["CRONTAB_LINE"], + exptdir=exptdir,debug=debug) # # Copy or 
symlink fix files @@ -372,146 +363,86 @@ def generate_FV3LAM_wflow( } fv_core_nml_dict = {} - fv_core_nml_dict.update( - { - "target_lon": LON_CTR, - "target_lat": LAT_CTR, - "nrows_blend": HALO_BLEND, - # - # Question: - # For a ESGgrid type grid, what should stretch_fac be set to? This depends - # on how the FV3 code uses the stretch_fac parameter in the namelist file. - # Recall that for a ESGgrid, it gets set in the function set_gridparams_ESGgrid(.sh) - # to something like 0.9999, but is it ok to set it to that here in the - # FV3 namelist file? - # - "stretch_fac": STRETCH_FAC, - "npx": npx, - "npy": npy, - "layout": [LAYOUT_X, LAYOUT_Y], - "bc_update_interval": LBC_SPEC_INTVL_HRS, - } - ) + fv_core_nml_dict.update({ + "target_lon": LON_CTR, + "target_lat": LAT_CTR, + "nrows_blend": HALO_BLEND, + # + # Question: + # For a ESGgrid type grid, what should stretch_fac be set to? This depends + # on how the FV3 code uses the stretch_fac parameter in the namelist file. + # Recall that for a ESGgrid, it gets set in the function set_gridparams_ESGgrid(.sh) + # to something like 0.9999, but is it ok to set it to that here in the + # FV3 namelist file? + # + "stretch_fac": STRETCH_FAC, + "npx": npx, + "npy": npy, + "layout": [LAYOUT_X, LAYOUT_Y], + "bc_update_interval": LBC_SPEC_INTVL_HRS, + }) if CCPP_PHYS_SUITE == "FV3_GFS_v15p2": if CPL_AQM: - fv_core_nml_dict.update({"dnats": 5}) + fv_core_nml_dict.update({ + "dnats": 5 + }) else: - fv_core_nml_dict.update({"dnats": 1}) + fv_core_nml_dict.update({ + "dnats": 1 + }) elif CCPP_PHYS_SUITE == "FV3_GFS_v16": if CPL_AQM: - fv_core_nml_dict.update({"hord_tr": 8, "dnats": 5, "nord": 2}) + fv_core_nml_dict.update({ + "hord_tr": 8, + "dnats": 5, + "nord": 2 + }) else: - fv_core_nml_dict.update({"dnats": 1}) + fv_core_nml_dict.update({ + "dnats": 1 + }) elif CCPP_PHYS_SUITE == "FV3_GFS_v17_p8": if CPL_AQM: - fv_core_nml_dict.update({"dnats": 4}) + fv_core_nml_dict.update({ + "dnats": 4 + }) else: - fv_core_nml_dict.update({"dnats": 0}) + fv_core_nml_dict.update({ + "dnats": 0 + }) settings["fv_core_nml"] = fv_core_nml_dict gfs_physics_nml_dict = {} - gfs_physics_nml_dict.update( - { - "kice": kice or None, - "lsoil": lsoil or None, - "print_diff_pgr": PRINT_DIFF_PGR, - } - ) + gfs_physics_nml_dict.update({ + "kice": kice or None, + "lsoil": lsoil or None, + "print_diff_pgr": PRINT_DIFF_PGR, + }) if CPL_AQM: - gfs_physics_nml_dict.update( - { - "cplaqm": True, - "cplocn2atm": False, - "fscav_aero": [ - "aacd:0.0", - "acet:0.0", - "acrolein:0.0", - "acro_primary:0.0", - "ald2:0.0", - "ald2_primary:0.0", - "aldx:0.0", - "benzene:0.0", - "butadiene13:0.0", - "cat1:0.0", - "cl2:0.0", - "clno2:0.0", - "co:0.0", - "cres:0.0", - "cron:0.0", - "ech4:0.0", - "epox:0.0", - "eth:0.0", - "etha:0.0", - "ethy:0.0", - "etoh:0.0", - "facd:0.0", - "fmcl:0.0", - "form:0.0", - "form_primary:0.0", - "gly:0.0", - "glyd:0.0", - "h2o2:0.0", - "hcl:0.0", - "hg:0.0", - "hgiigas:0.0", - "hno3:0.0", - "hocl:0.0", - "hono:0.0", - "hpld:0.0", - "intr:0.0", - "iole:0.0", - "isop:0.0", - "ispd:0.0", - "ispx:0.0", - "ket:0.0", - "meoh:0.0", - "mepx:0.0", - "mgly:0.0", - "n2o5:0.0", - "naph:0.0", - "no:0.0", - "no2:0.0", - "no3:0.0", - "ntr1:0.0", - "ntr2:0.0", - "o3:0.0", - "ole:0.0", - "opan:0.0", - "open:0.0", - "opo3:0.0", - "pacd:0.0", - "pan:0.0", - "panx:0.0", - "par:0.0", - "pcvoc:0.0", - "pna:0.0", - "prpa:0.0", - "rooh:0.0", - "sesq:0.0", - "so2:0.0", - "soaalk:0.0", - "sulf:0.0", - "terp:0.0", - "tol:0.0", - "tolu:0.0", - "vivpo1:0.0", - "vlvoo1:0.0", - "vlvoo2:0.0", - 
"vlvpo1:0.0", - "vsvoo1:0.0", - "vsvoo2:0.0", - "vsvoo3:0.0", - "vsvpo1:0.0", - "vsvpo2:0.0", - "vsvpo3:0.0", - "xopn:0.0", - "xylmn:0.0", - "*:0.2", - ], - } - ) + gfs_physics_nml_dict.update({ + "cplaqm": True, + "cplocn2atm": False, + "fscav_aero": [ + "aacd:0.0", "acet:0.0", "acrolein:0.0", "acro_primary:0.0", "ald2:0.0", + "ald2_primary:0.0", "aldx:0.0", "benzene:0.0", "butadiene13:0.0", "cat1:0.0", + "cl2:0.0", "clno2:0.0", "co:0.0", "cres:0.0", "cron:0.0", + "ech4:0.0", "epox:0.0", "eth:0.0", "etha:0.0", "ethy:0.0", + "etoh:0.0", "facd:0.0", "fmcl:0.0", "form:0.0", "form_primary:0.0", + "gly:0.0", "glyd:0.0", "h2o2:0.0", "hcl:0.0", "hg:0.0", + "hgiigas:0.0", "hno3:0.0", "hocl:0.0", "hono:0.0", "hpld:0.0", + "intr:0.0", "iole:0.0", "isop:0.0", "ispd:0.0", "ispx:0.0", + "ket:0.0", "meoh:0.0", "mepx:0.0", "mgly:0.0", "n2o5:0.0", + "naph:0.0", "no:0.0", "no2:0.0", "no3:0.0", "ntr1:0.0", + "ntr2:0.0", "o3:0.0", "ole:0.0", "opan:0.0", "open:0.0", + "opo3:0.0", "pacd:0.0", "pan:0.0", "panx:0.0", "par:0.0", + "pcvoc:0.0", "pna:0.0", "prpa:0.0", "rooh:0.0", "sesq:0.0", + "so2:0.0", "soaalk:0.0", "sulf:0.0", "terp:0.0", "tol:0.0", + "tolu:0.0", "vivpo1:0.0", "vlvoo1:0.0", "vlvoo2:0.0", "vlvpo1:0.0", + "vsvoo1:0.0", "vsvoo2:0.0", "vsvoo3:0.0", "vsvpo1:0.0", "vsvpo2:0.0", + "vsvpo3:0.0", "xopn:0.0", "xylmn:0.0", "*:0.2" ] + }) settings["gfs_physics_nml"] = gfs_physics_nml_dict # @@ -605,7 +536,7 @@ def generate_FV3LAM_wflow( # the C-resolution of the grid), and this parameter is in most workflow # configurations is not known until the grid is created. # - if not expt_config["rocoto"]["tasks"].get("task_make_grid"): + if not expt_config['rocoto']['tasks'].get('task_make_grid'): set_fv3nml_sfc_climo_filenames(flatten_dict(expt_config), debug) @@ -714,11 +645,11 @@ def generate_FV3LAM_wflow( settings_str = cfg_to_yaml_str(settings) # - # ----------------------------------------------------------------------- + #----------------------------------------------------------------------- # # Generate namelist files with stochastic physics if needed # - # ----------------------------------------------------------------------- + #----------------------------------------------------------------------- # if any((DO_SPP, DO_SPPT, DO_SHUM, DO_SKEB, DO_LSM_SPP)): realize( @@ -727,7 +658,7 @@ def generate_FV3LAM_wflow( output_file=FV3_NML_STOCH_FP, output_format="nml", update_config=get_nml_config(settings), - ) + ) # # ----------------------------------------------------------------------- @@ -795,9 +726,7 @@ def generate_FV3LAM_wflow( return EXPTDIR -def setup_logging( - logfile: str = "log.generate_FV3LAM_wflow", debug: bool = False -) -> None: +def setup_logging(logfile: str = "log.generate_FV3LAM_wflow", debug: bool = False) -> None: """ Sets up logging, printing high-priority (INFO and higher) messages to screen, and printing all messages with detailed timing and routine info in the specified text file. 
@@ -808,7 +737,7 @@ def setup_logging( formatter = logging.Formatter("%(name)-22s %(levelname)-8s %(message)s") - fh = logging.FileHandler(logfile, mode="w") + fh = logging.FileHandler(logfile, mode='w') fh.setLevel(logging.DEBUG) fh.setFormatter(formatter) logging.getLogger().addHandler(fh) @@ -831,18 +760,13 @@ def setup_logging( if __name__ == "__main__": - # Parse arguments + #Parse arguments parser = argparse.ArgumentParser( - description="Script for setting up a forecast and creating a workflow" - "according to the parameters specified in the config file\n" - ) + description="Script for setting up a forecast and creating a workflow"\ + "according to the parameters specified in the config file\n") - parser.add_argument( - "-d", - "--debug", - action="store_true", - help="Script will be run in debug mode with more verbose output", - ) + parser.add_argument('-d', '--debug', action='store_true', + help='Script will be run in debug mode with more verbose output') pargs = parser.parse_args() USHdir = os.path.dirname(os.path.abspath(__file__)) @@ -852,7 +776,7 @@ def setup_logging( # experiment/workflow. try: expt_dir = generate_FV3LAM_wflow(USHdir, wflow_logfile, pargs.debug) - except: # pylint: disable=bare-except + except: # pylint: disable=bare-except logging.exception( dedent( f""" @@ -879,4 +803,4 @@ def setup_logging( ======================================================================== """ - ) + ) \ No newline at end of file diff --git a/ush/link_fix.py b/ush/link_fix.py index 80c3ed39a..8184eb431 100755 --- a/ush/link_fix.py +++ b/ush/link_fix.py @@ -208,12 +208,7 @@ def link_fix( f"C*{dot_or_uscore}oro_data.tile{tile_rgnl}.halo{nh0}.nc", f"C*{dot_or_uscore}oro_data.tile{tile_rgnl}.halo{nh4}.nc", ] - if ( - ccpp_phys_suite == "FV3_RAP" - or ccpp_phys_suite == "FV3_HRRR" - or ccpp_phys_suite == "FV3_GFS_v15_thompson_mynn_lam3km" - or ccpp_phys_suite == "FV3_GFS_v17_p8" - ): + if ccpp_phys_suite == "FV3_RAP" or ccpp_phys_suite == "FV3_HRRR" or ccpp_phys_suite == "FV3_GFS_v15_thompson_mynn_lam3km" or ccpp_phys_suite == "FV3_GFS_v17_p8": fns += [ f"C*{dot_or_uscore}oro_data_ss.tile{tile_rgnl}.halo{nh0}.nc", f"C*{dot_or_uscore}oro_data_ls.tile{tile_rgnl}.halo{nh0}.nc", @@ -423,4 +418,4 @@ def parse_args(argv): nhw=cfg["grid_params"]["NHW"], run_task=True, sfc_climo_fields=cfg["fixed_files"]["SFC_CLIMO_FIELDS"], - ) + ) \ No newline at end of file diff --git a/ush/setup.py b/ush/setup.py index 8177af919..30a99be96 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -47,7 +47,6 @@ from set_gridparams_GFDLgrid import set_gridparams_GFDLgrid from uwtools.api.config import get_yaml_config - def load_config_for_setup(ushdir, default_config, user_config): """Load in the default, machine, and user configuration files into Python dictionaries. Return the combined experiment dictionary. 
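Before the hunks below, a sketch of the layering idea the docstring describes: later sources override earlier ones, with nested sections merged rather than replaced wholesale. The helper and sample data here are hypothetical stand-ins, not the SRW `update_dict`:

```python
def deep_update(base: dict, new: dict) -> None:
    """Merge `new` into `base`: nested dicts merge, scalar values override."""
    for key, val in new.items():
        if isinstance(val, dict) and isinstance(base.get(key), dict):
            deep_update(base[key], val)
        else:
            base[key] = val

# Hypothetical config layers, lowest to highest precedence:
defaults_cfg = {"workflow": {"CCPP_PHYS_SUITE": "FV3_GFS_v16", "DEBUG": False}}
machine_cfg = {"platform": {"PARTITION_DEFAULT": "batch"}}
user_cfg = {"workflow": {"CCPP_PHYS_SUITE": "FV3_HRRR"}}

expt_config: dict = {}
for layer in (defaults_cfg, machine_cfg, user_cfg):
    deep_update(expt_config, layer)

assert expt_config["workflow"]["CCPP_PHYS_SUITE"] == "FV3_HRRR"  # user layer wins
assert expt_config["workflow"]["DEBUG"] is False                 # defaults survive the merge
```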
@@ -79,9 +78,7 @@ def load_config_for_setup(ushdir, default_config, user_config): try: cfg_u = load_config_file(user_config) - logging.debug( - f"Read in the following values from YAML config file {user_config}:\n" - ) + logging.debug(f"Read in the following values from YAML config file {user_config}:\n") logging.debug(cfg_u) except: errmsg = dedent( @@ -107,9 +104,7 @@ def load_config_for_setup(ushdir, default_config, user_config): errmsg = f"Invalid key(s) specified in {user_config}:\n" for entry in invalid: errmsg = errmsg + f"{entry} = {invalid[entry]}\n" - errmsg = ( - errmsg + f"\nCheck {default_config} for allowed user-specified variables\n" - ) + errmsg = errmsg + f"\nCheck {default_config} for allowed user-specified variables\n" raise Exception(errmsg) # Mandatory variables *must* be set in the user's config; the default value is invalid @@ -149,36 +144,36 @@ def load_config_for_setup(ushdir, default_config, user_config): # Load the constants file cfg_c = load_config_file(os.path.join(ushdir, "constants.yaml")) + # Load the rocoto workflow default file - cfg_wflow = load_config_file( - os.path.join(ushdir, os.pardir, "parm", "wflow", "default_workflow.yaml") - ) + cfg_wflow = load_config_file(os.path.join(ushdir, os.pardir, "parm", + "wflow", "default_workflow.yaml")) # Takes care of removing any potential "null" entries, i.e., # unsetting a default value from an anchored default_task update_dict(cfg_wflow, cfg_wflow) + # Take any user-specified taskgroups entry here. - taskgroups = cfg_u.get("rocoto", {}).get("tasks", {}).get("taskgroups") + taskgroups = cfg_u.get('rocoto', {}).get('tasks', {}).get('taskgroups') if taskgroups: - cfg_wflow["rocoto"]["tasks"]["taskgroups"] = taskgroups + cfg_wflow['rocoto']['tasks']['taskgroups'] = taskgroups # Extend yaml here on just the rocoto section to include the # appropriate groups of tasks extend_yaml(cfg_wflow) + # Put the entries expanded under taskgroups in tasks rocoto_tasks = cfg_wflow["rocoto"]["tasks"] - cfg_wflow["rocoto"]["tasks"] = yaml.load( - rocoto_tasks.pop("taskgroups"), Loader=yaml.SafeLoader - ) + cfg_wflow["rocoto"]["tasks"] = yaml.load(rocoto_tasks.pop("taskgroups"),Loader=yaml.SafeLoader) # Update wflow config from user one more time to make sure any of # the "null" settings are removed, i.e., tasks turned off. 
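The `update_dict` call that opens the next hunk is what implements that unsetting: a `null` in the user's rocoto section (Python `None`) deletes the corresponding default entry instead of overriding it. A toy illustration, with a hypothetical mini-version of the helper and made-up task names:

```python
def update_dict(base: dict, new: dict) -> None:
    """Hypothetical mini-version: None unsets a key, dicts merge, scalars override."""
    for key, val in new.items():
        if val is None:
            base.pop(key, None)
        elif isinstance(val, dict) and isinstance(base.get(key), dict):
            update_dict(base[key], val)
        else:
            base[key] = val

tasks = {"task_run_post": {"command": "..."}, "task_plot_allvars": {"command": "..."}}
user_rocoto = {"task_plot_allvars": None}  # user config says: task_plot_allvars: null
update_dict(tasks, user_rocoto)
assert "task_plot_allvars" not in tasks    # the task is now turned off
```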
- update_dict(cfg_u.get("rocoto", {}), cfg_wflow["rocoto"]) + update_dict(cfg_u.get('rocoto', {}), cfg_wflow["rocoto"]) def add_jobname(tasks): - """Add the jobname entry for all the tasks in the workflow""" + """ Add the jobname entry for all the tasks in the workflow """ if not isinstance(tasks, dict): return @@ -187,13 +182,13 @@ def add_jobname(tasks): if task_type == "task": # Use the provided attribute if it is present, otherwise use # the name in the key - tasks[task]["jobname"] = ( - task_settings.get("attrs", {}).get("name") - or task.split("_", maxsplit=1)[1] - ) + tasks[task]["jobname"] = \ + task_settings.get("attrs", {}).get("name") or \ + task.split("_", maxsplit=1)[1] elif task_type == "metatask": add_jobname(task_settings) + # Add jobname entry to each remaining task add_jobname(cfg_wflow["rocoto"]["tasks"]) @@ -223,15 +218,13 @@ def add_jobname(tasks): update_dict(cfg_d, cfg_d) # Load one more if running Coupled AQM - if cfg_d["cpl_aqm_parm"]["CPL_AQM"]: + if cfg_d['cpl_aqm_parm']['CPL_AQM']: cfg_aqm = get_yaml_config(Path(ushdir, "config_defaults_aqm.yaml")) update_dict(cfg_aqm, cfg_d) # Load CCPP suite-specific settings - ccpp_suite = cfg_d["workflow"]["CCPP_PHYS_SUITE"] - ccpp_cfg = get_yaml_config(Path(ushdir, "ccpp_suites_defaults.yaml")).get( - ccpp_suite, {} - ) + ccpp_suite = cfg_d['workflow']['CCPP_PHYS_SUITE'] + ccpp_cfg = get_yaml_config(Path(ushdir, "ccpp_suites_defaults.yaml")).get(ccpp_suite, {}) update_dict(ccpp_cfg, cfg_d) # Load external model-specific settings @@ -304,6 +297,7 @@ def add_jobname(tasks): def set_srw_paths(ushdir, expt_config): + """ Generate a dictionary of directories that describe the SRW structure, i.e., where SRW is installed, and the paths to @@ -444,6 +438,7 @@ def setup(USHdir, user_config_fn="config.yaml", debug: bool = False): fcst_len_hrs_max = {fcst_len_hrs_max}""" ) + # # ----------------------------------------------------------------------- # @@ -507,11 +502,11 @@ def setup(USHdir, user_config_fn="config.yaml", debug: bool = False): # ----------------------------------------------------------------------- # - rocoto_config = expt_config.get("rocoto", {}) + rocoto_config = expt_config.get('rocoto', {}) rocoto_tasks = rocoto_config.get("tasks") - run_make_grid = rocoto_tasks.get("task_make_grid") is not None - run_make_orog = rocoto_tasks.get("task_make_orog") is not None - run_make_sfc_climo = rocoto_tasks.get("task_make_sfc_climo") is not None + run_make_grid = rocoto_tasks.get('task_make_grid') is not None + run_make_orog = rocoto_tasks.get('task_make_orog') is not None + run_make_sfc_climo = rocoto_tasks.get('task_make_sfc_climo') is not None # Necessary tasks are turned on pregen_basedir = expt_config["platform"].get("DOMAIN_PREGEN_BASEDIR") @@ -539,7 +534,7 @@ def setup(USHdir, user_config_fn="config.yaml", debug: bool = False): ) def remove_tag(tasks, tag): - """Remove the tag for all the tasks in the workflow""" + """ Remove the tag for all the tasks in the workflow """ if not isinstance(tasks, dict): return @@ -555,10 +550,10 @@ def remove_tag(tasks, tag): if remove_memory: remove_tag(rocoto_tasks, "memory") - for part in ["PARTITION_HPSS", "PARTITION_DEFAULT", "PARTITION_FCST"]: + for part in ['PARTITION_HPSS', 'PARTITION_DEFAULT', 'PARTITION_FCST']: partition = expt_config["platform"].get(part) if not partition: - remove_tag(rocoto_tasks, "partition") + remove_tag(rocoto_tasks, 'partition') # When not running subhourly post, remove those tasks, if they exist if not expt_config.get("task_run_post", 
{}).get("SUB_HOURLY_POST"): @@ -576,37 +571,29 @@ def remove_tag(tasks, tag): vx_metatasks_all = {} vx_fields_all["CCPA"] = ["APCP"] - vx_metatasks_all["CCPA"] = [ - "metatask_PcpCombine_obs", - "metatask_PcpCombine_fcst_APCP_all_accums_all_mems", - "metatask_GridStat_CCPA_all_accums_all_mems", - "metatask_GenEnsProd_EnsembleStat_CCPA", - "metatask_GridStat_CCPA_ensmeanprob_all_accums", - ] + vx_metatasks_all["CCPA"] = ["metatask_PcpCombine_obs", + "metatask_PcpCombine_fcst_APCP_all_accums_all_mems", + "metatask_GridStat_CCPA_all_accums_all_mems", + "metatask_GenEnsProd_EnsembleStat_CCPA", + "metatask_GridStat_CCPA_ensmeanprob_all_accums"] vx_fields_all["NOHRSC"] = ["ASNOW"] - vx_metatasks_all["NOHRSC"] = [ - "task_get_obs_nohrsc", - "metatask_PcpCombine_fcst_ASNOW_all_accums_all_mems", - "metatask_GridStat_NOHRSC_all_accums_all_mems", - "metatask_GenEnsProd_EnsembleStat_NOHRSC", - "metatask_GridStat_NOHRSC_ensmeanprob_all_accums", - ] + vx_metatasks_all["NOHRSC"] = ["task_get_obs_nohrsc", + "metatask_PcpCombine_fcst_ASNOW_all_accums_all_mems", + "metatask_GridStat_NOHRSC_all_accums_all_mems", + "metatask_GenEnsProd_EnsembleStat_NOHRSC", + "metatask_GridStat_NOHRSC_ensmeanprob_all_accums"] vx_fields_all["MRMS"] = ["REFC", "RETOP"] - vx_metatasks_all["MRMS"] = [ - "metatask_GridStat_MRMS_all_mems", - "metatask_GenEnsProd_EnsembleStat_MRMS", - "metatask_GridStat_MRMS_ensprob", - ] + vx_metatasks_all["MRMS"] = ["metatask_GridStat_MRMS_all_mems", + "metatask_GenEnsProd_EnsembleStat_MRMS", + "metatask_GridStat_MRMS_ensprob"] vx_fields_all["NDAS"] = ["ADPSFC", "ADPUPA"] - vx_metatasks_all["NDAS"] = [ - "task_run_MET_Pb2nc_obs", - "metatask_PointStat_NDAS_all_mems", - "metatask_GenEnsProd_EnsembleStat_NDAS", - "metatask_PointStat_NDAS_ensmeanprob", - ] + vx_metatasks_all["NDAS"] = ["task_run_MET_Pb2nc_obs", + "metatask_PointStat_NDAS_all_mems", + "metatask_GenEnsProd_EnsembleStat_NDAS", + "metatask_PointStat_NDAS_ensmeanprob"] # Get the vx fields specified in the experiment configuration. vx_fields_config = expt_config["verification"]["VX_FIELDS"] @@ -615,27 +602,23 @@ def remove_tag(tasks, tag): # for all observation types. if not vx_fields_config: metatask = "metatask_check_post_output_all_mems" - rocoto_config["tasks"].pop(metatask) + rocoto_config['tasks'].pop(metatask) # If for a given obstype no fields are specified, remove all vx metatasks # for that obstype. for obstype in vx_fields_all: - vx_fields_obstype = [ - field for field in vx_fields_config if field in vx_fields_all[obstype] - ] + vx_fields_obstype = [field for field in vx_fields_config if field in vx_fields_all[obstype]] if not vx_fields_obstype: for metatask in vx_metatasks_all[obstype]: - if metatask in rocoto_config["tasks"]: - logging.info( - dedent( - f""" + if metatask in rocoto_config['tasks']: + logging.info(dedent( + f""" Removing verification [meta]task "{metatask}" from workflow since no fields belonging to observation type "{obstype}" are specified for verification.""" - ) - ) - rocoto_config["tasks"].pop(metatask) + )) + rocoto_config['tasks'].pop(metatask) # # ----------------------------------------------------------------------- @@ -701,13 +684,13 @@ def get_location(xcs, fmt, expt_cfg): {data_key} = \"{basedir}\"''' ) + # Make sure the vertical coordinate file for both make_lbcs and # make_ics is the same. 
- if ics_vcoord := expt_config.get("task_make_ics", {}).get("VCOORD_FILE") != ( - lbcs_vcoord := expt_config.get("task_make_lbcs", {}).get("VCOORD_FILE") - ): - raise ValueError( - f""" + if ics_vcoord := expt_config.get("task_make_ics", {}).get("VCOORD_FILE") != \ + (lbcs_vcoord := expt_config.get("task_make_lbcs", {}).get("VCOORD_FILE")): + raise ValueError( + f""" The VCOORD_FILE must be set to the same value for both the make_ics task and the make_lbcs task. They are currently set to: @@ -718,7 +701,7 @@ def get_location(xcs, fmt, expt_cfg): make_lbcs: VCOORD_FILE: {lbcs_vcoord} """ - ) + ) # # ----------------------------------------------------------------------- @@ -737,16 +720,14 @@ def get_location(xcs, fmt, expt_cfg): dt = fcst_config.get("DT_ATMOS") if dt: if dt > 40: - logger.warning( - dedent( - f""" + logger.warning(dedent( + f""" WARNING: CCPP suite {workflow_config["CCPP_PHYS_SUITE"]} requires short time step regardless of grid resolution. The user-specified value DT_ATMOS = {fcst_config.get("DT_ATMOS")} may result in CFL violations or other errors! """ - ) - ) + )) # Gather the pre-defined grid parameters, if needed if workflow_config.get("PREDEF_GRID_NAME"): @@ -767,19 +748,14 @@ def get_location(xcs, fmt, expt_cfg): continue # DT_ATMOS needs special treatment based on CCPP suite elif param == "DT_ATMOS": - if ( - workflow_config["CCPP_PHYS_SUITE"] in hires_ccpp_suites - and grid_params[param] > 40 - ): - logger.warning( - dedent( - f""" + if workflow_config["CCPP_PHYS_SUITE"] in hires_ccpp_suites and grid_params[param] > 40: + logger.warning(dedent( + f""" WARNING: CCPP suite {workflow_config["CCPP_PHYS_SUITE"]} requires short time step regardless of grid resolution; setting DT_ATMOS to 40.\n This value can be overwritten in the user config file. """ - ) - ) + )) fcst_config[param] = 40 else: fcst_config[param] = value @@ -808,19 +784,19 @@ def get_location(xcs, fmt, expt_cfg): if 24 / incr_cycl_freq != len(fcst_len_cycl): # Also allow for the possibility that the user is running # cycles for less than a day: - num_cycles = len( - set_cycle_dates(date_first_cycl, date_last_cycl, incr_cycl_freq) - ) + num_cycles = len(set_cycle_dates( + date_first_cycl, + date_last_cycl, + incr_cycl_freq)) if num_cycles != len(fcst_len_cycl): - logger.error( - f""" The number of entries in FCST_LEN_CYCL does + logger.error(f""" The number of entries in FCST_LEN_CYCL does not divide evenly into a 24 hour day or the number of cycles in your experiment! FCST_LEN_CYCL = {fcst_len_cycl} """ - ) - raise ValueError + ) + raise ValueError # Build cycledefs entries for the long forecasts # Short forecast cycles will be relevant to all intended @@ -834,7 +810,7 @@ def get_location(xcs, fmt, expt_cfg): # Find the entries that match the long forecast, and map them to # their time of day. 
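With hypothetical values, the enumerate/comprehension lines just below reduce to simple arithmetic:

```python
fcst_len_cycl = [6, 6, 6, 48]  # hypothetical: forecast length per daily cycle
incr_cycl_freq = 6             # hours between cycle start times

long_fcst_len = max(fcst_len_cycl)                                             # 48
long_indices = [i for i, x in enumerate(fcst_len_cycl) if x == long_fcst_len]  # [3]
long_cycles = [i * incr_cycl_freq for i in long_indices]                       # [18]
# -> only the 18Z cycle gets a "long_forecast" cycledef entry
```

Each matching hour then contributes one `f"{first}00 {last}00 24:00:00"` cycledef line, i.e. a daily repeat at that hour between the first and last cycle dates.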
long_fcst_len = max(fcst_len_cycl) - long_indices = [i for i, x in enumerate(fcst_len_cycl) if x == long_fcst_len] + long_indices = [i for i,x in enumerate(fcst_len_cycl) if x == long_fcst_len] long_cycles = [i * incr_cycl_freq for i in long_indices] # add one forecast entry per cycle per day @@ -843,9 +819,9 @@ def get_location(xcs, fmt, expt_cfg): for hh in long_cycles: first = date_first_cycl.replace(hour=hh).strftime("%Y%m%d%H") last = date_last_cycl.replace(hour=hh).strftime("%Y%m%d%H") - fcst_cdef.append(f"{first}00 {last}00 24:00:00") + fcst_cdef.append(f'{first}00 {last}00 24:00:00') - rocoto_config["cycledefs"]["long_forecast"] = fcst_cdef + rocoto_config['cycledefs']['long_forecast'] = fcst_cdef # check the availability of restart intervals for restart capability of forecast do_fcst_restart = fcst_config.get("DO_FCST_RESTART") @@ -1067,6 +1043,7 @@ def get_location(xcs, fmt, expt_cfg): # ----------------------------------------------------------------------- # + # If using external CRTM fix files to allow post-processing of synthetic # satellite products from the UPP, make sure the CRTM fix file directory exists. if global_sect.get("USE_CRTM"): @@ -1126,9 +1103,8 @@ def get_location(xcs, fmt, expt_cfg): # Update the rocoto string for the fcst output location if # running an ensemble in nco mode if global_sect["DO_ENSEMBLE"]: - rocoto_config["entities"][ - "FCST_DIR" - ] = "{{ nco.PTMP }}/{{ nco.envir_default }}/tmp/run_fcst_mem#mem#.{{ workflow.WORKFLOW_ID }}_@Y@m@d@H" + rocoto_config["entities"]["FCST_DIR"] = \ + "{{ nco.PTMP }}/{{ nco.envir_default }}/tmp/run_fcst_mem#mem#.{{ workflow.WORKFLOW_ID }}_@Y@m@d@H" # create experiment dir mkdir_vrfy(f' -p "{exptdir}"') @@ -1199,14 +1175,13 @@ def get_location(xcs, fmt, expt_cfg): # ----------------------------------------------------------------------- # # Get list of all top-level tasks and metatasks in the workflow. - task_defs = rocoto_config.get("tasks") + task_defs = rocoto_config.get('tasks') all_tasks = [task for task in task_defs] # Get list of all valid top-level tasks and metatasks pertaining to ensemble # verification. ens_vx_task_defns = load_config_file( - os.path.join(USHdir, os.pardir, "parm", "wflow", "verify_ens.yaml") - ) + os.path.join(USHdir, os.pardir, "parm", "wflow", "verify_ens.yaml")) ens_vx_valid_tasks = [task for task in ens_vx_task_defns] # Get list of all valid top-level tasks and metatasks in the workflow that @@ -1219,24 +1194,14 @@ def get_location(xcs, fmt, expt_cfg): do_ensemble = global_sect["DO_ENSEMBLE"] if (not do_ensemble) and ens_vx_tasks: task_str = " " + "\n ".join(ens_vx_tasks) - msg = dedent( - f""" + msg = dedent(f""" Ensemble verification can not be run unless running in ensemble mode: DO_ENSEMBLE = \"{do_ensemble}\" Ensemble verification tasks: - """ - ) - msg = "".join( - [ - msg, - task_str, - dedent( - f""" + """) + msg = "".join([msg, task_str, dedent(f""" Please set DO_ENSEMBLE to True or remove ensemble vx tasks from the - workflow.""" - ), - ] - ) + workflow.""")]) raise Exception(msg) # @@ -1274,14 +1239,18 @@ def dict_find(user_dict, substring): run_make_ics = dict_find(rocoto_tasks, "task_make_ics") run_make_lbcs = dict_find(rocoto_tasks, "task_make_lbcs") run_run_fcst = dict_find(rocoto_tasks, "task_run_fcst") - run_any_coldstart_task = run_make_ics or run_make_lbcs or run_run_fcst + run_any_coldstart_task = run_make_ics or \ + run_make_lbcs or \ + run_run_fcst # Flags for creating symlinks to pre-generated grid, orography, and sfc_climo files. 
# These consider dependencies of other tasks on each pre-processing task. create_symlinks_to_pregen_files = { - "GRID": (not run_make_grid) - and (run_make_orog or run_make_sfc_climo or run_any_coldstart_task), - "OROG": (not run_make_orog) and (run_make_sfc_climo or run_any_coldstart_task), - "SFC_CLIMO": (not run_make_sfc_climo) and (run_make_ics or run_make_lbcs), + "GRID": (not run_make_grid) and \ + (run_make_orog or run_make_sfc_climo or run_any_coldstart_task), + "OROG": (not run_make_orog) and \ + (run_make_sfc_climo or run_any_coldstart_task), + "SFC_CLIMO": (not run_make_sfc_climo) and \ + (run_make_ics or run_make_lbcs), } fixed_files = expt_config["fixed_files"] @@ -1370,7 +1339,7 @@ def dict_find(user_dict, substring): # if fcst_config["WRITE_DOPOST"]: # Turn off run_post - task_name = "metatask_run_ens_post" + task_name = 'metatask_run_ens_post' removed_task = task_defs.pop(task_name, None) if removed_task: logger.warning( @@ -1395,49 +1364,33 @@ def dict_find(user_dict, substring): ccpp_suite_xml = load_xml_file(workflow_config["CCPP_PHYS_SUITE_IN_CCPP_FP"]) # Need to track if we are using RUC LSM for the make_ics step - workflow_config["SDF_USES_RUC_LSM"] = has_tag_with_value( - ccpp_suite_xml, "scheme", "lsm_ruc" - ) + workflow_config["SDF_USES_RUC_LSM"] = has_tag_with_value(ccpp_suite_xml, "scheme", "lsm_ruc") # Thompson microphysics needs additional input files and namelist settings - workflow_config["SDF_USES_THOMPSON_MP"] = has_tag_with_value( - ccpp_suite_xml, "scheme", "mp_thompson" - ) + workflow_config["SDF_USES_THOMPSON_MP"] = has_tag_with_value(ccpp_suite_xml, "scheme", "mp_thompson") if workflow_config["SDF_USES_THOMPSON_MP"]: - - logging.debug( - f'Selected CCPP suite ({workflow_config["CCPP_PHYS_SUITE"]}) uses Thompson MP' - ) - logging.debug(f"Setting up links for additional fix files") + + logging.debug(f'Selected CCPP suite ({workflow_config["CCPP_PHYS_SUITE"]}) uses Thompson MP') + logging.debug(f'Setting up links for additional fix files') # If the model ICs or BCs are not from RAP or HRRR, they will not contain aerosol # climatology data needed by the Thompson scheme, so we need to provide a separate file - if get_extrn_ics["EXTRN_MDL_NAME_ICS"] not in ["HRRR", "RAP"] or get_extrn_lbcs[ - "EXTRN_MDL_NAME_LBCS" - ] not in ["HRRR", "RAP"]: - fixed_files["THOMPSON_FIX_FILES"].append( - workflow_config["THOMPSON_MP_CLIMO_FN"] - ) + if (get_extrn_ics["EXTRN_MDL_NAME_ICS"] not in ["HRRR", "RAP"] or + get_extrn_lbcs["EXTRN_MDL_NAME_LBCS"] not in ["HRRR", "RAP"]): + fixed_files["THOMPSON_FIX_FILES"].append(workflow_config["THOMPSON_MP_CLIMO_FN"]) # Add thompson-specific fix files to CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING and # FIXgsm_FILES_TO_COPY_TO_FIXam; see parm/fixed_files_mapping.yaml for more info on these variables - fixed_files["FIXgsm_FILES_TO_COPY_TO_FIXam"].extend( - fixed_files["THOMPSON_FIX_FILES"] - ) + fixed_files["FIXgsm_FILES_TO_COPY_TO_FIXam"].extend(fixed_files["THOMPSON_FIX_FILES"]) for fix_file in fixed_files["THOMPSON_FIX_FILES"]: - fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"].append( - f"{fix_file} | {fix_file}" - ) + fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"].append(f"{fix_file} | {fix_file}") + + logging.debug(f'New fix file list:\n{fixed_files["FIXgsm_FILES_TO_COPY_TO_FIXam"]=}') + logging.debug(f'New fix file mapping:\n{fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"]=}') - logging.debug( - f'New fix file list:\n{fixed_files["FIXgsm_FILES_TO_COPY_TO_FIXam"]=}' - ) - logging.debug( - f'New fix file 
mapping:\n{fixed_files["CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING"]=}' - ) # # ----------------------------------------------------------------------- @@ -1486,6 +1439,7 @@ def dict_find(user_dict, substring): var_defns_cfg["workflow"][dates] = date_to_str(var_defns_cfg["workflow"][dates]) var_defns_cfg.dump(Path(global_var_defns_fp)) + # # ----------------------------------------------------------------------- # @@ -1500,33 +1454,28 @@ def dict_find(user_dict, substring): if v is None or v == "": continue vkey = "valid_vals_" + k - if vkey in cfg_v: - if type(v) == list: - if not (all(ele in cfg_v[vkey] for ele in v)): + if (vkey in cfg_v): + if (type(v) == list): + if not(all(ele in cfg_v[vkey] for ele in v)): raise Exception( - dedent( - f""" + dedent(f""" The variable {k} = {v} in the user's configuration has at least one invalid value. Possible values are: {k} = {cfg_v[vkey]}""" - ) - ) + )) else: if not (v in cfg_v[vkey]): raise Exception( - dedent( - f""" + dedent(f""" The variable {k} = {v} ({type(v)}) in the user's configuration does not have a valid value. Possible values are: {k} = {cfg_v[vkey]}""" - ) - ) + )) return expt_config - def clean_rocoto_dict(rocotodict): """Removes any invalid entries from rocotodict. Examples of invalid entries are: @@ -1540,9 +1489,7 @@ def clean_rocoto_dict(rocotodict): elif key.split("_", maxsplit=1)[0] in ["task"]: if not rocotodict[key].get("command"): popped = rocotodict.pop(key) - logging.warning( - f"Invalid task {key} removed due to empty/unset run command" - ) + logging.warning(f"Invalid task {key} removed due to empty/unset run command") logging.debug(f"Removed entry:\n{popped}") # Loop 2: search for metatasks with no tasks in them @@ -1552,7 +1499,7 @@ def clean_rocoto_dict(rocotodict): for key2 in list(rocotodict[key].keys()): if key2.split("_", maxsplit=1)[0] == "metatask": clean_rocoto_dict(rocotodict[key][key2]) - # After above recursion, any nested empty metatasks will have popped themselves + #After above recursion, any nested empty metatasks will have popped themselves if rocotodict[key].get(key2): valid = True elif key2.split("_", maxsplit=1)[0] == "task": @@ -1563,6 +1510,7 @@ def clean_rocoto_dict(rocotodict): logging.debug(f"Removed entry:\n{popped}") + # # ----------------------------------------------------------------------- # @@ -1572,4 +1520,4 @@ def clean_rocoto_dict(rocotodict): # if __name__ == "__main__": USHdir = os.path.dirname(os.path.abspath(__file__)) - setup(USHdir) + setup(USHdir) \ No newline at end of file From d6c37867272f83f2b97fa31e4a3145f502d815b9 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Thu, 19 Sep 2024 21:03:30 +0000 Subject: [PATCH 14/47] fix local issues --- scripts/chgres_cube.py | 15 ++++++++------- ush/config_defaults.yaml | 2 ++ uwtools | 2 +- 3 files changed, 11 insertions(+), 8 deletions(-) diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py index b0abec011..2e434ea8c 100644 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -9,9 +9,9 @@ from copy import deepcopy from pathlib import Path -from uwtools.api.chgres_cube import Chgres_Cube +from uwtools.api.chgres_cube import ChgresCube from uwtools.api.config import get_sh_config, get_yaml_config -from uwtools.api.file import link as uwlink +from uwtools.api.fs import link as uwlink parser = ArgumentParser( @@ -49,15 +49,12 @@ expt_config = get_yaml_config(args.config_file) chgres_cube_config = expt_config[args.key_path] -rundir = Path(chgres_cube_config["rundir"]) -print(f"Will run in {rundir}") - CRES = 
expt_config["workflow"]["CRES"] os.environ["CRES"] = CRES # Extract driver config from experiment config -chgres_cube_driver = Chgres_Cube( +chgres_cube_driver = ChgresCube( config=args.config_file, cycle=args.cycle, key_path=[args.key_path], @@ -65,6 +62,8 @@ # update fn_atm and fn_sfc for ics task if args.key_path == "task_make_ics": + rundir = Path(chgres_cube_config["task_make_ics"]["rundir"]) + print(f"Will run in {rundir}") varsfilepath = chgres_cube_driver.config["task_make_ics"][ "input_files_metadata_path" ] @@ -78,6 +77,8 @@ # Loop the run of chgres_cube for the forecast length if lbcs else: + rundir = Path(chgres_cube_config["task_make_lbcs"]["rundir"]) + print(f"Will run in {rundir}") fn_sfc = "" num_fhrs = chgres_cube_driver.config["workflow"]["FCST_LEN_HRS"] bcgrp10 = 0 @@ -92,7 +93,7 @@ "config" ]["atm_files_input_grid"] = fn_atm # reinstantiate driver - chgres_cube_driver = Chgres_Cube( + chgres_cube_driver = ChgresCube( config=expt_config, cycle=args.cycle, key_path=[args.key_path], diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index b61ff5525..76bcc848b 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -1695,6 +1695,7 @@ task_make_ics: FVCOM_FILE: "fvcom.nc" VCOORD_FILE: "{{ workflow.FIXam }}/global_hyblev.l65.txt" #------------------------------------------------------------------------ + rundir: '{{ workflow.EXPTDIR }}/make_ics' input_files_metadata_path: "{{ task_get_extrn_ics.rundir }}/{{ workflow.EXTRN_MDL_VAR_DEFNS_FN }}" chgres_cube: namelist: @@ -1756,6 +1757,7 @@ task_make_lbcs: OMP_STACKSIZE_MAKE_LBCS: "1024m" VCOORD_FILE: "{{ workflow.FIXam }}/global_hyblev.l65.txt" #------------------------------------------------------------------------ + rundir: '{{ workflow.EXPTDIR }}/make_lbcs' chgres_cube: namelist: update_values: diff --git a/uwtools b/uwtools index 33766a90a..b7ed02f20 160000 --- a/uwtools +++ b/uwtools @@ -1 +1 @@ -Subproject commit 33766a90a9b9743f6e18c7d39ad5f59701cbeacc +Subproject commit b7ed02f20e87a92cc5a17facff17bafb53874e1b From 4eff96ad449f2e143b9686fd70993e43af0562f0 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Thu, 19 Sep 2024 16:20:01 -0500 Subject: [PATCH 15/47] fix YAML formatting --- scripts/chgres_cube.py | 4 ++-- ush/config_defaults.yaml | 38 +++++++++++++++++++++++--------- ush/external_model_defaults.yaml | 8 +++---- 3 files changed, 34 insertions(+), 16 deletions(-) diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py index 2e434ea8c..8e7732d16 100644 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -62,7 +62,7 @@ # update fn_atm and fn_sfc for ics task if args.key_path == "task_make_ics": - rundir = Path(chgres_cube_config["task_make_ics"]["rundir"]) + rundir = Path(chgres_cube_config["task_make_ics"]["chgres_cube"]["rundir"]) print(f"Will run in {rundir}") varsfilepath = chgres_cube_driver.config["task_make_ics"][ "input_files_metadata_path" @@ -77,7 +77,7 @@ # Loop the run of chgres_cube for the forecast length if lbcs else: - rundir = Path(chgres_cube_config["task_make_lbcs"]["rundir"]) + rundir = Path(chgres_cube_config["task_make_lbcs"]["chgres_cube"]["rundir"]) print(f"Will run in {rundir}") fn_sfc = "" num_fhrs = chgres_cube_driver.config["workflow"]["FCST_LEN_HRS"] diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 76bcc848b..7c08b43e4 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -1695,9 +1695,17 @@ task_make_ics: FVCOM_FILE: "fvcom.nc" VCOORD_FILE: "{{ workflow.FIXam }}/global_hyblev.l65.txt" 
#------------------------------------------------------------------------ - rundir: '{{ workflow.EXPTDIR }}/make_ics' input_files_metadata_path: "{{ task_get_extrn_ics.rundir }}/{{ workflow.EXTRN_MDL_VAR_DEFNS_FN }}" chgres_cube: + execution: + executable: "{{ user.EXECdir }}/chgres_cube" + envcmds: + - module use {{ user.HOMEdir }}/modulefiles + - module load build_{{ user.MACHINE|lower }}_{{ workflow.COMPILER }} + mpiargs: + - "--export=NONE" + mpicmd: '{{ platform.BATCH_RUN_CMD }}' + threads: 1 namelist: update_values: config: @@ -1718,12 +1726,12 @@ task_make_ics: convert_sfc: true convert_nst: true regional: 1 - halo_bndy: "{{ constants.NH4 }}" - halo_blend: "{{ global.HALO_BLEND }}" + halo_bndy: !int "{{ constants.NH4 }}" + halo_blend: !int "{{ global.HALO_BLEND }}" input_type: "gaussian_nemsio" external_model: "FV3GFS" - tracers_input: "[\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"]" - tracers: "[\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"]" + tracers_input: [\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"] + tracers: [\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"] nsoill_out: 4 vgtyp_from_climo: true sotyp_from_climo: true @@ -1731,6 +1739,7 @@ task_make_ics: minmax_vgfrc_from_climo: true lai_from_climo: true tg3_from_soil: false + rundir: '{{ workflow.EXPTDIR }}/make_ics' #---------------------------- # MAKE LBCS config parameters @@ -1757,8 +1766,16 @@ task_make_lbcs: OMP_STACKSIZE_MAKE_LBCS: "1024m" VCOORD_FILE: "{{ workflow.FIXam }}/global_hyblev.l65.txt" #------------------------------------------------------------------------ - rundir: '{{ workflow.EXPTDIR }}/make_lbcs' chgres_cube: + execution: + executable: "{{ user.EXECdir }}/chgres_cube" + envcmds: + - module use {{ user.HOMEdir }}/modulefiles + - module load build_{{ user.MACHINE|lower }}_{{ workflow.COMPILER }} + mpiargs: + - "--export=NONE" + mpicmd: '{{ platform.BATCH_RUN_CMD }}' + threads: 1 namelist: update_values: config: @@ -1776,12 +1793,13 @@ task_make_lbcs: cycle_hour: !int "{{ cycle.strftime('%H') }}" convert_atm: true regional: 2 - halo_bndy: "{{ constants.NH4 }}" - halo_blend: "{{ global.HALO_BLEND }}" + halo_bndy: !int "{{ constants.NH4 }}" + halo_blend: !int "{{ global.HALO_BLEND }}" input_type: "gaussian_nemsio" external_model: "FV3GFS" - tracers_input: "[\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"]" - tracers: "[\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"]" + tracers_input: [\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"] + tracers: [\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"] + rundir: '{{ workflow.EXPTDIR }}/make_lbcs' #---------------------------- diff --git a/ush/external_model_defaults.yaml b/ush/external_model_defaults.yaml index c3308887e..7c86b949b 100644 --- a/ush/external_model_defaults.yaml +++ b/ush/external_model_defaults.yaml @@ -14,15 +14,15 @@ GSMGFS: input_type: "gfs_gaussian_nemsio" external_model: "GSMGFS" convert_nst: False - tracers_input: "[\"spfh\",\"clwmr\",\"o3mr\"]" - tracers: "[\"sphum\",\"liq_wat\",\"o3mr\"]" + tracers_input: [\"spfh\",\"clwmr\",\"o3mr\"] + tracers: [\"sphum\",\"liq_wat\",\"o3mr\"] task_make_lbcs: chgres_cube: input_type: "gfs_gaussian_nemsio" external_model: "GSMGFS" convert_nst: False - tracers_input: "[\"spfh\",\"clwmr\",\"o3mr\"]" - tracers: "[\"sphum\",\"liq_wat\",\"o3mr\"]" + tracers_input: [\"spfh\",\"clwmr\",\"o3mr\"] + tracers: 
[\"sphum\",\"liq_wat\",\"o3mr\"] UFS-CASE-STUDY: task_make_ics: From 3b65a6cca4f65d2e87d32d432fe5ef973b6e5773 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Thu, 19 Sep 2024 16:34:41 -0500 Subject: [PATCH 16/47] Fix remaining YAML syntax issues --- ush/external_model_defaults.yaml | 108 +++++++++++++++++++++---------- 1 file changed, 75 insertions(+), 33 deletions(-) diff --git a/ush/external_model_defaults.yaml b/ush/external_model_defaults.yaml index 7c86b949b..d36daf255 100644 --- a/ush/external_model_defaults.yaml +++ b/ush/external_model_defaults.yaml @@ -11,81 +11,123 @@ grib2_defaults: &grib2_defaults GSMGFS: task_make_ics: chgres_cube: - input_type: "gfs_gaussian_nemsio" - external_model: "GSMGFS" - convert_nst: False - tracers_input: [\"spfh\",\"clwmr\",\"o3mr\"] - tracers: [\"sphum\",\"liq_wat\",\"o3mr\"] + namelist: + update_values: + config: + input_type: "gfs_gaussian_nemsio" + external_model: "GSMGFS" + convert_nst: False + tracers_input: [\"spfh\",\"clwmr\",\"o3mr\"] + tracers: [\"sphum\",\"liq_wat\",\"o3mr\"] task_make_lbcs: chgres_cube: - input_type: "gfs_gaussian_nemsio" - external_model: "GSMGFS" - convert_nst: False - tracers_input: [\"spfh\",\"clwmr\",\"o3mr\"] - tracers: [\"sphum\",\"liq_wat\",\"o3mr\"] + namelist: + update_values: + config: + input_type: "gfs_gaussian_nemsio" + external_model: "GSMGFS" + convert_nst: False + tracers_input: [\"spfh\",\"clwmr\",\"o3mr\"] + tracers: [\"sphum\",\"liq_wat\",\"o3mr\"] UFS-CASE-STUDY: task_make_ics: chgres_cube: - external_model: "UFS-CASE-STUDY" + namelist: + update_values: + config: + external_model: "UFS-CASE-STUDY" task_make_lbcs: chgres_cube: - external_model: "UFS-CASE-STUDY" + namelist: + update_values: + config: + external_model: "UFS-CASE-STUDY" GDAS: task_make_ics: chgres_cube: - external_model: "GFS" - tg3_from_soil: True + namelist: + update_values: + config: + external_model: "GFS" + tg3_from_soil: True task_make_lbcs: chgres_cube: - external_model: "GFS" - tg3_from_soil: True + namelist: + update_values: + config: + external_model: "GFS" + tg3_from_soil: True GEFS: task_make_ics: chgres_cube: - external_model: "GFS" <<: *grib2_defaults + namelist: + update_values: + config: + external_model: "GFS" task_make_lbcs: chgres_cube: - external_model: "GFS" <<: *grib2_defaults + namelist: + update_values: + config: + external_model: "GFS" HRRR: task_make_ics: chgres_cube: - external_model: "HRRR" - geogrid_file_input_grid: "{{ workflow.FIXgsm }}/geo_em.d01.nc_HRRRX" - tg3_from_soil: True <<: *grib2_defaults + namelist: + update_values: + config: + external_model: "HRRR" + geogrid_file_input_grid: "{{ workflow.FIXgsm }}/geo_em.d01.nc_HRRRX" + tg3_from_soil: True task_make_lbcs: chgres_cube: - external_model: "HRRR" - geogrid_file_input_grid: "{{ workflow.FIXgsm }}/geo_em.d01.nc_HRRRX" - tg3_from_soil: True <<: *grib2_defaults + namelist: + update_values: + config: + external_model: "HRRR" + geogrid_file_input_grid: "{{ workflow.FIXgsm }}/geo_em.d01.nc_HRRRX" + tg3_from_soil: True RAP: task_make_ics: chgres_cube: - external_model: "RAP" - geogrid_file_input_grid: "{{ workflow.FIXgsm }}/geo_em.d01.nc_RAPX" - tg3_from_soil: True <<: *grib2_defaults + namelist: + update_values: + config: + external_model: "RAP" + geogrid_file_input_grid: "{{ workflow.FIXgsm }}/geo_em.d01.nc_RAPX" + tg3_from_soil: True task_make_lbcs: chgres_cube: - external_model: "RAP" - geogrid_file_input_grid: "{{ workflow.FIXgsm }}/geo_em.d01.nc_RAPX" - tg3_from_soil: True <<: *grib2_defaults + namelist: + update_values: + config: + external_model: "RAP" 
+ geogrid_file_input_grid: "{{ workflow.FIXgsm }}/geo_em.d01.nc_RAPX" + tg3_from_soil: True NAM: task_make_ics: chgres_cube: - external_model: "NAM" <<: *grib2_defaults + namelist: + update_values: + config: + external_model: "NAM" task_make_lbcs: chgres_cube: - external_model: "NAM" - <<: *grib2_defaults \ No newline at end of file + <<: *grib2_defaults + namelist: + update_values: + config: + external_model: "NAM" \ No newline at end of file From 7116141307ae685d71d4c3c5517b8617941d4044 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Thu, 19 Sep 2024 17:25:05 -0500 Subject: [PATCH 17/47] Undo prior incorrect sync and refix YAML --- ush/ccpp_suites_defaults.yaml | 35 ++++++++++++++++---------------- ush/external_model_defaults.yaml | 33 +++++++++++++++--------------- 2 files changed, 33 insertions(+), 35 deletions(-) diff --git a/ush/ccpp_suites_defaults.yaml b/ush/ccpp_suites_defaults.yaml index b130203d5..b3467e893 100644 --- a/ush/ccpp_suites_defaults.yaml +++ b/ush/ccpp_suites_defaults.yaml @@ -1,9 +1,8 @@ -gsd_defaults: &gsd_defaults - chgres_cube: - namelist: - update_values: - config: - varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" +chgres_cube_gsd_defaults: &chgres_cube_gsd_defaults + namelist: + update_values: + config: + varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" orog_gsl_defaults: &orog_gsl_defaults config: @@ -27,54 +26,54 @@ orog_gsl_defaults: &orog_gsl_defaults FV3_RAP: task_make_ics: chgres_cube: - <<: *gsd_defaults + <<: *chgres_cube_gsd_defaults task_make_lbcs: chgres_cube: - <<: *gsd_defaults + <<: *chgres_cube_gsd_defaults task_make_orog: orog_gsl: <<: *orog_gsl_defaults FV3_HRRR: task_make_ics: chgres_cube: - <<: *gsd_defaults + <<: *chgres_cube_gsd_defaults task_make_lbcs: chgres_cube: - <<: *gsd_defaults + <<: *chgres_cube_gsd_defaults task_make_orog: orog_gsl: <<: *orog_gsl_defaults FV3_WoFS_v0: task_make_ics: chgres_cube: - <<: *gsd_defaults + <<: *chgres_cube_gsd_defaults task_make_lbcs: chgres_cube: - <<: *gsd_defaults + <<: *chgres_cube_gsd_defaults FV3_RRFS_v1beta: task_make_ics: chgres_cube: - <<: *gsd_defaults + <<: *chgres_cube_gsd_defaults task_make_lbcs: chgres_cube: - <<: *gsd_defaults + <<: *chgres_cube_gsd_defaults FV3_GFS_v15_thompson_mynn_lam3km: task_make_ics: chgres_cube: - <<: *gsd_defaults + <<: *chgres_cube_gsd_defaults task_make_lbcs: chgres_cube: - <<: *gsd_defaults + <<: *chgres_cube_gsd_defaults task_make_orog: orog_gsl: <<: *orog_gsl_defaults FV3_GFS_v17_p8: task_make_ics: chgres_cube: - <<: *gsd_defaults + <<: *chgres_cube_gsd_defaults task_make_lbcs: chgres_cube: - <<: *gsd_defaults + <<: *chgres_cube_gsd_defaults task_make_orog: orog_gsl: <<: *orog_gsl_defaults diff --git a/ush/external_model_defaults.yaml b/ush/external_model_defaults.yaml index d36daf255..04bf41719 100644 --- a/ush/external_model_defaults.yaml +++ b/ush/external_model_defaults.yaml @@ -1,12 +1,11 @@ -grib2_defaults: &grib2_defaults - chgres_cube: - namelist: - update_values: - config: - input_type: "grib2" - convert_nst: False - tracers_input: "" - tracers: "" +chgres_cube_grib2_defaults: &chgres_cube_grib2_defaults + namelist: + update_values: + config: + input_type: "grib2" + convert_nst: False + tracers_input: "" + tracers: "" GSMGFS: task_make_ics: @@ -63,14 +62,14 @@ GDAS: GEFS: task_make_ics: chgres_cube: - <<: *grib2_defaults + <<: *chgres_cube_grib2_defaults namelist: update_values: config: external_model: "GFS" task_make_lbcs: chgres_cube: - <<: *grib2_defaults + <<: *chgres_cube_grib2_defaults 
namelist: update_values: config: @@ -79,7 +78,7 @@ GEFS: HRRR: task_make_ics: chgres_cube: - <<: *grib2_defaults + <<: *chgres_cube_grib2_defaults namelist: update_values: config: @@ -88,7 +87,7 @@ HRRR: tg3_from_soil: True task_make_lbcs: chgres_cube: - <<: *grib2_defaults + <<: *chgres_cube_grib2_defaults: namelist: update_values: config: @@ -99,7 +98,7 @@ HRRR: RAP: task_make_ics: chgres_cube: - <<: *grib2_defaults + <<: *chgres_cube_grib2_defaults namelist: update_values: config: @@ -108,7 +107,7 @@ RAP: tg3_from_soil: True task_make_lbcs: chgres_cube: - <<: *grib2_defaults + <<: *chgres_cube_grib2_defaults namelist: update_values: config: @@ -119,14 +118,14 @@ RAP: NAM: task_make_ics: chgres_cube: - <<: *grib2_defaults + <<: *chgres_cube_grib2_defaults namelist: update_values: config: external_model: "NAM" task_make_lbcs: chgres_cube: - <<: *grib2_defaults + <<: *chgres_cube_grib2_defaults namelist: update_values: config: From c310cb8194784023007c63634604646e4228a840 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Fri, 20 Sep 2024 16:22:01 +0000 Subject: [PATCH 18/47] local YAML and path fixes --- scripts/chgres_cube.py | 14 +++++++------- ush/ccpp_suites_defaults.yaml | 9 ++++----- ush/config_defaults.yaml | 3 ++- ush/external_model_defaults.yaml | 17 ++++++++--------- 4 files changed, 21 insertions(+), 22 deletions(-) diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py index 8e7732d16..18a919eb4 100644 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -62,9 +62,9 @@ # update fn_atm and fn_sfc for ics task if args.key_path == "task_make_ics": - rundir = Path(chgres_cube_config["task_make_ics"]["chgres_cube"]["rundir"]) + rundir = Path(chgres_cube_driver.config["rundir"]) print(f"Will run in {rundir}") - varsfilepath = chgres_cube_driver.config["task_make_ics"][ + varsfilepath = expt_config["task_make_ics"][ "input_files_metadata_path" ] extrn_config_fns = get_sh_config(varsfilepath)["EXTRN_MDL_FNS"] @@ -77,10 +77,10 @@ # Loop the run of chgres_cube for the forecast length if lbcs else: - rundir = Path(chgres_cube_config["task_make_lbcs"]["chgres_cube"]["rundir"]) + rundir = Path(chgres_cube_driver.config["rundir"]) print(f"Will run in {rundir}") fn_sfc = "" - num_fhrs = chgres_cube_driver.config["workflow"]["FCST_LEN_HRS"] + num_fhrs = expt_config["workflow"]["FCST_LEN_HRS"] bcgrp10 = 0 bcgrpnum10 = 1 for ii in range(bcgrp10, num_fhrs, bcgrpnum10): @@ -89,7 +89,7 @@ print(f"group ${bcgrp10} processes member ${i}") fn_atm = f"${{EXTRN_MDL_FNS[${i}]}}" - expt_config["task_make_lbcs"]["chgres_cube"]["namelist"]["update_values"][ + chgres_cube_driver.config["namelist"]["update_values"][ "config" ]["atm_files_input_grid"] = fn_atm # reinstantiate driver @@ -128,10 +128,10 @@ lbc_block = expt_config_cp[args.key_path] lbc_input_fn = "gfs.bndy.nc" lbc_spec_fhrs = extrn_config_fhrs[i] - lbc_offset_fhrs = chgres_cube_driver.config["task_get_extrn_lbcs"][ + lbc_offset_fhrs = expt_config["task_get_extrn_lbcs"][ "EXTRN_MDL_LBCS_OFFSET_HRS" ] - nco_net = chgres_cube_driver.config["nco"]["NET_default"] + nco_net = expt_config["nco"]["NET_default"] dot_ensmem = f".mem{ args.member }" fcst_hhh = lbc_spec_fhrs - lbc_offset_fhrs fcst_hhh_FV3LAM = print(f"fcst_hhh:03d") diff --git a/ush/ccpp_suites_defaults.yaml b/ush/ccpp_suites_defaults.yaml index b130203d5..a34db6b44 100644 --- a/ush/ccpp_suites_defaults.yaml +++ b/ush/ccpp_suites_defaults.yaml @@ -1,9 +1,8 @@ gsd_defaults: &gsd_defaults - chgres_cube: - namelist: - update_values: - config: - varmap_file: "{{ user.PARMdir 
}}/ufs_utils/varmap_tables/GFSphys_var_map.txt" + namelist: + update_values: + config: + varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" orog_gsl_defaults: &orog_gsl_defaults config: diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 7c08b43e4..da1becf9a 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -1695,7 +1695,7 @@ task_make_ics: FVCOM_FILE: "fvcom.nc" VCOORD_FILE: "{{ workflow.FIXam }}/global_hyblev.l65.txt" #------------------------------------------------------------------------ - input_files_metadata_path: "{{ task_get_extrn_ics.rundir }}/{{ workflow.EXTRN_MDL_VAR_DEFNS_FN }}" + input_files_metadata_path: '{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}/{{ task_get_extrn_ics.EXTRN_MDL_NAME_ICS }}/for_ICS{{ "/mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}/{{ workflow.EXTRN_MDL_VAR_DEFNS_FN }}' chgres_cube: execution: executable: "{{ user.EXECdir }}/chgres_cube" @@ -1766,6 +1766,7 @@ task_make_lbcs: OMP_STACKSIZE_MAKE_LBCS: "1024m" VCOORD_FILE: "{{ workflow.FIXam }}/global_hyblev.l65.txt" #------------------------------------------------------------------------ + input_files_metadata_path: '{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}/{{ task_get_extrn_lbcs.EXTRN_MDL_NAME_LBCS }}/for_LBCS{{ "/mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}/{{ workflow.EXTRN_MDL_VAR_DEFNS_FN }}' chgres_cube: execution: executable: "{{ user.EXECdir }}/chgres_cube" diff --git a/ush/external_model_defaults.yaml b/ush/external_model_defaults.yaml index d36daf255..50d66d701 100644 --- a/ush/external_model_defaults.yaml +++ b/ush/external_model_defaults.yaml @@ -1,12 +1,11 @@ grib2_defaults: &grib2_defaults - chgres_cube: - namelist: - update_values: - config: - input_type: "grib2" - convert_nst: False - tracers_input: "" - tracers: "" + namelist: + update_values: + config: + input_type: "grib2" + convert_nst: False + tracers_input: "" + tracers: "" GSMGFS: task_make_ics: @@ -130,4 +129,4 @@ NAM: namelist: update_values: config: - external_model: "NAM" \ No newline at end of file + external_model: "NAM" From 9a0ab65c089674dbd43b6853182a3351ae2c2865 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Fri, 20 Sep 2024 18:23:24 +0000 Subject: [PATCH 19/47] progress resolving dereferencing --- scripts/chgres_cube.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py index 18a919eb4..a8c67cdac 100644 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -12,6 +12,7 @@ from uwtools.api.chgres_cube import ChgresCube from uwtools.api.config import get_sh_config, get_yaml_config from uwtools.api.fs import link as uwlink +from uwtools.api.logging import use_uwtools_logger parser = ArgumentParser( @@ -42,6 +43,7 @@ default="000", help="The 3-digit ensemble member number.", ) +use_uwtools_logger() args = parser.parse_args() os.environ["member"] = args.member @@ -49,10 +51,10 @@ expt_config = get_yaml_config(args.config_file) chgres_cube_config = expt_config[args.key_path] +# dereference expressions during driver initialization CRES = expt_config["workflow"]["CRES"] os.environ["CRES"] = CRES - # Extract driver config from experiment config chgres_cube_driver = ChgresCube( config=args.config_file, From 226d335fb529a02878beb52f5043905bbb8d42bb Mon Sep 17 00:00:00 2001 From: WeirAE Date: Wed, 25 Sep 2024 13:33:41 +0000 Subject: [PATCH 20/47] fixes for loop logic --- scripts/chgres_cube.py | 18 ++++++++++++------ ush/config_defaults.yaml | 4 ++-- 2 files changed, 14 
insertions(+), 8 deletions(-) diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py index a8c67cdac..548bbf21e 100644 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -62,6 +62,9 @@ key_path=[args.key_path], ) +extrn_config_fns = get_sh_config(varsfilepath)["EXTRN_MDL_FNS"] +extrn_config_fhrs = get_sh_config(varsfilepath)["EXTRN_MDL_FHRS"] + # update fn_atm and fn_sfc for ics task if args.key_path == "task_make_ics": rundir = Path(chgres_cube_driver.config["rundir"]) @@ -69,8 +72,6 @@ varsfilepath = expt_config["task_make_ics"][ "input_files_metadata_path" ] - extrn_config_fns = get_sh_config(varsfilepath)["EXTRN_MDL_FNS"] - extrn_config_fhrs = get_sh_config(varsfilepath)["EXTRN_MDL_FHRS"] fn_atm = extrn_config_fns[0] fn_sfc = extrn_config_fns[1] @@ -82,18 +83,23 @@ rundir = Path(chgres_cube_driver.config["rundir"]) print(f"Will run in {rundir}") fn_sfc = "" - num_fhrs = expt_config["workflow"]["FCST_LEN_HRS"] + num_fhrs = len(extrn_config_fhrs) bcgrp10 = 0 bcgrpnum10 = 1 for ii in range(bcgrp10, num_fhrs, bcgrpnum10): - i = ii + bcgrpnum10 + i = ii + bcgrp10 if i < num_fhrs: - print(f"group ${bcgrp10} processes member ${i}") - fn_atm = f"${{EXTRN_MDL_FNS[${i}]}}" + print(f"group {bcgrp10} processes member {i}") + fn_atm = extrn_config_fns[i] + fn_grib2 = extrn_config_fns[i] chgres_cube_driver.config["namelist"]["update_values"][ "config" ]["atm_files_input_grid"] = fn_atm + chgres_cube_driver.config["namelist"]["update_values"][ + "config" + ]["grib2_files_input_grid"] = fn_grib2 + # reinstantiate driver chgres_cube_driver = ChgresCube( config=expt_config, diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index da1becf9a..4183c5d12 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -1739,7 +1739,7 @@ task_make_ics: minmax_vgfrc_from_climo: true lai_from_climo: true tg3_from_soil: false - rundir: '{{ workflow.EXPTDIR }}/make_ics' + rundir: '{{ task_run_fcst.rundir }}/tmp_MAKE_ICS' #---------------------------- # MAKE LBCS config parameters @@ -1800,7 +1800,7 @@ task_make_lbcs: external_model: "FV3GFS" tracers_input: [\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"] tracers: [\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"] - rundir: '{{ workflow.EXPTDIR }}/make_lbcs' + rundir: '{{ task_run_fcst.rundir }}/tmp_MAKE_LBCS' #---------------------------- From 8a3ed18b80255a0e2544d89ae5053bf8c281e708 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Mon, 7 Oct 2024 16:06:04 +0000 Subject: [PATCH 21/47] fixes for lbcs --- scripts/chgres_cube.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py index 548bbf21e..20f8cb966 100644 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -62,8 +62,6 @@ key_path=[args.key_path], ) -extrn_config_fns = get_sh_config(varsfilepath)["EXTRN_MDL_FNS"] -extrn_config_fhrs = get_sh_config(varsfilepath)["EXTRN_MDL_FHRS"] # update fn_atm and fn_sfc for ics task if args.key_path == "task_make_ics": @@ -72,6 +70,8 @@ varsfilepath = expt_config["task_make_ics"][ "input_files_metadata_path" ] + extrn_config_fns = get_sh_config(varsfilepath)["EXTRN_MDL_FNS"] + extrn_config_fhrs = get_sh_config(varsfilepath)["EXTRN_MDL_FHRS"] fn_atm = extrn_config_fns[0] fn_sfc = extrn_config_fns[1] @@ -83,6 +83,10 @@ rundir = Path(chgres_cube_driver.config["rundir"]) print(f"Will run in {rundir}") fn_sfc = "" + varsfilepath = expt_config["task_make_lbcs"][ + "input_files_metadata_path" + ] + extrn_config_fhrs = 
get_sh_config(varsfilepath)["EXTRN_MDL_FHRS"] num_fhrs = len(extrn_config_fhrs) bcgrp10 = 0 bcgrpnum10 = 1 From 8b7dd87fb432d3eb8683021858fabf55740a9576 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Mon, 7 Oct 2024 21:53:51 +0000 Subject: [PATCH 22/47] fix external model yaml issues --- scripts/chgres_cube.py | 18 ++++++++++++++++-- ush/config_defaults.yaml | 4 ++-- ush/external_model_defaults.yaml | 29 +++++++++++++---------------- 3 files changed, 31 insertions(+), 20 deletions(-) diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py index 20f8cb966..89255af50 100644 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -62,6 +62,14 @@ key_path=[args.key_path], ) +# Dereference cycle for file paths +expt_config = get_yaml_config(deepcopy(expt_config.data)) +expt_config.dereference( + context={ + "cycle": args.cycle, + **expt_config, + } +) # update fn_atm and fn_sfc for ics task if args.key_path == "task_make_ics": @@ -73,8 +81,13 @@ extrn_config_fns = get_sh_config(varsfilepath)["EXTRN_MDL_FNS"] extrn_config_fhrs = get_sh_config(varsfilepath)["EXTRN_MDL_FHRS"] - fn_atm = extrn_config_fns[0] - fn_sfc = extrn_config_fns[1] + if chgres_cube_driver.config["namelist"]["update_values"][ + "config" + ]["input_type"] == "grib2": + fn_grib2 = extrn_config_fns[0] + else: + fn_atm = extrn_config_fns[0] + fn_sfc = extrn_config_fns[1] chgres_cube_driver.run() @@ -86,6 +99,7 @@ varsfilepath = expt_config["task_make_lbcs"][ "input_files_metadata_path" ] + extrn_config_fns = get_sh_config(varsfilepath)["EXTRN_MDL_FNS"] extrn_config_fhrs = get_sh_config(varsfilepath)["EXTRN_MDL_FHRS"] num_fhrs = len(extrn_config_fhrs) bcgrp10 = 0 diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 4183c5d12..a720bdda3 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -1695,7 +1695,7 @@ task_make_ics: FVCOM_FILE: "fvcom.nc" VCOORD_FILE: "{{ workflow.FIXam }}/global_hyblev.l65.txt" #------------------------------------------------------------------------ - input_files_metadata_path: '{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}/{{ task_get_extrn_ics.EXTRN_MDL_NAME_ICS }}/for_ICS{{ "/mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}/{{ workflow.EXTRN_MDL_VAR_DEFNS_FN }}' + input_files_metadata_path: '{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}/{{ task_get_extrn_ics.EXTRN_MDL_NAME_ICS }}/for_ICS{{ "/mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}/{{ workflow.EXTRN_MDL_VAR_DEFNS_FN }}.sh' chgres_cube: execution: executable: "{{ user.EXECdir }}/chgres_cube" @@ -1766,7 +1766,7 @@ task_make_lbcs: OMP_STACKSIZE_MAKE_LBCS: "1024m" VCOORD_FILE: "{{ workflow.FIXam }}/global_hyblev.l65.txt" #------------------------------------------------------------------------ - input_files_metadata_path: '{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}/{{ task_get_extrn_lbcs.EXTRN_MDL_NAME_LBCS }}/for_LBCS{{ "/mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}/{{ workflow.EXTRN_MDL_VAR_DEFNS_FN }}' + input_files_metadata_path: '{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}/{{ task_get_extrn_lbcs.EXTRN_MDL_NAME_LBCS }}/for_LBCS{{ "/mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}/{{ workflow.EXTRN_MDL_VAR_DEFNS_FN }}.sh' chgres_cube: execution: executable: "{{ user.EXECdir }}/chgres_cube" diff --git a/ush/external_model_defaults.yaml b/ush/external_model_defaults.yaml index 50d66d701..3c8c6a1a0 100644 --- a/ush/external_model_defaults.yaml +++ b/ush/external_model_defaults.yaml @@ -1,11 +1,8 @@ -grib2_defaults: &grib2_defaults - namelist: - update_values: - config: - input_type: 
"grib2" - convert_nst: False - tracers_input: "" - tracers: "" +grib2_default_config: &grib2_default_config + input_type: "grib2" + convert_nst: False + tracers_input: [] + tracers: [] GSMGFS: task_make_ics: @@ -62,35 +59,35 @@ GDAS: GEFS: task_make_ics: chgres_cube: - <<: *grib2_defaults namelist: update_values: config: + <<: *grib2_default_config external_model: "GFS" task_make_lbcs: chgres_cube: - <<: *grib2_defaults namelist: update_values: config: + <<: *grib2_default_config external_model: "GFS" HRRR: task_make_ics: chgres_cube: - <<: *grib2_defaults namelist: update_values: config: + <<: *grib2_default_config external_model: "HRRR" geogrid_file_input_grid: "{{ workflow.FIXgsm }}/geo_em.d01.nc_HRRRX" tg3_from_soil: True task_make_lbcs: chgres_cube: - <<: *grib2_defaults namelist: update_values: config: + <<: *grib2_default_config external_model: "HRRR" geogrid_file_input_grid: "{{ workflow.FIXgsm }}/geo_em.d01.nc_HRRRX" tg3_from_soil: True @@ -98,19 +95,19 @@ HRRR: RAP: task_make_ics: chgres_cube: - <<: *grib2_defaults namelist: update_values: config: + <<: *grib2_default_config external_model: "RAP" geogrid_file_input_grid: "{{ workflow.FIXgsm }}/geo_em.d01.nc_RAPX" tg3_from_soil: True task_make_lbcs: chgres_cube: - <<: *grib2_defaults namelist: update_values: config: + <<: *grib2_default_config external_model: "RAP" geogrid_file_input_grid: "{{ workflow.FIXgsm }}/geo_em.d01.nc_RAPX" tg3_from_soil: True @@ -118,15 +115,15 @@ RAP: NAM: task_make_ics: chgres_cube: - <<: *grib2_defaults namelist: update_values: config: + <<: *grib2_default_config external_model: "NAM" task_make_lbcs: chgres_cube: - <<: *grib2_defaults namelist: update_values: config: + <<: *grib2_default_config external_model: "NAM" From b83c5af29f8b5ae6f05a0e94ea652eebdfbadea0 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Thu, 10 Oct 2024 16:06:33 -0500 Subject: [PATCH 23/47] move tracers to external_model_defaults --- ush/config_defaults.yaml | 9 --------- ush/external_model_defaults.yaml | 27 +++++++++++++++++++++++++-- 2 files changed, 25 insertions(+), 11 deletions(-) diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index a720bdda3..8d0fce628 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -1716,9 +1716,6 @@ task_make_ics: vcoord_file_target_grid: "{{ workflow.FIXam }}/global_hyblev.l65.txt" varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" data_dir_input_grid: "{{ task_make_lbcs.chgres_cube.run_dir }}" - atm_files_input_grid: "" - sfc_files_input_grid': "" - grib2_file_input_grid: "" cycle_mon: !int "{{ cycle.strftime('%m') }}" cycle_day: !int "{{ cycle.strftime('%d') }}" cycle_hour: !int "{{ cycle.strftime('%H') }}" @@ -1730,8 +1727,6 @@ task_make_ics: halo_blend: !int "{{ global.HALO_BLEND }}" input_type: "gaussian_nemsio" external_model: "FV3GFS" - tracers_input: [\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"] - tracers: [\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"] nsoill_out: 4 vgtyp_from_climo: true sotyp_from_climo: true @@ -1787,8 +1782,6 @@ task_make_lbcs: vcoord_file_target_grid: "{{ workflow.FIXam }}/global_hyblev.l65.txt" varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" data_dir_input_grid: "{{ task_make_lbcs.chgres_cube.run_dir }}" - atm_files_input_grid: "" - grib2_file_input_grid: "" cycle_mon: !int "{{ cycle.strftime('%m') }}" cycle_day: !int "{{ cycle.strftime('%d') }}" cycle_hour: !int "{{ cycle.strftime('%H') }}" @@ -1798,8 +1791,6 @@ 
task_make_lbcs: halo_blend: !int "{{ global.HALO_BLEND }}" input_type: "gaussian_nemsio" external_model: "FV3GFS" - tracers_input: [\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"] - tracers: [\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"] rundir: '{{ task_run_fcst.rundir }}/tmp_MAKE_LBCS' diff --git a/ush/external_model_defaults.yaml b/ush/external_model_defaults.yaml index 3c8c6a1a0..24ab8a131 100644 --- a/ush/external_model_defaults.yaml +++ b/ush/external_model_defaults.yaml @@ -1,8 +1,23 @@ grib2_default_config: &grib2_default_config input_type: "grib2" convert_nst: False - tracers_input: [] - tracers: [] + + +FV3GFS: + task_make_ics: + chgres_cube: + namelist: + update_values: + config: + tracers_input: [\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"] + tracers: [\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"] + task_make_lbcs: + chgres_cube: + namelist: + update_values: + config: + tracers_input: [\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"] + tracers: [\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"] GSMGFS: task_make_ics: @@ -33,12 +48,16 @@ UFS-CASE-STUDY: update_values: config: external_model: "UFS-CASE-STUDY" + tracers_input: [\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"] + tracers: [\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"] task_make_lbcs: chgres_cube: namelist: update_values: config: external_model: "UFS-CASE-STUDY" + tracers_input: [\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"] + tracers: [\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"] GDAS: task_make_ics: @@ -47,6 +66,8 @@ GDAS: update_values: config: external_model: "GFS" + tracers_input: [\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"] + tracers: [\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"] tg3_from_soil: True task_make_lbcs: chgres_cube: @@ -54,6 +75,8 @@ GDAS: update_values: config: external_model: "GFS" + tracers_input: [\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"] + tracers: [\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"] tg3_from_soil: True GEFS: From d9701bf4ef312e18faa6d9646754d68724fab75e Mon Sep 17 00:00:00 2001 From: WeirAE Date: Tue, 15 Oct 2024 19:24:12 +0000 Subject: [PATCH 24/47] significant logic changes --- scripts/chgres_cube.py | 387 +++++++++++++++++++------------ ush/ccpp_suites_defaults.yaml | 4 +- ush/config_defaults.yaml | 13 +- ush/external_model_defaults.yaml | 8 +- uwtools | 2 +- 5 files changed, 251 insertions(+), 163 deletions(-) diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py index 89255af50..3ce17e5b6 100644 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -1,172 +1,263 @@ +#!/usr/bin/env python """ The run script for chgres_cube """ import datetime as dt +import logging import os +import re import sys from argparse import ArgumentParser from copy import deepcopy from pathlib import Path from uwtools.api.chgres_cube import ChgresCube -from uwtools.api.config import get_sh_config, get_yaml_config +from uwtools.api.config import get_yaml_config from uwtools.api.fs import link as uwlink from uwtools.api.logging import use_uwtools_logger -parser = ArgumentParser( - description="Script that runs chgres_cube via uwtools API", -) -parser.add_argument( - "-c", - "--config-file", - metavar="PATH", - required=True, - 
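
A side note on the external-model defaults rework in the two patches above: a YAML `<<: *anchor` merge key splices the anchored mapping only into the mapping in which it literally appears, which is why the merge had to move from the `chgres_cube:` level down into the `config:` block for `input_type` and `convert_nst` to land next to `external_model`. A quick, illustrative check of that behavior (assumes PyYAML, which honors the merge-key convention; this snippet is not part of the patch):

    import yaml

    doc = """
    grib2_default_config: &grib2_default_config
      input_type: "grib2"
      convert_nst: false
    GEFS:
      config:
        <<: *grib2_default_config   # merged into config:, not into GEFS:
        external_model: "GFS"
    """
    assert yaml.safe_load(doc)["GEFS"]["config"]["input_type"] == "grib2"
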
    help="Path to experiment config file.",
-    type=Path,
-)
-parser.add_argument(
-    "--cycle",
-    help="The cycle in ISO8601 format (e.g. 2024-07-15T18)",
-    required=True,
-    type=dt.datetime.fromisoformat,
-)
-parser.add_argument(
-    "--key-path",
-    help="Dot-separated path of keys leading through the config to the driver's YAML block",
-    metavar="KEY[.KEY...]",
-    required=True,
-)
-parser.add_argument(
-    "--member",
-    default="000",
-    help="The 3-digit ensemble member number.",
-)
-use_uwtools_logger()
-args = parser.parse_args()
-
-os.environ["member"] = args.member
-
-expt_config = get_yaml_config(args.config_file)
-chgres_cube_config = expt_config[args.key_path]
-
-# dereference expressions during driver initialization
-CRES = expt_config["workflow"]["CRES"]
-os.environ["CRES"] = CRES
-
-# Extract driver config from experiment config
-chgres_cube_driver = ChgresCube(
-    config=args.config_file,
-    cycle=args.cycle,
-    key_path=[args.key_path],
-)
-
-# Dereference cycle for file paths
-expt_config = get_yaml_config(deepcopy(expt_config.data))
-expt_config.dereference(
-    context={
-        "cycle": args.cycle,
-        **expt_config,
-    }
-)
-
-# update fn_atm and fn_sfc for ics task
-if args.key_path == "task_make_ics":
-    rundir = Path(chgres_cube_driver.config["rundir"])
-    print(f"Will run in {rundir}")
-    varsfilepath = expt_config["task_make_ics"][
-        "input_files_metadata_path"
-    ]
-    extrn_config_fns = get_sh_config(varsfilepath)["EXTRN_MDL_FNS"]
-    extrn_config_fhrs = get_sh_config(varsfilepath)["EXTRN_MDL_FHRS"]
-
-    if chgres_cube_driver.config["namelist"]["update_values"][
-        "config"
-    ]["input_type"] == "grib2":
-        fn_grib2 = extrn_config_fns[0]
-    else:
-        fn_atm = extrn_config_fns[0]
-        fn_sfc = extrn_config_fns[1]
-
-    chgres_cube_driver.run()
-
-# Loop the run of chgres_cube for the forecast length if lbcs
-else:
-    rundir = Path(chgres_cube_driver.config["rundir"])
-    print(f"Will run in {rundir}")
-    fn_sfc = ""
-    varsfilepath = expt_config["task_make_lbcs"][
-        "input_files_metadata_path"
-    ]
-    extrn_config_fns = get_sh_config(varsfilepath)["EXTRN_MDL_FNS"]
-    extrn_config_fhrs = get_sh_config(varsfilepath)["EXTRN_MDL_FHRS"]
-    num_fhrs = len(extrn_config_fhrs)
-    bcgrp10 = 0
-    bcgrpnum10 = 1
-    for ii in range(bcgrp10, num_fhrs, bcgrpnum10):
-        i = ii + bcgrp10
-        if i < num_fhrs:
-            print(f"group {bcgrp10} processes member {i}")
-            fn_atm = extrn_config_fns[i]
-            fn_grib2 = extrn_config_fns[i]
-
-            chgres_cube_driver.config["namelist"]["update_values"][
-                "config"
-            ]["atm_files_input_grid"] = fn_atm
-            chgres_cube_driver.config["namelist"]["update_values"][
-                "config"
-            ]["grib2_files_input_grid"] = fn_grib2
-
-            # reinstantiate driver
-            chgres_cube_driver = ChgresCube(
-                config=expt_config,
-                cycle=args.cycle,
-                key_path=[args.key_path],
-            )
-            chgres_cube_driver.run()
-
-
-# error message
-if not (rundir / "runscript.chgres_cube.done").is_file():
-    print("Error occurred running chgres_cube. Please see component error logs.")
-    sys.exit(1)
-
-# Deliver output data
-expt_config = get_yaml_config(args.config_file)
-chgres_cube_config = expt_config[args.key_path]
-
-
-# Move initial condition, surface, control, and 0-th hour lateral bound-
-# ary files to ICs_BCs directory.
-links = {}
-for label in chgres_cube_config["output_file_labels"]:
-    # deepcopy here because desired_output_name is parameterized within the loop
+def _parse_var_defns(file):
+    var_dict = {}
+    with open(file, "r", encoding="utf-8") as f:
+        lines = f.readlines()
+    for line in lines:
+        line = line.strip()
+        if "=" in line:
+            key, value = line.split("=", 1)
+            key = key.strip()
+            value = value.strip()
+
+            if value.startswith("(") and value.endswith(")"):
+                items = re.findall(r"\((.*?)\)", value)
+                if items:
+                    value = [item.strip() for item in items[0].split()]
+            var_dict[key] = value
+    return var_dict
+
+
+def _walk_key_path(config, key_path):
+    """
+    Navigate to the sub-config at the end of the path of given keys.
+    """
+    keys = []
+    pathstr = ""
+    for key in key_path:
+        keys.append(key)
+        pathstr = " -> ".join(keys)
+        try:
+            subconfig = config[key]
+        except KeyError:
+            logging.error(f"Bad config path: {pathstr}")
+            raise
+        if not isinstance(subconfig, dict):
+            logging.error(f"Value at {pathstr} must be a dictionary")
+            sys.exit(1)
+        config = subconfig
+    return config
+
+
+def parse_args(argv):
+    """
+    Parse arguments for the script.
+    """
+    parser = ArgumentParser(
+        description="Script that runs chgres_cube via uwtools API",
+    )
+    parser.add_argument(
+        "-c",
+        "--config-file",
+        metavar="PATH",
+        required=True,
+        help="Path to experiment config file.",
+        type=Path,
+    )
+    parser.add_argument(
+        "--cycle",
+        help="The cycle in ISO8601 format (e.g. 2024-07-15T18)",
+        required=True,
+        type=dt.datetime.fromisoformat,
+    )
+    parser.add_argument(
+        "--key-path",
+        help="Dot-separated path of keys leading through the config to the driver's YAML block",
+        metavar="KEY[.KEY...]",
+        required=True,
+        type=lambda s: s.split("."),
+    )
+    parser.add_argument(
+        "--member",
+        default="000",
+        help="The 3-digit ensemble member number.",
+    )
+    return parser.parse_args(argv)
+
+
+# pylint: disable=too-many-locals, too-many-statements
+def run_chgres_cube(config_file, cycle, key_path, member):
+    """
+    Setup and run the chgres_cube Driver.
+    """
+
+    # The experiment config will have {{ MEMBER | env }} expressions in it that need to be
+    # dereferenced during driver initialization.
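
The `{{ "MEMBER" | env }}` expressions mentioned in the comment above are rendered from the process environment, which is why the function exports the member number before any config is dereferenced. A minimal sketch of that round trip with the same uwtools calls the script uses (the config key and value here are invented for illustration):

    import os
    from uwtools.api.config import get_yaml_config

    os.environ["MEMBER"] = "002"  # what --member feeds in
    cfg = get_yaml_config({"input_dir": "for_ICS/mem{{ 'MEMBER' | env }}"})
    cfg.dereference()
    # cfg["input_dir"] should now render as "for_ICS/mem002"
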
+
+    os.environ["member"] = member
+
+    expt_config = get_yaml_config(config_file)
+
+    # dereference expressions during driver initialization
+    CRES = expt_config["workflow"]["CRES"]
+    os.environ["CRES"] = CRES
+
+    # Extract driver config from experiment config
+    chgres_cube_driver = ChgresCube(
+        config=config_file,
+        cycle=cycle,
+        key_path=key_path,
+    )
+
+    # Dereference cycle for file paths
     expt_config_cp = get_yaml_config(deepcopy(expt_config.data))
     expt_config_cp.dereference(
         context={
-            "cycle": args.cycle,
+            "cycle": cycle,
             **expt_config_cp,
         }
     )
+    chgres_cube_config = _walk_key_path(expt_config_cp, key_path)
     # update fn_atm and fn_sfc for ics task
     if "task_make_ics" in key_path:
+        rundir = Path(chgres_cube_driver.config["rundir"])
+        print(f"Will run in {rundir}")
+        varsfilepath = chgres_cube_config["input_files_metadata_path"]
+        shconfig = _parse_var_defns(varsfilepath)
+        extrn_config_fns = shconfig["EXTRN_MDL_FNS"]
+        extrn_config_fhrs = shconfig["EXTRN_MDL_FHRS"]
+
+        input_type = chgres_cube_config["chgres_cube"]["namelist"]["update_values"][
+            "config"
+        ].get("input_type")
+        if input_type == "grib2":
+            fn_grib2 = extrn_config_fns[0]
+            update = {"grib2_file_input_grid": fn_grib2}
+        else:
+            fn_atm = extrn_config_fns[0]
+            fn_sfc = extrn_config_fns[1]
+            update = {"atm_files_input_grid": fn_atm, "sfc_files_input_grid": fn_sfc}
+
+        update_cfg = {
+            "task_make_ics": {
+                "chgres_cube": {"namelist": {"update_values": {"config": update}}}
+            }
+        }
+        expt_config_cp.update_from(update_cfg)
+        logging.info(f"updated config: {expt_config_cp}")
+
+        # reinstantiate driver
+        chgres_cube_driver = ChgresCube(
+            config=expt_config_cp,
+            cycle=cycle,
+            key_path=key_path,
+        )
+        chgres_cube_driver.run()
+
+    # Loop the run of chgres_cube for the forecast length if lbcs
+    else:
+        rundir = Path(chgres_cube_driver.config["rundir"])
+        print(f"Will run in {rundir}")
+        fn_sfc = ""
+        varsfilepath = chgres_cube_config["input_files_metadata_path"]
+        shconfig = _parse_var_defns(varsfilepath)
+        extrn_config_fns = shconfig["EXTRN_MDL_FNS"]
+        extrn_config_fhrs = shconfig["EXTRN_MDL_FHRS"]
+        num_fhrs = len(extrn_config_fhrs)
+
+        input_type = chgres_cube_config["chgres_cube"]["namelist"]["update_values"][
+            "config"
+        ].get("input_type")
+        bcgrp10 = 0
+        bcgrpnum10 = 1
+        update = {}
+        for ii in range(bcgrp10, num_fhrs, bcgrpnum10):
+            i = ii + bcgrp10
+            if i < num_fhrs:
+                print(f"group {bcgrp10} processes member {i}")
+                if input_type == "grib2":
+                    fn_grib2 = extrn_config_fns[i]
+                    update = {"grib2_file_input_grid": fn_grib2}
+                else:
+                    fn_atm = extrn_config_fns[i]
+                    update = {"atm_files_input_grid": fn_atm}
+
+                update_cfg = {
+                    "task_make_ics": {
+                        "chgres_cube": {
+                            "namelist": {"update_values": {"config": update}}
+                        }
+                    }
+                }
+                expt_config_cp.update_from(update_cfg)
+
+                # reinstantiate driver
+                chgres_cube_driver = ChgresCube(
+                    config=expt_config_cp,
+                    cycle=cycle,
+                    key_path=key_path,
+                )
+                chgres_cube_driver.run()
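
Note the pattern that closes both branches above: rather than mutating the live driver, the script deep-merges the per-file namelist update into the config copy with `update_from()` and then constructs a fresh `ChgresCube`, presumably because the driver renders and validates its configuration at instantiation time. Reduced to its essentials (the input filename is a placeholder, not taken from the patch):

    update = {"atm_files_input_grid": "gfs.t12z.atmanl.nemsio"}  # hypothetical file
    expt_config_cp.update_from(
        {"task_make_ics": {"chgres_cube": {"namelist": {"update_values": {"config": update}}}}}
    )
    chgres_cube_driver = ChgresCube(config=expt_config_cp, cycle=cycle, key_path=key_path)
    chgres_cube_driver.run()
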
-uwlink(target_dir=rundir.parent, config=links)
+    # error message
+    if not (rundir / "runscript.chgres_cube.done").is_file():
+        print("Error occurred running chgres_cube. Please see component error logs.")
+        sys.exit(1)
+
+    # Deliver output data
+    expt_config = get_yaml_config(config_file)
+    chgres_cube_config = _walk_key_path(expt_config, key_path)
+
+    # Move initial condition, surface, control, and 0-th hour lateral bound-
+    # ary files to ICs_BCs directory.
+    links = {}
+    for label in chgres_cube_config["output_file_labels"]:
+        # deepcopy here because desired_output_name is parameterized within the loop
+        expt_config_cp = get_yaml_config(deepcopy(expt_config.data))
+        expt_config_cp.dereference(
+            context={
+                "cycle": cycle,
+                "file_label": label,
+                **expt_config_cp,
+            }
+        )
+        lbc_block = _walk_key_path(expt_config_cp, key_path)
+        lbc_input_fn = "gfs.bndy.nc"
+        lbc_spec_fhrs = extrn_config_fhrs[i]
+        lbc_offset_fhrs = lbc_block["EXTRN_MDL_LBCS_OFFSET_HRS"]
+        nco_net = expt_config["nco"]["NET_default"]
+        dot_ensmem = f".mem{member}"
+        fcst_hhh = lbc_spec_fhrs - lbc_offset_fhrs
+        fcst_hhh_FV3LAM = f"{fcst_hhh:03d}"
+
+        lbc_output_fn = (
+            rundir
+            / f"{nco_net}.{cycle}{dot_ensmem}.gfs_bndy.tile7.f{fcst_hhh_FV3LAM}.nc"
+        )
+
+        links[lbc_input_fn] = str(lbc_output_fn)
+
+    uwlink(target_dir=rundir.parent, config=links)
+
+
+if __name__ == "__main__":
+
+    use_uwtools_logger()
+
+    args = parse_args(sys.argv[1:])
+    run_chgres_cube(
+        config_file=args.config_file,
+        cycle=args.cycle,
+        key_path=args.key_path,
+        member=args.member,
+    )
diff --git a/ush/ccpp_suites_defaults.yaml b/ush/ccpp_suites_defaults.yaml
index 0c9c2cb0d..86960655b 100644
--- a/ush/ccpp_suites_defaults.yaml
+++ b/ush/ccpp_suites_defaults.yaml
@@ -1,8 +1,8 @@
-gsd_defaults: &gsd_defaults
+gsd_defaults: &chgres_cube_gsd_defaults
   namelist:
     update_values:
       config:
-        varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt"
+        varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GSDphys_var_map.txt"
 
 orog_gsl_defaults: &orog_gsl_defaults
   config:
diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml
index 8d0fce628..d728eb231 100644
--- a/ush/config_defaults.yaml
+++ b/ush/config_defaults.yaml
@@ -1703,7 +1703,7 @@ task_make_ics:
        - module use {{ user.HOMEdir }}/modulefiles
        - module load build_{{ user.MACHINE|lower }}_{{ workflow.COMPILER }}
       mpiargs:
-        - "--export=NONE"
+        - "--ntasks $SLURM_CPUS_ON_NODE"
       mpicmd: '{{ platform.BATCH_RUN_CMD }}'
       threads: 1
       namelist:
@@ -1715,7 +1715,7 @@
           orog_files_target_grid: "{{ 'CRES' | env }}{{ workflow.DOT_OR_USCORE }}oro_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH4 }}.nc"
           vcoord_file_target_grid: "{{ workflow.FIXam }}/global_hyblev.l65.txt"
           varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt"
-          data_dir_input_grid: "{{ task_make_lbcs.chgres_cube.run_dir }}"
+          data_dir_input_grid: "{{ task_make_ics.chgres_cube.rundir }}"
           cycle_mon: !int "{{ cycle.strftime('%m') }}"
           cycle_day: !int "{{ cycle.strftime('%d') }}"
           cycle_hour: !int "{{ cycle.strftime('%H') }}"
@@ -1735,7 +1735,6 @@ task_make_ics:
           lai_from_climo: true
           tg3_from_soil: false
     rundir: '{{ task_run_fcst.rundir }}/tmp_MAKE_ICS'
-
 #----------------------------
 # MAKE LBCS config parameters
 #-----------------------------
@@ -1769,7 +1768,7 @@ task_make_lbcs:
        - module use {{ user.HOMEdir }}/modulefiles
        - module load build_{{ user.MACHINE|lower }}_{{ workflow.COMPILER }}
       mpiargs:
-        - "--export=NONE"
+        - "--ntasks $SLURM_CPUS_ON_NODE"
       mpicmd: '{{ platform.BATCH_RUN_CMD
}}' threads: 1 namelist: @@ -1781,7 +1780,7 @@ task_make_lbcs: orog_files_target_grid: "{{ 'CRES' | env }}{{ workflow.DOT_OR_USCORE }}oro_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH4 }}.nc" vcoord_file_target_grid: "{{ workflow.FIXam }}/global_hyblev.l65.txt" varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" - data_dir_input_grid: "{{ task_make_lbcs.chgres_cube.run_dir }}" + data_dir_input_grid: "{{ task_make_lbcs.chgres_cube.rundir }}" cycle_mon: !int "{{ cycle.strftime('%m') }}" cycle_day: !int "{{ cycle.strftime('%d') }}" cycle_hour: !int "{{ cycle.strftime('%H') }}" @@ -1791,9 +1790,7 @@ task_make_lbcs: halo_blend: !int "{{ global.HALO_BLEND }}" input_type: "gaussian_nemsio" external_model: "FV3GFS" - rundir: '{{ task_run_fcst.rundir }}/tmp_MAKE_LBCS' - - + rundir: '{{ task_run_fcst.rundir}}/tmp_MAKE_LBCS' #---------------------------- # IO_LAYOUT_Y FORECAST config parameters #----------------------------- diff --git a/ush/external_model_defaults.yaml b/ush/external_model_defaults.yaml index 24ab8a131..44e42d0c5 100644 --- a/ush/external_model_defaults.yaml +++ b/ush/external_model_defaults.yaml @@ -103,7 +103,7 @@ HRRR: config: <<: *grib2_default_config external_model: "HRRR" - geogrid_file_input_grid: "{{ workflow.FIXgsm }}/geo_em.d01.nc_HRRRX" + geogrid_file_input_grid: "{{ platform.FIXgsm }}/geo_em.d01.nc_HRRRX" tg3_from_soil: True task_make_lbcs: chgres_cube: @@ -112,7 +112,7 @@ HRRR: config: <<: *grib2_default_config external_model: "HRRR" - geogrid_file_input_grid: "{{ workflow.FIXgsm }}/geo_em.d01.nc_HRRRX" + geogrid_file_input_grid: "{{ platform.FIXgsm }}/geo_em.d01.nc_HRRRX" tg3_from_soil: True RAP: @@ -123,7 +123,7 @@ RAP: config: <<: *grib2_default_config external_model: "RAP" - geogrid_file_input_grid: "{{ workflow.FIXgsm }}/geo_em.d01.nc_RAPX" + geogrid_file_input_grid: "{{ platform.FIXgsm }}/geo_em.d01.nc_RAPX" tg3_from_soil: True task_make_lbcs: chgres_cube: @@ -132,7 +132,7 @@ RAP: config: <<: *grib2_default_config external_model: "RAP" - geogrid_file_input_grid: "{{ workflow.FIXgsm }}/geo_em.d01.nc_RAPX" + geogrid_file_input_grid: "{{ platform.FIXgsm }}/geo_em.d01.nc_RAPX" tg3_from_soil: True NAM: diff --git a/uwtools b/uwtools index b7ed02f20..e2eb7fc7c 160000 --- a/uwtools +++ b/uwtools @@ -1 +1 @@ -Subproject commit b7ed02f20e87a92cc5a17facff17bafb53874e1b +Subproject commit e2eb7fc7cbfd00381382a249ab4558206dcd9477 From fc592043bff23993751dfbce1b853d146844c0f8 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Wed, 16 Oct 2024 18:30:31 +0000 Subject: [PATCH 25/47] Fix file copy logic --- scripts/chgres_cube.py | 86 +++++++++++++++++++++++----------------- ush/config_defaults.yaml | 4 +- 2 files changed, 51 insertions(+), 39 deletions(-) diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py index 3ce17e5b6..03dcc7374 100644 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -154,7 +154,6 @@ def run_chgres_cube(config_file, cycle, key_path, member): } } expt_config_cp.update_from(update_cfg) - logging.info(f"updated config: {expt_config_cp}") # reinstantiate driver chgres_cube_driver = ChgresCube( @@ -164,6 +163,31 @@ def run_chgres_cube(config_file, cycle, key_path, member): ) chgres_cube_driver.run() + # Deliver output data to a common location above the rundir. 
+ links = {} + + nco_net = expt_config["nco"]["NET_default"] + dot_ensmem = f".mem{member}" + tile_rgnl = expt_config["constants"]["TILE_RGNL"] + nh0 = expt_config["constants"]["NH0"] + cyc = str(expt_config["workflow"]["DATE_FIRST_CYCL"])[-2:] + + output_dir = os.path.join(os.path.dirname(rundir.parent), "INPUT") + links[f"out.atm.tile{tile_rgnl}.nc"] = str( + f"{output_dir}{nco_net}.t{cyc}z{dot_ensmem}.gfs_data.tile{tile_rgnl}.halo{nh0}.nc" + ) + links[f"out.sfc.tile{tile_rgnl}.nc"] = str( + f"{output_dir}{nco_net}.t{cyc}z{dot_ensmem}.sfc_data.tile{tile_rgnl}.halo{nh0}.nc" + ) + links[f"gfs_ctrl.nc"] = str( + f"{output_dir}{nco_net}.t{cyc}z{dot_ensmem}.gfs_ctrl.nc" + ) + links[f"gfs.bndy.nc"] = str( + f"{output_dir}{nco_net}.t{cyc}z{dot_ensmem}.gfs_bndy.tile{tile_rgnl}.f000.nc" + ) + + uwlink(target_dir=rundir, config=links) + # Loop the run of chgres_cube for the forecast length if lbcs else: rundir = Path(chgres_cube_driver.config["rundir"]) @@ -193,7 +217,7 @@ def run_chgres_cube(config_file, cycle, key_path, member): update = {"atm_files_input_grid": fn_atm} update_cfg = { - "task_make_ics": { + "task_make_lbcs": { "chgres_cube": { "namelist": {"update_values": {"config": update}} } @@ -209,46 +233,34 @@ def run_chgres_cube(config_file, cycle, key_path, member): ) chgres_cube_driver.run() + # Deliver output data to a common location above the rundir. + links = {} + + lbc_spec_fhrs = extrn_config_fhrs[i] + lbc_offset_fhrs = expt_config_cp["task_get_extrn_lbcs"][ + "EXTRN_MDL_LBCS_OFFSET_HRS" + ] + fcst_hhh = int(lbc_spec_fhrs) - int(lbc_offset_fhrs) + fcst_hhh_FV3LAM = f"{fcst_hhh:03d}" + cyc = str(expt_config["workflow"]["DATE_FIRST_CYCL"])[-2:] + + nco_net = expt_config["nco"]["NET_default"] + dot_ensmem = f".mem{member}" + + lbc_input_fn = "gfs.bndy.nc" + output_dir = os.path.join(os.path.dirname(rundir.parent), "INPUT") + lbc_output_fn = str( + f"{output_dir}{nco_net}.t{cyc}z{dot_ensmem}" + f".gfs_bndy.tile7.f{fcst_hhh_FV3LAM}.nc" + ) + links[lbc_input_fn] = str(lbc_output_fn) + uwlink(target_dir=rundir, config=links) + # error message if not (rundir / "runscript.chgres_cube.done").is_file(): print("Error occurred running chgres_cube. Please see component error logs.") sys.exit(1) - # Deliver output data - expt_config = get_yaml_config(config_file) - chgres_cube_config = _walk_key_path(expt_config, key_path) - - # Move initial condition, surface, control, and 0-th hour lateral bound- - # ary files to ICs_BCs directory. 
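
For orientation, `uwtools.api.fs.link` takes `config` as a mapping of symlink name to existing target and creates those links under `target_dir`; the "fix output directories" patch that follows settles on the delivered INPUT-side names as the keys, pointing back at the products in the run directory. A stripped-down sketch of that final arrangement with invented paths:

    from uwtools.api.fs import link as uwlink

    links = {"srw.t12z.gfs_bndy.tile7.f003.nc": "/expt/2021051212/tmp_MAKE_LBCS/gfs.bndy.nc"}
    uwlink(target_dir="/expt/2021051212/INPUT", config=links)
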
- links = {} - for label in chgres_cube_config["output_file_labels"]: - # deepcopy here because desired_output_name is parameterized within the loop - expt_config_cp = get_yaml_config(deepcopy(expt_config.data)) - expt_config_cp.dereference( - context={ - "cycle": cycle, - "file_label": label, - **expt_config_cp, - } - ) - lbc_block = _walk_key_path(expt_config_cp, key_path) - lbc_input_fn = "gfs.bndy.nc" - lbc_spec_fhrs = extrn_config_fhrs[i] - lbc_offset_fhrs = lbc_block["EXTRN_MDL_LBCS_OFFSET_HRS"] - nco_net = expt_config["nco"]["NET_default"] - dot_ensmem = f".mem{member}" - fcst_hhh = lbc_spec_fhrs - lbc_offset_fhrs - fcst_hhh_FV3LAM = f"{fcst_hhh:03d}" - - lbc_output_fn = ( - rundir - / f"{nco_net}.{cycle}{dot_ensmem}.gfs_bndy.tile7.f{fcst_hhh_FV3LAM}.nc" - ) - - links[lbc_input_fn] = str(lbc_output_fn) - - uwlink(target_dir=rundir.parent, config=links) - if __name__ == "__main__": diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index d728eb231..b76a46554 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -1715,7 +1715,7 @@ task_make_ics: orog_files_target_grid: "{{ 'CRES' | env }}{{ workflow.DOT_OR_USCORE }}oro_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH4 }}.nc" vcoord_file_target_grid: "{{ workflow.FIXam }}/global_hyblev.l65.txt" varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" - data_dir_input_grid: "{{ task_make_ics.chgres_cube.rundir }}" + data_dir_input_grid: '{{workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}/{{ task_get_extrn_ics.EXTRN_MDL_NAME_ICS }}/for_ICS{{ "/mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}' cycle_mon: !int "{{ cycle.strftime('%m') }}" cycle_day: !int "{{ cycle.strftime('%d') }}" cycle_hour: !int "{{ cycle.strftime('%H') }}" @@ -1780,7 +1780,7 @@ task_make_lbcs: orog_files_target_grid: "{{ 'CRES' | env }}{{ workflow.DOT_OR_USCORE }}oro_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH4 }}.nc" vcoord_file_target_grid: "{{ workflow.FIXam }}/global_hyblev.l65.txt" varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" - data_dir_input_grid: "{{ task_make_lbcs.chgres_cube.rundir }}" + data_dir_input_grid: '{{workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}/{{ task_get_extrn_lbcs.EXTRN_MDL_NAME_LBCS }}/for_LBCS{{ "/mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}' cycle_mon: !int "{{ cycle.strftime('%m') }}" cycle_day: !int "{{ cycle.strftime('%d') }}" cycle_hour: !int "{{ cycle.strftime('%H') }}" From 12fb677c4cd87c0454298433273eaec7823f81a5 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Fri, 18 Oct 2024 17:12:48 +0000 Subject: [PATCH 26/47] fix output directories --- scripts/chgres_cube.py | 40 +++++++++++++++++++--------------------- ush/setup.py | 15 +++++++++------ 2 files changed, 28 insertions(+), 27 deletions(-) diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py index 03dcc7374..1a6cac3f2 100644 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -165,28 +165,26 @@ def run_chgres_cube(config_file, cycle, key_path, member): # Deliver output data to a common location above the rundir. 
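
The rework in this "fix output directories" patch stages an INPUT directory as a sibling of the task's tmp run directory and links the products into it; schematically (paths illustrative only):

    import os
    from pathlib import Path

    rundir = Path("/expt/2021051212/tmp_MAKE_ICS")     # hypothetical layout
    output_dir = os.path.join(rundir.parent, "INPUT")  # -> /expt/2021051212/INPUT
    os.makedirs(output_dir, exist_ok=True)
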
links = {} - nco_net = expt_config["nco"]["NET_default"] - dot_ensmem = f".mem{member}" tile_rgnl = expt_config["constants"]["TILE_RGNL"] nh0 = expt_config["constants"]["NH0"] cyc = str(expt_config["workflow"]["DATE_FIRST_CYCL"])[-2:] - output_dir = os.path.join(os.path.dirname(rundir.parent), "INPUT") - links[f"out.atm.tile{tile_rgnl}.nc"] = str( - f"{output_dir}{nco_net}.t{cyc}z{dot_ensmem}.gfs_data.tile{tile_rgnl}.halo{nh0}.nc" - ) - links[f"out.sfc.tile{tile_rgnl}.nc"] = str( - f"{output_dir}{nco_net}.t{cyc}z{dot_ensmem}.sfc_data.tile{tile_rgnl}.halo{nh0}.nc" + output_dir = os.path.join(rundir.parent, "INPUT") + os.makedirs(output_dir, exist_ok=True) + links[ + f"{nco_net}.t{cyc}z.gfs_data.tile{tile_rgnl}.halo{nh0}.nc" + ] = str(rundir / f"out.atm.tile{tile_rgnl}.nc") + links[ + f"{nco_net}.t{cyc}z.sfc_data.tile{tile_rgnl}.halo{nh0}.nc" + ] = str(rundir / f"out.sfc.tile{tile_rgnl}.nc") + links[f"{nco_net}.t{cyc}z.gfs_ctrl.nc"] = str( + rundir / f"gfs_ctrl.nc" ) - links[f"gfs_ctrl.nc"] = str( - f"{output_dir}{nco_net}.t{cyc}z{dot_ensmem}.gfs_ctrl.nc" + links[f"{nco_net}.t{cyc}z.gfs_bndy.tile{tile_rgnl}.f000.nc"] = str( + rundir / f"gfs.bndy.nc" ) - links[f"gfs.bndy.nc"] = str( - f"{output_dir}{nco_net}.t{cyc}z{dot_ensmem}.gfs_bndy.tile{tile_rgnl}.f000.nc" - ) - - uwlink(target_dir=rundir, config=links) + uwlink(target_dir=output_dir, config=links) # Loop the run of chgres_cube for the forecast length if lbcs else: @@ -245,16 +243,16 @@ def run_chgres_cube(config_file, cycle, key_path, member): cyc = str(expt_config["workflow"]["DATE_FIRST_CYCL"])[-2:] nco_net = expt_config["nco"]["NET_default"] - dot_ensmem = f".mem{member}" - lbc_input_fn = "gfs.bndy.nc" - output_dir = os.path.join(os.path.dirname(rundir.parent), "INPUT") + lbc_input_fn = rundir / f"gfs.bndy.nc" + output_dir = os.path.join(rundir.parent, "INPUT") + os.makedirs(output_dir, exist_ok=True) lbc_output_fn = str( - f"{output_dir}{nco_net}.t{cyc}z{dot_ensmem}" + f"{nco_net}.t{cyc}z" f".gfs_bndy.tile7.f{fcst_hhh_FV3LAM}.nc" ) - links[lbc_input_fn] = str(lbc_output_fn) - uwlink(target_dir=rundir, config=links) + links[lbc_output_fn] = str(lbc_input_fn) + uwlink(target_dir=output_dir, config=links) # error message if not (rundir / "runscript.chgres_cube.done").is_file(): diff --git a/ush/setup.py b/ush/setup.py index 30a99be96..fe4535f43 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -228,11 +228,14 @@ def add_jobname(tasks): update_dict(ccpp_cfg, cfg_d) # Load external model-specific settings - extrn_mdl = cfg_d["task_get_extrn_ics"]["EXTRN_MDL_NAME_ICS"] - extrn_cfg = get_yaml_config(Path(ushdir, "external_model_defaults.yaml")).get( - extrn_mdl, {} - ) - update_dict(extrn_cfg, cfg_d) + tasks = [("task_get_extrn_ics", "EXTRN_MDL_NAME_ICS", "task_make_lbcs"), + ("task_get_extrn_lbcs", "EXTRN_MDL_NAME_LBCS", "task_make_ics")] + + for task, mdl_key, make_key in tasks: + extrn_mdl = cfg_d[task][mdl_key] + extrn_cfg = get_yaml_config(Path(ushdir, "external_model_defaults.yaml")).get(extrn_mdl, {}) + del extrn_cfg[make_key] + update_dict(extrn_cfg, cfg_d) # Set "Home" directory, the top-level ufs-srweather-app directory homedir = os.path.abspath(os.path.dirname(__file__) + os.sep + os.pardir) @@ -1520,4 +1523,4 @@ def clean_rocoto_dict(rocotodict): # if __name__ == "__main__": USHdir = os.path.dirname(os.path.abspath(__file__)) - setup(USHdir) \ No newline at end of file + setup(USHdir) From f485be8270be8fd0d364ff4ef378d918346837cd Mon Sep 17 00:00:00 2001 From: WeirAE Date: Thu, 24 Oct 2024 14:17:31 +0000 Subject: [PATCH 27/47] 
All fundamental tests pass --- scripts/chgres_cube.py | 31 ++-- tests/WE2E/my_test.out | 105 ++++++++++++ .../tmp_MAKE_ICS/runscript.chgres_cube" | 11 ++ .../tmp_MAKE_LBCS/runscript.chgres_cube" | 11 ++ ush/external_model_defaults.yaml | 156 +++++++++--------- 5 files changed, 223 insertions(+), 91 deletions(-) create mode 100644 tests/WE2E/my_test.out create mode 100755 "tests/WE2E/{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}{{ \"/mem%s\" % (\"MEMBER\"|env) if global.DO_ENSEMBLE }}/tmp_MAKE_ICS/runscript.chgres_cube" create mode 100755 "tests/WE2E/{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}{{ \"/mem%s\" % (\"MEMBER\"|env) if global.DO_ENSEMBLE }}/tmp_MAKE_LBCS/runscript.chgres_cube" diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py index 1a6cac3f2..43bbd489a 100644 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -94,7 +94,7 @@ def parse_args(argv): return parser.parse_args(argv) -# pylint: disable=too-many-locals, too-many-statements +# pylint: disable=too-many-locals, too-many-statements, too-many-branches def run_chgres_cube(config_file, cycle, key_path, member): """ Setup and run the chgres_cube Driver. @@ -103,10 +103,19 @@ def run_chgres_cube(config_file, cycle, key_path, member): # The experiment config will have {{ MEMBER | env }} expressions in it that need to be # dereferenced during driver initialization. - os.environ["member"] = member - + os.environ["MEMBER"] = member expt_config = get_yaml_config(config_file) + dot_ensmem = ( + f".mem{member}" + if ( + expt_config["user"]["RUN_ENVIR"] == "nco" + and expt_config["global"]["DO_ENSEMBLE"] + and member + ) + else "" + ) + # dereference expressions during driver initialization CRES = expt_config["workflow"]["CRES"] os.environ["CRES"] = CRES @@ -168,20 +177,18 @@ def run_chgres_cube(config_file, cycle, key_path, member): nco_net = expt_config["nco"]["NET_default"] tile_rgnl = expt_config["constants"]["TILE_RGNL"] nh0 = expt_config["constants"]["NH0"] - cyc = str(expt_config["workflow"]["DATE_FIRST_CYCL"])[-2:] + cyc = str(expt_config["workflow"]["DATE_FIRST_CYCL"])[8:10] output_dir = os.path.join(rundir.parent, "INPUT") os.makedirs(output_dir, exist_ok=True) links[ - f"{nco_net}.t{cyc}z.gfs_data.tile{tile_rgnl}.halo{nh0}.nc" + f"{nco_net}.t{cyc}z{dot_ensmem}.gfs_data.tile{tile_rgnl}.halo{nh0}.nc" ] = str(rundir / f"out.atm.tile{tile_rgnl}.nc") links[ - f"{nco_net}.t{cyc}z.sfc_data.tile{tile_rgnl}.halo{nh0}.nc" + f"{nco_net}.t{cyc}z{dot_ensmem}.sfc_data.tile{tile_rgnl}.halo{nh0}.nc" ] = str(rundir / f"out.sfc.tile{tile_rgnl}.nc") - links[f"{nco_net}.t{cyc}z.gfs_ctrl.nc"] = str( - rundir / f"gfs_ctrl.nc" - ) - links[f"{nco_net}.t{cyc}z.gfs_bndy.tile{tile_rgnl}.f000.nc"] = str( + links[f"{nco_net}.t{cyc}z.gfs_ctrl.nc"] = str(rundir / f"gfs_ctrl.nc") + links[f"{nco_net}.t{cyc}z{dot_ensmem}.gfs_bndy.tile{tile_rgnl}.f000.nc"] = str( rundir / f"gfs.bndy.nc" ) uwlink(target_dir=output_dir, config=links) @@ -240,7 +247,7 @@ def run_chgres_cube(config_file, cycle, key_path, member): ] fcst_hhh = int(lbc_spec_fhrs) - int(lbc_offset_fhrs) fcst_hhh_FV3LAM = f"{fcst_hhh:03d}" - cyc = str(expt_config["workflow"]["DATE_FIRST_CYCL"])[-2:] + cyc = str(expt_config["workflow"]["DATE_FIRST_CYCL"])[8:10] nco_net = expt_config["nco"]["NET_default"] @@ -248,7 +255,7 @@ def run_chgres_cube(config_file, cycle, key_path, member): output_dir = os.path.join(rundir.parent, "INPUT") os.makedirs(output_dir, exist_ok=True) lbc_output_fn = str( - f"{nco_net}.t{cyc}z" + f"{nco_net}.t{cyc}z{dot_ensmem}" 
f".gfs_bndy.tile7.f{fcst_hhh_FV3LAM}.nc" ) links[lbc_output_fn] = str(lbc_input_fn) diff --git a/tests/WE2E/my_test.out b/tests/WE2E/my_test.out new file mode 100644 index 000000000..0af6849c6 --- /dev/null +++ b/tests/WE2E/my_test.out @@ -0,0 +1,105 @@ +Checking that all tests are valid +Will run 1 tests: +/scratch2/NAGAPE/epic/Brian.Weir/ufs-srweather-app/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml +Calling workflow generation function for test grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16 + + + ======================================================================== + Starting experiment generation... + ======================================================================== + + ======================================================================== + Starting function setup() in "setup.py"... + ======================================================================== + WORKFLOW ID = + + Specified directory or file already exists: + /scratch2/NAGAPE/epic/Brian.Weir/expt_dirs/grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16 + Moving (renaming) preexisting directory or file to: + /scratch2/NAGAPE/epic/Brian.Weir/expt_dirs/grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16_old_20241024_030803 + +Removing verification [meta]task + "task_get_obs_nohrsc" +from workflow since no fields belonging to observation type "NOHRSC" +are specified for verification. + +Removing verification [meta]task + "metatask_PcpCombine_fcst_ASNOW_all_accums_all_mems" +from workflow since no fields belonging to observation type "NOHRSC" +are specified for verification. + +Removing verification [meta]task + "metatask_GridStat_NOHRSC_all_accums_all_mems" +from workflow since no fields belonging to observation type "NOHRSC" +are specified for verification. + +Removing verification [meta]task + "metatask_GenEnsProd_EnsembleStat_NOHRSC" +from workflow since no fields belonging to observation type "NOHRSC" +are specified for verification. + +Removing verification [meta]task + "metatask_GridStat_NOHRSC_ensmeanprob_all_accums" +from workflow since no fields belonging to observation type "NOHRSC" +are specified for verification. + + Generating the global experiment variable definitions file here: + GLOBAL_VAR_DEFNS_FP = '/scratch2/NAGAPE/epic/Brian.Weir/expt_dirs/grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16/var_defns.yaml' + For more detailed information, set DEBUG to 'TRUE' in the experiment + configuration file ('config.yaml'). + + Creating rocoto workflow XML file (WFLOW_XML_FP): + WFLOW_XML_FP = '/scratch2/NAGAPE/epic/Brian.Weir/expt_dirs/grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16/FV3LAM_wflow.xml' + + To launch the workflow, change location to the experiment directory + (EXPTDIR) and issue the rocotrun command, as follows: + + > cd /scratch2/NAGAPE/epic/Brian.Weir/expt_dirs/grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16 + > rocotorun -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10 + + To check on the status of the workflow, issue the rocotostat command + (also from the experiment directory): + + > rocotostat -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10 + + Note that: + + 1) The rocotorun command must be issued after the completion of each + task in the workflow in order for the workflow to submit the next + task(s) to the queue. + + 2) In order for the output of the rocotostat command to be up-to-date, + the rocotorun command must be issued immediately before issuing the + rocotostat command. 
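
Worth pausing on the cycle-hour fix earlier in this patch: `DATE_FIRST_CYCL` appears to stringify in YYYYMMDDHHmm form (compare the cycle 202105121200 in the test log captured here), so the old `[-2:]` slice returned the minutes field while `[8:10]` isolates the hour that belongs in the `tHHz` file names:

    cycl = "202105121200"  # YYYYMMDDHHmm, matching the cycle shown in the test log
    print(cycl[-2:])       # "00" -- the minutes, wrong for a tHHz name
    print(cycl[8:10])      # "12" -- the intended cycle hour
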
+ + For automatic resubmission of the workflow (say every 3 minutes), the + following line can be added to the user's crontab (use 'crontab -e' to + edit the cron table): + + */3 * * * * cd /scratch2/NAGAPE/epic/Brian.Weir/expt_dirs/grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16 && ./launch_FV3LAM_wflow.sh called_from_cron="TRUE" + +Workflow for test grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16 successfully generated in +/scratch2/NAGAPE/epic/Brian.Weir/expt_dirs/grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16 + +All experiments have been generated; +Experiment file WE2E_tests_20241024030751.yaml created +Writing information for all experiments to WE2E_tests_20241024030751.yaml +Checking tests available for monitoring... +Starting experiment grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16_20241024030751 running +Setup complete; monitoring 1 experiments +Use ctrl-c to pause job submission/monitoring +10/24/24 03:20:46 UTC :: FV3LAM_wflow.xml :: Cycle 202105121200, Task run_fcst_mem002, jobid=1459914, in state DEAD (FAILED), ran for 55.0 seconds, exit status=1, try=1 (of 1) +10/24/24 03:20:47 UTC :: FV3LAM_wflow.xml :: Cycle 202105121200, Task run_fcst_mem001, jobid=1459913, in state DEAD (FAILED), ran for 59.0 seconds, exit status=1, try=1 (of 1) +Experiment grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16_20241024030751 is DEAD +Took 0:12:56.956921; will no longer monitor. +All 1 experiments finished +Calculating core-hour usage and printing final summary +---------------------------------------------------------------------------------------------------- +Experiment name | Status | Core hours used +---------------------------------------------------------------------------------------------------- +grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16_2024102403075 DEAD 3.84 +---------------------------------------------------------------------------------------------------- +Total DEAD 3.84 + +Detailed summary written to /scratch2/NAGAPE/epic/Brian.Weir/expt_dirs/WE2E_summary_20241024032053.txt + diff --git "a/tests/WE2E/{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}{{ \"/mem%s\" % (\"MEMBER\"|env) if global.DO_ENSEMBLE }}/tmp_MAKE_ICS/runscript.chgres_cube" "b/tests/WE2E/{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}{{ \"/mem%s\" % (\"MEMBER\"|env) if global.DO_ENSEMBLE }}/tmp_MAKE_ICS/runscript.chgres_cube" new file mode 100755 index 000000000..639d39111 --- /dev/null +++ "b/tests/WE2E/{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}{{ \"/mem%s\" % (\"MEMBER\"|env) if global.DO_ENSEMBLE }}/tmp_MAKE_ICS/runscript.chgres_cube" @@ -0,0 +1,11 @@ +#!/bin/bash + +module use /scratch2/NAGAPE/epic/Brian.Weir/ufs-srweather-app/modulefiles +module load build_hera_intel + +export KMP_AFFINITY=scatter +export OMP_NUM_THREADS=1 +export OMP_STACKSIZE=1024m + +time srun --export=ALL --ntasks $SLURM_CPUS_ON_NODE /scratch2/NAGAPE/epic/Brian.Weir/ufs-srweather-app/exec/chgres_cube +test $? 
-eq 0 && touch runscript.chgres_cube.done diff --git "a/tests/WE2E/{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}{{ \"/mem%s\" % (\"MEMBER\"|env) if global.DO_ENSEMBLE }}/tmp_MAKE_LBCS/runscript.chgres_cube" "b/tests/WE2E/{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}{{ \"/mem%s\" % (\"MEMBER\"|env) if global.DO_ENSEMBLE }}/tmp_MAKE_LBCS/runscript.chgres_cube" new file mode 100755 index 000000000..639d39111 --- /dev/null +++ "b/tests/WE2E/{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}{{ \"/mem%s\" % (\"MEMBER\"|env) if global.DO_ENSEMBLE }}/tmp_MAKE_LBCS/runscript.chgres_cube" @@ -0,0 +1,11 @@ +#!/bin/bash + +module use /scratch2/NAGAPE/epic/Brian.Weir/ufs-srweather-app/modulefiles +module load build_hera_intel + +export KMP_AFFINITY=scatter +export OMP_NUM_THREADS=1 +export OMP_STACKSIZE=1024m + +time srun --export=ALL --ntasks $SLURM_CPUS_ON_NODE /scratch2/NAGAPE/epic/Brian.Weir/ufs-srweather-app/exec/chgres_cube +test $? -eq 0 && touch runscript.chgres_cube.done diff --git a/ush/external_model_defaults.yaml b/ush/external_model_defaults.yaml index 44e42d0c5..ffd315b6a 100644 --- a/ush/external_model_defaults.yaml +++ b/ush/external_model_defaults.yaml @@ -1,23 +1,20 @@ -grib2_default_config: &grib2_default_config - input_type: "grib2" - convert_nst: False FV3GFS: task_make_ics: chgres_cube: - namelist: - update_values: - config: - tracers_input: [\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"] - tracers: [\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"] + namelist: + update_values: + config: + tracers_input: ["spfh","clwmr","o3mr","icmr","rwmr","snmr","grle"] + tracers: ["sphum","liq_wat","o3mr","ice_wat","rainwat","snowwat","graupel"] task_make_lbcs: chgres_cube: - namelist: - update_values: - config: - tracers_input: [\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"] - tracers: [\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"] + namelist: + update_values: + config: + tracers_input: ["spfh","clwmr","o3mr","icmr","rwmr","snmr","grle"] + tracers: ["sphum","liq_wat","o3mr","ice_wat","rainwat","snowwat","graupel"] GSMGFS: task_make_ics: @@ -28,8 +25,8 @@ GSMGFS: input_type: "gfs_gaussian_nemsio" external_model: "GSMGFS" convert_nst: False - tracers_input: [\"spfh\",\"clwmr\",\"o3mr\"] - tracers: [\"sphum\",\"liq_wat\",\"o3mr\"] + tracers_input: ["spfh","clwmr","o3mr"] + tracers: ["sphum","liq_wat","o3mr"] task_make_lbcs: chgres_cube: namelist: @@ -38,8 +35,8 @@ GSMGFS: input_type: "gfs_gaussian_nemsio" external_model: "GSMGFS" convert_nst: False - tracers_input: [\"spfh\",\"clwmr\",\"o3mr\"] - tracers: [\"sphum\",\"liq_wat\",\"o3mr\"] + tracers_input: ["spfh","clwmr","o3mr"] + tracers: ["sphum","liq_wat","o3mr"] UFS-CASE-STUDY: task_make_ics: @@ -48,105 +45,106 @@ UFS-CASE-STUDY: update_values: config: external_model: "UFS-CASE-STUDY" - tracers_input: [\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"] - tracers: [\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"] + tracers_input: ["spfh","clwmr","o3mr","icmr","rwmr","snmr","grle"] + tracers: ["sphum","liq_wat","o3mr","ice_wat","rainwat","snowwat","graupel"] task_make_lbcs: chgres_cube: - namelist: - update_values: - config: - external_model: "UFS-CASE-STUDY" - tracers_input: [\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"] - tracers: [\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"] + namelist: + update_values: + config: + external_model: 
"UFS-CASE-STUDY" + tracers_input: ["spfh","clwmr","o3mr","icmr","rwmr","snmr","grle"] + tracers: ["sphum","liq_wat","o3mr","ice_wat","rainwat","snowwat","graupel"] GDAS: task_make_ics: chgres_cube: - namelist: - update_values: - config: - external_model: "GFS" - tracers_input: [\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"] - tracers: [\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"] - tg3_from_soil: True + namelist: + update_values: + config: + external_model: "GFS" + tracers_input: ["spfh","clwmr","o3mr","icmr","rwmr","snmr","grle"] + tracers: ["sphum","liq_wat","o3mr","ice_wat","rainwat","snowwat","graupel"] + tg3_from_soil: True task_make_lbcs: chgres_cube: - namelist: - update_values: - config: - external_model: "GFS" - tracers_input: [\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"] - tracers: [\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"] - tg3_from_soil: True + namelist: + update_values: + config: + external_model: "GFS" + tracers_input: ["spfh","clwmr","o3mr","icmr","rwmr","snmr","grle"] + tracers: ["sphum","liq_wat","o3mr","ice_wat","rainwat","snowwat","graupel"] GEFS: task_make_ics: chgres_cube: namelist: - update_values: - config: - <<: *grib2_default_config - external_model: "GFS" + update_values: + config: + input_type: "grib2" + convert_nst: False + external_model: "GFS" task_make_lbcs: chgres_cube: namelist: - update_values: - config: - <<: *grib2_default_config - external_model: "GFS" + update_values: + config: + input_type: "grib2" + external_model: "GFS" HRRR: task_make_ics: chgres_cube: namelist: - update_values: - config: - <<: *grib2_default_config - external_model: "HRRR" - geogrid_file_input_grid: "{{ platform.FIXgsm }}/geo_em.d01.nc_HRRRX" - tg3_from_soil: True + update_values: + config: + input_type: "grib2" + convert_nst: False + external_model: "HRRR" + geogrid_file_input_grid: "{{ platform.FIXgsm }}/geo_em.d01.nc_HRRRX" + tg3_from_soil: True + nsoill_out: 9 task_make_lbcs: chgres_cube: namelist: - update_values: - config: - <<: *grib2_default_config - external_model: "HRRR" - geogrid_file_input_grid: "{{ platform.FIXgsm }}/geo_em.d01.nc_HRRRX" - tg3_from_soil: True + update_values: + config: + input_type: "grib2" + external_model: "HRRR" RAP: task_make_ics: chgres_cube: namelist: - update_values: - config: - <<: *grib2_default_config - external_model: "RAP" - geogrid_file_input_grid: "{{ platform.FIXgsm }}/geo_em.d01.nc_RAPX" - tg3_from_soil: True + update_values: + config: + input_type: "grib2" + convert_nst: False + external_model: "RAP" + geogrid_file_input_grid: "{{ platform.FIXgsm }}/geo_em.d01.nc_RAPX" + tg3_from_soil: True + nsoill_out: 9 task_make_lbcs: chgres_cube: namelist: - update_values: - config: - <<: *grib2_default_config - external_model: "RAP" - geogrid_file_input_grid: "{{ platform.FIXgsm }}/geo_em.d01.nc_RAPX" - tg3_from_soil: True + update_values: + config: + input_type: "grib2" + external_model: "RAP" NAM: task_make_ics: chgres_cube: namelist: - update_values: - config: - <<: *grib2_default_config - external_model: "NAM" + update_values: + config: + input_type: "grib2" + convert_nst: False + external_model: "NAM" task_make_lbcs: chgres_cube: namelist: - update_values: - config: - <<: *grib2_default_config - external_model: "NAM" + update_values: + config: + input_type: "grib2" + external_model: "NAM" From fccb757f06a9928eee6e0f2491df3c3aa5fd31e3 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Thu, 24 Oct 2024 14:19:44 +0000 Subject: [PATCH 28/47] 
fix extra files 1 --- tests/WE2E/my_test.out | 105 ------------------ .../tmp_MAKE_ICS/runscript.chgres_cube" | 11 -- .../tmp_MAKE_LBCS/runscript.chgres_cube" | 11 -- 3 files changed, 127 deletions(-) delete mode 100644 tests/WE2E/my_test.out delete mode 100755 "tests/WE2E/{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}{{ \"/mem%s\" % (\"MEMBER\"|env) if global.DO_ENSEMBLE }}/tmp_MAKE_ICS/runscript.chgres_cube" delete mode 100755 "tests/WE2E/{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}{{ \"/mem%s\" % (\"MEMBER\"|env) if global.DO_ENSEMBLE }}/tmp_MAKE_LBCS/runscript.chgres_cube" diff --git a/tests/WE2E/my_test.out b/tests/WE2E/my_test.out deleted file mode 100644 index 0af6849c6..000000000 --- a/tests/WE2E/my_test.out +++ /dev/null @@ -1,105 +0,0 @@ -Checking that all tests are valid -Will run 1 tests: -/scratch2/NAGAPE/epic/Brian.Weir/ufs-srweather-app/tests/WE2E/test_configs/grids_extrn_mdls_suites_community/config.grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16.yaml -Calling workflow generation function for test grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16 - - - ======================================================================== - Starting experiment generation... - ======================================================================== - - ======================================================================== - Starting function setup() in "setup.py"... - ======================================================================== - WORKFLOW ID = - - Specified directory or file already exists: - /scratch2/NAGAPE/epic/Brian.Weir/expt_dirs/grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16 - Moving (renaming) preexisting directory or file to: - /scratch2/NAGAPE/epic/Brian.Weir/expt_dirs/grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16_old_20241024_030803 - -Removing verification [meta]task - "task_get_obs_nohrsc" -from workflow since no fields belonging to observation type "NOHRSC" -are specified for verification. - -Removing verification [meta]task - "metatask_PcpCombine_fcst_ASNOW_all_accums_all_mems" -from workflow since no fields belonging to observation type "NOHRSC" -are specified for verification. - -Removing verification [meta]task - "metatask_GridStat_NOHRSC_all_accums_all_mems" -from workflow since no fields belonging to observation type "NOHRSC" -are specified for verification. - -Removing verification [meta]task - "metatask_GenEnsProd_EnsembleStat_NOHRSC" -from workflow since no fields belonging to observation type "NOHRSC" -are specified for verification. - -Removing verification [meta]task - "metatask_GridStat_NOHRSC_ensmeanprob_all_accums" -from workflow since no fields belonging to observation type "NOHRSC" -are specified for verification. - - Generating the global experiment variable definitions file here: - GLOBAL_VAR_DEFNS_FP = '/scratch2/NAGAPE/epic/Brian.Weir/expt_dirs/grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16/var_defns.yaml' - For more detailed information, set DEBUG to 'TRUE' in the experiment - configuration file ('config.yaml'). 
- - Creating rocoto workflow XML file (WFLOW_XML_FP): - WFLOW_XML_FP = '/scratch2/NAGAPE/epic/Brian.Weir/expt_dirs/grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16/FV3LAM_wflow.xml' - - To launch the workflow, change location to the experiment directory - (EXPTDIR) and issue the rocotrun command, as follows: - - > cd /scratch2/NAGAPE/epic/Brian.Weir/expt_dirs/grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16 - > rocotorun -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10 - - To check on the status of the workflow, issue the rocotostat command - (also from the experiment directory): - - > rocotostat -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10 - - Note that: - - 1) The rocotorun command must be issued after the completion of each - task in the workflow in order for the workflow to submit the next - task(s) to the queue. - - 2) In order for the output of the rocotostat command to be up-to-date, - the rocotorun command must be issued immediately before issuing the - rocotostat command. - - For automatic resubmission of the workflow (say every 3 minutes), the - following line can be added to the user's crontab (use 'crontab -e' to - edit the cron table): - - */3 * * * * cd /scratch2/NAGAPE/epic/Brian.Weir/expt_dirs/grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16 && ./launch_FV3LAM_wflow.sh called_from_cron="TRUE" - -Workflow for test grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16 successfully generated in -/scratch2/NAGAPE/epic/Brian.Weir/expt_dirs/grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16 - -All experiments have been generated; -Experiment file WE2E_tests_20241024030751.yaml created -Writing information for all experiments to WE2E_tests_20241024030751.yaml -Checking tests available for monitoring... -Starting experiment grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16_20241024030751 running -Setup complete; monitoring 1 experiments -Use ctrl-c to pause job submission/monitoring -10/24/24 03:20:46 UTC :: FV3LAM_wflow.xml :: Cycle 202105121200, Task run_fcst_mem002, jobid=1459914, in state DEAD (FAILED), ran for 55.0 seconds, exit status=1, try=1 (of 1) -10/24/24 03:20:47 UTC :: FV3LAM_wflow.xml :: Cycle 202105121200, Task run_fcst_mem001, jobid=1459913, in state DEAD (FAILED), ran for 59.0 seconds, exit status=1, try=1 (of 1) -Experiment grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16_20241024030751 is DEAD -Took 0:12:56.956921; will no longer monitor. 
-All 1 experiments finished -Calculating core-hour usage and printing final summary ----------------------------------------------------------------------------------------------------- -Experiment name | Status | Core hours used ----------------------------------------------------------------------------------------------------- -grid_RRFS_CONUS_25km_ics_NAM_lbcs_NAM_suite_GFS_v16_2024102403075 DEAD 3.84 ----------------------------------------------------------------------------------------------------- -Total DEAD 3.84 - -Detailed summary written to /scratch2/NAGAPE/epic/Brian.Weir/expt_dirs/WE2E_summary_20241024032053.txt - diff --git "a/tests/WE2E/{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}{{ \"/mem%s\" % (\"MEMBER\"|env) if global.DO_ENSEMBLE }}/tmp_MAKE_ICS/runscript.chgres_cube" "b/tests/WE2E/{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}{{ \"/mem%s\" % (\"MEMBER\"|env) if global.DO_ENSEMBLE }}/tmp_MAKE_ICS/runscript.chgres_cube" deleted file mode 100755 index 639d39111..000000000 --- "a/tests/WE2E/{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}{{ \"/mem%s\" % (\"MEMBER\"|env) if global.DO_ENSEMBLE }}/tmp_MAKE_ICS/runscript.chgres_cube" +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash - -module use /scratch2/NAGAPE/epic/Brian.Weir/ufs-srweather-app/modulefiles -module load build_hera_intel - -export KMP_AFFINITY=scatter -export OMP_NUM_THREADS=1 -export OMP_STACKSIZE=1024m - -time srun --export=ALL --ntasks $SLURM_CPUS_ON_NODE /scratch2/NAGAPE/epic/Brian.Weir/ufs-srweather-app/exec/chgres_cube -test $? -eq 0 && touch runscript.chgres_cube.done diff --git "a/tests/WE2E/{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}{{ \"/mem%s\" % (\"MEMBER\"|env) if global.DO_ENSEMBLE }}/tmp_MAKE_LBCS/runscript.chgres_cube" "b/tests/WE2E/{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}{{ \"/mem%s\" % (\"MEMBER\"|env) if global.DO_ENSEMBLE }}/tmp_MAKE_LBCS/runscript.chgres_cube" deleted file mode 100755 index 639d39111..000000000 --- "a/tests/WE2E/{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}{{ \"/mem%s\" % (\"MEMBER\"|env) if global.DO_ENSEMBLE }}/tmp_MAKE_LBCS/runscript.chgres_cube" +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash - -module use /scratch2/NAGAPE/epic/Brian.Weir/ufs-srweather-app/modulefiles -module load build_hera_intel - -export KMP_AFFINITY=scatter -export OMP_NUM_THREADS=1 -export OMP_STACKSIZE=1024m - -time srun --export=ALL --ntasks $SLURM_CPUS_ON_NODE /scratch2/NAGAPE/epic/Brian.Weir/ufs-srweather-app/exec/chgres_cube -test $? 
-eq 0 && touch runscript.chgres_cube.done From 7bc77323d5840d66bef0efacf5d1396c0afe89af Mon Sep 17 00:00:00 2001 From: WeirAE Date: Thu, 24 Oct 2024 11:14:09 -0500 Subject: [PATCH 29/47] First attempt to fix erroneous changes --- ush/set_fv3nml_sfc_climo_filenames.py | 24 +++++++----------------- uwtools | 2 +- 2 files changed, 8 insertions(+), 18 deletions(-) diff --git a/ush/set_fv3nml_sfc_climo_filenames.py b/ush/set_fv3nml_sfc_climo_filenames.py index a17bcea6b..46dc034de 100644 --- a/ush/set_fv3nml_sfc_climo_filenames.py +++ b/ush/set_fv3nml_sfc_climo_filenames.py @@ -31,12 +31,11 @@ "FV3_NML_FP", "PARMdir", "RUN_ENVIR", -] + ] # pylint: disable=undefined-variable - def set_fv3nml_sfc_climo_filenames(config, debug=False): """ This function sets the values of the variables in @@ -55,9 +54,7 @@ def set_fv3nml_sfc_climo_filenames(config, debug=False): import_vars(dictionary=config, env_vars=NEEDED_VARS) - fixed_cfg = get_yaml_config(os.path.join(PARMdir, "fixed_files_mapping.yaml"))[ - "fixed_files" - ] + fixed_cfg = get_yaml_config(os.path.join(PARMdir, "fixed_files_mapping.yaml"))["fixed_files"] # The regular expression regex_search set below will be used to extract # from the elements of the array FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING @@ -83,9 +80,7 @@ def set_fv3nml_sfc_climo_filenames(config, debug=False): file_path = os.path.join(FIXlam, f"{CRES}.{sfc_climo_field_name}.{suffix}") if RUN_ENVIR != "nco": - file_path = os.path.relpath( - os.path.realpath(file_path), start=dummy_run_dir - ) + file_path = os.path.relpath(os.path.realpath(file_path), start=dummy_run_dir) namsfc_dict[nml_var_name] = file_path @@ -111,8 +106,7 @@ def set_fv3nml_sfc_climo_filenames(config, debug=False): output_file=FV3_NML_FP, output_format="nml", update_config=get_nml_config(settings), - ) - + ) def parse_args(argv): """Parse command line arguments""" @@ -125,12 +119,8 @@ def parse_args(argv): required=True, help="Path to var_defns file.", ) - parser.add_argument( - "-d", - "--debug", - action="store_true", - help="Script will be run in debug mode with more verbose output", - ) + parser.add_argument('-d', '--debug', action='store_true', + help='Script will be run in debug mode with more verbose output') return parser.parse_args(argv) @@ -139,4 +129,4 @@ def parse_args(argv): args = parse_args(sys.argv[1:]) cfg = get_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) - set_fv3nml_sfc_climo_filenames(cfg, args.debug) + set_fv3nml_sfc_climo_filenames(cfg, args.debug) \ No newline at end of file diff --git a/uwtools b/uwtools index e2eb7fc7c..406750c80 160000 --- a/uwtools +++ b/uwtools @@ -1 +1 @@ -Subproject commit e2eb7fc7cbfd00381382a249ab4558206dcd9477 +Subproject commit 406750c80746424f2ee537f79130ec1f8aeab0b7 From 80bca4669b145722206890ae15e1716d019c1d52 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Thu, 24 Oct 2024 12:06:37 -0500 Subject: [PATCH 30/47] remove spurious uwtools directory --- uwtools | 1 - 1 file changed, 1 deletion(-) delete mode 160000 uwtools diff --git a/uwtools b/uwtools deleted file mode 160000 index 406750c80..000000000 --- a/uwtools +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 406750c80746424f2ee537f79130ec1f8aeab0b7 From 4300a3e7b73d6df28a9e364294801ff068b1fb1c Mon Sep 17 00:00:00 2001 From: WeirAE Date: Thu, 24 Oct 2024 12:18:46 -0500 Subject: [PATCH 31/47] Fix missing newline --- tests/WE2E/utils.py | 2 +- ush/config_defaults_aqm.yaml | 2 +- ush/create_aqm_rc_file.py | 2 +- ush/create_diag_table_file.py | 2 +- ush/create_model_configure_file.py | 2 +- 
ush/create_ufs_configure_file.py | 2 +- ush/generate_FV3LAM_wflow.py | 2 +- ush/link_fix.py | 2 +- ush/set_fv3nml_sfc_climo_filenames.py | 2 +- 9 files changed, 9 insertions(+), 9 deletions(-) diff --git a/tests/WE2E/utils.py b/tests/WE2E/utils.py index 2cb73b626..b86e95b19 100755 --- a/tests/WE2E/utils.py +++ b/tests/WE2E/utils.py @@ -578,4 +578,4 @@ def compare_rocotostat(expt_dict,name): else: expt_dict["status"] = "COMPLETE" - return expt_dict \ No newline at end of file + return expt_dict diff --git a/ush/config_defaults_aqm.yaml b/ush/config_defaults_aqm.yaml index cea75c426..df2a02d85 100644 --- a/ush/config_defaults_aqm.yaml +++ b/ush/config_defaults_aqm.yaml @@ -20,4 +20,4 @@ task_run_post: nampgb: aqf_on: true output_file_labels: - - cmaq \ No newline at end of file + - cmaq diff --git a/ush/create_aqm_rc_file.py b/ush/create_aqm_rc_file.py index 8c3576983..7135a8e26 100644 --- a/ush/create_aqm_rc_file.py +++ b/ush/create_aqm_rc_file.py @@ -167,4 +167,4 @@ def parse_args(argv): run_dir=args.run_dir, cdate=str_to_type(args.cdate), init_concentrations=str_to_type(args.init_concentrations), - ) \ No newline at end of file + ) diff --git a/ush/create_diag_table_file.py b/ush/create_diag_table_file.py index 8ca4c5bc5..48fed392b 100644 --- a/ush/create_diag_table_file.py +++ b/ush/create_diag_table_file.py @@ -106,4 +106,4 @@ def parse_args(argv): cfg = get_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) import_vars(dictionary=cfg) - create_diag_table_file(args.run_dir) \ No newline at end of file + create_diag_table_file(args.run_dir) diff --git a/ush/create_model_configure_file.py b/ush/create_model_configure_file.py index 126040dc2..c428de489 100644 --- a/ush/create_model_configure_file.py +++ b/ush/create_model_configure_file.py @@ -305,4 +305,4 @@ def parse_args(argv): fcst_len_hrs=str_to_type(args.fcst_len_hrs), fhrot=str_to_type(args.fhrot), dt_atmos=str_to_type(args.dt_atmos), - ) \ No newline at end of file + ) diff --git a/ush/create_ufs_configure_file.py b/ush/create_ufs_configure_file.py index c03628005..0434479c9 100644 --- a/ush/create_ufs_configure_file.py +++ b/ush/create_ufs_configure_file.py @@ -119,4 +119,4 @@ def parse_args(argv): import_vars(dictionary=cfg) create_ufs_configure_file( run_dir=args.run_dir, - ) \ No newline at end of file + ) diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index ae3431878..d3c2290a6 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -803,4 +803,4 @@ def setup_logging(logfile: str = "log.generate_FV3LAM_wflow", debug: bool = Fals ======================================================================== """ - ) \ No newline at end of file + ) diff --git a/ush/link_fix.py b/ush/link_fix.py index 8184eb431..aaf75e966 100755 --- a/ush/link_fix.py +++ b/ush/link_fix.py @@ -418,4 +418,4 @@ def parse_args(argv): nhw=cfg["grid_params"]["NHW"], run_task=True, sfc_climo_fields=cfg["fixed_files"]["SFC_CLIMO_FIELDS"], - ) \ No newline at end of file + ) diff --git a/ush/set_fv3nml_sfc_climo_filenames.py b/ush/set_fv3nml_sfc_climo_filenames.py index 46dc034de..90d686235 100644 --- a/ush/set_fv3nml_sfc_climo_filenames.py +++ b/ush/set_fv3nml_sfc_climo_filenames.py @@ -129,4 +129,4 @@ def parse_args(argv): args = parse_args(sys.argv[1:]) cfg = get_yaml_config(args.path_to_defns) cfg = flatten_dict(cfg) - set_fv3nml_sfc_climo_filenames(cfg, args.debug) \ No newline at end of file + set_fv3nml_sfc_climo_filenames(cfg, args.debug) From 91e15dd8e40f234b4cbce41b220f7eacbcecced9 Mon Sep 17 
00:00:00 2001 From: WeirAE Date: Thu, 24 Oct 2024 17:32:57 +0000 Subject: [PATCH 32/47] update cleaner --- scripts/chgres_cube.py | 62 ++- scripts/exregional_make_ics.sh | 834 ------------------------------- scripts/exregional_make_lbcs.sh | 686 ------------------------- ush/external_model_defaults.yaml | 4 - uwtools | 1 - 5 files changed, 35 insertions(+), 1552 deletions(-) delete mode 100755 scripts/exregional_make_ics.sh delete mode 100755 scripts/exregional_make_lbcs.sh delete mode 160000 uwtools diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py index 43bbd489a..6be8ccc65 100644 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -100,12 +100,14 @@ def run_chgres_cube(config_file, cycle, key_path, member): Setup and run the chgres_cube Driver. """ - # The experiment config will have {{ MEMBER | env }} expressions in it that need to be - # dereferenced during driver initialization. - - os.environ["MEMBER"] = member + # dereference expressions during driver initialization expt_config = get_yaml_config(config_file) + CRES = expt_config["workflow"]["CRES"] + os.environ["CRES"] = CRES + os.environ["MEMBER"] = member + # set universal variables + cyc = str(expt_config["workflow"]["DATE_FIRST_CYCL"])[8:10] dot_ensmem = ( f".mem{member}" if ( @@ -115,10 +117,7 @@ def run_chgres_cube(config_file, cycle, key_path, member): ) else "" ) - - # dereference expressions during driver initialization - CRES = expt_config["workflow"]["CRES"] - os.environ["CRES"] = CRES + nco_net = expt_config["nco"]["NET_default"] # Extract driver config from experiment config chgres_cube_driver = ChgresCube( @@ -126,6 +125,8 @@ def run_chgres_cube(config_file, cycle, key_path, member): cycle=cycle, key_path=key_path, ) + rundir = Path(chgres_cube_driver.config["rundir"]) + print(f"Will run in {rundir}") # Dereference cycle for file paths expt_config_cp = get_yaml_config(deepcopy(expt_config.data)) @@ -135,20 +136,18 @@ def run_chgres_cube(config_file, cycle, key_path, member): **expt_config_cp, } ) - chgres_cube_config = _walk_key_path(expt_config_cp, key_path) - # update fn_atm and fn_sfc for ics task + input_type = chgres_cube_config["chgres_cube"]["namelist"]["update_values"][ + "config" + ].get("input_type") + + # update config for ics task, run and stage data if "task_make_ics" in key_path: - rundir = Path(chgres_cube_driver.config["rundir"]) - print(f"Will run in {rundir}") varsfilepath = chgres_cube_config["input_files_metadata_path"] shconfig = _parse_var_defns(varsfilepath) extrn_config_fns = shconfig["EXTRN_MDL_FNS"] extrn_config_fhrs = shconfig["EXTRN_MDL_FHRS"] - input_type = chgres_cube_config["chgres_cube"]["namelist"]["update_values"][ - "config" - ].get("input_type") if input_type == "grib2": fn_grib2 = extrn_config_fns[0] update = {"grib2_file_input_grid": fn_grib2} @@ -156,6 +155,18 @@ def run_chgres_cube(config_file, cycle, key_path, member): fn_atm = extrn_config_fns[0] fn_sfc = extrn_config_fns[1] update = {"atm_files_input_grid": fn_atm, "sfc_files_input_grid": fn_sfc} + if expt_config["task_get_extrn_ics"]["EXTRN_MDL_NAME_ICS"] in [ + "HRRR", + "RAP", + ]: + if expt_config["workflow"]["SDF_USES_RUC_LSM"] is True: + update["nsoill_out"] = 9 + else: + if expt_config["workflow"]["SDF_USES_THOMPSON_MP"] is True: + update["thomp_mp_climo_file"] = expt_config["workflow"][ + "THOMPSON_MP_CLIMO_FP" + + ] update_cfg = { "task_make_ics": { @@ -174,10 +185,8 @@ def run_chgres_cube(config_file, cycle, key_path, member): # Deliver output data to a common location above the rundir. 
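# The "links" mapping assembled below is handed to the uwtools file-link API
# (uwlink): each key names a symlink to create under target_dir and each
# value names the existing file it should point to. A minimal sketch of the
# same pattern, with purely illustrative paths:
#
#     from uwtools.api.file import link as uwlink
#     links = {"gfs_ctrl.nc": "/path/to/rundir/gfs_ctrl.nc"}
#     uwlink(target_dir="/path/to/INPUT", config=links)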
links = {} - nco_net = expt_config["nco"]["NET_default"] tile_rgnl = expt_config["constants"]["TILE_RGNL"] nh0 = expt_config["constants"]["NH0"] - cyc = str(expt_config["workflow"]["DATE_FIRST_CYCL"])[8:10] output_dir = os.path.join(rundir.parent, "INPUT") os.makedirs(output_dir, exist_ok=True) @@ -193,10 +202,8 @@ def run_chgres_cube(config_file, cycle, key_path, member): ) uwlink(target_dir=output_dir, config=links) - # Loop the run of chgres_cube for the forecast length if lbcs + # update config for lbcs task, loop run and stage data else: - rundir = Path(chgres_cube_driver.config["rundir"]) - print(f"Will run in {rundir}") fn_sfc = "" varsfilepath = chgres_cube_config["input_files_metadata_path"] shconfig = _parse_var_defns(varsfilepath) @@ -204,12 +211,8 @@ def run_chgres_cube(config_file, cycle, key_path, member): extrn_config_fhrs = shconfig["EXTRN_MDL_FHRS"] num_fhrs = len(extrn_config_fhrs) - input_type = chgres_cube_config["chgres_cube"]["namelist"]["update_values"][ - "config" - ].get("input_type") bcgrp10 = 0 bcgrpnum10 = 1 - update = {} for ii in range(bcgrp10, num_fhrs, bcgrpnum10): i = ii + bcgrp10 if i < num_fhrs: @@ -220,6 +223,14 @@ def run_chgres_cube(config_file, cycle, key_path, member): else: fn_atm = extrn_config_fns[i] update = {"atm_files_input_grid": fn_atm} + if expt_config["task_get_extrn_lbcs"]["EXTRN_MDL_NAME_LBCS"] not in [ + "HRRR", + "RAP", + ]: + if expt_config["workflow"]["SDF_USES_THOMPSON_MP"] is True: + update["thomp_mp_climo_file"] = expt_config["workflow"][ + "THOMPSON_MP_CLIMO_FP" + ] update_cfg = { "task_make_lbcs": { @@ -247,9 +258,6 @@ def run_chgres_cube(config_file, cycle, key_path, member): ] fcst_hhh = int(lbc_spec_fhrs) - int(lbc_offset_fhrs) fcst_hhh_FV3LAM = f"{fcst_hhh:03d}" - cyc = str(expt_config["workflow"]["DATE_FIRST_CYCL"])[8:10] - - nco_net = expt_config["nco"]["NET_default"] lbc_input_fn = rundir / f"gfs.bndy.nc" output_dir = os.path.join(rundir.parent, "INPUT") diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh deleted file mode 100755 index 49a2dc896..000000000 --- a/scripts/exregional_make_ics.sh +++ /dev/null @@ -1,834 +0,0 @@ -#!/usr/bin/env bash - -# -#----------------------------------------------------------------------- -# -# The ex-scrtipt that sets up and runs chgres_cube for preparing initial -# conditions for the FV3 forecast -# -# Run-time environment variables: -# -# COMIN -# COMOUT -# COMROOT -# DATA -# DATAROOT -# DATA_SHARE -# EXTRN_MDL_CDATE -# GLOBAL_VAR_DEFNS_FP -# INPUT_DATA -# NET -# PDY -# REDIRECT_OUT_ERR -# SLASH_ENSMEM_SUBDIR -# -# Experiment variables -# -# user: -# EXECdir -# MACHINE -# PARMdir -# RUN_ENVIR -# USHdir -# -# platform: -# FIXgsm -# PRE_TASK_CMDS -# RUN_CMD_UTILS -# -# workflow: -# CCPP_PHYS_SUITE -# COLDSTART -# CRES -# DATE_FIRST_CYCL -# DOT_OR_USCORE -# EXTRN_MDL_VAR_DEFNS_FN -# FIXlam -# SDF_USES_RUC_LSM -# SDF_USES_THOMPSON_MP -# THOMPSON_MP_CLIMO_FP -# VERBOSE -# -# task_make_ics: -# FVCOM_DIR -# FVCOM_FILE -# FVCOM_WCSTART -# KMP_AFFINITY_MAKE_ICS -# OMP_NUM_THREADS_MAKE_ICS -# OMP_STACKSIZE_MAKE_ICS -# USE_FVCOM -# VCOORD_FILE -# -# task_get_extrn_ics: -# EXTRN_MDL_NAME_ICS -# FV3GFS_FILE_FMT_ICS -# -# global: -# HALO_BLEND -# -# cpl_aqm_parm: -# CPL_AQM -# -# constants: -# NH0 -# NH4 -# TILE_RGNL -# -#----------------------------------------------------------------------- -# - - -# -#----------------------------------------------------------------------- -# -# Source the variable definitions file and the bash utility functions. 
-# -#----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh -sections=( - user - nco - platform - workflow - global - cpl_aqm_parm - constants - task_get_extrn_ics - task_make_ics -) -for sect in ${sections[*]} ; do - source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} -done -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# -scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Print message indicating entry into script. -# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Entering script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" - -This is the ex-script for the task that generates initial condition -(IC), surface, and zeroth hour lateral boundary condition (LBC0) files -(in NetCDF format) for the FV3-LAM. -========================================================================" -# -#----------------------------------------------------------------------- -# -# Set OpenMP variables. -# -#----------------------------------------------------------------------- -# -export KMP_AFFINITY=${KMP_AFFINITY_MAKE_ICS} -export OMP_NUM_THREADS=${OMP_NUM_THREADS_MAKE_ICS} -export OMP_STACKSIZE=${OMP_STACKSIZE_MAKE_ICS} -# -#----------------------------------------------------------------------- -# -# Set machine-dependent parameters. -# -#----------------------------------------------------------------------- -# -eval ${PRE_TASK_CMDS} - -if [ -z "${RUN_CMD_UTILS:-}" ] ; then - print_err_msg_exit "\ - Run command was not set in machine file. \ - Please set RUN_CMD_UTILS for your platform" -else - print_info_msg "$VERBOSE" " - All executables will be submitted with command \'${RUN_CMD_UTILS}\'." -fi - - -# -#----------------------------------------------------------------------- -# -# Source the file containing definitions of variables associated with the -# external model for ICs. -# -#----------------------------------------------------------------------- -# -if [ $RUN_ENVIR = "nco" ]; then - extrn_mdl_staging_dir="${DATAROOT}/get_extrn_ics.${share_pid}${SLASH_ENSMEM_SUBDIR}" - extrn_mdl_var_defns_fp="${extrn_mdl_staging_dir}/${NET}.${cycle}.${EXTRN_MDL_NAME_ICS}.ICS.${EXTRN_MDL_VAR_DEFNS_FN}.sh" -else - extrn_mdl_staging_dir="${COMIN}/${EXTRN_MDL_NAME_ICS}/for_ICS${SLASH_ENSMEM_SUBDIR}" - extrn_mdl_var_defns_fp="${extrn_mdl_staging_dir}/${EXTRN_MDL_VAR_DEFNS_FN}.sh" -fi -. ${extrn_mdl_var_defns_fp} -# -#----------------------------------------------------------------------- -# -# Set physics-suite-dependent variable mapping table needed in the FORTRAN -# namelist file that the chgres_cube executable will read in. 
-# -#----------------------------------------------------------------------- -# -varmap_file="" - -case "${CCPP_PHYS_SUITE}" in -# - "FV3_GFS_2017_gfdlmp" | \ - "FV3_GFS_2017_gfdlmp_regional" | \ - "FV3_GFS_v16" | \ - "FV3_GFS_v15p2" ) - varmap_file="GFSphys_var_map.txt" - ;; -# - "FV3_RRFS_v1beta" | \ - "FV3_GFS_v15_thompson_mynn_lam3km" | \ - "FV3_GFS_v17_p8" | \ - "FV3_WoFS_v0" | \ - "FV3_HRRR" | \ - "FV3_RAP" ) - if [ "${EXTRN_MDL_NAME_ICS}" = "RAP" ] || \ - [ "${EXTRN_MDL_NAME_ICS}" = "HRRR" ]; then - varmap_file="GSDphys_var_map.txt" - elif [ "${EXTRN_MDL_NAME_ICS}" = "NAM" ] || \ - [ "${EXTRN_MDL_NAME_ICS}" = "FV3GFS" ] || \ - [ "${EXTRN_MDL_NAME_ICS}" = "UFS-CASE-STUDY" ] || \ - [ "${EXTRN_MDL_NAME_ICS}" = "GEFS" ] || \ - [ "${EXTRN_MDL_NAME_ICS}" = "GDAS" ] || \ - [ "${EXTRN_MDL_NAME_ICS}" = "GSMGFS" ]; then - varmap_file="GFSphys_var_map.txt" - fi - ;; -# - *) - message_txt="The variable \"varmap_file\" has not yet been specified for -this physics suite (CCPP_PHYS_SUITE): - CCPP_PHYS_SUITE = \"${CCPP_PHYS_SUITE}\"" - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi - ;; -# -esac -# -#----------------------------------------------------------------------- -# -# Set external-model-dependent variables that are needed in the FORTRAN -# namelist file that the chgres_cube executable will read in. These are de- -# scribed below. Note that for a given external model, usually only a -# subset of these all variables are set (since some may be irrelevant). -# -# external_model: -# Name of the external model from which we are obtaining the fields -# needed to generate the ICs. -# -# fn_atm: -# Name (not including path) of the nemsio or netcdf file generated by the -# external model that contains the atmospheric fields. Currently used for -# GSMGFS and FV3GFS external model data. -# -# fn_sfc: -# Name (not including path) of the nemsio or netcdf file generated by the -# external model that contains the surface fields. Currently used for -# GSMGFS and FV3GFS external model data. -# -# fn_grib2: -# Name (not including path) of the grib2 file generated by the external -# model. Currently used for NAM, RAP, and HRRR external model data. -# -# input_type: -# The "type" of input being provided to chgres_cube. This contains a combi- -# nation of information on the external model, external model file for- -# mat, and maybe other parameters. For clarity, it would be best to -# eliminate this variable in chgres_cube and replace with with 2 or 3 others -# (e.g. extrn_mdl, extrn_mdl_file_format, etc). -# -# tracers_input: -# List of atmospheric tracers to read in from the external model file -# containing these tracers. -# -# tracers: -# Names to use in the output NetCDF file for the atmospheric tracers -# specified in tracers_input. With the possible exception of GSD phys- -# ics, the elements of this array should have a one-to-one correspond- -# ence with the elements in tracers_input, e.g. if the third element of -# tracers_input is the name of the O3 mixing ratio, then the third ele- -# ment of tracers should be the name to use for the O3 mixing ratio in -# the output file. For GSD physics, three additional tracers -- ice, -# rain, and water number concentrations -- may be specified at the end -# of tracers, and these will be calculated by chgres_cube. -# -# nsoill_out: -# The number of soil layers to include in the output NetCDF file. 
-# -# FIELD_from_climo, where FIELD = "vgtyp", "sotyp", "vgfrc", "lai", or -# "minmax_vgfrc": -# Logical variable indicating whether or not to obtain the field in -# question from climatology instead of the external model. The field in -# question is one of vegetation type (FIELD="vgtyp"), soil type (FIELD= -# "sotyp"), vegetation fraction (FIELD="vgfrc"), leaf area index -# (FIELD="lai"), or min/max areal fractional coverage of annual green -# vegetation (FIELD="minmax_vfrr"). If FIELD_from_climo is set to -# ".true.", then the field is obtained from climatology (regardless of -# whether or not it exists in an external model file). If it is set -# to ".false.", then the field is obtained from the external model. -# If "false" is chosen and the external model file does not provide -# this field, then chgres_cube prints out an error message and stops. -# -# tg3_from_soil: -# Logical variable indicating whether or not to set the tg3 soil tempe- -# rature field to the temperature of the deepest soil layer. -# -#----------------------------------------------------------------------- -# - -# GSK comments about chgres: -# -# The following are the three atmsopheric tracers that are in the atmo- -# spheric analysis (atmanl) nemsio file for CDATE=2017100700: -# -# "spfh","o3mr","clwmr" -# -# Note also that these are hardcoded in the code (file input_data.F90, -# subroutine read_input_atm_gfs_spectral_file), so that subroutine will -# break if tracers_input(:) is not specified as above. -# -# Note that there are other fields too ["hgt" (surface height (togography?)), -# pres (surface pressure), ugrd, vgrd, and tmp (temperature)] in the atmanl file, but those -# are not considered tracers (they're categorized as dynamics variables, -# I guess). -# -# Another note: The way things are set up now, tracers_input(:) and -# tracers(:) are assumed to have the same number of elements (just the -# atmospheric tracer names in the input and output files may be differ- -# ent). There needs to be a check for this in the chgres_cube code!! -# If there was a varmap table that specifies how to handle missing -# fields, that would solve this problem. -# -# Also, it seems like the order of tracers in tracers_input(:) and -# tracers(:) must match, e.g. if ozone mixing ratio is 3rd in -# tracers_input(:), it must also be 3rd in tracers(:). How can this be checked? -# -# NOTE: Really should use a varmap table for GFS, just like we do for -# RAP/HRRR. -# -# A non-prognostic variable that appears in the field_table for GSD physics -# is cld_amt. Why is that in the field_table at all (since it is a non- -# prognostic field), and how should we handle it here?? -# I guess this works for FV3GFS but not for the spectral GFS since these -# variables won't exist in the spectral GFS atmanl files. -# tracers_input="\"sphum\",\"liq_wat\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\",\"o3mr\"" -# -# Not sure if tracers(:) should include "cld_amt" since that is also in -# the field_table for CDATE=2017100700 but is a non-prognostic variable.
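# The element-count check that the comments above call for could be done in
# the shell before the namelist is written -- a sketch only, assuming
# tracers_input and tracers hold comma-separated lists as set further below:
#
#   n_in=$(echo "${tracers_input}" | awk -F',' '{print NF}')
#   n_out=$(echo "${tracers}" | awk -F',' '{print NF}')
#   if [ "${n_in}" -ne "${n_out}" ]; then
#     print_err_msg_exit "tracers_input and tracers must have the same number of elements."
#   fi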
- -external_model="" -fn_atm="" -fn_sfc="" -fn_grib2="" -input_type="" -tracers_input="\"\"" -tracers="\"\"" -nsoill_out="" -geogrid_file_input_grid="\"\"" -vgtyp_from_climo="" -sotyp_from_climo="" -vgfrc_from_climo="" -minmax_vgfrc_from_climo="" -lai_from_climo="" -tg3_from_soil="" -convert_nst="" -# -#----------------------------------------------------------------------- -# -# If the external model is not one that uses the RUC land surface model -# (LSM) -- which currently includes all valid external models except the -# HRRR and the RAP -- then we set the number of soil levels to include -# in the output NetCDF file that chgres_cube generates (nsoill_out; this -# is a variable in the namelist that chgres_cube reads in) to 4. This -# is because FV3 can handle this regardless of the LSM that it is using -# (which is specified in the suite definition file, or SDF), as follows. -# If the SDF does not use the RUC LSM (i.e. it uses the Noah or Noah MP -# LSM), then it will expect to see 4 soil layers; and if the SDF uses -# the RUC LSM, then the RUC LSM itself has the capability to regrid from -# 4 soil layers to the 9 layers that it uses. -# -# On the other hand, if the external model is one that uses the RUC LSM -# (currently meaning that it is either the HRRR or the RAP), then what -# we set nsoill_out to depends on whether the RUC or the Noah/Noah MP -# LSM is used in the SDF. If the SDF uses RUC, then both the external -# model and FV3 use RUC (which expects 9 soil levels), so we simply set -# nsoill_out to 9. In this case, chgres_cube does not need to do any -# regridding of soil levels (because the number of levels in is the same -# as the number out). If the SDF uses the Noah or Noah MP LSM, then the -# output from chgres_cube must contain 4 soil levels because that is what -# these LSMs expect, and the code in FV3 does not have the capability to -# regrid from the 9 levels in the external model to the 4 levels expected -# by Noah/Noah MP. In this case, chgres_cube does the regridding from -# 9 to 4 levels. -# -# In summary, we can set nsoill_out to 4 unless the external model is -# the HRRR or RAP AND the forecast model is using the RUC LSM. -# -#----------------------------------------------------------------------- -# -nsoill_out="4" -if [ "${EXTRN_MDL_NAME_ICS}" = "HRRR" -o \ - "${EXTRN_MDL_NAME_ICS}" = "RAP" ] && \ - [ $(boolify "${SDF_USES_RUC_LSM}") = "TRUE" ]; then - nsoill_out="9" -fi -# -#----------------------------------------------------------------------- -# -# If the external model for ICs is one that does not provide the aerosol -# fields needed by Thompson microphysics (currently only the HRRR and -# RAP provide aerosol data) and if the physics suite uses Thompson -# microphysics, set the variable thomp_mp_climo_file in the chgres_cube -# namelist to the full path of the file containing aerosol climatology -# data. In this case, this file will be used to generate approximate -# aerosol fields in the ICs that Thompson MP can use. Otherwise, set -# thomp_mp_climo_file to a null string. -# -#----------------------------------------------------------------------- -# -thomp_mp_climo_file="" -if [ "${EXTRN_MDL_NAME_ICS}" != "HRRR" -a \ - "${EXTRN_MDL_NAME_ICS}" != "RAP" ] && \ - [ $(boolify "${SDF_USES_THOMPSON_MP}") = "TRUE" ]; then - thomp_mp_climo_file="${THOMPSON_MP_CLIMO_FP}" -fi -# -#----------------------------------------------------------------------- -# -# Set other chgres_cube namelist variables depending on the external -# model used. 
-# -#----------------------------------------------------------------------- -# -case "${EXTRN_MDL_NAME_ICS}" in - -"GSMGFS") - external_model="GSMGFS" - fn_atm="${EXTRN_MDL_FNS[0]}" - fn_sfc="${EXTRN_MDL_FNS[1]}" - input_type="gfs_gaussian_nemsio" # For spectral GFS Gaussian grid in nemsio format. - convert_nst=False - tracers_input="[\"spfh\",\"clwmr\",\"o3mr\"]" - tracers="[\"sphum\",\"liq_wat\",\"o3mr\"]" - vgtyp_from_climo=True - sotyp_from_climo=True - vgfrc_from_climo=True - minmax_vgfrc_from_climo=True - lai_from_climo=True - tg3_from_soil=False - ;; - -"FV3GFS") - if [ "${FV3GFS_FILE_FMT_ICS}" = "nemsio" ]; then - external_model="FV3GFS" - input_type="gaussian_nemsio" # For FV3GFS data on a Gaussian grid in nemsio format. - tracers_input="[\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"]" - tracers="[\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"]" - fn_atm="${EXTRN_MDL_FNS[0]}" - fn_sfc="${EXTRN_MDL_FNS[1]}" - convert_nst=True - elif [ "${FV3GFS_FILE_FMT_ICS}" = "grib2" ]; then - external_model="GFS" - fn_grib2="${EXTRN_MDL_FNS[0]}" - input_type="grib2" - convert_nst=False - elif [ "${FV3GFS_FILE_FMT_ICS}" = "netcdf" ]; then - external_model="FV3GFS" - input_type="gaussian_netcdf" # For FV3GFS data on a Gaussian grid in netcdf format. - tracers_input="[\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"]" - tracers="[\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"]" - fn_atm="${EXTRN_MDL_FNS[0]}" - fn_sfc="${EXTRN_MDL_FNS[1]}" - convert_nst=True - fi - vgtyp_from_climo=True - sotyp_from_climo=True - vgfrc_from_climo=True - minmax_vgfrc_from_climo=True - lai_from_climo=True - tg3_from_soil=False - ;; - -"UFS-CASE-STUDY") - hh="${EXTRN_MDL_CDATE:8:2}" - if [ "${FV3GFS_FILE_FMT_ICS}" = "nemsio" ]; then - external_model="UFS-CASE-STUDY" - input_type="gaussian_nemsio" - tracers_input="[\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"]" - tracers="[\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"]" - fn_atm="gfs.t${hh}z.atmanl.nemsio" - fn_sfc="gfs.t${hh}z.sfcanl.nemsio" - convert_nst=True - fi - vgtyp_from_climo=True - sotyp_from_climo=True - vgfrc_from_climo=True - minmax_vgfrc_from_climo=True - lai_from_climo=True - tg3_from_soil=False - unset hh - ;; - -"GDAS") - if [ "${FV3GFS_FILE_FMT_ICS}" = "nemsio" ]; then - input_type="gaussian_nemsio" - elif [ "${FV3GFS_FILE_FMT_ICS}" = "netcdf" ]; then - input_type="gaussian_netcdf" - fi - external_model="GFS" - tracers_input="[\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"]" - tracers="[\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"]" - convert_nst=False - fn_atm="${EXTRN_MDL_FNS[0]}" - fn_sfc="${EXTRN_MDL_FNS[1]}" - vgtyp_from_climo=True - sotyp_from_climo=True - vgfrc_from_climo=True - minmax_vgfrc_from_climo=True - lai_from_climo=True - tg3_from_soil=True - ;; - -"GEFS") - external_model="GFS" - fn_grib2="${EXTRN_MDL_FNS[0]}" - input_type="grib2" - convert_nst=False - vgtyp_from_climo=True - sotyp_from_climo=True - vgfrc_from_climo=True - minmax_vgfrc_from_climo=True - lai_from_climo=True - tg3_from_soil=False - ;; - -"HRRR") - external_model="HRRR" - fn_grib2="${EXTRN_MDL_FNS[0]}" - input_type="grib2" -# -# Path to the HRRRX geogrid file. 
-# - geogrid_file_input_grid="${FIXgsm}/geo_em.d01.nc_HRRRX" -# Note that vgfrc, shdmin/shdmax (minmax_vgfrc), and lai fields are only available in HRRRX -# files after mid-July 2019, and only so long as the record order didn't change afterward - vgtyp_from_climo=True - sotyp_from_climo=True - vgfrc_from_climo=True - minmax_vgfrc_from_climo=True - lai_from_climo=True - tg3_from_soil=True - convert_nst=False - ;; - -"RAP") - external_model="RAP" - fn_grib2="${EXTRN_MDL_FNS[0]}" - input_type="grib2" -# -# Path to the RAPX geogrid file. -# - geogrid_file_input_grid="${FIXgsm}/geo_em.d01.nc_RAPX" - vgtyp_from_climo=True - sotyp_from_climo=True - vgfrc_from_climo=True - minmax_vgfrc_from_climo=True - lai_from_climo=True - tg3_from_soil=True - convert_nst=False - ;; - -"NAM") - external_model="NAM" - fn_grib2="${EXTRN_MDL_FNS[0]}" - input_type="grib2" - vgtyp_from_climo=True - sotyp_from_climo=True - vgfrc_from_climo=True - minmax_vgfrc_from_climo=True - lai_from_climo=True - tg3_from_soil=False - convert_nst=False - ;; - -*) - message_txt="External-model-dependent namelist variables have not yet been specified -for this external IC model (EXTRN_MDL_NAME_ICS): - EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\"" - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi - ;; - -esac -# -#----------------------------------------------------------------------- -# -# Get the starting month, day, and hour of the the external model forecast. -# -#----------------------------------------------------------------------- -# -mm="${EXTRN_MDL_CDATE:4:2}" -dd="${EXTRN_MDL_CDATE:6:2}" -hh="${EXTRN_MDL_CDATE:8:2}" -# -#----------------------------------------------------------------------- -# -# Check that the executable that generates the ICs exists. -# -#----------------------------------------------------------------------- -# -exec_fn="chgres_cube" -exec_fp="$EXECdir/${exec_fn}" -if [ ! -f "${exec_fp}" ]; then - message_txt="The executable (exec_fp) for generating initial conditions -on the FV3-LAM native grid does not exist: - exec_fp = \"${exec_fp}\" -Please ensure that you've built this executable." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi -fi -# -#----------------------------------------------------------------------- -# -# Build the FORTRAN namelist file that chgres_cube will read in. -# -#----------------------------------------------------------------------- -# -# Create a multiline variable that consists of a yaml-compliant string -# specifying the values that the namelist variables need to be set to -# (one namelist variable per line, plus a header and footer). Below, -# this variable will be passed to a python script that will create the -# namelist file. -# -# IMPORTANT: -# If we want a namelist variable to be removed from the namelist file, -# in the "settings" variable below, we need to set its value to the -# string "null". 
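# For example (a sketch of the "null" convention described above): a settings
# block of the form
#
#   'config':
#     'convert_atm': True
#     'thomp_mp_climo_file': null
#
# realizes to a fort.41 namelist whose &config group contains convert_atm but
# omits thomp_mp_climo_file entirely.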
-# -settings=" -'config': - 'fix_dir_target_grid': ${FIXlam} - 'mosaic_file_target_grid': ${FIXlam}/${CRES}${DOT_OR_USCORE}mosaic.halo$((10#${NH4})).nc - 'orog_dir_target_grid': ${FIXlam} - 'orog_files_target_grid': ${CRES}${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo$((10#${NH4})).nc - 'vcoord_file_target_grid': ${VCOORD_FILE} - 'varmap_file': ${PARMdir}/ufs_utils/varmap_tables/${varmap_file} - 'data_dir_input_grid': ${extrn_mdl_staging_dir} - 'atm_files_input_grid': ${fn_atm} - 'sfc_files_input_grid': ${fn_sfc} - 'grib2_file_input_grid': \"${fn_grib2}\" - 'cycle_mon': $((10#${mm})) - 'cycle_day': $((10#${dd})) - 'cycle_hour': $((10#${hh})) - 'convert_atm': True - 'convert_sfc': True - 'convert_nst': ${convert_nst} - 'regional': 1 - 'halo_bndy': $((10#${NH4})) - 'halo_blend': $((10#${HALO_BLEND})) - 'input_type': ${input_type} - 'external_model': ${external_model} - 'tracers_input': ${tracers_input} - 'tracers': ${tracers} - 'nsoill_out': $((10#${nsoill_out})) - 'geogrid_file_input_grid': ${geogrid_file_input_grid} - 'vgtyp_from_climo': ${vgtyp_from_climo} - 'sotyp_from_climo': ${sotyp_from_climo} - 'vgfrc_from_climo': ${vgfrc_from_climo} - 'minmax_vgfrc_from_climo': ${minmax_vgfrc_from_climo} - 'lai_from_climo': ${lai_from_climo} - 'tg3_from_soil': ${tg3_from_soil} - 'thomp_mp_climo_file': ${thomp_mp_climo_file} -" - - -nml_fn="fort.41" - -(cat << EOF -$settings -EOF -) | uw config realize \ - --input-format yaml \ - -o ${nml_fn} \ - --output-format nml\ - -v \ - -err=$? -if [ $err -ne 0 ]; then - message_txt="Error creating namelist read by ${exec_fn} failed. - Settings for input are: -$settings" - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi -fi - -# -#----------------------------------------------------------------------- -# -# Run chgres_cube. -# -#----------------------------------------------------------------------- -# -# NOTE: -# Often when the chgres_cube.exe run fails, it still returns a zero -# return code, so the failure isn't picked up the the logical OR (||) -# below. That should be fixed. This might be due to the RUN_CMD_UTILS -# command - maybe that is returning a zero exit code even though the -# exit code of chgres_cube is nonzero. A similar thing happens in the -# forecast task. -# -PREP_STEP -eval ${RUN_CMD_UTILS} ${exec_fp} ${REDIRECT_OUT_ERR} || \ - print_err_msg_exit "\ -Call to executable (exec_fp) to generate surface and initial conditions -(ICs) files for the FV3-LAM failed: - exec_fp = \"${exec_fp}\" -The external model from which the ICs files are to be generated is: - EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\" -The external model files that are inputs to the executable (exec_fp) are -located in the following directory: - extrn_mdl_staging_dir = \"${extrn_mdl_staging_dir}\"" -POST_STEP -# -#----------------------------------------------------------------------- -# -# Move initial condition, surface, control, and 0-th hour lateral bound- -# ary files to ICs_BCs directory. 
-# -#----------------------------------------------------------------------- -# -if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then - COMOUT="${COMROOT}/${NET}/${model_ver}/${RUN}.${PDY}/${cyc}${SLASH_ENSMEM_SUBDIR}" #temporary path, should be removed later - if [ $(boolify "${COLDSTART}") = "TRUE" ] && [ "${PDY}${cyc}" = "${DATE_FIRST_CYCL:0:10}" ]; then - data_trans_path="${COMOUT}" - else - data_trans_path="${DATA_SHARE}" - fi - cp -p out.atm.tile${TILE_RGNL}.nc "${data_trans_path}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc" - cp -p out.sfc.tile${TILE_RGNL}.nc "${COMOUT}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc" - cp -p gfs_ctrl.nc "${COMOUT}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc" - cp -p gfs.bndy.nc "${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc" -else - mv out.atm.tile${TILE_RGNL}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_data.tile${TILE_RGNL}.halo${NH0}.nc - mv out.sfc.tile${TILE_RGNL}.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc - mv gfs_ctrl.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_ctrl.nc - mv gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile${TILE_RGNL}.f000.nc -fi -# -#----------------------------------------------------------------------- -# -# Process FVCOM Data -# -#----------------------------------------------------------------------- -# -if [ $(boolify "${USE_FVCOM}") = "TRUE" ]; then - -#Format for fvcom_time: YYYY-MM-DDTHH:00:00.000000 - fvcom_exec_fn="fvcom_to_FV3" - fvcom_exec_fp="$EXECdir/${fvcom_exec_fn}" - fvcom_time="${DATE_FIRST_CYCL:0:4}-${DATE_FIRST_CYCL:4:2}-${DATE_FIRST_CYCL:6:2}T${DATE_FIRST_CYCL:8:2}:00:00.000000" - if [ ! -f "${fvcom_exec_fp}" ]; then - message_txt="The executable (fvcom_exec_fp) for processing FVCOM data -onto FV3-LAM native grid does not exist: - fvcom_exec_fp = \"${fvcom_exec_fp}\" -Please ensure that you've built this executable." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}"\ - else - print_err_msg_exit "${message_txt}" - fi - fi - cp ${fvcom_exec_fp} ${INPUT_DATA}/. - fvcom_data_fp="${FVCOM_DIR}/${FVCOM_FILE}" - if [ ! -f "${fvcom_data_fp}" ]; then - message_txt="The file or path (fvcom_data_fp) does not exist: - fvcom_data_fp = \"${fvcom_data_fp}\" -Please check the following user defined variables: - FVCOM_DIR = \"${FVCOM_DIR}\" - FVCOM_FILE= \"${FVCOM_FILE}\" " - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi - fi - - cp ${fvcom_data_fp} ${INPUT_DATA}/fvcom.nc - cd ${INPUT_DATA} - PREP_STEP - eval ${RUN_CMD_UTILS} ${fvcom_exec_fn} \ - ${NET}.${cycle}${dot_ensmem}.sfc_data.tile${TILE_RGNL}.halo${NH0}.nc fvcom.nc ${FVCOM_WCSTART} ${fvcom_time} \ - ${REDIRECT_OUT_ERR} || print_err_msg_exit "\ -Call to executable (fvcom_exe) to modify sfc fields for FV3-LAM failed: - fvcom_exe = \"${fvcom_exe}\" -The following variables were being used: - FVCOM_DIR = \"${FVCOM_DIR}\" - FVCOM_FILE = \"${FVCOM_FILE}\" - fvcom_time = \"${fvcom_time}\" - FVCOM_WCSTART = \"${FVCOM_WCSTART}\" - INPUT_DATA = \"${INPUT_DATA}\" - fvcom_exe_dir = \"${fvcom_exe_dir}\" - fvcom_exe = \"${fvcom_exe}\"" - POST_STEP -fi -# -#----------------------------------------------------------------------- -# -# Print message indicating successful completion of script. 
-# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Initial condition, surface, and zeroth hour lateral boundary condition -files (in NetCDF format) for FV3 generated successfully!!! - -Exiting script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" -========================================================================" -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh deleted file mode 100755 index c4b669dbd..000000000 --- a/scripts/exregional_make_lbcs.sh +++ /dev/null @@ -1,686 +0,0 @@ -#!/usr/bin/env bash - -# -#----------------------------------------------------------------------- -# -# The ex-scrtipt that sets up and runs chgres_cube for preparing lateral -# boundary conditions for the FV3 forecast -# -# Run-time environment variables: -# -# COMIN -# COMOUT -# COMROOT -# DATA -# DATAROOT -# DATA_SHARE -# EXTRN_MDL_CDATE -# INPUT_DATA -# GLOBAL_VAR_DEFNS_FP -# NET -# PDY -# REDIRECT_OUT_ERR -# SLASH_ENSMEM_SUBDIR -# -# Experiment variables -# -# user: -# EXECdir -# MACHINE -# PARMdir -# RUN_ENVIR -# USHdir -# -# platform: -# FIXgsm -# PRE_TASK_CMDS -# RUN_CMD_UTILS -# -# workflow: -# CCPP_PHYS_SUITE -# COLDSTART -# CRES -# DATE_FIRST_CYCL -# DOT_OR_USCORE -# EXTRN_MDL_VAR_DEFNS_FN -# FIXlam -# SDF_USES_RUC_LSM -# SDF_USES_THOMPSON_MP -# THOMPSON_MP_CLIMO_FP -# VERBOSE -# -# task_get_extrn_lbcs: -# EXTRN_MDL_NAME_LBCS -# FV3GFS_FILE_FMT_LBCS -# -# task_make_lbcs: -# FVCOM_DIR -# FVCOM_FILE -# FVCOM_WCSTART -# KMP_AFFINITY_MAKE_LBCS -# OMP_NUM_THREADS_MAKE_LBCS -# OMP_STACKSIZE_MAKE_LBCS -# USE_FVCOM -# VCOORD_FILE -# -# global: -# HALO_BLEND -# -# cpl_aqm_parm: -# CPL_AQM -# -# constants: -# NH0 -# NH4 -# TILE_RGNL -# -#----------------------------------------------------------------------- -# - - -# -#----------------------------------------------------------------------- -# -# Source the variable definitions file and the bash utility functions. -# -#----------------------------------------------------------------------- -# -. $USHdir/source_util_funcs.sh -set -x -sections=( - user - nco - platform - workflow - global - cpl_aqm_parm - constants - task_get_extrn_lbcs - task_make_lbcs -) -for sect in ${sections[*]} ; do - source_yaml ${GLOBAL_VAR_DEFNS_FP} ${sect} -done -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; . $USHdir/preamble.sh; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). 
-# -#----------------------------------------------------------------------- -# -scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Print message indicating entry into script. -# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Entering script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" - -This is the ex-script for the task that generates lateral boundary con- -dition (LBC) files (in NetCDF format) for all LBC update hours (except -hour zero). -========================================================================" -# -#----------------------------------------------------------------------- -# -# Set OpenMP variables. -# -#----------------------------------------------------------------------- -# -export KMP_AFFINITY=${KMP_AFFINITY_MAKE_LBCS} -export OMP_NUM_THREADS=${OMP_NUM_THREADS_MAKE_LBCS} -export OMP_STACKSIZE=${OMP_STACKSIZE_MAKE_LBCS} -# -#----------------------------------------------------------------------- -# -# Set machine-dependent parameters. -# -#----------------------------------------------------------------------- -# -eval ${PRE_TASK_CMDS} - -if [ -z "${RUN_CMD_UTILS:-}" ] ; then - print_err_msg_exit "\ - Run command was not set in machine file. \ - Please set RUN_CMD_UTILS for your platform" -else - print_info_msg "$VERBOSE" " - All executables will be submitted with command \'${RUN_CMD_UTILS}\'." -fi -# -#----------------------------------------------------------------------- -# -# Source the file containing definitions of variables associated with the -# external model for LBCs. -# -#----------------------------------------------------------------------- -# -if [ $RUN_ENVIR = "nco" ]; then - extrn_mdl_staging_dir="${DATAROOT}/get_extrn_lbcs.${share_pid}${SLASH_ENSMEM_SUBDIR}" - extrn_mdl_var_defns_fp="${extrn_mdl_staging_dir}/${NET}.${cycle}.${EXTRN_MDL_NAME_LBCS}.LBCS.${EXTRN_MDL_VAR_DEFNS_FN}.sh" -else - extrn_mdl_staging_dir="${COMIN}/${EXTRN_MDL_NAME_LBCS}/for_LBCS${SLASH_ENSMEM_SUBDIR}" - extrn_mdl_var_defns_fp="${extrn_mdl_staging_dir}/${EXTRN_MDL_VAR_DEFNS_FN}.sh" -fi -. ${extrn_mdl_var_defns_fp} -# -#----------------------------------------------------------------------- -# -# Set physics-suite-dependent variable mapping table needed in the FORTRAN -# namelist file that the chgres_cube executable will read in. 
-# -#----------------------------------------------------------------------- -# -varmap_file="" - -case "${CCPP_PHYS_SUITE}" in -# - "FV3_GFS_2017_gfdlmp" | \ - "FV3_GFS_2017_gfdlmp_regional" | \ - "FV3_GFS_v16" | \ - "FV3_GFS_v15p2" ) - varmap_file="GFSphys_var_map.txt" - ;; -# - "FV3_RRFS_v1beta" | \ - "FV3_GFS_v15_thompson_mynn_lam3km" | \ - "FV3_GFS_v17_p8" | \ - "FV3_WoFS_v0" | \ - "FV3_HRRR" | \ - "FV3_RAP") - if [ "${EXTRN_MDL_NAME_LBCS}" = "RAP" ] || \ - [ "${EXTRN_MDL_NAME_LBCS}" = "HRRR" ]; then - varmap_file="GSDphys_var_map.txt" - elif [ "${EXTRN_MDL_NAME_LBCS}" = "NAM" ] || \ - [ "${EXTRN_MDL_NAME_LBCS}" = "FV3GFS" ] || \ - [ "${EXTRN_MDL_NAME_LBCS}" = "UFS-CASE-STUDY" ] || \ - [ "${EXTRN_MDL_NAME_LBCS}" = "GEFS" ] || \ - [ "${EXTRN_MDL_NAME_LBCS}" = "GDAS" ] || \ - [ "${EXTRN_MDL_NAME_LBCS}" = "GSMGFS" ]; then - varmap_file="GFSphys_var_map.txt" - fi - ;; -# - *) - message_txt="The variable \"varmap_file\" has not yet been specified -for this physics suite (CCPP_PHYS_SUITE): - CCPP_PHYS_SUITE = \"${CCPP_PHYS_SUITE}\"" - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi - ;; -# -esac -# -#----------------------------------------------------------------------- -# -# Set external-model-dependent variables that are needed in the FORTRAN -# namelist file that the chgres_cube executable will read in. These are de- -# scribed below. Note that for a given external model, usually only a -# subset of these all variables are set (since some may be irrelevant). -# -# external_model: -# Name of the external model from which we are obtaining the fields -# needed to generate the LBCs. -# -# fn_atm: -# Name (not including path) of the nemsio or netcdf file generated by the -# external model that contains the atmospheric fields. Currently used for -# GSMGFS and FV3GFS external model data. -# -# fn_grib2: -# Name (not including path) of the grib2 file generated by the external -# model. Currently used for NAM, RAP, and HRRR external model data. -# -# input_type: -# The "type" of input being provided to chgres_cube. This contains a combi- -# nation of information on the external model, external model file for- -# mat, and maybe other parameters. For clarity, it would be best to -# eliminate this variable in chgres_cube and replace with with 2 or 3 others -# (e.g. extrn_mdl, extrn_mdl_file_format, etc). -# -# tracers_input: -# List of atmospheric tracers to read in from the external model file -# containing these tracers. -# -# tracers: -# Names to use in the output NetCDF file for the atmospheric tracers -# specified in tracers_input. With the possible exception of GSD phys- -# ics, the elements of this array should have a one-to-one correspond- -# ence with the elements in tracers_input, e.g. if the third element of -# tracers_input is the name of the O3 mixing ratio, then the third ele- -# ment of tracers should be the name to use for the O3 mixing ratio in -# the output file. For GSD physics, three additional tracers -- ice, -# rain, and water number concentrations -- may be specified at the end -# of tracers, and these will be calculated by chgres_cube. 
-# -#----------------------------------------------------------------------- -# - -# GSK comments about chgres_cube: -# -# The following are the three atmsopheric tracers that are in the atmo- -# spheric analysis (atmanl) nemsio file for CDATE=2017100700: -# -# "spfh","o3mr","clwmr" -# -# Note also that these are hardcoded in the code (file input_data.F90, -# subroutine read_input_atm_gfs_spectral_file), so that subroutine will -# break if tracers_input(:) is not specified as above. -# -# Note that there are other fields too ["hgt" (surface height (togography?)), -# pres (surface pressure), ugrd, vgrd, and tmp (temperature)] in the atmanl file, but those -# are not considered tracers (they're categorized as dynamics variables, -# I guess). -# -# Another note: The way things are set up now, tracers_input(:) and -# tracers(:) are assumed to have the same number of elements (just the -# atmospheric tracer names in the input and output files may be differ- -# ent). There needs to be a check for this in the chgres_cube code!! -# If there was a varmap table that specifies how to handle missing -# fields, that would solve this problem. -# -# Also, it seems like the order of tracers in tracers_input(:) and -# tracers(:) must match, e.g. if ozone mixing ratio is 3rd in -# tracers_input(:), it must also be 3rd in tracers(:). How can this be checked? -# -# NOTE: Really should use a varmap table for GFS, just like we do for -# RAP/HRRR. -# - -# A non-prognostic variable that appears in the field_table for GSD physics -# is cld_amt. Why is that in the field_table at all (since it is a non- -# prognostic field), and how should we handle it here?? - -# I guess this works for FV3GFS but not for the spectral GFS since these -# variables won't exist in the spectral GFS atmanl files. -# tracers_input="\"sphum\",\"liq_wat\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\",\"o3mr\"" -# -# Not sure if tracers(:) should include "cld_amt" since that is also in -# the field_table for CDATE=2017100700 but is a non-prognostic variable. - -external_model="" -fn_atm="" -fn_grib2="" -input_type="" -tracers_input="\"\"" -tracers="\"\"" -# -#----------------------------------------------------------------------- -# -# If the external model for LBCs is one that does not provide the aerosol -# fields needed by Thompson microphysics (currently only the HRRR and -# RAP provide aerosol data) and if the physics suite uses Thompson -# microphysics, set the variable thomp_mp_climo_file in the chgres_cube -# namelist to the full path of the file containing aerosol climatology -# data. In this case, this file will be used to generate approximate -# aerosol fields in the LBCs that Thompson MP can use. Otherwise, set -# thomp_mp_climo_file to a null string. -# -#----------------------------------------------------------------------- -# -thomp_mp_climo_file="" -if [ "${EXTRN_MDL_NAME_LBCS}" != "HRRR" -a \ - "${EXTRN_MDL_NAME_LBCS}" != "RAP" ] && \ - [ $(boolify "${SDF_USES_THOMPSON_MP}") = "TRUE" ]; then - thomp_mp_climo_file="${THOMPSON_MP_CLIMO_FP}" -fi -# -#----------------------------------------------------------------------- -# -# Set other chgres_cube namelist variables depending on the external -# model used. -# -#----------------------------------------------------------------------- -# -case "${EXTRN_MDL_NAME_LBCS}" in - -"GSMGFS") - external_model="GSMGFS" - input_type="gfs_gaussian_nemsio" # For spectral GFS Gaussian grid in nemsio format. 
- tracers_input="[\"spfh\",\"clwmr\",\"o3mr\"]" - tracers="[\"sphum\",\"liq_wat\",\"o3mr\"]" - ;; - -"FV3GFS") - if [ "${FV3GFS_FILE_FMT_LBCS}" = "nemsio" ]; then - external_model="FV3GFS" - input_type="gaussian_nemsio" # For FV3GFS data on a Gaussian grid in nemsio format. - tracers_input="[\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"]" - tracers="[\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"]" - elif [ "${FV3GFS_FILE_FMT_LBCS}" = "grib2" ]; then - external_model="GFS" - fn_grib2="${EXTRN_MDL_FNS[0]}" - input_type="grib2" - elif [ "${FV3GFS_FILE_FMT_LBCS}" = "netcdf" ]; then - external_model="FV3GFS" - input_type="gaussian_netcdf" # For FV3GFS data on a Gaussian grid in netcdf format. - tracers_input="[\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"]" - tracers="[\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"]" - fi - ;; - -"UFS-CASE-STUDY") - if [ "${FV3GFS_FILE_FMT_LBCS}" = "nemsio" ]; then - external_model="UFS-CASE-STUDY" - input_type="gaussian_nemsio" # For FV3GFS data on a Gaussian grid in nemsio format. - tracers_input="[\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"]" - tracers="[\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"]" - fi - ;; - -"GDAS") - if [ "${FV3GFS_FILE_FMT_LBCS}" = "nemsio" ]; then - input_type="gaussian_nemsio" - elif [ "${FV3GFS_FILE_FMT_LBCS}" = "netcdf" ]; then - input_type="gaussian_netcdf" - fi - external_model="GFS" - tracers_input="[\"spfh\",\"clwmr\",\"o3mr\",\"icmr\",\"rwmr\",\"snmr\",\"grle\"]" - tracers="[\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"]" - fn_atm="${EXTRN_MDL_FNS[0]}" - ;; - -"GEFS") - external_model="GFS" - fn_grib2="${EXTRN_MDL_FNS[0]}" - input_type="grib2" - ;; - -"RAP") - external_model="RAP" - input_type="grib2" - ;; - -"HRRR") - external_model="HRRR" - input_type="grib2" - ;; - -"NAM") - external_model="NAM" - input_type="grib2" - ;; - -*) - message_txt="External-model-dependent namelist variables have not yet been -specified for this external LBC model (EXTRN_MDL_NAME_LBCS): - EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\"" - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi - ;; - -esac -# -#----------------------------------------------------------------------- -# -# Check that the executable that generates the LBCs exists. -# -#----------------------------------------------------------------------- -# -exec_fn="chgres_cube" -exec_fp="$EXECdir/${exec_fn}" -if [ ! -f "${exec_fp}" ]; then - message_txt="The executable (exec_fp) for generating initial conditions -on the FV3-LAM native grid does not exist: - exec_fp = \"${exec_fp}\" -Please ensure that you've built this executable." - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi -fi -# -#----------------------------------------------------------------------- -# -# Loop through the LBC update times and run chgres_cube for each such time to -# obtain an LBC file for each that can be used as input to the FV3-LAM. 
-# -#----------------------------------------------------------------------- -# -num_fhrs="${#EXTRN_MDL_FHRS[@]}" -bcgrp10=${bcgrp#0} -bcgrpnum10=${bcgrpnum#0} -for (( ii=0; ii<${num_fhrs}; ii=ii+bcgrpnum10 )); do - i=$(( ii + bcgrp10 )) - if [ ${i} -lt ${num_fhrs} ]; then - echo " group ${bcgrp10} processes member ${i}" -# -# Get the forecast hour of the external model. -# - fhr="${EXTRN_MDL_FHRS[$i]}" -# -# Set external model output file name and file type/format. Note that -# these are now inputs into chgres_cube. -# - fn_atm="" - fn_grib2="" - - case "${EXTRN_MDL_NAME_LBCS}" in - "GSMGFS") - fn_atm="${EXTRN_MDL_FNS[$i]}" - ;; - "FV3GFS") - if [ "${FV3GFS_FILE_FMT_LBCS}" = "nemsio" ]; then - fn_atm="${EXTRN_MDL_FNS[$i]}" - elif [ "${FV3GFS_FILE_FMT_LBCS}" = "grib2" ]; then - fn_grib2="${EXTRN_MDL_FNS[$i]}" - elif [ "${FV3GFS_FILE_FMT_LBCS}" = "netcdf" ]; then - fn_atm="${EXTRN_MDL_FNS[$i]}" - fi - ;; - "UFS-CASE-STUDY") - if [ "${FV3GFS_FILE_FMT_LBCS}" = "nemsio" ]; then - hh="${EXTRN_MDL_CDATE:8:2}" - fhr_str=$(printf "%03d" ${fhr}) - fn_atm="gfs.t${hh}z.atmf${fhr_str}.nemsio" - unset hh fhr_str - fi - ;; - "GDAS") - fn_atm="${EXTRN_MDL_FNS[$i]}" - ;; - "GEFS") - fn_grib2="${EXTRN_MDL_FNS[$i]}" - ;; - "RAP") - fn_grib2="${EXTRN_MDL_FNS[$i]}" - ;; - "HRRR") - fn_grib2="${EXTRN_MDL_FNS[$i]}" - ;; - "NAM") - fn_grib2="${EXTRN_MDL_FNS[$i]}" - ;; - *) - message_txt="The external model output file name to use in the chgres_cube -FORTRAN namelist file has not specified for this external LBC model (EXTRN_MDL_NAME_LBCS): - EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\"" - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi - ;; - esac -# -# Get the starting date (year, month, and day together), month, day, and -# hour of the the external model forecast. Then add the forecast hour -# to it to get a date and time corresponding to the current forecast time. -# - yyyymmdd="${EXTRN_MDL_CDATE:0:8}" - mm="${EXTRN_MDL_CDATE:4:2}" - dd="${EXTRN_MDL_CDATE:6:2}" - hh="${EXTRN_MDL_CDATE:8:2}" - - cdate_crnt_fhr=$( $DATE_UTIL --utc --date "${yyyymmdd} ${hh} UTC + ${fhr} hours" "+%Y%m%d%H" ) -# -# Get the month, day, and hour corresponding to the current forecast time -# of the the external model. -# - mm="${cdate_crnt_fhr:4:2}" - dd="${cdate_crnt_fhr:6:2}" - hh="${cdate_crnt_fhr:8:2}" -# -# Build the FORTRAN namelist file that chgres_cube will read in. -# - -# -# Create a multiline variable that consists of a yaml-compliant string -# specifying the values that the namelist variables need to be set to -# (one namelist variable per line, plus a header and footer). Below, -# this variable will be passed to a python script that will create the -# namelist file. -# -# IMPORTANT: -# If we want a namelist variable to be removed from the namelist file, -# in the "settings" variable below, we need to set its value to the -# string "null". 
-# - settings=" -'config': - 'fix_dir_target_grid': ${FIXlam} - 'mosaic_file_target_grid': ${FIXlam}/${CRES}${DOT_OR_USCORE}mosaic.halo$((10#${NH4})).nc - 'orog_dir_target_grid': ${FIXlam} - 'orog_files_target_grid': ${CRES}${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo$((10#${NH4})).nc - 'vcoord_file_target_grid': ${VCOORD_FILE} - 'varmap_file': ${PARMdir}/ufs_utils/varmap_tables/${varmap_file} - 'data_dir_input_grid': ${extrn_mdl_staging_dir} - 'atm_files_input_grid': ${fn_atm} - 'grib2_file_input_grid': \"${fn_grib2}\" - 'cycle_mon': $((10#${mm})) - 'cycle_day': $((10#${dd})) - 'cycle_hour': $((10#${hh})) - 'convert_atm': True - 'regional': 2 - 'halo_bndy': $((10#${NH4})) - 'halo_blend': $((10#${HALO_BLEND})) - 'input_type': ${input_type} - 'external_model': ${external_model} - 'tracers_input': ${tracers_input} - 'tracers': ${tracers} - 'thomp_mp_climo_file': ${thomp_mp_climo_file} -" - - nml_fn="fort.41" - # UW takes input from stdin when no -i/--input-config flag is provided - (cat << EOF -$settings -EOF -) | uw config realize \ - --input-format yaml \ - -o ${nml_fn} \ - --output-format nml \ - -v \ - - export err=$? - if [ $err -ne 0 ]; then - message_txt="Error creating namelist read by ${exec_fn} failed. - Settings for input are: -$settings" - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_exit "${message_txt}" - else - print_err_msg_exit "${message_txt}" - fi - fi -# -#----------------------------------------------------------------------- -# -# Run chgres_cube. -# -#----------------------------------------------------------------------- -# -# NOTE: -# Often when the chgres_cube.exe run fails, it still returns a zero -# return code, so the failure isn't picked up the the logical OR (||) -# below. That should be fixed. This might be due to the RUN_CMD_UTILS -# command - maybe that is returning a zero exit code even though the -# exit code of chgres_cube is nonzero. A similar thing happens in the -# forecast task. -# - PREP_STEP - eval ${RUN_CMD_UTILS} ${exec_fp} ${REDIRECT_OUT_ERR} - export err=$? - if [ "${RUN_ENVIR}" = "nco" ] && [ "${MACHINE}" = "WCOSS2" ]; then - err_chk - else - if [ $err -ne 0 ]; then - print_err_msg_exit "\ -Call to executable (exec_fp) to generate lateral boundary conditions (LBCs) -file for the FV3-LAM for forecast hour fhr failed: - exec_fp = \"${exec_fp}\" - fhr = \"$fhr\" -The external model from which the LBCs files are to be generated is: - EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\" -The external model files that are inputs to the executable (exec_fp) are -located in the following directory: - extrn_mdl_staging_dir = \"${extrn_mdl_staging_dir}\"" - fi - fi - POST_STEP -# -# Move LBCs file for the current lateral boundary update time to the LBCs -# work directory. Note that we rename the file by including in its name -# the forecast hour of the FV3-LAM (which is not necessarily the same as -# that of the external model since their start times may be offset). 
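The rename below subtracts the LBCs offset from the external-model forecast hour; with the RAP default offset of 3, external hour 9 becomes the FV3-LAM boundary file for f006. A small Python sketch of the computation (the NET and cycle strings are assumed examples):

# Sketch: FV3-LAM boundary-file hour = external-model hour minus LBC offset.
extrn_mdl_fhr = 9        # assumed external-model forecast hour
lbcs_offset_hrs = 3      # assumed EXTRN_MDL_LBCS_OFFSET_HRS (RAP default)
fcst_hhh = extrn_mdl_fhr - lbcs_offset_hrs
print(f"rrfs.t00z.gfs_bndy.tile7.f{fcst_hhh:03d}.nc")  # assumed NET/cycle names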
-# - lbc_spec_fhrs=( "${EXTRN_MDL_FHRS[$i]}" ) - fcst_hhh=$(( ${lbc_spec_fhrs} - ${EXTRN_MDL_LBCS_OFFSET_HRS} )) - fcst_hhh_FV3LAM=$( printf "%03d" "$fcst_hhh" ) - if [ $(boolify "${CPL_AQM}") = "TRUE" ]; then - cp -p gfs.bndy.nc ${DATA_SHARE}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc - else - mv gfs.bndy.nc ${INPUT_DATA}/${NET}.${cycle}${dot_ensmem}.gfs_bndy.tile7.f${fcst_hhh_FV3LAM}.nc - fi - - fi -done -# -#----------------------------------------------------------------------- -# -# Print message indicating successful completion of script. -# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Lateral boundary condition (LBC) files (in NetCDF format) generated suc- -cessfully for all LBC update hours (except hour zero)!!! - -Exiting script: \"${scrfunc_fn}\" -In directory: \"${scrfunc_dir}\" -========================================================================" -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 diff --git a/ush/external_model_defaults.yaml b/ush/external_model_defaults.yaml index ffd315b6a..3a9c9f3ba 100644 --- a/ush/external_model_defaults.yaml +++ b/ush/external_model_defaults.yaml @@ -1,5 +1,3 @@ - - FV3GFS: task_make_ics: chgres_cube: @@ -103,7 +101,6 @@ HRRR: external_model: "HRRR" geogrid_file_input_grid: "{{ platform.FIXgsm }}/geo_em.d01.nc_HRRRX" tg3_from_soil: True - nsoill_out: 9 task_make_lbcs: chgres_cube: namelist: @@ -123,7 +120,6 @@ RAP: external_model: "RAP" geogrid_file_input_grid: "{{ platform.FIXgsm }}/geo_em.d01.nc_RAPX" tg3_from_soil: True - nsoill_out: 9 task_make_lbcs: chgres_cube: namelist: diff --git a/uwtools b/uwtools deleted file mode 160000 index 406750c80..000000000 --- a/uwtools +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 406750c80746424f2ee537f79130ec1f8aeab0b7 From 25fd2b466605d0134abc760bf94f03de7a43b3cd Mon Sep 17 00:00:00 2001 From: Brian Weir <94982354+WeirAE@users.noreply.github.com> Date: Fri, 25 Oct 2024 14:06:55 -0500 Subject: [PATCH 33/47] Apply suggestions from code review Co-authored-by: NaureenBharwaniNOAA <136371446+NaureenBharwaniNOAA@users.noreply.github.com> --- parm/wflow/coldstart.yaml | 4 ++-- scripts/chgres_cube.py | 6 ++---- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/parm/wflow/coldstart.yaml b/parm/wflow/coldstart.yaml index ac319f7cf..1770367f1 100644 --- a/parm/wflow/coldstart.yaml +++ b/parm/wflow/coldstart.yaml @@ -85,7 +85,7 @@ metatask_run_ensemble: mem: '{% if global.DO_ENSEMBLE %}{%- for m in range(1, global.NUM_ENS_MEMBERS+1) -%}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' task_make_ics_mem#mem#: <<: *default_task - command: !cycstr 'source &USHdir;/load_modules_wflow.sh hera ; python &SCRIPTSdir;/chgres_cube.py + command: !cycstr 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} ; python &SCRIPTSdir;/chgres_cube.py -c &GLOBAL_VAR_DEFNS_FP; --cycle @Y-@m-@dT@H:@M:@S --key-path task_make_ics @@ -128,7 +128,7 @@ metatask_run_ensemble: task_make_lbcs_mem#mem#: <<: *default_task - command: !cycstr 'source &USHdir;/load_modules_wflow.sh hera ; python &SCRIPTSdir;/chgres_cube.py + command: !cycstr 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} ; python 
&SCRIPTSdir;/chgres_cube.py -c &GLOBAL_VAR_DEFNS_FP; --cycle @Y-@m-@dT@H:@M:@S --key-path task_make_lbcs diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py index 6be8ccc65..5a952a03a 100644 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -21,10 +21,8 @@ def _parse_var_defns(file): var_dict = {} with open(file, "r", encoding="utf-8") as f: - lines = f.readlines() - for line in lines: - line = line.strip() - if "=" in line: + for line in f: + if "=" in line.strip(): key, value = line.split("=", 1) key = key.strip() value = value.strip() From 0e2f6e03d601efd4115ebf519d4ec5628cb9bced Mon Sep 17 00:00:00 2001 From: WeirAE Date: Tue, 29 Oct 2024 22:44:56 +0000 Subject: [PATCH 34/47] add back several changes lost in conflict resolutions --- parm/wflow/coldstart.yaml | 4 +- scripts/chgres_cube.py | 119 +++++----------------- scripts/exregional_get_extrn_mdl_files.sh | 4 +- ush/ccpp_suites_defaults.yaml | 2 +- ush/config_defaults.yaml | 22 +++- ush/config_defaults_aqm.yaml | 12 --- ush/retrieve_data.py | 20 ++-- ush/setup.py | 8 +- 8 files changed, 63 insertions(+), 128 deletions(-) mode change 100644 => 100755 scripts/chgres_cube.py diff --git a/parm/wflow/coldstart.yaml b/parm/wflow/coldstart.yaml index 2a476f9af..ad4e15dbc 100644 --- a/parm/wflow/coldstart.yaml +++ b/parm/wflow/coldstart.yaml @@ -102,7 +102,7 @@ metatask_run_ensemble: mem: '{% if global.DO_ENSEMBLE %}{%- for m in range(1, global.NUM_ENS_MEMBERS+1) -%}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' task_make_ics_mem#mem#: <<: *default_task - command: !cycstr 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} ; python &SCRIPTSdir;/chgres_cube.py + command: !cycstr 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} && python &SCRIPTSdir;/chgres_cube.py -c &GLOBAL_VAR_DEFNS_FP; --cycle @Y-@m-@dT@H:@M:@S --key-path task_make_ics @@ -148,7 +148,7 @@ metatask_run_ensemble: task_make_lbcs_mem#mem#: <<: *default_task - command: !cycstr 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} ; python &SCRIPTSdir;/chgres_cube.py + command: !cycstr 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} && python &SCRIPTSdir;/chgres_cube.py -c &GLOBAL_VAR_DEFNS_FP; --cycle @Y-@m-@dT@H:@M:@S --key-path task_make_lbcs diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py old mode 100644 new mode 100755 index 5a952a03a..c6de96fa0 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -18,23 +18,6 @@ from uwtools.api.logging import use_uwtools_logger -def _parse_var_defns(file): - var_dict = {} - with open(file, "r", encoding="utf-8") as f: - for line in f: - if "=" in line.strip(): - key, value = line.split("=", 1) - key = key.strip() - value = value.strip() - - if value.startswith("(") and value.endswith(")"): - items = re.findall(r"\((.*?)\)", value) - if items: - value = [item.strip() for item in items[0].split()] - var_dict[key] = value - return var_dict - - def _walk_key_path(config, key_path): """ Navigate to the sub-config at the end of the path of given keys. 
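A helper with this docstring just descends the nested experiment config one key at a time. A minimal sketch consistent with that description (the body here is an assumption, not the repository code):

# Sketch: walk a nested config mapping along a list of keys (illustrative).
def walk_key_path(config, key_path):
    for key in key_path:
        config = config[key]  # KeyError signals an invalid path
    return config

cfg = {"task_make_ics": {"chgres_cube": {"rundir": "/tmp/run"}}}  # assumed example
print(walk_key_path(cfg, ["task_make_ics", "chgres_cube"]))  # {'rundir': '/tmp/run'}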
@@ -105,16 +88,8 @@ def run_chgres_cube(config_file, cycle, key_path, member): os.environ["MEMBER"] = member # set universal variables - cyc = str(expt_config["workflow"]["DATE_FIRST_CYCL"])[8:10] - dot_ensmem = ( - f".mem{member}" - if ( - expt_config["user"]["RUN_ENVIR"] == "nco" - and expt_config["global"]["DO_ENSEMBLE"] - and member - ) - else "" - ) + cyc = str(cycle.strftime("%H")] + dot_ensmem = f".mem{member}" if expt_config["global"]["DO_ENSEMBLE"] else "" nco_net = expt_config["nco"]["NET_default"] # Extract driver config from experiment config @@ -138,42 +113,29 @@ def run_chgres_cube(config_file, cycle, key_path, member): input_type = chgres_cube_config["chgres_cube"]["namelist"]["update_values"][ "config" ].get("input_type") + varsfilepath = chgres_cube_config["input_files_metadata_path"] + external_config = get_yaml_config(varsfilepath) + external_config_fns = external_config["external_model_fns"] + external_config_fhrs = external_config["external_model_fhrs"] # update config for ics task, run and stage data if "task_make_ics" in key_path: - varsfilepath = chgres_cube_config["input_files_metadata_path"] - shconfig = _parse_var_defns(varsfilepath) - extrn_config_fns = shconfig["EXTRN_MDL_FNS"] - extrn_config_fhrs = shconfig["EXTRN_MDL_FHRS"] if input_type == "grib2": fn_grib2 = extrn_config_fns[0] - update = {"grib2_file_input_grid": fn_grib2} else: - fn_atm = extrn_config_fns[0] - fn_sfc = extrn_config_fns[1] - update = {"atm_files_input_grid": fn_atm, "sfc_files_input_grid": fn_sfc} - if expt_config["task_get_extrn_ics"]["EXTRN_MDL_NAME_ICS"] in [ - "HRRR", - "RAP", - ]: - if expt_config["workflow"]["SDF_USES_RUC_LSM"] is True: - update["nsoill_out"] = 9 - else: - if expt_config["workflow"]["SDF_USES_THOMPSON_MP"] is True: - update["thomp_mp_climo_file"] = expt_config["workflow"][ - "THOMPSON_MP_CLIMO_FP" - - ] + fn_atm = external_config_fns[0] + fn_sfc = external_config_fns[1] - update_cfg = { - "task_make_ics": { - "chgres_cube": {"namelist": {"update_values": {"config": update}}} - } - } expt_config_cp.update_from(update_cfg) # reinstantiate driver + expt_config_cp.dereference( + context={ + "cycle": cycle, + **expt_config_cp, + } + ) chgres_cube_driver = ChgresCube( config=expt_config_cp, cycle=cycle, @@ -183,30 +145,18 @@ def run_chgres_cube(config_file, cycle, key_path, member): # Deliver output data to a common location above the rundir. 
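Delivery is a pure linking step: the links mapping built below pairs each delivered file name with the file chgres_cube produced, and uwtools creates the symlinks. A minimal sketch of that call with assumed paths:

# Sketch: stage outputs under ../INPUT by symlinking them to their
# delivered names (directory and file names are assumed examples).
from pathlib import Path
from uwtools.api.file import link as uwlink

rundir = Path("/expt/2024072518/fcst/tmp_MAKE_ICS")
links = {"rrfs.t18z.gfs_ctrl.nc": str(rundir / "gfs_ctrl.nc")}  # name -> source
uwlink(target_dir=str(rundir.parent / "INPUT"), config=links)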
links = {} - tile_rgnl = expt_config["constants"]["TILE_RGNL"] - nh0 = expt_config["constants"]["NH0"] output_dir = os.path.join(rundir.parent, "INPUT") os.makedirs(output_dir, exist_ok=True) - links[ - f"{nco_net}.t{cyc}z{dot_ensmem}.gfs_data.tile{tile_rgnl}.halo{nh0}.nc" - ] = str(rundir / f"out.atm.tile{tile_rgnl}.nc") - links[ - f"{nco_net}.t{cyc}z{dot_ensmem}.sfc_data.tile{tile_rgnl}.halo{nh0}.nc" - ] = str(rundir / f"out.sfc.tile{tile_rgnl}.nc") - links[f"{nco_net}.t{cyc}z.gfs_ctrl.nc"] = str(rundir / f"gfs_ctrl.nc") - links[f"{nco_net}.t{cyc}z{dot_ensmem}.gfs_bndy.tile{tile_rgnl}.f000.nc"] = str( - rundir / f"gfs.bndy.nc" - ) + for i, label in enumerate(chgres_cube_config["output_file_labels"]): + input_fn = expt_config["task_get_extrn_ics"]["output_files"][i] + links[input_fn] = str(label) + uwlink(target_dir=output_dir, config=links) # update config for lbcs task, loop run and stage data else: fn_sfc = "" - varsfilepath = chgres_cube_config["input_files_metadata_path"] - shconfig = _parse_var_defns(varsfilepath) - extrn_config_fns = shconfig["EXTRN_MDL_FNS"] - extrn_config_fhrs = shconfig["EXTRN_MDL_FHRS"] num_fhrs = len(extrn_config_fhrs) bcgrp10 = 0 @@ -217,29 +167,16 @@ def run_chgres_cube(config_file, cycle, key_path, member): print(f"group {bcgrp10} processes member {i}") if input_type == "grib2": fn_grib2 = extrn_config_fns[i] - update = {"grib2_file_input_grid": fn_grib2} else: fn_atm = extrn_config_fns[i] - update = {"atm_files_input_grid": fn_atm} - if expt_config["task_get_extrn_lbcs"]["EXTRN_MDL_NAME_LBCS"] not in [ - "HRRR", - "RAP", - ]: - if expt_config["workflow"]["SDF_USES_THOMPSON_MP"] is True: - update["thomp_mp_climo_file"] = expt_config["workflow"][ - "THOMPSON_MP_CLIMO_FP" - ] - - update_cfg = { - "task_make_lbcs": { - "chgres_cube": { - "namelist": {"update_values": {"config": update}} - } - } - } - expt_config_cp.update_from(update_cfg) # reinstantiate driver + expt_config_cp.dereference( + context={ + "cycle": cycle, + **expt_config_cp, + } + ) chgres_cube_driver = ChgresCube( config=expt_config_cp, cycle=cycle, @@ -257,13 +194,11 @@ def run_chgres_cube(config_file, cycle, key_path, member): fcst_hhh = int(lbc_spec_fhrs) - int(lbc_offset_fhrs) fcst_hhh_FV3LAM = f"{fcst_hhh:03d}" - lbc_input_fn = rundir / f"gfs.bndy.nc" output_dir = os.path.join(rundir.parent, "INPUT") os.makedirs(output_dir, exist_ok=True) - lbc_output_fn = str( - f"{nco_net}.t{cyc}z{dot_ensmem}" - f".gfs_bndy.tile7.f{fcst_hhh_FV3LAM}.nc" - ) + + lbc_input_fn = expt_config["task_get_extrn_lbcs"]["output_file_labels"][0] + lbc_output_fn = chgres_cube_config["output_file_labels"][0] links[lbc_output_fn] = str(lbc_input_fn) uwlink(target_dir=output_dir, config=links) diff --git a/scripts/exregional_get_extrn_mdl_files.sh b/scripts/exregional_get_extrn_mdl_files.sh index 554cf2cc4..8a2e1265f 100755 --- a/scripts/exregional_get_extrn_mdl_files.sh +++ b/scripts/exregional_get_extrn_mdl_files.sh @@ -240,9 +240,9 @@ fi mkdir -p ${EXTRN_MDL_STAGING_DIR} if [ $RUN_ENVIR = "nco" ]; then - EXTRN_DEFNS="${NET}.${cycle}.${EXTRN_MDL_NAME}.${ICS_OR_LBCS}.${EXTRN_MDL_VAR_DEFNS_FN}.sh" + EXTRN_DEFNS="${NET}.${cycle}.${EXTRN_MDL_NAME}.${ICS_OR_LBCS}.${EXTRN_MDL_VAR_DEFNS_FN}.yaml" else - EXTRN_DEFNS="${EXTRN_MDL_VAR_DEFNS_FN}.sh" + EXTRN_DEFNS="${EXTRN_MDL_VAR_DEFNS_FN}.yaml" fi cmd=" python3 -u ${USHdir}/retrieve_data.py \ diff --git a/ush/ccpp_suites_defaults.yaml b/ush/ccpp_suites_defaults.yaml index 86960655b..fa9f85347 100644 --- a/ush/ccpp_suites_defaults.yaml +++ b/ush/ccpp_suites_defaults.yaml @@ -1,4 
+1,4 @@ -gsd_defaults: &chgres_cube_gsd_defaults +chgres_cube_gsd_defaults: &chgres_cube_gsd_defaults namelist: update_values: config: diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index e57839e97..5eac6d982 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -1612,6 +1612,11 @@ task_get_extrn_ics: EXTRN_MDL_SOURCE_BASEDIR_ICS: "" EXTRN_MDL_FILES_ICS: "" rundir: '{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}/for_ICS{{ "/mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}' + output_file_labels: + - "out.atm.tile{{{ constants.TILE_RGNL }}.nc" + - "out.sfc.tile{{ constants.TILE_RGNL }}.nc" + - "gfs_ctrl.nc" + - "gfs.bndy.nc" #---------------------------- # EXTRN LBCS config parameters @@ -1694,6 +1699,8 @@ task_get_extrn_lbcs: USE_USER_STAGED_EXTRN_FILES: false EXTRN_MDL_SOURCE_BASEDIR_LBCS: "" EXTRN_MDL_FILES_LBCS: "" + output_file_labels: + - "gfs.bndy.nc" #---------------------------- # MAKE ICS config parameters @@ -1760,7 +1767,7 @@ task_make_ics: FVCOM_FILE: "fvcom.nc" VCOORD_FILE: "{{ workflow.FIXam }}/global_hyblev.l65.txt" #------------------------------------------------------------------------ - input_files_metadata_path: '{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}/{{ task_get_extrn_ics.EXTRN_MDL_NAME_ICS }}/for_ICS{{ "/mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}/{{ workflow.EXTRN_MDL_VAR_DEFNS_FN }}.sh' + input_files_metadata_path: '{{ task_get_extrn_ics.envvars.rundir }}/{{ workflow.EXTRN_MDL_VAR_DEFNS_FN }}.sh' chgres_cube: execution: executable: "{{ user.EXECdir }}/chgres_cube" @@ -1780,7 +1787,7 @@ task_make_ics: orog_files_target_grid: "{{ 'CRES' | env }}{{ workflow.DOT_OR_USCORE }}oro_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH4 }}.nc" vcoord_file_target_grid: "{{ workflow.FIXam }}/global_hyblev.l65.txt" varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" - data_dir_input_grid: '{{workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}/{{ task_get_extrn_ics.EXTRN_MDL_NAME_ICS }}/for_ICS{{ "/mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}' + data_dir_input_grid: '{{ task_get_extrn_ics.envvars.rundir }}' cycle_mon: !int "{{ cycle.strftime('%m') }}" cycle_day: !int "{{ cycle.strftime('%d') }}" cycle_hour: !int "{{ cycle.strftime('%H') }}" @@ -1800,6 +1807,11 @@ task_make_ics: lai_from_climo: true tg3_from_soil: false rundir: '{{ task_run_fcst.rundir }}/tmp_MAKE_ICS' + output_file_labels: + - '{{ nco.NET_DEFAULT }}.t{{ cycle.hh }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.sfc_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH0 }}.nc' + - '{{ nco.NET_DEFAULT }}.t{{ cycle.hh }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.gfs_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH0 }}.nc' + - '{{ nco.NET_DEFAULT }}.t{{ cycle.hh }}z.gfs_ctrl.nc' + - '{{ nco.NET_DEFAULT }}.t{{ cycle.hh }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.gfs_bndy.tile{{ constants.TILE_RGNL }}.f000.nc' #---------------------------- # MAKE LBCS config parameters @@ -1830,7 +1842,7 @@ task_make_lbcs: OMP_STACKSIZE_MAKE_LBCS: "1024m" VCOORD_FILE: "{{ workflow.FIXam }}/global_hyblev.l65.txt" #------------------------------------------------------------------------ - input_files_metadata_path: '{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}/{{ task_get_extrn_lbcs.EXTRN_MDL_NAME_LBCS }}/for_LBCS{{ "/mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}/{{ workflow.EXTRN_MDL_VAR_DEFNS_FN }}.sh' + input_files_metadata_path: '{{ task_get_extrn_lbcs.envvars.rundir }}/{{ workflow.EXTRN_MDL_VAR_DEFNS_FN 
}}.yaml' chgres_cube: execution: executable: "{{ user.EXECdir }}/chgres_cube" @@ -1850,7 +1862,7 @@ task_make_lbcs: orog_files_target_grid: "{{ 'CRES' | env }}{{ workflow.DOT_OR_USCORE }}oro_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH4 }}.nc" vcoord_file_target_grid: "{{ workflow.FIXam }}/global_hyblev.l65.txt" varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" - data_dir_input_grid: '{{workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}/{{ task_get_extrn_lbcs.EXTRN_MDL_NAME_LBCS }}/for_LBCS{{ "/mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}' + data_dir_input_grid: '{{ task_get_extrn_lbcs.envvars.rundir }}' cycle_mon: !int "{{ cycle.strftime('%m') }}" cycle_day: !int "{{ cycle.strftime('%d') }}" cycle_hour: !int "{{ cycle.strftime('%H') }}" @@ -1861,6 +1873,8 @@ task_make_lbcs: input_type: "gaussian_nemsio" external_model: "FV3GFS" rundir: '{{ task_run_fcst.rundir}}/tmp_MAKE_LBCS' + output_file_labels: + - '{{ nco.NET_DEFAULT }}.t{{ cycle.hh }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.gfs_bndy.tile7.f{{ "fcst_hhh_FV3LAM" | env }}.nc' #---------------------------- # IO_LAYOUT_Y FORECAST config parameters diff --git a/ush/config_defaults_aqm.yaml b/ush/config_defaults_aqm.yaml index 16eed7e7f..928b50863 100644 --- a/ush/config_defaults_aqm.yaml +++ b/ush/config_defaults_aqm.yaml @@ -1,16 +1,4 @@ -task_make_ics: - chgres_cube: - namelist: - update_values: - config: - input_type: "gaussian_netcdf" -task_make_lbcs: - chgres_cube: - namelist: - update_values: - config: - input_type: "gaussian_netcdf" task_run_post: upp: files_to_copy: diff --git a/ush/retrieve_data.py b/ush/retrieve_data.py index 5acf9d5ce..fe20d754c 100755 --- a/ush/retrieve_data.py +++ b/ush/retrieve_data.py @@ -742,18 +742,16 @@ def write_summary_file(cla, data_store, file_templates): output_path = fill_template(cla.output_path, cla.cycle_date, mem=mem) summary_fp = os.path.join(output_path, cla.summary_file) logging.info(f"Writing a summary file to {summary_fp}") - file_contents = dedent( - f""" - DATA_SRC={data_store} - EXTRN_MDL_CDATE={cla.cycle_date.strftime('%Y%m%d%H')} - EXTRN_MDL_STAGING_DIR={output_path} - EXTRN_MDL_FNS=( {' '.join(files)} ) - EXTRN_MDL_FHRS=( {' '.join([str(i) for i in cla.fcst_hrs])} ) - """ - ) + file_contents = { + "data_source": data_store, + "external_model_cdate": cla.cycle_date.strftime('%Y%m%d%H'), + "external_model_staging_dir": output_path, + "external_model_fns": files, + "external_model_fhrs": [str(i) for i in cla.fcst_hrs] + } logging.info(f"Contents: {file_contents}") - with open(summary_fp, "w") as summary: - summary.write(file_contents) + with open(summary_fp, "w") as f: + yaml.dump(file_contents, f) def to_datetime(arg): diff --git a/ush/setup.py b/ush/setup.py index f80d3011f..d419ef131 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -151,10 +151,10 @@ def load_config_for_setup(ushdir, default_config_path, user_config_path): ("task_get_extrn_lbcs", "EXTRN_MDL_NAME_LBCS", "task_make_ics")] for task, mdl_key, make_key in tasks: - extrn_mdl = cfg_d[task][mdl_key] - extrn_cfg = get_yaml_config(Path(ushdir, "external_model_defaults.yaml")).get(extrn_mdl, {}) - del extrn_cfg[make_key] - update_dict(extrn_cfg, cfg_d) + external_mdl = cfg_d[task][mdl_key] + external_cfg = get_yaml_config(Path(ushdir, "external_model_defaults.yaml")).get(external_mdl, {}) + del external_cfg[make_key] + update_dict(external_cfg, cfg_d) # Set "Home" directory, the top-level ufs-srweather-app directory homedir = Path(__file__).parent.parent.resolve() From 
093fbf0080f20b684cf4b3201e0fcbe7b6de8c17 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Wed, 30 Oct 2024 15:35:11 +0000 Subject: [PATCH 35/47] additional residual formatting fixes --- scripts/chgres_cube.py | 43 +++++++++------------- ush/config_defaults.yaml | 63 +++++++++++++++++--------------- ush/external_model_defaults.yaml | 24 ++++++++---- ush/setup.py | 4 +- 4 files changed, 68 insertions(+), 66 deletions(-) diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py index c6de96fa0..45e88ae7c 100755 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -6,7 +6,6 @@ import datetime as dt import logging import os -import re import sys from argparse import ArgumentParser from copy import deepcopy @@ -74,8 +73,7 @@ def parse_args(argv): ) return parser.parse_args(argv) - -# pylint: disable=too-many-locals, too-many-statements, too-many-branches +# pylint: disable-next=too-many-locals, too-many-statements def run_chgres_cube(config_file, cycle, key_path, member): """ Setup and run the chgres_cube Driver. @@ -87,11 +85,6 @@ def run_chgres_cube(config_file, cycle, key_path, member): os.environ["CRES"] = CRES os.environ["MEMBER"] = member - # set universal variables - cyc = str(cycle.strftime("%H")] - dot_ensmem = f".mem{member}" if expt_config["global"]["DO_ENSEMBLE"] else "" - nco_net = expt_config["nco"]["NET_default"] - # Extract driver config from experiment config chgres_cube_driver = ChgresCube( config=config_file, @@ -120,14 +113,11 @@ def run_chgres_cube(config_file, cycle, key_path, member): # update config for ics task, run and stage data if "task_make_ics" in key_path: - if input_type == "grib2": - fn_grib2 = extrn_config_fns[0] + os.environ["fn_grib2"] = external_config_fns[0] else: - fn_atm = external_config_fns[0] - fn_sfc = external_config_fns[1] - - expt_config_cp.update_from(update_cfg) + os.environ["fn_atm"] = external_config_fns[0] + os.environ["fn_sfc"] = external_config_fns[1] # reinstantiate driver expt_config_cp.dereference( @@ -156,8 +146,7 @@ def run_chgres_cube(config_file, cycle, key_path, member): # update config for lbcs task, loop run and stage data else: - fn_sfc = "" - num_fhrs = len(extrn_config_fhrs) + num_fhrs = len(external_config_fhrs) bcgrp10 = 0 bcgrpnum10 = 1 @@ -166,9 +155,16 @@ def run_chgres_cube(config_file, cycle, key_path, member): if i < num_fhrs: print(f"group {bcgrp10} processes member {i}") if input_type == "grib2": - fn_grib2 = extrn_config_fns[i] + os.environ["fn_grib2"] = external_config_fns[i] else: - fn_atm = extrn_config_fns[i] + os.environ["fn_atm"] = external_config_fns[i] + + lbc_spec_fhrs = external_config_fhrs[i] + lbc_offset_fhrs = expt_config_cp["task_get_extrn_lbcs"][ + "EXTRN_MDL_LBCS_OFFSET_HRS" + ] + fcst_hhh = int(lbc_spec_fhrs) - int(lbc_offset_fhrs) + os.environ["fcst_hhh_FV3LAM"] = f"{fcst_hhh:03d}" # reinstantiate driver expt_config_cp.dereference( @@ -187,17 +183,12 @@ def run_chgres_cube(config_file, cycle, key_path, member): # Deliver output data to a common location above the rundir. 
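The os.environ assignments in this hunk are what let expressions like {{ 'fn_atm' | env }} in the defaults resolve when the config is dereferenced. A minimal Jinja2 sketch of that mechanism, assuming the env filter behaves like a plain environment lookup:

# Sketch: an env-style Jinja2 filter resolving a config expression
# (illustrative; the real filter is supplied by uwtools).
import os
from jinja2 import Environment

os.environ["fn_atm"] = "gfs.t00z.atmf000.nemsio"  # assumed example value
env = Environment()
env.filters["env"] = lambda name: os.environ.get(name, "")
tmpl = env.from_string("atm_files_input_grid: {{ 'fn_atm' | env }}")
print(tmpl.render())  # atm_files_input_grid: gfs.t00z.atmf000.nemsio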
links = {} - lbc_spec_fhrs = extrn_config_fhrs[i] - lbc_offset_fhrs = expt_config_cp["task_get_extrn_lbcs"][ - "EXTRN_MDL_LBCS_OFFSET_HRS" - ] - fcst_hhh = int(lbc_spec_fhrs) - int(lbc_offset_fhrs) - fcst_hhh_FV3LAM = f"{fcst_hhh:03d}" - output_dir = os.path.join(rundir.parent, "INPUT") os.makedirs(output_dir, exist_ok=True) - lbc_input_fn = expt_config["task_get_extrn_lbcs"]["output_file_labels"][0] + lbc_input_fn = expt_config["task_get_extrn_lbcs"]["output_file_labels"][ + 0 + ] lbc_output_fn = chgres_cube_config["output_file_labels"][0] links[lbc_output_fn] = str(lbc_input_fn) uwlink(target_dir=output_dir, config=links) diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 5eac6d982..744bddb2c 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -1781,31 +1781,33 @@ task_make_ics: namelist: update_values: config: - fix_dir_target_grid: "{{ workflow.FIXlam }}" - mosaic_file_target_grid: "{{ workflow.FIXlam }}/{{ 'CRES' | env }}{{ workflow.DOT_OR_USCORE }}mosaic.halo{{ constants.NH4 }}.nc" - orog_dir_target_grid: "{{ workflow.FIXlam }}" - orog_files_target_grid: "{{ 'CRES' | env }}{{ workflow.DOT_OR_USCORE }}oro_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH4 }}.nc" - vcoord_file_target_grid: "{{ workflow.FIXam }}/global_hyblev.l65.txt" - varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" - data_dir_input_grid: '{{ task_get_extrn_ics.envvars.rundir }}' - cycle_mon: !int "{{ cycle.strftime('%m') }}" - cycle_day: !int "{{ cycle.strftime('%d') }}" - cycle_hour: !int "{{ cycle.strftime('%H') }}" + atm_files_input_grid: "{{ 'fn_atm' | env }}" convert_atm: true - convert_sfc: true convert_nst: true - regional: 1 - halo_bndy: !int "{{ constants.NH4 }}" + convert_sfc: true + cycle_day: !int "{{ cycle.strftime('%d') }}" + cycle_hour: !int "{{ cycle.strftime('%H') }}" + cycle_mon: !int "{{ cycle.strftime('%m') }}" + data_dir_input_grid: '{{ task_get_extrn_ics.envvars.rundir }}' + external_model: "FV3GFS" + fix_dir_target_grid: "{{ workflow.FIXlam }}" halo_blend: !int "{{ global.HALO_BLEND }}" + halo_bndy: !int "{{ constants.NH4 }}" input_type: "gaussian_nemsio" - external_model: "FV3GFS" + lai_from_climo: true + minmax_vgfrc_from_climo: true + mosaic_file_target_grid: "{{ workflow.FIXlam }}/{{ 'CRES' | env }}_mosaic.halo{{ constants.NH4 }}.nc" nsoill_out: 4 - vgtyp_from_climo: true + orog_dir_target_grid: "{{ workflow.FIXlam }}" + orog_files_target_grid: "{{ 'CRES' | env }}_oro_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH4 }}.nc" + regional: 1 + sfc_files_input_grid: "{{ 'fn_sfc' | env }}" sotyp_from_climo: true - vgfrc_from_climo: true - minmax_vgfrc_from_climo: true - lai_from_climo: true tg3_from_soil: false + varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" + vcoord_file_target_grid: "{{ workflow.FIXam }}/global_hyblev.l65.txt" + vgtyp_from_climo: true + vgfrc_from_climo: true rundir: '{{ task_run_fcst.rundir }}/tmp_MAKE_ICS' output_file_labels: - '{{ nco.NET_DEFAULT }}.t{{ cycle.hh }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.sfc_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH0 }}.nc' @@ -1856,22 +1858,23 @@ task_make_lbcs: namelist: update_values: config: - fix_dir_target_grid: "{{ workflow.FIXlam }}" - mosaic_file_target_grid: "{{ workflow.FIXlam }}/{{ 'CRES' | env }}{{ workflow.DOT_OR_USCORE }}mosaic.halo{{ constants.NH4 }}.nc" - orog_dir_target_grid: "{{ workflow.FIXlam }}" - orog_files_target_grid: "{{ 'CRES' | env }}{{ workflow.DOT_OR_USCORE }}oro_data.tile{{ 
constants.TILE_RGNL }}.halo{{ constants.NH4 }}.nc" - vcoord_file_target_grid: "{{ workflow.FIXam }}/global_hyblev.l65.txt" - varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" - data_dir_input_grid: '{{ task_get_extrn_lbcs.envvars.rundir }}' - cycle_mon: !int "{{ cycle.strftime('%m') }}" + atm_files_input_grid: "{{ 'fn_atm' | env }}" + convert_atm: true cycle_day: !int "{{ cycle.strftime('%d') }}" cycle_hour: !int "{{ cycle.strftime('%H') }}" - convert_atm: true - regional: 2 - halo_bndy: !int "{{ constants.NH4 }}" + cycle_mon: !int "{{ cycle.strftime('%m') }}" + data_dir_input_grid: '{{ task_get_extrn_lbcs.envvars.rundir }}' + external_model: "FV3GFS" + fix_dir_target_grid: "{{ workflow.FIXlam }}" halo_blend: !int "{{ global.HALO_BLEND }}" + halo_bndy: !int "{{ constants.NH4 }}" input_type: "gaussian_nemsio" - external_model: "FV3GFS" + mosaic_file_target_grid: "{{ workflow.FIXlam }}/{{ 'CRES' | env }}_mosaic.halo{{ constants.NH4 }}.nc" + orog_dir_target_grid: "{{ workflow.FIXlam }}" + orog_files_target_grid: "{{ 'CRES' | env }}_oro_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH4 }}.nc" + regional: 2 + varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" + vcoord_file_target_grid: "{{ workflow.FIXam }}/global_hyblev.l65.txt" rundir: '{{ task_run_fcst.rundir}}/tmp_MAKE_LBCS' output_file_labels: - '{{ nco.NET_DEFAULT }}.t{{ cycle.hh }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.gfs_bndy.tile7.f{{ "fcst_hhh_FV3LAM" | env }}.nc' diff --git a/ush/external_model_defaults.yaml b/ush/external_model_defaults.yaml index 3a9c9f3ba..8a7e0a558 100644 --- a/ush/external_model_defaults.yaml +++ b/ush/external_model_defaults.yaml @@ -79,16 +79,18 @@ GEFS: namelist: update_values: config: - input_type: "grib2" convert_nst: False external_model: "GFS" + input_grid: "{{ 'fn_grib2' | env }}" + input_type: "grib2" task_make_lbcs: chgres_cube: namelist: update_values: config: - input_type: "grib2" external_model: "GFS" + input_grid: "{{ 'fn_grib2' | env }}" + input_type: "grib2" HRRR: task_make_ics: @@ -96,18 +98,20 @@ HRRR: namelist: update_values: config: - input_type: "grib2" convert_nst: False external_model: "HRRR" geogrid_file_input_grid: "{{ platform.FIXgsm }}/geo_em.d01.nc_HRRRX" + input_grid: "{{ 'fn_grib2' | env }}" + input_type: "grib2" tg3_from_soil: True task_make_lbcs: chgres_cube: namelist: update_values: config: - input_type: "grib2" external_model: "HRRR" + input_grid: "{{ 'fn_grib2' | env }}" + input_type: "grib2" RAP: task_make_ics: @@ -115,18 +119,20 @@ RAP: namelist: update_values: config: - input_type: "grib2" convert_nst: False external_model: "RAP" geogrid_file_input_grid: "{{ platform.FIXgsm }}/geo_em.d01.nc_RAPX" + input_grid: "{{ 'fn_grib2' | env }}" + input_type: "grib2" tg3_from_soil: True task_make_lbcs: chgres_cube: namelist: update_values: config: - input_type: "grib2" external_model: "RAP" + input_grid: "{{ 'fn_grib2' | env }}" + input_type: "grib2" NAM: task_make_ics: @@ -134,13 +140,15 @@ NAM: namelist: update_values: config: - input_type: "grib2" convert_nst: False external_model: "NAM" + input_grid: "{{ 'fn_grib2' | env }}" + input_type: "grib2" task_make_lbcs: chgres_cube: namelist: update_values: config: - input_type: "grib2" external_model: "NAM" + input_grid: "{{ 'fn_grib2' | env }}" + input_type: "grib2" diff --git a/ush/setup.py b/ush/setup.py index d419ef131..09cddb785 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -151,10 +151,10 @@ def load_config_for_setup(ushdir, default_config_path, 
user_config_path): ("task_get_extrn_lbcs", "EXTRN_MDL_NAME_LBCS", "task_make_ics")] for task, mdl_key, make_key in tasks: - external_mdl = cfg_d[task][mdl_key] + external_mdl = default_config[task]["envvars"][mdl_key] external_cfg = get_yaml_config(Path(ushdir, "external_model_defaults.yaml")).get(external_mdl, {}) del external_cfg[make_key] - update_dict(external_cfg, cfg_d) + default_config.update_from(external_cfg) # Set "Home" directory, the top-level ufs-srweather-app directory homedir = Path(__file__).parent.parent.resolve() From affa5d456d884271dbbdf024c54fafffc5281497 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Wed, 30 Oct 2024 16:30:27 +0000 Subject: [PATCH 36/47] fixes for running --- parm/wflow/coldstart.yaml | 8 ++++---- ush/config_defaults.yaml | 32 ++++++++------------------------ 2 files changed, 12 insertions(+), 28 deletions(-) diff --git a/parm/wflow/coldstart.yaml b/parm/wflow/coldstart.yaml index ad4e15dbc..c285b9c5a 100644 --- a/parm/wflow/coldstart.yaml +++ b/parm/wflow/coldstart.yaml @@ -102,7 +102,7 @@ metatask_run_ensemble: mem: '{% if global.DO_ENSEMBLE %}{%- for m in range(1, global.NUM_ENS_MEMBERS+1) -%}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' task_make_ics_mem#mem#: <<: *default_task - command: !cycstr 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} && python &SCRIPTSdir;/chgres_cube.py + command: !cycstr 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} ; &SCRIPTSdir;/chgres_cube.py -c &GLOBAL_VAR_DEFNS_FP; --cycle @Y-@m-@dT@H:@M:@S --key-path task_make_ics @@ -115,7 +115,7 @@ metatask_run_ensemble: cyclestr: value: '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' nodes: '{{ task_make_ics.chgres_cube.execution.batchargs.nodes }}:ppn={{ task_make_ics.chgres_cube.execution.batchargs.tasks_per_node }}' - walltime: '{{ task_make_ics.chgres_cube.execution.batchargs.walltime }}' + walltime: '{{ task_make_ics.chgres_cube.execution.batchargs.walltime }}' dependency: and: &make_ics_deps taskdep_get_extrn: @@ -148,7 +148,7 @@ metatask_run_ensemble: task_make_lbcs_mem#mem#: <<: *default_task - command: !cycstr 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} && python &SCRIPTSdir;/chgres_cube.py + command: !cycstr 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} ; &SCRIPTSdir;/chgres_cube.py -c &GLOBAL_VAR_DEFNS_FP; --cycle @Y-@m-@dT@H:@M:@S --key-path task_make_lbcs @@ -163,7 +163,7 @@ metatask_run_ensemble: cyclestr: value: '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' nodes: '{{ task_make_lbcs.chgres_cube.execution.batchargs.nodes }}:ppn={{ task_make_lbcs.chgres_cube.execution.batchargs.tasks_per_node }}' - walltime: '{{ task_make_lbcs.chgres_cube.execution.batchargs.walltime }}' + walltime: '{{ task_make_lbcs.chgres_cube.execution.batchargs.walltime }}' dependency: and: <<: *make_ics_deps diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 744bddb2c..1fa4e8cda 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -1706,14 +1706,6 @@ task_get_extrn_lbcs: # MAKE ICS config parameters #----------------------------- task_make_ics: - - chgres_cube: - execution: &chgres_cube_execution - batchargs: - walltime: 00:30:00 - threads: 1 - nodes: 4 - tasks_per_node: 12 #----------------------------------------------------------------------- # KMP_AFFINITY_MAKE_ICS: # Intel Thread Affinity Interface for the make_ics task. 
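The hunks on either side of this point relocate the shared execution block rather than duplicating it: task_make_ics anchors it as &chgres_cube_execution and task_make_lbcs merges it back in with <<: *chgres_cube_execution. A small sketch of how that anchor/merge pair resolves, over an assumed minimal document:

# Sketch: one execution block shared between two tasks via a YAML anchor
# and merge key; PyYAML resolves the alias on load (illustrative document).
import yaml

doc = """
task_make_ics:
  execution: &chgres_cube_execution
    batchargs:
      walltime: "00:30:00"
task_make_lbcs:
  execution:
    <<: *chgres_cube_execution
"""
cfg = yaml.safe_load(doc)
print(cfg["task_make_lbcs"]["execution"]["batchargs"]["walltime"])  # 00:30:00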
@@ -1769,15 +1761,17 @@ task_make_ics: #------------------------------------------------------------------------ input_files_metadata_path: '{{ task_get_extrn_ics.envvars.rundir }}/{{ workflow.EXTRN_MDL_VAR_DEFNS_FN }}.sh' chgres_cube: - execution: - executable: "{{ user.EXECdir }}/chgres_cube" + execution: &chgres_cube_execution + batchargs: + nodes: 4 + tasks_per_node: 12 + threads: 1 + walltime: 00:30:00 envcmds: - module use {{ user.HOMEdir }}/modulefiles - module load build_{{ user.MACHINE|lower }}_{{ workflow.COMPILER }} - mpiargs: - - "--ntasks $SLURM_CPUS_ON_NODE" + executable: "{{ user.EXECdir }}/chgres_cube" mpicmd: '{{ platform.BATCH_RUN_CMD }}' - threads: 1 namelist: update_values: config: @@ -1819,9 +1813,6 @@ task_make_ics: # MAKE LBCS config parameters #----------------------------- task_make_lbcs: - chgres_cube: - execution: - <<: *chgres_cube_execution #------------------------------------------------------------------------ # # KMP_AFFINITY_MAKE_LBCS: @@ -1847,14 +1838,7 @@ task_make_lbcs: input_files_metadata_path: '{{ task_get_extrn_lbcs.envvars.rundir }}/{{ workflow.EXTRN_MDL_VAR_DEFNS_FN }}.yaml' chgres_cube: execution: - executable: "{{ user.EXECdir }}/chgres_cube" - envcmds: - - module use {{ user.HOMEdir }}/modulefiles - - module load build_{{ user.MACHINE|lower }}_{{ workflow.COMPILER }} - mpiargs: - - "--ntasks $SLURM_CPUS_ON_NODE" - mpicmd: '{{ platform.BATCH_RUN_CMD }}' - threads: 1 + <<: *chgres_cube_execution namelist: update_values: config: From d74b41090c6bedde2947cfd22bfe7b4d50893556 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Wed, 30 Oct 2024 20:24:00 +0000 Subject: [PATCH 37/47] Additional fixes --- parm/wflow/coldstart.yaml | 30 ++++++++++++++++++++---------- scripts/chgres_cube.py | 25 ++++++++++++++++++------- ush/config_defaults.yaml | 17 ++++++++--------- ush/external_model_defaults.yaml | 16 ++++++++-------- 4 files changed, 54 insertions(+), 34 deletions(-) diff --git a/parm/wflow/coldstart.yaml b/parm/wflow/coldstart.yaml index c285b9c5a..ff1796720 100644 --- a/parm/wflow/coldstart.yaml +++ b/parm/wflow/coldstart.yaml @@ -102,11 +102,16 @@ metatask_run_ensemble: mem: '{% if global.DO_ENSEMBLE %}{%- for m in range(1, global.NUM_ENS_MEMBERS+1) -%}{{ "%03d "%m }}{%- endfor -%} {% else %}{{ "000"|string }}{% endif %}' task_make_ics_mem#mem#: <<: *default_task - command: !cycstr 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} ; &SCRIPTSdir;/chgres_cube.py - -c &GLOBAL_VAR_DEFNS_FP; - --cycle @Y-@m-@dT@H:@M:@S - --key-path task_make_ics - --mem #mem#' + command: + cyclestr: + value: 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} ; &SCRIPTSdir;/chgres_cube.py + -c &GLOBAL_VAR_DEFNS_FP; + --cycle @Y-@m-@dT@H:@M:@S + --key-path task_make_ics + --mem #mem#' + join: + cyclestr: + value: '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' envars: <<: *default_vars SLASH_ENSMEM_SUBDIR: '&SLASH_ENSMEM_SUBDIR;' @@ -148,11 +153,16 @@ metatask_run_ensemble: task_make_lbcs_mem#mem#: <<: *default_task - command: !cycstr 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} ; &SCRIPTSdir;/chgres_cube.py - -c &GLOBAL_VAR_DEFNS_FP; - --cycle @Y-@m-@dT@H:@M:@S - --key-path task_make_lbcs - --mem #mem#' + command: + cyclestr: + value: 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} ; &SCRIPTSdir;/chgres_cube.py + -c &GLOBAL_VAR_DEFNS_FP; + --cycle @Y-@m-@dT@H:@M:@S + --key-path task_make_lbcs + --mem #mem#' + join: + cyclestr: + value: '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' envars: <<: *default_vars SLASH_ENSMEM_SUBDIR: 
'&SLASH_ENSMEM_SUBDIR;' diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py index 45e88ae7c..b523ee818 100755 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -73,6 +73,7 @@ def parse_args(argv): ) return parser.parse_args(argv) + # pylint: disable-next=too-many-locals, too-many-statements def run_chgres_cube(config_file, cycle, key_path, member): """ @@ -106,6 +107,7 @@ def run_chgres_cube(config_file, cycle, key_path, member): input_type = chgres_cube_config["chgres_cube"]["namelist"]["update_values"][ "config" ].get("input_type") + varsfilepath = chgres_cube_config["input_files_metadata_path"] external_config = get_yaml_config(varsfilepath) external_config_fns = external_config["external_model_fns"] @@ -114,12 +116,15 @@ def run_chgres_cube(config_file, cycle, key_path, member): # update config for ics task, run and stage data if "task_make_ics" in key_path: if input_type == "grib2": + os.environ["fn_atm"] = "" os.environ["fn_grib2"] = external_config_fns[0] + os.environ["fn_sfc"] = "" else: os.environ["fn_atm"] = external_config_fns[0] + os.environ["fn_grib2"] = "" os.environ["fn_sfc"] = external_config_fns[1] - # reinstantiate driver + # reinstantiate driver expt_config_cp.dereference( context={ "cycle": cycle, @@ -138,9 +143,11 @@ def run_chgres_cube(config_file, cycle, key_path, member): output_dir = os.path.join(rundir.parent, "INPUT") os.makedirs(output_dir, exist_ok=True) - for i, label in enumerate(chgres_cube_config["output_file_labels"]): - input_fn = expt_config["task_get_extrn_ics"]["output_files"][i] - links[input_fn] = str(label) + for i, output_fn in enumerate( + expt_config_cp["task_make_ics"]["output_file_labels"] + ): + input_fn = expt_config_cp["task_get_extrn_ics"]["output_file_labels"][i] + links[output_fn] = str(input_fn) uwlink(target_dir=output_dir, config=links) @@ -155,12 +162,14 @@ def run_chgres_cube(config_file, cycle, key_path, member): if i < num_fhrs: print(f"group {bcgrp10} processes member {i}") if input_type == "grib2": + os.environ["fn_atm"] = "" os.environ["fn_grib2"] = external_config_fns[i] else: os.environ["fn_atm"] = external_config_fns[i] + os.environ["fn_grib2"] = "" lbc_spec_fhrs = external_config_fhrs[i] - lbc_offset_fhrs = expt_config_cp["task_get_extrn_lbcs"][ + lbc_offset_fhrs = expt_config_cp["task_get_extrn_lbcs"]["envvars"][ "EXTRN_MDL_LBCS_OFFSET_HRS" ] fcst_hhh = int(lbc_spec_fhrs) - int(lbc_offset_fhrs) @@ -186,10 +195,12 @@ def run_chgres_cube(config_file, cycle, key_path, member): output_dir = os.path.join(rundir.parent, "INPUT") os.makedirs(output_dir, exist_ok=True) - lbc_input_fn = expt_config["task_get_extrn_lbcs"]["output_file_labels"][ + lbc_input_fn = expt_config_cp["task_get_extrn_lbcs"][ + "output_file_labels" + ][0] + lbc_output_fn = expt_config_cp["task_make_lbcs"]["output_file_labels"][ 0 ] - lbc_output_fn = chgres_cube_config["output_file_labels"][0] links[lbc_output_fn] = str(lbc_input_fn) uwlink(target_dir=output_dir, config=links) diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 1fa4e8cda..1a1e1d2e2 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -1611,7 +1611,7 @@ task_get_extrn_ics: USE_USER_STAGED_EXTRN_FILES: false EXTRN_MDL_SOURCE_BASEDIR_ICS: "" EXTRN_MDL_FILES_ICS: "" - rundir: '{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}/for_ICS{{ "/mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}' + rundir: '{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}/{{ task_get_extrn_ics.envvars.EXTRN_MDL_NAME_ICS }}/for_ICS{{ "/mem%s" % ("MEMBER"|env) if 
global.DO_ENSEMBLE }}' output_file_labels: - "out.atm.tile{{{ constants.TILE_RGNL }}.nc" - "out.sfc.tile{{ constants.TILE_RGNL }}.nc" @@ -1665,7 +1665,6 @@ task_get_extrn_lbcs: LBC_SPEC_INTVL_HRS: 6 EXTRN_MDL_LBCS_OFFSET_HRS: '{{ 3 if task_get_extrn_lbcs.envvars.EXTRN_MDL_NAME_LBCS == "RAP" else 0 }}' FV3GFS_FILE_FMT_LBCS: "nemsio" - rundir: '{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}/for_LBCS{{ "/mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}' # #----------------------------------------------------------------------- # @@ -1699,6 +1698,7 @@ task_get_extrn_lbcs: USE_USER_STAGED_EXTRN_FILES: false EXTRN_MDL_SOURCE_BASEDIR_LBCS: "" EXTRN_MDL_FILES_LBCS: "" + rundir: '{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}/{{ task_get_extrn_lbcs.envvars.EXTRN_MDL_NAME_LBCS }}/for_LBCS{{ "/mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}' output_file_labels: - "gfs.bndy.nc" @@ -1759,13 +1759,12 @@ task_make_ics: FVCOM_FILE: "fvcom.nc" VCOORD_FILE: "{{ workflow.FIXam }}/global_hyblev.l65.txt" #------------------------------------------------------------------------ - input_files_metadata_path: '{{ task_get_extrn_ics.envvars.rundir }}/{{ workflow.EXTRN_MDL_VAR_DEFNS_FN }}.sh' + input_files_metadata_path: '{{ task_get_extrn_ics.envvars.rundir }}/{{ workflow.EXTRN_MDL_VAR_DEFNS_FN }}.yaml' chgres_cube: execution: &chgres_cube_execution batchargs: nodes: 4 tasks_per_node: 12 - threads: 1 walltime: 00:30:00 envcmds: - module use {{ user.HOMEdir }}/modulefiles @@ -1804,10 +1803,10 @@ task_make_ics: vgfrc_from_climo: true rundir: '{{ task_run_fcst.rundir }}/tmp_MAKE_ICS' output_file_labels: - - '{{ nco.NET_DEFAULT }}.t{{ cycle.hh }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.sfc_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH0 }}.nc' - - '{{ nco.NET_DEFAULT }}.t{{ cycle.hh }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.gfs_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH0 }}.nc' - - '{{ nco.NET_DEFAULT }}.t{{ cycle.hh }}z.gfs_ctrl.nc' - - '{{ nco.NET_DEFAULT }}.t{{ cycle.hh }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.gfs_bndy.tile{{ constants.TILE_RGNL }}.f000.nc' + - '{{ nco.NET_DEFAULT }}.t{{ cycle.strftime("%H") }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.sfc_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH0 }}.nc' + - '{{ nco.NET_DEFAULT }}.t{{ cycle.strftime("%H") }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.gfs_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH0 }}.nc' + - '{{ nco.NET_DEFAULT }}.t{{ cycle.strftime("%H") }}z.gfs_ctrl.nc' + - '{{ nco.NET_DEFAULT }}.t{{ cycle.strftime("%H") }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.gfs_bndy.tile{{ constants.TILE_RGNL }}.f000.nc' #---------------------------- # MAKE LBCS config parameters @@ -1861,7 +1860,7 @@ task_make_lbcs: vcoord_file_target_grid: "{{ workflow.FIXam }}/global_hyblev.l65.txt" rundir: '{{ task_run_fcst.rundir}}/tmp_MAKE_LBCS' output_file_labels: - - '{{ nco.NET_DEFAULT }}.t{{ cycle.hh }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.gfs_bndy.tile7.f{{ "fcst_hhh_FV3LAM" | env }}.nc' + - '{{ nco.NET_DEFAULT }}.t{{ cycle.strftime("%H") }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.gfs_bndy.tile7.f{{ "fcst_hhh_FV3LAM" | env }}.nc' #---------------------------- # IO_LAYOUT_Y FORECAST config parameters diff --git a/ush/external_model_defaults.yaml b/ush/external_model_defaults.yaml index 8a7e0a558..7d5a781dc 100644 --- a/ush/external_model_defaults.yaml +++ b/ush/external_model_defaults.yaml @@ -81,7 +81,7 @@ GEFS: config: 
convert_nst: False external_model: "GFS" - input_grid: "{{ 'fn_grib2' | env }}" + grib2_file_input_grid: "{{ 'fn_grib2' | env }}" input_type: "grib2" task_make_lbcs: chgres_cube: @@ -89,7 +89,7 @@ GEFS: update_values: config: external_model: "GFS" - input_grid: "{{ 'fn_grib2' | env }}" + grib2_file_input_grid: "{{ 'fn_grib2' | env }}" input_type: "grib2" HRRR: @@ -101,7 +101,7 @@ HRRR: convert_nst: False external_model: "HRRR" geogrid_file_input_grid: "{{ platform.FIXgsm }}/geo_em.d01.nc_HRRRX" - input_grid: "{{ 'fn_grib2' | env }}" + grib2_file_input_grid: "{{ 'fn_grib2' | env }}" input_type: "grib2" tg3_from_soil: True task_make_lbcs: @@ -110,7 +110,7 @@ HRRR: update_values: config: external_model: "HRRR" - input_grid: "{{ 'fn_grib2' | env }}" + grib2_file_input_grid: "{{ 'fn_grib2' | env }}" input_type: "grib2" RAP: @@ -122,7 +122,7 @@ RAP: convert_nst: False external_model: "RAP" geogrid_file_input_grid: "{{ platform.FIXgsm }}/geo_em.d01.nc_RAPX" - input_grid: "{{ 'fn_grib2' | env }}" + grib2_file_input_grid: "{{ 'fn_grib2' | env }}" input_type: "grib2" tg3_from_soil: True task_make_lbcs: @@ -131,7 +131,7 @@ RAP: update_values: config: external_model: "RAP" - input_grid: "{{ 'fn_grib2' | env }}" + grib2_file_input_grid: "{{ 'fn_grib2' | env }}" input_type: "grib2" NAM: @@ -142,7 +142,7 @@ NAM: config: convert_nst: False external_model: "NAM" - input_grid: "{{ 'fn_grib2' | env }}" + grib2_file_input_grid: "{{ 'fn_grib2' | env }}" input_type: "grib2" task_make_lbcs: chgres_cube: @@ -150,5 +150,5 @@ NAM: update_values: config: external_model: "NAM" - input_grid: "{{ 'fn_grib2' | env }}" + grib2_file_input_grid: "{{ 'fn_grib2' | env }}" input_type: "grib2" From 86db971b5d173a2bdf64e2a63f29ae2b205b0e69 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Wed, 30 Oct 2024 21:49:50 +0000 Subject: [PATCH 38/47] move file handling to external_model_defaults.yaml --- parm/wflow/post.yaml | 2 +- scripts/chgres_cube.py | 5 ---- ush/config_defaults.yaml | 3 --- ush/external_model_defaults.yaml | 44 ++++++++++++++++++++------------ 4 files changed, 28 insertions(+), 26 deletions(-) diff --git a/parm/wflow/post.yaml b/parm/wflow/post.yaml index 7eb7ffc0a..b528341d1 100644 --- a/parm/wflow/post.yaml +++ b/parm/wflow/post.yaml @@ -12,7 +12,7 @@ metatask_run_ens_post: maxtries: '2' command: cyclestr: - value: 'source &USHdir;/load_modules_wflow.sh hera ; &SCRIPTSdir;/upp.py + value: 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} ; &SCRIPTSdir;/upp.py -c &GLOBAL_VAR_DEFNS_FP; --cycle @Y-@m-@dT@H:@M:@S --leadtime #fhr#:00:00 diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py index b523ee818..8d46158f4 100755 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -116,12 +116,9 @@ def run_chgres_cube(config_file, cycle, key_path, member): # update config for ics task, run and stage data if "task_make_ics" in key_path: if input_type == "grib2": - os.environ["fn_atm"] = "" os.environ["fn_grib2"] = external_config_fns[0] - os.environ["fn_sfc"] = "" else: os.environ["fn_atm"] = external_config_fns[0] - os.environ["fn_grib2"] = "" os.environ["fn_sfc"] = external_config_fns[1] # reinstantiate driver @@ -162,11 +159,9 @@ def run_chgres_cube(config_file, cycle, key_path, member): if i < num_fhrs: print(f"group {bcgrp10} processes member {i}") if input_type == "grib2": - os.environ["fn_atm"] = "" os.environ["fn_grib2"] = external_config_fns[i] else: os.environ["fn_atm"] = external_config_fns[i] - os.environ["fn_grib2"] = "" lbc_spec_fhrs = external_config_fhrs[i] lbc_offset_fhrs = 
expt_config_cp["task_get_extrn_lbcs"]["envvars"][ diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 1a1e1d2e2..b679f9b06 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -1774,7 +1774,6 @@ task_make_ics: namelist: update_values: config: - atm_files_input_grid: "{{ 'fn_atm' | env }}" convert_atm: true convert_nst: true convert_sfc: true @@ -1794,7 +1793,6 @@ task_make_ics: orog_dir_target_grid: "{{ workflow.FIXlam }}" orog_files_target_grid: "{{ 'CRES' | env }}_oro_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH4 }}.nc" regional: 1 - sfc_files_input_grid: "{{ 'fn_sfc' | env }}" sotyp_from_climo: true tg3_from_soil: false varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" @@ -1841,7 +1839,6 @@ task_make_lbcs: namelist: update_values: config: - atm_files_input_grid: "{{ 'fn_atm' | env }}" convert_atm: true cycle_day: !int "{{ cycle.strftime('%d') }}" cycle_hour: !int "{{ cycle.strftime('%H') }}" diff --git a/ush/external_model_defaults.yaml b/ush/external_model_defaults.yaml index 7d5a781dc..a57d5a583 100644 --- a/ush/external_model_defaults.yaml +++ b/ush/external_model_defaults.yaml @@ -4,6 +4,8 @@ FV3GFS: namelist: update_values: config: + atm_files_input_grid: "{{ 'fn_atm' | env }}" + sfc_files_input_grid: "{{ 'fn_sfc' | env }}" tracers_input: ["spfh","clwmr","o3mr","icmr","rwmr","snmr","grle"] tracers: ["sphum","liq_wat","o3mr","ice_wat","rainwat","snowwat","graupel"] task_make_lbcs: @@ -11,30 +13,34 @@ FV3GFS: namelist: update_values: config: + atm_files_input_grid: "{{ 'fn_atm' | env }}" tracers_input: ["spfh","clwmr","o3mr","icmr","rwmr","snmr","grle"] tracers: ["sphum","liq_wat","o3mr","ice_wat","rainwat","snowwat","graupel"] GSMGFS: task_make_ics: chgres_cube: - namelist: - update_values: - config: - input_type: "gfs_gaussian_nemsio" - external_model: "GSMGFS" - convert_nst: False - tracers_input: ["spfh","clwmr","o3mr"] - tracers: ["sphum","liq_wat","o3mr"] + namelist: + update_values: + config: + atm_files_input_grid: "{{ 'fn_atm' | env }}" + external_model: "GSMGFS" + convert_nst: False + input_type: "gfs_gaussian_nemsio" + sfc_files_input_grid: "{{ 'fn_sfc' | env }}" + tracers_input: ["spfh","clwmr","o3mr"] + tracers: ["sphum","liq_wat","o3mr"] task_make_lbcs: chgres_cube: - namelist: - update_values: - config: - input_type: "gfs_gaussian_nemsio" - external_model: "GSMGFS" - convert_nst: False - tracers_input: ["spfh","clwmr","o3mr"] - tracers: ["sphum","liq_wat","o3mr"] + namelist: + update_values: + config: + atm_files_input_grid: "{{ 'fn_atm' | env }}" + convert_nst: False + external_model: "GSMGFS" + input_type: "gfs_gaussian_nemsio" + tracers_input: ["spfh","clwmr","o3mr"] + tracers: ["sphum","liq_wat","o3mr"] UFS-CASE-STUDY: task_make_ics: @@ -42,7 +48,9 @@ UFS-CASE-STUDY: namelist: update_values: config: + atm_files_input_grid: "{{ 'fn_atm' | env }}" external_model: "UFS-CASE-STUDY" + sfc_files_input_grid: "{{ 'fn_sfc' | env }}" tracers_input: ["spfh","clwmr","o3mr","icmr","rwmr","snmr","grle"] tracers: ["sphum","liq_wat","o3mr","ice_wat","rainwat","snowwat","graupel"] task_make_lbcs: @@ -50,6 +58,7 @@ UFS-CASE-STUDY: namelist: update_values: config: + atm_files_input_grid: "{{ 'fn_atm' | env }}" external_model: "UFS-CASE-STUDY" tracers_input: ["spfh","clwmr","o3mr","icmr","rwmr","snmr","grle"] tracers: ["sphum","liq_wat","o3mr","ice_wat","rainwat","snowwat","graupel"] @@ -61,14 +70,15 @@ GDAS: update_values: config: external_model: "GFS" + tg3_from_soil: True tracers_input: 
["spfh","clwmr","o3mr","icmr","rwmr","snmr","grle"] tracers: ["sphum","liq_wat","o3mr","ice_wat","rainwat","snowwat","graupel"] - tg3_from_soil: True task_make_lbcs: chgres_cube: namelist: update_values: config: + atm_files_input_grid: "{{ 'fn_atm' | env }}" external_model: "GFS" tracers_input: ["spfh","clwmr","o3mr","icmr","rwmr","snmr","grle"] tracers: ["sphum","liq_wat","o3mr","ice_wat","rainwat","snowwat","graupel"] From d9488d935fd8f3b1a842ac08fc31b7a4ff0c3403 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Thu, 31 Oct 2024 14:30:03 +0000 Subject: [PATCH 39/47] clean dereferencing, fix file errors --- scripts/chgres_cube.py | 58 ++++++++++++++++++++-------------------- ush/config_defaults.yaml | 12 ++++----- 2 files changed, 35 insertions(+), 35 deletions(-) diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py index 8d46158f4..11fe4ff11 100755 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -79,31 +79,29 @@ def run_chgres_cube(config_file, cycle, key_path, member): """ Setup and run the chgres_cube Driver. """ - - # dereference expressions during driver initialization expt_config = get_yaml_config(config_file) - CRES = expt_config["workflow"]["CRES"] - os.environ["CRES"] = CRES - os.environ["MEMBER"] = member - # Extract driver config from experiment config + # The experiment config will have {{ CRES | env }} expressions in it that need to be + # dereferenced during driver initialization + cres = expt_config["workflow"]["CRES"] + os.environ["CRES"] = cres + os.environ["MEMBER"] = member + expt_config.dereference( + context={ + "cycle": cycle, + **os.environ, + **expt_config, + } + ) chgres_cube_driver = ChgresCube( config=config_file, cycle=cycle, key_path=key_path, ) rundir = Path(chgres_cube_driver.config["rundir"]) - print(f"Will run in {rundir}") + logging.info(f"Will run in {rundir}") - # Dereference cycle for file paths - expt_config_cp = get_yaml_config(deepcopy(expt_config.data)) - expt_config_cp.dereference( - context={ - "cycle": cycle, - **expt_config_cp, - } - ) - chgres_cube_config = _walk_key_path(expt_config_cp, key_path) + chgres_cube_config = _walk_key_path(expt_config, key_path) input_type = chgres_cube_config["chgres_cube"]["namelist"]["update_values"][ "config" ].get("input_type") @@ -120,11 +118,12 @@ def run_chgres_cube(config_file, cycle, key_path, member): else: os.environ["fn_atm"] = external_config_fns[0] os.environ["fn_sfc"] = external_config_fns[1] - - # reinstantiate driver + # reinstantiate driver + expt_config_cp = get_yaml_config(deepcopy(expt_config.data)) expt_config_cp.dereference( context={ "cycle": cycle, + **os.environ, **expt_config_cp, } ) @@ -140,10 +139,10 @@ def run_chgres_cube(config_file, cycle, key_path, member): output_dir = os.path.join(rundir.parent, "INPUT") os.makedirs(output_dir, exist_ok=True) - for i, output_fn in enumerate( - expt_config_cp["task_make_ics"]["output_file_labels"] - ): - input_fn = expt_config_cp["task_get_extrn_ics"]["output_file_labels"][i] + task_get_block = _walk_key_path(expt_config_cp, {"task_get_extrn_ics"}) + task_make_block = _walk_key_path(expt_config_cp, key_path) + for i, output_fn in enumerate(task_make_block["output_file_labels"]): + input_fn = task_get_block["output_file_labels"][i] links[output_fn] = str(input_fn) uwlink(target_dir=output_dir, config=links) @@ -164,16 +163,18 @@ def run_chgres_cube(config_file, cycle, key_path, member): os.environ["fn_atm"] = external_config_fns[i] lbc_spec_fhrs = external_config_fhrs[i] - lbc_offset_fhrs = 
expt_config_cp["task_get_extrn_lbcs"]["envvars"][ + lbc_offset_fhrs = expt_config["task_get_extrn_lbcs"]["envvars"][ "EXTRN_MDL_LBCS_OFFSET_HRS" ] fcst_hhh = int(lbc_spec_fhrs) - int(lbc_offset_fhrs) os.environ["fcst_hhh_FV3LAM"] = f"{fcst_hhh:03d}" # reinstantiate driver + expt_config_cp = get_yaml_config(deepcopy(expt_config.data)) expt_config_cp.dereference( context={ "cycle": cycle, + **os.environ, **expt_config_cp, } ) @@ -187,15 +188,14 @@ def run_chgres_cube(config_file, cycle, key_path, member): # Deliver output data to a common location above the rundir. links = {} + task_get_block = _walk_key_path(expt_config_cp, {"task_get_extrn_lbcs"}) + task_make_block = _walk_key_path(expt_config_cp, key_path) + output_dir = os.path.join(rundir.parent, "INPUT") os.makedirs(output_dir, exist_ok=True) - lbc_input_fn = expt_config_cp["task_get_extrn_lbcs"][ - "output_file_labels" - ][0] - lbc_output_fn = expt_config_cp["task_make_lbcs"]["output_file_labels"][ - 0 - ] + lbc_input_fn = task_get_block["output_file_labels"][0] + lbc_output_fn = task_make_block["output_file_labels"][0] links[lbc_output_fn] = str(lbc_input_fn) uwlink(target_dir=output_dir, config=links) diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index b679f9b06..15bf72163 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -1613,7 +1613,7 @@ task_get_extrn_ics: EXTRN_MDL_FILES_ICS: "" rundir: '{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}/{{ task_get_extrn_ics.envvars.EXTRN_MDL_NAME_ICS }}/for_ICS{{ "/mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}' output_file_labels: - - "out.atm.tile{{{ constants.TILE_RGNL }}.nc" + - "out.atm.tile{{ constants.TILE_RGNL }}.nc" - "out.sfc.tile{{ constants.TILE_RGNL }}.nc" - "gfs_ctrl.nc" - "gfs.bndy.nc" @@ -1801,10 +1801,10 @@ task_make_ics: vgfrc_from_climo: true rundir: '{{ task_run_fcst.rundir }}/tmp_MAKE_ICS' output_file_labels: - - '{{ nco.NET_DEFAULT }}.t{{ cycle.strftime("%H") }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.sfc_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH0 }}.nc' - - '{{ nco.NET_DEFAULT }}.t{{ cycle.strftime("%H") }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.gfs_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH0 }}.nc' - - '{{ nco.NET_DEFAULT }}.t{{ cycle.strftime("%H") }}z.gfs_ctrl.nc' - - '{{ nco.NET_DEFAULT }}.t{{ cycle.strftime("%H") }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.gfs_bndy.tile{{ constants.TILE_RGNL }}.f000.nc' + - '{{ nco.NET_default }}.t{{ timevars.hh }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.sfc_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH0 }}.nc' + - '{{ nco.NET_default }}.t{{ timevars.hh }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.gfs_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH0 }}.nc' + - '{{ nco.NET_default }}.t{{ timevars.hh }}z.gfs_ctrl.nc' + - '{{ nco.NET_default }}.t{{ timevars.hh }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.gfs_bndy.tile{{ constants.TILE_RGNL }}.f000.nc' #---------------------------- # MAKE LBCS config parameters @@ -1857,7 +1857,7 @@ task_make_lbcs: vcoord_file_target_grid: "{{ workflow.FIXam }}/global_hyblev.l65.txt" rundir: '{{ task_run_fcst.rundir}}/tmp_MAKE_LBCS' output_file_labels: - - '{{ nco.NET_DEFAULT }}.t{{ cycle.strftime("%H") }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.gfs_bndy.tile7.f{{ "fcst_hhh_FV3LAM" | env }}.nc' + - '{{ nco.NET_default }}.t{{ timevars.hh }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.gfs_bndy.tile7.f{{ "fcst_hhh_FV3LAM" 
| env }}.nc'
 
 #----------------------------
 # IO_LAYOUT_Y FORECAST config parameters

From fb398c075baf4854c67d9ce9052290124845eecd Mon Sep 17 00:00:00 2001
From: WeirAE
Date: Thu, 31 Oct 2024 14:51:37 +0000
Subject: [PATCH 40/47] found missing file path

---
 scripts/chgres_cube.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py
index 11fe4ff11..b75ba1ddf 100755
--- a/scripts/chgres_cube.py
+++ b/scripts/chgres_cube.py
@@ -143,7 +143,7 @@ def run_chgres_cube(config_file, cycle, key_path, member):
         task_make_block = _walk_key_path(expt_config_cp, key_path)
         for i, output_fn in enumerate(task_make_block["output_file_labels"]):
             input_fn = task_get_block["output_file_labels"][i]
-            links[output_fn] = str(input_fn)
+            links[output_fn] = str(rundir / input_fn)
 
         uwlink(target_dir=output_dir, config=links)
 
@@ -196,7 +196,7 @@ def run_chgres_cube(config_file, cycle, key_path, member):
 
             lbc_input_fn = task_get_block["output_file_labels"][0]
             lbc_output_fn = task_make_block["output_file_labels"][0]
-            links[lbc_output_fn] = str(lbc_input_fn)
+            links[lbc_output_fn] = str(rundir / lbc_input_fn)
             uwlink(target_dir=output_dir, config=links)
 
     # error message

From bedeb67fb93775fae12beae6564b773916262639 Mon Sep 17 00:00:00 2001
From: "Christina.Holt"
Date: Fri, 1 Nov 2024 14:20:31 +0000
Subject: [PATCH 41/47] Updates for make_ics/make_lbcs. Fundamental tests pass.

---
 scripts/chgres_cube.py           | 162 ++++++++++++++-----------------
 ush/config_defaults.yaml         |  31 +++---
 ush/external_model_defaults.yaml |   2 +
 ush/setup.py                     |  16 +--
 4 files changed, 96 insertions(+), 115 deletions(-)

diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py
index b75ba1ddf..cc4f5b167 100755
--- a/scripts/chgres_cube.py
+++ b/scripts/chgres_cube.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 """
-The run script for chgres_cube
+The run script for chgres_cube for both initial and lateral boundary conditions.
 """
 
 import datetime as dt
@@ -13,10 +13,21 @@
 
 from uwtools.api.chgres_cube import ChgresCube
 from uwtools.api.config import get_yaml_config
-from uwtools.api.fs import link as uwlink
+from uwtools.api.fs import copy as uwcopy
 from uwtools.api.logging import use_uwtools_logger
 
 
+def _deliver_files(config, dst_dir, key_path, src_dir):
+    """
+    Deliver files defined in the config "output_file_links" section.
+    """
+    dst_dir.mkdir(exist_ok=True)
+    output_links = _walk_key_path(config, key_path + ["output_file_links"])
+    output_links = {k: str(src_dir/v) for k, v in output_links.items()}
+    if not uwcopy(target_dir=dst_dir, config=output_links):
+        logging.error("Files could not be copied to their final destination.")
+        sys.exit(1)
+
 def _walk_key_path(config, key_path):
     """
     Navigate to the sub-config at the end of the path of given keys.
@@ -86,6 +97,8 @@ def run_chgres_cube(config_file, cycle, key_path, member): cres = expt_config["workflow"]["CRES"] os.environ["CRES"] = cres os.environ["MEMBER"] = member + + # Find the names of the external model files expt_config.dereference( context={ "cycle": cycle, @@ -93,14 +106,6 @@ def run_chgres_cube(config_file, cycle, key_path, member): **expt_config, } ) - chgres_cube_driver = ChgresCube( - config=config_file, - cycle=cycle, - key_path=key_path, - ) - rundir = Path(chgres_cube_driver.config["rundir"]) - logging.info(f"Will run in {rundir}") - chgres_cube_config = _walk_key_path(expt_config, key_path) input_type = chgres_cube_config["chgres_cube"]["namelist"]["update_values"][ "config" @@ -111,98 +116,79 @@ def run_chgres_cube(config_file, cycle, key_path, member): external_config_fns = external_config["external_model_fns"] external_config_fhrs = external_config["external_model_fhrs"] - # update config for ics task, run and stage data if "task_make_ics" in key_path: if input_type == "grib2": os.environ["fn_grib2"] = external_config_fns[0] else: os.environ["fn_atm"] = external_config_fns[0] os.environ["fn_sfc"] = external_config_fns[1] - # reinstantiate driver - expt_config_cp = get_yaml_config(deepcopy(expt_config.data)) - expt_config_cp.dereference( - context={ - "cycle": cycle, - **os.environ, - **expt_config_cp, - } - ) + chgres_cube_driver = ChgresCube( - config=expt_config_cp, + config=config_file, cycle=cycle, key_path=key_path, + leadtime=dt.timedelta(hours=0), ) + rundir = Path(chgres_cube_driver.config["rundir"]) + logging.info(f"Will run in {rundir}") chgres_cube_driver.run() - # Deliver output data to a common location above the rundir. - links = {} - - output_dir = os.path.join(rundir.parent, "INPUT") - os.makedirs(output_dir, exist_ok=True) - task_get_block = _walk_key_path(expt_config_cp, {"task_get_extrn_ics"}) - task_make_block = _walk_key_path(expt_config_cp, key_path) - for i, output_fn in enumerate(task_make_block["output_file_labels"]): - input_fn = task_get_block["output_file_labels"][i] - links[output_fn] = str(rundir / input_fn) - - uwlink(target_dir=output_dir, config=links) - - # update config for lbcs task, loop run and stage data - else: - num_fhrs = len(external_config_fhrs) - - bcgrp10 = 0 - bcgrpnum10 = 1 - for ii in range(bcgrp10, num_fhrs, bcgrpnum10): - i = ii + bcgrp10 - if i < num_fhrs: - print(f"group {bcgrp10} processes member {i}") - if input_type == "grib2": - os.environ["fn_grib2"] = external_config_fns[i] - else: - os.environ["fn_atm"] = external_config_fns[i] - - lbc_spec_fhrs = external_config_fhrs[i] - lbc_offset_fhrs = expt_config["task_get_extrn_lbcs"]["envvars"][ + if not (rundir / "runscript.chgres_cube.done").is_file(): + logging.error("Error occurred running chgres_cube. 
See component error logs.") + sys.exit(1) + + # Deliver output data to the forecast's INPUT dir + delivery_dir = rundir.parent / "INPUT" + _deliver_files(config=expt_config, dst_dir=delivery_dir, key_path=key_path, src_dir=rundir) + + else: # loop over make_lbcs tasks + # This loop will need a version of the config that is not dereferenced + expt_config = get_yaml_config(config_file) + + fhrs_and_fns = list(zip(external_config_fhrs, external_config_fns)) + for external_fhr, external_fn in fhrs_and_fns: + if input_type == "grib2": + os.environ["fn_grib2"] = external_fn + else: + os.environ["fn_atm"] = external_fn + + lbc_offset_fhrs = expt_config["task_get_extrn_lbcs"]["envvars"][ "EXTRN_MDL_LBCS_OFFSET_HRS" ] - fcst_hhh = int(lbc_spec_fhrs) - int(lbc_offset_fhrs) - os.environ["fcst_hhh_FV3LAM"] = f"{fcst_hhh:03d}" - - # reinstantiate driver - expt_config_cp = get_yaml_config(deepcopy(expt_config.data)) - expt_config_cp.dereference( - context={ - "cycle": cycle, - **os.environ, - **expt_config_cp, - } - ) - chgres_cube_driver = ChgresCube( - config=expt_config_cp, - cycle=cycle, - key_path=key_path, - ) - chgres_cube_driver.run() - - # Deliver output data to a common location above the rundir. - links = {} - - task_get_block = _walk_key_path(expt_config_cp, {"task_get_extrn_lbcs"}) - task_make_block = _walk_key_path(expt_config_cp, key_path) - - output_dir = os.path.join(rundir.parent, "INPUT") - os.makedirs(output_dir, exist_ok=True) - - lbc_input_fn = task_get_block["output_file_labels"][0] - lbc_output_fn = task_make_block["output_file_labels"][0] - links[lbc_output_fn] = str(rundir / lbc_input_fn) - uwlink(target_dir=output_dir, config=links) - - # error message - if not (rundir / "runscript.chgres_cube.done").is_file(): - print("Error occurred running chgres_cube. Please see component error logs.") - sys.exit(1) + fcst_hr_lam = int(external_fhr) - int(lbc_offset_fhrs) + leadtime = dt.timedelta(hours=fcst_hr_lam) + + chgres_cube_driver = ChgresCube( + config=config_file, + cycle=cycle, + key_path=key_path, + leadtime=leadtime, + ) + rundir = Path(chgres_cube_driver.config["rundir"]) + logging.info(f"Will run in {rundir}") + chgres_cube_driver.run() + + if not (rundir / "runscript.chgres_cube.done").is_file(): + logging.error("Error occurred running chgres_cube. See component error logs.") + sys.exit(1) + + # Use a copy of the original here to avoid opening the file every time. 
+ expt_config_cp = get_yaml_config(deepcopy(expt_config.data)) + + # output files should contain cycle information relative to the start time of the + # forecast -- use cycle = cycle + # This dereferencing must be inside loop bc the fcst hour is set differently each time + expt_config_cp.dereference( + context={ + "cycle": cycle, + "leadtime": leadtime, + **os.environ, + **expt_config_cp, + } + ) + # Deliver output data to the forecast's INPUT dir + delivery_dir = rundir.parent / "INPUT" + _deliver_files(config=expt_config_cp, dst_dir=delivery_dir, key_path=key_path, src_dir=rundir) if __name__ == "__main__": diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml index 15bf72163..567faf66e 100644 --- a/ush/config_defaults.yaml +++ b/ush/config_defaults.yaml @@ -1612,11 +1612,6 @@ task_get_extrn_ics: EXTRN_MDL_SOURCE_BASEDIR_ICS: "" EXTRN_MDL_FILES_ICS: "" rundir: '{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}/{{ task_get_extrn_ics.envvars.EXTRN_MDL_NAME_ICS }}/for_ICS{{ "/mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}' - output_file_labels: - - "out.atm.tile{{ constants.TILE_RGNL }}.nc" - - "out.sfc.tile{{ constants.TILE_RGNL }}.nc" - - "gfs_ctrl.nc" - - "gfs.bndy.nc" #---------------------------- # EXTRN LBCS config parameters @@ -1699,8 +1694,6 @@ task_get_extrn_lbcs: EXTRN_MDL_SOURCE_BASEDIR_LBCS: "" EXTRN_MDL_FILES_LBCS: "" rundir: '{{ workflow.EXPTDIR }}/{{ timevars.yyyymmddhh }}/{{ task_get_extrn_lbcs.envvars.EXTRN_MDL_NAME_LBCS }}/for_LBCS{{ "/mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}' - output_file_labels: - - "gfs.bndy.nc" #---------------------------- # MAKE ICS config parameters @@ -1800,11 +1793,11 @@ task_make_ics: vgtyp_from_climo: true vgfrc_from_climo: true rundir: '{{ task_run_fcst.rundir }}/tmp_MAKE_ICS' - output_file_labels: - - '{{ nco.NET_default }}.t{{ timevars.hh }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.sfc_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH0 }}.nc' - - '{{ nco.NET_default }}.t{{ timevars.hh }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.gfs_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH0 }}.nc' - - '{{ nco.NET_default }}.t{{ timevars.hh }}z.gfs_ctrl.nc' - - '{{ nco.NET_default }}.t{{ timevars.hh }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.gfs_bndy.tile{{ constants.TILE_RGNL }}.f000.nc' + output_file_links: + '{{ nco.NET_default }}.t{{ timevars.hh }}z.sfc_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH0 }}.nc': out.sfc.tile{{ constants.TILE_RGNL }}.nc + '{{ nco.NET_default }}.t{{ timevars.hh }}z.gfs_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH0 }}.nc': out.atm.tile{{ constants.TILE_RGNL }}.nc + '{{ nco.NET_default }}.t{{ timevars.hh }}z.gfs_ctrl.nc': gfs_ctrl.nc + '{{ nco.NET_default }}.t{{ timevars.hh }}z.gfs_bndy.tile{{ constants.TILE_RGNL }}.f000.nc': gfs.bndy.nc #---------------------------- # MAKE LBCS config parameters @@ -1840,24 +1833,24 @@ task_make_lbcs: update_values: config: convert_atm: true - cycle_day: !int "{{ cycle.strftime('%d') }}" - cycle_hour: !int "{{ cycle.strftime('%H') }}" - cycle_mon: !int "{{ cycle.strftime('%m') }}" + cycle_day: !int "{{ (cycle + leadtime).strftime('%d') }}" + cycle_hour: !int "{{ (cycle + leadtime).strftime('%H') }}" + cycle_mon: !int "{{ (cycle + leadtime).strftime('%m') }}" data_dir_input_grid: '{{ task_get_extrn_lbcs.envvars.rundir }}' external_model: "FV3GFS" fix_dir_target_grid: "{{ workflow.FIXlam }}" halo_blend: !int "{{ global.HALO_BLEND }}" halo_bndy: !int "{{ constants.NH4 }}" - input_type: "gaussian_nemsio" 
+ input_type: "gaussian_nemsio" mosaic_file_target_grid: "{{ workflow.FIXlam }}/{{ 'CRES' | env }}_mosaic.halo{{ constants.NH4 }}.nc" orog_dir_target_grid: "{{ workflow.FIXlam }}" orog_files_target_grid: "{{ 'CRES' | env }}_oro_data.tile{{ constants.TILE_RGNL }}.halo{{ constants.NH4 }}.nc" regional: 2 varmap_file: "{{ user.PARMdir }}/ufs_utils/varmap_tables/GFSphys_var_map.txt" vcoord_file_target_grid: "{{ workflow.FIXam }}/global_hyblev.l65.txt" - rundir: '{{ task_run_fcst.rundir}}/tmp_MAKE_LBCS' - output_file_labels: - - '{{ nco.NET_default }}.t{{ timevars.hh }}z{{ ".mem%s" % ("MEMBER"|env) if global.DO_ENSEMBLE }}.gfs_bndy.tile7.f{{ "fcst_hhh_FV3LAM" | env }}.nc' + rundir: '{{ task_run_fcst.rundir}}/tmp_MAKE_LBCS_{{ timevars.yyyymmddhh }}' + output_file_links: + '{{ nco.NET_default }}.t{{ timevars.hh }}z.gfs_bndy.tile7.f{{ "%03d" % (leadtime.total_seconds() // 3600) }}.nc': gfs.bndy.nc #---------------------------- # IO_LAYOUT_Y FORECAST config parameters diff --git a/ush/external_model_defaults.yaml b/ush/external_model_defaults.yaml index a57d5a583..b971c06da 100644 --- a/ush/external_model_defaults.yaml +++ b/ush/external_model_defaults.yaml @@ -113,6 +113,7 @@ HRRR: geogrid_file_input_grid: "{{ platform.FIXgsm }}/geo_em.d01.nc_HRRRX" grib2_file_input_grid: "{{ 'fn_grib2' | env }}" input_type: "grib2" + nsoill_out: !int "{{ 9 if workflow.SDF_USES_RUC_LSM else 4 }}" tg3_from_soil: True task_make_lbcs: chgres_cube: @@ -134,6 +135,7 @@ RAP: geogrid_file_input_grid: "{{ platform.FIXgsm }}/geo_em.d01.nc_RAPX" grib2_file_input_grid: "{{ 'fn_grib2' | env }}" input_type: "grib2" + nsoill_out: !int "{{ 9 if workflow.SDF_USES_RUC_LSM else 4 }}" tg3_from_soil: True task_make_lbcs: chgres_cube: diff --git a/ush/setup.py b/ush/setup.py index 09cddb785..d80b654a3 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -147,14 +147,14 @@ def load_config_for_setup(ushdir, default_config_path, user_config_path): default_config.update_from(ccpp_config) # Load external model-specific settings - tasks = [("task_get_extrn_ics", "EXTRN_MDL_NAME_ICS", "task_make_lbcs"), - ("task_get_extrn_lbcs", "EXTRN_MDL_NAME_LBCS", "task_make_ics")] - - for task, mdl_key, make_key in tasks: - external_mdl = default_config[task]["envvars"][mdl_key] - external_cfg = get_yaml_config(Path(ushdir, "external_model_defaults.yaml")).get(external_mdl, {}) - del external_cfg[make_key] - default_config.update_from(external_cfg) + external_cfg = get_yaml_config(ushdir / "external_model_defaults.yaml") + for bcs in ("ics", "lbcs"): + get_task_config = default_config[f"task_get_extrn_{bcs}"] + external_model = get_task_config["envvars"][f"EXTRN_MDL_NAME_{bcs.upper()}"] + bcs_task = f"task_make_{bcs}" + default_config.update_from( + {bcs_task: external_cfg.get(external_model, {}).get(bcs_task, {}) } + ) # Set "Home" directory, the top-level ufs-srweather-app directory homedir = Path(__file__).parent.parent.resolve() From 9da027b12621fbcea7bd6ec814bf1982c80a75bb Mon Sep 17 00:00:00 2001 From: Brian Weir <94982354+WeirAE@users.noreply.github.com> Date: Fri, 1 Nov 2024 11:24:37 -0500 Subject: [PATCH 42/47] Apply suggestions from code review Co-authored-by: Paul Madden <136389411+maddenp-noaa@users.noreply.github.com> --- parm/wflow/coldstart.yaml | 4 ++-- scripts/chgres_cube.py | 22 +++++++--------------- 2 files changed, 9 insertions(+), 17 deletions(-) diff --git a/parm/wflow/coldstart.yaml b/parm/wflow/coldstart.yaml index ff1796720..7620cbf10 100644 --- a/parm/wflow/coldstart.yaml +++ b/parm/wflow/coldstart.yaml @@ -120,7 +120,7 @@ 
metatask_run_ensemble: cyclestr: value: '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' nodes: '{{ task_make_ics.chgres_cube.execution.batchargs.nodes }}:ppn={{ task_make_ics.chgres_cube.execution.batchargs.tasks_per_node }}' - walltime: '{{ task_make_ics.chgres_cube.execution.batchargs.walltime }}' + walltime: '{{ task_make_ics.chgres_cube.execution.batchargs.walltime }}' dependency: and: &make_ics_deps taskdep_get_extrn: @@ -173,7 +173,7 @@ metatask_run_ensemble: cyclestr: value: '&LOGDIR;/{{ jobname }}_@Y@m@d@H&LOGEXT;' nodes: '{{ task_make_lbcs.chgres_cube.execution.batchargs.nodes }}:ppn={{ task_make_lbcs.chgres_cube.execution.batchargs.tasks_per_node }}' - walltime: '{{ task_make_lbcs.chgres_cube.execution.batchargs.walltime }}' + walltime: '{{ task_make_lbcs.chgres_cube.execution.batchargs.walltime }}' dependency: and: <<: *make_ics_deps diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py index cc4f5b167..9bdc38a29 100755 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -94,11 +94,10 @@ def run_chgres_cube(config_file, cycle, key_path, member): # The experiment config will have {{ CRES | env }} expressions in it that need to be # dereferenced during driver initialization - cres = expt_config["workflow"]["CRES"] - os.environ["CRES"] = cres + os.environ["CRES"] = expt_config["workflow"]["CRES"] os.environ["MEMBER"] = member - # Find the names of the external model files + # Render names of external model files in experiment config. expt_config.dereference( context={ "cycle": cycle, @@ -107,9 +106,7 @@ def run_chgres_cube(config_file, cycle, key_path, member): } ) chgres_cube_config = _walk_key_path(expt_config, key_path) - input_type = chgres_cube_config["chgres_cube"]["namelist"]["update_values"][ - "config" - ].get("input_type") + grib2_input = _walk_key_path(chgres_cube_config, ["chgres_cube", "namelist", "update_values", "config"]).get("input_type") == "grib2" varsfilepath = chgres_cube_config["input_files_metadata_path"] external_config = get_yaml_config(varsfilepath) @@ -117,7 +114,7 @@ def run_chgres_cube(config_file, cycle, key_path, member): external_config_fhrs = external_config["external_model_fhrs"] if "task_make_ics" in key_path: - if input_type == "grib2": + if grib2_input: os.environ["fn_grib2"] = external_config_fns[0] else: os.environ["fn_atm"] = external_config_fns[0] @@ -147,14 +144,9 @@ def run_chgres_cube(config_file, cycle, key_path, member): fhrs_and_fns = list(zip(external_config_fhrs, external_config_fns)) for external_fhr, external_fn in fhrs_and_fns: - if input_type == "grib2": - os.environ["fn_grib2"] = external_fn - else: - os.environ["fn_atm"] = external_fn - - lbc_offset_fhrs = expt_config["task_get_extrn_lbcs"]["envvars"][ - "EXTRN_MDL_LBCS_OFFSET_HRS" - ] + os.environ["fn_grib2" if grib2_input else "fn_atm"] = external_fn + + lbc_offset_fhrs = _walk_key_path(expt_config, ["task_get_extrn_lbcs", "envvars", "EXTRN_MDL_LBCS_OFFSET_HRS"]) fcst_hr_lam = int(external_fhr) - int(lbc_offset_fhrs) leadtime = dt.timedelta(hours=fcst_hr_lam) From 43fedf3a5d9a71eaf1474aad62a6bff0ecd15fb7 Mon Sep 17 00:00:00 2001 From: WeirAE Date: Fri, 1 Nov 2024 16:48:04 +0000 Subject: [PATCH 43/47] Additional changes from comments --- parm/wflow/coldstart.yaml | 4 +-- scripts/chgres_cube.py | 64 +++++++++++++++++++++++++-------------- 2 files changed, 44 insertions(+), 24 deletions(-) diff --git a/parm/wflow/coldstart.yaml b/parm/wflow/coldstart.yaml index 7620cbf10..99ed68966 100644 --- a/parm/wflow/coldstart.yaml +++ b/parm/wflow/coldstart.yaml @@ -104,7 +104,7 
@@ metatask_run_ensemble:
       <<: *default_task
       command:
         cyclestr:
-          value: 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} ; &SCRIPTSdir;/chgres_cube.py
+          value: 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} && &SCRIPTSdir;/chgres_cube.py
             -c &GLOBAL_VAR_DEFNS_FP;
             --cycle @Y-@m-@dT@H:@M:@S
             --key-path task_make_ics
@@ -155,7 +155,7 @@ metatask_run_ensemble:
       <<: *default_task
       command:
         cyclestr:
-          value: 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} ; &SCRIPTSdir;/chgres_cube.py
+          value: 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} && &SCRIPTSdir;/chgres_cube.py
             -c &GLOBAL_VAR_DEFNS_FP;
             --cycle @Y-@m-@dT@H:@M:@S
             --key-path task_make_lbcs
diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py
index 9bdc38a29..28a209855 100755
--- a/scripts/chgres_cube.py
+++ b/scripts/chgres_cube.py
@@ -17,17 +17,18 @@
 from uwtools.api.logging import use_uwtools_logger
 
 
-def _deliver_files(config, dst_dir, key_path, src_dir): 
+def _deliver_files(config, dst_dir, key_path, src_dir):
     """
     Deliver files defined in the config "output_file_links" section.
     """
     dst_dir.mkdir(exist_ok=True)
     output_links = _walk_key_path(config, key_path + ["output_file_links"])
-    output_links = {k: str(src_dir/v) for k, v in output_links.items()}
+    output_links = {k: str(src_dir / v) for k, v in output_links.items()}
     if not uwcopy(target_dir=dst_dir, config=output_links):
         logging.error("Files could not be copied to their final destination.")
         sys.exit(1)
 
+
 def _walk_key_path(config, key_path):
     """
     Navigate to the sub-config at the end of the path of given keys.
@@ -93,22 +94,27 @@ def run_chgres_cube(config_file, cycle, key_path, member):
     expt_config = get_yaml_config(config_file)
 
     # The experiment config will have {{ CRES | env }} expressions in it that need to be
-    # dereferenced during driver initialization
+    # dereferenced during driver initialization.
     os.environ["CRES"] = expt_config["workflow"]["CRES"]
     os.environ["MEMBER"] = member
 
     # Render names of external model files in experiment config.
     expt_config.dereference(
         context={
-            "cycle": cycle,
-            **os.environ,
             **expt_config,
+            **os.environ,
+            "cycle": cycle,
         }
     )
-    chgres_cube_config = _walk_key_path(expt_config, key_path)
-    grib2_input = _walk_key_path(chgres_cube_config, ["chgres_cube", "namelist", "update_values", "config"]).get("input_type") == "grib2"
+    task_config = _walk_key_path(expt_config, key_path)
+    grib2_input = (
+        _walk_key_path(
+            task_config, ["chgres_cube", "namelist", "update_values", "config"]
+        ).get("input_type")
+        == "grib2"
+    )
 
-    varsfilepath = chgres_cube_config["input_files_metadata_path"]
+    varsfilepath = task_config["input_files_metadata_path"]
     external_config = get_yaml_config(varsfilepath)
     external_config_fns = external_config["external_model_fns"]
     external_config_fhrs = external_config["external_model_fhrs"]
@@ -131,22 +137,29 @@ def run_chgres_cube(config_file, cycle, key_path, member):
     chgres_cube_driver.run()
 
     if not (rundir / "runscript.chgres_cube.done").is_file():
-        logging.error("Error occurred running chgres_cube. See component error logs.")
+        logging.error(
+            "Error occurred running chgres_cube. See component error logs."
+        )
         sys.exit(1)
 
-    # Deliver output data to the forecast's INPUT dir
+    # Deliver output data to the forecast's INPUT dir.
delivery_dir = rundir.parent / "INPUT" - _deliver_files(config=expt_config, dst_dir=delivery_dir, key_path=key_path, src_dir=rundir) + _deliver_files( + config=expt_config, dst_dir=delivery_dir, key_path=key_path, src_dir=rundir + ) - else: # loop over make_lbcs tasks - # This loop will need a version of the config that is not dereferenced + else: # Loop over make_lbcs tasks. + # This loop will need a version of the config that is not dereferenced. expt_config = get_yaml_config(config_file) fhrs_and_fns = list(zip(external_config_fhrs, external_config_fns)) for external_fhr, external_fn in fhrs_and_fns: os.environ["fn_grib2" if grib2_input else "fn_atm"] = external_fn - lbc_offset_fhrs = _walk_key_path(expt_config, ["task_get_extrn_lbcs", "envvars", "EXTRN_MDL_LBCS_OFFSET_HRS"]) + lbc_offset_fhrs = _walk_key_path( + expt_config, + ["task_get_extrn_lbcs", "envvars", "EXTRN_MDL_LBCS_OFFSET_HRS"], + ) fcst_hr_lam = int(external_fhr) - int(lbc_offset_fhrs) leadtime = dt.timedelta(hours=fcst_hr_lam) @@ -161,26 +174,33 @@ def run_chgres_cube(config_file, cycle, key_path, member): chgres_cube_driver.run() if not (rundir / "runscript.chgres_cube.done").is_file(): - logging.error("Error occurred running chgres_cube. See component error logs.") + logging.error( + "Error occurred running chgres_cube. See component error logs." + ) sys.exit(1) # Use a copy of the original here to avoid opening the file every time. expt_config_cp = get_yaml_config(deepcopy(expt_config.data)) - # output files should contain cycle information relative to the start time of the - # forecast -- use cycle = cycle - # This dereferencing must be inside loop bc the fcst hour is set differently each time + # Output files should contain cycle information relative to the start time of the + # forecast -- use cycle = cycle. + # This dereferencing must be inside loop bc the fcst hour is set differently each time. expt_config_cp.dereference( context={ + **expt_config_cp, + **os.environ, "cycle": cycle, "leadtime": leadtime, - **os.environ, - **expt_config_cp, } ) - # Deliver output data to the forecast's INPUT dir + # Deliver output data to the forecast's INPUT dir. delivery_dir = rundir.parent / "INPUT" - _deliver_files(config=expt_config_cp, dst_dir=delivery_dir, key_path=key_path, src_dir=rundir) + _deliver_files( + config=expt_config_cp, + dst_dir=delivery_dir, + key_path=key_path, + src_dir=rundir, + ) if __name__ == "__main__": From 4e55694d973f29cd8d3026f5c38be6f9da6cd71e Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Fri, 1 Nov 2024 22:27:29 +0000 Subject: [PATCH 44/47] WIP. 
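
Rework the make_ics/make_lbcs script around per-leadtime driver runs:
factor the external-model metadata lookup into _get_external_fns(), the
grib2 test into _is_grib2(), and the instantiate/run/done-file check into
a shared run_driver() helper used by the ICS branch and by each LBCS
boundary hour. Also switches the coldstart.yaml command separator back
from '&&' to ';'.

A minimal sketch of the intended call pattern. run_driver() and
ChgresCube are the names introduced or used in this patch; the config
path, cycle, boundary hours, and offset below are invented for
illustration only:

    import datetime as dt
    from uwtools.api.chgres_cube import ChgresCube

    config_file = "var_defns.yaml"  # hypothetical experiment config path
    cycle = dt.datetime.fromisoformat("2024-07-15T18")  # example cycle
    # ICS: a single run at lead time zero.
    run_driver(ChgresCube, config_file, cycle, ["task_make_ics"],
               leadtime=dt.timedelta(hours=0))
    # LBCS: one run per external forecast hour, minus the LBCS offset.
    for fhr in (6, 9, 12):  # example hours; offset of 6 assumed here
        run_driver(ChgresCube, config_file, cycle, ["task_make_lbcs"],
                   leadtime=dt.timedelta(hours=fhr - 6))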
---
 parm/wflow/coldstart.yaml |   4 +-
 scripts/chgres_cube.py    | 141 ++++++++++++++++++-------------------
 2 files changed, 74 insertions(+), 71 deletions(-)

diff --git a/parm/wflow/coldstart.yaml b/parm/wflow/coldstart.yaml
index 99ed68966..7620cbf10 100644
--- a/parm/wflow/coldstart.yaml
+++ b/parm/wflow/coldstart.yaml
@@ -104,7 +104,7 @@ metatask_run_ensemble:
       <<: *default_task
       command:
         cyclestr:
-          value: 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} && &SCRIPTSdir;/chgres_cube.py
+          value: 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} ; &SCRIPTSdir;/chgres_cube.py
             -c &GLOBAL_VAR_DEFNS_FP;
             --cycle @Y-@m-@dT@H:@M:@S
             --key-path task_make_ics
@@ -155,7 +155,7 @@ metatask_run_ensemble:
       <<: *default_task
       command:
         cyclestr:
-          value: 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} && &SCRIPTSdir;/chgres_cube.py
+          value: 'source &USHdir;/load_modules_wflow.sh {{ user.MACHINE }} ; &SCRIPTSdir;/chgres_cube.py
            -c &GLOBAL_VAR_DEFNS_FP;
            --cycle @Y-@m-@dT@H:@M:@S
            --key-path task_make_lbcs
diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py
index 28a209855..53b424073 100755
--- a/scripts/chgres_cube.py
+++ b/scripts/chgres_cube.py
@@ -28,6 +28,37 @@ def _deliver_files(config, dst_dir, key_path, src_dir):
         logging.error("Files could not be copied to their final destination.")
         sys.exit(1)
 
+def _get_external_fns(config, cycle, key_path):
+    """
+    Return external model file names and forecast hours for the given task in the experiment.
+
+    They come from the metadata file written by the prior data retrieval task.
+    """
+    config_cp = get_yaml_config(deepcopy(config.data))
+    config_cp.dereference(
+        context={
+            **config_cp,
+            **os.environ,
+            "cycle": cycle,
+        }
+    )
+    varsfilepath = _walk_key_path(
+        config_cp,
+        key_path + ["input_files_metadata_path"],
+    )
+    external_config = get_yaml_config(varsfilepath)
+    external_config_fns = external_config["external_model_fns"]
+    external_config_fhrs = external_config["external_model_fhrs"]
+    return external_config_fhrs, external_config_fns
+
+def _is_grib2(config, key_path):
+    """
+    Is the input in grib2 format?
+    """
+    return _walk_key_path(
+        config,
+        key_path + ["chgres_cube", "namelist", "update_values", "config"],
+    ).get("input_type") == "grib2"
 
 def _walk_key_path(config, key_path):
     """
@@ -55,7 +86,7 @@ def parse_args(argv):
     Parse arguments for the script.
     """
     parser = ArgumentParser(
-        description="Script that runs chgres_cube via uwtools API",
+        description="Script that runs chgres_cube via uwtools API for SRW",
     )
     parser.add_argument(
         "-c",
@@ -67,13 +98,13 @@ def parse_args(argv):
     )
     parser.add_argument(
         "--cycle",
-        help="The cycle in ISO8601 format (e.g. 2024-07-15T18)",
+        help="The cycle in ISO8601 format (e.g. 2024-07-15T18).",
         required=True,
         type=dt.datetime.fromisoformat,
     )
     parser.add_argument(
         "--key-path",
-        help="Dot-separated path of keys leading through the config to the driver's YAML block",
+        help="Dot-separated path of keys leading through the config to the driver's YAML block.",
         metavar="KEY[.KEY...]",
         required=True,
         type=lambda s: s.split("."),
@@ -86,61 +117,28 @@ def parse_args(argv):
     return parser.parse_args(argv)
 
 
-# pylint: disable-next=too-many-locals, too-many-statements
 def run_chgres_cube(config_file, cycle, key_path, member):
     """
-    Setup and run the chgres_cube Driver.
+    Setup and run the chgres_cube UW Driver.
""" expt_config = get_yaml_config(config_file) - # The experiment config will have {{ CRES | env }} expressions in it that need to be + # The experiment config will have {{ 'CRES' | env }} expressions in it that need to be # dereferenced during driver initialization. os.environ["CRES"] = expt_config["workflow"]["CRES"] os.environ["MEMBER"] = member - # Render names of external model files in experiment config. - expt_config.dereference( - context={ - **expt_config, - **os.environ, - "cycle": cycle, - } - ) - task_config = _walk_key_path(expt_config, key_path) - grib2_input = ( - _walk_key_path( - task_config, ["chgres_cube", "namelist", "update_values", "config"] - ).get("input_type") - == "grib2" - ) - - varsfilepath = task_config["input_files_metadata_path"] - external_config = get_yaml_config(varsfilepath) - external_config_fns = external_config["external_model_fns"] - external_config_fhrs = external_config["external_model_fhrs"] - + ext_fhrs, ext_fns = _get_external_fns(expt_config, cycle, key_path) + grib2_input = _is_grib2(expt_config, key_path) if "task_make_ics" in key_path: if grib2_input: - os.environ["fn_grib2"] = external_config_fns[0] + os.environ["fn_grib2"] = ext_fns[0] else: - os.environ["fn_atm"] = external_config_fns[0] - os.environ["fn_sfc"] = external_config_fns[1] - - chgres_cube_driver = ChgresCube( - config=config_file, - cycle=cycle, - key_path=key_path, - leadtime=dt.timedelta(hours=0), - ) - rundir = Path(chgres_cube_driver.config["rundir"]) - logging.info(f"Will run in {rundir}") - chgres_cube_driver.run() + os.environ["fn_atm"] = ext_fns[0] + os.environ["fn_sfc"] = ext_fns[1] - if not (rundir / "runscript.chgres_cube.done").is_file(): - logging.error( - "Error occurred running chgres_cube. See component error logs." - ) - sys.exit(1) + driver = run_driver(ChgresCube, config_file, cycle, key_path, leadtime=dt.timedelta(hours=0)) + rundir = Path(driver.config["rundir"]) # Deliver output data to the forecast's INPUT dir. delivery_dir = rundir.parent / "INPUT" @@ -151,39 +149,21 @@ def run_chgres_cube(config_file, cycle, key_path, member): else: # Loop over make_lbcs tasks. # This loop will need a version of the config that is not dereferenced. expt_config = get_yaml_config(config_file) - - fhrs_and_fns = list(zip(external_config_fhrs, external_config_fns)) - for external_fhr, external_fn in fhrs_and_fns: + for external_fhr, external_fn in list(zip(ext_fhrs, ext_fns)): os.environ["fn_grib2" if grib2_input else "fn_atm"] = external_fn + # Determine lead time and run the driver lbc_offset_fhrs = _walk_key_path( expt_config, - ["task_get_extrn_lbcs", "envvars", "EXTRN_MDL_LBCS_OFFSET_HRS"], + key_path + ["envvars", "EXTRN_MDL_LBCS_OFFSET_HRS"], ) - fcst_hr_lam = int(external_fhr) - int(lbc_offset_fhrs) - leadtime = dt.timedelta(hours=fcst_hr_lam) - - chgres_cube_driver = ChgresCube( - config=config_file, - cycle=cycle, - key_path=key_path, - leadtime=leadtime, - ) - rundir = Path(chgres_cube_driver.config["rundir"]) - logging.info(f"Will run in {rundir}") - chgres_cube_driver.run() - - if not (rundir / "runscript.chgres_cube.done").is_file(): - logging.error( - "Error occurred running chgres_cube. See component error logs." - ) - sys.exit(1) + leadtime = dt.timedelta(hours=int(external_fhr) - int(lbc_offset_fhrs)) + run_driver(ChgresCube, config_file, cycle, key_path, leadtime=leadtime) + rundir = Path(driver.config["rundir"]) # Use a copy of the original here to avoid opening the file every time. 
expt_config_cp = get_yaml_config(deepcopy(expt_config.data)) - # Output files should contain cycle information relative to the start time of the - # forecast -- use cycle = cycle. # This dereferencing must be inside loop bc the fcst hour is set differently each time. expt_config_cp.dereference( context={ @@ -202,6 +182,29 @@ def run_chgres_cube(config_file, cycle, key_path, member): src_dir=rundir, ) +def run_driver(driver_obj, config_file, cycle, key_path, leadtime): + """ + Initialize and run the provided UW driver. + + Return the configured object. + """ + driver = driver_obj( + config=config_file, + cycle=cycle, + key_path=key_path, + leadtime=leadtime, + ) + rundir = Path(driver_obj.config["rundir"]) + logging.info(f"Will run {driver.driver_name()} in {rundir}") + driver_obj.run() + + if not (rundir / f"runscript.{driver.driver_name()}.done").is_file(): + logging.error( + f"Error occurred running {driver.driver_name()}. Please see component error logs." + ) + sys.exit(1) + return driver + if __name__ == "__main__": From ef9f299f888b84dcb45600393ba0bf8cf853d9e7 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Fri, 1 Nov 2024 22:28:05 +0000 Subject: [PATCH 45/47] Run black. --- scripts/chgres_cube.py | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py index 53b424073..9074c33b8 100755 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -28,6 +28,7 @@ def _deliver_files(config, dst_dir, key_path, src_dir): logging.error("Files could not be copied to their final destination.") sys.exit(1) + def _get_external_fns(config, cycle, key_path): """ Return external model file names and forecast hours for the given task in the experiment. @@ -45,20 +46,25 @@ def _get_external_fns(config, cycle, key_path): varsfilepath = _walk_key_path( config_cp, key_path + ["input_files_metadata_path"], - ) + ) external_config = get_yaml_config(varsfilepath) external_config_fns = external_config["external_model_fns"] external_config_fhrs = external_config["external_model_fhrs"] return external_config_fhrs, external_config_fns + def _is_grib2(config, key_path): """ Is the input in grib2 format? """ - return _walk_key_path( - config, - key_path + ["chgres_cube", "namelist", "update_values", "config"], - ).get("input_type") == "grib2" + return ( + _walk_key_path( + config, + key_path + ["chgres_cube", "namelist", "update_values", "config"], + ).get("input_type") + == "grib2" + ) + def _walk_key_path(config, key_path): """ @@ -137,7 +143,9 @@ def run_chgres_cube(config_file, cycle, key_path, member): os.environ["fn_atm"] = ext_fns[0] os.environ["fn_sfc"] = ext_fns[1] - driver = run_driver(ChgresCube, config_file, cycle, key_path, leadtime=dt.timedelta(hours=0)) + driver = run_driver( + ChgresCube, config_file, cycle, key_path, leadtime=dt.timedelta(hours=0) + ) rundir = Path(driver.config["rundir"]) # Deliver output data to the forecast's INPUT dir. @@ -182,6 +190,7 @@ def run_chgres_cube(config_file, cycle, key_path, member): src_dir=rundir, ) + def run_driver(driver_obj, config_file, cycle, key_path, leadtime): """ Initialize and run the provided UW driver. From da4b988b70e3f5901562835ba00d4b399ad3534a Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Mon, 4 Nov 2024 16:44:41 +0000 Subject: [PATCH 46/47] Updates to scripts. 
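
Fix three issues left over from the WIP refactor: look up
input_files_metadata_path by indexing the task block returned by
_walk_key_path(), dereference a copy of the experiment config before
delivering ICS output, and keep the driver instance returned by
run_driver() so its rundir is available for LBCS delivery. Inside
run_driver(), use the instantiated driver rather than the driver_obj
class for config access and run().

A toy illustration of that last fix, using a made-up stand-in class
rather than a real uwtools driver: config and run() live on the
instance, not on the class object passed in.

    class FakeDriver:  # hypothetical stand-in for a UW driver class
        def __init__(self, **kwargs):
            self.config = {"rundir": "/tmp/run"}
        def run(self):
            print("running")

    driver_obj = FakeDriver            # the class, as passed to run_driver()
    driver = driver_obj()              # the instance run_driver() constructs
    print(driver.config["rundir"])     # works: attribute on the instance
    driver.run()                       # works: bound method on the instance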
--- scripts/chgres_cube.py | 24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py index 9074c33b8..d438a510a 100755 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -43,10 +43,7 @@ def _get_external_fns(config, cycle, key_path): "cycle": cycle, } ) - varsfilepath = _walk_key_path( - config_cp, - key_path + ["input_files_metadata_path"], - ) + varsfilepath = _walk_key_path(config_cp, key_path)["input_files_metadata_path"] external_config = get_yaml_config(varsfilepath) external_config_fns = external_config["external_model_fns"] external_config_fhrs = external_config["external_model_fhrs"] @@ -150,8 +147,16 @@ def run_chgres_cube(config_file, cycle, key_path, member): # Deliver output data to the forecast's INPUT dir. delivery_dir = rundir.parent / "INPUT" + expt_config_cp = get_yaml_config(deepcopy(expt_config.data)) + expt_config_cp.dereference( + context={ + **expt_config_cp, + **os.environ, + "cycle": cycle, + } + ) _deliver_files( - config=expt_config, dst_dir=delivery_dir, key_path=key_path, src_dir=rundir + config=expt_config_cp, dst_dir=delivery_dir, key_path=key_path, src_dir=rundir ) else: # Loop over make_lbcs tasks. @@ -163,10 +168,9 @@ def run_chgres_cube(config_file, cycle, key_path, member): # Determine lead time and run the driver lbc_offset_fhrs = _walk_key_path( expt_config, - key_path + ["envvars", "EXTRN_MDL_LBCS_OFFSET_HRS"], - ) + ["task_get_extrn_lbcs", "envvars"])["EXTRN_MDL_LBCS_OFFSET_HRS"] leadtime = dt.timedelta(hours=int(external_fhr) - int(lbc_offset_fhrs)) - run_driver(ChgresCube, config_file, cycle, key_path, leadtime=leadtime) + driver = run_driver(ChgresCube, config_file, cycle, key_path, leadtime=leadtime) rundir = Path(driver.config["rundir"]) # Use a copy of the original here to avoid opening the file every time. @@ -203,9 +207,9 @@ def run_driver(driver_obj, config_file, cycle, key_path, leadtime): key_path=key_path, leadtime=leadtime, ) - rundir = Path(driver_obj.config["rundir"]) + rundir = Path(driver.config["rundir"]) logging.info(f"Will run {driver.driver_name()} in {rundir}") - driver_obj.run() + driver.run() if not (rundir / f"runscript.{driver.driver_name()}.done").is_file(): logging.error( From e86eb94c0723cc35c8be6283726fcebb3afbf204 Mon Sep 17 00:00:00 2001 From: "Christina.Holt" Date: Mon, 4 Nov 2024 16:45:43 +0000 Subject: [PATCH 47/47] Apply black. --- scripts/chgres_cube.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/scripts/chgres_cube.py b/scripts/chgres_cube.py index d438a510a..4053b33d3 100755 --- a/scripts/chgres_cube.py +++ b/scripts/chgres_cube.py @@ -156,7 +156,10 @@ def run_chgres_cube(config_file, cycle, key_path, member): } ) _deliver_files( - config=expt_config_cp, dst_dir=delivery_dir, key_path=key_path, src_dir=rundir + config=expt_config_cp, + dst_dir=delivery_dir, + key_path=key_path, + src_dir=rundir, ) else: # Loop over make_lbcs tasks. 
@@ -167,10 +170,12 @@ def run_chgres_cube(config_file, cycle, key_path, member): # Determine lead time and run the driver lbc_offset_fhrs = _walk_key_path( - expt_config, - ["task_get_extrn_lbcs", "envvars"])["EXTRN_MDL_LBCS_OFFSET_HRS"] + expt_config, ["task_get_extrn_lbcs", "envvars"] + )["EXTRN_MDL_LBCS_OFFSET_HRS"] leadtime = dt.timedelta(hours=int(external_fhr) - int(lbc_offset_fhrs)) - driver = run_driver(ChgresCube, config_file, cycle, key_path, leadtime=leadtime) + driver = run_driver( + ChgresCube, config_file, cycle, key_path, leadtime=leadtime + ) rundir = Path(driver.config["rundir"]) # Use a copy of the original here to avoid opening the file every time.