From 91dbc79f3a1056c54e71bb0e619de443a2de8e59 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 17 Oct 2019 12:32:14 -0600 Subject: [PATCH 001/203] (1) Fix spacing in ush/templates/input_ccpp_gfsextern_gfsphys.nml; (2) Rename variable FV3GFS_DATE_TYPE to FV3GFS_FILE_FMT. --- scripts/exregional_make_ic_lbc0.sh | 4 +- scripts/exregional_make_lbc1_to_lbcn.sh | 8 +-- ush/config_defaults.sh | 2 +- ush/get_extrn_mdl_file_dir_info.sh | 12 ++-- ush/setup.sh | 14 ++--- .../input_ccpp_gfsextern_gfsphys.nml | 62 +++++++++---------- ush/valid_param_vals.sh | 4 +- 7 files changed, 53 insertions(+), 53 deletions(-) diff --git a/scripts/exregional_make_ic_lbc0.sh b/scripts/exregional_make_ic_lbc0.sh index 08b1cced4..8b15061cd 100755 --- a/scripts/exregional_make_ic_lbc0.sh +++ b/scripts/exregional_make_ic_lbc0.sh @@ -242,7 +242,7 @@ case "$EXTRN_MDL_NAME_ICS" in "FV3GFS") - if [ "$FV3GFS_DATA_TYPE" = "nemsio" ]; then + if [ "${FV3GFS_FILE_FMT}" = "nemsio" ]; then external_model="FV3GFS" @@ -275,7 +275,7 @@ case "$EXTRN_MDL_NAME_ICS" in tracers="\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"" fi - elif [ "$FV3GFS_DATA_TYPE" = "grib2" ]; then + elif [ "${FV3GFS_FILE_FMT}" = "grib2" ]; then external_model="GFS" diff --git a/scripts/exregional_make_lbc1_to_lbcn.sh b/scripts/exregional_make_lbc1_to_lbcn.sh index 15608e3c1..09c99a3cf 100755 --- a/scripts/exregional_make_lbc1_to_lbcn.sh +++ b/scripts/exregional_make_lbc1_to_lbcn.sh @@ -237,7 +237,7 @@ case "$EXTRN_MDL_NAME_LBCS" in "FV3GFS") - if [ "$FV3GFS_DATA_TYPE" = "nemsio" ]; then + if [ "${FV3GFS_FILE_FMT}" = "nemsio" ]; then external_model="FV3GFS" @@ -267,7 +267,7 @@ case "$EXTRN_MDL_NAME_LBCS" in tracers="\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"" fi - elif [ "$FV3GFS_DATA_TYPE" = "grib2" ]; then + elif [ "${FV3GFS_FILE_FMT}" = "grib2" ]; then external_model="GFS" @@ -342,9 +342,9 @@ for (( i=0; i<$num_fhrs; i++ )); do fn_atm_nemsio="${EXTRN_MDL_FNS[$i]}" ;; "FV3GFS") - if [ "$FV3GFS_DATA_TYPE" = "nemsio" ]; then + if [ "${FV3GFS_FILE_FMT}" = "nemsio" ]; then fn_atm_nemsio="${EXTRN_MDL_FNS[$i]}" - elif [ "$FV3GFS_DATA_TYPE" = "grib2" ]; then + elif [ "${FV3GFS_FILE_FMT}" = "grib2" ]; then fn_grib2="${EXTRN_MDL_FNS[$i]}" fi ;; diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index 282018220..11e9120ac 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -265,7 +265,7 @@ FCST_LEN_HRS="24" # EXTRN_MDL_NAME_ICS="FV3GFS" EXTRN_MDL_NAME_LBCS="FV3GFS" -FV3GFS_DATA_TYPE="nemsio" +FV3GFS_FILE_FMT="nemsio" LBC_UPDATE_INTVL_HRS="6" # #----------------------------------------------------------------------- diff --git a/ush/get_extrn_mdl_file_dir_info.sh b/ush/get_extrn_mdl_file_dir_info.sh index a8cdb0304..5e47804f9 100755 --- a/ush/get_extrn_mdl_file_dir_info.sh +++ b/ush/get_extrn_mdl_file_dir_info.sh @@ -377,7 +377,7 @@ anl_or_fcst must be set to one of the following: "FV3GFS") - if [ "$FV3GFS_DATA_TYPE" = "nemsio" ]; then + if [ "${FV3GFS_FILE_FMT}" = "nemsio" ]; then # fns=( "atm" "sfc" "nst" ) fns=( "atm" "sfc" ) @@ -386,7 +386,7 @@ anl_or_fcst must be set to one of the following: suffix="anl.nemsio" fns=( "${fns[@]/%/$suffix}" ) - elif [ "$FV3GFS_DATA_TYPE" = "grib2" ]; then #Only 0.25 degree files for now + elif [ "${FV3GFS_FILE_FMT}" = "grib2" ]; then #Only 0.25 degree files for now fns=( "gfs.t${hh}z.pgrb2.0p25.anl" ) @@ -435,13 +435,13 @@ bination of external model (extrn_mdl_name) and analysis or forecast ;; "FV3GFS") - if [ "$FV3GFS_DATA_TYPE" = 
"nemsio" ]; then + if [ "${FV3GFS_FILE_FMT}" = "nemsio" ]; then fcst_hhh=( $( printf "%03d " "${lbc_update_fhrs[@]}" ) ) prefix="gfs.t${hh}z.atmf" fns=( "${fcst_hhh[@]/#/$prefix}" ) suffix=".nemsio" fns=( "${fns[@]/%/$suffix}" ) - elif [ "$FV3GFS_DATA_TYPE" = "grib2" ]; then + elif [ "${FV3GFS_FILE_FMT}" = "grib2" ]; then fcst_hhh=( $( printf "%03d " "${lbc_update_fhrs[@]}" ) ) prefix="gfs.t${hh}z.pgrb2.0p25.f" fns=( "${fcst_hhh[@]/#/$prefix}" ) @@ -661,7 +661,7 @@ has not been specified for this external model: ;; "FV3GFS") - if [ "$FV3GFS_DATA_TYPE" = "nemsio" ]; then + if [ "${FV3GFS_FILE_FMT}" = "nemsio" ]; then if [ "${cdate_FV3SAR}" -le "2019061206" ]; then arcv_dir="/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_C/Q2FY19/prfv3rt3/${cdate_FV3SAR}" @@ -688,7 +688,7 @@ has not been specified for this external model: arcvrel_dir="./gfs.${yyyymmdd}/${hh}" fi - elif [ "$FV3GFS_DATA_TYPE" = "grib2" ]; then + elif [ "${FV3GFS_FILE_FMT}" = "grib2" ]; then arcv_dir="/NCEPPROD/hpssprod/runhistory/rh${yyyy}/${yyyy}${mm}/${yyyymmdd}" arcv_fns="gpfs_dell1_nco_ops_com_gfs_prod_gfs.${yyyymmdd}_${hh}.gfs_pgrb2" diff --git a/ush/setup.sh b/ush/setup.sh index 89d35ac20..bf01bc34a 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -1051,17 +1051,17 @@ EXTRN_MDL_NAME_LBCS must be one of the following: # #----------------------------------------------------------------------- # -# Make sure FV3GFS_DATA_TYPE is set to a valid value. +# Make sure FV3GFS_FILE_FMT is set to a valid value. # #----------------------------------------------------------------------- # -iselementof "$FV3GFS_DATA_TYPE" valid_vals_FV3GFS_DATA_TYPE || { \ -valid_vals_FV3GFS_DATA_TYPE_str=$(printf "\"%s\" " "${valid_vals_FV3GFS_DATA_TYPE[@]}"); +iselementof "${FV3GFS_FILE_FMT}" valid_vals_FV3GFS_FILE_FMT || { \ +valid_vals_FV3GFS_FILE_FMT_str=$(printf "\"%s\" " "${valid_vals_FV3GFS_FILE_FMT[@]}"); print_err_msg_exit "${script_name}" "\ -The data type specified in FV3GFS_DATA_TYPE is not supported: - FV3GFS_DATA_TYPE = \"$FV3GFS_DATA_TYPE\" -FV3GFS_DATA_TYPE must be one of the following: - $valid_vals_FV3GFS_DATA_TYPE_str +The data type specified in FV3GFS_FILE_FMT is not supported: + FV3GFS_FILE_FMT = \"${FV3GFS_FILE_FMT}\" +FV3GFS_FILE_FMT must be one of the following: + $valid_vals_FV3GFS_FILE_FMT_str "; } # #----------------------------------------------------------------------- diff --git a/ush/templates/input_ccpp_gfsextern_gfsphys.nml b/ush/templates/input_ccpp_gfsextern_gfsphys.nml index 496a63d4e..baf82650b 100755 --- a/ush/templates/input_ccpp_gfsextern_gfsphys.nml +++ b/ush/templates/input_ccpp_gfsextern_gfsphys.nml @@ -1,42 +1,42 @@ - &amip_interp_nml - interp_oi_sst = .true. - use_ncep_sst = .true. - use_ncep_ice = .false. - no_anom_sst = .false. - data_set = 'reynolds_oi', - date_out_of_range = 'climo', +&amip_interp_nml + interp_oi_sst = .true. + use_ncep_sst = .true. + use_ncep_ice = .false. + no_anom_sst = .false. + data_set = 'reynolds_oi', + date_out_of_range = 'climo', / - &atmos_model_nml - blocksize = - chksum_debug = .false. - dycore_only = .false. - fdiag = 1 - ccpp_suite = 'FV3_GFS_2017_gfdlmp' - avg_max_length = 3600. +&atmos_model_nml + blocksize = + chksum_debug = .false. + dycore_only = .false. + fdiag = 1 + ccpp_suite = 'FV3_GFS_2017_gfdlmp' + avg_max_length = 3600. / &diag_manager_nml - prepend_date = .false. + prepend_date = .false. / - &fms_io_nml +&fms_io_nml checksum_required = .false. 
max_files_r = 100, max_files_w = 100, / - &fms_nml +&fms_nml clock_grain = 'ROUTINE', domains_stack_size = 1800200, print_memory_usage = .false. / - &fv_grid_nml +&fv_grid_nml grid_file = 'INPUT/grid_spec.nc' / - &fv_core_nml +&fv_core_nml layout = , io_layout = 1,1 npx = @@ -119,17 +119,17 @@ / &surf_map_nml - zero_ocean = .F. - cd4 = 0.12 - cd2 = -1 - n_del2_strong = 0 - n_del2_weak = 2 - n_del4 = 1 - max_slope = 0.4 - peak_fac = 1. + zero_ocean = .F. + cd4 = 0.12 + cd2 = -1 + n_del2_strong = 0 + n_del2_weak = 2 + n_del4 = 1 + max_slope = 0.4 + peak_fac = 1. / - &external_ic_nml +&external_ic_nml filtered_terrain = .true. levp = 65 gfs_dwinds = .true. @@ -137,7 +137,7 @@ nt_checker = 0 / - &gfs_physics_nml +&gfs_physics_nml fhzero = 1. ldiag3d = .false. lradar = .true. @@ -188,7 +188,7 @@ effr_in = .true. / - &gfdl_cloud_microphysics_nml +&gfdl_cloud_microphysics_nml sedi_transport = .false. do_sedi_heat = .false. rad_snow = .true. @@ -236,7 +236,7 @@ icloud_f = 1 mp_time = 90. / - &interpolator_nml +&interpolator_nml interp_method = 'conserve_great_circle' / &namsfc diff --git a/ush/valid_param_vals.sh b/ush/valid_param_vals.sh index 5b29e8e57..4510bf0c0 100644 --- a/ush/valid_param_vals.sh +++ b/ush/valid_param_vals.sh @@ -1,13 +1,13 @@ valid_vals_RUN_ENVIR=("nco" "community") valid_vals_VERBOSE=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_MACHINE=("WCOSS_C" "WCOSS" "DELL" "THEIA" "HERA" "JET" "ODIN" "CHEYENNE") -valid_vals_PREDEF_GRID_NAME=("GSD_HAFSV0.A3km" "GSD_HAFSV0.A13km" "GSD_HAFSV0.A25km" "GSD_RAP13km" "GSD_HRRR25km" "GSD_HRRR13km" "GSD_HRRR3km" "EMC_CONUS" "EMC_AK") +valid_vals_PREDEF_GRID_NAME=("GSD_HAFSV0.A3km" "GSD_HAFSV0.A13km" "GSD_HAFSV0.A25km" "GSD_RAP13km" "GSD_HRRR3km" "GSD_HRRR13km" "GSD_HRRR25km" "EMC_CONUS" "EMC_AK") valid_vals_USE_CCPP=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_CCPP_PHYS_SUITE=("GFS" "GSD") valid_vals_RES=("48" "96" "192" "384" "768" "1152" "3072") valid_vals_EXTRN_MDL_NAME_ICS=("GSMGFS" "FV3GFS" "RAPX" "HRRRX") valid_vals_EXTRN_MDL_NAME_LBCS=("GSMGFS" "FV3GFS" "RAPX" "HRRRX") -valid_vals_FV3GFS_DATA_TYPE=("nemsio" "grib2") +valid_vals_FV3GFS_FILE_FMT=("nemsio" "grib2") valid_vals_GRID_GEN_METHOD=("GFDLgrid" "JPgrid") valid_vals_PREEXISTING_DIR_METHOD=("delete" "rename" "quit") valid_vals_gtype=("nest" "regional") From 4480726bba57479d758be597d2a8d76cf994c41f Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 18 Oct 2019 10:52:21 -0600 Subject: [PATCH 002/203] (1) Create new function (check_var_valid_value) that checks whether a variable has a valid value; replace blocks of code in various scripts with calls to this function; (2) In process_args.sh, change local variable names for clarity (and to avoid conflicts with check_var_valid_value function) and update documentation. 
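
A minimal sketch of how the new function is intended to be called (the
names MY_VAR and valid_vals_MY_VAR below are illustrative only, and the
usual workflow helper functions such as iselementof and
print_err_msg_exit are assumed to have been sourced):

  valid_vals_MY_VAR=( "TRUE" "FALSE" )
  MY_VAR="MAYBE"
  check_var_valid_value "MY_VAR" "valid_vals_MY_VAR"

This prints an error message listing the valid values and exits the
calling script.  Note that the first two arguments are the names of the
variable and of the array of valid values, not their values.  An optional
third argument replaces the first portion of the error message, e.g.

  check_var_valid_value "MY_VAR" "valid_vals_MY_VAR" \
    "The flag MY_VAR is set to an unsupported value:"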
--- ush/check_var_valid_value.sh | 112 +++++++++++ ush/get_extrn_mdl_file_dir_info.sh | 61 +++--- ush/process_args.sh | 313 +++++++++++++++-------------- ush/set_predef_grid_params.sh | 15 +- ush/setup.sh | 199 +++++------------- ush/source_funcs.sh | 9 + 6 files changed, 371 insertions(+), 338 deletions(-) create mode 100755 ush/check_var_valid_value.sh diff --git a/ush/check_var_valid_value.sh b/ush/check_var_valid_value.sh new file mode 100755 index 000000000..7d1a85a95 --- /dev/null +++ b/ush/check_var_valid_value.sh @@ -0,0 +1,112 @@ +# +#----------------------------------------------------------------------- +# +# This function checks whether the specified variable contains a valid +# value (where the set of valid values is also specified). +# +#----------------------------------------------------------------------- +# +function check_var_valid_value() { +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. +# +#----------------------------------------------------------------------- +# + { save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Check arguments. +# +#----------------------------------------------------------------------- +# + if [ "$#" -lt 2 ] || [ "$#" -gt 3 ]; then + + print_err_msg_exit "\ +Function \"${FUNCNAME[0]}\": Incorrect number of arguments specified. +Usage: + + ${FUNCNAME[0]} var_name valid_var_values_array_name [msg] + +where the arguments are defined as follows: + + var_name: + The name of the variable whose value we want to check for validity. + + valid_var_values_array_name: + The name of the array containing a list of valid values that var_name + can take on. + + msg + Optional argument specifying the first portion of the error message to + print out if var_name does not have a valid value. +" + + fi +# +#----------------------------------------------------------------------- +# +# Declare local variables. +# +#----------------------------------------------------------------------- +# + local var_name \ + valid_var_values_array_name \ + var_value \ + valid_var_values_at \ + valid_var_values \ + err_msg \ + valid_var_values_str +# +#----------------------------------------------------------------------- +# +# Set local variable values. +# +#----------------------------------------------------------------------- +# + var_name="$1" + valid_var_values_array_name="$2" + + var_value=${!var_name} + valid_var_values_at="$valid_var_values_array_name[@]" + valid_var_values=("${!valid_var_values_at}") + + if [ "$#" -eq 3 ]; then + err_msg="$3" + else + err_msg="\ +The value specified in ${var_name} is not supported: + ${var_name} = \"${var_value}\"" + fi +# +#----------------------------------------------------------------------- +# +# Check whether var_value is equal to one of the elements of the array +# valid_var_values. If not, print out an error message and exit the +# calling script. 
+# +#----------------------------------------------------------------------- +# + iselementof "${var_value}" valid_var_values || { \ + caller_name=$( basename "${BASH_SOURCE[1]}" ) + valid_var_values_str=$(printf "\"%s\" " "${valid_var_values[@]}"); + print_err_msg_exit "${caller_name}" "\ +${err_msg} +${var_name} must be set to one of the following: + ${valid_var_values_str} +"; } +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. +# +#----------------------------------------------------------------------- +# + { restore_shell_opts; } > /dev/null 2>&1 + +} + diff --git a/ush/get_extrn_mdl_file_dir_info.sh b/ush/get_extrn_mdl_file_dir_info.sh index 5e47804f9..bc3f79038 100755 --- a/ush/get_extrn_mdl_file_dir_info.sh +++ b/ush/get_extrn_mdl_file_dir_info.sh @@ -242,14 +242,7 @@ fi #----------------------------------------------------------------------- # valid_vals_anl_or_fcst=( "ANL" "anl" "FCST" "fcst" ) - iselementof "$anl_or_fcst" valid_vals_anl_or_fcst || { \ - valid_vals_anl_or_fcst_str=$(printf "\"%s\" " "${valid_vals_anl_or_fcst[@]}"); - print_err_msg_exit "${func_name}" "\ -Value specified in anl_or_fcst is not supported: - anl_or_fcst = \"$anl_or_fcst\" -anl_or_fcst must be set to one of the following: - $valid_vals_anl_or_fcst_str -"; } + check_var_valid_value "anl_or_fcst" "valid_vals_anl_or_fcst" # # For convenience of checking input values, change contents of anl_or_- # fcst to uppercase. @@ -305,7 +298,7 @@ anl_or_fcst must be set to one of the following: # lbc_update_fhrs=( "" ) - if [ "$anl_or_fcst" = "FCST" ]; then + if [ "${anl_or_fcst}" = "FCST" ]; then lbc_update_fhrs=( "${LBC_UPDATE_FCST_HRS[@]}" ) # @@ -329,8 +322,8 @@ anl_or_fcst must be set to one of the following: # #----------------------------------------------------------------------- # - if [ "$extrn_mdl_name" = "RAPX" ] || \ - [ "$extrn_mdl_name" = "HRRRX" ]; then + if [ "${extrn_mdl_name}" = "RAPX" ] || \ + [ "${extrn_mdl_name}" = "HRRRX" ]; then # # Get the Julian day-of-year of the starting date and time of the exter- # nal model run. @@ -351,7 +344,7 @@ anl_or_fcst must be set to one of the following: # #----------------------------------------------------------------------- # - case "$anl_or_fcst" in + case "${anl_or_fcst}" in # #----------------------------------------------------------------------- # @@ -364,7 +357,7 @@ anl_or_fcst must be set to one of the following: fcst_hh="00" fcst_mn="00" - case "$extrn_mdl_name" in + case "${extrn_mdl_name}" in "GSMGFS") # fns=( "atm" "sfc" "nst" ) @@ -386,9 +379,9 @@ anl_or_fcst must be set to one of the following: suffix="anl.nemsio" fns=( "${fns[@]/%/$suffix}" ) - elif [ "${FV3GFS_FILE_FMT}" = "grib2" ]; then #Only 0.25 degree files for now + elif [ "${FV3GFS_FILE_FMT}" = "grib2" ]; then - fns=( "gfs.t${hh}z.pgrb2.0p25.anl" ) + fns=( "gfs.t${hh}z.pgrb2.0p25.anl" ) # Get only 0.25 degree files for now. 
fi ;; @@ -406,8 +399,8 @@ anl_or_fcst must be set to one of the following: The external model file names have not yet been specified for this com- bination of external model (extrn_mdl_name) and analysis or forecast (anl_or_fcst): - extrn_mdl_name = \"$extrn_mdl_name\" - anl_or_fcst = \"$anl_or_fcst\" + extrn_mdl_name = \"${extrn_mdl_name}\" + anl_or_fcst = \"${anl_or_fcst}\" " ;; @@ -424,7 +417,7 @@ bination of external model (extrn_mdl_name) and analysis or forecast fcst_mn="00" - case "$extrn_mdl_name" in + case "${extrn_mdl_name}" in "GSMGFS") fcst_hhh=( $( printf "%03d " "${lbc_update_fhrs[@]}" ) ) @@ -469,8 +462,8 @@ bination of external model (extrn_mdl_name) and analysis or forecast The external model file names have not yet been specified for this com- bination of external model (extrn_mdl_name) and analysis or forecast (anl_or_fcst): - extrn_mdl_name = \"$extrn_mdl_name\" - anl_or_fcst = \"$anl_or_fcst\" + extrn_mdl_name = \"${extrn_mdl_name}\" + anl_or_fcst = \"${anl_or_fcst}\" " ;; @@ -490,13 +483,13 @@ bination of external model (extrn_mdl_name) and analysis or forecast # #----------------------------------------------------------------------- # - if [ "$anl_or_fcst" = "ANL" ]; then + if [ "${anl_or_fcst}" = "ANL" ]; then sysbasedir="$EXTRN_MDL_FILES_SYSBASEDIR_ICS" - elif [ "$anl_or_fcst" = "FCST" ]; then + elif [ "${anl_or_fcst}" = "FCST" ]; then sysbasedir="$EXTRN_MDL_FILES_SYSBASEDIR_LBCS" fi - case "$extrn_mdl_name" in + case "${extrn_mdl_name}" in # # It is not clear which, if any, systems the (old) spectral GFS model is @@ -523,7 +516,7 @@ bination of external model (extrn_mdl_name) and analysis or forecast print_err_msg_exit "${func_name}" "\ The system directory in which to look for external model output files has not been specified for this external model and machine combination: - extrn_mdl_name = \"$extrn_mdl_name\" + extrn_mdl_name = \"${extrn_mdl_name}\" MACHINE = \"$MACHINE\" " ;; @@ -552,7 +545,7 @@ has not been specified for this external model and machine combination: print_err_msg_exit "${func_name}" "\ The system directory in which to look for external model output files has not been specified for this external model and machine combination: - extrn_mdl_name = \"$extrn_mdl_name\" + extrn_mdl_name = \"${extrn_mdl_name}\" MACHINE = \"$MACHINE\" " ;; @@ -581,7 +574,7 @@ has not been specified for this external model and machine combination: print_err_msg_exit "${func_name}" "\ The system directory in which to look for external model output files has not been specified for this external model and machine combination: - extrn_mdl_name = \"$extrn_mdl_name\" + extrn_mdl_name = \"${extrn_mdl_name}\" MACHINE = \"$MACHINE\" " ;; @@ -610,7 +603,7 @@ has not been specified for this external model and machine combination: print_err_msg_exit "${func_name}" "\ The system directory in which to look for external model output files has not been specified for this external model and machine combination: - extrn_mdl_name = \"$extrn_mdl_name\" + extrn_mdl_name = \"${extrn_mdl_name}\" MACHINE = \"$MACHINE\" " ;; @@ -622,7 +615,7 @@ has not been specified for this external model and machine combination: print_err_msg_exit "${func_name}" "\ The system directory in which to look for external model output files has not been specified for this external model: - extrn_mdl_name = \"$extrn_mdl_name\" + extrn_mdl_name = \"${extrn_mdl_name}\" " esac @@ -643,16 +636,16 @@ has not been specified for this external model: # 
#----------------------------------------------------------------------- # - case "$extrn_mdl_name" in + case "${extrn_mdl_name}" in "GSMGFS") arcv_dir="/NCEPPROD/hpssprod/runhistory/rh${yyyy}/${yyyy}${mm}/${yyyymmdd}" arcv_fmt="tar" arcv_fns="gpfs_hps_nco_ops_com_gfs_prod_gfs.${cdate}." - if [ "$anl_or_fcst" = "ANL" ]; then + if [ "${anl_or_fcst}" = "ANL" ]; then arcv_fns="${arcv_fns}anl" arcvrel_dir="." - elif [ "$anl_or_fcst" = "FCST" ]; then + elif [ "${anl_or_fcst}" = "FCST" ]; then arcv_fns="${arcv_fns}sigma" arcvrel_dir="/gpfs/hps/nco/ops/com/gfs/prod/gfs.${yyyymmdd}" fi @@ -671,10 +664,10 @@ has not been specified for this external model: arcv_fns="gpfs_dell1_nco_ops_com_gfs_prod_gfs.${yyyymmdd}_${hh}." fi arcv_fmt="tar" - if [ "$anl_or_fcst" = "ANL" ]; then + if [ "${anl_or_fcst}" = "ANL" ]; then arcv_fns="${arcv_fns}gfs_nemsioa" arcvrel_dir="./gfs.${yyyymmdd}/${hh}" - elif [ "$anl_or_fcst" = "FCST" ]; then + elif [ "${anl_or_fcst}" = "FCST" ]; then last_fhr_in_nemsioa="39" first_lbc_fhr="${lbc_update_fhrs[0]}" last_lbc_fhr="${lbc_update_fhrs[-1]}" @@ -755,7 +748,7 @@ has not been specified for this external model: *) print_err_msg_exit "${func_name}" "\ Archive file information has not been specified for this external model: - extrn_mdl_name = \"$extrn_mdl_name\" + extrn_mdl_name = \"${extrn_mdl_name}\" " ;; diff --git a/ush/process_args.sh b/ush/process_args.sh index 8a7e8c6a1..9741c341f 100755 --- a/ush/process_args.sh +++ b/ush/process_args.sh @@ -2,88 +2,81 @@ #----------------------------------------------------------------------- # # This function processes a list of variable name and value pairs passed -# to it as a set of arguments (starting with the second argument). Each -# name-value pair must have the form -# -# VAR_NAME=VAR_VALUE -# -# where VAR_NAME is the name of a variable and VAR_VALUE is the value it -# should have. For each name-value pair, this function creates a varia- -# ble of the specified name and assigns to it its corresponding value. -# -# The first argument to this function (valid_var_names) is the name of -# an array defined in the calling script that contains a list of valid -# variable values. The variable name specified in each name-value pair -# must correspond to one of the elements of this array. If it isn't, -# this function prints out an error message and exits with a nonzero -# exit code. Any variable in the list of valid variable names that is -# not assigned a value in a name-value pair gets set to the null string. -# -# This function may be called from a script as follows: -# -# valid_args=( "arg1" "arg2" "arg3" "arg4" ) -# process_args valid_args \ -# arg1="hello" \ -# arg3="goodbye" -# -# After the call to process_args in this script, there will exist four -# new (or reset) variables: arg1, arg2, arg3, and arg4. arg1 will be -# set to the string "hello", arg3 will be set to the string "goodby", -# and arg2 and arg4 will be set to the null string, i.e. "". -# -# The purpose of this function is to allow a script to process a set of -# arguments passed to it as variable name-and-value pairs by another -# script (aka the calling script) such that: -# -# 1) The calling script can only pass one of a restricted set of varia- -# bles to the child script. This set is specified within the child -# script and is known as the -# -# 2) The calling script can specify a subset of the allowed variables in -# the child script. Variables that are not specified are set to the -# null string. -# -# 1) The "export" feature doesn't have to be used -#. 
For exam- -# ple, assume the script outer_script.sh calls a second script named in- -# ner_script.sh as follows: -# -# inner_script.sh \ -# arg1="hi there" \ -# arg2="all done" -# -# To process the arguments arg1 and arg2 passed to it, inner_script.sh -# may contain the following code: -# +# to it as a set of arguments, starting with the second argument. We +# refer to these pairs as argument-value pairs (or "arg-val" pairs for +# short) because the variable names in these pairs represent the names +# of arguments to the script or function that calls this function (which +# we refer to here as the "caller"). The first argument to this func- +# tion being the name of an array that contains a list of valid argument +# names that the caller is allowed to accept. Each arg-val pair must +# have the form +# +# ARG_NAME=VAR_VALUE +# +# where ARG_NAME is the name of an argument and VAR_VALUE is the value +# to set that argument to. For each arg-val pair, this function creates +# a global variable named ARG_NAME and assigns to it the value VAR_VAL- +# UE. +# +# The purpose of this function is to provide a mechanism by which a pa- +# rent script, say parent.sh, can pass variable values to a child script +# or function, say child.sh, that makes it very clear which arguments of +# child.sh are being set and to what values. For example, parent.sh can +# call child.sh as follows: +# +# ... +# child.sh arg3="Hello" arg2="bye" arg4=("this" "is" "an" "array") +# ... +# +# Then child.sh can use this function (process_args) as follows to pro- +# cess the arg-val pairs passed to it: +# +# ... # valid_args=( "arg1" "arg2" "arg3" "arg4" ) # process_args valid_args "$@" -# -# The call to process_args here would cause arg1 and arg2 to be created -# and set to "hi_there" and "all done", respectively, and for arg3 and -# arg4 to be created and set to "". Note that arg1 through arg4 would -# not be defined in the environment of outer_script.sh; they would only -# be defined in the environment of inner_script.sh. -# -# Note that variables may also be set to arrays. For example, the call -# in outer_script.sh to inner_script.sh may be modified to -# -# inner_script.sh \ -# arg1="hi there" \ -# arg2="all done" -# arg4='( "dog" "cat" )' -# -# This would cause the scalar variables arg1 and arg2 to be created in -# the environment of inner_script.sh and set to "hi there" and "all -# done", respectively, for arg3 to be created and set to "", and for -# arg4 to be created (as an array) and set to the array ( "dog" "cat" ). -# - -# process_args valid_args "$@" -# The variable may be set to a scalar or -# array value. -# creating a variable of the same name as the one specified in each -# name-value pair and assigning to it the value specified in that pair. -# The variable in each name-value pair can be a scalar or an array. +# ... +# +# Here, valid_args is an array that defines or "declares" the argument +# list for child.sh, i.e. it defines the variable names that child.sh is +# allowed to accept as arguments. Its name is passed to process_args as +# the first argument. The "$@" appearing in the call to process_args +# passes to process_args the list of arg-val pairs that parent.sh passes +# to child.sh as the second through N-th arguments. 
In the example +# above, "$@" represents: +# +# arg3="Hello" arg2="bye" arg4=("this" "is" "an" "array") +# +# After the call to process_args in child.sh, the variables arg1, arg2, +# arg3, and arg4 will be set as follows in child.sh: +# +# arg1="" +# arg2="bye" +# arg3="Hello" +# arg4=("this" "is" "an" "array") +# +# Note that: +# +# 1) The set of arg-val pairs may list only a subset of the list of arg- +# uments declared in valid_args; the unlisted arguments will be set +# to the null string. In the example above, arg1 is set to the null +# string because it is not specified in any of the arg-val pairs in +# the call to child.sh in parent.sh. +# +# 2) The arg-val pairs in the call to child.sh do not have to be in the +# same order as the list of "declared" arguments in child.sh. For +# instance, in the example above, the arg-val pair for arg3 is listed +# before the one for arg2. +# +# 3) An argument can be set to an array by starting and ending the value +# portion of its arg-val pair with opening and closing parentheses, +# repsectively, and listing the elements within (each one in a set of +# double-quotes and separated fromt the next by whitespace). In the +# example above, this is done for arg4. +# +# 4) If the value portion of an arg-val pair contains an argument that +# is not defined in the array valid_args in child.sh, the call to +# process_args in child.sh will result in an error message and exit +# from the caller. # #----------------------------------------------------------------------- # @@ -110,21 +103,27 @@ function process_args() { Function \"${FUNCNAME[0]}\": Incorrect number of arguments specified. Usage: - ${FUNCNAME[0]} valid_var_names_array var_name_val_pair1 ... var_name_val_pairN + ${FUNCNAME[0]} valid_arg_names_array_name \ + arg_val_pair1 \ + ... \ + arg_val_pairN where the arguments are defined as follows: - valid_var_names_arrray: - The name of the array containing a list of valid variable names. + valid_arg_names_array_name: + The name of the array containing a list of valid argument names. + + arg_val_pair1 ... arg_val_pairN: + A list of N argument-value pairs. These have the form - var_name_val_pair1 ... var_name_val_pairN: - A list of N variable name-value pairs. These have the form - var_name1=\"var_val1\" ... var_nameN=\"var_valN\" - where each variable name (var_nameI) needs to be in the list of valid - variable names specified in valid_var_names_array. Note that not all - the valid variables listed in valid_var_names_array need to be set, - and the name-value pairs can be in any order (i.e. they don't have to - follow the order of variables listed in valid_var_names_array).\n" + arg1=\"val1\" ... argN=\"valN\" + + where each argument name (argI) needs to be in the list of valid argu- + ment names specified in valid_arg_names_array_name. Note that not all + the valid arguments listed in valid_arg_names_array_name need to be + set, and the argument-value pairs can be in any order, i.e. 
they don't + have to follow the order of arguments listed in valid_arg_names_ar- + ray_name.\n" fi # @@ -134,122 +133,134 @@ where the arguments are defined as follows: # #----------------------------------------------------------------------- # - local valid_var_names_at \ - valid_var_names \ - valid_var_names_str \ - num_valid_var_names \ - num_name_val_pairs \ - i valid_var_name name_val_pair var_name var_value is_array + local valid_arg_names_array_name \ + valid_arg_names_at \ + valid_arg_names \ + num_valid_args \ + num_arg_val_pairs \ + i valid_arg_name arg_already_specified \ + arg_val_pair arg_name arg_value is_array \ + err_msg cmd_line - valid_var_names_at="$1[@]" - valid_var_names=("${!valid_var_names_at}") - valid_var_names_str=$(printf "\"%s\" " "${valid_var_names[@]}"); - num_valid_var_names=${#valid_var_names[@]} + valid_arg_names_array_name="$1" + valid_arg_names_at="${valid_arg_names_array_name}[@]" + valid_arg_names=("${!valid_arg_names_at}") + num_valid_args=${#valid_arg_names[@]} # #----------------------------------------------------------------------- # -# Get the number of name-value pairs specified as inputs to this func- -# tion. These consist of the all arguments starting with the 2nd, so -# we subtract 1 from the total number of arguments. +# Get the number of argument-value pairs (or arg-val pairs, for short) +# being passed into this function. These consist of all arguments +# starting with the 2nd, so we subtract 1 from the total number of argu- +# ments. # #----------------------------------------------------------------------- # - num_name_val_pairs=$(( $#-1 )) + num_arg_val_pairs=$(( $# - 1 )) # #----------------------------------------------------------------------- # -# Make sure that the number of name-value pairs is less than or equal to -# the number of valid variable names. +# Make sure that the number of arg-val pairs is less than or equal to +# the number of valid arguments. # #----------------------------------------------------------------------- # - if [ "${num_name_val_pairs}" -gt "${num_valid_var_names}" ]; then + if [ "${num_arg_val_pairs}" -gt "${num_valid_args}" ]; then print_err_msg_exit "\ Function \"${FUNCNAME[0]}\": -The number of variable name-value pairs specified on the command line -must be less than or equal to the number of valid variable names speci- -fied in the array valid_var_names: - num_name_val_pairs = \"$num_name_val_pairs\" - num_valid_var_names = \"$num_valid_var_names\" +The number of argument-value pairs specified on the command line (num_- +arg_val_pairs) must be less than or equal to the number of valid argu- +ments (num_valid_args) specified in the array valid_arg_names: + num_arg_val_pairs = ${num_arg_val_pairs} + num_valid_args = ${num_valid_args} " - fi # #----------------------------------------------------------------------- # -# Initialize all valid variables to the null string. +# Initialize all valid arguments to the null string. Note that the +# scope of this initialization is global, i.e. the calling script or +# function will be aware of these initializations. Also, initialize +# each element of the array arg_already_specified to "false". This ar- +# ray keeps track of whether each valid argument has already been set +# to a value by an arg-val specification. 
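+# For example, with valid_arg_names=( "arg1" "arg2" "arg3" ), the loop
+# below creates (or resets) the global variables arg1, arg2, and arg3,
+# sets each of them to the null string, and initializes the array
+# arg_already_specified to ( "false" "false" "false" ).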
# #----------------------------------------------------------------------- # - for (( i=0; i<$num_valid_var_names; i++ )); do - valid_var_name="${valid_var_names[$i]}" - eval ${valid_var_name}="" - valid_var_specified[$i]="false" + for (( i=0; i<${num_valid_args}; i++ )); do + valid_arg_name="${valid_arg_names[$i]}" + eval ${valid_arg_name}="" + arg_already_specified[$i]="false" done # #----------------------------------------------------------------------- # -# Loop over the list of variable name-value pairs and set variable val- -# ues. +# Loop over the list of arg-val pairs and set argument values. # #----------------------------------------------------------------------- # - for name_val_pair in "${@:2}"; do - - var_name=$(echo ${name_val_pair} | cut -f1 -d=) - var_value=$(echo ${name_val_pair} | cut -f2 -d=) + for arg_val_pair in "${@:2}"; do + arg_name=$(echo ${arg_val_pair} | cut -f1 -d=) + arg_value=$(echo ${arg_val_pair} | cut -f2 -d=) +# +# If the first character of the argument's value is an opening parenthe- +# sis and its last character is a closing parenthesis, then the argument +# is an array. Check for this and set the is_array flag accordingly. +# is_array="false" - if [ "${var_value:0:1}" = "(" ] && \ - [ "${var_value: -1}" = ")" ]; then + if [ "${arg_value:0:1}" = "(" ] && \ + [ "${arg_value: -1}" = ")" ]; then is_array="true" fi # #----------------------------------------------------------------------- # -# Make sure that the specified variable name is valid. +# Make sure that the argument name specified by the current argument- +# value pair is valid. # #----------------------------------------------------------------------- # - iselementof "${var_name}" valid_var_names || { \ - print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": -The specified variable name in the current variable name-and-value pair -is not valid: - name_val_pair = \"${name_val_pair}\" - var_name = \"${var_name}\" -var_name must be set to one of the following: - $valid_var_names_str -"; } + err_msg="\ +The specified argument name (arg_name) in the current argument-value +pair (arg_val_pair) is not valid: + arg_val_pair = \"${arg_val_pair}\" + arg_name = \"${arg_name}\"" + check_var_valid_value "arg_name" "valid_arg_names" "${err_msg}" # #----------------------------------------------------------------------- # -# Loop through the list of valid variable names and find the one that -# the current name-value pair corresponds to. Then set that variable to +# Loop through the list of valid argument names and find the one that +# the current arg-val pair corresponds to. Then set that argument to # the specified value. # #----------------------------------------------------------------------- # - for (( i=0; i<${num_valid_var_names}; i++ )); do - - valid_var_name="${valid_var_names[$i]}" - if [ "${var_name}" = "${valid_var_name}" ]; then + for (( i=0; i<${num_valid_args}; i++ )); do - if [ "${valid_var_specified[$i]}" = "false" ]; then - valid_var_specified[$i]="true" + valid_arg_name="${valid_arg_names[$i]}" + if [ "${arg_name}" = "${valid_arg_name}" ]; then +# +# Check whether the current argument has already been set by a previous +# arg-val pair on the command line. If not, proceed to set the argument +# to the specified value. If so, print out an error message and exit +# the calling script. 
+# + if [ "${arg_already_specified[$i]}" = "false" ]; then + arg_already_specified[$i]="true" if [ "${is_array}" = "true" ]; then - eval ${var_name}=${var_value} + eval ${arg_name}=${arg_value} else - eval ${var_name}=\"${var_value}\" + eval ${arg_name}=\"${arg_value}\" fi else cmd_line=$( printf "\'%s\' " "${@:1}" ) print_err_msg_exit "\ -The current variable has already been assigned a value on the command +The current argument has already been assigned a value on the command line: - var_name = \"${var_name}\" + arg_name = \"${arg_name}\" cmd_line = ${cmd_line} -Please assign values to variables only once on the command line. +Please assign values to arguments only once on the command line. " fi fi diff --git a/ush/set_predef_grid_params.sh b/ush/set_predef_grid_params.sh index 766c20d9a..cce46ddd2 100644 --- a/ush/set_predef_grid_params.sh +++ b/ush/set_predef_grid_params.sh @@ -650,15 +650,12 @@ if [ "$QUILTING" = "TRUE" ]; then # # First, make sure that WRTCMP_output_grid is set to a valid value. # - iselementof "$WRTCMP_output_grid" valid_vals_WRTCMP_output_grid || { \ - valid_vals_WRTCMP_output_grid_str=$(printf "\"%s\" " "${valid_vals_WRTCMP_output_grid[@]}"); - print_err_msg_exit "\ -The write-component coordinate system specified in WRTCMP_output_grid is -not supported: - WRTCMP_output_grid = \"$WRTCMP_output_grid\" -WRTCMP_output_grid must be set to one of the following: - $valid_vals_WRTCMP_output_grid_str -"; } + err_msg="\ +The coordinate system used by the write-component output grid specified +in WRTCMP_output_grid is not supported: + WRTCMP_output_grid = \"${WRITECMP_output_grid}\"" + check_var_valid_value \ + "WRTCMP_output_grid" "valid_vals_WRTCMP_output_grid" "${err_msg}" # # Now set the name of the write-component template file. # diff --git a/ush/setup.sh b/ush/setup.sh index bf01bc34a..c9815d019 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -108,14 +108,7 @@ fi # #----------------------------------------------------------------------- # -iselementof "$RUN_ENVIR" valid_vals_RUN_ENVIR || { \ -valid_vals_RUN_ENVIR_str=$(printf "\"%s\" " "${valid_vals_RUN_ENVIR[@]}"); -print_err_msg_exit "${script_name}" "\ -Value specified in RUN_ENVIR is not supported: - RUN_ENVIR = \"$RUN_ENVIR\" -RUN_ENVIR must be set to one of the following: - $valid_vals_RUN_ENVIR_str -"; } +check_var_valid_value "RUN_ENVIR" "valid_vals_RUN_ENVIR" # #----------------------------------------------------------------------- # @@ -123,14 +116,7 @@ RUN_ENVIR must be set to one of the following: # #----------------------------------------------------------------------- # -iselementof "$VERBOSE" valid_vals_VERBOSE || { \ -valid_vals_VERBOSE_str=$(printf "\"%s\" " "${valid_vals_VERBOSE[@]}"); -print_err_msg_exit "${script_name}" "\ -Value specified in VERBOSE is not supported: - VERBOSE = \"$VERBOSE\" -VERBOSE must be set to one of the following: - $valid_vals_VERBOSE_str -"; } +check_var_valid_value "VERBOSE" "valid_vals_VERBOSE" # # Set VERBOSE to either "TRUE" or "FALSE" so we don't have to consider # other valid values later on. 
@@ -150,14 +136,7 @@ fi # #----------------------------------------------------------------------- # -iselementof "${RUN_TASK_MAKE_GRID}" valid_vals_RUN_TASK_MAKE_GRID || { \ -valid_vals_RUN_TASK_MAKE_GRID_str=$(printf "\"%s\" " "${valid_vals_RUN_TASK_MAKE_GRID[@]}"); -print_err_msg_exit "${script_name}" "\ -Value specified in RUN_TASK_MAKE_GRID is not supported: - RUN_TASK_MAKE_GRID = \"${RUN_TASK_MAKE_GRID}\" -RUN_TASK_MAKE_GRID must be set to one of the following: - ${valid_vals_RUN_TASK_MAKE_GRID_str} -"; } +check_var_valid_value "RUN_TASK_MAKE_GRID" "valid_vals_RUN_TASK_MAKE_GRID" # # Set RUN_TASK_MAKE_GRID to either "TRUE" or "FALSE" so we don't have to # consider other valid values later on. @@ -177,14 +156,8 @@ fi # #----------------------------------------------------------------------- # -iselementof "$RUN_TASK_MAKE_SFC_CLIMO" valid_vals_RUN_TASK_MAKE_SFC_CLIMO || { \ -valid_vals_RUN_TASK_MAKE_SFC_CLIMO_str=$(printf "\"%s\" " "${valid_vals_RUN_TASK_MAKE_SFC_CLIMO[@]}"); -print_err_msg_exit "${script_name}" "\ -Value specified in RUN_TASK_MAKE_SFC_CLIMO is not supported: - RUN_TASK_MAKE_SFC_CLIMO = \"$RUN_TASK_MAKE_SFC_CLIMO\" -RUN_TASK_MAKE_SFC_CLIMO must be set to one of the following: - $valid_vals_RUN_TASK_MAKE_SFC_CLIMO_str -"; } +check_var_valid_value \ + "RUN_TASK_MAKE_SFC_CLIMO" "valid_vals_RUN_TASK_MAKE_SFC_CLIMO" # # Set RUN_TASK_MAKE_SFC_CLIMO to either "TRUE" or "FALSE" so we don't # have to consider other valid values later on. @@ -229,15 +202,7 @@ fi #----------------------------------------------------------------------- # MACHINE=$( printf "%s" "$MACHINE" | sed -e 's/\(.*\)/\U\1/' ) - -iselementof "$MACHINE" valid_vals_MACHINE || { \ -valid_vals_MACHINE_str=$(printf "\"%s\" " "${valid_vals_MACHINE[@]}"); -print_err_msg_exit "${script_name}" "\ -Machine specified in MACHINE is not supported: - MACHINE = \"$MACHINE\" -MACHINE must be set to one of the following: - $valid_vals_MACHINE_str -"; } +check_var_valid_value "MACHINE" "valid_vals_MACHINE" # #----------------------------------------------------------------------- # @@ -347,14 +312,7 @@ TILE_RGNL="7" # #----------------------------------------------------------------------- # -iselementof "$gtype" valid_vals_gtype || { \ -valid_vals_gtype_str=$(printf "\"%s\" " "${valid_vals_gtype[@]}"); -print_err_msg_exit "${script_name}" "\ -Value specified in gtype is not supported: - gtype = \"$gtype\" -gtype must be set to one of the following: - $valid_vals_gtype_str -"; } +check_var_valid_value "gtype" "valid_vals_gtype" # #----------------------------------------------------------------------- # @@ -363,19 +321,13 @@ gtype must be set to one of the following: #----------------------------------------------------------------------- # if [ ! 
-z ${PREDEF_GRID_NAME} ]; then - iselementof "$PREDEF_GRID_NAME" valid_vals_PREDEF_GRID_NAME || { \ - valid_vals_PREDEF_GRID_NAME_str=$(printf "\"%s\" " "${valid_vals_PREDEF_GRID_NAME[@]}"); - print_err_msg_exit "${script_name}" "\ -The predefined regional domain specified in PREDEF_GRID_NAME is not sup- + err_msg="\ +The predefined regional grid specified in PREDEF_GRID_NAME is not sup- ported: - PREDEF_GRID_NAME = \"$PREDEF_GRID_NAME\" -PREDEF_GRID_NAME must be set either to an empty string or to one of the -following: - $valid_vals_PREDEF_GRID_NAME_str -"; } + PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\"" + check_var_valid_value \ + "PREDEF_GRID_NAME" "valid_vals_PREDEF_GRID_NAME" "${err_msg}" fi - - # #----------------------------------------------------------------------- # @@ -383,14 +335,8 @@ fi # #----------------------------------------------------------------------- # -iselementof "${PREEXISTING_DIR_METHOD}" valid_vals_PREEXISTING_DIR_METHOD || { \ -valid_vals_PREEXISTING_DIR_METHOD_str=$(printf "\"%s\" " "${valid_vals_PREEXISTING_DIR_METHOD[@]}"); -print_err_msg_exit "${script_name}" "\ -Value specified in PREEXISTING_DIR_METHOD is not supported: - PREEXISTING_DIR_METHOD = \"${PREEXISTING_DIR_METHOD}\" -PREEXISTING_DIR_METHOD must be set to one of the following: - $valid_vals_PREEXISTING_DIR_METHOD_str -"; } +check_var_valid_value \ + "PREEXISTING_DIR_METHOD" "valid_vals_PREEXISTING_DIR_METHOD" # #----------------------------------------------------------------------- # @@ -398,14 +344,7 @@ PREEXISTING_DIR_METHOD must be set to one of the following: # #----------------------------------------------------------------------- # -iselementof "${USE_CCPP}" valid_vals_USE_CCPP || { \ -valid_vals_USE_CCPP_str=$(printf "\"%s\" " "${valid_vals_USE_CCPP[@]}"); -print_err_msg_exit "${script_name}" "\ -The value specified for the USE_CCPP flag is not supported: - USE_CCPP = \"${USE_CCPP}\" -USE_CCPP must be set to one of the following: - $valid_vals_CCPP_str -"; } +check_var_valid_value "USE_CCPP" "valid_vals_USE_CCPP" # # Set USE_CCPP to either "TRUE" or "FALSE" so we don't have to consider # other valid values later on. @@ -426,21 +365,13 @@ fi # #----------------------------------------------------------------------- # -if [ "${USE_CCPP}" = "TRUE" ]; then - - if [ ! -z ${CCPP_PHYS_SUITE} ]; then - iselementof "${CCPP_PHYS_SUITE}" valid_vals_CCPP_PHYS_SUITE || { \ - valid_vals_CCPP_PHYS_SUITE_str=$(printf "\"%s\" " "${valid_vals_CCPP_PHYS_SUITE[@]}"); - print_err_msg_exit "${script_name}" "\ +if [ "${USE_CCPP}" = "TRUE" ] && [ ! -z ${CCPP_PHYS_SUITE} ]; then + err_msg="\ The CCPP physics suite specified in CCPP_PHYS_SUITE is not supported: - CCPP_PHYS_SUITE = \"${CCPP_PHYS_SUITE}\" -CCPP_PHYS_SUITE must be set to one of the following: - $valid_vals_CCPP_PHYS_SUITE_str - "; } - fi - + CCPP_PHYS_SUITE = \"${CCPP_PHYS_SUITE}\"" + check_var_valid_value \ + "CCPP_PHYS_SUITE" "valid_vals_CCPP_PHYS_SUITE" "${err_msg}" fi - # #----------------------------------------------------------------------- # @@ -609,15 +540,16 @@ NEMSfv3gfs_DIR="$SORCDIR/NEMSfv3gfs" # # Make sure that the NEMSfv3gfs_DIR directory exists. # -if [ ! -d "$NEMSfv3gfs_DIR" ]; then +if [ 0 = 1 ]; then +if [ ! 
-d "${NEMSfv3gfs_DIR}" ]; then print_err_msg_exit "${script_name}" "\ The NEMSfv3gfs directory specified by NEMSfv3gfs_DIR that should contain the FV3 source code does not exist: - NEMSfv3gfs_DIR = \"$NEMSfv3gfs_DIR\" + NEMSfv3gfs_DIR = \"${NEMSfv3gfs_DIR}\" Please clone the NEMSfv3gfs repository in this directory, build the FV3 executable, and then rerun the workflow." fi - +fi case $MACHINE in @@ -733,19 +665,12 @@ fi #----------------------------------------------------------------------- # if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - - iselementof "$RES" valid_vals_RES || { \ - valid_vals_RES_str=$(printf "\"%s\" " "${valid_vals_RES[@]}"); - print_err_msg_exit "${script_name}" "\ -Number of grid cells per tile (in each horizontal direction) specified in -RES is not supported: - RES = \"$RES\" -RES must be one of the following: - $valid_vals_RES_str -"; } - + err_msg="\ +The number of grid cells per tile in each horizontal direction specified +in RES is not supported: + RES = \"${RES}\"" + check_var_valid_value "RES" "valid_vals_RES" "${err_msg}" CRES="C${RES}" - fi # #----------------------------------------------------------------------- @@ -1023,15 +948,12 @@ fi # #----------------------------------------------------------------------- # -iselementof "$EXTRN_MDL_NAME_ICS" valid_vals_EXTRN_MDL_NAME_ICS || { \ -valid_vals_EXTRN_MDL_NAME_ICS_str=$(printf "\"%s\" " "${valid_vals_EXTRN_MDL_NAME_ICS[@]}"); -print_err_msg_exit "${script_name}" "\ +err_msg="\ The external model specified in EXTRN_MDL_NAME_ICS that provides initial conditions (ICs) and surface fields to the FV3SAR is not supported: - EXTRN_MDL_NAME_ICS = \"$EXTRN_MDL_NAME_ICS\" -EXTRN_MDL_NAME_ICS must be one of the following: - $valid_vals_EXTRN_MDL_NAME_ICS_str -"; } + EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\"" +check_var_valid_value \ + "EXTRN_MDL_NAME_ICS" "valid_vals_EXTRN_MDL_NAME_ICS" "${err_msg}" # #----------------------------------------------------------------------- # @@ -1039,15 +961,12 @@ EXTRN_MDL_NAME_ICS must be one of the following: # #----------------------------------------------------------------------- # -iselementof "$EXTRN_MDL_NAME_LBCS" valid_vals_EXTRN_MDL_NAME_LBCS || { \ -valid_vals_EXTRN_MDL_NAME_LBCS_str=$(printf "\"%s\" " "${valid_vals_EXTRN_MDL_NAME_LBCS[@]}"); -print_err_msg_exit "${script_name}" "\ -The external model specified in EXTRN_MDL_NAME_LBCS that provides later- -al boundary conditions (LBCs) to the FV3SAR is not supported: - EXTRN_MDL_NAME_LBCS = \"$EXTRN_MDL_NAME_LBCS\" -EXTRN_MDL_NAME_LBCS must be one of the following: - $valid_vals_EXTRN_MDL_NAME_LBCS_str -"; } +err_msg="\ +The external model specified in EXTRN_MDL_NAME_ICS that provides lateral +boundary conditions (LBCs) to the FV3SAR is not supported: + EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\"" +check_var_valid_value \ + "EXTRN_MDL_NAME_LBCS" "valid_vals_EXTRN_MDL_NAME_LBCS" "${err_msg}" # #----------------------------------------------------------------------- # @@ -1055,14 +974,15 @@ EXTRN_MDL_NAME_LBCS must be one of the following: # #----------------------------------------------------------------------- # -iselementof "${FV3GFS_FILE_FMT}" valid_vals_FV3GFS_FILE_FMT || { \ -valid_vals_FV3GFS_FILE_FMT_str=$(printf "\"%s\" " "${valid_vals_FV3GFS_FILE_FMT[@]}"); -print_err_msg_exit "${script_name}" "\ -The data type specified in FV3GFS_FILE_FMT is not supported: - FV3GFS_FILE_FMT = \"${FV3GFS_FILE_FMT}\" -FV3GFS_FILE_FMT must be one of the following: - $valid_vals_FV3GFS_FILE_FMT_str -"; } +if [ 
"${EXTRN_MDL_NAME_ICS}" = "FV3GFS" ] || \ + [ "${EXTRN_MDL_NAME_LBCS}" = "FV3GFS" ]; then + err_msg="\ +The file format for FV3GFS external model files specified in FV3GFS_- +FILE_FMT is not supported: + FV3GFS_FILE_FMT = \"${FV3GFS_FILE_FMT}\"" + check_var_valid_value \ + "FV3GFS_FILE_FMT" "valid_vals_FV3GFS_FILE_FMT" "${err_msg}" +fi # #----------------------------------------------------------------------- # @@ -1165,14 +1085,12 @@ nh4_T7=$(( $nh3_T7 + 1 )) # #----------------------------------------------------------------------- # -iselementof "${GRID_GEN_METHOD}" valid_vals_GRID_GEN_METHOD || { \ -valid_vals_GRID_GEN_METHOD_str=$(printf "\"%s\" " "${valid_vals_GRID_GEN_METHOD[@]}"); -print_err_msg_exit "${script_name}" "\ -The grid generation method specified in GRID_GEN_METHOD is not supported: - GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\" -GRID_GEN_METHOD must be one of the following: - $valid_vals_GRID_GEN_METHOD_str -"; } +err_msg="\ +The horizontal grid generation method specified in GRID_GEN_METHOD is +not supported: + GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\"" +check_var_valid_value \ + "GRID_GEN_METHOD" "valid_vals_GRID_GEN_METHOD" "${err_msg}" # #----------------------------------------------------------------------- # @@ -1204,14 +1122,7 @@ fi # #----------------------------------------------------------------------- # -iselementof "$QUILTING" valid_vals_QUILTING || { \ -valid_vals_QUILTING_str=$(printf "\"%s\" " "${valid_vals_QUILTING[@]}"); -print_err_msg_exit "${script_name}" "\ -Value specified in QUILTING is not supported: - QUILTING = \"$QUILTING\" -QUILTING must be set to one of the following: - $valid_vals_QUILTING_str -"; } +check_var_valid_value "QUILTING" "valid_vals_QUILTING" # # Set QUILTING to either "TRUE" or "FALSE" so we don't have to consider # other valid values later on. diff --git a/ush/source_funcs.sh b/ush/source_funcs.sh index 2771fa649..21cd34303 100644 --- a/ush/source_funcs.sh +++ b/ush/source_funcs.sh @@ -86,6 +86,15 @@ FUNCS_DIR=${FUNCS_DIR:-${USHDIR:-"."}} # #----------------------------------------------------------------------- # +# Source the file containing the function that checks the validity of a +# variable's value (given a set of valid values). +# +#----------------------------------------------------------------------- +# +. ${FUNCS_DIR}/check_var_valid_value.sh +# +#----------------------------------------------------------------------- +# # Source the file containing the function that processes a list of argu- # ments to a script or function, where the list is comprised of a set of # argument name-value pairs, e.g. arg1="value1", ... From 3948734cedd46becd2a4ccca9ef2181960cf7526 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 18 Oct 2019 14:07:34 -0600 Subject: [PATCH 003/203] Add back accidentally deleted file. --- tests/run_one_expt.sh | 397 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 397 insertions(+) create mode 100755 tests/run_one_expt.sh diff --git a/tests/run_one_expt.sh b/tests/run_one_expt.sh new file mode 100755 index 000000000..0dbefd75e --- /dev/null +++ b/tests/run_one_expt.sh @@ -0,0 +1,397 @@ +#!/bin/bash -l + +# +#----------------------------------------------------------------------- +# +# Set directories. +# +#----------------------------------------------------------------------- +# +basedir="$(pwd)/../.." +USHDIR="$basedir/regional_workflow/ush" +# +#----------------------------------------------------------------------- +# +# Source bash utility functions. 
+# +#----------------------------------------------------------------------- +# +. $USHDIR/source_funcs.sh +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. +# +#----------------------------------------------------------------------- +# +{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Set the script name and print out an informational message informing +# the user that we've entered this script. +# +#----------------------------------------------------------------------- +# +script_name=$( basename "${BASH_SOURCE[0]}" ) +# +#----------------------------------------------------------------------- +# +# Specify the set of valid argument names for this script/function. +# Then process the arguments provided to this script/function (which +# should consist of a set of name-value pairs of the form arg1="value1", +# etc). +# +#----------------------------------------------------------------------- +# +valid_args=( \ +"verbose" \ +"run_envir" \ +"predef_domain" \ +"grid_gen_method" \ +"use_CCPP" \ +"phys_suite" \ +"cdate" \ +"fcst_len_hrs" \ +"quilting" \ +) +process_args valid_args "$@" + +# If verbose is set to TRUE, print out what each valid argument has been +# set to. +if [ "$verbose" = "TRUE" ]; then + num_valid_args="${#valid_args[@]}" + print_info_msg "\n\ +The arguments to script/function \"${script_name}\" have been set as +follows: +" + for (( i=0; i<$num_valid_args; i++ )); do + line=$( declare -p "${valid_args[$i]}" ) + printf " $line\n" + done +fi +# +#----------------------------------------------------------------------- +# +# Set defaults for various +# +#----------------------------------------------------------------------- +# +run_envir=${run_envir:-"community"} +machine=${machine:-"hera"} +account=${account:-"gsd-fv3"} +queue_default=${queue_default:-"batch"} +queue_hpss=${queue_hpss:-"service"} +queue_fcst=${queue_hpss:-"batch"} + +basedir=${basedir:-"/path/to/your/workflow/base/directory"} +expt_basedir=${expt_basedir:-"$basedir/expt_dirs"} +expt_subdir=${expt_subdir:-""} + +predef_domain=${predef_domain:-"GSD_HRRR25km"} +grid_gen_method=${grid_gen_method:-"JPgrid"} +use_CCPP=${use_CCPP:-"TRUE"} +phys_suite=${phys_suite:-"GFS"} +quilting=${quilting:-"TRUE"} + +extrn_mdl_name_ics=${extrn_mdl_name_ics:-"FV3GFS"} +extrn_mdl_name_lbcs=${extrn_mdl_name_ics:-"FV3GFS"} + +run_task_make_grid="${run_task_make_grid:-"TRUE"}" +run_task_make_orog="${run_task_make_orog:-"TRUE"}" +run_task_make_sfc_climo="${run_task_make_sfc_climo:-"TRUE"}" + +date_first_cycl=${date_first_cycl:-""} +date_last_cycl=${date_last_cycl:-""} +cycl_hrs=${cycl_hrs:-""} +fcst_len_hrs=${fcst_len_hrs:-} +lbc_update_intvl_hrs=${lbc_update_intvl_hrs:-"3"} + +if [ -z "${basedir}" ]; then + print_err_msg_exit "${script_name}" " +A base directory must be specified. +" +# +#----------------------------------------------------------------------- +# +# Check arguments. +# +#----------------------------------------------------------------------- +# +if [ 0 = 1 ]; then + +if [ "$#" -ne 7 ]; then + + printf "\ + +Script \"$0\": Incorrect number of arguments specified. +Usage: + + $0 predef_domain grid_gen_method use_CCPP phys_suite CDATE fcst_len_hrs quilting + +where the arguments are defined as follows: + + predef_domain: + The predefined domain to use. 
+ + grid_gen_method: + The horizontal grid generation method to use. + + use_CCPP + Whether or not to run a CCPP-enabled verson of the FV3SAR. + + phys_suite + The physics suite to use. + + CDATE + The starting date (and hour) of the forecast. + + fsct_len_hrs + The length of the forecast, in hours. + + quilting + Whether or not to use the write-component to write output files. + +These are described in more detail in the documentation of the FV3SAR +workflow. + +Exiting script with nonzero exit code.\n" + + exit 1 + +fi +fi +# +#----------------------------------------------------------------------- +# +# Set forecast parameters. +# +#----------------------------------------------------------------------- +# +predef_domain=${1:-} +grid_gen_method=${2:-} +use_CCPP=${3:-} +phys_suite=${4:-} +CDATE=${5:-} +fcst_len_hrs=${6:-} +quilting="${7:-}" + +dot_quilting=".${quilting}." + +print_info_msg "\ +User-specified forecast parameters: + + predef_domain = \"${predef_domain}\" + grid_gen_method = \"${grid_gen_method}\" + use_CCPP = \"${use_CCPP}\" + phys_suite = \"${phys_suite}\" + CDATE = \"${CDATE}\" + fcst_len_hrs = \"${fcst_len_hrs}\" + quilting = \"${quilting}\"" +# +#----------------------------------------------------------------------- +# +# Construct new variables based on input arguments. +# +#----------------------------------------------------------------------- +# +CONFIG_FN="config.sh" +CONFIG_FP="${USHDIR}/${CONFIG_FN}" + +EXPT_NAME="${predef_domain}_${grid_gen_method}_CCPP${use_CCPP}_${phys_suite}phys_${CDATE}_FCST${fcst_len_hrs}hrs_QUILT$quilting" +#TEST_DATE=$( date "+%Y%m%d-%H_%M" ) +TEST_DATE=$( date "+%Y%m%d" ) +RUNDIR_BASE="$BASEDIR/run_dirs" +RUN_SUBDIR="test_date_${TEST_DATE}/$EXPT_NAME" +TMPDIR="$BASEDIR/work_dirs" + +print_info_msg "\ +Variables constructed from user-specified forecast parameters: + + BASEDIR = \"${BASEDIR}\" + USHDIR = \"${USHDIR}\" + CONFIG_FN = \"${CONFIG_FN}\" + CONFIG_FP = \"${CONFIG_FP}\" + + EXPT_NAME = \"${EXPT_NAME}\" + TEST_DATE = \"${TEST_DATE}\" + RUNDIR_BASE = \"${RUNDIR_BASE}\" + RUN_SUBDIR = \"${RUN_SUBDIR}\"" +# +#----------------------------------------------------------------------- +# +# The GSD physics suite cannot be run without CCPP. Check for this and +# issue an error message if found. +# +#----------------------------------------------------------------------- +# +if [ $use_CCPP = "false" ] && [ $phys_suite = "GSD" ]; then + + print_err_msg_exit "\ +The GSD physics suite cannot be run without CCPP: + use_CCPP = \"${use_CCPP}\" + phys_suite = \"${phys_suite}\" +Not generating a workflow for this set of experiment parameters." + +fi +# +#----------------------------------------------------------------------- +# +# Use a heredoc to construct the configuration file for the forecast. +# Note that whatever is not specified in this file is obtained from +# config_defaults.sh. +# +#----------------------------------------------------------------------- +# +{ cat << EOM > $CONFIG_FP +# +#----------------------------------------------------------------------- +# +# This is the local workflow configuration file. It is not tracked by +# the git repository. 
+# +#----------------------------------------------------------------------- +# +RUN_ENVIR="${run_envir}" +MACHINE="${machine}" +ACCOUNT="${account}" +QUEUE_DEFAULT="${queue_default}" +QUEUE_HPSS="${queue_hpss}" +QUEUE_FCST="${queue_fcst}" +# +BASEDIR="${basedir}" +EXPT_BASEDIR="${expt_basedir}" +EXPT_SUBDIR="${expt_subdir}" +# +DATE_FIRST_CYCL="${date_first_cycl}" +DATE_LAST_CYCL="${date_last_cycl}" +CYCL_HRS="${cycl_hrs}" +FCST_LEN_HRS="${fcst_len_hrs}" +LBC_UPDATE_INTVL_HRS="${lbc_update_intvl_hrs}" + +RUNDIR_BASE="$RUNDIR_BASE" +RUN_SUBDIR="$RUN_SUBDIR" +# +CDATE="$CDATE" +fcst_len_hrs="$fcst_len_hrs" +BC_update_intvl_hrs="6" +# +run_title="" +# +predef_domain="$predef_domain" +# +grid_gen_method="$grid_gen_method" +# +preexisting_dir_method="delete" +quilting="$dot_quilting" +# +use_CCPP="$use_CCPP" +phys_suite="$phys_suite" +EOM +} +# +#----------------------------------------------------------------------- +# +# Generate workflow XML for the specified experiment configuration and +# save the output in a log file for debugging. Then move the log file +# to the run directory. +# +#----------------------------------------------------------------------- +# +cd_vrfy $USHDIR +LOG_GEN_WFLOW_FP="$USHDIR/log.generate_FV3SAR_wflow" +./generate_FV3SAR_wflow.sh > "$LOG_GEN_WFLOW_FP" 2>&1 +if [ "${PIPESTATUS[0]}" -ne 0 ]; then + print_err_msg_exit "\ +Workflow generation script returned with a nonzero exit status. +Check the log file located at: + LOG_GEN_WFLOW_FP = \"$LOG_GEN_WFLOW_FP\"" +fi + +RUNDIR="$RUNDIR_BASE/$RUN_SUBDIR" +mv_vrfy log.generate_FV3SAR_wflow $RUNDIR +# +#----------------------------------------------------------------------- +# +# Create a script in the run directory that can be used to (re)launch +# the workflow and report on its status. This script saves its output +# to a log file (in the run directory) for debugging purposes and to al- +# low the user to check on the status of the workflow. +# +#----------------------------------------------------------------------- +# +cd_vrfy $RUNDIR + +XML_BASENAME="FV3SAR_wflow" +RELAUNCH_SCR="relaunch_wflow.sh" + +{ cat << EOM > ${RELAUNCH_SCR} +#!/bin/sh -l + +module load rocoto/1.3.0 +cd $RUNDIR +{ +rocotorun -w ${XML_BASENAME}.xml -d ${XML_BASENAME}.db -v 10 ; +echo ; +rocotostat -w ${XML_BASENAME}.xml -d ${XML_BASENAME}.db -v 10 ; +} >> log.rocotostat 2>&1 +EOM +} +# +# Make the relaunch script executable. +# +chmod u+x $RELAUNCH_SCR +# +#----------------------------------------------------------------------- +# +# Add a line to the user's cron table to call the (re)launch script at +# some frequency (e.g. every 5 minutes). +# +#----------------------------------------------------------------------- +# +CRONTAB_ORIG="$(pwd)/crontab.orig" +print_info_msg "\ +Copying contents of user cron table to backup file: + CRONTAB_ORIG = \"$CRONTAB_ORIG\"" +crontab -l > $CRONTAB_ORIG + +crontab_line="*/5 * * * * cd $RUNDIR && ./$RELAUNCH_SCR" +# +# Below, we use "grep" to determine whether the above crontab line is +# already present in the cron table. For that purpose, we need to es- +# cape the asterisks in the crontab line with backslashes. Do this +# next. +# +crontab_line_esc_astr=$( echo "$crontab_line" | sed -r -e "s![*]!\\\\*!g" ) +grep_output=$( crontab -l | grep "$crontab_line_esc_astr" ) +exit_status=$? 
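+#
+# The sed command above turns, e.g., "*/5 * * * * ..." into
+# "\*/5 \* \* \* \* ..." so that grep treats the asterisks literally.  A
+# grep exit status of 0 below means the line is already in the cron table.
+#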
+ +if [ "$exit_status" -eq 0 ]; then + + print_info_msg "\ +The following line already exists in the cron table and thus will not be +added: + crontab_line = \"$crontab_line\"" + +else + + print_info_msg "\ +Adding the following line to the cron table in order to automatically +resubmit FV3SAR workflow: + crontab_line = \"$crontab_line\"" + + (crontab -l 2>/dev/null; echo "$crontab_line") | crontab - + +fi + +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. +# +#----------------------------------------------------------------------- +# +{ restore_shell_opts; } > /dev/null 2>&1 + From fe35ea81d22c046691a67c1a0f33f1a3a6c37d72 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Mon, 21 Oct 2019 12:04:53 -0600 Subject: [PATCH 004/203] Change permissions on namelist template file to make it non-executable. --- ush/templates/input_ccpp_gfsextern_gfsphys.nml | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100755 => 100644 ush/templates/input_ccpp_gfsextern_gfsphys.nml diff --git a/ush/templates/input_ccpp_gfsextern_gfsphys.nml b/ush/templates/input_ccpp_gfsextern_gfsphys.nml old mode 100755 new mode 100644 From 0a035e0a9f10a39a3ebd5d99f8b091227b4f3fe5 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 22 Oct 2019 12:28:14 -0600 Subject: [PATCH 005/203] Update variable names to all caps, etc. --- tests/param_arrays.sample.sh | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/param_arrays.sample.sh b/tests/param_arrays.sample.sh index e1d43b347..ca1f52f37 100644 --- a/tests/param_arrays.sample.sh +++ b/tests/param_arrays.sample.sh @@ -1,7 +1,7 @@ -all_vals_predef_domain=( "HRRR" ) -all_vals_grid_gen_method=( "JPgrid" ) -all_vals_CCPP=( "true" ) -all_vals_phys_suite=( "GSD" ) +all_vals_PREDEF_GRID_NAME=( "GSD_HRRR25km" ) +all_vals_GRID_GEN_METHOD=( "JPgrid" ) +all_vals_USE_CCPP=( "true" ) +all_vals_CCPP_PHYS_SUITE=( "GSD" ) all_vals_CDATE=( "2017090700" ) -all_vals_fcst_len_hrs=( "6" ) -all_vals_quilting=( "true" ) +all_vals_FCST_LEN_HRS=( "6" ) +all_vals_QUILTING=( "true" ) From 40cc506a296c58deef6f02754799c1d415d2e58c Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 22 Oct 2019 12:31:33 -0600 Subject: [PATCH 006/203] Rename or remove namelist template files. --- ...ys.nml => input_ccpp_GFSphys_GFSextrn.nml} | 0 ...ern_gsdphys.nml => input_ccpp_GSDphys.nml} | 2 +- .../input_ccpp_raphrrrextern_gsdphys.nml | 255 ------------------ 3 files changed, 1 insertion(+), 256 deletions(-) rename ush/templates/{input_ccpp_gfsextern_gfsphys.nml => input_ccpp_GFSphys_GFSextrn.nml} (100%) rename ush/templates/{input_ccpp_gfsextern_gsdphys.nml => input_ccpp_GSDphys.nml} (99%) delete mode 100644 ush/templates/input_ccpp_raphrrrextern_gsdphys.nml diff --git a/ush/templates/input_ccpp_gfsextern_gfsphys.nml b/ush/templates/input_ccpp_GFSphys_GFSextrn.nml similarity index 100% rename from ush/templates/input_ccpp_gfsextern_gfsphys.nml rename to ush/templates/input_ccpp_GFSphys_GFSextrn.nml diff --git a/ush/templates/input_ccpp_gfsextern_gsdphys.nml b/ush/templates/input_ccpp_GSDphys.nml similarity index 99% rename from ush/templates/input_ccpp_gfsextern_gsdphys.nml rename to ush/templates/input_ccpp_GSDphys.nml index 1a55e0736..404cd2efe 100644 --- a/ush/templates/input_ccpp_gfsextern_gsdphys.nml +++ b/ush/templates/input_ccpp_GSDphys.nml @@ -195,7 +195,7 @@ do_skeb = .F. do_sfcperts = .F. 
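+! lsoil below is left as a placeholder; the workflow generation script
+! fills it in (4 for GSMGFS/FV3GFS ICs/LBCs, 9 for RAPX/HRRRX ICs/LBCs).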
lsm = 3 - lsoil = 4 + lsoil = lsoil_lsm = 9 icloud_bl = 1 bl_mynn_tkeadvect = .true. diff --git a/ush/templates/input_ccpp_raphrrrextern_gsdphys.nml b/ush/templates/input_ccpp_raphrrrextern_gsdphys.nml deleted file mode 100644 index 6e5794acd..000000000 --- a/ush/templates/input_ccpp_raphrrrextern_gsdphys.nml +++ /dev/null @@ -1,255 +0,0 @@ - &amip_interp_nml - interp_oi_sst = .true. - use_ncep_sst = .true. - use_ncep_ice = .false. - no_anom_sst = .false. - data_set = 'reynolds_oi', - date_out_of_range = 'climo', -/ - - &atmos_model_nml - blocksize = - chksum_debug = .false. - dycore_only = .false. - fdiag = 1 - ccpp_suite = 'FV3_GSD_v0' -/ - -&diag_manager_nml - prepend_date = .F. -/ - - &fms_io_nml - checksum_required = .false. - max_files_r = 100, - max_files_w = 100, -/ - - &fms_nml - clock_grain = 'ROUTINE', - domains_stack_size = 3000000, - print_memory_usage = .false. -/ - - &fv_grid_nml - grid_file = 'INPUT/grid_spec.nc' -/ - - &fv_core_nml - layout = , - io_layout = 1,1 - npx = - npy = - ntiles = 1, - npz = 64 - !grid_type = -1 - make_nh = .T. - fv_debug = .T. - range_warn = .T. - reset_eta = .F. - n_sponge = 24 - nudge_qv = .F. - tau = 5. - rf_cutoff = 20.e2 - d2_bg_k1 = 0.20 - d2_bg_k2 = 0.04 - kord_tm = -11 - kord_mt = 11 - kord_wz = 11 - kord_tr = 11 - hydrostatic = .F. - phys_hydrostatic = .F. - use_hydro_pressure = .F. - beta = 0. - a_imp = 1. - p_fac = 0.1 - k_split = 4 - n_split = 5 - nwat = 6 - na_init = 1 - d_ext = 0.0 - dnats = 0 - fv_sg_adj = 300 - d2_bg = 0. - nord = 2 - dddmp = 0.1 - d4_bg = 0.12 - vtdm4 = 0.02 - ke_bg = 0. - do_vort_damp = .true. - external_ic = .T. - external_eta = .T. - gfs_phil = .false. - nggps_ic = .T. - mountain = .F. - ncep_ic = .F. - d_con = 1.0 - delt_max = 0.002 - hord_mt = 6 - hord_vt = 6 - hord_tm = 6 - hord_dp = -6 - hord_tr = 8 - adjust_dry_mass = .F. - consv_te = 0. - do_sat_adj = .F. - consv_am = .F. - fill = .T. - dwind_2d = .F. - print_freq = 6 - warm_start = .F. - no_dycore = .false. - z_tracer = .T. - read_increment = .F. - res_latlon_dynamics = "fv3_increment.nc" - - do_schmidt = .true. - target_lat = - target_lon = - stretch_fac = -!! nord_zs_filter = 4 - n_zs_filter = 0 - regional = .true. - bc_update_interval = - - full_zs_filter = .F. !unreleased feature - - nord_zs_filter = 4 - n_zs_filter = 0 ! safety -/ - -&surf_map_nml - zero_ocean = .F. - cd4 = 0.12 - cd2 = -1 - n_del2_strong = 0 - n_del2_weak = 2 - n_del4 = 1 - max_slope = 0.4 - peak_fac = 1. -/ - - &external_ic_nml - filtered_terrain = .true. - levp = 65 - gfs_dwinds = .true. - checker_tr = .F. - nt_checker = 0 -/ - - &gfs_physics_nml - fhzero = 1. - h2o_phys = .true. - ldiag3d = .false. - fhcyc = 0. - nst_anl = .true. - use_ufo = .true. - pre_rad = .false. - ncld = 5 - imp_physics = 8 - ttendlim = 0.005 - !ttendlim = 0.008 - ltaerosol = .T. - lradar = .T. - pdfcld = .false. - fhswr = 3600. - fhlwr = 3600. - ialb = 1 - iems = 1 - iaer = 111 - ico2 = 2 - isubc_sw = 2 - isubc_lw = 2 - isol = 2 - lwhtr = .true. - swhtr = .true. - cnvgwd = .true. - shal_cnv = .true. - cal_pre = .false. - redrag = .true. - dspheat = .true. - hybedmf = .false. - satmedmf = .false. - lheatstrg = .F. - do_mynnedmf = .true. - do_mynnsfclay = .false. - random_clds = .false. - trans_trac = .true. - cnvcld = .true. - imfshalcnv = 3 - imfdeepcnv = 3 - cdmbgwd = 3.5,0.25 - prslrd0 = 0. - ivegsrc = 1 - isot = 1 - debug = .false. - oz_phys = .false. - oz_phys_2015 = .true. - nstf_name = 2,1,1,0,5 - cplflx = .F. - iau_delthrs = 6 - iaufhrs = 30 - iau_inc_files = '' - do_sppt = .F. - do_shum = .F. 
- do_skeb = .F. - do_sfcperts = .F. - lsm = 3 - lsoil = 9 - lsoil_lsm = 9 - icloud_bl = 1 - bl_mynn_tkeadvect = .true. - bl_mynn_edmf = 1 - bl_mynn_edmf_mom = 1 -/ - - &interpolator_nml - interp_method = 'conserve_great_circle' -/ - -&namsfc - FNGLAC = "global_glacier.2x2.grb", - FNMXIC = "global_maxice.2x2.grb", - FNTSFC = "RTGSST.1982.2012.monthly.clim.grb", - FNSNOC = "global_snoclim.1.875.grb", - FNZORC = "igbp" -! FNZORC = "global_zorclim.1x1.grb", - FNALBC = "global_snowfree_albedo.bosu.t126.384.190.rg.grb", - FNALBC2 = "global_albedo4.1x1.grb", - FNAISC = "CFSR.SEAICE.1982.2012.monthly.clim.grb", - FNTG3C = "global_tg3clim.2.6x1.5.grb", - FNVEGC = "global_vegfrac.0.144.decpercent.grb", - FNVETC = "global_vegtype.igbp.t126.384.190.rg.grb", - FNSOTC = "global_soiltype.statsgo.t126.384.190.rg.grb", - FNSMCC = "global_soilmgldas.t126.384.190.grb", - FNMSKH = "seaice_newland.grb", - FNTSFA = "", - FNACNA = "", - FNSNOA = "", - FNVMNC = "global_shdmin.0.144x0.144.grb", - FNVMXC = "global_shdmax.0.144x0.144.grb", - FNSLPC = "global_slope.1x1.grb", - FNABSC = "global_mxsnoalb.uariz.t126.384.190.rg.grb", - LDEBUG =.false., - FSMCL(2) = 99999 - FSMCL(3) = 99999 - FSMCL(4) = 99999 - FTSFS = 90 - FAISS = 99999 - FSNOL = 99999 - FSICL = 99999 - FTSFL = 99999, - FAISL = 99999, - FVETL = 99999, - FSOTL = 99999, - FvmnL = 99999, - FvmxL = 99999, - FSLPL = 99999, - FABSL = 99999, - FSNOS = 99999, - FSICS = 99999, -/ -&nam_stochy -/ -&nam_sfcperts -/ From d019b4d706eb0889921094e0b487934559847727 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 22 Oct 2019 13:03:27 -0600 Subject: [PATCH 007/203] For clarity, place variables that contain at least one underscore in curly braces. --- scripts/exregional_run_fv3.sh | 80 +++++++++++++++++------------------ 1 file changed, 40 insertions(+), 40 deletions(-) diff --git a/scripts/exregional_run_fv3.sh b/scripts/exregional_run_fv3.sh index 6cdb62e1d..02e971142 100755 --- a/scripts/exregional_run_fv3.sh +++ b/scripts/exregional_run_fv3.sh @@ -98,7 +98,7 @@ case $MACHINE in ulimit -s unlimited ulimit -a - APRUN="mpirun -l -np $PE_MEMBER01" + APRUN="mpirun -l -np ${PE_MEMBER01}" ;; # "THEIA") @@ -194,7 +194,7 @@ case $MACHINE in ulimit -s unlimited ulimit -a - APRUN="srun -n $PE_MEMBER01" + APRUN="srun -n ${PE_MEMBER01}" ;; # esac @@ -465,20 +465,20 @@ MODEL_CONFIG_FP="${CYCLE_DIR}/${MODEL_CONFIG_FN}" print_info_msg_verbose "\ Setting parameters in file: - MODEL_CONFIG_FP = \"$MODEL_CONFIG_FP\"" + MODEL_CONFIG_FP = \"${MODEL_CONFIG_FP}\"" dot_quilting_dot="."${QUILTING,,}"." 
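+# Note: "${QUILTING,,}" uses bash's case-conversion expansion to lower-
+# case the value of QUILTING, so e.g. "TRUE" becomes ".true." once the
+# surrounding dots are added.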
-set_file_param "$MODEL_CONFIG_FP" "PE_MEMBER01" "$PE_MEMBER01" -set_file_param "$MODEL_CONFIG_FP" "dt_atmos" "$dt_atmos" -set_file_param "$MODEL_CONFIG_FP" "start_year" "$YYYY" -set_file_param "$MODEL_CONFIG_FP" "start_month" "$MM" -set_file_param "$MODEL_CONFIG_FP" "start_day" "$DD" -set_file_param "$MODEL_CONFIG_FP" "start_hour" "$HH" -set_file_param "$MODEL_CONFIG_FP" "nhours_fcst" "${FCST_LEN_HRS}" -set_file_param "$MODEL_CONFIG_FP" "ncores_per_node" "$ncores_per_node" -set_file_param "$MODEL_CONFIG_FP" "quilting" "${dot_quilting_dot}" -set_file_param "$MODEL_CONFIG_FP" "print_esmf" "$print_esmf" +set_file_param "${MODEL_CONFIG_FP}" "PE_MEMBER01" "${PE_MEMBER01}" +set_file_param "${MODEL_CONFIG_FP}" "dt_atmos" "${dt_atmos}" +set_file_param "${MODEL_CONFIG_FP}" "start_year" "$YYYY" +set_file_param "${MODEL_CONFIG_FP}" "start_month" "$MM" +set_file_param "${MODEL_CONFIG_FP}" "start_day" "$DD" +set_file_param "${MODEL_CONFIG_FP}" "start_hour" "$HH" +set_file_param "${MODEL_CONFIG_FP}" "nhours_fcst" "${FCST_LEN_HRS}" +set_file_param "${MODEL_CONFIG_FP}" "ncores_per_node" "${ncores_per_node}" +set_file_param "${MODEL_CONFIG_FP}" "quilting" "${dot_quilting_dot}" +set_file_param "${MODEL_CONFIG_FP}" "print_esmf" "$print_esmf" # #----------------------------------------------------------------------- # @@ -496,29 +496,29 @@ set_file_param "$MODEL_CONFIG_FP" "print_esmf" "$print_esmf" # if [ "$QUILTING" = "TRUE" ]; then - cat $WRTCMP_PARAMS_TEMPLATE_FP >> $MODEL_CONFIG_FP + cat ${WRTCMP_PARAMS_TEMPLATE_FP} >> ${MODEL_CONFIG_FP} - set_file_param "$MODEL_CONFIG_FP" "write_groups" "$WRTCMP_write_groups" - set_file_param "$MODEL_CONFIG_FP" "write_tasks_per_group" "$WRTCMP_write_tasks_per_group" + set_file_param "${MODEL_CONFIG_FP}" "write_groups" "$WRTCMP_write_groups" + set_file_param "${MODEL_CONFIG_FP}" "write_tasks_per_group" "$WRTCMP_write_tasks_per_group" - set_file_param "$MODEL_CONFIG_FP" "output_grid" "\'$WRTCMP_output_grid\'" - set_file_param "$MODEL_CONFIG_FP" "cen_lon" "$WRTCMP_cen_lon" - set_file_param "$MODEL_CONFIG_FP" "cen_lat" "$WRTCMP_cen_lat" - set_file_param "$MODEL_CONFIG_FP" "lon1" "$WRTCMP_lon_lwr_left" - set_file_param "$MODEL_CONFIG_FP" "lat1" "$WRTCMP_lat_lwr_left" + set_file_param "${MODEL_CONFIG_FP}" "output_grid" "\'$WRTCMP_output_grid\'" + set_file_param "${MODEL_CONFIG_FP}" "cen_lon" "$WRTCMP_cen_lon" + set_file_param "${MODEL_CONFIG_FP}" "cen_lat" "$WRTCMP_cen_lat" + set_file_param "${MODEL_CONFIG_FP}" "lon1" "$WRTCMP_lon_lwr_left" + set_file_param "${MODEL_CONFIG_FP}" "lat1" "$WRTCMP_lat_lwr_left" if [ "${WRTCMP_output_grid}" = "rotated_latlon" ]; then - set_file_param "$MODEL_CONFIG_FP" "lon2" "$WRTCMP_lon_upr_rght" - set_file_param "$MODEL_CONFIG_FP" "lat2" "$WRTCMP_lat_upr_rght" - set_file_param "$MODEL_CONFIG_FP" "dlon" "$WRTCMP_dlon" - set_file_param "$MODEL_CONFIG_FP" "dlat" "$WRTCMP_dlat" + set_file_param "${MODEL_CONFIG_FP}" "lon2" "$WRTCMP_lon_upr_rght" + set_file_param "${MODEL_CONFIG_FP}" "lat2" "$WRTCMP_lat_upr_rght" + set_file_param "${MODEL_CONFIG_FP}" "dlon" "$WRTCMP_dlon" + set_file_param "${MODEL_CONFIG_FP}" "dlat" "$WRTCMP_dlat" elif [ "${WRTCMP_output_grid}" = "lambert_conformal" ]; then - set_file_param "$MODEL_CONFIG_FP" "stdlat1" "$WRTCMP_stdlat1" - set_file_param "$MODEL_CONFIG_FP" "stdlat2" "$WRTCMP_stdlat2" - set_file_param "$MODEL_CONFIG_FP" "nx" "$WRTCMP_nx" - set_file_param "$MODEL_CONFIG_FP" "ny" "$WRTCMP_ny" - set_file_param "$MODEL_CONFIG_FP" "dx" "$WRTCMP_dx" - set_file_param "$MODEL_CONFIG_FP" "dy" "$WRTCMP_dy" + set_file_param 
"${MODEL_CONFIG_FP}" "stdlat1" "$WRTCMP_stdlat1" + set_file_param "${MODEL_CONFIG_FP}" "stdlat2" "$WRTCMP_stdlat2" + set_file_param "${MODEL_CONFIG_FP}" "nx" "$WRTCMP_nx" + set_file_param "${MODEL_CONFIG_FP}" "ny" "$WRTCMP_ny" + set_file_param "${MODEL_CONFIG_FP}" "dx" "$WRTCMP_dx" + set_file_param "${MODEL_CONFIG_FP}" "dy" "$WRTCMP_dy" fi fi @@ -534,14 +534,14 @@ DIAG_TABLE_FP="${CYCLE_DIR}/${DIAG_TABLE_FN}" print_info_msg_verbose "\ Setting parameters in file: - DIAG_TABLE_FP = \"$DIAG_TABLE_FP\"" + DIAG_TABLE_FP = \"${DIAG_TABLE_FP}\"" -set_file_param "$DIAG_TABLE_FP" "CRES" "$CRES" -set_file_param "$DIAG_TABLE_FP" "YYYY" "$YYYY" -set_file_param "$DIAG_TABLE_FP" "MM" "$MM" -set_file_param "$DIAG_TABLE_FP" "DD" "$DD" -set_file_param "$DIAG_TABLE_FP" "HH" "$HH" -set_file_param "$DIAG_TABLE_FP" "YYYYMMDD" "$YYYYMMDD" +set_file_param "${DIAG_TABLE_FP}" "CRES" "$CRES" +set_file_param "${DIAG_TABLE_FP}" "YYYY" "$YYYY" +set_file_param "${DIAG_TABLE_FP}" "MM" "$MM" +set_file_param "${DIAG_TABLE_FP}" "DD" "$DD" +set_file_param "${DIAG_TABLE_FP}" "HH" "$HH" +set_file_param "${DIAG_TABLE_FP}" "YYYYMMDD" "$YYYYMMDD" # #----------------------------------------------------------------------- # @@ -550,9 +550,9 @@ set_file_param "$DIAG_TABLE_FP" "YYYYMMDD" "$YYYYMMDD" #----------------------------------------------------------------------- # if [ "${USE_CCPP}" = "TRUE" ]; then - FV3SAR_EXEC="$NEMSfv3gfs_DIR/tests/fv3.exe" + FV3SAR_EXEC="${NEMSfv3gfs_DIR}/tests/fv3.exe" else - FV3SAR_EXEC="$NEMSfv3gfs_DIR/tests/fv3_32bit.exe" + FV3SAR_EXEC="${NEMSfv3gfs_DIR}/tests/fv3_32bit.exe" fi if [ -f $FV3SAR_EXEC ]; then From 3065fd18bcb0cab917f5bf43a7244fb189cccbe9 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 22 Oct 2019 13:19:33 -0600 Subject: [PATCH 008/203] Changes to fix bugs in which combinations of physcis suite and external model ICs/LBCs can be used. Details below. (1) For GSD physics, combine namelist template files for GSFGFS/FV3GFS ICs/LBCs vs. for RAPX/HRRRX ICs/LBCs into one template file with a placeholder for lsoil. Set this placeholder value to 4 or 9 according to the external models specified for ICs/LBCs. (2) Do not allow GFS physics to be used with external ICs/LBCs models other than GSMGFS and FV3GFS. (3) For clarity, change variable names and names of FV3 namelist template files. (4) For clarity, place shell variables containing one or more underscores in curly braces. --- Externals.cfg | 1 + ush/config_defaults.sh | 22 +++--- ush/generate_FV3SAR_wflow.sh | 148 ++++++++++++++++++++--------------- ush/set_file_param.sh | 16 ++-- ush/setup.sh | 48 +++++++----- 5 files changed, 132 insertions(+), 103 deletions(-) diff --git a/Externals.cfg b/Externals.cfg index c999e3591..512878ef1 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -10,6 +10,7 @@ branch = feature/chgres_grib2 protocol = git repo_url = https://github.com/NCAR/UFS_UTILS local_path = sorc/UFS_UTILS_chgres_grib2 +hash = cd0af74 required = True [NEMSfv3gfs] diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index 11e9120ac..b7be3fb0b 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -99,17 +99,14 @@ PTMP="/path/to/temporary/directory/ptmp" # FV3_NML_FN: # Name of file containing the FV3SAR namelist settings. # -# FV3_NML_CCPP_GFSEXTERN_GFSPHYS_FN: -# Name of file containing the FV3SAR namelist settings for a CCPP- -# enabled forecast that uses GFS external model data and GFS physics. 
+# FV3_NML_CCPP_GFSPHYS_GFSEXTRN_FN: +# Name of file containing the FV3SAR namelist settings for a CCPP-ena- +# bled forecast that uses GFS external model data for the initial condi- +# tions (ICs) and GFS physics. # -# FV3_NML_CCPP_GFSEXTERN_GSDPHYS_FN: -# Name of file containing the FV3SAR namelist settings for a CCPP- -# enabled forecast that uses GFS external model data and GSD physics. -# -# FV3_NML_CCPP_RAPHRRREXTERN_GSDPHYS_FN: -# Name of file containing the FV3SAR namelist settings for a CCPP- -# enabled forecast that uses RAP or HRRR external model data and GSD physics. +# FV3_NML_CCPP_GSDPHYS_FN: +# Name of file containing the FV3SAR namelist settings for a CCPP-ena- +# bled forecast that uses GSD physics. # # DIAG_TABLE_FN: # Name of file that specifies the fields that the FV3SAR will output. @@ -170,9 +167,8 @@ PTMP="/path/to/temporary/directory/ptmp" # RGNL_GRID_NML_FN="regional_grid.nml" FV3_NML_FN="input.nml" -FV3_NML_CCPP_GFSEXTERN_GFSPHYS_FN="input_ccpp_gfsextern_gfsphys.nml" -FV3_NML_CCPP_GFSEXTERN_GSDPHYS_FN="input_ccpp_gfsextern_gsdphys.nml" -FV3_NML_CCPP_RAPHRRREXTERN_GSDPHYS_FN="input_ccpp_raphrrrextern_gsdphys.nml" +FV3_NML_CCPP_GFSPHYS_GFSEXTRN_FN="input_ccpp_GFSphys_GFSextrn.nml" +FV3_NML_CCPP_GSDPHYS_FN="input_ccpp_GSDphys.nml" DIAG_TABLE_FN="diag_table" DIAG_TABLE_CCPP_GSD_FN="diag_table_ccpp_gsd" FIELD_TABLE_FN="field_table" diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index 53682600b..e99120bc7 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -23,7 +23,7 @@ ushdir=$(pwd) # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u +x; } > /dev/null 2>&1 +{ save_shell_opts; set -u -x; } > /dev/null 2>&1 script_name=$( basename "${BASH_SOURCE[0]}" ) @@ -49,8 +49,8 @@ script_name=$( basename "${BASH_SOURCE[0]}" ) # #----------------------------------------------------------------------- # -TEMPLATE_XML_FP="$TEMPLATE_DIR/$WFLOW_XML_FN" -WFLOW_XML_FP="$EXPTDIR/$WFLOW_XML_FN" +TEMPLATE_XML_FP="${TEMPLATE_DIR}/${WFLOW_XML_FN}" +WFLOW_XML_FP="$EXPTDIR/${WFLOW_XML_FN}" # #----------------------------------------------------------------------- # @@ -58,7 +58,7 @@ WFLOW_XML_FP="$EXPTDIR/$WFLOW_XML_FN" # #----------------------------------------------------------------------- # -cp_vrfy $TEMPLATE_XML_FP $WFLOW_XML_FP +cp_vrfy ${TEMPLATE_XML_FP} ${WFLOW_XML_FP} # #----------------------------------------------------------------------- # @@ -94,32 +94,32 @@ else CYCLE_DIR="$EXPTDIR/${CDATE_generic}" fi -set_file_param "$WFLOW_XML_FP" "SCRIPT_VAR_DEFNS_FP" "$SCRIPT_VAR_DEFNS_FP" -set_file_param "$WFLOW_XML_FP" "CYCLE_DIR" "${CYCLE_DIR}" -set_file_param "$WFLOW_XML_FP" "ACCOUNT" "$ACCOUNT" -set_file_param "$WFLOW_XML_FP" "SCHED" "$SCHED" -set_file_param "$WFLOW_XML_FP" "QUEUE_DEFAULT" "$QUEUE_DEFAULT" -set_file_param "$WFLOW_XML_FP" "QUEUE_HPSS" "$QUEUE_HPSS" -set_file_param "$WFLOW_XML_FP" "QUEUE_FCST" "$QUEUE_FCST" -set_file_param "$WFLOW_XML_FP" "USHDIR" "$USHDIR" -set_file_param "$WFLOW_XML_FP" "JOBSDIR" "$JOBSDIR" -set_file_param "$WFLOW_XML_FP" "EXPTDIR" "$EXPTDIR" -set_file_param "$WFLOW_XML_FP" "LOGDIR" "$LOGDIR" -set_file_param "$WFLOW_XML_FP" "EXTRN_MDL_NAME_ICS" "$EXTRN_MDL_NAME_ICS" -set_file_param "$WFLOW_XML_FP" "EXTRN_MDL_NAME_LBCS" "$EXTRN_MDL_NAME_LBCS" -set_file_param "$WFLOW_XML_FP" "EXTRN_MDL_FILES_SYSBASEDIR_ICS" "$EXTRN_MDL_FILES_SYSBASEDIR_ICS" -set_file_param "$WFLOW_XML_FP" "EXTRN_MDL_FILES_SYSBASEDIR_LBCS" "$EXTRN_MDL_FILES_SYSBASEDIR_LBCS" 
-set_file_param "$WFLOW_XML_FP" "PROC_RUN_FV3" "$PROC_RUN_FV3" -set_file_param "$WFLOW_XML_FP" "DATE_FIRST_CYCL" "$DATE_FIRST_CYCL" -set_file_param "$WFLOW_XML_FP" "DATE_LAST_CYCL" "$DATE_LAST_CYCL" -set_file_param "$WFLOW_XML_FP" "YYYY_FIRST_CYCL" "$YYYY_FIRST_CYCL" -set_file_param "$WFLOW_XML_FP" "MM_FIRST_CYCL" "$MM_FIRST_CYCL" -set_file_param "$WFLOW_XML_FP" "DD_FIRST_CYCL" "$DD_FIRST_CYCL" -set_file_param "$WFLOW_XML_FP" "HH_FIRST_CYCL" "$HH_FIRST_CYCL" -set_file_param "$WFLOW_XML_FP" "FHR" "$FHR" -set_file_param "$WFLOW_XML_FP" "RUN_TASK_MAKE_GRID" "$RUN_TASK_MAKE_GRID" -set_file_param "$WFLOW_XML_FP" "RUN_TASK_MAKE_OROG" "$RUN_TASK_MAKE_OROG" -set_file_param "$WFLOW_XML_FP" "RUN_TASK_MAKE_SFC_CLIMO" "$RUN_TASK_MAKE_SFC_CLIMO" +set_file_param "${WFLOW_XML_FP}" "SCRIPT_VAR_DEFNS_FP" "$SCRIPT_VAR_DEFNS_FP" +set_file_param "${WFLOW_XML_FP}" "CYCLE_DIR" "${CYCLE_DIR}" +set_file_param "${WFLOW_XML_FP}" "ACCOUNT" "$ACCOUNT" +set_file_param "${WFLOW_XML_FP}" "SCHED" "$SCHED" +set_file_param "${WFLOW_XML_FP}" "QUEUE_DEFAULT" "$QUEUE_DEFAULT" +set_file_param "${WFLOW_XML_FP}" "QUEUE_HPSS" "$QUEUE_HPSS" +set_file_param "${WFLOW_XML_FP}" "QUEUE_FCST" "$QUEUE_FCST" +set_file_param "${WFLOW_XML_FP}" "USHDIR" "$USHDIR" +set_file_param "${WFLOW_XML_FP}" "JOBSDIR" "$JOBSDIR" +set_file_param "${WFLOW_XML_FP}" "EXPTDIR" "$EXPTDIR" +set_file_param "${WFLOW_XML_FP}" "LOGDIR" "$LOGDIR" +set_file_param "${WFLOW_XML_FP}" "EXTRN_MDL_NAME_ICS" "$EXTRN_MDL_NAME_ICS" +set_file_param "${WFLOW_XML_FP}" "EXTRN_MDL_NAME_LBCS" "$EXTRN_MDL_NAME_LBCS" +set_file_param "${WFLOW_XML_FP}" "EXTRN_MDL_FILES_SYSBASEDIR_ICS" "$EXTRN_MDL_FILES_SYSBASEDIR_ICS" +set_file_param "${WFLOW_XML_FP}" "EXTRN_MDL_FILES_SYSBASEDIR_LBCS" "$EXTRN_MDL_FILES_SYSBASEDIR_LBCS" +set_file_param "${WFLOW_XML_FP}" "PROC_RUN_FV3" "$PROC_RUN_FV3" +set_file_param "${WFLOW_XML_FP}" "DATE_FIRST_CYCL" "$DATE_FIRST_CYCL" +set_file_param "${WFLOW_XML_FP}" "DATE_LAST_CYCL" "$DATE_LAST_CYCL" +set_file_param "${WFLOW_XML_FP}" "YYYY_FIRST_CYCL" "$YYYY_FIRST_CYCL" +set_file_param "${WFLOW_XML_FP}" "MM_FIRST_CYCL" "$MM_FIRST_CYCL" +set_file_param "${WFLOW_XML_FP}" "DD_FIRST_CYCL" "$DD_FIRST_CYCL" +set_file_param "${WFLOW_XML_FP}" "HH_FIRST_CYCL" "$HH_FIRST_CYCL" +set_file_param "${WFLOW_XML_FP}" "FHR" "$FHR" +set_file_param "${WFLOW_XML_FP}" "RUN_TASK_MAKE_GRID" "$RUN_TASK_MAKE_GRID" +set_file_param "${WFLOW_XML_FP}" "RUN_TASK_MAKE_OROG" "$RUN_TASK_MAKE_OROG" +set_file_param "${WFLOW_XML_FP}" "RUN_TASK_MAKE_SFC_CLIMO" "$RUN_TASK_MAKE_SFC_CLIMO" # #----------------------------------------------------------------------- # @@ -144,7 +144,7 @@ regex_search="(^\s*)(\&DATE_FIRST_CYCL;)(CC00)( i=0 for cycl in "${CYCL_HRS[@]}"; do regex_replace="\1${cycl}\3\4${cycl}00 \7${cycl}00\9" - crnt_line=$( sed -n -r -e "s%$regex_search%$regex_replace%p" "$WFLOW_XML_FP" ) + crnt_line=$( sed -n -r -e "s%$regex_search%$regex_replace%p" "${WFLOW_XML_FP}" ) if [ "$i" -eq "0" ]; then all_cycledefs="${crnt_line}" else @@ -169,7 +169,7 @@ all_cycledefs=${all_cycledefs//&/\\\&} # # Perform the subsutitution. # -sed -i -r -e "s|${regex_search}|${all_cycledefs}|g" "$WFLOW_XML_FP" +sed -i -r -e "s|${regex_search}|${all_cycledefs}|g" "${WFLOW_XML_FP}" # #----------------------------------------------------------------------- # @@ -487,14 +487,29 @@ the FV3SAR under NEMS to the experiment directory..." 
# if [ "${CCPP_PHYS_SUITE}" = "GFS" ]; then - print_info_msg_verbose " + if [ "${EXTRN_MDL_NAME_ICS}" = "GSMGFS" -o \ + "${EXTRN_MDL_NAME_ICS}" = "FV3GFS" ] && \ + [ "${EXTRN_MDL_NAME_LBCS}" = "GSMGFS" -o \ + "${EXTRN_MDL_NAME_LBCS}" = "FV3GFS" ]; then + + print_info_msg_verbose " Copying the FV3 namelist file for the GFS physics suite to the experi- ment directory..." -# cp_vrfy ${TEMPLATE_DIR}/${FV3_NML_CCPP_GFS_FN} \ -# $EXPTDIR/${FV3_NML_FN} - if [ "${EXTRN_MDL_NAME_ICS}" = "FV3GFS" ]; then - cp_vrfy ${TEMPLATE_DIR}/${FV3_NML_CCPP_GFSEXTERN_GFSPHYS_FN} \ + cp_vrfy ${TEMPLATE_DIR}/${FV3_NML_CCPP_GFSPHYS_GFSEXTRN_FN} \ $EXPTDIR/${FV3_NML_FN} + + else + + print_err_msg_exit " +A template FV3 namelist file is not available for the following combina- +tion of physics suite and external models for ICs and LBCs: + CCPP_PHYS_SUITE = \"${CCPP_PHYS_SUITE}\" + EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\" + EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\" +Please change one or more of these parameters or provide a template +namelist file for this combination (and change workflow generation +script(s) accordingly) and rerun." + fi print_info_msg_verbose " @@ -520,16 +535,8 @@ directory..." print_info_msg_verbose " Copying the FV3 namelist file for the GSD physics suite to the experi- ment directory..." -# cp_vrfy ${TEMPLATE_DIR}/${FV3_NML_CCPP_GSD_FN} \ -# $EXPTDIR/${FV3_NML_FN} - - if [ "${EXTRN_MDL_NAME_ICS}" = "FV3GFS" ]; then - cp_vrfy ${TEMPLATE_DIR}/${FV3_NML_CCPP_GFSEXTERN_GSDPHYS_FN} \ - $EXPTDIR/${FV3_NML_FN} - elif [ "${EXTRN_MDL_NAME_ICS}" = "HRRRX" ]; then - cp_vrfy ${TEMPLATE_DIR}/${FV3_NML_CCPP_RAPHRRREXTERN_GSDPHYS_FN} \ - $EXPTDIR/${FV3_NML_FN} - fi + cp_vrfy ${TEMPLATE_DIR}/${FV3_NML_CCPP_GSDPHYS_FN} \ + $EXPTDIR/${FV3_NML_FN} print_info_msg_verbose " Copying the field table file for the GSD physics suite to the experiment @@ -549,24 +556,6 @@ GSD suite) to the experiment directory..." cp_vrfy $FIXgsd/CCN_ACTIVATE.BIN $EXPTDIR fi - -# Original changes in stage_static.sh by Jeff: -# if [ "${CCPP_PHYS_SUITE}" = "GFS" ] && [ "$EXTRN_MDL_NAME_ICS" = "FV3GFS" ]; then -# -# cp_vrfy $TEMPLATE_DIR/$FV3_NML_CCPP_GFSEXTERN_GFSPHYS_FN $EXPTDIR/$FV3_NML_FN -# cp_vrfy $TEMPLATE_DIR/$FIELD_TABLE_FN $EXPTDIR -# -# elif [ "${CCPP_PHYS_SUITE}" = "GSD" ] && [ "$EXTRN_MDL_NAME_ICS" = "FV3GFS" ]; then -# -# cp_vrfy $TEMPLATE_DIR/$FV3_NML_CCPP_GFSEXTERN_GSDPHYS_FN $EXPTDIR/$FV3_NML_FN -# cp_vrfy $TEMPLATE_DIR/$FIELD_TABLE_CCPP_GSD_FN $EXPTDIR/$FIELD_TABLE_FN -# -# elif [ "${CCPP_PHYS_SUITE}" = "GSD" ] && [ "$EXTRN_MDL_NAME_ICS" = "HRRRX" ]; then -# -# cp_vrfy $TEMPLATE_DIR/$FV3_NML_CCPP_RAPHRRREXTERN_GSDPHYS_FN $EXPTDIR/$FV3_NML_FN -# cp_vrfy $TEMPLATE_DIR/$FIELD_TABLE_CCPP_GSD_FN $EXPTDIR/$FIELD_TABLE_FN -# -# fi # #----------------------------------------------------------------------- # @@ -629,6 +618,35 @@ fi set_file_param "${FV3_NML_FP}" "stretch_fac" "${stretch_fac}" set_file_param "${FV3_NML_FP}" "bc_update_interval" "${LBC_UPDATE_INTVL_HRS}" # +# For GSD physics, set the parameter lsoil according to the external mo- +# dels specified for ICs and LBCs. 
+# +if [ "${CCPP_PHYS_SUITE}" = "GSD" ]; then + + if [ "${EXTRN_MDL_NAME_ICS}" = "GSMGFS" -o \ + "${EXTRN_MDL_NAME_ICS}" = "FV3GFS" ] && \ + [ "${EXTRN_MDL_NAME_LBCS}" = "GSMGFS" -o \ + "${EXTRN_MDL_NAME_LBCS}" = "FV3GFS" ]; then + set_file_param "${FV3_NML_FP}" "lsoil" "4" + elif [ "${EXTRN_MDL_NAME_ICS}" = "RAPX" -o \ + "${EXTRN_MDL_NAME_ICS}" = "HRRRX" ] && \ + [ "${EXTRN_MDL_NAME_LBCS}" = "RAPX" -o \ + "${EXTRN_MDL_NAME_LBCS}" = "HRRRX" ]; then + set_file_param "${FV3_NML_FP}" "lsoil" "9" + else + print_err_msg_exit " +The value to set the variable lsoil to in the FV3 namelist file (FV3_- +NML_FP) has not been specified for the following combination of physics +suite and external models for ICs and LBCs: + CCPP_PHYS_SUITE = \"${CCPP_PHYS_SUITE}\" + EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\" + EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\" +Please change one or more of these parameters or provide a value for +lsoil (and change workflow generation script(s) accordingly) and rerun." + fi + +fi +# #----------------------------------------------------------------------- # # Restore the shell options saved at the beginning of this script/func- diff --git a/ush/set_file_param.sh b/ush/set_file_param.sh index 2ad365b32..bee0476fa 100644 --- a/ush/set_file_param.sh +++ b/ush/set_file_param.sh @@ -86,32 +86,36 @@ Setting parameter \"$param\" in file \"$file\" to \"$value\" ..." case $file in # - "$WFLOW_XML_FN") + "${WFLOW_XML_FN}") regex_search="(^\s*.*)" regex_replace="\1$value\3" ;; # - "$RGNL_GRID_NML_FN") + "${RGNL_GRID_NML_FN}") regex_search="^(\s*$param\s*=)(.*)" regex_replace="\1 $value" ;; # - "$FV3_NML_FN" | "$FV3_NML_CCPP_GFSEXTERN_GFSPHYS_FN" | "$FV3_NML_CCPP_GFSEXTERN_GSDPHYS_FN" | "$FV3_NML_CCPP_RAPHRRREXTERN_GSDPHYS_FN") + "${FV3_NML_FN}" | \ + "${FV3_NML_CCPP_GFSPHYS_GFSEXTRN_FN}" | \ + "${FV3_NML_CCPP_GSDPHYS_FN}") regex_search="^(\s*$param\s*=)(.*)" regex_replace="\1 $value" ;; # - "$DIAG_TABLE_FN" | "$DIAG_TABLE_CCPP_GSD_FN" | "$DIAG_TABLE_CCPP_GSD_FN") + "${DIAG_TABLE_FN}" | \ + "${DIAG_TABLE_CCPP_GSD_FN}" | \ + "${DIAG_TABLE_CCPP_GSD_FN}") regex_search="(.*)(<$param>)(.*)" regex_replace="\1$value\3" ;; # - "$MODEL_CONFIG_FN") + "${MODEL_CONFIG_FN}") regex_search="^(\s*$param:\s*)(.*)" regex_replace="\1$value" ;; # - "$SCRIPT_VAR_DEFNS_FN") + "${SCRIPT_VAR_DEFNS_FN}") regex_search="(^\s*$param=)(\".*\"|[^ \"]*)(\s*[#].*)?$" # Whole line with regex_replace=\1. # regex_search="(^\s*$param=)(\".*\"|[^ \"]*)(\s*[#].*)?" regex_search="(^\s*$param=)(\".*\")?([^ \"]*)?(\(.*\))?(\s*[#].*)?" diff --git a/ush/setup.sh b/ush/setup.sh index c9815d019..1b2378f50 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -375,16 +375,27 @@ fi # #----------------------------------------------------------------------- # -# Do not allow the option of running with RAP or HRRR external model -# data and GFS physics. This option is currently untested. +# For GFS physics, only allow "GSMGFS" and "FV3GFS" as the external mo- +# dels for ICs and LBCs. # #----------------------------------------------------------------------- # -if [ "$EXTRN_MDL_NAME_ICS" = "HRRRX" -o "$EXTRN_MDL_NAME_LBCS" = "RAPX" ] && \ - [ "${CCPP_PHYS_SUITE}" = "GFS" ]; then - print_err_msg_exit "${script_name}" "\ -Using $EXTRN_MDL_NAME_ICS external model data and ${CCPP_PHYS_SUITE} physics through CCPP is -untested and not currently an option in the community SAR workflow." 
+if [ "${CCPP_PHYS_SUITE}" = "GFS" ]; then + + if [ "${EXTRN_MDL_NAME_ICS}" != "GSMGFS" -a \ + "${EXTRN_MDL_NAME_ICS}" != "FV3GFS" ] || \ + [ "${EXTRN_MDL_NAME_LBCS}" != "GSMGFS" -a \ + "${EXTRN_MDL_NAME_LBCS}" != "FV3GFS" ]; then + print_info_msg_verbose " +The following combination of physics suite and external models is not +allowed: + CCPP_PHYS_SUITE = \"${CCPP_PHYS_SUITE}\" + EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\" + EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\" +Currently, for GFS physics, the only external models that the workflow +allows for ICs and LBCs are \"GSMGFS\" and \"FV3GFS\"." + fi + fi # #----------------------------------------------------------------------- @@ -540,7 +551,6 @@ NEMSfv3gfs_DIR="$SORCDIR/NEMSfv3gfs" # # Make sure that the NEMSfv3gfs_DIR directory exists. # -if [ 0 = 1 ]; then if [ ! -d "${NEMSfv3gfs_DIR}" ]; then print_err_msg_exit "${script_name}" "\ The NEMSfv3gfs directory specified by NEMSfv3gfs_DIR that should contain @@ -549,7 +559,7 @@ the FV3 source code does not exist: Please clone the NEMSfv3gfs repository in this directory, build the FV3 executable, and then rerun the workflow." fi -fi + case $MACHINE in @@ -1146,13 +1156,13 @@ fi # PE_MEMBER01=$(( $layout_x*$layout_y )) if [ "$QUILTING" = "TRUE" ]; then - PE_MEMBER01=$(( $PE_MEMBER01 + ${WRTCMP_write_groups}*${WRTCMP_write_tasks_per_group} )) + PE_MEMBER01=$(( ${PE_MEMBER01} + ${WRTCMP_write_groups}*${WRTCMP_write_tasks_per_group} )) fi print_info_msg_verbose "\ The number of MPI tasks for the forecast (including those for the write component if it is being used) are: - PE_MEMBER01 = $PE_MEMBER01" + PE_MEMBER01 = ${PE_MEMBER01}" # #----------------------------------------------------------------------- # @@ -1224,20 +1234,20 @@ fi # if [ "$QUILTING" = "TRUE" ]; then - if [ -z "$WRTCMP_PARAMS_TEMPLATE_FN" ]; then + if [ -z "${WRTCMP_PARAMS_TEMPLATE_FN}" ]; then print_err_msg_exit "${script_name}" "\ The write-component template file name (WRTCMP_PARAMS_TEMPLATE_FN) must be set to a non-empty value when quilting (i.e. the write-component) is enabled: QUILTING = \"$QUILTING\" - WRTCMP_PARAMS_TEMPLATE_FN = \"$WRTCMP_PARAMS_TEMPLATE_FN\"" + WRTCMP_PARAMS_TEMPLATE_FN = \"${WRTCMP_PARAMS_TEMPLATE_FN}\"" fi - WRTCMP_PARAMS_TEMPLATE_FP="$TEMPLATE_DIR/$WRTCMP_PARAMS_TEMPLATE_FN" - if [ ! -f "$WRTCMP_PARAMS_TEMPLATE_FP" ]; then + WRTCMP_PARAMS_TEMPLATE_FP="${TEMPLATE_DIR}/${WRTCMP_PARAMS_TEMPLATE_FN}" + if [ ! 
-f "${WRTCMP_PARAMS_TEMPLATE_FP}" ]; then print_err_msg_exit "${script_name}" "\ The write-component template file does not exist or is not a file: - WRTCMP_PARAMS_TEMPLATE_FP = \"$WRTCMP_PARAMS_TEMPLATE_FP\"" + WRTCMP_PARAMS_TEMPLATE_FP = \"${WRTCMP_PARAMS_TEMPLATE_FP}\"" fi fi @@ -1290,7 +1300,7 @@ fi # #----------------------------------------------------------------------- # -NUM_NODES=$(( ($PE_MEMBER01 + $ncores_per_node - 1)/$ncores_per_node )) +NUM_NODES=$(( (${PE_MEMBER01} + ${ncores_per_node} - 1)/${ncores_per_node} )) # #----------------------------------------------------------------------- # @@ -1804,8 +1814,8 @@ LBC_UPDATE_FCST_HRS=(${LBC_UPDATE_FCST_HRS[@]}) # LBC_UPDATE_FCST_HRS is an arr # #----------------------------------------------------------------------- # -ncores_per_node="$ncores_per_node" -PE_MEMBER01="$PE_MEMBER01" +ncores_per_node="${ncores_per_node}" +PE_MEMBER01="${PE_MEMBER01}" EOM } || print_err_msg_exit "${script_name}" "\ Heredoc (cat) command to append new variable definitions to variable From ad7cbe8af3f97ceb9f7a5ff5302a640a69a96ba0 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 22 Oct 2019 14:36:34 -0600 Subject: [PATCH 009/203] Cosmetic changes (mostly, for clarity, place curly braces around variable names that contain at least one underscrore). --- jobs/JREGIONAL_GET_EXTRN_FILES | 2 +- jobs/JREGIONAL_MAKE_GRID | 2 +- jobs/JREGIONAL_MAKE_IC_LBC0 | 2 +- jobs/JREGIONAL_MAKE_LBC1_TO_LBCN | 2 +- jobs/JREGIONAL_MAKE_OROG | 2 +- jobs/JREGIONAL_MAKE_SFC_CLIMO | 2 +- jobs/JREGIONAL_RUN_FV3 | 2 +- jobs/JREGIONAL_RUN_POST | 2 +- scripts/exregional_get_extrn_files.sh | 114 +++++++++++++----------- scripts/exregional_make_grid.sh | 4 +- scripts/exregional_make_ic_lbc0.sh | 14 ++- scripts/exregional_make_lbc1_to_lbcn.sh | 15 +++- scripts/exregional_make_orog.sh | 4 +- scripts/exregional_make_sfc_climo.sh | 4 +- scripts/exregional_run_fv3.sh | 4 +- scripts/exregional_run_post.sh | 4 +- tests/run_one_expt.sh | 2 +- ush/generate_FV3SAR_wflow.sh | 2 +- ush/get_extrn_mdl_file_dir_info.sh | 2 +- ush/link_fix.sh | 11 +-- ush/set_predef_grid_params.sh | 2 +- ush/setup.sh | 10 +-- 22 files changed, 114 insertions(+), 94 deletions(-) diff --git a/jobs/JREGIONAL_GET_EXTRN_FILES b/jobs/JREGIONAL_GET_EXTRN_FILES index 3b934a92b..e32dd2ec1 100755 --- a/jobs/JREGIONAL_GET_EXTRN_FILES +++ b/jobs/JREGIONAL_GET_EXTRN_FILES @@ -27,7 +27,7 @@ # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP +. ${SCRIPT_VAR_DEFNS_FP} . $USHDIR/source_funcs.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_GRID b/jobs/JREGIONAL_MAKE_GRID index a8f724c3f..f70c1ccd5 100755 --- a/jobs/JREGIONAL_MAKE_GRID +++ b/jobs/JREGIONAL_MAKE_GRID @@ -106,7 +106,7 @@ # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP +. ${SCRIPT_VAR_DEFNS_FP} . $USHDIR/source_funcs.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_IC_LBC0 b/jobs/JREGIONAL_MAKE_IC_LBC0 index dd9ef4eb6..8c1fa966b 100755 --- a/jobs/JREGIONAL_MAKE_IC_LBC0 +++ b/jobs/JREGIONAL_MAKE_IC_LBC0 @@ -8,7 +8,7 @@ # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP +. ${SCRIPT_VAR_DEFNS_FP} . 
$USHDIR/source_funcs.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN b/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN index 4e3a490f5..72bc97fd9 100755 --- a/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN +++ b/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN @@ -8,7 +8,7 @@ # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP +. ${SCRIPT_VAR_DEFNS_FP} . $USHDIR/source_funcs.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_OROG b/jobs/JREGIONAL_MAKE_OROG index 67651ff8e..58ed5e7d2 100755 --- a/jobs/JREGIONAL_MAKE_OROG +++ b/jobs/JREGIONAL_MAKE_OROG @@ -8,7 +8,7 @@ # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP +. ${SCRIPT_VAR_DEFNS_FP} . $USHDIR/source_funcs.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_SFC_CLIMO b/jobs/JREGIONAL_MAKE_SFC_CLIMO index d648381ee..7c6ba2927 100755 --- a/jobs/JREGIONAL_MAKE_SFC_CLIMO +++ b/jobs/JREGIONAL_MAKE_SFC_CLIMO @@ -8,7 +8,7 @@ # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP +. ${SCRIPT_VAR_DEFNS_FP} . $USHDIR/source_funcs.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_FV3 b/jobs/JREGIONAL_RUN_FV3 index 92379be94..ecfe1a165 100755 --- a/jobs/JREGIONAL_RUN_FV3 +++ b/jobs/JREGIONAL_RUN_FV3 @@ -18,7 +18,7 @@ # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP +. ${SCRIPT_VAR_DEFNS_FP} . $USHDIR/source_funcs.sh # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_POST b/jobs/JREGIONAL_RUN_POST index 1bfd0f918..0431058f3 100755 --- a/jobs/JREGIONAL_RUN_POST +++ b/jobs/JREGIONAL_RUN_POST @@ -17,7 +17,7 @@ # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP +. ${SCRIPT_VAR_DEFNS_FP} . $USHDIR/source_funcs.sh # #----------------------------------------------------------------------- diff --git a/scripts/exregional_get_extrn_files.sh b/scripts/exregional_get_extrn_files.sh index c21a898a0..b846a0a35 100755 --- a/scripts/exregional_get_extrn_files.sh +++ b/scripts/exregional_get_extrn_files.sh @@ -8,7 +8,7 @@ # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP +. ${SCRIPT_VAR_DEFNS_FP} . $USHDIR/source_funcs.sh # #----------------------------------------------------------------------- @@ -45,9 +45,15 @@ tial or boundary condition files for the FV3 will be generated. 
# #----------------------------------------------------------------------- # -valid_args=( "EXTRN_MDL_FNS" "EXTRN_MDL_SYSDIR" "EXTRN_MDL_FILES_DIR" \ - "EXTRN_MDL_ARCV_FNS" "EXTRN_MDL_ARCV_FPS" "EXTRN_MDL_ARCV_FMT" \ - "EXTRN_MDL_ARCVREL_DIR" ) +valid_args=( \ +"EXTRN_MDL_FNS" \ +"EXTRN_MDL_SYSDIR" \ +"EXTRN_MDL_FILES_DIR" \ +"EXTRN_MDL_ARCV_FNS" \ +"EXTRN_MDL_ARCV_FPS" \ +"EXTRN_MDL_ARCV_FMT" \ +"EXTRN_MDL_ARCVREL_DIR" \ +) process_args valid_args "$@" # If VERBOSE is set to TRUE, print out what each valid argument has been @@ -58,7 +64,7 @@ if [ "$VERBOSE" = "TRUE" ]; then The arguments to script/function \"${script_name}\" have been set as follows: " - for (( i=0; i<$num_valid_args; i++ )); do + for (( i=0; i<${num_valid_args}; i++ )); do line=$( declare -p "${valid_args[$i]}" ) printf " $line\n" done @@ -142,7 +148,7 @@ fi # EXTRN_MDL_FNS_str="( "$( printf "\"%s\" " "${EXTRN_MDL_FNS[@]}" )")" -if [ "$DATA_SRC" = "disk" ]; then +if [ "${DATA_SRC}" = "disk" ]; then if [ "${RUN_ENVIR}" = "nco" ]; then @@ -150,9 +156,9 @@ if [ "$DATA_SRC" = "disk" ]; then Creating links in local directory (EXTRN_MDL_FILES_DIR) to external mo- del files (EXTRN_MDL_FNS) in the system directory on disk (EXTRN_MDL_- SYSDIR): - EXTRN_MDL_FILES_DIR = \"$EXTRN_MDL_FILES_DIR\" - EXTRN_MDL_SYSDIR = \"$EXTRN_MDL_SYSDIR\" - EXTRN_MDL_FNS = $EXTRN_MDL_FNS_str + EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\" + EXTRN_MDL_SYSDIR = \"${EXTRN_MDL_SYSDIR}\" + EXTRN_MDL_FNS = ${EXTRN_MDL_FNS_str} " ln_vrfy -sf -t ${EXTRN_MDL_FILES_DIR} ${EXTRN_MDL_FPS[@]} @@ -161,11 +167,11 @@ SYSDIR): print_info_msg "\ Copying external model files (EXTRN_MDL_FNS) from the system directory on disk (EXTRN_MDL_SYSDIR) to local directory (EXTRN_MDL_FILES_DIR): - EXTRN_MDL_SYSDIR = \"$EXTRN_MDL_SYSDIR\" - EXTRN_MDL_FNS = $EXTRN_MDL_FNS_str - EXTRN_MDL_FILES_DIR = \"$EXTRN_MDL_FILES_DIR\" + EXTRN_MDL_SYSDIR = \"${EXTRN_MDL_SYSDIR}\" + EXTRN_MDL_FNS = ${EXTRN_MDL_FNS_str} + EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\" " - cp_vrfy ${EXTRN_MDL_FPS[@]} $EXTRN_MDL_FILES_DIR + cp_vrfy ${EXTRN_MDL_FPS[@]} ${EXTRN_MDL_FILES_DIR} fi # @@ -175,7 +181,7 @@ on disk (EXTRN_MDL_SYSDIR) to local directory (EXTRN_MDL_FILES_DIR): # #----------------------------------------------------------------------- # - if [ "$ICS_OR_LBCS" = "ICS" ]; then + if [ "${ICS_OR_LBCS}" = "ICS" ]; then print_info_msg "\n\ ======================================================================== @@ -185,7 +191,7 @@ forecast!!! Exiting script: \"${script_name}\" ========================================================================" - elif [ "$ICS_OR_LBCS" = "LBCS" ]; then + elif [ "${ICS_OR_LBCS}" = "LBCS" ]; then print_info_msg "\n\ ======================================================================== @@ -204,7 +210,7 @@ Exiting script: \"${script_name}\" # #----------------------------------------------------------------------- # -elif [ "$DATA_SRC" = "HPSS" ]; then +elif [ "${DATA_SRC}" = "HPSS" ]; then # #----------------------------------------------------------------------- # @@ -213,7 +219,7 @@ elif [ "$DATA_SRC" = "HPSS" ]; then # #----------------------------------------------------------------------- # - prefix=${EXTRN_MDL_ARCVREL_DIR:+$EXTRN_MDL_ARCVREL_DIR/} + prefix=${EXTRN_MDL_ARCVREL_DIR:+${EXTRN_MDL_ARCVREL_DIR}/} EXTRN_MDL_FPS=( "${EXTRN_MDL_FNS[@]/#/$prefix}" ) EXTRN_MDL_FPS_str="( "$( printf "\"%s\" " "${EXTRN_MDL_FPS[@]}" )")" @@ -224,9 +230,9 @@ Fetching model output files from HPSS. 
The model output files (EXTRN_- MDL_FPS), the archive files on HPSS in which these output files are stored (EXTRN_MDL_ARCV_FPS), and the local directory into which they will be copied (EXTRN_MDL_FILES_DIR) are: - EXTRN_MDL_FPS = $EXTRN_MDL_FPS_str - EXTRN_MDL_ARCV_FPS = $EXTRN_MDL_ARCV_FPS_str - EXTRN_MDL_FILES_DIR = \"$EXTRN_MDL_FILES_DIR\"" + EXTRN_MDL_FPS = ${EXTRN_MDL_FPS_str} + EXTRN_MDL_ARCV_FPS = ${EXTRN_MDL_ARCV_FPS_str} + EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\"" # #----------------------------------------------------------------------- # @@ -253,7 +259,7 @@ will be copied (EXTRN_MDL_FILES_DIR) are: # #----------------------------------------------------------------------- # - if [ "$EXTRN_MDL_ARCV_FMT" = "tar" ]; then + if [ "${EXTRN_MDL_ARCV_FMT}" = "tar" ]; then # #----------------------------------------------------------------------- # @@ -264,7 +270,7 @@ will be copied (EXTRN_MDL_FILES_DIR) are: # num_files_to_extract="${#EXTRN_MDL_FPS[@]}" - for (( narcv=0; narcv<$num_arcv_files; narcv++ )); do + for (( narcv=0; narcv<${num_arcv_files}; narcv++ )); do narcv_formatted=$( printf "%02d" $narcv ) ARCV_FP="${EXTRN_MDL_ARCV_FPS[$narcv]}" @@ -290,13 +296,13 @@ will be copied (EXTRN_MDL_FILES_DIR) are: print_err_msg_exit "${script_name}" "\ htar file list operation (\"htar -tvf ...\") failed. Check the log file HTAR_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: - EXTRN_MDL_FILES_DIR = \"$EXTRN_MDL_FILES_DIR\" - HTAR_LOG_FN = \"$HTAR_LOG_FN\" + EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\" + HTAR_LOG_FN = \"${HTAR_LOG_FN}\" " i=0 files_in_crnt_arcv=() - for (( nfile=0; nfile<$num_files_to_extract; nfile++ )); do + for (( nfile=0; nfile<${num_files_to_extract}; nfile++ )); do extrn_mdl_fp="${EXTRN_MDL_FPS[$nfile]}" # grep -n ${extrn_mdl_fp} ${HTAR_LOG_FN} 2>&1 && { \ grep -n ${extrn_mdl_fp} ${HTAR_LOG_FN} > /dev/null 2>&1 && { \ @@ -314,8 +320,8 @@ HTAR_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: print_err_msg_exit "${script_name}" "\ The current archive file (ARCV_FP) does not contain any of the external model files listed in EXTRN_MDL_FPS: - ARCV_FP = \"$ARCV_FP\" - EXTRN_MDL_FPS = $EXTRN_MDL_FPS_str + ARCV_FP = \"${ARCV_FP}\" + EXTRN_MDL_FPS = ${EXTRN_MDL_FPS_str} The archive file should contain at least one external model file; other- wise, it would not be needed. " @@ -331,8 +337,8 @@ wise, it would not be needed. print_err_msg_exit "${script_name}" "\ htar file extract operation (\"htar -xvf ...\") failed. Check the log file HTAR_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: - EXTRN_MDL_FILES_DIR = \"$EXTRN_MDL_FILES_DIR\" - HTAR_LOG_FN = \"$HTAR_LOG_FN\" + EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\" + HTAR_LOG_FN = \"${HTAR_LOG_FN}\" " # # Note that the htar file extract operation above may return with a 0 @@ -359,12 +365,12 @@ file HTAR_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: print_err_msg_exit "${script_name}" "\ External model output file FP not extracted from tar archive file ARCV_- FP: - ARCV_FP = \"$ARCV_FP\" + ARCV_FP = \"${ARCV_FP}\" FP = \"$FP\" Check the log file HTAR_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: - EXTRN_MDL_FILES_DIR = \"$EXTRN_MDL_FILES_DIR\" - HTAR_LOG_FN = \"$HTAR_LOG_FN\" + EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\" + HTAR_LOG_FN = \"${HTAR_LOG_FN}\" " done @@ -382,7 +388,7 @@ details: # #----------------------------------------------------------------------- # - if [ "$EXTRN_MDL_ARCVREL_DIR" != "." ]; then + if [ "${EXTRN_MDL_ARCVREL_DIR}" != "." 
]; then # # The code below works if the first character of EXTRN_MDL_ARCVREL_DIR # is a "/", which is the only case encountered thus far. The code may @@ -392,7 +398,7 @@ details: if [ "${EXTRN_MDL_ARCVREL_DIR:0:1}" = "/" -o \ "${EXTRN_MDL_ARCVREL_DIR:0:2}" = "./" ]; then - mv_vrfy $EXTRN_MDL_ARCVREL_DIR/* . + mv_vrfy ${EXTRN_MDL_ARCVREL_DIR}/* . # # Get the first subdirectory in EXTRN_MDL_ARCVREL_DIR, i.e. the directo- # ry after the first forward slash. This is the subdirectory that we @@ -400,7 +406,7 @@ details: # subdir_to_remove=$( printf "%s" "${EXTRN_MDL_ARCVREL_DIR}" | \ sed -r 's%^(\/|\.\/)([^/]*).*%\2%' ) - rm_vrfy -rf ./$subdir_to_remove + rm_vrfy -rf ./${subdir_to_remove} # # If EXTRN_MDL_ARCVREL_DIR does not start with a "/" (and it is not # equal to "."), then print out an error message and exit. @@ -412,8 +418,8 @@ The archive-relative directory specified by EXTRN_MDL_ARCVREL_DIR [i.e. the directory \"within\" the tar file(s) listed in EXTRN_MDL_ARCV_FPS] is not the current directory (i.e. it is not \".\"), and it does not start with a \"/\": - EXTRN_MDL_ARCVREL_DIR = \"$EXTRN_MDL_ARCVREL_DIR\" - EXTRN_MDL_ARCV_FPS = $EXTRN_MDL_ARCV_FPS_str + EXTRN_MDL_ARCVREL_DIR = \"${EXTRN_MDL_ARCVREL_DIR}\" + EXTRN_MDL_ARCV_FPS = ${EXTRN_MDL_ARCV_FPS_str} This script (\"${script_name}\) must be modified to account for this case. " fi @@ -427,7 +433,7 @@ This script (\"${script_name}\) must be modified to account for this case. # #----------------------------------------------------------------------- # - elif [ "$EXTRN_MDL_ARCV_FMT" = "zip" ]; then + elif [ "${EXTRN_MDL_ARCV_FMT}" = "zip" ]; then # #----------------------------------------------------------------------- # @@ -440,13 +446,13 @@ This script (\"${script_name}\) must be modified to account for this case. # #----------------------------------------------------------------------- # - if [ "$num_arcv_files" -gt 1 ]; then + if [ "${num_arcv_files}" -gt 1 ]; then print_err_msg_exit "${script_name}" "\ Currently, this script is coded to handle only one archive file if the archive file format is specified to be \"zip\", but the number of archive files (num_arcv_files) passed to this script is greater than 1: - EXTRN_MDL_ARCV_FMT = \"$EXTRN_MDL_ARCV_FMT\" - num_arcv_files = $num_arcv_files + EXTRN_MDL_ARCV_FMT = \"${EXTRN_MDL_ARCV_FMT}\" + num_arcv_files = ${num_arcv_files} Please modify the script to handle more than one \"zip\" archive file. Note that code already exists in this script that can handle multiple archive files if the archive file format is specified to be \"tar\", so @@ -467,8 +473,8 @@ that can be used as a guide for the \"zip\" case." print_err_msg_exit "${script_name}" "\ hsi file get operation (\"hsi get ...\") failed. Check the log file HSI_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: - EXTRN_MDL_FILES_DIR = \"$EXTRN_MDL_FILES_DIR\" - HSI_LOG_FN = \"$HSI_LOG_FN\" + EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\" + HSI_LOG_FN = \"${HSI_LOG_FN}\" " # #----------------------------------------------------------------------- @@ -484,9 +490,9 @@ HSI_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: unzip operation to list the contents of the zip archive file ARCV_FN in the directory EXTRN_MDL_FILES_DIR failed. 
Check the log file UNZIP_-
LOG_FN in that directory for details:
-  ARCV_FN = \"$ARCV_FN\"
-  EXTRN_MDL_FILES_DIR = \"$EXTRN_MDL_FILES_DIR\"
-  UNZIP_LOG_FN = \"$UNZIP_LOG_FN\"
+  ARCV_FN = \"${ARCV_FN}\"
+  EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\"
+  UNZIP_LOG_FN = \"${UNZIP_LOG_FN}\"
 "
 #
 #-----------------------------------------------------------------------
 #
@@ -506,10 +512,10 @@ LOG_FN in that directory for details:
 External model output file FP does not exist in the zip archive file
 ARCV_FN in the directory EXTRN_MDL_FILES_DIR. Check the log file UN-
 ZIP_LOG_FN in that directory for the contents of the zip archive:
-  EXTRN_MDL_FILES_DIR = \"$EXTRN_MDL_FILES_DIR\"
-  ARCV_FN = \"$ARCV_FN\"
+  EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\"
+  ARCV_FN = \"${ARCV_FN}\"
   FP = \"$FP\"
-  UNZIP_LOG_FN = \"$UNZIP_LOG_FN\"
+  UNZIP_LOG_FN = \"${UNZIP_LOG_FN}\"
 "
 done
 #
@@ -527,8 +533,8 @@ ZIP_LOG_FN in that directory for the contents of the zip archive:
 print_err_msg_exit "${script_name}" "\
 unzip file extract operation (\"unzip -o ...\") failed. Check the log
 file UNZIP_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details:
-  EXTRN_MDL_FILES_DIR = \"$EXTRN_MDL_FILES_DIR\"
-  UNZIP_LOG_FN = \"$UNZIP_LOG_FN\"
+  EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\"
+  UNZIP_LOG_FN = \"${UNZIP_LOG_FN}\"
 "
 #
 # NOTE:
@@ -550,7 +556,7 @@ file UNZIP_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details:
 #
 #-----------------------------------------------------------------------
 #
-  if [ "$ICS_OR_LBCS" = "ICS" ]; then
+  if [ "${ICS_OR_LBCS}" = "ICS" ]; then
 
     print_info_msg "\n\
 ========================================================================
@@ -559,7 +565,7 @@ fields for the FV3SAR successfully fetched from HPSS!!!
 Exiting script: \"${script_name}\"
 ========================================================================"
 
-  elif [ "$ICS_OR_LBCS" = "LBCS" ]; then
+  elif [ "${ICS_OR_LBCS}" = "LBCS" ]; then
 
     print_info_msg "\n\
 ========================================================================
@@ -580,4 +586,4 @@ fi
 #
 #-----------------------------------------------------------------------
 #
-{ restore_shell_opts; } > /dev/null 2>&1
+{ restore_shell_opts; } > /dev/null 2>&1
diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh
index 980ce1fe9..b21461eab 100755
--- a/scripts/exregional_make_grid.sh
+++ b/scripts/exregional_make_grid.sh
@@ -8,7 +8,7 @@
 #
 #-----------------------------------------------------------------------
 #
-. $SCRIPT_VAR_DEFNS_FP
+. ${SCRIPT_VAR_DEFNS_FP}
 . $USHDIR/source_funcs.sh
 #
 #-----------------------------------------------------------------------
@@ -63,7 +63,7 @@ if [ "$VERBOSE" = "TRUE" ]; then
 The arguments to script/function \"${script_name}\" have been set as
 follows:
 "
-  for (( i=0; i<$num_valid_args; i++ )); do
+  for (( i=0; i<${num_valid_args}; i++ )); do
     line=$( declare -p "${valid_args[$i]}" )
     printf "  $line\n"
   done
diff --git a/scripts/exregional_make_ic_lbc0.sh b/scripts/exregional_make_ic_lbc0.sh
index 8b15061cd..185e2755d 100755
--- a/scripts/exregional_make_ic_lbc0.sh
+++ b/scripts/exregional_make_ic_lbc0.sh
@@ -8,7 +8,7 @@
 #
 #-----------------------------------------------------------------------
 #
-. $SCRIPT_VAR_DEFNS_FP
+. ${SCRIPT_VAR_DEFNS_FP}
 . $USHDIR/source_funcs.sh
 #
 #-----------------------------------------------------------------------
@@ -45,8 +45,14 @@ for FV3 (in NetCDF format).
# #----------------------------------------------------------------------- # -valid_args=( "EXTRN_MDL_FNS" "EXTRN_MDL_FILES_DIR" "EXTRN_MDL_CDATE" \ - "WGRIB2_DIR" "APRUN" "ICS_DIR" ) +valid_args=( \ +"EXTRN_MDL_FNS" \ +"EXTRN_MDL_FILES_DIR" \ +"EXTRN_MDL_CDATE" \ +"WGRIB2_DIR" \ +"APRUN" \ +"ICS_DIR" \ +) process_args valid_args "$@" # If VERBOSE is set to TRUE, print out what each valid argument has been @@ -57,7 +63,7 @@ if [ "$VERBOSE" = "TRUE" ]; then The arguments to script/function \"${script_name}\" have been set as follows: " - for (( i=0; i<$num_valid_args; i++ )); do + for (( i=0; i<${num_valid_args}; i++ )); do line=$( declare -p "${valid_args[$i]}" ) printf " $line\n" done diff --git a/scripts/exregional_make_lbc1_to_lbcn.sh b/scripts/exregional_make_lbc1_to_lbcn.sh index 09c99a3cf..88f41fe5a 100755 --- a/scripts/exregional_make_lbc1_to_lbcn.sh +++ b/scripts/exregional_make_lbc1_to_lbcn.sh @@ -8,7 +8,7 @@ # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP +. ${SCRIPT_VAR_DEFNS_FP} . $USHDIR/source_funcs.sh # #----------------------------------------------------------------------- @@ -45,8 +45,15 @@ hour zero). # #----------------------------------------------------------------------- # -valid_args=("EXTRN_MDL_FNS" "EXTRN_MDL_FILES_DIR" "EXTRN_MDL_CDATE" "WGRIB2_DIR" \ - "APRUN" "LBCS_DIR" "EXTRN_MDL_LBC_UPDATE_FHRS") +valid_args=( \ +"EXTRN_MDL_FNS" \ +"EXTRN_MDL_FILES_DIR" \ +"EXTRN_MDL_CDATE" \ +"WGRIB2_DIR" \ +"APRUN" \ +"LBCS_DIR" \ +"EXTRN_MDL_LBC_UPDATE_FHRS" \ +) process_args valid_args "$@" # If VERBOSE is set to TRUE, print out what each valid argument has been @@ -57,7 +64,7 @@ if [ "$VERBOSE" = "TRUE" ]; then The arguments to script/function \"${script_name}\" have been set as follows: " - for (( i=0; i<$num_valid_args; i++ )); do + for (( i=0; i<${num_valid_args}; i++ )); do line=$( declare -p "${valid_args[$i]}" ) printf " $line\n" done diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index db788dde8..1e0584758 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -8,7 +8,7 @@ # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP +. ${SCRIPT_VAR_DEFNS_FP} . $USHDIR/source_funcs.sh # #----------------------------------------------------------------------- @@ -63,7 +63,7 @@ if [ "$VERBOSE" = "TRUE" ]; then The arguments to script/function \"${script_name}\" have been set as follows: " - for (( i=0; i<$num_valid_args; i++ )); do + for (( i=0; i<${num_valid_args}; i++ )); do line=$( declare -p "${valid_args[$i]}" ) printf " $line\n" done diff --git a/scripts/exregional_make_sfc_climo.sh b/scripts/exregional_make_sfc_climo.sh index f43e5f0fa..68c717f14 100755 --- a/scripts/exregional_make_sfc_climo.sh +++ b/scripts/exregional_make_sfc_climo.sh @@ -8,7 +8,7 @@ # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP +. ${SCRIPT_VAR_DEFNS_FP} . 
$USHDIR/source_funcs.sh # #----------------------------------------------------------------------- @@ -55,7 +55,7 @@ if [ "$VERBOSE" = "TRUE" ]; then The arguments to script/function \"${script_name}\" have been set as follows: " - for (( i=0; i<$num_valid_args; i++ )); do + for (( i=0; i<${num_valid_args}; i++ )); do line=$( declare -p "${valid_args[$i]}" ) printf " $line\n" done diff --git a/scripts/exregional_run_fv3.sh b/scripts/exregional_run_fv3.sh index 02e971142..6a98938ac 100755 --- a/scripts/exregional_run_fv3.sh +++ b/scripts/exregional_run_fv3.sh @@ -8,7 +8,7 @@ # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP +. ${SCRIPT_VAR_DEFNS_FP} . $USHDIR/source_funcs.sh # #----------------------------------------------------------------------- @@ -55,7 +55,7 @@ if [ "$VERBOSE" = "TRUE" ]; then The arguments to script/function \"${script_name}\" have been set as follows: " - for (( i=0; i<$num_valid_args; i++ )); do + for (( i=0; i<${num_valid_args}; i++ )); do line=$( declare -p "${valid_args[$i]}" ) printf " $line\n" done diff --git a/scripts/exregional_run_post.sh b/scripts/exregional_run_post.sh index 44eb1f0c7..7e0df6465 100755 --- a/scripts/exregional_run_post.sh +++ b/scripts/exregional_run_post.sh @@ -8,7 +8,7 @@ # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP +. ${SCRIPT_VAR_DEFNS_FP} . $USHDIR/source_funcs.sh # #----------------------------------------------------------------------- @@ -55,7 +55,7 @@ if [ "$VERBOSE" = "TRUE" ]; then The arguments to script/function \"${script_name}\" have been set as follows: " - for (( i=0; i<$num_valid_args; i++ )); do + for (( i=0; i<${num_valid_args}; i++ )); do line=$( declare -p "${valid_args[$i]}" ) printf " $line\n" done diff --git a/tests/run_one_expt.sh b/tests/run_one_expt.sh index 0dbefd75e..c5b6ff3b0 100755 --- a/tests/run_one_expt.sh +++ b/tests/run_one_expt.sh @@ -66,7 +66,7 @@ if [ "$verbose" = "TRUE" ]; then The arguments to script/function \"${script_name}\" have been set as follows: " - for (( i=0; i<$num_valid_args; i++ )); do + for (( i=0; i<${num_valid_args}; i++ )); do line=$( declare -p "${valid_args[$i]}" ) printf " $line\n" done diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index e99120bc7..9bb85f319 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -94,7 +94,7 @@ else CYCLE_DIR="$EXPTDIR/${CDATE_generic}" fi -set_file_param "${WFLOW_XML_FP}" "SCRIPT_VAR_DEFNS_FP" "$SCRIPT_VAR_DEFNS_FP" +set_file_param "${WFLOW_XML_FP}" "SCRIPT_VAR_DEFNS_FP" "${SCRIPT_VAR_DEFNS_FP}" set_file_param "${WFLOW_XML_FP}" "CYCLE_DIR" "${CYCLE_DIR}" set_file_param "${WFLOW_XML_FP}" "ACCOUNT" "$ACCOUNT" set_file_param "${WFLOW_XML_FP}" "SCHED" "$SCHED" diff --git a/ush/get_extrn_mdl_file_dir_info.sh b/ush/get_extrn_mdl_file_dir_info.sh index bc3f79038..56585979f 100755 --- a/ush/get_extrn_mdl_file_dir_info.sh +++ b/ush/get_extrn_mdl_file_dir_info.sh @@ -69,7 +69,7 @@ function get_extrn_mdl_file_dir_info () { The arguments to script/function \"${func_name}\" have been set as follows: " - for (( i=0; i<$num_valid_args; i++ )); do + for (( i=0; i<${num_valid_args}; i++ )); do line=$( declare -p "${valid_args[$i]}" ) printf " $line\n" done diff --git a/ush/link_fix.sh b/ush/link_fix.sh index 3af35cb41..f5287555e 100755 --- a/ush/link_fix.sh +++ b/ush/link_fix.sh @@ -41,10 +41,11 @@ USHDIR="${script_dir}" # #----------------------------------------------------------------------- # 
-valid_args=( "verbose" \ - "script_var_defns_fp" \ - "file_group" \ - ) +valid_args=( \ +"verbose" \ +"script_var_defns_fp" \ +"file_group" \ +) process_args valid_args "$@" # #----------------------------------------------------------------------- @@ -69,7 +70,7 @@ if [ "$verbose" = "TRUE" ]; then The arguments to script/function \"${script_name}\" have been set as follows: " 1>&2 - for (( i=0; i<$num_valid_args; i++ )); do + for (( i=0; i<${num_valid_args}; i++ )); do line=$( declare -p "${valid_args[$i]}" ) printf " $line\n" 1>&2 done diff --git a/ush/set_predef_grid_params.sh b/ush/set_predef_grid_params.sh index cce46ddd2..f1b250e45 100644 --- a/ush/set_predef_grid_params.sh +++ b/ush/set_predef_grid_params.sh @@ -653,7 +653,7 @@ if [ "$QUILTING" = "TRUE" ]; then err_msg="\ The coordinate system used by the write-component output grid specified in WRTCMP_output_grid is not supported: - WRTCMP_output_grid = \"${WRITECMP_output_grid}\"" + WRTCMP_output_grid = \"${WRTCMP_output_grid}\"" check_var_valid_value \ "WRTCMP_output_grid" "valid_vals_WRTCMP_output_grid" "${err_msg}" # diff --git a/ush/setup.sh b/ush/setup.sh index 1b2378f50..667590b54 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -1464,7 +1464,7 @@ str_to_insert=${str_to_insert//$'\n'/\\n} # containing the name of the interpreter. # REGEXP="(^#!.*)" -sed -i -r -e "s|$REGEXP|\1\n\n$str_to_insert\n|g" $SCRIPT_VAR_DEFNS_FP +sed -i -r -e "s|$REGEXP|\1\n\n$str_to_insert\n|g" ${SCRIPT_VAR_DEFNS_FP} @@ -1612,7 +1612,7 @@ done <<< "${line_list}" # #----------------------------------------------------------------------- # -{ cat << EOM >> $SCRIPT_VAR_DEFNS_FP +{ cat << EOM >> ${SCRIPT_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -1693,7 +1693,7 @@ definitions file returned with a nonzero status." # if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - { cat << EOM >> $SCRIPT_VAR_DEFNS_FP + { cat << EOM >> ${SCRIPT_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # @@ -1722,7 +1722,7 @@ file returned with a nonzero status." elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - { cat << EOM >> $SCRIPT_VAR_DEFNS_FP + { cat << EOM >> ${SCRIPT_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # @@ -1759,7 +1759,7 @@ fi # #----------------------------------------------------------------------- # -{ cat << EOM >> $SCRIPT_VAR_DEFNS_FP +{ cat << EOM >> ${SCRIPT_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # From 19456a99a2846d895867f379e7441ac351d03c04 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 22 Oct 2019 15:13:42 -0600 Subject: [PATCH 010/203] Bug fix for the case of the archive-relative directory starting with a "/". --- scripts/exregional_get_extrn_files.sh | 35 ++++++++++++++++----------- 1 file changed, 21 insertions(+), 14 deletions(-) diff --git a/scripts/exregional_get_extrn_files.sh b/scripts/exregional_get_extrn_files.sh index b846a0a35..918ad5c37 100755 --- a/scripts/exregional_get_extrn_files.sh +++ b/scripts/exregional_get_extrn_files.sh @@ -390,22 +390,28 @@ details: # if [ "${EXTRN_MDL_ARCVREL_DIR}" != "." ]; then # -# The code below works if the first character of EXTRN_MDL_ARCVREL_DIR -# is a "/", which is the only case encountered thus far. The code may -# have to be modified to accomodate the case of the first character of -# EXTRN_MDL_ARCVREL_DIR not being a "/". 
+# The code below works if EXTRN_MDL_ARCVREL_DIR starts with a "/" or a +# "./", which are the only case encountered thus far. The code may have +# to be modified to accomodate other cases. # if [ "${EXTRN_MDL_ARCVREL_DIR:0:1}" = "/" -o \ "${EXTRN_MDL_ARCVREL_DIR:0:2}" = "./" ]; then - - mv_vrfy ${EXTRN_MDL_ARCVREL_DIR}/* . # -# Get the first subdirectory in EXTRN_MDL_ARCVREL_DIR, i.e. the directo- -# ry after the first forward slash. This is the subdirectory that we -# want to remove. +# Strip the "/" or "./" from the beginning of EXTRN_MDL_ARCVREL_DIR to +# obtain the relative directory from which to move the extracted files +# to the current directory. Then move the files. +# + rel_dir=$( printf "%s" "${EXTRN_MDL_ARCVREL_DIR}" | \ + sed -r 's%^(\/|\.\/)([^/]*)(.*)%\2\3%' ) + mv_vrfy ${rel_dir}/* . # - subdir_to_remove=$( printf "%s" "${EXTRN_MDL_ARCVREL_DIR}" | \ - sed -r 's%^(\/|\.\/)([^/]*).*%\2%' ) +# Get the first subdirectory in rel_dir, i.e. the subdirectory before +# the first forward slash. This is the subdirectory that we want to re- +# move since it no longer contains any files (only subdirectories). +# Then remove it. +# + subdir_to_remove=$( printf "%s" "${rel_dir}" | \ + sed -r 's%^([^/]*)(.*)%\1%' ) rm_vrfy -rf ./${subdir_to_remove} # # If EXTRN_MDL_ARCVREL_DIR does not start with a "/" (and it is not @@ -417,10 +423,10 @@ details: The archive-relative directory specified by EXTRN_MDL_ARCVREL_DIR [i.e. the directory \"within\" the tar file(s) listed in EXTRN_MDL_ARCV_FPS] is not the current directory (i.e. it is not \".\"), and it does not start -with a \"/\": +with a \"/\" or a \"./\": EXTRN_MDL_ARCVREL_DIR = \"${EXTRN_MDL_ARCVREL_DIR}\" EXTRN_MDL_ARCV_FPS = ${EXTRN_MDL_ARCV_FPS_str} -This script (\"${script_name}\) must be modified to account for this case. +This script must be modified to account for this case. " fi @@ -586,4 +592,5 @@ fi # #----------------------------------------------------------------------- # -{ restore_shell_opts; } > /dev/null 2>& +{ restore_shell_opts; } > /dev/null 2>&1 + From 52686a93fb5ef16e1d7237c732f600600cebfe38 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 24 Oct 2019 14:35:09 -0600 Subject: [PATCH 011/203] Modify sed command to ensure that the variable name does not include an equal sign or a double quote. --- ush/compare_config_scripts.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/compare_config_scripts.sh b/ush/compare_config_scripts.sh index a24ed139a..626cbbd20 100644 --- a/ush/compare_config_scripts.sh +++ b/ush/compare_config_scripts.sh @@ -78,7 +78,7 @@ while read crnt_line; do # Note that a variable name will be found only if the equal sign immed- # iately follows the variable name. # - var_name=$( printf "%s" "${crnt_line}" | sed -n -r -e "s/^([^ ]*)=.*/\1/p") + var_name=$( printf "%s" "${crnt_line}" | sed -n -r -e "s/^([^ =\"]*)=.*/\1/p") if [ -z "${var_name}" ]; then From ddcb898946da079bd37bae8a145612b92f3fd2d1 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 24 Oct 2019 14:50:58 -0600 Subject: [PATCH 012/203] (1) Remove variable DIAG_TABLE_CCPP_GFS_FN since it is not used (for GFS physics, the appropriate template file for the diag table is specified by DIAG_TABLE_FN); (2) For clarity, place curly braces around variable names containing underscores. 
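For illustration, a minimal sketch of why the braces matter when a
variable name containing underscores abuts other text (the file-name
prefix "fn" below is hypothetical, not a variable from these scripts):

  fn="gfs.t00z.atmf000"
  echo "$fn.nemsio"      # "." cannot be part of a name, so this prints
                         # "gfs.t00z.atmf000.nemsio"
  echo "$fn_nemsio"      # expands the (unset) variable "fn_nemsio"
  echo "${fn}_nemsio"    # prints "gfs.t00z.atmf000_nemsio"

The braces also make the end of the variable name unambiguous to the
reader, which is the clarity motivation cited above.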
--- ush/config_defaults.sh | 8 ++------ ush/generate_FV3SAR_wflow.sh | 4 ++-- ush/set_file_param.sh | 9 ++++----- 3 files changed, 8 insertions(+), 13 deletions(-) diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index b7be3fb0b..957fc760c 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -109,13 +109,9 @@ PTMP="/path/to/temporary/directory/ptmp" # bled forecast that uses GSD physics. # # DIAG_TABLE_FN: -# Name of file that specifies the fields that the FV3SAR will output. -# -# DIAG_TABLE_CCPP_GFS_FN: # Name of file that specifies the fields that the FV3SAR will output for -# a CCPP-enabled forecast that uses GFS physics. This is needed because -# the current version of the CCPP-enabled FV3SAR executable using GFS -# physics cannot handle refl_10cm variable in diag_table. +# a forecast that uses GFS physics. The forecast can be either with +# CCPP enabled or disabled. # # DIAG_TABLE_CCPP_GSD_FN: # Name of file that specifies the fields that the FV3SAR will output for diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index 9bb85f319..ae30deb52 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -144,13 +144,13 @@ regex_search="(^\s*)(\&DATE_FIRST_CYCL;)(CC00)( i=0 for cycl in "${CYCL_HRS[@]}"; do regex_replace="\1${cycl}\3\4${cycl}00 \7${cycl}00\9" - crnt_line=$( sed -n -r -e "s%$regex_search%$regex_replace%p" "${WFLOW_XML_FP}" ) + crnt_line=$( sed -n -r -e "s%${regex_search}%${regex_replace}%p" "${WFLOW_XML_FP}" ) if [ "$i" -eq "0" ]; then all_cycledefs="${crnt_line}" else all_cycledefs=$( printf "%s\n%s" "${all_cycledefs}" "${crnt_line}" ) fi - i=$(( $i+1 )) + i=$((i+1)) done # # Replace all actual newlines in the variable all_cycledefs with back- diff --git a/ush/set_file_param.sh b/ush/set_file_param.sh index bee0476fa..83d690772 100644 --- a/ush/set_file_param.sh +++ b/ush/set_file_param.sh @@ -104,7 +104,6 @@ Setting parameter \"$param\" in file \"$file\" to \"$value\" ..." ;; # "${DIAG_TABLE_FN}" | \ - "${DIAG_TABLE_CCPP_GSD_FN}" | \ "${DIAG_TABLE_CCPP_GSD_FN}") regex_search="(.*)(<$param>)(.*)" regex_replace="\1$value\3" @@ -151,18 +150,18 @@ specified for this file: # #----------------------------------------------------------------------- # - grep -q -E "$regex_search" $file_full_path + grep -q -E "${regex_search}" "${file_full_path}" if [ $? -eq 0 ]; then - sed -i -r -e "s%$regex_search%$regex_replace%" $file_full_path + sed -i -r -e "s%${regex_search}%${regex_replace}%" "${file_full_path}" else print_err_msg_exit "\ Specified file (file_full_path) does not contain the searched-for regular expression (regex_search): - file_full_path = \"$file_full_path\" + file_full_path = \"${file_full_path}\" param = \"$param\" value = \"$value\" - regex_search = $regex_search" + regex_search = ${regex_search}" fi # #----------------------------------------------------------------------- From 97489578f43e74b2301534efdafcdfafdefd58de Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 25 Oct 2019 10:37:47 -0600 Subject: [PATCH 013/203] Combine the functions print_info_msg and print_info_msg_verbose into one function (print_info_msg) that can accept one (the message to print out) or two (the verbose flag and the message) arguments. 
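For illustration, the calling convention the combined function is meant
to support (the messages are taken from the scripts modified in this
patch; the exact printing behavior is defined by the function body
shown in the print_msg.sh hunks below):

  # Two-argument form: the message is printed only if the first
  # argument is "TRUE".
  print_info_msg "$VERBOSE" "Starting grid file generation..."

  # One-argument form: only the message is supplied.
  print_info_msg "Grid file generation complete."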
--- scripts/exregional_make_grid.sh | 10 +- scripts/exregional_make_orog.sh | 16 +-- scripts/exregional_run_fv3.sh | 16 +-- scripts/exregional_run_post.sh | 3 +- ush/generate_FV3SAR_wflow.sh | 24 ++-- ush/print_msg.sh | 189 ++++++++++++++++---------------- ush/set_file_param.sh | 2 +- ush/set_gridparams_GFDLgrid.sh | 10 +- ush/setup.sh | 6 +- 9 files changed, 138 insertions(+), 138 deletions(-) diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index b21461eab..e6c8fa16a 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -320,7 +320,7 @@ mkdir_vrfy -p "$tmpdir" # #----------------------------------------------------------------------- # -print_info_msg_verbose "Starting grid file generation..." +print_info_msg "$VERBOSE" "Starting grid file generation..." if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then @@ -360,7 +360,7 @@ elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then RGNL_GRID_NML_FP="$tmpdir/${RGNL_GRID_NML_FN}" cp_vrfy ${TEMPLATE_DIR}/${RGNL_GRID_NML_FN} ${RGNL_GRID_NML_FP} - print_info_msg_verbose "\ + print_info_msg "$VERBOSE" "\ Setting parameters in file: RGNL_GRID_NML_FP = \"$RGNL_GRID_NML_FP\"" # @@ -430,7 +430,7 @@ mv_vrfy ${CRES}_grid.tile${TILE_RGNL}.nc \ mv_vrfy ${CRES}_mosaic.nc ${GRID_DIR} cd_vrfy - -print_info_msg_verbose "Grid file generation complete." +print_info_msg "$VERBOSE" "Grid file generation complete." # #----------------------------------------------------------------------- # @@ -462,7 +462,7 @@ cd_vrfy ${tmpdir} # call the shave executable. Finally, move the resultant file to the # GRID_DIR directory. # -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" "\ \"Shaving\" grid file with wide halo to obtain grid file with ${nh3_T7}-cell- wide halo..." @@ -487,7 +487,7 @@ mv_vrfy ${shaved_fp} ${GRID_DIR} # call the shave executable. Finally, move the resultant file to the # GRID_DIR directory. # -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" "\ \"Shaving\" grid file with wide halo to obtain grid file with ${nh4_T7}-cell- wide halo..." diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index 1e0584758..fac29fad9 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -256,7 +256,7 @@ export exec_dir="$EXECDIR" # #----------------------------------------------------------------------- # -print_info_msg_verbose "Starting orography file generation..." +print_info_msg "$VERBOSE" "Starting orography file generation..." tmp_dir="${raw_dir}/tmp" @@ -316,7 +316,7 @@ ln_vrfy -sf oro.${CRES}.tile${TILE_RGNL}.halo${nhw_T7}.nc \ oro.${CRES}.tile${TILE_RGNL}.nc cd_vrfy - -print_info_msg_verbose "Orography file generation complete." +print_info_msg "$VERBOSE" "Orography file generation complete." # #----------------------------------------------------------------------- # @@ -324,7 +324,7 @@ print_info_msg_verbose "Orography file generation complete." # #----------------------------------------------------------------------- # -print_info_msg_verbose "Setting orography filtering parameters..." +print_info_msg "$VERBOSE" "Setting orography filtering parameters..." # Need to fix the following (also above). Then redo to get cell_size_avg. #cd_vrfy ${GRID_DIR} @@ -441,7 +441,7 @@ fi # #----------------------------------------------------------------------- # -print_info_msg_verbose "Starting filtering of orography..." +print_info_msg "$VERBOSE" "Starting filtering of orography..." 
echo "gtype = \"$gtype\"" # The script below creates absolute symlinks in $filter_dir. That's @@ -475,7 +475,7 @@ mv_vrfy oro.${CRES}.tile${TILE_RGNL}.nc \ # oro.${CRES}.tile${TILE_RGNL}.nc cd_vrfy - -print_info_msg_verbose "Filtering of orography complete." +print_info_msg "$VERBOSE" "Filtering of orography complete." # #----------------------------------------------------------------------- # @@ -508,7 +508,7 @@ cd_vrfy ${shave_dir} # call the shave executable. Finally, move the resultant file to the # OROG_DIR directory. # -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" "\ \"Shaving\" orography file with wide halo to obtain orography file with ${nh0_T7}-cell-wide halo..." @@ -533,7 +533,7 @@ mv_vrfy ${shaved_fp} ${OROG_DIR} # Then call the shave executable. Finally, move the resultant file to # the OROG_DIR directory. # -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" "\ \"Shaving\" orography file with wide halo to obtain orography file with ${nh4_T7}-cell-wide halo..." @@ -580,7 +580,7 @@ Call to script to create links to orography files failed. # Moved the following to exregional_make_sfc_climo.sh script since it # needs to be done only if the make_sfc_climo task is run. -#print_info_msg_verbose "\ +#print_info_msg "$VERBOSE" "\ #Creating links needed by the make_sfc_climo task to the 4-halo grid and #orography files..." # diff --git a/scripts/exregional_run_fv3.sh b/scripts/exregional_run_fv3.sh index 6a98938ac..a188c5a52 100755 --- a/scripts/exregional_run_fv3.sh +++ b/scripts/exregional_run_fv3.sh @@ -215,7 +215,7 @@ esac # #----------------------------------------------------------------------- # -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" "\ Creating links in the INPUT subdirectory of the current cycle's run di- rectory to the grid and (filtered) orography files ..." @@ -328,7 +328,7 @@ fi # #----------------------------------------------------------------------- # -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" "\ Creating links with names that FV3 looks for in the INPUT subdirectory of the current cycle's run directory (CYCLE_DIR)..." @@ -345,7 +345,7 @@ ln_vrfy -sf sfc_data.tile${TILE_RGNL}.halo${nh0_T7}.nc sfc_data.nc # cd_vrfy ${CYCLE_DIR} -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" "\ Creating links in the current cycle's run directory to static (fix) files in the FIXam directory..." # @@ -396,7 +396,7 @@ rm_vrfy -f time_stamp.out # #----------------------------------------------------------------------- # -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" "\ Creating links in the current cycle's run directory to cycle-independent model input files in the main experiment directory..." @@ -425,7 +425,7 @@ fi # #----------------------------------------------------------------------- # -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" "\ Copying cycle-independent model input files from the templates directory to the current cycle's run directory..." @@ -463,7 +463,7 @@ YYYYMMDD=${CDATE:0:8} # MODEL_CONFIG_FP="${CYCLE_DIR}/${MODEL_CONFIG_FN}" -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" "\ Setting parameters in file: MODEL_CONFIG_FP = \"${MODEL_CONFIG_FP}\"" @@ -532,7 +532,7 @@ fi # DIAG_TABLE_FP="${CYCLE_DIR}/${DIAG_TABLE_FN}" -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" "\ Setting parameters in file: DIAG_TABLE_FP = \"${DIAG_TABLE_FP}\"" @@ -556,7 +556,7 @@ else fi if [ -f $FV3SAR_EXEC ]; then - print_info_msg_verbose "\ + print_info_msg "$VERBOSE" "\ Copying the FV3SAR executable to the run directory..." 
cp_vrfy ${FV3SAR_EXEC} ${CYCLE_DIR}/fv3_gfs.x else diff --git a/scripts/exregional_run_post.sh b/scripts/exregional_run_post.sh index 7e0df6465..c5abcc54b 100755 --- a/scripts/exregional_run_post.sh +++ b/scripts/exregional_run_post.sh @@ -67,7 +67,8 @@ fi # #----------------------------------------------------------------------- # -print_info_msg_verbose "Starting post-processing for fhr = $fhr hr..." +print_info_msg "$VERBOSE" "\ +Starting post-processing for fhr = $fhr hr..." case $MACHINE in diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index ae30deb52..d2044a5b4 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -366,7 +366,7 @@ fi # that can be changed. if [ "${RUN_ENVIR}" != "nco" ]; then - print_info_msg_verbose "\ + print_info_msg "$VERBOSE" "\ Copying fixed files from system directory to the workflow directory..." check_for_preexist_dir $FIXam "delete" @@ -386,7 +386,7 @@ fi # #----------------------------------------------------------------------- # -print_info_msg_verbose " +print_info_msg "$VERBOSE" " Copying templates of various input files to the experiment directory..." # #----------------------------------------------------------------------- @@ -440,7 +440,7 @@ if [ "${USE_CCPP}" = "TRUE" ]; then # 4) Uses the "module use ..." command to prepend or append paths to # Lmod's search path (MODULEPATH). # - print_info_msg_verbose " + print_info_msg "$VERBOSE" " Copying the shell script that initializes the Lmod (Lua-based module) system/software for handling modules..." # @@ -473,7 +473,7 @@ Heredoc (cat) command to append command to add path to CCPP libraries to the Lmod initialization script in the experiment directory returned with a nonzero status." - print_info_msg_verbose " + print_info_msg "$VERBOSE" " Copying the modulefile required for running the CCPP-enabled version of the FV3SAR under NEMS to the experiment directory..." cp_vrfy ${NEMSfv3gfs_DIR}/NEMS/src/conf/modules.nems $EXPTDIR/modules.fv3 @@ -492,7 +492,7 @@ the FV3SAR under NEMS to the experiment directory..." [ "${EXTRN_MDL_NAME_LBCS}" = "GSMGFS" -o \ "${EXTRN_MDL_NAME_LBCS}" = "FV3GFS" ]; then - print_info_msg_verbose " + print_info_msg "$VERBOSE" " Copying the FV3 namelist file for the GFS physics suite to the experi- ment directory..." cp_vrfy ${TEMPLATE_DIR}/${FV3_NML_CCPP_GFSPHYS_GFSEXTRN_FN} \ @@ -512,13 +512,13 @@ script(s) accordingly) and rerun." fi - print_info_msg_verbose " + print_info_msg "$VERBOSE" " Copying the field table file for the GFS physics suite to the experiment directory..." cp_vrfy ${TEMPLATE_DIR}/${FIELD_TABLE_FN} \ $EXPTDIR - print_info_msg_verbose " + print_info_msg "$VERBOSE" " Copying the CCPP XML file for the GFS physics suite to the experiment directory..." cp_vrfy ${NEMSfv3gfs_DIR}/ccpp/suites/suite_FV3_GFS_2017_gfdlmp.xml \ @@ -532,25 +532,25 @@ directory..." # elif [ "${CCPP_PHYS_SUITE}" = "GSD" ]; then - print_info_msg_verbose " + print_info_msg "$VERBOSE" " Copying the FV3 namelist file for the GSD physics suite to the experi- ment directory..." cp_vrfy ${TEMPLATE_DIR}/${FV3_NML_CCPP_GSDPHYS_FN} \ $EXPTDIR/${FV3_NML_FN} - print_info_msg_verbose " + print_info_msg "$VERBOSE" " Copying the field table file for the GSD physics suite to the experiment directory..." cp_vrfy ${TEMPLATE_DIR}/${FIELD_TABLE_CCPP_GSD_FN} \ $EXPTDIR/${FIELD_TABLE_FN} - print_info_msg_verbose " + print_info_msg "$VERBOSE" " Copying the CCPP XML file for the GSD physics suite to the experiment directory..." 
cp_vrfy ${NEMSfv3gfs_DIR}/ccpp/suites/suite_FV3_GSD_v0.xml \ $EXPTDIR/suite_FV3_GSD_v0.xml - print_info_msg_verbose " + print_info_msg "$VERBOSE" " Copying the CCN fixed file needed by Thompson microphysics (part of the GSD suite) to the experiment directory..." cp_vrfy $FIXgsd/CCN_ACTIVATE.BIN $EXPTDIR @@ -582,7 +582,7 @@ cp_vrfy ${TEMPLATE_DIR}/${NEMS_CONFIG_FN} $EXPTDIR # FV3_NML_FP="$EXPTDIR/${FV3_NML_FN}" -print_info_msg_verbose " +print_info_msg "$VERBOSE" " Setting parameters in FV3 namelist file (FV3_NML_FP): FV3_NML_FP = \"${FV3_NML_FP}\"" # diff --git a/ush/print_msg.sh b/ush/print_msg.sh index bd778f6f8..9ee85c102 100644 --- a/ush/print_msg.sh +++ b/ush/print_msg.sh @@ -27,54 +27,79 @@ function print_info_msg() { # #----------------------------------------------------------------------- # -# Check arguments. +# Get the name of this function. # #----------------------------------------------------------------------- # - if [ "$#" -ne 1 ]; then - print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": Incorrect number of arguments specified. -Usage: - - ${FUNCNAME[0]} msg - -where msg is the message to print." - fi + local func_name="${FUNCNAME[0]}" # #----------------------------------------------------------------------- # -# Set local variables. +# Declare local variables. # #----------------------------------------------------------------------- # - local info_msg="$1" + local info_msg + local verbose # #----------------------------------------------------------------------- # -# Remove trailing newlines from info_msg. Command substitution [i.e. -# the $( ... )] will do this automatically. +# If one argument is supplied, we assume it is the message to print out. +# between informational lines that are always printed. # #----------------------------------------------------------------------- # - info_msg=$( printf '%s' "${info_msg}" ) + if [ "$#" -eq 1 ]; then + + info_msg="$1" + verbose="FALSE" + + elif [ "$#" -eq 2 ]; then + + verbose="$1" + info_msg="$2" # #----------------------------------------------------------------------- # -# Add informational lines at the beginning and end of the message. +# If no arguments or more than two arguments are supplied, print out a +# usage message and exit. # #----------------------------------------------------------------------- # - local MSG=$(printf "\ -$info_msg -") + else + + printf "\ +Function \"${func_name}\": Incorrect number of arguments specified. +Usage: + + ${func_name} [verbose] info_msg + +where the arguments are defined as follows: + + verbose: + This is an optional argument. If set to \"TRUE\", info_msg will be + printed to stdout. Otherwise, info_msg will not be printed. + + info_msg: + This is the informational message to print to stdout. + +This function prints an informational message to stout. If one argument +is passed in, then that argument is assumed to be info_msg and is print- +ed. If two arguments are passed in, then the first is assumed to be +verbose and the second info_msg. In this case, info_msg gets printed +only if verbose is set to \"TRUE\".\n" + + fi # #----------------------------------------------------------------------- # -# Print out the message. +# If verbose is set to "TRUE", print out the message. 
# #----------------------------------------------------------------------- # - printf '%s\n' "$MSG" + if [ "$verbose" = "TRUE" ]; then + printf "%s\n" "${info_msg}" + fi # #----------------------------------------------------------------------- # @@ -88,15 +113,16 @@ $info_msg + # #----------------------------------------------------------------------- # -# Function to print informational messages using printf, but only if the -# VERBOSE flag is set to "TRUE". +# Function to print out an error message to stderr using printf and then +# exit. # #----------------------------------------------------------------------- # -function print_info_msg_verbose() { +function print_err_msg_exit() { # #----------------------------------------------------------------------- # @@ -109,59 +135,20 @@ function print_info_msg_verbose() { # #----------------------------------------------------------------------- # -# Check arguments. -# -#----------------------------------------------------------------------- -# - if [ "$#" -ne 1 ]; then - print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": Incorrect number of arguments specified. -Usage: - - ${FUNCNAME[0]} msg - -where msg is the message to print." - fi -# -#----------------------------------------------------------------------- -# -# Print the message only if VERBOSE is set to "TRUE". -# -#----------------------------------------------------------------------- -# - if [ "$VERBOSE" = "TRUE" ]; then - print_info_msg "$1" - fi -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 -} - - - -# -#----------------------------------------------------------------------- -# -# Function to print error messages using printf and exit. +# Get the name of this function. # #----------------------------------------------------------------------- # -function print_err_msg_exit() { + local func_name="${FUNCNAME[0]}" # #----------------------------------------------------------------------- # -# Save current shell options (in a global array). Then set new options -# for this script/function. +# Declare local variables. # #----------------------------------------------------------------------- # - { save_shell_opts; set -u +x; } > /dev/null 2>&1 + local err_msg + local caller_name # #----------------------------------------------------------------------- # @@ -171,9 +158,9 @@ function print_err_msg_exit() { # if [ "$#" -eq 0 ]; then - local MSG=$(printf "\ -ERROR. Exiting script or function with nonzero status. -") + err_msg=$( printf "\ +ERROR. Exiting script or function with nonzero status." + ) # #----------------------------------------------------------------------- # @@ -184,7 +171,7 @@ ERROR. Exiting script or function with nonzero status. # elif [ "$#" -eq 1 ]; then - local err_msg="$1" + err_msg="$1" # #----------------------------------------------------------------------- # @@ -201,11 +188,11 @@ ERROR. Exiting script or function with nonzero status. # #----------------------------------------------------------------------- # - local MSG=$(printf "\ + err_msg=$( printf "\ ERROR: -$err_msg -Exiting script/function with nonzero status. -") +${err_msg} +Exiting script/function with nonzero status." + ) # #----------------------------------------------------------------------- # @@ -218,8 +205,8 @@ Exiting script/function with nonzero status. 
# elif [ "$#" -eq 2 ]; then - local script_func_name="$1" - local err_msg="$2" + caller_name="$1" + err_msg="$2" # #----------------------------------------------------------------------- # @@ -236,43 +223,55 @@ Exiting script/function with nonzero status. # #----------------------------------------------------------------------- # - local MSG=$(printf "\ -ERROR from script/function \"${script_func_name}\": -$err_msg -Exiting script/function with nonzero status. -") + err_msg=$(printf "\ +ERROR from script/file \"${caller_name}\": +${err_msg} +Exiting script/function with nonzero status." + ) # #----------------------------------------------------------------------- # -# If more than two arguments are supplied, print out a usage error mes- -# sage. +# If more than two arguments are supplied, print out a usage message and +# exit. # #----------------------------------------------------------------------- # - elif [ "$#" -gt 1 ]; then + else - local MSG=$(printf "\ -Function \"${FUNCNAME[0]}\": Incorrect number of arguments specified. + printf "\ +Function \"${func_name}\": Incorrect number of arguments specified. Usage: - ${FUNCNAME[0]} + ${func_name} [caller_name] [err_msg] + +where the arguments are defined as follows: -or + caller_name: + This is an optional argument that specifies the name of the script or + function that calls this function (i.e. the caller). - ${FUNCNAME[0]} msg + err_msg: + This is an optional argument that specifies the error message to print + to stderr. -where msg is an optional error message to print. Exiting with nonzero status. -") +This function prints an error message to stderr. If no arguments are +passed in, then a standard error message is printed. If only one argu- +ment is passed in, then that argument is assumed to be err_msg, and this +along with appropriate leading and trailing lines are printed. If two +arguments are passed in, then the first is assumed to be caller_name and +the second err_msg. In this case, err_msg along with appropriate lead- +ing and trailing lines are printed, with the leading line containing the +name of the caller.\n" fi # #----------------------------------------------------------------------- # -# Print out MSG and exit function/script with nonzero status. +# Print out err_msg and exit function/script with nonzero status. # #----------------------------------------------------------------------- # - printf '\n%s\n' "$MSG" 1>&2 + printf "\n%s\n" "${err_msg}" 1>&2 exit 1 # #----------------------------------------------------------------------- diff --git a/ush/set_file_param.sh b/ush/set_file_param.sh index 83d690772..62382cf87 100644 --- a/ush/set_file_param.sh +++ b/ush/set_file_param.sh @@ -67,7 +67,7 @@ where the arguments are defined as follows: # #----------------------------------------------------------------------- # - print_info_msg_verbose "\ + print_info_msg "$VERBOSE" "\ Setting parameter \"$param\" in file \"$file\" to \"$value\" ..." 
# #----------------------------------------------------------------------- diff --git a/ush/set_gridparams_GFDLgrid.sh b/ush/set_gridparams_GFDLgrid.sh index 0ca3fa665..94b8557c5 100644 --- a/ush/set_gridparams_GFDLgrid.sh +++ b/ush/set_gridparams_GFDLgrid.sh @@ -197,7 +197,7 @@ fi # #----------------------------------------------------------------------- # -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" "\ Original values of the halo width on the tile 6 supergrid and on the tile 7 grid are: nhw_T6SG = $nhw_T6SG @@ -207,7 +207,7 @@ nhw_T6SG=$(( $istart_rgnl_T6SG - $istart_rgnl_wide_halo_T6SG )) nhw_T6=$(( $nhw_T6SG/2 )) nhw_T7=$(( $nhw_T6*$refine_ratio )) -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" "\ Values of the halo width on the tile 6 supergrid and on the tile 7 grid AFTER adjustments are: nhw_T6SG = $nhw_T6SG @@ -240,7 +240,7 @@ ny_T6SG=$(( $ny_T6*2 )) prime_factors_nx_T7=$( factor $nx_T7 | sed -r -e 's/^[0-9]+: (.*)/\1/' ) prime_factors_ny_T7=$( factor $ny_T7 | sed -r -e 's/^[0-9]+: (.*)/\1/' ) -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" "\ The number of cells in the two horizontal directions (x and y) on the parent tile's (tile 6) grid and supergrid are: nx_T6 = $nx_T6 @@ -301,12 +301,12 @@ ny_wide_halo_T6SG=$(( $jend_rgnl_wide_halo_T6SG - $jstart_rgnl_wide_halo_T6SG + ny_wide_halo_T6=$(( $ny_wide_halo_T6SG/2 )) ny_wide_halo_T7=$(( $ny_wide_halo_T6*$refine_ratio )) -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" "\ nx_wide_halo_T7 = $nx_T7 \ (istart_rgnl_wide_halo_T6SG = $istart_rgnl_wide_halo_T6SG, \ iend_rgnl_wide_halo_T6SG = $iend_rgnl_wide_halo_T6SG)" -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" "\ ny_wide_halo_T7 = $ny_T7 \ (jstart_rgnl_wide_halo_T6SG = $jstart_rgnl_wide_halo_T6SG, \ jend_rgnl_wide_halo_T6SG = $jend_rgnl_wide_halo_T6SG)" diff --git a/ush/setup.sh b/ush/setup.sh index 667590b54..fa65e75b5 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -386,7 +386,7 @@ if [ "${CCPP_PHYS_SUITE}" = "GFS" ]; then "${EXTRN_MDL_NAME_ICS}" != "FV3GFS" ] || \ [ "${EXTRN_MDL_NAME_LBCS}" != "GSMGFS" -a \ "${EXTRN_MDL_NAME_LBCS}" != "FV3GFS" ]; then - print_info_msg_verbose " + print_info_msg "$VERBOSE" " The following combination of physics suite and external models is not allowed: CCPP_PHYS_SUITE = \"${CCPP_PHYS_SUITE}\" @@ -1159,7 +1159,7 @@ if [ "$QUILTING" = "TRUE" ]; then PE_MEMBER01=$(( ${PE_MEMBER01} + ${WRTCMP_write_groups}*${WRTCMP_write_tasks_per_group} )) fi -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" "\ The number of MPI tasks for the forecast (including those for the write component if it is being used) are: PE_MEMBER01 = ${PE_MEMBER01}" @@ -1189,7 +1189,7 @@ by the number of MPI tasks in the y direction (layout_y): layout_y = $layout_y" fi -print_info_msg_verbose "\ +print_info_msg "$VERBOSE" "\ The MPI task layout is: layout_x = $layout_x layout_y = $layout_y" From d653a6380d6c4a9e99f44ee4bab2aec1af50225c Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 25 Oct 2019 15:50:45 -0600 Subject: [PATCH 014/203] (1) Change functions print_info_msg and print_err_msg_exit so that they get information about the calling function or script from built-in bash variables; (2) Eliminate the argument "script_name" to print_err_msg_exit (since now it can be obtained from bash built-in variables); (3) Change calls to print_err_msg_exit in other scripts/functions to eliminate the "script_name" argument. 
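For illustration, a minimal sketch of the bash built-ins that make an
explicit "script_name"/"func_name" argument unnecessary (this shows the
general mechanism only; the hypothetical helper below is not the exact
code added to print_msg.sh):

  function report_caller() {
  # FUNCNAME[1], BASH_SOURCE[1], and BASH_LINENO[0] describe the caller
  # of the currently executing function.
    local caller_fn="${FUNCNAME[1]:-main}"
    local caller_file="${BASH_SOURCE[1]:-$0}"
    local caller_line="${BASH_LINENO[0]:-0}"
    printf "Called from \"%s\" (file \"%s\", line %s)\n" \
           "${caller_fn}" "${caller_file}" "${caller_line}"
  }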
--- jobs/JREGIONAL_GET_EXTRN_FILES | 29 ++--- jobs/JREGIONAL_MAKE_GRID | 2 +- jobs/JREGIONAL_MAKE_IC_LBC0 | 7 +- jobs/JREGIONAL_MAKE_LBC1_TO_LBCN | 7 +- jobs/JREGIONAL_MAKE_OROG | 2 +- jobs/JREGIONAL_MAKE_SFC_CLIMO | 2 +- jobs/JREGIONAL_RUN_FV3 | 2 +- jobs/JREGIONAL_RUN_POST | 2 +- scripts/exregional_get_extrn_files.sh | 48 +++---- scripts/exregional_make_grid.sh | 40 +++--- scripts/exregional_make_ic_lbc0.sh | 8 +- scripts/exregional_make_lbc1_to_lbcn.sh | 10 +- scripts/exregional_make_orog.sh | 29 ++--- scripts/exregional_make_sfc_climo.sh | 9 +- scripts/exregional_run_fv3.sh | 13 +- tests/run_mltpl_expts.sh | 5 +- tests/run_one_expt.sh | 5 +- ush/check_for_preexist_dir.sh | 3 +- ush/check_var_valid_value.sh | 10 +- ush/count_files.sh | 3 +- ush/generate_FV3SAR_wflow.sh | 23 ++-- ush/get_extrn_mdl_file_dir_info.sh | 46 +++---- ush/interpol_to_arbit_CRES.sh | 4 +- ush/is_array.sh | 4 +- ush/iselementof.sh | 4 +- ush/link_fix.sh | 33 ++--- ush/print_msg.sh | 165 ++++++++---------------- ush/process_args.sh | 13 +- ush/set_extrn_mdl_params.sh | 56 ++++---- ush/set_file_param.sh | 8 +- ush/set_predef_grid_params.sh | 24 ++-- ush/setup.sh | 89 ++++++------- 32 files changed, 287 insertions(+), 418 deletions(-) diff --git a/jobs/JREGIONAL_GET_EXTRN_FILES b/jobs/JREGIONAL_GET_EXTRN_FILES index e32dd2ec1..3b21afa82 100755 --- a/jobs/JREGIONAL_GET_EXTRN_FILES +++ b/jobs/JREGIONAL_GET_EXTRN_FILES @@ -79,14 +79,13 @@ case $EXTRN_MDL_NAME in # last CDATE for the GSMGFS to the one 6 hours before this. CDATE_max="2019061206" if [ "$CDATE" -gt "$CDATE_max" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Output from the specified external model (EXTRN_MDL_NAME) is not availa- ble for the specified cycle date and time (CDATE) because the latter is later than the last forecast date and time (CDATE_max) with this model: EXTRN_MDL_NAME = \"${EXTRN_MDL_NAME}\" CDATE_max = \"${CDATE_max}\" - CDATE = \"${CDATE}\" -" + CDATE = \"${CDATE}\"" fi ;; @@ -98,14 +97,13 @@ later than the last forecast date and time (CDATE_max) with this model: # CDATE_min="2019061212" CDATE_min="2018121500" if [ "$CDATE" -lt "$CDATE_min" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Output from the specified external model (EXTRN_MDL_NAME) is not availa- ble for the specified cycle date and time (CDATE) because the latter is earlier than the implementation date of this model: EXTRN_MDL_NAME = \"${EXTRN_MDL_NAME}\" CDATE_min = \"${CDATE_min}\" - CDATE = \"${CDATE}\" -" + CDATE = \"${CDATE}\"" fi ;; @@ -114,14 +112,13 @@ earlier than the implementation date of this model: # July 01, 2015. CDATE_min="2015070100" if [ "$CDATE" -lt "$CDATE_min" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Output from the specified external model (EXTRN_MDL_NAME) is not availa- ble for the specified cycle date and time (CDATE) because the latter is earlier than the implementation date of this model: EXTRN_MDL_NAME = \"${EXTRN_MDL_NAME}\" CDATE_min = \"${CDATE_min}\" - CDATE = \"${CDATE}\" -" + CDATE = \"${CDATE}\"" fi ;; @@ -131,14 +128,13 @@ earlier than the implementation date of this model: # ber 30, 2014. 
CDATE_min="2014103000" if [ "$CDATE" -lt "$CDATE_min" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Output from the specified external model (EXTRN_MDL_NAME) is not availa- ble for the specified cycle date and time (CDATE) because the latter is earlier than the implementation date of this model: EXTRN_MDL_NAME = \"${EXTRN_MDL_NAME}\" CDATE_min = \"${CDATE_min}\" - CDATE = \"${CDATE}\" -" + CDATE = \"${CDATE}\"" fi ;; @@ -164,12 +160,11 @@ elif [ "$ICS_OR_LBCS" = "LBCS" ]; then TIME_OFFSET_HRS="$EXTRN_MDL_LBCS_OFFSET_HRS" else valid_vals_ICS_OR_LBCS_str=$( printf "\"%s\" " "${valid_vals_ICS_OR_LBCS[@]}" ) - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Invalid value specified for ICS_OR_LBCS: ICS_OR_LBCS = \"$ICS_OR_LBCS\" Valid values are: - ${valid_vals_ICS_OR_LBCS_str} -" + ${valid_vals_ICS_OR_LBCS_str}" fi # #----------------------------------------------------------------------- @@ -192,7 +187,7 @@ EXTRN_MDL_FILES_DIR="${CYCLE_DIR}/${EXTRN_MDL_NAME}/${ICS_OR_LBCS}" #----------------------------------------------------------------------- # mkdir_vrfy -p "$EXTRN_MDL_FILES_DIR" -cd_vrfy $EXTRN_MDL_FILES_DIR || print_err_msg_exit "${script_name}" "\ +cd_vrfy $EXTRN_MDL_FILES_DIR || print_err_msg_exit "\ Could not change directory to EXTRN_MDL_FILES_DIR: EXTRN_MDL_FILES_DIR = \"$EXTRN_MDL_FILES_DIR\"" # @@ -237,7 +232,7 @@ $SCRIPTSDIR/exregional_get_extrn_files.sh \ EXTRN_MDL_ARCV_FPS="${EXTRN_MDL_ARCV_FPS_str}" \ EXTRN_MDL_ARCV_FMT="${EXTRN_MDL_ARCV_FMT}" \ EXTRN_MDL_ARCVREL_DIR="${EXTRN_MDL_ARCVREL_DIR}" || \ - print_err_msg_exit "${script_name}" "\ +print_err_msg_exit "\ Call to ex-script corresponding to J-job \"${script_name}\" failed." # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_GRID b/jobs/JREGIONAL_MAKE_GRID index f70c1ccd5..6198efae4 100755 --- a/jobs/JREGIONAL_MAKE_GRID +++ b/jobs/JREGIONAL_MAKE_GRID @@ -150,7 +150,7 @@ export stretch_fac # ${SCRIPTSDIR}/exregional_make_grid.sh \ WORKDIR_LOCAL="ABCD" || \ - print_err_msg_exit "${script_name}" "\ +print_err_msg_exit "\ Call to ex-script corresponding to J-job \"${script_name}\" failed." # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_IC_LBC0 b/jobs/JREGIONAL_MAKE_IC_LBC0 index 8c1fa966b..b844ba19f 100755 --- a/jobs/JREGIONAL_MAKE_IC_LBC0 +++ b/jobs/JREGIONAL_MAKE_IC_LBC0 @@ -183,10 +183,9 @@ get_extrn_mdl_file_dir_info \ # #----------------------------------------------------------------------- # -WGRIB2_DIR=$( which wgrib2 ) || print_err_msg_exit "${script_name}" "\ +WGRIB2_DIR=$( which wgrib2 ) || print_err_msg_exit "\ Directory in which the wgrib2 executable is located not found: - WGRIB2_DIR = \"${WGRIB2_DIR}\" -" + WGRIB2_DIR = \"${WGRIB2_DIR}\"" # #----------------------------------------------------------------------- # @@ -212,7 +211,7 @@ $SCRIPTSDIR/exregional_make_ic_lbc0.sh \ ICS_DIR="${ICS_DIR}" \ WGRIB2_DIR="${WGRIB2_DIR}" \ APRUN="${APRUN}" || \ - print_err_msg_exit "${script_name}" "\ +print_err_msg_exit "\ Call to ex-script corresponding to J-job \"${script_name}\" failed." 
# #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN b/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN index 72bc97fd9..bde2b4696 100755 --- a/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN +++ b/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN @@ -184,10 +184,9 @@ get_extrn_mdl_file_dir_info \ # #----------------------------------------------------------------------- # -WGRIB2_DIR=$( which wgrib2 ) || print_err_msg_exit "${script_name}" "\ +WGRIB2_DIR=$( which wgrib2 ) || print_err_msg_exit "\ Directory in which the wgrib2 executable is located not found: - WGRIB2_DIR = \"${WGRIB2_DIR}\" -" + WGRIB2_DIR = \"${WGRIB2_DIR}\"" # #----------------------------------------------------------------------- # @@ -215,7 +214,7 @@ $SCRIPTSDIR/exregional_make_lbc1_to_lbcn.sh \ APRUN="${APRUN}" \ LBCS_DIR="${LBCS_DIR}" \ EXTRN_MDL_LBC_UPDATE_FHRS="${EXTRN_MDL_LBC_UPDATE_FHRS_str}" || \ - print_err_msg_exit "${script_name}" "\ +print_err_msg_exit "\ Call to ex-script corresponding to J-job \"${script_name}\" failed." # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_OROG b/jobs/JREGIONAL_MAKE_OROG index 58ed5e7d2..d85deee52 100755 --- a/jobs/JREGIONAL_MAKE_OROG +++ b/jobs/JREGIONAL_MAKE_OROG @@ -43,7 +43,7 @@ This is the J-job script for the task that generates orography files. # ${SCRIPTSDIR}/exregional_make_orog.sh \ WORKDIR_LOCAL="ABCD" || \ - print_err_msg_exit "${script_name}" "\ +print_err_msg_exit "\ Call to ex-script corresponding to J-job \"${script_name}\" failed." # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_SFC_CLIMO b/jobs/JREGIONAL_MAKE_SFC_CLIMO index 7c6ba2927..3d43cee41 100755 --- a/jobs/JREGIONAL_MAKE_SFC_CLIMO +++ b/jobs/JREGIONAL_MAKE_SFC_CLIMO @@ -65,7 +65,7 @@ mkdir_vrfy $workdir # ${SCRIPTSDIR}/exregional_make_sfc_climo.sh \ workdir="$workdir" || \ - print_err_msg_exit "${script_name}" "\ +print_err_msg_exit "\ Call to ex-script corresponding to J-job \"${script_name}\" failed." # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_FV3 b/jobs/JREGIONAL_RUN_FV3 index ecfe1a165..71c8b1b20 100755 --- a/jobs/JREGIONAL_RUN_FV3 +++ b/jobs/JREGIONAL_RUN_FV3 @@ -63,7 +63,7 @@ mkdir_vrfy -p ${CYCLE_DIR}/RESTART # $SCRIPTSDIR/exregional_run_fv3.sh \ CYCLE_DIR="${CYCLE_DIR}" || \ - print_err_msg_exit "${script_name}" "\ +print_err_msg_exit "\ Call to ex-script corresponding to J-job \"${script_name}\" failed." # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_POST b/jobs/JREGIONAL_RUN_POST index 0431058f3..575f9a733 100755 --- a/jobs/JREGIONAL_RUN_POST +++ b/jobs/JREGIONAL_RUN_POST @@ -87,7 +87,7 @@ $SCRIPTSDIR/exregional_run_post.sh \ postprd_dir="${postprd_dir}" \ fhr_dir="${fhr_dir}" \ fhr="${fhr}" || \ - print_err_msg_exit "${script_name}" "\ +print_err_msg_exit "\ Call to ex-script corresponding to J-job \"${script_name}\" failed." 
# #----------------------------------------------------------------------- diff --git a/scripts/exregional_get_extrn_files.sh b/scripts/exregional_get_extrn_files.sh index 918ad5c37..120bc564a 100755 --- a/scripts/exregional_get_extrn_files.sh +++ b/scripts/exregional_get_extrn_files.sh @@ -293,12 +293,11 @@ will be copied (EXTRN_MDL_FILES_DIR) are: # HTAR_LOG_FN="log.htar_tvf.${narcv_formatted}" htar -tvf ${ARCV_FP} ${EXTRN_MDL_FPS[@]} >& ${HTAR_LOG_FN} || \ - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ htar file list operation (\"htar -tvf ...\") failed. Check the log file HTAR_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\" - HTAR_LOG_FN = \"${HTAR_LOG_FN}\" -" + HTAR_LOG_FN = \"${HTAR_LOG_FN}\"" i=0 files_in_crnt_arcv=() @@ -317,14 +316,13 @@ HTAR_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: num_files_in_crnt_arcv=${#files_in_crnt_arcv[@]} if [ ${num_files_in_crnt_arcv} -eq 0 ]; then EXTRN_MDL_FPS_str="( "$( printf "\"%s\" " "${EXTRN_MDL_FPS[@]}" )")" - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The current archive file (ARCV_FP) does not contain any of the external model files listed in EXTRN_MDL_FPS: ARCV_FP = \"${ARCV_FP}\" EXTRN_MDL_FPS = ${EXTRN_MDL_FPS_str} The archive file should contain at least one external model file; other- -wise, it would not be needed. -" +wise, it would not be needed." fi # # Extract from the current tar archive file on HPSS all the external mo- @@ -334,12 +332,11 @@ wise, it would not be needed. # HTAR_LOG_FN="log.htar_xvf.${narcv_formatted}" htar -xvf ${ARCV_FP} ${files_in_crnt_arcv[@]} >& ${HTAR_LOG_FN} || \ - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ htar file extract operation (\"htar -xvf ...\") failed. Check the log file HTAR_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\" - HTAR_LOG_FN = \"${HTAR_LOG_FN}\" -" + HTAR_LOG_FN = \"${HTAR_LOG_FN}\"" # # Note that the htar file extract operation above may return with a 0 # exit code (success) even if one or more (or all) external model output @@ -362,7 +359,7 @@ file HTAR_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: fi grep -n "${FP}" "${HTAR_LOG_FN}" > /dev/null 2>&1 || \ - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ External model output file FP not extracted from tar archive file ARCV_- FP: ARCV_FP = \"${ARCV_FP}\" @@ -370,8 +367,7 @@ FP: Check the log file HTAR_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\" - HTAR_LOG_FN = \"${HTAR_LOG_FN}\" -" + HTAR_LOG_FN = \"${HTAR_LOG_FN}\"" done @@ -419,15 +415,15 @@ details: # else - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The archive-relative directory specified by EXTRN_MDL_ARCVREL_DIR [i.e. the directory \"within\" the tar file(s) listed in EXTRN_MDL_ARCV_FPS] is not the current directory (i.e. it is not \".\"), and it does not start with a \"/\" or a \"./\": EXTRN_MDL_ARCVREL_DIR = \"${EXTRN_MDL_ARCVREL_DIR}\" EXTRN_MDL_ARCV_FPS = ${EXTRN_MDL_ARCV_FPS_str} -This script must be modified to account for this case. -" +This script must be modified to account for this case." + fi fi @@ -453,7 +449,7 @@ This script must be modified to account for this case. 
#----------------------------------------------------------------------- # if [ "${num_arcv_files}" -gt 1 ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Currently, this script is coded to handle only one archive file if the archive file format is specified to be \"zip\", but the number of archive files (num_arcv_files) passed to this script is greater than 1: @@ -476,12 +472,11 @@ that can be used as a guide for the \"zip\" case." # HSI_LOG_FN="log.hsi_get" hsi get "${ARCV_FP}" >& ${HSI_LOG_FN} || \ - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ hsi file get operation (\"hsi get ...\") failed. Check the log file HSI_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\" - HSI_LOG_FN = \"${HSI_LOG_FN}\" -" + HSI_LOG_FN = \"${HSI_LOG_FN}\"" # #----------------------------------------------------------------------- # @@ -492,14 +487,13 @@ HSI_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: # UNZIP_LOG_FN="log.unzip_lv" unzip -l -v ${ARCV_FN} >& ${UNZIP_LOG_FN} || \ - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ unzip operation to list the contents of the zip archive file ARCV_FN in the directory EXTRN_MDL_FILES_DIR failed. Check the log file UNZIP_- LOG_FN in that directory for details: ARCV_FN = \"${ARCV_FN}\" EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\" - UNZIP_LOG_FN = \"${UNZIP_LOG_FN}\" -" + UNZIP_LOG_FN = \"${UNZIP_LOG_FN}\"" # #----------------------------------------------------------------------- # @@ -514,15 +508,14 @@ LOG_FN in that directory for details: # for FP in "${EXTRN_MDL_FPS[@]}"; do grep -n "${FP}" "${UNZIP_LOG_FN}" > /dev/null 2>&1 || \ - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ External model output file FP does not exist in the zip archive file ARCV_FN in the directory EXTRN_MDL_FILES_DIR. Check the log file UN- ZIP_LOG_FN in that directory for the contents of the zip archive: EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\" ARCV_FN = \"${ARCV_FN}\" FP = \"$FP\" - UNZIP_LOG_FN = \"${UNZIP_LOG_FN}\" -" + UNZIP_LOG_FN = \"${UNZIP_LOG_FN}\"" done # #----------------------------------------------------------------------- @@ -536,12 +529,11 @@ ZIP_LOG_FN in that directory for the contents of the zip archive: # UNZIP_LOG_FN="log.unzip" unzip -o "${ARCV_FN}" ${EXTRN_MDL_FPS[@]} >& ${UNZIP_LOG_FN} || \ - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ unzip file extract operation (\"unzip -o ...\") failed. Check the log file UNZIP_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\" - UNZIP_LOG_FN = \"${UNZIP_LOG_FN}\" -" + UNZIP_LOG_FN = \"${UNZIP_LOG_FN}\"" # # NOTE: # If EXTRN_MDL_ARCVREL_DIR is not empty, the unzip command above will diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index e6c8fa16a..b8f7bc513 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -331,15 +331,16 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then ${istart_rgnl_wide_halo_T6SG} ${jstart_rgnl_wide_halo_T6SG} \ ${iend_rgnl_wide_halo_T6SG} ${jend_rgnl_wide_halo_T6SG} \ 1 $USHDIR || \ - print_err_msg_exit "${script_name}" "\ -Call to script that generates grid files returned with nonzero exit code." + print_err_msg_exit "\ +Call to script that generates grid files returned with nonzero exit +code." 
tile_rgnl=7 grid_fp="$tmpdir/${CRES}_grid.tile${tile_rgnl}.nc" $EXECDIR/global_equiv_resol "${grid_fp}" || \ - print_err_msg_exit "${script_name}" "\ -Call to executable that calculates equivalent global uniform cubed sphere -resolution returned with nonzero exit code." + print_err_msg_exit "\ +Call to executable that calculates equivalent global uniform cubed +sphere resolution returned with nonzero exit code." RES_equiv=$( ncdump -h "${grid_fp}" | grep -o ":RES_equiv = [0-9]\+" | grep -o "[0-9]") RES_equiv=${RES_equiv//$'\n'/} @@ -378,16 +379,16 @@ Setting parameters in file: cd_vrfy $tmpdir $EXECDIR/regional_grid ${RGNL_GRID_NML_FP} || \ - print_err_msg_exit "${script_name}" "\ -Call to executable that generates grid file (Jim Purser version) returned -with nonzero exit code." + print_err_msg_exit "\ +Call to executable that generates grid file (Jim Purser version) re- +turned with nonzero exit code." tile_rgnl=7 grid_fp="$tmpdir/regional_grid.nc" $EXECDIR/global_equiv_resol "${grid_fp}" || \ - print_err_msg_exit "${script_name}" "\ -Call to executable that calculates equivalent global uniform cubed sphere -resolution returned with nonzero exit code." + print_err_msg_exit "\ +Call to executable that calculates equivalent global uniform cubed +sphere resolution returned with nonzero exit code." RES_equiv=$( ncdump -h "${grid_fp}" | grep -o ":RES_equiv = [0-9]\+" | grep -o "[0-9]" ) # Need error checking here. RES_equiv=${RES_equiv//$'\n'/} @@ -400,7 +401,7 @@ printf "%s\n" "CRES_equiv = $CRES_equiv" mv_vrfy ${grid_fp_orig} ${grid_fp} $EXECDIR/mosaic_file $CRES_equiv || \ - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Call to executable that creates a grid mosaic file returned with nonzero exit code." # @@ -473,13 +474,12 @@ printf "%s %s %s %s %s\n" \ > ${nml_fn} $APRUN $EXECDIR/${shave_exec} < ${nml_fn} || \ - print_err_msg_exit "${script_name}" "\ +print_err_msg_exit "\ Call to executable \"${shave_exec}\" to generate a grid file with a ${nh3_T7}- cell-wide-halo returned with nonzero exit code. The namelist file nml_fn is in directory tmpdir: tmpdir = \"${tmpdir}\" - nml_fn = \"${nml_fn}\" -" + nml_fn = \"${nml_fn}\"" mv_vrfy ${shaved_fp} ${GRID_DIR} # # Create an input namelist file for the shave executable to generate an @@ -498,13 +498,12 @@ printf "%s %s %s %s %s\n" \ > ${nml_fn} $APRUN $EXECDIR/${shave_exec} < ${nml_fn} || \ - print_err_msg_exit "${script_name}" "\ +print_err_msg_exit "\ Call to executable \"${shave_exec}\" to generate a grid file with a ${nh4_T7}- cell-wide-halo returned with nonzero exit code. The namelist file nml_fn is in directory tmpdir: tmpdir = \"${tmpdir}\" - nml_fn = \"${nml_fn}\" -" + nml_fn = \"${nml_fn}\"" mv_vrfy ${shaved_fp} ${GRID_DIR} # # Change location back to the directory before tmpdir. @@ -523,9 +522,8 @@ $USHDIR/link_fix.sh \ verbose="FALSE" \ script_var_defns_fp="${SCRIPT_VAR_DEFNS_FP}" \ file_group="grid" || \ - print_err_msg_exit "\ -Call to script to create links to grid files failed. -" +print_err_msg_exit "\ +Call to script to create links to grid files failed." 
# #----------------------------------------------------------------------- # diff --git a/scripts/exregional_make_ic_lbc0.sh b/scripts/exregional_make_ic_lbc0.sh index 185e2755d..f0b4423e6 100755 --- a/scripts/exregional_make_ic_lbc0.sh +++ b/scripts/exregional_make_ic_lbc0.sh @@ -97,7 +97,7 @@ case "${CCPP_PHYS_SUITE}" in ;; *) - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Physics-suite-dependent namelist variables have not yet been specified for this physics suite: CCPP_PHYS_SUITE = \"${CCPP_PHYS_SUITE}\"" @@ -324,7 +324,7 @@ case "$EXTRN_MDL_NAME_ICS" in *) - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ External-model-dependent namelist variables have not yet been specified for this external model: EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\"" @@ -436,7 +436,7 @@ hh="${EXTRN_MDL_CDATE:8:2}" tg3_from_soil=${tg3_from_soil} / EOF -} || print_err_msg_exit "${script_name}" "\ +} || print_err_msg_exit "\ \"cat\" command to create a namelist file for chgres_cube to generate ICs, surface fields, and the 0-th hour (initial) LBCs returned with nonzero status." @@ -456,7 +456,7 @@ status." # A similar thing happens in the forecast task. # ${APRUN} ${EXECDIR}/chgres_cube.exe || \ -print_err_msg_exit "${script_name}" "\ +print_err_msg_exit "\ Call to executable to generate surface and initial conditions files for the FV3SAR failed: EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\" diff --git a/scripts/exregional_make_lbc1_to_lbcn.sh b/scripts/exregional_make_lbc1_to_lbcn.sh index 88f41fe5a..b822e08fe 100755 --- a/scripts/exregional_make_lbc1_to_lbcn.sh +++ b/scripts/exregional_make_lbc1_to_lbcn.sh @@ -98,7 +98,7 @@ case "${CCPP_PHYS_SUITE}" in ;; *) - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Physics-suite-dependent namelist variables have not yet been specified for this physics suite: CCPP_PHYS_SUITE = \"${CCPP_PHYS_SUITE}\"" @@ -315,7 +315,7 @@ case "$EXTRN_MDL_NAME_LBCS" in *) - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ External-model-dependent namelist variables have not yet been specified for this external model: EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\"" @@ -359,7 +359,7 @@ for (( i=0; i<$num_fhrs; i++ )); do fn_grib2="${EXTRN_MDL_FNS[$i]}" ;; *) - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The external model output file name to use in the chgres FORTRAN name- list file has not specified for this external model: EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\"" @@ -421,7 +421,7 @@ list file has not specified for this external model: phys_suite="${phys_suite}" / EOF - } || print_err_msg_exit "${script_name}" "\ + } || print_err_msg_exit "\ \"cat\" command to create a namelist file for chgres_cube to generate LBCs for all boundary update times (except the 0-th forecast hour) returned with nonzero status." @@ -429,7 +429,7 @@ with nonzero status." # Run chgres_cube. 
# ${APRUN} ${EXECDIR}/chgres_cube.exe || \ - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Call to executable to generate lateral boundary conditions file for the the FV3SAR failed: EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\" diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index fac29fad9..8922d6850 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -289,7 +289,7 @@ ${tmp_dir}" \ "THEIA" | "HERA" | "JET" | "ODIN") ${ufs_utils_ushdir}/${orog_gen_scr} \ $RES ${TILE_RGNL} ${FIXsar} ${raw_dir} ${UFS_UTILS_DIR} ${topo_dir} ${tmp_dir} || \ - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Call to script that generates raw orography file returned with nonzero exit code." ;; @@ -329,8 +329,9 @@ print_info_msg "$VERBOSE" "Setting orography filtering parameters..." # Need to fix the following (also above). Then redo to get cell_size_avg. #cd_vrfy ${GRID_DIR} #$SORCDIR/regional_grid/regional_grid $RGNL_GRID_NML_FP $CRES || \ -# print_err_msg_exit "${script_name}" "\ -#Call to script that generates grid file (Jim Purser version) returned with nonzero exit code." +#print_err_msg_exit "\ +#Call to script that generates grid file (Jim Purser version) returned +#with nonzero exit code." #${CRES}_grid.tile${TILE_RGNL}.halo${nhw_T7}.nc @@ -363,11 +364,10 @@ peak_fac_array=( "1.1" "1.1" "1.05" "1.0" "1.0" "1.0" "1.0") # Need to fix this so that the stderr from a failed call to interpol_to_arbit_CRES # gets sent to the stderr of this script. cd4=$( interpol_to_arbit_CRES "${RES_equiv}" "RES_array" "cd4_array" ) || \ - print_err_msg_exit "${script_name}" "\ +print_err_msg_exit "\ Call to script that interpolated cd4 to the regional grid's equiavlent global cubed-sphere resolution (RES_equiv) failed: - RES_equiv = \"${RES_equiv}\" -" + RES_equiv = \"${RES_equiv}\"" echo "====>>>> cd4 = $cd4" # max_slope=$( interpol_to_arbit_CRES "${RES_equiv}" "RES_array" "max_slope_array" ) @@ -456,7 +456,7 @@ ${ufs_utils_ushdir}/${orog_fltr_scr} \ ${FIXsar} ${raw_dir} ${filter_dir} \ $cd4 ${peak_fac} ${max_slope} ${n_del2_weak} \ ${ufs_utils_ushdir} || \ - print_err_msg_exit "${script_name}" "\ +print_err_msg_exit "\ Call to script that generates filtered orography file returned with non- zero exit code." # @@ -519,13 +519,12 @@ printf "%s %s %s %s %s\n" \ > ${nml_fn} $APRUN $EXECDIR/${shave_exec} < ${nml_fn} || \ - print_err_msg_exit "${script_name}" "\ +print_err_msg_exit "\ Call to \"shave\" executable to generate (filtered) orography file with a 4-cell wide halo returned with nonzero exit code. The namelist file nml_fn is in directory shave_dir: shave_dir = \"${shave_dir}\" - nml_fn = \"${nml_fn}\" -" + nml_fn = \"${nml_fn}\"" mv_vrfy ${shaved_fp} ${OROG_DIR} # # Create an input namelist file for the shave executable to generate an @@ -544,13 +543,12 @@ printf "%s %s %s %s %s\n" \ > ${nml_fn} $APRUN $EXECDIR/${shave_exec} < ${nml_fn} || \ - print_err_msg_exit "${script_name}" "\ +print_err_msg_exit "\ Call to \"shave\" executable to generate (filtered) orography file with a 4-cell wide halo returned with nonzero exit code. The namelist file nml_fn is in directory shave_dir: shave_dir = \"${shave_dir}\" - nml_fn = \"${nml_fn}\" -" + nml_fn = \"${nml_fn}\"" mv_vrfy ${shaved_fp} ${OROG_DIR} # # Change location back to the directory before shave_dir. 
@@ -573,9 +571,8 @@ $USHDIR/link_fix.sh \ verbose="FALSE" \ script_var_defns_fp="${SCRIPT_VAR_DEFNS_FP}" \ file_group="orog" || \ - print_err_msg_exit "\ -Call to script to create links to orography files failed. -" +print_err_msg_exit "\ +Call to script to create links to orography files failed." # Moved the following to exregional_make_sfc_climo.sh script since it # needs to be done only if the make_sfc_climo task is run. diff --git a/scripts/exregional_make_sfc_climo.sh b/scripts/exregional_make_sfc_climo.sh index 8371007a9..ffdf6fa16 100755 --- a/scripts/exregional_make_sfc_climo.sh +++ b/scripts/exregional_make_sfc_climo.sh @@ -145,7 +145,7 @@ case $MACHINE in ;; *) - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Run command has not been specified for this machine: MACHINE = \"$MACHINE\" APRUN_SFC = \"$APRUN_SFC\"" @@ -159,7 +159,7 @@ esac # #----------------------------------------------------------------------- # -$APRUN_SFC ${EXECDIR}/sfc_climo_gen || print_err_msg_exit "${script_name}" "\ +$APRUN_SFC ${EXECDIR}/sfc_climo_gen || print_err_msg_exit "\ Call to executable that generates surface climatology files returned with nonzero exit code." # @@ -233,9 +233,8 @@ $USHDIR/link_fix.sh \ verbose="FALSE" \ script_var_defns_fp="${SCRIPT_VAR_DEFNS_FP}" \ file_group="sfc_climo" || \ - print_err_msg_exit "\ -Call to script to create links to surface climatology files failed. -" +print_err_msg_exit "\ +Call to script to create links to surface climatology files failed." # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_fv3.sh b/scripts/exregional_run_fv3.sh index a188c5a52..d71ed8f1a 100755 --- a/scripts/exregional_run_fv3.sh +++ b/scripts/exregional_run_fv3.sh @@ -235,7 +235,7 @@ target="${FIXsar}/${CRES}_mosaic.nc" if [ -f "${target}" ]; then ln_vrfy -sf ${relative_or_null} $target grid_spec.nc else - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Cannot create symlink because target does not exist: target = \"$target}\"" fi @@ -245,7 +245,7 @@ target="${FIXsar}/${CRES}_grid.tile${TILE_RGNL}.halo${nh3_T7}.nc" if [ -f "${target}" ]; then ln_vrfy -sf ${relative_or_null} $target ${CRES}_grid.tile${TILE_RGNL}.nc else - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Cannot create symlink because target does not exist: target = \"$target}\"" fi @@ -266,7 +266,7 @@ target="${FIXsar}/${CRES}_grid.tile${TILE_RGNL}.halo${nh4_T7}.nc" if [ -f "${target}" ]; then ln_vrfy -sf $target ${relative_or_null} grid.tile${TILE_RGNL}.halo${nh4_T7}.nc else - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Cannot create symlink because target does not exist: target = \"$target}\"" fi @@ -283,7 +283,7 @@ target="${FIXsar}/${CRES}_oro_data.tile${TILE_RGNL}.halo${nh0_T7}.nc" if [ -f "${target}" ]; then ln_vrfy -sf ${relative_or_null} $target oro_data.nc else - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Cannot create symlink because target does not exist: target = \"$target}\"" fi @@ -305,7 +305,7 @@ target="${FIXsar}/${CRES}_oro_data.tile${TILE_RGNL}.halo${nh4_T7}.nc" if [ -f "${target}" ]; then ln_vrfy -sf $target ${relative_or_null} oro_data.tile${TILE_RGNL}.halo${nh4_T7}.nc else - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Cannot create symlink because target does not exist: target = \"$target}\"" fi @@ -587,7 +587,8 @@ export OMP_STACKSIZE=1024m #----------------------------------------------------------------------- # $APRUN ./fv3_gfs.x || 
print_err_msg_exit "\ -Call to executable to run FV3SAR forecast returned with nonzero exit code." +Call to executable to run FV3SAR forecast returned with nonzero exit +code." # #----------------------------------------------------------------------- # diff --git a/tests/run_mltpl_expts.sh b/tests/run_mltpl_expts.sh index 13113db97..e8aca9201 100755 --- a/tests/run_mltpl_expts.sh +++ b/tests/run_mltpl_expts.sh @@ -30,7 +30,6 @@ TESTSDIR="$FV3SAR_WFLOW_DIR/tests" if [ "$#" -ne 1 ]; then print_err_msg_exit "\ -Script \"$0\": Incorrect number of arguments specified. Usage: $0 \${test_suite} @@ -58,11 +57,9 @@ PARAM_ARRAYS_FP="$TESTSDIR/$PARAM_ARRAYS_FN" if [ ! -f ${PARAM_ARRAYS_FP} ]; then print_err_msg_exit "\ -Script \"$0\": The file specified by PARAM_ARRAYS_FP defining the arrays that specify the values that each experiment parameter will take on does not exist: - PARAM_ARRAYS_FP = \"$PARAM_ARRAYS_FP\" -" + PARAM_ARRAYS_FP = \"$PARAM_ARRAYS_FP\"" else . ${PARAM_ARRAYS_FP} fi diff --git a/tests/run_one_expt.sh b/tests/run_one_expt.sh index c5b6ff3b0..38cc958dc 100755 --- a/tests/run_one_expt.sh +++ b/tests/run_one_expt.sh @@ -109,9 +109,8 @@ fcst_len_hrs=${fcst_len_hrs:-} lbc_update_intvl_hrs=${lbc_update_intvl_hrs:-"3"} if [ -z "${basedir}" ]; then - print_err_msg_exit "${script_name}" " -A base directory must be specified. -" + print_err_msg_exit "\ +A base directory must be specified." # #----------------------------------------------------------------------- # diff --git a/ush/check_for_preexist_dir.sh b/ush/check_for_preexist_dir.sh index 982d066dd..37d611404 100644 --- a/ush/check_for_preexist_dir.sh +++ b/ush/check_for_preexist_dir.sh @@ -38,8 +38,7 @@ where the arguments are defined as follows: preexisting_dir_method: String specifying the action to take if a preexisting version of dir - is found. Valid values are \"delete\", \"rename\", and \"quit\". -" + is found. Valid values are \"delete\", \"rename\", and \"quit\"." fi # #----------------------------------------------------------------------- diff --git a/ush/check_var_valid_value.sh b/ush/check_var_valid_value.sh index 7d1a85a95..7a3768c90 100755 --- a/ush/check_var_valid_value.sh +++ b/ush/check_var_valid_value.sh @@ -91,13 +91,13 @@ The value specified in ${var_name} is not supported: #----------------------------------------------------------------------- # iselementof "${var_value}" valid_var_values || { \ - caller_name=$( basename "${BASH_SOURCE[1]}" ) - valid_var_values_str=$(printf "\"%s\" " "${valid_var_values[@]}"); - print_err_msg_exit "${caller_name}" "\ + caller_name=$( basename "${BASH_SOURCE[1]}" ) + valid_var_values_str=$(printf "\"%s\" " "${valid_var_values[@]}"); + print_err_msg_exit "\ ${err_msg} ${var_name} must be set to one of the following: - ${valid_var_values_str} -"; } + ${valid_var_values_str}"; \ + } # #----------------------------------------------------------------------- # diff --git a/ush/count_files.sh b/ush/count_files.sh index 55d269082..83e99bb50 100755 --- a/ush/count_files.sh +++ b/ush/count_files.sh @@ -14,8 +14,7 @@ function count_files() { if [ "$#" -ne 1 ]; then print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": Incorrect number of arguments specified. -Usage: +Incorrect number of arguments specified. 
Usage: ${FUNCNAME[0]} file_extension diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index d2044a5b4..dc4d50d5d 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -266,7 +266,7 @@ if [ "${RUN_ENVIR}" = "nco" ]; then num_files=$( ls -1 ${glob_pattern} 2>/dev/null | wc -l ) if [ "${num_files}" -ne "1" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Exactly one file must exist in directory FIXsar matching the globbing pattern glob_pattern: FIXsar = \"${FIXsar}\" @@ -306,9 +306,8 @@ else verbose="FALSE" \ script_var_defns_fp="${SCRIPT_VAR_DEFNS_FP}" \ file_group="grid" || \ - print_err_msg_exit "\ -Call to script to create links to grid files failed. -" + print_err_msg_exit "\ +Call to script to create links to grid files failed." fi # #----------------------------------------------------------------------- @@ -324,9 +323,8 @@ Call to script to create links to grid files failed. verbose="FALSE" \ script_var_defns_fp="${SCRIPT_VAR_DEFNS_FP}" \ file_group="orog" || \ - print_err_msg_exit "\ -Call to script to create links to orography files failed. -" + print_err_msg_exit "\ +Call to script to create links to orography files failed." fi # #----------------------------------------------------------------------- @@ -343,9 +341,8 @@ Call to script to create links to orography files failed. verbose="FALSE" \ script_var_defns_fp="${SCRIPT_VAR_DEFNS_FP}" \ file_group="sfc_climo" || \ - print_err_msg_exit "\ -Call to script to create links to surface climatology files failed. -" + print_err_msg_exit "\ +Call to script to create links to surface climatology files failed." fi fi @@ -468,7 +465,7 @@ system/software for handling modules..." # export LD_LIBRARY_PATH="${NEMSfv3gfs_DIR}/ccpp/lib\${LD_LIBRARY_PATH:+:\$LD_LIBRARY_PATH}" EOM -} || print_err_msg_exit "${script_name}" " +} || print_err_msg_exit "\ Heredoc (cat) command to append command to add path to CCPP libraries to the Lmod initialization script in the experiment directory returned with a nonzero status." @@ -500,7 +497,7 @@ ment directory..." else - print_err_msg_exit " + print_err_msg_exit "\ A template FV3 namelist file is not available for the following combina- tion of physics suite and external models for ICs and LBCs: CCPP_PHYS_SUITE = \"${CCPP_PHYS_SUITE}\" @@ -634,7 +631,7 @@ if [ "${CCPP_PHYS_SUITE}" = "GSD" ]; then "${EXTRN_MDL_NAME_LBCS}" = "HRRRX" ]; then set_file_param "${FV3_NML_FP}" "lsoil" "9" else - print_err_msg_exit " + print_err_msg_exit "\ The value to set the variable lsoil to in the FV3 namelist file (FV3_- NML_FP) has not been specified for the following combination of physics suite and external models for ICs and LBCs: diff --git a/ush/get_extrn_mdl_file_dir_info.sh b/ush/get_extrn_mdl_file_dir_info.sh index 56585979f..4727e7fdf 100755 --- a/ush/get_extrn_mdl_file_dir_info.sh +++ b/ush/get_extrn_mdl_file_dir_info.sh @@ -86,9 +86,8 @@ follows: if [ 0 = 1 ]; then if [ "$#" -ne "13" ]; then - print_err_msg_exit "${func_name}" "\ -Function \"${FUNCNAME[0]}\": Incorrect number of arguments specified. -Usage: + print_err_msg_exit "\ +Incorrect number of arguments specified. Usage: ${FUNCNAME[0]} \ extrn_mdl_name \ @@ -174,7 +173,6 @@ where the arguments are defined as follows: Name of the global variable that will contain the archive-relative di- rectory, i.e. the directory \"inside\" the archive file in which the ex- ternal model output files may be stored. 
- " fi @@ -395,13 +393,12 @@ fi ;; *) - print_err_msg_exit "${func_name}" "\ + print_err_msg_exit "\ The external model file names have not yet been specified for this com- bination of external model (extrn_mdl_name) and analysis or forecast (anl_or_fcst): extrn_mdl_name = \"${extrn_mdl_name}\" - anl_or_fcst = \"${anl_or_fcst}\" -" + anl_or_fcst = \"${anl_or_fcst}\"" ;; esac @@ -458,13 +455,12 @@ bination of external model (extrn_mdl_name) and analysis or forecast ;; *) - print_err_msg_exit "${func_name}" "\ + print_err_msg_exit "\ The external model file names have not yet been specified for this com- bination of external model (extrn_mdl_name) and analysis or forecast (anl_or_fcst): extrn_mdl_name = \"${extrn_mdl_name}\" - anl_or_fcst = \"${anl_or_fcst}\" -" + anl_or_fcst = \"${anl_or_fcst}\"" ;; esac @@ -513,12 +509,11 @@ bination of external model (extrn_mdl_name) and analysis or forecast sysdir="" ;; *) - print_err_msg_exit "${func_name}" "\ + print_err_msg_exit "\ The system directory in which to look for external model output files has not been specified for this external model and machine combination: extrn_mdl_name = \"${extrn_mdl_name}\" - MACHINE = \"$MACHINE\" -" + MACHINE = \"$MACHINE\"" ;; esac ;; @@ -542,12 +537,11 @@ has not been specified for this external model and machine combination: sysdir="$sysbasedir/${yyyymmdd}" ;; *) - print_err_msg_exit "${func_name}" "\ + print_err_msg_exit "\ The system directory in which to look for external model output files has not been specified for this external model and machine combination: extrn_mdl_name = \"${extrn_mdl_name}\" - MACHINE = \"$MACHINE\" -" + MACHINE = \"$MACHINE\"" ;; esac ;; @@ -571,12 +565,11 @@ has not been specified for this external model and machine combination: sysdir="$sysbasedir" ;; *) - print_err_msg_exit "${func_name}" "\ + print_err_msg_exit "\ The system directory in which to look for external model output files has not been specified for this external model and machine combination: extrn_mdl_name = \"${extrn_mdl_name}\" - MACHINE = \"$MACHINE\" -" + MACHINE = \"$MACHINE\"" ;; esac ;; @@ -600,23 +593,21 @@ has not been specified for this external model and machine combination: sysdir="$sysbasedir" ;; *) - print_err_msg_exit "${func_name}" "\ + print_err_msg_exit "\ The system directory in which to look for external model output files has not been specified for this external model and machine combination: extrn_mdl_name = \"${extrn_mdl_name}\" - MACHINE = \"$MACHINE\" -" + MACHINE = \"$MACHINE\"" ;; esac ;; *) - print_err_msg_exit "${func_name}" "\ + print_err_msg_exit "\ The system directory in which to look for external model output files has not been specified for this external model: - extrn_mdl_name = \"${extrn_mdl_name}\" -" + extrn_mdl_name = \"${extrn_mdl_name}\"" esac # @@ -746,10 +737,9 @@ has not been specified for this external model: ;; *) - print_err_msg_exit "${func_name}" "\ + print_err_msg_exit "\ Archive file information has not been specified for this external model: - extrn_mdl_name = \"${extrn_mdl_name}\" -" + extrn_mdl_name = \"${extrn_mdl_name}\"" ;; esac diff --git a/ush/interpol_to_arbit_CRES.sh b/ush/interpol_to_arbit_CRES.sh index d6d4e6ac9..98291abb9 100755 --- a/ush/interpol_to_arbit_CRES.sh +++ b/ush/interpol_to_arbit_CRES.sh @@ -28,8 +28,7 @@ function interpol_to_arbit_CRES () { if [ "$#" -ne 3 ]; then print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": Incorrect number of arguments specified. -Usage: +Incorrect number of arguments specified. 
Usage: ${FUNCNAME[0]} RES RES_array prop_array @@ -48,7 +47,6 @@ where the arguments are defined as follows: prop_array: The name of the array containing the values of the property corres- ponding to the cubed-sphere resolutions in RES_array. - " fi diff --git a/ush/is_array.sh b/ush/is_array.sh index ed9ecc252..8f050e0cb 100644 --- a/ush/is_array.sh +++ b/ush/is_array.sh @@ -33,14 +33,12 @@ function is_array () { if [ "$#" -ne 1 ]; then print_err_msg_exit "\ -Function \"${func_name}\": Incorrect number of arguments specified. -Usage: +Incorrect number of arguments specified. Usage: ${func_name} var_name where var_name is the name of the variable to check to determine whether or not it is an array. - " fi # diff --git a/ush/iselementof.sh b/ush/iselementof.sh index 8c93fbe59..41009216e 100644 --- a/ush/iselementof.sh +++ b/ush/iselementof.sh @@ -49,8 +49,7 @@ function iselementof () { # if [ "$#" -ne 2 ]; then print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": Incorrect number of arguments specified. -Usage: +Incorrect number of arguments specified. Usage: ${FUNCNAME[0]} str_to_match array_name @@ -61,7 +60,6 @@ where the arguments are defined as follows: array_name: The name of the array to search. - " fi diff --git a/ush/link_fix.sh b/ush/link_fix.sh index f5287555e..6b876623c 100755 --- a/ush/link_fix.sh +++ b/ush/link_fix.sh @@ -132,6 +132,16 @@ done # #----------------------------------------------------------------------- # +# Set the valid values that file_group can take on and then check whe- +# ther it is in fact set to one of these valid values. +# +#----------------------------------------------------------------------- +# +valid_vals_file_group=( "grid" "orog" "sfc_climo" ) +check_var_valid_value "file_group" "valid_vals_file_group" +# +#----------------------------------------------------------------------- +# # Prepend appropriate directory to each set of file name globbing pat- # terns. # @@ -150,10 +160,6 @@ elif [ "${file_group}" = "orog" ]; then elif [ "${file_group}" = "sfc_climo" ]; then fps_all=( "${fps_sfc_climo[@]}" ) run_task="${RUN_TASK_MAKE_SFC_CLIMO}" -else - print_err_msg_exit "${script_name}" "\ -Invalid value specified for file_group. Valid values are: -" fi # #----------------------------------------------------------------------- @@ -176,26 +182,24 @@ printf " fn = %s\n" "$fn" res=$( printf "%s" $fn | sed -n -r -e "s/^C([0-9]*).*/\1/p" ) if [ -z $res ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The C-resolution could not be extracted from the current file's name. The full path to the file (fp) is: fp = \"${fp}\" This may be because fp contains the * globbing character, which would imply that no files were found that match the globbing pattern specified -in fp. -" +in fp." fi printf " res_prev = %s\n" "${res_prev}" printf " res = %s\n" "${res}" if [ $i -gt 0 ] && [ ${res} != ${res_prev} ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The C-resolutions (as obtained from the file names) of the previous and current file (fp_prev and fp, respectively) are different: fp_prev = \"${fp_prev}\" fp = \"${fp}\" -Please ensure that all files have the same C-resolution. -" +Please ensure that all files have the same C-resolution." 
fi i=$((i+1)) @@ -219,7 +223,7 @@ if [ "$RES" = "$res" ] || [ "$RES" = "" ]; then set_file_param "${SCRIPT_VAR_DEFNS_FP}" "RES" "${res}" set_file_param "${SCRIPT_VAR_DEFNS_FP}" "CRES" "${cres}" elif [ "$RES" != "$res" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The resolution (RES) specified in the variable definitions file (script_var_defns_fp) does not match the resolution (res) found in this script for the specified file group (file_group): @@ -228,8 +232,7 @@ script for the specified file group (file_group): file_group = \"${file_group}\" res = \"${res}\" This usually means that one or more of the file groups (grid, orography, -and/or surface climatology) are defined on different grids. -" +and/or surface climatology) are defined on different grids." fi # #----------------------------------------------------------------------- @@ -263,7 +266,7 @@ for fp in "${fps_all[@]}"; do ln_vrfy -sf ${relative_or_null} $fp . # ln_vrfy -sf $fp . else - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Cannot create symlink because target file (fp) does not exist: fp = \"${fp}\"" fi @@ -297,7 +300,7 @@ if [ "${file_group}" = "sfc_climo" ]; then # ln_vrfy -sf ${relative_or_null} $target $symlink ln_vrfy -sf $target $symlink else - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Cannot create symlink because target file (target) does not exist: target = \"${target}\"" fi diff --git a/ush/print_msg.sh b/ush/print_msg.sh index 9ee85c102..6350be61c 100644 --- a/ush/print_msg.sh +++ b/ush/print_msg.sh @@ -27,11 +27,16 @@ function print_info_msg() { # #----------------------------------------------------------------------- # -# Get the name of this function. +# Get the name of this function as well as information about the calling +# script/function. # #----------------------------------------------------------------------- # - local func_name="${FUNCNAME[0]}" + local crnt_func="${FUNCNAME[0]}" + local caller_fp=$( readlink -f "${BASH_SOURCE[1]}" ) + local caller_fn=$( basename "${caller_fp}" ) + local caller_dir=$( dirname "${caller_fp}" ) + local caller_func="${FUNCNAME[1]}" # #----------------------------------------------------------------------- # @@ -39,8 +44,8 @@ function print_info_msg() { # #----------------------------------------------------------------------- # - local info_msg - local verbose + local verbose \ + info_msg # #----------------------------------------------------------------------- # @@ -51,8 +56,8 @@ function print_info_msg() { # if [ "$#" -eq 1 ]; then + verbose="TRUE" info_msg="$1" - verbose="FALSE" elif [ "$#" -eq 2 ]; then @@ -69,10 +74,10 @@ function print_info_msg() { else printf "\ -Function \"${func_name}\": Incorrect number of arguments specified. +Function \"${crnt_func}\": Incorrect number of arguments specified. Usage: - ${func_name} [verbose] info_msg + ${crnt_func} [verbose] info_msg where the arguments are defined as follows: @@ -135,11 +140,16 @@ function print_err_msg_exit() { # #----------------------------------------------------------------------- # -# Get the name of this function. +# Get the name of this function as well as information about the calling +# script/function. 
# #----------------------------------------------------------------------- # - local func_name="${FUNCNAME[0]}" + local crnt_func="${FUNCNAME[0]}" + local caller_fp=$( readlink -f "${BASH_SOURCE[1]}" ) + local caller_fn=$( basename "${caller_fp}" ) + local caller_dir=$( dirname "${caller_fp}" ) + local caller_func="${FUNCNAME[1]}" # #----------------------------------------------------------------------- # @@ -147,135 +157,70 @@ function print_err_msg_exit() { # #----------------------------------------------------------------------- # - local err_msg - local caller_name + local msg_header \ + msg_footer \ + err_msg # #----------------------------------------------------------------------- # -# If no arguments are supplied, use a standard error message. +# Set the message header and footer. # #----------------------------------------------------------------------- # - if [ "$#" -eq 0 ]; then - - err_msg=$( printf "\ -ERROR. Exiting script or function with nonzero status." - ) + msg_header=$( printf "\n\ +ERROR from: + function: \"${caller_func}\" (will be set to \"source\" for a script) + file: \"${caller_fn}\" + directory: \"${caller_dir}\" +" + ) + msg_footer=$( printf "\nExiting with nonzero status." ) # #----------------------------------------------------------------------- # -# If one argument is supplied, we assume it is the message to print out -# between informational lines that are always printed. +# Check number of arguments and, if necessary, print out a usage message +# and exit. # #----------------------------------------------------------------------- # - elif [ "$#" -eq 1 ]; then + if [ "$#" -gt 1 ]; then - err_msg="$1" -# -#----------------------------------------------------------------------- -# -# Remove trailing newlines from err_msg. Command substitution [i.e. the -# $( ... )] will do this automatically. -# -#----------------------------------------------------------------------- -# - err_msg=$( printf '%s' "${err_msg}" ) -# -#----------------------------------------------------------------------- -# -# Add informational lines at the beginning and end of the message. -# -#----------------------------------------------------------------------- -# - err_msg=$( printf "\ -ERROR: -${err_msg} -Exiting script/function with nonzero status." - ) -# -#----------------------------------------------------------------------- -# -# If two arguments are supplied, we assume the first argument is the -# name of the script or function from which this function is being -# called while the second argument is the message to print out between -# informational lines that are always printed. -# -#----------------------------------------------------------------------- -# - elif [ "$#" -eq 2 ]; then + printf "\ +Function \"${crnt_func}\": Incorrect number of arguments specified. +Usage: - caller_name="$1" - err_msg="$2" -# -#----------------------------------------------------------------------- -# -# Remove trailing newlines from err_msg. Command substitution [i.e. the -# $( ... )] will do this automatically. -# -#----------------------------------------------------------------------- -# - err_msg=$( printf '%s' "${err_msg}" ) -# -#----------------------------------------------------------------------- -# -# Add informational lines at the beginning and end of the message. -# -#----------------------------------------------------------------------- -# - err_msg=$(printf "\ -ERROR from script/file \"${caller_name}\": -${err_msg} -Exiting script/function with nonzero status." 
- ) + ${crnt_func} err_msg + +where err_msg is an optional error message to print to stderr. Note +that a header and a footer are always added to err_msg. Thus, if err_- +msg is not specified, the message that is printed will consist of only +the header and footer. +" 1>&2 + exit 1 # #----------------------------------------------------------------------- # -# If more than two arguments are supplied, print out a usage message and -# exit. +# If an argument is listed, set err_msg to that argument. Otherwise, +# set it to a null string. Then print out the complete error message to +# stderr and exit. # #----------------------------------------------------------------------- # else - printf "\ -Function \"${func_name}\": Incorrect number of arguments specified. -Usage: - - ${func_name} [caller_name] [err_msg] - -where the arguments are defined as follows: - - caller_name: - This is an optional argument that specifies the name of the script or - function that calls this function (i.e. the caller). - - err_msg: - This is an optional argument that specifies the error message to print - to stderr. + if [ "$#" -eq 0 ]; then + err_msg="" + elif [ "$#" -eq 1 ]; then + err_msg="\n$1" + fi -This function prints an error message to stderr. If no arguments are -passed in, then a standard error message is printed. If only one argu- -ment is passed in, then that argument is assumed to be err_msg, and this -along with appropriate leading and trailing lines are printed. If two -arguments are passed in, then the first is assumed to be caller_name and -the second err_msg. In this case, err_msg along with appropriate lead- -ing and trailing lines are printed, with the leading line containing the -name of the caller.\n" + printf "${msg_header}${err_msg}${msg_footer}\n" 1>&2 + exit 1 fi # #----------------------------------------------------------------------- # -# Print out err_msg and exit function/script with nonzero status. -# -#----------------------------------------------------------------------- -# - printf "\n%s\n" "${err_msg}" 1>&2 - exit 1 -# -#----------------------------------------------------------------------- -# # Restore the shell options saved at the beginning of this script/func- # tion. This statement will not be reached due to the preceeding exit # statement, but we include it here for completeness (i.e. there should diff --git a/ush/process_args.sh b/ush/process_args.sh index 9741c341f..4504e271c 100755 --- a/ush/process_args.sh +++ b/ush/process_args.sh @@ -100,8 +100,7 @@ function process_args() { if [ "$#" -lt 1 ]; then print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": Incorrect number of arguments specified. -Usage: +Incorrect number of arguments specified. Usage: ${FUNCNAME[0]} valid_arg_names_array_name \ arg_val_pair1 \ @@ -123,7 +122,8 @@ where the arguments are defined as follows: the valid arguments listed in valid_arg_names_array_name need to be set, and the argument-value pairs can be in any order, i.e. they don't have to follow the order of arguments listed in valid_arg_names_ar- - ray_name.\n" + ray_name. 
+" fi # @@ -167,13 +167,11 @@ where the arguments are defined as follows: # if [ "${num_arg_val_pairs}" -gt "${num_valid_args}" ]; then print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": The number of argument-value pairs specified on the command line (num_- arg_val_pairs) must be less than or equal to the number of valid argu- ments (num_valid_args) specified in the array valid_arg_names: num_arg_val_pairs = ${num_arg_val_pairs} - num_valid_args = ${num_valid_args} -" + num_valid_args = ${num_valid_args}" fi # #----------------------------------------------------------------------- @@ -260,8 +258,7 @@ The current argument has already been assigned a value on the command line: arg_name = \"${arg_name}\" cmd_line = ${cmd_line} -Please assign values to arguments only once on the command line. -" +Please assign values to arguments only once on the command line." fi fi diff --git a/ush/set_extrn_mdl_params.sh b/ush/set_extrn_mdl_params.sh index d5036abff..56e86a8c9 100644 --- a/ush/set_extrn_mdl_params.sh +++ b/ush/set_extrn_mdl_params.sh @@ -46,8 +46,7 @@ The system directory in which to look for the files generated by the ex- ternal model specified by EXTRN_MDL_NAME_ICS has not been specified for this machine and external model combination: MACHINE = \"$MACHINE\" - EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\" -" + EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\"" ;; esac ;; @@ -76,8 +75,7 @@ The system directory in which to look for the files generated by the ex- ternal model specified by EXTRN_MDL_NAME_ICS has not been specified for this machine and external model combination: MACHINE = \"$MACHINE\" - EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\" -" + EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\"" ;; esac ;; @@ -97,8 +95,7 @@ The system directory in which to look for the files generated by the ex- ternal model specified by EXTRN_MDL_NAME_ICS has not been specified for this machine and external model combination: MACHINE = \"$MACHINE\" - EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\" -" + EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\"" ;; esac ;; @@ -118,8 +115,7 @@ The system directory in which to look for the files generated by the ex- ternal model specified by EXTRN_MDL_NAME_ICS has not been specified for this machine and external model combination: MACHINE = \"$MACHINE\" - EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\" -" + EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\"" ;; esac ;; @@ -195,12 +191,11 @@ else ;; *) print_err_msg_exit "\ - The system directory in which to look for the files generated by the ex- - ternal model specified by EXTRN_MDL_NAME_LBCS has not been specified for - this machine and external model combination: - MACHINE = \"$MACHINE\" - EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\" -" +The system directory in which to look for the files generated by the ex- +ternal model specified by EXTRN_MDL_NAME_LBCS has not been specified for +this machine and external model combination: + MACHINE = \"$MACHINE\" + EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\"" ;; esac ;; @@ -226,12 +221,11 @@ else ;; *) print_err_msg_exit "\ - The system directory in which to look for the files generated by the ex- - ternal model specified by EXTRN_MDL_NAME_LBCS has not been specified for - this machine and external model combination: - MACHINE = \"$MACHINE\" - EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\" -" +The system directory in which to look for the files generated by the ex- +ternal model specified by EXTRN_MDL_NAME_LBCS has not been specified for +this machine and external model 
combination: + MACHINE = \"$MACHINE\" + EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\"" ;; esac ;; @@ -248,12 +242,11 @@ else ;; *) print_err_msg_exit "\ - The system directory in which to look for the files generated by the ex- - ternal model specified by EXTRN_MDL_NAME_LBCS has not been specified for - this machine and external model combination: - MACHINE = \"$MACHINE\" - EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\" -" +The system directory in which to look for the files generated by the ex- +ternal model specified by EXTRN_MDL_NAME_LBCS has not been specified for +this machine and external model combination: + MACHINE = \"$MACHINE\" + EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\"" ;; esac ;; @@ -273,12 +266,11 @@ else *) print_err_msg_exit "\ - The system directory in which to look for the files generated by the ex- - ternal model specified by EXTRN_MDL_NAME_LBCS has not been specified for - this machine and external model combination: - MACHINE = \"$MACHINE\" - EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\" -" +The system directory in which to look for the files generated by the ex- +ternal model specified by EXTRN_MDL_NAME_LBCS has not been specified for +this machine and external model combination: + MACHINE = \"$MACHINE\" + EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\"" ;; esac ;; diff --git a/ush/set_file_param.sh b/ush/set_file_param.sh index 62382cf87..ed51e228a 100644 --- a/ush/set_file_param.sh +++ b/ush/set_file_param.sh @@ -25,8 +25,7 @@ function set_file_param() { # if [ "$#" -ne 3 ]; then print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": Incorrect number of arguments specified. -Usage: +Incorrect number of arguments specified. Usage: ${FUNCNAME[0]} file_full_path param value @@ -134,7 +133,6 @@ Setting parameter \"$param\" in file \"$file\" to \"$value\" ..." 
# *) print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": The regular expressions for performing search and replace have not been specified for this file: file = \"$file\"" @@ -156,8 +154,8 @@ specified for this file: sed -i -r -e "s%${regex_search}%${regex_replace}%" "${file_full_path}" else print_err_msg_exit "\ -Specified file (file_full_path) does not contain the searched-for regular -expression (regex_search): +Specified file (file_full_path) does not contain the searched-for regu- +lar expression (regex_search): file_full_path = \"${file_full_path}\" param = \"$param\" value = \"$value\" diff --git a/ush/set_predef_grid_params.sh b/ush/set_predef_grid_params.sh index f1b250e45..f85c521a1 100644 --- a/ush/set_predef_grid_params.sh +++ b/ush/set_predef_grid_params.sh @@ -60,8 +60,7 @@ case ${PREDEF_GRID_NAME} in The parameters for a \"${GRID_GEN_METHOD}\" type grid have not yet been specified for this predefined domain: PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" - GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\" -" + GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\"" elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then @@ -115,8 +114,7 @@ predefined domain: The parameters for a \"${GRID_GEN_METHOD}\" type grid have not yet been specified for this predefined domain: PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" - GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\" -" + GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\"" elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then @@ -170,8 +168,7 @@ predefined domain: The parameters for a \"${GRID_GEN_METHOD}\" type grid have not yet been specified for this predefined domain: PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" - GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\" -" + GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\"" elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then @@ -309,8 +306,7 @@ predefined domain: The parameters for a \"${GRID_GEN_METHOD}\" type grid have not yet been specified for this predefined domain: PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" - GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\" -" + GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\"" elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then @@ -364,8 +360,7 @@ predefined domain: The parameters for a \"${GRID_GEN_METHOD}\" type grid have not yet been specified for this predefined domain: PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" - GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\" -" + GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\"" elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then @@ -419,8 +414,7 @@ predefined domain: The parameters for a \"${GRID_GEN_METHOD}\" type grid have not yet been specified for this predefined domain: PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" - GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\" -" + GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\"" elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then @@ -560,8 +554,7 @@ predefined domain: The parameters for a \"${GRID_GEN_METHOD}\" type grid have not yet been specified for this predefined domain: PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" - GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\" -" + GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\"" # Values from an EMC script. 
@@ -629,8 +622,7 @@ predefined domain: The parameters for a \"${GRID_GEN_METHOD}\" type grid have not yet been specified for this predefined domain: PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" - GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\" -" + GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\"" fi ;; diff --git a/ush/setup.sh b/ush/setup.sh index fa65e75b5..428bddbe5 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -177,11 +177,10 @@ fi # if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "FALSE" ] && \ [ ! -d "${SFC_CLIMO_DIR}" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The directory (SFC_CLIMO_DIR) that should contain the pre-generated sur- face climatology files does not exist: - SFC_CLIMO_DIR = \"${SFC_CLIMO_DIR}\" -" + SFC_CLIMO_DIR = \"${SFC_CLIMO_DIR}\"" fi # # If RUN_TASK_MAKE_SFC_CLIMO is set to "TRUE" and the variable specify- @@ -216,7 +215,7 @@ case $MACHINE in # "WCOSS_C") # - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Don't know how to set several parameters on MACHINE=\"$MACHINE\". Please specify the correct parameters for this machine in the setup script. Then remove this message and rerun." @@ -229,7 +228,7 @@ Then remove this message and rerun." # "WCOSS") # - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Don't know how to set several parameters on MACHINE=\"$MACHINE\". Please specify the correct parameters for this machine in the setup script. Then remove this message and rerun." @@ -279,7 +278,7 @@ Then remove this message and rerun." # "CHEYENNE") # - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Don't know how to set several parameters on MACHINE=\"$MACHINE\". Please specify the correct parameters for this machine in the setup script. Then remove this message and rerun." @@ -407,7 +406,7 @@ fi # DATE_OR_NULL=$( printf "%s" "$DATE_FIRST_CYCL" | sed -n -r -e "s/^([0-9]{8})$/\1/p" ) if [ -z "${DATE_OR_NULL}" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ DATE_FIRST_CYCL must be a string consisting of exactly 8 digits of the form \"YYYYMMDD\", where YYYY is the 4-digit year, MM is the 2-digit month, DD is the 2-digit day-of-month, and HH is the 2-digit hour-of- @@ -417,7 +416,7 @@ fi DATE_OR_NULL=$( printf "%s" "$DATE_LAST_CYCL" | sed -n -r -e "s/^([0-9]{8})$/\1/p" ) if [ -z "${DATE_OR_NULL}" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ DATE_LAST_CYCL must be a string consisting of exactly 8 digits of the form \"YYYYMMDD\", where YYYY is the 4-digit year, MM is the 2-digit month, DD is the 2-digit day-of-month, and HH is the 2-digit hour-of- @@ -441,7 +440,7 @@ for CYCL in "${CYCL_HRS[@]}"; do CYCL_OR_NULL=$( printf "%s" "$CYCL" | sed -n -r -e "s/^([0-9]{2})$/\1/p" ) if [ -z "${CYCL_OR_NULL}" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Each element of CYCL_HRS must be a string consisting of exactly 2 digits (including a leading \"0\", if necessary) specifying an hour-of-day. Ele- ment #$i of CYCL_HRS (where the index of the first element is 0) does not @@ -451,7 +450,7 @@ have this form: fi if [ "${CYCL_OR_NULL}" -lt "0" ] || [ "${CYCL_OR_NULL}" -gt "23" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Each element of CYCL_HRS must be an integer between \"00\" and \"23\", in- clusive (including a leading \"0\", if necessary), specifying an hour-of- day. 
Element #$i of CYCL_HRS (where the index of the first element is 0) @@ -552,7 +551,7 @@ NEMSfv3gfs_DIR="$SORCDIR/NEMSfv3gfs" # Make sure that the NEMSfv3gfs_DIR directory exists. # if [ ! -d "${NEMSfv3gfs_DIR}" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The NEMSfv3gfs directory specified by NEMSfv3gfs_DIR that should contain the FV3 source code does not exist: NEMSfv3gfs_DIR = \"${NEMSfv3gfs_DIR}\" @@ -599,10 +598,9 @@ case $MACHINE in ;; *) - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Directories have not been specified for this machine: - MACHINE = \"$MACHINE\" -" + MACHINE = \"$MACHINE\"" ;; esac @@ -618,7 +616,7 @@ esac # FCST_LEN_HRS_MAX="999" if [ "$FCST_LEN_HRS" -gt "$FCST_LEN_HRS_MAX" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ Forecast length is greater than maximum allowed length: FCST_LEN_HRS = $FCST_LEN_HRS FCST_LEN_HRS_MAX = $FCST_LEN_HRS_MAX" @@ -636,7 +634,7 @@ fi rem=$(( ${FCST_LEN_HRS}%${LBC_UPDATE_INTVL_HRS} )) if [ "$rem" -ne "0" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The forecast length (FCST_LEN_HRS) is not evenly divisible by the later- al boundary conditions update interval (LBC_UPDATE_INTVL_HRS): FCST_LEN_HRS = $FCST_LEN_HRS @@ -731,10 +729,9 @@ mkdir_vrfy -p "${EXPT_BASEDIR}" #----------------------------------------------------------------------- # if [ -z "${EXPT_SUBDIR}" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The name of the experiment subdirectory (EXPT_SUBDIR) cannot be empty: - EXPT_SUBDIR = \"${EXPT_SUBDIR}\" -" + EXPT_SUBDIR = \"${EXPT_SUBDIR}\"" fi # #----------------------------------------------------------------------- @@ -891,15 +888,12 @@ else #----------------------------------------------------------------------- # if [ "${RUN_TASK_MAKE_GRID}" = "FALSE" ]; then - if [ ! -d "${GRID_DIR}" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The directory (GRID_DIR) that should contain the pre-generated grid files does not exist: - GRID_DIR = \"${GRID_DIR}\" -" + GRID_DIR = \"${GRID_DIR}\"" fi - else GRID_DIR="$EXPTDIR/grid" fi @@ -914,15 +908,12 @@ files does not exist: #----------------------------------------------------------------------- # if [ "${RUN_TASK_MAKE_OROG}" = "FALSE" ]; then - if [ ! -d "${OROG_DIR}" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The directory (OROG_DIR) that should contain the pre-generated orography files does not exist: - OROG_DIR = \"${OROG_DIR}\" -" + OROG_DIR = \"${OROG_DIR}\"" fi - else OROG_DIR="$EXPTDIR/orog" fi @@ -939,11 +930,10 @@ files does not exist: if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "FALSE" ]; then if [ ! 
-d "${SFC_CLIMO_DIR}" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The directory (SFC_CLIMO_DIR) that should contain the pre-generated orography files does not exist: - SFC_CLIMO_DIR = \"${SFC_CLIMO_DIR}\" -" + SFC_CLIMO_DIR = \"${SFC_CLIMO_DIR}\"" fi else @@ -1005,22 +995,20 @@ if [ "${RUN_ENVIR}" = "nco" ]; then if [ "${EXTRN_MDL_NAME_ICS}" != "FV3GFS" ] && \ [ "${EXTRN_MDL_NAME_ICS}" != "GSMGFS" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ When RUN_ENVIR set to \"nco\", the external model used for the initial conditions and surface fields must be either \"FV3GFS\" or \"GSMGFS\": RUN_ENVIR = \"${RUN_ENVIR}\" - EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\" -" + EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\"" fi if [ "${EXTRN_MDL_NAME_LBCS}" != "FV3GFS" ] && \ [ "${EXTRN_MDL_NAME_LBCS}" != "GSMGFS" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ When RUN_ENVIR set to \"nco\", the external model used for the initial conditions and surface fields must be either \"FV3GFS\" or \"GSMGFS\": RUN_ENVIR = \"${RUN_ENVIR}\" - EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\" -" + EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\"" fi fi @@ -1173,7 +1161,7 @@ component if it is being used) are: # rem=$(( $nx_T7%$layout_x )) if [ $rem -ne 0 ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The number of grid cells in the x direction (nx_T7) is not evenly divisible by the number of MPI tasks in the x direction (layout_x): nx_T7 = $nx_T7 @@ -1182,7 +1170,7 @@ fi rem=$(( $ny_T7%$layout_y )) if [ $rem -ne 0 ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The number of grid cells in the y direction (ny_T7) is not evenly divisible by the number of MPI tasks in the y direction (layout_y): ny_T7 = $ny_T7 @@ -1208,7 +1196,7 @@ num_cols_per_task=$(( $nx_per_task*$ny_per_task )) rem=$(( $num_cols_per_task%$blocksize )) if [ $rem -ne 0 ]; then prime_factors_num_cols_per_task=$( factor $num_cols_per_task | sed -r -e 's/^[0-9]+: (.*)/\1/' ) - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The number of columns assigned to a given MPI task must be divisible by the blocksize: nx_per_task = nx_T7/layout_x = $nx_T7/$layout_x = $nx_per_task @@ -1235,7 +1223,7 @@ fi if [ "$QUILTING" = "TRUE" ]; then if [ -z "${WRTCMP_PARAMS_TEMPLATE_FN}" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The write-component template file name (WRTCMP_PARAMS_TEMPLATE_FN) must be set to a non-empty value when quilting (i.e. the write-component) is enabled: @@ -1245,7 +1233,7 @@ enabled: WRTCMP_PARAMS_TEMPLATE_FP="${TEMPLATE_DIR}/${WRTCMP_PARAMS_TEMPLATE_FN}" if [ ! 
-f "${WRTCMP_PARAMS_TEMPLATE_FP}" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The write-component template file does not exist or is not a file: WRTCMP_PARAMS_TEMPLATE_FP = \"${WRTCMP_PARAMS_TEMPLATE_FP}\"" fi @@ -1270,7 +1258,7 @@ if [ "$QUILTING" = "TRUE" ]; then rem=$(( $ny_T7%${WRTCMP_write_tasks_per_group} )) if [ $rem -ne 0 ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The number of grid points in the y direction on the regional grid (ny_- T7) must be evenly divisible by the number of tasks per write group (WRTCMP_write_tasks_per_group): @@ -1316,12 +1304,11 @@ NUM_NODES=$(( (${PE_MEMBER01} + ${ncores_per_node} - 1)/${ncores_per_node} )) num_fixam_files_sysdir="${#FIXam_FILES_SYSDIR[@]}" num_fixam_files_exptdir="${#FIXam_FILES_EXPTDIR[@]}" if [ "${num_fixam_files_sysdir}" -ne "${num_fixam_files_exptdir}" ]; then - print_err_msg_exit "${script_name}" "\ + print_err_msg_exit "\ The number of fixed files specified in FIXam_FILES_SYSDIR must be equal to that specified in FIXam_FILES_EXPTDIR: num_fixam_files_sysdir = ${num_fixam_files_sysdir} - num_fixam_files_exptdir = ${num_fixam_files_exptdir} -" + num_fixam_files_exptdir = ${num_fixam_files_exptdir}" else NUM_FIXam_FILES="${num_fixam_files_sysdir}" fi @@ -1680,7 +1667,7 @@ nh0_T7="$nh0_T7" nh3_T7="$nh3_T7" nh4_T7="$nh4_T7" EOM -} || print_err_msg_exit "${script_name}" "\ +} || print_err_msg_exit "\ Heredoc (cat) command to append new variable definitions to variable definitions file returned with a nonzero status." # @@ -1716,7 +1703,7 @@ jstart_rgnl_wide_halo_T6SG="$jstart_rgnl_wide_halo_T6SG" jend_rgnl_wide_halo_T6SG="$jend_rgnl_wide_halo_T6SG" CRES="$CRES" EOM -} || print_err_msg_exit "${script_name}" "\ +} || print_err_msg_exit "\ Heredoc (cat) command to append grid parameters to variable definitions file returned with a nonzero status." @@ -1746,7 +1733,7 @@ RES="" # This will be set after the grid generation task is complete. CRES="" # This will be set after the grid generation task is complete. stretch_fac="$stretch_fac" EOM -} || print_err_msg_exit "${script_name}" "\ +} || print_err_msg_exit "\ Heredoc (cat) command to append grid parameters to variable definitions file returned with a nonzero status." @@ -1817,7 +1804,7 @@ LBC_UPDATE_FCST_HRS=(${LBC_UPDATE_FCST_HRS[@]}) # LBC_UPDATE_FCST_HRS is an arr ncores_per_node="${ncores_per_node}" PE_MEMBER01="${PE_MEMBER01}" EOM -} || print_err_msg_exit "${script_name}" "\ +} || print_err_msg_exit "\ Heredoc (cat) command to append new variable definitions to variable definitions file returned with a nonzero status." # From e76b2564c2e7db67b1d80a30b599a73ab356bb3e Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 25 Oct 2019 15:59:48 -0600 Subject: [PATCH 015/203] For clarity, change name of variable LOCAL_CONFIG_FN to CUSTOM_CONFIG_FN and improve informational and error messages about this. 
--- ush/compare_config_scripts.sh | 17 ++++++++++------- ush/setup.sh | 6 +++--- 2 files changed, 13 insertions(+), 10 deletions(-) diff --git a/ush/compare_config_scripts.sh b/ush/compare_config_scripts.sh index 626cbbd20..ed04598d8 100644 --- a/ush/compare_config_scripts.sh +++ b/ush/compare_config_scripts.sh @@ -60,7 +60,7 @@ sed -r \ -e "s/^([ ]*)([^ ]+.*)/\2/g" \ -e "/^#.*/d" \ -e "/^$/d" \ - ${LOCAL_CONFIG_FN} \ + ${CUSTOM_CONFIG_FN} \ ) # #----------------------------------------------------------------------- @@ -83,8 +83,9 @@ while read crnt_line; do if [ -z "${var_name}" ]; then print_info_msg "\ -Current line of configuration script \"${LOCAL_CONFIG_FN}\" does not contain -a variable name: +Current line (crnt_line) of custom experiment/workflow configuration +script (CUSTOM_CONFIG_FN) does not contain a variable name (var_name): + CUSTOM_CONFIG_FN = \"${CUSTOM_CONFIG_FN}\" crnt_line = \"${crnt_line}\" var_name = \"${var_name}\" Skipping to next line." @@ -98,11 +99,13 @@ Skipping to next line." # grep "^${var_name}=" <<< "${var_list_default}" > /dev/null 2>&1 || \ print_err_msg_exit "\ -Variable in local configuration script \"${LOCAL_CONFIG_FN}\" not set in default -configuration script \"${DEFAULT_CONFIG_FN}\": +Variable (var_name) in custom configuration script (CUSTOM_CONFIG_FN) +not defined in default configuration script (DEFAULT_CONFIG_FN): + CUSTOM_CONFIG_FN = \"${CUSTOM_CONFIG_FN}\" + DEFAULT_CONFIG_FN = \"${DEFAULT_CONFIG_FN}\" var_name = \"${var_name}\" -Please assign a default value to this variable in \"${DEFAULT_CONFIG_FN}\" -and rerun." +Please assign a default value to this variable in the default configura- +tion script and rerun." fi diff --git a/ush/setup.sh b/ush/setup.sh index 428bddbe5..884e001da 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -51,7 +51,7 @@ script_dir=$( dirname ${BASH_SOURCE[0]} ) #----------------------------------------------------------------------- # DEFAULT_CONFIG_FN="config_defaults.sh" -LOCAL_CONFIG_FN="config.sh" +CUSTOM_CONFIG_FN="config.sh" # #----------------------------------------------------------------------- # @@ -77,7 +77,7 @@ LOCAL_CONFIG_FN="config.sh" # #----------------------------------------------------------------------- # -if [ -f "$LOCAL_CONFIG_FN" ]; then +if [ -f "${CUSTOM_CONFIG_FN}" ]; then # # We require that the variables being set in the local configuration # script have counterparts in the default configuration script. This is @@ -90,7 +90,7 @@ if [ -f "$LOCAL_CONFIG_FN" ]; then # # Now source the local configuration script. # - . ./$LOCAL_CONFIG_FN + . ./${CUSTOM_CONFIG_FN} # fi # From 8c4129ef933a6654db5b6493d7fb89c439a5c3e5 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 25 Oct 2019 16:16:37 -0600 Subject: [PATCH 016/203] (1) Remove older hash from Externals.cfg; (2) Cleanup up usage message and commented out lines in set_file_param.sh. 
--- Externals.cfg | 1 - ush/set_file_param.sh | 15 ++++++--------- 2 files changed, 6 insertions(+), 10 deletions(-) diff --git a/Externals.cfg b/Externals.cfg index 90d6c2b7f..feb02ac71 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -12,7 +12,6 @@ repo_url = https://github.com/NCAR/UFS_UTILS #Working hash of feature/chgres_grib2 branch hash = d6bd8e3 local_path = sorc/UFS_UTILS_chgres_grib2 -hash = cd0af74 required = True [NEMSfv3gfs] diff --git a/ush/set_file_param.sh b/ush/set_file_param.sh index ed51e228a..fe6ce0297 100644 --- a/ush/set_file_param.sh +++ b/ush/set_file_param.sh @@ -27,18 +27,20 @@ function set_file_param() { print_err_msg_exit "\ Incorrect number of arguments specified. Usage: - ${FUNCNAME[0]} file_full_path param value + ${func_name} file_full_path param value where the arguments are defined as follows: file_full_path: - Full path to the file in which the specified parameter's value will be set. + Full path to the file in which the specified parameter's value will be + set. param: Name of the parameter whose value will be set. value: - Value to set the parameter to." + Value to set the parameter to. +" fi # @@ -114,14 +116,9 @@ Setting parameter \"$param\" in file \"$file\" to \"$value\" ..." ;; # "${SCRIPT_VAR_DEFNS_FN}") - regex_search="(^\s*$param=)(\".*\"|[^ \"]*)(\s*[#].*)?$" # Whole line with regex_replace=\1. -# regex_search="(^\s*$param=)(\".*\"|[^ \"]*)(\s*[#].*)?" regex_search="(^\s*$param=)(\".*\")?([^ \"]*)?(\(.*\))?(\s*[#].*)?" -# regex_replace="\1\"$value\"\3" -# regex_replace="\1$value\3" -# regex_replace="\1\3" -# regex_replace="\1AAAA\2BBBB\3CCCC\4DDDD\5" regex_replace="\1$value\5" +# set_bash_param "${file_full_path}" "$param" "$value" ;; # #----------------------------------------------------------------------- From 979eb88204305d9193d0ff5cf06fda817b4ad760 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Mon, 28 Oct 2019 15:44:09 -0600 Subject: [PATCH 017/203] (1) In ush/print_msg.sh, change local variable names for clarity and clean up error messages; (2) For each J-job and ex-script, standardize the way the script's name and directory are obtained as well as the messages printed out upon entering and exiting the script. --- jobs/JREGIONAL_GET_EXTRN_FILES | 15 ++++--- jobs/JREGIONAL_MAKE_GRID | 19 ++++---- jobs/JREGIONAL_MAKE_IC_LBC0 | 19 ++++---- jobs/JREGIONAL_MAKE_LBC1_TO_LBCN | 15 ++++--- jobs/JREGIONAL_MAKE_OROG | 15 ++++--- jobs/JREGIONAL_MAKE_SFC_CLIMO | 15 ++++--- jobs/JREGIONAL_RUN_FV3 | 15 ++++--- jobs/JREGIONAL_RUN_POST | 15 ++++--- scripts/exregional_get_extrn_files.sh | 22 +++++---- scripts/exregional_make_grid.sh | 20 +++++---- scripts/exregional_make_ic_lbc0.sh | 18 +++++--- scripts/exregional_make_lbc1_to_lbcn.sh | 18 +++++--- scripts/exregional_make_orog.sh | 20 +++++---- scripts/exregional_make_sfc_climo.sh | 18 +++++--- scripts/exregional_run_fv3.sh | 18 +++++--- scripts/exregional_run_post.sh | 18 +++++--- ush/print_msg.sh | 59 ++++++++++++++----------- 17 files changed, 201 insertions(+), 138 deletions(-) diff --git a/jobs/JREGIONAL_GET_EXTRN_FILES b/jobs/JREGIONAL_GET_EXTRN_FILES index 3b21afa82..38528a51f 100755 --- a/jobs/JREGIONAL_GET_EXTRN_FILES +++ b/jobs/JREGIONAL_GET_EXTRN_FILES @@ -50,15 +50,18 @@ # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the name of this script as well as the directory in which it is +# located. 
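
All of the J-jobs and ex-scripts now share the same three-line preamble to
determine their own location (see the JREGIONAL_* and exregional_* hunks
below), i.e.:

  # Full path to the current script, plus its file name and directory.
  script_path=$( readlink -f "${BASH_SOURCE[0]}" )
  script_name=$( basename "${script_path}" )
  script_dir=$( dirname "${script_path}" )

The "Entering script" / "Exiting script" messages are standardized in the
same way: each script calls print_info_msg on entry and again on exit,
printing ${script_path} between two separator lines.
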
# #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +script_path=$( readlink -f "${BASH_SOURCE[0]}" ) +script_name=$( basename "${script_path}" ) +script_dir=$( dirname "${script_path}" ) +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${script_path}\" + This is the J-job script for the task that copies/fetches to a local di- rectory (either from disk or HPSS) the external model files from which initial or boundary condition files for the FV3 will be generated. @@ -243,7 +246,7 @@ Call to ex-script corresponding to J-job \"${script_name}\" failed." # print_info_msg "\n\ ======================================================================== -Exiting script: \"${script_name}\" +Exiting script: \"${script_path}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_GRID b/jobs/JREGIONAL_MAKE_GRID index 6198efae4..a9041135d 100755 --- a/jobs/JREGIONAL_MAKE_GRID +++ b/jobs/JREGIONAL_MAKE_GRID @@ -116,19 +116,22 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the name of this script as well as the directory in which it is +# located. # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +script_path=$( readlink -f "${BASH_SOURCE[0]}" ) +script_name=$( basename "${script_path}" ) +script_dir=$( dirname "${script_path}" ) +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${script_path}\" + This is the J-job script for the task that generates grid files. ========================================================================" # @@ -183,9 +186,9 @@ touch "$LOGDIR/make_grid_task_complete.txt" # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== -Exiting script: \"${script_name}\" +Exiting script: \"${script_path}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_IC_LBC0 b/jobs/JREGIONAL_MAKE_IC_LBC0 index b844ba19f..571cf84e7 100755 --- a/jobs/JREGIONAL_MAKE_IC_LBC0 +++ b/jobs/JREGIONAL_MAKE_IC_LBC0 @@ -31,17 +31,20 @@ # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the name of this script as well as the directory in which it is +# located. 
# #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +script_path=$( readlink -f "${BASH_SOURCE[0]}" ) +script_name=$( basename "${script_path}" ) +script_dir=$( dirname "${script_path}" ) +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${script_path}\" + This is the J-job script for the task that generates initial condition -(IC), surface, and zeroth hour lateral boundary condition (LBC0) files +(IC), surface, and zeroth-hour lateral boundary condition (LBC0) files for the FV3 (in NetCDF format). ========================================================================" # @@ -220,9 +223,9 @@ Call to ex-script corresponding to J-job \"${script_name}\" failed." # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg "\ ======================================================================== -Exiting script: \"${script_name}\" +Exiting script: \"${script_path}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN b/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN index bde2b4696..cc22e60e2 100755 --- a/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN +++ b/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN @@ -31,15 +31,18 @@ # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the name of this script as well as the directory in which it is +# located. # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +script_path=$( readlink -f "${BASH_SOURCE[0]}" ) +script_name=$( basename "${script_path}" ) +script_dir=$( dirname "${script_path}" ) +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${script_path}\" + This is the J-job script for the task that generates lateral boundary condition (LBC) files (in NetCDF format) for all LBC update hours (ex- cept hour zero). @@ -225,7 +228,7 @@ Call to ex-script corresponding to J-job \"${script_name}\" failed." # print_info_msg "\n\ ======================================================================== -Exiting script: \"${script_name}\" +Exiting script: \"${script_path}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_OROG b/jobs/JREGIONAL_MAKE_OROG index d85deee52..d9524af8c 100755 --- a/jobs/JREGIONAL_MAKE_OROG +++ b/jobs/JREGIONAL_MAKE_OROG @@ -22,15 +22,18 @@ # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the name of this script as well as the directory in which it is +# located. 
# #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +script_path=$( readlink -f "${BASH_SOURCE[0]}" ) +script_name=$( basename "${script_path}" ) +script_dir=$( dirname "${script_path}" ) +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${script_path}\" + This is the J-job script for the task that generates orography files. ========================================================================" # @@ -78,7 +81,7 @@ touch "$LOGDIR/make_orog_task_complete.txt" # print_info_msg "\n\ ======================================================================== -Exiting script: \"${script_name}\" +Exiting script: \"${script_path}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_SFC_CLIMO b/jobs/JREGIONAL_MAKE_SFC_CLIMO index 3d43cee41..be2038e2a 100755 --- a/jobs/JREGIONAL_MAKE_SFC_CLIMO +++ b/jobs/JREGIONAL_MAKE_SFC_CLIMO @@ -22,15 +22,18 @@ # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the name of this script as well as the directory in which it is +# located. # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +script_path=$( readlink -f "${BASH_SOURCE[0]}" ) +script_name=$( basename "${script_path}" ) +script_dir=$( dirname "${script_path}" ) +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${script_path}\" + This is the J-job script for the task that generates surface fields from climatology. ========================================================================" @@ -100,7 +103,7 @@ touch "$LOGDIR/make_sfc_climo_task_complete.txt" # print_info_msg "\n\ ======================================================================== -Exiting script: \"${script_name}\" +Exiting script: \"${script_path}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_FV3 b/jobs/JREGIONAL_RUN_FV3 index 71c8b1b20..0d1be76fc 100755 --- a/jobs/JREGIONAL_RUN_FV3 +++ b/jobs/JREGIONAL_RUN_FV3 @@ -32,15 +32,18 @@ # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the name of this script as well as the directory in which it is +# located. # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +script_path=$( readlink -f "${BASH_SOURCE[0]}" ) +script_name=$( basename "${script_path}" ) +script_dir=$( dirname "${script_path}" ) +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${script_path}\" + This is the J-job script for the task that runs a forecast with FV3 for the specified cycle. 
========================================================================" @@ -74,7 +77,7 @@ Call to ex-script corresponding to J-job \"${script_name}\" failed." # print_info_msg "\n\ ======================================================================== -Exiting script: \"${script_name}\" +Exiting script: \"${script_path}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_POST b/jobs/JREGIONAL_RUN_POST index 575f9a733..bda095ed9 100755 --- a/jobs/JREGIONAL_RUN_POST +++ b/jobs/JREGIONAL_RUN_POST @@ -31,15 +31,18 @@ # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the name of this script as well as the directory in which it is +# located. # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +script_path=$( readlink -f "${BASH_SOURCE[0]}" ) +script_name=$( basename "${script_path}" ) +script_dir=$( dirname "${script_path}" ) +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${script_path}\" + This is the J-job script for the task that runs the post-processor (UPP) on the output files corresponding to a specified forecast hour. ========================================================================" @@ -98,7 +101,7 @@ Call to ex-script corresponding to J-job \"${script_name}\" failed." # print_info_msg "\n\ ======================================================================== -Exiting script: \"${script_name}\" +Exiting script: \"${script_path}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/scripts/exregional_get_extrn_files.sh b/scripts/exregional_get_extrn_files.sh index 120bc564a..548d312a8 100755 --- a/scripts/exregional_get_extrn_files.sh +++ b/scripts/exregional_get_extrn_files.sh @@ -22,15 +22,18 @@ # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the name of this script as well as the directory in which it is +# located. # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +script_path=$( readlink -f "${BASH_SOURCE[0]}" ) +script_name=$( basename "${script_path}" ) +script_dir=$( dirname "${script_path}" ) +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${script_path}\" + This is the ex-script for the task that copies/fetches to a local direc- tory (either from disk or HPSS) the external model files from which ini- tial or boundary condition files for the FV3 will be generated. 
@@ -183,22 +186,23 @@ on disk (EXTRN_MDL_SYSDIR) to local directory (EXTRN_MDL_FILES_DIR): # if [ "${ICS_OR_LBCS}" = "ICS" ]; then - print_info_msg "\n\ + print_info_msg " ======================================================================== Successfully copied or linked to external model files on system disk needed for generating initial conditions and surface fields for the FV3 forecast!!! -Exiting script: \"${script_name}\" + +Exiting script: \"${script_path}\" ========================================================================" elif [ "${ICS_OR_LBCS}" = "LBCS" ]; then - print_info_msg "\n\ + print_info_msg " ======================================================================== Successfully copied or linked to external model files on system disk needed for generating lateral boundary conditions for the FV3 fore- cast!!! -Exiting script: \"${script_name}\" +Exiting script: \"${script_path}\" ========================================================================" fi diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index b8f7bc513..74f03519b 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -27,19 +27,22 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the name of this script as well as the directory in which it is +# located. # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +script_path=$( readlink -f "${BASH_SOURCE[0]}" ) +script_name=$( basename "${script_path}" ) +script_dir=$( dirname "${script_path}" ) +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${script_path}\" + This is the ex-script for the task that generates grid files. ========================================================================" # @@ -531,10 +534,11 @@ Call to script to create links to grid files failed." # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== Grid files with various halo widths generated successfully!!! -Exiting script: \"${script_name}\" + +Exiting script: \"${script_path}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/scripts/exregional_make_ic_lbc0.sh b/scripts/exregional_make_ic_lbc0.sh index f0b4423e6..2387d2ed2 100755 --- a/scripts/exregional_make_ic_lbc0.sh +++ b/scripts/exregional_make_ic_lbc0.sh @@ -22,15 +22,18 @@ # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the name of this script as well as the directory in which it is +# located. 
# #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +script_path=$( readlink -f "${BASH_SOURCE[0]}" ) +script_name=$( basename "${script_path}" ) +script_dir=$( dirname "${script_path}" ) +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${script_path}\" + This is the ex-script for the task that generates initial condition (IC), surface, and zeroth hour lateral boundary condition (LBC0) files for FV3 (in NetCDF format). @@ -485,11 +488,12 @@ mv_vrfy gfs_bndy.nc ${ICS_DIR}/gfs_bndy.tile${TILE_RGNL}.000.nc # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== Initial condition, surface, and zeroth hour lateral boundary condition files (in NetCDF format) for FV3 generated successfully!!! -Exiting script: \"${script_name}\" + +Exiting script: \"${script_path}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/scripts/exregional_make_lbc1_to_lbcn.sh b/scripts/exregional_make_lbc1_to_lbcn.sh index b822e08fe..7f59501d4 100755 --- a/scripts/exregional_make_lbc1_to_lbcn.sh +++ b/scripts/exregional_make_lbc1_to_lbcn.sh @@ -22,15 +22,18 @@ # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the name of this script as well as the directory in which it is +# located. # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +script_path=$( readlink -f "${BASH_SOURCE[0]}" ) +script_name=$( basename "${script_path}" ) +script_dir=$( dirname "${script_path}" ) +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${script_path}\" + This is the ex-script for the task that generates lateral boundary con- dition (LBC) files (in NetCDF format) for all LBC update hours (except hour zero). @@ -452,11 +455,12 @@ done # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== Lateral boundary condition (LBC) files (in NetCDF format) generated suc- cessfully for all LBC update hours (except hour zero)!!! -Exiting script: \"${script_name}\" + +Exiting script: \"${script_path}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index 8922d6850..3fc104bad 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -31,16 +31,19 @@ # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the name of this script as well as the directory in which it is +# located. 
# #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +script_path=$( readlink -f "${BASH_SOURCE[0]}" ) +script_name=$( basename "${script_path}" ) +script_dir=$( dirname "${script_path}" ) +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" -This is the ex-script for the task that generates grid files. +Entering script: \"${script_path}\" + +This is the ex-script for the task that generates orography files. ========================================================================" # #----------------------------------------------------------------------- @@ -616,10 +619,11 @@ fi # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== Orography files with various halo widths generated successfully!!! -Exiting script: \"${script_name}\" + +Exiting script: \"${script_path}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/scripts/exregional_make_sfc_climo.sh b/scripts/exregional_make_sfc_climo.sh index ffdf6fa16..671d83165 100755 --- a/scripts/exregional_make_sfc_climo.sh +++ b/scripts/exregional_make_sfc_climo.sh @@ -22,15 +22,18 @@ # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the name of this script as well as the directory in which it is +# located. # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +script_path=$( readlink -f "${BASH_SOURCE[0]}" ) +script_name=$( basename "${script_path}" ) +script_dir=$( dirname "${script_path}" ) +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${script_path}\" + This is the ex-script for the task that generates surface fields from climatology. ========================================================================" @@ -254,10 +257,11 @@ touch "make_sfc_climo_files_task_complete.txt" # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== All surface climatology files generated successfully!!! -Exiting script: \"${script_name}\" + +Exiting script: \"${script_path}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/scripts/exregional_run_fv3.sh b/scripts/exregional_run_fv3.sh index d71ed8f1a..a315aa836 100755 --- a/scripts/exregional_run_fv3.sh +++ b/scripts/exregional_run_fv3.sh @@ -22,15 +22,18 @@ # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the name of this script as well as the directory in which it is +# located. 
# #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +script_path=$( readlink -f "${BASH_SOURCE[0]}" ) +script_name=$( basename "${script_path}" ) +script_dir=$( dirname "${script_path}" ) +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${script_path}\" + This is the ex-script for the task that runs a forecast with FV3 for the specified cycle. ========================================================================" @@ -596,10 +599,11 @@ code." # #----------------------------------------------------------------------- # -print_info_msg "\ +print_info_msg " ======================================================================== FV3 forecast completed successfully!!! -Exiting script: \"${script_name}\" + +Exiting script: \"${script_path}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/scripts/exregional_run_post.sh b/scripts/exregional_run_post.sh index c5abcc54b..645729215 100755 --- a/scripts/exregional_run_post.sh +++ b/scripts/exregional_run_post.sh @@ -22,15 +22,18 @@ # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the name of this script as well as the directory in which it is +# located. # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -print_info_msg "\n\ +script_path=$( readlink -f "${BASH_SOURCE[0]}" ) +script_name=$( basename "${script_path}" ) +script_dir=$( dirname "${script_path}" ) +print_info_msg " ======================================================================== -Entering script: \"${script_name}\" +Entering script: \"${script_path}\" + This is the ex-script for the task that runs the post-processor (UPP) on the output files corresponding to a specified forecast hour. ========================================================================" @@ -310,10 +313,11 @@ rm_vrfy -rf ${fhr_dir} # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== Post-processing for forecast hour $fhr completed successfully. -Exiting script: \"${script_name}\" + +Exiting script: \"${script_path}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/ush/print_msg.sh b/ush/print_msg.sh index 6350be61c..5b0c3b03e 100644 --- a/ush/print_msg.sh +++ b/ush/print_msg.sh @@ -28,15 +28,15 @@ function print_info_msg() { #----------------------------------------------------------------------- # # Get the name of this function as well as information about the calling -# script/function. +# script or function. 
# #----------------------------------------------------------------------- # local crnt_func="${FUNCNAME[0]}" - local caller_fp=$( readlink -f "${BASH_SOURCE[1]}" ) - local caller_fn=$( basename "${caller_fp}" ) - local caller_dir=$( dirname "${caller_fp}" ) - local caller_func="${FUNCNAME[1]}" + local caller_path=$( readlink -f "${BASH_SOURCE[1]}" ) + local caller_filename=$( basename "${caller_path}" ) + local caller_dir=$( dirname "${caller_path}" ) + local caller_name="${FUNCNAME[1]}" # #----------------------------------------------------------------------- # @@ -73,11 +73,15 @@ function print_info_msg() { # else - printf "\ -Function \"${crnt_func}\": Incorrect number of arguments specified. + print_err_msg_exit " +Incorrect number of arguments specified: + + script/function name = \"${crnt_func}\" + number of arguments specified = $# + Usage: - ${crnt_func} [verbose] info_msg + ${crnt_func} [verbose] info_msg where the arguments are defined as follows: @@ -88,11 +92,12 @@ where the arguments are defined as follows: info_msg: This is the informational message to print to stdout. -This function prints an informational message to stout. If one argument -is passed in, then that argument is assumed to be info_msg and is print- -ed. If two arguments are passed in, then the first is assumed to be -verbose and the second info_msg. In this case, info_msg gets printed -only if verbose is set to \"TRUE\".\n" +This function prints an informational message to stdout. If one argu- +ment is passed in, then that argument is assumed to be info_msg and is +printed. If two arguments are passed in, then the first is assumed to +be verbose and the second info_msg. In this case, info_msg gets printed +only if verbose is set to \"TRUE\". +" fi # @@ -141,15 +146,15 @@ function print_err_msg_exit() { #----------------------------------------------------------------------- # # Get the name of this function as well as information about the calling -# script/function. +# script or function. # #----------------------------------------------------------------------- # local crnt_func="${FUNCNAME[0]}" - local caller_fp=$( readlink -f "${BASH_SOURCE[1]}" ) - local caller_fn=$( basename "${caller_fp}" ) - local caller_dir=$( dirname "${caller_fp}" ) - local caller_func="${FUNCNAME[1]}" + local caller_path=$( readlink -f "${BASH_SOURCE[1]}" ) + local caller_filename=$( basename "${caller_path}" ) + local caller_dir=$( dirname "${caller_path}" ) + local caller_name="${FUNCNAME[1]}" # #----------------------------------------------------------------------- # @@ -168,10 +173,9 @@ function print_err_msg_exit() { #----------------------------------------------------------------------- # msg_header=$( printf "\n\ -ERROR from: - function: \"${caller_func}\" (will be set to \"source\" for a script) - file: \"${caller_fn}\" - directory: \"${caller_dir}\" +ERROR: + From script/function: \"${caller_name}\" (This gets set to \"source\" for a script, or to \"main\" for the top-level script.) + In file: \"${caller_path}\" " ) msg_footer=$( printf "\nExiting with nonzero status." ) @@ -185,18 +189,21 @@ ERROR from: # if [ "$#" -gt 1 ]; then - printf "\ -Function \"${crnt_func}\": Incorrect number of arguments specified. + print_err_msg_exit " +Incorrect number of arguments specified: + + script/function name = \"${crnt_func}\" + number of arguments specified = $# + Usage: - ${crnt_func} err_msg + ${crnt_func} err_msg where err_msg is an optional error message to print to stderr. 
Note that a header and a footer are always added to err_msg. Thus, if err_- msg is not specified, the message that is printed will consist of only the header and footer. " 1>&2 - exit 1 # #----------------------------------------------------------------------- # From eaa63f2c3a1631c90c67492e8c8070e29a9deb82 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Mon, 28 Oct 2019 16:15:00 -0600 Subject: [PATCH 018/203] (1) Remove "\n\" character set from start of messages passed to print_info_msg function; (2) Include newlines at start of and remove newlines from end of all informational messages issued using the function print_info_msg. --- jobs/JREGIONAL_GET_EXTRN_FILES | 2 +- jobs/JREGIONAL_MAKE_IC_LBC0 | 2 +- jobs/JREGIONAL_MAKE_LBC1_TO_LBCN | 2 +- jobs/JREGIONAL_MAKE_OROG | 2 +- jobs/JREGIONAL_MAKE_SFC_CLIMO | 2 +- jobs/JREGIONAL_RUN_FV3 | 2 +- jobs/JREGIONAL_RUN_POST | 2 +- scripts/exregional_get_extrn_files.sh | 34 ++++++++++++++----------- scripts/exregional_make_grid.sh | 22 ++++++++-------- scripts/exregional_make_ic_lbc0.sh | 2 +- scripts/exregional_make_lbc1_to_lbcn.sh | 2 +- scripts/exregional_make_orog.sh | 2 +- scripts/exregional_make_sfc_climo.sh | 2 +- scripts/exregional_run_fv3.sh | 18 ++++++------- scripts/exregional_run_post.sh | 4 +-- tests/run_one_expt.sh | 12 ++++----- ush/get_extrn_mdl_file_dir_info.sh | 2 +- ush/link_fix.sh | 5 ++-- ush/set_file_param.sh | 2 +- 19 files changed, 63 insertions(+), 58 deletions(-) diff --git a/jobs/JREGIONAL_GET_EXTRN_FILES b/jobs/JREGIONAL_GET_EXTRN_FILES index 38528a51f..1db51edad 100755 --- a/jobs/JREGIONAL_GET_EXTRN_FILES +++ b/jobs/JREGIONAL_GET_EXTRN_FILES @@ -244,7 +244,7 @@ Call to ex-script corresponding to J-job \"${script_name}\" failed." # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== Exiting script: \"${script_path}\" ========================================================================" diff --git a/jobs/JREGIONAL_MAKE_IC_LBC0 b/jobs/JREGIONAL_MAKE_IC_LBC0 index 571cf84e7..92786dc65 100755 --- a/jobs/JREGIONAL_MAKE_IC_LBC0 +++ b/jobs/JREGIONAL_MAKE_IC_LBC0 @@ -223,7 +223,7 @@ Call to ex-script corresponding to J-job \"${script_name}\" failed." # #----------------------------------------------------------------------- # -print_info_msg "\ +print_info_msg " ======================================================================== Exiting script: \"${script_path}\" ========================================================================" diff --git a/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN b/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN index cc22e60e2..3761418f1 100755 --- a/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN +++ b/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN @@ -226,7 +226,7 @@ Call to ex-script corresponding to J-job \"${script_name}\" failed." 
# #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== Exiting script: \"${script_path}\" ========================================================================" diff --git a/jobs/JREGIONAL_MAKE_OROG b/jobs/JREGIONAL_MAKE_OROG index d9524af8c..6f799c15e 100755 --- a/jobs/JREGIONAL_MAKE_OROG +++ b/jobs/JREGIONAL_MAKE_OROG @@ -79,7 +79,7 @@ touch "$LOGDIR/make_orog_task_complete.txt" # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== Exiting script: \"${script_path}\" ========================================================================" diff --git a/jobs/JREGIONAL_MAKE_SFC_CLIMO b/jobs/JREGIONAL_MAKE_SFC_CLIMO index be2038e2a..f2e630e34 100755 --- a/jobs/JREGIONAL_MAKE_SFC_CLIMO +++ b/jobs/JREGIONAL_MAKE_SFC_CLIMO @@ -101,7 +101,7 @@ touch "$LOGDIR/make_sfc_climo_task_complete.txt" # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== Exiting script: \"${script_path}\" ========================================================================" diff --git a/jobs/JREGIONAL_RUN_FV3 b/jobs/JREGIONAL_RUN_FV3 index 0d1be76fc..9cd8234d5 100755 --- a/jobs/JREGIONAL_RUN_FV3 +++ b/jobs/JREGIONAL_RUN_FV3 @@ -75,7 +75,7 @@ Call to ex-script corresponding to J-job \"${script_name}\" failed." # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== Exiting script: \"${script_path}\" ========================================================================" diff --git a/jobs/JREGIONAL_RUN_POST b/jobs/JREGIONAL_RUN_POST index bda095ed9..3144cb8f7 100755 --- a/jobs/JREGIONAL_RUN_POST +++ b/jobs/JREGIONAL_RUN_POST @@ -99,7 +99,7 @@ Call to ex-script corresponding to J-job \"${script_name}\" failed." # #----------------------------------------------------------------------- # -print_info_msg "\n\ +print_info_msg " ======================================================================== Exiting script: \"${script_path}\" ========================================================================" diff --git a/scripts/exregional_get_extrn_files.sh b/scripts/exregional_get_extrn_files.sh index 548d312a8..cf4d7337a 100755 --- a/scripts/exregional_get_extrn_files.sh +++ b/scripts/exregional_get_extrn_files.sh @@ -63,7 +63,7 @@ process_args valid_args "$@" # set to. 
if [ "$VERBOSE" = "TRUE" ]; then num_valid_args="${#valid_args[@]}" - print_info_msg "\n\ + print_info_msg " The arguments to script/function \"${script_name}\" have been set as follows: " @@ -99,26 +99,30 @@ for FP in "${EXTRN_MDL_FPS[@]}"; do if [ -f "$FP" ]; then if [ $( find "$FP" -mmin +${min_age} ) ]; then + num_files_found_on_disk=$(( num_files_found_on_disk+1 )) - print_info_msg "\n\ + print_info_msg " File FP exists on system disk and is older than the minimum required age of min_age minutes: FP = \"$FP\" - min_age = ${min_age} min" + min_age = ${min_age} minutes" + else - print_info_msg "\n\ + + print_info_msg " File FP exists on system disk and but is NOT older than the minumum re- quired age of min_age minutes: FP = \"$FP\" - min_age = ${min_age} min + min_age = ${min_age} minutes Will try fetching all external model files from HPSS. Not checking pre- sence and age of remaining external model files on system disk." break + fi else - print_info_msg "\n\ + print_info_msg " File FP does NOT exist on system disk: FP = \"$FP\" Will try fetching all external model files from HPSS. Not checking pre- @@ -155,25 +159,25 @@ if [ "${DATA_SRC}" = "disk" ]; then if [ "${RUN_ENVIR}" = "nco" ]; then - print_info_msg "\ + print_info_msg " Creating links in local directory (EXTRN_MDL_FILES_DIR) to external mo- del files (EXTRN_MDL_FNS) in the system directory on disk (EXTRN_MDL_- SYSDIR): EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\" EXTRN_MDL_SYSDIR = \"${EXTRN_MDL_SYSDIR}\" - EXTRN_MDL_FNS = ${EXTRN_MDL_FNS_str} -" + EXTRN_MDL_FNS = ${EXTRN_MDL_FNS_str}" + ln_vrfy -sf -t ${EXTRN_MDL_FILES_DIR} ${EXTRN_MDL_FPS[@]} else - print_info_msg "\ + print_info_msg " Copying external model files (EXTRN_MDL_FNS) from the system directory on disk (EXTRN_MDL_SYSDIR) to local directory (EXTRN_MDL_FILES_DIR): EXTRN_MDL_SYSDIR = \"${EXTRN_MDL_SYSDIR}\" EXTRN_MDL_FNS = ${EXTRN_MDL_FNS_str} - EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\" -" + EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\"" + cp_vrfy ${EXTRN_MDL_FPS[@]} ${EXTRN_MDL_FILES_DIR} fi @@ -229,7 +233,7 @@ elif [ "${DATA_SRC}" = "HPSS" ]; then EXTRN_MDL_FPS_str="( "$( printf "\"%s\" " "${EXTRN_MDL_FPS[@]}" )")" EXTRN_MDL_ARCV_FPS_str="( "$( printf "\"%s\" " "${EXTRN_MDL_ARCV_FPS[@]}" )")" - print_info_msg "\n\ + print_info_msg " Fetching model output files from HPSS. The model output files (EXTRN_- MDL_FPS), the archive files on HPSS in which these output files are stored (EXTRN_MDL_ARCV_FPS), and the local directory into which they @@ -560,7 +564,7 @@ file UNZIP_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: # if [ "${ICS_OR_LBCS}" = "ICS" ]; then - print_info_msg "\n\ + print_info_msg " ======================================================================== External model files needed for generating initial condition and surface fields for the FV3SAR successfully fetched from HPSS!!! @@ -569,7 +573,7 @@ Exiting script: \"${script_name}\" elif [ "${ICS_OR_LBCS}" = "LBCS" ]; then - print_info_msg "\n\ + print_info_msg " ======================================================================== External model files needed for generating lateral boundary conditions on the halo of the FV3SAR's regional grid successfully fetched from diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index 74f03519b..770d5ed38 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -62,7 +62,7 @@ process_args valid_args "$@" # set to. 
if [ "$VERBOSE" = "TRUE" ]; then num_valid_args="${#valid_args[@]}" - print_info_msg "\n\ + print_info_msg " The arguments to script/function \"${script_name}\" have been set as follows: " @@ -323,7 +323,8 @@ mkdir_vrfy -p "$tmpdir" # #----------------------------------------------------------------------- # -print_info_msg "$VERBOSE" "Starting grid file generation..." +print_info_msg "$VERBOSE" " +Starting grid file generation..." if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then @@ -364,7 +365,7 @@ elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then RGNL_GRID_NML_FP="$tmpdir/${RGNL_GRID_NML_FN}" cp_vrfy ${TEMPLATE_DIR}/${RGNL_GRID_NML_FN} ${RGNL_GRID_NML_FP} - print_info_msg "$VERBOSE" "\ + print_info_msg "$VERBOSE" " Setting parameters in file: RGNL_GRID_NML_FP = \"$RGNL_GRID_NML_FP\"" # @@ -434,7 +435,8 @@ mv_vrfy ${CRES}_grid.tile${TILE_RGNL}.nc \ mv_vrfy ${CRES}_mosaic.nc ${GRID_DIR} cd_vrfy - -print_info_msg "$VERBOSE" "Grid file generation complete." +print_info_msg "$VERBOSE" " +Grid file generation complete." # #----------------------------------------------------------------------- # @@ -466,9 +468,9 @@ cd_vrfy ${tmpdir} # call the shave executable. Finally, move the resultant file to the # GRID_DIR directory. # -print_info_msg "$VERBOSE" "\ -\"Shaving\" grid file with wide halo to obtain grid file with ${nh3_T7}-cell- -wide halo..." +print_info_msg "$VERBOSE" " +\"Shaving\" grid file with wide halo to obtain grid file with ${nh3_T7}-cell-wide +halo..." nml_fn="input.shave.grid.halo${nh3_T7}" shaved_fp="${tmpdir}/${CRES}_grid.tile${TILE_RGNL}.halo${nh3_T7}.nc" @@ -490,9 +492,9 @@ mv_vrfy ${shaved_fp} ${GRID_DIR} # call the shave executable. Finally, move the resultant file to the # GRID_DIR directory. # -print_info_msg "$VERBOSE" "\ -\"Shaving\" grid file with wide halo to obtain grid file with ${nh4_T7}-cell- -wide halo..." +print_info_msg "$VERBOSE" " +\"Shaving\" grid file with wide halo to obtain grid file with ${nh4_T7}-cell-wide +halo..." nml_fn="input.shave.grid.halo${nh4_T7}" shaved_fp="${tmpdir}/${CRES}_grid.tile${TILE_RGNL}.halo${nh4_T7}.nc" diff --git a/scripts/exregional_make_ic_lbc0.sh b/scripts/exregional_make_ic_lbc0.sh index 2387d2ed2..387fd1ad9 100755 --- a/scripts/exregional_make_ic_lbc0.sh +++ b/scripts/exregional_make_ic_lbc0.sh @@ -62,7 +62,7 @@ process_args valid_args "$@" # set to. if [ "$VERBOSE" = "TRUE" ]; then num_valid_args="${#valid_args[@]}" - print_info_msg "\n\ + print_info_msg " The arguments to script/function \"${script_name}\" have been set as follows: " diff --git a/scripts/exregional_make_lbc1_to_lbcn.sh b/scripts/exregional_make_lbc1_to_lbcn.sh index 7f59501d4..057e362b4 100755 --- a/scripts/exregional_make_lbc1_to_lbcn.sh +++ b/scripts/exregional_make_lbc1_to_lbcn.sh @@ -63,7 +63,7 @@ process_args valid_args "$@" # set to. if [ "$VERBOSE" = "TRUE" ]; then num_valid_args="${#valid_args[@]}" - print_info_msg "\n\ + print_info_msg " The arguments to script/function \"${script_name}\" have been set as follows: " diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index 3fc104bad..53a7d8ad9 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -62,7 +62,7 @@ process_args valid_args "$@" # set to. 
if [ "$VERBOSE" = "TRUE" ]; then num_valid_args="${#valid_args[@]}" - print_info_msg "\n\ + print_info_msg " The arguments to script/function \"${script_name}\" have been set as follows: " diff --git a/scripts/exregional_make_sfc_climo.sh b/scripts/exregional_make_sfc_climo.sh index 671d83165..066517531 100755 --- a/scripts/exregional_make_sfc_climo.sh +++ b/scripts/exregional_make_sfc_climo.sh @@ -54,7 +54,7 @@ process_args valid_args "$@" # set to. if [ "$VERBOSE" = "TRUE" ]; then num_valid_args="${#valid_args[@]}" - print_info_msg "\n\ + print_info_msg " The arguments to script/function \"${script_name}\" have been set as follows: " diff --git a/scripts/exregional_run_fv3.sh b/scripts/exregional_run_fv3.sh index a315aa836..cba0b3b0b 100755 --- a/scripts/exregional_run_fv3.sh +++ b/scripts/exregional_run_fv3.sh @@ -54,7 +54,7 @@ process_args valid_args "$@" # set to. if [ "$VERBOSE" = "TRUE" ]; then num_valid_args="${#valid_args[@]}" - print_info_msg "\n\ + print_info_msg " The arguments to script/function \"${script_name}\" have been set as follows: " @@ -218,7 +218,7 @@ esac # #----------------------------------------------------------------------- # -print_info_msg "$VERBOSE" "\ +print_info_msg "$VERBOSE" " Creating links in the INPUT subdirectory of the current cycle's run di- rectory to the grid and (filtered) orography files ..." @@ -331,7 +331,7 @@ fi # #----------------------------------------------------------------------- # -print_info_msg "$VERBOSE" "\ +print_info_msg "$VERBOSE" " Creating links with names that FV3 looks for in the INPUT subdirectory of the current cycle's run directory (CYCLE_DIR)..." @@ -348,7 +348,7 @@ ln_vrfy -sf sfc_data.tile${TILE_RGNL}.halo${nh0_T7}.nc sfc_data.nc # cd_vrfy ${CYCLE_DIR} -print_info_msg "$VERBOSE" "\ +print_info_msg "$VERBOSE" " Creating links in the current cycle's run directory to static (fix) files in the FIXam directory..." # @@ -399,7 +399,7 @@ rm_vrfy -f time_stamp.out # #----------------------------------------------------------------------- # -print_info_msg "$VERBOSE" "\ +print_info_msg "$VERBOSE" " Creating links in the current cycle's run directory to cycle-independent model input files in the main experiment directory..." @@ -428,7 +428,7 @@ fi # #----------------------------------------------------------------------- # -print_info_msg "$VERBOSE" "\ +print_info_msg "$VERBOSE" " Copying cycle-independent model input files from the templates directory to the current cycle's run directory..." @@ -466,7 +466,7 @@ YYYYMMDD=${CDATE:0:8} # MODEL_CONFIG_FP="${CYCLE_DIR}/${MODEL_CONFIG_FN}" -print_info_msg "$VERBOSE" "\ +print_info_msg "$VERBOSE" " Setting parameters in file: MODEL_CONFIG_FP = \"${MODEL_CONFIG_FP}\"" @@ -535,7 +535,7 @@ fi # DIAG_TABLE_FP="${CYCLE_DIR}/${DIAG_TABLE_FN}" -print_info_msg "$VERBOSE" "\ +print_info_msg "$VERBOSE" " Setting parameters in file: DIAG_TABLE_FP = \"${DIAG_TABLE_FP}\"" @@ -559,7 +559,7 @@ else fi if [ -f $FV3SAR_EXEC ]; then - print_info_msg "$VERBOSE" "\ + print_info_msg "$VERBOSE" " Copying the FV3SAR executable to the run directory..." cp_vrfy ${FV3SAR_EXEC} ${CYCLE_DIR}/fv3_gfs.x else diff --git a/scripts/exregional_run_post.sh b/scripts/exregional_run_post.sh index 645729215..362b3228e 100755 --- a/scripts/exregional_run_post.sh +++ b/scripts/exregional_run_post.sh @@ -54,7 +54,7 @@ process_args valid_args "$@" # set to. 
if [ "$VERBOSE" = "TRUE" ]; then num_valid_args="${#valid_args[@]}" - print_info_msg "\n\ + print_info_msg " The arguments to script/function \"${script_name}\" have been set as follows: " @@ -70,7 +70,7 @@ fi # #----------------------------------------------------------------------- # -print_info_msg "$VERBOSE" "\ +print_info_msg "$VERBOSE" " Starting post-processing for fhr = $fhr hr..." case $MACHINE in diff --git a/tests/run_one_expt.sh b/tests/run_one_expt.sh index 38cc958dc..7c066c197 100755 --- a/tests/run_one_expt.sh +++ b/tests/run_one_expt.sh @@ -62,7 +62,7 @@ process_args valid_args "$@" # set to. if [ "$verbose" = "TRUE" ]; then num_valid_args="${#valid_args[@]}" - print_info_msg "\n\ + print_info_msg " The arguments to script/function \"${script_name}\" have been set as follows: " @@ -178,7 +178,7 @@ quilting="${7:-}" dot_quilting=".${quilting}." -print_info_msg "\ +print_info_msg " User-specified forecast parameters: predef_domain = \"${predef_domain}\" @@ -205,7 +205,7 @@ RUNDIR_BASE="$BASEDIR/run_dirs" RUN_SUBDIR="test_date_${TEST_DATE}/$EXPT_NAME" TMPDIR="$BASEDIR/work_dirs" -print_info_msg "\ +print_info_msg " Variables constructed from user-specified forecast parameters: BASEDIR = \"${BASEDIR}\" @@ -350,7 +350,7 @@ chmod u+x $RELAUNCH_SCR #----------------------------------------------------------------------- # CRONTAB_ORIG="$(pwd)/crontab.orig" -print_info_msg "\ +print_info_msg " Copying contents of user cron table to backup file: CRONTAB_ORIG = \"$CRONTAB_ORIG\"" crontab -l > $CRONTAB_ORIG @@ -368,14 +368,14 @@ exit_status=$? if [ "$exit_status" -eq 0 ]; then - print_info_msg "\ + print_info_msg " The following line already exists in the cron table and thus will not be added: crontab_line = \"$crontab_line\"" else - print_info_msg "\ + print_info_msg " Adding the following line to the cron table in order to automatically resubmit FV3SAR workflow: crontab_line = \"$crontab_line\"" diff --git a/ush/get_extrn_mdl_file_dir_info.sh b/ush/get_extrn_mdl_file_dir_info.sh index 4727e7fdf..5a9e0f644 100755 --- a/ush/get_extrn_mdl_file_dir_info.sh +++ b/ush/get_extrn_mdl_file_dir_info.sh @@ -65,7 +65,7 @@ function get_extrn_mdl_file_dir_info () { # set to. if [ "$VERBOSE" = "TRUE" ]; then num_valid_args="${#valid_args[@]}" - print_info_msg "\n\ + print_info_msg " The arguments to script/function \"${func_name}\" have been set as follows: " diff --git a/ush/link_fix.sh b/ush/link_fix.sh index 6b876623c..61d067f5c 100755 --- a/ush/link_fix.sh +++ b/ush/link_fix.sh @@ -66,7 +66,7 @@ process_args valid_args "$@" # if [ "$verbose" = "TRUE" ]; then num_valid_args="${#valid_args[@]}" - print_info_msg "\n\ + print_info_msg " The arguments to script/function \"${script_name}\" have been set as follows: " 1>&2 @@ -91,8 +91,7 @@ fi # if [ "$verbose" = "TRUE" ]; then print_info_msg " -Creating links in the FIXsar directory to the grid files... -" +Creating links in the FIXsar directory to the grid files..." fi # #----------------------------------------------------------------------- diff --git a/ush/set_file_param.sh b/ush/set_file_param.sh index fe6ce0297..1c92d2955 100644 --- a/ush/set_file_param.sh +++ b/ush/set_file_param.sh @@ -68,7 +68,7 @@ where the arguments are defined as follows: # #----------------------------------------------------------------------- # - print_info_msg "$VERBOSE" "\ + print_info_msg "$VERBOSE" " Setting parameter \"$param\" in file \"$file\" to \"$value\" ..." 
# #----------------------------------------------------------------------- From f3b815a18bee5e5a8d84eb929c56a1c18e429454 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Mon, 28 Oct 2019 16:48:59 -0600 Subject: [PATCH 019/203] For files missed in previous commit, include newlines at start of and remove newlines from end of all informational messages issued using the function print_info_msg. --- scripts/exregional_make_orog.sh | 21 +++++++++++++-------- tests/run_mltpl_expts.sh | 2 +- ush/compare_config_scripts.sh | 2 +- ush/generate_FV3SAR_wflow.sh | 4 ++-- ush/set_gridparams_GFDLgrid.sh | 10 +++++----- ush/setup.sh | 22 +++++++++------------- 6 files changed, 31 insertions(+), 30 deletions(-) diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index 53a7d8ad9..c6154046b 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -259,7 +259,8 @@ export exec_dir="$EXECDIR" # #----------------------------------------------------------------------- # -print_info_msg "$VERBOSE" "Starting orography file generation..." +print_info_msg "$VERBOSE" " +Starting orography file generation..." tmp_dir="${raw_dir}/tmp" @@ -319,7 +320,8 @@ ln_vrfy -sf oro.${CRES}.tile${TILE_RGNL}.halo${nhw_T7}.nc \ oro.${CRES}.tile${TILE_RGNL}.nc cd_vrfy - -print_info_msg "$VERBOSE" "Orography file generation complete." +print_info_msg "$VERBOSE" " +Orography file generation complete." # #----------------------------------------------------------------------- # @@ -327,7 +329,8 @@ print_info_msg "$VERBOSE" "Orography file generation complete." # #----------------------------------------------------------------------- # -print_info_msg "$VERBOSE" "Setting orography filtering parameters..." +print_info_msg "$VERBOSE" " +Setting orography filtering parameters..." # Need to fix the following (also above). Then redo to get cell_size_avg. #cd_vrfy ${GRID_DIR} @@ -444,7 +447,8 @@ fi # #----------------------------------------------------------------------- # -print_info_msg "$VERBOSE" "Starting filtering of orography..." +print_info_msg "$VERBOSE" " +Starting filtering of orography..." echo "gtype = \"$gtype\"" # The script below creates absolute symlinks in $filter_dir. That's @@ -478,7 +482,8 @@ mv_vrfy oro.${CRES}.tile${TILE_RGNL}.nc \ # oro.${CRES}.tile${TILE_RGNL}.nc cd_vrfy - -print_info_msg "$VERBOSE" "Filtering of orography complete." +print_info_msg "$VERBOSE" " +Filtering of orography complete." # #----------------------------------------------------------------------- # @@ -511,7 +516,7 @@ cd_vrfy ${shave_dir} # call the shave executable. Finally, move the resultant file to the # OROG_DIR directory. # -print_info_msg "$VERBOSE" "\ +print_info_msg "$VERBOSE" " \"Shaving\" orography file with wide halo to obtain orography file with ${nh0_T7}-cell-wide halo..." @@ -535,7 +540,7 @@ mv_vrfy ${shaved_fp} ${OROG_DIR} # Then call the shave executable. Finally, move the resultant file to # the OROG_DIR directory. # -print_info_msg "$VERBOSE" "\ +print_info_msg "$VERBOSE" " \"Shaving\" orography file with wide halo to obtain orography file with ${nh4_T7}-cell-wide halo..." @@ -580,7 +585,7 @@ Call to script to create links to orography files failed." # Moved the following to exregional_make_sfc_climo.sh script since it # needs to be done only if the make_sfc_climo task is run. -#print_info_msg "$VERBOSE" "\ +#print_info_msg "$VERBOSE" " #Creating links needed by the make_sfc_climo task to the 4-halo grid and #orography files..." 
# diff --git a/tests/run_mltpl_expts.sh b/tests/run_mltpl_expts.sh index e8aca9201..92e569e19 100755 --- a/tests/run_mltpl_expts.sh +++ b/tests/run_mltpl_expts.sh @@ -78,7 +78,7 @@ all_vals_CDATE_str=$(printf "\"%s\" " "${all_vals_CDATE[@]}") all_vals_fcst_len_hrs_str=$(printf "\"%s\" " "${all_vals_fcst_len_hrs[@]}") all_vals_quilting_str=$(printf "\"%s\" " "${all_vals_quilting[@]}") -print_info_msg "\ +print_info_msg " Creating and launching workflows for test suite: test_suite = \"$test_suite\" diff --git a/ush/compare_config_scripts.sh b/ush/compare_config_scripts.sh index ed04598d8..5f2341a1b 100644 --- a/ush/compare_config_scripts.sh +++ b/ush/compare_config_scripts.sh @@ -82,7 +82,7 @@ while read crnt_line; do if [ -z "${var_name}" ]; then - print_info_msg "\ + print_info_msg " Current line (crnt_line) of custom experiment/workflow configuration script (CUSTOM_CONFIG_FN) does not contain a variable name (var_name): CUSTOM_CONFIG_FN = \"${CUSTOM_CONFIG_FN}\" diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index dc4d50d5d..467bb9aeb 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -196,7 +196,7 @@ load_rocoto_cmd="module load rocoto/1.3.1" rocotorun_cmd="rocotorun -w ${WFLOW_XML_FN} -d ${WFLOW_DB_FN} -v 10" rocotostat_cmd="rocotostat -w ${WFLOW_XML_FN} -d ${WFLOW_DB_FN} -v 10" -print_info_msg "\ +print_info_msg " ======================================================================== ======================================================================== @@ -363,7 +363,7 @@ fi # that can be changed. if [ "${RUN_ENVIR}" != "nco" ]; then - print_info_msg "$VERBOSE" "\ + print_info_msg "$VERBOSE" " Copying fixed files from system directory to the workflow directory..." check_for_preexist_dir $FIXam "delete" diff --git a/ush/set_gridparams_GFDLgrid.sh b/ush/set_gridparams_GFDLgrid.sh index 94b8557c5..c500270e9 100644 --- a/ush/set_gridparams_GFDLgrid.sh +++ b/ush/set_gridparams_GFDLgrid.sh @@ -197,7 +197,7 @@ fi # #----------------------------------------------------------------------- # -print_info_msg "$VERBOSE" "\ +print_info_msg "$VERBOSE" " Original values of the halo width on the tile 6 supergrid and on the tile 7 grid are: nhw_T6SG = $nhw_T6SG @@ -207,7 +207,7 @@ nhw_T6SG=$(( $istart_rgnl_T6SG - $istart_rgnl_wide_halo_T6SG )) nhw_T6=$(( $nhw_T6SG/2 )) nhw_T7=$(( $nhw_T6*$refine_ratio )) -print_info_msg "$VERBOSE" "\ +print_info_msg "$VERBOSE" " Values of the halo width on the tile 6 supergrid and on the tile 7 grid AFTER adjustments are: nhw_T6SG = $nhw_T6SG @@ -240,7 +240,7 @@ ny_T6SG=$(( $ny_T6*2 )) prime_factors_nx_T7=$( factor $nx_T7 | sed -r -e 's/^[0-9]+: (.*)/\1/' ) prime_factors_ny_T7=$( factor $ny_T7 | sed -r -e 's/^[0-9]+: (.*)/\1/' ) -print_info_msg "$VERBOSE" "\ +print_info_msg "$VERBOSE" " The number of cells in the two horizontal directions (x and y) on the parent tile's (tile 6) grid and supergrid are: nx_T6 = $nx_T6 @@ -301,12 +301,12 @@ ny_wide_halo_T6SG=$(( $jend_rgnl_wide_halo_T6SG - $jstart_rgnl_wide_halo_T6SG + ny_wide_halo_T6=$(( $ny_wide_halo_T6SG/2 )) ny_wide_halo_T7=$(( $ny_wide_halo_T6*$refine_ratio )) -print_info_msg "$VERBOSE" "\ +print_info_msg "$VERBOSE" " nx_wide_halo_T7 = $nx_T7 \ (istart_rgnl_wide_halo_T6SG = $istart_rgnl_wide_halo_T6SG, \ iend_rgnl_wide_halo_T6SG = $iend_rgnl_wide_halo_T6SG)" -print_info_msg "$VERBOSE" "\ +print_info_msg "$VERBOSE" " ny_wide_halo_T7 = $ny_T7 \ (jstart_rgnl_wide_halo_T6SG = $jstart_rgnl_wide_halo_T6SG, \ jend_rgnl_wide_halo_T6SG = $jend_rgnl_wide_halo_T6SG)" 
diff --git a/ush/setup.sh b/ush/setup.sh index 884e001da..16b7dde5d 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -793,7 +793,7 @@ if [ "${RUN_ENVIR}" = "nco" ]; then [ "${RUN_TASK_MAKE_GRID}" = "FALSE" -a \ "${GRID_DIR}" != "$FIXsar" ]; then - msg="\ + msg=" When RUN_ENVIR is set to \"nco\", it is assumed that grid files already exist in the directory specified by FIXsar. Thus, the grid file genera- tion task must not be run (i.e. RUN_TASK_MAKE_GRID must be set to @@ -811,7 +811,6 @@ of FIXsar. Reset values are:" msg="$msg"" RUN_TASK_MAKE_GRID = \"${RUN_TASK_MAKE_GRID}\" GRID_DIR = \"${GRID_DIR}\" - " print_info_msg "$msg" @@ -822,7 +821,7 @@ of FIXsar. Reset values are:" [ "${RUN_TASK_MAKE_OROG}" = "FALSE" -a \ "${OROG_DIR}" != "$FIXsar" ]; then - msg="\ + msg=" When RUN_ENVIR is set to \"nco\", it is assumed that orography files al- ready exist in the directory specified by FIXsar. Thus, the orography file generation task must not be run (i.e. RUN_TASK_MAKE_OROG must be @@ -840,7 +839,6 @@ of FIXsar. Reset values are:" msg="$msg"" RUN_TASK_MAKE_OROG = \"${RUN_TASK_MAKE_OROG}\" OROG_DIR = \"${OROG_DIR}\" - " print_info_msg "$msg" @@ -851,7 +849,7 @@ of FIXsar. Reset values are:" [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "FALSE" -a \ "${SFC_CLIMO_DIR}" != "$FIXsar" ]; then - msg="\ + msg=" When RUN_ENVIR is set to \"nco\", it is assumed that surface climatology files already exist in the directory specified by FIXsar. Thus, the surface climatology file generation task must not be run (i.e. RUN_- @@ -868,8 +866,7 @@ contents of FIXsar. Reset values are:" msg="$msg"" RUN_TASK_MAKE_SFC_CLIMO = \"${RUN_TASK_MAKE_SFC_CLIMO}\" - SFC_CLIMO_DIR = \"${SFC_CLIMO_DIR}\"\n - + SFC_CLIMO_DIR = \"${SFC_CLIMO_DIR}\" " print_info_msg "$msg" @@ -1177,7 +1174,7 @@ by the number of MPI tasks in the y direction (layout_y): layout_y = $layout_y" fi -print_info_msg "$VERBOSE" "\ +print_info_msg "$VERBOSE" " The MPI task layout is: layout_x = $layout_x layout_y = $layout_y" @@ -1557,11 +1554,12 @@ while read crnt_line; do # else - print_info_msg "\ + print_info_msg " The variable specified by \"var_name\" is not set in the current envi- ronment: var_name = \"${var_name}\" Setting its value in the variable definitions file to an empty string." + var_value="\"\"" fi @@ -1577,8 +1575,7 @@ Setting its value in the variable definitions file to an empty string." # else - print_info_msg "\ - + print_info_msg " Could not extract a variable name from the current line in \"line_list\" (probably because it does not contain an equal sign with no spaces on either side): @@ -1814,8 +1811,7 @@ definitions file returned with a nonzero status." # #----------------------------------------------------------------------- # -print_info_msg "\ - +print_info_msg " ======================================================================== Setup script completed successfully!!! ========================================================================" From dd09e85bcedb85dbb1105a040f6c54bf36bbe431 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Mon, 28 Oct 2019 16:50:13 -0600 Subject: [PATCH 020/203] Delete unneeded local variable caller_name (it is now obtained automatically in function print_err_msg_exit, where it was previously needed). 
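For reference, the local variable is no longer needed because print_err_msg_exit now derives the caller information itself from bash's call stack (see the ush/print_msg.sh changes in PATCH 017 above). A minimal, hedged sketch of that lookup is shown below; the real function also performs argument checking and builds a fuller header/footer, and the sketch assumes it is called from a script or from another function:

  function print_err_msg_exit() {
    # Identify the caller from bash's call stack (index 1 = the caller).
    local caller_path=$( readlink -f "${BASH_SOURCE[1]}" )
    local caller_name="${FUNCNAME[1]}"  # "source" or "main" when called from a script
    # Print an error message in the style used throughout these scripts.
    printf "\nERROR:\n  From script/function: \"%s\"\n  In file: \"%s\"\n%s\nExiting with nonzero status.\n" \
      "${caller_name}" "${caller_path}" "${1:-}" 1>&2
    exit 1
  }
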
--- ush/check_var_valid_value.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/ush/check_var_valid_value.sh b/ush/check_var_valid_value.sh index 7a3768c90..b00f0e911 100755 --- a/ush/check_var_valid_value.sh +++ b/ush/check_var_valid_value.sh @@ -91,7 +91,6 @@ The value specified in ${var_name} is not supported: #----------------------------------------------------------------------- # iselementof "${var_value}" valid_var_values || { \ - caller_name=$( basename "${BASH_SOURCE[1]}" ) valid_var_values_str=$(printf "\"%s\" " "${valid_var_values[@]}"); print_err_msg_exit "\ ${err_msg} From 894a0dcd1eaeb611b15dd89085ee11a8becd9e1f Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Mon, 28 Oct 2019 16:53:49 -0600 Subject: [PATCH 021/203] Clean up error and informational messages. --- ush/check_for_preexist_dir.sh | 29 +++++++++++++++---- ush/filesys_cmds_vrfy.sh | 54 ++++++++++++++++++++++++++++------- 2 files changed, 66 insertions(+), 17 deletions(-) diff --git a/ush/check_for_preexist_dir.sh b/ush/check_for_preexist_dir.sh index 37d611404..b0f3f3b56 100644 --- a/ush/check_for_preexist_dir.sh +++ b/ush/check_for_preexist_dir.sh @@ -17,6 +17,18 @@ function check_for_preexist_dir() { #----------------------------------------------------------------------- # { save_shell_opts; set -u +x; } > /dev/null 2>&1 +#----------------------------------------------------------------------- +# +# Get the name of this function as well as information about the calling +# script or function. +# +#----------------------------------------------------------------------- +# + local crnt_func="${FUNCNAME[0]}" + local caller_path=$( readlink -f "${BASH_SOURCE[1]}" ) + local caller_filename=$( basename "${caller_path}" ) + local caller_dir=$( dirname "${caller_path}" ) + local caller_name="${FUNCNAME[1]}" # #----------------------------------------------------------------------- # @@ -25,11 +37,15 @@ function check_for_preexist_dir() { #----------------------------------------------------------------------- # if [ "$#" -ne 2 ]; then - print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": Incorrect number of arguments specified. + print_err_msg_exit " +Incorrect number of arguments specified: + + script/function name = \"${crnt_func}\" + number of arguments specified = $# + Usage: - ${FUNCNAME[0]} dir preexisting_dir_method + ${crnt_func} dir preexisting_dir_method where the arguments are defined as follows: @@ -38,7 +54,8 @@ where the arguments are defined as follows: preexisting_dir_method: String specifying the action to take if a preexisting version of dir - is found. Valid values are \"delete\", \"rename\", and \"quit\"." + is found. Valid values are \"delete\", \"rename\", and \"quit\". +" 1>@2 fi # #----------------------------------------------------------------------- @@ -93,8 +110,8 @@ where the arguments are defined as follows: old_dir="${dir}_old${old_indx}" done - print_info_msg "\ -Function \"${FUNCNAME[0]}\": Directory already exists: + print_info_msg $VERBOSE" " +Directory already exists: dir = \"$dir\" Moving (renaming) preexisting directory to: old_dir = \"$old_dir\"" diff --git a/ush/filesys_cmds_vrfy.sh b/ush/filesys_cmds_vrfy.sh index d6988fb7b..a938251de 100644 --- a/ush/filesys_cmds_vrfy.sh +++ b/ush/filesys_cmds_vrfy.sh @@ -22,19 +22,44 @@ function filesys_cmd_vrfy() { # #----------------------------------------------------------------------- # +# Get the name of this function as well as information about the calling +# script or function. 
+# +#----------------------------------------------------------------------- +# + local crnt_func="${FUNCNAME[0]}" +# Note: +# Below, the index into BASH_SOURCE and FUNCNAME is 2 (not 1 as is usu- +# ally the case) because this function is called by functions such as +# cp_vrfy, mv_vrfy, rm_vrfy, ln_vrfy, mkdir_vrfy, and cd_vrfy, but these +# are just wrappers, and in the error and informational messages we are +# really interested in the scripts/functions that call these wrappers. + local caller_path=$( readlink -f "${BASH_SOURCE[2]}" ) + local caller_filename=$( basename "${caller_path}" ) + local caller_dir=$( dirname "${caller_path}" ) + local caller_name="${FUNCNAME[2]}" +# +#----------------------------------------------------------------------- +# # Check that at least one argument is supplied. # #----------------------------------------------------------------------- # - if [ "$#" -lt 1 ]; then + if [ "$#" -lt 2 ]; then - print_err_msg_exit "\ -From function \"${FUNCNAME[0]}\": At least one argument must be specified: - number of arguments = \$# = $# -Usage is: - ${FUNCNAME[0]} cmd args_to_cmd -where \"cmd\" is the command to execute and \"args_to_cmd\" are the options and -arguments to pass to that command." + print_err_msg_exit " +At least two arguments must be specified: + + script/function name = \"${crnt_func}\" + number of arguments specified = $# + +Usage: + + ${crnt_func} cmd args_to_cmd + +where \"cmd\" is the command to execute and \"args_to_cmd\" are the options +and arguments to pass to that command. +" fi # @@ -85,7 +110,10 @@ arguments to pass to that command." # if [ $exit_code -ne 0 ]; then print_err_msg_exit "\ -From function \"${FUNCNAME[0]}\": \"$cmd\" operation failed: +Call to function \"${cmd}_vrfy\" failed. This function was called: + From script/function: \"${caller_name}\" (This gets set to \"source\" for a script, or to \"main\" for the top-level script.) + In file: \"${caller_path}\" +Error message from \"${cmd}_vrfy\" function's \"$cmd\" operation: $output" fi # @@ -112,8 +140,12 @@ $output" #----------------------------------------------------------------------- # if [ -n "$output" ]; then - print_info_msg "\ -From function \"${FUNCNAME[0]}\": Message from \"$cmd\" operation: + print_info_msg " +\"${cmd}_vrfy\" operation returned with a message. This command was +issued: + From script/function: \"${caller_name}\" (This gets set to \"source\" for a script, or to \"main\" for the top-level script.) + In file: \"${caller_path}\" +Message from \"${cmd}_vrfy\" function's \"$cmd\" operation: $output" fi # From 530e72ba2da6202770ff78e21f331efd2843870b Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Mon, 28 Oct 2019 17:21:42 -0600 Subject: [PATCH 022/203] Remove "if [ "$VERBOSE" = "TRUE" ]; then ..." statement from around calls to print_info_msg since now that function can accept VERBOSE as its first argument. 
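The changes below rely on print_info_msg accepting an optional verbosity flag as its first argument: called with one argument, the message is always printed; called with two, it is printed only when the first argument is "TRUE". A rough sketch of that dispatch, given here only as an illustration since the actual function is defined in the workflow's utility scripts:

    if [ "$#" -eq 2 ]; then
      if [ "$1" = "TRUE" ]; then
        printf "%s\n" "$2"
      fi
    else
      printf "%s\n" "$1"
    fi

With this in place, call sites collapse from

    if [ "$VERBOSE" = "TRUE" ]; then
      print_info_msg "$msg"
    fi

to the single call

    print_info_msg "$VERBOSE" "$msg"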
--- scripts/exregional_get_extrn_files.sh | 26 +++++++++++++---------- scripts/exregional_make_grid.sh | 28 +++++++++++++------------ scripts/exregional_make_ic_lbc0.sh | 26 +++++++++++++---------- scripts/exregional_make_lbc1_to_lbcn.sh | 26 +++++++++++++---------- scripts/exregional_make_orog.sh | 28 +++++++++++++------------ scripts/exregional_make_sfc_climo.sh | 26 +++++++++++++---------- scripts/exregional_run_fv3.sh | 26 +++++++++++++---------- scripts/exregional_run_post.sh | 26 +++++++++++++---------- ush/link_fix.sh | 4 +--- 9 files changed, 121 insertions(+), 95 deletions(-) diff --git a/scripts/exregional_get_extrn_files.sh b/scripts/exregional_get_extrn_files.sh index cf4d7337a..191333450 100755 --- a/scripts/exregional_get_extrn_files.sh +++ b/scripts/exregional_get_extrn_files.sh @@ -58,20 +58,24 @@ valid_args=( \ "EXTRN_MDL_ARCVREL_DIR" \ ) process_args valid_args "$@" - -# If VERBOSE is set to TRUE, print out what each valid argument has been -# set to. -if [ "$VERBOSE" = "TRUE" ]; then - num_valid_args="${#valid_args[@]}" - print_info_msg " +# +#----------------------------------------------------------------------- +# +# If VERBOSE is set to "TRUE", print out values of arguments passed to +# this script. +# +#----------------------------------------------------------------------- +# +msg=" The arguments to script/function \"${script_name}\" have been set as follows: " - for (( i=0; i<${num_valid_args}; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - printf " $line\n" - done -fi +num_valid_args="${#valid_args[@]}" +for (( i=0; i<${num_valid_args}; i++ )); do + line=$( declare -p "${valid_args[$i]}" ) + msg="$msg"$( printf " $line\n" ) +done +print_info_msg "$VERBOSE" "$msg" # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index 770d5ed38..768e7852b 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -57,22 +57,24 @@ This is the ex-script for the task that generates grid files. # valid_args=( "WORKDIR_LOCAL" ) process_args valid_args "$@" - -# If VERBOSE is set to TRUE, print out what each valid argument has been -# set to. -if [ "$VERBOSE" = "TRUE" ]; then - num_valid_args="${#valid_args[@]}" - print_info_msg " +# +#----------------------------------------------------------------------- +# +# If VERBOSE is set to "TRUE", print out values of arguments passed to +# this script. +# +#----------------------------------------------------------------------- +# +msg=" The arguments to script/function \"${script_name}\" have been set as follows: " - for (( i=0; i<${num_valid_args}; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - printf " $line\n" - done -fi - - +num_valid_args="${#valid_args[@]}" +for (( i=0; i<${num_valid_args}; i++ )); do + line=$( declare -p "${valid_args[$i]}" ) + msg="$msg"$( printf " $line\n" ) +done +print_info_msg "$VERBOSE" "$msg" # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_make_ic_lbc0.sh b/scripts/exregional_make_ic_lbc0.sh index 387fd1ad9..1682a8613 100755 --- a/scripts/exregional_make_ic_lbc0.sh +++ b/scripts/exregional_make_ic_lbc0.sh @@ -57,20 +57,24 @@ valid_args=( \ "ICS_DIR" \ ) process_args valid_args "$@" - -# If VERBOSE is set to TRUE, print out what each valid argument has been -# set to. 
-if [ "$VERBOSE" = "TRUE" ]; then - num_valid_args="${#valid_args[@]}" - print_info_msg " +# +#----------------------------------------------------------------------- +# +# If VERBOSE is set to "TRUE", print out values of arguments passed to +# this script. +# +#----------------------------------------------------------------------- +# +msg=" The arguments to script/function \"${script_name}\" have been set as follows: " - for (( i=0; i<${num_valid_args}; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - printf " $line\n" - done -fi +num_valid_args="${#valid_args[@]}" +for (( i=0; i<${num_valid_args}; i++ )); do + line=$( declare -p "${valid_args[$i]}" ) + msg="$msg"$( printf " $line\n" ) +done +print_info_msg "$VERBOSE" "$msg" # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_make_lbc1_to_lbcn.sh b/scripts/exregional_make_lbc1_to_lbcn.sh index 057e362b4..1e380eb48 100755 --- a/scripts/exregional_make_lbc1_to_lbcn.sh +++ b/scripts/exregional_make_lbc1_to_lbcn.sh @@ -58,20 +58,24 @@ valid_args=( \ "EXTRN_MDL_LBC_UPDATE_FHRS" \ ) process_args valid_args "$@" - -# If VERBOSE is set to TRUE, print out what each valid argument has been -# set to. -if [ "$VERBOSE" = "TRUE" ]; then - num_valid_args="${#valid_args[@]}" - print_info_msg " +# +#----------------------------------------------------------------------- +# +# If VERBOSE is set to "TRUE", print out values of arguments passed to +# this script. +# +#----------------------------------------------------------------------- +# +msg=" The arguments to script/function \"${script_name}\" have been set as follows: " - for (( i=0; i<${num_valid_args}; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - printf " $line\n" - done -fi +num_valid_args="${#valid_args[@]}" +for (( i=0; i<${num_valid_args}; i++ )); do + line=$( declare -p "${valid_args[$i]}" ) + msg="$msg"$( printf " $line\n" ) +done +print_info_msg "$VERBOSE" "$msg" # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index c6154046b..71b31d507 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -57,22 +57,24 @@ This is the ex-script for the task that generates orography files. # valid_args=( "WORKDIR_LOCAL" ) process_args valid_args "$@" - -# If VERBOSE is set to TRUE, print out what each valid argument has been -# set to. -if [ "$VERBOSE" = "TRUE" ]; then - num_valid_args="${#valid_args[@]}" - print_info_msg " +# +#----------------------------------------------------------------------- +# +# If VERBOSE is set to "TRUE", print out values of arguments passed to +# this script. +# +#----------------------------------------------------------------------- +# +msg=" The arguments to script/function \"${script_name}\" have been set as follows: " - for (( i=0; i<${num_valid_args}; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - printf " $line\n" - done -fi - - +num_valid_args="${#valid_args[@]}" +for (( i=0; i<${num_valid_args}; i++ )); do + line=$( declare -p "${valid_args[$i]}" ) + msg="$msg"$( printf " $line\n" ) +done +print_info_msg "$VERBOSE" "$msg" # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_make_sfc_climo.sh b/scripts/exregional_make_sfc_climo.sh index 066517531..e0853fcc0 100755 --- a/scripts/exregional_make_sfc_climo.sh +++ b/scripts/exregional_make_sfc_climo.sh @@ -49,20 +49,24 @@ climatology. 
# valid_args=( "workdir" ) process_args valid_args "$@" - -# If VERBOSE is set to TRUE, print out what each valid argument has been -# set to. -if [ "$VERBOSE" = "TRUE" ]; then - num_valid_args="${#valid_args[@]}" - print_info_msg " +# +#----------------------------------------------------------------------- +# +# If VERBOSE is set to "TRUE", print out values of arguments passed to +# this script. +# +#----------------------------------------------------------------------- +# +msg=" The arguments to script/function \"${script_name}\" have been set as follows: " - for (( i=0; i<${num_valid_args}; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - printf " $line\n" - done -fi +num_valid_args="${#valid_args[@]}" +for (( i=0; i<${num_valid_args}; i++ )); do + line=$( declare -p "${valid_args[$i]}" ) + msg="$msg"$( printf " $line\n" ) +done +print_info_msg "$VERBOSE" "$msg" # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_fv3.sh b/scripts/exregional_run_fv3.sh index cba0b3b0b..7d8856172 100755 --- a/scripts/exregional_run_fv3.sh +++ b/scripts/exregional_run_fv3.sh @@ -49,20 +49,24 @@ specified cycle. # valid_args=( "CYCLE_DIR" ) process_args valid_args "$@" - -# If VERBOSE is set to TRUE, print out what each valid argument has been -# set to. -if [ "$VERBOSE" = "TRUE" ]; then - num_valid_args="${#valid_args[@]}" - print_info_msg " +# +#----------------------------------------------------------------------- +# +# If VERBOSE is set to "TRUE", print out values of arguments passed to +# this script. +# +#----------------------------------------------------------------------- +# +msg=" The arguments to script/function \"${script_name}\" have been set as follows: " - for (( i=0; i<${num_valid_args}; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - printf " $line\n" - done -fi +num_valid_args="${#valid_args[@]}" +for (( i=0; i<${num_valid_args}; i++ )); do + line=$( declare -p "${valid_args[$i]}" ) + msg="$msg"$( printf " $line\n" ) +done +print_info_msg "$VERBOSE" "$msg" # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_post.sh b/scripts/exregional_run_post.sh index 362b3228e..37b2405cd 100755 --- a/scripts/exregional_run_post.sh +++ b/scripts/exregional_run_post.sh @@ -49,20 +49,24 @@ the output files corresponding to a specified forecast hour. # valid_args=( "cycle_dir" "postprd_dir" "fhr_dir" "fhr" ) process_args valid_args "$@" - -# If VERBOSE is set to TRUE, print out what each valid argument has been -# set to. -if [ "$VERBOSE" = "TRUE" ]; then - num_valid_args="${#valid_args[@]}" - print_info_msg " +# +#----------------------------------------------------------------------- +# +# If VERBOSE is set to "TRUE", print out values of arguments passed to +# this script. 
+# +#----------------------------------------------------------------------- +# +msg=" The arguments to script/function \"${script_name}\" have been set as follows: " - for (( i=0; i<${num_valid_args}; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - printf " $line\n" - done -fi +num_valid_args="${#valid_args[@]}" +for (( i=0; i<${num_valid_args}; i++ )); do + line=$( declare -p "${valid_args[$i]}" ) + msg="$msg"$( printf " $line\n" ) +done +print_info_msg "$VERBOSE" "$msg" # #----------------------------------------------------------------------- # diff --git a/ush/link_fix.sh b/ush/link_fix.sh index 61d067f5c..4551a75e1 100755 --- a/ush/link_fix.sh +++ b/ush/link_fix.sh @@ -89,10 +89,8 @@ fi # #----------------------------------------------------------------------- # -if [ "$verbose" = "TRUE" ]; then - print_info_msg " +print_info_msg "$verbose" " Creating links in the FIXsar directory to the grid files..." -fi # #----------------------------------------------------------------------- # From ccb5e081d6b0d6d37f77b8fcc0398bf64165d77d Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Mon, 28 Oct 2019 17:23:50 -0600 Subject: [PATCH 023/203] Add files missed in last commit. --- ush/get_extrn_mdl_file_dir_info.sh | 30 ++++++++++++++++-------------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/ush/get_extrn_mdl_file_dir_info.sh b/ush/get_extrn_mdl_file_dir_info.sh index 5a9e0f644..ca3e743c7 100755 --- a/ush/get_extrn_mdl_file_dir_info.sh +++ b/ush/get_extrn_mdl_file_dir_info.sh @@ -60,22 +60,24 @@ function get_extrn_mdl_file_dir_info () { "varname_extrn_mdl_arcvrel_dir" \ ) process_args valid_args "$@" - -# If VERBOSE is set to TRUE, print out what each valid argument has been -# set to. - if [ "$VERBOSE" = "TRUE" ]; then - num_valid_args="${#valid_args[@]}" - print_info_msg " -The arguments to script/function \"${func_name}\" have been set as +# +#----------------------------------------------------------------------- +# +# If VERBOSE is set to "TRUE", print out values of arguments passed to +# this script. +# +#----------------------------------------------------------------------- +# + msg=" +The arguments to script/function \"${script_name}\" have been set as follows: " - for (( i=0; i<${num_valid_args}; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - printf " $line\n" - done - fi - - + num_valid_args="${#valid_args[@]}" + for (( i=0; i<${num_valid_args}; i++ )); do + line=$( declare -p "${valid_args[$i]}" ) + msg="$msg"$( printf " $line\n" ) + done + print_info_msg "$VERBOSE" "$msg" # #----------------------------------------------------------------------- # From 666825a4fdd257759c2b2c1e40d4a3be9608e2d8 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Mon, 28 Oct 2019 17:24:17 -0600 Subject: [PATCH 024/203] Bug fix. --- ush/check_for_preexist_dir.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/check_for_preexist_dir.sh b/ush/check_for_preexist_dir.sh index b0f3f3b56..f0a884c32 100644 --- a/ush/check_for_preexist_dir.sh +++ b/ush/check_for_preexist_dir.sh @@ -110,7 +110,7 @@ where the arguments are defined as follows: old_dir="${dir}_old${old_indx}" done - print_info_msg $VERBOSE" " + print_info_msg "$VERBOSE" " Directory already exists: dir = \"$dir\" Moving (renaming) preexisting directory to: From 60966550ed7f8f5fd5c4509e63e00622c87fbaa5 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Mon, 4 Nov 2019 15:20:17 -0700 Subject: [PATCH 025/203] Changes for clarity and ease-of-use, as listed below. 
(1) Rename variable SCRIPT_VAR_DEFNS_FP to GLOBAL_VAR_DEFNS_FP. (2) In each file, get name of current script's/function's file path, file name, and directory. These are useful to print out along with informational and error messages. (3) In the committed files, for clarity rename source_funcs.sh to source_util_funcs.sh. This renaming has not yet been committed to the repo (will be committed soon). (4) Improve informational and error messages. (5) In the committed files, assume we will use a function named print_input_args() (not yet defined) to print out what the arguments to a script or function get set to (by the process_args() function). This is for debugging purposes. (6) In scripts/exregional_make_orog.sh, loop over the orography filtering parameters that need to be interpolated to arbitrary resolution (from values specified at a limited set of resolutions); this is instead of interpolating each parameter individually. --- jobs/JREGIONAL_GET_EXTRN_FILES | 30 ++- jobs/JREGIONAL_MAKE_GRID | 30 ++- jobs/JREGIONAL_MAKE_IC_LBC0 | 30 ++- jobs/JREGIONAL_MAKE_LBC1_TO_LBCN | 30 ++- jobs/JREGIONAL_MAKE_OROG | 30 ++- jobs/JREGIONAL_MAKE_SFC_CLIMO | 30 ++- jobs/JREGIONAL_RUN_FV3 | 30 ++- jobs/JREGIONAL_RUN_POST | 30 ++- scripts/.exregional_make_orog.sh.swp | Bin 0 -> 40960 bytes scripts/exregional_get_extrn_files.sh | 58 +++-- scripts/exregional_make_grid.sh | 68 +++--- scripts/exregional_make_ic_lbc0.sh | 46 ++-- scripts/exregional_make_lbc1_to_lbcn.sh | 46 ++-- scripts/exregional_make_orog.sh | 79 +++--- scripts/exregional_make_sfc_climo.sh | 46 ++-- scripts/exregional_run_fv3.sh | 44 ++-- scripts/exregional_run_post.sh | 44 ++-- tests/run_experiments.sh | 310 ++++++++++++++++++++++++ tests/run_mltpl_expts.sh | 2 +- tests/run_one_expt.sh | 35 ++- 20 files changed, 712 insertions(+), 306 deletions(-) create mode 100644 scripts/.exregional_make_orog.sh.swp create mode 100755 tests/run_experiments.sh diff --git a/jobs/JREGIONAL_GET_EXTRN_FILES b/jobs/JREGIONAL_GET_EXTRN_FILES index 1db51edad..0db028979 100755 --- a/jobs/JREGIONAL_GET_EXTRN_FILES +++ b/jobs/JREGIONAL_GET_EXTRN_FILES @@ -27,8 +27,8 @@ # #----------------------------------------------------------------------- # -. ${SCRIPT_VAR_DEFNS_FP} -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -50,17 +50,26 @@ # #----------------------------------------------------------------------- # -# Get the name of this script as well as the directory in which it is -# located. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. 
# #----------------------------------------------------------------------- # -script_path=$( readlink -f "${BASH_SOURCE[0]}" ) -script_name=$( basename "${script_path}" ) -script_dir=$( dirname "${script_path}" ) print_info_msg " ======================================================================== -Entering script: \"${script_path}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" This is the J-job script for the task that copies/fetches to a local di- rectory (either from disk or HPSS) the external model files from which @@ -236,7 +245,7 @@ $SCRIPTSDIR/exregional_get_extrn_files.sh \ EXTRN_MDL_ARCV_FMT="${EXTRN_MDL_ARCV_FMT}" \ EXTRN_MDL_ARCVREL_DIR="${EXTRN_MDL_ARCVREL_DIR}" || \ print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${script_name}\" failed." +Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." # #----------------------------------------------------------------------- # @@ -246,7 +255,8 @@ Call to ex-script corresponding to J-job \"${script_name}\" failed." # print_info_msg " ======================================================================== -Exiting script: \"${script_path}\" +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_GRID b/jobs/JREGIONAL_MAKE_GRID index a9041135d..1b2a982b9 100755 --- a/jobs/JREGIONAL_MAKE_GRID +++ b/jobs/JREGIONAL_MAKE_GRID @@ -106,8 +106,8 @@ # #----------------------------------------------------------------------- # -. ${SCRIPT_VAR_DEFNS_FP} -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -120,17 +120,26 @@ # #----------------------------------------------------------------------- # -# Get the name of this script as well as the directory in which it is -# located. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. # #----------------------------------------------------------------------- # -script_path=$( readlink -f "${BASH_SOURCE[0]}" ) -script_name=$( basename "${script_path}" ) -script_dir=$( dirname "${script_path}" ) print_info_msg " ======================================================================== -Entering script: \"${script_path}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" This is the J-job script for the task that generates grid files. ========================================================================" @@ -154,7 +163,7 @@ export stretch_fac ${SCRIPTSDIR}/exregional_make_grid.sh \ WORKDIR_LOCAL="ABCD" || \ print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${script_name}\" failed." +Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." 
# #----------------------------------------------------------------------- # @@ -188,7 +197,8 @@ touch "$LOGDIR/make_grid_task_complete.txt" # print_info_msg " ======================================================================== -Exiting script: \"${script_path}\" +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_IC_LBC0 b/jobs/JREGIONAL_MAKE_IC_LBC0 index 92786dc65..465b7cf49 100755 --- a/jobs/JREGIONAL_MAKE_IC_LBC0 +++ b/jobs/JREGIONAL_MAKE_IC_LBC0 @@ -8,8 +8,8 @@ # #----------------------------------------------------------------------- # -. ${SCRIPT_VAR_DEFNS_FP} -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -31,17 +31,26 @@ # #----------------------------------------------------------------------- # -# Get the name of this script as well as the directory in which it is -# located. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. # #----------------------------------------------------------------------- # -script_path=$( readlink -f "${BASH_SOURCE[0]}" ) -script_name=$( basename "${script_path}" ) -script_dir=$( dirname "${script_path}" ) print_info_msg " ======================================================================== -Entering script: \"${script_path}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" This is the J-job script for the task that generates initial condition (IC), surface, and zeroth-hour lateral boundary condition (LBC0) files @@ -215,7 +224,7 @@ $SCRIPTSDIR/exregional_make_ic_lbc0.sh \ WGRIB2_DIR="${WGRIB2_DIR}" \ APRUN="${APRUN}" || \ print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${script_name}\" failed." +Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." # #----------------------------------------------------------------------- # @@ -225,7 +234,8 @@ Call to ex-script corresponding to J-job \"${script_name}\" failed." # print_info_msg " ======================================================================== -Exiting script: \"${script_path}\" +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN b/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN index 3761418f1..3348626a3 100755 --- a/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN +++ b/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN @@ -8,8 +8,8 @@ # #----------------------------------------------------------------------- # -. ${SCRIPT_VAR_DEFNS_FP} -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. 
$USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -31,17 +31,26 @@ # #----------------------------------------------------------------------- # -# Get the name of this script as well as the directory in which it is -# located. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. # #----------------------------------------------------------------------- # -script_path=$( readlink -f "${BASH_SOURCE[0]}" ) -script_name=$( basename "${script_path}" ) -script_dir=$( dirname "${script_path}" ) print_info_msg " ======================================================================== -Entering script: \"${script_path}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" This is the J-job script for the task that generates lateral boundary condition (LBC) files (in NetCDF format) for all LBC update hours (ex- @@ -218,7 +227,7 @@ $SCRIPTSDIR/exregional_make_lbc1_to_lbcn.sh \ LBCS_DIR="${LBCS_DIR}" \ EXTRN_MDL_LBC_UPDATE_FHRS="${EXTRN_MDL_LBC_UPDATE_FHRS_str}" || \ print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${script_name}\" failed." +Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." # #----------------------------------------------------------------------- # @@ -228,7 +237,8 @@ Call to ex-script corresponding to J-job \"${script_name}\" failed." # print_info_msg " ======================================================================== -Exiting script: \"${script_path}\" +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_OROG b/jobs/JREGIONAL_MAKE_OROG index 6f799c15e..d1ccea3ad 100755 --- a/jobs/JREGIONAL_MAKE_OROG +++ b/jobs/JREGIONAL_MAKE_OROG @@ -8,8 +8,8 @@ # #----------------------------------------------------------------------- # -. ${SCRIPT_VAR_DEFNS_FP} -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -22,17 +22,26 @@ # #----------------------------------------------------------------------- # -# Get the name of this script as well as the directory in which it is -# located. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. 
# #----------------------------------------------------------------------- # -script_path=$( readlink -f "${BASH_SOURCE[0]}" ) -script_name=$( basename "${script_path}" ) -script_dir=$( dirname "${script_path}" ) print_info_msg " ======================================================================== -Entering script: \"${script_path}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" This is the J-job script for the task that generates orography files. ========================================================================" @@ -47,7 +56,7 @@ This is the J-job script for the task that generates orography files. ${SCRIPTSDIR}/exregional_make_orog.sh \ WORKDIR_LOCAL="ABCD" || \ print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${script_name}\" failed." +Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." # #----------------------------------------------------------------------- # @@ -81,7 +90,8 @@ touch "$LOGDIR/make_orog_task_complete.txt" # print_info_msg " ======================================================================== -Exiting script: \"${script_path}\" +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_MAKE_SFC_CLIMO b/jobs/JREGIONAL_MAKE_SFC_CLIMO index f2e630e34..23771df22 100755 --- a/jobs/JREGIONAL_MAKE_SFC_CLIMO +++ b/jobs/JREGIONAL_MAKE_SFC_CLIMO @@ -8,8 +8,8 @@ # #----------------------------------------------------------------------- # -. ${SCRIPT_VAR_DEFNS_FP} -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -22,17 +22,26 @@ # #----------------------------------------------------------------------- # -# Get the name of this script as well as the directory in which it is -# located. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. # #----------------------------------------------------------------------- # -script_path=$( readlink -f "${BASH_SOURCE[0]}" ) -script_name=$( basename "${script_path}" ) -script_dir=$( dirname "${script_path}" ) print_info_msg " ======================================================================== -Entering script: \"${script_path}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" This is the J-job script for the task that generates surface fields from climatology. @@ -69,7 +78,7 @@ mkdir_vrfy $workdir ${SCRIPTSDIR}/exregional_make_sfc_climo.sh \ workdir="$workdir" || \ print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${script_name}\" failed." +Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." 
# #----------------------------------------------------------------------- # @@ -103,7 +112,8 @@ touch "$LOGDIR/make_sfc_climo_task_complete.txt" # print_info_msg " ======================================================================== -Exiting script: \"${script_path}\" +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_FV3 b/jobs/JREGIONAL_RUN_FV3 index 9cd8234d5..38f75da12 100755 --- a/jobs/JREGIONAL_RUN_FV3 +++ b/jobs/JREGIONAL_RUN_FV3 @@ -18,8 +18,8 @@ # #----------------------------------------------------------------------- # -. ${SCRIPT_VAR_DEFNS_FP} -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -32,17 +32,26 @@ # #----------------------------------------------------------------------- # -# Get the name of this script as well as the directory in which it is -# located. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. # #----------------------------------------------------------------------- # -script_path=$( readlink -f "${BASH_SOURCE[0]}" ) -script_name=$( basename "${script_path}" ) -script_dir=$( dirname "${script_path}" ) print_info_msg " ======================================================================== -Entering script: \"${script_path}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" This is the J-job script for the task that runs a forecast with FV3 for the specified cycle. @@ -67,7 +76,7 @@ mkdir_vrfy -p ${CYCLE_DIR}/RESTART $SCRIPTSDIR/exregional_run_fv3.sh \ CYCLE_DIR="${CYCLE_DIR}" || \ print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${script_name}\" failed." +Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." # #----------------------------------------------------------------------- # @@ -77,7 +86,8 @@ Call to ex-script corresponding to J-job \"${script_name}\" failed." # print_info_msg " ======================================================================== -Exiting script: \"${script_path}\" +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/jobs/JREGIONAL_RUN_POST b/jobs/JREGIONAL_RUN_POST index 3144cb8f7..12fa5344c 100755 --- a/jobs/JREGIONAL_RUN_POST +++ b/jobs/JREGIONAL_RUN_POST @@ -17,8 +17,8 @@ # #----------------------------------------------------------------------- # -. ${SCRIPT_VAR_DEFNS_FP} -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -31,17 +31,26 @@ # #----------------------------------------------------------------------- # -# Get the name of this script as well as the directory in which it is -# located. 
+# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. # #----------------------------------------------------------------------- # -script_path=$( readlink -f "${BASH_SOURCE[0]}" ) -script_name=$( basename "${script_path}" ) -script_dir=$( dirname "${script_path}" ) print_info_msg " ======================================================================== -Entering script: \"${script_path}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" This is the J-job script for the task that runs the post-processor (UPP) on the output files corresponding to a specified forecast hour. @@ -91,7 +100,7 @@ $SCRIPTSDIR/exregional_run_post.sh \ fhr_dir="${fhr_dir}" \ fhr="${fhr}" || \ print_err_msg_exit "\ -Call to ex-script corresponding to J-job \"${script_name}\" failed." +Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." # #----------------------------------------------------------------------- # @@ -101,7 +110,8 @@ Call to ex-script corresponding to J-job \"${script_name}\" failed." # print_info_msg " ======================================================================== -Exiting script: \"${script_path}\" +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/scripts/.exregional_make_orog.sh.swp b/scripts/.exregional_make_orog.sh.swp new file mode 100644 index 0000000000000000000000000000000000000000..a8edb41ea8a69dea7b17d6aa12f089c6b7aad684 GIT binary patch literal 40960 zcmeI53vgstdFR_9*w|nokOBt+xzkdN1X8!8(Y!62K_kt`o{^>`jcu4Q?W^wFQqT0u zeY^EwP&qh2YzV=aY`}qKp@>bgA+gD3aSE1K1(H+(F$r;AfI|_oMc6VJaI$&u*x&!0 zdvD)vJv?K#oeJAs`m68rJic?zcfQv-y)bm`)G=pIYBa&;*@?sj@BZWspP0K~{-S@l zFmc$cxwTyC8n5o<3vO9YZ2DQsdqcyU-f6v<_Oms&o?RMAzv9?uC%MCU?sHDGKhQnzQlLwLE(N+2=u)6dfi4BQ6zEc*OMxF@3e>Al zOWaKTek`ctnc(|^w(pCA^S1@x#kTuT3(iZy_xIcG|9Eiztl;}}kU^Nw#liKl;Crp@ z{!@eNHwNDyYrB78aQ$@f{i2{kVfs%Au7~Hp&~`t#-TmrPpi6-+1-caIQlLwLE(N+2 z=u)6dfi4BQ6zEdmN0I_=E|D0dX8$C`82^8$|38AF{$22G@ay1ia3i=LTnjD*PY2(5 zW+L$g@Mqvw@J4VI*adciZ(N#4d>z~mJ_lY8ZUOtji@{HWFFb?y!F$1bzzHx1E(V`` zdLr>(z}vyq;0U-3eCv`#;t#;5!C9~dt^rdZ0e*l%;UVyA;OD?e@G9_p@HvbXe*$g+ zKL_@MM=)^wCU`%1AGjV|3-*8)fXl%B7#ThX-U!CPF0d1P1LMTk!DqoAgV%wZ!EW#n zhK~oqm%!V>+rT{73-*A^!1pmud=EST?gJkNzYJEuGMENO!BfBm;N#dX*1&F{c6%f= zYMg5i?QxR+lDq6BoxVcKOF1>KR9W_%`jY1?xy6c;uT@I@PS&f{-9p(}tQB&OTh2KO zw5Yk&rL{y4ulQ9jTQ4ko#kG{<94z{k0mt|1CXupR@|;TE9DA!?wo!K%ik_1%@UP)} zIj3GB37z@_=AkI%@S?KIS86(~d$mG&anr|0WG=|kq^*4YFykOxguIleNo~<96Vg%- zlT*bKJ&B&d&U{F!k@qu=dZEaFe<@d}jU{`}DyHSc{M6C8%%Q2-^)xqJ(HpNGS!;rtrm#op<+&~E*u}9otU_OYHogN+Eg!dY-0Y% z%%Szpwnq=$rr^r zR;%>q5)V1K!eD1>TV}*Co&iw3&3DGm4tm8~q3^kx7 zT!i7aQYaRk1&=6&-vm1)?~P2C!zpzl;&-?9^OE%=?SWVKewdcNOINe1i7lZhr}$NHS)bu+Wq z(3vtvXT}d6O*;LZEl7{!)wBJE+Dny2G3R6}WuHMMM&}v|av;_>Xh;N63blv}G73v{ z@5SM>}WsVu3 z00+~&p@yUwhAzs_G>|EkZ&>eUNilD9mQrIK`Xm$cVjKLaYToLg?dCy681=4yhFf&@ zU`A2D3@1lBg{2Th$_#PK1J?FC`kWG@&(=gr!)8w<6BFfNezRHdT>PPYd7WoERI9N> 
z$RP5SOvzt#lAWb~k8@lM$YBol-9^uVAr`WBwq>xwbh%jxIYwu*juCaNw@+j@B1nwf z`Rcj`X82&ftmhV7-+QzNt9lS-pDPy1XPm*jChJ!moI8@4n>jH%K5=sB29t0{#fToq zjxqH}<=Pb$kTXjKAa+iL4_#Y*6vW;3zv$-juLkZ!LK5}&#-n>|>EVxD3 zVa;9Zhuh7I?TS;Dy_N9M&K5X@WOZWhh>cMM54n-XI^pe zXy)33vzbE^lhbpV$>SZDJT>q%X$J(-fRj*ZG@&?k>`;mZWi-#mp6P7sC3<$G7YgMx z+;1r%{r~UK<9`TD{C}WB>ih?~*8OxT(4|0^0$mDpDbS@rmjYc1bScoKK$ikt3Un#Z zrNDoB3P}I|41V?(;fI(0AFBKx<=}(hH^CBE1TO^l(l{0Dpw9{ykt0^nuI41Ni9g1-}h`3w#jV3El~A1}_EA z0w2L|{~mAz41mAFNB>cf2UFk?eDi+=ZUO6H6#N+YE&THT9sCBk8{7rn4_*bX2hRq7 zh+qB_;A7x*;AW5qIdBkM1^zF-_=2s|f>R&XY+pBTsLDU-D=uyHxd(wH@#S~4&cQyPXsl)|l(NEn=8 z+&TDW=QpUU5>oL3 z@3Psq!o*b=R}yt!!M-gT=y?&%wtE}xCDt&ef~1RUJ1j3HJv=*g zD5KKMlTk~{wD_UStxUdHuVt`LMwMwHE=84L(pWByyVtZkeM=N+HUc(K9&(gos`u>p z?8Mx9N|o4?Dre0tl_Xy&7Aq?<9GjGEfn%M%wWzl9o7fH0mNK)_JT#L$p-U^7`8`Ch z6pMbpwcndDWTILOeX|uz_I|a3e~JuSx+n=9wm~A^Ho3R=h+xrEL(%1OCg&ALGAo{YW^80Mx?JTvlXtUY z!>J)^T&~oOD-18aycRG5IWz+2MYgfv zt;jRV^;|_G1jm2K@67w^2cgje*vC#F(fkvM~sPA|R98T4*)b`I?sac(%E<}aUP zj>EendbNktDs00bPikDrgI7vkWT*=1?6XUtw@Hm+WkL0?>>CW6(Hd}Fj!*0IxMB6gwzn4kmP=0BSn$|v!?#cr=V{Y(tZ z!7h_B3`1pI^lAgnD^t#OZg~+-Qz|Sj!JU|J&xkwu6(pY$5<#Kt`RQz>RE4+KeDNg1 zRfzOMj;IrgSaE8|HAo94WyU>DS9lH?1-UR+&v3c6Qt&-1#AGWui50N|Dg?y%;b3l- z(J{dU?>my^I5AU7Fx@FQj>!^1p`8kkp&4T3*rp|-&Zl;WPLhe}@G*9R=>2W-W*Sni zd?jJ3QGeo^f>@$+Vpx)i&88EYqf#VZQc*?G)6Cf}jhfq^7MZZyIj;n7KFWcwVI+{G6W>no#d&6=FrW@hkX8hjGRQdQOe-6^~ZY8 z_VAsN$nBqe`3>vDe+6d>|5E*(WJEBIV@($uPbM;qkRgO`;!dJ|>DLJktl6;DD6~R7 zdK$r4^<#3Sj=H6~YHp>%!COZ7ML94eV<1_Ig;Z)C^k`|p#BbY_vhzs}r-qZQ-$T2& z!1bZv`o{16c>j)4OF{>ejz1Ntynl&{X1wr2lL0zt2lYMgPB%FMaMq*RO#Q z@DRHG=fR(XHvsAXi(nk|gFf(8bpHPe*1#$l0Xu+f0S}@3e+_&L{35ssoCMDS7l0pP z2lz1fT~Gpj;A_|cz6$OH?*Oj@*MS}2Yv}&>fzUQ^JI7^E1U+B}_-pI|cYtfakAv@E z4|ouK3A`P=4crcX2~2|TVHda${7UzR)%B5l^Uk=!`Juy7mA&N=SRs@CxH=}dKAqKC(h?w=7!DG=g`xCA+ zIw-}fsVvij>y28Ou57fUa;5wluU0`8VgP8OWEP%h9Eh_vRr84RpR(%|P9=FET1Egz z48ty+K0EPxx&C$yQQ2+5`Fjr@pFJ_{^aeJAbQpcFJl35nB%>Gttvj!Mt#b<74pC;& zHO@F*Ma2|V%WL7m(vUgQ#K!;@84cwrx(VoPxnRpBnaFoyf2YbkY@`46u43;a+OAVd zqdRN6%S_I#yGDubJ7(l@DV`L`mIUoOY_YMVwXB%LQd0U`A5GFV!`Dpn z4Mw((R4SEBoJ!77d~{_h(_%7Em^Vsy^t2|c%7R%mve|P%a}>&9{Aq6x3CfBg!Hp$VF`_=pPO3djR4t>_gafzGKsB#vY0ZqW z;vu&B446{2$Zo6yn028oRp${M zYj@rn7H14R<(x}&w=zTDauGYwsw`&+=Yts44ZlZBi7mMyXG3mZ=24Pq;V03ssjpOQ z+-z&0y)!r%(XTn4N#!{9?1Gp>wAf9gj@GxChTD4junf;}JKG+U(Q-EQ$Y$pX$2GjR zJw!jwA|s|D`)%yA?a-5vwO9_8n)xQzb04QnM`1qOW)jXk;rNb(kZs>8XGy=w2xHu& zvs70&J*$bV$3&uW`6UuQOsul$2BuT^h1bZvfqGS~pg+L|tY)b}B4+G2JS7NPbHO9wrb}$W*k8E2 zRprwOAPb*xoI1~qm8d{DJ zk={&`c>P$qESIElwQcZ8^1gaRo;7-PXSW&n;vKM+7FS*5T8vUL6DGN70X2!VjX$ar z4pO;s=rKkX74z6*nz0cqwT>Kzrt2B~Uux#3rEjADpVkSV-$U;&gO`ACt^c>s^X~?? 
zfm^|i;3vVqpyU58xEtIBmcfnSGVpoy``-p{1r0C{X2E6PdEgn~67VT>{!f5auoL_M zJ^y>)KJW|R4d4o}AM67?;91~7bp6kOdq5rdU;um@o&RC*1@I=&06w@890gAYe~aF) zn1R0p4}dR#H-THhIFL=?^Waax8^F(lDtI-x0sIWm+W$WSe+X^^^WeGQE9n1U244hs zfZKuA{m+7nfouf-{1j{$;51MS!6Zn6zs45um*6en&0riH0Q zsWp?yjmtRW3pCM$B-1&csZc+-g*3tq9UGb0aty7j}FcBXWrA=g}$F zcI?>kBd`#(StCt&Qf8%T(j6@2=uDkdfAWPyT=dn<-=u#O&m{Ia$!hIm7 zZRXbOXp`eH)NjHRvY3Q)P+Z`M2^;%_LW(l^tXz$(-!^)$i0)fi9sMAZf?~R9JYYvt zZjfxGQ4dJK3XDS0`$$om%9ZU!vTmhHbt6ur;N+fefM zZ8RS4$PF4+B(&R#lVBTt0{32I9%y%EIugCT%px9dtj2mdX-(Ho;8T(p6V)kS_5-`} zW(#0te0H2=ueN})JeHh1cyw+;*TU7bDqRL&q$-6~yrVrf*bT@E!p6+2#xU(_^m03* za7!2$teRqaW`1J7qU~19o*2m9oixq6k$3=|+rv zOZY8@ebkz!*D3F&fvRZ~P(!3HCPnc`tMByup93-YCMGqY1>8=M%kprA^# zwJWrl)jHbw$W5Jy1p*Fz%+% z<}Baee}o6BDVSJK+BP02^C$w0JW%HNKhy)I5f@Hg&9SI452GkQURM;!Y&)Ere8m_L z%<$jb9Ysok(D67uQS$#^h6=fgZYljg+~5Cyq4$3T+ypA%D7YN_1b7zs1N8n+gV%yr zg8z-K{~O=~;ML$17zcgea`0tz{4at#!0q5Lcs6(@_ze2~?*pw3_z=+gfOmmAK>;j* z8K8atE&&&V2hsh%3_c4KAFv8!6G(z*0PO+z@7MwU4g4#378CMr&*D?IXcROoG&MRw}8Wf})}f5oT+Wu(n1kLh|LWDTzlOPi*a- zl|4M)&C*vK=fDB}HE>o^=0P`htX=fXWz3&k4$Dr&rEQ$>*G1E7EBLX~2&!o+R&XV< z>dgF3sNxiZH%$=8SkgzdcLvMOuo^Wvb7*Q>;aWV_RuDXHv5jTCdWJ1<*yYfaD0AAY zYm=QQZN-5Gr7GtAooVd!r`Zh9PcK!ogDaK%a8Ssw?`HDLJ57nVE@z!h1CuJ(IUDz7 z)Rx;pC9j^C-@Hki{p?)()^K`yeB$__iEGm~oK;^emvEolwo~h*MRR_&x7o(cR;5w* zu&|@`mO?^lkG-_VuGGk$*qcub)sIZf9_$p^N88CRvvNdxf4Q7y{4D1EC$|~w5Q5k{ zHxN5$p}nc$7@do1A&r^MTV}V0(XLptJu&Sve`I3n@i)w36>2XrM7>fr91G8bfwj^zrzCZQY@YUR>yHDuGy;517j22bl>PXDr5LiDS3 zcGZnunw~i{F*n9fQg@15950IM(K){fL1)bklA97n zpJp^`vLilUR#Owe6KqN-@hzP49PJBT$cJkmo0bA38oR5Z4Xv_?1fqp}l@emC<>9CX zik-S70^xc`UI{%VavQLpwEPA{ABp*vh=^muQ#}~wM(#31bTE=C=#De=*17qEbda!9#OnyE|I|c7Zh^u+&>igHiw7HM5>=4L7s`THJgd z5u@!XBLS&-rlb9NRfSkN4c%I*W!=X?a*_?}OR|_DaXQWwV`I)#kRd*F%w5Klj*wWd zH5AM{YqkoT(+vi@@GU012*Nxj+0jD3!AP9U1XN^Q1!Z^l-)xk^V; zskT}y5wl~Kfp%^H*qBJs8re{m$kb{wp)y7xB4bHA+aOp<5;r?yBmD(W$1dw}jHX6XwVc7UgX2c_qOPlMkFzXLu5?gj4xcY@>KV(=;S{*Qp)19yR& z!79+60XKl-U<{}tOGkejcrDlmc7VS@uht#|alQKn&QstA=+u7!6d&LL7wiY4;HSW4Ksxtr z;N?K=_b+P0bN$#bl-BhiVdT=V)K3$WqE0g45ItpA3NkQ>j#11D_I9xb#<20b$fL&2 zvWiJE3}MBvBok*WHsCskCl4L9Pioa14qtN;1x@8i$mAK*L}MFZnkk1F!mOMki%P6W zeQ^&F69`O&Az%s`%m3=*Tj#D4w$|36W7`#vrt0}TAx~O!lkxI-^hDiC^aSVaYaA8D zA3n>D!#y~m(H&zCFJzs;qI1O+SFrx+rM=YxzKKfdrM)#bS7`W|34*DX(tV>Z?Nxy@ z@_s1SezSPYmZ{~2O4f3L^5OQ0zpD}^`+8XlzUsWp(f3+^n$<}ZE?8iT&)LKo9iEsD zH{wr{aMDsO8LdFwvLdQ5RnVrdUPcl`#UyVQv2K-k*d?0DL-o*8sS0ep!Xb(??J+aO z##JQ{VO)V+^b~6mu?-v;N|0WZzU@_gR~Jgo@kY%jeh%lC&jj0#Q7Krp5_%KcTyAvy z-np4sZ8?#Slz64tdPYZJWk$x(cKX;+wwZN$TJS*oC%j*DM?S!!eVGq;t0%BUVNwVoN?($*^rbtwvriA7cYo{z-Yf4T}c7UF&PbPOGwke!V zL%&%v6C4zi3py;x&9vGUW45#N55q%i(Z_xiTahMx7XlN-0J&E~w`i~Mlg zzSS0l9R|^MT6l%WHn&v?La~jLL&L$?He2@LFhlczStUsnhYhYK#x1^DxWCOnDnXXf zV;Ffb=9|kPwYvzn%+Jr5Fj=dm*xxQNQP@GRGs+3cRBHOg4m&d_S~L6^njqyC^D3jy z%rsc}SY<=#Ww;j86esS+Hie6amorn5wp|6sv2h@m8ic4Yzw+ZLW|=U9gM`i_&U~A2 zcM_Upw%aBIoz4;q&DbL9y|cX|Zu@O641Bam^o{+c$+hFNLc)QPXzrOd7S447W5kT( zOO-g;G@`uqSA2LX)OXi)T&l|2+YDph7~vkqnFs2D!pxJ_6eI247|V1ZTcy6+?a(zA zqU*_1&cJp@O2W>Qpe4eih53j*VPSc0XCsj(1K>cq{J&$@5_Hgg``AE=nQ1Xe1mD5 z^?SkZ&zL}x67dPBNRtY<9tgwZBd@6ls9{bzVh>U8*BcC!a%IIVwlgUieM$jC#{Cpz z8P5JjpVDuupsX5WjZv%v6V8D72`V)Jyh4_&5{B;r;g3Q5w_o6bTQ7UWg4)n+Drjv! 
zk8b}-1l>=OgCYA)+nuCVD+Zu*YSkS z5)ZR2Rq$g?VHALRSd4l`nK5xmOc*Abr)9$_tYT)@5$m7~G;k${35h(mt+X~cRWFip1^VOPqU(PgoCH^bZ=&CS z6ub-E0#1PAUq0t9G20RU12>uQo{1?HSKn2VI`NRJL zeO$lMr+ol3;Q!Z8kJGcsmWP{MGQ86a)L1E{L0_wfR-T{KVnUBZtS3&ZE7u z&3&k~!ASSG_lG>kB%hGHVZ!xu%-_1Gw`uH7rE?xM?pT(-dmc$W0WEP(xtn~?=}8Gp z=P)z_9Z#LW$W`49O1m4BVseYKU!x7Tta?PIHnuWr#*K;N8#QprWRXh-?bzcmaBl+bo!i}@ zw0(opgcej`PHNjOwD+tQAW7p~m%qxKnV+NO*REM|#65GyK8rh|2f2ZbDDXRuJxM!C z#tVw6Mxsk(;E{W$ZD&bzaecP3FM8A}sI95{7W+-g6o&04s%`sztg%Ab%? /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # -# Get the name of this script as well as the directory in which it is -# located. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. # #----------------------------------------------------------------------- # -script_path=$( readlink -f "${BASH_SOURCE[0]}" ) -script_name=$( basename "${script_path}" ) -script_dir=$( dirname "${script_path}" ) print_info_msg " ======================================================================== -Entering script: \"${script_path}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" This is the ex-script for the task that copies/fetches to a local direc- tory (either from disk or HPSS) the external model files from which ini- @@ -61,21 +70,13 @@ process_args valid_args "$@" # #----------------------------------------------------------------------- # -# If VERBOSE is set to "TRUE", print out values of arguments passed to -# this script. +# For debugging purposes, print out values of arguments passed to this +# script. Note that these will be printed out only if VERBOSE is set to +# TRUE. # #----------------------------------------------------------------------- # -msg=" -The arguments to script/function \"${script_name}\" have been set as -follows: -" -num_valid_args="${#valid_args[@]}" -for (( i=0; i<${num_valid_args}; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - msg="$msg"$( printf " $line\n" ) -done -print_info_msg "$VERBOSE" "$msg" +print_input_args valid_args # #----------------------------------------------------------------------- # @@ -200,7 +201,8 @@ Successfully copied or linked to external model files on system disk needed for generating initial conditions and surface fields for the FV3 forecast!!! -Exiting script: \"${script_path}\" +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" elif [ "${ICS_OR_LBCS}" = "LBCS" ]; then @@ -210,7 +212,9 @@ Exiting script: \"${script_path}\" Successfully copied or linked to external model files on system disk needed for generating lateral boundary conditions for the FV3 fore- cast!!! 
-Exiting script: \"${script_path}\" + +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" fi @@ -572,7 +576,9 @@ file UNZIP_LOG_FN in the directory EXTRN_MDL_FILES_DIR for details: ======================================================================== External model files needed for generating initial condition and surface fields for the FV3SAR successfully fetched from HPSS!!! -Exiting script: \"${script_name}\" + +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" elif [ "${ICS_OR_LBCS}" = "LBCS" ]; then @@ -582,7 +588,9 @@ Exiting script: \"${script_name}\" External model files needed for generating lateral boundary conditions on the halo of the FV3SAR's regional grid successfully fetched from HPSS!!! -Exiting script: \"${script_name}\" + +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" fi diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index 768e7852b..524c9e353 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -8,8 +8,8 @@ # #----------------------------------------------------------------------- # -. ${SCRIPT_VAR_DEFNS_FP} -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -31,17 +31,26 @@ # #----------------------------------------------------------------------- # -# Get the name of this script as well as the directory in which it is -# located. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. # #----------------------------------------------------------------------- # -script_path=$( readlink -f "${BASH_SOURCE[0]}" ) -script_name=$( basename "${script_path}" ) -script_dir=$( dirname "${script_path}" ) print_info_msg " ======================================================================== -Entering script: \"${script_path}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" This is the ex-script for the task that generates grid files. ========================================================================" @@ -60,21 +69,13 @@ process_args valid_args "$@" # #----------------------------------------------------------------------- # -# If VERBOSE is set to "TRUE", print out values of arguments passed to -# this script. +# For debugging purposes, print out values of arguments passed to this +# script. Note that these will be printed out only if VERBOSE is set to +# TRUE. 
# #----------------------------------------------------------------------- # -msg=" -The arguments to script/function \"${script_name}\" have been set as -follows: -" -num_valid_args="${#valid_args[@]}" -for (( i=0; i<${num_valid_args}; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - msg="$msg"$( printf " $line\n" ) -done -print_info_msg "$VERBOSE" "$msg" +print_input_args valid_args # #----------------------------------------------------------------------- # @@ -162,8 +163,10 @@ case $MACHINE in . /apps/lmod/lmod/init/sh module purge module load intel/18.0.5.274 - module load netcdf/4.6.1 - module load hdf5/1.10.4 +# module load netcdf/4.6.1 +# module load hdf5/1.10.4 + module load netcdf/4.7.0 + module load hdf5/1.10.5 module list { restore_shell_opts; } > /dev/null 2>&1 @@ -418,8 +421,8 @@ exit code." RES="$RES_equiv" CRES="$CRES_equiv" - set_file_param "${SCRIPT_VAR_DEFNS_FP}" "RES" "$RES" - set_file_param "${SCRIPT_VAR_DEFNS_FP}" "CRES" "$CRES" + set_file_param "${GLOBAL_VAR_DEFNS_FP}" "RES" "$RES" + set_file_param "${GLOBAL_VAR_DEFNS_FP}" "CRES" "$CRES" fi # @@ -482,9 +485,9 @@ printf "%s %s %s %s %s\n" \ $APRUN $EXECDIR/${shave_exec} < ${nml_fn} || \ print_err_msg_exit "\ -Call to executable \"${shave_exec}\" to generate a grid file with a ${nh3_T7}- -cell-wide-halo returned with nonzero exit code. The namelist file -nml_fn is in directory tmpdir: +Call to executable \"${shave_exec}\" to generate a grid file with a ${nh3_T7}-cell-wide +halo returned with nonzero exit code. The namelist file nml_fn is in +directory tmpdir: tmpdir = \"${tmpdir}\" nml_fn = \"${nml_fn}\"" mv_vrfy ${shaved_fp} ${GRID_DIR} @@ -506,9 +509,9 @@ printf "%s %s %s %s %s\n" \ $APRUN $EXECDIR/${shave_exec} < ${nml_fn} || \ print_err_msg_exit "\ -Call to executable \"${shave_exec}\" to generate a grid file with a ${nh4_T7}- -cell-wide-halo returned with nonzero exit code. The namelist file -nml_fn is in directory tmpdir: +Call to executable \"${shave_exec}\" to generate a grid file with a ${nh4_T7}-cell-wide +halo returned with nonzero exit code. The namelist file nml_fn is in +directory tmpdir: tmpdir = \"${tmpdir}\" nml_fn = \"${nml_fn}\"" mv_vrfy ${shaved_fp} ${GRID_DIR} @@ -527,7 +530,7 @@ cd_vrfy - # $USHDIR/link_fix.sh \ verbose="FALSE" \ - script_var_defns_fp="${SCRIPT_VAR_DEFNS_FP}" \ + global_var_defns_fp="${GLOBAL_VAR_DEFNS_FP}" \ file_group="grid" || \ print_err_msg_exit "\ Call to script to create links to grid files failed." @@ -542,7 +545,8 @@ print_info_msg " ======================================================================== Grid files with various halo widths generated successfully!!! -Exiting script: \"${script_path}\" +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/scripts/exregional_make_ic_lbc0.sh b/scripts/exregional_make_ic_lbc0.sh index 1682a8613..67bb8afd9 100755 --- a/scripts/exregional_make_ic_lbc0.sh +++ b/scripts/exregional_make_ic_lbc0.sh @@ -8,8 +8,8 @@ # #----------------------------------------------------------------------- # -. ${SCRIPT_VAR_DEFNS_FP} -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. 
$USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -18,21 +18,30 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # -# Get the name of this script as well as the directory in which it is -# located. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. # #----------------------------------------------------------------------- # -script_path=$( readlink -f "${BASH_SOURCE[0]}" ) -script_name=$( basename "${script_path}" ) -script_dir=$( dirname "${script_path}" ) print_info_msg " ======================================================================== -Entering script: \"${script_path}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" This is the ex-script for the task that generates initial condition (IC), surface, and zeroth hour lateral boundary condition (LBC0) files @@ -60,21 +69,13 @@ process_args valid_args "$@" # #----------------------------------------------------------------------- # -# If VERBOSE is set to "TRUE", print out values of arguments passed to -# this script. +# For debugging purposes, print out values of arguments passed to this +# script. Note that these will be printed out only if VERBOSE is set to +# TRUE. # #----------------------------------------------------------------------- # -msg=" -The arguments to script/function \"${script_name}\" have been set as -follows: -" -num_valid_args="${#valid_args[@]}" -for (( i=0; i<${num_valid_args}; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - msg="$msg"$( printf " $line\n" ) -done -print_info_msg "$VERBOSE" "$msg" +print_input_args valid_args # #----------------------------------------------------------------------- # @@ -497,7 +498,8 @@ print_info_msg " Initial condition, surface, and zeroth hour lateral boundary condition files (in NetCDF format) for FV3 generated successfully!!! -Exiting script: \"${script_path}\" +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/scripts/exregional_make_lbc1_to_lbcn.sh b/scripts/exregional_make_lbc1_to_lbcn.sh index 1e380eb48..9b49b16f5 100755 --- a/scripts/exregional_make_lbc1_to_lbcn.sh +++ b/scripts/exregional_make_lbc1_to_lbcn.sh @@ -8,8 +8,8 @@ # #----------------------------------------------------------------------- # -. ${SCRIPT_VAR_DEFNS_FP} -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. 
$USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -18,21 +18,30 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # -# Get the name of this script as well as the directory in which it is -# located. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. # #----------------------------------------------------------------------- # -script_path=$( readlink -f "${BASH_SOURCE[0]}" ) -script_name=$( basename "${script_path}" ) -script_dir=$( dirname "${script_path}" ) print_info_msg " ======================================================================== -Entering script: \"${script_path}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" This is the ex-script for the task that generates lateral boundary con- dition (LBC) files (in NetCDF format) for all LBC update hours (except @@ -61,21 +70,13 @@ process_args valid_args "$@" # #----------------------------------------------------------------------- # -# If VERBOSE is set to "TRUE", print out values of arguments passed to -# this script. +# For debugging purposes, print out values of arguments passed to this +# script. Note that these will be printed out only if VERBOSE is set to +# TRUE. # #----------------------------------------------------------------------- # -msg=" -The arguments to script/function \"${script_name}\" have been set as -follows: -" -num_valid_args="${#valid_args[@]}" -for (( i=0; i<${num_valid_args}; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - msg="$msg"$( printf " $line\n" ) -done -print_info_msg "$VERBOSE" "$msg" +print_input_args valid_args # #----------------------------------------------------------------------- # @@ -464,7 +465,8 @@ print_info_msg " Lateral boundary condition (LBC) files (in NetCDF format) generated suc- cessfully for all LBC update hours (except hour zero)!!! -Exiting script: \"${script_path}\" +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index 71b31d507..4a3abceed 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -8,8 +8,8 @@ # #----------------------------------------------------------------------- # -. ${SCRIPT_VAR_DEFNS_FP} -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. 
$USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -27,21 +27,30 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # -# Get the name of this script as well as the directory in which it is -# located. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. # #----------------------------------------------------------------------- # -script_path=$( readlink -f "${BASH_SOURCE[0]}" ) -script_name=$( basename "${script_path}" ) -script_dir=$( dirname "${script_path}" ) print_info_msg " ======================================================================== -Entering script: \"${script_path}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" This is the ex-script for the task that generates orography files. ========================================================================" @@ -60,21 +69,13 @@ process_args valid_args "$@" # #----------------------------------------------------------------------- # -# If VERBOSE is set to "TRUE", print out values of arguments passed to -# this script. +# For debugging purposes, print out values of arguments passed to this +# script. Note that these will be printed out only if VERBOSE is set to +# TRUE. # #----------------------------------------------------------------------- # -msg=" -The arguments to script/function \"${script_name}\" have been set as -follows: -" -num_valid_args="${#valid_args[@]}" -for (( i=0; i<${num_valid_args}; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - msg="$msg"$( printf " $line\n" ) -done -print_info_msg "$VERBOSE" "$msg" +print_input_args valid_args # #----------------------------------------------------------------------- # @@ -185,7 +186,7 @@ case $MACHINE in . /apps/lmod/lmod/init/sh module purge module load intel/18.0.5.274 - module load netcdf/4.6.1 + module load netcdf/4.7.0 module load hdf5/1.10.4 module list @@ -371,26 +372,19 @@ peak_fac_array=( "1.1" "1.1" "1.05" "1.0" "1.0" "1.0" "1.0") # Need to fix this so that the stderr from a failed call to interpol_to_arbit_CRES # gets sent to the stderr of this script. 
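The "Need to fix this" note above concerns error output from a failed
interpol_to_arbit_CRES call made inside a command substitution, which is not
reliably propagated to this script's stderr. One common pattern, shown here
only as a sketch, is to fold stderr into the captured value, test the exit
status of the substitution, and re-emit the text on failure; it assumes the
function writes nothing to stderr on success:

  cd4=$( interpol_to_arbit_CRES "${RES_equiv}" "RES_array" "cd4_array" 2>&1 )
  rc=$?
  if [ $rc -ne 0 ]; then
    printf "%s\n" "$cd4" >&2     # forward the captured error text to this script's stderr
    print_err_msg_exit "\
Call to interpol_to_arbit_CRES for cd4 failed with exit code $rc."
  fi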
-cd4=$( interpol_to_arbit_CRES "${RES_equiv}" "RES_array" "cd4_array" ) || \ -print_err_msg_exit "\ -Call to script that interpolated cd4 to the regional grid's equiavlent +var_names=( "cd4" "max_slope" "n_del2_weak" "peak_fac" ) +num_vars=${#var_names[@]} +for (( i=0; i<${num_vars}; i++ )); do + var_name=${var_names[$i]} + eval ${var_name}=$( interpol_to_arbit_CRES "${RES_equiv}" "RES_array" "${var_name}_array" ) || \ + print_err_msg_exit "\ +Call to script that interpolated ${var_name} to the regional grid's equiavlent global cubed-sphere resolution (RES_equiv) failed: RES_equiv = \"${RES_equiv}\"" -echo "====>>>> cd4 = $cd4" -# -max_slope=$( interpol_to_arbit_CRES "${RES_equiv}" "RES_array" "max_slope_array" ) -echo "====>>>> max_slope = $max_slope" -# -n_del2_weak=$( interpol_to_arbit_CRES "${RES_equiv}" "RES_array" "n_del2_weak_array" ) -# n_del2_weak is defined to be of integer type in the filter_topo code -# that uses it, so round it to the nearest integer. Otherwise, the code -# might break on some machines/compilers. -n_del2_weak=$( printf "%.0f" ${n_del2_weak} ) # cast to integer, Y. Wang -echo "====>>>> n_del2_weak = $n_del2_weak" -# -peak_fac=$( interpol_to_arbit_CRES "${RES_equiv}" "RES_array" "peak_fac_array" ) -echo "====>>>> peak_fac = $peak_fac" -# + var_value=${!var_name} + echo "====>>>> ${var_name} = ${var_value}" +done + if [ 0 = 1 ]; then @@ -579,7 +573,7 @@ cd_vrfy - $USHDIR/link_fix.sh \ verbose="FALSE" \ - script_var_defns_fp="${SCRIPT_VAR_DEFNS_FP}" \ + global_var_defns_fp="${GLOBAL_VAR_DEFNS_FP}" \ file_group="orog" || \ print_err_msg_exit "\ Call to script to create links to orography files failed." @@ -630,7 +624,8 @@ print_info_msg " ======================================================================== Orography files with various halo widths generated successfully!!! -Exiting script: \"${script_path}\" +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/scripts/exregional_make_sfc_climo.sh b/scripts/exregional_make_sfc_climo.sh index e0853fcc0..5cfc3b714 100755 --- a/scripts/exregional_make_sfc_climo.sh +++ b/scripts/exregional_make_sfc_climo.sh @@ -8,8 +8,8 @@ # #----------------------------------------------------------------------- # -. ${SCRIPT_VAR_DEFNS_FP} -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -22,17 +22,26 @@ # #----------------------------------------------------------------------- # -# Get the name of this script as well as the directory in which it is -# located. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. 
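The refactored orography hunk earlier in this patch assigns each
interpolated value with eval and reads it back through ${!var_name}. A
stripped-down, standalone illustration of just those two mechanisms, with
made-up values standing in for what interpol_to_arbit_CRES would return:

  var_names=( "cd4" "max_slope" )
  cd4_src="0.12"          # stand-in value
  max_slope_src="0.15"    # stand-in value
  for var_name in "${var_names[@]}"; do
    eval ${var_name}="\${${var_name}_src}"      # e.g. expands to:  cd4=${cd4_src}
    echo "====>>>> ${var_name} = ${!var_name}"  # indirect expansion reads the result back
  done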
# #----------------------------------------------------------------------- # -script_path=$( readlink -f "${BASH_SOURCE[0]}" ) -script_name=$( basename "${script_path}" ) -script_dir=$( dirname "${script_path}" ) print_info_msg " ======================================================================== -Entering script: \"${script_path}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" This is the ex-script for the task that generates surface fields from climatology. @@ -52,21 +61,13 @@ process_args valid_args "$@" # #----------------------------------------------------------------------- # -# If VERBOSE is set to "TRUE", print out values of arguments passed to -# this script. +# For debugging purposes, print out values of arguments passed to this +# script. Note that these will be printed out only if VERBOSE is set to +# TRUE. # #----------------------------------------------------------------------- # -msg=" -The arguments to script/function \"${script_name}\" have been set as -follows: -" -num_valid_args="${#valid_args[@]}" -for (( i=0; i<${num_valid_args}; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - msg="$msg"$( printf " $line\n" ) -done -print_info_msg "$VERBOSE" "$msg" +print_input_args valid_args # #----------------------------------------------------------------------- # @@ -238,7 +239,7 @@ esac # $USHDIR/link_fix.sh \ verbose="FALSE" \ - script_var_defns_fp="${SCRIPT_VAR_DEFNS_FP}" \ + global_var_defns_fp="${GLOBAL_VAR_DEFNS_FP}" \ file_group="sfc_climo" || \ print_err_msg_exit "\ Call to script to create links to surface climatology files failed." @@ -265,7 +266,8 @@ print_info_msg " ======================================================================== All surface climatology files generated successfully!!! -Exiting script: \"${script_path}\" +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/scripts/exregional_run_fv3.sh b/scripts/exregional_run_fv3.sh index 7d8856172..e014eb201 100755 --- a/scripts/exregional_run_fv3.sh +++ b/scripts/exregional_run_fv3.sh @@ -8,8 +8,8 @@ # #----------------------------------------------------------------------- # -. ${SCRIPT_VAR_DEFNS_FP} -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -22,17 +22,26 @@ # #----------------------------------------------------------------------- # -# Get the name of this script as well as the directory in which it is -# located. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. 
# #----------------------------------------------------------------------- # -script_path=$( readlink -f "${BASH_SOURCE[0]}" ) -script_name=$( basename "${script_path}" ) -script_dir=$( dirname "${script_path}" ) print_info_msg " ======================================================================== -Entering script: \"${script_path}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" This is the ex-script for the task that runs a forecast with FV3 for the specified cycle. @@ -52,21 +61,13 @@ process_args valid_args "$@" # #----------------------------------------------------------------------- # -# If VERBOSE is set to "TRUE", print out values of arguments passed to -# this script. +# For debugging purposes, print out values of arguments passed to this +# script. Note that these will be printed out only if VERBOSE is set to +# TRUE. # #----------------------------------------------------------------------- # -msg=" -The arguments to script/function \"${script_name}\" have been set as -follows: -" -num_valid_args="${#valid_args[@]}" -for (( i=0; i<${num_valid_args}; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - msg="$msg"$( printf " $line\n" ) -done -print_info_msg "$VERBOSE" "$msg" +print_input_args valid_args # #----------------------------------------------------------------------- # @@ -607,7 +608,8 @@ print_info_msg " ======================================================================== FV3 forecast completed successfully!!! -Exiting script: \"${script_path}\" +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/scripts/exregional_run_post.sh b/scripts/exregional_run_post.sh index 37b2405cd..76d662a98 100755 --- a/scripts/exregional_run_post.sh +++ b/scripts/exregional_run_post.sh @@ -8,8 +8,8 @@ # #----------------------------------------------------------------------- # -. ${SCRIPT_VAR_DEFNS_FP} -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -22,17 +22,26 @@ # #----------------------------------------------------------------------- # -# Get the name of this script as well as the directory in which it is -# located. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Print message indicating entry into script. # #----------------------------------------------------------------------- # -script_path=$( readlink -f "${BASH_SOURCE[0]}" ) -script_name=$( basename "${script_path}" ) -script_dir=$( dirname "${script_path}" ) print_info_msg " ======================================================================== -Entering script: \"${script_path}\" +Entering script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" This is the ex-script for the task that runs the post-processor (UPP) on the output files corresponding to a specified forecast hour. 
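Every ex-script touched by this patch now opens with the same three-line
self-location idiom. Its effect is easiest to see in a tiny standalone
script; the path in the comments is made up, and readlink -f assumes GNU
coreutils:

  #!/bin/bash
  scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" )  # e.g. /home/user/regional_workflow/scripts/demo.sh
  scrfunc_fn=$( basename "${scrfunc_fp}" )         # demo.sh
  scrfunc_dir=$( dirname "${scrfunc_fp}" )         # /home/user/regional_workflow/scripts
  echo "Entering script: \"${scrfunc_fn}\""
  echo "In directory: \"${scrfunc_dir}\""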
@@ -52,21 +61,13 @@ process_args valid_args "$@" # #----------------------------------------------------------------------- # -# If VERBOSE is set to "TRUE", print out values of arguments passed to -# this script. +# For debugging purposes, print out values of arguments passed to this +# script. Note that these will be printed out only if VERBOSE is set to +# TRUE. # #----------------------------------------------------------------------- # -msg=" -The arguments to script/function \"${script_name}\" have been set as -follows: -" -num_valid_args="${#valid_args[@]}" -for (( i=0; i<${num_valid_args}; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - msg="$msg"$( printf " $line\n" ) -done -print_info_msg "$VERBOSE" "$msg" +print_input_args valid_args # #----------------------------------------------------------------------- # @@ -321,7 +322,8 @@ print_info_msg " ======================================================================== Post-processing for forecast hour $fhr completed successfully. -Exiting script: \"${script_path}\" +Exiting script: \"${scrfunc_fn}\" +In directory: \"${scrfunc_dir}\" ========================================================================" # #----------------------------------------------------------------------- diff --git a/tests/run_experiments.sh b/tests/run_experiments.sh new file mode 100755 index 000000000..e31c62d3e --- /dev/null +++ b/tests/run_experiments.sh @@ -0,0 +1,310 @@ +#!/bin/bash -l + +set -x +# +#----------------------------------------------------------------------- +# +# Get the name of this script/function (scrfunc_name), the full path to +# the file in which it is located (scrfunc_fp), the name of the file +# (scrfunc_fn), and the directory in which the file is located (scr- +# func_-dir). Note that if this is a script (as opposed to a function), +# the script name (scrfunc_name) will be set to "main" if it is the top- +# level script and to "script" if it is a script called from another +# script or function. +# +#----------------------------------------------------------------------- +# +scrfunc_name="${FUNCNAME[0]}" +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +# The current script should be located in the "tests" subdirectory of +# the workflow directory. Thus, the workflow directory is the one above +# the directory of the current script. Get the path to this directory +# and save it in HOMErrfs. +# +HOMErrfs=${scrfunc_dir%/*} +# +#----------------------------------------------------------------------- +# +# Set directories. +# +#----------------------------------------------------------------------- +# +#basedir="$(pwd)/../.." +#USHDIR="$basedir/regional_workflow/ush" +USHDIR="$HOMErrfs/ush" +TESTSDIR="$HOMErrfs/tests" +# +#----------------------------------------------------------------------- +# +# Source bash utility functions. +# +#----------------------------------------------------------------------- +# +echo "AAAAAAAA scrfunc_dir = ${scrfunc_dir}" +. $USHDIR/source_util_funcs.sh +echo "BBBBBBBB scrfunc_dir = ${scrfunc_dir}" +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. 
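save_shell_opts and restore_shell_opts are existing repository utilities
that are only called in this new test driver; according to the comment
above, they stash the current shell options in a global array and later put
them back. A rough sketch of how such a pair can be written, not
necessarily how the repository implements it:

  function save_shell_opts() {
    saved_shell_opts+=( "$( set +o )" )   # "set +o" prints the current options as replayable commands
  }
  function restore_shell_opts() {
    local n=$(( ${#saved_shell_opts[@]} - 1 ))
    eval "${saved_shell_opts[$n]}"        # replay the most recently saved options
    unset "saved_shell_opts[$n]"
  }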
+# +#----------------------------------------------------------------------- +# +{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Set site and computational parameters. +# +#----------------------------------------------------------------------- +# +MACHINE="HERA" +ACCOUNT="gsd-fv3" +QUEUE_DEFAULT="batch" +QUEUE_HPSS="service" +QUEUE_FCST="batch" +#VERBOSE="FALSE" # This should be removed later. To do so, must remove VERBOSE variable from function print_info_msg in file print_msg.sh. +# +#----------------------------------------------------------------------- +# +# +# +#----------------------------------------------------------------------- +# +readarray -t experiments_list < ${TESTSDIR}/experiments_list.txt +printf "%s\n" "${experiments_list[@]}" +num_elem="${#experiments_list[@]}" +echo "num_elem = ${num_elem}" +echo "scrfunc_dir = ${scrfunc_dir}" +# +#----------------------------------------------------------------------- +# +# +# +#----------------------------------------------------------------------- +# +set +x +echo +i=0 +while [ ! -z "${experiments_list[$i]}" ]; do +echo +echo "======================================================" +echo "i = $i" +echo "experiments_list[$i] = '${experiments_list[$i]}'" + +# Remove all leading and trailing whitespace. + experiments_list[$i]=$( \ + printf "%s" "${experiments_list[$i]}" | \ + sed -r -e "s/^[ ]*//" -e "s/[ ]*$//" ) +# sed -r -n -e "s/^[ ]*//" -e "s/[ ]*$//p" ) +echo "experiments_list[$i] = '${experiments_list[$i]}'" +# Remove spaces before and after all separators. We use the pipe symbol +# as the separator. + experiments_list[$i]=$( \ + printf "%s" "${experiments_list[$i]}" | \ + sed -r -e "s/[ ]*\|[ ]*/\|/g" ) +# sed -r -n -e "s/[ ]*\|[ ]*/\|/gp" ) +echo "experiments_list[$i] = '${experiments_list[$i]}'" + +# regex_search="^[ ]*([^\|]*)[ ]*\|[ ]*(.*)" +# regex_search="^([^\|]*)\|(.*)" + regex_search="^([^\|]*)(\|(.*)|)" + + baseline_name=$( printf "%s" "${experiments_list[$i]}" | sed -r -n -e "s/${regex_search}/\1/p" ) + remainder=$( printf "%s" "${experiments_list[$i]}" | sed -r -n -e "s/${regex_search}/\3/p" ) +echo +echo " baseline_name = '${baseline_name}'" +echo " remainder = '$remainder'" + + modvar_name=() + modvar_value=() + num_mod_vars=0 + while [ ! -z "${remainder}" ]; do +# next_field=$( printf "%s" "$remainder" | sed -r -n -e "s/${regex_search}/\1/p" ) +# remainder=$( printf "%s" "$remainder" | sed -r -n -e "s/${regex_search}/\3/p" ) + next_field=$( printf "%s" "$remainder" | sed -r -e "s/${regex_search}/\1/" ) + remainder=$( printf "%s" "$remainder" | sed -r -e "s/${regex_search}/\3/" ) +# modvar_name[${num_mod_vars}]=$( printf "%s" "${next_field}" | sed -r -n -e "s/^([^=]*)=(.*)/\1/p" ) +# modvar_value[${num_mod_vars}]=$( printf "%s" "${next_field}" | sed -r -n -e "s/^([^=]*)=(.*)/\2/p" ) + modvar_name[${num_mod_vars}]=$( printf "%s" "${next_field}" | sed -r -e "s/^([^=]*)=(.*)/\1/" ) + modvar_value[${num_mod_vars}]=$( printf "%s" "${next_field}" | sed -r -e "s/^([^=]*)=(\")?([^\"]+*)(\")?/\3/" ) +echo +echo " next_field = '${next_field}'" +echo " remainder = '$remainder'" +echo " modvar_name[${num_mod_vars}] = ${modvar_name[${num_mod_vars}]}" +echo " modvar_value[${num_mod_vars}] = ${modvar_value[${num_mod_vars}]}" + num_mod_vars=$((num_mod_vars+1)) +echo " num_mod_vars = ${num_mod_vars}" + + done + + + baseline_config_fp="${TESTSDIR}/baseline_configs/config.${baseline_name}.sh" + if [ ! 
-f "${baseline_config_fp}" ]; then + print_err_msg_exit "\ +The experiment/workflow configuration file (baseline_config_fp) for the +specified baseline (baseline_name) does not exist: + baseline_name = \"${baseline_name}\" + baseline_config_fp = \"${baseline_config_fp}\"" + fi + + experiment_name="${baseline_name}" + for (( j=0; j<${num_mod_vars}; j++ )); do + if [ $j -lt ${#modvar_name[@]} ]; then + experiment_name="${experiment_name}__${modvar_name[$j]}=${modvar_value[$j]}" + else + break + fi + done +echo +echo "experiment_name = '${experiment_name}'" + + experiment_config_fp="${USHDIR}/config.${experiment_name}.sh" + cp_vrfy "${baseline_config_fp}" "${experiment_config_fp}" + + EXPT_SUBDIR="${experiment_name}" + + set_bash_param "${experiment_config_fp}" "MACHINE" "$MACHINE" + set_bash_param "${experiment_config_fp}" "ACCOUNT" "$ACCOUNT" + set_bash_param "${experiment_config_fp}" "QUEUE_DEFAULT" "${QUEUE_DEFAULT}" + set_bash_param "${experiment_config_fp}" "QUEUE_HPSS" "${QUEUE_HPSS}" + set_bash_param "${experiment_config_fp}" "QUEUE_FCST" "${QUEUE_FCST}" +# set_bash_param "${experiment_config_fp}" "VERBOSE" "$VERBOSE" + set_bash_param "${experiment_config_fp}" "EXPT_SUBDIR" "${EXPT_SUBDIR}" + + ln_vrfy -fs "${experiment_config_fp}" "$USHDIR/config.sh" + + log_fp="$USHDIR/log.generate_wflow.${experiment_name}" + $USHDIR/generate_FV3SAR_wflow.sh 2>&1 >& "${log_fp}" || { \ + print_err_msg_exit "\ +Could not generate an experiment/workflow for the test specified by +experiment_name: + experiment_name = \"${experiment_name}\" +The log file from the generation script is in the file specified by +log_fp: + log_fp = \"${log_fp}\""; + } +# +#----------------------------------------------------------------------- +# +# Set the experiment directory to the one that the workflow will create. +# Then move the configuration file and experiment/workflow generation +# log file to the experiment directory. +# +#----------------------------------------------------------------------- +# + EXPTDIR=$( readlink -f "$HOMErrfs/../expt_dirs/${EXPT_SUBDIR}" ) + mv_vrfy "${experiment_config_fp}" "${EXPTDIR}" + mv_vrfy "${log_fp}" "${EXPTDIR}" +# +#----------------------------------------------------------------------- +# +# Create a script in the run directory that can be used to (re)launch +# the workflow and report on its status. This script saves its output +# to a log file (in the run directory) for debugging purposes and to al- +# low the user to check on the status of the workflow. +# +#----------------------------------------------------------------------- +# + cd_vrfy $EXPTDIR + + xml_bn="FV3SAR_wflow" + xml_fn="${xml_bn}.xml" + db_fn="${xml_bn}.db" + relaunch_script_fn="relaunch_wflow.sh" + + { cat << EOM > ${relaunch_script_fn} +#!/bin/bash -l + +module load rocoto +cd "$EXPTDIR" +{ +rocotorun -w "${xml_fn}" -d "${db_fn}" -v 10; +echo; +rocotostat -w "${xml_fn}" -d "${db_fn}" -v 10; +} >> log.rocotostat 2>&1 + +dead_tasks=$( rocotostat -w "${xml_fn}" -d "${db_fn}" -v 10 | grep "DEAD" ) +if [ ! -z ${dead_tasks} ]; then + printf "%s\n" " +The end-to-end workflow test for the experiment specified below FAILED: + experiment_name = \"${experiment_name}\" +Removing the corresponding line from the crontab file.\n" +fi +EOM + } || print_err_msg_exit "\ +cat operation to create a relaunch script (relaunch_script_fn) in the experi- +ment directory (EXPTDIR) failed: + EXPTDIR = \"$EXPTDIR\" + relaunch_script_fn = \"${relaunch_script_fn}\"" +# +# Make the relaunch script executable. 
+# + chmod u+x ${relaunch_script_fn} +# +#----------------------------------------------------------------------- +# +# Add a line to the user's cron table to call the (re)launch script at +# some frequency (e.g. every 5 minutes). +# +#----------------------------------------------------------------------- +# + crontab_orig_fp="$(pwd)/crontab.orig" + print_info_msg " +Copying contents of user cron table to backup file: + crontab_orig_fp = \"${crontab_orig_fp}\"" + crontab -l > ${crontab_orig_fp} + + crontab_line="*/5 * * * * cd $EXPTDIR && ./${relaunch_script_fn}" +# +# Below, we use "grep" to determine whether the above crontab line is +# already present in the cron table. For that purpose, we need to es- +# cape the asterisks in the crontab line with backslashes. Do this +# next. +# + crontab_line_esc_astr=$( printf "%s" "${crontab_line}" | \ + sed -r -e "s![*]!\\\\*!g" ) + grep_output=$( crontab -l | grep "${crontab_line_esc_astr}" ) + exit_status=$? + + if [ "${exit_status}" -eq 0 ]; then + + print_info_msg " +The following line already exists in the cron table and thus will not be +added: + crontab_line = \"${crontab_line}\"" + + else + + print_info_msg " +Adding the following line to the cron table in order to automatically +resubmit FV3SAR workflow: + crontab_line = \"${crontab_line}\"" + + (crontab -l 2>/dev/null; echo "${crontab_line}") | crontab - + + fi +# +#----------------------------------------------------------------------- +# +# Increment the index that keeps track of the test/experiment number. +# +#----------------------------------------------------------------------- +# + i=$((i+1)) + +done +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. +# +#----------------------------------------------------------------------- +# +{ restore_shell_opts; } > /dev/null 2>&1 + diff --git a/tests/run_mltpl_expts.sh b/tests/run_mltpl_expts.sh index 92e569e19..cde95a288 100755 --- a/tests/run_mltpl_expts.sh +++ b/tests/run_mltpl_expts.sh @@ -19,7 +19,7 @@ TESTSDIR="$FV3SAR_WFLOW_DIR/tests" # #----------------------------------------------------------------------- # -. $USHDIR/source_funcs.sh +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # diff --git a/tests/run_one_expt.sh b/tests/run_one_expt.sh index 7c066c197..fb5f2f0f8 100755 --- a/tests/run_one_expt.sh +++ b/tests/run_one_expt.sh @@ -16,7 +16,7 @@ USHDIR="$basedir/regional_workflow/ush" # #----------------------------------------------------------------------- # -. $USHDIR/source_funcs.sh +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -29,12 +29,15 @@ USHDIR="$basedir/regional_workflow/ush" # #----------------------------------------------------------------------- # -# Set the script name and print out an informational message informing -# the user that we've entered this script. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). 
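The crontab check in this test driver escapes the asterisks so that grep
treats the crontab line literally; grep's fixed-string mode is a shorter way
to get the same literal match and is shown here purely as an alternative
sketch:

  if crontab -l 2>/dev/null | grep -qF -- "${crontab_line}"; then
    print_info_msg "Line is already in the cron table; not adding it again."
  else
    ( crontab -l 2>/dev/null; echo "${crontab_line}" ) | crontab -
  fi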
# #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) # #----------------------------------------------------------------------- # @@ -57,20 +60,16 @@ valid_args=( \ "quilting" \ ) process_args valid_args "$@" - -# If verbose is set to TRUE, print out what each valid argument has been -# set to. -if [ "$verbose" = "TRUE" ]; then - num_valid_args="${#valid_args[@]}" - print_info_msg " -The arguments to script/function \"${script_name}\" have been set as -follows: -" - for (( i=0; i<${num_valid_args}; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - printf " $line\n" - done -fi +# +#----------------------------------------------------------------------- +# +# For debugging purposes, print out values of arguments passed to this +# script. Note that these will be printed out only if VERBOSE is set to +# TRUE. +# +#----------------------------------------------------------------------- +# +print_input_args valid_args # #----------------------------------------------------------------------- # From 81c35347642ad659e7a750332b401b16ac746ab3 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Mon, 4 Nov 2019 15:34:28 -0700 Subject: [PATCH 026/203] Several changes to reorganize the code for clarity, as follows: (1) For clarity, rename ush/source_funcs.sh to ush/source_util_funcs.sh. (2) Assume that the utility functions that ush/source_util_funcs.sh sources will be moved to a subdirectory named bash_utils under the ush directory. Note that the actual moving of these files will be done in a later commit. (3) In ush/source_util_funcs.sh, instead of sourcing the utility functions directly, define a new function named source_util_funcs() that sources these utility functions and then call this function right after each definition. This allows local convenience variables to be declared in the scope of this new function (such as the name of the file, the directory in which the file is located, etc) that have the same names as variables in the script or function that sources ush/source_util_funcs.sh but that have a different scope so that they don't overwrite the values of the identically-named variables in the sourcing script/function. This allows error and informational messages to use correct names for files, directories, functions, etc. --- ush/{source_funcs.sh => source_util_funcs.sh} | 66 +++++++++++++++---- 1 file changed, 53 insertions(+), 13 deletions(-) rename ush/{source_funcs.sh => source_util_funcs.sh} (64%) diff --git a/ush/source_funcs.sh b/ush/source_util_funcs.sh similarity index 64% rename from ush/source_funcs.sh rename to ush/source_util_funcs.sh index 21cd34303..5fac6a6df 100644 --- a/ush/source_funcs.sh +++ b/ush/source_util_funcs.sh @@ -1,12 +1,33 @@ +function source_util_funcs() { # #----------------------------------------------------------------------- # -# Set the location to look for the sourced function definition files. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). 
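The reorganization described in this commit message relies on a basic bash
scoping rule: a variable declared local inside source_util_funcs() shadows,
and therefore cannot overwrite, an identically named variable in the script
that sources the file. A self-contained illustration:

  scrfunc_fn="top_level_script.sh"
  function scoping_demo() {
    local scrfunc_fn="helper_file.sh"   # visible only inside this function and its callees
    echo "inside:  ${scrfunc_fn}"       # prints: inside:  helper_file.sh
  }
  scoping_demo
  echo "outside: ${scrfunc_fn}"         # prints: outside: top_level_script.sh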
# #----------------------------------------------------------------------- # -#FUNCS_DIR=${USHDIR:-"."} -FUNCS_DIR=${FUNCS_DIR:-${USHDIR:-"."}} + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Set the directory in which the files defining the various utility +# functions are located. +# +#----------------------------------------------------------------------- +# + local bashutils_dir="${scrfunc_dir}/bash_utils" # #----------------------------------------------------------------------- # @@ -15,7 +36,7 @@ FUNCS_DIR=${FUNCS_DIR:-${USHDIR:-"."}} # #----------------------------------------------------------------------- # -. ${FUNCS_DIR}/save_restore_shell_opts.sh + . ${bashutils_dir}/save_restore_shell_opts.sh # #----------------------------------------------------------------------- # @@ -23,7 +44,15 @@ FUNCS_DIR=${FUNCS_DIR:-${USHDIR:-"."}} # #----------------------------------------------------------------------- # -. ${FUNCS_DIR}/print_msg.sh + . ${bashutils_dir}/print_msg.sh +# +#----------------------------------------------------------------------- +# +# +# +#----------------------------------------------------------------------- +# + . ${bashutils_dir}/set_bash_param.sh # #----------------------------------------------------------------------- # @@ -33,7 +62,7 @@ FUNCS_DIR=${FUNCS_DIR:-${USHDIR:-"."}} # #----------------------------------------------------------------------- # -. ${FUNCS_DIR}/set_file_param.sh + . ${bashutils_dir}/set_file_param.sh # #----------------------------------------------------------------------- # @@ -44,7 +73,7 @@ FUNCS_DIR=${FUNCS_DIR:-${USHDIR:-"."}} # #----------------------------------------------------------------------- # -. ${FUNCS_DIR}/check_for_preexist_dir.sh + . ${bashutils_dir}/check_for_preexist_dir.sh # #----------------------------------------------------------------------- # @@ -54,7 +83,7 @@ FUNCS_DIR=${FUNCS_DIR:-${USHDIR:-"."}} # #----------------------------------------------------------------------- # -. ${FUNCS_DIR}/filesys_cmds_vrfy.sh + . ${bashutils_dir}/filesys_cmds_vrfy.sh # #----------------------------------------------------------------------- # @@ -63,7 +92,7 @@ FUNCS_DIR=${FUNCS_DIR:-${USHDIR:-"."}} # #----------------------------------------------------------------------- # -. ${FUNCS_DIR}/iselementof.sh + . ${bashutils_dir}/iselementof.sh # #----------------------------------------------------------------------- # @@ -72,7 +101,7 @@ FUNCS_DIR=${FUNCS_DIR:-${USHDIR:-"."}} # #----------------------------------------------------------------------- # -. ${FUNCS_DIR}/is_array.sh + . ${bashutils_dir}/is_array.sh # #----------------------------------------------------------------------- # @@ -82,7 +111,7 @@ FUNCS_DIR=${FUNCS_DIR:-${USHDIR:-"."}} # #----------------------------------------------------------------------- # -. ${FUNCS_DIR}/interpol_to_arbit_CRES.sh + . ${bashutils_dir}/interpol_to_arbit_CRES.sh # #----------------------------------------------------------------------- # @@ -91,7 +120,15 @@ FUNCS_DIR=${FUNCS_DIR:-${USHDIR:-"."}} # #----------------------------------------------------------------------- # -. 
${FUNCS_DIR}/check_var_valid_value.sh + . ${bashutils_dir}/check_var_valid_value.sh +# +#----------------------------------------------------------------------- +# +# +# +#----------------------------------------------------------------------- +# + . ${bashutils_dir}/print_input_args.sh # #----------------------------------------------------------------------- # @@ -101,5 +138,8 @@ FUNCS_DIR=${FUNCS_DIR:-${USHDIR:-"."}} # #----------------------------------------------------------------------- # -. ${FUNCS_DIR}/process_args.sh + . ${bashutils_dir}/process_args.sh +} +source_util_funcs + From c59b8a5a3eaefc044152df482ac2ec3764d1c938 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 5 Nov 2019 05:27:53 -0700 Subject: [PATCH 027/203] (1) Move bash utility files from ush to a subdirectory, ush/bash_utils. (2) In each file in ush/bash_utils, get the full path to the file, the name of the file, the directory it is in, the name of the function, and, if necessary, similar information about the calling script or function. (3) Improve informational and error messages in bash_utils files. (4) Source count_files.sh in source_util_funcs.sh. (5) In check_for_preexist_dir.sh, create array containing valid values for preexisting_dir_method and use the check_var_valid_value function to verify that the specified value for this variable is valid. --- .../check_for_preexist_dir.sh | 71 +++++----- ush/{ => bash_utils}/check_var_valid_value.sh | 30 ++++- ush/bash_utils/count_files.sh | 86 ++++++++++++ ush/{ => bash_utils}/filesys_cmds_vrfy.sh | 99 ++++++++++---- .../interpol_to_arbit_CRES.sh | 34 ++++- ush/{ => bash_utils}/is_array.sh | 40 +++++- ush/{ => bash_utils}/iselementof.sh | 35 ++++- ush/{ => bash_utils}/print_msg.sh | 125 ++++++++++++++---- ush/{ => bash_utils}/process_args.sh | 49 +++++-- .../save_restore_shell_opts.sh | 0 ush/{ => bash_utils}/set_file_param.sh | 34 ++++- ush/count_files.sh | 33 ----- 12 files changed, 486 insertions(+), 150 deletions(-) rename ush/{ => bash_utils}/check_for_preexist_dir.sh (77%) rename ush/{ => bash_utils}/check_var_valid_value.sh (76%) mode change 100755 => 100644 create mode 100644 ush/bash_utils/count_files.sh rename ush/{ => bash_utils}/filesys_cmds_vrfy.sh (72%) rename ush/{ => bash_utils}/interpol_to_arbit_CRES.sh (80%) mode change 100755 => 100644 rename ush/{ => bash_utils}/is_array.sh (68%) rename ush/{ => bash_utils}/iselementof.sh (74%) rename ush/{ => bash_utils}/print_msg.sh (61%) rename ush/{ => bash_utils}/process_args.sh (88%) mode change 100755 => 100644 rename ush/{ => bash_utils}/save_restore_shell_opts.sh (100%) rename ush/{ => bash_utils}/set_file_param.sh (84%) delete mode 100755 ush/count_files.sh diff --git a/ush/check_for_preexist_dir.sh b/ush/bash_utils/check_for_preexist_dir.sh similarity index 77% rename from ush/check_for_preexist_dir.sh rename to ush/bash_utils/check_for_preexist_dir.sh index f0a884c32..730b39317 100644 --- a/ush/check_for_preexist_dir.sh +++ b/ush/bash_utils/check_for_preexist_dir.sh @@ -17,18 +17,26 @@ function check_for_preexist_dir() { #----------------------------------------------------------------------- # { save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). 
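The commit message above notes that check_for_preexist_dir (whose updated
body follows) now validates its second argument against a fixed list of
methods. For orientation, a hypothetical call from a workflow script might
look like the sketch below, with EXPTDIR standing in for whatever directory
the caller is about to create and the mkdir_vrfy line purely illustrative:

  preexisting_dir_method="rename"    # or "delete" or "quit"
  check_for_preexist_dir "${EXPTDIR}" "${preexisting_dir_method}"
  mkdir_vrfy -p "${EXPTDIR}"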
+# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# #----------------------------------------------------------------------- # -# Get the name of this function as well as information about the calling -# script or function. +# Get the name of this function. # #----------------------------------------------------------------------- # - local crnt_func="${FUNCNAME[0]}" - local caller_path=$( readlink -f "${BASH_SOURCE[1]}" ) - local caller_filename=$( basename "${caller_path}" ) - local caller_dir=$( dirname "${caller_path}" ) - local caller_name="${FUNCNAME[1]}" + local func_name="${FUNCNAME[0]}" # #----------------------------------------------------------------------- # @@ -37,15 +45,16 @@ function check_for_preexist_dir() { #----------------------------------------------------------------------- # if [ "$#" -ne 2 ]; then + print_err_msg_exit " Incorrect number of arguments specified: - script/function name = \"${crnt_func}\" - number of arguments specified = $# + Function name: \"${func_name}\" + Number of arguments specified: $# Usage: - ${crnt_func} dir preexisting_dir_method + ${func_name} dir preexisting_dir_method where the arguments are defined as follows: @@ -55,7 +64,8 @@ where the arguments are defined as follows: preexisting_dir_method: String specifying the action to take if a preexisting version of dir is found. Valid values are \"delete\", \"rename\", and \"quit\". -" 1>@2 +" + fi # #----------------------------------------------------------------------- @@ -69,6 +79,17 @@ where the arguments are defined as follows: # #----------------------------------------------------------------------- # +# Set the valid values that preexisting_dir_method can take on and check +# to make sure the specified value is valid. +# +#----------------------------------------------------------------------- +# + local valid_vals_preexisting_dir_method=( "delete" "rename" "quit" ) + check_var_valid_value "preexisting_dir_method" \ + "valid_vals_preexisting_dir_method" +# +#----------------------------------------------------------------------- +# # Check if dir already exists. If so, act depending on the value of # preexisting_dir_method. # @@ -76,7 +97,7 @@ where the arguments are defined as follows: # if [ -d "$dir" ]; then - case $preexisting_dir_method in + case ${preexisting_dir_method} in # #----------------------------------------------------------------------- # @@ -111,12 +132,12 @@ where the arguments are defined as follows: done print_info_msg "$VERBOSE" " -Directory already exists: +Specified directory (dir) already exists: dir = \"$dir\" Moving (renaming) preexisting directory to: - old_dir = \"$old_dir\"" + old_dir = \"${old_dir}\"" - mv_vrfy "$dir" "$old_dir" + mv_vrfy "$dir" "${old_dir}" ;; # #----------------------------------------------------------------------- @@ -131,26 +152,9 @@ Moving (renaming) preexisting directory to: "quit") print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": Directory already exists: +Specified directory (dir) already exists: dir = \"$dir\"" ;; -# -#----------------------------------------------------------------------- -# -# If preexisting_dir_method is set to a disallowed value, we simply exit -# with a nonzero status. Note that "exit" is different than "return" -# because it will cause the calling script (in which this file/function -# is sourced) to stop execution. 
-# -#----------------------------------------------------------------------- -# - *) - - print_err_msg_exit "\ -Disallowed value for \"preexisting_dir_method\": - preexisting_dir_method = \"$preexisting_dir_method\" -Allowed values are: \"delete\" \"rename\" \"quit\"" - ;; esac @@ -164,6 +168,7 @@ Allowed values are: \"delete\" \"rename\" \"quit\"" #----------------------------------------------------------------------- # { restore_shell_opts; } > /dev/null 2>&1 + } diff --git a/ush/check_var_valid_value.sh b/ush/bash_utils/check_var_valid_value.sh old mode 100755 new mode 100644 similarity index 76% rename from ush/check_var_valid_value.sh rename to ush/bash_utils/check_var_valid_value.sh index b00f0e911..2e2d35ddf --- a/ush/check_var_valid_value.sh +++ b/ush/bash_utils/check_var_valid_value.sh @@ -19,17 +19,41 @@ function check_var_valid_value() { # #----------------------------------------------------------------------- # +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# # Check arguments. # #----------------------------------------------------------------------- # if [ "$#" -lt 2 ] || [ "$#" -gt 3 ]; then - print_err_msg_exit "\ -Function \"${FUNCNAME[0]}\": Incorrect number of arguments specified. + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + Usage: - ${FUNCNAME[0]} var_name valid_var_values_array_name [msg] + ${func_name} var_name valid_var_values_array_name [msg] where the arguments are defined as follows: diff --git a/ush/bash_utils/count_files.sh b/ush/bash_utils/count_files.sh new file mode 100644 index 000000000..633bbf7ce --- /dev/null +++ b/ush/bash_utils/count_files.sh @@ -0,0 +1,86 @@ +# +#----------------------------------------------------------------------- +# +# This function returns the number of files in the current directory +# that end with the specified extension (file_extension). +# +#----------------------------------------------------------------------- +# +function count_files() { +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. +# +#----------------------------------------------------------------------- +# + { save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). 
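Per its header comment, the new count_files utility reports how many files
in the current directory end with a given extension, so the natural way to
use it is through a command substitution. A hypothetical example; the
directory, the extension, and the assumption that print_info_msg writes its
single argument to stdout are all illustrative:

  cd_vrfy "${GRID_DIR}"
  num_nc_files=$( count_files "nc" )   # number of *.nc files in GRID_DIR
  print_info_msg "Found ${num_nc_files} NetCDF file(s) in \"${GRID_DIR}\"."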
+# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Check arguments. +# +#----------------------------------------------------------------------- +# + if [ "$#" -ne 1 ]; then + + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + +Usage: + + ${func_name} file_extension + +where file_extension is the file extension to use for counting files. +The file count returned will be equal to the number of files in the cur- +rent directory that end with \".${file_extension}\". +" + + fi +# +#----------------------------------------------------------------------- +# +# Count the number of files and then print it to stdout. +# +#----------------------------------------------------------------------- +# + local file_extension="$1" + local glob_pattern="*.${file_extension}" + local num_files=$( ls -1 ${glob_pattern} 2>/dev/null | wc -l ) + print_info_msg "${num_files}" +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. +# +#----------------------------------------------------------------------- +# + { restore_shell_opts; } > /dev/null 2>&1 + +} + diff --git a/ush/filesys_cmds_vrfy.sh b/ush/bash_utils/filesys_cmds_vrfy.sh similarity index 72% rename from ush/filesys_cmds_vrfy.sh rename to ush/bash_utils/filesys_cmds_vrfy.sh index a938251de..d11ec4720 100644 --- a/ush/filesys_cmds_vrfy.sh +++ b/ush/bash_utils/filesys_cmds_vrfy.sh @@ -22,21 +22,53 @@ function filesys_cmd_vrfy() { # #----------------------------------------------------------------------- # -# Get the name of this function as well as information about the calling -# script or function. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). # #----------------------------------------------------------------------- # - local crnt_func="${FUNCNAME[0]}" -# Note: -# Below, the index into BASH_SOURCE and FUNCNAME is 2 (not 1 as is usu- + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Get information about the script or function that calls this function. +# Note that caller_name will be set as follows: +# +# 1) If the caller is a function, caller_name will be set to the name of +# that function. +# 2) If the caller is a sourced script, caller_name will be set to +# "script". Note that a sourced script cannot be the top level +# script since by defintion, it is sourced by another script or func- +# tion. 
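filesys_cmd_vrfy is written to be reached only through thin per-command
wrappers such as cp_vrfy, mv_vrfy, mkdir_vrfy, and cd_vrfy, which is why the
comments in this file index FUNCNAME and BASH_SOURCE with 2 rather than 1.
The wrapper definitions are not part of this hunk; presumably they amount to
something like the following sketch:

  function cp_vrfy() {
    filesys_cmd_vrfy "cp" "$@"
  }
  function mkdir_vrfy() {
    filesys_cmd_vrfy "mkdir" "$@"
  }
  # Typical use in a workflow script:
  mkdir_vrfy -p "${EXPTDIR}"
  cp_vrfy "${baseline_config_fp}" "${EXPTDIR}"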
+# 3) If the caller is the top-level script, caller_name will be set to +# "main". +# +# Thus, if caller_name is set to "script" or "main", the caller is a +# script, and if it is set to anything else, the caller is a function. +# +# Below, the index into FUNCNAME and BASH_SOURCE is 2 (not 1 as is usu- # ally the case) because this function is called by functions such as # cp_vrfy, mv_vrfy, rm_vrfy, ln_vrfy, mkdir_vrfy, and cd_vrfy, but these -# are just wrappers, and in the error and informational messages we are -# really interested in the scripts/functions that call these wrappers. - local caller_path=$( readlink -f "${BASH_SOURCE[2]}" ) - local caller_filename=$( basename "${caller_path}" ) - local caller_dir=$( dirname "${caller_path}" ) +# are just wrappers, and in the error and informational messages, we are +# really interested in the scripts/functions that in turn call these +# wrappers. +# +#----------------------------------------------------------------------- +# + local caller_fp=$( readlink -f "${BASH_SOURCE[2]}" ) + local caller_fn=$( basename "${caller_fp}" ) + local caller_dir=$( dirname "${caller_fp}" ) local caller_name="${FUNCNAME[2]}" # #----------------------------------------------------------------------- @@ -45,20 +77,20 @@ function filesys_cmd_vrfy() { # #----------------------------------------------------------------------- # - if [ "$#" -lt 2 ]; then + if [ "$#" -lt 1 ]; then print_err_msg_exit " -At least two arguments must be specified: +Incorrect number of arguments specified: - script/function name = \"${crnt_func}\" - number of arguments specified = $# + Function name: \"${func_name}\" + Number of arguments specified: $# Usage: - ${crnt_func} cmd args_to_cmd + ${func_name} cmd [args_to_cmd] -where \"cmd\" is the command to execute and \"args_to_cmd\" are the options -and arguments to pass to that command. +where \"cmd\" is the name of the command to execute and \"args_to_cmd\" +are zero or more options and arguments to pass to that command. " fi @@ -68,8 +100,8 @@ and arguments to pass to that command. # The first argument to this function is the command to execute while # the remaining ones are the arguments to that command. Extract the # command and save it in the variable "cmd". Then shift the argument -# list so that $@ contains the arguments to the command but not the com- -# mand itself. +# list so that $@ contains the arguments to the command but not the +# name of the command itself. # #----------------------------------------------------------------------- # @@ -108,13 +140,24 @@ and arguments to pass to that command. # #----------------------------------------------------------------------- # - if [ $exit_code -ne 0 ]; then + if [ "${caller_name}" = "main" ] || \ + [ "${caller_name}" = "script" ]; then + script_or_function="the script" + else + script_or_function="function \"${caller_name}\"" + fi + + if [ ${exit_code} -ne 0 ]; then + print_err_msg_exit "\ -Call to function \"${cmd}_vrfy\" failed. This function was called: - From script/function: \"${caller_name}\" (This gets set to \"source\" for a script, or to \"main\" for the top-level script.) - In file: \"${caller_path}\" +Call to function \"${cmd}_vrfy\" failed. 
This function was called from +${script_or_function} in file: + + \"${caller_fp}\" + Error message from \"${cmd}_vrfy\" function's \"$cmd\" operation: $output" + fi # #----------------------------------------------------------------------- @@ -140,13 +183,16 @@ $output" #----------------------------------------------------------------------- # if [ -n "$output" ]; then + print_info_msg " \"${cmd}_vrfy\" operation returned with a message. This command was -issued: - From script/function: \"${caller_name}\" (This gets set to \"source\" for a script, or to \"main\" for the top-level script.) - In file: \"${caller_path}\" +issued from ${script_or_function} in file: + + \"${caller_fp}\" + Message from \"${cmd}_vrfy\" function's \"$cmd\" operation: $output" + fi # #----------------------------------------------------------------------- @@ -157,6 +203,7 @@ $output" #----------------------------------------------------------------------- # { restore_shell_opts; } > /dev/null 2>&1 + } diff --git a/ush/interpol_to_arbit_CRES.sh b/ush/bash_utils/interpol_to_arbit_CRES.sh old mode 100755 new mode 100644 similarity index 80% rename from ush/interpol_to_arbit_CRES.sh rename to ush/bash_utils/interpol_to_arbit_CRES.sh index 98291abb9..74c42a89e --- a/ush/interpol_to_arbit_CRES.sh +++ b/ush/bash_utils/interpol_to_arbit_CRES.sh @@ -8,7 +8,7 @@ # #----------------------------------------------------------------------- # -function interpol_to_arbit_CRES () { +function interpol_to_arbit_CRES() { # #----------------------------------------------------------------------- # @@ -21,16 +21,41 @@ function interpol_to_arbit_CRES () { # #----------------------------------------------------------------------- # +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# # Check arguments. # #----------------------------------------------------------------------- # if [ "$#" -ne 3 ]; then - print_err_msg_exit "\ -Incorrect number of arguments specified. 
Usage: + print_err_msg_exit " +Incorrect number of arguments specified: - ${FUNCNAME[0]} RES RES_array prop_array + Function name: \"${func_name}\" + Number of arguments specified: $# + +Usage: + + ${func_name} RES RES_array prop_array where the arguments are defined as follows: @@ -127,4 +152,3 @@ where the arguments are defined as follows: } - diff --git a/ush/is_array.sh b/ush/bash_utils/is_array.sh similarity index 68% rename from ush/is_array.sh rename to ush/bash_utils/is_array.sh index 8f050e0cb..16be2fbb7 100644 --- a/ush/is_array.sh +++ b/ush/bash_utils/is_array.sh @@ -12,7 +12,7 @@ # #----------------------------------------------------------------------- # -function is_array () { +function is_array() { # #----------------------------------------------------------------------- # @@ -25,21 +25,46 @@ function is_array () { # #----------------------------------------------------------------------- # +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# # Check arguments. # #----------------------------------------------------------------------- # - func_name="${FUNCNAME[0]}" - if [ "$#" -ne 1 ]; then - print_err_msg_exit "\ -Incorrect number of arguments specified. Usage: - ${func_name} var_name + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + +Usage: + + ${func_name} var_name where var_name is the name of the variable to check to determine whether or not it is an array. " + fi # #----------------------------------------------------------------------- @@ -65,10 +90,11 @@ or not it is an array. # #----------------------------------------------------------------------- # -# Return the variable "contains". +# Return the variable "is_an_array". # #----------------------------------------------------------------------- # return ${is_an_array} + } diff --git a/ush/iselementof.sh b/ush/bash_utils/iselementof.sh similarity index 74% rename from ush/iselementof.sh rename to ush/bash_utils/iselementof.sh index 41009216e..71cdd2160 100644 --- a/ush/iselementof.sh +++ b/ush/bash_utils/iselementof.sh @@ -30,7 +30,7 @@ # #----------------------------------------------------------------------- # -function iselementof () { +function iselementof() { # #----------------------------------------------------------------------- # @@ -43,15 +43,41 @@ function iselementof () { # #----------------------------------------------------------------------- # +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). 
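
For readers unfamiliar with the self-location idiom that the comment above describes (and that the three assignments which follow implement), here is a standalone sketch; it is not part of the patch, and the variable names are generic:

  #!/bin/bash
  # Standalone sketch (not from the patch).  BASH_SOURCE[0] is the path
  # of the file currently being executed or sourced, "readlink -f"
  # resolves it to an absolute path, and basename/dirname split off the
  # file name and its directory.

  this_fp=$( readlink -f "${BASH_SOURCE[0]}" )
  this_fn=$( basename "${this_fp}" )
  this_dir=$( dirname "${this_fp}" )

  echo "full path: ${this_fp}"
  echo "file name: ${this_fn}"
  echo "directory: ${this_dir}"

Because BASH_SOURCE[0] is populated whether the file is executed or sourced, the same three lines behave identically in standalone scripts and in sourced function files.
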
+# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# # Check arguments. # #----------------------------------------------------------------------- # if [ "$#" -ne 2 ]; then - print_err_msg_exit "\ -Incorrect number of arguments specified. Usage: - ${FUNCNAME[0]} str_to_match array_name + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + +Usage: + + ${func_name} str_to_match array_name where the arguments are defined as follows: @@ -105,5 +131,6 @@ where the arguments are defined as follows: #----------------------------------------------------------------------- # return $contains + } diff --git a/ush/print_msg.sh b/ush/bash_utils/print_msg.sh similarity index 61% rename from ush/print_msg.sh rename to ush/bash_utils/print_msg.sh index 5b0c3b03e..5a48d4ad0 100644 --- a/ush/print_msg.sh +++ b/ush/bash_utils/print_msg.sh @@ -27,15 +27,46 @@ function print_info_msg() { # #----------------------------------------------------------------------- # -# Get the name of this function as well as information about the calling -# script or function. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). # #----------------------------------------------------------------------- # - local crnt_func="${FUNCNAME[0]}" - local caller_path=$( readlink -f "${BASH_SOURCE[1]}" ) - local caller_filename=$( basename "${caller_path}" ) - local caller_dir=$( dirname "${caller_path}" ) + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Get information about the script or function that calls this function. +# Note that caller_name will be set as follows: +# +# 1) If the caller is a function, caller_name will be set to the name of +# that function. +# 2) If the caller is a sourced script, caller_name will be set to +# "script". Note that a sourced script cannot be the top level +# script since by defintion, it is sourced by another script or func- +# tion. +# 3) If the caller is the top-level script, caller_name will be set to +# "main". +# +# Thus, if caller_name is set to "script" or "main", the caller is a +# script, and if it is set to anything else, the caller is a function. 
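
As background for the caller-detection logic explained above and implemented just below: bash maintains the parallel call-stack arrays FUNCNAME and BASH_SOURCE, where index 0 is the currently executing function, index 1 is its caller, and so on, with the deepest frame of a directly executed script reported as "main". The following standalone sketch (not part of the patch; the function names are invented) prints that stack and shows why a thin wrapper such as cp_vrfy has to look one index further up than usual:

  #!/bin/bash
  # Standalone sketch (not from the patch; all names are made up).  It
  # prints the call stack that bash exposes through FUNCNAME and
  # BASH_SOURCE: index 1 is the immediate caller, and a thin wrapper
  # must look one level further up (index 2) to report the script or
  # function a user actually wrote.

  show_stack() {
    local i
    for (( i=0; i<${#FUNCNAME[@]}; i++ )); do
      echo "frame $i: FUNCNAME=${FUNCNAME[$i]}  BASH_SOURCE=${BASH_SOURCE[$i]}"
    done
  }

  wrapper() {    # plays the role of cp_vrfy, mv_vrfy, etc.
    show_stack
  }

  wrapper        # when run as a script, the deepest frame is "main"
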
+# +#----------------------------------------------------------------------- +# + local caller_fp=$( readlink -f "${BASH_SOURCE[1]}" ) + local caller_fn=$( basename "${caller_fp}" ) + local caller_dir=$( dirname "${caller_fp}" ) local caller_name="${FUNCNAME[1]}" # #----------------------------------------------------------------------- @@ -76,12 +107,12 @@ function print_info_msg() { print_err_msg_exit " Incorrect number of arguments specified: - script/function name = \"${crnt_func}\" - number of arguments specified = $# + Function name: \"${func_name}\" + Number of arguments specified: $# Usage: - ${crnt_func} [verbose] info_msg + ${func_name} [verbose] info_msg where the arguments are defined as follows: @@ -145,15 +176,46 @@ function print_err_msg_exit() { # #----------------------------------------------------------------------- # -# Get the name of this function as well as information about the calling -# script or function. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. # #----------------------------------------------------------------------- # - local crnt_func="${FUNCNAME[0]}" - local caller_path=$( readlink -f "${BASH_SOURCE[1]}" ) - local caller_filename=$( basename "${caller_path}" ) - local caller_dir=$( dirname "${caller_path}" ) + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Get information about the script or function that calls this function. +# Note that caller_name will be set as follows: +# +# 1) If the caller is a function, caller_name will be set to the name of +# that function. +# 2) If the caller is a sourced script, caller_name will be set to +# "script". Note that a sourced script cannot be the top level +# script since by defintion, it is sourced by another script or func- +# tion. +# 3) If the caller is the top-level script, caller_name will be set to +# "main". +# +# Thus, if caller_name is set to "script" or "main", the caller is a +# script, and if it is set to anything else, the caller is a function. +# +#----------------------------------------------------------------------- +# + local caller_fp=$( readlink -f "${BASH_SOURCE[1]}" ) + local caller_fn=$( basename "${caller_fp}" ) + local caller_dir=$( dirname "${caller_fp}" ) local caller_name="${FUNCNAME[1]}" # #----------------------------------------------------------------------- @@ -172,12 +234,28 @@ function print_err_msg_exit() { # #----------------------------------------------------------------------- # - msg_header=$( printf "\n\ + if [ "${caller_name}" = "main" ] || \ + [ "${caller_name}" = "script" ]; then + + msg_header=$( printf "\n\ ERROR: - From script/function: \"${caller_name}\" (This gets set to \"source\" for a script, or to \"main\" for the top-level script.) 
- In file: \"${caller_path}\" + From script in file: \"${caller_fn}\" + In directory: \"${caller_dir}\" " - ) + ) + + else + + msg_header=$( printf "\n\ +ERROR: + From function: \"${caller_name}\" + In file: \"${caller_fn}\" + In directory: \"${caller_dir}\" +" + ) + + fi + msg_footer=$( printf "\nExiting with nonzero status." ) # #----------------------------------------------------------------------- @@ -192,18 +270,18 @@ ERROR: print_err_msg_exit " Incorrect number of arguments specified: - script/function name = \"${crnt_func}\" - number of arguments specified = $# + Function name: \"${func_name}\" + Number of arguments specified: $# Usage: - ${crnt_func} err_msg + ${func_name} err_msg where err_msg is an optional error message to print to stderr. Note that a header and a footer are always added to err_msg. Thus, if err_- msg is not specified, the message that is printed will consist of only the header and footer. -" 1>&2 +" # #----------------------------------------------------------------------- # @@ -237,5 +315,6 @@ the header and footer. #----------------------------------------------------------------------- # { restore_shell_opts; } > /dev/null 2>&1 + } diff --git a/ush/process_args.sh b/ush/bash_utils/process_args.sh old mode 100755 new mode 100644 similarity index 88% rename from ush/process_args.sh rename to ush/bash_utils/process_args.sh index 4504e271c..3e7e2df34 --- a/ush/process_args.sh +++ b/ush/bash_utils/process_args.sh @@ -93,23 +93,48 @@ function process_args() { # #----------------------------------------------------------------------- # +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# # Check arguments. # #----------------------------------------------------------------------- # if [ "$#" -lt 1 ]; then - print_err_msg_exit "\ -Incorrect number of arguments specified. Usage: + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + +Usage: - ${FUNCNAME[0]} valid_arg_names_array_name \ - arg_val_pair1 \ - ... \ - arg_val_pairN + ${func_name} array_name_valid_arg_names \ + arg_val_pair1 \ + ... \ + arg_val_pairN where the arguments are defined as follows: - valid_arg_names_array_name: + array_name_valid_arg_names: The name of the array containing a list of valid argument names. arg_val_pair1 ... arg_val_pairN: @@ -118,8 +143,8 @@ where the arguments are defined as follows: arg1=\"val1\" ... argN=\"valN\" where each argument name (argI) needs to be in the list of valid argu- - ment names specified in valid_arg_names_array_name. Note that not all - the valid arguments listed in valid_arg_names_array_name need to be + ment names specified in array_name_valid_arg_names. Note that not all + the valid arguments listed in array_name_valid_arg_names need to be set, and the argument-value pairs can be in any order, i.e. 
they don't have to follow the order of arguments listed in valid_arg_names_ar- ray_name. @@ -133,7 +158,7 @@ where the arguments are defined as follows: # #----------------------------------------------------------------------- # - local valid_arg_names_array_name \ + local array_name_valid_arg_names \ valid_arg_names_at \ valid_arg_names \ num_valid_args \ @@ -142,8 +167,8 @@ where the arguments are defined as follows: arg_val_pair arg_name arg_value is_array \ err_msg cmd_line - valid_arg_names_array_name="$1" - valid_arg_names_at="${valid_arg_names_array_name}[@]" + array_name_valid_arg_names="$1" + valid_arg_names_at="${array_name_valid_arg_names}[@]" valid_arg_names=("${!valid_arg_names_at}") num_valid_args=${#valid_arg_names[@]} # diff --git a/ush/save_restore_shell_opts.sh b/ush/bash_utils/save_restore_shell_opts.sh similarity index 100% rename from ush/save_restore_shell_opts.sh rename to ush/bash_utils/save_restore_shell_opts.sh diff --git a/ush/set_file_param.sh b/ush/bash_utils/set_file_param.sh similarity index 84% rename from ush/set_file_param.sh rename to ush/bash_utils/set_file_param.sh index 1c92d2955..e368318da 100644 --- a/ush/set_file_param.sh +++ b/ush/bash_utils/set_file_param.sh @@ -19,13 +19,39 @@ function set_file_param() { # #----------------------------------------------------------------------- # +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# # Check arguments. # #----------------------------------------------------------------------- # if [ "$#" -ne 3 ]; then - print_err_msg_exit "\ -Incorrect number of arguments specified. Usage: + + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + +Usage: ${func_name} file_full_path param value @@ -115,7 +141,7 @@ Setting parameter \"$param\" in file \"$file\" to \"$value\" ..." regex_replace="\1$value" ;; # - "${SCRIPT_VAR_DEFNS_FN}") + "${GLOBAL_VAR_DEFNS_FN}") regex_search="(^\s*$param=)(\".*\")?([^ \"]*)?(\(.*\))?(\s*[#].*)?" regex_replace="\1$value\5" # set_bash_param "${file_full_path}" "$param" "$value" @@ -167,6 +193,6 @@ lar expression (regex_search): #----------------------------------------------------------------------- # { restore_shell_opts; } > /dev/null 2>&1 -} +} diff --git a/ush/count_files.sh b/ush/count_files.sh deleted file mode 100755 index 83e99bb50..000000000 --- a/ush/count_files.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash - -# -#----------------------------------------------------------------------- -# -# This function returns the number of files in the current directory -# that end with the specified extension (file_extension). -# -#----------------------------------------------------------------------- -# -. 
./source_funcs.sh - -function count_files() { - - if [ "$#" -ne 1 ]; then - print_err_msg_exit "\ -Incorrect number of arguments specified. Usage: - - ${FUNCNAME[0]} file_extension - -where file_extension is the file extension to use for counting files. -The file count returned will be equal to the number of files in the cur- -rent directory that end with \".${file_extension}\"." - fi - - local file_extension="$1" - local glob_pattern="*.${file_extension}" - local num_files=$( ls -1 ${glob_pattern} 2>/dev/null | wc -l ) - print_info_msg "${num_files}" - -} - - From e33d2a5c4d653d7f1a1e361bf8110d92e55d3122 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 5 Nov 2019 05:44:29 -0700 Subject: [PATCH 028/203] Add file missed in previous commit. --- ush/source_util_funcs.sh | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/ush/source_util_funcs.sh b/ush/source_util_funcs.sh index 5fac6a6df..45dd4697e 100644 --- a/ush/source_util_funcs.sh +++ b/ush/source_util_funcs.sh @@ -66,6 +66,14 @@ function source_util_funcs() { # #----------------------------------------------------------------------- # +# +# +#----------------------------------------------------------------------- +# + . ${bashutils_dir}/count_files.sh +# +#----------------------------------------------------------------------- +# # Source the file containing the function that checks for preexisting # directories and handles them according to the setting of the variable # PREEXISTING_DIR_METHOD [which is specified in the experiment/workflow From 2f29f6848238e0e78c09b87ed27d9b21f3d1ed2b Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 5 Nov 2019 05:45:33 -0700 Subject: [PATCH 029/203] Add bash utility functions: (1) print_input_args.sh is used to print out the values that the arguments to a script or function get set to (by the function process_args); (2) set_bash_params.sh is used to set a specified parameter to a specified value in a bash script (using sed). --- ush/bash_utils/print_input_args.sh | 171 +++++++++++++++++++++++++++++ ush/bash_utils/set_bash_param.sh | 144 ++++++++++++++++++++++++ 2 files changed, 315 insertions(+) create mode 100644 ush/bash_utils/print_input_args.sh create mode 100644 ush/bash_utils/set_bash_param.sh diff --git a/ush/bash_utils/print_input_args.sh b/ush/bash_utils/print_input_args.sh new file mode 100644 index 000000000..16494a8ae --- /dev/null +++ b/ush/bash_utils/print_input_args.sh @@ -0,0 +1,171 @@ +# +#----------------------------------------------------------------------- +# +# This file defines a function that prints to stdout the names and val- +# ues of a specified list of variables that are the valid arguments to +# the script or function that calls this function. It is mainly used +# for debugging to check that the argument values passed to the calling +# script/function have been set correctly. Note that if a global varia- +# ble named VERBOSE is not defined, the message will be printed out. If +# a global variable named VERBOSE is defined, then the message will be +# printed out only if VERBOSE is set to TRUE. +# +#----------------------------------------------------------------------- +# +function print_input_args() { +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. 
+# +#----------------------------------------------------------------------- +# + { save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Get information about the script or function that calls this function. +# Note that caller_name will be set as follows: +# +# 1) If the caller is a function, caller_name will be set to the name of +# that function. +# 2) If the caller is a sourced script, caller_name will be set to +# "script". Note that a sourced script cannot be the top level +# script since by defintion, it is sourced by another script or func- +# tion. +# 3) If the caller is the top-level script, caller_name will be set to +# "main". +# +# Thus, if caller_name is set to "script" or "main", the caller is a +# script, and if it is set to anything else, the caller is a function. +# +#----------------------------------------------------------------------- +# + local caller_fp=$( readlink -f "${BASH_SOURCE[1]}" ) + local caller_fn=$( basename "${caller_fp}" ) + local caller_dir=$( dirname "${caller_fp}" ) + local caller_name="${FUNCNAME[1]}" +# +#----------------------------------------------------------------------- +# +# Check arguments. +# +#----------------------------------------------------------------------- +# + if [ "$#" -ne 1 ]; then + + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + +Usage: + + ${func_name} array_name_valid_caller_args + +where array_name_valid_caller_args is the name of the array containing +the names of valid arguments that can be passed to the calling script or +function. +" + + fi +# +#----------------------------------------------------------------------- +# +# Declare local variables. +# +#----------------------------------------------------------------------- +# + local array_name_valid_caller_args \ + valid_caller_args \ + script_or_function \ + msg \ + num_valid_args \ + i \ + line +# +#----------------------------------------------------------------------- +# +# Set the array containing the names of the arguments that can be passed +# to the calling script/function. +# +#----------------------------------------------------------------------- +# + array_name_valid_caller_args="$1" + valid_caller_args="${array_name_valid_caller_args}[@]" + valid_caller_args=("${!valid_caller_args}") +# +#----------------------------------------------------------------------- +# +# Set the message to print to stdout. 
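
Two bash idioms do the heavy lifting in this function: retrieving an array's elements when only the array's name is available (the "name[@]" string plus ${!ref} indirection set up just above), and printing each variable's definition with "declare -p" (used in the loop that follows). A standalone sketch, not part of the patch, with invented variable names and values:

  #!/bin/bash
  # Standalone sketch (not from the patch; names and values are made up).
  # (1) Expand an array indirectly, given only its name.
  # (2) Print each listed variable's current definition with declare -p.

  cdate="2019052000"
  extrn_mdl_name="GSMGFS"
  my_valid_args=( "cdate" "extrn_mdl_name" )

  array_name="my_valid_args"      # only the name is passed around
  ref="${array_name}[@]"
  elems=( "${!ref}" )             # indirect expansion of the array

  for var in "${elems[@]}"; do
    declare -p "$var"             # e.g.  declare -- cdate="2019052000"
  done

Because declare -p reproduces each variable exactly as bash would need to re-create it, the assembled message can show array-valued arguments as readily as scalars.
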
+# +#----------------------------------------------------------------------- +# + if [ "${caller_name}" = "main" ] || \ + [ "${caller_name}" = "script" ]; then + script_or_function="the script" + else + script_or_function="function \"${caller_name}\"" + fi + + msg=" +The arguments to ${script_or_function} in file + + \"${caller_fp}\" + +have been set as follows: +" + + num_valid_caller_args="${#valid_caller_args[@]}" + for (( i=0; i<${num_valid_caller_args}; i++ )); do + line=$( declare -p "${valid_caller_args[$i]}" ) + msg=$( printf "%s\n%s" "$msg" " $line" ) + done +# +#----------------------------------------------------------------------- +# +# If a global variable named VERBOSE is not defined, print out the mes- +# sage. If it is defined, print out the message only if VERBOSE is set +# to TRUE. +# +#----------------------------------------------------------------------- +# + if [ ! -v VERBOSE ]; then + print_info_msg "$msg" + else + print_info_msg "$VERBOSE" "$msg" + fi +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. +# +#----------------------------------------------------------------------- +# + { restore_shell_opts; } > /dev/null 2>&1 + +} + diff --git a/ush/bash_utils/set_bash_param.sh b/ush/bash_utils/set_bash_param.sh new file mode 100644 index 000000000..fb3281ecc --- /dev/null +++ b/ush/bash_utils/set_bash_param.sh @@ -0,0 +1,144 @@ +# +#----------------------------------------------------------------------- +# +# This file defines a function that replaces placeholder values of vari- +# ables in several different types of files with actual values. +# +#----------------------------------------------------------------------- +# +function set_bash_param() { +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. +# +#----------------------------------------------------------------------- +# + { save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Check arguments. +# +#----------------------------------------------------------------------- +# + if [ "$#" -ne 3 ]; then + + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + +Usage: + + ${func_name} file_full_path param value + +where the arguments are defined as follows: + + file_full_path: + Full path to the file in which the specified parameter's value will be + set. + + param: + Name of the parameter whose value will be set. + + value: + Value to set the parameter to. 
+" + + fi +# +#----------------------------------------------------------------------- +# +# Set local variables to appropriate input arguments. +# +#----------------------------------------------------------------------- +# + local file_full_path="$1" + local param="$2" + local value="$3" +# +#----------------------------------------------------------------------- +# +# Extract just the file name from the full path. +# +#----------------------------------------------------------------------- +# + local file="${file_full_path##*/}" +# +#----------------------------------------------------------------------- +# +# If VERBOSE is set to "TRUE", print out an informational message. +# +#----------------------------------------------------------------------- +# + print_info_msg "$VERBOSE" " +Setting parameter \"$param\" in file \"$file\" to \"$value\" ..." +# +#----------------------------------------------------------------------- +# +# The procedure we use to set the value of the specified parameter de- +# pends on the file the parameter is in. Compare the file name to sev- +# eral known file names and set the regular expression to search for +# (regex_search) and the one to replace with (regex_replace) according- +# ly. See the default configuration file (config_defaults.sh) for defi- +# nitions of the known file names. +# +#----------------------------------------------------------------------- +# + local regex_search="(^\s*$param=)(\".*\")?([^ \"]*)?(\(.*\))?(\s*[#].*)?" + local regex_replace="\1\"$value\"\5" +# +#----------------------------------------------------------------------- +# +# Use grep to determine whether regex_search exists in the specified +# file. If so, perform the regex replacement using sed. If not, print +# out an error message and exit. +# +#----------------------------------------------------------------------- +# + grep -q -E "${regex_search}" "${file_full_path}" || { \ + print_err_msg_exit "\ +Specified file (file_full_path) does not contain the searched-for regu- +lar expression (regex_search): + file_full_path = \"${file_full_path}\" + param = \"$param\" + value = \"$value\" + regex_search = ${regex_search}" + }; + + sed -i -r -e "s%${regex_search}%${regex_replace}%" "${file_full_path}" +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. +# +#----------------------------------------------------------------------- +# + { restore_shell_opts; } > /dev/null 2>&1 + +} + From 09b3d4b208ab3db84b11a03989ddbf077b9c20d9 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 5 Nov 2019 05:48:19 -0700 Subject: [PATCH 030/203] Remove accidentally added vim swap file. 
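
A brief aside on the grep-then-sed replacement performed by the set_bash_param function added just above: the search regex anchors on the parameter name, optionally captures a quoted right-hand side and a trailing comment, and the replacement rewrites only the value. A standalone sketch, not part of the patch, using a made-up file, parameter, and value, and a simplified version of the regex:

  #!/bin/bash
  # Standalone sketch (not from the patch): confirm the target
  # assignment exists, then rewrite only its right-hand side in place,
  # preserving any trailing comment.  Assumes GNU grep/sed.

  cat > demo_config.sh << 'EOF'
  FCST_LEN_HRS="24"   # forecast length in hours
  EOF

  param="FCST_LEN_HRS"
  value="06"
  regex_search="(^\s*$param=)(\".*\")?(\s*[#].*)?"
  regex_replace="\1\"$value\"\3"

  grep -q -E "${regex_search}" demo_config.sh || {
    echo "parameter not found" >&2; exit 1;
  }
  sed -i -r -e "s%${regex_search}%${regex_replace}%" demo_config.sh

  cat demo_config.sh   # -> FCST_LEN_HRS="06"   # forecast length in hours
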
--- scripts/.exregional_make_orog.sh.swp | Bin 40960 -> 0 bytes
 1 file changed, 0 insertions(+), 0 deletions(-)
 delete mode 100644 scripts/.exregional_make_orog.sh.swp

diff --git a/scripts/.exregional_make_orog.sh.swp b/scripts/.exregional_make_orog.sh.swp
deleted file mode 100644
index a8edb41ea8a69dea7b17d6aa12f089c6b7aad684..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001
From: gerard ketefian
Date: Tue, 5 Nov 2019 05:51:04 -0700
Subject: [PATCH 031/203] Add files needed for specifying baseline tests of end-to-end workflow.

--- tests/baseline_configs/config.GSDstd01.sh | 97 +++++++++++++++++++++++
 tests/baseline_configs/config.GSDstd02.sh | 97 +++++++++++++++++++++++
 tests/experiments_list.txt                |  3 +
 3 files changed, 197 insertions(+)
 create mode 100644 tests/baseline_configs/config.GSDstd01.sh
 create mode 100644 tests/baseline_configs/config.GSDstd02.sh
 create mode 100644 tests/experiments_list.txt

diff --git a/tests/baseline_configs/config.GSDstd01.sh b/tests/baseline_configs/config.GSDstd01.sh
new file mode 100644
index 000000000..e955e9c5f
--- /dev/null
+++ b/tests/baseline_configs/config.GSDstd01.sh
@@ -0,0 +1,97 @@
+#
+#-----------------------------------------------------------------------
+#
+# This is the local (i.e. user-specific) experiment/workflow configura-
+# tion file. It is not tracked by the git repository.
+#
+#-----------------------------------------------------------------------
+#
+RUN_ENVIR="nco"
+RUN_ENVIR="community"
+
+MACHINE="HERA"
+ACCOUNT="gsd-fv3"
+QUEUE_DEFAULT="batch"
+QUEUE_HPSS="service"
+QUEUE_FCST="batch"
+
+VERBOSE="TRUE"
+
+# Can specify EXPT_BASEDIR if you want. If not specified, will default
+# to "$HOMErrfs/../expt_dirs".
+#EXPT_BASEDIR="/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/test_latest_20190927/expt_dirs" + +PREDEF_GRID_NAME="GSD_HRRR25km" +#PREDEF_GRID_NAME="GSD_HRRR13km" +#PREDEF_GRID_NAME="GSD_HRRR3km" +#PREDEF_GRID_NAME="GSD_HAFSV0.A" +#PREDEF_GRID_NAME="EMC_HI3km" +# +GRID_GEN_METHOD="JPgrid" +# +PREEXISTING_DIR_METHOD="delete" +QUILTING="TRUE" +# +USE_CCPP="TRUE" +CCPP_PHYS_SUITE="GFS" +CCPP_PHYS_SUITE="GSD" + +FCST_LEN_HRS="06" +LBC_UPDATE_INTVL_HRS="6" +#LBC_UPDATE_INTVL_HRS="12" +#LBC_UPDATE_INTVL_HRS="1" + + +if [ "${RUN_ENVIR}" = "nco" ]; then + + EXPT_SUBDIR="test_NCO" + + RUN="an_experiment" + COMINgfs="/scratch1/NCEPDEV/hwrf/noscrub/hafs-input/COMGFS" + +# STMP="/scratch2/NCEPDEV/stmp3/${USER}" +# PTMP="/scratch2/NCEPDEV/stmp3/${USER}" + + DATE_FIRST_CYCL="20190422" + DATE_LAST_CYCL="20190422" +# DATE_FIRST_CYCL="20181216" +# DATE_LAST_CYCL="20181216" + CYCL_HRS=( "00" ) + + EXTRN_MDL_NAME_ICS="FV3GFS" + EXTRN_MDL_NAME_LBCS="FV3GFS" + +else + + EXPT_SUBDIR="test_community" + EXPT_SUBDIR="yunheng_GSMGFS_20190520_GSDphys" + +# DATE_FIRST_CYCL="20190701" +# DATE_LAST_CYCL="20190701" + DATE_FIRST_CYCL="20190520" + DATE_LAST_CYCL="20190520" +# CYCL_HRS=( "00" "12" ) + CYCL_HRS=( "00" ) + + EXTRN_MDL_NAME_ICS="GSMGFS" +# EXTRN_MDL_NAME_ICS="FV3GFS" +# EXTRN_MDL_NAME_ICS="HRRRX" + + EXTRN_MDL_NAME_LBCS="GSMGFS" +# EXTRN_MDL_NAME_LBCS="FV3GFS" +# EXTRN_MDL_NAME_LBCS="RAPX" + + RUN_TASK_MAKE_GRID="TRUE" +# RUN_TASK_MAKE_GRID="FALSE" + GRID_DIR="/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/pregen_grid/GSD_HRRR25km" + + RUN_TASK_MAKE_OROG="TRUE" +# RUN_TASK_MAKE_OROG="FALSE" + OROG_DIR="/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/pregen_orog/GSD_HRRR25km" + + RUN_TASK_MAKE_SFC_CLIMO="TRUE" +# RUN_TASK_MAKE_SFC_CLIMO="FALSE" + SFC_CLIMO_DIR="/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/pregen_sfc_climo/GSD_HRRR25km" + +fi + diff --git a/tests/baseline_configs/config.GSDstd02.sh b/tests/baseline_configs/config.GSDstd02.sh new file mode 100644 index 000000000..e955e9c5f --- /dev/null +++ b/tests/baseline_configs/config.GSDstd02.sh @@ -0,0 +1,97 @@ +# +#----------------------------------------------------------------------- +# +# This is the local (i.e. user-specific) experiment/workflow configura- +# tion file. It is not tracked by the git repository. +# +#----------------------------------------------------------------------- +# +RUN_ENVIR="nco" +RUN_ENVIR="community" + +MACHINE="HERA" +ACCOUNT="gsd-fv3" +QUEUE_DEFAULT="batch" +QUEUE_HPSS="service" +QUEUE_FCST="batch" + +VERBOSE="TRUE" + +# Can specify EXPT_BASEDIR if you want. If not specified, will default +# to "$HOMErrfs/../expt_dirs". 
+#EXPT_BASEDIR="/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/test_latest_20190927/expt_dirs" + +PREDEF_GRID_NAME="GSD_HRRR25km" +#PREDEF_GRID_NAME="GSD_HRRR13km" +#PREDEF_GRID_NAME="GSD_HRRR3km" +#PREDEF_GRID_NAME="GSD_HAFSV0.A" +#PREDEF_GRID_NAME="EMC_HI3km" +# +GRID_GEN_METHOD="JPgrid" +# +PREEXISTING_DIR_METHOD="delete" +QUILTING="TRUE" +# +USE_CCPP="TRUE" +CCPP_PHYS_SUITE="GFS" +CCPP_PHYS_SUITE="GSD" + +FCST_LEN_HRS="06" +LBC_UPDATE_INTVL_HRS="6" +#LBC_UPDATE_INTVL_HRS="12" +#LBC_UPDATE_INTVL_HRS="1" + + +if [ "${RUN_ENVIR}" = "nco" ]; then + + EXPT_SUBDIR="test_NCO" + + RUN="an_experiment" + COMINgfs="/scratch1/NCEPDEV/hwrf/noscrub/hafs-input/COMGFS" + +# STMP="/scratch2/NCEPDEV/stmp3/${USER}" +# PTMP="/scratch2/NCEPDEV/stmp3/${USER}" + + DATE_FIRST_CYCL="20190422" + DATE_LAST_CYCL="20190422" +# DATE_FIRST_CYCL="20181216" +# DATE_LAST_CYCL="20181216" + CYCL_HRS=( "00" ) + + EXTRN_MDL_NAME_ICS="FV3GFS" + EXTRN_MDL_NAME_LBCS="FV3GFS" + +else + + EXPT_SUBDIR="test_community" + EXPT_SUBDIR="yunheng_GSMGFS_20190520_GSDphys" + +# DATE_FIRST_CYCL="20190701" +# DATE_LAST_CYCL="20190701" + DATE_FIRST_CYCL="20190520" + DATE_LAST_CYCL="20190520" +# CYCL_HRS=( "00" "12" ) + CYCL_HRS=( "00" ) + + EXTRN_MDL_NAME_ICS="GSMGFS" +# EXTRN_MDL_NAME_ICS="FV3GFS" +# EXTRN_MDL_NAME_ICS="HRRRX" + + EXTRN_MDL_NAME_LBCS="GSMGFS" +# EXTRN_MDL_NAME_LBCS="FV3GFS" +# EXTRN_MDL_NAME_LBCS="RAPX" + + RUN_TASK_MAKE_GRID="TRUE" +# RUN_TASK_MAKE_GRID="FALSE" + GRID_DIR="/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/pregen_grid/GSD_HRRR25km" + + RUN_TASK_MAKE_OROG="TRUE" +# RUN_TASK_MAKE_OROG="FALSE" + OROG_DIR="/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/pregen_orog/GSD_HRRR25km" + + RUN_TASK_MAKE_SFC_CLIMO="TRUE" +# RUN_TASK_MAKE_SFC_CLIMO="FALSE" + SFC_CLIMO_DIR="/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/pregen_sfc_climo/GSD_HRRR25km" + +fi + diff --git a/tests/experiments_list.txt b/tests/experiments_list.txt new file mode 100644 index 000000000..358143693 --- /dev/null +++ b/tests/experiments_list.txt @@ -0,0 +1,3 @@ + GSDstd01 | LBC_UPDATE_INTVL_HRS="3" | AAA=GGGGG_HH | PREDEF_GRID_NAME="GSD_HRRR25km" | EXTRN_MDL_NAME_ICS="FV3GFS" +GSDstd02 | + From 121731b821600228afd201dcb22771ba5d69ae06 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 5 Nov 2019 09:36:58 -0700 Subject: [PATCH 032/203] (1) Change variable name from "SCRIPT_VAR_DEFNS..." to "GLOBAL_VAR_DEFNS...". (2) Change statements that source the bash utilitiy functions from source_funcs.sh to source_util_funcs.sh. --- ush/config_defaults.sh | 4 ++-- ush/generate_FV3SAR_wflow.sh | 14 +++++------ ush/get_extrn_mdl_file_dir_info.sh | 6 ++--- ush/link_fix.sh | 12 +++++----- ush/setup.sh | 38 +++++++++++++++--------------- 5 files changed, 37 insertions(+), 37 deletions(-) diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index 957fc760c..0de7537d2 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -139,7 +139,7 @@ PTMP="/path/to/temporary/directory/ptmp" # WFLOW_XML_FN: # Name of the workflow XML file to be passed to rocoto. # -# SCRIPT_VAR_DEFNS_FN: +# GLOBAL_VAR_DEFNS_FN: # Name of file that is sourced by the worflow scripts to set variable # values. 
# @@ -173,7 +173,7 @@ DATA_TABLE_FN="data_table" MODEL_CONFIG_FN="model_configure" NEMS_CONFIG_FN="nems.configure" WFLOW_XML_FN="FV3SAR_wflow.xml" -SCRIPT_VAR_DEFNS_FN="var_defns.sh" +GLOBAL_VAR_DEFNS_FN="var_defns.sh" WRTCMP_PARAMS_TEMPLATE_FN="" # #----------------------------------------------------------------------- diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index 467bb9aeb..af78388af 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -14,7 +14,7 @@ ushdir=$(pwd) # #----------------------------------------------------------------------- # -. $ushdir/source_funcs.sh +. $ushdir/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -94,7 +94,7 @@ else CYCLE_DIR="$EXPTDIR/${CDATE_generic}" fi -set_file_param "${WFLOW_XML_FP}" "SCRIPT_VAR_DEFNS_FP" "${SCRIPT_VAR_DEFNS_FP}" +set_file_param "${WFLOW_XML_FP}" "GLOBAL_VAR_DEFNS_FP" "${GLOBAL_VAR_DEFNS_FP}" set_file_param "${WFLOW_XML_FP}" "CYCLE_DIR" "${CYCLE_DIR}" set_file_param "${WFLOW_XML_FP}" "ACCOUNT" "$ACCOUNT" set_file_param "${WFLOW_XML_FP}" "SCHED" "$SCHED" @@ -288,8 +288,8 @@ echo "RES = $RES" # RES="$RES_equiv" # CRES="$CRES_equiv" - set_file_param "${SCRIPT_VAR_DEFNS_FP}" "RES" "${RES}" - set_file_param "${SCRIPT_VAR_DEFNS_FP}" "CRES" "${CRES}" + set_file_param "${GLOBAL_VAR_DEFNS_FP}" "RES" "${RES}" + set_file_param "${GLOBAL_VAR_DEFNS_FP}" "CRES" "${CRES}" else # @@ -304,7 +304,7 @@ else if [ "${RUN_TASK_MAKE_GRID}" = "FALSE" ]; then $USHDIR/link_fix.sh \ verbose="FALSE" \ - script_var_defns_fp="${SCRIPT_VAR_DEFNS_FP}" \ + global_var_defns_fp="${GLOBAL_VAR_DEFNS_FP}" \ file_group="grid" || \ print_err_msg_exit "\ Call to script to create links to grid files failed." @@ -321,7 +321,7 @@ Call to script to create links to grid files failed." if [ "${RUN_TASK_MAKE_OROG}" = "FALSE" ]; then $USHDIR/link_fix.sh \ verbose="FALSE" \ - script_var_defns_fp="${SCRIPT_VAR_DEFNS_FP}" \ + global_var_defns_fp="${GLOBAL_VAR_DEFNS_FP}" \ file_group="orog" || \ print_err_msg_exit "\ Call to script to create links to orography files failed." @@ -339,7 +339,7 @@ Call to script to create links to orography files failed." if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "FALSE" ]; then $USHDIR/link_fix.sh \ verbose="FALSE" \ - script_var_defns_fp="${SCRIPT_VAR_DEFNS_FP}" \ + global_var_defns_fp="${GLOBAL_VAR_DEFNS_FP}" \ file_group="sfc_climo" || \ print_err_msg_exit "\ Call to script to create links to surface climatology files failed." diff --git a/ush/get_extrn_mdl_file_dir_info.sh b/ush/get_extrn_mdl_file_dir_info.sh index ca3e743c7..ee5661005 100755 --- a/ush/get_extrn_mdl_file_dir_info.sh +++ b/ush/get_extrn_mdl_file_dir_info.sh @@ -6,8 +6,8 @@ # #----------------------------------------------------------------------- # -. $SCRIPT_VAR_DEFNS_FP -. $USHDIR/source_funcs.sh +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -19,7 +19,7 @@ # #----------------------------------------------------------------------- # -function get_extrn_mdl_file_dir_info () { +function get_extrn_mdl_file_dir_info() { # #----------------------------------------------------------------------- # diff --git a/ush/link_fix.sh b/ush/link_fix.sh index 4551a75e1..43ce28a0c 100755 --- a/ush/link_fix.sh +++ b/ush/link_fix.sh @@ -22,7 +22,7 @@ USHDIR="${script_dir}" # #----------------------------------------------------------------------- # -. $USHDIR/source_funcs.sh +. 
$USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -43,7 +43,7 @@ USHDIR="${script_dir}" # valid_args=( \ "verbose" \ -"script_var_defns_fp" \ +"global_var_defns_fp" \ "file_group" \ ) process_args valid_args "$@" @@ -217,14 +217,14 @@ done RES=${RES:-""} if [ "$RES" = "$res" ] || [ "$RES" = "" ]; then cres="C${res}" - set_file_param "${SCRIPT_VAR_DEFNS_FP}" "RES" "${res}" - set_file_param "${SCRIPT_VAR_DEFNS_FP}" "CRES" "${cres}" + set_file_param "${GLOBAL_VAR_DEFNS_FP}" "RES" "${res}" + set_file_param "${GLOBAL_VAR_DEFNS_FP}" "CRES" "${cres}" elif [ "$RES" != "$res" ]; then print_err_msg_exit "\ The resolution (RES) specified in the variable definitions file -(script_var_defns_fp) does not match the resolution (res) found in this +(global_var_defns_fp) does not match the resolution (res) found in this script for the specified file group (file_group): - script_var_defns_fp = \"${script_var_defns_fp}\" + global_var_defns_fp = \"${global_var_defns_fp}\" RES = \"${RES}\" file_group = \"${file_group}\" res = \"${res}\" diff --git a/ush/setup.sh b/ush/setup.sh index 16b7dde5d..3f71f3f18 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -32,7 +32,7 @@ script_dir=$( dirname ${BASH_SOURCE[0]} ) # #----------------------------------------------------------------------- # -. ./source_funcs.sh +. ./source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -530,8 +530,8 @@ HH_FIRST_CYCL=${CYCL_HRS[0]} # # The current script should be located in the ush subdirectory of the # workflow directory. Thus, the workflow directory is the one above the -# directory of the current script. Get the path to this directory and -# save it in HOMErrfs. +# directory of the current script. Get the path to this latter directo- +# ry and save it in HOMErrfs. # HOMErrfs=${script_dir%/*} @@ -1335,7 +1335,7 @@ fi # 1) Copying the default workflow/experiment configuration script (spe- # fied by DEFAULT_CONFIG_FN and located in the shell script directory # USHDIR) to the run directory and renaming it to the name specified -# by SCRIPT_VAR_DEFNS_FN. +# by GLOBAL_VAR_DEFNS_FN. # # 2) Resetting the original values of the variables defined in this file # to their current values. This is necessary because these variables @@ -1353,8 +1353,8 @@ fi # #----------------------------------------------------------------------- # -SCRIPT_VAR_DEFNS_FP="$EXPTDIR/$SCRIPT_VAR_DEFNS_FN" -cp_vrfy ./${DEFAULT_CONFIG_FN} ${SCRIPT_VAR_DEFNS_FP} +GLOBAL_VAR_DEFNS_FP="$EXPTDIR/$GLOBAL_VAR_DEFNS_FN" +cp_vrfy ./${DEFAULT_CONFIG_FN} ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # @@ -1362,15 +1362,15 @@ cp_vrfy ./${DEFAULT_CONFIG_FN} ${SCRIPT_VAR_DEFNS_FP} #----------------------------------------------------------------------- # -# Read all lines of SCRIPT_VAR_DEFNS file into the variable line_list. -line_list=$( sed -r -e "s/(.*)/\1/g" ${SCRIPT_VAR_DEFNS_FP} ) +# Read all lines of GLOBAL_VAR_DEFNS file into the variable line_list. +line_list=$( sed -r -e "s/(.*)/\1/g" ${GLOBAL_VAR_DEFNS_FP} ) # # Loop through the lines in line_list and concatenate lines ending with # the line bash continuation character "\". 
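
The non-obvious detail in the read/printf loop that follows is that "read" without the -r option treats a trailing backslash as a line continuation, which is what makes rewriting the file join the continued lines. A standalone sketch, not part of the patch, with invented file contents:

  #!/bin/bash
  # Standalone sketch (not from the patch; the contents are made up).
  # "read" without -r treats a trailing backslash as a line
  # continuation, so each backslash-continued assignment comes back as
  # a single line when written out again.

  line_list='CYCL_HRS=( "00" \
  "12" )
  FCST_LEN_HRS="24"'

  while read crnt_line; do
    printf "%s\n" "${crnt_line}"
  done <<< "${line_list}"

  # Output:
  #   CYCL_HRS=( "00" "12" )
  #   FCST_LEN_HRS="24"
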
# -rm_vrfy ${SCRIPT_VAR_DEFNS_FP} +rm_vrfy ${GLOBAL_VAR_DEFNS_FP} while read crnt_line; do - printf "%s\n" "${crnt_line}" >> ${SCRIPT_VAR_DEFNS_FP} + printf "%s\n" "${crnt_line}" >> ${GLOBAL_VAR_DEFNS_FP} done <<< "${line_list}" # #----------------------------------------------------------------------- @@ -1408,7 +1408,7 @@ line_list=$( sed -r \ -e "s/^([ ]*)([^ ]+.*)/\2/g" \ -e "/^#.*/d" \ -e "/^$/d" \ - ${SCRIPT_VAR_DEFNS_FP} ) + ${GLOBAL_VAR_DEFNS_FP} ) echo echo "The variable \"line_list\" contains:" echo @@ -1444,11 +1444,11 @@ EOM # str_to_insert=${str_to_insert//$'\n'/\\n} # -# Insert str_to_insert into SCRIPT_VAR_DEFNS_FP right after the line +# Insert str_to_insert into GLOBAL_VAR_DEFNS_FP right after the line # containing the name of the interpreter. # REGEXP="(^#!.*)" -sed -i -r -e "s|$REGEXP|\1\n\n$str_to_insert\n|g" ${SCRIPT_VAR_DEFNS_FP} +sed -i -r -e "s|$REGEXP|\1\n\n$str_to_insert\n|g" ${GLOBAL_VAR_DEFNS_FP} @@ -1567,7 +1567,7 @@ Setting its value in the variable definitions file to an empty string." # Now place var_value on the right-hand side of the assignment statement # on the appropriate line in variable definitions file. # - set_file_param "${SCRIPT_VAR_DEFNS_FP}" "${var_name}" "${var_value}" + set_file_param "${GLOBAL_VAR_DEFNS_FP}" "${var_name}" "${var_value}" # # If var_name is empty, then a variable name was not found in the cur- # rent line in line_list. In this case, print out a warning and move on @@ -1596,7 +1596,7 @@ done <<< "${line_list}" # #----------------------------------------------------------------------- # -{ cat << EOM >> ${SCRIPT_VAR_DEFNS_FP} +{ cat << EOM >> ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- @@ -1648,7 +1648,7 @@ SFC_CLIMO_DIR="${SFC_CLIMO_DIR}" # #----------------------------------------------------------------------- # -SCRIPT_VAR_DEFNS_FP="${SCRIPT_VAR_DEFNS_FP}" +GLOBAL_VAR_DEFNS_FP="${GLOBAL_VAR_DEFNS_FP}" WRTCMP_PARAMS_TEMPLATE_FP="${WRTCMP_PARAMS_TEMPLATE_FP}" # #----------------------------------------------------------------------- @@ -1677,7 +1677,7 @@ definitions file returned with a nonzero status." # if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - { cat << EOM >> ${SCRIPT_VAR_DEFNS_FP} + { cat << EOM >> ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # @@ -1706,7 +1706,7 @@ file returned with a nonzero status." elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - { cat << EOM >> ${SCRIPT_VAR_DEFNS_FP} + { cat << EOM >> ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # @@ -1743,7 +1743,7 @@ fi # #----------------------------------------------------------------------- # -{ cat << EOM >> ${SCRIPT_VAR_DEFNS_FP} +{ cat << EOM >> ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # From 91fd9a0c5255acc0089cfe5ae596deadbd55021d Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 5 Nov 2019 10:15:48 -0700 Subject: [PATCH 033/203] (1) Change sourced scripts so that the contents of each are wrapped within a function, and that function is called following the function definition. This allows for local variables to be defined that have the same names (but are different than) as variables in the calling function/script. (2) In each such function, get the full path to the file, the name of the file, the directory it is in, and the name of the function. 
These are handy to have so they can be printed in informational and error messages (and possibly to source external scripts). (3) Improve informational and error messages. --- ush/compare_config_scripts.sh | 46 ++++++++++++++----- ush/generate_FV3SAR_wflow.sh | 72 +++++++++++++++++++++++++----- ush/get_extrn_mdl_file_dir_info.sh | 43 +++++++++++------- ush/set_extrn_mdl_params.sh | 40 +++++++++++++++++ ush/set_gridparams_GFDLgrid.sh | 44 +++++++++++++++--- ush/set_gridparams_JPgrid.sh | 43 ++++++++++++++++-- ush/set_predef_grid_params.sh | 39 ++++++++++++++++ ush/setup.sh | 60 +++++++++++++++++-------- 8 files changed, 321 insertions(+), 66 deletions(-) diff --git a/ush/compare_config_scripts.sh b/ush/compare_config_scripts.sh index 5f2341a1b..5685a0996 100644 --- a/ush/compare_config_scripts.sh +++ b/ush/compare_config_scripts.sh @@ -1,18 +1,35 @@ -# This file is always sourced by another script (i.e. it's never run in -# its own shell), so there's no need to put the #!/bin/some_shell on the -# first line. - # #----------------------------------------------------------------------- # -# This script checks that all variables defined in the local configura- -# tion script (whose file name is stored in the variable LOCAL_CONFIG_- -# FN) are also assigned a default value in the default configuration -# script (whose file name is stored in the variable DEFAULT_CONFIG_FN). +# This file defines and then calls a function that checks that all vari- +# ables defined in the local configuration script (whose file name is +# stored in the variable LOCAL_CONFIG_FN) are also assigned a default +# value in the default configuration script (whose file name is stored +# in the variable DEFAULT_CONFIG_FN). # #----------------------------------------------------------------------- # - +function compare_config_scripts() { +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +local scrfunc_fn=$( basename "${scrfunc_fp}" ) +local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# +local func_name="${FUNCNAME[0]}" # #----------------------------------------------------------------------- # @@ -20,7 +37,7 @@ # #----------------------------------------------------------------------- # -. ./source_funcs.sh +. ${scrfunc_dir}/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -120,4 +137,13 @@ done <<< "${var_list_local}" # { restore_shell_opts; } > /dev/null 2>&1 +} +# +#----------------------------------------------------------------------- +# +# Call the function defined above. +# +#----------------------------------------------------------------------- +# +compare_config_scripts diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index af78388af..ea65a0ca9 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -1,12 +1,42 @@ -#!/bin/sh -l - -# These need to be made machine-dependent. The following work only on -# Hera. 
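
The define-then-call pattern that this patch applies to the sourced scripts (wrap the file body in a function, invoke the function at the bottom of the file, and declare scratch variables local) can be seen in miniature below; the sketch is not part of the patch, and all names in it are invented:

  #!/bin/bash
  # Standalone sketch (not from the patch; file and variable names are
  # invented).  Wrapping a sourced file's body in a function and calling
  # it immediately lets scratch variables be declared "local", so they
  # cannot clobber same-named variables in the sourcing script.

  line_list="value that belongs to the top-level script"

  # --- what a wrapped, sourced file reduces to ---
  function set_my_params() {
    local line_list="scratch value used only inside the function"
    MY_PARAM="result handed back to the sourcing script"
  }
  set_my_params        # the call that follows the definition
  # --- end of the sourced-file stand-in ---

  echo "MY_PARAM  = ${MY_PARAM}"     # set by the function
  echo "line_list = ${line_list}"    # top-level value, left untouched
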
-module load intel/19.0.4.243 -module load netcdf/4.7.0 - - -ushdir=$(pwd) +#!/bin/bash -l +# +#----------------------------------------------------------------------- +# +# This file defines and then calls a function that sets up a forecast +# experiment and creates a workflow (according to the parameters speci- +# fied in the configuration file; see instructions). +# +#----------------------------------------------------------------------- +# +function generate_FV3SAR_wflow() { +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +local scrfunc_fn=$( basename "${scrfunc_fp}" ) +local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# +local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Set directories. +# +#----------------------------------------------------------------------- +# +ushdir="${scrfunc_dir}" # #----------------------------------------------------------------------- # @@ -24,9 +54,18 @@ ushdir=$(pwd) #----------------------------------------------------------------------- # { save_shell_opts; set -u -x; } > /dev/null 2>&1 - - -script_name=$( basename "${BASH_SOURCE[0]}" ) +# +#----------------------------------------------------------------------- +# +# Load modules. +# +#----------------------------------------------------------------------- +# +module purge +# These need to be made machine-dependent. The following work only on +# Hera. +module load intel/19.0.4.243 +module load netcdf/4.7.0 # #----------------------------------------------------------------------- # @@ -653,5 +692,14 @@ fi # { restore_shell_opts; } > /dev/null 2>&1 +} +# +#----------------------------------------------------------------------- +# +# Call the function defined above. +# +#----------------------------------------------------------------------- +# +generate_FV3SAR_wflow diff --git a/ush/get_extrn_mdl_file_dir_info.sh b/ush/get_extrn_mdl_file_dir_info.sh index ee5661005..14262ee3e 100755 --- a/ush/get_extrn_mdl_file_dir_info.sh +++ b/ush/get_extrn_mdl_file_dir_info.sh @@ -32,8 +32,19 @@ function get_extrn_mdl_file_dir_info() { # #----------------------------------------------------------------------- # -# Get the name of the current function. This is useful as part of error -# and/or informational messages. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. 
# #----------------------------------------------------------------------- # @@ -63,21 +74,13 @@ function get_extrn_mdl_file_dir_info() { # #----------------------------------------------------------------------- # -# If VERBOSE is set to "TRUE", print out values of arguments passed to -# this script. +# For debugging purposes, print out values of arguments passed to this +# script/function. Note that these will be printed out only if VERBOSE +# is set to TRUE. # #----------------------------------------------------------------------- # - msg=" -The arguments to script/function \"${script_name}\" have been set as -follows: -" - num_valid_args="${#valid_args[@]}" - for (( i=0; i<${num_valid_args}; i++ )); do - line=$( declare -p "${valid_args[$i]}" ) - msg="$msg"$( printf " $line\n" ) - done - print_info_msg "$VERBOSE" "$msg" + print_input_args valid_args # #----------------------------------------------------------------------- # @@ -88,10 +91,16 @@ follows: if [ 0 = 1 ]; then if [ "$#" -ne "13" ]; then - print_err_msg_exit "\ -Incorrect number of arguments specified. Usage: - ${FUNCNAME[0]} \ + print_err_msg_exit " +Incorrect number of arguments specified: + + Function name: \"${func_name}\" + Number of arguments specified: $# + +Usage: + + ${func_name} \ extrn_mdl_name \ anl_or_fcst \ cdate_FV3SAR \ diff --git a/ush/set_extrn_mdl_params.sh b/ush/set_extrn_mdl_params.sh index 56e86a8c9..de6073fae 100644 --- a/ush/set_extrn_mdl_params.sh +++ b/ush/set_extrn_mdl_params.sh @@ -1,6 +1,36 @@ # #----------------------------------------------------------------------- # +# This file defines and then calls a function that sets parameters rela- +# ting to the external model used for initial conditions (ICs) and the +# one used for lateral boundary conditions (LBCs). +# +#----------------------------------------------------------------------- +# +function set_extrn_mdl_params() { +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +local scrfunc_fn=$( basename "${scrfunc_fp}" ) +local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# +local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# # Set the system directory (i.e. location on disk, not on HPSS) in which # the files generated by the external model specified by EXTRN_MDL_- # NAME_ICS that are necessary for generating initial condition (IC) @@ -280,3 +310,13 @@ this machine and external model combination: esac fi + +} +# +#----------------------------------------------------------------------- +# +# Call the function defined above. +# +#----------------------------------------------------------------------- +# +set_extrn_mdl_params diff --git a/ush/set_gridparams_GFDLgrid.sh b/ush/set_gridparams_GFDLgrid.sh index c500270e9..e38b70297 100644 --- a/ush/set_gridparams_GFDLgrid.sh +++ b/ush/set_gridparams_GFDLgrid.sh @@ -1,7 +1,33 @@ -# This file is always sourced by another script (i.e. 
it's never run in -# its own shell), so there's no need to put the #!/bin/some_shell on the -# first line. - +# +#----------------------------------------------------------------------- +# +# This file defines and then calls a function that sets the parameters +# for a grid that is to be generated using the "GFDLgrid" grid genera- +# tion method (i.e. GRID_GEN_METHOD set to "GFDLgrid"). +# +#----------------------------------------------------------------------- +# +function set_gridparams_GFDLgrid() { +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +local scrfunc_fn=$( basename "${scrfunc_fp}" ) +local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# +local func_name="${FUNCNAME[0]}" # #----------------------------------------------------------------------- # @@ -319,5 +345,13 @@ jend_rgnl_wide_halo_T6SG = $jend_rgnl_wide_halo_T6SG)" # { restore_shell_opts; } > /dev/null 2>&1 - +} +# +#----------------------------------------------------------------------- +# +# Call the function defined above. +# +#----------------------------------------------------------------------- +# +set_gridparams_GFDLgrid diff --git a/ush/set_gridparams_JPgrid.sh b/ush/set_gridparams_JPgrid.sh index 21755d52a..21fcb9dd6 100644 --- a/ush/set_gridparams_JPgrid.sh +++ b/ush/set_gridparams_JPgrid.sh @@ -1,7 +1,33 @@ -# This file is always sourced by another script (i.e. it's never run in -# its own shell), so there's no need to put the #!/bin/some_shell on the -# first line. - +# +#----------------------------------------------------------------------- +# +# This file defines and then calls a function that sets the parameters +# for a grid that is to be generated using the "JPgrid" grid generation +# method (i.e. GRID_GEN_METHOD set to "JPgrid"). +# +#----------------------------------------------------------------------- +# +function set_gridparams_JPgrid() { +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +local scrfunc_fn=$( basename "${scrfunc_fp}" ) +local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# +local func_name="${FUNCNAME[0]}" # #----------------------------------------------------------------------- # @@ -39,4 +65,13 @@ mns_ny_T7_pls_wide_halo=$( bc -l <<< "-($ny_T7 + 2*$nhw_T7)" ) mns_ny_T7_pls_wide_halo=$( printf "%.0f\n" $mns_ny_T7_pls_wide_halo ) echo "mns_ny_T7_pls_wide_halo = $mns_ny_T7_pls_wide_halo" +} +# +#----------------------------------------------------------------------- +# +# Call the function defined above. 
+# +#----------------------------------------------------------------------- +# +set_gridparams_JPgrid diff --git a/ush/set_predef_grid_params.sh b/ush/set_predef_grid_params.sh index f85c521a1..6a2c39a39 100644 --- a/ush/set_predef_grid_params.sh +++ b/ush/set_predef_grid_params.sh @@ -1,6 +1,35 @@ # #----------------------------------------------------------------------- # +# This file defines and then calls a function that sets grid parameters +# for the specified predefined grid. +# +#----------------------------------------------------------------------- +# +function set_predef_grid_params() { +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +local scrfunc_fn=$( basename "${scrfunc_fp}" ) +local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. +# +#----------------------------------------------------------------------- +# +local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# # Set grid and other parameters according to the value of the predefined # domain (PREDEF_GRID_NAME). Note that the code will enter this script on- # ly if PREDEF_GRID_NAME has a valid (and non-empty) value. @@ -655,3 +684,13 @@ in WRTCMP_output_grid is not supported: fi +} +# +#----------------------------------------------------------------------- +# +# Call the function defined above. +# +#----------------------------------------------------------------------- +# +set_predef_grid_params + diff --git a/ush/setup.sh b/ush/setup.sh index 3f71f3f18..af52ebf01 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -1,30 +1,47 @@ -#!/bin/sh -l - # #----------------------------------------------------------------------- # -# This script sets parameters needed by the various scripts that are -# called by the rocoto workflow. This secondary set of parameters is -# calculated using the primary set of user-defined parameters in the -# default and local workflow/experiment configuration scripts (whose -# file names are defined below). This script then saves both sets of -# parameters in a variable-definitions script in the run directory that -# will be sourced by the various scripts called by the workflow. +# This file defines and then calls a function that sets a secondary set +# of parameters needed by the various scripts that are called by the +# FV3SAR rocoto community workflow. This secondary set of parameters is +# calculated using the primary set of user-defined parameters in the de- +# fault and custom experiment/workflow configuration scripts (whose file +# names are defined below). This script then saves both sets of parame- +# ters in a global variable definitions file (really a bash script) in +# the experiment directory. This file then gets sourced by the various +# scripts called by the tasks in the workflow. 
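To make the mechanism described above concrete, here is a tiny self-contained sketch (illustrative only; the file name and the two assignments are placeholders, not values from an actual experiment): the generated variable definitions file is nothing more than a list of bash assignments, written out much like the heredocs in the earlier setup.sh hunks, and a task script picks up both the user-specified and the derived parameters simply by sourcing it.

#!/bin/bash
# Sketch only (placeholder file name and values): build a miniature
# variable definitions file, then consume it the way a task script would.
GLOBAL_VAR_DEFNS_FP="./var_defns_example.sh"
cat << EOM > ${GLOBAL_VAR_DEFNS_FP}
GRID_GEN_METHOD="JPgrid"
NX_T7="1734"
EOM
# A workflow task script only needs to source the file to see the
# variables defined in it.
. ${GLOBAL_VAR_DEFNS_FP}
echo "GRID_GEN_METHOD=${GRID_GEN_METHOD}  NX_T7=${NX_T7}"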
# #----------------------------------------------------------------------- # - - +function setup() { +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +local scrfunc_fn=$( basename "${scrfunc_fp}" ) +local scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Get the name of this function. # #----------------------------------------------------------------------- # -# Set the current script's name and the directory in which it is loca- -# ted. +local func_name="${FUNCNAME[0]}" # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -script_dir=$( dirname ${BASH_SOURCE[0]} ) +# +# +#----------------------------------------------------------------------- +# +cd_vrfy ${scrfunc_dir} # #----------------------------------------------------------------------- # @@ -533,7 +550,7 @@ HH_FIRST_CYCL=${CYCL_HRS[0]} # directory of the current script. Get the path to this latter directo- # ry and save it in HOMErrfs. # -HOMErrfs=${script_dir%/*} +HOMErrfs=${scrfunc_dir%/*} USHDIR="$HOMErrfs/ush" SCRIPTSDIR="$HOMErrfs/scripts" @@ -1824,6 +1841,13 @@ Setup script completed successfully!!! # { restore_shell_opts; } > /dev/null 2>&1 - - +} +# +#----------------------------------------------------------------------- +# +# Call the function defined above. +# +#----------------------------------------------------------------------- +# +setup From 03b0c31095cc5cb5b2f2f3596502b759448b7703 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 5 Nov 2019 10:32:26 -0700 Subject: [PATCH 034/203] In link_fix.sh: (1) Get the full path to the file, the name of the file, and the directory it is in. These are handy to have so they can be printed in informational and error messages (and possibly to source external scripts). (2) Use the print_input_args function to print out the values that the arguments to this script get set to (instead of printing out these values in a local loop). --- ush/link_fix.sh | 44 ++++++++++++++++++++++---------------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/ush/link_fix.sh b/ush/link_fix.sh index 43ce28a0c..ffc97f03e 100755 --- a/ush/link_fix.sh +++ b/ush/link_fix.sh @@ -3,16 +3,25 @@ # #----------------------------------------------------------------------- # -# Get the name of the current script and the directory in which it is -# located. This script should be located in USHDIR, so set USHDIR to -# the script directory (USHDIR is needed in various places below or in +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). 
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# This script should be located in USHDIR, so set USHDIR to this
+# script's directory (USHDIR is needed in various places below or in
 # sourced scripts).
 #
 #-----------------------------------------------------------------------
 #
-script_name=$(basename ${BASH_SOURCE[0]})
-script_dir=$(dirname ${BASH_SOURCE[0]})
-USHDIR="${script_dir}"
+USHDIR="${scrfunc_dir}"
 #
 #-----------------------------------------------------------------------
 #
@@ -50,31 +59,22 @@ process_args valid_args "$@"
 #
 #-----------------------------------------------------------------------
 #
-# Source the variable definitions script and the function definitions
-# file.
+# For debugging purposes, print out values of arguments passed to this
+# script.  Note that these will be printed out only if VERBOSE is set to
+# TRUE.
 #
 #-----------------------------------------------------------------------
 #
-. ${script_var_defns_fp}
+print_input_args valid_args
 #
 #-----------------------------------------------------------------------
 #
-# If verbose is set to TRUE, print out what each valid argument has been
-# set to.
+# Source the variable definitions script and the function definitions
+# file.
 #
 #-----------------------------------------------------------------------
 #
-if [ "$verbose" = "TRUE" ]; then
-  num_valid_args="${#valid_args[@]}"
-  print_info_msg "
-The arguments to script/function \"${script_name}\" have been set as
-follows:
-" 1>&2
-  for (( i=0; i<${num_valid_args}; i++ )); do
-    line=$( declare -p "${valid_args[$i]}" )
-    printf " $line\n" 1>&2
-  done
-fi
+. ${global_var_defns_fp}
 #
 #-----------------------------------------------------------------------
 #
From 0a9fab1d2ee3d3f7ae64c738f14c4a51e5e39c0d Mon Sep 17 00:00:00 2001
From: gerard ketefian
Date: Tue, 5 Nov 2019 10:42:21 -0700
Subject: [PATCH 035/203] Updates to cmp_expt_to_baseline: (1) Get the full
 path to the file, the name of the file, and the directory it is in.  These
 are handy to have so they can be printed in informational and error messages
 (and possibly to source external scripts). (2) Source the bash utility
 functions so they can be used in the script. (3) Use the save_shell_opts and
 restore_shell_opts functions to set/restore shell options. (4) Replace
 printf with print_info_msg and print_err_msg_exit in a couple of places;
 still need to replace everywhere. (5) Use the count_files bash utility
 function to count the number of files having a given extension.

---
 ush/cmp_expt_to_baseline.sh | 61 +++++++++++++++++++++++++++++--------
 1 file changed, 49 insertions(+), 12 deletions(-)

diff --git a/ush/cmp_expt_to_baseline.sh b/ush/cmp_expt_to_baseline.sh
index 4961a817f..4cce2481e 100755
--- a/ush/cmp_expt_to_baseline.sh
+++ b/ush/cmp_expt_to_baseline.sh
@@ -16,23 +16,52 @@
 # Script has only been tested on theia
 #-----------------------------------------------------------------------
 
+# Do these need to be machine specific, e.g. by using modulefiles?
 module load intel
 module load nccmp
 #
 #-----------------------------------------------------------------------
 #
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Source bash utility functions. +# +#----------------------------------------------------------------------- +# +. ${scrfunc_dir}/source_util_funcs.sh +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. +# +#----------------------------------------------------------------------- +# +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# # Process arguments. # #----------------------------------------------------------------------- # -script_name=$( basename "${BASH_SOURCE[0]}" ) -if [ $# -ne 1 ] && [ $# -ne 2 ]; then +if [ $# -eq 0 ] || [ $# -gt 2 ]; then printf " -ERROR from script ${script_name}: +ERROR from script ${scrfunc_fn}: Only 1 or 2 arguments may be specified. Usage: - > ${script_name} expt_dir [baseline_dir] + > ${scrfunc_fn} expt_dir [baseline_dir] where expt_dir is the experiment directory and baseline_dir is an op- tional baseline directory. @@ -50,12 +79,10 @@ fi # expt_dir="$1" if [ ! -d "${expt_dir}" ]; then - printf "\n + print_err_msg_exit "\ The specified experiment directory (expt_dir) does not exist: expt_dir = \"$expt_dir\" -Exiting script with nonzero return code. -" - exit 1 +Exiting script with nonzero return code." fi # #----------------------------------------------------------------------- @@ -119,11 +146,10 @@ fi # #----------------------------------------------------------------------- # -printf " +print_info_msg " The experiment and baseline directories are: expt_dir = \"$expt_dir\" - baseline_dir = \"$baseline_dir\" -" + baseline_dir = \"$baseline_dir\"" # #----------------------------------------------------------------------- # @@ -237,7 +263,8 @@ Exiting script with nonzero exit code. #----------------------------------------------------------------------- # cd ${expt_dir}/$subdir - num_files=$( ls -1 *.${file_ext} 2>/dev/null | wc -l ) +# num_files=$( ls -1 *.${file_ext} 2>/dev/null | wc -l ) + num_files=$( count_files *.${file_ext} 2>/dev/null | wc -l ) printf " Number of files with extension \"${file_ext}\" in subdirectory \"$subdir\" of the experiment directory is: ${num_files} @@ -370,5 +397,15 @@ else fi printf "Final result of regression test: ${result_str}\n" +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. +# +#----------------------------------------------------------------------- +# +{ restore_shell_opts; } > /dev/null 2>&1 + exit ${exit_code} From c4734ff9b0f5c655edfeb08db266308d64e254bb Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 5 Nov 2019 11:09:56 -0700 Subject: [PATCH 036/203] (1) Change variable name from "SCRIPT_VAR_DEFNS..." to "GLOBAL_VAR_DEFNS...". (2) Other minor changes. 
--- ush/bash_utils/set_file_param.sh | 2 +- ush/generate_FV3SAR_wflow.sh | 4 ++-- ush/setup.sh | 18 ++++++++++-------- ush/templates/FV3SAR_wflow.xml | 20 ++++++++++---------- 4 files changed, 23 insertions(+), 21 deletions(-) diff --git a/ush/bash_utils/set_file_param.sh b/ush/bash_utils/set_file_param.sh index e368318da..ea643adfe 100644 --- a/ush/bash_utils/set_file_param.sh +++ b/ush/bash_utils/set_file_param.sh @@ -94,7 +94,7 @@ where the arguments are defined as follows: # #----------------------------------------------------------------------- # - print_info_msg "$VERBOSE" " + print_info_msg "$VERBOSE" "\ Setting parameter \"$param\" in file \"$file\" to \"$value\" ..." # #----------------------------------------------------------------------- diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index ea65a0ca9..93ce0df65 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -53,7 +53,7 @@ ushdir="${scrfunc_dir}" # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -403,7 +403,7 @@ fi if [ "${RUN_ENVIR}" != "nco" ]; then print_info_msg "$VERBOSE" " -Copying fixed files from system directory to the workflow directory..." +Copying fixed files from system directory to the experiment directory..." check_for_preexist_dir $FIXam "delete" mkdir -p $FIXam diff --git a/ush/setup.sh b/ush/setup.sh index af52ebf01..f460b3f3b 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -58,7 +58,7 @@ cd_vrfy ${scrfunc_dir} # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -1161,7 +1161,7 @@ if [ "$QUILTING" = "TRUE" ]; then PE_MEMBER01=$(( ${PE_MEMBER01} + ${WRTCMP_write_groups}*${WRTCMP_write_tasks_per_group} )) fi -print_info_msg "$VERBOSE" "\ +print_info_msg "$VERBOSE" " The number of MPI tasks for the forecast (including those for the write component if it is being used) are: PE_MEMBER01 = ${PE_MEMBER01}" @@ -1426,11 +1426,12 @@ line_list=$( sed -r \ -e "/^#.*/d" \ -e "/^$/d" \ ${GLOBAL_VAR_DEFNS_FP} ) -echo -echo "The variable \"line_list\" contains:" -echo -printf "%s\n" "${line_list}" -echo + +print_info_msg "$VERBOSE" " +The variable \"line_list\" contains: + +${line_list} +" # #----------------------------------------------------------------------- # @@ -1495,7 +1496,8 @@ while read crnt_line; do # if [ ! -z $var_name ]; then - printf "\n%s\n" "var_name = \"${var_name}\"" + print_info_msg "$VERBOSE" " +var_name = \"${var_name}\"" # # If the variable specified in var_name is set in the current environ- # ment (to either an empty or non-empty string), get its value and in- diff --git a/ush/templates/FV3SAR_wflow.xml b/ush/templates/FV3SAR_wflow.xml index 7985e9c69..6eedf4a68 100644 --- a/ush/templates/FV3SAR_wflow.xml +++ b/ush/templates/FV3SAR_wflow.xml @@ -47,7 +47,7 @@ The following are variables that are passed to the shell scripts that execute the various workflow tasks but are not otherwise used in the workflow XML. --> - + 50km # C384 --> 25km @@ -350,28 +350,28 @@ GRID_GEN_METHOD="JPgrid" # the global grid tiles varies somewhat as we move across a tile. 
# # * Tile 6 has arbitrarily been chosen as the tile to use to orient the -# global grid on the sphere (Earth). This is done by specifying lon_- -# ctr_T6 and lat_ctr_T6, which are the longitude and latitude (in de- +# global grid on the sphere (Earth). This is done by specifying LON_- +# CTR_T6 and LAT_CTR_T6, which are the longitude and latitude (in de- # grees) of the center of tile 6. # -# * Setting the Schmidt stretching factor stretch_fac to a value greater +# * Setting the Schmidt stretching factor STRETCH_FAC to a value greater # than 1 shrinks tile 6, while setting it to a value less than 1 (but # still greater than 0) expands tile 6. The remaining 5 tiles change # shape as necessary to maintain global coverage of the grid. # # * The cell size on a given global tile depends on both RES and -# stretch_fac (since changing RES changes the number of cells in the -# tile, and changing stretch_fac modifies the shape and size of the +# STRETCH_FAC (since changing RES changes the number of cells in the +# tile, and changing STRETCH_FAC modifies the shape and size of the # tile). # # * The regional grid is embedded within tile 6 (i.e. it doesn't extend # beyond the boundary of tile 6). Its exact location within tile 6 is # is determined by the starting and ending i and j indices # -# istart_rgnl_T6 -# jstart_rgnl_T6 -# iend_rgnl_T6 -# jend_rgnl_T6 +# ISTART_RGNL_T6 +# JSTART_RGNL_T6 +# IEND_RGNL_T6 +# JEND_RGNL_T6 # # where i is the grid index in the x direction and j is the grid index # in the y direction. @@ -383,12 +383,12 @@ GRID_GEN_METHOD="JPgrid" # 6). Tile 6 is often referred to as the "parent" tile of the region- # al grid. # -# * refine_ratio is the refinement ratio of the regional grid (tile 7) +# * REFINE_RATIO is the refinement ratio of the regional grid (tile 7) # with respect to the grid on its parent tile (tile 6), i.e. it is the # number of grid cells along the boundary of the regional grid that # abut one cell on tile 6. Thus, the cell size on the regional grid -# depends not only on RES and stretch_fac (because the cell size on -# tile 6 depends on these two parameters) but also on refine_ratio. +# depends not only on RES and STRETCH_FAC (because the cell size on +# tile 6 depends on these two parameters) but also on REFINE_RATIO. # Note that as on the tiles of the global grid, the cell size on the # regional grid is not uniform but varies as we move across the grid. # @@ -399,29 +399,29 @@ GRID_GEN_METHOD="JPgrid" # on each tile of the global grid. Must be "48", "96", "192", "384", # "768", "1152", or "3072" # -# lon_ctr_T6: +# LON_CTR_T6: # Longitude of the center of tile 6 (in degrees). # -# lat_ctr_T6: +# LAT_CTR_T6: # Latitude of the center of tile 6 (in degrees). # -# stretch_fac: +# STRETCH_FAC: # Stretching factor used in the Schmidt transformation applied to the # cubed sphere grid. # -# istart_rgnl_T6: +# ISTART_RGNL_T6: # i-index on tile 6 at which the regional grid (tile 7) starts. # -# iend_rgnl_T6: +# IEND_RGNL_T6: # i-index on tile 6 at which the regional grid (tile 7) ends. # -# jstart_rgnl_T6: +# JSTART_RGNL_T6: # j-index on tile 6 at which the regional grid (tile 7) starts. # -# jend_rgnl_T6: +# JEND_RGNL_T6: # j-index on tile 6 at which the regional grid (tile 7) ends. # -# refine_ratio: +# REFINE_RATIO: # Cell refinement ratio for the regional grid, i.e. the number of cells # in either the x or y direction on the regional grid (tile 7) that abut # one cell on its parent tile (tile 6). 
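As a minimal illustration (added for clarity; it is not part of any patch in this series), the default GFDLgrid index settings that appear in the next hunk (ISTART_RGNL_T6=10, IEND_RGNL_T6=374, JSTART_RGNL_T6=10, JEND_RGNL_T6=374, REFINE_RATIO=3) imply the regional (tile 7) dimensions computed below. The sketch mirrors the index arithmetic used later in ush/set_gridparams_GFDLgrid.sh but ignores the wide halo and the odd/even supergrid-index adjustments performed there.

#!/bin/bash
# Sketch only: tile 7 cell counts implied by the GFDLgrid index limits.
ISTART_RGNL_T6=10; IEND_RGNL_T6=374    # i-index range on tile 6
JSTART_RGNL_T6=10; JEND_RGNL_T6=374    # j-index range on tile 6
REFINE_RATIO=3                         # tile 7 cells per tile 6 cell (per direction)
# Number of tile 6 cells spanned by the regional domain in each direction.
nx_rgnl_T6=$(( IEND_RGNL_T6 - ISTART_RGNL_T6 + 1 ))   # = 365
ny_rgnl_T6=$(( JEND_RGNL_T6 - JSTART_RGNL_T6 + 1 ))   # = 365
# Refining each tile 6 cell REFINE_RATIO times gives the tile 7 counts.
NX_T7=$(( nx_rgnl_T6*REFINE_RATIO ))                  # = 1095
NY_T7=$(( ny_rgnl_T6*REFINE_RATIO ))                  # = 1095
echo "NX_T7=${NX_T7}  NY_T7=${NY_T7}"

NX_T7 and NY_T7 do not depend on the halo width, so the full calculation in set_gridparams_GFDLgrid.sh should arrive at the same values for these defaults.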
@@ -431,14 +431,14 @@ GRID_GEN_METHOD="JPgrid" if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then RES="384" - lon_ctr_T6=-97.5 - lat_ctr_T6=35.5 - stretch_fac=1.5 - istart_rgnl_T6=10 - iend_rgnl_T6=374 - jstart_rgnl_T6=10 - jend_rgnl_T6=374 - refine_ratio=3 + LON_CTR_T6=-97.5 + LAT_CTR_T6=35.5 + STRETCH_FAC=1.5 + ISTART_RGNL_T6=10 + IEND_RGNL_T6=374 + JSTART_RGNL_T6=10 + JEND_RGNL_T6=374 + REFINE_RATIO=3 # #----------------------------------------------------------------------- # @@ -446,53 +446,53 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then # without a global parent (i.e. for GRID_GEN_METHOD set to "JPgrid"). # These are: # -# lon_rgnl_ctr: +# LON_RGNL_CTR: # The longitude of the center of the grid (in degrees). # -# lat_rgnl_ctr: +# LAT_RGNL_CTR: # The latitude of the center of the grid (in degrees). # -# delx: +# DELX: # The cell size in the zonal direction of the regional grid (in meters). # -# dely: +# DELY: # The cell size in the meridional direction of the regional grid (in me- # ters). # -# nx_T7: +# NX_T7: # The number of cells in the zonal direction on the regional grid. # -# ny_T7: +# NY_T7: # The number of cells in the meridional direction on the regional grid. # -# nhw_T7: +# NHW_T7: # The width of the wide halo (in units of number of cells) to create # around the regional grid. A grid with a halo of this width will first # be created and stored in a grid specification file. This grid will # then be shaved down to obtain grids with 3-cell-wide and 4-cell-wide # halos. # -# a_grid_param: -# The "a" parameter used in the Jim Purser map projection/grid genera- -# tion method. +# ALPHA_JPGRID_PARAM: +# The alpha parameter used in the Jim Purser map projection/grid gene- +# ration method. # -# k_grid_param: -# The "k" parameter used in the Jim Purser map projection/grid genera- -# tion method. +# KAPPA_JPGRID_PARAM: +# The kappa parameter used in the Jim Purser map projection/grid gene- +# ration method. # #----------------------------------------------------------------------- # elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - lon_rgnl_ctr=-97.5 - lat_rgnl_ctr=35.5 - delx="3000.0" - dely="3000.0" - nx_T7=1000 - ny_T7=1000 - nhw_T7=6 - a_grid_param="0.21423" - k_grid_param="-0.23209" + LON_RGNL_CTR=-97.5 + LAT_RGNL_CTR=35.5 + DELX="3000.0" + DELY="3000.0" + NX_T7=1000 + NY_T7=1000 + NHW_T7=6 + ALPHA_JPGRID_PARAM="0.21423" + KAPPA_JPGRID_PARAM="-0.23209" fi # @@ -523,14 +523,14 @@ PREDEF_GRID_NAME="" # #----------------------------------------------------------------------- # -# Set the model integraton time step dt_atmos. This is the time step +# Set the model integraton time step DT_ATMOS. This is the time step # for the largest atmosphere model loop. It corresponds to the frequen- # cy with which the top level routine in the dynamics is called as well # as the frequency with which the physics is called. # #----------------------------------------------------------------------- # -dt_atmos=18 #Preliminary values: 18 for 3-km runs, 90 for 13-km runs +DT_ATMOS=18 #Preliminary values: 18 for 3-km runs, 90 for 13-km runs # #----------------------------------------------------------------------- # @@ -579,19 +579,19 @@ VERBOSE="TRUE" # #----------------------------------------------------------------------- # -layout_x="20" -layout_y="20" +LAYOUT_X="20" +LAYOUT_Y="20" # #----------------------------------------------------------------------- # -# Set the blocksize to use. This is the amount of data that is passed +# Set the BLOCKSIZE to use. 
This is the amount of data that is passed # into the cache at a time. The number of vertical columns per MPI task -# needs to be divisible by the blocksize; otherwise, unexpected results -# may occur. +# needs to be divisible by BLOCKSIZE; otherwise, unexpected results may +# occur. # #----------------------------------------------------------------------- # -blocksize="24" +BLOCKSIZE="24" # #----------------------------------------------------------------------- # @@ -600,14 +600,14 @@ blocksize="24" # QUILTING: # Flag for whether or not to use the write component for output. # -# write_groups: +# WRTCMP_write_groups: # The number of write groups (i.e. groups of MPI tasks) to use in the # write component. # -# write_tasks_per_group: +# WRTCMP_write_tasks_per_group: # The number of MPI tasks to allocate for each write group. # -# print_esmf: +# PRINT_ESMF: # Flag for whether or not to output extra (debugging) information from # ESMF routines. Must be ".true." or ".false.". Note that the write # component uses ESMF library routines to interpolate from the native @@ -618,7 +618,7 @@ blocksize="24" #----------------------------------------------------------------------- # QUILTING="TRUE" -print_esmf=".false." +PRINT_ESMF=".false." WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="20" diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index 89cb8648f..64b8f7870 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -106,7 +106,7 @@ cp_vrfy ${TEMPLATE_XML_FP} ${WFLOW_XML_FP} # #----------------------------------------------------------------------- # -PROC_RUN_FV3="${NUM_NODES}:ppn=${ncores_per_node}" +PROC_RUN_FV3="${NUM_NODES}:ppn=${NCORES_PER_NODE}" FHR=( $( seq 0 1 ${FCST_LEN_HRS} ) ) i=0 @@ -513,7 +513,6 @@ a nonzero status." Copying the modulefile required for running the CCPP-enabled version of the FV3SAR under NEMS to the experiment directory..." cp_vrfy ${NEMSfv3gfs_DIR}/NEMS/src/conf/modules.nems $EXPTDIR/modules.fv3 - # #----------------------------------------------------------------------- # @@ -622,18 +621,18 @@ print_info_msg "$VERBOSE" " Setting parameters in FV3 namelist file (FV3_NML_FP): FV3_NML_FP = \"${FV3_NML_FP}\"" # -# Set npx_T7 and npy_T7, which are just nx_T7 plus 1 and ny_T7 plus 1, +# Set npx_T7 and npy_T7, which are just NX_T7 plus 1 and NY_T7 plus 1, # respectively. These need to be set in the FV3SAR Fortran namelist # file. They represent the number of cell vertices in the x and y di- # rections on the regional grid (tile 7). # -npx_T7=$(($nx_T7+1)) -npy_T7=$(($ny_T7+1)) +npx_T7=$(( NX_T7+1 )) +npy_T7=$(( NY_T7+1 )) # # Set parameters. # -set_file_param "${FV3_NML_FP}" "blocksize" "$blocksize" -set_file_param "${FV3_NML_FP}" "layout" "${layout_x},${layout_y}" +set_file_param "${FV3_NML_FP}" "blocksize" "$BLOCKSIZE" +set_file_param "${FV3_NML_FP}" "layout" "${LAYOUT_X},${LAYOUT_Y}" set_file_param "${FV3_NML_FP}" "npx" "${npx_T7}" set_file_param "${FV3_NML_FP}" "npy" "${npy_T7}" @@ -645,13 +644,13 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then # are not necessarily the same [although assuming there is only one re- # gional domain within tile 6, i.e. assuming there is no tile 8, 9, etc, # there is no reason not to center tile 7 with respect to tile 6]. 
- set_file_param "${FV3_NML_FP}" "target_lon" "${lon_ctr_T6}" - set_file_param "${FV3_NML_FP}" "target_lat" "${lat_ctr_T6}" + set_file_param "${FV3_NML_FP}" "target_lon" "${LON_CTR_T6}" + set_file_param "${FV3_NML_FP}" "target_lat" "${LAT_CTR_T6}" elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - set_file_param "${FV3_NML_FP}" "target_lon" "${lon_rgnl_ctr}" - set_file_param "${FV3_NML_FP}" "target_lat" "${lat_rgnl_ctr}" + set_file_param "${FV3_NML_FP}" "target_lon" "${LON_RGNL_CTR}" + set_file_param "${FV3_NML_FP}" "target_lat" "${LAT_RGNL_CTR}" fi -set_file_param "${FV3_NML_FP}" "stretch_fac" "${stretch_fac}" +set_file_param "${FV3_NML_FP}" "stretch_fac" "${STRETCH_FAC}" set_file_param "${FV3_NML_FP}" "bc_update_interval" "${LBC_UPDATE_INTVL_HRS}" # # For GSD physics, set the parameter lsoil according to the external mo- diff --git a/ush/link_fix.sh b/ush/link_fix.sh index ffc97f03e..710b142cf 100755 --- a/ush/link_fix.sh +++ b/ush/link_fix.sh @@ -100,13 +100,13 @@ Creating links in the FIXsar directory to the grid files..." # fns_grid=( \ "C*_mosaic.nc" \ -"C*_grid.tile${TILE_RGNL}.halo${nh3_T7}.nc" \ -"C*_grid.tile${TILE_RGNL}.halo${nh4_T7}.nc" \ +"C*_grid.tile${TILE_RGNL}.halo${NH3_T7}.nc" \ +"C*_grid.tile${TILE_RGNL}.halo${NH4_T7}.nc" \ ) fns_orog=( \ -"C*_oro_data.tile${TILE_RGNL}.halo${nh0_T7}.nc" \ -"C*_oro_data.tile${TILE_RGNL}.halo${nh4_T7}.nc" \ +"C*_oro_data.tile${TILE_RGNL}.halo${NH0_T7}.nc" \ +"C*_oro_data.tile${TILE_RGNL}.halo${NH4_T7}.nc" \ ) sfc_climo_fields=( \ @@ -123,8 +123,8 @@ num_fields=${#sfc_climo_fields[@]} fns_sfc_climo=() for (( i=0; i<${num_fields}; i++ )); do ii=$((2*i)) - fns_sfc_climo[$ii]="C*.${sfc_climo_fields[$i]}.tile${TILE_RGNL}.halo${nh0_T7}.nc" - fns_sfc_climo[$ii+1]="C*.${sfc_climo_fields[$i]}.tile${TILE_RGNL}.halo${nh4_T7}.nc" + fns_sfc_climo[$ii]="C*.${sfc_climo_fields[$i]}.tile${TILE_RGNL}.halo${NH0_T7}.nc" + fns_sfc_climo[$ii+1]="C*.${sfc_climo_fields[$i]}.tile${TILE_RGNL}.halo${NH4_T7}.nc" done # #----------------------------------------------------------------------- @@ -278,7 +278,7 @@ done # if [ "${file_group}" = "grid" ]; then # Create link to grid file needed by the make_ic and make_lbc tasks. - filename="${cres}_grid.tile${TILE_RGNL}.halo${nh4_T7}.nc" + filename="${cres}_grid.tile${TILE_RGNL}.halo${NH4_T7}.nc" ln_vrfy -sf ${relative_or_null} $filename ${cres}_grid.tile${TILE_RGNL}.nc fi @@ -286,7 +286,7 @@ fi if [ "${file_group}" = "sfc_climo" ]; then tmp=( "${sfc_climo_fields[@]/#/${cres}.}" ) - fns_sfc_climo_with_halo=( "${tmp[@]/%/.tile${TILE_RGNL}.halo${nh4_T7}.nc}" ) + fns_sfc_climo_with_halo=( "${tmp[@]/%/.tile${TILE_RGNL}.halo${NH4_T7}.nc}" ) fns_sfc_climo_no_halo=( "${tmp[@]/%/.tile${TILE_RGNL}.nc}" ) cd_vrfy $FIXsar diff --git a/ush/set_gridparams_GFDLgrid.sh b/ush/set_gridparams_GFDLgrid.sh index e38b70297..6932e8c4e 100644 --- a/ush/set_gridparams_GFDLgrid.sh +++ b/ush/set_gridparams_GFDLgrid.sh @@ -50,18 +50,18 @@ local func_name="${FUNCNAME[0]}" # puts the index limits of the regional grid on the tile 6 grid, not its # supergrid. These are given by # -# istart_rgnl_T6 -# iend_rgnl_T6 -# jstart_rgnl_T6 -# jend_rgnl_T6 +# ISTART_RGNL_T6 +# IEND_RGNL_T6 +# JSTART_RGNL_T6 +# JEND_RGNL_T6 # # We can obtain the former from the latter by recalling that the super- # grid has twice the resolution of the original grid. 
Thus, # -# istart_rgnl_T6SG = 2*istart_rgnl_T6 - 1 -# iend_rgnl_T6SG = 2*iend_rgnl_T6 -# jstart_rgnl_T6SG = 2*jstart_rgnl_T6 - 1 -# jend_rgnl_T6SG = 2*jend_rgnl_T6 +# istart_rgnl_T6SG = 2*ISTART_RGNL_T6 - 1 +# iend_rgnl_T6SG = 2*IEND_RGNL_T6 +# jstart_rgnl_T6SG = 2*JSTART_RGNL_T6 - 1 +# jend_rgnl_T6SG = 2*JEND_RGNL_T6 # # These are obtained assuming that grid cells on tile 6 must either be # completely within the regional domain or completely outside of it, @@ -73,10 +73,10 @@ local func_name="${FUNCNAME[0]}" # #----------------------------------------------------------------------- # -istart_rgnl_T6SG=$(( 2*$istart_rgnl_T6 - 1 )) -iend_rgnl_T6SG=$(( 2*$iend_rgnl_T6 )) -jstart_rgnl_T6SG=$(( 2*$jstart_rgnl_T6 - 1 )) -jend_rgnl_T6SG=$(( 2*$jend_rgnl_T6 )) +istart_rgnl_T6SG=$(( 2*ISTART_RGNL_T6 - 1 )) +iend_rgnl_T6SG=$(( 2*IEND_RGNL_T6 )) +jstart_rgnl_T6SG=$(( 2*JSTART_RGNL_T6 - 1 )) +jend_rgnl_T6SG=$(( 2*JEND_RGNL_T6 )) # #----------------------------------------------------------------------- # @@ -99,27 +99,27 @@ jend_rgnl_T6SG=$(( 2*$jend_rgnl_T6 )) # the model needs later on by "shaving" layers of cells from this wide- # halo grid. Next, we describe how to calculate the above indices. # -# Let nhw_T7 denote the width of the "wide" halo in units of number of +# Let NHW_T7 denote the width of the "wide" halo in units of number of # grid cells on the regional grid (i.e. tile 7) that we'd like to have # along all four edges of the regional domain (left, right, bottom, and # top). To obtain the corresponding halo width in units of number of # cells on the tile 6 grid -- which we denote by nhw_T6 -- we simply di- -# vide nhw_T7 by the refinement ratio, i.e. +# vide NHW_T7 by the refinement ratio, i.e. # -# nhw_T6 = nhw_T7/refine_ratio +# nhw_T6 = NHW_T7/REFINE_RATIO # # The corresponding halo width on the tile 6 supergrid is then given by # # nhw_T6SG = 2*nhw_T6 -# = 2*nhw_T7/refine_ratio +# = 2*NHW_T7/REFINE_RATIO # # Note that nhw_T6SG must be an integer, but the expression for it de- # rived above may not yield an integer. To ensure that the halo has a -# width of at least nhw_T7 cells on the regional grid, we round up the +# width of at least NHW_T7 cells on the regional grid, we round up the # result of the expression above for nhw_T6SG, i.e. we redefine nhw_T6SG # to be # -# nhw_T6SG = ceil(2*nhw_T7/refine_ratio) +# nhw_T6SG = ceil(2*NHW_T7/REFINE_RATIO) # # where ceil(...) is the ceiling function, i.e. it rounds its floating # point argument up to the next larger integer. Since in bash division @@ -128,15 +128,15 @@ jend_rgnl_T6SG=$(( 2*$jend_rgnl_T6 )) # adding the denominator (of the argument of ceil(...) above) minus 1 to # the original numerator, i.e. by redefining nhw_T6SG to be # -# nhw_T6SG = (2*nhw_T7 + refine_ratio - 1)/refine_ratio +# nhw_T6SG = (2*NHW_T7 + REFINE_RATIO - 1)/REFINE_RATIO # # This trick works when dividing one positive integer by another. # # In order to calculate nhw_T6G using the above expression, we must -# first specify nhw_T7. Next, we specify an initial value for it by +# first specify NHW_T7. Next, we specify an initial value for it by # setting it to one more than the largest-width halo that the model ac- -# tually needs, which is nh4_T7. We then calculate nhw_T6SG using the -# above expression. Note that these values of nhw_T7 and nhw_T6SG will +# tually needs, which is NH4_T7. We then calculate nhw_T6SG using the +# above expression. 
Note that these values of NHW_T7 and nhw_T6SG will # likely not be their final values; their final values will be calcula- # ted later below after calculating the starting and ending indices of # the regional grid with wide halo on the tile 6 supergrid and then ad- @@ -144,8 +144,8 @@ jend_rgnl_T6SG=$(( 2*$jend_rgnl_T6 )) # #----------------------------------------------------------------------- # -nhw_T7=$(( $nh4_T7 + 1 )) -nhw_T6SG=$(( (2*nhw_T7 + refine_ratio - 1)/refine_ratio )) +NHW_T7=$(( NH4_T7+1 )) +nhw_T6SG=$(( (2*NHW_T7 + REFINE_RATIO - 1)/REFINE_RATIO )) # #----------------------------------------------------------------------- # @@ -164,10 +164,10 @@ nhw_T6SG=$(( (2*nhw_T7 + refine_ratio - 1)/refine_ratio )) # #----------------------------------------------------------------------- # -istart_rgnl_wide_halo_T6SG=$(( $istart_rgnl_T6SG - $nhw_T6SG )) -iend_rgnl_wide_halo_T6SG=$(( $iend_rgnl_T6SG + $nhw_T6SG )) -jstart_rgnl_wide_halo_T6SG=$(( $jstart_rgnl_T6SG - $nhw_T6SG )) -jend_rgnl_wide_halo_T6SG=$(( $jend_rgnl_T6SG + $nhw_T6SG )) +istart_rgnl_wide_halo_T6SG=$(( istart_rgnl_T6SG - nhw_T6SG )) +iend_rgnl_wide_halo_T6SG=$(( iend_rgnl_T6SG + nhw_T6SG )) +jstart_rgnl_wide_halo_T6SG=$(( jstart_rgnl_T6SG - nhw_T6SG )) +jend_rgnl_wide_halo_T6SG=$(( jend_rgnl_T6SG + nhw_T6SG )) # #----------------------------------------------------------------------- # @@ -178,9 +178,9 @@ jend_rgnl_wide_halo_T6SG=$(( $jend_rgnl_T6SG + $nhw_T6SG )) # starting indices on the tile 6 supergrid of the grid with wide halo # must be odd while the ending indices must be even. Thus, below, we # subtract 1 from the starting indices if they are even (which ensures -# that there will be at least nhw_T7 halo cells along the left and bot- +# that there will be at least NHW_T7 halo cells along the left and bot- # tom boundaries), and we add 1 to the ending indices if they are odd -# (which ensures that there will be at least nhw_T7 halo cells along the +# (which ensures that there will be at least NHW_T7 halo cells along the # right and top boundaries). # #----------------------------------------------------------------------- @@ -227,34 +227,34 @@ print_info_msg "$VERBOSE" " Original values of the halo width on the tile 6 supergrid and on the tile 7 grid are: nhw_T6SG = $nhw_T6SG - nhw_T7 = $nhw_T7" + NHW_T7 = ${NHW_T7}" -nhw_T6SG=$(( $istart_rgnl_T6SG - $istart_rgnl_wide_halo_T6SG )) -nhw_T6=$(( $nhw_T6SG/2 )) -nhw_T7=$(( $nhw_T6*$refine_ratio )) +nhw_T6SG=$(( istart_rgnl_T6SG - istart_rgnl_wide_halo_T6SG )) +nhw_T6=$(( nhw_T6SG/2 )) +NHW_T7=$(( nhw_T6*REFINE_RATIO )) print_info_msg "$VERBOSE" " Values of the halo width on the tile 6 supergrid and on the tile 7 grid AFTER adjustments are: nhw_T6SG = $nhw_T6SG - nhw_T7 = $nhw_T7" + NHW_T7 = ${NHW_T7}" # #----------------------------------------------------------------------- # # Calculate the number of cells that the regional domain (without halo) # has in each of the two horizontal directions (say x and y). We denote -# these by nx_T7 and ny_T7, respectively. These will be needed in the +# these by NX_T7 and NY_T7, respectively. These will be needed in the # "shave" steps later below. 
# #----------------------------------------------------------------------- # -nx_rgnl_T6SG=$(( $iend_rgnl_T6SG - $istart_rgnl_T6SG + 1 )) -nx_rgnl_T6=$(( $nx_rgnl_T6SG/2 )) -nx_T7=$(( $nx_rgnl_T6*$refine_ratio )) +nx_rgnl_T6SG=$(( iend_rgnl_T6SG - istart_rgnl_T6SG + 1 )) +nx_rgnl_T6=$(( nx_rgnl_T6SG/2 )) +NX_T7=$(( nx_rgnl_T6*REFINE_RATIO )) -ny_rgnl_T6SG=$(( $jend_rgnl_T6SG - $jstart_rgnl_T6SG + 1 )) -ny_rgnl_T6=$(( $ny_rgnl_T6SG/2 )) -ny_T7=$(( $ny_rgnl_T6*$refine_ratio )) +ny_rgnl_T6SG=$(( jend_rgnl_T6SG - jstart_rgnl_T6SG + 1 )) +ny_rgnl_T6=$(( ny_rgnl_T6SG/2 )) +NY_T7=$(( ny_rgnl_T6*REFINE_RATIO )) # # The following are set only for informational purposes. # @@ -263,8 +263,8 @@ ny_T6=$RES nx_T6SG=$(( $nx_T6*2 )) ny_T6SG=$(( $ny_T6*2 )) -prime_factors_nx_T7=$( factor $nx_T7 | sed -r -e 's/^[0-9]+: (.*)/\1/' ) -prime_factors_ny_T7=$( factor $ny_T7 | sed -r -e 's/^[0-9]+: (.*)/\1/' ) +prime_factors_nx_T7=$( factor ${NX_T7} | sed -r -e 's/^[0-9]+: (.*)/\1/' ) +prime_factors_ny_T7=$( factor ${NY_T7} | sed -r -e 's/^[0-9]+: (.*)/\1/' ) print_info_msg "$VERBOSE" " The number of cells in the two horizontal directions (x and y) on the @@ -284,10 +284,10 @@ are: The starting and ending i and j indices on the tile 6 grid used to generate this regional grid are: - istart_rgnl_T6 = $istart_rgnl_T6 - iend_rgnl_T6 = $iend_rgnl_T6 - jstart_rgnl_T6 = $jstart_rgnl_T6 - jend_rgnl_T6 = $jend_rgnl_T6 + ISTART_RGNL_T6 = ${ISTART_RGNL_T6} + IEND_RGNL_T6 = ${IEND_RGNL_T6} + JSTART_RGNL_T6 = ${JSTART_RGNL_T6} + JEND_RGNL_T6 = ${JEND_RGNL_T6} The corresponding starting and ending i and j indices on the tile 6 supergrid are: @@ -298,15 +298,15 @@ supergrid are: The refinement ratio (ratio of the number of cells in tile 7 that abut a single cell in tile 6) is: - refine_ratio = $refine_ratio + REFINE_RATIO = ${REFINE_RATIO} The number of cells in the two horizontal directions on the regional tile's/domain's (tile 7) grid WITHOUT A HALO are: - nx_T7 = $nx_T7 - ny_T7 = $ny_T7 + NX_T7 = ${NX_T7} + NY_T7 = ${NY_T7} -The prime factors of nx_T7 and ny_T7 are (useful for determining an MPI -task layout, i.e. layout_x and layout_y): +The prime factors of NX_T7 and NY_T7 are (useful for determining an MPI +task layout, i.e. LAYOUT_X and LAYOUT_Y): prime_factors_nx_T7: $prime_factors_nx_T7 prime_factors_ny_T7: $prime_factors_ny_T7" # @@ -314,26 +314,26 @@ task layout, i.e. layout_x and layout_y): # # For informational purposes, calculate the number of cells in each di- # rection on the regional grid that includes the wide halo (of width -# nhw_T7 cells). We denote these by nx_wide_halo_T7 and ny_wide_halo_- +# NHW_T7 cells). We denote these by nx_wide_halo_T7 and ny_wide_halo_- # T7, respectively. 
# #----------------------------------------------------------------------- # -nx_wide_halo_T6SG=$(( $iend_rgnl_wide_halo_T6SG - $istart_rgnl_wide_halo_T6SG + 1 )) -nx_wide_halo_T6=$(( $nx_wide_halo_T6SG/2 )) -nx_wide_halo_T7=$(( $nx_wide_halo_T6*$refine_ratio )) +nx_wide_halo_T6SG=$(( iend_rgnl_wide_halo_T6SG - istart_rgnl_wide_halo_T6SG + 1 )) +nx_wide_halo_T6=$(( nx_wide_halo_T6SG/2 )) +nx_wide_halo_T7=$(( nx_wide_halo_T6*REFINE_RATIO )) -ny_wide_halo_T6SG=$(( $jend_rgnl_wide_halo_T6SG - $jstart_rgnl_wide_halo_T6SG + 1 )) -ny_wide_halo_T6=$(( $ny_wide_halo_T6SG/2 )) -ny_wide_halo_T7=$(( $ny_wide_halo_T6*$refine_ratio )) +ny_wide_halo_T6SG=$(( jend_rgnl_wide_halo_T6SG - jstart_rgnl_wide_halo_T6SG + 1 )) +ny_wide_halo_T6=$(( ny_wide_halo_T6SG/2 )) +ny_wide_halo_T7=$(( ny_wide_halo_T6*REFINE_RATIO )) print_info_msg "$VERBOSE" " -nx_wide_halo_T7 = $nx_T7 \ +nx_wide_halo_T7 = ${NX_T7} \ (istart_rgnl_wide_halo_T6SG = $istart_rgnl_wide_halo_T6SG, \ iend_rgnl_wide_halo_T6SG = $iend_rgnl_wide_halo_T6SG)" print_info_msg "$VERBOSE" " -ny_wide_halo_T7 = $ny_T7 \ +ny_wide_halo_T7 = ${NY_T7} \ (jstart_rgnl_wide_halo_T6SG = $jstart_rgnl_wide_halo_T6SG, \ jend_rgnl_wide_halo_T6SG = $jend_rgnl_wide_halo_T6SG)" # diff --git a/ush/set_gridparams_JPgrid.sh b/ush/set_gridparams_JPgrid.sh index 21fcb9dd6..e45d4d82d 100644 --- a/ush/set_gridparams_JPgrid.sh +++ b/ush/set_gridparams_JPgrid.sh @@ -39,8 +39,8 @@ local func_name="${FUNCNAME[0]}" . ${USHDIR}/constants.sh echo echo "pi_geom = $pi_geom" -echo "degs_per_radian = $degs_per_radian" -echo "radius_Earth = $radius_Earth" +echo "degs_per_radian = ${degs_per_radian}" +echo "radius_Earth = ${radius_Earth}" # #----------------------------------------------------------------------- # @@ -48,22 +48,22 @@ echo "radius_Earth = $radius_Earth" # #----------------------------------------------------------------------- # -del_angle_x_SG=$( bc -l <<< "($delx/(2.0*$radius_Earth))*$degs_per_radian" ) -del_angle_x_SG=$( printf "%0.10f\n" $del_angle_x_SG ) +DEL_ANGLE_X_SG=$( bc -l <<< "($DELX/(2.0*${radius_Earth}))*${degs_per_radian}" ) +DEL_ANGLE_X_SG=$( printf "%0.10f\n" ${DEL_ANGLE_X_SG} ) -del_angle_y_SG=$( bc -l <<< "($dely/(2.0*$radius_Earth))*$degs_per_radian" ) -del_angle_y_SG=$( printf "%0.10f\n" $del_angle_y_SG ) +DEL_ANGLE_Y_SG=$( bc -l <<< "($DELY/(2.0*${radius_Earth}))*${degs_per_radian}" ) +DEL_ANGLE_Y_SG=$( printf "%0.10f\n" ${DEL_ANGLE_Y_SG} ) -echo "del_angle_x_SG = $del_angle_x_SG" -echo "del_angle_y_SG = $del_angle_y_SG" +echo "DEL_ANGLE_X_SG = ${DEL_ANGLE_X_SG}" +echo "DEL_ANGLE_Y_SG = ${DEL_ANGLE_Y_SG}" -mns_nx_T7_pls_wide_halo=$( bc -l <<< "-($nx_T7 + 2*$nhw_T7)" ) -mns_nx_T7_pls_wide_halo=$( printf "%.0f\n" $mns_nx_T7_pls_wide_halo ) -echo "mns_nx_T7_pls_wide_halo = $mns_nx_T7_pls_wide_halo" +MNS_NX_T7_PLS_WIDE_HALO=$( bc -l <<< "-(${NX_T7} + 2*${NHW_T7})" ) +MNS_NX_T7_PLS_WIDE_HALO=$( printf "%.0f\n" ${MNS_NX_T7_PLS_WIDE_HALO} ) +echo "MNS_NX_T7_PLS_WIDE_HALO = ${MNS_NX_T7_PLS_WIDE_HALO}" -mns_ny_T7_pls_wide_halo=$( bc -l <<< "-($ny_T7 + 2*$nhw_T7)" ) -mns_ny_T7_pls_wide_halo=$( printf "%.0f\n" $mns_ny_T7_pls_wide_halo ) -echo "mns_ny_T7_pls_wide_halo = $mns_ny_T7_pls_wide_halo" +MNS_NY_T7_PLS_WIDE_HALO=$( bc -l <<< "-(${NY_T7} + 2*${NHW_T7})" ) +MNS_NY_T7_PLS_WIDE_HALO=$( printf "%.0f\n" ${MNS_NY_T7_PLS_WIDE_HALO} ) +echo "MNS_NY_T7_PLS_WIDE_HALO = ${MNS_NY_T7_PLS_WIDE_HALO}" } # diff --git a/ush/set_predef_grid_params.sh b/ush/set_predef_grid_params.sh index 6a2c39a39..514794a43 100644 --- a/ush/set_predef_grid_params.sh +++ 
b/ush/set_predef_grid_params.sh @@ -93,37 +93,37 @@ predefined domain: elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - lon_rgnl_ctr=-62.0 - lat_rgnl_ctr=22.0 + LON_RGNL_CTR=-62.0 + LAT_RGNL_CTR=22.0 - delx="3000.0" - dely="3000.0" + DELX="3000.0" + DELY="3000.0" - nx_T7=2880 - ny_T7=1920 + NX_T7=2880 + NY_T7=1920 - nhw_T7=6 + NHW_T7=6 - dt_atmos="100" + DT_ATMOS="100" - layout_x="32" - layout_y="24" - blocksize="32" + LAYOUT_X="32" + LAYOUT_Y="24" + BLOCKSIZE="32" if [ "$QUILTING" = "TRUE" ]; then WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="32" WRTCMP_output_grid="lambert_conformal" - WRTCMP_cen_lon="${lon_rgnl_ctr}" - WRTCMP_cen_lat="${lat_rgnl_ctr}" - WRTCMP_stdlat1="${lat_rgnl_ctr}" - WRTCMP_stdlat2="${lat_rgnl_ctr}" + WRTCMP_cen_lon="${LON_RGNL_CTR}" + WRTCMP_cen_lat="${LAT_RGNL_CTR}" + WRTCMP_stdlat1="${LAT_RGNL_CTR}" + WRTCMP_stdlat2="${LAT_RGNL_CTR}" WRTCMP_nx="2937" WRTCMP_ny="1788" WRTCMP_lon_lwr_left="-97.83959" WRTCMP_lat_lwr_left="-5.67929305" - WRTCMP_dx="$delx" - WRTCMP_dy="$dely" + WRTCMP_dx="$DELX" + WRTCMP_dy="$DELY" fi fi @@ -147,37 +147,37 @@ predefined domain: elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - lon_rgnl_ctr=-62.0 - lat_rgnl_ctr=22.0 + LON_RGNL_CTR=-62.0 + LAT_RGNL_CTR=22.0 - delx="13000.0" - dely="13000.0" + DELX="13000.0" + DELY="13000.0" - nx_T7=665 - ny_T7=444 + NX_T7=665 + NY_T7=444 - nhw_T7=6 + NHW_T7=6 - dt_atmos="180" + DT_ATMOS="180" - layout_x="19" - layout_y="12" - blocksize="35" + LAYOUT_X="19" + LAYOUT_Y="12" + BLOCKSIZE="35" if [ "$QUILTING" = "TRUE" ]; then WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="12" WRTCMP_output_grid="lambert_conformal" - WRTCMP_cen_lon="${lon_rgnl_ctr}" - WRTCMP_cen_lat="${lat_rgnl_ctr}" - WRTCMP_stdlat1="${lat_rgnl_ctr}" - WRTCMP_stdlat2="${lat_rgnl_ctr}" + WRTCMP_cen_lon="${LON_RGNL_CTR}" + WRTCMP_cen_lat="${LAT_RGNL_CTR}" + WRTCMP_stdlat1="${LAT_RGNL_CTR}" + WRTCMP_stdlat2="${LAT_RGNL_CTR}" WRTCMP_nx="658" WRTCMP_ny="412" WRTCMP_lon_lwr_left="-98.0" WRTCMP_lat_lwr_left="-5.33" - WRTCMP_dx="$delx" - WRTCMP_dy="$dely" + WRTCMP_dx="$DELX" + WRTCMP_dy="$DELY" fi fi @@ -201,37 +201,37 @@ predefined domain: elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - lon_rgnl_ctr=-62.0 - lat_rgnl_ctr=22.0 + LON_RGNL_CTR=-62.0 + LAT_RGNL_CTR=22.0 - delx="25000.0" - dely="25000.0" + DELX="25000.0" + DELY="25000.0" - nx_T7=345 - ny_T7=230 + NX_T7=345 + NY_T7=230 - nhw_T7=6 + NHW_T7=6 - dt_atmos="300" + DT_ATMOS="300" - layout_x="5" - layout_y="5" - blocksize="6" + LAYOUT_X="5" + LAYOUT_Y="5" + BLOCKSIZE="6" if [ "$QUILTING" = "TRUE" ]; then WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="10" WRTCMP_output_grid="lambert_conformal" - WRTCMP_cen_lon="${lon_rgnl_ctr}" - WRTCMP_cen_lat="${lat_rgnl_ctr}" - WRTCMP_stdlat1="${lat_rgnl_ctr}" - WRTCMP_stdlat2="${lat_rgnl_ctr}" + WRTCMP_cen_lon="${LON_RGNL_CTR}" + WRTCMP_cen_lat="${LAT_RGNL_CTR}" + WRTCMP_stdlat1="${LAT_RGNL_CTR}" + WRTCMP_stdlat2="${LAT_RGNL_CTR}" WRTCMP_nx="337" WRTCMP_ny="210" WRTCMP_lon_lwr_left="-98.0" WRTCMP_lat_lwr_left="-4.5" - WRTCMP_dx="$delx" - WRTCMP_dy="$dely" + WRTCMP_dx="$DELX" + WRTCMP_dy="$DELY" fi fi @@ -247,36 +247,36 @@ predefined domain: if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - lon_ctr_T6=-106.0 - lat_ctr_T6=54.0 - stretch_fac=0.63 + LON_CTR_T6=-106.0 + LAT_CTR_T6=54.0 + STRETCH_FAC=0.63 RES="384" - refine_ratio=3 + REFINE_RATIO=3 num_margin_cells_T6_left=10 - istart_rgnl_T6=$(( $num_margin_cells_T6_left + 1 )) + ISTART_RGNL_T6=$(( num_margin_cells_T6_left + 1 )) num_margin_cells_T6_right=10 - iend_rgnl_T6=$(( $RES - 
$num_margin_cells_T6_right )) + IEND_RGNL_T6=$(( RES - num_margin_cells_T6_right )) num_margin_cells_T6_bottom=10 - jstart_rgnl_T6=$(( $num_margin_cells_T6_bottom + 1 )) + JSTART_RGNL_T6=$(( num_margin_cells_T6_bottom + 1 )) num_margin_cells_T6_top=10 - jend_rgnl_T6=$(( $RES - $num_margin_cells_T6_top )) + JEND_RGNL_T6=$(( RES - num_margin_cells_T6_top )) - dt_atmos="90" + DT_ATMOS="90" - layout_x="14" - layout_y="14" - blocksize="26" + LAYOUT_X="14" + LAYOUT_Y="14" + BLOCKSIZE="26" if [ "$QUILTING" = "TRUE" ]; then WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="14" WRTCMP_output_grid="rotated_latlon" - WRTCMP_cen_lon="${lon_rgnl_ctr}" - WRTCMP_cen_lat="${lat_rgnl_ctr}" + WRTCMP_cen_lon="${LON_RGNL_CTR}" + WRTCMP_cen_lat="${LAT_RGNL_CTR}" WRTCMP_lon_lwr_left="-57.9926" WRTCMP_lat_lwr_left="-50.74344" WRTCMP_lon_upr_rght="57.99249" @@ -287,29 +287,29 @@ predefined domain: elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - lon_rgnl_ctr=-106.0 - lat_rgnl_ctr=54.0 + LON_RGNL_CTR=-106.0 + LAT_RGNL_CTR=54.0 - delx="13000.0" - dely="13000.0" + DELX="13000.0" + DELY="13000.0" - nx_T7=960 - ny_T7=960 + NX_T7=960 + NY_T7=960 - nhw_T7=6 + NHW_T7=6 - dt_atmos="90" + DT_ATMOS="90" - layout_x="16" - layout_y="16" - blocksize="30" + LAYOUT_X="16" + LAYOUT_Y="16" + BLOCKSIZE="30" if [ "$QUILTING" = "TRUE" ]; then WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="16" WRTCMP_output_grid="rotated_latlon" - WRTCMP_cen_lon="${lon_rgnl_ctr}" - WRTCMP_cen_lat="${lat_rgnl_ctr}" + WRTCMP_cen_lon="${LON_RGNL_CTR}" + WRTCMP_cen_lat="${LAT_RGNL_CTR}" WRTCMP_lon_lwr_left="-57.9926" WRTCMP_lat_lwr_left="-50.74344" WRTCMP_lon_upr_rght="57.99249" @@ -339,37 +339,37 @@ predefined domain: elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - lon_rgnl_ctr=-97.5 - lat_rgnl_ctr=38.5 + LON_RGNL_CTR=-97.5 + LAT_RGNL_CTR=38.5 - delx="25000.0" - dely="25000.0" + DELX="25000.0" + DELY="25000.0" - nx_T7=200 - ny_T7=110 + NX_T7=200 + NY_T7=110 - nhw_T7=6 + NHW_T7=6 - dt_atmos="300" + DT_ATMOS="300" - layout_x="2" - layout_y="2" - blocksize="2" + LAYOUT_X="2" + LAYOUT_Y="2" + BLOCKSIZE="2" if [ "$QUILTING" = "TRUE" ]; then WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="2" WRTCMP_output_grid="lambert_conformal" - WRTCMP_cen_lon="${lon_rgnl_ctr}" - WRTCMP_cen_lat="${lat_rgnl_ctr}" - WRTCMP_stdlat1="${lat_rgnl_ctr}" - WRTCMP_stdlat2="${lat_rgnl_ctr}" + WRTCMP_cen_lon="${LON_RGNL_CTR}" + WRTCMP_cen_lat="${LAT_RGNL_CTR}" + WRTCMP_stdlat1="${LAT_RGNL_CTR}" + WRTCMP_stdlat2="${LAT_RGNL_CTR}" WRTCMP_nx="191" WRTCMP_ny="97" WRTCMP_lon_lwr_left="-120.72962370" WRTCMP_lat_lwr_left="25.11648583" - WRTCMP_dx="$delx" - WRTCMP_dy="$dely" + WRTCMP_dx="$DELX" + WRTCMP_dy="$DELY" fi fi @@ -393,37 +393,37 @@ predefined domain: elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - lon_rgnl_ctr=-97.5 - lat_rgnl_ctr=38.5 + LON_RGNL_CTR=-97.5 + LAT_RGNL_CTR=38.5 - delx="13000.0" - dely="13000.0" + DELX="13000.0" + DELY="13000.0" - nx_T7=390 - ny_T7=210 + NX_T7=390 + NY_T7=210 - nhw_T7=6 + NHW_T7=6 - dt_atmos="180" + DT_ATMOS="180" - layout_x="10" - layout_y="10" - blocksize="39" + LAYOUT_X="10" + LAYOUT_Y="10" + BLOCKSIZE="39" if [ "$QUILTING" = "TRUE" ]; then WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="10" WRTCMP_output_grid="lambert_conformal" - WRTCMP_cen_lon="${lon_rgnl_ctr}" - WRTCMP_cen_lat="${lat_rgnl_ctr}" - WRTCMP_stdlat1="${lat_rgnl_ctr}" - WRTCMP_stdlat2="${lat_rgnl_ctr}" + WRTCMP_cen_lon="${LON_RGNL_CTR}" + WRTCMP_cen_lat="${LAT_RGNL_CTR}" + WRTCMP_stdlat1="${LAT_RGNL_CTR}" + WRTCMP_stdlat2="${LAT_RGNL_CTR}" WRTCMP_nx="383" 
WRTCMP_ny="195" WRTCMP_lon_lwr_left="-121.58647982" WRTCMP_lat_lwr_left="24.36006861" - WRTCMP_dx="$delx" - WRTCMP_dy="$dely" + WRTCMP_dx="$DELX" + WRTCMP_dy="$DELY" fi fi @@ -447,37 +447,37 @@ predefined domain: elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - lon_rgnl_ctr=-97.5 - lat_rgnl_ctr=38.5 + LON_RGNL_CTR=-97.5 + LAT_RGNL_CTR=38.5 - delx="3000.0" - dely="3000.0" + DELX="3000.0" + DELY="3000.0" - nx_T7=1734 - ny_T7=1008 + NX_T7=1734 + NY_T7=1008 - nhw_T7=6 + NHW_T7=6 - dt_atmos="100" + DT_ATMOS="100" - layout_x="34" - layout_y="24" - blocksize="34" + LAYOUT_X="34" + LAYOUT_Y="24" + BLOCKSIZE="34" if [ "$QUILTING" = "TRUE" ]; then WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="24" WRTCMP_output_grid="lambert_conformal" - WRTCMP_cen_lon="${lon_rgnl_ctr}" - WRTCMP_cen_lat="${lat_rgnl_ctr}" - WRTCMP_stdlat1="${lat_rgnl_ctr}" - WRTCMP_stdlat2="${lat_rgnl_ctr}" + WRTCMP_cen_lon="${LON_RGNL_CTR}" + WRTCMP_cen_lat="${LAT_RGNL_CTR}" + WRTCMP_stdlat1="${LAT_RGNL_CTR}" + WRTCMP_stdlat2="${LAT_RGNL_CTR}" WRTCMP_nx="1738" WRTCMP_ny="974" WRTCMP_lon_lwr_left="-122.21414225" WRTCMP_lat_lwr_left="22.41403305" - WRTCMP_dx="$delx" - WRTCMP_dy="$dely" + WRTCMP_dx="$DELX" + WRTCMP_dy="$DELY" fi fi @@ -527,44 +527,44 @@ predefined domain: #dlat=0.02 - lon_ctr_T6=-97.5 - lat_ctr_T6=38.5 - stretch_fac=1.5 + LON_CTR_T6=-97.5 + LAT_CTR_T6=38.5 + STRETCH_FAC=1.5 RES="768" - refine_ratio=3 + REFINE_RATIO=3 num_margin_cells_T6_left=61 - istart_rgnl_T6=$(( $num_margin_cells_T6_left + 1 )) + ISTART_RGNL_T6=$(( num_margin_cells_T6_left + 1 )) num_margin_cells_T6_right=67 - iend_rgnl_T6=$(( $RES - $num_margin_cells_T6_right )) + IEND_RGNL_T6=$(( RES - num_margin_cells_T6_right )) num_margin_cells_T6_bottom=165 - jstart_rgnl_T6=$(( $num_margin_cells_T6_bottom + 1 )) + JSTART_RGNL_T6=$(( num_margin_cells_T6_bottom + 1 )) num_margin_cells_T6_top=171 - jend_rgnl_T6=$(( $RES - $num_margin_cells_T6_top )) + JEND_RGNL_T6=$(( RES - num_margin_cells_T6_top )) - dt_atmos="18" + DT_ATMOS="18" - layout_x="16" - layout_y="72" + LAYOUT_X="16" + LAYOUT_Y="72" write_tasks_per_group="72" - blocksize=32 + BLOCKSIZE=32 elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - lon_rgnl_ctr=-97.5 - lat_rgnl_ctr=38.5 + LON_RGNL_CTR=-97.5 + LAT_RGNL_CTR=38.5 - delx="3000.0" - dely="3000.0" + DELX="3000.0" + DELY="3000.0" - nx_T7=960 - ny_T7=960 + NX_T7=960 + NY_T7=960 - nhw_T7=6 + NHW_T7=6 fi ;; @@ -619,31 +619,31 @@ predefined domain: #dlon=0.03 #dlat=0.03 - lon_ctr_T6=-153.0 - lat_ctr_T6=61.0 - stretch_fac=1.0 # ??? + LON_CTR_T6=-153.0 + LAT_CTR_T6=61.0 + STRETCH_FAC=1.0 # ??? RES="768" - refine_ratio=3 # ??? + REFINE_RATIO=3 # ??? 
num_margin_cells_T6_left=61 - istart_rgnl_T6=$(( $num_margin_cells_T6_left + 1 )) + ISTART_RGNL_T6=$(( num_margin_cells_T6_left + 1 )) num_margin_cells_T6_right=67 - iend_rgnl_T6=$(( $RES - $num_margin_cells_T6_right )) + IEND_RGNL_T6=$(( RES - num_margin_cells_T6_right )) num_margin_cells_T6_bottom=165 - jstart_rgnl_T6=$(( $num_margin_cells_T6_bottom + 1 )) + JSTART_RGNL_T6=$(( num_margin_cells_T6_bottom + 1 )) num_margin_cells_T6_top=171 - jend_rgnl_T6=$(( $RES - $num_margin_cells_T6_top )) + JEND_RGNL_T6=$(( RES - num_margin_cells_T6_top )) - dt_atmos="18" + DT_ATMOS="18" - layout_x="16" - layout_y="48" + LAYOUT_X="16" + LAYOUT_Y="48" write_groups="2" write_tasks_per_group="24" - blocksize=32 + BLOCKSIZE=32 elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then diff --git a/ush/setup.sh b/ush/setup.sh index 13eb30534..9b51bd37f 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -236,7 +236,7 @@ case $MACHINE in Don't know how to set several parameters on MACHINE=\"$MACHINE\". Please specify the correct parameters for this machine in the setup script. Then remove this message and rerun." - ncores_per_node="" + NCORES_PER_NODE="" SCHED="" QUEUE_DEFAULT=${QUEUE_DEFAULT:-""} QUEUE_HPSS=${QUEUE_HPSS:-""} @@ -250,7 +250,7 @@ Don't know how to set several parameters on MACHINE=\"$MACHINE\". Please specify the correct parameters for this machine in the setup script. Then remove this message and rerun." - ncores_per_node="" + NCORES_PER_NODE="" SCHED="" QUEUE_DEFAULT=${QUEUE_DEFAULT:-""} QUEUE_HPSS=${QUEUE_HPSS:-""} @@ -259,7 +259,7 @@ Then remove this message and rerun." # "THEIA") # - ncores_per_node=24 + NCORES_PER_NODE=24 SCHED="slurm" QUEUE_DEFAULT=${QUEUE_DEFAULT:-"batch"} QUEUE_HPSS=${QUEUE_HPSS:-"service"} @@ -268,7 +268,7 @@ Then remove this message and rerun." # "HERA") # - ncores_per_node=24 + NCORES_PER_NODE=24 SCHED="slurm" QUEUE_DEFAULT=${QUEUE_DEFAULT:-"batch"} QUEUE_HPSS=${QUEUE_HPSS:-"service"} @@ -277,7 +277,7 @@ Then remove this message and rerun." # "JET") # - ncores_per_node=24 + NCORES_PER_NODE=24 SCHED="slurm" QUEUE_DEFAULT=${QUEUE_DEFAULT:-"batch"} QUEUE_HPSS=${QUEUE_HPSS:-"service"} @@ -286,7 +286,7 @@ Then remove this message and rerun." # "ODIN") # - ncores_per_node=24 + NCORES_PER_NODE=24 SCHED="slurm" QUEUE_DEFAULT=${QUEUE_DEFAULT:-""} QUEUE_HPSS=${QUEUE_HPSS:-""} @@ -300,7 +300,7 @@ Don't know how to set several parameters on MACHINE=\"$MACHINE\". Please specify the correct parameters for this machine in the setup script. Then remove this message and rerun." - ncores_per_node= + NCORES_PER_NODE= SCHED="" QUEUE_DEFAULT=${QUEUE_DEFAULT:-""} QUEUE_HPSS=${QUEUE_HPSS:-""} @@ -310,25 +310,25 @@ esac # #----------------------------------------------------------------------- # -# Set the grid type (gtype). In general, in the FV3 code, this can take +# Set the grid type (GTYPE). In general, in the FV3 code, this can take # on one of the following values: "global", "stretch", "nest", and "re- # gional". The first three values are for various configurations of a # global grid, while the last one is for a regional grid. Since here we -# are only interested in a regional grid, gtype must be set to "region- +# are only interested in a regional grid, GTYPE must be set to "region- # al". # #----------------------------------------------------------------------- # -gtype="regional" +GTYPE="regional" TILE_RGNL="7" # #----------------------------------------------------------------------- # -# Make sure that gtype is set to a valid value. +# Make sure that GTYPE is set to a valid value. 
# #----------------------------------------------------------------------- # -check_var_valid_value "gtype" "valid_vals_gtype" +check_var_valid_value "GTYPE" "valid_vals_GTYPE" # #----------------------------------------------------------------------- # @@ -714,16 +714,16 @@ fi # the stretch factor needs to be set to 1 because we are considering an # equivalent globally UNIFORM grid. However, it turns out that with a # non-symmetric regional grid (one in which nx is not equal to ny), set- -# ting stretch_fac to 1 fails because the orography filtering program is +# ting STRETCH_FAC to 1 fails because the orography filtering program is # designed for a global cubed-sphere grid and thus assumes that nx and -# ny for a given tile are equal when stretch_fac is exactly equal to 1. <-- Why is this? Seems like symmetry btwn x and y should still hold when stretch_fac is not equal to 1. -# It turns out that the program will work if we set stretch_fac that is +# ny for a given tile are equal when STRETCH_FAC is exactly equal to 1. <-- Why is this? Seems like symmetry btwn x and y should still hold when STRETCH_FAC is not equal to 1. +# It turns out that the program will work if we set STRETCH_FAC that is # not exactly 1. This is what we do below. # #----------------------------------------------------------------------- # if [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - stretch_fac="0.999" + STRETCH_FAC="0.999" fi # #----------------------------------------------------------------------- @@ -1076,22 +1076,22 @@ fi # Note that the regional grid is referred to as "tile 7" in the code. # We will let: # -# * nh0_T7 denote the width (in units of number of cells on tile 7) of -# the 0-cell-wide halo, i.e. nh0_T7 = 0; +# * NH0_T7 denote the width (in units of number of cells on tile 7) of +# the 0-cell-wide halo, i.e. NH0_T7 = 0; # -# * nh3_T7 denote the width (in units of number of cells on tile 7) of -# the 3-cell-wide halo, i.e. nh3_T7 = 3; and +# * NH3_T7 denote the width (in units of number of cells on tile 7) of +# the 3-cell-wide halo, i.e. NH3_T7 = 3; and # -# * nh4_T7 denote the width (in units of number of cells on tile 7) of -# the 4-cell-wide halo, i.e. nh4_T7 = 4. +# * NH4_T7 denote the width (in units of number of cells on tile 7) of +# the 4-cell-wide halo, i.e. NH4_T7 = 4. # # We define these variables next. # #----------------------------------------------------------------------- # -nh0_T7=0 -nh3_T7=3 -nh4_T7=$(( $nh3_T7 + 1 )) +NH0_T7=0 +NH3_T7=3 +NH4_T7=4 # #----------------------------------------------------------------------- # @@ -1158,7 +1158,7 @@ fi # #----------------------------------------------------------------------- # -PE_MEMBER01=$(( $layout_x*$layout_y )) +PE_MEMBER01=$(( LAYOUT_X*LAYOUT_Y )) if [ "$QUILTING" = "TRUE" ]; then PE_MEMBER01=$(( ${PE_MEMBER01} + ${WRTCMP_write_groups}*${WRTCMP_write_tasks_per_group} )) fi @@ -1171,57 +1171,57 @@ component if it is being used) are: #----------------------------------------------------------------------- # # Make sure that the number of cells in the x and y direction are divi- -# sible by the MPI task dimensions layout_x and layout_y, respectively. +# sible by the MPI task dimensions LAYOUT_X and LAYOUT_Y, respectively. 
# #----------------------------------------------------------------------- # -rem=$(( $nx_T7%$layout_x )) +rem=$(( NX_T7%LAYOUT_X )) if [ $rem -ne 0 ]; then print_err_msg_exit "\ -The number of grid cells in the x direction (nx_T7) is not evenly divisible -by the number of MPI tasks in the x direction (layout_x): - nx_T7 = $nx_T7 - layout_x = $layout_x" +The number of grid cells in the x direction (NX_T7) is not evenly divisible +by the number of MPI tasks in the x direction (LAYOUT_X): + NX_T7 = ${NX_T7} + LAYOUT_X = ${LAYOUT_X}" fi -rem=$(( $ny_T7%$layout_y )) +rem=$(( NY_T7%LAYOUT_Y )) if [ $rem -ne 0 ]; then print_err_msg_exit "\ -The number of grid cells in the y direction (ny_T7) is not evenly divisible -by the number of MPI tasks in the y direction (layout_y): - ny_T7 = $ny_T7 - layout_y = $layout_y" +The number of grid cells in the y direction (NY_T7) is not evenly divisible +by the number of MPI tasks in the y direction (LAYOUT_Y): + NY_T7 = ${NY_T7} + LAYOUT_Y = ${LAYOUT_Y}" fi print_info_msg "$VERBOSE" " The MPI task layout is: - layout_x = $layout_x - layout_y = $layout_y" + LAYOUT_X = ${LAYOUT_X} + LAYOUT_Y = ${LAYOUT_Y}" # #----------------------------------------------------------------------- # # Make sure that, for a given MPI task, the number columns (which is -# equal to the number of horizontal cells) is divisible by the blocksize. +# equal to the number of horizontal cells) is divisible by BLOCKSIZE. # #----------------------------------------------------------------------- # -nx_per_task=$(( $nx_T7/$layout_x )) -ny_per_task=$(( $ny_T7/$layout_y )) +nx_per_task=$(( NX_T7/LAYOUT_X )) +ny_per_task=$(( NY_T7/LAYOUT_Y )) num_cols_per_task=$(( $nx_per_task*$ny_per_task )) -rem=$(( $num_cols_per_task%$blocksize )) +rem=$(( num_cols_per_task%BLOCKSIZE )) if [ $rem -ne 0 ]; then prime_factors_num_cols_per_task=$( factor $num_cols_per_task | sed -r -e 's/^[0-9]+: (.*)/\1/' ) print_err_msg_exit "\ The number of columns assigned to a given MPI task must be divisible by -the blocksize: - nx_per_task = nx_T7/layout_x = $nx_T7/$layout_x = $nx_per_task - ny_per_task = ny_T7/layout_y = $ny_T7/$layout_y = $ny_per_task +BLOCKSIZE: + nx_per_task = NX_T7/LAYOUT_X = ${NX_T7}/${LAYOUT_X} = $nx_per_task + ny_per_task = NY_T7/LAYOUT_Y = ${NY_T7}/${LAYOUT_Y} = $ny_per_task num_cols_per_task = nx_per_task*ny_per_task = $num_cols_per_task - blocksize = $blocksize - rem = num_cols_per_task%%blocksize = $rem -The prime factors of num_cols_per_task are (useful for determining a valid -blocksize): + BLOCKSIZE = $BLOCKSIZE + rem = num_cols_per_task%%BLOCKSIZE = $rem +The prime factors of num_cols_per_task are (useful for determining a va- +lid BLOCKSIZE): prime_factors_num_cols_per_task: $prime_factors_num_cols_per_task" fi # @@ -1259,11 +1259,11 @@ fi #----------------------------------------------------------------------- # # If the write component is going to be used, make sure that the number -# of grid cells in the y direction (ny_T7) is divisible by the number of -# write tasks per group. This is because the ny_T7 rows of the grid +# of grid cells in the y direction (NY_T7) is divisible by the number of +# write tasks per group. This is because the NY_T7 rows of the grid # must be distributed evenly among the write_tasks_per_group tasks in a # given write group, i.e. each task must receive the same number of -# rows. This implies that ny_T7 must be evenly divisible by write_- +# rows. This implies that NY_T7 must be evenly divisible by write_- # tasks_per_group. 
If it isn't, the write component will hang or fail. # We check for this below. # @@ -1271,16 +1271,16 @@ fi # if [ "$QUILTING" = "TRUE" ]; then - rem=$(( $ny_T7%${WRTCMP_write_tasks_per_group} )) + rem=$(( NY_T7%WRTCMP_write_tasks_per_group )) if [ $rem -ne 0 ]; then print_err_msg_exit "\ The number of grid points in the y direction on the regional grid (ny_- T7) must be evenly divisible by the number of tasks per write group (WRTCMP_write_tasks_per_group): - ny_T7 = $ny_T7 + NY_T7 = ${NY_T7} WRTCMP_write_tasks_per_group = $WRTCMP_write_tasks_per_group - ny_T7%%write_tasks_per_group = $rem" + NY_T7%%write_tasks_per_group = $rem" fi fi @@ -1289,9 +1289,9 @@ fi # # Calculate the number of nodes (NUM_NODES) to request from the job # scheduler. This is just PE_MEMBER01 dividied by the number of cores -# per node (ncores_per_node) rounded up to the nearest integer, i.e. +# per node (NCORES_PER_NODE) rounded up to the nearest integer, i.e. # -# NUM_NODES = ceil(PE_MEMBER01/ncores_per_node) +# NUM_NODES = ceil(PE_MEMBER01/NCORES_PER_NODE) # # where ceil(...) is the ceiling function, i.e. it rounds its floating # point argument up to the next larger integer. Since in bash division @@ -1300,11 +1300,11 @@ fi # adding the denominator (of the argument of ceil(...) above) minus 1 to # the original numerator, i.e. by redefining NUM_NODES to be # -# NUM_NODES = (PE_MEMBER01 + ncores_per_node - 1)/ncores_per_node +# NUM_NODES = (PE_MEMBER01 + NCORES_PER_NODE - 1)/NCORES_PER_NODE # #----------------------------------------------------------------------- # -NUM_NODES=$(( (${PE_MEMBER01} + ${ncores_per_node} - 1)/${ncores_per_node} )) +NUM_NODES=$(( (PE_MEMBER01 + NCORES_PER_NODE - 1)/NCORES_PER_NODE )) # #----------------------------------------------------------------------- # @@ -1679,11 +1679,11 @@ WRTCMP_PARAMS_TEMPLATE_FP="${WRTCMP_PARAMS_TEMPLATE_FP}" # #----------------------------------------------------------------------- # -gtype="$gtype" +GTYPE="$GTYPE" TILE_RGNL="${TILE_RGNL}" -nh0_T7="$nh0_T7" -nh3_T7="$nh3_T7" -nh4_T7="$nh4_T7" +NH0_T7="${NH0_T7}" +NH3_T7="${NH3_T7}" +NH4_T7="${NH4_T7}" EOM } || print_err_msg_exit "\ Heredoc (cat) command to append new variable definitions to variable @@ -1712,9 +1712,9 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then # #----------------------------------------------------------------------- # -nhw_T7="$nhw_T7" -nx_T7="$nx_T7" -ny_T7="$ny_T7" +NHW_T7="${NHW_T7}" +NX_T7="${NX_T7}" +NY_T7="${NY_T7}" istart_rgnl_wide_halo_T6SG="$istart_rgnl_wide_halo_T6SG" iend_rgnl_wide_halo_T6SG="$iend_rgnl_wide_halo_T6SG" jstart_rgnl_wide_halo_T6SG="$jstart_rgnl_wide_halo_T6SG" @@ -1738,10 +1738,10 @@ elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then # #----------------------------------------------------------------------- # -del_angle_x_SG="$del_angle_x_SG" -del_angle_y_SG="$del_angle_y_SG" -mns_nx_T7_pls_wide_halo="$mns_nx_T7_pls_wide_halo" -mns_ny_T7_pls_wide_halo="$mns_ny_T7_pls_wide_halo" +DEL_ANGLE_X_SG="${DEL_ANGLE_X_SG}" +DEL_ANGLE_Y_SG="${DEL_ANGLE_Y_SG}" +MNS_NX_T7_PLS_WIDE_HALO="${MNS_NX_T7_PLS_WIDE_HALO}" +MNS_NY_T7_PLS_WIDE_HALO="${MNS_NY_T7_PLS_WIDE_HALO}" # # The following variables must be set in order to be able to use the # same scripting machinary for the case of GRID_GEN_METHOD set to "JP- @@ -1749,7 +1749,7 @@ mns_ny_T7_pls_wide_halo="$mns_ny_T7_pls_wide_halo" # RES="" # This will be set after the grid generation task is complete. CRES="" # This will be set after the grid generation task is complete. 
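For illustration, the node-count arithmetic computed earlier in this setup.sh hunk can be exercised on its own in bash; the layout, write-task, and core counts below are invented values, not settings from any machine or predefined grid:

  LAYOUT_X=16; LAYOUT_Y=16                                 # hypothetical MPI layout
  WRTCMP_write_groups=1; WRTCMP_write_tasks_per_group=16   # hypothetical write component
  NCORES_PER_NODE=24
  PE_MEMBER01=$(( LAYOUT_X*LAYOUT_Y + WRTCMP_write_groups*WRTCMP_write_tasks_per_group ))
  # Bash integer division truncates, so adding (denominator - 1) rounds up.
  NUM_NODES=$(( (PE_MEMBER01 + NCORES_PER_NODE - 1)/NCORES_PER_NODE ))
  echo "PE_MEMBER01 = ${PE_MEMBER01}, NUM_NODES = ${NUM_NODES}"   # 272 tasks -> 12 nodes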
-stretch_fac="$stretch_fac" +STRETCH_FAC="${STRETCH_FAC}" EOM } || print_err_msg_exit "\ Heredoc (cat) command to append grid parameters to variable definitions @@ -1819,7 +1819,7 @@ LBC_UPDATE_FCST_HRS=(${LBC_UPDATE_FCST_HRS[@]}) # LBC_UPDATE_FCST_HRS is an arr # #----------------------------------------------------------------------- # -ncores_per_node="${ncores_per_node}" +NCORES_PER_NODE="${NCORES_PER_NODE}" PE_MEMBER01="${PE_MEMBER01}" EOM } || print_err_msg_exit "\ diff --git a/ush/valid_param_vals.sh b/ush/valid_param_vals.sh index 4510bf0c0..a42fbfd4d 100644 --- a/ush/valid_param_vals.sh +++ b/ush/valid_param_vals.sh @@ -10,7 +10,7 @@ valid_vals_EXTRN_MDL_NAME_LBCS=("GSMGFS" "FV3GFS" "RAPX" "HRRRX") valid_vals_FV3GFS_FILE_FMT=("nemsio" "grib2") valid_vals_GRID_GEN_METHOD=("GFDLgrid" "JPgrid") valid_vals_PREEXISTING_DIR_METHOD=("delete" "rename" "quit") -valid_vals_gtype=("nest" "regional") +valid_vals_GTYPE=("regional") valid_vals_WRTCMP_output_grid=("rotated_latlon" "lambert_conformal") valid_vals_RUN_TASK_MAKE_GRID=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_RUN_TASK_MAKE_OROG=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") From 88d922a36bd3e7157854fbd9481d0156d053ff57 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 5 Nov 2019 16:31:22 -0700 Subject: [PATCH 041/203] Minor changes. --- tests/run_experiments.sh | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/run_experiments.sh b/tests/run_experiments.sh index 2476fbddd..4b76b03c0 100755 --- a/tests/run_experiments.sh +++ b/tests/run_experiments.sh @@ -48,7 +48,7 @@ TESTSDIR="$HOMErrfs/tests" # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u +x; } > /dev/null 2>&1 +{ save_shell_opts; set -u -x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -213,6 +213,10 @@ echo "experiment_name = '${experiment_name}'" ln_vrfy -fs "${experiment_config_fp}" "$USHDIR/config.sh" + print_info_msg " +Generating experiment with name: + experiment_name = \"${experiment_name}\"" + log_fp="$USHDIR/log.generate_wflow.${experiment_name}" $USHDIR/generate_FV3SAR_wflow.sh 2>&1 >& "${log_fp}" || { \ print_err_msg_exit "\ From be8c6837d212358296972a848f43a2d8f2fe21d2 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Wed, 6 Nov 2019 11:04:19 -0700 Subject: [PATCH 042/203] Change the build script for pre/post processing codes to match the latest from the NOAA-EMC repo (Julie's changes). --- regional/build_regional | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/regional/build_regional b/regional/build_regional index ac1bdd705..82f6b2e38 100755 --- a/regional/build_regional +++ b/regional/build_regional @@ -14,10 +14,6 @@ if [ ${SITE} == "cheyenne" ]; then fi cd ${RGNL_WFLOW_DIR}/sorc -./build_fre-nctools.sh ${SITE} >& out.build_fre-nctools -./build_orog.sh ${SITE} >& out.build_orog -# Don't build the old chgres any more. Build chgres_cube instead (below). -#./build_chgres.sh ${SITE} >& out.build_chgres # # The following build several new utilities needed in order to use the # new Jim Purser-type grid in the SAR. The following only works on @@ -29,11 +25,26 @@ cd ${RGNL_WFLOW_DIR}/sorc # # Build sfc_climo_gen. # -#cd ${RGNL_WFLOW_DIR}/../UFS_UTILS_gridgen_sfc/sorc cd ${RGNL_WFLOW_DIR}/sorc/UFS_UTILS_develop/sorc ./build_sfc_climo_gen.sh >& out.build_sfc_climo_gen cp ../exec/sfc_climo_gen ${RGNL_WFLOW_DIR}/exec # +# Build fre-nctools. 
+# +./build_fre-nctools.sh >& out.build_fre-nctools +cp ../exec/filter_topo ${RGNL_WFLOW_DIR}/exec +cp ../exec/fregrid ${RGNL_WFLOW_DIR}/exec +cp ../exec/fregrid_parallel ${RGNL_WFLOW_DIR}/exec +cp ../exec/make_hgrid ${RGNL_WFLOW_DIR}/exec +cp ../exec/make_hgrid_parallel ${RGNL_WFLOW_DIR}/exec +cp ../exec/make_solo_mosaic ${RGNL_WFLOW_DIR}/exec +cp ../exec/shave.x ${RGNL_WFLOW_DIR}/exec +# +# Build orog. +# +./build_orog.sh >& out.build_orog +cp ../exec/orog.x ${RGNL_WFLOW_DIR}/exec +# # Build chgres_cube. # # The first case is using Larissa's make.sh script, the second case uses From 3c72be13b1d4c79bd6cd94b5af26f421ac12ca48 Mon Sep 17 00:00:00 2001 From: Julie Schramm Date: Wed, 6 Nov 2019 18:31:48 +0000 Subject: [PATCH 043/203] Initial commit for EMC-like build --- sorc/build_all.sh | 201 +++++++--------------------------------- sorc/build_forecast.sh | 31 +++++++ sorc/build_post.sh | 20 ++++ sorc/build_utils.sh | 82 ++++++++++++++++ sorc/install_all.sh | 97 +++++++++++++++++++ sorc/link_fix.sh | 29 ++++++ sorc/partial_build.sh | 176 +++++++++++++++++++++++++++++++++++ sorc/regional_build.cfg | 18 ++++ 8 files changed, 487 insertions(+), 167 deletions(-) create mode 100755 sorc/build_forecast.sh create mode 100755 sorc/build_post.sh create mode 100755 sorc/build_utils.sh create mode 100755 sorc/install_all.sh create mode 100755 sorc/link_fix.sh create mode 100755 sorc/partial_build.sh create mode 100644 sorc/regional_build.cfg diff --git a/sorc/build_all.sh b/sorc/build_all.sh index 3be543b88..35a523896 100755 --- a/sorc/build_all.sh +++ b/sorc/build_all.sh @@ -1,5 +1,5 @@ #!/bin/sh -set -eu +set -eux #------------------------------------ # USER DEFINED STUFF: # @@ -9,6 +9,18 @@ set -eu export USE_PREINST_LIBS="true" +if [ $# -eq 0 ] ; then + echo "ERROR: You must provide the platform as a command-line argument" + exit 1 +fi + +SITE=${1} + +RGNL_WFLOW_DIR=$( pwd )/.. +if [ ${SITE} == "cheyenne" ]; then + export NCEPLIB_DIR=/glade/p/ral/jntp/GMTB/tools/NCEPlibs/20180717/intel-18.0.1/ +fi + #------------------------------------ # END USER DEFINED STUFF #------------------------------------ @@ -42,189 +54,44 @@ echo " .... Library build not currently supported .... " } #------------------------------------ -# build fv3 -#------------------------------------ -$Build_fv3gfs && { -echo " .... Building fv3 .... " -./build_fv3.sh > $logs_dir/build_fv3.log 2>&1 -} - -#------------------------------------ -# build gsi -#------------------------------------ -$Build_gsi && { -echo " .... Building gsi .... " -./build_gsi.sh > $logs_dir/build_gsi.log 2>&1 -} - -#------------------------------------ -# build ncep_post -#------------------------------------ -$Build_ncep_post && { -echo " .... Building ncep_post .... " -./build_ncep_post.sh > $logs_dir/build_ncep_post.log 2>&1 -} - -#------------------------------------ -# build gfs_wafs -#------------------------------------ -#$Build_gfs_wafs && { -#echo " .... Building gfs_wafs .... " -#./build_gfs_wafs.sh > $logs_dir/build_gfs_wafs .log 2>&1 -#} - -#------------------------------------ -# build NEMS util -#------------------------------------ -$Build_nems_util && { -echo " .... Building NEMS util .... " -./build_nems_util.sh > $logs_dir/build_NEMS.log 2>&1 -} - -#------------------------------------ -# build chgres -#------------------------------------ -$Build_chgres && { -echo " .... Building chgres .... 
" -./build_chgres.sh > $logs_dir/build_chgres.log 2>&1 -} - -#------------------------------------ -# build sfcanl_nsttfchg -#------------------------------------ -$Build_sfcanl_nsttfchg && { -echo " .... Building gaussian_sfcanl and nst_tf_chg .... " -./build_sfcanl_nsttfchg.sh > $logs_dir/build_sfcanl_nsttfchg.log 2>&1 -} - -#------------------------------------ -# build orog -#------------------------------------ -$Build_orog && { -echo " .... Building orog .... " -./build_orog.sh > $logs_dir/build_orog.log 2>&1 -} - -#------------------------------------ -# build cycle -#------------------------------------ -$Build_cycle && { -echo " .... Building cycle .... " -./build_cycle.sh > $logs_dir/build_cycle.log 2>&1 -} - -#------------------------------------ -# build enkf_chgres_recenter -#------------------------------------ -$Build_enkf_chgres_recenter && { -echo " .... Building enkf_chgres_recenter .... " -./build_enkf_chgres_recenter.sh > $logs_dir/build_enkf_chgres_recenter.log 2>&1 -} - -#------------------------------------ -# build tropcy_NEMS -#------------------------------------ -$Build_tropcy && { -echo " .... Building tropcy_NEMS .... " -./build_tropcy_NEMS.sh > $logs_dir/build_tropcy_NEMS.log 2>&1 -} - -#------------------------------------ -# build gdas -#------------------------------------ -$Build_gdas && { -echo " .... Building gdas .... " -./build_gdas.sh > $logs_dir/build_gdas.log 2>&1 -} - -#------------------------------------ -# build gfs_fbwndgfs -#------------------------------------ -$Build_gfs_fbwndgfs && { -echo " .... Building gfs_fbwndgfs .... " -./build_gfs_fbwndgfs.sh > $logs_dir/build_gfs_fbwndgfs.log 2>&1 -} - -#------------------------------------ -# build gfs_overpdtg2 -#------------------------------------ -$Build_gfs_overpdtg2 && { -echo " .... Building gfs_overpdtg2 .... " -./build_gfs_overpdtg2.sh > $logs_dir/build_gfs_overpdtg2.log 2>&1 -} - -#------------------------------------ -# build gfs_wintemv -#------------------------------------ -$Build_gfs_wintemv && { -echo " .... Building gfs_wintemv .... " -./build_gfs_wintemv.sh > $logs_dir/build_gfs_wintemv.log 2>&1 -} - -#------------------------------------ -# build gfs_bufrsnd +# build forecast #------------------------------------ -$Build_gfs_bufrsnd && { -echo " .... Building gfs_bufrsnd .... " -./build_gfs_bufrsnd.sh > $logs_dir/build_gfs_bufrsnd.log 2>&1 +$Build_forecast && { +echo " .... Building forecast .... " +./build_forecast.sh > $logs_dir/build_forecast.log 2>&1 } #------------------------------------ -# build emcsfc +# build forecast ccpp #------------------------------------ -$Build_emcsfc && { -echo " .... Building emcsfc .... " -./build_emcsfc.sh > $logs_dir/build_emcsfc.log 2>&1 +$Build_forecast_ccpp && { +echo " .... Building forecast ccpp.... " +export CCPP=true +./build_forecast.sh > $logs_dir/build_forecast_ccpp.log 2>&1 } #------------------------------------ -# build fre-nctools +# build post #------------------------------------ -$Build_nctools && { -echo " .... Building fre-nctools .... " -./build_fre-nctools.sh > $logs_dir/build_fre-nctools.log 2>&1 +$Build_post && { +echo " .... Building post .... " +./build_post.sh > $logs_dir/build_post.log 2>&1 } #------------------------------------ -# build fv3nc2nemsio +# build utils #------------------------------------ -$Build_fv3nc2nemsio && { -echo " .... Building fv3nc2nemsio .... " -./build_fv3nc2nemsio.sh > $logs_dir/build_fv3nc2nemsio.log 2>&1 +$Build_utils && { +echo " .... Building utils .... 
" +./build_utils.sh > $logs_dir/build_utils.log 2>&1 } #------------------------------------ -# build regrid_nemsio -#------------------------------------ -$Build_regrid_nemsio && { -echo " .... Building regrid_nemsio .... " -./build_regrid_nemsio.sh > $logs_dir/build_regrid_nemsio.log 2>&1 -} - -#------------------------------------ -# build gfs_util -#------------------------------------ -$Build_gfs_util && { -echo " .... Building gfs_util .... " -./build_gfs_util.sh > $logs_dir/build_gfs_util.log 2>&1 -} - -#------------------------------------ -# build prod_util -#------------------------------------ -$Build_prod_util && { -echo " .... prod_util build not currently supported .... " -#echo " .... Building prod_util .... " -#./build_prod_util.sh > $logs_dir/build_prod_util.log 2>&1 -} - -#------------------------------------ -# build grib_util +# build gsi #------------------------------------ -$Build_grib_util && { -echo " .... grib_util build not currently supported .... " -#echo " .... Building grib_util .... " -#./build_grib_util.sh > $logs_dir/build_grib_util.log 2>&1 +$Build_gsi && { +echo " .... Building gsi .... " +./build_gsi.sh > $logs_dir/build_gsi.log 2>&1 } echo;echo " .... Build system finished .... " diff --git a/sorc/build_forecast.sh b/sorc/build_forecast.sh new file mode 100755 index 000000000..798825271 --- /dev/null +++ b/sorc/build_forecast.sh @@ -0,0 +1,31 @@ +#! /usr/bin/env bash +set -eux + +source ./machine-setup.sh > /dev/null 2>&1 +cwd=`pwd` + +USE_PREINST_LIBS=${USE_PREINST_LIBS:-"true"} +if [ $USE_PREINST_LIBS = true ]; then + export MOD_PATH=/scratch3/NCEPDEV/nwprod/lib/modulefiles +else + export MOD_PATH=${cwd}/lib/modulefiles +fi + +# Check final exec folder exists +if [ ! -d "../exec" ]; then + mkdir ../exec +fi + +if [ $target = hera ]; then target=hera.intel ; fi + +cd regional_forecast.fd/ +FV3=$( pwd -P )/FV3 +CCPP=${CCPP:-"false"} +cd tests/ +if [ $CCPP = true ] || [ $CCPP = TRUE ] ; then + ./compile.sh "$FV3" "$target" "NCEP64LEV=Y HYDRO=N 32BIT=Y CCPP=Y STATIC=Y SUITES=FV3_GFS_2017_gfdlmp_regional" 1 +else + ./compile.sh "$FV3" "$target" "NCEP64LEV=Y HYDRO=N 32BIT=Y" 1 +fi +##mv -f fv3_1.exe ../NEMS/exe/fv3_gfs_nh.prod.32bit.x +mv -f fv3_1.exe ../NEMS/exe/NEMS.x diff --git a/sorc/build_post.sh b/sorc/build_post.sh new file mode 100755 index 000000000..06d68596f --- /dev/null +++ b/sorc/build_post.sh @@ -0,0 +1,20 @@ +#! /usr/bin/env bash +set -eux + +source ./machine-setup.sh > /dev/null 2>&1 +cwd=`pwd` + +USE_PREINST_LIBS=${USE_PREINST_LIBS:-"true"} +if [ $USE_PREINST_LIBS = true ]; then + export MOD_PATH=/scratch3/NCEPDEV/nwprod/lib/modulefiles +else + export MOD_PATH=${cwd}/lib/modulefiles +fi + +# Check final exec folder exists +if [ ! -d "../exec" ]; then + mkdir ../exec +fi + +cd regional_post.fd/sorc +sh build_ncep_post.sh diff --git a/sorc/build_utils.sh b/sorc/build_utils.sh new file mode 100755 index 000000000..a2aba1615 --- /dev/null +++ b/sorc/build_utils.sh @@ -0,0 +1,82 @@ +#!/bin/sh +set -eux + +export USE_PREINST_LIBS="true" + +#------------------------------------ +# END USER DEFINED STUFF +#------------------------------------ + +build_dir=`pwd` +logs_dir=$build_dir/logs +if [ ! -d $logs_dir ]; then + echo "Creating logs folder" + mkdir $logs_dir +fi + +#------------------------------------ +# INCLUDE PARTIAL BUILD +#------------------------------------ + +. 
./partial_build.sh + +UFS_UTILS_DEV=$build_dir/UFS_UTILS_develop/sorc +UFS_UTILS_CHGRES_GRIB2=$build_dir/UFS_UTILS_chgres_grib2/sorc + +#------------------------------------ +# build chgres +#------------------------------------ +$Build_chgres && { +echo " .... Chgres build not currently supported .... " +#echo " .... Building chgres .... " +#./build_chgres.sh > $logs_dir/build_chgres.log 2>&1 +} + +#------------------------------------ +# build chgres_cube +#------------------------------------ +$Build_chgres_cube && { +echo " .... Building chgres_cube .... " +cd $UFS_UTILS_CHGRES_GRIB2 +./build_chgres_cube.sh > $logs_dir/build_chgres_cube.log 2>&1 +} + +#------------------------------------ +# build orog +#------------------------------------ +$Build_orog && { +echo " .... Building orog .... " +cd $UFS_UTILS_DEV +./build_orog.sh > $logs_dir/build_orog.log 2>&1 +} + +#------------------------------------ +# build fre-nctools +#------------------------------------ +$Build_nctools && { +echo " .... Building fre-nctools .... " +cd $UFS_UTILS_DEV +./build_fre-nctools.sh > $logs_dir/build_fre-nctools.log 2>&1 +} + +#------------------------------------ +# build sfc_climo_gen +#------------------------------------ +$Build_sfc_climo_gen && { +echo " .... Building sfc_climo_gen .... " +cd $UFS_UTILS_DEV +./build_sfc_climo_gen.sh > $logs_dir/build_sfc_climo_gen.log 2>&1 +} + +#------------------------------------ +# build regional_grid +#------------------------------------ +$Build_regional_grid && { +echo " .... Building regional_grid .... " +cd $build_dir +./build_regional_grid.sh > $logs_dir/build_regional_grid.log 2>&1 +} + +cd $build_dir + +echo 'Building utils done' diff --git a/sorc/install_all.sh b/sorc/install_all.sh new file mode 100755 index 000000000..47adedb17 --- /dev/null +++ b/sorc/install_all.sh @@ -0,0 +1,97 @@ +#!/bin/sh +set -xeu + +build_dir=`pwd` + +CP='cp -rp' + +# Check final exec folder exists +if [ ! -d "../exec" ]; then + echo "Creating ../exec folder" + mkdir ../exec +fi + +#------------------------------------ +# INCLUDE PARTIAL BUILD +#------------------------------------ + +. 
./partial_build.sh + +#------------------------------------ +# install forecast +#------------------------------------ + ${CP} regional_forecast.fd/NEMS/exe/NEMS.x ../exec/regional_forecast.x + +#------------------------------------ +# install post +#------------------------------------ + ${CP} regional_post.fd/exec/ncep_post ../exec/regional_post.x + +#------------------------------------ +# install chgres +#------------------------------------ + ${CP} regional_utils.fd/exec/global_chgres ../exec/regional_chgres.x + +#------------------------------------ +# install chgres_cube +#------------------------------------ + ${CP} regional_utils.fd/exec/chgres_cube.exe ../exec/regional_chgres_cube.x + +#------------------------------------ +# install orog +#------------------------------------ + ${CP} regional_utils.fd/exec/orog.x ../exec/regional_orog.x + +#------------------------------------ +# install sfc_climo_gen +#------------------------------------ + ${CP} regional_utils.fd/exec/sfc_climo_gen ../exec/regional_sfc_climo_gen.x + +#------------------------------------ +# install regional_grid +#------------------------------------ + ${CP} regional_utils.fd/exec/regional_grid ../exec/regional_grid.x + +#------------------------------------ +# install fre-nctools +#------------------------------------ + ${CP} regional_utils.fd/exec/make_hgrid ../exec/regional_make_hgrid.x +#${CP} regional_utils.fd/exec/make_hgrid_parallel ../exec/regional_make_hgrid_parallel.x + ${CP} regional_utils.fd/exec/make_solo_mosaic ../exec/regional_make_solo_mosaic.x + ${CP} regional_utils.fd/exec/fregrid ../exec/regional_fregrid.x +#${CP} regional_utils.fd/exec/fregrid_parallel ../exec/regional_fregrid_parallel.x + ${CP} regional_utils.fd/exec/filter_topo ../exec/regional_filter_topo.x + ${CP} regional_utils.fd/exec/shave.x ../exec/regional_shave.x + +#------------------------------------ +# install gsi +#------------------------------------ +$Build_gsi && { + ${CP} regional_gsi.fd/exec/global_gsi.x ../exec/regional_gsi.x + ${CP} regional_gsi.fd/exec/global_enkf.x ../exec/regional_enkf.x + ${CP} regional_gsi.fd/exec/adderrspec.x ../exec/regional_adderrspec.x + ${CP} regional_gsi.fd/exec/adjustps.x ../exec/regional_adjustps.x + ${CP} regional_gsi.fd/exec/calc_increment_ens.x ../exec/regional_calc_increment_ens.x + ${CP} regional_gsi.fd/exec/calc_increment_serial.x ../exec/regional_calc_increment_serial.x + ${CP} regional_gsi.fd/exec/getnstensmeanp.x ../exec/regional_getnstensmeanp.x + ${CP} regional_gsi.fd/exec/getsfcensmeanp.x ../exec/regional_getsfcensmeanp.x + ${CP} regional_gsi.fd/exec/getsfcnstensupdp.x ../exec/regional_getsfcnstensupdp.x + ${CP} regional_gsi.fd/exec/getsigensmeanp_smooth.x ../exec/regional_getsigensmeanp_smooth.x + ${CP} regional_gsi.fd/exec/getsigensstatp.x ../exec/regional_getsigensstatp.x + ${CP} regional_gsi.fd/exec/gribmean.x ../exec/regional_gribmean.x + ${CP} regional_gsi.fd/exec/nc_diag_cat.x ../exec/regional_nc_diag_cat.x + ${CP} regional_gsi.fd/exec/nc_diag_cat_serial.x ../exec/regional_nc_diag_cat_serial.x + ${CP} regional_gsi.fd/exec/oznmon_horiz.x ../exec/regional_oznmon_horiz.x + ${CP} regional_gsi.fd/exec/oznmon_time.x ../exec/regional_oznmon_time.x + ${CP} regional_gsi.fd/exec/radmon_angle.x ../exec/regional_radmon_angle.x + ${CP} regional_gsi.fd/exec/radmon_bcoef.x ../exec/regional_radmon_bcoef.x + ${CP} regional_gsi.fd/exec/radmon_bcor.x ../exec/regional_radmon_bcor.x + ${CP} regional_gsi.fd/exec/radmon_time.x ../exec/regional_radmon_time.x + ${CP} 
regional_gsi.fd/exec/recenternemsiop_hybgain.x ../exec/regional_recenternemsiop_hybgain.x + ${CP} regional_gsi.fd/exec/recentersigp.x ../exec/regional_recentersigp.x + ${CP} regional_gsi.fd/exec/test_nc_unlimdims.x ../exec/regional_test_nc_unlimdims.x +} + +echo;echo " .... Install system finished .... " + +exit 0 diff --git a/sorc/link_fix.sh b/sorc/link_fix.sh new file mode 100755 index 000000000..af58f5c33 --- /dev/null +++ b/sorc/link_fix.sh @@ -0,0 +1,29 @@ +#!/bin/sh +set -xeu + +source ./machine-setup.sh > /dev/null 2>&1 + +LINK="cp -rp" + +pwd=$(pwd -P) + +if [[ ${target} == "wcoss_dell_p3" || ${target} == "wcoss" || ${target} == "wcoss_cray" ]]; then + FIX_DIR="/gpfs/dell2/emc/modeling/noscrub/emc.campara/fix_fv3cam" +elif [ ${target} == "hera" ]; then + FIX_DIR="/scratch2/NCEPDEV/fv3-cam/emc.campara/fix_fv3cam" +elif [ ${target} == "jet" ]; then + FIX_DIR="/scratch4/NCEPDEV/global/save/glopara/git/fv3gfs/fix" +else + echo "Unknown site " ${target} + exit 1 +fi + +mkdir -p ${pwd}/../fix +cd ${pwd}/../fix ||exit 8 +for dir in fix_am fix_nest fix_sar ; do + [[ -d $dir ]] && rm -rf $dir +done + +${LINK} $FIX_DIR/* . + +exit diff --git a/sorc/partial_build.sh b/sorc/partial_build.sh new file mode 100755 index 000000000..e0189c2ef --- /dev/null +++ b/sorc/partial_build.sh @@ -0,0 +1,176 @@ +# +# define the array of the name of build program +# + declare -a Build_prg=("Build_libs" "Build_forecast" "Build_forecast_ccpp" "Build_gsi" \ + "Build_post" "Build_utils" "Build_chgres" "Build_chgres_cube" \ + "Build_orog" "Build_sfc_climo_gen" "Build_regional_grid" "Build_nctools") + +# +# function parse_cfg: read config file and retrieve the values +# + parse_cfg() { + declare -i n + declare -i num_args + declare -i total_args + declare -a all_prg + total_args=$# + num_args=$1 + (( num_args == 0 )) && return 0 + config=$2 + [[ ${config,,} == "--verbose" ]] && config=$3 + all_prg=() + for (( n = num_args + 2; n <= total_args; n++ )); do + all_prg+=( ${!n} ) + done + + if [[ ${config^^} == ALL ]]; then +# +# set all values to true +# + for var in "${Build_prg[@]}"; do + eval "$var=true" + done + elif [[ $config == config=* ]]; then +# +# process config file +# + cfg_file=${config#config=} + $verbose && echo "INFO: settings in config file: $cfg_file" + while read cline; do +# remove leading white space + clean_line="${cline#"${cline%%[![:space:]]*}"}" + ( [[ -z "$clean_line" ]] || [[ "${clean_line:0:1}" == "#" ]] ) || { + $verbose && echo $clean_line + first9=${clean_line:0:9} + [[ ${first9,,} == "building " ]] && { + short_prg=$(sed -e 's/.*(\(.*\)).*/\1/' <<< "$clean_line") +# remove trailing white space + clean_line="${cline%"${cline##*[![:space:]]}"}" + build_action=true + last5=${clean_line: -5} + [[ ${last5,,} == ". yes" ]] && build_action=true + last4=${clean_line: -4} + [[ ${last4,,} == ". 
no" ]] && build_action=false + found=false + for prg in ${all_prg[@]}; do + [[ $prg == "Build_"$short_prg ]] && { + found=true + eval "$prg=$build_action" + break + } + done + $found || { + echo "*** Unrecognized line in config file \"$cfg_file\":" 2>&1 + echo "$cline" 2>&1 + exit 3 + } + } + } + done < $cfg_file + elif [[ $config == select=* ]]; then +# +# set all values to (default) false +# + for var in "${Build_prg[@]}"; do + eval "$var=false" + done +# +# read command line partial build setting +# + del="" + sel_prg=${config#select=} + for separator in " " "," ";" ":" "/" "|"; do + [[ "${sel_prg/$separator}" == "$sel_prg" ]] || { + del=$separator + sel_prg=${sel_prg//$del/ } + } + done + [[ $del == "" ]] && { + short_prg=$sel_prg + found=false + for prg in ${all_prg[@]}; do + [[ $prg == "Build_"$short_prg ]] && { + found=true + eval "$prg=true" + break + } + done + $found || { + echo "*** Unrecognized program name \"$short_prg\" in command line" 2>&1 + exit 4 + } + } || { + for short_prg in $(echo ${sel_prg}); do + found=false + for prg in ${all_prg[@]}; do + [[ $prg == "Build_"$short_prg ]] && { + found=true + eval "$prg=true" + break + } + done + $found || { + echo "*** Unrecognized program name \"$short_prg\" in command line" 2>&1 + exit 5 + } + done + } + else + echo "*** Unrecognized command line option \"$config\"" 2>&1 + exit 6 + fi + } + +# +# read command line arguments; processing config file +# + verbose=false + num_arg=$# + (( num_arg > 1 )) && { + [[ ${1,,} == "--verbose" ]] && { + verbose=true + } || { + echo "Usage: $0 [ALL|config=config_file|[select=][prog1[,prog2[,...]]]" 2>&1 + exit 1 + } + } + (( num_arg == 1 )) && { + ( [[ $1 == "-h" ]] || [[ $1 == "--help" ]] ) && { + echo "Usage: $0 [ALL|config=config_file|[select=][prog1[,prog2[,...]]]" 2>&1 + exit 2 + } + ( [[ $1 == "-v" ]] || [[ ${1,,} == "--verbose" ]] ) && { + verbose=true + num_arg=0 + } || { + echo "Usage: $0 [ALL|config=config_file|[select=][prog1[,prog2[,...]]]" 2>&1 + exit 3 + } + } + + if (( num_arg == 0 )); then +# +# set default values for partial build +# + parse_cfg 1 "config=regional_build.cfg" ${Build_prg[@]} + else + +# +# call arguments retriever/config parser +# + parse_cfg $num_arg "$@" ${Build_prg[@]} + fi + +# +# print values of build array +# + $verbose && { + echo "INFO: partial build settings:" + for var in "${Build_prg[@]}"; do + echo -n " $var: " + ${!var} && echo True || echo False + done + } + + echo "=== end of partial build setting ===" > /dev/null + diff --git a/sorc/regional_build.cfg b/sorc/regional_build.cfg new file mode 100644 index 000000000..5f4c1a27e --- /dev/null +++ b/sorc/regional_build.cfg @@ -0,0 +1,18 @@ +# +# ***** configuration of regional build ***** + + Building libraries (libs) ............................. no + Building forecast (forecast) .......................... no + Building forecast_ccpp (forecast_ccpp) ................ no + Building gsi (gsi) .................................... no + Building post (post) .................................. no + Building utils (utils) ................................ yes + Building chgres (chgres) .............................. no + Building chgres_cube (chgres_cube) .................... yes + Building sfc_climo_gen (sfc_climo_gen) ................ yes + Building regional_grid (regional_grid) ................ yes + Building orog (orog) .................................. yes + Building fre-nctools (nctools) ........................ 
yes + +# -- END -- + From 1cee1522c6825756738feedf2322b7cf58bc5730 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Wed, 6 Nov 2019 14:39:19 -0700 Subject: [PATCH 044/203] (1) Update Externals.cfg in fork to use the same hash for UFS_UTILS's chgres_cube branch as in the NOAA-EMC/regional_workflow repo's community_develop branch. (2) Update input_ccpp_GFSphys_GFSextrn.nml to include the latest namelists from the the NOAA-EMC/regional_workflow repo's community_develop branch. Somehow these changes did not get merged into my (gsketefian's) fork on github. --- Externals.cfg | 2 +- ush/templates/input_ccpp_GFSphys_GFSextrn.nml | 22 ++++++++++++++++--- 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/Externals.cfg b/Externals.cfg index 08517889a..45b075d61 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -9,7 +9,7 @@ required = True protocol = git repo_url = https://github.com/NCAR/UFS_UTILS #Working hash of feature/chgres_grib2 branch -hash = d6bd8e3 +hash = c29174e local_path = sorc/UFS_UTILS_chgres_grib2 required = True diff --git a/ush/templates/input_ccpp_GFSphys_GFSextrn.nml b/ush/templates/input_ccpp_GFSphys_GFSextrn.nml index baf82650b..877afaab7 100644 --- a/ush/templates/input_ccpp_GFSphys_GFSextrn.nml +++ b/ush/templates/input_ccpp_GFSphys_GFSextrn.nml @@ -103,7 +103,6 @@ z_tracer = .T. read_increment = .F. res_latlon_dynamics = "fv3_increment.nc" - do_schmidt = .true. target_lat = target_lon = @@ -111,9 +110,7 @@ regional = .true. bc_update_interval = agrid_vel_rst = .false. - full_zs_filter = .F. !unreleased feature - !! nord_zs_filter = 4 n_zs_filter = 0 ! safety / @@ -236,9 +233,26 @@ icloud_f = 1 mp_time = 90. / + +&cires_ugwp_nml + knob_ugwp_solver = 2 + knob_ugwp_source = 1,1,0,0 + knob_ugwp_wvspec = 1,25,25,25 + knob_ugwp_azdir = 2,4,4,4 + knob_ugwp_stoch = 0,0,0,0 + knob_ugwp_effac = 1,1,1,1 + knob_ugwp_doaxyz = 1 + knob_ugwp_doheat = 1 + knob_ugwp_dokdis = 1 + knob_ugwp_ndx4lh = 1 + knob_ugwp_version = 0 + launch_level = 25 +/ + &interpolator_nml interp_method = 'conserve_great_circle' / + &namsfc FNGLAC = "global_glacier.2x2.grb", FNMXIC = "global_maxice.2x2.grb", @@ -280,7 +294,9 @@ FSNOS = 99999, FSICS = 99999, / + &nam_stochy / + &nam_sfcperts / From d1c1d051bf6d833b1e09de11c13deda47da1206a Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Wed, 6 Nov 2019 17:17:29 -0700 Subject: [PATCH 045/203] Minor changes to make the way indexed do-loops over arrays be consistent. --- scripts/exregional_make_lbc1_to_lbcn.sh | 2 +- ush/bash_utils/interpol_to_arbit_CRES.sh | 2 +- ush/get_extrn_mdl_file_dir_info.sh | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/exregional_make_lbc1_to_lbcn.sh b/scripts/exregional_make_lbc1_to_lbcn.sh index 666157d71..939d80c5a 100755 --- a/scripts/exregional_make_lbc1_to_lbcn.sh +++ b/scripts/exregional_make_lbc1_to_lbcn.sh @@ -340,7 +340,7 @@ esac #----------------------------------------------------------------------- # num_fhrs="${#EXTRN_MDL_LBC_UPDATE_FHRS[@]}" -for (( i=0; i<$num_fhrs; i++ )); do +for (( i=0; i<${num_fhrs}; i++ )); do # # Get the forecast hour of the external model. 
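The do-loop changes above standardize on C-style indexed loops over bash arrays; a minimal standalone sketch of that pattern, using invented forecast hours rather than an actual EXTRN_MDL_LBC_UPDATE_FHRS array:

  fhrs=( 0 6 12 18 24 )
  num_fhrs="${#fhrs[@]}"
  for (( i=0; i<${num_fhrs}; i++ )); do
    printf "i=%d  fhr=%03d\n" "$i" "${fhrs[$i]}"
  done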
# diff --git a/ush/bash_utils/interpol_to_arbit_CRES.sh b/ush/bash_utils/interpol_to_arbit_CRES.sh index 74c42a89e..6f685c1a5 100644 --- a/ush/bash_utils/interpol_to_arbit_CRES.sh +++ b/ush/bash_utils/interpol_to_arbit_CRES.sh @@ -120,7 +120,7 @@ where the arguments are defined as follows: else - for ((i=0; i < ${num_valid_RESes}-1 ; i++)); do + for (( i=0; i<$((num_valid_RESes-1)); i++ )); do if [ "$RES" -gt "${RES_array[$i]}" ] && \ [ "$RES" -le "${RES_array[$i+1]}" ]; then diff --git a/ush/get_extrn_mdl_file_dir_info.sh b/ush/get_extrn_mdl_file_dir_info.sh index fbf95e69b..4ccbfe07d 100755 --- a/ush/get_extrn_mdl_file_dir_info.sh +++ b/ush/get_extrn_mdl_file_dir_info.sh @@ -318,7 +318,7 @@ fi # the start time of the external model run. # num_fhrs=${#lbc_update_fhrs[@]} - for (( i=0; i<=$(( $num_fhrs - 1 )); i++ )); do + for (( i=0; i<=$((num_fhrs-1)); i++ )); do lbc_update_fhrs[$i]=$(( ${lbc_update_fhrs[$i]} + time_offset_hrs )) done From a0f9dc4c5d91636abd138ad6c6f6937275984c8a Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 8 Nov 2019 12:22:18 -0700 Subject: [PATCH 046/203] (1) Create a workflow launch script (launch_FV3SAR_wflow.sh) that can be used to (re)launch the workflow. During the experiment/workflow generation step, copy this script into the experiment directory. (2) Include the capability to (re)launch the workflow using a cron job (by introducing two new global workflow variables: USE_CRON_TO_RELAUNCH and CRON_RELAUNCH_INTVL_MNTS). This capability adds the appropriate line to the user's cron table during the experiment/workflow generation step, and it removes this line once the workflow has succeeded or failed (i.e. no longer in-progress). --- tests/baseline_configs/config.GSDstd01.sh | 3 + tests/baseline_configs/config.GSDstd02.sh | 3 + ush/config_defaults.sh | 11 + ush/generate_FV3SAR_wflow.sh | 76 +++++ ush/launch_FV3SAR_wflow.sh | 349 ++++++++++++++++++++++ ush/setup.sh | 48 +++ ush/valid_param_vals.sh | 1 + 7 files changed, 491 insertions(+) create mode 100755 ush/launch_FV3SAR_wflow.sh diff --git a/tests/baseline_configs/config.GSDstd01.sh b/tests/baseline_configs/config.GSDstd01.sh index e955e9c5f..109a5f475 100644 --- a/tests/baseline_configs/config.GSDstd01.sh +++ b/tests/baseline_configs/config.GSDstd01.sh @@ -15,6 +15,9 @@ QUEUE_DEFAULT="batch" QUEUE_HPSS="service" QUEUE_FCST="batch" +USE_CRON_TO_RELAUNCH="TRUE" +CRON_RELAUNCH_INTVL_MNTS="03" + VERBOSE="TRUE" # Can specify EXPT_BASEDIR if you want. If not specified, will default diff --git a/tests/baseline_configs/config.GSDstd02.sh b/tests/baseline_configs/config.GSDstd02.sh index e955e9c5f..109a5f475 100644 --- a/tests/baseline_configs/config.GSDstd02.sh +++ b/tests/baseline_configs/config.GSDstd02.sh @@ -15,6 +15,9 @@ QUEUE_DEFAULT="batch" QUEUE_HPSS="service" QUEUE_FCST="batch" +USE_CRON_TO_RELAUNCH="TRUE" +CRON_RELAUNCH_INTVL_MNTS="03" + VERBOSE="TRUE" # Can specify EXPT_BASEDIR if you want. If not specified, will default diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index 28817eddd..f0360fc1a 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -57,6 +57,15 @@ QUEUE_FCST="production_queue" # #----------------------------------------------------------------------- # +# +# +#----------------------------------------------------------------------- +# +USE_CRON_TO_RELAUNCH="FALSE" +CRON_RELAUNCH_INTVL_MNTS="03" +# +#----------------------------------------------------------------------- +# # dir_doc_start # Set directories. 
Definitions: # @@ -175,6 +184,8 @@ NEMS_CONFIG_FN="nems.configure" WFLOW_XML_FN="FV3SAR_wflow.xml" GLOBAL_VAR_DEFNS_FN="var_defns.sh" WRTCMP_PARAMS_TEMPLATE_FN="" +WFLOW_LAUNCH_SCRIPT_FN="launch_FV3SAR_wflow.sh" +WFLOW_LAUNCH_LOG_FN="log.launch_FV3SAR_wflow" # #----------------------------------------------------------------------- # diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index 64b8f7870..9004e1c55 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -289,8 +289,84 @@ Done. +# +#----------------------------------------------------------------------- +# +# Copy the workflow (re)launch script to the experiment directory. +# +#----------------------------------------------------------------------- +# +print_info_msg " +Copying the workflow launcher script (WFLOW_LAUNCH_SCRIPT_FN) from the +USHDIR to the experiment directory (with the destination path specified +by WFLOW_LAUNCH_SCRIPT_FP): + USHDIR = \"${USHDIR}\" + WFLOW_LAUNCH_SCRIPT_FN = \"${WFLOW_LAUNCH_SCRIPT_FN}\" + WFLOW_LAUNCH_SCRIPT_FP = \"${WFLOW_LAUNCH_SCRIPT_FP}\"" +cp_vrfy "$USHDIR/${WFLOW_LAUNCH_SCRIPT_FN}" "${WFLOW_LAUNCH_SCRIPT_FP}" +# +#----------------------------------------------------------------------- +# +# If USE_CRON_TO_RELAUNCH is set to TRUE, add a line to the user's cron +# table to call the (re)launch script every CRON_RELAUNCH_INTVL_MNTS mi- +# nutes. +# +#----------------------------------------------------------------------- +# +if [ "${USE_CRON_TO_RELAUNCH}" = "TRUE" ]; then +# +# Make a backup copy of the user's crontab file and save it in a file. +# + date_stamp=$( date "+%Y%m%d%H%M%S" ) + crontab_backup_fp="$EXPTDIR/crontab.${date_stamp}" + print_info_msg " +Copying contents of user cron table to backup file: + crontab_backup_fp = \"${crontab_backup_fp}\"" + crontab -l > ${crontab_backup_fp} +# +# Below, we use "grep" to determine whether the crontab line that the +# variable CRONTAB_LINE contains is already present in the cron table. +# For that purpose, we need to escape the asterisks in the string in +# CRONTAB_LINE with backslashes. Do this next. +# + crontab_line_esc_astr=$( printf "%s" "${CRONTAB_LINE}" | \ + sed -r -e "s%[*]%\\\\*%g" ) +# +# In the grep command below, the "^" at the beginning of the string be- +# ing passed to grep is a start-of-line anchor while the "$" at the end +# of the string is an end-of-line anchor. Thus, in order for grep to +# find a match on any given line of the output of "crontab -l", that +# line must contain exactly the string in the variable crontab_line_- +# esc_astr without any leading or trailing characters. This is to eli- +# minate situations in which a line in the output of "crontab -l" con- +# tains the string in crontab_line_esc_astr but is precedeeded, for ex- +# ample, by the comment character "#" (in which case cron ignores that +# line) and/or is followed by further commands that are not part of the +# string in crontab_line_esc_astr (in which case it does something more +# than the command portion of the string in crontab_line_esc_astr does). +# + grep_output=$( crontab -l | grep "^${crontab_line_esc_astr}$" ) + exit_status=$? 
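To see why the asterisks are escaped and why the grep pattern is anchored with "^" and "$", the check can be reproduced in isolation; the crontab entry below is an invented placeholder, not one generated by the workflow:

  demo_line='*/03 * * * * cd /path/to/expt && ./launch_FV3SAR_wflow.sh'
  demo_esc=$( printf "%s" "${demo_line}" | sed -r -e "s%[*]%\\\\*%g" )
  # Escaped asterisks plus the ^...$ anchors force an exact whole-line match.
  printf "%s\n" "${demo_line}" | grep "^${demo_esc}$" && echo "entry already present"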
+ if [ "${exit_status}" -eq 0 ]; then + print_info_msg " +The following line already exists in the cron table and thus will not be +added: + CRONTAB_LINE = \"${CRONTAB_LINE}\"" + + else + + print_info_msg " +Adding the following line to the cron table in order to automatically +resubmit FV3SAR workflow: + CRONTAB_LINE = \"${CRONTAB_LINE}\"" + + ( crontab -l; echo "${CRONTAB_LINE}" ) | crontab - + + fi + +fi # #----------------------------------------------------------------------- # diff --git a/ush/launch_FV3SAR_wflow.sh b/ush/launch_FV3SAR_wflow.sh new file mode 100755 index 000000000..7f9280aaa --- /dev/null +++ b/ush/launch_FV3SAR_wflow.sh @@ -0,0 +1,349 @@ +#!/bin/bash -l + +# +#----------------------------------------------------------------------- +# +# Set shell options. +# +#----------------------------------------------------------------------- +# +set -u +#set -x +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Source the variable definitions file. This is assumed to be in the +# same directory as this script. +# +#----------------------------------------------------------------------- +# +. ${scrfunc_dir}/var_defns.sh +# +#----------------------------------------------------------------------- +# +# Set the variables containing the full path to the experiment directo- +# ry, the experiment name, and the full path to the workflow launch +# script (this script). In doing so, we assume that: +# +# 1) This script has been copied to the experiment directory. Thus, the +# directory in which it is located is the experiment directory. +# 2) The name of the experiment subdirectory (i.e. the string after the +# last "/" in the full path to the experiment directory) is identical +# to the experiment name. +# +#----------------------------------------------------------------------- +# +expt_name="${EXPT_SUBDIR}" +# +#----------------------------------------------------------------------- +# +# Load necessary modules. +# +#----------------------------------------------------------------------- +# +module load rocoto +# +#----------------------------------------------------------------------- +# +# Set file names. +# +#----------------------------------------------------------------------- +# +rocoto_xml_bn=$( basename "${WFLOW_XML_FN}" ".xml" ) +rocoto_database_fn="${rocoto_xml_bn}.db" +launch_log_fn="log.launch_${rocoto_xml_bn}" +# +#----------------------------------------------------------------------- +# +# Set the default status of the workflow to be "IN PROGRESS". Also, +# change directory to the experiment directory. +# +#----------------------------------------------------------------------- +# +workflow_status="IN PROGRESS" +cd "$EXPTDIR" +# +#----------------------------------------------------------------------- +# +# Issue the rocotorun command to launch/relaunch the next task in the +# workflow. Then check for error messages in the output of rocotorun. +# If any are found, it means the end-to-end run of the workflow failed. 
+# In this case, we remove the crontab entry that launches the workflow, +# and we append an appropriate failure message at the end of the launch +# log file. +# +#----------------------------------------------------------------------- +# + +#rocotorun_output=$( ls -alF ) +#echo +#echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" +#echo "${rocotorun_output}" +#echo "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB" + +#rocotorun_output=$( \ +#rocotorun -w "${WFLOW_XML_FN}" -d "${rocoto_database_fn}" -v 10 \ +#) +#rocotorun_output=$( (rocotorun -w "${WFLOW_XML_FN}" -d "${rocoto_database_fn}" -v 10) 2>&1 ) # This freezes the script. +#rocotorun_output=$( (rocotorun -w "${WFLOW_XML_FN}" -d "${rocoto_database_fn}" -v 10) 1>&2 ) # This leaves rocotorun_output empty. +#rocotorun_output=$( rocotorun -w "${WFLOW_XML_FN}" -d "${rocoto_database_fn}" -v 10 ) +#{ error=$(command 2>&1 1>&$out); } {out}>&1 +#{ rocotorun_output=$( rocotorun -w "${WFLOW_XML_FN}" -d "${rocoto_database_fn}" -v 10 2>&1 1>&$out); } {out}>&1 # This freezes the script. + +# +# Ideally, the following two lines should work, but for some reason the +# output of rocotorun cannot be captured in a variable using the $(...) +# notation. Maybe it's not being written to stdout, although I tried +# redirecting stderr to stdout and other tricks but nothing seemed to +# work. For this reason, below we first redirect the output of rocoto- +# run to a temporary file and then read in the contents of that file in- +# to the rocotorun_output variable using the cat command. +# +#rocotorun_cmd="rocotorun -w \"${WFLOW_XML_FN}\" -d \"${rocoto_database_fn}\" -v 10" +#rocotorun_output=$( eval ${rocotorun_cmd} 2>&1 ) +# +tmp_fn="rocotorun_output.txt" +#rocotorun_cmd="rocotorun -w \"${WFLOW_XML_FN}\" -d \"${rocoto_database_fn}\" -v 10 > ${tmp_fn}" +rocotorun_cmd="rocotorun -w \"${WFLOW_XML_FN}\" -d \"${rocoto_database_fn}\" -v 10" +eval ${rocotorun_cmd} > ${tmp_fn} 2>&1 +rocotorun_output=$( cat "${tmp_fn}" ) +#rm "${tmp_fn}" + +#rocotorun -w "${WFLOW_XML_FN}" -d "${rocoto_database_fn}" -v 10 > ${tmp_fn} 2>&1 + +error_msg="sbatch: error: Batch job submission failed:" +# Job violates accounting/QOS policy (job submit limit, user's size and/or time limits)" +while read -r line; do + grep_output=$( printf "$line" | grep "${error_msg}" ) + if [ $? -eq 0 ]; then + workflow_status="FAILED" + break + fi +done <<< "${rocotorun_output}" +# +#----------------------------------------------------------------------- +# +# Issue the rocotostat command to obtain a table specifying the status +# of each task. Then check for dead tasks in the output of rocotostat. +# If any are found, it means the end-to-end run of the workflow failed. +# In this case, we remove the crontab entry that launches the workflow, +# and we append an appropriate failure message at the end of the launch +# log file. 
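The temporary-file workaround described above (send the command's stdout and stderr to a scratch file, then read the file back into a variable) is generic enough to sketch on its own. Everything below is illustrative: the command, the error string, and the file name are placeholders rather than values from this patch.

#!/bin/bash
# Capture the combined output of a command whose output resists direct
# $(...) capture, then scan that output line by line for an error string.
tmp_fn=$( mktemp ./cmd_output.XXXXXX )
cmd="ls -alF"                          # placeholder command
eval ${cmd} > "${tmp_fn}" 2>&1         # stdout and stderr both go to the file
cmd_output=$( cat "${tmp_fn}" )
rm "${tmp_fn}"

status="OK"
error_msg="No such file or directory"  # placeholder error string
while read -r line; do
  if printf "%s" "$line" | grep -q "${error_msg}"; then
    status="FAILURE"
    break
  fi
done <<< "${cmd_output}"
echo "status = ${status}"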
+# +#----------------------------------------------------------------------- +# +#rocotostat_cmd="{ pwd; rocotostat -w \"${WFLOW_XML_FN}\" -d \"${rocoto_database_fn}\" -v 10; }" +#rocotostat_cmd="{ pwd; ls -alF; rocotostat -w ${WFLOW_XML_FN} -d ${rocoto_database_fn} -v 10; }" +#rocotostat_cmd="{ pwd; ls -alF; rocotostat -w \"${WFLOW_XML_FN}\" -d \"${rocoto_database_fn}\" -v 10; }" +#rocotostat_cmd="{ pwd; rocotostat -w \"${WFLOW_XML_FN}\" -d \"${rocoto_database_fn}\" -v 10; }" +#rocotostat_cmd="{ rocotostat -w \"${WFLOW_XML_FN}\" -d \"${rocoto_database_fn}\" -v 10; }" +rocotostat_cmd="rocotostat -w \"${WFLOW_XML_FN}\" -d \"${rocoto_database_fn}\" -v 10" + +#rocotostat_output=$( pwd; rocotostat -w "${WFLOW_XML_FN}" -d "${rocoto_database_fn}" -v 10 2>&1 ) +#rocotostat_output=$( rocotostat -w "${WFLOW_XML_FN}" -d "${rocoto_database_fn}" -v 10 2>&1 ) +rocotostat_output=$( eval ${rocotostat_cmd} 2>&1 ) +#rocotostat_output=$( ${rocotostat_cmd} 2>&1 ) +#rocotostat_output=$( { pwd; ls -alF; } 2>&1 ) +error_msg="DEAD" +while read -r line; do +# grep_output=$( printf "$line" | grep "DEAD" ) + grep_output=$( printf "$line" | grep "${error_msg}" ) + if [ $? -eq 0 ]; then + workflow_status="FAILED" + break + fi +done <<< "${rocotostat_output}" +# +#----------------------------------------------------------------------- +# +# Place the outputs of the rocotorun and rocotostat commands obtained +# above into the launch log file. +# +#----------------------------------------------------------------------- +# +printf " + +======================================================================== +Start of output from script \"${scrfunc_fn}\". +======================================================================== + +Running rocotorun command (rocotorun_cmd): +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + rocotorun_cmd = \'${rocotorun_cmd}\' + +Output of rocotorun_cmd is: +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +${rocotorun_output} + +Running rocotostat command (rocotostat_cmd): +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + rocotostat_cmd = \'${rocotostat_cmd}\' + +Output of rocotostat_cmd is: +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +${rocotostat_output} +" >> "${WFLOW_LAUNCH_LOG_FN}" 2>&1 +# +#----------------------------------------------------------------------- +# +# Use the rocotostat command with the "-s" flag to obtain a summary of +# the status of each cycle in the workflow. The output of this command +# has the following format: +# +# CYCLE STATE ACTIVATED DEACTIVATED +# 201905200000 Active Nov 07 2019 00:23:30 - +# ... +# +# Thus, the first row is a header line containing the column titles, and +# the remaining rows each correspond to one cycle in the workflow. Be- +# low, we are interested in the first and second columns of each row. +# The first column is a string containing the start time of the cycle +# (in the format YYYYMMDDHHmm, where YYYY is the 4-digit year, MM is the +# 2-digit month, DD is the 2-digit day of the month, HH is the 2-digit +# hour of the day, and mm is the 2-digit minute of the hour). The se- +# cond column is a string containing the state of the cycle. This can +# be "Active" or "Done". Below, we read in and store these two columns +# in (1-D) arrays. 
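The two-column extraction described above can be tested in isolation. In the sketch below the rocotostat summary text is made up for illustration; the splitting regular expression and the sed calls are the same ones used in the script.

#!/bin/bash
# Split "CYCLE  STATE ..." summary lines into two parallel arrays, skipping
# the header row.  The sample text stands in for "rocotostat ... -s" output.
rocotostat_output="   CYCLE         STATE           ACTIVATED              DEACTIVATED
201905200000      Active    Nov 07 2019 00:23:30             -
201905210000        Done    Nov 07 2019 00:23:30             -"

regex_search="^[ ]*([0-9]+)[ ]+([A-Za-z]+)[ ]+.*"
cycle_str=()
cycle_status=()
i=0
while read -r line; do
  if [ $i -gt 0 ]; then    # skip the header line
    im1=$((i-1))
    cycle_str[im1]=$( echo "$line" | sed -r -n -e "s/${regex_search}/\1/p" )
    cycle_status[im1]=$( echo "$line" | sed -r -n -e "s/${regex_search}/\2/p" )
  fi
  i=$((i+1))
done <<< "${rocotostat_output}"

echo "cycles:   ${cycle_str[*]}"
echo "statuses: ${cycle_status[*]}"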
+# +#----------------------------------------------------------------------- +# +rocotostat_output=$( rocotostat -w "${WFLOW_XML_FN}" -d "${rocoto_database_fn}" -v 10 -s ) + +regex_search="^[ ]*([0-9]+)[ ]+([A-Za-z]+)[ ]+.*" +cycle_str=() +cycle_status=() +i=0 +while read -r line; do +# +# Note that the first line in rocotostat_output is a header line con- +# taining the column titles. Thus, we ignore it and consider only the +# remaining lines (of which there is one per cycle). +# + if [ $i -gt 0 ]; then + im1=$((i-1)) + cycle_str[im1]=$( echo "$line" | sed -r -n -e "s/${regex_search}/\1/p" ) + cycle_status[im1]=$( echo "$line" | sed -r -n -e "s/${regex_search}/\2/p" ) + fi + i=$((i+1)) +done <<< "${rocotostat_output}" +# +#----------------------------------------------------------------------- +# +# Get the number of cycles. Then count the number of completed cycles +# by finding the number of cycles for which the corresponding element in +# the cycle_status array is set to "Done". +# +#----------------------------------------------------------------------- +# +num_cycles_total=${#cycle_str[@]} +num_cycles_completed=0 +for (( i=0; i<=$((num_cycles_total-1)); i++ )); do + if [ "${cycle_status}" = "Done" ]; then + num_cycles_completed=$((num_cycles_completed+1)) + fi +done +# +#----------------------------------------------------------------------- +# +# If the number of completed cycles is equal to the total number of cy- +# cles, it means the end-to-end run of the workflow was successful. In +# this case, we reset the workflow_status to "SUCCEEDED". +# +#----------------------------------------------------------------------- +# +if [ ${num_cycles_completed} -eq ${num_cycles_total} ]; then + workflow_status="SUCCEEDED" +fi +# +#----------------------------------------------------------------------- +# +# Print informational messages about the workflow to the launch log +# file, including the workflow status. +# +#----------------------------------------------------------------------- +# +printf " + +Summary of workflow status: +~~~~~~~~~~~~~~~~~~~~~~~~~~ + + ${num_cycles_completed} out of ${num_cycles_total} cycles completed. + Workflow status: ${workflow_status} + +======================================================================== +End of output from script \"${scrfunc_fn}\". +======================================================================== + +" >> ${WFLOW_LAUNCH_LOG_FN} 2>&1 +# +#----------------------------------------------------------------------- +# +# If the workflow status is now either "SUCCEEDED" or "FAILED", indicate +# this by appending an appropriate message to the end of the launch log +# file. +# +#----------------------------------------------------------------------- +# +msg=" +The end-to-end run of the workflow for the experiment specified by +expt_name ${workflow_status}: + expt_name = \"${expt_name}\" +" + +if [ "${workflow_status}" = "SUCCEEDED" ] || \ + [ "${workflow_status}" = "FAILED" ]; then + + printf "$msg" >> ${WFLOW_LAUNCH_LOG_FN} 2>&1 +# +# If a cron job was being used to periodically relaunch the workflow, we +# now remove the entry in the crontab corresponding to the workflow be- +# cause the end-to-end run of the workflow has now either succeeded or +# failed and will remain in that state without manual user intervention. +# Thus, there is no need to try to relaunch it. 
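The completed-cycle count that feeds the overall status reduces to the few lines below. The array contents are placeholders for whatever the parsing loop above produces; note that the element under test is indexed explicitly, since "${cycle_status}" on its own expands to only the first element of the array.

#!/bin/bash
# Count how many cycles report "Done".
cycle_status=( "Done" "Active" "Done" )   # placeholder statuses

num_cycles_total=${#cycle_status[@]}
num_cycles_completed=0
for (( i=0; i<num_cycles_total; i++ )); do
  if [ "${cycle_status[$i]}" = "Done" ]; then
    num_cycles_completed=$((num_cycles_completed+1))
  fi
done
echo "${num_cycles_completed} out of ${num_cycles_total} cycles completed."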
+# + if [ "${USE_CRON_TO_RELAUNCH}" = "TRUE" ]; then + + msg="$msg +Removing the corresponding line (CRONTAB_LINE) from the crontab file: + CRONTAB_LINE = \"${CRONTAB_LINE}\" +" + printf "$msg" +# +# Below, we use "grep" to determine whether the crontab line that the +# variable CRONTAB_LINE contains is already present in the cron table. +# For that purpose, we need to escape the asterisks in the string in +# CRONTAB_LINE with backslashes. Do this next. +# + crontab_line_esc_astr=$( printf "%s" "${CRONTAB_LINE}" | \ + sed -r -e "s%[*]%\\\\*%g" ) +# +# In the string passed to the grep command below, we use the line start +# and line end anchors ("^" and "$", respectively) to ensure that we on- +# ly find lines in the crontab that contain exactly the string in cron- +# tab_line_esc_astr without any leading or trailing characters. +# + ( crontab -l | grep -v "^${crontab_line_esc_astr}$" ) | crontab - + + fi + +fi + + + + diff --git a/ush/setup.sh b/ush/setup.sh index 9b51bd37f..13224c706 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -149,6 +149,26 @@ fi # #----------------------------------------------------------------------- # +# Make sure that USE_CRON_TO_RELAUNCH is set to a valid value. +# +#----------------------------------------------------------------------- +# +check_var_valid_value "USE_CRON_TO_RELAUNCH" "valid_vals_USE_CRON_TO_RELAUNCH" +# +# Set USE_CRON_TO_RELAUNCH to either "TRUE" or "FALSE" so we don't have to consider +# other valid values later on. +# +USE_CRON_TO_RELAUNCH=${USE_CRON_TO_RELAUNCH^^} +if [ "${USE_CRON_TO_RELAUNCH}" = "TRUE" ] || \ + [ "${USE_CRON_TO_RELAUNCH}" = "YES" ]; then + USE_CRON_TO_RELAUNCH="TRUE" +elif [ "${USE_CRON_TO_RELAUNCH}" = "FALSE" ] || \ + [ "${USE_CRON_TO_RELAUNCH}" = "NO" ]; then + USE_CRON_TO_RELAUNCH="FALSE" +fi +# +#----------------------------------------------------------------------- +# # Make sure that RUN_TASK_MAKE_GRID is set to a valid value. # #----------------------------------------------------------------------- @@ -785,6 +805,24 @@ fi # #----------------------------------------------------------------------- # +# Set the full path to the script that can be used to launch/relaunch +# the workflow. Also, set the line to add to the cron table to automa- +# tically relaunch the workflow every CRON_RELAUNCH_INTVL_MNTS minutes +# (if USE_CRON_TO_RELAUNCH is set to TRUE). +# +#----------------------------------------------------------------------- +# +WFLOW_LAUNCH_SCRIPT_FP="$EXPTDIR/${WFLOW_LAUNCH_SCRIPT_FN}" +WFLOW_LAUNCH_LOG_FP="$EXPTDIR/${WFLOW_LAUNCH_LOG_FN}" +if [ "${USE_CRON_TO_RELAUNCH}" = "TRUE" ]; then + CRONTAB_LINE="*/${CRON_RELAUNCH_INTVL_MNTS} * * * * cd $EXPTDIR && \ +./${WFLOW_LAUNCH_SCRIPT_FN} >> ./${WFLOW_LAUNCH_LOG_FN} 2>&1" +else + CRONTAB_LINE="" +fi +# +#----------------------------------------------------------------------- +# # Define the various work subdirectories under the main work directory. # Each of these corresponds to a different step/substep/task in the pre- # processing, as follows: @@ -1631,6 +1669,16 @@ done <<< "${line_list}" #----------------------------------------------------------------------- # +# +#----------------------------------------------------------------------- +# +# Workflow launcher script and cron table line. 
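For illustration, the CRONTAB_LINE assembled in setup.sh expands to a single crontab entry of the form sketched below. The experiment directory and the three-minute interval are placeholders; the script and log file names are the defaults introduced in this patch series.

#!/bin/bash
# Hypothetical expansion of CRONTAB_LINE for one experiment.
CRON_RELAUNCH_INTVL_MNTS="03"
EXPTDIR="/path/to/expt_dirs/test_community"     # placeholder
WFLOW_LAUNCH_SCRIPT_FN="launch_FV3SAR_wflow.sh"
WFLOW_LAUNCH_LOG_FN="log.launch_FV3SAR_wflow"
CRONTAB_LINE="*/${CRON_RELAUNCH_INTVL_MNTS} * * * * cd $EXPTDIR && \
./${WFLOW_LAUNCH_SCRIPT_FN} >> ./${WFLOW_LAUNCH_LOG_FN} 2>&1"
# The backslash-newline is removed inside the double quotes, so the entry is a
# single line: every CRON_RELAUNCH_INTVL_MNTS minutes, cd into the experiment
# directory, run the launch script, and append its output to the launch log.
echo "${CRONTAB_LINE}"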
+# +#----------------------------------------------------------------------- +# +WFLOW_LAUNCH_SCRIPT_FP="${WFLOW_LAUNCH_SCRIPT_FP}" +WFLOW_LAUNCH_LOG_FP="${WFLOW_LAUNCH_LOG_FP}" +CRONTAB_LINE="${CRONTAB_LINE}" # #----------------------------------------------------------------------- # diff --git a/ush/valid_param_vals.sh b/ush/valid_param_vals.sh index a42fbfd4d..91878c2f2 100644 --- a/ush/valid_param_vals.sh +++ b/ush/valid_param_vals.sh @@ -16,3 +16,4 @@ valid_vals_RUN_TASK_MAKE_GRID=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "n valid_vals_RUN_TASK_MAKE_OROG=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_RUN_TASK_MAKE_SFC_CLIMO=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_QUILTING=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") +valid_vals_USE_CRON_TO_RELAUNCH=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") From 2653df1be3020d59b6c1896088c5b53455a02bf6 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 8 Nov 2019 13:55:40 -0700 Subject: [PATCH 047/203] (1) In launch_FV3SAR_wflow.sh: (a) Assume the script will be called from the experiment directory, thus set EXPTDIR to the current directory; (b) purge module files to make sure there are no assumptions in the modules used; (c) remove temporary file used to store output from rocotorun after we're done using it; (d) change workflow status possibilities from "SUCCEEDED" and "FAILED" to "SUCCESS" and "FAILURE", respectively; (e) clean up the way the workflow completion message is generated and where it is output to (i.e. only the launch log file or both that file and to screen); (f) improve comments. (2) In the remaining files, instead of copying the workflow launch script to the experiment directory, make a symlink to it in the experiment directory. --- ush/generate_FV3SAR_wflow.sh | 10 ++-- ush/launch_FV3SAR_wflow.sh | 106 ++++++++++++++++++++++------------- ush/setup.sh | 11 ++-- 3 files changed, 76 insertions(+), 51 deletions(-) diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index 9004e1c55..8de0a1b95 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -297,13 +297,11 @@ Done. #----------------------------------------------------------------------- # print_info_msg " -Copying the workflow launcher script (WFLOW_LAUNCH_SCRIPT_FN) from the -USHDIR to the experiment directory (with the destination path specified -by WFLOW_LAUNCH_SCRIPT_FP): - USHDIR = \"${USHDIR}\" - WFLOW_LAUNCH_SCRIPT_FN = \"${WFLOW_LAUNCH_SCRIPT_FN}\" +Creating symlink in the experiment directory (EXPTDIR) to the workflow +launch script (WFLOW_LAUNCH_SCRIPT_FP): + EXPTDIR = \"${EXPTDIR}\" WFLOW_LAUNCH_SCRIPT_FP = \"${WFLOW_LAUNCH_SCRIPT_FP}\"" -cp_vrfy "$USHDIR/${WFLOW_LAUNCH_SCRIPT_FN}" "${WFLOW_LAUNCH_SCRIPT_FP}" +ln_vrfy -fs "${WFLOW_LAUNCH_SCRIPT_FP}" "$EXPTDIR" # #----------------------------------------------------------------------- # diff --git a/ush/launch_FV3SAR_wflow.sh b/ush/launch_FV3SAR_wflow.sh index 7f9280aaa..05a6410e2 100755 --- a/ush/launch_FV3SAR_wflow.sh +++ b/ush/launch_FV3SAR_wflow.sh @@ -24,24 +24,30 @@ scrfunc_dir=$( dirname "${scrfunc_fp}" ) # #----------------------------------------------------------------------- # -# Source the variable definitions file. This is assumed to be in the -# same directory as this script. +# Get the experiment directory. This is assumed to be the directory +# from which this script is called. 
(There will normally be a symlink +# in the experiment directory with the same name as this script pointing +# to the actual location of this script in the workflow directory struc- +# ture. Thus, when this script is called from the experiment directory, +# the working directory will be the experiment directory.) # #----------------------------------------------------------------------- # -. ${scrfunc_dir}/var_defns.sh +EXPTDIR=$( readlink -f "$(pwd)" ) # #----------------------------------------------------------------------- # -# Set the variables containing the full path to the experiment directo- -# ry, the experiment name, and the full path to the workflow launch -# script (this script). In doing so, we assume that: +# Source the variable definitions file for the experiment. # -# 1) This script has been copied to the experiment directory. Thus, the -# directory in which it is located is the experiment directory. -# 2) The name of the experiment subdirectory (i.e. the string after the -# last "/" in the full path to the experiment directory) is identical -# to the experiment name. +#----------------------------------------------------------------------- +# +. $EXPTDIR/var_defns.sh +# +#----------------------------------------------------------------------- +# +# Set the name of the experiment. We take this to be the name of the +# experiment subdirectory (i.e. the string after the last "/" in the +# full path to the experiment directory). # #----------------------------------------------------------------------- # @@ -53,11 +59,14 @@ expt_name="${EXPT_SUBDIR}" # #----------------------------------------------------------------------- # +module purge module load rocoto # #----------------------------------------------------------------------- # -# Set file names. +# Set file names. These include the rocoto database file and the log +# file in which to store output from this script (aka the workflow +# launch script). # #----------------------------------------------------------------------- # @@ -67,17 +76,23 @@ launch_log_fn="log.launch_${rocoto_xml_bn}" # #----------------------------------------------------------------------- # -# Set the default status of the workflow to be "IN PROGRESS". Also, -# change directory to the experiment directory. +# Initialize the default status of the workflow to "IN PROGRESS". +# +#----------------------------------------------------------------------- +# +wflow_status="IN PROGRESS" +# +#----------------------------------------------------------------------- +# +# Change location to the experiment directory. # #----------------------------------------------------------------------- # -workflow_status="IN PROGRESS" cd "$EXPTDIR" # #----------------------------------------------------------------------- # -# Issue the rocotorun command to launch/relaunch the next task in the +# Issue the rocotorun command to (re)launch the next task in the # workflow. Then check for error messages in the output of rocotorun. # If any are found, it means the end-to-end run of the workflow failed. 
# In this case, we remove the crontab entry that launches the workflow, @@ -119,16 +134,14 @@ tmp_fn="rocotorun_output.txt" rocotorun_cmd="rocotorun -w \"${WFLOW_XML_FN}\" -d \"${rocoto_database_fn}\" -v 10" eval ${rocotorun_cmd} > ${tmp_fn} 2>&1 rocotorun_output=$( cat "${tmp_fn}" ) -#rm "${tmp_fn}" - -#rocotorun -w "${WFLOW_XML_FN}" -d "${rocoto_database_fn}" -v 10 > ${tmp_fn} 2>&1 +rm "${tmp_fn}" error_msg="sbatch: error: Batch job submission failed:" # Job violates accounting/QOS policy (job submit limit, user's size and/or time limits)" while read -r line; do grep_output=$( printf "$line" | grep "${error_msg}" ) if [ $? -eq 0 ]; then - workflow_status="FAILED" + wflow_status="FAILURE" break fi done <<< "${rocotorun_output}" @@ -158,10 +171,9 @@ rocotostat_output=$( eval ${rocotostat_cmd} 2>&1 ) #rocotostat_output=$( { pwd; ls -alF; } 2>&1 ) error_msg="DEAD" while read -r line; do -# grep_output=$( printf "$line" | grep "DEAD" ) grep_output=$( printf "$line" | grep "${error_msg}" ) if [ $? -eq 0 ]; then - workflow_status="FAILED" + wflow_status="FAILURE" break fi done <<< "${rocotostat_output}" @@ -263,12 +275,12 @@ done # # If the number of completed cycles is equal to the total number of cy- # cles, it means the end-to-end run of the workflow was successful. In -# this case, we reset the workflow_status to "SUCCEEDED". +# this case, we reset the wflow_status to "SUCCESS". # #----------------------------------------------------------------------- # if [ ${num_cycles_completed} -eq ${num_cycles_total} ]; then - workflow_status="SUCCEEDED" + wflow_status="SUCCESS" fi # #----------------------------------------------------------------------- @@ -284,7 +296,7 @@ Summary of workflow status: ~~~~~~~~~~~~~~~~~~~~~~~~~~ ${num_cycles_completed} out of ${num_cycles_total} cycles completed. - Workflow status: ${workflow_status} + Workflow status: ${wflow_status} ======================================================================== End of output from script \"${scrfunc_fn}\". @@ -294,36 +306,38 @@ End of output from script \"${scrfunc_fn}\". # #----------------------------------------------------------------------- # -# If the workflow status is now either "SUCCEEDED" or "FAILED", indicate -# this by appending an appropriate message to the end of the launch log -# file. +# If the workflow status is now either "SUCCESS" or "FAILURE", indicate +# this by appending an appropriate workflow completion message to the +# end of the launch log file. # #----------------------------------------------------------------------- # -msg=" -The end-to-end run of the workflow for the experiment specified by -expt_name ${workflow_status}: +if [ "${wflow_status}" = "SUCCESS" ] || \ + [ "${wflow_status}" = "FAILURE" ]; then + + msg=" +The end-to-end run of the workflow for the forecast experiment specified +by expt_name has completed with the following workflow status (wflow_- +status): expt_name = \"${expt_name}\" + wflow_status = \"${wflow_status}\" " - -if [ "${workflow_status}" = "SUCCEEDED" ] || \ - [ "${workflow_status}" = "FAILED" ]; then - - printf "$msg" >> ${WFLOW_LAUNCH_LOG_FN} 2>&1 # # If a cron job was being used to periodically relaunch the workflow, we # now remove the entry in the crontab corresponding to the workflow be- # cause the end-to-end run of the workflow has now either succeeded or # failed and will remain in that state without manual user intervention. -# Thus, there is no need to try to relaunch it. +# Thus, there is no need to try to relaunch it. 
We also append a mes- +# sage to the completion message above to indicate this. # if [ "${USE_CRON_TO_RELAUNCH}" = "TRUE" ]; then - msg="$msg -Removing the corresponding line (CRONTAB_LINE) from the crontab file: + msg="${msg}\ +Thus, there is no need to relaunch the workflow via a cron job. Remo- +ving from the crontab the line (CRONTAB_LINE) that calls the workflow +launch script for this experiment: CRONTAB_LINE = \"${CRONTAB_LINE}\" " - printf "$msg" # # Below, we use "grep" to determine whether the crontab line that the # variable CRONTAB_LINE contains is already present in the cron table. @@ -341,6 +355,18 @@ Removing the corresponding line (CRONTAB_LINE) from the crontab file: ( crontab -l | grep -v "^${crontab_line_esc_astr}$" ) | crontab - fi +# +# Print the workflow completion message to the launch log file. +# + printf "$msg" >> ${WFLOW_LAUNCH_LOG_FN} 2>&1 +# +# If the stdout from this script is being sent to the screen (e.g. it is +# not being redirected to a file), then also print out the workflow +# completion message to the screen. +# + if [ -t 1 ]; then + printf "$msg" + fi fi diff --git a/ush/setup.sh b/ush/setup.sh index 13224c706..face010f1 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -805,14 +805,15 @@ fi # #----------------------------------------------------------------------- # -# Set the full path to the script that can be used to launch/relaunch -# the workflow. Also, set the line to add to the cron table to automa- -# tically relaunch the workflow every CRON_RELAUNCH_INTVL_MNTS minutes -# (if USE_CRON_TO_RELAUNCH is set to TRUE). +# Set the full path to the script that can be used to (re)launch the +# workflow. Also, if USE_CRON_TO_RELAUNCH is set to TRUE, set the line +# to add to the cron table to automatically relaunch the workflow every +# CRON_RELAUNCH_INTVL_MNTS minutes. Otherwise, set the variable con- +# taining this line to a null string. # #----------------------------------------------------------------------- # -WFLOW_LAUNCH_SCRIPT_FP="$EXPTDIR/${WFLOW_LAUNCH_SCRIPT_FN}" +WFLOW_LAUNCH_SCRIPT_FP="$USHDIR/${WFLOW_LAUNCH_SCRIPT_FN}" WFLOW_LAUNCH_LOG_FP="$EXPTDIR/${WFLOW_LAUNCH_LOG_FN}" if [ "${USE_CRON_TO_RELAUNCH}" = "TRUE" ]; then CRONTAB_LINE="*/${CRON_RELAUNCH_INTVL_MNTS} * * * * cd $EXPTDIR && \ From e3a9ca338294cd7fde16c521b4babd09fa3a228b Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 8 Nov 2019 17:14:32 -0700 Subject: [PATCH 048/203] Clean up the experiment/workflow configuration files for baselines GSDstd01 and GSDstd02. These baselines are still being tested and have not yet been finalized. --- tests/baseline_configs/config.GSDstd01.sh | 90 +++-------------------- tests/baseline_configs/config.GSDstd02.sh | 90 +++-------------------- 2 files changed, 24 insertions(+), 156 deletions(-) diff --git a/tests/baseline_configs/config.GSDstd01.sh b/tests/baseline_configs/config.GSDstd01.sh index 109a5f475..99007fb25 100644 --- a/tests/baseline_configs/config.GSDstd01.sh +++ b/tests/baseline_configs/config.GSDstd01.sh @@ -1,100 +1,34 @@ -# -#----------------------------------------------------------------------- -# -# This is the local (i.e. user-specific) experiment/workflow configura- -# tion file. It is not tracked by the git repository. 
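The "[ -t 1 ]" test added above is a small but useful pattern: the completion message always goes to the launch log, and is echoed to the screen only when stdout is attached to a terminal (i.e. when the script is run by hand rather than from cron). A minimal sketch, with a placeholder message and log name:

#!/bin/bash
msg="The end-to-end run of the workflow has completed."   # placeholder message
launch_log_fn="log.launch_FV3SAR_wflow"
printf "%s\n" "$msg" >> "${launch_log_fn}"   # always record it in the log
if [ -t 1 ]; then                            # stdout is a terminal
  printf "%s\n" "$msg"                       # so also show it on the screen
fi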
-# -#----------------------------------------------------------------------- -# -RUN_ENVIR="nco" -RUN_ENVIR="community" - MACHINE="HERA" ACCOUNT="gsd-fv3" QUEUE_DEFAULT="batch" QUEUE_HPSS="service" QUEUE_FCST="batch" +VERBOSE="TRUE" USE_CRON_TO_RELAUNCH="TRUE" CRON_RELAUNCH_INTVL_MNTS="03" -VERBOSE="TRUE" - -# Can specify EXPT_BASEDIR if you want. If not specified, will default -# to "$HOMErrfs/../expt_dirs". -#EXPT_BASEDIR="/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/test_latest_20190927/expt_dirs" +RUN_ENVIR="community" +PREEXISTING_DIR_METHOD="delete" PREDEF_GRID_NAME="GSD_HRRR25km" -#PREDEF_GRID_NAME="GSD_HRRR13km" -#PREDEF_GRID_NAME="GSD_HRRR3km" -#PREDEF_GRID_NAME="GSD_HAFSV0.A" -#PREDEF_GRID_NAME="EMC_HI3km" -# GRID_GEN_METHOD="JPgrid" -# -PREEXISTING_DIR_METHOD="delete" QUILTING="TRUE" -# USE_CCPP="TRUE" CCPP_PHYS_SUITE="GFS" -CCPP_PHYS_SUITE="GSD" - FCST_LEN_HRS="06" LBC_UPDATE_INTVL_HRS="6" -#LBC_UPDATE_INTVL_HRS="12" -#LBC_UPDATE_INTVL_HRS="1" - - -if [ "${RUN_ENVIR}" = "nco" ]; then - - EXPT_SUBDIR="test_NCO" - - RUN="an_experiment" - COMINgfs="/scratch1/NCEPDEV/hwrf/noscrub/hafs-input/COMGFS" - -# STMP="/scratch2/NCEPDEV/stmp3/${USER}" -# PTMP="/scratch2/NCEPDEV/stmp3/${USER}" - - DATE_FIRST_CYCL="20190422" - DATE_LAST_CYCL="20190422" -# DATE_FIRST_CYCL="20181216" -# DATE_LAST_CYCL="20181216" - CYCL_HRS=( "00" ) - - EXTRN_MDL_NAME_ICS="FV3GFS" - EXTRN_MDL_NAME_LBCS="FV3GFS" - -else - - EXPT_SUBDIR="test_community" - EXPT_SUBDIR="yunheng_GSMGFS_20190520_GSDphys" - -# DATE_FIRST_CYCL="20190701" -# DATE_LAST_CYCL="20190701" - DATE_FIRST_CYCL="20190520" - DATE_LAST_CYCL="20190520" -# CYCL_HRS=( "00" "12" ) - CYCL_HRS=( "00" ) - - EXTRN_MDL_NAME_ICS="GSMGFS" -# EXTRN_MDL_NAME_ICS="FV3GFS" -# EXTRN_MDL_NAME_ICS="HRRRX" - - EXTRN_MDL_NAME_LBCS="GSMGFS" -# EXTRN_MDL_NAME_LBCS="FV3GFS" -# EXTRN_MDL_NAME_LBCS="RAPX" - RUN_TASK_MAKE_GRID="TRUE" -# RUN_TASK_MAKE_GRID="FALSE" - GRID_DIR="/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/pregen_grid/GSD_HRRR25km" +EXPT_SUBDIR="GSDstd01" - RUN_TASK_MAKE_OROG="TRUE" -# RUN_TASK_MAKE_OROG="FALSE" - OROG_DIR="/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/pregen_orog/GSD_HRRR25km" +DATE_FIRST_CYCL="20190520" +DATE_LAST_CYCL="20190520" +CYCL_HRS=( "00" ) - RUN_TASK_MAKE_SFC_CLIMO="TRUE" -# RUN_TASK_MAKE_SFC_CLIMO="FALSE" - SFC_CLIMO_DIR="/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/pregen_sfc_climo/GSD_HRRR25km" +EXTRN_MDL_NAME_ICS="GSMGFS" +EXTRN_MDL_NAME_LBCS="GSMGFS" -fi +RUN_TASK_MAKE_GRID="TRUE" +RUN_TASK_MAKE_OROG="TRUE" +RUN_TASK_MAKE_SFC_CLIMO="TRUE" diff --git a/tests/baseline_configs/config.GSDstd02.sh b/tests/baseline_configs/config.GSDstd02.sh index 109a5f475..cf8544ef8 100644 --- a/tests/baseline_configs/config.GSDstd02.sh +++ b/tests/baseline_configs/config.GSDstd02.sh @@ -1,100 +1,34 @@ -# -#----------------------------------------------------------------------- -# -# This is the local (i.e. user-specific) experiment/workflow configura- -# tion file. It is not tracked by the git repository. -# -#----------------------------------------------------------------------- -# -RUN_ENVIR="nco" -RUN_ENVIR="community" - MACHINE="HERA" ACCOUNT="gsd-fv3" QUEUE_DEFAULT="batch" QUEUE_HPSS="service" QUEUE_FCST="batch" +VERBOSE="TRUE" USE_CRON_TO_RELAUNCH="TRUE" CRON_RELAUNCH_INTVL_MNTS="03" -VERBOSE="TRUE" - -# Can specify EXPT_BASEDIR if you want. If not specified, will default -# to "$HOMErrfs/../expt_dirs". 
-#EXPT_BASEDIR="/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/test_latest_20190927/expt_dirs" +RUN_ENVIR="community" +PREEXISTING_DIR_METHOD="delete" PREDEF_GRID_NAME="GSD_HRRR25km" -#PREDEF_GRID_NAME="GSD_HRRR13km" -#PREDEF_GRID_NAME="GSD_HRRR3km" -#PREDEF_GRID_NAME="GSD_HAFSV0.A" -#PREDEF_GRID_NAME="EMC_HI3km" -# GRID_GEN_METHOD="JPgrid" -# -PREEXISTING_DIR_METHOD="delete" QUILTING="TRUE" -# USE_CCPP="TRUE" CCPP_PHYS_SUITE="GFS" -CCPP_PHYS_SUITE="GSD" - FCST_LEN_HRS="06" LBC_UPDATE_INTVL_HRS="6" -#LBC_UPDATE_INTVL_HRS="12" -#LBC_UPDATE_INTVL_HRS="1" - - -if [ "${RUN_ENVIR}" = "nco" ]; then - - EXPT_SUBDIR="test_NCO" - - RUN="an_experiment" - COMINgfs="/scratch1/NCEPDEV/hwrf/noscrub/hafs-input/COMGFS" - -# STMP="/scratch2/NCEPDEV/stmp3/${USER}" -# PTMP="/scratch2/NCEPDEV/stmp3/${USER}" - - DATE_FIRST_CYCL="20190422" - DATE_LAST_CYCL="20190422" -# DATE_FIRST_CYCL="20181216" -# DATE_LAST_CYCL="20181216" - CYCL_HRS=( "00" ) - - EXTRN_MDL_NAME_ICS="FV3GFS" - EXTRN_MDL_NAME_LBCS="FV3GFS" - -else - - EXPT_SUBDIR="test_community" - EXPT_SUBDIR="yunheng_GSMGFS_20190520_GSDphys" - -# DATE_FIRST_CYCL="20190701" -# DATE_LAST_CYCL="20190701" - DATE_FIRST_CYCL="20190520" - DATE_LAST_CYCL="20190520" -# CYCL_HRS=( "00" "12" ) - CYCL_HRS=( "00" ) - - EXTRN_MDL_NAME_ICS="GSMGFS" -# EXTRN_MDL_NAME_ICS="FV3GFS" -# EXTRN_MDL_NAME_ICS="HRRRX" - - EXTRN_MDL_NAME_LBCS="GSMGFS" -# EXTRN_MDL_NAME_LBCS="FV3GFS" -# EXTRN_MDL_NAME_LBCS="RAPX" - RUN_TASK_MAKE_GRID="TRUE" -# RUN_TASK_MAKE_GRID="FALSE" - GRID_DIR="/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/pregen_grid/GSD_HRRR25km" +EXPT_SUBDIR="GSDstd02" - RUN_TASK_MAKE_OROG="TRUE" -# RUN_TASK_MAKE_OROG="FALSE" - OROG_DIR="/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/pregen_orog/GSD_HRRR25km" +DATE_FIRST_CYCL="20190520" +DATE_LAST_CYCL="20190520" +CYCL_HRS=( "00" ) - RUN_TASK_MAKE_SFC_CLIMO="TRUE" -# RUN_TASK_MAKE_SFC_CLIMO="FALSE" - SFC_CLIMO_DIR="/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/pregen_sfc_climo/GSD_HRRR25km" +EXTRN_MDL_NAME_ICS="GSMGFS" +EXTRN_MDL_NAME_LBCS="GSMGFS" -fi +RUN_TASK_MAKE_GRID="TRUE" +RUN_TASK_MAKE_OROG="TRUE" +RUN_TASK_MAKE_SFC_CLIMO="TRUE" From 4b511c22be5c10d29c2c4501aef0e1ecad12eba5 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 8 Nov 2019 17:15:29 -0700 Subject: [PATCH 049/203] Change variable name for clarity. --- ush/generate_FV3SAR_wflow.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index 8de0a1b95..e7028089e 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -315,8 +315,8 @@ if [ "${USE_CRON_TO_RELAUNCH}" = "TRUE" ]; then # # Make a backup copy of the user's crontab file and save it in a file. # - date_stamp=$( date "+%Y%m%d%H%M%S" ) - crontab_backup_fp="$EXPTDIR/crontab.${date_stamp}" + time_stamp=$( date "+%Y%m%d%H%M%S" ) + crontab_backup_fp="$EXPTDIR/crontab.bak.${time_stamp}" print_info_msg " Copying contents of user cron table to backup file: crontab_backup_fp = \"${crontab_backup_fp}\"" From 723cce72d74c150402339e78cd933daeeb559cbf Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 8 Nov 2019 17:19:39 -0700 Subject: [PATCH 050/203] Rename file that specifies the experiments to run. 
--- tests/experiments_list.txt | 3 --- tests/expts_list.txt | 3 +++ 2 files changed, 3 insertions(+), 3 deletions(-) delete mode 100644 tests/experiments_list.txt create mode 100644 tests/expts_list.txt diff --git a/tests/experiments_list.txt b/tests/experiments_list.txt deleted file mode 100644 index 358143693..000000000 --- a/tests/experiments_list.txt +++ /dev/null @@ -1,3 +0,0 @@ - GSDstd01 | LBC_UPDATE_INTVL_HRS="3" | AAA=GGGGG_HH | PREDEF_GRID_NAME="GSD_HRRR25km" | EXTRN_MDL_NAME_ICS="FV3GFS" -GSDstd02 | - diff --git a/tests/expts_list.txt b/tests/expts_list.txt new file mode 100644 index 000000000..a96fc693b --- /dev/null +++ b/tests/expts_list.txt @@ -0,0 +1,3 @@ + GSDstd01 | LBC_UPDATE_INTVL_HRS="3" | PREDEF_GRID_NAME="GSD_HRRR25km" | EXTRN_MDL_NAME_ICS="FV3GFS" +GSDstd02 | + From a9adbc728eda29bb9716ae4a8797897fc2da01e0 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 8 Nov 2019 17:22:30 -0700 Subject: [PATCH 051/203] New version of run_experiments.sh to generate and run experiments and workflows for multiple experiments. Improved error checking, comments, and informational and error messages. --- tests/run_experiments.sh | 414 ++++++++++++++++++++------------------- 1 file changed, 217 insertions(+), 197 deletions(-) diff --git a/tests/run_experiments.sh b/tests/run_experiments.sh index 4b76b03c0..f09fb6eac 100755 --- a/tests/run_experiments.sh +++ b/tests/run_experiments.sh @@ -48,7 +48,7 @@ TESTSDIR="$HOMErrfs/tests" # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -58,18 +58,17 @@ TESTSDIR="$HOMErrfs/tests" # MACHINE="HERA" ACCOUNT="gsd-fv3" -QUEUE_DEFAULT="batch" -QUEUE_HPSS="service" -QUEUE_FCST="batch" -VERBOSE="TRUE" + +USE_CRON_TO_RELAUNCH="TRUE" +#USE_CRON_TO_RELAUNCH="FALSE" +CRON_RELAUNCH_INTVL_MNTS="02" # #----------------------------------------------------------------------- # # Read in the list of experiments (which might be baselines) to run. -# This entails reading in each line of the file experiments_list.txt in -# the directory of this script and saving the result in the array varia- -# ble experiments_list. Note that each line of experiments_list.txt has -# the form +# This entails reading in each line of the file expts_list.txt in the +# directory of this script and saving the result in the array variable +# expts_list. Note that each line of expts_list.txt has the form # # BASELINE_NAME | VAR_NAME_1="VAR_VALUE_1" | ... | VAR_NAME_N="VAR_VALUE_N" # @@ -81,104 +80,176 @@ VERBOSE="TRUE" # config.BASELINE_NAME.sh in a subdirectory named baseline_configs # in the directory of this script. # -# 2) The variable name-value pairs on each line of the experiments_- -# list.txt file are delimited from the baseline and from each other -# by pipe characters (i.e. "|"). +# 2) The variable name-value pairs on each line of the expts_list.txt +# file are delimited from the baseline and from each other by pipe +# characters (i.e. "|"). # #----------------------------------------------------------------------- # -EXPTS_LIST_FN="${TESTSDIR}/experiments_list.txt" +EXPTS_LIST_FN="expts_list.txt" +EXPTS_LIST_FP="${TESTSDIR}/${EXPTS_LIST_FN}" -print_info_msg "$VERBOSE" " +print_info_msg " Reading in list of forecast experiments from file + EXPTS_LIST_FP = \"${EXPTS_LIST_FP}\" +and storing result in the array \"all_lines\" (one array element per expe- +riment)..." 
- EXPTS_LIST_FN = \"${EXPTS_LIST_FN}\" +readarray -t all_lines < "${EXPTS_LIST_FP}" -and storing result in the array \"experiments_list\" (one array element -per experiment)..." +all_lines_str=$( printf "\'%s\'\n" "${all_lines[@]}" ) +print_info_msg " +All lines from experiments list file EXPTS_LIST_FP read in, where: -readarray -t experiments_list < "${EXPTS_LIST_FN}" + EXPTS_LIST_FP = \"${EXPTS_LIST_FP}\" -msg=$( printf "%s\n" "${experiments_list[@]}" ) -msg=" -List of forecast experiments to run is given by: +Contents of file (line by line, before any processing) are: -experiments_list = ( -$msg -) +${all_lines_str} " -print_info_msg "$VERBOSE" "$msg" +# +#----------------------------------------------------------------------- +# +# Loop through the elements of all_lines and modify each line to remove +# leading and trailing whitespace and any whitespace before and after +# the field separator character (which is the pipe character, "|"). Al- +# so, drop any elements that are empty after this processing, and save +# the resulting set of non-empty elements in the array expts_list. +# +#----------------------------------------------------------------------- +# +expts_list=() +field_separator="\|" # Need backslash as an escape sequence in the sed commands. -num_elem="${#experiments_list[@]}" +j=0 +num_lines="${#all_lines[@]}" +for (( i=0; i<=$((num_lines-1)); i++ )); do +# +# Remove all leading and trailing whitespace from the current element of +# all_lines. +# + all_lines[$i]=$( printf "%s" "${all_lines[$i]}" | \ + sed -r -e "s/^[ ]*//" -e "s/[ ]*$//" ) +# +# Remove spaces before and after all field separators in the current +# element of all_lines. Note that we use the pipe symbol, "|", as the +# field separator. +# + all_lines[$i]=$( printf "%s" "${all_lines[$i]}" | \ + sed -r -e "s/[ ]*${field_separator}[ ]*/${field_separator}/g" ) +# +# If the last character of the current line is a field separator, remove +# it. +# + all_lines[$i]=$( printf "%s" "${all_lines[$i]}" | \ + sed -r -e "s/${field_separator}$//g" ) +# +# If after the processing above the current element of all_lines is not +# empty, save it as the next element of expts_list. +# + if [ ! -z "${all_lines[$i]}" ]; then + expts_list[$j]="${all_lines[$i]}" + j=$((j+1)) + fi -echo -echo "num_elem = ${num_elem}" -echo "scrfunc_dir = ${scrfunc_dir}" +done # #----------------------------------------------------------------------- # -# Loop through the experiments list. For each experiment, generate a -# workflow and launch it. +# Get the number of experiments to run and print out an informational +# message. # #----------------------------------------------------------------------- # -#set -x -i=0 -while [ ! -z "${experiments_list[$i]}" ]; do +num_expts="${#expts_list[@]}" +expts_list_str=$( printf " \'%s\'\n" "${expts_list[@]}" ) +print_info_msg " +After processing, the number of experiments to run (num_expts) is: -echo -echo "======================================================" -echo "i = $i" -echo "experiments_list[$i] = '${experiments_list[$i]}'" + num_expts = ${num_expts} -# Remove all leading and trailing whitespace. - experiments_list[$i]=$( \ - printf "%s" "${experiments_list[$i]}" | \ - sed -r -e "s/^[ ]*//" -e "s/[ ]*$//" ) -# sed -r -n -e "s/^[ ]*//" -e "s/[ ]*$//p" ) -echo "experiments_list[$i] = '${experiments_list[$i]}'" -# Remove spaces before and after all separators. We use the pipe symbol -# as the separator. 
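The per-line normalization applied to the experiments list (in both the old loop being removed here and the new loop that replaces it) can be exercised on a single made-up specification line:

#!/bin/bash
# Trim leading/trailing whitespace, collapse spaces around the "|" field
# separators, and drop a trailing separator from one specification line.
line='  GSDstd01 | LBC_UPDATE_INTVL_HRS="3" |  PREDEF_GRID_NAME="GSD_HRRR25km" | '
field_separator="\|"   # escaped so sed treats "|" literally

line=$( printf "%s" "$line" | sed -r -e "s/^[ ]*//" -e "s/[ ]*$//" )
line=$( printf "%s" "$line" | sed -r -e "s/[ ]*${field_separator}[ ]*/${field_separator}/g" )
line=$( printf "%s" "$line" | sed -r -e "s/${field_separator}$//g" )
printf "%s\n" "$line"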
- experiments_list[$i]=$( \ - printf "%s" "${experiments_list[$i]}" | \ - sed -r -e "s/[ ]*\|[ ]*/\|/g" ) -# sed -r -n -e "s/[ ]*\|[ ]*/\|/gp" ) -echo "experiments_list[$i] = '${experiments_list[$i]}'" +The list of forecast experiments to run (one experiment per line) is gi- +ven by: -# regex_search="^[ ]*([^\|]*)[ ]*\|[ ]*(.*)" -# regex_search="^([^\|]*)\|(.*)" - regex_search="^([^\|]*)(\|(.*)|)" +${expts_list_str} +" +# +#----------------------------------------------------------------------- +# +# Loop through the elements of the array expts_list. For each element +# (i.e. for each experiment), generate an experiment directory and cor- +# responding workflow and then launch the workflow. +# +#----------------------------------------------------------------------- +# +for (( i=0; i<=$((num_expts-1)); i++ )); do - baseline_name=$( printf "%s" "${experiments_list[$i]}" | sed -r -n -e "s/${regex_search}/\1/p" ) - remainder=$( printf "%s" "${experiments_list[$i]}" | sed -r -n -e "s/${regex_search}/\3/p" ) -echo -echo " baseline_name = '${baseline_name}'" -echo " remainder = '$remainder'" + print_info_msg " +Processing experiment #$((${i}+1)): +------------------------ + +The experiment specification line for this experiment is given by: + + ${expts_list[$i]} +" +# +# Get the name of the baseline on which the current experiment is based. +# Then save the remainder of the current element of expts_list in the +# variable "remainder". Note that if this variable is empty, then the +# current experiment is identical to the current baseline. If not, then +# "remainder" contains the modifications that need to be made to the +# current baseline to obtain the current experiment. +# + regex_search="^([^\|]*)(\|(.*)|)" + baseline_name=$( printf "%s" "${expts_list[$i]}" | \ + sed -r -n -e "s/${regex_search}/\1/p" ) + remainder=$( printf "%s" "${expts_list[$i]}" | \ + sed -r -n -e "s/${regex_search}/\3/p" ) +# +# Get the names and corresponding values of the variables that need to +# be modified in the current baseline to obtain the current experiment. +# The following while-loop steps through all the variables listed in +# "remainder" +# modvar_name=() modvar_value=() num_mod_vars=0 while [ ! -z "${remainder}" ]; do -# next_field=$( printf "%s" "$remainder" | sed -r -n -e "s/${regex_search}/\1/p" ) -# remainder=$( printf "%s" "$remainder" | sed -r -n -e "s/${regex_search}/\3/p" ) - next_field=$( printf "%s" "$remainder" | sed -r -e "s/${regex_search}/\1/" ) - remainder=$( printf "%s" "$remainder" | sed -r -e "s/${regex_search}/\3/" ) -# modvar_name[${num_mod_vars}]=$( printf "%s" "${next_field}" | sed -r -n -e "s/^([^=]*)=(.*)/\1/p" ) -# modvar_value[${num_mod_vars}]=$( printf "%s" "${next_field}" | sed -r -n -e "s/^([^=]*)=(.*)/\2/p" ) - modvar_name[${num_mod_vars}]=$( printf "%s" "${next_field}" | sed -r -e "s/^([^=]*)=(.*)/\1/" ) - modvar_value[${num_mod_vars}]=$( printf "%s" "${next_field}" | sed -r -e "s/^([^=]*)=(\")?([^\"]+*)(\")?/\3/" ) -echo -echo " next_field = '${next_field}'" -echo " remainder = '$remainder'" -echo " modvar_name[${num_mod_vars}] = ${modvar_name[${num_mod_vars}]}" -echo " modvar_value[${num_mod_vars}] = ${modvar_value[${num_mod_vars}]}" +# +# Get the next variable-value pair in remainder, and save what is left +# of remainder back into itself. 
+# + next_field=$( printf "%s" "$remainder" | \ + sed -r -e "s/${regex_search}/\1/" ) + remainder=$( printf "%s" "$remainder" | \ + sed -r -e "s/${regex_search}/\3/" ) +# +# Save the name of the variable in the variable-value pair obtained +# above in the array modvar_name. Then save the value in the variable- +# value pair in the array modvar_value. + modvar_name[${num_mod_vars}]=$( printf "%s" "${next_field}" | \ + sed -r -e "s/^([^=]*)=(.*)/\1/" ) + modvar_value[${num_mod_vars}]=$( printf "%s" "${next_field}" | \ + sed -r -e "s/^([^=]*)=(\")?([^\"]+*)(\")?/\3/" ) +# +# Increment the index that keeps track of the number of variables that +# need to be modified in the current baseline to obtain the current ex- +# periment. +# num_mod_vars=$((num_mod_vars+1)) -echo " num_mod_vars = ${num_mod_vars}" done - - +# +# Generate the path to the configuration file for the current baseline. +# This will be modified to obtain the configuration file for the current +# experiment. +# baseline_config_fp="${TESTSDIR}/baseline_configs/config.${baseline_name}.sh" +# +# Print out an error message and exit if a configuration file for the +# current baseline does not exist. +# if [ ! -f "${baseline_config_fp}" ]; then print_err_msg_exit "\ The experiment/workflow configuration file (baseline_config_fp) for the @@ -186,43 +257,87 @@ specified baseline (baseline_name) does not exist: baseline_name = \"${baseline_name}\" baseline_config_fp = \"${baseline_config_fp}\"" fi - - experiment_name="${baseline_name}" +# +# We require that EXPT_SUBDIR in the configuration file for the baseline +# be set to the name of the baseline. Check for this by extracting the +# value of EXPT_SUBDIR from the baseline configuration file and compa- +# ring it to baseline_name. +# + regex_search="^[ ]*EXPT_SUBDIR=(\")?([^ =\"]+)(.*)" + EXPT_SUBDIR=$( sed -r -n -e "s/${regex_search}/\2/p" \ + "${baseline_config_fp}" ) + if [ "${EXPT_SUBDIR}" != "${baseline_name}" ]; then + print_err_msg_exit "\ +The name of the experiment subdirectory (EXPT_SUBDIR) in the configura- +tion file (baseline_config_fp) for the current baseline does not match +the name of the baseline (baseline_name): + baseline_name = \"${baseline_name}\" + baseline_config_fp = \"${baseline_config_fp}\" + EXPT_SUBDIR = \"${EXPT_SUBDIR}\"" + fi +# +# Generate a name for the current experiment. We start with the name of +# the current baseline and modify it to indicate which variables must be +# reset to obtain the current experiment. +# + expt_name="${baseline_name}" for (( j=0; j<${num_mod_vars}; j++ )); do if [ $j -lt ${#modvar_name[@]} ]; then - experiment_name="${experiment_name}__${modvar_name[$j]}=${modvar_value[$j]}" + expt_name="${expt_name}__${modvar_name[$j]}.eq.${modvar_value[$j]}" else break fi done -echo -echo "experiment_name = '${experiment_name}'" - - experiment_config_fp="${USHDIR}/config.${experiment_name}.sh" - cp_vrfy "${baseline_config_fp}" "${experiment_config_fp}" - - EXPT_SUBDIR="${experiment_name}" +# +# Reset EXPT_SUBDIR to the name of the current experiment. Below, we +# will write this to the configuration file for the current experiment. +# + EXPT_SUBDIR="${expt_name}" +# +# Create a configuration file for the current experiment. We do this by +# first copying the baseline configuration file and then modifying the +# the values of those variables within it that are different between the +# baseline and the experiment. 
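Putting the pieces above together, the parsing of one (already whitespace-normalized) specification entry can be sketched end to end. The input line is made up; the field-splitting regular expression is the one used above, and the value extraction is a slightly simplified variant of it.

#!/bin/bash
# Split a 'BASELINE|VAR1="VAL1"|VAR2="VAL2"' specification into the baseline
# name plus parallel arrays of variable names and values.
spec='GSDstd01|LBC_UPDATE_INTVL_HRS="3"|PREDEF_GRID_NAME="GSD_HRRR25km"'
regex_search="^([^\|]*)(\|(.*)|)"

baseline_name=$( printf "%s" "$spec" | sed -r -e "s/${regex_search}/\1/" )
remainder=$( printf "%s" "$spec" | sed -r -e "s/${regex_search}/\3/" )

modvar_name=()
modvar_value=()
num_mod_vars=0
while [ ! -z "${remainder}" ]; do
  next_field=$( printf "%s" "$remainder" | sed -r -e "s/${regex_search}/\1/" )
  remainder=$( printf "%s" "$remainder" | sed -r -e "s/${regex_search}/\3/" )
  modvar_name[${num_mod_vars}]=$( printf "%s" "${next_field}" | sed -r -e "s/^([^=]*)=(.*)/\1/" )
  modvar_value[${num_mod_vars}]=$( printf "%s" "${next_field}" | sed -r -e "s/^([^=]*)=(\")?([^\"]*)(\")?/\3/" )
  num_mod_vars=$((num_mod_vars+1))
done

echo "baseline: ${baseline_name}"
for (( j=0; j<num_mod_vars; j++ )); do
  echo "  ${modvar_name[$j]} -> ${modvar_value[$j]}"
done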
+# + expt_config_fp="${USHDIR}/config.${expt_name}.sh" + cp_vrfy "${baseline_config_fp}" "${expt_config_fp}" - set_bash_param "${experiment_config_fp}" "MACHINE" "$MACHINE" - set_bash_param "${experiment_config_fp}" "ACCOUNT" "$ACCOUNT" - set_bash_param "${experiment_config_fp}" "QUEUE_DEFAULT" "${QUEUE_DEFAULT}" - set_bash_param "${experiment_config_fp}" "QUEUE_HPSS" "${QUEUE_HPSS}" - set_bash_param "${experiment_config_fp}" "QUEUE_FCST" "${QUEUE_FCST}" - set_bash_param "${experiment_config_fp}" "VERBOSE" "$VERBOSE" - set_bash_param "${experiment_config_fp}" "EXPT_SUBDIR" "${EXPT_SUBDIR}" + set_bash_param "${expt_config_fp}" "MACHINE" "$MACHINE" + set_bash_param "${expt_config_fp}" "ACCOUNT" "$ACCOUNT" + set_bash_param "${expt_config_fp}" "USE_CRON_TO_RELAUNCH" "${USE_CRON_TO_RELAUNCH}" + set_bash_param "${expt_config_fp}" "CRON_RELAUNCH_INTVL_MNTS" "${CRON_RELAUNCH_INTVL_MNTS}" + set_bash_param "${expt_config_fp}" "EXPT_SUBDIR" "${EXPT_SUBDIR}" - ln_vrfy -fs "${experiment_config_fp}" "$USHDIR/config.sh" - + printf "" + for (( j=0; j<${num_mod_vars}; j++ )); do + set_bash_param "${expt_config_fp}" "${modvar_name[$j]}" "${modvar_value[$j]}" + done +# +# Create a symlink called "config.sh" in USHDIR that points to the cur- +# rent experiment's configuration file. This must be done because the +# experiment/workflow generation script assumes that this is the name +# and location of the configuration file to use to generate a new expe- +# riment and corresponding workflow. +# + ln_vrfy -fs "${expt_config_fp}" "$USHDIR/config.sh" +# +#----------------------------------------------------------------------- +# +# Call the experiment/workflow generation script for the current experi- +# ment. +# +#----------------------------------------------------------------------- +# print_info_msg " Generating experiment with name: - experiment_name = \"${experiment_name}\"" + expt_name = \"${expt_name}\"" - log_fp="$USHDIR/log.generate_wflow.${experiment_name}" + log_fp="$USHDIR/log.generate_FV3SAR_wflow.${expt_name}" $USHDIR/generate_FV3SAR_wflow.sh 2>&1 >& "${log_fp}" || { \ print_err_msg_exit "\ Could not generate an experiment/workflow for the test specified by -experiment_name: - experiment_name = \"${experiment_name}\" +expt_name: + expt_name = \"${expt_name}\" The log file from the generation script is in the file specified by log_fp: log_fp = \"${log_fp}\""; @@ -231,110 +346,15 @@ log_fp: #----------------------------------------------------------------------- # # Set the experiment directory to the one that the workflow will create. -# Then move the configuration file and experiment/workflow generation -# log file to the experiment directory. +# Then, in order to have a record of how the experiment and workflow +# were generated, move the configuration file and experiment/workflow +# generation log file to the experiment directory. # #----------------------------------------------------------------------- # EXPTDIR=$( readlink -f "$HOMErrfs/../expt_dirs/${EXPT_SUBDIR}" ) - mv_vrfy "${experiment_config_fp}" "${EXPTDIR}" + mv_vrfy "${expt_config_fp}" "${EXPTDIR}" mv_vrfy "${log_fp}" "${EXPTDIR}" -# -#----------------------------------------------------------------------- -# -# Create a script in the run directory that can be used to (re)launch -# the workflow and report on its status. This script saves its output -# to a log file (in the run directory) for debugging purposes and to al- -# low the user to check on the status of the workflow. 
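The generation step above funnels everything the generator prints into a per-experiment log and aborts with a pointer to that log on failure. A generic, hedged version of that pattern (the relative script path and the experiment name below are placeholders):

#!/bin/bash
# Run the workflow generation script, keep its full output in a log file,
# and stop with a helpful message if it exits with a nonzero status.
expt_name="GSDstd01__LBC_UPDATE_INTVL_HRS.eq.3"   # hypothetical experiment name
log_fp="./log.generate_FV3SAR_wflow.${expt_name}"

if ! ./generate_FV3SAR_wflow.sh > "${log_fp}" 2>&1 ; then
  echo "Workflow generation failed for \"${expt_name}\"; see ${log_fp}" >&2
  exit 1
fi
echo "Workflow generated for \"${expt_name}\"; log saved to ${log_fp}"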
-# -#----------------------------------------------------------------------- -# - cd_vrfy $EXPTDIR - - xml_bn="FV3SAR_wflow" - xml_fn="${xml_bn}.xml" - db_fn="${xml_bn}.db" - relaunch_script_fn="relaunch_wflow.sh" - - { cat << EOM > ${relaunch_script_fn} -#!/bin/bash -l - -module load rocoto -cd "$EXPTDIR" -{ -rocotorun -w "${xml_fn}" -d "${db_fn}" -v 10; -echo; -rocotostat -w "${xml_fn}" -d "${db_fn}" -v 10; -} >> log.rocotostat 2>&1 - -dead_tasks=$( rocotostat -w "${xml_fn}" -d "${db_fn}" -v 10 | grep "DEAD" ) -if [ ! -z ${dead_tasks} ]; then - printf "%s\n" " -The end-to-end workflow test for the experiment specified below FAILED: - experiment_name = \"${experiment_name}\" -Removing the corresponding line from the crontab file.\n" -fi -EOM - } || print_err_msg_exit "\ -cat operation to create a relaunch script (relaunch_script_fn) in the experi- -ment directory (EXPTDIR) failed: - EXPTDIR = \"$EXPTDIR\" - relaunch_script_fn = \"${relaunch_script_fn}\"" -# -# Make the relaunch script executable. -# - chmod u+x ${relaunch_script_fn} -# -#----------------------------------------------------------------------- -# -# Add a line to the user's cron table to call the (re)launch script at -# some frequency (e.g. every 5 minutes). -# -#----------------------------------------------------------------------- -# - crontab_orig_fp="$(pwd)/crontab.orig" - print_info_msg " -Copying contents of user cron table to backup file: - crontab_orig_fp = \"${crontab_orig_fp}\"" - crontab -l > ${crontab_orig_fp} - - crontab_line="*/5 * * * * cd $EXPTDIR && ./${relaunch_script_fn}" -# -# Below, we use "grep" to determine whether the above crontab line is -# already present in the cron table. For that purpose, we need to es- -# cape the asterisks in the crontab line with backslashes. Do this -# next. -# - crontab_line_esc_astr=$( printf "%s" "${crontab_line}" | \ - sed -r -e "s![*]!\\\\*!g" ) - grep_output=$( crontab -l | grep "${crontab_line_esc_astr}" ) - exit_status=$? - - if [ "${exit_status}" -eq 0 ]; then - - print_info_msg " -The following line already exists in the cron table and thus will not be -added: - crontab_line = \"${crontab_line}\"" - - else - - print_info_msg " -Adding the following line to the cron table in order to automatically -resubmit FV3SAR workflow: - crontab_line = \"${crontab_line}\"" - - (crontab -l 2>/dev/null; echo "${crontab_line}") | crontab - - - fi -# -#----------------------------------------------------------------------- -# -# Increment the index that keeps track of the test/experiment number. -# -#----------------------------------------------------------------------- -# - i=$((i+1)) done # From 261b94ed38f16e38293193d918f370c8ef712cbe Mon Sep 17 00:00:00 2001 From: Julie Schramm Date: Mon, 11 Nov 2019 20:37:10 +0000 Subject: [PATCH 052/203] Modify build scripts to resemble build on develop branch: - Update README.md with new build instructions. 
- Modify build_all.sh to not build GSI; add install and link steps at end - Modify build_forecast.sh script to point to NEMSfv3gfs directory and use build command from community workflow instructions - Modify build_post.sh to point to EMC_post directory - Add install_all.sh and link_fix.sh scripts - Add builds for global_equiv_resol and mosaic_file to build_utils.sh - Modify install_all.sh to use executable names for community_develop - Modify link_fix.sh to use links from build_regional script - Remove build_regional script - Add Build_mosaic_file and Build_global_equiv_resol to partial_build.sh - Add regional_build.cfg These modifications are non-answer changing: Final result of regression test: PASS :) --- README.md | 12 ++--- regional/build_regional | 117 ---------------------------------------- sorc/build_all.sh | 27 +++++----- sorc/build_forecast.sh | 8 +-- sorc/build_post.sh | 2 +- sorc/build_utils.sh | 18 +++++++ sorc/install_all.sh | 28 +++++----- sorc/link_fix.sh | 47 ++++++++++------ sorc/partial_build.sh | 3 +- sorc/regional_build.cfg | 6 ++- 10 files changed, 92 insertions(+), 176 deletions(-) delete mode 100755 regional/build_regional diff --git a/README.md b/README.md index 0a01fc3ac..bd9e9ef80 100644 --- a/README.md +++ b/README.md @@ -10,16 +10,12 @@ This is the community\_develop branch of the regional\_workflow used to run the This step will checkout EMC\_post, NEMSfv3gfs and its submodules, UFS\_UTILS\_chgres\_grib2 and UFS\_UTILS\_develop in the sorc directory. -2. Build the utilities: +2. Build the utilities, post and FV3: ``` -cd regional -./build_regional theia >& out.build_regional -``` -3. Build FV3: -``` -cd sorc/NEMSfv3gfs/tests -compile.sh $BASEDIR/regional_workflow/sorc/NEMSfv3gfs/FV3 theia.intel "CCPP=Y STATIC=N 32BIT=Y REPRO=Y" >& out.compile_32bit +cd sorc +./build_all.sh ``` +This step will also copy the executables to the `exec` directory and link the fix files. 4. Create a `config.sh` file in the `ush` directory (see Users Guide). 5. Generate a workflow: ``` diff --git a/regional/build_regional b/regional/build_regional deleted file mode 100755 index 82f6b2e38..000000000 --- a/regional/build_regional +++ /dev/null @@ -1,117 +0,0 @@ -#!/bin/bash -set -eux - -if [ $# -eq 0 ] ; then - echo "ERROR: You must provide the platform as a command-line argument" - exit 1 -fi - -SITE=${1} - -RGNL_WFLOW_DIR=$( pwd )/.. -if [ ${SITE} == "cheyenne" ]; then - export NCEPLIB_DIR=/glade/p/ral/jntp/GMTB/tools/NCEPlibs/20180717/intel-18.0.1/ -fi - -cd ${RGNL_WFLOW_DIR}/sorc -# -# The following build several new utilities needed in order to use the -# new Jim Purser-type grid in the SAR. The following only works on -# theia for now. It needs to be ported to other platforms. -# -./build_regional_grid.sh ${SITE} >& out.build_regional_grid -./build_global_equiv_resol.sh ${SITE} >& out.build_global_equiv_resol -./build_mosaic_file.sh ${SITE} >& out.build_mosaic_file -# -# Build sfc_climo_gen. -# -cd ${RGNL_WFLOW_DIR}/sorc/UFS_UTILS_develop/sorc -./build_sfc_climo_gen.sh >& out.build_sfc_climo_gen -cp ../exec/sfc_climo_gen ${RGNL_WFLOW_DIR}/exec -# -# Build fre-nctools. -# -./build_fre-nctools.sh >& out.build_fre-nctools -cp ../exec/filter_topo ${RGNL_WFLOW_DIR}/exec -cp ../exec/fregrid ${RGNL_WFLOW_DIR}/exec -cp ../exec/fregrid_parallel ${RGNL_WFLOW_DIR}/exec -cp ../exec/make_hgrid ${RGNL_WFLOW_DIR}/exec -cp ../exec/make_hgrid_parallel ${RGNL_WFLOW_DIR}/exec -cp ../exec/make_solo_mosaic ${RGNL_WFLOW_DIR}/exec -cp ../exec/shave.x ${RGNL_WFLOW_DIR}/exec -# -# Build orog. 
-# -./build_orog.sh >& out.build_orog -cp ../exec/orog.x ${RGNL_WFLOW_DIR}/exec -# -# Build chgres_cube. -# -# The first case is using Larissa's make.sh script, the second case uses -# the more formal way also used by the other codes in UFS_UTILS. -if [ 0 = 1 ]; then - cd ${RGNL_WFLOW_DIR}/sorc/UFS_UTILS_chgres_grib2/sorc/chgres_cube.fd - ./make.sh >& out.build_chgres_cube -# Is the following needed? The version of the global_chgres.exe execu- -# table used by the tasks that use chgres_cube (the ICs/surf/LBC0 and -# LBC1_to_LBCN tasks) is located in the exec directory under the UFS_- -# UTILS_chgres_grib2 directory, not in ${RGNL_WFLOW_DIR}/exec. - cp ../exec/global_chgres.exe ${RGNL_WFLOW_DIR}/exec -else - cd ${RGNL_WFLOW_DIR}/sorc/UFS_UTILS_chgres_grib2/sorc - ./build_chgres_cube.sh >& out.build_chgres_cube -# Is the following needed? The version of the global_chgres.exe execu- -# table used by the tasks that use chgres_cube (the ICs/surf/LBC0 and -# LBC1_to_LBCN tasks) is located in the exec directory under the UFS_- -# UTILS_chgres_grib2 directory, not in ${RGNL_WFLOW_DIR}/exec. - cp ../exec/chgres_cube.exe ${RGNL_WFLOW_DIR}/exec -fi - -# -# Built EMC_post -# -cd ${RGNL_WFLOW_DIR}/sorc/EMC_post/sorc -./build_ncep_post.sh >& out.build_EMC_post -cp ../exec/ncep_post ${RGNL_WFLOW_DIR}/exec - -# prepare fixed data directories - -cd ${RGNL_WFLOW_DIR} -mkdir -p fix/fix_fv3 -cd fix - -if [ ${SITE} == "theia" ]; then - - ln -sfn /scratch4/NCEPDEV/global/save/glopara/git/fv3gfs/fix/fix_am fix_am - -elif [ ${SITE} == "hera" ]; then - - ln -sfn /scratch1/NCEPDEV/global/glopara/fix/fix_am fix_am - -elif [ ${SITE} == "wcoss" ] || [ ${SITE} == "dell" ]; then - - ln -sfn /gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/fv3gfs/fix/fix_am fix_am - -elif [ ${SITE} == "wcoss_cray" ]; then - - ln -sfn /gpfs/hps3/emc/global/noscrub/emc.glopara/git/fv3gfs/fix/fix_am fix_am - echo "module swap pmi pmi/5.0.11" >> ${FV3GFS_DIR}/../NEMSfv3gfs/modulefiles/wcoss_cray/fv3 - -elif [ ${SITE} == "odin" ]; then - - ln -sfn /scratch/ywang/fix/theia_fix/fix_am fix_am - -elif [ ${SITE} == "cheyenne" ]; then - - ln -sfn /glade/p/ral/jntp/GMTB/FV3GFS_V1_RELEASE/fix/fix_am/ fix_am - -elif [ ${SITE} == "jet" ]; then - - ln -sfn regional/build_regional/fix/fix_am fix_am - -else - - echo "Unknown site " ${SITE} - exit - -fi diff --git a/sorc/build_all.sh b/sorc/build_all.sh index 35a523896..4ce365d4f 100755 --- a/sorc/build_all.sh +++ b/sorc/build_all.sh @@ -9,18 +9,6 @@ set -eux export USE_PREINST_LIBS="true" -if [ $# -eq 0 ] ; then - echo "ERROR: You must provide the platform as a command-line argument" - exit 1 -fi - -SITE=${1} - -RGNL_WFLOW_DIR=$( pwd )/.. -if [ ${SITE} == "cheyenne" ]; then - export NCEPLIB_DIR=/glade/p/ral/jntp/GMTB/tools/NCEPlibs/20180717/intel-18.0.1/ -fi - #------------------------------------ # END USER DEFINED STUFF #------------------------------------ @@ -90,10 +78,21 @@ echo " .... Building utils .... " # build gsi #------------------------------------ $Build_gsi && { -echo " .... Building gsi .... " -./build_gsi.sh > $logs_dir/build_gsi.log 2>&1 +echo " .... GSI build not currently supported .... " +#echo " .... Building gsi .... " +#./build_gsi.sh > $logs_dir/build_gsi.log 2>&1 } echo;echo " .... Build system finished .... " +echo;echo " .... Installing executables .... " + +./install_all.sh + +echo;echo " .... Installation finished .... " +echo;echo " .... Linking fix files .... " + +./link_fix.sh + +echo;echo " .... Linking fix files finished .... 
" exit 0 diff --git a/sorc/build_forecast.sh b/sorc/build_forecast.sh index 798825271..2ab9c32a6 100755 --- a/sorc/build_forecast.sh +++ b/sorc/build_forecast.sh @@ -18,14 +18,14 @@ fi if [ $target = hera ]; then target=hera.intel ; fi -cd regional_forecast.fd/ +cd NEMSfv3gfs/ FV3=$( pwd -P )/FV3 CCPP=${CCPP:-"false"} cd tests/ if [ $CCPP = true ] || [ $CCPP = TRUE ] ; then - ./compile.sh "$FV3" "$target" "NCEP64LEV=Y HYDRO=N 32BIT=Y CCPP=Y STATIC=Y SUITES=FV3_GFS_2017_gfdlmp_regional" 1 +#EMC ./compile.sh "$FV3" "$target" "NCEP64LEV=Y HYDRO=N 32BIT=Y CCPP=Y STATIC=Y SUITES=FV3_GFS_2017_gfdlmp_regional" 1 + ./compile.sh "$FV3" "$target" "CCPP=Y STATIC=N 32BIT=Y REPRO=Y" else ./compile.sh "$FV3" "$target" "NCEP64LEV=Y HYDRO=N 32BIT=Y" 1 fi -##mv -f fv3_1.exe ../NEMS/exe/fv3_gfs_nh.prod.32bit.x -mv -f fv3_1.exe ../NEMS/exe/NEMS.x +#mv -f fv3.exe ../NEMS/exe/NEMS.x diff --git a/sorc/build_post.sh b/sorc/build_post.sh index 06d68596f..c9354414c 100755 --- a/sorc/build_post.sh +++ b/sorc/build_post.sh @@ -16,5 +16,5 @@ if [ ! -d "../exec" ]; then mkdir ../exec fi -cd regional_post.fd/sorc +cd EMC_post/sorc sh build_ncep_post.sh diff --git a/sorc/build_utils.sh b/sorc/build_utils.sh index a2aba1615..b902d7ee1 100755 --- a/sorc/build_utils.sh +++ b/sorc/build_utils.sh @@ -77,6 +77,24 @@ cd $build_dir ./build_regional_grid.sh > $logs_dir/build_regional_grid.log 2>&1 } +#------------------------------------ +# build global_equiv_resol +#------------------------------------ +$Build_global_equiv_resol && { +echo " .... Building global_equiv_resol .... " +cd $build_dir +./build_global_equiv_resol.sh > $logs_dir/build_global_equiv_resol.log 2>&1 +} + +#------------------------------------ +# build mosaic file +#------------------------------------ +$Build_mosaic_file && { +echo " .... Building mosaic_file .... 
" +cd $build_dir +./build_mosaic_file.sh > $logs_dir/build_mosaic_file.log 2>&1 +} + cd $build_dir echo 'Building utils done' diff --git a/sorc/install_all.sh b/sorc/install_all.sh index 47adedb17..0087caefd 100755 --- a/sorc/install_all.sh +++ b/sorc/install_all.sh @@ -20,48 +20,48 @@ fi #------------------------------------ # install forecast #------------------------------------ - ${CP} regional_forecast.fd/NEMS/exe/NEMS.x ../exec/regional_forecast.x +#${CP} NEMSfv3gfs/fv3.exe ../exec/regional_forecast.x #------------------------------------ # install post #------------------------------------ - ${CP} regional_post.fd/exec/ncep_post ../exec/regional_post.x + ${CP} EMC_post/exec/ncep_post ../exec/ncep_post #------------------------------------ # install chgres #------------------------------------ - ${CP} regional_utils.fd/exec/global_chgres ../exec/regional_chgres.x +# ${CP} regional_utils.fd/exec/global_chgres ../exec/regional_chgres.x #------------------------------------ # install chgres_cube #------------------------------------ - ${CP} regional_utils.fd/exec/chgres_cube.exe ../exec/regional_chgres_cube.x + ${CP} UFS_UTILS_chgres_grib2/exec/chgres_cube.exe ../exec/chgres_cube.exe #------------------------------------ # install orog #------------------------------------ - ${CP} regional_utils.fd/exec/orog.x ../exec/regional_orog.x + ${CP} UFS_UTILS_develop/exec/orog.x ../exec/orog.x #------------------------------------ # install sfc_climo_gen #------------------------------------ - ${CP} regional_utils.fd/exec/sfc_climo_gen ../exec/regional_sfc_climo_gen.x + ${CP} UFS_UTILS_develop/exec/sfc_climo_gen ../exec/sfc_climo_gen #------------------------------------ # install regional_grid #------------------------------------ - ${CP} regional_utils.fd/exec/regional_grid ../exec/regional_grid.x +# ${CP} regional_utils.fd/exec/regional_grid ../exec/regional_grid.x #------------------------------------ # install fre-nctools #------------------------------------ - ${CP} regional_utils.fd/exec/make_hgrid ../exec/regional_make_hgrid.x -#${CP} regional_utils.fd/exec/make_hgrid_parallel ../exec/regional_make_hgrid_parallel.x - ${CP} regional_utils.fd/exec/make_solo_mosaic ../exec/regional_make_solo_mosaic.x - ${CP} regional_utils.fd/exec/fregrid ../exec/regional_fregrid.x -#${CP} regional_utils.fd/exec/fregrid_parallel ../exec/regional_fregrid_parallel.x - ${CP} regional_utils.fd/exec/filter_topo ../exec/regional_filter_topo.x - ${CP} regional_utils.fd/exec/shave.x ../exec/regional_shave.x + ${CP} UFS_UTILS_develop/exec/make_hgrid ../exec/make_hgrid + ${CP} UFS_UTILS_develop/exec/make_hgrid_parallel ../exec/make_hgrid_parallel + ${CP} UFS_UTILS_develop/exec/make_solo_mosaic ../exec/make_solo_mosaic + ${CP} UFS_UTILS_develop/exec/fregrid ../exec/fregrid + ${CP} UFS_UTILS_develop/exec/fregrid_parallel ../exec/fregrid_parallel + ${CP} UFS_UTILS_develop/exec/filter_topo ../exec/filter_topo + ${CP} UFS_UTILS_develop/exec/shave.x ../exec/shave.x #------------------------------------ # install gsi diff --git a/sorc/link_fix.sh b/sorc/link_fix.sh index af58f5c33..191c36683 100755 --- a/sorc/link_fix.sh +++ b/sorc/link_fix.sh @@ -3,27 +3,44 @@ set -xeu source ./machine-setup.sh > /dev/null 2>&1 -LINK="cp -rp" - pwd=$(pwd -P) -if [[ ${target} == "wcoss_dell_p3" || ${target} == "wcoss" || ${target} == "wcoss_cray" ]]; then - FIX_DIR="/gpfs/dell2/emc/modeling/noscrub/emc.campara/fix_fv3cam" +# prepare fixed data directories + +RGNL_WFLOW_DIR=$( pwd )/.. 
+cd ${RGNL_WFLOW_DIR} +mkdir -p fix/fix_fv3 +cd fix + +if [ ${target} == "theia" ]; then + + ln -sfn /scratch4/NCEPDEV/global/save/glopara/git/fv3gfs/fix/fix_am fix_am + elif [ ${target} == "hera" ]; then - FIX_DIR="/scratch2/NCEPDEV/fv3-cam/emc.campara/fix_fv3cam" + + ln -sfn /scratch1/NCEPDEV/global/glopara/fix/fix_am fix_am + +elif [[ ${target} == "wcoss_dell_p3" || ${target} == "wcoss" || ${target} == "wcoss_cray" ]]; then + + ln -sfn /gpfs/dell2/emc/modeling/noscrub/emc.campara/fix_fv3cam fix_am + +elif [ ${target} == "odin" ]; then + + ln -sfn /scratch/ywang/fix/theia_fix/fix_am fix_am + +elif [ ${target} == "cheyenne" ]; then + + ln -sfn /glade/p/ral/jntp/GMTB/FV3GFS_V1_RELEASE/fix/fix_am/ fix_am + elif [ ${target} == "jet" ]; then - FIX_DIR="/scratch4/NCEPDEV/global/save/glopara/git/fv3gfs/fix" + + ln -sfn regional/build_regional/fix/fix_am fix_am + else - echo "Unknown site " ${target} - exit 1 -fi -mkdir -p ${pwd}/../fix -cd ${pwd}/../fix ||exit 8 -for dir in fix_am fix_nest fix_sar ; do - [[ -d $dir ]] && rm -rf $dir -done + echo "Unknown target " ${target} + exit 1 -${LINK} $FIX_DIR/* . +fi exit diff --git a/sorc/partial_build.sh b/sorc/partial_build.sh index e0189c2ef..413b8af1a 100755 --- a/sorc/partial_build.sh +++ b/sorc/partial_build.sh @@ -3,7 +3,8 @@ # declare -a Build_prg=("Build_libs" "Build_forecast" "Build_forecast_ccpp" "Build_gsi" \ "Build_post" "Build_utils" "Build_chgres" "Build_chgres_cube" \ - "Build_orog" "Build_sfc_climo_gen" "Build_regional_grid" "Build_nctools") + "Build_orog" "Build_sfc_climo_gen" "Build_regional_grid" "Build_nctools" \ + "Build_mosaic_file" "Build_global_equiv_resol") # # function parse_cfg: read config file and retrieve the values diff --git a/sorc/regional_build.cfg b/sorc/regional_build.cfg index 5f4c1a27e..2101f92e9 100644 --- a/sorc/regional_build.cfg +++ b/sorc/regional_build.cfg @@ -3,9 +3,9 @@ Building libraries (libs) ............................. no Building forecast (forecast) .......................... no - Building forecast_ccpp (forecast_ccpp) ................ no + Building forecast_ccpp (forecast_ccpp) ................ yes Building gsi (gsi) .................................... no - Building post (post) .................................. no + Building post (post) .................................. yes Building utils (utils) ................................ yes Building chgres (chgres) .............................. no Building chgres_cube (chgres_cube) .................... yes @@ -13,6 +13,8 @@ Building regional_grid (regional_grid) ................ yes Building orog (orog) .................................. yes Building fre-nctools (nctools) ........................ yes + Building mosaic_file (mosaic_file) .................... yes + Building global_equiv_resol (global_equiv_resol) ...... yes # -- END -- From a261d766afa140d319ad801f7c99e776e6f08e54 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 14 Nov 2019 14:39:40 -0700 Subject: [PATCH 053/203] Start process of moving module loads out of J-jobs or ex-scripts and into into separate module files. Changes are listed below: 1) Add a new script (ush/load_modules.sh) that takes in the task name as an argument and uses the appropriate module file for that task (for the current machine) to load the necessary modules. 2) For the make_grid task and on hera only (so far), remove the module load commands from the ex-script (scripts/exregional_make_grid.sh) and move them to a new module file at $HOMErrfs/modulefiles/hera/make_grid. 
3) In scripts/exregional_make_grid.sh, remove the module loads for theia. 4) In the rocoto XML template file (ush/templates/FV3SAR_wflow.xml), create a new ENTITY (LOAD_MODULES_SCR) that contains the name of the new script that loads the module files for a given task (ush/load_modules.sh). Also, create new ENTITYs that contain the names of the workflow tasks (this is just for convenience since the task name is going to appear in several places in the XML in each task). 5) For the make_grid task only (so far), I changed the "..." in "..." in the rocoto XML template to contain calls to two scripts -- first the load_modules.sh script with the task name as an argument, and then the J-job for the task -- instead of just one (the J-job for the task). This still needs to be tested. --- scripts/exregional_make_grid.sh | 62 ++++++------------ ush/load_modules.sh | 93 +++++++++++++++++++++++++++ ush/templates/FV3SAR_wflow.xml | 107 +++++++++++++++++++------------- 3 files changed, 176 insertions(+), 86 deletions(-) create mode 100644 ush/load_modules.sh diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index 45f0e5d85..c74383454 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -132,50 +132,28 @@ case $MACHINE in ;; -"THEIA") +#"HERA") +## +# { save_shell_opts; set +x; } > /dev/null 2>&1 # - { save_shell_opts; set +x; } > /dev/null 2>&1 - - . /apps/lmod/lmod/init/sh - module purge - module load intel/16.1.150 - module load impi - module load hdf5/1.8.14 - module load netcdf/4.3.0 - module list - - { restore_shell_opts; } > /dev/null 2>&1 - - export APRUN="time" - export topo_dir="/scratch4/NCEPDEV/global/save/glopara/svn/fv3gfs/fix/fix_orog" - - ulimit -s unlimited - ulimit -a - ;; - - -"HERA") +# . /apps/lmod/lmod/init/sh +# module purge +# module load intel/18.0.5.274 +## module load netcdf/4.6.1 +## module load hdf5/1.10.4 +# module load netcdf/4.7.0 +# module load hdf5/1.10.5 +# module list +# +# { restore_shell_opts; } > /dev/null 2>&1 +# +# export APRUN="time" +# export topo_dir="/scratch1/NCEPDEV/global/glopara/fix/fix_orog" +# +# ulimit -s unlimited +# ulimit -a +# ;; # - { save_shell_opts; set +x; } > /dev/null 2>&1 - - . /apps/lmod/lmod/init/sh - module purge - module load intel/18.0.5.274 -# module load netcdf/4.6.1 -# module load hdf5/1.10.4 - module load netcdf/4.7.0 - module load hdf5/1.10.5 - module list - - { restore_shell_opts; } > /dev/null 2>&1 - - export APRUN="time" - export topo_dir="/scratch1/NCEPDEV/global/glopara/fix/fix_orog" - - ulimit -s unlimited - ulimit -a - ;; - "JET") # diff --git a/ush/load_modules.sh b/ush/load_modules.sh new file mode 100644 index 000000000..85fa8a6c7 --- /dev/null +++ b/ush/load_modules.sh @@ -0,0 +1,93 @@ +#!/bin/bash + +#set -x -u -e +#date +# +#----------------------------------------------------------------------- +# +# Source the variable definitions script and the function definitions +# file. +# +#----------------------------------------------------------------------- +# +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function.
+# +#----------------------------------------------------------------------- +# +{ save_shell_opts; set -u -x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# +# +#----------------------------------------------------------------------- +# +task_name="$1" + +#. ${HOMErrfs}/rocoto/machine-setup.sh +#export machine=${target} + +# +#----------------------------------------------------------------------- +# +# +# +#----------------------------------------------------------------------- +# +case "$MACHINE" in +# + "WCOSS_C") + . /opt/modules/default/init/sh + ;; +# + "DELL") + . /usrx/local/prod/lmod/lmod/init/sh + ;; +# + "HERA") + . /apps/lmod/lmod/init/sh + ;; +# + "JET") + . /apps/lmod/lmod/init/sh + ;; +# + *) + print_err_msg_exit " +The script to source to initialize lmod (module loads) has not yet been +specified for the current machine (MACHINE): + MACHINE = \"$MACHINE\"" + ;; +# +esac +# +#----------------------------------------------------------------------- +# +# +# +#----------------------------------------------------------------------- +# +module use ${HOMErrfs}/modulefiles/${MACHINE} +module load ${task_name} +module list + +#exec "$@" + diff --git a/ush/templates/FV3SAR_wflow.xml b/ush/templates/FV3SAR_wflow.xml index 6eedf4a68..cf77581f8 100644 --- a/ush/templates/FV3SAR_wflow.xml +++ b/ush/templates/FV3SAR_wflow.xml @@ -77,6 +77,24 @@ Variables that are not modified by the workflow generation script. &QUEUE_HPSS;&ACCOUNT;"> &QUEUE_FCST;&ACCOUNT;"> + + + + + + + + + + + + + + ]> @@ -91,16 +109,17 @@ Variables that are not modified by the workflow generation script. ************************************************************************ ************************************************************************ --> - + &RSRC_MAKE_GRID; &RSRV_DEFAULT; - - &JOBSDIR;/JREGIONAL_MAKE_GRID + { &USHDIR/&LOAD_MODULES_SCR; &TN_MAKE_GRID;; &JOBSDIR;/JREGIONAL_MAKE_GRID } &PROC_MAKE_GRID; - make_grid - &LOGDIR;/make_grid.log + &TN_MAKE_GRID; + &LOGDIR;/&TN_MAKE_GRID;.log + MACHINE&MACHINE; + HOMErrfs&HOMErrfs; GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; PDY@Y@m@d @@ -113,15 +132,15 @@ Variables that are not modified by the workflow generation script. ************************************************************************ ************************************************************************ --> - + &RSRC_MAKE_OROG; &RSRV_DEFAULT; &JOBSDIR;/JREGIONAL_MAKE_OROG &PROC_MAKE_OROG; - make_orog - &LOGDIR;/make_orog.log + &TN_MAKE_OROG; + &LOGDIR;/&TN_MAKE_OROG;.log GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; PDY@Y@m@d @@ -142,15 +161,15 @@ Variables that are not modified by the workflow generation script. 
************************************************************************ ************************************************************************ --> - + &RSRC_MAKE_SFC_CLIMO; &RSRV_DEFAULT; &JOBSDIR;/JREGIONAL_MAKE_SFC_CLIMO &PROC_MAKE_SFC_CLIMO; - make_sfc_climo - &LOGDIR;/make_sfc_climo.log + &TN_MAKE_SFC_CLIMO; + &LOGDIR;/&TN_MAKE_SFC_CLIMO;.log GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; PDY@Y@m@d @@ -164,8 +183,8 @@ Variables that are not modified by the workflow generation script. &RUN_TASK_MAKE_GRID;FALSE - - &LOGDIR;/make_orog_task_complete.txt + + &LOGDIR;/&TN_MAKE_OROG;_task_complete.txt &RUN_TASK_MAKE_OROG;FALSE @@ -176,15 +195,15 @@ Variables that are not modified by the workflow generation script. ************************************************************************ ************************************************************************ --> - + &RSRC_GET_EXTRN_MDL_FILES; &RSRV_HPSS; &JOBSDIR;/JREGIONAL_GET_EXTRN_FILES &PROC_GET_EXTRN_MDL_FILES; - get_files_ICS - &LOGDIR;/get_files_ICS_@Y@m@d@H.log + &TN_GET_EXTRN_ICS; + &LOGDIR;/&TN_GET_EXTRN_ICS;_@Y@m@d@H.log GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; CYCLE_DIR&CYCLE_DIR; @@ -198,15 +217,15 @@ Variables that are not modified by the workflow generation script. ************************************************************************ ************************************************************************ --> - + &RSRC_GET_EXTRN_MDL_FILES; &RSRV_HPSS; &JOBSDIR;/JREGIONAL_GET_EXTRN_FILES &PROC_GET_EXTRN_MDL_FILES; - get_files_LBCS - &LOGDIR;/get_files_LBCS_@Y@m@d@H.log + &TN_GET_EXTRN_LBCS; + &LOGDIR;/&TN_GET_EXTRN_LBCS;_@Y@m@d@H.log GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; CYCLE_DIR&CYCLE_DIR; @@ -220,15 +239,15 @@ Variables that are not modified by the workflow generation script. ************************************************************************ ************************************************************************ --> - + &RSRC_MAKE_ICS_SURF_LBC0; &RSRV_DEFAULT; &JOBSDIR;/JREGIONAL_MAKE_IC_LBC0 &PROC_MAKE_ICS_SURF_LBC0; - make_ICS_surf_LBC0 - &LOGDIR;/make_ICS_surf_LBC0_@Y@m@d@H.log + &TN_MAKE_ICS; + &LOGDIR;/&TN_MAKE_ICS;_@Y@m@d@H.log GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; CYCLE_DIR&CYCLE_DIR; @@ -237,20 +256,20 @@ Variables that are not modified by the workflow generation script. - + &LOGDIR;/make_grid_task_complete.txt &RUN_TASK_MAKE_GRID;FALSE - - &LOGDIR;/make_orog_task_complete.txt + + &LOGDIR;/&TN_MAKE_OROG;_task_complete.txt &RUN_TASK_MAKE_OROG;FALSE - - &LOGDIR;/make_sfc_climo_task_complete.txt + + &LOGDIR;/&TN_MAKE_SFC_CLIMO;_task_complete.txt &RUN_TASK_MAKE_SFC_CLIMO;FALSE @@ -261,15 +280,15 @@ Variables that are not modified by the workflow generation script. ************************************************************************ ************************************************************************ --> - + &RSRC_MAKE_LBC1_TO_LBCN; &RSRV_DEFAULT; &JOBSDIR;/JREGIONAL_MAKE_LBC1_TO_LBCN &PROC_MAKE_LBC1_TO_LBCN; - make_LBC1_to_LBCN - &LOGDIR;/make_LBC1_to_LBCN_@Y@m@d@H.log + &TN_MAKE_LBCS; + &LOGDIR;/&TN_MAKE_LBCS;_@Y@m@d@H.log GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; CYCLE_DIR&CYCLE_DIR; @@ -278,20 +297,20 @@ Variables that are not modified by the workflow generation script. 
- + &LOGDIR;/make_grid_task_complete.txt &RUN_TASK_MAKE_GRID;FALSE - - &LOGDIR;/&TN_MAKE_OROG;_task_complete.txt &RUN_TASK_MAKE_OROG;FALSE - - &LOGDIR;/&TN_MAKE_SFC_CLIMO;_task_complete.txt &RUN_TASK_MAKE_SFC_CLIMO;FALSE @@ -302,15 +321,15 @@ Variables that are not modified by the workflow generation script. ************************************************************************ ************************************************************************ --> - + &RSRC_RUN_FV3; &RSRV_RUN_FV3; &JOBSDIR;/JREGIONAL_RUN_FV3 &PROC_RUN_FV3; - run_FV3 - &LOGDIR;/run_FV3_@Y@m@d@H.log + &TN_RUN_FCST; + &LOGDIR;/&TN_RUN_FCST;_@Y@m@d@H.log GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; CYCLE_DIR&CYCLE_DIR; @@ -319,8 +338,8 @@ Variables that are not modified by the workflow generation script. - - + + @@ -329,19 +348,19 @@ Variables that are not modified by the workflow generation script. ************************************************************************ ************************************************************************ --> - + &FHR; - + &RSRC_POST; &RSRV_DEFAULT; &JOBSDIR;/JREGIONAL_RUN_POST &PROC_POST; - run_post_#fhr# - &LOGDIR;/run_post_#fhr#_@Y@m@d@H.log + &TN_RUN_POST;_#fhr# + &LOGDIR;/&TN_RUN_POST;_#fhr#_@Y@m@d@H.log GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; CYCLE_DIR&CYCLE_DIR; From 24825c6094506f76bee9ae22a399f02ee7b3ba17 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 14 Nov 2019 15:35:17 -0700 Subject: [PATCH 054/203] Add module file missed in last commit. --- modulefiles/hera/make_grid | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 modulefiles/hera/make_grid diff --git a/modulefiles/hera/make_grid b/modulefiles/hera/make_grid new file mode 100644 index 000000000..240cbb10e --- /dev/null +++ b/modulefiles/hera/make_grid @@ -0,0 +1,11 @@ +#%Module##################################################### +## Module file for make_grid task. +############################################################# + +. /apps/lmod/lmod/init/sh +module purge +module load intel/18.0.5.274 +module load netcdf/4.7.0 +module load hdf5/1.10.5 +module list + From cf78627478faa3d91eb34e91d9d954e365ce635c Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 15 Nov 2019 14:58:36 -0700 Subject: [PATCH 055/203] Changes for the make_grid task to get it to work with the module loads moved out of the J-job (on hera only). Now works. Details below. 1) modulefiles/hera/make_grid: Remove the sourcing of the script that defines the module() shell function from the module file for this task ($HOMErrfs/modulefiles/hera/make_grid). That is already done in the ush/load_modules_run_task.sh script (see below). 2) scripts/exregional_make_grid.sh: Uncomment parts of the HERA stanza in the case statement. We need to do this because this stanza does more than just load modules (which is now done in the ush/load_modules_run_task.sh script); it defines certain variables (e.g. APRUN, topo_dir). 3) ush/load_modules.sh: a) Rename to ush/load_modules_run_task.sh because it not only loads the modules but also runs the make_grid (or whatever specified) task. b) Add appropriate error checking, usage message, informational messages, and comments. 4) ush/templates/FV3SAR_wflow.xml: a) Fix the way the J-job for the make_grid task is called -- now use the ush/load_modules_run_task.sh script to load modules and run the task.
b) Remove unnecessary settings of the environment variables MACHINE and HOMErrfs (these are available to the ush/load_modules_run_task.sh script via sourcing the global variable definitions file). --- modulefiles/hera/make_grid | 2 - scripts/exregional_make_grid.sh | 9 +- ush/load_modules.sh | 93 ------------------ ush/load_modules_run_task.sh | 162 ++++++++++++++++++++++++++++ ush/templates/FV3SAR_wflow.xml | 33 ++++++- 5 files changed, 195 insertions(+), 104 deletions(-) delete mode 100644 ush/load_modules.sh create mode 100755 ush/load_modules_run_task.sh diff --git a/modulefiles/hera/make_grid b/modulefiles/hera/make_grid index 240cbb10e..66368b2aa 100644 --- a/modulefiles/hera/make_grid +++ b/modulefiles/hera/make_grid @@ -2,10 +2,8 @@ ## Module file for make_grid task. ############################################################# -. /apps/lmod/lmod/init/sh module purge module load intel/18.0.5.274 module load netcdf/4.7.0 module load hdf5/1.10.5 -module list diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index c74383454..8f5b08d69 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -132,8 +132,8 @@ case $MACHINE in ;; -#"HERA") -# +"HERA") +# # { save_shell_opts; set +x; } > /dev/null 2>&1 # # . /apps/lmod/lmod/init/sh # module purge # module load intel/18.0.5.274 ## module load netcdf/4.6.1 ## module load hdf5/1.10.4 # module load netcdf/4.7.0 # module load hdf5/1.10.5 # module list # # { restore_shell_opts; } > /dev/null 2>&1 # # export APRUN="time" -# export topo_dir="/scratch1/NCEPDEV/global/glopara/fix/fix_orog" + APRUN="time" + topo_dir="/scratch1/NCEPDEV/global/glopara/fix/fix_orog" # # ulimit -s unlimited # ulimit -a -# ;; + ;; # "JET") diff --git a/ush/load_modules.sh b/ush/load_modules.sh deleted file mode 100644 index 85fa8a6c7..000000000 --- a/ush/load_modules.sh +++ /dev/null @@ -1,93 +0,0 @@ -#!/bin/bash - -#set -x -u -e -#date -# -#----------------------------------------------------------------------- -# -# Source the variable definitions script and the function definitions -# file. -# -#----------------------------------------------------------------------- -# -. ${GLOBAL_VAR_DEFNS_FP} -. $USHDIR/source_util_funcs.sh -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; set -u -x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# -scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# -# -#----------------------------------------------------------------------- -# -task_name="$1" - -#. ${HOMErrfs}/rocoto/machine-setup.sh -#export machine=${target} - -# -#----------------------------------------------------------------------- -# -# -# -#----------------------------------------------------------------------- -# -case "$MACHINE" in -# - "WCOSS_C") - . /opt/modules/default/init/sh - ;; -# - "DELL") - . /usrx/local/prod/lmod/lmod/init/sh - ;; -# - "HERA") - . /apps/lmod/lmod/init/sh - ;; -# - "JET") - .
/apps/lmod/lmod/init/sh - ;; -# - *) - print_err_msg_exit " -The script to source to initialize lmod (module loads) has not yet been -specified for the current machine (MACHINE): - MACHINE = \"$MACHINE\"" - ;; -# -esac -# -#----------------------------------------------------------------------- -# -# -# -#----------------------------------------------------------------------- -# -module use ${HOMErrfs}/modulefiles/${MACHINE} -module load ${task_name} -module list - -#exec "$@" - diff --git a/ush/load_modules_run_task.sh b/ush/load_modules_run_task.sh new file mode 100755 index 000000000..d8c843463 --- /dev/null +++ b/ush/load_modules_run_task.sh @@ -0,0 +1,162 @@ +#!/bin/bash + +# +#----------------------------------------------------------------------- +# +# Source the variable definitions script and the function definitions +# file. +# +#----------------------------------------------------------------------- +# +. ${GLOBAL_VAR_DEFNS_FP} +. $USHDIR/source_util_funcs.sh +# +#----------------------------------------------------------------------- +# +# Save current shell options (in a global array). Then set new options +# for this script/function. +# +#----------------------------------------------------------------------- +# +{ save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Check arguments. +# +#----------------------------------------------------------------------- +# +if [ "$#" -ne 2 ]; then + + print_err_msg_exit " +Incorrect number of arguments specified: + + Number of arguments specified: $# + +Usage: + + ${scrfunc_fn} task_name jjob_fp + +where the arguments are defined as follows: + + task_name: + The name of the rocoto task for which this script will load modules + and launch the J-job. + + jjob_fp + The full path to the J-job script corresponding to task_name. This + script will launch this J-job using the \"exec\" command (which will + first terminate this script and then launch the j-job; see man page of + the \"exec\" command). +" + +fi +# +#----------------------------------------------------------------------- +# +# Call the script that defines the module() function. This is needed so +# we can perform "module load ..." calls later below. +# +#----------------------------------------------------------------------- +# +print_info_msg "$VERBOSE" " +Initializing the shell function \"module()\" (and others) in order to be +able to use \"module load ...\" to load necessary modules ..." + +case "$MACHINE" in +# + "WCOSS_C") + . /opt/modules/default/init/sh + ;; +# + "DELL") + . /usrx/local/prod/lmod/lmod/init/sh + ;; +# + "HERA") + . /apps/lmod/lmod/init/sh + ;; +# + "JET") + . /apps/lmod/lmod/init/sh + ;; +# + *) + print_err_msg_exit " +The script to source to initialize lmod (module loads) has not yet been +specified for the current machine (MACHINE): + MACHINE = \"$MACHINE\"" + ;; +# +esac +# +#----------------------------------------------------------------------- +# +# Get the task name. 
Then shift the argument list so that the first ar- +# gument (the task name) gets dropped from the start of the arguments +# list. This results in the shell variable $@ containing only the re- +# maining arguments (which in this case should consist of the full path +# to the J-job to call). +# +#----------------------------------------------------------------------- +# +task_name="$1" +jjob_fp="$2" +# +#----------------------------------------------------------------------- +# +# Purge modules and load the module file for the specified task on the +# current machine. Note that the path to this module file is given by +# +# $HOMErrfs/modulefiles/$machine/${task_name} +# +# where HOMErrfs is the workflow home directory, machine is the name of +# the current machine in lowercase, and task_name is the name of the +# task that this script will launch (via the exec command below). +# +#----------------------------------------------------------------------- +# +print_info_msg "$VERBOSE" " +Loading modules for task \"${task_name}\" ..." + +machine=${MACHINE,,} +module purge +module use $HOMErrfs/modulefiles/$machine +module load ${task_name} +module list +# +#----------------------------------------------------------------------- +# +# Use the exec command to terminate the current script and launch the +# J-job for the specified task. +# +#----------------------------------------------------------------------- +# +print_info_msg "$VERBOSE" " +Launching J-job (jjob_fp) for task \"${task_name}\" ... + jjob_fp = \"${jjob_fp}\" +" +exec "${jjob_fp}" +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. +# +#----------------------------------------------------------------------- +# +{ restore_shell_opts; } > /dev/null 2>&1 + + diff --git a/ush/templates/FV3SAR_wflow.xml b/ush/templates/FV3SAR_wflow.xml index cf77581f8..316af6473 100644 --- a/ush/templates/FV3SAR_wflow.xml +++ b/ush/templates/FV3SAR_wflow.xml @@ -78,9 +78,10 @@ Variables that are not modified by the workflow generation script. &QUEUE_FCST;&ACCOUNT;"> - + + + + &LOAD_MODULES_RUN_TASK_FP; "&TN_MAKE_GRID;" "&JOBSDIR;/JREGIONAL_MAKE_GRID" &PROC_MAKE_GRID; &TN_MAKE_GRID; &LOGDIR;/&TN_MAKE_GRID;.log - MACHINE&MACHINE; - HOMErrfs&HOMErrfs; GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; PDY@Y@m@d From c5bf612a082c76ff52db00ff3bce75b70078f96a Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 15 Nov 2019 15:03:57 -0700 Subject: [PATCH 056/203] Improvements to tests/run_experiments.sh: (1) Remove variable TESTSDIR since it is not needed. (2) Introduce variable baseline_configs_dir. This is the directory in which the experiment/workflow configuration files for the baselines are assumed to be located. (3) Add argument check, usage message, and file existence check. (4) Clean up informational messages. --- tests/run_experiments.sh | 135 ++++++++++++++++++++++++--------------- 1 file changed, 83 insertions(+), 52 deletions(-) diff --git a/tests/run_experiments.sh b/tests/run_experiments.sh index f09fb6eac..e90cfae87 100755 --- a/tests/run_experiments.sh +++ b/tests/run_experiments.sh @@ -16,13 +16,13 @@ scrfunc_dir=$( dirname "${scrfunc_fp}" ) #----------------------------------------------------------------------- # # The current script should be located in the "tests" subdirectory of -# the workflow directory, which we denote by HOMErrfs.
Thus, the work- -# flow directory (HOMErrfs) is the one above the directory of the cur- +# the workflow directory, which we denote by homerrfs. Thus, the work- +# flow directory (homerrfs) is the one above the directory of the cur- # rent script. Set HOMRErrfs accordingly. # #----------------------------------------------------------------------- # -HOMErrfs=${scrfunc_dir%/*} +homerrfs=${scrfunc_dir%/*} # #----------------------------------------------------------------------- # @@ -30,8 +30,8 @@ HOMErrfs=${scrfunc_dir%/*} # #----------------------------------------------------------------------- # -USHDIR="$HOMErrfs/ush" -TESTSDIR="$HOMErrfs/tests" +ushdir="$homerrfs/ush" +baseline_configs_dir="$homerrfs/tests/baseline_configs" # #----------------------------------------------------------------------- # @@ -39,7 +39,7 @@ TESTSDIR="$HOMErrfs/tests" # #----------------------------------------------------------------------- # -. $USHDIR/source_util_funcs.sh +. $ushdir/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -56,12 +56,54 @@ TESTSDIR="$HOMErrfs/tests" # #----------------------------------------------------------------------- # -MACHINE="HERA" -ACCOUNT="gsd-fv3" +machine="HERA" +account="gsd-fv3" -USE_CRON_TO_RELAUNCH="TRUE" -#USE_CRON_TO_RELAUNCH="FALSE" -CRON_RELAUNCH_INTVL_MNTS="02" +use_cron_to_relaunch="TRUE" +#use_cron_to_relaunch="FALSE" +cron_relaunch_intvl_mnts="02" +# +#----------------------------------------------------------------------- +# +# Check arguments. +# +#----------------------------------------------------------------------- +# + if [ "$#" -ne 1 ]; then + + print_err_msg_exit " +Incorrect number of arguments specified: + + Number of arguments specified: $# + +Usage: + + ${scrfunc_fn} expts_file + +where expts_file is the name of the file containing the list of experi- +ments to run. If expts_file is the absolute path to a file, it is used +as is. If it is a relative path (including just a file name), it is as- +sumed to be given relative to the path from which this script is called. +" + + fi +# +#----------------------------------------------------------------------- +# +# +# +#----------------------------------------------------------------------- +# + expts_file="$1" + expts_list_fp=$( readlink -f "${expts_file}" ) + + if [ ! -f "${expts_list_fp}" ]; then + print_err_msg_exit "\ +The experiments list file (expts_file) specified as an argument to this +script (and with full path given by expts_list_fp) does not exist: + expts_file = \"${expts_file}\" + expts_list_fp = \"${expts_list_fp}\"" + fi # #----------------------------------------------------------------------- # @@ -86,24 +128,20 @@ CRON_RELAUNCH_INTVL_MNTS="02" # #----------------------------------------------------------------------- # -EXPTS_LIST_FN="expts_list.txt" -EXPTS_LIST_FP="${TESTSDIR}/${EXPTS_LIST_FN}" - print_info_msg " Reading in list of forecast experiments from file - EXPTS_LIST_FP = \"${EXPTS_LIST_FP}\" + expts_list_fp = \"${expts_list_fp}\" and storing result in the array \"all_lines\" (one array element per expe- riment)..." 
-readarray -t all_lines < "${EXPTS_LIST_FP}" +readarray -t all_lines < "${expts_list_fp}" all_lines_str=$( printf "\'%s\'\n" "${all_lines[@]}" ) print_info_msg " -All lines from experiments list file EXPTS_LIST_FP read in, where: - - EXPTS_LIST_FP = \"${EXPTS_LIST_FP}\" - -Contents of file (line by line, before any processing) are: +All lines from experiments list file (expts_list_fp) read in, where: + expts_list_fp = \"${expts_list_fp}\" +Contents of file are (line by line, each line within single quotes, and +before any processing): ${all_lines_str} " @@ -119,7 +157,7 @@ ${all_lines_str} #----------------------------------------------------------------------- # expts_list=() -field_separator="\|" # Need backslash as an escape sequence in the sed commands. +field_separator="\|" # Need backslash as an escape sequence in the sed commands below. j=0 num_lines="${#all_lines[@]}" @@ -165,12 +203,9 @@ num_expts="${#expts_list[@]}" expts_list_str=$( printf " \'%s\'\n" "${expts_list[@]}" ) print_info_msg " After processing, the number of experiments to run (num_expts) is: - num_expts = ${num_expts} - The list of forecast experiments to run (one experiment per line) is gi- ven by: - ${expts_list_str} " # @@ -185,14 +220,7 @@ ${expts_list_str} for (( i=0; i<=$((num_expts-1)); i++ )); do print_info_msg " - -Processing experiment #$((${i}+1)): ------------------------- - -The experiment specification line for this experiment is given by: - - ${expts_list[$i]} -" +Processing experiment \"${expts_list[$i]}\" ..." # # Get the name of the baseline on which the current experiment is based. # Then save the remainder of the current element of expts_list in the @@ -245,7 +273,7 @@ The experiment specification line for this experiment is given by: # This will be modified to obtain the configuration file for the current # experiment. # - baseline_config_fp="${TESTSDIR}/baseline_configs/config.${baseline_name}.sh" + baseline_config_fp="${baseline_configs_dir}/config.${baseline_name}.sh" # # Print out an error message and exit if a configuration file for the # current baseline does not exist. @@ -255,7 +283,8 @@ The experiment specification line for this experiment is given by: The experiment/workflow configuration file (baseline_config_fp) for the specified baseline (baseline_name) does not exist: baseline_name = \"${baseline_name}\" - baseline_config_fp = \"${baseline_config_fp}\"" + baseline_config_fp = \"${baseline_config_fp}\" +Please correct and rerun." fi # # We require that EXPT_SUBDIR in the configuration file for the baseline @@ -289,37 +318,37 @@ the name of the baseline (baseline_name): fi done # -# Reset EXPT_SUBDIR to the name of the current experiment. Below, we -# will write this to the configuration file for the current experiment. +# Set expt_subdir to the name of the current experiment. Below, we will +# write this to the configuration file for the current experiment. # - EXPT_SUBDIR="${expt_name}" + expt_subdir="${expt_name}" # # Create a configuration file for the current experiment. We do this by # first copying the baseline configuration file and then modifying the # the values of those variables within it that are different between the # baseline and the experiment. 
# - expt_config_fp="${USHDIR}/config.${expt_name}.sh" + expt_config_fp="$ushdir/config.${expt_name}.sh" cp_vrfy "${baseline_config_fp}" "${expt_config_fp}" - set_bash_param "${expt_config_fp}" "MACHINE" "$MACHINE" - set_bash_param "${expt_config_fp}" "ACCOUNT" "$ACCOUNT" - set_bash_param "${expt_config_fp}" "USE_CRON_TO_RELAUNCH" "${USE_CRON_TO_RELAUNCH}" - set_bash_param "${expt_config_fp}" "CRON_RELAUNCH_INTVL_MNTS" "${CRON_RELAUNCH_INTVL_MNTS}" - set_bash_param "${expt_config_fp}" "EXPT_SUBDIR" "${EXPT_SUBDIR}" + set_bash_param "${expt_config_fp}" "MACHINE" "$machine" + set_bash_param "${expt_config_fp}" "ACCOUNT" "$account" + set_bash_param "${expt_config_fp}" "USE_CRON_TO_RELAUNCH" "${use_cron_to_relaunch}" + set_bash_param "${expt_config_fp}" "CRON_RELAUNCH_INTVL_MNTS" "${cron_relaunch_intvl_mnts}" + set_bash_param "${expt_config_fp}" "EXPT_SUBDIR" "${expt_subdir}" printf "" for (( j=0; j<${num_mod_vars}; j++ )); do set_bash_param "${expt_config_fp}" "${modvar_name[$j]}" "${modvar_value[$j]}" done # -# Create a symlink called "config.sh" in USHDIR that points to the cur- +# Create a symlink called "config.sh" in ushdir that points to the cur- # rent experiment's configuration file. This must be done because the # experiment/workflow generation script assumes that this is the name # and location of the configuration file to use to generate a new expe- # riment and corresponding workflow. # - ln_vrfy -fs "${expt_config_fp}" "$USHDIR/config.sh" + ln_vrfy -fs "${expt_config_fp}" "$ushdir/config.sh" # #----------------------------------------------------------------------- # @@ -328,12 +357,14 @@ the name of the baseline (baseline_name): # #----------------------------------------------------------------------- # + log_fp="$ushdir/log.generate_FV3SAR_wflow.${expt_name}" print_info_msg " Generating experiment with name: - expt_name = \"${expt_name}\"" + expt_name = \"${expt_name}\" +Log file for generation step is: + log_fp = \"${log_fp}\"" - log_fp="$USHDIR/log.generate_FV3SAR_wflow.${expt_name}" - $USHDIR/generate_FV3SAR_wflow.sh 2>&1 >& "${log_fp}" || { \ + $ushdir/generate_FV3SAR_wflow.sh > "${log_fp}" 2>&1 || { \ print_err_msg_exit "\ Could not generate an experiment/workflow for the test specified by expt_name: @@ -352,9 +383,9 @@ log_fp: # #----------------------------------------------------------------------- # - EXPTDIR=$( readlink -f "$HOMErrfs/../expt_dirs/${EXPT_SUBDIR}" ) - mv_vrfy "${expt_config_fp}" "${EXPTDIR}" - mv_vrfy "${log_fp}" "${EXPTDIR}" + exptdir=$( readlink -f "$homerrfs/../expt_dirs/${expt_subdir}" ) + mv_vrfy "${expt_config_fp}" "${exptdir}" + mv_vrfy "${log_fp}" "${exptdir}" done # From 884dddf7fd621463c713bf40154dfcc8cc333175 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Mon, 18 Nov 2019 11:59:32 -0700 Subject: [PATCH 057/203] Rename tasks and corresponding J-jobs and ex-scripts to be shorter and more similar to EMC's workflow. 
--- ...EGIONAL_MAKE_IC_LBC0 => JREGIONAL_MAKE_ICS} | 2 +- ...L_MAKE_LBC1_TO_LBCN => JREGIONAL_MAKE_LBCS} | 2 +- jobs/{JREGIONAL_RUN_FV3 => JREGIONAL_RUN_FCST} | 2 +- ..._make_ic_lbc0.sh => exregional_make_ics.sh} | 0 ...lbc1_to_lbcn.sh => exregional_make_lbcs.sh} | 0 ...ional_run_fv3.sh => exregional_run_fcst.sh} | 0 ush/generate_FV3SAR_wflow.sh | 4 ++-- ush/templates/FV3SAR_wflow.xml | 18 +++++++++--------- 8 files changed, 14 insertions(+), 14 deletions(-) rename jobs/{JREGIONAL_MAKE_IC_LBC0 => JREGIONAL_MAKE_ICS} (99%) rename jobs/{JREGIONAL_MAKE_LBC1_TO_LBCN => JREGIONAL_MAKE_LBCS} (99%) rename jobs/{JREGIONAL_RUN_FV3 => JREGIONAL_RUN_FCST} (98%) rename scripts/{exregional_make_ic_lbc0.sh => exregional_make_ics.sh} (100%) rename scripts/{exregional_make_lbc1_to_lbcn.sh => exregional_make_lbcs.sh} (100%) rename scripts/{exregional_run_fv3.sh => exregional_run_fcst.sh} (100%) diff --git a/jobs/JREGIONAL_MAKE_IC_LBC0 b/jobs/JREGIONAL_MAKE_ICS similarity index 99% rename from jobs/JREGIONAL_MAKE_IC_LBC0 rename to jobs/JREGIONAL_MAKE_ICS index 465b7cf49..eeaa95f43 100755 --- a/jobs/JREGIONAL_MAKE_IC_LBC0 +++ b/jobs/JREGIONAL_MAKE_ICS @@ -216,7 +216,7 @@ EXTRN_MDL_FILES_DIR="${CYCLE_DIR}/${EXTRN_MDL_NAME_ICS}/ICS" # EXTRN_MDL_FNS_str="( "$( printf "\"%s\" " "${EXTRN_MDL_FNS[@]}" )")" -$SCRIPTSDIR/exregional_make_ic_lbc0.sh \ +$SCRIPTSDIR/exregional_make_ics.sh \ EXTRN_MDL_FNS="${EXTRN_MDL_FNS_str}" \ EXTRN_MDL_FILES_DIR="${EXTRN_MDL_FILES_DIR}" \ EXTRN_MDL_CDATE="${EXTRN_MDL_CDATE}" \ diff --git a/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN b/jobs/JREGIONAL_MAKE_LBCS similarity index 99% rename from jobs/JREGIONAL_MAKE_LBC1_TO_LBCN rename to jobs/JREGIONAL_MAKE_LBCS index 3348626a3..e29919d6f 100755 --- a/jobs/JREGIONAL_MAKE_LBC1_TO_LBCN +++ b/jobs/JREGIONAL_MAKE_LBCS @@ -218,7 +218,7 @@ EXTRN_MDL_FILES_DIR="${CYCLE_DIR}/${EXTRN_MDL_NAME_LBCS}/LBCS" EXTRN_MDL_FNS_str="( "$( printf "\"%s\" " "${EXTRN_MDL_FNS[@]}" )")" EXTRN_MDL_LBC_UPDATE_FHRS_str="( "$( printf "\"%s\" " "${EXTRN_MDL_LBC_UPDATE_FHRS[@]}" )")" -$SCRIPTSDIR/exregional_make_lbc1_to_lbcn.sh \ +$SCRIPTSDIR/exregional_make_lbcs.sh \ EXTRN_MDL_FNS="${EXTRN_MDL_FNS_str}" \ EXTRN_MDL_FILES_DIR="${EXTRN_MDL_FILES_DIR}" \ EXTRN_MDL_CDATE="${EXTRN_MDL_CDATE}" \ diff --git a/jobs/JREGIONAL_RUN_FV3 b/jobs/JREGIONAL_RUN_FCST similarity index 98% rename from jobs/JREGIONAL_RUN_FV3 rename to jobs/JREGIONAL_RUN_FCST index 38f75da12..803d70609 100755 --- a/jobs/JREGIONAL_RUN_FV3 +++ b/jobs/JREGIONAL_RUN_FCST @@ -73,7 +73,7 @@ mkdir_vrfy -p ${CYCLE_DIR}/RESTART # #----------------------------------------------------------------------- # -$SCRIPTSDIR/exregional_run_fv3.sh \ +$SCRIPTSDIR/exregional_run_fcst.sh \ CYCLE_DIR="${CYCLE_DIR}" || \ print_err_msg_exit "\ Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed." 
diff --git a/scripts/exregional_make_ic_lbc0.sh b/scripts/exregional_make_ics.sh similarity index 100% rename from scripts/exregional_make_ic_lbc0.sh rename to scripts/exregional_make_ics.sh diff --git a/scripts/exregional_make_lbc1_to_lbcn.sh b/scripts/exregional_make_lbcs.sh similarity index 100% rename from scripts/exregional_make_lbc1_to_lbcn.sh rename to scripts/exregional_make_lbcs.sh diff --git a/scripts/exregional_run_fv3.sh b/scripts/exregional_run_fcst.sh similarity index 100% rename from scripts/exregional_run_fv3.sh rename to scripts/exregional_run_fcst.sh diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index e7028089e..3445ed008 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -106,7 +106,7 @@ cp_vrfy ${TEMPLATE_XML_FP} ${WFLOW_XML_FP} # #----------------------------------------------------------------------- # -PROC_RUN_FV3="${NUM_NODES}:ppn=${NCORES_PER_NODE}" +PROC_RUN_FCST="${NUM_NODES}:ppn=${NCORES_PER_NODE}" FHR=( $( seq 0 1 ${FCST_LEN_HRS} ) ) i=0 @@ -148,7 +148,7 @@ set_file_param "${WFLOW_XML_FP}" "EXTRN_MDL_NAME_ICS" "${EXTRN_MDL_NAME_ICS}" set_file_param "${WFLOW_XML_FP}" "EXTRN_MDL_NAME_LBCS" "${EXTRN_MDL_NAME_LBCS}" set_file_param "${WFLOW_XML_FP}" "EXTRN_MDL_FILES_SYSBASEDIR_ICS" "${EXTRN_MDL_FILES_SYSBASEDIR_ICS}" set_file_param "${WFLOW_XML_FP}" "EXTRN_MDL_FILES_SYSBASEDIR_LBCS" "${EXTRN_MDL_FILES_SYSBASEDIR_LBCS}" -set_file_param "${WFLOW_XML_FP}" "PROC_RUN_FV3" "${PROC_RUN_FV3}" +set_file_param "${WFLOW_XML_FP}" "PROC_RUN_FCST" "${PROC_RUN_FCST}" set_file_param "${WFLOW_XML_FP}" "DATE_FIRST_CYCL" "${DATE_FIRST_CYCL}" set_file_param "${WFLOW_XML_FP}" "DATE_LAST_CYCL" "${DATE_LAST_CYCL}" set_file_param "${WFLOW_XML_FP}" "YYYY_FIRST_CYCL" "${YYYY_FIRST_CYCL}" diff --git a/ush/templates/FV3SAR_wflow.xml b/ush/templates/FV3SAR_wflow.xml index 316af6473..6d861dd0a 100644 --- a/ush/templates/FV3SAR_wflow.xml +++ b/ush/templates/FV3SAR_wflow.xml @@ -61,7 +61,7 @@ Variables that are not modified by the workflow generation script. - + 00:10:00"> @@ -70,12 +70,12 @@ Variables that are not modified by the workflow generation script. 00:45:00"> 00:30:00"> 01:00:00"> -04:30:00"> +04:30:00"> 00:30:00"> -&QUEUE_DEFAULT;&ACCOUNT;"> -&QUEUE_HPSS;&ACCOUNT;"> -&QUEUE_FCST;&ACCOUNT;"> +&QUEUE_DEFAULT;&ACCOUNT;"> +&QUEUE_HPSS;&ACCOUNT;"> +&QUEUE_FCST;&ACCOUNT;"> - &RSRC_RUN_FV3; - &RSRV_RUN_FV3; + &RSRC_RUN_FCST; + &RSRV_RUN_FCST; - &JOBSDIR;/JREGIONAL_RUN_FV3 - &PROC_RUN_FV3; + &JOBSDIR;/JREGIONAL_RUN_FCST + &PROC_RUN_FCST; &TN_RUN_FCST; &LOGDIR;/&TN_RUN_FCST;_@Y@m@d@H.log From e898524bbe3ed8ffe6e99f4a59448374867961e6 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Mon, 18 Nov 2019 12:04:40 -0700 Subject: [PATCH 058/203] For simplicity and clarity, change variable name from APRUN_SFC to APRUN in scripts/exregional_make_sfc_climo.sh. --- scripts/exregional_make_sfc_climo.sh | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/scripts/exregional_make_sfc_climo.sh b/scripts/exregional_make_sfc_climo.sh index f0927dad6..37bee4c35 100755 --- a/scripts/exregional_make_sfc_climo.sh +++ b/scripts/exregional_make_sfc_climo.sh @@ -123,19 +123,19 @@ case $MACHINE in "WCOSS_C") # This could be wrong. Just a guess since I don't have access to this machine. - APRUN_SFC=${APRUN_SFC:-"aprun -j 1 -n 6 -N 6"} + APRUN=${APRUN:-"aprun -j 1 -n 6 -N 6"} ;; "WCOSS") # This could be wrong. Just a guess since I don't have access to this machine. 
- APRUN_SFC=${APRUN_SFC:-"aprun -j 1 -n 6 -N 6"} + APRUN=${APRUN:-"aprun -j 1 -n 6 -N 6"} ;; "THEIA") # Need to load intel/15.1.133. This and all other module loads should go into a module file. module load intel/15.1.133 module list - APRUN_SFC="mpirun -np ${SLURM_NTASKS}" + APRUN="mpirun -np ${SLURM_NTASKS}" ;; "HERA") @@ -149,14 +149,14 @@ case $MACHINE in module load esmflocal/8_0_48b.netcdf47 #module load esmf/7.1.0r module list - APRUN_SFC="srun" + APRUN="srun" ;; *) print_err_msg_exit "\ Run command has not been specified for this machine: MACHINE = \"$MACHINE\" - APRUN_SFC = \"$APRUN_SFC\"" + APRUN = \"$APRUN\"" ;; esac @@ -167,7 +167,7 @@ esac # #----------------------------------------------------------------------- # -$APRUN_SFC ${EXECDIR}/sfc_climo_gen || print_err_msg_exit "\ +$APRUN ${EXECDIR}/sfc_climo_gen || print_err_msg_exit "\ Call to executable that generates surface climatology files returned with nonzero exit code." # From 632c41e46f8587bab8f6d31ff04dc8196e9fe944 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Mon, 18 Nov 2019 14:12:33 -0700 Subject: [PATCH 059/203] Changes (for Hera only thus far) to move module files out of J-jobs and ex-scripts and into an individual module file for each task. Details below. For all tasks on Hera: 1) Remove all module purge/use/load commands in J-jobs and ex-scripts and place them in separate module files (one per task) in directory modulefiles/hera/. 2) Modify the rocoto XML template file so that it calls each J-job via the ush/load_modules_run_task.sh script. This script first loads the module file for the task and then runs the J-job using the exec command (which kills the load_modules_run_task.sh process and replaces it with the J-job, all while keeping the appropriate modules loaded in the environment). For the run_fcst task on Hera: 1) In ush/generate_FV3SAR_wflow.sh, remove copying of module-setup.sh.inc from "${NEMSfv3gfs_DIR}/NEMS/src/conf" to the experiment directory as module-setup.sh, since we now do the module setup directly in ush/load_modules_run_task.sh. Also, instead of adding a line into module-setup.sh that modifies LD_LIBRARY_PATH to add the CCPP library path, perform this modification in scripts/exregional_run_fcst.sh directly (i.e. in the shell, without modifying any files). 2) In ush/generate_FV3SAR_wflow.sh, remove copying of modules.fv3 from "${NEMSfv3gfs_DIR}/NEMS/src/conf" to the experiment directory. Instead, create a symlink at modulefiles/hera/run_fcst_ccpp that points to "${NEMSfv3gfs_DIR}/NEMS/src/conf/modules.fv3". Note that the NEMSfv3gfs code must first be built in order for this symlink's target to exist. 3) In scripts/exregional_run_fcst.sh, remove creation of links in cycle directory that point to module-setup.sh and modules.fv3 in the experiment directory. The latter two files no longer exist, and with the above changes, the links are no longer needed. 4) Make the name of the module file for the forecast task dependent on the USE_CCPP flag, since we use different module files depending on whether or not we're using CCPP: run_fcst_ccpp vs. run_fcst_no_ccpp. The second one is an actual file while the first is the symlink mentioned above. 
--- jobs/JREGIONAL_MAKE_ICS | 26 ---- jobs/JREGIONAL_MAKE_LBCS | 26 ---- modulefiles/hera/get_extrn_ics | 8 ++ modulefiles/hera/get_extrn_lbcs | 8 ++ modulefiles/hera/make_grid | 1 + modulefiles/hera/make_ics | 23 ++++ modulefiles/hera/make_lbcs | 23 ++++ modulefiles/hera/make_orog | 10 ++ modulefiles/hera/make_sfc_climo | 15 +++ modulefiles/hera/run_fcst_ccpp | 1 + modulefiles/hera/run_fcst_no_ccpp | 12 ++ modulefiles/hera/run_post | 30 +++++ scripts/exregional_get_extrn_files.sh | 10 -- scripts/exregional_make_orog.sh | 17 +-- scripts/exregional_make_sfc_climo.sh | 10 -- scripts/exregional_run_fcst.sh | 31 +---- scripts/exregional_run_post.sh | 34 +----- ush/generate_FV3SAR_wflow.sh | 81 ------------- ush/load_modules_run_task.sh | 164 +++++++++++++++++++++++--- ush/templates/FV3SAR_wflow.xml | 36 +++--- 20 files changed, 298 insertions(+), 268 deletions(-) create mode 100644 modulefiles/hera/get_extrn_ics create mode 100644 modulefiles/hera/get_extrn_lbcs create mode 100644 modulefiles/hera/make_ics create mode 100644 modulefiles/hera/make_lbcs create mode 100644 modulefiles/hera/make_orog create mode 100644 modulefiles/hera/make_sfc_climo create mode 120000 modulefiles/hera/run_fcst_ccpp create mode 100644 modulefiles/hera/run_fcst_no_ccpp create mode 100644 modulefiles/hera/run_post diff --git a/jobs/JREGIONAL_MAKE_ICS b/jobs/JREGIONAL_MAKE_ICS index eeaa95f43..5614faaa2 100755 --- a/jobs/JREGIONAL_MAKE_ICS +++ b/jobs/JREGIONAL_MAKE_ICS @@ -119,35 +119,9 @@ case "$MACHINE" in ;; # "HERA") -# - { save_shell_opts; set +x; } > /dev/null 2>&1 - ulimit -s unlimited # ulimit -a - - module purge - module load intel/18.0.5.274 - module load impi/2018.0.4 - module load netcdf/4.6.1 - - module use /scratch1/NCEPDEV/nems/emc.nemspara/soft/modulefiles - module load esmf/8.0.0bs21 - - module use -a /scratch2/NCEPDEV/nwprod/NCEPLIBS/modulefiles - module load w3nco - module load nemsio/2.2.3 - module load bacio - module load sp - module load sfcio - module load sigio - - module load wgrib2 - - module list - APRUN="srun" - - { restore_shell_opts; } > /dev/null 2>&1 ;; # "JET") diff --git a/jobs/JREGIONAL_MAKE_LBCS b/jobs/JREGIONAL_MAKE_LBCS index e29919d6f..24ff59da6 100755 --- a/jobs/JREGIONAL_MAKE_LBCS +++ b/jobs/JREGIONAL_MAKE_LBCS @@ -119,35 +119,9 @@ case "$MACHINE" in ;; # "HERA") -# - { save_shell_opts; set +x; } > /dev/null 2>&1 - ulimit -s unlimited # ulimit -a - - module purge - module load intel/18.0.5.274 - module load impi/2018.0.4 - module load netcdf/4.6.1 - - module use /scratch1/NCEPDEV/nems/emc.nemspara/soft/modulefiles - module load esmf/8.0.0bs21 - - module use -a /scratch2/NCEPDEV/nwprod/NCEPLIBS/modulefiles - module load w3nco - module load nemsio/2.2.3 - module load bacio - module load sp - module load sfcio - module load sigio - - module load wgrib2 - - module list - APRUN="srun" - - { restore_shell_opts; } > /dev/null 2>&1 ;; # diff --git a/modulefiles/hera/get_extrn_ics b/modulefiles/hera/get_extrn_ics new file mode 100644 index 000000000..9935033fd --- /dev/null +++ b/modulefiles/hera/get_extrn_ics @@ -0,0 +1,8 @@ +#%Module##################################################### +## Module file for get_extrn_ics task. 
+############################################################# + +module purge + +module load hpss + diff --git a/modulefiles/hera/get_extrn_lbcs b/modulefiles/hera/get_extrn_lbcs new file mode 100644 index 000000000..1919f3355 --- /dev/null +++ b/modulefiles/hera/get_extrn_lbcs @@ -0,0 +1,8 @@ +#%Module##################################################### +## Module file for get_extrn_lbcs task. +############################################################# + +module purge + +module load hpss + diff --git a/modulefiles/hera/make_grid b/modulefiles/hera/make_grid index 66368b2aa..2d651a5c9 100644 --- a/modulefiles/hera/make_grid +++ b/modulefiles/hera/make_grid @@ -3,6 +3,7 @@ ############################################################# module purge + module load intel/18.0.5.274 module load netcdf/4.7.0 module load hdf5/1.10.5 diff --git a/modulefiles/hera/make_ics b/modulefiles/hera/make_ics new file mode 100644 index 000000000..29098fd72 --- /dev/null +++ b/modulefiles/hera/make_ics @@ -0,0 +1,23 @@ +#%Module##################################################### +## Module file for make_ics task. +############################################################# + +module purge + +module load intel/18.0.5.274 +module load impi/2018.0.4 +module load netcdf/4.6.1 + +module use /scratch1/NCEPDEV/nems/emc.nemspara/soft/modulefiles +module load esmf/8.0.0bs21 + +module use -a /scratch2/NCEPDEV/nwprod/NCEPLIBS/modulefiles +module load w3nco +module load nemsio/2.2.3 +module load bacio +module load sp +module load sfcio +module load sigio + +module load wgrib2 + diff --git a/modulefiles/hera/make_lbcs b/modulefiles/hera/make_lbcs new file mode 100644 index 000000000..e464cd9fe --- /dev/null +++ b/modulefiles/hera/make_lbcs @@ -0,0 +1,23 @@ +#%Module##################################################### +## Module file for make_lbcs task. +############################################################# + +module purge + +module load intel/18.0.5.274 +module load impi/2018.0.4 +module load netcdf/4.6.1 + +module use /scratch1/NCEPDEV/nems/emc.nemspara/soft/modulefiles +module load esmf/8.0.0bs21 + +module use -a /scratch2/NCEPDEV/nwprod/NCEPLIBS/modulefiles +module load w3nco +module load nemsio/2.2.3 +module load bacio +module load sp +module load sfcio +module load sigio + +module load wgrib2 + diff --git a/modulefiles/hera/make_orog b/modulefiles/hera/make_orog new file mode 100644 index 000000000..295940dae --- /dev/null +++ b/modulefiles/hera/make_orog @@ -0,0 +1,10 @@ +#%Module##################################################### +## Module file for make_orog task. +############################################################# + +module purge + +module load intel/18.0.5.274 +module load netcdf/4.7.0 +module load hdf5/1.10.4 + diff --git a/modulefiles/hera/make_sfc_climo b/modulefiles/hera/make_sfc_climo new file mode 100644 index 000000000..c83cc119b --- /dev/null +++ b/modulefiles/hera/make_sfc_climo @@ -0,0 +1,15 @@ +#%Module##################################################### +## Module file for make_sfc_climo task. 
+############################################################# + +module purge + +module load intel/18.0.5.274 +module load impi/2018.0.4 +module load netcdf/4.6.1 + +#NCEPLIBS=/scratch1/NCEPDEV/global/gwv/l819/lib +#module use -a $NCEPLIBS/modulefiles +module use -a /scratch1/NCEPDEV/global/gwv/l819/lib/modulefiles +module load esmflocal/8_0_48b.netcdf47 + diff --git a/modulefiles/hera/run_fcst_ccpp b/modulefiles/hera/run_fcst_ccpp new file mode 120000 index 000000000..eacd4ef1d --- /dev/null +++ b/modulefiles/hera/run_fcst_ccpp @@ -0,0 +1 @@ +../../sorc/NEMSfv3gfs/NEMS/src/conf/modules.nems \ No newline at end of file diff --git a/modulefiles/hera/run_fcst_no_ccpp b/modulefiles/hera/run_fcst_no_ccpp new file mode 100644 index 000000000..290aff9da --- /dev/null +++ b/modulefiles/hera/run_fcst_no_ccpp @@ -0,0 +1,12 @@ +#%Module##################################################### +## Module file for run_fcst task without CCPP. +############################################################# + +module purge + +module use /scratch1/NCEPDEV/nems/emc.nemspara/soft/modulefiles +module load intel/18.0.5.274 +module load impi/2018.0.4 +module load netcdf/4.6.1 +module load pnetcdf/1.10.0 + diff --git a/modulefiles/hera/run_post b/modulefiles/hera/run_post new file mode 100644 index 000000000..70df4e2e8 --- /dev/null +++ b/modulefiles/hera/run_post @@ -0,0 +1,30 @@ +#%Module##################################################### +## Module file for run_post task. +############################################################# + +module purge + +module load intel/19.0.4.243 +module load impi/2019.0.4 + +# Load nceplibs modules. +module use -a /scratch2/NCEPDEV/nwprod/NCEPLIBS/modulefiles + +module load sigio/2.1.1 +module load jasper/1.900.1 +module load png/1.2.44 +module load z/1.2.11 +module load sfcio/1.1.1 +module load nemsio/2.2.4 +module load bacio/2.0.3 +module load g2/3.1.1 +module load gfsio/1.1.0 +module load ip/3.0.2 +module load sp/2.0.3 +module load w3emc/2.3.1 +module load w3nco/2.0.7 +module load crtm/2.2.5 +module load netcdf/4.7.0 +module load g2tmpl/1.5.1 +module load wrfio/1.1.1 + diff --git a/scripts/exregional_get_extrn_files.sh b/scripts/exregional_get_extrn_files.sh index 635984c34..49ddec865 100755 --- a/scripts/exregional_get_extrn_files.sh +++ b/scripts/exregional_get_extrn_files.sh @@ -252,16 +252,6 @@ will be copied (EXTRN_MDL_FILES_DIR) are: # #----------------------------------------------------------------------- # -# Load necessary modules. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; set +x; } > /dev/null 2>&1 - module load hpss - { restore_shell_opts; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# # Get the number of archive files to consider. # #----------------------------------------------------------------------- diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index d1912da1e..c38c41d9f 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -179,23 +179,10 @@ case $MACHINE in "HERA") -# - { save_shell_opts; set +x; } > /dev/null 2>&1 - - . 
/apps/lmod/lmod/init/sh - module purge - module load intel/18.0.5.274 - module load netcdf/4.7.0 - module load hdf5/1.10.4 - module list - - { restore_shell_opts; } > /dev/null 2>&1 - - export APRUN="time" - export topo_dir="/scratch1/NCEPDEV/global/glopara/fix/fix_orog" - ulimit -s unlimited ulimit -a + export APRUN="time" + export topo_dir="/scratch1/NCEPDEV/global/glopara/fix/fix_orog" ;; diff --git a/scripts/exregional_make_sfc_climo.sh b/scripts/exregional_make_sfc_climo.sh index 37bee4c35..498d45402 100755 --- a/scripts/exregional_make_sfc_climo.sh +++ b/scripts/exregional_make_sfc_climo.sh @@ -139,16 +139,6 @@ case $MACHINE in ;; "HERA") - module purge - module load intel/18.0.5.274 - module load impi/2018.0.4 - module load netcdf/4.6.1 - #module use /scratch1/NCEPDEV/nems/emc.nemspara/soft/modulefiles - export NCEPLIBS=/scratch1/NCEPDEV/global/gwv/l819/lib - module use -a $NCEPLIBS/modulefiles - module load esmflocal/8_0_48b.netcdf47 - #module load esmf/7.1.0r - module list APRUN="srun" ;; diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index 7cf0a4628..1286430db 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -144,37 +144,10 @@ case $MACHINE in ;; # "HERA") -# - - if [ "${USE_CCPP}" = "TRUE" ]; then - -# Need to change to the experiment directory to correctly load necessary -# modules for CCPP-version of FV3SAR in lines below - cd_vrfy ${EXPTDIR} - - set +x - source ./module-setup.sh - module use $( pwd -P ) - module load modules.fv3 - module list - set -x - - else - - . /apps/lmod/lmod/init/sh - module purge - module use /scratch1/NCEPDEV/nems/emc.nemspara/soft/modulefiles - module load intel/18.0.5.274 - module load impi/2018.0.4 - module load netcdf/4.6.1 - module load pnetcdf/1.10.0 - module list - - fi - ulimit -s unlimited ulimit -a APRUN="srun" + LD_LIBRARY_PATH="${NEMSfv3gfs_DIR}/ccpp/lib${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}" ;; # "JET") @@ -414,8 +387,6 @@ ln_vrfy -sf -t ${CYCLE_DIR} $EXPTDIR/${FIELD_TABLE_FN} ln_vrfy -sf -t ${CYCLE_DIR} $EXPTDIR/${NEMS_CONFIG_FN} if [ "${USE_CCPP}" = "TRUE" ]; then - ln_vrfy -sf -t ${CYCLE_DIR} $EXPTDIR/module-setup.sh - ln_vrfy -sf -t ${CYCLE_DIR} $EXPTDIR/modules.fv3 if [ "${CCPP_PHYS_SUITE}" = "GSD" ]; then ln_vrfy -sf -t ${CYCLE_DIR} $EXPTDIR/suite_FV3_GSD_v0.xml elif [ "${CCPP_PHYS_SUITE}" = "GFS" ]; then diff --git a/scripts/exregional_run_post.sh b/scripts/exregional_run_post.sh index 50f1d5f28..79ab2e47c 100755 --- a/scripts/exregional_run_post.sh +++ b/scripts/exregional_run_post.sh @@ -122,39 +122,7 @@ case $MACHINE in "HERA") - { save_shell_opts; set +x; } > /dev/null 2>&1 - module purge - - module load intel/19.0.4.243 - module load impi/2019.0.4 - -# module use /contrib/modulefiles - module use -a /scratch2/NCEPDEV/nwprod/NCEPLIBS/modulefiles - -# Loading nceplibs modules - module load sigio/2.1.1 - module load jasper/1.900.1 - module load png/1.2.44 - module load z/1.2.11 - module load sfcio/1.1.1 - module load nemsio/2.2.4 - module load bacio/2.0.3 - module load g2/3.1.1 -# module load xmlparse/v2.0.0 - module load gfsio/1.1.0 - module load ip/3.0.2 - module load sp/2.0.3 - module load w3emc/2.3.1 - module load w3nco/2.0.7 - module load crtm/2.2.5 -# module load netcdf/3.6.3 - module load netcdf/4.7.0 - module load g2tmpl/1.5.1 - module load wrfio/1.1.1 - - export NDATE=/scratch3/NCEPDEV/nwprod/lib/prod_util/v1.1.0/exec/ndate - - { restore_shell_opts; } > /dev/null 2>&1 +# export NDATE=/scratch3/NCEPDEV/nwprod/lib/prod_util/v1.1.0/exec/ndate APRUN="srun" ;; 
diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index 3445ed008..1c9efead0 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -503,91 +503,10 @@ Copying templates of various input files to the experiment directory..." # # If using CCPP... # -# If USE_CCPP is set to "TRUE", copy the appropriate modulefile, the -# CCPP physics suite definition file (an XML file), and possibly other -# suite-dependent files to the experiment directory. -# -# The modulefile modules.nems in the directory -# -# $NEMSfv3gfs_DIR/NEMS/src/conf -# -# is generated during the FV3 build process and this is configured pro- -# perly for the machine, shell environment, etc. Thus, we can just copy -# it to the experiment directory without worrying about what machine -# we're on, but this still needs to be confirmed. -# -# Note that a modulefile is a file whose first line is the "magic coo- -# kie" '#%Module'. It is interpreted by the "module load ..." command. -# It sets environment variables (including prepending/appending to -# paths) and loads modules. -# -# QUESTION: -# Why don't we do this for the non-CCPP version of FV3? -# -# ANSWER: -# Because for that case, we load different versions of intel and impi -# (compare modules.nems to the modules loaded for the case of USE_CCPP -# set to "FALSE" in run_FV3SAR.sh). Maybe these can be combined at some -# point. Note that a modules.nems file is generated in the same rela- -# tive location in the non-CCPP-enabled version of NEMSfv3gfs, so maybe -# that can be used and the run_FV3SAR.sh script modified to accomodate -# such a change. That way the below can be performed for both the CCPP- -# enabled and non-CCPP-enabled versions of NEMSfv3gfs. -# #----------------------------------------------------------------------- # if [ "${USE_CCPP}" = "TRUE" ]; then # -# Copy the shell script that initializes the Lmod (Lua-based module) -# system/software for handling modules. This script: -# -# 1) Detects the shell in which it is being invoked (i.e. the shell of -# the "parent" script in which it is being sourced). -# 2) Detects the machine it is running on and and calls the appropriate -# (shell- and machine-dependent) initalization script to initialize -# Lmod. -# 3) Purges all modules. -# 4) Uses the "module use ..." command to prepend or append paths to -# Lmod's search path (MODULEPATH). -# - print_info_msg "$VERBOSE" " -Copying the shell script that initializes the Lmod (Lua-based module) -system/software for handling modules..." -# -# The following might have to be made shell-dependent, e.g. if using csh -# or tcsh, copy over the file module-setup.csh.inc??. -# -# It may be convenient to also copy over this script when running the -# non-CCPP version of the FV3SAR and try to simplify the run script -# (run_FV3SAR.sh) so that it doesn't depend on whether USE_CCPP is set -# to "TRUE" or "FALSE". We can do that, but currently the non-CCPP and -# CCPP-enabled versions of the FV3SAR code use different versions of -# intel and impi, so module-setup.sh must account for this. -# - cp_vrfy ${NEMSfv3gfs_DIR}/NEMS/src/conf/module-setup.sh.inc \ - $EXPTDIR/module-setup.sh -# -# Append the command that adds the path to the CCPP libraries (via the -# shell variable LD_LIBRARY_PATH) to the Lmod initialization script in -# the experiment directory. This is needed if running the dynamic build -# of the CCPP-enabled version of the FV3SAR. 
-# - { cat << EOM >> $EXPTDIR/module-setup.sh -# -# Add path to libccpp.so and libccpphys.so to LD_LIBRARY_PATH" -# -export LD_LIBRARY_PATH="${NEMSfv3gfs_DIR}/ccpp/lib\${LD_LIBRARY_PATH:+:\$LD_LIBRARY_PATH}" -EOM -} || print_err_msg_exit "\ -Heredoc (cat) command to append command to add path to CCPP libraries to -the Lmod initialization script in the experiment directory returned with -a nonzero status." - - print_info_msg "$VERBOSE" " -Copying the modulefile required for running the CCPP-enabled version of -the FV3SAR under NEMS to the experiment directory..." - cp_vrfy ${NEMSfv3gfs_DIR}/NEMS/src/conf/modules.nems $EXPTDIR/modules.fv3 -# #----------------------------------------------------------------------- # # If using CCPP with the GFS physics suite... diff --git a/ush/load_modules_run_task.sh b/ush/load_modules_run_task.sh index d8c843463..eeb7d4d58 100755 --- a/ush/load_modules_run_task.sh +++ b/ush/load_modules_run_task.sh @@ -66,8 +66,34 @@ fi # #----------------------------------------------------------------------- # -# Call the script that defines the module() function. This is needed so -# we can perform "module load ..." calls later below. +# Source the script that initializes the Lmod (Lua-based module) system/ +# software for handling modules. This script defines the module() and +# other functions. These are needed so we can perform the "module use +# ..." and "module load ..." calls later below that are used to load the +# appropriate module file for the specified task. +# +# Note that the build of NEMSfv3gfs generates the shell script at +# +# ${NEMSfv3gfs_DIR}/NEMS/src/conf/module-setup.sh +# +# that can be used to initialize the Lmod (Lua-based module) system/ +# software for handling modules. This script: +# +# 1) Detects the shell in which it is being invoked (i.e. the shell of +# the "parent" script in which it is being sourced). +# 2) Detects the machine it is running on and and calls the appropriate +# (shell- and machine-dependent) initalization script to initialize +# Lmod. +# 3) Purges all modules. +# 4) Uses the "module use ..." command to prepend or append paths to +# Lmod's search path (MODULEPATH). +# +# We could use this module-setup.sh script to initialize Lmod, but since +# it is only found in the NEMSfv3gfs code, here we prefer to perform our +# own initialization. Ideally, there should be one module-setup.sh +# script that is used by all external repos/codes, but such a script +# does not exist. If/when it does, we will consider switching to it in- +# stead of using the case-statement below. # #----------------------------------------------------------------------- # @@ -94,7 +120,7 @@ case "$MACHINE" in ;; # *) - print_err_msg_exit " + print_err_msg_exit "\ The script to source to initialize lmod (module loads) has not yet been specified for the current machine (MACHINE): MACHINE = \"$MACHINE\"" @@ -104,11 +130,7 @@ esac # #----------------------------------------------------------------------- # -# Get the task name. Then shift the argument list so that the firs ar- -# gument (the task name) gets dropped from the start of the arguments -# list. This results in the shell variable $@ containing only the re- -# maining arguments (which in this case should consist of the full path -# to the J-job to call). +# Get the task name and the name of the J-job script. 
# #----------------------------------------------------------------------- # @@ -117,29 +139,133 @@ jjob_fp="$2" # #----------------------------------------------------------------------- # -# Purge modules and load the module file for the specified task on the -# current machine. Note that the path to this module file is given by +# Set the directory (modules_dir) in which the module files for the va- +# rious workflow tasks are located. Also, set the name of the module +# file for the specified task. +# +# A module file is a file whose first line is the "magic cookie" string +# '#%Module'. It is interpreted by the "module load ..." command. It +# sets environment variables (including prepending/appending to paths) +# and loads modules. +# +# The regional_workflow repository contains module files for all the +# workflow tasks in the template rocoto XML file for the FV3SAR work- +# flow. The full path to a module file for a given task is +# +# $HOMErrfs/modulefiles/$machine/${task_name} +# +# where HOMErrfs is the base directory of the workflow, machine is the +# name of the machine that we're running on (in lowercase), and task_- +# name is the name of the current task (an input to this script). For +# all tasks in the rocoto XML except run_fcst, these are actual files +# (as opposed to symlinks). For the run_fcst task, there are two possi- +# ble module files. The first one is named "run_fcst_no_ccpp" and is +# used to run FV3 without CCPP (i.e. it is used if USE_CCPP is set to +# "FALSE" in the experiment/workflow configuration file). This is also +# an actual file. The second one is named "run_fcst_ccpp" and is used +# to run FV3 with CCPP (i.e. it is used if USE_CCPP is set to "TRUE"). +# This second file is a symlink (and is a part of the regional_workflow +# repo), and its target is +# +# ${NEMSfv3gfs_DIR}/NEMS/src/conf/modules.fv3 +# +# Here, NEMSfv3gfs_DIR is the directory in which the NEMSfv3gfs reposi- +# tory containing the FV3 model is cloned (normally "$HOMErrfs/sorc/ +# NEMSfv3gfs"), and modules.fv3 is a module file that is generated by +# the NEMSfv3gfs build process. It contains the appropriate modules to +# use when running the FV3 model. Thus, we just point to it via the +# symlink "run_fcst_ccpp" in the modulefiles/$machine directory. +# +# QUESTION: +# Why don't we do this for the non-CCPP version of FV3? +# +# ANSWER: +# Because for that case, we load different versions of intel and impi +# (compare modules.nems to the modules loaded for the case of USE_CCPP +# set to "FALSE" in run_FV3SAR.sh). Maybe these can be combined at some +# point. Note that a modules.nems file is generated in the same rela- +# tive location in the non-CCPP-enabled version of NEMSfv3gfs, so maybe +# that can be used and the run_FV3SAR.sh script modified to accomodate +# such a change. That way the below can be performed for both the CCPP- +# enabled and non-CCPP-enabled versions of NEMSfv3gfs. # -# $HOMErrfs/modulefiles/$machine/${task_name} +#----------------------------------------------------------------------- # -# where HOMErrfs is the workflow home directory, machine is the name of -# the current machine in lowercase, and task_name is the name of the -# task that this script will launch (via the exec command below). 
+machine=${MACHINE,,} +modules_dir="$HOMErrfs/modulefiles/$machine" +modulefile_name="${task_name}" + +if [ "${task_name}" = "run_fcst" ]; then + if [ "${USE_CCPP}" = "TRUE" ]; then + modulefile_name=${modulefile_name}_ccpp + else + modulefile_name=${modulefile_name}_no_ccpp + fi +fi +# +#----------------------------------------------------------------------- +# +# Use the "readlink" command to resolve the full path to the module file +# and then verify that the file exists. This is not necessary for most +# tasks, but for the run_fcst task, when CCPP is enabled, the module +# file in the modules directory is not a regular file but a symlink to a +# file in the NEMSfv3gfs external repo. This latter target file will +# exist only if the NEMSfv3gfs code has already been built. Thus, we +# now check to make sure that the module file exits. +# +#----------------------------------------------------------------------- +# +modulefile_path=$( readlink -f "${modules_dir}/${modulefile_name}" ) + +if [ ! -f "${modulefile_path}" ]; then + + if [ "${task_name}" = "run_fcst" ]; then + + print_err_msg_exit "\ +The target (modulefile_path) of the symlink (modulefile_name) in the +task modules directory (modules_dir) that points to module file for this +task (task_name) does not exsit: + task_name = \"${task_name}\" + modulefile_name = \"${modulefile_name}\" + modules_dir = \"${modules_dir}\" + modulefile_path = \"${modulefile_path}\" +This is likely because the forecast model code (NEMSfv3gfs) has not yet +been built. +" + + else + + print_err_msg_exit "\ +The module file (modulefile_path) specified for this task (task_name) +does not exist: + task_name = \"${task_name}\" + modulefile_path = \"${modulefile_path}\"" + +fi +# +#----------------------------------------------------------------------- +# +# Purge modules and load the module file for the specified task on the +# current machine. # #----------------------------------------------------------------------- # print_info_msg "$VERBOSE" " Loading modules for task \"${task_name}\" ..." - -machine=${MACHINE,,} + module purge -module use $HOMErrfs/modulefiles/$machine -module load ${task_name} + +module use "${modules_dir}" || print_err_msg_exit "\ +Call to \"module use\" command failed." + +module load ${modulefile_name} || print_err_msg_exit "\ +Call to \"module load\" command failed." + module list # #----------------------------------------------------------------------- # -# Use the exec ocmmand to terminate the current script and launch the +# Use the exec command to terminate the current script and launch the # J-job for the specified task. # #----------------------------------------------------------------------- diff --git a/ush/templates/FV3SAR_wflow.xml b/ush/templates/FV3SAR_wflow.xml index 6d861dd0a..0febda832 100644 --- a/ush/templates/FV3SAR_wflow.xml +++ b/ush/templates/FV3SAR_wflow.xml @@ -86,15 +86,15 @@ killing itelf off) using the exec command. - - - - - - - - - + + + + + + + + + ]> @@ -104,7 +104,7 @@ Task names. &DATE_FIRST_CYCL;CC00 &DATE_LAST_CYCL;CC00 24:00:00 - &LOGDIR;/FV3_wflow.log + &LOGDIR;/FV3SAR_wflow.log + + + + + + + + + + - - - - - - - - - - - ]> @@ -110,7 +110,7 @@ Task names. ************************************************************************ ************************************************************************ --> - + &RSRC_MAKE_GRID; &RSRV_DEFAULT; @@ -127,7 +127,7 @@ source LOAD_MODULES_RUN_TASK_FP within the J-job instead of here since we have are already sourcing other files in the J-job anyway. 
--> @@ -138,10 +138,10 @@ to LOAD_MODULES_RUN_TASK_FP (the first argument is the task name). The J-job then uses exec to run the J-job (while also terminating the LOAD_- MODULES_RUN_TASK_FP script. --> - &LOAD_MODULES_RUN_TASK_FP; "&TN_MAKE_GRID;" "&JOBSDIR;/JREGIONAL_MAKE_GRID" + &LOAD_MODULES_RUN_TASK_FP; "&MAKE_GRID_TN;" "&JOBSDIR;/JREGIONAL_MAKE_GRID" &PROC_MAKE_GRID; - &TN_MAKE_GRID; - &LOGDIR;/&TN_MAKE_GRID;.log + &MAKE_GRID_TN; + &LOGDIR;/&MAKE_GRID_TN;.log GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; PDY@Y@m@d @@ -155,15 +155,15 @@ MODULES_RUN_TASK_FP script. ************************************************************************ ************************************************************************ --> - + &RSRC_MAKE_OROG; &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_MAKE_OROG;" "&JOBSDIR;/JREGIONAL_MAKE_OROG" + &LOAD_MODULES_RUN_TASK_FP; "&MAKE_OROG_TN;" "&JOBSDIR;/JREGIONAL_MAKE_OROG" &PROC_MAKE_OROG; - &TN_MAKE_OROG; - &LOGDIR;/&TN_MAKE_OROG;.log + &MAKE_OROG_TN; + &LOGDIR;/&MAKE_OROG_TN;.log GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; PDY@Y@m@d @@ -184,15 +184,15 @@ MODULES_RUN_TASK_FP script. ************************************************************************ ************************************************************************ --> - + &RSRC_MAKE_SFC_CLIMO; &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_MAKE_SFC_CLIMO;" "&JOBSDIR;/JREGIONAL_MAKE_SFC_CLIMO" + &LOAD_MODULES_RUN_TASK_FP; "&MAKE_SFC_CLIMO_TN;" "&JOBSDIR;/JREGIONAL_MAKE_SFC_CLIMO" &PROC_MAKE_SFC_CLIMO; - &TN_MAKE_SFC_CLIMO; - &LOGDIR;/&TN_MAKE_SFC_CLIMO;.log + &MAKE_SFC_CLIMO_TN; + &LOGDIR;/&MAKE_SFC_CLIMO_TN;.log GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; PDY@Y@m@d @@ -206,8 +206,8 @@ MODULES_RUN_TASK_FP script. &RUN_TASK_MAKE_GRID;FALSE - - &LOGDIR;/&TN_MAKE_OROG;_task_complete.txt + + &LOGDIR;/&MAKE_OROG_TN;_task_complete.txt &RUN_TASK_MAKE_OROG;FALSE @@ -218,15 +218,15 @@ MODULES_RUN_TASK_FP script. ************************************************************************ ************************************************************************ --> - + &RSRC_GET_EXTRN_MDL_FILES; &RSRV_HPSS; - &LOAD_MODULES_RUN_TASK_FP; "&TN_GET_EXTRN_ICS;" "&JOBSDIR;/JREGIONAL_GET_EXTRN_FILES" + &LOAD_MODULES_RUN_TASK_FP; "&GET_EXTRN_ICS_TN;" "&JOBSDIR;/JREGIONAL_GET_EXTRN_FILES" &PROC_GET_EXTRN_MDL_FILES; - &TN_GET_EXTRN_ICS; - &LOGDIR;/&TN_GET_EXTRN_ICS;_@Y@m@d@H.log + &GET_EXTRN_ICS_TN; + &LOGDIR;/&GET_EXTRN_ICS_TN;_@Y@m@d@H.log GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; CYCLE_DIR&CYCLE_DIR; @@ -240,15 +240,15 @@ MODULES_RUN_TASK_FP script. ************************************************************************ ************************************************************************ --> - + &RSRC_GET_EXTRN_MDL_FILES; &RSRV_HPSS; - &LOAD_MODULES_RUN_TASK_FP; "&TN_GET_EXTRN_LBCS;" "&JOBSDIR;/JREGIONAL_GET_EXTRN_FILES" + &LOAD_MODULES_RUN_TASK_FP; "&GET_EXTRN_LBCS_TN;" "&JOBSDIR;/JREGIONAL_GET_EXTRN_FILES" &PROC_GET_EXTRN_MDL_FILES; - &TN_GET_EXTRN_LBCS; - &LOGDIR;/&TN_GET_EXTRN_LBCS;_@Y@m@d@H.log + &GET_EXTRN_LBCS_TN; + &LOGDIR;/&GET_EXTRN_LBCS_TN;_@Y@m@d@H.log GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; CYCLE_DIR&CYCLE_DIR; @@ -262,15 +262,15 @@ MODULES_RUN_TASK_FP script. 
************************************************************************ ************************************************************************ --> - + &RSRC_MAKE_ICS_SURF_LBC0; &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_MAKE_ICS;" "&JOBSDIR;/JREGIONAL_MAKE_ICS" + &LOAD_MODULES_RUN_TASK_FP; "&MAKE_ICS_TN;" "&JOBSDIR;/JREGIONAL_MAKE_ICS" &PROC_MAKE_ICS_SURF_LBC0; - &TN_MAKE_ICS; - &LOGDIR;/&TN_MAKE_ICS;_@Y@m@d@H.log + &MAKE_ICS_TN; + &LOGDIR;/&MAKE_ICS_TN;_@Y@m@d@H.log GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; CYCLE_DIR&CYCLE_DIR; @@ -279,20 +279,20 @@ MODULES_RUN_TASK_FP script. - + &LOGDIR;/make_grid_task_complete.txt &RUN_TASK_MAKE_GRID;FALSE - - &LOGDIR;/&TN_MAKE_OROG;_task_complete.txt + + &LOGDIR;/&MAKE_OROG_TN;_task_complete.txt &RUN_TASK_MAKE_OROG;FALSE - - &LOGDIR;/&TN_MAKE_SFC_CLIMO;_task_complete.txt + + &LOGDIR;/&MAKE_SFC_CLIMO_TN;_task_complete.txt &RUN_TASK_MAKE_SFC_CLIMO;FALSE @@ -303,15 +303,15 @@ MODULES_RUN_TASK_FP script. ************************************************************************ ************************************************************************ --> - + &RSRC_MAKE_LBC1_TO_LBCN; &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_MAKE_LBCS;" "&JOBSDIR;/JREGIONAL_MAKE_LBCS" + &LOAD_MODULES_RUN_TASK_FP; "&MAKE_LBCS_TN;" "&JOBSDIR;/JREGIONAL_MAKE_LBCS" &PROC_MAKE_LBC1_TO_LBCN; - &TN_MAKE_LBCS; - &LOGDIR;/&TN_MAKE_LBCS;_@Y@m@d@H.log + &MAKE_LBCS_TN; + &LOGDIR;/&MAKE_LBCS_TN;_@Y@m@d@H.log GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; CYCLE_DIR&CYCLE_DIR; @@ -320,20 +320,20 @@ MODULES_RUN_TASK_FP script. - + &LOGDIR;/make_grid_task_complete.txt &RUN_TASK_MAKE_GRID;FALSE - - &LOGDIR;/&TN_MAKE_OROG;_task_complete.txt + + &LOGDIR;/&MAKE_OROG_TN;_task_complete.txt &RUN_TASK_MAKE_OROG;FALSE - - &LOGDIR;/&TN_MAKE_SFC_CLIMO;_task_complete.txt + + &LOGDIR;/&MAKE_SFC_CLIMO_TN;_task_complete.txt &RUN_TASK_MAKE_SFC_CLIMO;FALSE @@ -344,15 +344,15 @@ MODULES_RUN_TASK_FP script. ************************************************************************ ************************************************************************ --> - + &RSRC_RUN_FCST; &RSRV_RUN_FCST; - &LOAD_MODULES_RUN_TASK_FP; "&TN_RUN_FCST;" "&JOBSDIR;/JREGIONAL_RUN_FCST" + &LOAD_MODULES_RUN_TASK_FP; "&RUN_FCST_TN;" "&JOBSDIR;/JREGIONAL_RUN_FCST" &PROC_RUN_FCST; - &TN_RUN_FCST; - &LOGDIR;/&TN_RUN_FCST;_@Y@m@d@H.log + &RUN_FCST_TN; + &LOGDIR;/&RUN_FCST_TN;_@Y@m@d@H.log GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; CYCLE_DIR&CYCLE_DIR; @@ -361,8 +361,8 @@ MODULES_RUN_TASK_FP script. - - + + @@ -371,19 +371,19 @@ MODULES_RUN_TASK_FP script. ************************************************************************ ************************************************************************ --> - + &FHR; - + &RSRC_POST; &RSRV_DEFAULT; - &LOAD_MODULES_RUN_TASK_FP; "&TN_RUN_POST;" "&JOBSDIR;/JREGIONAL_RUN_POST" + &LOAD_MODULES_RUN_TASK_FP; "&RUN_POST_TN;" "&JOBSDIR;/JREGIONAL_RUN_POST" &PROC_POST; - &TN_RUN_POST;_#fhr# - &LOGDIR;/&TN_RUN_POST;_#fhr#_@Y@m@d@H.log + &RUN_POST_TN;_#fhr# + &LOGDIR;/&RUN_POST_TN;_#fhr#_@Y@m@d@H.log GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP; CYCLE_DIR&CYCLE_DIR; From 8fed29a644bd6df59e8150a7bc4df8171878a90f Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Wed, 11 Dec 2019 13:43:04 -0700 Subject: [PATCH 101/203] Changes to the way modules for the various workflow tasks are loaded to make the process more flexible, as follows: 1) Rename the module files for several tasks (only for hera thus far) from ${filename} to ${filename}.hardcoded. 
These are for backup and will be removed once it is clear that the "dynamic" modulefile method (see #2 below) is working reliably. 2) Put in code in the experiment/workflow generation scripts that create links to "dynamic" versions of these files in the external repos. These are dynamic in the sense that they should be compatible with the version of code in the repository. This will (hopefully!) eliminate the need to maintain hard-coded module files in the workflow repository (at least for codes that are external to the repository). 3) Modify the load_modules_run_task.sh script so that some of the "dynamic" module files are sourced like a shells script instead of being loaded using "module load ...". This is because this subset of "dynamic" module files are in fact shell scripts because they contain shell commands (like "export ${SOME_VARIABLE}"). These module files need to be fixed in the external repos (notably in the UFS_UTILS repository) so that they are true Lua module files. --- .../hera/{make_ics => make_ics.hardcoded} | 0 .../hera/{make_lbcs => make_lbcs.hardcoded} | 0 .../hera/{make_orog => make_orog.hardcoded} | 0 ...ake_sfc_climo => make_sfc_climo.hardcoded} | 0 modulefiles/hera/run_fcst_ccpp | 1 - ush/generate_FV3SAR_wflow.sh | 41 +++++++++++++++ ush/load_modules_run_task.sh | 51 +++++++++++++++---- ush/setup.sh | 2 + 8 files changed, 84 insertions(+), 11 deletions(-) rename modulefiles/hera/{make_ics => make_ics.hardcoded} (100%) rename modulefiles/hera/{make_lbcs => make_lbcs.hardcoded} (100%) rename modulefiles/hera/{make_orog => make_orog.hardcoded} (100%) rename modulefiles/hera/{make_sfc_climo => make_sfc_climo.hardcoded} (100%) delete mode 120000 modulefiles/hera/run_fcst_ccpp diff --git a/modulefiles/hera/make_ics b/modulefiles/hera/make_ics.hardcoded similarity index 100% rename from modulefiles/hera/make_ics rename to modulefiles/hera/make_ics.hardcoded diff --git a/modulefiles/hera/make_lbcs b/modulefiles/hera/make_lbcs.hardcoded similarity index 100% rename from modulefiles/hera/make_lbcs rename to modulefiles/hera/make_lbcs.hardcoded diff --git a/modulefiles/hera/make_orog b/modulefiles/hera/make_orog.hardcoded similarity index 100% rename from modulefiles/hera/make_orog rename to modulefiles/hera/make_orog.hardcoded diff --git a/modulefiles/hera/make_sfc_climo b/modulefiles/hera/make_sfc_climo.hardcoded similarity index 100% rename from modulefiles/hera/make_sfc_climo rename to modulefiles/hera/make_sfc_climo.hardcoded diff --git a/modulefiles/hera/run_fcst_ccpp b/modulefiles/hera/run_fcst_ccpp deleted file mode 120000 index eacd4ef1d..000000000 --- a/modulefiles/hera/run_fcst_ccpp +++ /dev/null @@ -1 +0,0 @@ -../../sorc/NEMSfv3gfs/NEMS/src/conf/modules.nems \ No newline at end of file diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index 240a4633f..1b02c4836 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -240,6 +240,47 @@ all_cycledefs=${all_cycledefs//&/\\\&} sed -i -r -e "s|${regex_search}|${all_cycledefs}|g" "${WFLOW_XML_FP}" +# +#----------------------------------------------------------------------- +# +# For select workflow tasks, create symlinks (in an appropriate subdi- +# rectory under the workflow directory tree) that point to module files +# in the various cloned external repositories. In principle, this is +# better than having hard-coded module files for tasks because the sym- +# links will always point to updated module files. 
However, it does re- +# quire that these module files in the external repositories be coded +# correctly, e.g. that they really be lua module files and not contain +# any shell commands (like "export SOME_VARIABLE"). +# +#----------------------------------------------------------------------- +# +machine=${MACHINE,,} + +cd_vrfy "${MODULES_DIR}/$machine" + +# +# The "module" file (really a shell script) for orog in the UFS_UTILS +# repo uses a shell variable named MOD_PATH, but it is not clear where +# that is defined. That needs to be fixed. Until then, we have to use +# a hard-coded module file, which may or may not be compatible with the +# modules used in the UFS_UTILS repo to build the orog code. +#ln_vrfy -fs "${UFS_UTILS_DIR}/modulefiles/fv3gfs/orog.$machine" \ +# "${MAKE_OROG_TN}" +ln_vrfy -fs "make_orog.hardcoded" "${MAKE_OROG_TN}" + +ln_vrfy -fs "${UFS_UTILS_DIR}/modulefiles/modulefile.sfc_climo_gen.$machine" \ + "${MAKE_SFC_CLIMO_TN}" + +ln_vrfy -fs "${CHGRES_DIR}/modulefiles/chgres_cube.$machine" \ + "${MAKE_ICS_TN}" + +ln_vrfy -fs "${CHGRES_DIR}/modulefiles/chgres_cube.$machine" \ + "${MAKE_LBCS_TN}" + +ln_vrfy -fs "${UFS_WTHR_MDL_DIR}/NEMS/src/conf/modules.nems" \ + "${RUN_FCST_TN}" + +cd_vrfy - # #----------------------------------------------------------------------- # diff --git a/ush/load_modules_run_task.sh b/ush/load_modules_run_task.sh index 60dc5e43b..4a072c80b 100755 --- a/ush/load_modules_run_task.sh +++ b/ush/load_modules_run_task.sh @@ -204,16 +204,18 @@ modulefile_name="${task_name}" # regardless of the setting of USE_CCPP. But this requires that we then # test the non-CCPP-enabled version, which we've never done. Leave this # for another time... -if [ "${task_name}" = "run_fcst" ]; then - if [ "${USE_CCPP}" = "TRUE" ]; then - modulefile_name=${modulefile_name}_ccpp - else - modulefile_name=${modulefile_name}_no_ccpp - fi -fi +#if [ "${task_name}" = "run_fcst" ]; then +# if [ "${USE_CCPP}" = "TRUE" ]; then +# modulefile_name=${modulefile_name}_ccpp +# else +# modulefile_name=${modulefile_name}_no_ccpp +# fi +#fi # #----------------------------------------------------------------------- # +# This comment needs to be updated: +# # Use the "readlink" command to resolve the full path to the module file # and then verify that the file exists. This is not necessary for most # tasks, but for the run_fcst task, when CCPP is enabled, the module @@ -228,7 +230,11 @@ modulefile_path=$( readlink -f "${modules_dir}/${modulefile_name}" ) if [ ! -f "${modulefile_path}" ]; then - if [ "${task_name}" = "run_fcst" ]; then + if [ "${task_name}" = "${MAKE_OROG_TN}" -o \ + "${task_name}" = "${MAKE_SFC_CLIMO_TN}" -o \ + "${task_name}" = "${MAKE_ICS_TN}" -o \ + "${task_name}" = "${MAKE_LBCS_TN}" -o \ + "${task_name}" = "${RUN_FCST_TN}" ]; then print_err_msg_exit "\ The target (modulefile_path) of the symlink (modulefile_name) in the @@ -267,8 +273,33 @@ module purge module use "${modules_dir}" || print_err_msg_exit "\ Call to \"module use\" command failed." -module load ${modulefile_name} || print_err_msg_exit "\ -Call to \"module load\" command failed." +# +# Some of the task module files that are symlinks to module files in the +# external repositories are in fact shell scripts (they shouldn't be; +# such cases should be fixed in the external repositories). For such +# files, we source the "module" file. For true module files, we use the +# "module load" command. +# +case "${task_name}" in +# +"${MAKE_ICS_TN}" | "${MAKE_LBCS_TN}" | "${MAKE_SFC_CLIMO_TN}") + . 
${modulefile_path} || print_err_msg_exit "\ +Sourcing of \"module\" file (modulefile_path; really a shell script) for +the specified task (task_name) failed: + task_name = \"${task_name}\" + modulefile_path = \"${modulefile_path}\"" + ;; +# +*) + module load ${modulefile_name} || print_err_msg_exit "\ +Loading of module file (modulefile_name; in directory specified by mod- +ules_dir) for the specified task (task_name) failed: + task_name = \"${task_name}\" + modulefile_name = \"${modulefile_name}\" + modules_dir = \"${modules_dir}\"" + ;; +# +esac module list # diff --git a/ush/setup.sh b/ush/setup.sh index 71a687c56..176cfbebb 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -593,6 +593,7 @@ SCRIPTSDIR="$HOMErrfs/scripts" JOBSDIR="$HOMErrfs/jobs" SORCDIR="$HOMErrfs/sorc" PARMDIR="$HOMErrfs/parm" +MODULES_DIR="$HOMErrfs/modulefiles" EXECDIR="$HOMErrfs/exec" FIXrrfs="$HOMErrfs/fix" FIXupp="$FIXrrfs/fix_upp" @@ -1907,6 +1908,7 @@ SCRIPTSDIR="$SCRIPTSDIR" JOBSDIR="$JOBSDIR" SORCDIR="$SORCDIR" PARMDIR="$PARMDIR" +MODULES_DIR="${MODULES_DIR}" EXECDIR="$EXECDIR" FIXrrfs="$FIXrrfs" FIXam="$FIXam" From ed874f9478fcaac842d549b9d76ef3a15aa36f5b Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Wed, 11 Dec 2019 13:47:08 -0700 Subject: [PATCH 102/203] Rename the hard-coded module file for running the forecast model WITHOUT CCPP such that it has the ".hardcoded" suffix. This file will probably not be needed in the future since it seems (from talking to Dom H.) that both the CCPP-enabled and CCPP-disabled versions of the forecast model will be using the same module file ($HOMErrfs/ufs_weather_model/NEMS/src/conf/modules.nems) which currently the experiment/workflow generation scripts automatically create links to. Note that the workflow has not yet been tested in CCPP-disabled mode. --- modulefiles/hera/{run_fcst_no_ccpp => run_fcst_no_ccpp.hardcoded} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename modulefiles/hera/{run_fcst_no_ccpp => run_fcst_no_ccpp.hardcoded} (100%) diff --git a/modulefiles/hera/run_fcst_no_ccpp b/modulefiles/hera/run_fcst_no_ccpp.hardcoded similarity index 100% rename from modulefiles/hera/run_fcst_no_ccpp rename to modulefiles/hera/run_fcst_no_ccpp.hardcoded From 674d0845397a387caf64683b81c0ac0ca2a5bc9b Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Wed, 11 Dec 2019 15:30:29 -0700 Subject: [PATCH 103/203] Change the way the CCPP physics suites are named to include an "FV3_" at the beginning. This is needed to be consistent with the way the input.nml namelist variable ccpp_suite expects things. These changes also require renaming the suite-dependent template files. 
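For illustration, a minimal sketch of how the renamed suite value flows through the workflow once the "FV3_" prefix is part of CCPP_PHYS_SUITE itself (variable and file names are taken from the hunks below; the echo lines are illustrative only):

  # In the experiment configuration (see the config_defaults.sh hunk below):
  CCPP_PHYS_SUITE="FV3_GSD_SAR"
  # Suite definition file name, as now built in setup.sh by plain concatenation:
  CCPP_PHYS_SUITE_FN="suite_${CCPP_PHYS_SUITE}.xml"   # -> suite_FV3_GSD_SAR.xml
  # The renamed suite-dependent templates follow the same <name>.<suite> pattern:
  echo "input.nml.${CCPP_PHYS_SUITE}"                 # -> input.nml.FV3_GSD_SAR
  echo "field_table.${CCPP_PHYS_SUITE}"               # -> field_table.FV3_GSD_SAR

This also matches what the input.nml namelist variable ccpp_suite expects, so no extra prefixing is needed anywhere in the scripts.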
--- scripts/exregional_make_ics.sh | 20 +++++++++---------- scripts/exregional_make_lbcs.sh | 16 +++++++-------- scripts/exregional_run_fcst.sh | 4 ++-- tests/baseline_configs/config.regional_001.sh | 2 +- tests/baseline_configs/config.regional_002.sh | 2 +- tests/baseline_configs/config.regional_003.sh | 2 +- ush/config_defaults.sh | 6 +++--- ush/generate_FV3SAR_wflow.sh | 14 ++++++------- ush/setup.sh | 4 ++-- ..._gfdlmp => diag_table.FV3_GFS_2017_gfdlmp} | 0 ...g_table.GSD_SAR => diag_table.FV3_GSD_SAR} | 0 ...iag_table.GSD_v0 => diag_table.FV3_GSD_v0} | 0 ...gfdlmp => field_table.FV3_GFS_2017_gfdlmp} | 0 ..._table.GSD_SAR => field_table.FV3_GSD_SAR} | 0 ...ld_table.GSD_v0 => field_table.FV3_GSD_v0} | 0 ...7_gfdlmp => input.nml.FV3_GFS_2017_gfdlmp} | 0 ...nput.nml.GSD_SAR => input.nml.FV3_GSD_SAR} | 0 ...{input.nml.GSD_v0 => input.nml.FV3_GSD_v0} | 0 ...mp => model_configure.FV3_GFS_2017_gfdlmp} | 0 ...re.GSD_SAR => model_configure.FV3_GSD_SAR} | 0 ...gure.GSD_v0 => model_configure.FV3_GSD_v0} | 0 ush/valid_param_vals.sh | 2 +- 22 files changed, 36 insertions(+), 36 deletions(-) rename ush/templates/{diag_table.GFS_2017_gfdlmp => diag_table.FV3_GFS_2017_gfdlmp} (100%) rename ush/templates/{diag_table.GSD_SAR => diag_table.FV3_GSD_SAR} (100%) rename ush/templates/{diag_table.GSD_v0 => diag_table.FV3_GSD_v0} (100%) rename ush/templates/{field_table.GFS_2017_gfdlmp => field_table.FV3_GFS_2017_gfdlmp} (100%) rename ush/templates/{field_table.GSD_SAR => field_table.FV3_GSD_SAR} (100%) rename ush/templates/{field_table.GSD_v0 => field_table.FV3_GSD_v0} (100%) rename ush/templates/{input.nml.GFS_2017_gfdlmp => input.nml.FV3_GFS_2017_gfdlmp} (100%) rename ush/templates/{input.nml.GSD_SAR => input.nml.FV3_GSD_SAR} (100%) rename ush/templates/{input.nml.GSD_v0 => input.nml.FV3_GSD_v0} (100%) rename ush/templates/{model_configure.GFS_2017_gfdlmp => model_configure.FV3_GFS_2017_gfdlmp} (100%) rename ush/templates/{model_configure.GSD_SAR => model_configure.FV3_GSD_SAR} (100%) rename ush/templates/{model_configure.GSD_v0 => model_configure.FV3_GSD_v0} (100%) diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh index 7c1fa368c..245e2691f 100755 --- a/scripts/exregional_make_ics.sh +++ b/scripts/exregional_make_ics.sh @@ -98,11 +98,11 @@ phys_suite="" case "${CCPP_PHYS_SUITE}" in -"GFS_2017_gfdlmp") +"FV3_GFS_2017_gfdlmp") phys_suite="GFS" ;; -"GSD_v0" | "GSD_SAR") +"FV3_GSD_v0" | "FV3_GSD_SAR") phys_suite="GSD" ;; @@ -283,10 +283,10 @@ case "${EXTRN_MDL_NAME_ICS}" in # external model file type, and physics suite). # if [ "${USE_CCPP}" = "TRUE" ]; then - if [ "${CCPP_PHYS_SUITE}" = "GFS_2017_gfdlmp" ]; then + if [ "${CCPP_PHYS_SUITE}" = "FV3_GFS_2017_gfdlmp" ]; then tracers="\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"" - elif [ "${CCPP_PHYS_SUITE}" = "GSD_v0" -o \ - "${CCPP_PHYS_SUITE}" = "GSD_SAR" ]; then + elif [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" -o \ + "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then # For GSD physics, add three additional tracers (the ice, rain and water # number concentrations) that are required for Thompson microphysics. tracers="\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\",\"ice_nc\",\"rain_nc\",\"water_nc\"" @@ -327,8 +327,8 @@ case "${EXTRN_MDL_NAME_ICS}" in internal_GSD=".false." 
cdate_min_HRRRX="2019111500" - if [ "${CCPP_PHYS_SUITE}" = "GSD_v0" -o \ - "${CCPP_PHYS_SUITE}" = "GSD_SAR" ] && \ + if [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" -o \ + "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ] && \ [ ${CDATE} -gt ${cdate_min_HRRRX} ]; then print_info_msg " Setting the chgres_cube namelist setting \"internal_GSD\" to \".true.\" in @@ -338,10 +338,10 @@ HRRRX grib2 files created after about \"${cdate_min_HRRRX}\"..." fi if [ "${USE_CCPP}" = "TRUE" ]; then - if [ "${CCPP_PHYS_SUITE}" = "GFS_2017_gfdlmp" ]; then + if [ "${CCPP_PHYS_SUITE}" = "FV3_GFS_2017_gfdlmp" ]; then numsoil_out="4" - elif [ "${CCPP_PHYS_SUITE}" = "GSD_v0" -o \ - "${CCPP_PHYS_SUITE}" = "GSD_SAR" ]; then + elif [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" -o \ + "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then numsoil_out="9" fi fi diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh index b1aa017a6..f985a7ccb 100755 --- a/scripts/exregional_make_lbcs.sh +++ b/scripts/exregional_make_lbcs.sh @@ -99,11 +99,11 @@ phys_suite="" case "${CCPP_PHYS_SUITE}" in -"GFS_2017_gfdlmp") +"FV3_GFS_2017_gfdlmp") phys_suite="GFS" ;; -"GSD_v0" | "GSD_SAR") +"FV3_GSD_v0" | "FV3_GSD_SAR") phys_suite="GSD" ;; @@ -269,10 +269,10 @@ case "${EXTRN_MDL_NAME_LBCS}" in # external model file type, and physics suite). # if [ "${USE_CCPP}" = "TRUE" ]; then - if [ "${CCPP_PHYS_SUITE}" = "GFS_2017_gfdlmp" ]; then + if [ "${CCPP_PHYS_SUITE}" = "FV3_GFS_2017_gfdlmp" ]; then tracers="\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"" - elif [ "${CCPP_PHYS_SUITE}" = "GSD_v0" -o \ - "${CCPP_PHYS_SUITE}" = "GSD_SAR" ]; then + elif [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" -o \ + "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then # For GSD physics, add three additional tracers (the ice, rain and water # number concentrations) that are required for Thompson microphysics. 
tracers="\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\",\"ice_nc\",\"rain_nc\",\"water_nc\"" @@ -310,10 +310,10 @@ case "${EXTRN_MDL_NAME_LBCS}" in input_type="grib2" if [ "${USE_CCPP}" = "TRUE" ]; then - if [ "${CCPP_PHYS_SUITE}" = "GFS_2017_gfdlmp" ]; then + if [ "${CCPP_PHYS_SUITE}" = "FV3_GFS_2017_gfdlmp" ]; then numsoil_out="4" - elif [ "${CCPP_PHYS_SUITE}" = "GSD_v0" -o \ - "${CCPP_PHYS_SUITE}" = "GSD_SAR" ]; then + elif [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" -o \ + "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then numsoil_out="9" fi fi diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index 6ba2c61ef..51c56da75 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -390,8 +390,8 @@ if [ "${USE_CCPP}" = "TRUE" ]; then ln_vrfy -sf -t ${CYCLE_DIR} ${CCPP_PHYS_SUITE_FP} - if [ "${CCPP_PHYS_SUITE}" = "GSD_v0" -o \\ - "${CCPP_PHYS_SUITE}" = "GSD_SAR" ]; then + if [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" -o \\ + "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then ln_vrfy -sf -t ${CYCLE_DIR} $EXPTDIR/CCN_ACTIVATE.BIN fi diff --git a/tests/baseline_configs/config.regional_001.sh b/tests/baseline_configs/config.regional_001.sh index 1870d8e62..4158bdaf3 100644 --- a/tests/baseline_configs/config.regional_001.sh +++ b/tests/baseline_configs/config.regional_001.sh @@ -40,7 +40,7 @@ PREDEF_GRID_NAME="GSD_HRRR25km" GRID_GEN_METHOD="JPgrid" QUILTING="TRUE" USE_CCPP="TRUE" -CCPP_PHYS_SUITE="GFS_2017_gfdlmp" +CCPP_PHYS_SUITE="FV3_GFS_2017_gfdlmp" FCST_LEN_HRS="06" LBC_UPDATE_INTVL_HRS="6" diff --git a/tests/baseline_configs/config.regional_002.sh b/tests/baseline_configs/config.regional_002.sh index f97dbe288..a6d75cda0 100644 --- a/tests/baseline_configs/config.regional_002.sh +++ b/tests/baseline_configs/config.regional_002.sh @@ -40,7 +40,7 @@ PREDEF_GRID_NAME="GSD_HRRR25km" GRID_GEN_METHOD="JPgrid" QUILTING="TRUE" USE_CCPP="TRUE" -CCPP_PHYS_SUITE="GFS_2017_gfdlmp" +CCPP_PHYS_SUITE="FV3_GFS_2017_gfdlmp" FCST_LEN_HRS="06" LBC_UPDATE_INTVL_HRS="6" diff --git a/tests/baseline_configs/config.regional_003.sh b/tests/baseline_configs/config.regional_003.sh index 1ea244033..689f5c4c1 100644 --- a/tests/baseline_configs/config.regional_003.sh +++ b/tests/baseline_configs/config.regional_003.sh @@ -40,7 +40,7 @@ PREDEF_GRID_NAME="GSD_HRRR25km" GRID_GEN_METHOD="JPgrid" QUILTING="TRUE" USE_CCPP="TRUE" -CCPP_PHYS_SUITE="GSD_v0" +CCPP_PHYS_SUITE="FV3_GSD_v0" FCST_LEN_HRS="06" LBC_UPDATE_INTVL_HRS="6" diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index 5e216f946..b55af8007 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -287,9 +287,9 @@ USE_CCPP="FALSE" # #----------------------------------------------------------------------- # -#CCPP_PHYS_SUITE="GFS_2017_gfdlmp" -CCPP_PHYS_SUITE="GSD_v0" -#CCPP_PHYS_SUITE="GSD_SAR" +#CCPP_PHYS_SUITE="FV3_GFS_2017_gfdlmp" +CCPP_PHYS_SUITE="FV3_GSD_v0" +#CCPP_PHYS_SUITE="FV3_GSD_SAR" # #----------------------------------------------------------------------- # diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index 1b02c4836..a10fc8748 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -333,15 +333,15 @@ num_files_ozphys_after2015=${#indx_ozphys_after2015[@]} if [ ${num_files_ozphys_2015} -eq 1 ] && \ [ ${num_files_ozphys_after2015} -eq 0 ]; then - if [ "${CCPP_PHYS_SUITE}" = "GFS_2017_gfdlmp" ]; then + if [ "${CCPP_PHYS_SUITE}" = "FV3_GFS_2017_gfdlmp" ]; then 
FIXam_FILES_SYSDIR[${indx_ozphys_2015}]="${ozphys_after2015_fn}" fi elif [ ${num_files_ozphys_2015} -eq 0 ] && \ [ ${num_files_ozphys_after2015} -eq 1 ]; then - if [ "${CCPP_PHYS_SUITE}" = "GSD_v0" -o \ - "${CCPP_PHYS_SUITE}" = "GSD_SAR" ]; then + if [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" -o \ + "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then FIXam_FILES_SYSDIR[${indx_ozphys_after2015}]="${ozphys_2015_fn}" fi @@ -607,8 +607,8 @@ the forecast model directory sturcture to the experiment directory..." # taining cloud condensation nuclei (CCN) data that is needed by the # Thompson microphysics parameterization to the experiment directory. # - if [ "${CCPP_PHYS_SUITE}" = "GSD_v0" -o \ - "${CCPP_PHYS_SUITE}" = "GSD_SAR" ]; then + if [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" -o \ + "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then print_info_msg "$VERBOSE" " Copying the fixed file containing cloud condensation nuclei (CCN) data (needed by the Thompson microphysics parameterization) to the experiment @@ -664,8 +664,8 @@ set_file_param "${FV3_NML_FP}" "bc_update_interval" "${LBC_UPDATE_INTVL_HRS}" # For the GSD_v0 and the GSD_SAR physics suites, set the parameter lsoil # according to the external models used to obtain ICs and LBCs. # -if [ "${CCPP_PHYS_SUITE}" = "GSD_v0" -o \ - "${CCPP_PHYS_SUITE}" = "GSD_SAR" ]; then +if [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" -o \ + "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then if [ "${EXTRN_MDL_NAME_ICS}" = "GSMGFS" -o \ "${EXTRN_MDL_NAME_ICS}" = "FV3GFS" ] && \ diff --git a/ush/setup.sh b/ush/setup.sh index 176cfbebb..5bcd0684f 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -430,7 +430,7 @@ fi #----------------------------------------------------------------------- # if [ "${USE_CCPP}" = "TRUE" -a \ - "${CCPP_PHYS_SUITE}" = "GFS_2017_gfdlmp" ]; then + "${CCPP_PHYS_SUITE}" = "FV3_GFS_2017_gfdlmp" ]; then if [ "${EXTRN_MDL_NAME_ICS}" != "GSMGFS" -a \ "${EXTRN_MDL_NAME_ICS}" != "FV3GFS" ] || \ @@ -977,7 +977,7 @@ CCPP_PHYS_SUITE_IN_CCPP_FP="" CCPP_PHYS_SUITE_FP="" if [ "${USE_CCPP}" = "TRUE" ]; then - CCPP_PHYS_SUITE_FN="suite_FV3_${CCPP_PHYS_SUITE}.xml" + CCPP_PHYS_SUITE_FN="suite_${CCPP_PHYS_SUITE}.xml" CCPP_PHYS_SUITE_IN_CCPP_FP="${UFS_WTHR_MDL_DIR}/FV3/ccpp/suites/${CCPP_PHYS_SUITE_FN}" CCPP_PHYS_SUITE_FP="${EXPTDIR}/${CCPP_PHYS_SUITE_FN}" fi diff --git a/ush/templates/diag_table.GFS_2017_gfdlmp b/ush/templates/diag_table.FV3_GFS_2017_gfdlmp similarity index 100% rename from ush/templates/diag_table.GFS_2017_gfdlmp rename to ush/templates/diag_table.FV3_GFS_2017_gfdlmp diff --git a/ush/templates/diag_table.GSD_SAR b/ush/templates/diag_table.FV3_GSD_SAR similarity index 100% rename from ush/templates/diag_table.GSD_SAR rename to ush/templates/diag_table.FV3_GSD_SAR diff --git a/ush/templates/diag_table.GSD_v0 b/ush/templates/diag_table.FV3_GSD_v0 similarity index 100% rename from ush/templates/diag_table.GSD_v0 rename to ush/templates/diag_table.FV3_GSD_v0 diff --git a/ush/templates/field_table.GFS_2017_gfdlmp b/ush/templates/field_table.FV3_GFS_2017_gfdlmp similarity index 100% rename from ush/templates/field_table.GFS_2017_gfdlmp rename to ush/templates/field_table.FV3_GFS_2017_gfdlmp diff --git a/ush/templates/field_table.GSD_SAR b/ush/templates/field_table.FV3_GSD_SAR similarity index 100% rename from ush/templates/field_table.GSD_SAR rename to ush/templates/field_table.FV3_GSD_SAR diff --git a/ush/templates/field_table.GSD_v0 b/ush/templates/field_table.FV3_GSD_v0 similarity index 100% rename from ush/templates/field_table.GSD_v0 rename to 
ush/templates/field_table.FV3_GSD_v0 diff --git a/ush/templates/input.nml.GFS_2017_gfdlmp b/ush/templates/input.nml.FV3_GFS_2017_gfdlmp similarity index 100% rename from ush/templates/input.nml.GFS_2017_gfdlmp rename to ush/templates/input.nml.FV3_GFS_2017_gfdlmp diff --git a/ush/templates/input.nml.GSD_SAR b/ush/templates/input.nml.FV3_GSD_SAR similarity index 100% rename from ush/templates/input.nml.GSD_SAR rename to ush/templates/input.nml.FV3_GSD_SAR diff --git a/ush/templates/input.nml.GSD_v0 b/ush/templates/input.nml.FV3_GSD_v0 similarity index 100% rename from ush/templates/input.nml.GSD_v0 rename to ush/templates/input.nml.FV3_GSD_v0 diff --git a/ush/templates/model_configure.GFS_2017_gfdlmp b/ush/templates/model_configure.FV3_GFS_2017_gfdlmp similarity index 100% rename from ush/templates/model_configure.GFS_2017_gfdlmp rename to ush/templates/model_configure.FV3_GFS_2017_gfdlmp diff --git a/ush/templates/model_configure.GSD_SAR b/ush/templates/model_configure.FV3_GSD_SAR similarity index 100% rename from ush/templates/model_configure.GSD_SAR rename to ush/templates/model_configure.FV3_GSD_SAR diff --git a/ush/templates/model_configure.GSD_v0 b/ush/templates/model_configure.FV3_GSD_v0 similarity index 100% rename from ush/templates/model_configure.GSD_v0 rename to ush/templates/model_configure.FV3_GSD_v0 diff --git a/ush/valid_param_vals.sh b/ush/valid_param_vals.sh index 82080f1da..16c66a96f 100644 --- a/ush/valid_param_vals.sh +++ b/ush/valid_param_vals.sh @@ -3,7 +3,7 @@ valid_vals_VERBOSE=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_MACHINE=("WCOSS_C" "WCOSS" "DELL" "THEIA" "HERA" "JET" "ODIN" "CHEYENNE") valid_vals_PREDEF_GRID_NAME=("GSD_HAFSV0.A3km" "GSD_HAFSV0.A13km" "GSD_HAFSV0.A25km" "GSD_RAP13km" "GSD_HRRR3km" "GSD_HRRR13km" "GSD_HRRR25km" "EMC_CONUS" "EMC_AK") valid_vals_USE_CCPP=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") -valid_vals_CCPP_PHYS_SUITE=("GFS_2017_gfdlmp" "GSD_v0" "GSD_SAR") +valid_vals_CCPP_PHYS_SUITE=("FV3_GFS_2017_gfdlmp" "FV3_GSD_v0" "FV3_GSD_SAR") valid_vals_RES=("48" "96" "192" "384" "768" "1152" "3072") valid_vals_EXTRN_MDL_NAME_ICS=("GSMGFS" "FV3GFS" "RAPX" "HRRRX") valid_vals_EXTRN_MDL_NAME_LBCS=("GSMGFS" "FV3GFS" "RAPX" "HRRRX") From 565dd0eb3bb791708109e1684d69971ff1715cbe Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Wed, 11 Dec 2019 15:44:47 -0700 Subject: [PATCH 104/203] It turns out that for the make_ics and make_lbcs tasks, linking to the module file for chgres_cube in the UFS_UTILS repository is not sufficient. This is because for these tasks, an additional module (wgrib2) needs to be loaded. To fix this, create new text files (the *.local files) that contain the module load command for wgrib2 (one file per task). Then, instead of linking to the module file for chgres_cube, copy it to the modulefiles/hera directory and append to it either one or the other of the text files containing the additional wgrib2 load command. This is a bit convoluted. Coordination with the UFS_UTILS repo's admins may be required to simplify this approach. 
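A plain-shell sketch of the copy-and-append approach described above (the workflow itself uses its cp_vrfy wrapper and the MAKE_ICS_TN/MAKE_LBCS_TN task-name variables; the literal values here are illustrative):

  machine="hera"
  task="make_ics"                            # likewise for make_lbcs
  # CHGRES_DIR is assumed to already point at the cloned chgres_cube repo,
  # as in the generate_FV3SAR_wflow.sh hunk below.
  cp "${CHGRES_DIR}/modulefiles/chgres_cube.${machine}" "${task}"
  # Append the one-line local addition (e.g. "module load wgrib2/2.0.8"):
  cat "${task}.local" >> "${task}"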
--- modulefiles/hera/make_ics.local | 1 + modulefiles/hera/make_lbcs.local | 1 + ush/generate_FV3SAR_wflow.sh | 20 ++++++++++++++------ 3 files changed, 16 insertions(+), 6 deletions(-) create mode 100644 modulefiles/hera/make_ics.local create mode 100644 modulefiles/hera/make_lbcs.local diff --git a/modulefiles/hera/make_ics.local b/modulefiles/hera/make_ics.local new file mode 100644 index 000000000..d1644e89e --- /dev/null +++ b/modulefiles/hera/make_ics.local @@ -0,0 +1 @@ +module load wgrib2/2.0.8 diff --git a/modulefiles/hera/make_lbcs.local b/modulefiles/hera/make_lbcs.local new file mode 100644 index 000000000..d1644e89e --- /dev/null +++ b/modulefiles/hera/make_lbcs.local @@ -0,0 +1 @@ +module load wgrib2/2.0.8 diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index a10fc8748..00caa8703 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -266,16 +266,24 @@ cd_vrfy "${MODULES_DIR}/$machine" # modules used in the UFS_UTILS repo to build the orog code. #ln_vrfy -fs "${UFS_UTILS_DIR}/modulefiles/fv3gfs/orog.$machine" \ # "${MAKE_OROG_TN}" -ln_vrfy -fs "make_orog.hardcoded" "${MAKE_OROG_TN}" +ln_vrfy -fs "${MAKE_OROG_TN}.hardcoded" "${MAKE_OROG_TN}" ln_vrfy -fs "${UFS_UTILS_DIR}/modulefiles/modulefile.sfc_climo_gen.$machine" \ "${MAKE_SFC_CLIMO_TN}" -ln_vrfy -fs "${CHGRES_DIR}/modulefiles/chgres_cube.$machine" \ - "${MAKE_ICS_TN}" - -ln_vrfy -fs "${CHGRES_DIR}/modulefiles/chgres_cube.$machine" \ - "${MAKE_LBCS_TN}" +#ln_vrfy -fs "${CHGRES_DIR}/modulefiles/chgres_cube.$machine" \ +# "${MAKE_ICS_TN}" +#ln_vrfy -fs "${MAKE_ICS_TN}.hardcoded" "${MAKE_ICS_TN}" +cp_vrfy "${CHGRES_DIR}/modulefiles/chgres_cube.$machine" \ + "${MAKE_ICS_TN}" +cat "${MAKE_ICS_TN}.local" >> "${MAKE_ICS_TN}" + +#ln_vrfy -fs "${CHGRES_DIR}/modulefiles/chgres_cube.$machine" \ +# "${MAKE_LBCS_TN}" +#ln_vrfy -fs "${MAKE_LBCS_TN}.hardcoded" "${MAKE_LBCS_TN}" +cp_vrfy "${CHGRES_DIR}/modulefiles/chgres_cube.$machine" \ + "${MAKE_LBCS_TN}" +cat "${MAKE_LBCS_TN}.local" >> "${MAKE_LBCS_TN}" ln_vrfy -fs "${UFS_WTHR_MDL_DIR}/NEMS/src/conf/modules.nems" \ "${RUN_FCST_TN}" From fe485e8e3397c963b398ec2186e697a9178ff506 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Wed, 11 Dec 2019 15:57:47 -0700 Subject: [PATCH 105/203] Update to the latest working hashes for GFS ICs/LBCs with GFS_2017_gfdlmp physics. This combination of hashes has not yet been tested with GSD physics or with HRRR/RAP ICs/LBCs. --- Externals.cfg | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Externals.cfg b/Externals.cfg index 527731cf7..5c1db44ec 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -11,9 +11,9 @@ required = True protocol = git repo_url = https://github.com/NCAR/UFS_UTILS # Specify either a branch name or a hash but not both. -#branch = feature/chgres_grib2_gsd -hash = 6bedd3ef -local_path = sorc/UFS_UTILS_chgres_grib2_gsd +#branch = feature/chgres_grib2 +hash = 52dbd8d6 +local_path = sorc/UFS_UTILS_chgres_grib2 required = True [ufs_weather_model] @@ -21,7 +21,7 @@ protocol = git repo_url = https://github.com/NCAR/ufs-weather-model # Specify either a branch name or a hash but not both. #branch = dtc/develop -hash = bd76a74 +hash = bf05b97 local_path = sorc/ufs_weather_model required = True From 101f27cd180c6135d11b9cd6c3806eb882167b73 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Wed, 11 Dec 2019 16:00:41 -0700 Subject: [PATCH 106/203] Add baseline test to run Tanya S.'s FV3_GSD_SAR physics suite. This has not yet been tested. 
--- tests/baseline_configs/config.regional_004.sh | 57 +++++++++++++++++++ 1 file changed, 57 insertions(+) create mode 100644 tests/baseline_configs/config.regional_004.sh diff --git a/tests/baseline_configs/config.regional_004.sh b/tests/baseline_configs/config.regional_004.sh new file mode 100644 index 000000000..cc75361a6 --- /dev/null +++ b/tests/baseline_configs/config.regional_004.sh @@ -0,0 +1,57 @@ +# +# MACHINE will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +MACHINE="" +# +# ACCOUNT will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +ACCOUNT="" +# +# EXPT_SUBDIR will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to a value obtained from the name of this file. +# +EXPT_SUBDIR="" +# +# USE_CRON_TO_RELAUNCH may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. +# +USE_CRON_TO_RELAUNCH="TRUE" +# +# CRON_RELAUNCH_INTVL_MNTS may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. +# +CRON_RELAUNCH_INTVL_MNTS="02" + + +QUEUE_DEFAULT="batch" +QUEUE_HPSS="service" +QUEUE_FCST="batch" + +VERBOSE="TRUE" + +RUN_ENVIR="community" +PREEXISTING_DIR_METHOD="rename" + +PREDEF_GRID_NAME="GSD_HRRR25km" +GRID_GEN_METHOD="JPgrid" +QUILTING="TRUE" +USE_CCPP="TRUE" +CCPP_PHYS_SUITE="FV3_GSD_SAR" +FCST_LEN_HRS="06" +LBC_UPDATE_INTVL_HRS="6" + +DATE_FIRST_CYCL="20190520" +DATE_LAST_CYCL="20190520" +CYCL_HRS=( "00" ) + +EXTRN_MDL_NAME_ICS="HRRRX" +EXTRN_MDL_NAME_LBCS="RAPX" + +RUN_TASK_MAKE_GRID="TRUE" +RUN_TASK_MAKE_OROG="TRUE" +RUN_TASK_MAKE_SFC_CLIMO="TRUE" + From a231077e122876b06dbccb32f1ecea59238064e3 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Wed, 11 Dec 2019 16:01:33 -0700 Subject: [PATCH 107/203] Add two template configuration files to make it easier for users to get started with running the model (so we don't have to include copies of template configuration files in the google doc containing the instructions for running the FV3SAR community workflow). 
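A hedged usage sketch for these templates (the name of the configuration file that the experiment generation scripts actually read is an assumption here and is not shown in this patch; adjust as needed):

  cd ush
  cp config.community.sh config.sh           # assumed target file name
  # Edit at least MACHINE, ACCOUNT, and EXPT_SUBDIR for your environment,
  # then generate the experiment/workflow:
  ./generate_FV3SAR_wflow.sh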
--- ush/config.community.sh | 32 ++++++++++++++++++++++++++++++++ ush/config.nco.sh | 35 +++++++++++++++++++++++++++++++++++ 2 files changed, 67 insertions(+) create mode 100644 ush/config.community.sh create mode 100644 ush/config.nco.sh diff --git a/ush/config.community.sh b/ush/config.community.sh new file mode 100644 index 000000000..bf75da077 --- /dev/null +++ b/ush/config.community.sh @@ -0,0 +1,32 @@ +MACHINE="hera" +ACCOUNT="an_account" +EXPT_SUBDIR="test_community" + +QUEUE_DEFAULT="batch" +QUEUE_HPSS="service" +QUEUE_FCST="batch" + +VERBOSE="TRUE" + +RUN_ENVIR="community" +PREEXISTING_DIR_METHOD="rename" + +PREDEF_GRID_NAME="GSD_HRRR25km" +GRID_GEN_METHOD="JPgrid" +QUILTING="TRUE" +USE_CCPP="TRUE" +CCPP_PHYS_SUITE="FV3_GFS_2017_gfdlmp" +FCST_LEN_HRS="06" +LBC_UPDATE_INTVL_HRS="6" + +DATE_FIRST_CYCL="20190701" +DATE_LAST_CYCL="20190701" +CYCL_HRS=( "00" ) + +EXTRN_MDL_NAME_ICS="FV3GFS" +EXTRN_MDL_NAME_LBCS="FV3GFS" + +RUN_TASK_MAKE_GRID="TRUE" +RUN_TASK_MAKE_OROG="TRUE" +RUN_TASK_MAKE_SFC_CLIMO="TRUE" + diff --git a/ush/config.nco.sh b/ush/config.nco.sh new file mode 100644 index 000000000..f4b1ae332 --- /dev/null +++ b/ush/config.nco.sh @@ -0,0 +1,35 @@ +MACHINE="hera" +ACCOUNT="an_account" +EXPT_SUBDIR="test_nco" + +QUEUE_DEFAULT="batch" +QUEUE_HPSS="service" +QUEUE_FCST="batch" + +VERBOSE="TRUE" + +RUN_ENVIR="nco" +PREEXISTING_DIR_METHOD="rename" + +PREDEF_GRID_NAME="GSD_HRRR25km" +GRID_GEN_METHOD="JPgrid" +QUILTING="TRUE" +USE_CCPP="TRUE" +CCPP_PHYS_SUITE="FV3_GFS_2017_gfdlmp" +FCST_LEN_HRS="06" +LBC_UPDATE_INTVL_HRS="6" + +DATE_FIRST_CYCL="20190701" +DATE_LAST_CYCL="20190701" +CYCL_HRS=( "00" ) + +EXTRN_MDL_NAME_ICS="FV3GFS" +EXTRN_MDL_NAME_LBCS="FV3GFS" + +RUN_TASK_MAKE_GRID="TRUE" +RUN_TASK_MAKE_OROG="TRUE" +RUN_TASK_MAKE_SFC_CLIMO="TRUE" + +RUN="an_experiment" +COMINgfs="/scratch1/NCEPDEV/hwrf/noscrub/hafs-input/COMGFS" + From 9821a8135b780a41874dd2524a0f2e5c0c35c5dc Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Wed, 11 Dec 2019 16:52:23 -0700 Subject: [PATCH 108/203] Move module files for the various tasks for hera to a "tasks" subdirectory because we want to distinguish between module files for the tasks and module files needed for build of local codes (like regional_grid.fd). 
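After this commit and the two that follow, the modulefile tree distinguishes per-task environments from build environments for locally compiled codes: task modulefiles live under modulefiles/tasks/<machine>/<task_name>, while build modulefiles live under modulefiles/codes/<machine>/<code_name>. A simplified sketch of how a task would pick up its modulefile under the new layout, mirroring the modules_dir setting adopted later in this series by load_modules_run_task.sh (the explicit module use / module load pair is a stand-in for that script's actual loading logic, and the task name is illustrative):

  machine=${MACHINE,,}                                # e.g. "HERA" -> "hera"
  task_name="make_grid"                               # illustrative task name
  modules_dir="$HOMErrfs/modulefiles/tasks/$machine"
  module use "$modules_dir"
  module load "$task_name"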
--- modulefiles/{ => tasks}/hera/get_extrn_ics | 0 modulefiles/{ => tasks}/hera/get_extrn_lbcs | 0 modulefiles/{ => tasks}/hera/make_grid | 0 modulefiles/{ => tasks}/hera/make_ics.hardcoded | 0 modulefiles/{ => tasks}/hera/make_ics.local | 0 modulefiles/{ => tasks}/hera/make_lbcs.hardcoded | 0 modulefiles/{ => tasks}/hera/make_lbcs.local | 0 modulefiles/{ => tasks}/hera/make_orog.hardcoded | 0 modulefiles/{ => tasks}/hera/make_sfc_climo.hardcoded | 0 modulefiles/{ => tasks}/hera/run_fcst_no_ccpp.hardcoded | 0 modulefiles/{ => tasks}/hera/run_post | 0 11 files changed, 0 insertions(+), 0 deletions(-) rename modulefiles/{ => tasks}/hera/get_extrn_ics (100%) rename modulefiles/{ => tasks}/hera/get_extrn_lbcs (100%) rename modulefiles/{ => tasks}/hera/make_grid (100%) rename modulefiles/{ => tasks}/hera/make_ics.hardcoded (100%) rename modulefiles/{ => tasks}/hera/make_ics.local (100%) rename modulefiles/{ => tasks}/hera/make_lbcs.hardcoded (100%) rename modulefiles/{ => tasks}/hera/make_lbcs.local (100%) rename modulefiles/{ => tasks}/hera/make_orog.hardcoded (100%) rename modulefiles/{ => tasks}/hera/make_sfc_climo.hardcoded (100%) rename modulefiles/{ => tasks}/hera/run_fcst_no_ccpp.hardcoded (100%) rename modulefiles/{ => tasks}/hera/run_post (100%) diff --git a/modulefiles/hera/get_extrn_ics b/modulefiles/tasks/hera/get_extrn_ics similarity index 100% rename from modulefiles/hera/get_extrn_ics rename to modulefiles/tasks/hera/get_extrn_ics diff --git a/modulefiles/hera/get_extrn_lbcs b/modulefiles/tasks/hera/get_extrn_lbcs similarity index 100% rename from modulefiles/hera/get_extrn_lbcs rename to modulefiles/tasks/hera/get_extrn_lbcs diff --git a/modulefiles/hera/make_grid b/modulefiles/tasks/hera/make_grid similarity index 100% rename from modulefiles/hera/make_grid rename to modulefiles/tasks/hera/make_grid diff --git a/modulefiles/hera/make_ics.hardcoded b/modulefiles/tasks/hera/make_ics.hardcoded similarity index 100% rename from modulefiles/hera/make_ics.hardcoded rename to modulefiles/tasks/hera/make_ics.hardcoded diff --git a/modulefiles/hera/make_ics.local b/modulefiles/tasks/hera/make_ics.local similarity index 100% rename from modulefiles/hera/make_ics.local rename to modulefiles/tasks/hera/make_ics.local diff --git a/modulefiles/hera/make_lbcs.hardcoded b/modulefiles/tasks/hera/make_lbcs.hardcoded similarity index 100% rename from modulefiles/hera/make_lbcs.hardcoded rename to modulefiles/tasks/hera/make_lbcs.hardcoded diff --git a/modulefiles/hera/make_lbcs.local b/modulefiles/tasks/hera/make_lbcs.local similarity index 100% rename from modulefiles/hera/make_lbcs.local rename to modulefiles/tasks/hera/make_lbcs.local diff --git a/modulefiles/hera/make_orog.hardcoded b/modulefiles/tasks/hera/make_orog.hardcoded similarity index 100% rename from modulefiles/hera/make_orog.hardcoded rename to modulefiles/tasks/hera/make_orog.hardcoded diff --git a/modulefiles/hera/make_sfc_climo.hardcoded b/modulefiles/tasks/hera/make_sfc_climo.hardcoded similarity index 100% rename from modulefiles/hera/make_sfc_climo.hardcoded rename to modulefiles/tasks/hera/make_sfc_climo.hardcoded diff --git a/modulefiles/hera/run_fcst_no_ccpp.hardcoded b/modulefiles/tasks/hera/run_fcst_no_ccpp.hardcoded similarity index 100% rename from modulefiles/hera/run_fcst_no_ccpp.hardcoded rename to modulefiles/tasks/hera/run_fcst_no_ccpp.hardcoded diff --git a/modulefiles/hera/run_post b/modulefiles/tasks/hera/run_post similarity index 100% rename from modulefiles/hera/run_post rename to 
modulefiles/tasks/hera/run_post From d4b2405648d619536af79c5061976d0c063fd87c Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Wed, 11 Dec 2019 16:54:29 -0700 Subject: [PATCH 109/203] Add directory in which to place module files for building local codes (i.e. codes that are in the regional_workflow repository). Then add the module file for buiding regional_grid on hera. --- modulefiles/codes/hera/regional_grid | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 modulefiles/codes/hera/regional_grid diff --git a/modulefiles/codes/hera/regional_grid b/modulefiles/codes/hera/regional_grid new file mode 100644 index 000000000..532e7fb65 --- /dev/null +++ b/modulefiles/codes/hera/regional_grid @@ -0,0 +1,7 @@ +#%Module##################################################### +## Module file for regional_grid +############################################################# +module purge +module load intel/18.0.5.274 +module load netcdf/4.6.1 +module load hdf5/1.10.4 From ff1061a82eed7ea61ecaa6fc03d5f9740aa735a8 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Wed, 11 Dec 2019 16:58:06 -0700 Subject: [PATCH 110/203] Add module files to build the global_equiv_resol and mosaic_file codes (which are local) on hera. Also, add module files to build all three local codes (global_equiv_resol, mosaic_file, and regional_grid) on cheyenne. --- modulefiles/codes/cheyenne/global_equiv_resol | 10 ++++++++++ modulefiles/codes/cheyenne/mosaic_file | 10 ++++++++++ modulefiles/codes/cheyenne/regional_grid | 10 ++++++++++ modulefiles/codes/hera/global_equiv_resol | 7 +++++++ modulefiles/codes/hera/mosaic_file | 7 +++++++ 5 files changed, 44 insertions(+) create mode 100644 modulefiles/codes/cheyenne/global_equiv_resol create mode 100644 modulefiles/codes/cheyenne/mosaic_file create mode 100644 modulefiles/codes/cheyenne/regional_grid create mode 100644 modulefiles/codes/hera/global_equiv_resol create mode 100644 modulefiles/codes/hera/mosaic_file diff --git a/modulefiles/codes/cheyenne/global_equiv_resol b/modulefiles/codes/cheyenne/global_equiv_resol new file mode 100644 index 000000000..d50a83ea0 --- /dev/null +++ b/modulefiles/codes/cheyenne/global_equiv_resol @@ -0,0 +1,10 @@ +#%Module##################################################### +## Module file for global_equiv_resol on NCAR/UCAR Cheyenne +############################################################# +module purge +module load ncarenv/1.3 +module load intel/18.0.5 +module load ncarcompilers/0.5.0 +module load netcdf/4.6.3 +# No hdf5 loaded since netcdf and hdf5 reside together on cheyenne + diff --git a/modulefiles/codes/cheyenne/mosaic_file b/modulefiles/codes/cheyenne/mosaic_file new file mode 100644 index 000000000..a24cf1ce6 --- /dev/null +++ b/modulefiles/codes/cheyenne/mosaic_file @@ -0,0 +1,10 @@ +#%Module##################################################### +## Module file for mosaic_file on NCAR/UCAR Cheyenne +############################################################# +module purge +module load ncarenv/1.3 +module load intel/18.0.5 +module load ncarcompilers/0.5.0 +module load impi/2018.4.274 +module load netcdf/4.6.3 +# No hdf5 loaded since netcdf and hdf5 reside together on cheyenne diff --git a/modulefiles/codes/cheyenne/regional_grid b/modulefiles/codes/cheyenne/regional_grid new file mode 100644 index 000000000..bd3d7874c --- /dev/null +++ b/modulefiles/codes/cheyenne/regional_grid @@ -0,0 +1,10 @@ +#%Module##################################################### +## Module file for regional_grid on NCAR/UCAR Cheyenne 
+############################################################# +module purge +module load ncarenv/1.3 +module load intel/18.0.5 +module load ncarcompilers/0.5.0 +module load impi/2018.4.274 +module load netcdf/4.6.3 +# No hdf5 loaded since netcdf and hdf5 reside together on cheyenne diff --git a/modulefiles/codes/hera/global_equiv_resol b/modulefiles/codes/hera/global_equiv_resol new file mode 100644 index 000000000..532e7fb65 --- /dev/null +++ b/modulefiles/codes/hera/global_equiv_resol @@ -0,0 +1,7 @@ +#%Module##################################################### +## Module file for regional_grid +############################################################# +module purge +module load intel/18.0.5.274 +module load netcdf/4.6.1 +module load hdf5/1.10.4 diff --git a/modulefiles/codes/hera/mosaic_file b/modulefiles/codes/hera/mosaic_file new file mode 100644 index 000000000..532e7fb65 --- /dev/null +++ b/modulefiles/codes/hera/mosaic_file @@ -0,0 +1,7 @@ +#%Module##################################################### +## Module file for regional_grid +############################################################# +module purge +module load intel/18.0.5.274 +module load netcdf/4.6.1 +module load hdf5/1.10.4 From d19178bdac2cf6cbc6931ed1e5cbf006c6fd4016 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Wed, 11 Dec 2019 17:11:09 -0700 Subject: [PATCH 111/203] In the build scripts for the local codes, update the locations where the module files for the builds are located. --- sorc/build_global_equiv_resol.sh | 6 ++++-- sorc/build_mosaic_file.sh | 6 ++++-- sorc/build_regional_grid.sh | 6 ++++-- 3 files changed, 12 insertions(+), 6 deletions(-) diff --git a/sorc/build_global_equiv_resol.sh b/sorc/build_global_equiv_resol.sh index 76545d2cd..bcd57f7c6 100755 --- a/sorc/build_global_equiv_resol.sh +++ b/sorc/build_global_equiv_resol.sh @@ -43,8 +43,10 @@ cd $tmpDir # set +x module list -module use ../../../modulefiles/regional_workflow -module load ${package_name}.${target} +#module use ../../../modulefiles/global_equiv_resol +#module load ${package_name}.${target} +module use ../../../modulefiles/codes/${target} +module load ${package_name} module list set -x # diff --git a/sorc/build_mosaic_file.sh b/sorc/build_mosaic_file.sh index 097ee51ed..416398e34 100755 --- a/sorc/build_mosaic_file.sh +++ b/sorc/build_mosaic_file.sh @@ -43,8 +43,10 @@ cd $tmpDir # set +x module list -module use ../../../modulefiles/regional_workflow -module load ${package_name}.${target} +#module use ../../../modulefiles/build_mosaic +#module load ${package_name}.${target} +module use ../../../modulefiles/codes/${target} +module load ${package_name} module list set -x # diff --git a/sorc/build_regional_grid.sh b/sorc/build_regional_grid.sh index 0e059006b..9ad644502 100755 --- a/sorc/build_regional_grid.sh +++ b/sorc/build_regional_grid.sh @@ -43,8 +43,10 @@ cd $tmpDir # set +x module list -module use ../../../modulefiles/regional_workflow -module load ${package_name}.${target} +#module use ../../../modulefiles/regional_workflow +#module load ${package_name}.${target} +module use ../../../modulefiles/codes/${target} +module load ${package_name} module list set -x # From 69c1a530e862cb57c11758ca743e780f96433a2b Mon Sep 17 00:00:00 2001 From: jeff beck Date: Thu, 12 Dec 2019 00:00:32 +0000 Subject: [PATCH 112/203] New HRRR_AK 3-km pre-defined domain and changes to use RAPX data for ICs --- scripts/exregional_make_ics.sh | 37 ++++++++++++++++++++++- ush/set_predef_grid_params.sh | 55 ++++++++++++++++++++++++++++++++++ 
ush/valid_param_vals.sh | 2 +- 3 files changed, 92 insertions(+), 2 deletions(-) diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh index 245e2691f..97c93f808 100755 --- a/scripts/exregional_make_ics.sh +++ b/scripts/exregional_make_ics.sh @@ -346,7 +346,7 @@ HRRRX grib2 files created after about \"${cdate_min_HRRRX}\"..." fi fi - geogrid_file_input_grid="/scratch2/BMC/det/beck/SAR-FV3/geo_em.d01.nc" # Maybe make this a fix file? + geogrid_file_input_grid="/scratch2/BMC/det/beck/SAR-FV3/geo_em.d01.nc_HRRRX" # Maybe make this a fix file? replace_vgtyp=".false." replace_sotyp=".false." replace_vgfrc=".false." @@ -354,6 +354,41 @@ HRRRX grib2 files created after about \"${cdate_min_HRRRX}\"..." ;; +"RAPX") + + external_model="RAP" + + fn_grib2="${EXTRN_MDL_FNS[0]}" + input_type="grib2" + + internal_GSD=".false." +# cdate_min_HRRRX="2019111500" +# if [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" -o \ +# "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ] && \ +# [ ${CDATE} -gt ${cdate_min_HRRRX} ]; then +# print_info_msg " +#Setting the chgres_cube namelist setting \"internal_GSD\" to \".true.\" in +#order to read in land surface model (LSM) variables available in the +#HRRRX grib2 files created after about \"${cdate_min_HRRRX}\"..." +# internal_GSD=".true." +# fi + + if [ "${USE_CCPP}" = "TRUE" ]; then + if [ "${CCPP_PHYS_SUITE}" = "FV3_GFS_2017_gfdlmp" ]; then + numsoil_out="4" + elif [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" -o \ + "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then + numsoil_out="9" + fi + fi + + geogrid_file_input_grid="/scratch2/BMC/det/beck/SAR-FV3/geo_em.d01.nc_RAPX" # Maybe make this a fix file? + replace_vgtyp=".false." + replace_sotyp=".false." + replace_vgfrc=".false." + tg3_from_soil=".true." + + ;; *) print_err_msg_exit "\ diff --git a/ush/set_predef_grid_params.sh b/ush/set_predef_grid_params.sh index 5283b7e93..188c9e711 100644 --- a/ush/set_predef_grid_params.sh +++ b/ush/set_predef_grid_params.sh @@ -655,6 +655,61 @@ predefined domain: fi ;; # +#----------------------------------------------------------------------- +# +# 3-km HRRR Alaska grid. 
+# +#----------------------------------------------------------------------- +# +"GSD_HRRR_AK_3km") + + if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then + + print_err_msg_exit "\ +The parameters for a \"${GRID_GEN_METHOD}\" type grid have not yet been specified for this +predefined domain: + PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" + GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\" +" + elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then + + lon_rgnl_ctr=-163.5 #HRRR-AK is -163.5 + lat_rgnl_ctr=65.8 #HRRR-AK is 60.8 + + delx="3000.0" + dely="3000.0" + + nx_T7=1230 #HRRR-AK is 1300 + ny_T7=850 #HRRR-AK is 920 + + nhw_T7=6 + + dt_atmos="50" + + layout_x="30" + layout_y="17" + blocksize="25" + + if [ "$QUILTING" = "TRUE" ]; then + WRTCMP_write_groups="1" + WRTCMP_write_tasks_per_group="2" + WRTCMP_output_grid="lambert_conformal" + WRTCMP_cen_lon="${lon_rgnl_ctr}" + WRTCMP_cen_lat="${lat_rgnl_ctr}" + WRTCMP_stdlat1="${lat_rgnl_ctr}" + WRTCMP_stdlat2="${lat_rgnl_ctr}" + WRTCMP_nx="191" + WRTCMP_ny="97" + WRTCMP_lon_lwr_left="-120.72962370" + WRTCMP_lat_lwr_left="25.11648583" + WRTCMP_dx="$delx" + WRTCMP_dy="$dely" + fi + + fi + ;; + +# esac # #----------------------------------------------------------------------- diff --git a/ush/valid_param_vals.sh b/ush/valid_param_vals.sh index 16c66a96f..d7e3ae6dd 100644 --- a/ush/valid_param_vals.sh +++ b/ush/valid_param_vals.sh @@ -1,7 +1,7 @@ valid_vals_RUN_ENVIR=("nco" "community") valid_vals_VERBOSE=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_MACHINE=("WCOSS_C" "WCOSS" "DELL" "THEIA" "HERA" "JET" "ODIN" "CHEYENNE") -valid_vals_PREDEF_GRID_NAME=("GSD_HAFSV0.A3km" "GSD_HAFSV0.A13km" "GSD_HAFSV0.A25km" "GSD_RAP13km" "GSD_HRRR3km" "GSD_HRRR13km" "GSD_HRRR25km" "EMC_CONUS" "EMC_AK") +valid_vals_PREDEF_GRID_NAME=("GSD_HAFSV0.A3km" "GSD_HAFSV0.A13km" "GSD_HAFSV0.A25km" "GSD_RAP13km" "GSD_HRRR3km" "GSD_HRRR13km" "GSD_HRRR25km" "GSD_HRRR_AK_3km" "EMC_CONUS" "EMC_AK") valid_vals_USE_CCPP=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_CCPP_PHYS_SUITE=("FV3_GFS_2017_gfdlmp" "FV3_GSD_v0" "FV3_GSD_SAR") valid_vals_RES=("48" "96" "192" "384" "768" "1152" "3072") From 51ea978883b6403e40052e73dcf772e8444858bb Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Wed, 11 Dec 2019 17:11:45 -0700 Subject: [PATCH 113/203] Update the location where the chgres_cube code is located. This needs to be automated. --- sorc/install_all.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sorc/install_all.sh b/sorc/install_all.sh index 0d8cbccb5..0087caefd 100755 --- a/sorc/install_all.sh +++ b/sorc/install_all.sh @@ -35,7 +35,7 @@ fi #------------------------------------ # install chgres_cube #------------------------------------ - ${CP} UFS_UTILS_chgres_grib2_gsd/exec/chgres_cube.exe ../exec/chgres_cube.exe + ${CP} UFS_UTILS_chgres_grib2/exec/chgres_cube.exe ../exec/chgres_cube.exe #------------------------------------ # install orog From 6f3d216ad7a868562501f7d72de8afae8a0249ba Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 13 Dec 2019 10:58:37 -0700 Subject: [PATCH 114/203] Bug fixes in paths of module files. 
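Both path fixes below follow directly from the modulefile reorganization above: code that used to look for task modulefiles directly under modulefiles/<machine> must now look under modulefiles/tasks/<machine>. The two corrected locations, shown with an illustrative machine value:

  machine=${MACHINE,,}                                # "HERA" -> "hera"
  cd "${MODULES_DIR}/tasks/$machine"                  # generate_FV3SAR_wflow.sh; was ${MODULES_DIR}/$machine
  modules_dir="$HOMErrfs/modulefiles/tasks/$machine"  # load_modules_run_task.sh; was $HOMErrfs/modulefiles/$machine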
--- ush/generate_FV3SAR_wflow.sh | 2 +- ush/load_modules_run_task.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index 00caa8703..c2faa6aa4 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -256,7 +256,7 @@ sed -i -r -e "s|${regex_search}|${all_cycledefs}|g" "${WFLOW_XML_FP}" # machine=${MACHINE,,} -cd_vrfy "${MODULES_DIR}/$machine" +cd_vrfy "${MODULES_DIR}/tasks/$machine" # # The "module" file (really a shell script) for orog in the UFS_UTILS diff --git a/ush/load_modules_run_task.sh b/ush/load_modules_run_task.sh index 4a072c80b..907dc28b0 100755 --- a/ush/load_modules_run_task.sh +++ b/ush/load_modules_run_task.sh @@ -195,7 +195,7 @@ jjob_fp="$2" #----------------------------------------------------------------------- # machine=${MACHINE,,} -modules_dir="$HOMErrfs/modulefiles/$machine" +modules_dir="$HOMErrfs/modulefiles/tasks/$machine" modulefile_name="${task_name}" # Dom says that a correct modules.fv3 file is generated by the forecast From de665f8720c813252c80fdc5d30ed589f65e808c Mon Sep 17 00:00:00 2001 From: jeff beck Date: Fri, 13 Dec 2019 18:21:41 +0000 Subject: [PATCH 115/203] Bug fixes. --- scripts/exregional_run_fcst.sh | 6 +++--- ush/generate_FV3SAR_wflow.sh | 2 +- ush/load_modules_run_task.sh | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index 51c56da75..7985079b3 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -390,9 +390,9 @@ if [ "${USE_CCPP}" = "TRUE" ]; then ln_vrfy -sf -t ${CYCLE_DIR} ${CCPP_PHYS_SUITE_FP} - if [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" -o \\ - "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then - ln_vrfy -sf -t ${CYCLE_DIR} $EXPTDIR/CCN_ACTIVATE.BIN + if [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" ] || \ + [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then + ln_vrfy -sf -t ${CYCLE_DIR} ${EXPTDIR}/CCN_ACTIVATE.BIN fi fi diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index 00caa8703..c2faa6aa4 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -256,7 +256,7 @@ sed -i -r -e "s|${regex_search}|${all_cycledefs}|g" "${WFLOW_XML_FP}" # machine=${MACHINE,,} -cd_vrfy "${MODULES_DIR}/$machine" +cd_vrfy "${MODULES_DIR}/tasks/$machine" # # The "module" file (really a shell script) for orog in the UFS_UTILS diff --git a/ush/load_modules_run_task.sh b/ush/load_modules_run_task.sh index 4a072c80b..907dc28b0 100755 --- a/ush/load_modules_run_task.sh +++ b/ush/load_modules_run_task.sh @@ -195,7 +195,7 @@ jjob_fp="$2" #----------------------------------------------------------------------- # machine=${MACHINE,,} -modules_dir="$HOMErrfs/modulefiles/$machine" +modules_dir="$HOMErrfs/modulefiles/tasks/$machine" modulefile_name="${task_name}" # Dom says that a correct modules.fv3 file is generated by the forecast From 88c7778104bc9831b5248808305e24e1bebd2591 Mon Sep 17 00:00:00 2001 From: jeff beck Date: Fri, 13 Dec 2019 18:22:26 +0000 Subject: [PATCH 116/203] New GSD HRRR_AK 3-km SAR domain --- ush/set_predef_grid_params.sh | 42 +++++++++++++++++------------------ 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/ush/set_predef_grid_params.sh b/ush/set_predef_grid_params.sh index 188c9e711..a7fa9ce9d 100644 --- a/ush/set_predef_grid_params.sh +++ b/ush/set_predef_grid_params.sh @@ -673,37 +673,37 @@ predefined domain: " elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - lon_rgnl_ctr=-163.5 
#HRRR-AK is -163.5 - lat_rgnl_ctr=65.8 #HRRR-AK is 60.8 + LON_RGNL_CTR=-163.5 #HRRR-AK is -163.5 + LAT_RGNL_CTR=62.8 #HRRR-AK is 60.8 - delx="3000.0" - dely="3000.0" + DELX="3000.0" + DELY="3000.0" - nx_T7=1230 #HRRR-AK is 1300 - ny_T7=850 #HRRR-AK is 920 + NX_T7=1230 #HRRR-AK is 1300 + NY_T7=850 #HRRR-AK is 920 - nhw_T7=6 + NHW_T7=6 - dt_atmos="50" + DT_ATMOS="50" - layout_x="30" - layout_y="17" - blocksize="25" + LAYOUT_X="30" + LAYOUT_Y="17" + BLOCKSIZE="25" if [ "$QUILTING" = "TRUE" ]; then WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="2" WRTCMP_output_grid="lambert_conformal" - WRTCMP_cen_lon="${lon_rgnl_ctr}" - WRTCMP_cen_lat="${lat_rgnl_ctr}" - WRTCMP_stdlat1="${lat_rgnl_ctr}" - WRTCMP_stdlat2="${lat_rgnl_ctr}" - WRTCMP_nx="191" - WRTCMP_ny="97" - WRTCMP_lon_lwr_left="-120.72962370" - WRTCMP_lat_lwr_left="25.11648583" - WRTCMP_dx="$delx" - WRTCMP_dy="$dely" + WRTCMP_cen_lon="${LON_RGNL_CTR}" + WRTCMP_cen_lat="${LAT_RGNL_CTR}" + WRTCMP_stdlat1="${LAT_RGNL_CTR}" + WRTCMP_stdlat2="${LAT_RGNL_CTR}" + WRTCMP_nx="1169" + WRTCMP_ny="762" + WRTCMP_lon_lwr_left="172.0" + WRTCMP_lat_lwr_left="49.0" + WRTCMP_dx="$DELX" + WRTCMP_dy="$DELY" fi fi From e3e9b2eff7c01725e8d14731cc36ee4f4e9cb2c0 Mon Sep 17 00:00:00 2001 From: jeff beck Date: Fri, 13 Dec 2019 19:41:57 +0000 Subject: [PATCH 117/203] Add 50-km GSD HRRR_AK domain --- ush/set_predef_grid_params.sh | 53 +++++++++++++++++++++++++++++++++++ ush/valid_param_vals.sh | 2 +- 2 files changed, 54 insertions(+), 1 deletion(-) diff --git a/ush/set_predef_grid_params.sh b/ush/set_predef_grid_params.sh index a7fa9ce9d..adea8645d 100644 --- a/ush/set_predef_grid_params.sh +++ b/ush/set_predef_grid_params.sh @@ -708,7 +708,60 @@ predefined domain: fi ;; +# +#----------------------------------------------------------------------- +# +# 3-km HRRR Alaska grid. 
+# +#----------------------------------------------------------------------- +# +"GSD_HRRR_AK_50km") + + if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then + + print_err_msg_exit "\ +The parameters for a \"${GRID_GEN_METHOD}\" type grid have not yet been specified for this +predefined domain: + PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" + GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\" +" + elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then + + LON_RGNL_CTR=-163.5 #HRRR-AK is -163.5 + LAT_RGNL_CTR=62.8 #HRRR-AK is 60.8 + + DELX="50000.0" + DELY="50000.0" + NX_T7=74 #HRRR-AK is 1300 + NY_T7=51 #HRRR-AK is 920 + + NHW_T7=6 + + DT_ATMOS="600" + + LAYOUT_X="2" + LAYOUT_Y="3" + BLOCKSIZE="37" + + if [ "$QUILTING" = "TRUE" ]; then + WRTCMP_write_groups="1" + WRTCMP_write_tasks_per_group="1" + WRTCMP_output_grid="lambert_conformal" + WRTCMP_cen_lon="${LON_RGNL_CTR}" + WRTCMP_cen_lat="${LAT_RGNL_CTR}" + WRTCMP_stdlat1="${LAT_RGNL_CTR}" + WRTCMP_stdlat2="${LAT_RGNL_CTR}" + WRTCMP_nx="70" + WRTCMP_ny="45" + WRTCMP_lon_lwr_left="172.0" + WRTCMP_lat_lwr_left="49.0" + WRTCMP_dx="$DELX" + WRTCMP_dy="$DELY" + fi + + fi + ;; # esac # diff --git a/ush/valid_param_vals.sh b/ush/valid_param_vals.sh index d7e3ae6dd..6ce23983c 100644 --- a/ush/valid_param_vals.sh +++ b/ush/valid_param_vals.sh @@ -1,7 +1,7 @@ valid_vals_RUN_ENVIR=("nco" "community") valid_vals_VERBOSE=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_MACHINE=("WCOSS_C" "WCOSS" "DELL" "THEIA" "HERA" "JET" "ODIN" "CHEYENNE") -valid_vals_PREDEF_GRID_NAME=("GSD_HAFSV0.A3km" "GSD_HAFSV0.A13km" "GSD_HAFSV0.A25km" "GSD_RAP13km" "GSD_HRRR3km" "GSD_HRRR13km" "GSD_HRRR25km" "GSD_HRRR_AK_3km" "EMC_CONUS" "EMC_AK") +valid_vals_PREDEF_GRID_NAME=("GSD_HAFSV0.A3km" "GSD_HAFSV0.A13km" "GSD_HAFSV0.A25km" "GSD_RAP13km" "GSD_HRRR3km" "GSD_HRRR13km" "GSD_HRRR25km" "GSD_HRRR_AK_3km" "GSD_HRRR_AK_50km" "EMC_CONUS" "EMC_AK") valid_vals_USE_CCPP=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_CCPP_PHYS_SUITE=("FV3_GFS_2017_gfdlmp" "FV3_GSD_v0" "FV3_GSD_SAR") valid_vals_RES=("48" "96" "192" "384" "768" "1152" "3072") From 64176fd42b2ec7543fd812cc9fa0c107d487eac8 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 13 Dec 2019 13:36:56 -0700 Subject: [PATCH 118/203] Change "-o" (OR) operator to "||" and "-a" (AND) operator to "&&" whereever possible. Also, one bug fix in CCN file if-statement. --- scripts/exregional_get_extrn_files.sh | 4 ++-- scripts/exregional_make_ics.sh | 8 ++++---- scripts/exregional_make_lbcs.sh | 8 ++++---- scripts/exregional_run_fcst.sh | 4 ++-- ush/generate_FV3SAR_wflow.sh | 12 ++++++------ ush/load_modules_run_task.sh | 10 +++++----- ush/setup.sh | 7 ++++--- 7 files changed, 27 insertions(+), 26 deletions(-) diff --git a/scripts/exregional_get_extrn_files.sh b/scripts/exregional_get_extrn_files.sh index 49ddec865..fa1899ed2 100755 --- a/scripts/exregional_get_extrn_files.sh +++ b/scripts/exregional_get_extrn_files.sh @@ -396,8 +396,8 @@ details: # "./", which are the only case encountered thus far. The code may have # to be modified to accomodate other cases. 
# - if [ "${EXTRN_MDL_ARCVREL_DIR:0:1}" = "/" -o \ - "${EXTRN_MDL_ARCVREL_DIR:0:2}" = "./" ]; then + if [ "${EXTRN_MDL_ARCVREL_DIR:0:1}" = "/" ] || \ + [ "${EXTRN_MDL_ARCVREL_DIR:0:2}" = "./" ]; then # # Strip the "/" or "./" from the beginning of EXTRN_MDL_ARCVREL_DIR to # obtain the relative directory from which to move the extracted files diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh index 245e2691f..44640027a 100755 --- a/scripts/exregional_make_ics.sh +++ b/scripts/exregional_make_ics.sh @@ -285,8 +285,8 @@ case "${EXTRN_MDL_NAME_ICS}" in if [ "${USE_CCPP}" = "TRUE" ]; then if [ "${CCPP_PHYS_SUITE}" = "FV3_GFS_2017_gfdlmp" ]; then tracers="\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"" - elif [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" -o \ - "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then + elif [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" ] || \ + [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then # For GSD physics, add three additional tracers (the ice, rain and water # number concentrations) that are required for Thompson microphysics. tracers="\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\",\"ice_nc\",\"rain_nc\",\"water_nc\"" @@ -340,8 +340,8 @@ HRRRX grib2 files created after about \"${cdate_min_HRRRX}\"..." if [ "${USE_CCPP}" = "TRUE" ]; then if [ "${CCPP_PHYS_SUITE}" = "FV3_GFS_2017_gfdlmp" ]; then numsoil_out="4" - elif [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" -o \ - "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then + elif [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" ] || \ + [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then numsoil_out="9" fi fi diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh index f985a7ccb..8b07715c0 100755 --- a/scripts/exregional_make_lbcs.sh +++ b/scripts/exregional_make_lbcs.sh @@ -271,8 +271,8 @@ case "${EXTRN_MDL_NAME_LBCS}" in if [ "${USE_CCPP}" = "TRUE" ]; then if [ "${CCPP_PHYS_SUITE}" = "FV3_GFS_2017_gfdlmp" ]; then tracers="\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"" - elif [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" -o \ - "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then + elif [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" ] || \ + [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then # For GSD physics, add three additional tracers (the ice, rain and water # number concentrations) that are required for Thompson microphysics. 
tracers="\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\",\"ice_nc\",\"rain_nc\",\"water_nc\"" @@ -312,8 +312,8 @@ case "${EXTRN_MDL_NAME_LBCS}" in if [ "${USE_CCPP}" = "TRUE" ]; then if [ "${CCPP_PHYS_SUITE}" = "FV3_GFS_2017_gfdlmp" ]; then numsoil_out="4" - elif [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" -o \ - "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then + elif [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" ] || \ + [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then numsoil_out="9" fi fi diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index 51c56da75..732ac8445 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -390,8 +390,8 @@ if [ "${USE_CCPP}" = "TRUE" ]; then ln_vrfy -sf -t ${CYCLE_DIR} ${CCPP_PHYS_SUITE_FP} - if [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" -o \\ - "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then + if [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" ] || \ + [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then ln_vrfy -sf -t ${CYCLE_DIR} $EXPTDIR/CCN_ACTIVATE.BIN fi diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index c2faa6aa4..d4f106728 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -348,8 +348,8 @@ if [ ${num_files_ozphys_2015} -eq 1 ] && \ elif [ ${num_files_ozphys_2015} -eq 0 ] && \ [ ${num_files_ozphys_after2015} -eq 1 ]; then - if [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" -o \ - "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then + if [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" ] || \ + [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then FIXam_FILES_SYSDIR[${indx_ozphys_after2015}]="${ozphys_2015_fn}" fi @@ -615,8 +615,8 @@ the forecast model directory sturcture to the experiment directory..." # taining cloud condensation nuclei (CCN) data that is needed by the # Thompson microphysics parameterization to the experiment directory. # - if [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" -o \ - "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then + if [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" ] || \ + [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then print_info_msg "$VERBOSE" " Copying the fixed file containing cloud condensation nuclei (CCN) data (needed by the Thompson microphysics parameterization) to the experiment @@ -672,8 +672,8 @@ set_file_param "${FV3_NML_FP}" "bc_update_interval" "${LBC_UPDATE_INTVL_HRS}" # For the GSD_v0 and the GSD_SAR physics suites, set the parameter lsoil # according to the external models used to obtain ICs and LBCs. # -if [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" -o \ - "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then +if [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" ] || \ + [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then if [ "${EXTRN_MDL_NAME_ICS}" = "GSMGFS" -o \ "${EXTRN_MDL_NAME_ICS}" = "FV3GFS" ] && \ diff --git a/ush/load_modules_run_task.sh b/ush/load_modules_run_task.sh index 907dc28b0..f38da12d2 100755 --- a/ush/load_modules_run_task.sh +++ b/ush/load_modules_run_task.sh @@ -230,11 +230,11 @@ modulefile_path=$( readlink -f "${modules_dir}/${modulefile_name}" ) if [ ! 
-f "${modulefile_path}" ]; then - if [ "${task_name}" = "${MAKE_OROG_TN}" -o \ - "${task_name}" = "${MAKE_SFC_CLIMO_TN}" -o \ - "${task_name}" = "${MAKE_ICS_TN}" -o \ - "${task_name}" = "${MAKE_LBCS_TN}" -o \ - "${task_name}" = "${RUN_FCST_TN}" ]; then + if [ "${task_name}" = "${MAKE_OROG_TN}" ] || \ + [ "${task_name}" = "${MAKE_SFC_CLIMO_TN}" ] || \ + [ "${task_name}" = "${MAKE_ICS_TN}" ] || \ + [ "${task_name}" = "${MAKE_LBCS_TN}" ] || \ + [ "${task_name}" = "${RUN_FCST_TN}" ]; then print_err_msg_exit "\ The target (modulefile_path) of the symlink (modulefile_name) in the diff --git a/ush/setup.sh b/ush/setup.sh index 5bcd0684f..252c59636 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -429,8 +429,8 @@ fi # #----------------------------------------------------------------------- # -if [ "${USE_CCPP}" = "TRUE" -a \ - "${CCPP_PHYS_SUITE}" = "FV3_GFS_2017_gfdlmp" ]; then +if [ "${USE_CCPP}" = "TRUE" ] && \ + [ "${CCPP_PHYS_SUITE}" = "FV3_GFS_2017_gfdlmp" ]; then if [ "${EXTRN_MDL_NAME_ICS}" != "GSMGFS" -a \ "${EXTRN_MDL_NAME_ICS}" != "FV3GFS" ] || \ @@ -502,7 +502,8 @@ have this form: CYCL_HRS[$i] = \"${CYCL_HRS[$i]}\"" fi - if [ "${CYCL_OR_NULL}" -lt "0" ] || [ "${CYCL_OR_NULL}" -gt "23" ]; then + if [ "${CYCL_OR_NULL}" -lt "0" ] || \ + [ "${CYCL_OR_NULL}" -gt "23" ]; then print_err_msg_exit "\ Each element of CYCL_HRS must be an integer between \"00\" and \"23\", in- clusive (including a leading \"0\", if necessary), specifying an hour-of- From c004a27cd1cd428522bdac0d86bdf5648ce93f33 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 13 Dec 2019 13:58:20 -0700 Subject: [PATCH 119/203] Remove commented-out code. --- scripts/exregional_make_ics.sh | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh index c30bda000..365e8c6c5 100755 --- a/scripts/exregional_make_ics.sh +++ b/scripts/exregional_make_ics.sh @@ -362,22 +362,12 @@ HRRRX grib2 files created after about \"${cdate_min_HRRRX}\"..." input_type="grib2" internal_GSD=".false." -# cdate_min_HRRRX="2019111500" -# if [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" -o \ -# "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ] && \ -# [ ${CDATE} -gt ${cdate_min_HRRRX} ]; then -# print_info_msg " -#Setting the chgres_cube namelist setting \"internal_GSD\" to \".true.\" in -#order to read in land surface model (LSM) variables available in the -#HRRRX grib2 files created after about \"${cdate_min_HRRRX}\"..." -# internal_GSD=".true." -# fi if [ "${USE_CCPP}" = "TRUE" ]; then if [ "${CCPP_PHYS_SUITE}" = "FV3_GFS_2017_gfdlmp" ]; then numsoil_out="4" - elif [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" -o \ - "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then + elif [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" ] || \ + [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then numsoil_out="9" fi fi From ca224ddb04e4c058085bbb99a8ab235d40c8c769 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 13 Dec 2019 14:56:07 -0700 Subject: [PATCH 120/203] Fix wrong hash. --- Externals.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Externals.cfg b/Externals.cfg index 5c1db44ec..1b1009a98 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -12,7 +12,7 @@ protocol = git repo_url = https://github.com/NCAR/UFS_UTILS # Specify either a branch name or a hash but not both. 
#branch = feature/chgres_grib2 -hash = 52dbd8d6 +hash = 9dca4141 local_path = sorc/UFS_UTILS_chgres_grib2 required = True From baebb7330d447391d225d6ad766ef3dea7cb8838 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 13 Dec 2019 15:17:19 -0700 Subject: [PATCH 121/203] Clean up unused code and add informational print statements. --- .../calc_wrtcmp_grid_params_lambert_cnfrml.ncl | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/ush/NCL/calc_wrtcmp_grid_params_lambert_cnfrml.ncl b/ush/NCL/calc_wrtcmp_grid_params_lambert_cnfrml.ncl index fdfdd94d1..ae2d5d026 100644 --- a/ush/NCL/calc_wrtcmp_grid_params_lambert_cnfrml.ncl +++ b/ush/NCL/calc_wrtcmp_grid_params_lambert_cnfrml.ncl @@ -274,11 +274,9 @@ begin param_value := lat2 fmt_str = "%16.8f" else if (strcmp_exact(param_name, "nx")) then -; param_value := nxm param_value := nx fmt_str = "%10.0f" else if (strcmp_exact(param_name, "ny")) then -; param_value := nym param_value := ny fmt_str = "%10.0f" else if (strcmp_exact(param_name, "lon1")) then @@ -296,7 +294,7 @@ begin else msg := char_nl + \ -"Unknow parameter name specified for given output_coord_sys:" + char_nl + \ +"Unknown parameter name specified for given output_coord_sys:" + char_nl + \ " output_coord_sys = " + char_dq + output_coord_sys + char_dq + char_nl + \ " param_name = " + char_dq + param_name + char_dq + char_nl + \ "Stopping." @@ -355,6 +353,17 @@ begin ; ; ********************************************************************** ; +; Print out the write-component parameter values calculated above. +; +; ********************************************************************** +; + msg := char_nl + \ +"Write-component parameters corresponding to this native grid are:" + char_nl + print("" + msg) + print("" + lines_final) +; +; ********************************************************************** +; ; Loop through the set of parameters and find the line in the template ; file where each is set. Then replace that line with the corresponding ; line generated above containing the parameter name, its value, and the From b86ae3c431a3c1adb59ca08c24cb2348155643e8 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 13 Dec 2019 15:17:50 -0700 Subject: [PATCH 122/203] Change run_dir to expt_dir and get rid of references to work_dir (in order to follow the updated directory structure used by the workflow). 
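With the work directory gone, the NCL plotting scripts read everything they need from the experiment directory itself, pulling individual workflow variables out of the experiment's var_defns.sh file with sed rather than sourcing it. The same extraction can be reproduced at the shell prompt; shown here for the gtype variable, with an illustrative path to var_defns.sh:

  var_defns_fn="/path/to/expt_dir/var_defns.sh"       # illustrative location
  sed --silent --regexp-extended \
      --expression 's/(^gtype=)(["]*)([A-Za-z]+)(["]*)(.*)/\3/p' \
      "${var_defns_fn}"
  # For a line such as   gtype="regional"   this prints:   regional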
--- ush/NCL/plot_FV3SAR_field_native.ncl | 15 +++--- ush/NCL/process_plot_params.ncl | 81 ++++++++++------------------ ush/NCL/read_FV3SAR_grid_native.ncl | 13 +++-- 3 files changed, 41 insertions(+), 68 deletions(-) diff --git a/ush/NCL/plot_FV3SAR_field_native.ncl b/ush/NCL/plot_FV3SAR_field_native.ncl index 7d4f0ab86..39a2e6876 100644 --- a/ush/NCL/plot_FV3SAR_field_native.ncl +++ b/ush/NCL/plot_FV3SAR_field_native.ncl @@ -18,10 +18,10 @@ ;help = True -run_dir = "/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/FV3SAR_demo_PASPBL_20190725/expt_dirs/test_GSD_HRRR25km" -run_dir = "/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/FV3SAR_demo_PASPBL_20190725/expt_dirs/test_GSD_HRRR13km" -run_dir = "/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/FV3SAR_demo_PASPBL_20190725/expt_dirs/test_GSD_HRRR3km" -run_dir = "/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/FV3SAR_demo_PASPBL_20190725/expt_dirs/test_GSD_HAFSV0.A" +expt_dir = "/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/FV3SAR_demo_PASPBL_20190725/expt_dirs/test_GSD_HRRR25km" +expt_dir = "/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/FV3SAR_demo_PASPBL_20190725/expt_dirs/test_GSD_HRRR13km" +expt_dir = "/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/FV3SAR_demo_PASPBL_20190725/expt_dirs/test_GSD_HRRR3km" +expt_dir = "/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/FV3SAR_demo_PASPBL_20190725/expt_dirs/test_GSD_HAFSV0.A" tile_inds := (/ 1, 7, 4 /) @@ -465,8 +465,7 @@ end if num_tiles_to_plot = plot_params@num_tiles_to_plot inds_tiles_to_plot = plot_params@inds_tiles_to_plot cres = plot_params@cres - run_dir = plot_params@run_dir - work_dir = plot_params@work_dir + expt_dir = plot_params@expt_dir horiz_dist_units = plot_params@horiz_dist_units horiz_area_units = plot_params@horiz_area_units @@ -513,7 +512,7 @@ end if if (read_FV3SAR_wrtcmp) then - FV3SAR_wrtcmp_fn = run_dir + "/" + CDATE + "/" + "dynf000.nc" + FV3SAR_wrtcmp_fn = expt_dir + "/" + CDATE + "/" + "dynf000.nc" get_domain_bdy = True grid_info := read_FV3SAR_grid_wrtcmp( \ FV3SAR_wrtcmp_fn, \ @@ -533,7 +532,7 @@ end if else grid_info := read_FV3SAR_grid_native( \ - work_dir, \ + expt_dir, \ gtype, \ cres, \ inds_tiles_to_plot, \ diff --git a/ush/NCL/process_plot_params.ncl b/ush/NCL/process_plot_params.ncl index ac29977ab..6cf7790d9 100644 --- a/ush/NCL/process_plot_params.ncl +++ b/ush/NCL/process_plot_params.ncl @@ -110,29 +110,30 @@ usage_msg + char_nl + \ ; ; ********************************************************************** ; -; Verify that the run directory (run_dir) has been specified on the com- -; mand line. If so, verify that it exists. +; Verify that the experiment directory (expt_dir) has been specified on +; the command line. If so, verify that it exists. ; ; ********************************************************************** ; - if (.not. isvar("run_dir")) then + if (.not. isvar("expt_dir")) then msg := char_nl + \ -"The run directory (run_dir) has not been specified on the command line:" + char_nl + \ -" isvar(" + char_dq + "run_dir" + char_dq + ") = " + isvar("run_dir") + char_nl + \ -"Please rerun with the run directory specified. " + \ +"The experiment directory (expt_dir) has not been specified on the com-" + char_nl + \ +"mand line:" + char_nl + \ +" isvar(" + char_dq + "expt_dir" + char_dq + ") = " + isvar("expt_dir") + char_nl + \ +"Please rerun with the experiment directory specified. " + \ example_usage_and_help_str print("" + msg) exit - else if (.not. fileexists(run_dir)) then + else if (.not. 
fileexists(expt_dir)) then msg := char_nl + \ -"The specified run directory (run_dir) does not exist:" + char_nl + \ -" run_dir = " + char_dq + run_dir + char_dq + char_nl + \ -" fileexists(run_dir) = " + fileexists(run_dir) + char_nl + \ -"Please rerun with an existing run directory. " + \ +"The specified experiment directory (expt_dir) does not exist:" + char_nl + \ +" expt_dir = " + char_dq + expt_dir + char_dq + char_nl + \ +" fileexists(expt_dir) = " + fileexists(expt_dir) + char_nl + \ +"Please rerun with an existing experiment directory. " + \ example_usage_and_help_str print("" + msg) @@ -140,37 +141,16 @@ example_usage_and_help_str end if end if -; -; ********************************************************************** -; -; Set the work directory, i.e. the temporary directory in which the pre- -; processing tasks of the workflow place their output files. -; -; ********************************************************************** -; -; Don't need this line since we got rid of work_dirs and put all prepro- -; cessing output in subdirectories in the experiment directory. -; work_dir = str_sub_str(run_dir, "expt_dirs", "work_dirs") - work_dir = run_dir - - - ; ; ********************************************************************** ; ; Set the full path to the variable definitions file. This file con- -; tains various run parameters that will be needed below. +; tains various experiment and workflow parameters that will be needed +; below. ; ; ********************************************************************** ; - var_defns_fn = run_dir + "/var_defns.sh" - -; Need to change this to go up one level since we changed the directory -; structure for cycling. -; var_defns_fn = run_dir + "/../var_defns.sh" - - - + var_defns_fn = expt_dir + "/var_defns.sh" ; ; ********************************************************************** ; @@ -178,9 +158,9 @@ example_usage_and_help_str ; ; ********************************************************************** ; - cmd = "sed --silent --regexp-extended --expression " + \ - "'s/(^gtype=)([" + char_dq + "]*)([A-Za-z]+)([" + char_dq + "]*)(.*)/\3/p' " + \ - var_defns_fn + cmd = "sed --silent --regexp-extended --expression " \ + + "'s/(^gtype=)([" + char_dq + "]*)([A-Za-z]+)([" + char_dq + "]*)(.*)/\3/p' " \ + + var_defns_fn gtype := systemfunc(cmd) ; ; ********************************************************************** @@ -251,15 +231,16 @@ example_usage_and_help_str ; ; ********************************************************************** ; - cmd = "sed --silent --regexp-extended --expression " + \ - "'s/^[ ]*(RES=)([" + char_dq + "]*)([0-9]+)([" + char_dq + "]*)(.*)/\3/p' " + \ - var_defns_fn + cmd = "sed --silent --regexp-extended --expression " \ + + "'s/^[ ]*(RES=)([" + char_dq + "]*)([0-9]+)([" + char_dq + "]*)(.*)/\3/p' " \ + + var_defns_fn res := systemfunc(cmd) -res := res(0) -print("==>> res = " + res) -; res := tointeger(systemfunc(cmd)) -;print("==>> res = " + res) -pause +; +; Get the last occurrence of "RES=..." in the variable defintions file +; since that's the one that matters. 
+; + num_elems = dimsizes(res) + res := res(num_elems-1) ; ; ********************************************************************** ; @@ -301,11 +282,6 @@ end if ; ; cres := "C" + tostring_with_format(res, "%i") cres := "C" + res -print("==>> cres = " + char_dq + cres + char_dq) -pause - - - @@ -1037,8 +1013,7 @@ char_dq + ".halo4" + char_dq + ")] should not be specified on the command line ( plot_params@num_tiles_to_plot = num_tiles_to_plot plot_params@inds_tiles_to_plot = inds_tiles_to_plot plot_params@cres = cres - plot_params@run_dir = run_dir - plot_params@work_dir = work_dir + plot_params@expt_dir = expt_dir plot_params@horiz_dist_units = horiz_dist_units plot_params@horiz_area_units = horiz_area_units diff --git a/ush/NCL/read_FV3SAR_grid_native.ncl b/ush/NCL/read_FV3SAR_grid_native.ncl index 5947bf670..bb9b21c85 100644 --- a/ush/NCL/read_FV3SAR_grid_native.ncl +++ b/ush/NCL/read_FV3SAR_grid_native.ncl @@ -14,10 +14,10 @@ ; ; The input arguments to this function are as follows: ; -; work_dir: -; This is the work directory created by the FV3SAR workflow. The grid -; specification files corresponding to the cubed-sphere tiles are within -; subdirectories under this directory. +; expt_dir: +; This is the experiment directory created by the FV3SAR workflow gene- +; ration script. The grid specification files corresponding to the +; cubed-sphere tiles are within subdirectories under this directory. ; ; gtype: ; This is a string containing the grid type of the FV3 cubed-sphere grid @@ -70,7 +70,7 @@ loadscript(lib_location + "adjust_longitude_range.ncl") undef("read_FV3SAR_grid_native") function read_FV3SAR_grid_native( \ - work_dir:string, \ + expt_dir:string, \ gtype:string, \ cres:string, \ tile_inds:integer, \ @@ -230,11 +230,10 @@ begin grid_fn = cres + "_grid.tile" + n_tile if (strcmp_exact(gtype, "regional") .and. (n_tile .eq. 7)) then grid_fn = grid_fn + ".halo" + tostring(nhalo_T7) + ".nc" - grid_fn = work_dir + "/fix_sar/" + grid_fn else grid_fn = grid_fn + ".nc" - grid_fn = work_dir + "/grid/" + grid_fn end if + grid_fn = expt_dir + "/fix_sar/" + grid_fn grid_fn_all_tiles(nn) = grid_fn print(" grid_fn_all_tiles(" + nn + ") = " + \ From f261170810566212bda873fadc2c3f330d0c60b3 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 13 Dec 2019 15:19:48 -0700 Subject: [PATCH 123/203] Update path from theia to hera. --- ush/NCL/plot_FV3SAR_field_native.ncl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/NCL/plot_FV3SAR_field_native.ncl b/ush/NCL/plot_FV3SAR_field_native.ncl index 39a2e6876..c882baa85 100644 --- a/ush/NCL/plot_FV3SAR_field_native.ncl +++ b/ush/NCL/plot_FV3SAR_field_native.ncl @@ -640,7 +640,7 @@ end if ; rd := "/scratch3/BMC/det/Gerard.Ketefian/UFS_CAM_test_instructions/expt_dirs/NX1800_NY1120_A0p21423_Kmns0p23209_HRRR_test_cycl_slurm_01/2017090700" - rd := "/scratch3/BMC/det/Gerard.Ketefian" + rd := "/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM" wrtcmp_config_fn := rd + "/model_configure" wrtcmp_config_tmpl_fn = wrtcmp_config_fn + ".tmpl" From d7bbfc3cf7ec20f87bff1b6763b4f330a1cc7103 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 13 Dec 2019 16:32:14 -0700 Subject: [PATCH 124/203] Improve informational messages and comments. 
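Two sets of messages are touched below: the error header assembled by print_err_msg_exit now identifies the failing script by file name and full path (instead of file name and directory), and the comments in the rotated-latlon write-component template now state explicitly which parameters are expressed in the non-rotated latlon system (cen_lon, cen_lat) and which in the rotated system (lon1/lat1 and lon2/lat2, the lower-left and upper-right grid points). A rough rendering of the revised error header; the script name and path are illustrative, since the real values come from caller_fn and caller_fp inside print_msg.sh:

  printf '\nERROR:\n  From script: "%s"\n  Full path to script: "%s"\n' \
         "setup.sh" "/path/to/regional_workflow/ush/setup.sh"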
--- ush/bash_utils/print_msg.sh | 8 ++++---- ush/templates/wrtcmp_rotated_latlon | 12 ++++++------ 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/ush/bash_utils/print_msg.sh b/ush/bash_utils/print_msg.sh index 5a48d4ad0..544357f62 100644 --- a/ush/bash_utils/print_msg.sh +++ b/ush/bash_utils/print_msg.sh @@ -239,8 +239,8 @@ function print_err_msg_exit() { msg_header=$( printf "\n\ ERROR: - From script in file: \"${caller_fn}\" - In directory: \"${caller_dir}\" + From script: \"${caller_fn}\" + Full path to script: \"${caller_fp}\" " ) @@ -249,8 +249,8 @@ ERROR: msg_header=$( printf "\n\ ERROR: From function: \"${caller_name}\" - In file: \"${caller_fn}\" - In directory: \"${caller_dir}\" + In file: \"${caller_fn}\" + Full path to file: \"${caller_fp}\" " ) diff --git a/ush/templates/wrtcmp_rotated_latlon b/ush/templates/wrtcmp_rotated_latlon index d7122feac..28cad6460 100644 --- a/ush/templates/wrtcmp_rotated_latlon +++ b/ush/templates/wrtcmp_rotated_latlon @@ -8,12 +8,12 @@ write_nemsioflip: .false. write_fsyncflag: .false. output_grid: # Coordinate system of output grid. -cen_lon: # central longitude -cen_lat: # central latitude -lon1: # longitude of lower-left point in non-rotated coordinate system (in degrees) -lat1: # latitude of lower-left -lon2: # longitude of upper-right -lat2: # latitude of upper-right +cen_lon: # Longitude of center of grid, expressed in the NON-ROTATED latlon coordinate system (in degrees). This is also the longitude of the point at which the equator and prime meridian of the ROTATED coordinate system intersect (i.e. the point at which the longitude and latitude in the ROTATED latlon coordinate system are both 0). +cen_lat: # Latitude of center of grid, expressed in the NON-ROTATED latlon coordinate system (in degrees). This is also the latitude of the point at which the equator and prime meridian of the ROTATED coordinate system intersect (i.e. the point at which the longitude and latitude in the ROTATED latlon coordinate system are both 0). +lon1: # Longitude of lower-left grid point, expressed in the ROTATED latlon coordinate system (in degrees). +lat1: # Latitude of lower-left grid point, expressed in the ROTATED latlon coordinate system (in degrees). +lon2: # Longitude of upper-right grid point, expressed in the ROTATED latlon coordinate system (in degrees). +lat2: # Latitude of upper-right grid point, expressed in the ROTATED latlon coordinate system (in degrees). dlon: dlat: From 13436ac5bbb87ae57a539baaa3230f44757874d7 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 13 Dec 2019 16:55:06 -0700 Subject: [PATCH 125/203] Add baseline for an end-to-end workflow test for the HRRR Alaska 50km grid. --- tests/baseline_configs/config.regional_005.sh | 57 +++++++++++++++++++ 1 file changed, 57 insertions(+) create mode 100644 tests/baseline_configs/config.regional_005.sh diff --git a/tests/baseline_configs/config.regional_005.sh b/tests/baseline_configs/config.regional_005.sh new file mode 100644 index 000000000..a9544b094 --- /dev/null +++ b/tests/baseline_configs/config.regional_005.sh @@ -0,0 +1,57 @@ +# +# MACHINE will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +MACHINE="" +# +# ACCOUNT will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +ACCOUNT="" +# +# EXPT_SUBDIR will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to a value obtained from the name of this file. 
+# +EXPT_SUBDIR="" +# +# USE_CRON_TO_RELAUNCH may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. +# +USE_CRON_TO_RELAUNCH="TRUE" +# +# CRON_RELAUNCH_INTVL_MNTS may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. +# +CRON_RELAUNCH_INTVL_MNTS="02" + + +QUEUE_DEFAULT="batch" +QUEUE_HPSS="service" +QUEUE_FCST="batch" + +VERBOSE="TRUE" + +RUN_ENVIR="community" +PREEXISTING_DIR_METHOD="rename" + +PREDEF_GRID_NAME="GSD_HRRR_AK_50km" +GRID_GEN_METHOD="JPgrid" +QUILTING="TRUE" +USE_CCPP="TRUE" +CCPP_PHYS_SUITE="FV3_GSD_SAR" +FCST_LEN_HRS="06" +LBC_UPDATE_INTVL_HRS="6" + +DATE_FIRST_CYCL="20190520" +DATE_LAST_CYCL="20190520" +CYCL_HRS=( "00" ) + +EXTRN_MDL_NAME_ICS="RAPX" +EXTRN_MDL_NAME_LBCS="RAPX" + +RUN_TASK_MAKE_GRID="TRUE" +RUN_TASK_MAKE_OROG="TRUE" +RUN_TASK_MAKE_SFC_CLIMO="TRUE" + From 7dcc2f12e3695dbd53b9022e95600517dcefddd2 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 13 Dec 2019 16:57:59 -0700 Subject: [PATCH 126/203] Import any changes to NCL scripts made in other (older) branches to here and commit them. These files have not been tested and still need to be cleaned up. --- ush/NCL/calc_wrtcmp_grid_params.ncl | 488 ++++++++++++++++++ ...calc_wrtcmp_grid_params_lambert_cnfrml.ncl | 8 +- ...calc_wrtcmp_grid_params_rotated_latlon.ncl | 382 ++++++++++++++ ush/NCL/find_wrtcmp_grid_params.ncl | 294 +++++++++++ ush/NCL/get_wrtcmp_grid.ncl | 335 ++++++++---- .../calc_lambert_cnfrml_coords_from_sphr.ncl | 8 +- .../calc_rotated_sphr_coords_from_sphr.ncl | 104 ++++ .../calc_sphr_coords_from_lambert_cnfrml.ncl | 8 +- .../calc_sphr_coords_from_rotated_sphr.ncl | 105 ++++ ...om_to_sphr_coords_to_from_rotated_sphr.ncl | 162 ++++++ ush/NCL/plot_FV3SAR_field_native.ncl | 173 +++++-- ush/NCL/process_plot_params.ncl | 4 + ush/NCL/read_FV3SAR_grid_native.ncl | 6 +- 13 files changed, 1907 insertions(+), 170 deletions(-) create mode 100644 ush/NCL/calc_wrtcmp_grid_params.ncl create mode 100644 ush/NCL/calc_wrtcmp_grid_params_rotated_latlon.ncl create mode 100644 ush/NCL/find_wrtcmp_grid_params.ncl create mode 100644 ush/NCL/lib/calc_rotated_sphr_coords_from_sphr.ncl create mode 100644 ush/NCL/lib/calc_sphr_coords_from_rotated_sphr.ncl create mode 100644 ush/NCL/lib/convert_from_to_sphr_coords_to_from_rotated_sphr.ncl diff --git a/ush/NCL/calc_wrtcmp_grid_params.ncl b/ush/NCL/calc_wrtcmp_grid_params.ncl new file mode 100644 index 000000000..94682c789 --- /dev/null +++ b/ush/NCL/calc_wrtcmp_grid_params.ncl @@ -0,0 +1,488 @@ +; +; ********************************************************************** +; +; Load files. 
+; +; ********************************************************************** +; +loadscript(lib_location + "pause.ncl") +loadscript(lib_location + "constants.ncl") +loadscript(lib_location + "strcmp_exact.ncl") +loadscript(lib_location + "strpad.ncl") +loadscript(lib_location + "repeat_str.ncl") +loadscript(lib_location + "convert_from_to_sphr_coords_to_from_rotated_sphr.ncl") +loadscript(lib_location + "convert_sphr_coords_to_lambert_cnfrml.ncl") +loadscript(lib_location + "convert_lambert_cnfrml_coords_to_sphr.ncl") + +undef("calc_wrtcmp_grid_params") + +function calc_wrtcmp_grid_params( \ + wrtcmp_config_fn:string, \ + lon_ctr_native:snumeric, lat_ctr_native:snumeric, \ + lon_tile_corners_face_midpts_native[8]:snumeric, \ + lat_tile_corners_face_midpts_native[8]:snumeric, \ + dx_native:snumeric, dy_native:snumeric, \ + angle_units:string) + +local lon_ctr, lat_ctr, lat1, lat2, \ + x_SW_native, y_SW_native, \ + num_gap_cells, \ + dx, x_SW, x_NE, Lx, Lx_ovr_dx, nx, frac_x, x_increment, \ + dy, y_SW, y_NE, Ly, Ly_ovr_dy, ny, frac_y, y_increment, \ + sphr_coords, lon_SW, lat_SW, \ + param_names, num_params, param_names_and_vals, trailing_comments, \ + np, param_name, param_value, param_value_str, \ + regex_search, regex_print, sed_cmd, \ + fmt_str, msg, \ + str_lens, str_len_max, num_delimit_spaces, delimit_spaces, \ + lines_final, regex_replace, sed_output, \ + out + +begin +; +; ********************************************************************** +; +; If not already defined, define the string (separator_line) that serves +; as a separator line between different sections of printout. +; +; ********************************************************************** +; + if (.not. isvar("separator_line")) then + separator_line := repeat_str("=", 72) + end if +; +; ********************************************************************** +; +; Set the coordinates of the center of the write-component output grid +; to be equal to those of the native grid. Also, set the latitudes de- +; fining the two standard parallels of the Lambert conformal projection +; used by the output grid to the latitude of the output grid center. +; +; ********************************************************************** +; +; lon_ctr = lon_ctr_native +; lat_ctr = lat_ctr_native +; lat1 = lat_ctr +; lat2 = lat_ctr +; +; ********************************************************************** +; +; Calculate the Lambert coordinates of the southwest corner of the na- +; tive grid from its spherical coordinates. 
+; +; ********************************************************************** +; + if (strcmp_exact(wrtcmp_coord_sys, "rotated_latlon")) then + + lon_ctr = lon_ctr_native + lat_ctr = lat_ctr_native + + rotated_sphr_coords \ + := convert_from_to_sphr_coords_to_from_rotated_sphr( \ + lon_ctr, lat_ctr, angle_units, 1, \ + lon_tile_corners_face_midpts_native, \ + lat_tile_corners_face_midpts_native) + lon_verts = sphr_coords@lon_out + lat_verts = sphr_coords@lat_out + x_tile_corners_face_midpts_native = rotated_sphr_coords@lon_out + y_tile_corners_face_midpts_native = rotated_sphr_coords@lat_out + + else if (strcmp_exact(wrtcmp_coord_sys, "lambert_conformal")) then + + lon_ctr = lon_ctr_native + lat_ctr = lat_ctr_native + lat1 = lat_ctr + lat2 = lat_ctr + + lambert_coords \ + := convert_sphr_coords_to_lambert_cnfrml( \ + lon_ctr, lat_ctr, lat1, lat2, rad_Earth, angle_units, \ + lon_tile_corners_face_midpts_native, \ + lat_tile_corners_face_midpts_native) + x_tile_corners_face_midpts_native = lambert_coords@x + y_tile_corners_face_midpts_native = lambert_coords@y + + end if + end if + + i = 0 + x_SW_native = x_tile_corners_face_midpts_native(i) + y_SW_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_S_native = x_tile_corners_face_midpts_native(i) + y_S_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_SE_native = x_tile_corners_face_midpts_native(i) + y_SE_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_E_native = x_tile_corners_face_midpts_native(i) + y_E_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_NE_native = x_tile_corners_face_midpts_native(i) + y_NE_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_N_native = x_tile_corners_face_midpts_native(i) + y_N_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_NW_native = x_tile_corners_face_midpts_native(i) + y_NW_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_W_native = x_tile_corners_face_midpts_native(i) + y_W_native = y_tile_corners_face_midpts_native(i) +; +; ********************************************************************** +; +; +; +; ********************************************************************** +; + dx = dx_native + dy = dy_native +; num_margin_cells = 1 + num_margin_cells = 5 +; num_margin_cells = 100 +; +; ********************************************************************** +; +; Reduce the extent of the write-component grid in both the positive and +; negative x directions until the latitude of the center of the west +; face of the write-component grid is greater than that of the native +; grid, and the latitude of the center of the east face of the write- +; component grid is less than that of the native grid (i.e. the write- +; component grid lies within the native grid in the x direction). Then, +; as an extra safety measure, reduce each of these extents by a further +; nc_reduce_extra_max cells of size dx. 
+; +; ********************************************************************** +; + x_W_native_max = max((/x_SW_native, x_W_native, x_NW_native/)) + x_E_native_min = min((/x_SE_native, x_E_native, x_NE_native/)) + + x_W = x_W_native_max + num_margin_cells*dx + x_E = x_E_native_min - num_margin_cells*dx + + Lx = x_E - x_W + Lx_ovr_dx = Lx/dx + nx = tointeger(Lx_ovr_dx) + frac_x = Lx_ovr_dx - nx + x_adj = (0.5d+0*frac_x)*dx + x_W = x_W + x_adj + x_E = x_E - x_adj +; +; ********************************************************************** +; +; +; +; ********************************************************************** +; + y_S_native_max = max((/y_SW_native, y_S_native, y_SE_native/)) + y_N_native_min = min((/y_NW_native, y_N_native, y_NE_native/)) + + y_S = y_S_native_max + num_margin_cells*dy + y_N = y_N_native_min - num_margin_cells*dy + + y_S_tmp = y_S + y_N_tmp = y_N + y_S = -min(abs((/y_S_tmp, y_N_tmp/))) + y_N = -y_S + + Ly = y_N - y_S + Ly_ovr_dy = Ly/dy + ny = tointeger(Ly_ovr_dy) + frac_y = Ly_ovr_dy - ny + y_adj = (0.5d+0*frac_y)*dy + y_S = y_S + y_adj + y_N = y_N - y_adj +; +; ********************************************************************** +; +; Calculate the spherical coordinates of the southwest corner of the na- +; tive grid from its Lambert coordinates. +; +; ********************************************************************** +; + x_W = x_W + 0.5*dx + x_E = x_E - 0.5*dx + + y_S = y_S + 0.5*dy + y_N = y_N - 0.5*dy + + if (strcmp_exact(wrtcmp_coord_sys, "rotated_latlon")) then + sphr_coords \ + := convert_from_to_sphr_coords_to_from_rotated_sphr( \ + lon_ctr, lat_ctr, angle_units, -1, \ + x_W, y_S) + lon_SW = sphr_coords@lon_out + lat_SW = sphr_coords@lat_out + else if (strcmp_exact(wrtcmp_coord_sys, "lambert_conformal")) then + sphr_coords \ + := convert_lambert_cnfrml_coords_to_sphr( \ + lon_ctr, lat_ctr, lat1, lat2, rad_Earth, angle_units, \ + x_W, y_S) + lon_SW = sphr_coords@lon + lat_SW = sphr_coords@lat + end if + end if +; +; ********************************************************************** +; +; Create a string array containing the names of the Lambert conformal +; output grid parameters that appear in the NEMS model_configure file. +; +; ********************************************************************** +; + if (strcmp_exact(wrtcmp_coord_sys, "rotated_latlon")) then + + param_names = (/ \ + "output_grid", \ + "cen_lon", \ + "cen_lat", \ + "lon1", \ + "lat1", \ + "lon2", \ + "lat2", \ + "dlon", \ + "dlat" /) + + else if (strcmp_exact(wrtcmp_coord_sys, "lambert_conformal")) then + + param_names = (/ \ + "output_grid", \ + "cen_lon", \ + "cen_lat", \ + "stdlat1", \ + "stdlat2", \ + "nx", \ + "ny", \ + "lon1", \ + "lat1", \ + "dx", \ + "dy" /) + + end if + end if +; +; ********************************************************************** +; +; Get the number of Lambert conformal output grid parameters that need +; to be se tin the NEMS model_configure file. Then initialize string +; arrays needed in setting these parameters. 
+; +; ********************************************************************** +; + num_params = dimsizes(param_names) + param_names_and_vals := new(num_params, "string") + trailing_comments := new(num_params, "string") +; +; ********************************************************************** +; +; +; +; ********************************************************************** +; + np = 0 + param_name = param_names(np) +; param_value := char_sq + "lambert_conformal" + char_sq + param_value := char_sq + wrtcmp_coord_sys + char_sq + param_value_str := tostring(param_value) + + regex_search = "^(\s*" + param_name + ":\s+)(<" + param_name + ">)(\s*)(.*)" + + regex_print = "\1" + param_value_str + sed_cmd = "sed -n -r -e " + char_dq + "s|" + regex_search + "|" \ + + regex_print + "|p" + char_dq + " " + wrtcmp_config_fn + param_names_and_vals(np) = systemfunc(sed_cmd) + + regex_print = "\4" + sed_cmd = "sed -n -r -e " + char_dq + "s|" + regex_search + "|" \ + + regex_print + "|p" + char_dq + " " + wrtcmp_config_fn + trailing_comments(np) = systemfunc(sed_cmd) +; +; ********************************************************************** +; +; +; +; ********************************************************************** +; + do np=1, num_params-1 + + param_name := param_names(np) + unrecognized_param = False + + if (strcmp_exact(wrtcmp_coord_sys, "rotated_latlon")) then + + if (strcmp_exact(param_name, "cen_lon")) then + param_value := lon_ctr + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "cen_lat")) then + param_value := lat_ctr + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "lon1")) then + param_value := rot_lon_SW + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "lat1")) then + param_value := rot_lat_SW + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "lon2")) then + param_value := rot_lon_NE + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "lat2")) then + param_value := rot_lat_NE + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "dlon")) then + param_value := dlon + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "dlat")) then + param_value := dlat + fmt_str = "%16.8f" + else + unrecognized_param = True + end if + end if + end if + end if + end if + end if + end if + end if + end if + + else if (strcmp_exact(wrtcmp_coord_sys, "lambert_conformal")) then + + if (strcmp_exact(param_name, "cen_lon")) then + param_value := lon_ctr + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "cen_lat")) then + param_value := lat_ctr + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "stdlat1")) then + param_value := lat1 + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "stdlat2")) then + param_value := lat2 + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "nx")) then +; param_value := nxm + param_value := nx + fmt_str = "%10.0f" + else if (strcmp_exact(param_name, "ny")) then +; param_value := nym + param_value := ny + fmt_str = "%10.0f" + else if (strcmp_exact(param_name, "lon1")) then + param_value := lon_SW + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "lat1")) then + param_value := lat_SW + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "dx")) then + param_value := dx + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "dy")) then + param_value := dy + fmt_str = "%16.8f" + else + unrecognized_param = True + end if + end if + end if + end if + end if + end if + end if + end if + end if + + end if +; +; ********************************************************************** +; +; +; +; 
**********************************************************************
+;
+  if (unrecognized_param) then
+    msg := char_nl + \
+"Unknown parameter name specified for the given write-component output" + char_nl + \
+"grid coordinate system (wrtcmp_coord_sys):" + char_nl + \
+"  wrtcmp_coord_sys = " + char_dq + wrtcmp_coord_sys + char_dq + char_nl + \
+"  param_name = " + char_dq + param_name + char_dq + char_nl + \
+"Stopping."
+    print("" + msg)
+    exit
+  end if
+;
+; Generate a string containing the parameter value and formatted as spe-
+; cified by fmt_str.  Then strip any leading and trailing whitespace
+; from it.
+;
+  param_value_str := sprintf(fmt_str, param_value)
+  param_value_str := str_strip(param_value_str)
+;
+; Set the regular expression to search for.
+;
+  regex_search = "^(\s*" + param_name + ":\s+)(<" + param_name + ">)(\s*)(.*)"
+;
+; Get the parameter name and value without the trailing comment (if any).
+;
+  regex_print = "\1" + param_value_str
+  sed_cmd = "sed -n -r -e " + char_dq + "s|" + regex_search + "|" \
+          + regex_print + "|p" + char_dq + " " + wrtcmp_config_fn
+  param_names_and_vals(np) = systemfunc(sed_cmd)
+;
+; Get the trailing comment (if any).
+;
+  regex_print = "\4"
+  sed_cmd = "sed -n -r -e " + char_dq + "s|" + regex_search + "|" \
+          + regex_print + "|p" + char_dq + " " + wrtcmp_config_fn
+  trailing_comments(np) = systemfunc(sed_cmd)
+
+  end do
+;
+; **********************************************************************
+;
+; Generate a string array containing each line in the model_configure
+; file that specifies a parameter describing the write-component output
+; grid.  Each such line will contain the parameter name, value, and any
+; trailing comments, with the trailing comments aligned for readability.
+;
+; **********************************************************************
+;
+  lines_final := strpad(param_names_and_vals, " ", "right")
+  lines_final := lines_final + " " + trailing_comments
+;
+; **********************************************************************
+;
+; Loop through the set of parameters and find the line in the template
+; file where each is set.  Then replace that line with the corresponding
+; line generated above containing the parameter name, its value, and the
+; optional aligned comment.
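The substitutions above and below assume that the template file contains placeholder lines of the form "name:   <name>". The following self-contained NCL sketch reproduces the same sed mechanism on a scratch file; the file name, parameter name, and value are hypothetical, and GNU sed is assumed (as it is by the function itself):

    dq      = str_get_dq()
    demo_fn = "/tmp/wrtcmp_sed_demo.txt"   ; hypothetical scratch file
    junk    = systemfunc("echo 'cen_lon:   <cen_lon>   # center lon' > " + demo_fn)
    name     = "cen_lon"
    new_line = "cen_lon:   -97.50000000   # center lon"
    regex_search = "^(\s*" + name + ":\s+)(<" + name + ">)(\s*)(.*)"
    sed_cmd = "sed -i -r -e " + dq + "s|" + regex_search + "|" \
            + new_line + "|" + dq + " " + demo_fn
    junk = systemfunc(sed_cmd)
    print("" + systemfunc("cat " + demo_fn))   ; -> cen_lon:   -97.50000000   # center lon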
+; +; ********************************************************************** +; + do np=0, num_params-1 + param_name = param_names(np) + regex_search = "^(\s*" + param_name + ":\s+)(<" + param_name + ">)(\s*)(.*)" + regex_replace = lines_final(np) + sed_cmd = "sed -i -r -e " + char_dq + "s|" + regex_search + "|" \ + + regex_replace + "|" + char_dq + " " + wrtcmp_config_fn + sed_output = systemfunc(sed_cmd) + end do +; +; ********************************************************************** +; +; +; +; ********************************************************************** +; + out = True + return(out) + +end + diff --git a/ush/NCL/calc_wrtcmp_grid_params_lambert_cnfrml.ncl b/ush/NCL/calc_wrtcmp_grid_params_lambert_cnfrml.ncl index ae2d5d026..d747f33d0 100644 --- a/ush/NCL/calc_wrtcmp_grid_params_lambert_cnfrml.ncl +++ b/ush/NCL/calc_wrtcmp_grid_params_lambert_cnfrml.ncl @@ -10,8 +10,8 @@ loadscript(lib_location + "constants.ncl") loadscript(lib_location + "strcmp_exact.ncl") loadscript(lib_location + "strpad.ncl") loadscript(lib_location + "repeat_str.ncl") -loadscript(lib_location + "calc_lambert_cnfrml_coords_from_sphr.ncl") -loadscript(lib_location + "calc_sphr_coords_from_lambert_cnfrml.ncl") +loadscript(lib_location + "convert_sphr_coords_to_lambert_cnfrml.ncl") +loadscript(lib_location + "convert_lambert_cnfrml_coords_to_sphr.ncl") undef("calc_wrtcmp_grid_params_lambert_cnfrml") @@ -72,7 +72,7 @@ begin ; ********************************************************************** ; lambert_coords \ - := calc_lambert_cnfrml_coords_from_sphr( \ + := convert_sphr_coords_to_lambert_cnfrml( \ lon_ctr, lat_ctr, lat1, lat2, rad_Earth, angle_units, \ lon_tile_corners_face_midpts_native, \ lat_tile_corners_face_midpts_native) @@ -190,7 +190,7 @@ begin y_N = y_N - 0.5*dy sphr_coords \ - := calc_sphr_coords_from_lambert_cnfrml( \ + := convert_lambert_cnfrml_coords_to_sphr( \ lon_ctr, lat_ctr, lat1, lat2, rad_Earth, angle_units, \ x_W, y_S) lon_SW = sphr_coords@lon diff --git a/ush/NCL/calc_wrtcmp_grid_params_rotated_latlon.ncl b/ush/NCL/calc_wrtcmp_grid_params_rotated_latlon.ncl new file mode 100644 index 000000000..4038b69ca --- /dev/null +++ b/ush/NCL/calc_wrtcmp_grid_params_rotated_latlon.ncl @@ -0,0 +1,382 @@ +; +; ********************************************************************** +; +; Load files. 
+; +; ********************************************************************** +; +loadscript(lib_location + "pause.ncl") +loadscript(lib_location + "constants.ncl") +loadscript(lib_location + "strcmp_exact.ncl") +loadscript(lib_location + "strpad.ncl") +loadscript(lib_location + "repeat_str.ncl") +loadscript(lib_location + "calc_rotated_sphr_coords_from_sphr.ncl") +loadscript(lib_location + "calc_sphr_coords_from_rotated_sphr.ncl") + +undef("calc_wrtcmp_grid_params_rotated_latlon") + +function calc_wrtcmp_grid_params_rotated_latlon( \ + wrtcmp_config_fn:string, \ + lon_ctr_native:snumeric, lat_ctr_native:snumeric, \ + lon_tile_corners_face_midpts_native[8]:snumeric, \ + lat_tile_corners_face_midpts_native[8]:snumeric, \ + dx_native:snumeric, dy_native:snumeric, \ + angle_units:string) + +local lon_ctr, lat_ctr, lat1, lat2, \ + x_SW_native, y_SW_native, \ + num_gap_cells, \ + dx, x_SW, x_NE, Lx, Lx_ovr_dx, nx, frac_x, x_increment, \ + dy, y_SW, y_NE, Ly, Ly_ovr_dy, ny, frac_y, y_increment, \ + sphr_coords, lon_SW, lat_SW, \ + param_names, num_params, param_names_and_vals, trailing_comments, \ + np, param_name, param_value, param_value_str, \ + regex_search, regex_print, sed_cmd, \ + fmt_str, msg, \ + str_lens, str_len_max, num_delimit_spaces, delimit_spaces, \ + lines_final, regex_replace, sed_output, \ + out + +begin +; +; ********************************************************************** +; +; If not already defined, define the string (separator_line) that serves +; as a separator line between different sections of printout. +; +; ********************************************************************** +; + if (.not. isvar("separator_line")) then + separator_line := repeat_str("=", 72) + end if +; +; ********************************************************************** +; +; Set the coordinates of the center of the write-component output grid +; to be equal to those of the native grid. Also, set the latitudes de- +; fining the two standard parallels of the Lambert conformal projection +; used by the output grid to the latitude of the output grid center. +; +; ********************************************************************** +; + lon_ctr = lon_ctr_native + lat_ctr = lat_ctr_native + lat1 = lat_ctr + lat2 = lat_ctr +; +; ********************************************************************** +; +; Calculate the Lambert coordinates of the southwest corner of the na- +; tive grid from its spherical coordinates. 
+; +; ********************************************************************** +; + lambert_coords \ + := calc_rotated_sphr_coords_from_sphr( \ + lon_ctr, lat_ctr, lat1, lat2, angle_units, \ + lon_tile_corners_face_midpts_native, \ + lat_tile_corners_face_midpts_native) + x_tile_corners_face_midpts_native = lambert_coords@x + y_tile_corners_face_midpts_native = lambert_coords@y + + i = 0 + x_SW_native = x_tile_corners_face_midpts_native(i) + y_SW_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_S_native = x_tile_corners_face_midpts_native(i) + y_S_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_SE_native = x_tile_corners_face_midpts_native(i) + y_SE_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_E_native = x_tile_corners_face_midpts_native(i) + y_E_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_NE_native = x_tile_corners_face_midpts_native(i) + y_NE_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_N_native = x_tile_corners_face_midpts_native(i) + y_N_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_NW_native = x_tile_corners_face_midpts_native(i) + y_NW_native = y_tile_corners_face_midpts_native(i) + + i = i + 1 + x_W_native = x_tile_corners_face_midpts_native(i) + y_W_native = y_tile_corners_face_midpts_native(i) +; +; ********************************************************************** +; +; +; +; ********************************************************************** +; + dx = dx_native + dy = dy_native +; num_margin_cells = 1 + num_margin_cells = 5 +; num_margin_cells = 100 +; +; ********************************************************************** +; +; Reduce the extent of the write-component grid in both the positive and +; negative x directions until the latitude of the center of the west +; face of the write-component grid is greater than that of the native +; grid, and the latitude of the center of the east face of the write- +; component grid is less than that of the native grid (i.e. the write- +; component grid lies within the native grid in the x direction). Then, +; as an extra safety measure, reduce each of these extents by a further +; nc_reduce_extra_max cells of size dx. +; +; ********************************************************************** +; + x_W_native_max = max((/x_SW_native, x_W_native, x_NW_native/)) + x_E_native_min = min((/x_SE_native, x_E_native, x_NE_native/)) + + x_W = x_W_native_max + num_margin_cells*dx + x_E = x_E_native_min - num_margin_cells*dx + + Lx = x_E - x_W + Lx_ovr_dx = Lx/dx + nx = tointeger(Lx_ovr_dx) + frac_x = Lx_ovr_dx - nx + x_adj = (0.5d+0*frac_x)*dx + x_W = x_W + x_adj + x_E = x_E - x_adj +; +; ********************************************************************** +; +; +; +; ********************************************************************** +; + y_S_native_max = max((/y_SW_native, y_S_native, y_SE_native/)) + y_N_native_min = min((/y_NW_native, y_N_native, y_NE_native/)) + + y_S = y_S_native_max + num_margin_cells*dy + y_N = y_N_native_min - num_margin_cells*dy + + y_S_tmp = y_S + y_N_tmp = y_N + y_S = -min(abs((/y_S_tmp, y_N_tmp/))) + y_N = -y_S + + Ly = y_N - y_S + Ly_ovr_dy = Ly/dy + ny = tointeger(Ly_ovr_dy) + frac_y = Ly_ovr_dy - ny + y_adj = (0.5d+0*frac_y)*dy + y_S = y_S + y_adj + y_N = y_N - y_adj +; +; ********************************************************************** +; +; Calculate the spherical coordinates of the southwest corner of the na- +; tive grid from its Lambert coordinates. 
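The half-cell shift applied in the code below moves the limits from the outer edges of the write-component grid to the centers of its first cells, presumably because lon1 and lat1 in model_configure refer to a cell center rather than a grid edge; in symbols,

\[
x_1 = x_W + \tfrac{1}{2}\Delta x, \qquad
y_1 = y_S + \tfrac{1}{2}\Delta y,
\]

and these are then transformed back to (non-rotated) longitude and latitude to give lon1 and lat1.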
+; +; ********************************************************************** +; + x_W = x_W + 0.5*dx + x_E = x_E - 0.5*dx + + y_S = y_S + 0.5*dy + y_N = y_N - 0.5*dy + + sphr_coords \ + := calc_sphr_coords_from_rotated_sphr( \ + lon_ctr, lat_ctr, lat1, lat2, rad_Earth, angle_units, \ + x_W, y_S) + lon_SW = sphr_coords@lon + lat_SW = sphr_coords@lat +; +; ********************************************************************** +; +; Create a string array containing the names of the Lambert conformal +; output grid parameters that appear in the NEMS model_configure file. +; +; ********************************************************************** +; + param_names = (/ \ + "output_grid", \ + "cen_lon", \ + "cen_lat", \ + "lon1", \ + "lat1", \ + "lon2", \ + "lat2", \ + "dlon", \ + "dlat" /) +; +; ********************************************************************** +; +; Get the number of Lambert conformal output grid parameters that need +; to be se tin the NEMS model_configure file. Then initialize string +; arrays needed in setting these parameters. +; +; ********************************************************************** +; + num_params = dimsizes(param_names) + param_names_and_vals := new(num_params, "string") + trailing_comments := new(num_params, "string") +; +; ********************************************************************** +; +; +; +; ********************************************************************** +; + np = 0 + param_name = param_names(np) + param_value := char_sq + "rotated_latlon" + char_sq + param_value_str := tostring(param_value) + + regex_search = "^(\s*" + param_name + ":\s+)(<" + param_name + ">)(\s*)(.*)" + + regex_print = "\1" + param_value_str + sed_cmd = "sed -n -r -e " + char_dq + "s|" + regex_search + "|" \ + + regex_print + "|p" + char_dq + " " + wrtcmp_config_fn + param_names_and_vals(np) = systemfunc(sed_cmd) + + regex_print = "\4" + sed_cmd = "sed -n -r -e " + char_dq + "s|" + regex_search + "|" \ + + regex_print + "|p" + char_dq + " " + wrtcmp_config_fn + trailing_comments(np) = systemfunc(sed_cmd) +; +; ********************************************************************** +; +; +; +; ********************************************************************** +; + do np=1, num_params-1 + + param_name := param_names(np) + + if (strcmp_exact(param_name, "cen_lon")) then + param_value := lon_ctr + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "cen_lat")) then + param_value := lat_ctr + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "lon1")) then + param_value := lon_SW + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "lat1")) then + param_value := lat_SW + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "lon2")) then + param_value := lon_SW + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "lat2")) then + param_value := lat_SW + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "dlon")) then + param_value := dx + fmt_str = "%16.8f" + else if (strcmp_exact(param_name, "dlat")) then + param_value := dy + fmt_str = "%16.8f" + else + + msg := char_nl + \ +"Unknown parameter name specified for given output_coord_sys:" + char_nl + \ +" output_coord_sys = " + char_dq + output_coord_sys + char_dq + char_nl + \ +" param_name = " + char_dq + param_name + char_dq + char_nl + \ +"Stopping." + print("" + msg) + exit + + end if + end if + end if + end if + end if + end if + end if + end if +; +; Generate a string containing the parameter value and formatted as spe- +; cified by fmt_str. 
Then strip any leading and trailing whitespace +; from it. +; + param_value_str := sprintf(fmt_str, param_value) + param_value_str := str_strip(param_value_str) +; +; Set the regular expression to search for. +; + regex_search = "^(\s*" + param_name + ":\s+)(<" + param_name + ">)(\s*)(.*)" +; +; Get the parameter name and value without the trailing comment (if any). +; + regex_print = "\1" + param_value_str + sed_cmd = "sed -n -r -e " + char_dq + "s|" + regex_search + "|" \ + + regex_print + "|p" + char_dq + " " + wrtcmp_config_fn + param_names_and_vals(np) = systemfunc(sed_cmd) +; +; Get the trailing name and comment. +; + regex_print = "\4" + sed_cmd = "sed -n -r -e " + char_dq + "s|" + regex_search + "|" \ + + regex_print + "|p" + char_dq + " " + wrtcmp_config_fn + trailing_comments(np) = systemfunc(sed_cmd) + + end do +; +; ********************************************************************** +; +; Generate a string array containing each line in the model_configure +; file that specifies a parameter describing the write-component output +; grid. Each such line will contain the parameter name, value, and an +; trailing comment, with the trailing comments aligned for readability. +; +; ********************************************************************** +; + lines_final := strpad(param_names_and_vals, " ", "right") + lines_final := lines_final + " " + trailing_comments +; +; ********************************************************************** +; +; Print out the write-component parameter values calculated above. +; +; ********************************************************************** +; + msg := char_nl + \ +"Write-component parameters corresponding to this native grid are:" + char_nl + print("" + msg) + print("" + lines_final) +; +; ********************************************************************** +; +; Loop through the set of parameters and find the line in the template +; file where each is set. Then replace that line with the corresponding +; line generated above containing the parameter name, its value, and the +; optional aligned comment. +; +; ********************************************************************** +; + do np=0, num_params-1 + param_name = param_names(np) + regex_search = "^(\s*" + param_name + ":\s+)(<" + param_name + ">)(\s*)(.*)" + regex_replace = lines_final(np) + sed_cmd = "sed -i -r -e " + char_dq + "s|" + regex_search + "|" \ + + regex_replace + "|" + char_dq + " " + wrtcmp_config_fn + sed_output = systemfunc(sed_cmd) + end do +; +; ********************************************************************** +; +; +; +; ********************************************************************** +; + out = True + return(out) + +end + diff --git a/ush/NCL/find_wrtcmp_grid_params.ncl b/ush/NCL/find_wrtcmp_grid_params.ncl new file mode 100644 index 000000000..149b12f89 --- /dev/null +++ b/ush/NCL/find_wrtcmp_grid_params.ncl @@ -0,0 +1,294 @@ +; +; ********************************************************************** +; +; Declare global variables before loading files. This has the same ef- +; fect as declaring these variables on the command line. +; +; ********************************************************************** +; +;help = True + +expt_dir = "/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/test_latest_20191002/expt_dirs/test_sheleg_GSD_HRRR3km_01" + +; +; ********************************************************************** +; +; Load external files. 
+; +; ********************************************************************** +; +lib_location = "lib/" + +loadscript(lib_location + "special_chars.ncl") +loadscript(lib_location + "pause.ncl") +loadscript(lib_location + "strcmp_exact.ncl") + +load "calc_wrtcmp_grid_params_rotated_latlon.ncl" +load "calc_wrtcmp_grid_params_lambert_cnfrml.ncl" +load "read_FV3SAR_grid_native.ncl" + +begin +; +; ********************************************************************** +; +; Set the name of the current script or function. We have to do this +; manually because NCL does not seem to have a built-in method of ob- +; taining this information. +; +; ********************************************************************** +; + curnt_script_proc_func_name := "find_wrtcmp_grid_params(...)" +; +; ********************************************************************** +; +; Set usage message. +; +; ********************************************************************** +; + usage_msg = \ +" ncl -n find_wrtcmp_grid_params.ncl \" + char_nl + \ +" need to fill in the rest of this message" +; +; ********************************************************************** +; +; Set help message. The help message contains the documentation for +; this script and thus should reflect any changes to the code. +; +; ********************************************************************** +; + help_msg = char_nl + \ +"Need to fill in this help message." +; +; ********************************************************************** +; +; If the variable "help" is specified on the command line and is set to +; True, print out the help message and exit. +; +; ********************************************************************** +; + if (isvar("help")) then + if (help .eq. True) then + print("" + help_msg) + exit + end if + else + help = False + end if +; +; ********************************************************************** +; +; Set the full path to the varible defintions file for this experiment. +; Then read various parameters from it. +; +; ********************************************************************** +; + var_defns_fp = expt_dir + "/var_defns.sh" + + param_names = (/ \ +"WRTCMP_PARAMS_TEMPLATE_FP", \ +"WRTCMP_output_grid", \ +"gtype", \ +"CRES", \ +"delx", \ +"dely", \ +"nh4_T7" /) + + num_params = dimsizes(param_names) + do np=0, num_params-1 + + param_name = param_names(np) + regex_search = "^\s*" + param_name + "=(" + char_dq + "([^" \ + + char_dq + "]+)" + char_dq + "|([^ " + char_dq + "]+))(.*)$" + regex_print = "\2\3" + sed_cmd = "sed --regexp-extended --silent --expression " + char_sq \ + + "s/" + regex_search + "/" + regex_print + "/p" + char_sq \ + + " " + var_defns_fp + sed_output = systemfunc(sed_cmd) +; +; Convert the output from the sed command (which will be a string) to +; the appropriate NCL data type. 
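Each value extracted by sed arrives as a string; the conversions below map it to the type the rest of the script expects. A minimal sketch with made-up literals:

    s_path = "/path/to/template"     ; string parameters (e.g. gtype, CRES) stay strings
    s_res  = "3000.0"                ; grid-spacing parameters become doubles
    s_halo = "4"                     ; the halo width becomes an integer
    p_str  = tostring(s_path)
    dx_dbl = todouble(s_res)
    n_halo = tointeger(s_halo)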
+;
+  if (strcmp_exact(param_name, "WRTCMP_PARAMS_TEMPLATE_FP")) then
+    WRTCMP_PARAMS_TEMPLATE_FP = tostring(sed_output)
+  else if (strcmp_exact(param_name, "WRTCMP_output_grid")) then
+    WRTCMP_output_grid = tostring(sed_output)
+  else if (strcmp_exact(param_name, "gtype")) then
+    gtype = tostring(sed_output)
+  else if (strcmp_exact(param_name, "CRES")) then
+    CRES = tostring(sed_output)
+  else if (strcmp_exact(param_name, "delx")) then
+    dx_native = todouble(sed_output)
+  else if (strcmp_exact(param_name, "dely")) then
+    dy_native = todouble(sed_output)
+  else if (strcmp_exact(param_name, "nh4_T7")) then
+    nhalo_T7 = tointeger(sed_output)
+  else
+
+    msg := char_nl + \
+"ERROR: " + curnt_script_proc_func_name + ":" + char_nl + \
+"The data type to convert the current variable definitions file parameter" + char_nl + \
+"to has not been specified:" + char_nl + \
+"  param_name = " + char_dq + param_name + char_dq + char_nl + \
+"Stopping." + char_nl
+    print("" + msg)
+    exit
+
+  end if
+  end if
+  end if
+  end if
+  end if
+  end if
+  end if
+
+  end do
+;
+; **********************************************************************
+;
+; Check that gtype has the proper value.
+;
+; **********************************************************************
+;
+  if (strcmp_exact(gtype, "regional")) then
+
+    inds_tiles_to_plot = (/ 7 /)
+
+  else
+
+    msg := char_nl + \
+"ERROR: " + curnt_script_proc_func_name + ":" + char_nl + \
+"This script is designed to handle only regional grids.  Thus, gtype" + char_nl + \
+"may only be set to " + char_dq + "regional" + char_dq + ":" + char_nl + \
+"  gtype = " + char_dq + gtype + char_dq + char_nl + \
+"Stopping." + char_nl
+
+    print("" + msg)
+    exit
+
+  end if
+;
+; **********************************************************************
+;
+; Read in the native FV3SAR grid.
+;
+; **********************************************************************
+;
+  remove_rgnl_halo = True
+  get_tile_bdies = False
+
+  grid_info := read_FV3SAR_grid_native( \
+               expt_dir, \
+               gtype, \
+               CRES, \
+               inds_tiles_to_plot, \
+               get_tile_bdies, \
+               nhalo_T7, \
+               remove_rgnl_halo)
+
+  lon_tile_cntr_tiles_to_plot = grid_info@lon_tile_cntr_all_tiles
+  lat_tile_cntr_tiles_to_plot = grid_info@lat_tile_cntr_all_tiles
+
+  lon_tile_corners_face_midpts_tiles_to_plot \
+  = grid_info@lon_tile_corners_face_midpts_all_tiles
+  lat_tile_corners_face_midpts_tiles_to_plot \
+  = grid_info@lat_tile_corners_face_midpts_all_tiles
+;
+; **********************************************************************
+;
+; Set the full path to the file that will contain a copy of the write-
+; component template file with placeholders replaced with actual values.
+; Then copy the template file to that file.
+;
+; **********************************************************************
+;
+  wrtcmp_config_fp = expt_dir + "/aaaa"
+  sys_output = systemfunc("cp " + WRTCMP_PARAMS_TEMPLATE_FP + \
+               " " + wrtcmp_config_fp)
+;
+; **********************************************************************
+;
+; Extract and save into new, appropriately dimensioned variables the
+; output from the grid-read operation above.  Then call the function
+; that calculates the parameters of a write-component output grid (of
+; type rotated_latlon or lambert_conformal, as specified by
+; WRTCMP_output_grid) that is guaranteed to lie completely inside the
+; native FV3SAR grid.
+; +; ********************************************************************** +; + nn = 0 + + lon_grid_cntr_native = lon_tile_cntr_tiles_to_plot(nn) + lat_grid_cntr_native = lat_tile_cntr_tiles_to_plot(nn) + + lon_tile_corners_face_midpts_native \ + := lon_tile_corners_face_midpts_tiles_to_plot(nn,:) + lat_tile_corners_face_midpts_native \ + := lat_tile_corners_face_midpts_tiles_to_plot(nn,:) + + angle_units = "deg" + + valid_vals_WRTCMP_output_grid \ + := (/ "rotated_latlon", "lambert_conformal" /) +;WRTCMP_output_grid = "rotated_latlon" + + if (strcmp_exact(valid_vals_WRTCMP_output_grid, WRTCMP_output_grid)) then + + if (strcmp_exact(WRTCMP_output_grid, "rotated_latlon")) then + + out := calc_wrtcmp_grid_params_rotated_latlon( \ + wrtcmp_config_fp, \ + lon_grid_cntr_native, lat_grid_cntr_native, \ + lon_tile_corners_face_midpts_native, \ + lat_tile_corners_face_midpts_native, \ + dx_native, dy_native, \ + angle_units) + + else if (strcmp_exact(WRTCMP_output_grid, "lambert_conformal")) then + + out := calc_wrtcmp_grid_params_lambert_cnfrml( \ + wrtcmp_config_fp, \ + lon_grid_cntr_native, lat_grid_cntr_native, \ + lon_tile_corners_face_midpts_native, \ + lat_tile_corners_face_midpts_native, \ + dx_native, dy_native, \ + angle_units) + +; else if (strcmp_exact(WRTCMP_output_grid, "rotated_latlon")) then +; +; msg := char_nl + \ +;"ERROR: " + curnt_script_proc_func_name + ":" + char_nl + \ +;"Function to calculate write-component output grid parameters for an " + char_nl + \ +;"output grid of type " + char_dq + "rotated_latlon" + char_dq + " has " + \ +;"not yet been written:" + char_nl + \ +;" WRTCMP_output_grid = " + char_dq + WRTCMP_output_grid + char_dq + char_nl + \ +;"Stopping." + char_nl +; print("" + msg) +; exit + + end if + end if + + else + + valid_vals_str := tostring(valid_vals_WRTCMP_output_grid) + valid_vals_str := str_join(valid_vals_str, char_dq + ", " + char_dq) + valid_vals_str := "(/ " + char_dq + valid_vals_str + char_dq + " /)" + + msg := char_nl + \ +"ERROR: " + curnt_script_proc_func_name + ":" + char_nl + \ +"Specified type of write-component output grid is not currently support-" + char_nl + \ +"ed:" + char_nl + \ +" WRTCMP_output_grid = " + char_dq + WRTCMP_output_grid + char_dq + char_nl + \ +"Currently, this script can generate write-component parameters only for" + char_nl + \ +"output grids of the following types:" + char_nl + \ +" valid_vals_WRTCMP_output_grid = " + valid_vals_str + char_nl + \ +"Stopping." 
+ char_nl + print("" + msg) + exit + + end if + + print("") + print("Done calculating write-component grid parameters.") + +end diff --git a/ush/NCL/get_wrtcmp_grid.ncl b/ush/NCL/get_wrtcmp_grid.ncl index 48dc251a8..e24e333b5 100644 --- a/ush/NCL/get_wrtcmp_grid.ncl +++ b/ush/NCL/get_wrtcmp_grid.ncl @@ -7,8 +7,9 @@ ; loadscript(lib_location + "pause.ncl") loadscript(lib_location + "constants.ncl") -loadscript(lib_location + "calc_lambert_cnfrml_coords_from_sphr.ncl") -loadscript(lib_location + "calc_sphr_coords_from_lambert_cnfrml.ncl") +loadscript(lib_location + "convert_from_to_sphr_coords_to_from_rotated_sphr.ncl") +loadscript(lib_location + "convert_sphr_coords_to_lambert_cnfrml.ncl") +loadscript(lib_location + "convert_lambert_cnfrml_coords_to_sphr.ncl") undef("get_wrtcmp_grid") @@ -18,7 +19,7 @@ function get_wrtcmp_grid( \ local proj_params, \ var_name, regex_search, regex_print, sed_cmd, \ - output_coord_sys, valid_vals_output_coord_sys, valid_vals, msg, \ + wrtcmp_coord_sys, valid_vals_wrtcmp_coord_sys, valid_vals, msg, \ param_names, coord_data_type, num_params, param_name, \ lon_ctr_rad, lat_ctr_rad, lat1_rad, lat2_rad, \ nxm, nyp, lon_cell_cntr_SW, lat_cell_cntr_SW, dx, dy, \ @@ -48,7 +49,8 @@ begin ; ; ********************************************************************** ; -; Get the coordinate system used by the write-component output grid. +; Get the coordinate system in which the write-component output grid is +; specified. ; ; ********************************************************************** ; @@ -57,7 +59,7 @@ begin regex_print = "\1" sed_cmd = "sed -r -n -e " + char_dq + "s|" + regex_search + "|" \ + regex_print + "|p" + char_dq + " " + wrtcmp_config_fn - output_coord_sys = systemfunc(sed_cmd) + wrtcmp_coord_sys = systemfunc(sed_cmd) ; ; ********************************************************************** ; @@ -65,19 +67,19 @@ begin ; ; ********************************************************************** ; - valid_vals_output_coord_sys = (/ "rotated_latlon", "lambert_conformal" /) + valid_vals_wrtcmp_coord_sys = (/ "rotated_latlon", "lambert_conformal" /) - if (.not. strcmp_exact(valid_vals_output_coord_sys, output_coord_sys)) then + if (.not. strcmp_exact(valid_vals_wrtcmp_coord_sys, wrtcmp_coord_sys)) then valid_vals \ := char_dq \ - + str_join(valid_vals_output_coord_sys, char_dq + ", " + char_dq) \ + + str_join(valid_vals_wrtcmp_coord_sys, char_dq + ", " + char_dq) \ + char_dq msg := char_nl + \ -"The ouput coordinate system (output_coord_sys) is not set to a valid " + char_nl + \ -"value:" + char_nl + \ -" output_coord_sys = " + char_dq + output_coord_sys + char_dq + char_nl + \ +"The coordinate system in which the write-component output grid is spe-" + char_nl + \ +"cified (wrtcmp_coord_sys) has not been set to a valid value:" + char_nl + \ +" wrtcmp_coord_sys = " + char_dq + wrtcmp_coord_sys + char_dq + char_nl + \ "Valid values are:" + char_nl + \ " " + valid_vals + char_nl + \ "Please rerun with a valid grid type. Stopping." 
@@ -93,9 +95,9 @@ begin ; ********************************************************************** ; msg := char_nl + \ -"The ouput coordinate system (output_coord_sys) used by the write-compo-" + char_nl + \ -"nent is:" + char_nl + \ -" output_coord_sys = " + char_dq + output_coord_sys + char_dq + char_nl +"The coordinate system in which the write-component output grid is spe-" + char_nl + \ +"cified (wrtcmp_coord_sys) is:" + char_nl + \ +" wrtcmp_coord_sys = " + char_dq + wrtcmp_coord_sys + char_dq + char_nl print("" + msg) ; ; ********************************************************************** @@ -106,7 +108,19 @@ begin ; ; ********************************************************************** ; - if (strcmp_exact(output_coord_sys, "lambert_conformal")) then + if (strcmp_exact(wrtcmp_coord_sys, "rotated_latlon")) then + + param_names = (/ \ + "cen_lon", \ + "cen_lat", \ + "lon1", \ + "lat1", \ + "lon2", \ + "lat2", \ + "dlon", \ + "dlat" /) + + else if (strcmp_exact(wrtcmp_coord_sys, "lambert_conformal")) then param_names = (/ \ "cen_lon", \ @@ -123,19 +137,21 @@ begin else msg := char_nl + \ -"param_names has not been set for this output coordinate system:" + char_nl + \ -" output_coord_sys = " + char_dq + output_coord_sys + char_dq + char_nl + \ +"param_names has not been set for this value of the write-component out-" + char_nl + \ +"put grid coordinate system (wrtcmp_coord_sys):" + char_nl + \ +" wrtcmp_coord_sys = " + char_dq + wrtcmp_coord_sys + char_dq + char_nl + \ "Stopping." print("" + msg) exit end if + end if ; ; ********************************************************************** ; ; Set the data type (i.e. float or double) of the coordinate arrays for ; the write-component output grid to be "double". Note that here, we -; constructing this grid from the "grid" parameters, so we can choose +; are constructing this grid from the "grid" parameters, so we can choose ; this data type to be whatever we like (i.e. "float" or "double"). 
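For the conversions a couple of sections below, the target type is passed to totype as a string, so switching the write-component coordinate arrays between float and double is a one-word change; for example (the literal value here is made up):

    coord_data_type = "double"
    lat_demo = totype("38.50000000", coord_data_type)   ; -> 38.5 as a double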
; ; ********************************************************************** @@ -151,6 +167,11 @@ begin ; num_params = dimsizes(param_names) + msg = char_nl + \ +"Reading in write-component output grid coordinate system parameters " + char_nl + \ +"from file wrtcmp_config_fn:" + char_nl + \ +" wrtcmp_config_fn = " + char_dq + wrtcmp_config_fn + char_dq + do np=0, num_params-1 param_name = param_names(np) @@ -160,89 +181,173 @@ begin + regex_print + "|p" + char_dq + " " + wrtcmp_config_fn sed_output = systemfunc(sed_cmd) - if (strcmp_exact(param_name, "cen_lon")) then - lon_ctr := totype(sed_output, coord_data_type) - else if (strcmp_exact(param_name, "cen_lat")) then - lat_ctr := totype(sed_output, coord_data_type) - else if (strcmp_exact(param_name, "stdlat1")) then - lat1 := totype(sed_output, coord_data_type) - else if (strcmp_exact(param_name, "stdlat2")) then - lat2 := totype(sed_output, coord_data_type) - else if (strcmp_exact(param_name, "nx")) then -; nxp := totype(sed_output, "integer") - nx := totype(sed_output, "integer") - else if (strcmp_exact(param_name, "ny")) then -; nyp := totype(sed_output, "integer") - ny := totype(sed_output, "integer") - else if (strcmp_exact(param_name, "lon1")) then - lon_cell_cntr_SW := totype(sed_output, coord_data_type) - else if (strcmp_exact(param_name, "lat1")) then - lat_cell_cntr_SW := totype(sed_output, coord_data_type) - else if (strcmp_exact(param_name, "dx")) then - dx := totype(sed_output, coord_data_type) - else if (strcmp_exact(param_name, "dy")) then - dy := totype(sed_output, coord_data_type) + unrecognized_param = False + + if (strcmp_exact(wrtcmp_coord_sys, "rotated_latlon")) then + + if (strcmp_exact(param_name, "cen_lon")) then + lon_ctr := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "cen_lat")) then + lat_ctr := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "lon1")) then + rot_lon_cell_cntr_SW := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "lat1")) then + rot_lat_cell_cntr_SW := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "lon2")) then + rot_lon_cell_cntr_NE := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "lat2")) then + rot_lat_cell_cntr_NE := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "dlon")) then + dlon := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "dlat")) then + dlat := totype(sed_output, coord_data_type) + else + unrecognized_param = True + end if + end if + end if + end if + end if + end if + end if + end if + + else if (strcmp_exact(wrtcmp_coord_sys, "lambert_conformal")) then + + if (strcmp_exact(param_name, "cen_lon")) then + lon_ctr := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "cen_lat")) then + lat_ctr := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "stdlat1")) then + lat1 := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "stdlat2")) then + lat2 := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "nx")) then + nx := totype(sed_output, "integer") + else if (strcmp_exact(param_name, "ny")) then + ny := totype(sed_output, "integer") + else if (strcmp_exact(param_name, "lon1")) then + lon_cell_cntr_SW := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "lat1")) then + lat_cell_cntr_SW := totype(sed_output, coord_data_type) + else if (strcmp_exact(param_name, "dx")) then + dx := totype(sed_output, 
coord_data_type) + else if (strcmp_exact(param_name, "dy")) then + dy := totype(sed_output, coord_data_type) + else + unrecognized_param = True + end if + end if + end if + end if + end if + end if + end if + end if + end if + end if + else msg := char_nl + \ -"Unknown parameter name specified for given output_coord_sys:" + char_nl + \ -" output_coord_sys = " + char_dq + output_coord_sys + char_dq + char_nl + \ -" param_name = " + char_dq + param_name + char_dq + char_nl + \ +"Grid parameters have not yet been specified for this value of the " + char_nl + \ +"write-component output grid coordinate system (wrtcmp_coord_sys):" + char_nl + \ +" wrtcmp_coord_sys = " + char_dq + wrtcmp_coord_sys + char_dq + char_nl + \ "Stopping." print("" + msg) exit end if end if - end if - end if - end if - end if - end if - end if - end if - end if - - end do ; ; ********************************************************************** ; -; Print out values of parameters read in from the write-component con- -; figuration file. +; ; ; ********************************************************************** ; - if (strcmp_exact(output_coord_sys, "lambert_conformal")) then + if (unrecognized_param) then - print("") - print("" + separator_line) + msg := char_nl + \ +"Unknown parameter name specified for the given write-component output" + char_nl + \ +"grid coordinate system (wrtcmp_coord_sys):" + char_nl + \ +" wrtcmp_coord_sys = " + char_dq + wrtcmp_coord_sys + char_dq + char_nl + \ +" param_name = " + char_dq + param_name + char_dq + char_nl + \ +"Stopping." + print("" + msg) + exit - msg = char_nl + \ -"Values of write-component output grid coordinate system parameters read" + char_nl + \ -"in from the model_configure file are:" + char_nl + \ -" lon_ctr = " + lon_ctr + char_nl + \ -" lat_ctr = " + lat_ctr + char_nl + \ -" lat1 = " + lat1 + char_nl + \ -" lat2 = " + lat2 + char_nl + \ -" nx = " + nx + char_nl + \ -" ny = " + ny + char_nl + \ -" lon_cell_cntr_SW = " + lon_cell_cntr_SW + char_nl + \ -" lat_cell_cntr_SW = " + lat_cell_cntr_SW + char_nl + \ -" dx = " + dx + char_nl + \ -" dy = " + dy + char_nl - print("" + msg) + end if + + end do - else - msg := char_nl + \ -"Values of write-component output grid coordinate system parameters have" + \ -"not been set for this output coordinate system:" + char_nl + \ -" output_coord_sys = " + char_dq + output_coord_sys + char_dq + char_nl + \ -"Stopping." - print("" + msg) - exit + if (strcmp_exact(wrtcmp_coord_sys, "rotated_latlon")) then + dx = dlon + dy = dlat + nx = (rot_lon_cell_cntr_NE - rot_lon_cell_cntr_SW)/dlon + 1 + nx := round(nx, 3) + ny = (rot_lat_cell_cntr_NE - rot_lat_cell_cntr_SW)/dlat + 1 + ny := round(ny, 3) +print("") +print("nx = " + nx) +print("ny = " + ny) +pause end if + +; +; ********************************************************************** +; +; Print out values of parameters read in from the write-component con- +; figuration file. 
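The reconstruction of nx and ny from the rotated-grid parameters just above amounts to the following, where round denotes rounding to the nearest integer (as done by round(..., 3)):

\[
n_x = \mathrm{round}\!\left(\frac{\lambda'_{NE} - \lambda'_{SW}}{\Delta\lambda} + 1\right), \qquad
n_y = \mathrm{round}\!\left(\frac{\varphi'_{NE} - \varphi'_{SW}}{\Delta\varphi} + 1\right),
\]

with (λ'_SW, φ'_SW) and (λ'_NE, φ'_NE) the rotated-sphere coordinates of the southwest and northeast cell centers (lon1/lat1 and lon2/lat2), Δλ = dlon, and Δφ = dlat; the +1 arises because both endpoints are cell centers.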
+; +; ********************************************************************** +; +; msg = char_nl + \ +;"Values of write-component output grid coordinate system parameters read" + char_nl + \ +;"in from the model_configure file are:" +; +; do np=0, num_params-1 +; param_name = param_names(np) +; param_value = $param_name$ +; msg = char_nl + \ +;" " + param_name + " = " + param_value +; end do + +; if (strcmp_exact(wrtcmp_coord_sys, "rotated_latlon")) then +; +; else if (strcmp_exact(wrtcmp_coord_sys, "lambert_conformal")) then +; +; print("") +; print("" + separator_line) +; +; msg = char_nl + \ +;"Values of write-component output grid coordinate system parameters read" + char_nl + \ +;"in from the model_configure file are:" + char_nl + \ +;" lon_ctr = " + lon_ctr + char_nl + \ +;" lat_ctr = " + lat_ctr + char_nl + \ +;" lat1 = " + lat1 + char_nl + \ +;" lat2 = " + lat2 + char_nl + \ +;" nx = " + nx + char_nl + \ +;" ny = " + ny + char_nl + \ +;" lon_cell_cntr_SW = " + lon_cell_cntr_SW + char_nl + \ +;" lat_cell_cntr_SW = " + lat_cell_cntr_SW + char_nl + \ +;" dx = " + dx + char_nl + \ +;" dy = " + dy + char_nl +; print("" + msg) +; +; else +; +; msg := char_nl + \ +;"Grid parameters have not yet been specified for this value of the " + char_nl + \ +;"write-component output grid coordinate system (wrtcmp_coord_sys):" + char_nl + \ +;" wrtcmp_coord_sys = " + char_dq + wrtcmp_coord_sys + char_dq + char_nl + \ +;"Stopping." +; print("" + msg) +; exit +; +; end if ; ; ********************************************************************** ; @@ -251,25 +356,27 @@ begin ; ********************************************************************** ; angle_units = "deg" - -; nx = nxp - 1 -; ny = nyp - 1 - ; ; ********************************************************************** ; -; Use the given spherical coordinates (lon_cell_cntr_SW, lat_cell_cntr_SW) of the southwest -; corner of the grid to calculate the Lambert conformal coordinates -; (x_cell_cntr_SW, y_cell_cntr_SW) of that corner. +; Use the given spherical coordinates (lon_cell_cntr_SW, lat_cell_cntr_- +; SW) of the southwest corner of the grid to calculate the Lambert con- +; formal coordinates (x_cell_cntr_SW, y_cell_cntr_SW) of that corner. 
; ; ********************************************************************** ; - lambert_coords \ - := calc_lambert_cnfrml_coords_from_sphr( \ - lon_ctr, lat_ctr, lat1, lat2, rad_Earth, angle_units, \ - lon_cell_cntr_SW, lat_cell_cntr_SW) - x_cell_cntr_SW = lambert_coords@x - y_cell_cntr_SW = lambert_coords@y + if (strcmp_exact(wrtcmp_coord_sys, "rotated_latlon")) then + x_cell_cntr_SW = rot_lon_cell_cntr_SW + y_cell_cntr_SW = rot_lat_cell_cntr_SW + else if (strcmp_exact(wrtcmp_coord_sys, "lambert_conformal")) then + lambert_cnfrml_coords \ + := convert_sphr_coords_to_lambert_cnfrml( \ + lon_ctr, lat_ctr, lat1, lat2, rad_Earth, angle_units, \ + lon_cell_cntr_SW, lat_cell_cntr_SW) + x_cell_cntr_SW = lambert_cnfrml_coords@x + y_cell_cntr_SW = lambert_cnfrml_coords@y + end if + end if x_min = x_cell_cntr_SW - 0.5d+0*dx y_min = y_cell_cntr_SW - 0.5d+0*dy @@ -315,11 +422,22 @@ begin ; ; ********************************************************************** ; - sphr_coords := calc_sphr_coords_from_lambert_cnfrml( \ - lon_ctr, lat_ctr, lat1, lat2, rad_Earth, angle_units, \ - x_verts, y_verts) - lon_verts = sphr_coords@lon - lat_verts = sphr_coords@lat + if (strcmp_exact(wrtcmp_coord_sys, "rotated_latlon")) then + sphr_coords \ + := convert_from_to_sphr_coords_to_from_rotated_sphr( \ + lon_ctr, lat_ctr, angle_units, -1, \ + x_verts, y_verts) + lon_verts = sphr_coords@lon_out + lat_verts = sphr_coords@lat_out + else if (strcmp_exact(wrtcmp_coord_sys, "lambert_conformal")) then + sphr_coords \ + := convert_lambert_cnfrml_coords_to_sphr( \ + lon_ctr, lat_ctr, lat1, lat2, rad_Earth, angle_units, \ + x_verts, y_verts) + lon_verts = sphr_coords@lon + lat_verts = sphr_coords@lat + end if + end if ; ; ********************************************************************** ; @@ -349,11 +467,22 @@ begin ; ; ********************************************************************** ; - sphr_coords := calc_sphr_coords_from_lambert_cnfrml( \ - lon_ctr, lat_ctr, lat1, lat2, rad_Earth, angle_units, \ - x_cntrs, y_cntrs) - lon_cntrs = sphr_coords@lon - lat_cntrs = sphr_coords@lat + if (strcmp_exact(wrtcmp_coord_sys, "rotated_latlon")) then + sphr_coords \ + := convert_from_to_sphr_coords_to_from_rotated_sphr( \ + lon_ctr, lat_ctr, angle_units, -1, \ + x_cntrs, y_cntrs) + lon_cntrs = sphr_coords@lon_out + lat_cntrs = sphr_coords@lat_out + else if (strcmp_exact(wrtcmp_coord_sys, "lambert_conformal")) then + sphr_coords \ + := convert_lambert_cnfrml_coords_to_sphr( \ + lon_ctr, lat_ctr, lat1, lat2, rad_Earth, angle_units, \ + x_cntrs, y_cntrs) + lon_cntrs = sphr_coords@lon + lat_cntrs = sphr_coords@lat + end if + end if ; ; ********************************************************************** ; diff --git a/ush/NCL/lib/calc_lambert_cnfrml_coords_from_sphr.ncl b/ush/NCL/lib/calc_lambert_cnfrml_coords_from_sphr.ncl index c78b88227..572147bd3 100644 --- a/ush/NCL/lib/calc_lambert_cnfrml_coords_from_sphr.ncl +++ b/ush/NCL/lib/calc_lambert_cnfrml_coords_from_sphr.ncl @@ -1,7 +1,7 @@ ; ; ********************************************************************** ; -; File name: calc_lambert_cnfrml_coords_from_sphr.ncl +; File name: convert_sphr_coords_to_lambert_cnfrml.ncl ; Author: Gerard Ketefian ; ; Description: @@ -14,9 +14,9 @@ ; loadscript(lib_location + "calc_lambert_cnfrml_proj_auxil_params.ncl") -undef("calc_lambert_cnfrml_coords_from_sphr") +undef("convert_sphr_coords_to_lambert_cnfrml") -function calc_lambert_cnfrml_coords_from_sphr( \ +function convert_sphr_coords_to_lambert_cnfrml( \ lon_ctr:snumeric, lat_ctr:snumeric, 
\ lat1:snumeric, lat2:snumeric, radius:snumeric, \ angle_units:string, \ @@ -40,7 +40,7 @@ begin ; ********************************************************************** ; curnt_script_proc_func_name \ - := "function calc_lambert_cnfrml_coords_from_sphr" + := "function convert_sphr_coords_to_lambert_cnfrml" ; ; ********************************************************************** ; diff --git a/ush/NCL/lib/calc_rotated_sphr_coords_from_sphr.ncl b/ush/NCL/lib/calc_rotated_sphr_coords_from_sphr.ncl new file mode 100644 index 000000000..4dd627aaf --- /dev/null +++ b/ush/NCL/lib/calc_rotated_sphr_coords_from_sphr.ncl @@ -0,0 +1,104 @@ +; +; ********************************************************************** +; +; File name: calc_rotated_sphr_coords_from_sphr.ncl +; Author: Gerard Ketefian +; +; Description: +; ^^^^^^^^^^^ +; This function calculates the Lambert conformal coordinates x and y +; corresponding to the specified spherical coordinates lon (longitude) +; and lat (latitude). +; +; ********************************************************************** +; +loadscript(lib_location + "calc_rotated_sphr_proj_auxil_params.ncl") + +undef("calc_rotated_sphr_coords_from_sphr") + +function calc_rotated_sphr_coords_from_sphr( \ + lon_ctr:snumeric, lat_ctr:snumeric, \ + lat1:snumeric, lat2:snumeric, radius:snumeric, \ + angle_units:string, \ + lon:snumeric, lat:snumeric) + +local proj_params, \ + lon_ctr_rad, lat_ctr_rad, lat1_rad, lat2_rad, \ + n, F, rho_ctr, \ + angle_units_short_lwrcase, \ + lon_rad, lat_rad, rho, n_dlon_rad, \ + x, y, coords_out + +begin +; +; ********************************************************************** +; +; Set the name of the current script or function. We have to do this +; manually because NCL does not seem to have a built-in method of ob- +; taining this information. +; +; ********************************************************************** +; + curnt_script_proc_func_name \ + := "function calc_rotated_sphr_coords_from_sphr" +; +; ********************************************************************** +; +; Calculate the auxiliary Lambert conformal map projection parameters +; that are needed in the calculation of the Lambert conformal coordi- +; nates (x,y) below. +; +; ********************************************************************** +; + proj_params := calc_rotated_sphr_proj_auxil_params( \ + lon_ctr, lat_ctr, lat1, lat2, radius, angle_units) + + lon_ctr_rad = proj_params@lon_ctr_rad + lat_ctr_rad = proj_params@lat_ctr_rad + lat1_rad = proj_params@lat1_rad + lat2_rad = proj_params@lat2_rad + n = proj_params@n + F = proj_params@F + rho_ctr = proj_params@rho_ctr +; +; ********************************************************************** +; +; If necessary, convert longitude and latitude from degrees to radians. +; +; ********************************************************************** +; + angle_units_short_lwrcase = str_lower(str_get_cols(angle_units, 0, 2)) + if (strcmp_exact(angle_units_short_lwrcase, "deg")) then + lon_rad = lon*rads_per_deg + lat_rad = lat*rads_per_deg + else + lon_rad = lon + lat_rad = lat + end if +; +; ********************************************************************** +; +; Calculate the Lambert conformal coordinates (x,y) using the projection +; parameters obtained above and the specified spherical coordinates. 
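Written out, the forward mapping implemented below (with n, F, and ρ_c the auxiliary projection parameters obtained above, and λ, φ in radians) is:

\[
\rho = \frac{F}{\tan^{n}\!\left(\frac{\pi}{4} + \frac{\varphi}{2}\right)}, \qquad
x = \rho\,\sin\!\big(n(\lambda - \lambda_c)\big), \qquad
y = \rho_c - \rho\,\cos\!\big(n(\lambda - \lambda_c)\big).
\]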
+; +; ********************************************************************** +; + rho = F/((tan(0.25d+0*pi_geom + 0.5d+0*lat_rad))^n) + n_dlon_rad = n*(lon_rad - lon_ctr_rad) + x = rho*sin(n_dlon_rad) + y = rho_ctr - rho*cos(n_dlon_rad) +; +; ********************************************************************** +; +; Return results as attributes of the logical variable coords_out. +; +; ********************************************************************** +; + coords_out = True + coords_out@x = x + coords_out@y = y + return(coords_out) + +end + + diff --git a/ush/NCL/lib/calc_sphr_coords_from_lambert_cnfrml.ncl b/ush/NCL/lib/calc_sphr_coords_from_lambert_cnfrml.ncl index a0fee334c..6cd0bd5f6 100644 --- a/ush/NCL/lib/calc_sphr_coords_from_lambert_cnfrml.ncl +++ b/ush/NCL/lib/calc_sphr_coords_from_lambert_cnfrml.ncl @@ -1,7 +1,7 @@ ; ; ********************************************************************** ; -; File name: calc_sphr_coords_from_lambert_cnfrml.ncl +; File name: convert_lambert_cnfrml_coords_to_sphr.ncl ; Author: Gerard Ketefian ; ; Description: @@ -14,9 +14,9 @@ ; loadscript(lib_location + "calc_lambert_cnfrml_proj_auxil_params.ncl") -undef("calc_sphr_coords_from_lambert_cnfrml") +undef("convert_lambert_cnfrml_coords_to_sphr") -function calc_sphr_coords_from_lambert_cnfrml( \ +function convert_lambert_cnfrml_coords_to_sphr( \ lon_ctr:snumeric, lat_ctr:snumeric, \ lat1:snumeric, lat2:snumeric, radius:snumeric, \ angle_units:string, \ @@ -39,7 +39,7 @@ begin ; ********************************************************************** ; curnt_script_proc_func_name \ - := "function calc_sphr_coords_from_lambert_cnfrml" + := "function convert_lambert_cnfrml_coords_to_sphr" ; ; ********************************************************************** ; diff --git a/ush/NCL/lib/calc_sphr_coords_from_rotated_sphr.ncl b/ush/NCL/lib/calc_sphr_coords_from_rotated_sphr.ncl new file mode 100644 index 000000000..e01b95622 --- /dev/null +++ b/ush/NCL/lib/calc_sphr_coords_from_rotated_sphr.ncl @@ -0,0 +1,105 @@ +; +; ********************************************************************** +; +; File name: calc_sphr_coords_from_rotated_sphr.ncl +; Author: Gerard Ketefian +; +; Description: +; ^^^^^^^^^^^ +; This function calculates the spherical coordinates (longitude and la- +; titude) corresponding to the specified Lambert conformal coordinates +; x and y. +; +; ********************************************************************** +; +loadscript(lib_location + "calc_rotated_sphr_proj_auxil_params.ncl") + +undef("calc_sphr_coords_from_rotated_sphr") + +function calc_sphr_coords_from_rotated_sphr( \ + lon_ctr:snumeric, lat_ctr:snumeric, \ + lat1:snumeric, lat2:snumeric, radius:snumeric, \ + angle_units:string, \ + x:snumeric, y:snumeric) + +local proj_params, \ + lon_ctr_rad, lat_ctr_rad, lat1_rad, lat2_rad, \ + n, F, rho_ctr, \ + rho_ctr_mns_y, rho, theta, lon_rad, lat_rad, \ + angle_units_short_lwr, lon, lat + +begin +; +; ********************************************************************** +; +; Set the name of the current script or function. We have to do this +; manually because NCL does not seem to have a built-in method of ob- +; taining this information. 
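For reference, the inverse mapping evaluated later in this function (using the same auxiliary parameters n, F, and ρ_c) is:

\[
\rho = \operatorname{sgn}(n)\,\sqrt{x^{2} + (\rho_c - y)^{2}}, \qquad
\theta = \arctan\!\frac{x}{\rho_c - y},
\]
\[
\lambda = \lambda_c + \frac{\theta}{n}, \qquad
\varphi = 2\,\arctan\!\left[\left(\frac{F}{\rho}\right)^{1/n}\right] - \frac{\pi}{2}.
\]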
+; +; ********************************************************************** +; + curnt_script_proc_func_name \ + := "function calc_sphr_coords_from_rotated_sphr" +; +; ********************************************************************** +; +; Calculate the auxiliary Lambert conformal map projection parameters +; thar are needed in the calculation of the spherical coordinates (lon, +; lat) below. +; +; ********************************************************************** +; + proj_params := calc_rotated_sphr_proj_auxil_params( \ + lon_ctr, lat_ctr, lat1, lat2, radius, angle_units) + + lon_ctr_rad = proj_params@lon_ctr_rad + lat_ctr_rad = proj_params@lat_ctr_rad + lat1_rad = proj_params@lat1_rad + lat2_rad = proj_params@lat2_rad + n = proj_params@n + F = proj_params@F + rho_ctr = proj_params@rho_ctr +; +; ********************************************************************** +; +; Calculate the spherical coordinates (lon_rad,lat_rad) using the pro- +; jection parameters obtained above and the specified Lambert conformal +; coordinates. Note that lon_rad and lat_rad are in units of radians. +; +; ********************************************************************** +; + rho_ctr_mns_y = rho_ctr - y + rho = sign_matlab(n)*sqrt(x^2 + rho_ctr_mns_y^2) + theta = atan(x/rho_ctr_mns_y) + lon_rad = lon_ctr_rad + theta/n + lat_rad = 2.0d+0*atan((F/rho)^(1.0d+0/n)) - 0.5d+0*pi_geom +; +; ********************************************************************** +; +; If necessary, convert angles from radians to degrees. +; +; ********************************************************************** +; + angle_units_short_lwr = str_lower(str_get_cols(angle_units, 0, 2)) + if (strcmp_exact(angle_units_short_lwr, "deg")) then + lon = lon_rad*degs_per_rad + lat = lat_rad*degs_per_rad + else + lon = lon_rad + lat = lat_rad + end if +; +; ********************************************************************** +; +; Return results as attributes of the logical variable coords_out. +; +; ********************************************************************** +; + coords_out = True + coords_out@lon = lon + coords_out@lat = lat + return(coords_out) + +end + + diff --git a/ush/NCL/lib/convert_from_to_sphr_coords_to_from_rotated_sphr.ncl b/ush/NCL/lib/convert_from_to_sphr_coords_to_from_rotated_sphr.ncl new file mode 100644 index 000000000..9de7d9cc9 --- /dev/null +++ b/ush/NCL/lib/convert_from_to_sphr_coords_to_from_rotated_sphr.ncl @@ -0,0 +1,162 @@ +; +; ********************************************************************** +; +; File name: convert_from_to_sphr_coords_to_from_rotated_sphr.ncl +; Author: Gerard Ketefian +; +; Description: +; ^^^^^^^^^^^ +; This function transforms the given longitudes and latitudes from sphe- +; rical to rotated spherical coordinates or vice versa. The positive X- +; axis of the rotated spherical coordinate system intersects the sphere +; at the (non-rotated) longitude and latitude (lon0, lat0). +; +; The input argument dir determines the direction of the transformation, +; as follows. If dir is set to 1, this function assumes that the input +; longitudes and latitudes (lon_in, lat_in) are in non-rotated spherical coor- +; dinates and calculates the corresponding output longitudes and lati- +; tudes (lon_out, lat_out) in rotated spherical coordinates. If dir is +; set to -1, this function assumes that (lon_in, lat_in) are specified in ro- +; tated coordinates and calculates the corresponding output coordinates +; (lon_out, lat_out) in non-rotated spherical coordinates. 
No other +; values of dir are allowed. +; +; ********************************************************************** +; +undef("convert_from_to_sphr_coords_to_from_rotated_sphr") + +function convert_from_to_sphr_coords_to_from_rotated_sphr( \ + lon0:snumeric, lat0:snumeric, \ + angle_units:string, dir:integer, \ + lon_in:snumeric, lat_in:snumeric) + +local lon0_rad, lat0_rad, \ + lon_in_rad, lat_in_rad, \ + lon_in_offset_rad, pmlat0_rad, msg, \ + sin_lon_in_offset, cos_lon_in_offset, \ + sin_pmlat0_rad, cos_pmlat0_rad, \ + sin_lat_in, cos_lat_in, tan_lat_in, \ + numer_atan, denom_atan, \ + lon_out, lat_out, coords_out + +begin +; +; ********************************************************************** +; +; If necessary, convert longitude and latitude from degrees to radians. +; +; ********************************************************************** +; + angle_units_short_lwrcase = str_lower(str_get_cols(angle_units, 0, 2)) + if (strcmp_exact(angle_units_short_lwrcase, "deg")) then + lon0_rad = lon0*rads_per_deg + lat0_rad = lat0*rads_per_deg + lon_in_rad = lon_in*rads_per_deg + lat_in_rad = lat_in*rads_per_deg + else + lon0_rad = lon0 + lat0_rad = lat0 + lon_in_rad = lon_in + lat_in_rad = lat_in + end if +; +; ********************************************************************** +; * +; +; * +; ********************************************************************** +; + if (dir .eq. 1) then + lon_in_offset_rad = lon_in_rad - lon0_rad + pmlat0_rad = lat0_rad + else if (dir .eq. -1) then + lon_in_offset_rad = lon_in_rad + pmlat0_rad = -lat0_rad + else + msg := char_nl + \ +"Disallowed value specified for dir:" + char_nl + \ +" dir = " + dir + char_nl + \ +"Set dir to 1 to transform from spherical to rotated spherical coordi-" + char_nl + \ +"nates, or set dir to -1 to transform from rotated spherical to spheri-" + char_nl + \ +"cal coordinates." + char_nl + \ +"Stopping." + print(msg + "") + exit + end if + end if +; +; ********************************************************************** +; * +; Calculate sines, cosines, and tangents of various angles. +; * +; ********************************************************************** +; + sin_lon_in_offset = sin(lon_in_offset_rad) + cos_lon_in_offset = cos(lon_in_offset_rad) + + sin_pmlat0_rad = sin(pmlat0_rad) + cos_pmlat0_rad = cos(pmlat0_rad) + sin_lat_in = sin(lat_in_rad) + cos_lat_in = cos(lat_in_rad) + tan_lat_in = sin_lat_in/cos_lat_in +; +; ********************************************************************** +; * +; Calculate the transformed longitude lon_out. +; * +; ********************************************************************** +; + numer_atan = sin_lon_in_offset + denom_atan = sin_pmlat0_rad*tan_lat_in + cos_pmlat0_rad*cos_lon_in_offset + lon_out = atan2(numer_atan, denom_atan) + if (dir .eq. -1) then + lon_out = lon_out + lon0_rad + end if +; +; ********************************************************************** +; * +; Make sure that the output value of longitude are in the range -pi <= +; lon < pi. +; * +; ********************************************************************** +; + lon_out := where(lon_out .lt. -pi_geom, lon_out + 2.0d+0*pi_geom, lon_out) + lon_out := where(lon_out .ge. pi_geom, lon_out - 2.0d+0*pi_geom, lon_out) +; +; ********************************************************************** +; * +; Calculate the transformed latitude lat_out. 
+; * +; ********************************************************************** +; + lat_out = asin(cos_pmlat0_rad*sin_lat_in \ + - sin_pmlat0_rad*cos_lon_in_offset*cos_lat_in) +; +; ********************************************************************** +; +; If necessary, convert output longitude and latitude from radians back +; to degrees. +; +; ********************************************************************** +; + if (strcmp_exact(angle_units_short_lwrcase, "deg")) then + lon_out = lon_out*degs_per_rad + lat_out = lat_out*degs_per_rad + end if +; +; ********************************************************************** +; +; Return results as attributes of the logical variable coords_out. +; +; ********************************************************************** +; + coords_out = True + coords_out@lon_out = lon_out + coords_out@lat_out = lat_out + return(coords_out) + +end + + + + diff --git a/ush/NCL/plot_FV3SAR_field_native.ncl b/ush/NCL/plot_FV3SAR_field_native.ncl index c882baa85..db511b9dd 100644 --- a/ush/NCL/plot_FV3SAR_field_native.ncl +++ b/ush/NCL/plot_FV3SAR_field_native.ncl @@ -494,6 +494,75 @@ end if ; ; ********************************************************************** ; +; Get/construct the write-component grid. +; +; ********************************************************************** +; + +; These should be at the top and go through the process_...() function. + +; show_wrtcmp_grid = True + show_wrtcmp_grid = False + + show_wrtcmp_bdy = True +; show_wrtcmp_bdy = False + + wrtcmp_bdy_color = "green" + + +; rd := "/scratch3/BMC/det/Gerard.Ketefian/UFS_CAM_test_instructions/expt_dirs/NX1800_NY1120_A0p21423_Kmns0p23209_HRRR_test_cycl_slurm_01/2017090700" +; rd := "/scratch3/BMC/det/Gerard.Ketefian" + rd := run_dir + "/2019052000" + + wrtcmp_config_fn := rd + "/model_configure" +; wrtcmp_config_tmpl_fn = wrtcmp_config_fn + ".tmpl" +; +; ********************************************************************** +; +; +; +; ********************************************************************** +; + if (show_wrtcmp_grid .or. show_wrtcmp_bdy) then +print("") +print("wrtcmp_config_fn = " + char_dq + wrtcmp_config_fn + char_dq) +pause + grid_info := get_wrtcmp_grid(wrtcmp_config_fn, show_wrtcmp_bdy) + + nx_wrtcmp = grid_info@nx + ny_wrtcmp = grid_info@ny + lon_cntrs_unstruc_wrtcmp = grid_info@lon_cntrs_unstruc + lat_cntrs_unstruc_wrtcmp = grid_info@lat_cntrs_unstruc + lon_verts_unstruc_wrtcmp = grid_info@lon_verts_unstruc + lat_verts_unstruc_wrtcmp = grid_info@lat_verts_unstruc + lon_bdy_wrtcmp = grid_info@lon_bdy + lat_bdy_wrtcmp = grid_info@lat_bdy + lon_grid_cntr_wrtcmp = grid_info@lon_grid_cntr + lat_grid_cntr_wrtcmp = grid_info@lat_grid_cntr + coord_data_type_wrtcmp = grid_info@coord_data_type + + print("") + print("lon_grid_cntr_wrtcmp = " + lon_grid_cntr_wrtcmp + " deg") + print("lat_grid_cntr_wrtcmp = " + lat_grid_cntr_wrtcmp + " deg") +pause + + + out := calc_wrtcmp_grid_params( \ + wrtcmp_config_fn, \ + lon_ctr_native, lat_ctr_native, \ + lon_tile_corners_face_midpts_native, \ + lat_tile_corners_face_midpts_native, \ + dx_native, dy_native, \ + angle_units) + +print("") +print("out = " + out) +pause + + end if +; +; ********************************************************************** +; ; Read in the FV3SAR grid. 
; ; ********************************************************************** @@ -620,59 +689,59 @@ end if print("lat_grid_cntr_RAP = " + lat_grid_cntr_RAP + " deg") end if +;; +;; ********************************************************************** +;; +;; Get/construct the write-component grid. +;; +;; ********************************************************************** +;; +; +;; These should be at the top and go through the process_...() function. +; +;; show_wrtcmp_grid = True +; show_wrtcmp_grid = False +; +; show_wrtcmp_bdy = True +;; show_wrtcmp_bdy = False +; +; wrtcmp_bdy_color = "green" +; +; +;; rd := "/scratch3/BMC/det/Gerard.Ketefian/UFS_CAM_test_instructions/expt_dirs/NX1800_NY1120_A0p21423_Kmns0p23209_HRRR_test_cycl_slurm_01/2017090700" +; rd := "/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM" +; +; wrtcmp_config_fn := rd + "/model_configure" +; wrtcmp_config_tmpl_fn = wrtcmp_config_fn + ".tmpl" +;; +;; ********************************************************************** +;; +;; +;; +;; ********************************************************************** +;; +; if (show_wrtcmp_grid .or. show_wrtcmp_bdy) then +; +; grid_info := get_wrtcmp_grid(wrtcmp_config_fn, show_wrtcmp_bdy) +; +; nx_wrtcmp = grid_info@nx +; ny_wrtcmp = grid_info@ny +; lon_cntrs_unstruc_wrtcmp = grid_info@lon_cntrs_unstruc +; lat_cntrs_unstruc_wrtcmp = grid_info@lat_cntrs_unstruc +; lon_verts_unstruc_wrtcmp = grid_info@lon_verts_unstruc +; lat_verts_unstruc_wrtcmp = grid_info@lat_verts_unstruc +; lon_bdy_wrtcmp = grid_info@lon_bdy +; lat_bdy_wrtcmp = grid_info@lat_bdy +; lon_grid_cntr_wrtcmp = grid_info@lon_grid_cntr +; lat_grid_cntr_wrtcmp = grid_info@lat_grid_cntr +; coord_data_type_wrtcmp = grid_info@coord_data_type +; +; print("") +; print("lon_grid_cntr_wrtcmp = " + lon_grid_cntr_wrtcmp + " deg") +; print("lat_grid_cntr_wrtcmp = " + lat_grid_cntr_wrtcmp + " deg") +;pause ; -; ********************************************************************** -; -; Get/construct the write-component grid. -; -; ********************************************************************** -; - -; These should be at the top and go through the process_...() function. - -; show_wrtcmp_grid = True - show_wrtcmp_grid = False - - show_wrtcmp_bdy = True -; show_wrtcmp_bdy = False - - wrtcmp_bdy_color = "green" - - -; rd := "/scratch3/BMC/det/Gerard.Ketefian/UFS_CAM_test_instructions/expt_dirs/NX1800_NY1120_A0p21423_Kmns0p23209_HRRR_test_cycl_slurm_01/2017090700" - rd := "/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM" - - wrtcmp_config_fn := rd + "/model_configure" - wrtcmp_config_tmpl_fn = wrtcmp_config_fn + ".tmpl" -; -; ********************************************************************** -; -; -; -; ********************************************************************** -; - if (show_wrtcmp_grid .or. 
show_wrtcmp_bdy) then - - grid_info := get_wrtcmp_grid(wrtcmp_config_fn, show_wrtcmp_bdy) - - nx_wrtcmp = grid_info@nx - ny_wrtcmp = grid_info@ny - lon_cntrs_unstruc_wrtcmp = grid_info@lon_cntrs_unstruc - lat_cntrs_unstruc_wrtcmp = grid_info@lat_cntrs_unstruc - lon_verts_unstruc_wrtcmp = grid_info@lon_verts_unstruc - lat_verts_unstruc_wrtcmp = grid_info@lat_verts_unstruc - lon_bdy_wrtcmp = grid_info@lon_bdy - lat_bdy_wrtcmp = grid_info@lat_bdy - lon_grid_cntr_wrtcmp = grid_info@lon_grid_cntr - lat_grid_cntr_wrtcmp = grid_info@lat_grid_cntr - coord_data_type_wrtcmp = grid_info@coord_data_type - - print("") - print("lon_grid_cntr_wrtcmp = " + lon_grid_cntr_wrtcmp + " deg") - print("lat_grid_cntr_wrtcmp = " + lat_grid_cntr_wrtcmp + " deg") -pause - - end if +; end if ; ; ********************************************************************** ; diff --git a/ush/NCL/process_plot_params.ncl b/ush/NCL/process_plot_params.ncl index 6cf7790d9..a038c37de 100644 --- a/ush/NCL/process_plot_params.ncl +++ b/ush/NCL/process_plot_params.ncl @@ -162,6 +162,10 @@ example_usage_and_help_str + "'s/(^gtype=)([" + char_dq + "]*)([A-Za-z]+)([" + char_dq + "]*)(.*)/\3/p' " \ + var_defns_fn gtype := systemfunc(cmd) + +; This is a temporary fix. Need this since we removed "gtype" from the +; variable defintions file of the community workflow. +gtype = "regional" ; ; ********************************************************************** ; diff --git a/ush/NCL/read_FV3SAR_grid_native.ncl b/ush/NCL/read_FV3SAR_grid_native.ncl index bb9b21c85..0905e9e1f 100644 --- a/ush/NCL/read_FV3SAR_grid_native.ncl +++ b/ush/NCL/read_FV3SAR_grid_native.ncl @@ -718,8 +718,8 @@ msg + char_nl + \ ; both poles lie within the tile (i.e. crossing of the IDL by a tile ; boundary is a necessary but not sufficient condition for one or both ; poles to lie within the tile). Below, if the IDL crossing check is -; triggered, we do we do not go further to check whether or not one or -; both poles lie within the tile (because that test is more complex). +; triggered, we do not go further to check whether or not one or both +; poles lie within the tile (because that test is more complex). ; ; ********************************************************************** ; @@ -932,7 +932,7 @@ msg + char_nl + \ ; the north and south poles and the one that straddles the IDL), the in- ; dices of the SW, SE, NE, and NW corners of the tile will not be set ; (they will remain set to their initial missing values), so the check -; below will be triggered. Something more comples needs to be done for +; below will be triggered. Something more complex needs to be done for ; such tiles, but we do not worry about it here for now since we're only ; dealing with regional domains. ; From 3a0c1ce328eb4db5904967942945aa769a3d8be0 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Mon, 16 Dec 2019 12:01:38 -0700 Subject: [PATCH 127/203] Change NCL script names (already changed names in the code itself). 
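For reference, the two renamed conversion scripts implement the forward and inverse Lambert conformal mappings that appear in the NCL code above. Written out (this is only a restatement of that code, with lambda_c the grid-center longitude, rho_c the value of rho at the grid-center latitude, n and F the auxiliary parameters returned by calc_lambert_cnfrml_proj_auxil_params, and all angles in radians):

Forward (spherical to Lambert conformal):

  \rho = \frac{F}{\tan^{n}\!\left(\frac{\pi}{4} + \frac{\varphi}{2}\right)}, \qquad
  x = \rho \, \sin\!\big[ n (\lambda - \lambda_c) \big], \qquad
  y = \rho_c - \rho \, \cos\!\big[ n (\lambda - \lambda_c) \big]

Inverse (Lambert conformal to spherical):

  \rho = \operatorname{sign}(n) \, \sqrt{x^{2} + (\rho_c - y)^{2}}, \qquad
  \theta = \arctan\!\left( \frac{x}{\rho_c - y} \right), \qquad
  \lambda = \lambda_c + \frac{\theta}{n}, \qquad
  \varphi = 2 \arctan\!\left[ \left( \frac{F}{\rho} \right)^{1/n} \right] - \frac{\pi}{2}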
--- ...mbert_cnfrml.ncl => convert_lambert_cnfrml_coords_to_sphr.ncl} | 0 ...ds_from_sphr.ncl => convert_sphr_coords_to_lambert_cnfrml.ncl} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename ush/NCL/lib/{calc_sphr_coords_from_lambert_cnfrml.ncl => convert_lambert_cnfrml_coords_to_sphr.ncl} (100%) rename ush/NCL/lib/{calc_lambert_cnfrml_coords_from_sphr.ncl => convert_sphr_coords_to_lambert_cnfrml.ncl} (100%) diff --git a/ush/NCL/lib/calc_sphr_coords_from_lambert_cnfrml.ncl b/ush/NCL/lib/convert_lambert_cnfrml_coords_to_sphr.ncl similarity index 100% rename from ush/NCL/lib/calc_sphr_coords_from_lambert_cnfrml.ncl rename to ush/NCL/lib/convert_lambert_cnfrml_coords_to_sphr.ncl diff --git a/ush/NCL/lib/calc_lambert_cnfrml_coords_from_sphr.ncl b/ush/NCL/lib/convert_sphr_coords_to_lambert_cnfrml.ncl similarity index 100% rename from ush/NCL/lib/calc_lambert_cnfrml_coords_from_sphr.ncl rename to ush/NCL/lib/convert_sphr_coords_to_lambert_cnfrml.ncl From a8d81be1de0986f41f133087f541120e19ea3258 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Mon, 16 Dec 2019 12:04:00 -0700 Subject: [PATCH 128/203] Bug fixes. --- ush/NCL/calc_wrtcmp_grid_params.ncl | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/ush/NCL/calc_wrtcmp_grid_params.ncl b/ush/NCL/calc_wrtcmp_grid_params.ncl index 94682c789..d24627e0c 100644 --- a/ush/NCL/calc_wrtcmp_grid_params.ncl +++ b/ush/NCL/calc_wrtcmp_grid_params.ncl @@ -352,7 +352,6 @@ begin end if end if end if - end if else if (strcmp_exact(wrtcmp_coord_sys, "lambert_conformal")) then @@ -369,11 +368,9 @@ begin param_value := lat2 fmt_str = "%16.8f" else if (strcmp_exact(param_name, "nx")) then -; param_value := nxm param_value := nx fmt_str = "%10.0f" else if (strcmp_exact(param_name, "ny")) then -; param_value := nym param_value := ny fmt_str = "%10.0f" else if (strcmp_exact(param_name, "lon1")) then @@ -399,8 +396,10 @@ begin end if end if end if + end if end if + end if ; ; ********************************************************************** ; From 7dded2c94c77cee1ceb61ecd8127c1ffa2e6b254 Mon Sep 17 00:00:00 2001 From: jeff beck Date: Mon, 16 Dec 2019 20:22:33 +0000 Subject: [PATCH 129/203] Add regional_latlon write component option and change HAFS pre-defined domains to use it. 
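For orientation, once the substitutions added below are applied, the write-component section of model_configure for a "regional_latlon" grid ends up looking roughly as follows. The values are taken from the GSD_HAFSV0.A3km settings in this patch, cen_lon is left symbolic because it comes from the native grid center, and the block is an illustrative sketch rather than output captured from a run (the matching wrtcmp_regional_latlon template is added a few patches further on):

  output_grid:            'regional_latlon'
  cen_lon:                ${LON_RGNL_CTR}   # native grid center longitude
  cen_lat:                25.0
  lon1:                   -114.5            # lower-left corner of the output grid
  lat1:                   -5.0
  lon2:                   -9.5              # upper-right corner of the output grid
  lat2:                   55.0
  dlon:                   0.03
  dlat:                   0.03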
--- scripts/exregional_run_fcst.sh | 5 +++ ush/get_extrn_mdl_file_dir_info.sh | 3 +- ush/set_predef_grid_params.sh | 58 ++++++++++++++---------------- ush/valid_param_vals.sh | 2 +- 4 files changed, 34 insertions(+), 34 deletions(-) diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index 7985079b3..676f55ee0 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -512,6 +512,11 @@ if [ "$QUILTING" = "TRUE" ]; then set_file_param "${model_config_fp}" "ny" "$WRTCMP_ny" set_file_param "${model_config_fp}" "dx" "$WRTCMP_dx" set_file_param "${model_config_fp}" "dy" "$WRTCMP_dy" + elif [ "${WRTCMP_output_grid}" = "regional_latlon" ]; then + set_file_param "${model_config_fp}" "lon2" "$WRTCMP_lon_upr_rght" + set_file_param "${model_config_fp}" "lat2" "$WRTCMP_lat_upr_rght" + set_file_param "${model_config_fp}" "dlon" "$WRTCMP_dlon" + set_file_param "${model_config_fp}" "dlat" "$WRTCMP_dlat" fi fi diff --git a/ush/get_extrn_mdl_file_dir_info.sh b/ush/get_extrn_mdl_file_dir_info.sh index 4ccbfe07d..c3f815ae7 100755 --- a/ush/get_extrn_mdl_file_dir_info.sh +++ b/ush/get_extrn_mdl_file_dir_info.sh @@ -390,7 +390,8 @@ fi elif [ "${FV3GFS_FILE_FMT}" = "grib2" ]; then - fns=( "gfs.t${hh}z.pgrb2.0p25.anl" ) # Get only 0.25 degree files for now. + # fns=( "gfs.t${hh}z.pgrb2.0p25.anl" ) # Get only 0.25 degree files for now. + fns=( "gfs.t${hh}z.pgrb2.0p25.f000" ) # Get only 0.25 degree files for now. fi ;; diff --git a/ush/set_predef_grid_params.sh b/ush/set_predef_grid_params.sh index adea8645d..875d1f337 100644 --- a/ush/set_predef_grid_params.sh +++ b/ush/set_predef_grid_params.sh @@ -112,17 +112,15 @@ predefined domain: if [ "$QUILTING" = "TRUE" ]; then WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="32" - WRTCMP_output_grid="lambert_conformal" + WRTCMP_output_grid="regional_latlon" WRTCMP_cen_lon="${LON_RGNL_CTR}" - WRTCMP_cen_lat="${LAT_RGNL_CTR}" - WRTCMP_stdlat1="${LAT_RGNL_CTR}" - WRTCMP_stdlat2="${LAT_RGNL_CTR}" - WRTCMP_nx="2937" - WRTCMP_ny="1788" - WRTCMP_lon_lwr_left="-97.83959" - WRTCMP_lat_lwr_left="-5.67929305" - WRTCMP_dx="$DELX" - WRTCMP_dy="$DELY" + WRTCMP_cen_lat="25.0" + WRTCMP_lon_lwr_left="-114.5" + WRTCMP_lat_lwr_left="-5.0" + WRTCMP_lon_upr_rght="-9.5" + WRTCMP_lat_upr_rght="55.0" + WRTCMP_dlon="0.03" + WRTCMP_dlat="0.03" fi fi @@ -165,18 +163,16 @@ predefined domain: if [ "$QUILTING" = "TRUE" ]; then WRTCMP_write_groups="1" - WRTCMP_write_tasks_per_group="12" - WRTCMP_output_grid="lambert_conformal" + WRTCMP_write_tasks_per_group="32" + WRTCMP_output_grid="regional_latlon" WRTCMP_cen_lon="${LON_RGNL_CTR}" - WRTCMP_cen_lat="${LAT_RGNL_CTR}" - WRTCMP_stdlat1="${LAT_RGNL_CTR}" - WRTCMP_stdlat2="${LAT_RGNL_CTR}" - WRTCMP_nx="658" - WRTCMP_ny="412" - WRTCMP_lon_lwr_left="-98.0" - WRTCMP_lat_lwr_left="-5.33" - WRTCMP_dx="$DELX" - WRTCMP_dy="$DELY" + WRTCMP_cen_lat="25.0" + WRTCMP_lon_lwr_left="-114.5" + WRTCMP_lat_lwr_left="-5.0" + WRTCMP_lon_upr_rght="-9.5" + WRTCMP_lat_upr_rght="55.0" + WRTCMP_dlon="0.13" + WRTCMP_dlat="0.13" fi fi @@ -219,18 +215,16 @@ predefined domain: if [ "$QUILTING" = "TRUE" ]; then WRTCMP_write_groups="1" - WRTCMP_write_tasks_per_group="10" - WRTCMP_output_grid="lambert_conformal" + WRTCMP_write_tasks_per_group="32" + WRTCMP_output_grid="regional_latlon" WRTCMP_cen_lon="${LON_RGNL_CTR}" - WRTCMP_cen_lat="${LAT_RGNL_CTR}" - WRTCMP_stdlat1="${LAT_RGNL_CTR}" - WRTCMP_stdlat2="${LAT_RGNL_CTR}" - WRTCMP_nx="337" - WRTCMP_ny="210" - WRTCMP_lon_lwr_left="-98.0" - WRTCMP_lat_lwr_left="-4.5" - 
WRTCMP_dx="$DELX" - WRTCMP_dy="$DELY" + WRTCMP_cen_lat="25.0" + WRTCMP_lon_lwr_left="-114.5" + WRTCMP_lat_lwr_left="-5.0" + WRTCMP_lon_upr_rght="-9.5" + WRTCMP_lat_upr_rght="55.0" + WRTCMP_dlon="0.25" + WRTCMP_dlat="0.25" fi fi diff --git a/ush/valid_param_vals.sh b/ush/valid_param_vals.sh index 6ce23983c..c9de85fe4 100644 --- a/ush/valid_param_vals.sh +++ b/ush/valid_param_vals.sh @@ -11,7 +11,7 @@ valid_vals_FV3GFS_FILE_FMT=("nemsio" "grib2") valid_vals_GRID_GEN_METHOD=("GFDLgrid" "JPgrid") valid_vals_PREEXISTING_DIR_METHOD=("delete" "rename" "quit") valid_vals_GTYPE=("regional") -valid_vals_WRTCMP_output_grid=("rotated_latlon" "lambert_conformal") +valid_vals_WRTCMP_output_grid=("rotated_latlon" "lambert_conformal" "regional_latlon") valid_vals_RUN_TASK_MAKE_GRID=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_RUN_TASK_MAKE_OROG=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_RUN_TASK_MAKE_SFC_CLIMO=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") From ed48974819a3467e39249ee5d48f1535eb3a6340 Mon Sep 17 00:00:00 2001 From: jeff beck Date: Mon, 16 Dec 2019 22:43:21 +0000 Subject: [PATCH 130/203] Add individual IC and LBC options for FV3GFS file formats --- scripts/exregional_make_ics.sh | 4 ++-- scripts/exregional_make_lbcs.sh | 8 ++++---- ush/config_defaults.sh | 3 ++- ush/valid_param_vals.sh | 3 ++- 4 files changed, 10 insertions(+), 8 deletions(-) diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh index 97c93f808..0fdfa13ad 100755 --- a/scripts/exregional_make_ics.sh +++ b/scripts/exregional_make_ics.sh @@ -265,7 +265,7 @@ case "${EXTRN_MDL_NAME_ICS}" in "FV3GFS") - if [ "${FV3GFS_FILE_FMT}" = "nemsio" ]; then + if [ "$FV3GFS_FILE_FMT_ICS" = "nemsio" ]; then external_model="FV3GFS" @@ -299,7 +299,7 @@ case "${EXTRN_MDL_NAME_ICS}" in tracers="\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"" fi - elif [ "${FV3GFS_FILE_FMT}" = "grib2" ]; then + elif [ "$FV3GFS_FILE_FMT_ICS" = "grib2" ]; then external_model="GFS" diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh index f985a7ccb..b1d27ed40 100755 --- a/scripts/exregional_make_lbcs.sh +++ b/scripts/exregional_make_lbcs.sh @@ -254,7 +254,7 @@ case "${EXTRN_MDL_NAME_LBCS}" in "FV3GFS") - if [ "${FV3GFS_FILE_FMT}" = "nemsio" ]; then + if [ "$FV3GFS_FILE_FMT_LBCS" = "nemsio" ]; then external_model="FV3GFS" @@ -285,7 +285,7 @@ case "${EXTRN_MDL_NAME_LBCS}" in tracers="\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"" fi - elif [ "${FV3GFS_FILE_FMT}" = "grib2" ]; then + elif [ "$FV3GFS_FILE_FMT_LBCS" = "grib2" ]; then external_model="GFS" @@ -361,9 +361,9 @@ for (( i=0; i<${num_fhrs}; i++ )); do fn_atm_nemsio="${EXTRN_MDL_FNS[$i]}" ;; "FV3GFS") - if [ "${FV3GFS_FILE_FMT}" = "nemsio" ]; then + if [ "$FV3GFS_FILE_FMT_LBCS" = "nemsio" ]; then fn_atm_nemsio="${EXTRN_MDL_FNS[$i]}" - elif [ "${FV3GFS_FILE_FMT}" = "grib2" ]; then + elif [ "$FV3GFS_FILE_FMT_LBCS" = "grib2" ]; then fn_grib2="${EXTRN_MDL_FNS[$i]}" fi ;; diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index b55af8007..0ab7a80d7 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -247,7 +247,8 @@ FCST_LEN_HRS="24" # EXTRN_MDL_NAME_ICS="FV3GFS" EXTRN_MDL_NAME_LBCS="FV3GFS" -FV3GFS_FILE_FMT="nemsio" +FV3GFS_FILE_FMT_ICS="nemsio" +FV3GFS_FILE_FMT_LBCS="nemsio" LBC_UPDATE_INTVL_HRS="6" # #----------------------------------------------------------------------- diff --git a/ush/valid_param_vals.sh 
b/ush/valid_param_vals.sh index c9de85fe4..899637e87 100644 --- a/ush/valid_param_vals.sh +++ b/ush/valid_param_vals.sh @@ -7,7 +7,8 @@ valid_vals_CCPP_PHYS_SUITE=("FV3_GFS_2017_gfdlmp" "FV3_GSD_v0" "FV3_GSD_SAR") valid_vals_RES=("48" "96" "192" "384" "768" "1152" "3072") valid_vals_EXTRN_MDL_NAME_ICS=("GSMGFS" "FV3GFS" "RAPX" "HRRRX") valid_vals_EXTRN_MDL_NAME_LBCS=("GSMGFS" "FV3GFS" "RAPX" "HRRRX") -valid_vals_FV3GFS_FILE_FMT=("nemsio" "grib2") +valid_vals_FV3GFS_FILE_FMT_ICS=("nemsio" "grib2") +valid_vals_FV3GFS_FILE_FMT_LBCS=("nemsio" "grib2") valid_vals_GRID_GEN_METHOD=("GFDLgrid" "JPgrid") valid_vals_PREEXISTING_DIR_METHOD=("delete" "rename" "quit") valid_vals_GTYPE=("regional") From cdfee35630709e0081077b5d1248ef3ec2b20750 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Mon, 16 Dec 2019 16:17:09 -0700 Subject: [PATCH 131/203] Bug fixes to change the former generic variable FV3GFS_FILE_FMT to either FV3GFS_FILE_FMT_ICS or FV3GFS_FILE_FMT_LBCS. --- scripts/exregional_make_ics.sh | 4 ++-- scripts/exregional_make_lbcs.sh | 8 ++++---- ush/get_extrn_mdl_file_dir_info.sh | 23 ++++++++++++++++------- ush/setup.sh | 24 +++++++++++++++++++----- 4 files changed, 41 insertions(+), 18 deletions(-) diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh index 5412b1f84..2be57f8d1 100755 --- a/scripts/exregional_make_ics.sh +++ b/scripts/exregional_make_ics.sh @@ -265,7 +265,7 @@ case "${EXTRN_MDL_NAME_ICS}" in "FV3GFS") - if [ "$FV3GFS_FILE_FMT_ICS" = "nemsio" ]; then + if [ "${FV3GFS_FILE_FMT_ICS}" = "nemsio" ]; then external_model="FV3GFS" @@ -299,7 +299,7 @@ case "${EXTRN_MDL_NAME_ICS}" in tracers="\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"" fi - elif [ "$FV3GFS_FILE_FMT_ICS" = "grib2" ]; then + elif [ "${FV3GFS_FILE_FMT_ICS}" = "grib2" ]; then external_model="GFS" diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh index 7bc9e97e0..be812f807 100755 --- a/scripts/exregional_make_lbcs.sh +++ b/scripts/exregional_make_lbcs.sh @@ -254,7 +254,7 @@ case "${EXTRN_MDL_NAME_LBCS}" in "FV3GFS") - if [ "$FV3GFS_FILE_FMT_LBCS" = "nemsio" ]; then + if [ "${FV3GFS_FILE_FMT_LBCS}" = "nemsio" ]; then external_model="FV3GFS" @@ -285,7 +285,7 @@ case "${EXTRN_MDL_NAME_LBCS}" in tracers="\"sphum\",\"liq_wat\",\"o3mr\",\"ice_wat\",\"rainwat\",\"snowwat\",\"graupel\"" fi - elif [ "$FV3GFS_FILE_FMT_LBCS" = "grib2" ]; then + elif [ "${FV3GFS_FILE_FMT_LBCS}" = "grib2" ]; then external_model="GFS" @@ -361,9 +361,9 @@ for (( i=0; i<${num_fhrs}; i++ )); do fn_atm_nemsio="${EXTRN_MDL_FNS[$i]}" ;; "FV3GFS") - if [ "$FV3GFS_FILE_FMT_LBCS" = "nemsio" ]; then + if [ "${FV3GFS_FILE_FMT_LBCS}" = "nemsio" ]; then fn_atm_nemsio="${EXTRN_MDL_FNS[$i]}" - elif [ "$FV3GFS_FILE_FMT_LBCS" = "grib2" ]; then + elif [ "${FV3GFS_FILE_FMT_LBCS}" = "grib2" ]; then fn_grib2="${EXTRN_MDL_FNS[$i]}" fi ;; diff --git a/ush/get_extrn_mdl_file_dir_info.sh b/ush/get_extrn_mdl_file_dir_info.sh index c3f815ae7..54f2b5846 100755 --- a/ush/get_extrn_mdl_file_dir_info.sh +++ b/ush/get_extrn_mdl_file_dir_info.sh @@ -353,6 +353,12 @@ fi # #----------------------------------------------------------------------- # + if [ "${anl_or_fcst}" = "ANL" ]; then + fv3gfs_file_fmt="${FV3GFS_FILE_FMT_ICS}" + elif [ "${anl_or_fcst}" = "FCST" ]; then + fv3gfs_file_fmt="${FV3GFS_FILE_FMT_LBCS}" + fi + case "${anl_or_fcst}" in # #----------------------------------------------------------------------- @@ -379,7 +385,7 @@ fi "FV3GFS") - if [ "${FV3GFS_FILE_FMT}" = "nemsio" ]; then + if 
[ "${fv3gfs_file_fmt}" = "nemsio" ]; then # fns=( "atm" "sfc" "nst" ) fns=( "atm" "sfc" ) @@ -388,9 +394,12 @@ fi suffix="anl.nemsio" fns=( "${fns[@]/%/$suffix}" ) - elif [ "${FV3GFS_FILE_FMT}" = "grib2" ]; then + elif [ "${fv3gfs_file_fmt}" = "grib2" ]; then - # fns=( "gfs.t${hh}z.pgrb2.0p25.anl" ) # Get only 0.25 degree files for now. +# GSK 12/16/2019: +# Turns out that the .f000 file contains certain necessary fields that +# are not in the .anl file, so switch to the former. +# fns=( "gfs.t${hh}z.pgrb2.0p25.anl" ) # Get only 0.25 degree files for now. fns=( "gfs.t${hh}z.pgrb2.0p25.f000" ) # Get only 0.25 degree files for now. fi @@ -437,13 +446,13 @@ bination of external model (extrn_mdl_name) and analysis or forecast ;; "FV3GFS") - if [ "${FV3GFS_FILE_FMT}" = "nemsio" ]; then + if [ "${fv3gfs_file_fmt}" = "nemsio" ]; then fcst_hhh=( $( printf "%03d " "${lbc_update_fhrs[@]}" ) ) prefix="gfs.t${hh}z.atmf" fns=( "${fcst_hhh[@]/#/$prefix}" ) suffix=".nemsio" fns=( "${fns[@]/%/$suffix}" ) - elif [ "${FV3GFS_FILE_FMT}" = "grib2" ]; then + elif [ "${fv3gfs_file_fmt}" = "grib2" ]; then fcst_hhh=( $( printf "%03d " "${lbc_update_fhrs[@]}" ) ) prefix="gfs.t${hh}z.pgrb2.0p25.f" fns=( "${fcst_hhh[@]/#/$prefix}" ) @@ -657,7 +666,7 @@ has not been specified for this external model: ;; "FV3GFS") - if [ "${FV3GFS_FILE_FMT}" = "nemsio" ]; then + if [ "${fv3gfs_file_fmt}" = "nemsio" ]; then if [ "${cdate_FV3SAR}" -le "2019061206" ]; then arcv_dir="/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_C/Q2FY19/prfv3rt3/${cdate_FV3SAR}" @@ -684,7 +693,7 @@ has not been specified for this external model: arcvrel_dir="./gfs.${yyyymmdd}/${hh}" fi - elif [ "${FV3GFS_FILE_FMT}" = "grib2" ]; then + elif [ "${fv3gfs_file_fmt}" = "grib2" ]; then arcv_dir="/NCEPPROD/hpssprod/runhistory/rh${yyyy}/${yyyy}${mm}/${yyyymmdd}" arcv_fns="gpfs_dell1_nco_ops_com_gfs_prod_gfs.${yyyymmdd}_${hh}.gfs_pgrb2" diff --git a/ush/setup.sh b/ush/setup.sh index 252c59636..a6f0c3390 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -1241,18 +1241,32 @@ check_var_valid_value \ # #----------------------------------------------------------------------- # -# Make sure FV3GFS_FILE_FMT is set to a valid value. +# Make sure FV3GFS_FILE_FMT_ICS is set to a valid value. # #----------------------------------------------------------------------- # if [ "${EXTRN_MDL_NAME_ICS}" = "FV3GFS" ] || \ - [ "${EXTRN_MDL_NAME_LBCS}" = "FV3GFS" ]; then err_msg="\ The file format for FV3GFS external model files specified in FV3GFS_- -FILE_FMT is not supported: - FV3GFS_FILE_FMT = \"${FV3GFS_FILE_FMT}\"" +FILE_FMT_ICS is not supported: + FV3GFS_FILE_FMT_ICS = \"${FV3GFS_FILE_FMT_ICS}\"" check_var_valid_value \ - "FV3GFS_FILE_FMT" "valid_vals_FV3GFS_FILE_FMT" "${err_msg}" + "FV3GFS_FILE_FMT_ICS" "valid_vals_FV3GFS_FILE_FMT_ICS" "${err_msg}" +fi +# +#----------------------------------------------------------------------- +# +# Make sure FV3GFS_FILE_FMT_LBCS is set to a valid value. 
+# +#----------------------------------------------------------------------- +# +if [ "${EXTRN_MDL_NAME_LBCS}" = "FV3GFS" ]; then + err_msg="\ +The file format for FV3GFS external model files specified in FV3GFS_- +FILE_FMT_LBCS is not supported: + FV3GFS_FILE_FMT_LBCS = \"${FV3GFS_FILE_FMT_LBCS}\"" + check_var_valid_value \ + "FV3GFS_FILE_FMT_LBCS" "valid_vals_FV3GFS_FILE_FMT_LBCS" "${err_msg}" fi # #----------------------------------------------------------------------- From 029b09db57e8d6e234363cac82c5fd0136a51456 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Mon, 16 Dec 2019 16:29:37 -0700 Subject: [PATCH 132/203] Bug fix. --- ush/setup.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/setup.sh b/ush/setup.sh index a6f0c3390..31213e8bf 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -1245,7 +1245,7 @@ check_var_valid_value \ # #----------------------------------------------------------------------- # -if [ "${EXTRN_MDL_NAME_ICS}" = "FV3GFS" ] || \ +if [ "${EXTRN_MDL_NAME_ICS}" = "FV3GFS" ]; then err_msg="\ The file format for FV3GFS external model files specified in FV3GFS_- FILE_FMT_ICS is not supported: From 9ef096e4192b2312b389d075b322b9448cffcd50 Mon Sep 17 00:00:00 2001 From: jeff beck Date: Mon, 16 Dec 2019 23:37:11 +0000 Subject: [PATCH 133/203] Add wrtcmp_regional_latlon template file --- ush/templates/wrtcmp_regional_latlon | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 ush/templates/wrtcmp_regional_latlon diff --git a/ush/templates/wrtcmp_regional_latlon b/ush/templates/wrtcmp_regional_latlon new file mode 100644 index 000000000..d7122feac --- /dev/null +++ b/ush/templates/wrtcmp_regional_latlon @@ -0,0 +1,23 @@ + +write_groups: +write_tasks_per_group: +num_files: 2 +filename_base: 'dyn''phy' +output_file: 'netcdf' +write_nemsioflip: .false. +write_fsyncflag: .false. + +output_grid: # Coordinate system of output grid. +cen_lon: # central longitude +cen_lat: # central latitude +lon1: # longitude of lower-left point in non-rotated coordinate system (in degrees) +lat1: # latitude of lower-left +lon2: # longitude of upper-right +lat2: # latitude of upper-right +dlon: +dlat: + +nfhout: 1 +nfhmax_hf: 60 +nfhout_hf: 1 +nsout: -1 From d2cd8ae1527d61d1ce92f784658f4d250c9fc41d Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 19 Dec 2019 13:47:56 -0700 Subject: [PATCH 134/203] Introduce new experiment/workflow variable EMC_GRID_NAME to be able to deal with EMC's grids, which are not predefined grids but are saved in their own "fix" directories. 
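The check added below follows the same pattern as the other valid-value checks in setup.sh: a valid_vals_* array defined in valid_param_vals.sh plus a call to the check_var_valid_value helper. A stand-alone sketch of that pattern, with hypothetical names and without the helper's formatted error message, looks like this:

  my_var="conus"
  valid_vals_my_var=( "ak" "conus" "conus_orig" "guam" "hi" "pr" )

  # Scan the array of valid values for the current setting of my_var.
  is_valid="FALSE"
  for val in "${valid_vals_my_var[@]}"; do
    if [ "${my_var}" = "${val}" ]; then
      is_valid="TRUE"
      break
    fi
  done

  if [ "${is_valid}" != "TRUE" ]; then
    echo "ERROR: my_var is set to an unsupported value: \"${my_var}\"" >&2
    exit 1
  fi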
--- ush/config_defaults.sh | 1 + ush/generate_FV3SAR_wflow.sh | 2 +- ush/setup.sh | 15 +++++++++++++++ ush/valid_param_vals.sh | 1 + 4 files changed, 18 insertions(+), 1 deletion(-) diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index 0ab7a80d7..1814ebfdc 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -508,6 +508,7 @@ fi #----------------------------------------------------------------------- # PREDEF_GRID_NAME="" +EMC_GRID_NAME="" # #----------------------------------------------------------------------- # diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index d4f106728..0157ca61f 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -128,7 +128,7 @@ FHR="${FHR_STR}" # CDATE_generic="@Y@m@d@H" if [ "${RUN_ENVIR}" = "nco" ]; then - CYCLE_DIR="$STMP/tmpnwprd/${PREDEF_GRID_NAME}_${CDATE_generic}" + CYCLE_DIR="$STMP/tmpnwprd/${EMC_GRID_NAME}_${CDATE_generic}" else CYCLE_DIR="$EXPTDIR/${CDATE_generic}" fi diff --git a/ush/setup.sh b/ush/setup.sh index 31213e8bf..8f894ce42 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -380,6 +380,21 @@ fi # #----------------------------------------------------------------------- # +# If running in NCO mode, a valid EMC grid must be specified. Make sure +# EMC_GRID_NAME is set to a valid value. +# +#----------------------------------------------------------------------- +# +if [ "${RUN_ENVIR}" = "nco" ]; then + err_msg="\ +The EMC grid specified in EMC_GRID_NAME is not supported: + EMC_GRID_NAME = \"${EMC_GRID_NAME}\"" + check_var_valid_value \ + "EMC_GRID_NAME" "valid_vals_EMC_GRID_NAME" "${err_msg}" +fi +# +#----------------------------------------------------------------------- +# # Make sure that PREEXISTING_DIR_METHOD is set to a valid value. # #----------------------------------------------------------------------- diff --git a/ush/valid_param_vals.sh b/ush/valid_param_vals.sh index 899637e87..a5ddd5ae3 100644 --- a/ush/valid_param_vals.sh +++ b/ush/valid_param_vals.sh @@ -2,6 +2,7 @@ valid_vals_RUN_ENVIR=("nco" "community") valid_vals_VERBOSE=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_MACHINE=("WCOSS_C" "WCOSS" "DELL" "THEIA" "HERA" "JET" "ODIN" "CHEYENNE") valid_vals_PREDEF_GRID_NAME=("GSD_HAFSV0.A3km" "GSD_HAFSV0.A13km" "GSD_HAFSV0.A25km" "GSD_RAP13km" "GSD_HRRR3km" "GSD_HRRR13km" "GSD_HRRR25km" "GSD_HRRR_AK_3km" "GSD_HRRR_AK_50km" "EMC_CONUS" "EMC_AK") +valid_vals_EMC_GRID_NAME=("ak" "conus" "conus_orig" "guam" "hi" "pr") valid_vals_USE_CCPP=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_CCPP_PHYS_SUITE=("FV3_GFS_2017_gfdlmp" "FV3_GSD_v0" "FV3_GSD_SAR") valid_vals_RES=("48" "96" "192" "384" "768" "1152" "3072") From 67d7c7ed0f2ca421cb302d55a755e963c7cb1865 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 19 Dec 2019 13:52:48 -0700 Subject: [PATCH 135/203] In generate_FV3SAR_wflow.sh: (1) At the end of the experiment/workflow generation, copy the experiment/workflow configuration file to the experiment directory to have a record of how the experiment/workflow was generated; (2) Change call to the generate_FV3SAR_wflow function such that the output goes to both the screen (stdout) and to a log file (this is done using the "tee" command). In run_experiments.sh, remove writing of output from the call to generate_FV3SAR_wflow.sh to a log file and moving of the configuration and log files to the experiment directory since these are now part of the call to generate_FV3SAR_wflow.sh script. 
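The temporary file mentioned in the message above is needed because piping a brace group to "tee" runs that group in a subshell, so any variables set inside it (the experiment directory, the return code, etc.) are gone once the pipeline finishes. A minimal sketch of the problem and of the workaround used below (hypothetical variable names):

  # The assignment below happens in a subshell, so "result" is empty afterwards:
  { result="42"; echo "doing work"; } | tee log.txt
  echo "result='${result:-}'"

  # Workaround: save the value to a temporary file inside the subshell and
  # read it back once the pipeline has completed.
  tmp_fp="./tmp.$$"
  rm -f "${tmp_fp}"
  { result="42"; echo "doing work"; echo "${result}" >> "${tmp_fp}"; } | tee log.txt
  result=$( sed "1q;d" "${tmp_fp}" )   # first line of the temporary file
  rm -f "${tmp_fp}"
  echo "result='${result}'"            # now prints 42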
--- tests/run_experiments.sh | 28 +-------- ush/generate_FV3SAR_wflow.sh | 107 ++++++++++++++++++++++++++++++++++- 2 files changed, 106 insertions(+), 29 deletions(-) diff --git a/tests/run_experiments.sh b/tests/run_experiments.sh index ed9f7850e..63abc694f 100755 --- a/tests/run_experiments.sh +++ b/tests/run_experiments.sh @@ -451,35 +451,11 @@ fi # #----------------------------------------------------------------------- # - log_fp="$ushdir/log.generate_FV3SAR_wflow.${expt_name}" - print_info_msg " -Generating experiment with name: - expt_name = \"${expt_name}\" -Log file for generation step is: - log_fp = \"${log_fp}\"" - - $ushdir/generate_FV3SAR_wflow.sh > "${log_fp}" 2>&1 || { \ + $ushdir/generate_FV3SAR_wflow.sh || \ print_err_msg_exit "\ Could not generate an experiment/workflow for the test specified by expt_name: - expt_name = \"${expt_name}\" -The log file from the generation script is in the file specified by -log_fp: - log_fp = \"${log_fp}\""; - } -# -#----------------------------------------------------------------------- -# -# Set the experiment directory to the one that the workflow will create. -# Then, in order to have a record of how the experiment and workflow -# were generated, move the configuration file and experiment/workflow -# generation log file to the experiment directory. -# -#----------------------------------------------------------------------- -# - exptdir=$( readlink -f "$homerrfs/../expt_dirs/${expt_subdir}" ) - mv_vrfy "${expt_config_fp}" "${exptdir}" - mv_vrfy "${log_fp}" "${exptdir}" + expt_name = \"${expt_name}\"" done # diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index 0157ca61f..1fda6bc87 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -701,6 +701,16 @@ fi # #----------------------------------------------------------------------- # +# To have a record of how this experiment/workflow was generated, copy +# the experiment/workflow configuration file to the experiment directo- +# ry. +# +#----------------------------------------------------------------------- +# +cp_vrfy $USHDIR/${CUSTOM_CONFIG_FN} $EXPTDIR +# +#----------------------------------------------------------------------- +# # For convenience, print out the commands that need to be issued on the # command line in order to launch the workflow and to check its status. # Also, print out the command that should be placed in the user's cron- @@ -772,15 +782,106 @@ Done. { restore_shell_opts; } > /dev/null 2>&1 } + + + + # #----------------------------------------------------------------------- # -# Call the function defined above. +# Start of the script that will call the experiment/workflow generation +# function defined above. # #----------------------------------------------------------------------- # -generate_FV3SAR_wflow - +set -u +#set -x +# +#----------------------------------------------------------------------- +# +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). +# +#----------------------------------------------------------------------- +# +scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) +scrfunc_fn=$( basename "${scrfunc_fp}" ) +scrfunc_dir=$( dirname "${scrfunc_fp}" ) +# +#----------------------------------------------------------------------- +# +# Set directories. 
+# +#----------------------------------------------------------------------- +# +ushdir="${scrfunc_dir}" +# +# Set the name of and full path to the temporary file in which we will +# save some experiment/workflow variables. The need for this temporary +# file is explained below. +# +tmp_fn="tmp" +tmp_fp="$ushdir/${tmp_fn}" +rm -f "${tmp_fp}" +# +# Set the name of and full path to the log file in which the output from +# the experiment/workflow generation function will be saved. +# +log_fn="log.generate_FV3SAR_wflow" +log_fp="$ushdir/${log_fn}" +rm -f "${log_fp}" +# +# Call the generate_FV3SAR_wflow function defined above to generate the +# experiment/workflow. Note that we pipe the output of the function +# (and possibly other commands) to the "tee" command in order to be able +# to both save it to a file and print it out to the screen (stdout). +# The piping causes the call to the function (and the other commands +# grouped with it using the curly braces, { ... }) to be executed in a +# subshell. As a result, the experiment/workflow variables that the +# function sets are not available outside of the grouping, i.e. they are +# not available at and after the call to "tee". Since some of these va- +# riables are needed after the call to "tee" below, we save them in a +# temporary file and read them in outside the subshell later below. +# +{ +generate_FV3SAR_wflow 2>&1 +retval=$? +echo "$EXPTDIR" >> "${tmp_fp}" +echo "$retval" >> "${tmp_fp}" +} | tee "${log_fp}" +# +# Read in experiment/workflow variables needed later below from the tem- +# porary file created in the subshell above containing the call to the +# generate_FV3SAR_wflow function. These variables are not directly +# available here because the call to generate_FV3SAR_wflow above takes +# place in a subshell (due to the fact that we are then piping its out- +# put to the "tee" command). Then remove the temporary file. +# +exptdir=$( sed "1q;d" "${tmp_fp}" ) +retval=$( sed "2q;d" "${tmp_fp}" ) +rm "${tmp_fp}" +# +# If the call to the generate_FV3SAR_wflow function above was success- +# ful, move the log file in which the "tee" command saved the output of +# the function to the experiment directory. +# +if [ $retval -eq 0 ]; then + mv "${log_fp}" "$exptdir" +# +# If the call to the generate_FV3SAR_wflow function above was not suc- +# cessful, print out an error message and exit with a nonzero return +# code. +# +else + printf " +Experiment/workflow generation failed. Check the log file from the ex- +periment/workflow generation script in the file specified by log_fp: + log_fp = \"${log_fp}\" +Stopping. +" + exit 1 +fi From bd5321c42279b29d81accfc46738d131b20157a4 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 19 Dec 2019 13:58:54 -0700 Subject: [PATCH 136/203] In generate_FV3SAR_wflow.sh: (1) Remove unneeded "module load ..." commands; (2) Change time stamp used in the name of the backup crontab file to something more human-readable. In config_defaults.sh: remove redundant lines of code. --- ush/config_defaults.sh | 2 -- ush/generate_FV3SAR_wflow.sh | 14 +------------- 2 files changed, 1 insertion(+), 15 deletions(-) diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index 1814ebfdc..e23272026 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -628,8 +628,6 @@ WRTCMP_dlat="" # The following are used only for the case of WRTCMP_output_grid set to # "'lambert_conformal'". 
# -WRTCMP_cen_lon="" -WRTCMP_cen_lat="" WRTCMP_stdlat1="" WRTCMP_stdlat2="" WRTCMP_nx="" diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index 1fda6bc87..0d761c4c0 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -57,18 +57,6 @@ ushdir="${scrfunc_dir}" # #----------------------------------------------------------------------- # -# Load modules. -# -#----------------------------------------------------------------------- -# -module purge -# These need to be made machine-dependent. The following work only on -# Hera. -module load intel/19.0.4.243 -module load netcdf/4.7.0 -# -#----------------------------------------------------------------------- -# # Source the setup script. Note that this in turn sources the configu- # ration file/script (config.sh) in the current directory. It also cre- # ates the run and work directories, the INPUT and RESTART subdirecto- @@ -400,7 +388,7 @@ if [ "${USE_CRON_TO_RELAUNCH}" = "TRUE" ]; then # # Make a backup copy of the user's crontab file and save it in a file. # - time_stamp=$( date "+%Y%m%d%H%M%S" ) + time_stamp=$( date "+%F_%T" ) crontab_backup_fp="$EXPTDIR/crontab.bak.${time_stamp}" print_info_msg " Copying contents of user cron table to backup file: From 37c3e495bcdcbb40cbcdbd655477a3b10a1e4ae2 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 19 Dec 2019 14:09:57 -0700 Subject: [PATCH 137/203] Minor changes for clarity. --- jobs/JREGIONAL_GET_EXTRN_FILES | 6 +++--- ush/config_defaults.sh | 1 + ush/generate_FV3SAR_wflow.sh | 5 +++-- ush/set_predef_grid_params.sh | 10 +++++----- ush/valid_param_vals.sh | 7 ++++++- 5 files changed, 18 insertions(+), 11 deletions(-) diff --git a/jobs/JREGIONAL_GET_EXTRN_FILES b/jobs/JREGIONAL_GET_EXTRN_FILES index 0db028979..26440f352 100755 --- a/jobs/JREGIONAL_GET_EXTRN_FILES +++ b/jobs/JREGIONAL_GET_EXTRN_FILES @@ -198,10 +198,10 @@ EXTRN_MDL_FILES_DIR="${CYCLE_DIR}/${EXTRN_MDL_NAME}/${ICS_OR_LBCS}" # #----------------------------------------------------------------------- # -mkdir_vrfy -p "$EXTRN_MDL_FILES_DIR" -cd_vrfy $EXTRN_MDL_FILES_DIR || print_err_msg_exit "\ +mkdir_vrfy -p "${EXTRN_MDL_FILES_DIR}" +cd_vrfy ${EXTRN_MDL_FILES_DIR} || print_err_msg_exit "\ Could not change directory to EXTRN_MDL_FILES_DIR: - EXTRN_MDL_FILES_DIR = \"$EXTRN_MDL_FILES_DIR\"" + EXTRN_MDL_FILES_DIR = \"${EXTRN_MDL_FILES_DIR}\"" # #----------------------------------------------------------------------- # diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index e23272026..d14340d59 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -611,6 +611,7 @@ PRINT_ESMF=".false." 
WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="20" + WRTCMP_output_grid="''" WRTCMP_cen_lon="" WRTCMP_cen_lat="" diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index 0d761c4c0..106950a1d 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -1,4 +1,5 @@ #!/bin/bash -l + # #----------------------------------------------------------------------- # @@ -457,7 +458,7 @@ Exactly one file must exist in directory FIXsar matching the globbing pattern glob_pattern: FIXsar = \"${FIXsar}\" glob_pattern = \"${glob_pattern}\" - num_files = \"${num_files}\"" + num_files = ${num_files}" fi fn=$( ls -1 ${glob_pattern} ) @@ -755,7 +756,7 @@ For automatic resubmission of the workflow (say every 3 minutes), the following line can be added to the user's crontab (use \"crontab -e\" to edit the cron table): -*/3 * * * * cd $EXPTDIR && $rocotorun_cmd +*/3 * * * * cd $EXPTDIR && ${rocotorun_cmd} Done. " diff --git a/ush/set_predef_grid_params.sh b/ush/set_predef_grid_params.sh index 875d1f337..bfaa4152f 100644 --- a/ush/set_predef_grid_params.sh +++ b/ush/set_predef_grid_params.sh @@ -705,7 +705,7 @@ predefined domain: # #----------------------------------------------------------------------- # -# 3-km HRRR Alaska grid. +# 50-km HRRR Alaska grid. # #----------------------------------------------------------------------- # @@ -721,14 +721,14 @@ predefined domain: " elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - LON_RGNL_CTR=-163.5 #HRRR-AK is -163.5 - LAT_RGNL_CTR=62.8 #HRRR-AK is 60.8 + LON_RGNL_CTR=-163.5 + LAT_RGNL_CTR=62.8 DELX="50000.0" DELY="50000.0" - NX_T7=74 #HRRR-AK is 1300 - NY_T7=51 #HRRR-AK is 920 + NX_T7=74 + NY_T7=51 NHW_T7=6 diff --git a/ush/valid_param_vals.sh b/ush/valid_param_vals.sh index a5ddd5ae3..18c9add6e 100644 --- a/ush/valid_param_vals.sh +++ b/ush/valid_param_vals.sh @@ -1,7 +1,12 @@ valid_vals_RUN_ENVIR=("nco" "community") valid_vals_VERBOSE=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_MACHINE=("WCOSS_C" "WCOSS" "DELL" "THEIA" "HERA" "JET" "ODIN" "CHEYENNE") -valid_vals_PREDEF_GRID_NAME=("GSD_HAFSV0.A3km" "GSD_HAFSV0.A13km" "GSD_HAFSV0.A25km" "GSD_RAP13km" "GSD_HRRR3km" "GSD_HRRR13km" "GSD_HRRR25km" "GSD_HRRR_AK_3km" "GSD_HRRR_AK_50km" "EMC_CONUS" "EMC_AK") +valid_vals_PREDEF_GRID_NAME=( \ +"EMC_CONUS" "EMC_AK" \ +"GSD_HAFSV0.A3km" "GSD_HAFSV0.A13km" "GSD_HAFSV0.A25km" \ +"GSD_HRRR_AK_3km" "GSD_HRRR_AK_50km" \ +"GSD_HRRR3km" "GSD_HRRR13km" "GSD_HRRR25km" \ +"GSD_RAP13km" ) valid_vals_EMC_GRID_NAME=("ak" "conus" "conus_orig" "guam" "hi" "pr") valid_vals_USE_CCPP=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_CCPP_PHYS_SUITE=("FV3_GFS_2017_gfdlmp" "FV3_GSD_v0" "FV3_GSD_SAR") From 02d1b6b02940b2ebb1570af96921eef0c275d7b5 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 19 Dec 2019 14:10:54 -0700 Subject: [PATCH 138/203] Change NCO sample configuration file to get the workflow to work in "nco" mode on EMC's conus grid (and to use available ICs/LBCs on hera). 
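For a rough sense of scale: the Lambert conformal write-component grid configured below has nx = 1738, ny = 974 and dx = dy = 3000 m, so the output grid spans about 5214 km east-west (1738 * 3 km) by 2922 km north-south (974 * 3 km), with the projection center at longitude -97.5 and latitude 38.5.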
--- ush/config.nco.sh | 36 +++++++++++++++++++++++++++++++----- 1 file changed, 31 insertions(+), 5 deletions(-) diff --git a/ush/config.nco.sh b/ush/config.nco.sh index f4b1ae332..ac9ea1598 100644 --- a/ush/config.nco.sh +++ b/ush/config.nco.sh @@ -11,7 +11,7 @@ VERBOSE="TRUE" RUN_ENVIR="nco" PREEXISTING_DIR_METHOD="rename" -PREDEF_GRID_NAME="GSD_HRRR25km" +EMC_GRID_NAME="conus" GRID_GEN_METHOD="JPgrid" QUILTING="TRUE" USE_CCPP="TRUE" @@ -19,9 +19,9 @@ CCPP_PHYS_SUITE="FV3_GFS_2017_gfdlmp" FCST_LEN_HRS="06" LBC_UPDATE_INTVL_HRS="6" -DATE_FIRST_CYCL="20190701" -DATE_LAST_CYCL="20190701" -CYCL_HRS=( "00" ) +DATE_FIRST_CYCL="20190901" +DATE_LAST_CYCL="20190901" +CYCL_HRS=( "18" ) EXTRN_MDL_NAME_ICS="FV3GFS" EXTRN_MDL_NAME_LBCS="FV3GFS" @@ -31,5 +31,31 @@ RUN_TASK_MAKE_OROG="TRUE" RUN_TASK_MAKE_SFC_CLIMO="TRUE" RUN="an_experiment" -COMINgfs="/scratch1/NCEPDEV/hwrf/noscrub/hafs-input/COMGFS" +COMINgfs="/scratch1/NCEPDEV/hwrf/noscrub/hafs-input/COMGFS" # Path to files from external model (FV3GFS). +STMP="/path/to/temporary/directory/stmp" # Path to temporary directory STMP. + +LAYOUT_X=50 +LAYOUT_Y=50 +BLOCKSIZE=20 + +WRTCMP_write_groups="1" +WRTCMP_write_tasks_per_group="${LAYOUT_Y}" + +WRTCMP_output_grid="lambert_conformal" +WRTCMP_PARAMS_TMPL_FN=${WRTCMP_PARAMS_TMPL_FN:-"wrtcmp_${WRTCMP_output_grid}"} + +WRTCMP_cen_lon="-97.5" +WRTCMP_cen_lat="38.5" +WRTCMP_lon_lwr_left="-122.21414225" +WRTCMP_lat_lwr_left="22.41403305" +# +# The following are used only for the case of WRTCMP_output_grid set to +# "'lambert_conformal'". +# +WRTCMP_stdlat1="${WRTCMP_cen_lat}" +WRTCMP_stdlat2="${WRTCMP_cen_lat}" +WRTCMP_nx="1738" +WRTCMP_ny="974" +WRTCMP_dx="3000.0" +WRTCMP_dy="3000.0" From 260b5ecc1b225d1af52f3b526d08049a0c000535 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 19 Dec 2019 14:17:20 -0700 Subject: [PATCH 139/203] To reduce cluttering the ush directory, for each baseline to test, after making the proper substitutions in the experiment configuration file, rename that configuration file "config.sh" instead of creating a link of this name to the config..sh file. --- tests/run_experiments.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/run_experiments.sh b/tests/run_experiments.sh index 63abc694f..40503cfa0 100755 --- a/tests/run_experiments.sh +++ b/tests/run_experiments.sh @@ -442,7 +442,8 @@ fi # and location of the configuration file to use to generate a new expe- # riment and corresponding workflow. # - ln_vrfy -fs "${expt_config_fp}" "$ushdir/config.sh" +# ln_vrfy -fs "${expt_config_fp}" "$ushdir/config.sh" + mv_vrfy -fs "${expt_config_fp}" "$ushdir/config.sh" # #----------------------------------------------------------------------- # From 820dc6296292e5a435555055258ef4d96bfdced3 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 19 Dec 2019 15:21:51 -0700 Subject: [PATCH 140/203] Bug fix. --- tests/run_experiments.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/run_experiments.sh b/tests/run_experiments.sh index 40503cfa0..95a141c6f 100755 --- a/tests/run_experiments.sh +++ b/tests/run_experiments.sh @@ -443,7 +443,7 @@ fi # riment and corresponding workflow. 
# # ln_vrfy -fs "${expt_config_fp}" "$ushdir/config.sh" - mv_vrfy -fs "${expt_config_fp}" "$ushdir/config.sh" + mv_vrfy -f "${expt_config_fp}" "$ushdir/config.sh" # #----------------------------------------------------------------------- # From d217480bdf47d975e09a9001baf2a8703f7b3734 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 19 Dec 2019 15:22:38 -0700 Subject: [PATCH 141/203] Update list of baselines to include all baselines defined in the baseline_configs subdirectory. --- tests/baselines_list.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/baselines_list.txt b/tests/baselines_list.txt index 9ad1b5bd0..db0531946 100644 --- a/tests/baselines_list.txt +++ b/tests/baselines_list.txt @@ -1,3 +1,5 @@ regional_001 regional_002 regional_003 +regional_004 +regional_005 From adf4ac8a619e53b80cef240a7be45d0f4ddcbb11 Mon Sep 17 00:00:00 2001 From: jeff beck Date: Fri, 20 Dec 2019 22:03:28 +0000 Subject: [PATCH 142/203] Change default radiation physics update frequency to 20 instead of 60 minutes --- ush/templates/input.nml.FV3_GSD_SAR | 4 ++-- ush/templates/input.nml.FV3_GSD_v0 | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/ush/templates/input.nml.FV3_GSD_SAR b/ush/templates/input.nml.FV3_GSD_SAR index 198c7e5cc..11689577b 100644 --- a/ush/templates/input.nml.FV3_GSD_SAR +++ b/ush/templates/input.nml.FV3_GSD_SAR @@ -152,8 +152,8 @@ ltaerosol = .T. lradar = .T. pdfcld = .false. - fhswr = 3600. - fhlwr = 3600. + fhswr = 1200. + fhlwr = 1200. ialb = 1 iems = 1 iaer = 111 diff --git a/ush/templates/input.nml.FV3_GSD_v0 b/ush/templates/input.nml.FV3_GSD_v0 index a07d1750d..4ba9a2c5c 100644 --- a/ush/templates/input.nml.FV3_GSD_v0 +++ b/ush/templates/input.nml.FV3_GSD_v0 @@ -152,8 +152,8 @@ ltaerosol = .T. lradar = .T. pdfcld = .false. - fhswr = 3600. - fhlwr = 3600. + fhswr = 1200. + fhlwr = 1200. ialb = 1 iems = 1 iaer = 111 From 895347c30dcfa8cf955e5b2ae49c6d0c23e878f5 Mon Sep 17 00:00:00 2001 From: jeff beck Date: Fri, 20 Dec 2019 22:04:32 +0000 Subject: [PATCH 143/203] Initialize WRTCMP FP variable in case quilting is set to false --- ush/setup.sh | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/ush/setup.sh b/ush/setup.sh index 31213e8bf..96811c641 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -1505,6 +1505,10 @@ fi # #----------------------------------------------------------------------- # + +#Initialize WRTCMP_PARAMS_TMPL_FP in case QUILTING="FALSE" +WRTCMP_PARAMS_TMPL_FP="" + if [ "$QUILTING" = "TRUE" ]; then if [ -z "${WRTCMP_PARAMS_TMPL_FN}" ]; then From ddfe629f9067d1db92acc4b6f1d29a1efa2700ab Mon Sep 17 00:00:00 2001 From: jeff beck Date: Fri, 20 Dec 2019 22:08:58 +0000 Subject: [PATCH 144/203] Reduce HAFS 3-km domain dt_atmos to 40 seconds --- ush/set_predef_grid_params.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ush/set_predef_grid_params.sh b/ush/set_predef_grid_params.sh index 875d1f337..97bf5184f 100644 --- a/ush/set_predef_grid_params.sh +++ b/ush/set_predef_grid_params.sh @@ -103,7 +103,7 @@ predefined domain: NHW_T7=6 - DT_ATMOS="100" + DT_ATMOS="40" LAYOUT_X="32" LAYOUT_Y="24" From d0b301182e6513f2496d33fdde85fb4a1d380198 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 20 Dec 2019 15:46:40 -0700 Subject: [PATCH 145/203] Bug fix - make sure variable is defined. 
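The variable needs to be defined even when quilting is off, presumably because it is referenced later regardless of the QUILTING setting, and the workflow scripts generally run with unset-variable checking enabled (note the "set -u" in the generation script above). A minimal stand-alone illustration of why the up-front definition matters:

  #!/bin/bash
  set -u                      # treat expansion of an unset variable as an error

  QUILTING="FALSE"
  WRTCMP_PARAMS_TMPL_FP=""    # pre-initialize; note no spaces around "=" in bash

  if [ "$QUILTING" = "TRUE" ]; then
    WRTCMP_PARAMS_TMPL_FP="/path/to/some/template"   # hypothetical path
  fi

  # Without the pre-initialization above, this reference would abort the
  # script under "set -u" whenever QUILTING is not "TRUE":
  echo "WRTCMP_PARAMS_TMPL_FP='${WRTCMP_PARAMS_TMPL_FP}'"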
--- ush/setup.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ush/setup.sh b/ush/setup.sh index 8f894ce42..84fe61f2a 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -1520,6 +1520,8 @@ fi # #----------------------------------------------------------------------- # +WRTCMP_PARAMS_TMPL_FP = "" + if [ "$QUILTING" = "TRUE" ]; then if [ -z "${WRTCMP_PARAMS_TMPL_FN}" ]; then From 0333f779d86b48cb1de493deb2d6f59f6ab7b5a9 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 20 Dec 2019 16:51:32 -0700 Subject: [PATCH 146/203] Modify the set_gridparams_JPgrid function to use process_args() to make it easier to understand its inputs and outputs. --- ush/set_gridparams_JPgrid.sh | 122 +++++++++++++++++++++++++++-------- ush/setup.sh | 23 ++++++- 2 files changed, 118 insertions(+), 27 deletions(-) diff --git a/ush/set_gridparams_JPgrid.sh b/ush/set_gridparams_JPgrid.sh index e45d4d82d..29eced1de 100644 --- a/ush/set_gridparams_JPgrid.sh +++ b/ush/set_gridparams_JPgrid.sh @@ -11,15 +11,24 @@ function set_gridparams_JPgrid() { # #----------------------------------------------------------------------- # +# Save current shell options (in a global array). Then set new options +# for this script/function. +# +#----------------------------------------------------------------------- +# + { save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in # which the file is located (scrfunc_dir). # #----------------------------------------------------------------------- # -local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) -local scrfunc_fn=$( basename "${scrfunc_fp}" ) -local scrfunc_dir=$( dirname "${scrfunc_fp}" ) + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) # #----------------------------------------------------------------------- # @@ -27,7 +36,48 @@ local scrfunc_dir=$( dirname "${scrfunc_fp}" ) # #----------------------------------------------------------------------- # -local func_name="${FUNCNAME[0]}" + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Specify the set of valid argument names for this script/function. +# Then process the arguments provided to this script/function (which +# should consist of a set of name-value pairs of the form arg1="value1", +# etc). +# +#----------------------------------------------------------------------- +# + valid_args=( \ +"jpgrid_lon_ctr" \ +"jpgrid_lat_ctr" \ +"jpgrid_nx" \ +"jpgrid_ny" \ +"jpgrid_nhw" \ +"jpgrid_delx" \ +"jpgrid_dely" \ +"jpgrid_alpha" \ +"jpgrid_kappa" \ +"output_varname_lon_ctr" \ +"output_varname_lat_ctr" \ +"output_varname_nx" \ +"output_varname_ny" \ +"output_varname_nhw" \ +"output_varname_del_angle_x_sg" \ +"output_varname_del_angle_y_sg" \ +"output_varname_mns_nx_pls_wide_halo" \ +"output_varname_mns_ny_pls_wide_halo" \ + ) + process_args valid_args "$@" +# +#----------------------------------------------------------------------- +# +# For debugging purposes, print out values of arguments passed to this +# script. Note that these will be printed out only if VERBOSE is set to +# TRUE. 
+# +#----------------------------------------------------------------------- +# + print_input_args valid_args # #----------------------------------------------------------------------- # @@ -36,42 +86,62 @@ local func_name="${FUNCNAME[0]}" # #----------------------------------------------------------------------- # -. ${USHDIR}/constants.sh -echo -echo "pi_geom = $pi_geom" -echo "degs_per_radian = ${degs_per_radian}" -echo "radius_Earth = ${radius_Earth}" + . ${USHDIR}/constants.sh # #----------------------------------------------------------------------- # +# Declare local variables. +# +#----------------------------------------------------------------------- # + local del_angle_x_sg \ + del_angle_y_sg \ + mns_nx_pls_wide_halo \ + mns_ny_pls_wide_halo # #----------------------------------------------------------------------- # -DEL_ANGLE_X_SG=$( bc -l <<< "($DELX/(2.0*${radius_Earth}))*${degs_per_radian}" ) -DEL_ANGLE_X_SG=$( printf "%0.10f\n" ${DEL_ANGLE_X_SG} ) - -DEL_ANGLE_Y_SG=$( bc -l <<< "($DELY/(2.0*${radius_Earth}))*${degs_per_radian}" ) -DEL_ANGLE_Y_SG=$( printf "%0.10f\n" ${DEL_ANGLE_Y_SG} ) - -echo "DEL_ANGLE_X_SG = ${DEL_ANGLE_X_SG}" -echo "DEL_ANGLE_Y_SG = ${DEL_ANGLE_Y_SG}" +# Set parameters needed as inputs to the regional_grid grid generation +# code. +# +#----------------------------------------------------------------------- +# + del_angle_x_sg=$( bc -l <<< "(${jpgrid_delx}/(2.0*${radius_Earth}))*${degs_per_radian}" ) + del_angle_x_sg=$( printf "%0.10f\n" ${del_angle_x_sg} ) -MNS_NX_T7_PLS_WIDE_HALO=$( bc -l <<< "-(${NX_T7} + 2*${NHW_T7})" ) -MNS_NX_T7_PLS_WIDE_HALO=$( printf "%.0f\n" ${MNS_NX_T7_PLS_WIDE_HALO} ) -echo "MNS_NX_T7_PLS_WIDE_HALO = ${MNS_NX_T7_PLS_WIDE_HALO}" + del_angle_y_sg=$( bc -l <<< "(${jpgrid_dely}/(2.0*${radius_Earth}))*${degs_per_radian}" ) + del_angle_y_sg=$( printf "%0.10f\n" ${del_angle_y_sg} ) -MNS_NY_T7_PLS_WIDE_HALO=$( bc -l <<< "-(${NY_T7} + 2*${NHW_T7})" ) -MNS_NY_T7_PLS_WIDE_HALO=$( printf "%.0f\n" ${MNS_NY_T7_PLS_WIDE_HALO} ) -echo "MNS_NY_T7_PLS_WIDE_HALO = ${MNS_NY_T7_PLS_WIDE_HALO}" + mns_nx_pls_wide_halo=$( bc -l <<< "-(${NX_T7} + 2*${NHW_T7})" ) + mns_nx_pls_wide_halo=$( printf "%.0f\n" ${mns_nx_pls_wide_halo} ) -} + mns_ny_pls_wide_halo=$( bc -l <<< "-(${NY_T7} + 2*${NHW_T7})" ) + mns_ny_pls_wide_halo=$( printf "%.0f\n" ${mns_ny_pls_wide_halo} ) # #----------------------------------------------------------------------- # -# Call the function defined above. +# Set output variables. # #----------------------------------------------------------------------- # -set_gridparams_JPgrid + eval ${output_varname_lon_ctr}="${jpgrid_lon_ctr}" + eval ${output_varname_lat_ctr}="${jpgrid_lat_ctr}" + eval ${output_varname_nx}="${jpgrid_nx}" + eval ${output_varname_ny}="${jpgrid_ny}" + eval ${output_varname_nhw}="${jpgrid_nhw}" + eval ${output_varname_del_angle_x_sg}="${del_angle_x_sg}" + eval ${output_varname_del_angle_y_sg}="${del_angle_y_sg}" + eval ${output_varname_mns_nx_pls_wide_halo}="${mns_nx_pls_wide_halo}" + eval ${output_varname_mns_ny_pls_wide_halo}="${mns_ny_pls_wide_halo}" +# +#----------------------------------------------------------------------- +# +# Restore the shell options saved at the beginning of this script/func- +# tion. 
+# +#----------------------------------------------------------------------- +# + { restore_shell_opts; } > /dev/null 2>&1 + +} diff --git a/ush/setup.sh b/ush/setup.sh index 84fe61f2a..794da1f00 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -50,6 +50,9 @@ cd_vrfy ${scrfunc_dir} #----------------------------------------------------------------------- # . ./source_util_funcs.sh + +#. $USHDIR/set_gridparams_JPgrid.sh +. ./set_gridparams_JPgrid.sh # #----------------------------------------------------------------------- # @@ -1410,7 +1413,25 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then # elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - . $USHDIR/set_gridparams_JPgrid.sh + set_gridparams_JPgrid \ + jpgrid_lon_ctr="${LON_RGNL_CTR}" \ + jpgrid_lat_ctr="${LAT_RGNL_CTR}" \ + jpgrid_nx="${NX_T7}" \ + jpgrid_ny="${NY_T7}" \ + jpgrid_nhw="${NHW_T7}" \ + jpgrid_delx="${DELX}" \ + jpgrid_dely="${DELY}" \ + jpgrid_alpha="${ALPHA_JPGRID_PARAM}" \ + jpgrid_kappa="${KAPPA_JPGRID_PARAM}" \ + output_varname_lon_ctr="LON_CTR" \ + output_varname_lat_ctr="LAT_CTR" \ + output_varname_nx="NX" \ + output_varname_ny="NY" \ + output_varname_nhw="NHW" \ + output_varname_del_angle_x_sg="DEL_ANGLE_X_SG" \ + output_varname_del_angle_y_sg="DEL_ANGLE_Y_SG" \ + output_varname_mns_nx_pls_wide_halo="MNS_NX_T7_PLS_WIDE_HALO" \ + output_varname_mns_ny_pls_wide_halo="MNS_NY_T7_PLS_WIDE_HALO" fi # From 0b2fd947f0d1fabc7f19992cdb1de17299d458de Mon Sep 17 00:00:00 2001 From: jeff beck Date: Fri, 27 Dec 2019 04:36:49 +0000 Subject: [PATCH 147/203] Add hash for top-of-the-trunk ufs_weather_model repo to get fix for HRRR ICs and RUC LSM --- Externals.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Externals.cfg b/Externals.cfg index 1b1009a98..5efce4a7f 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -21,7 +21,7 @@ protocol = git repo_url = https://github.com/NCAR/ufs-weather-model # Specify either a branch name or a hash but not both. 
#branch = dtc/develop -hash = bf05b97 +hash = 4c2d541b local_path = sorc/ufs_weather_model required = True From c7063b51a72853c0a61c5fe1b6e6f5219e81d452 Mon Sep 17 00:00:00 2001 From: "Jeff.Beck" Date: Tue, 31 Dec 2019 00:06:22 +0000 Subject: [PATCH 148/203] Add stanza for jet.intel build --- sorc/build_forecast.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sorc/build_forecast.sh b/sorc/build_forecast.sh index 92633d9cf..a8378a108 100755 --- a/sorc/build_forecast.sh +++ b/sorc/build_forecast.sh @@ -20,6 +20,8 @@ fi if [ $target = hera ]; then target=hera.intel ; fi +if [ $target = jet ]; then target=jet.intel ; fi + #------------------------------------ # Get from the manage_externals configuration file the relative directo- # ries in which the UFS utility codes (not including chgres_cube) and From 75db777f376b29ce18900365c6141f3d69fae4ac Mon Sep 17 00:00:00 2001 From: "Jeff.Beck" Date: Tue, 31 Dec 2019 00:31:51 +0000 Subject: [PATCH 149/203] Comment out NCEPLIBS hard wired path in machine-setup.sh for Jet --- sorc/machine-setup.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/sorc/machine-setup.sh b/sorc/machine-setup.sh index f911fe6ce..b08a8fce7 100644 --- a/sorc/machine-setup.sh +++ b/sorc/machine-setup.sh @@ -67,9 +67,9 @@ if [ "$target" = "jet" ] ; then source /apps/lmod/lmod/init/$__ms_shell fi module purge - export NCEPLIBS=/mnt/lfs3/projects/hfv3gfs/gwv/ljtjet/lib - echo NCEPLIBS HARD SET to $NCEPLIBS in `pwd`/module_setup.sh.inc - module use $NCEPLIBS/modulefiles + #export NCEPLIBS=/mnt/lfs3/projects/hfv3gfs/gwv/ljtjet/lib + #echo NCEPLIBS HARD SET to $NCEPLIBS in `pwd`/module_setup.sh.inc + #module use $NCEPLIBS/modulefiles elif [ "$target" = "theia" ] || [ "$target" = "hera" ] ; then # We are on NOAA Theia or Hera if ( ! eval module help > /dev/null 2>&1 ) ; then From d7e08428870e54d02074e95f92d5cef5bc251558 Mon Sep 17 00:00:00 2001 From: "Jeff.Beck" Date: Tue, 31 Dec 2019 00:37:50 +0000 Subject: [PATCH 150/203] Revert previous commit --- sorc/machine-setup.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/sorc/machine-setup.sh b/sorc/machine-setup.sh index b08a8fce7..f911fe6ce 100644 --- a/sorc/machine-setup.sh +++ b/sorc/machine-setup.sh @@ -67,9 +67,9 @@ if [ "$target" = "jet" ] ; then source /apps/lmod/lmod/init/$__ms_shell fi module purge - #export NCEPLIBS=/mnt/lfs3/projects/hfv3gfs/gwv/ljtjet/lib - #echo NCEPLIBS HARD SET to $NCEPLIBS in `pwd`/module_setup.sh.inc - #module use $NCEPLIBS/modulefiles + export NCEPLIBS=/mnt/lfs3/projects/hfv3gfs/gwv/ljtjet/lib + echo NCEPLIBS HARD SET to $NCEPLIBS in `pwd`/module_setup.sh.inc + module use $NCEPLIBS/modulefiles elif [ "$target" = "theia" ] || [ "$target" = "hera" ] ; then # We are on NOAA Theia or Hera if ( ! eval module help > /dev/null 2>&1 ) ; then From 9860704080feacf1c6994acaff04af1460583b53 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Mon, 6 Jan 2020 17:26:16 -0700 Subject: [PATCH 151/203] Since the concept of tiles only applies to GFDLgrid type of grids (not to JPgrid type of grids), remove the "_T7" (indicating tile 7) from the names of the variables NH0_T7, NH3_T7, and NH4_T7 to obtain NH0, NH3, and NH4, respectively. 
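The rename above touches many of the workflow scripts under scripts/ and ush/ (see the file list that follows). As a rough illustration only, and not necessarily how this change was actually produced, a bulk rename of this kind could be scripted along the following lines, assuming GNU grep and sed are available:

    # Hypothetical helper (not part of this patch): strip the "_T7" suffix
    # from the halo-width variable names wherever they appear in the
    # workflow shell scripts.
    for old in NH0_T7 NH3_T7 NH4_T7; do
      new="${old%_T7}"
      grep -rl --include="*.sh" "${old}" scripts ush | \
        xargs -r sed -i "s/\b${old}\b/${new}/g"
    done

Even with a helper like this, the resulting diff should still be reviewed by hand, since a blind substitution can also touch names or strings that were not meant to change.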
--- scripts/exregional_make_grid.sh | 24 ++++++++++++------------ scripts/exregional_make_ics.sh | 8 ++++---- scripts/exregional_make_lbcs.sh | 4 ++-- scripts/exregional_make_orog.sh | 24 ++++++++++++------------ scripts/exregional_make_sfc_climo.sh | 8 ++++---- scripts/exregional_run_fcst.sh | 16 ++++++++-------- ush/link_fix.sh | 16 ++++++++-------- ush/set_gridparams_GFDLgrid.sh | 4 ++-- ush/setup.sh | 24 ++++++++++++------------ 9 files changed, 64 insertions(+), 64 deletions(-) diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index 8f5b08d69..8a674fc42 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -272,7 +272,7 @@ mkdir_vrfy -p "$tmpdir" # --jstart_nest ${jstart_rgnl_wide_halo_T6SG} \ # --iend_nest ${iend_rgnl_wide_halo_T6SG} \ # --jend_nest ${jend_rgnl_wide_halo_T6SG} \ -# --halo ${NH3_T7} \ +# --halo ${NH3} \ # --great_circle_algorithm # # This creates the 7 grid files ${CRES}_grid.tileN.nc for N=1,...,7. @@ -287,7 +287,7 @@ mkdir_vrfy -p "$tmpdir" # # According to Rusty Benson of GFDL, the flag # -# --halo ${NH3_T7} +# --halo ${NH3} # # only checks to make sure that the nested or regional grid combined # with the specified halo lies completely within the parent tile. If @@ -451,18 +451,18 @@ cd_vrfy $tmpdir # GRID_DIR directory. # print_info_msg "$VERBOSE" " -\"Shaving\" grid file with wide halo to obtain grid file with ${NH3_T7}-cell-wide +\"Shaving\" grid file with wide halo to obtain grid file with ${NH3}-cell-wide halo..." -nml_fn="input.shave.grid.halo${NH3_T7}" -shaved_fp="${tmpdir}/${CRES}_grid.tile${TILE_RGNL}.halo${NH3_T7}.nc" +nml_fn="input.shave.grid.halo${NH3}" +shaved_fp="${tmpdir}/${CRES}_grid.tile${TILE_RGNL}.halo${NH3}.nc" printf "%s %s %s %s %s\n" \ - ${NX_T7} ${NY_T7} ${NH3_T7} \"${unshaved_fp}\" \"${shaved_fp}\" \ + ${NX_T7} ${NY_T7} ${NH3} \"${unshaved_fp}\" \"${shaved_fp}\" \ > ${nml_fn} $APRUN $EXECDIR/${shave_exec} < ${nml_fn} || \ print_err_msg_exit "\ -Call to executable \"${shave_exec}\" to generate a grid file with a ${NH3_T7}-cell-wide +Call to executable \"${shave_exec}\" to generate a grid file with a ${NH3}-cell-wide halo returned with nonzero exit code. The namelist file nml_fn is in directory tmpdir: tmpdir = \"${tmpdir}\" @@ -475,18 +475,18 @@ mv_vrfy ${shaved_fp} ${GRID_DIR} # GRID_DIR directory. # print_info_msg "$VERBOSE" " -\"Shaving\" grid file with wide halo to obtain grid file with ${NH4_T7}-cell-wide +\"Shaving\" grid file with wide halo to obtain grid file with ${NH4}-cell-wide halo..." -nml_fn="input.shave.grid.halo${NH4_T7}" -shaved_fp="${tmpdir}/${CRES}_grid.tile${TILE_RGNL}.halo${NH4_T7}.nc" +nml_fn="input.shave.grid.halo${NH4}" +shaved_fp="${tmpdir}/${CRES}_grid.tile${TILE_RGNL}.halo${NH4}.nc" printf "%s %s %s %s %s\n" \ - ${NX_T7} ${NY_T7} ${NH4_T7} \"${unshaved_fp}\" \"${shaved_fp}\" \ + ${NX_T7} ${NY_T7} ${NH4} \"${unshaved_fp}\" \"${shaved_fp}\" \ > ${nml_fn} $APRUN $EXECDIR/${shave_exec} < ${nml_fn} || \ print_err_msg_exit "\ -Call to executable \"${shave_exec}\" to generate a grid file with a ${NH4_T7}-cell-wide +Call to executable \"${shave_exec}\" to generate a grid file with a ${NH4}-cell-wide halo returned with nonzero exit code. 
The namelist file nml_fn is in directory tmpdir: tmpdir = \"${tmpdir}\" diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh index 2be57f8d1..b5e7e8057 100755 --- a/scripts/exregional_make_ics.sh +++ b/scripts/exregional_make_ics.sh @@ -462,7 +462,7 @@ hh="${EXTRN_MDL_CDATE:8:2}" fix_dir_target_grid="${FIXsar}" mosaic_file_target_grid="${FIXsar}/${CRES}_mosaic.nc" orog_dir_target_grid="${FIXsar}" - orog_files_target_grid="${CRES}_oro_data.tile${TILE_RGNL}.halo${NH4_T7}.nc" + orog_files_target_grid="${CRES}_oro_data.tile${TILE_RGNL}.halo${NH4}.nc" vcoord_file_target_grid="${FIXam}/global_hyblev.l65.txt" mosaic_file_input_grid="" orog_dir_input_grid="" @@ -479,7 +479,7 @@ hh="${EXTRN_MDL_CDATE:8:2}" convert_sfc=.true. convert_nst=.false. regional=1 - halo_bndy=${NH4_T7} + halo_bndy=${NH4} input_type="${input_type}" external_model="${external_model}" tracers_input=${tracers_input} @@ -528,10 +528,10 @@ the FV3SAR failed: #----------------------------------------------------------------------- # mv_vrfy out.atm.tile${TILE_RGNL}.nc \ - ${ICS_DIR}/gfs_data.tile${TILE_RGNL}.halo${NH0_T7}.nc + ${ICS_DIR}/gfs_data.tile${TILE_RGNL}.halo${NH0}.nc mv_vrfy out.sfc.tile${TILE_RGNL}.nc \ - ${ICS_DIR}/sfc_data.tile${TILE_RGNL}.halo${NH0_T7}.nc + ${ICS_DIR}/sfc_data.tile${TILE_RGNL}.halo${NH0}.nc mv_vrfy gfs_ctrl.nc ${ICS_DIR} diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh index be812f807..faae5d8f5 100755 --- a/scripts/exregional_make_lbcs.sh +++ b/scripts/exregional_make_lbcs.sh @@ -408,7 +408,7 @@ list file has not specified for this external model: fix_dir_target_grid="${FIXsar}" mosaic_file_target_grid="${FIXsar}/${CRES}_mosaic.nc" orog_dir_target_grid="${FIXsar}" - orog_files_target_grid="${CRES}_oro_data.tile7.halo${NH4_T7}.nc" + orog_files_target_grid="${CRES}_oro_data.tile7.halo${NH4}.nc" vcoord_file_target_grid="${FIXam}/global_hyblev.l65.txt" mosaic_file_input_grid="" orog_dir_input_grid="" @@ -425,7 +425,7 @@ list file has not specified for this external model: convert_sfc=.false. convert_nst=.false. regional=2 - halo_bndy=${NH4_T7} + halo_bndy=${NH4} input_type="${input_type}" external_model="${external_model}" tracers_input=${tracers_input} diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index c38c41d9f..ab5dea603 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -500,12 +500,12 @@ cd_vrfy ${shave_dir} # print_info_msg "$VERBOSE" " \"Shaving\" orography file with wide halo to obtain orography file with -${NH0_T7}-cell-wide halo..." +${NH0}-cell-wide halo..." -nml_fn="input.shave.orog.halo${NH0_T7}" -shaved_fp="${shave_dir}/${CRES}_oro_data.tile${TILE_RGNL}.halo${NH0_T7}.nc" +nml_fn="input.shave.orog.halo${NH0}" +shaved_fp="${shave_dir}/${CRES}_oro_data.tile${TILE_RGNL}.halo${NH0}.nc" printf "%s %s %s %s %s\n" \ - ${NX_T7} ${NY_T7} ${NH0_T7} \"${unshaved_fp}\" \"${shaved_fp}\" \ + ${NX_T7} ${NY_T7} ${NH0} \"${unshaved_fp}\" \"${shaved_fp}\" \ > ${nml_fn} $APRUN $EXECDIR/${shave_exec} < ${nml_fn} || \ @@ -524,12 +524,12 @@ mv_vrfy ${shaved_fp} ${OROG_DIR} # print_info_msg "$VERBOSE" " \"Shaving\" orography file with wide halo to obtain orography file with -${NH4_T7}-cell-wide halo..." +${NH4}-cell-wide halo..." 
-nml_fn="input.shave.orog.halo${NH4_T7}" -shaved_fp="${shave_dir}/${CRES}_oro_data.tile${TILE_RGNL}.halo${NH4_T7}.nc" +nml_fn="input.shave.orog.halo${NH4}" +shaved_fp="${shave_dir}/${CRES}_oro_data.tile${TILE_RGNL}.halo${NH4}.nc" printf "%s %s %s %s %s\n" \ - ${NX_T7} ${NY_T7} ${NH4_T7} \"${unshaved_fp}\" \"${shaved_fp}\" \ + ${NX_T7} ${NY_T7} ${NH4} \"${unshaved_fp}\" \"${shaved_fp}\" \ > ${nml_fn} $APRUN $EXECDIR/${shave_exec} < ${nml_fn} || \ @@ -573,7 +573,7 @@ Call to script to create links to orography files failed." # if [ 0 = 1 ]; then cd_vrfy ${OROG_DIR} -ln_vrfy -sf ${CRES}_oro_data.tile${TILE_RGNL}.halo${NH4_T7}.nc \ +ln_vrfy -sf ${CRES}_oro_data.tile${TILE_RGNL}.halo${NH4}.nc \ ${CRES}_oro_data.tile${TILE_RGNL}.nc fi @@ -590,13 +590,13 @@ fi if [ 0 = 1 ]; then cd_vrfy ${FIXsar} -filename="${CRES}_oro_data.tile${TILE_RGNL}.halo${NH0_T7}.nc" +filename="${CRES}_oro_data.tile${TILE_RGNL}.halo${NH0}.nc" ln_vrfy --relative -sf ${OROG_DIR}/$filename $FIXsar ln_vrfy -sf $filename oro_data.nc -filename="${CRES}_oro_data.tile${TILE_RGNL}.halo${NH4_T7}.nc" +filename="${CRES}_oro_data.tile${TILE_RGNL}.halo${NH4}.nc" ln_vrfy --relative -sf ${OROG_DIR}/$filename $FIXsar -ln_vrfy -sf $filename oro_data.tile${TILE_RGNL}.halo${NH4_T7}.nc +ln_vrfy -sf $filename oro_data.tile${TILE_RGNL}.halo${NH4}.nc ln_vrfy -sf $filename oro_data.tile${TILE_RGNL}.nc fi # diff --git a/scripts/exregional_make_sfc_climo.sh b/scripts/exregional_make_sfc_climo.sh index 498d45402..6433d1158 100755 --- a/scripts/exregional_make_sfc_climo.sh +++ b/scripts/exregional_make_sfc_climo.sh @@ -105,8 +105,8 @@ input_vegetation_type_file="${SFC_CLIMO_INPUT_DIR}/vegetation_type.igbp.0.05.nc" input_vegetation_greenness_file="${SFC_CLIMO_INPUT_DIR}/vegetation_greenness.0.144.nc" mosaic_file_mdl="${FIXsar}/${CRES}_mosaic.nc" orog_dir_mdl="${FIXsar}" -orog_files_mdl=${CRES}_oro_data.tile${TILE_RGNL}.halo${NH4_T7}.nc -halo=${NH4_T7} +orog_files_mdl=${CRES}_oro_data.tile${TILE_RGNL}.halo${NH4}.nc +halo=${NH4} maximum_snow_albedo_method="bilinear" snowfree_albedo_method="bilinear" vegetation_greenness_method="bilinear" @@ -199,7 +199,7 @@ case "$GTYPE" in for fn in *.halo.nc; do if [ -f $fn ]; then bn="${fn%.halo.nc}" - mv_vrfy $fn ${SFC_CLIMO_DIR}/${CRES}.${bn}.halo${NH4_T7}.nc + mv_vrfy $fn ${SFC_CLIMO_DIR}/${CRES}.${bn}.halo${NH4}.nc fi done # @@ -212,7 +212,7 @@ case "$GTYPE" in for fn in *.nc; do if [ -f $fn ]; then bn="${fn%.nc}" - mv_vrfy $fn ${SFC_CLIMO_DIR}/${CRES}.${bn}.halo${NH0_T7}.nc + mv_vrfy $fn ${SFC_CLIMO_DIR}/${CRES}.${bn}.halo${NH0}.nc fi done ;; diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index 2e2ddfc92..be4381d46 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -222,7 +222,7 @@ Cannot create symlink because target does not exist: fi # Symlink to halo-3 grid file with "halo4" stripped from name. -target="${FIXsar}/${CRES}_grid.tile${TILE_RGNL}.halo${NH3_T7}.nc" +target="${FIXsar}/${CRES}_grid.tile${TILE_RGNL}.halo${NH3}.nc" if [ -f "${target}" ]; then ln_vrfy -sf ${relative_or_null} $target ${CRES}_grid.tile${TILE_RGNL}.nc else @@ -243,9 +243,9 @@ fi # Note that even though the message says "Stopped", the task still con- # sumes core-hours. 
# -target="${FIXsar}/${CRES}_grid.tile${TILE_RGNL}.halo${NH4_T7}.nc" +target="${FIXsar}/${CRES}_grid.tile${TILE_RGNL}.halo${NH4}.nc" if [ -f "${target}" ]; then - ln_vrfy -sf $target ${relative_or_null} grid.tile${TILE_RGNL}.halo${NH4_T7}.nc + ln_vrfy -sf $target ${relative_or_null} grid.tile${TILE_RGNL}.halo${NH4}.nc else print_err_msg_exit "\ Cannot create symlink because target does not exist: @@ -260,7 +260,7 @@ if [ "${RUN_TASK_MAKE_OROG}" = "TRUE" ]; then fi # Symlink to halo-0 orography file with "${CRES}_" and "halo0" stripped from name. -target="${FIXsar}/${CRES}_oro_data.tile${TILE_RGNL}.halo${NH0_T7}.nc" +target="${FIXsar}/${CRES}_oro_data.tile${TILE_RGNL}.halo${NH0}.nc" if [ -f "${target}" ]; then ln_vrfy -sf ${relative_or_null} $target oro_data.nc else @@ -282,9 +282,9 @@ fi # Note that even though the message says "Stopped", the task still con- # sumes core-hours. # -target="${FIXsar}/${CRES}_oro_data.tile${TILE_RGNL}.halo${NH4_T7}.nc" +target="${FIXsar}/${CRES}_oro_data.tile${TILE_RGNL}.halo${NH4}.nc" if [ -f "${target}" ]; then - ln_vrfy -sf $target ${relative_or_null} oro_data.tile${TILE_RGNL}.halo${NH4_T7}.nc + ln_vrfy -sf $target ${relative_or_null} oro_data.tile${TILE_RGNL}.halo${NH4}.nc else print_err_msg_exit "\ Cannot create symlink because target does not exist: @@ -314,8 +314,8 @@ Creating links with names that FV3 looks for in the INPUT subdirectory of the current cycle's run directory (CYCLE_DIR)..." cd_vrfy ${CYCLE_DIR}/INPUT -ln_vrfy -sf gfs_data.tile${TILE_RGNL}.halo${NH0_T7}.nc gfs_data.nc -ln_vrfy -sf sfc_data.tile${TILE_RGNL}.halo${NH0_T7}.nc sfc_data.nc +ln_vrfy -sf gfs_data.tile${TILE_RGNL}.halo${NH0}.nc gfs_data.nc +ln_vrfy -sf sfc_data.tile${TILE_RGNL}.halo${NH0}.nc sfc_data.nc # #----------------------------------------------------------------------- # diff --git a/ush/link_fix.sh b/ush/link_fix.sh index 710b142cf..dc11b8090 100755 --- a/ush/link_fix.sh +++ b/ush/link_fix.sh @@ -100,13 +100,13 @@ Creating links in the FIXsar directory to the grid files..." # fns_grid=( \ "C*_mosaic.nc" \ -"C*_grid.tile${TILE_RGNL}.halo${NH3_T7}.nc" \ -"C*_grid.tile${TILE_RGNL}.halo${NH4_T7}.nc" \ +"C*_grid.tile${TILE_RGNL}.halo${NH3}.nc" \ +"C*_grid.tile${TILE_RGNL}.halo${NH4}.nc" \ ) fns_orog=( \ -"C*_oro_data.tile${TILE_RGNL}.halo${NH0_T7}.nc" \ -"C*_oro_data.tile${TILE_RGNL}.halo${NH4_T7}.nc" \ +"C*_oro_data.tile${TILE_RGNL}.halo${NH0}.nc" \ +"C*_oro_data.tile${TILE_RGNL}.halo${NH4}.nc" \ ) sfc_climo_fields=( \ @@ -123,8 +123,8 @@ num_fields=${#sfc_climo_fields[@]} fns_sfc_climo=() for (( i=0; i<${num_fields}; i++ )); do ii=$((2*i)) - fns_sfc_climo[$ii]="C*.${sfc_climo_fields[$i]}.tile${TILE_RGNL}.halo${NH0_T7}.nc" - fns_sfc_climo[$ii+1]="C*.${sfc_climo_fields[$i]}.tile${TILE_RGNL}.halo${NH4_T7}.nc" + fns_sfc_climo[$ii]="C*.${sfc_climo_fields[$i]}.tile${TILE_RGNL}.halo${NH0}.nc" + fns_sfc_climo[$ii+1]="C*.${sfc_climo_fields[$i]}.tile${TILE_RGNL}.halo${NH4}.nc" done # #----------------------------------------------------------------------- @@ -278,7 +278,7 @@ done # if [ "${file_group}" = "grid" ]; then # Create link to grid file needed by the make_ic and make_lbc tasks. 
- filename="${cres}_grid.tile${TILE_RGNL}.halo${NH4_T7}.nc" + filename="${cres}_grid.tile${TILE_RGNL}.halo${NH4}.nc" ln_vrfy -sf ${relative_or_null} $filename ${cres}_grid.tile${TILE_RGNL}.nc fi @@ -286,7 +286,7 @@ fi if [ "${file_group}" = "sfc_climo" ]; then tmp=( "${sfc_climo_fields[@]/#/${cres}.}" ) - fns_sfc_climo_with_halo=( "${tmp[@]/%/.tile${TILE_RGNL}.halo${NH4_T7}.nc}" ) + fns_sfc_climo_with_halo=( "${tmp[@]/%/.tile${TILE_RGNL}.halo${NH4}.nc}" ) fns_sfc_climo_no_halo=( "${tmp[@]/%/.tile${TILE_RGNL}.nc}" ) cd_vrfy $FIXsar diff --git a/ush/set_gridparams_GFDLgrid.sh b/ush/set_gridparams_GFDLgrid.sh index 6932e8c4e..d099050bf 100644 --- a/ush/set_gridparams_GFDLgrid.sh +++ b/ush/set_gridparams_GFDLgrid.sh @@ -135,7 +135,7 @@ jend_rgnl_T6SG=$(( 2*JEND_RGNL_T6 )) # In order to calculate nhw_T6G using the above expression, we must # first specify NHW_T7. Next, we specify an initial value for it by # setting it to one more than the largest-width halo that the model ac- -# tually needs, which is NH4_T7. We then calculate nhw_T6SG using the +# tually needs, which is NH4. We then calculate nhw_T6SG using the # above expression. Note that these values of NHW_T7 and nhw_T6SG will # likely not be their final values; their final values will be calcula- # ted later below after calculating the starting and ending indices of @@ -144,7 +144,7 @@ jend_rgnl_T6SG=$(( 2*JEND_RGNL_T6 )) # #----------------------------------------------------------------------- # -NHW_T7=$(( NH4_T7+1 )) +NHW_T7=$(( NH4+1 )) nhw_T6SG=$(( (2*NHW_T7 + REFINE_RATIO - 1)/REFINE_RATIO )) # #----------------------------------------------------------------------- diff --git a/ush/setup.sh b/ush/setup.sh index 794da1f00..9612226e9 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -1363,22 +1363,22 @@ fi # Note that the regional grid is referred to as "tile 7" in the code. # We will let: # -# * NH0_T7 denote the width (in units of number of cells on tile 7) of -# the 0-cell-wide halo, i.e. NH0_T7 = 0; +# * NH0 denote the width (in units of number of cells on tile 7) of +# the 0-cell-wide halo, i.e. NH0 = 0; # -# * NH3_T7 denote the width (in units of number of cells on tile 7) of -# the 3-cell-wide halo, i.e. NH3_T7 = 3; and +# * NH3 denote the width (in units of number of cells on tile 7) of +# the 3-cell-wide halo, i.e. NH3 = 3; and # -# * NH4_T7 denote the width (in units of number of cells on tile 7) of -# the 4-cell-wide halo, i.e. NH4_T7 = 4. +# * NH4 denote the width (in units of number of cells on tile 7) of +# the 4-cell-wide halo, i.e. NH4 = 4. # # We define these variables next. # #----------------------------------------------------------------------- # -NH0_T7=0 -NH3_T7=3 -NH4_T7=4 +NH0=0 +NH3=3 +NH4=4 # #----------------------------------------------------------------------- # @@ -2040,9 +2040,9 @@ RUN_POST_TN="${RUN_POST_TN}" # GTYPE="$GTYPE" TILE_RGNL="${TILE_RGNL}" -NH0_T7="${NH0_T7}" -NH3_T7="${NH3_T7}" -NH4_T7="${NH4_T7}" +NH0="${NH0}" +NH3="${NH3}" +NH4="${NH4}" EOM } || print_err_msg_exit "\ Heredoc (cat) command to append new variable definitions to variable From 6cbea10367edc6f203e09e35ac74464012eb9ff7 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Mon, 6 Jan 2020 20:39:34 -0700 Subject: [PATCH 152/203] Change the names of the user-specified grid parameters for a GFDLgrid type of grid to make it easier to distinguish them as valid only for a GFDLgrid type of grid. More info below. 
The variables are renamed as follows: LON_CTR_T6 --> GFDLgrid_LON_T6_CTR LAT_CTR_T6 --> GFDLgrid_LAT_T6_CTR STRETCH_FAC --> GFDLgrid_STRETCH_FAC REFINE_RATIO --> GFDLgrid_REFINE_RATIO ISTART_RGNL_T6 --> GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G JSTART_RGNL_T6 --> GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G IEND_RGNL_T6 --> GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G JEND_RGNL_T6 --> GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G Note that: 1) A variable named STRETCH_FAC still exists because it is also used for JPgrid type grids. The user-defined variables for a JPgrid type grid will also be renamed for clarity, but that will be in another commit. 2) We have not yet renamed the (very important) user-specified variable RES to GFDLgrid_RES. This step is complicated because this variable appears in many places and is also relevant for a JPgrid type of grid. Thus, the renaming of RES will be done in a separate commit. --- jobs/JREGIONAL_MAKE_GRID | 7 +++- scripts/exregional_make_grid.sh | 9 ++--- scripts/exregional_make_orog.sh | 2 +- scripts/exregional_run_post.sh | 4 +-- ush/config_defaults.sh | 60 ++++++++++++++++----------------- ush/generate_FV3SAR_wflow.sh | 7 ++-- ush/set_gridparams_GFDLgrid.sh | 54 ++++++++++++++--------------- ush/set_predef_grid_params.sh | 48 +++++++++++++------------- 8 files changed, 99 insertions(+), 92 deletions(-) diff --git a/jobs/JREGIONAL_MAKE_GRID b/jobs/JREGIONAL_MAKE_GRID index 42e11f28e..a6ca784c3 100755 --- a/jobs/JREGIONAL_MAKE_GRID +++ b/jobs/JREGIONAL_MAKE_GRID @@ -151,7 +151,12 @@ This is the J-job script for the task that generates grid files. #----------------------------------------------------------------------- # export gtype="$GTYPE" -export stretch_fac=${STRETCH_FAC} + +if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then + export stretch_fac=${GFDLgrid_STRETCH_FAC} +elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then + export stretch_fac=${STRETCH_FAC} +fi # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index 8a674fc42..04b633ad2 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -265,9 +265,10 @@ mkdir_vrfy -p "$tmpdir" # --grid_type gnomonic_ed \ # --nlon 2*${RES} \ # --grid_name C${RES}_grid \ -# --do_schmidt --stretch_factor ${STRETCH_FAC} \ -# --target_lon ${LON_CTR_T6} --target_lat ${LAT_CTR_T6} \ -# --nest_grid --parent_tile 6 --refine_ratio ${REFINE_RATIO} \ +# --do_schmidt --stretch_factor ${GFDLgrid_STRETCH_FAC} \ +# --target_lon ${GFDLgrid_LON_T6_CTR} +# --target_lat ${GFDLgrid_LAT_T6_CTR} \ +# --nest_grid --parent_tile 6 --refine_ratio ${GFDLgrid_REFINE_RATIO} \ # --istart_nest ${istart_rgnl_wide_halo_T6SG} \ # --jstart_nest ${jstart_rgnl_wide_halo_T6SG} \ # --iend_nest ${iend_rgnl_wide_halo_T6SG} \ @@ -313,7 +314,7 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then $USHDIR/$grid_gen_scr \ $RES \ $tmpdir \ - ${STRETCH_FAC} ${LON_CTR_T6} ${LAT_CTR_T6} ${REFINE_RATIO} \ + ${GFDLgrid_STRETCH_FAC} ${GFDLgrid_LON_T6_CTR} ${GFDLgrid_LAT_T6_CTR} ${GFDLgrid_REFINE_RATIO} \ ${istart_rgnl_wide_halo_T6SG} ${jstart_rgnl_wide_halo_T6SG} \ ${iend_rgnl_wide_halo_T6SG} ${jend_rgnl_wide_halo_T6SG} \ 1 $USHDIR || \ diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index ab5dea603..37d618252 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -331,7 +331,7 @@ Setting orography filtering parameters..." 
#if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then -# RES_eff=$( bc -l <<< "$RES*${REFINE_RATIO}" ) +# RES_eff=$( bc -l <<< "$RES*${GFDLgrid_REFINE_RATIO}" ) #elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then # grid_size_eff=$( "($DELX + $DELY)/2" ) #echo "grid_size_eff = $grid_size_eff" diff --git a/scripts/exregional_run_post.sh b/scripts/exregional_run_post.sh index 79ab2e47c..ed027cec3 100755 --- a/scripts/exregional_run_post.sh +++ b/scripts/exregional_run_post.sh @@ -253,8 +253,8 @@ else grid_name="${GRID_GEN_METHOD}" if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - stretch_str="S$( printf "%s" "${STRETCH_FAC}" | sed "s|\.|p|" )" - refine_str="RR${REFINE_RATIO}" + stretch_str="S$( printf "%s" "${GFDLgrid_STRETCH_FAC}" | sed "s|\.|p|" )" + refine_str="RR${GFDLgrid_REFINE_RATIO}" grid_name="${grid_name}_${CRES}_${stretch_str}_${refine_str}" elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then nx_T7_str="NX$( printf "%s" "${NX_T7}" | sed "s|\.|p|" )" diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index d14340d59..f237de3c6 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -326,7 +326,7 @@ GRID_GEN_METHOD="JPgrid" # sphere grid. RES must be one of "48", "96", "192", "384", "768", # "1152", and "3072". The mapping from RES to nominal resolution # (cell size) for a uniform global grid (i.e. Schmidt stretch factor -# STRETCH_FAC set to 1) is as follows: +# GFDLgrid_STRETCH_FAC set to 1) is as follows: # # C192 --> 50km # C384 --> 25km @@ -338,28 +338,28 @@ GRID_GEN_METHOD="JPgrid" # the global grid tiles varies somewhat as we move across a tile. # # * Tile 6 has arbitrarily been chosen as the tile to use to orient the -# global grid on the sphere (Earth). This is done by specifying LON_- -# CTR_T6 and LAT_CTR_T6, which are the longitude and latitude (in de- -# grees) of the center of tile 6. +# global grid on the sphere (Earth). This is done by specifying +# GFDLgrid_LON_T6_CTR and GFDLgrid_LAT_T6_CTR, which are the longitude +# and latitude (in degrees) of the center of tile 6. # -# * Setting the Schmidt stretching factor STRETCH_FAC to a value greater +# * Setting the Schmidt stretching factor GFDLgrid_STRETCH_FAC to a value greater # than 1 shrinks tile 6, while setting it to a value less than 1 (but # still greater than 0) expands tile 6. The remaining 5 tiles change # shape as necessary to maintain global coverage of the grid. # # * The cell size on a given global tile depends on both RES and -# STRETCH_FAC (since changing RES changes the number of cells in the -# tile, and changing STRETCH_FAC modifies the shape and size of the +# GFDLgrid_STRETCH_FAC (since changing RES changes the number of cells in the +# tile, and changing GFDLgrid_STRETCH_FAC modifies the shape and size of the # tile). # # * The regional grid is embedded within tile 6 (i.e. it doesn't extend # beyond the boundary of tile 6). Its exact location within tile 6 is # is determined by the starting and ending i and j indices # -# ISTART_RGNL_T6 -# JSTART_RGNL_T6 -# IEND_RGNL_T6 -# JEND_RGNL_T6 +# GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G +# GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G +# GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G +# GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G # # where i is the grid index in the x direction and j is the grid index # in the y direction. @@ -371,12 +371,12 @@ GRID_GEN_METHOD="JPgrid" # 6). Tile 6 is often referred to as the "parent" tile of the region- # al grid. 
# -# * REFINE_RATIO is the refinement ratio of the regional grid (tile 7) +# * GFDLgrid_REFINE_RATIO is the refinement ratio of the regional grid (tile 7) # with respect to the grid on its parent tile (tile 6), i.e. it is the # number of grid cells along the boundary of the regional grid that # abut one cell on tile 6. Thus, the cell size on the regional grid -# depends not only on RES and STRETCH_FAC (because the cell size on -# tile 6 depends on these two parameters) but also on REFINE_RATIO. +# depends not only on RES and GFDLgrid_STRETCH_FAC (because the cell size on +# tile 6 depends on these two parameters) but also on GFDLgrid_REFINE_RATIO. # Note that as on the tiles of the global grid, the cell size on the # regional grid is not uniform but varies as we move across the grid. # @@ -387,29 +387,29 @@ GRID_GEN_METHOD="JPgrid" # on each tile of the global grid. Must be "48", "96", "192", "384", # "768", "1152", or "3072" # -# LON_CTR_T6: +# GFDLgrid_LON_T6_CTR: # Longitude of the center of tile 6 (in degrees). # -# LAT_CTR_T6: +# GFDLgrid_LAT_T6_CTR: # Latitude of the center of tile 6 (in degrees). # -# STRETCH_FAC: +# GFDLgrid_STRETCH_FAC: # Stretching factor used in the Schmidt transformation applied to the # cubed sphere grid. # -# ISTART_RGNL_T6: +# GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G: # i-index on tile 6 at which the regional grid (tile 7) starts. # -# IEND_RGNL_T6: +# GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G: # i-index on tile 6 at which the regional grid (tile 7) ends. # -# JSTART_RGNL_T6: +# GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G: # j-index on tile 6 at which the regional grid (tile 7) starts. # -# JEND_RGNL_T6: +# GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G: # j-index on tile 6 at which the regional grid (tile 7) ends. # -# REFINE_RATIO: +# GFDLgrid_REFINE_RATIO: # Cell refinement ratio for the regional grid, i.e. the number of cells # in either the x or y direction on the regional grid (tile 7) that abut # one cell on its parent tile (tile 6). @@ -419,14 +419,14 @@ GRID_GEN_METHOD="JPgrid" if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then RES="384" - LON_CTR_T6=-97.5 - LAT_CTR_T6=35.5 - STRETCH_FAC=1.5 - ISTART_RGNL_T6=10 - IEND_RGNL_T6=374 - JSTART_RGNL_T6=10 - JEND_RGNL_T6=374 - REFINE_RATIO=3 + GFDLgrid_LON_T6_CTR=-97.5 + GFDLgrid_LAT_T6_CTR=35.5 + GFDLgrid_STRETCH_FAC=1.5 + GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G=10 + GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=374 + GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G=10 + GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=374 + GFDLgrid_REFINE_RATIO=3 # #----------------------------------------------------------------------- # diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index 106950a1d..835f356f6 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -649,13 +649,14 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then # are not necessarily the same [although assuming there is only one re- # gional domain within tile 6, i.e. assuming there is no tile 8, 9, etc, # there is no reason not to center tile 7 with respect to tile 6]. 
- set_file_param "${FV3_NML_FP}" "target_lon" "${LON_CTR_T6}" - set_file_param "${FV3_NML_FP}" "target_lat" "${LAT_CTR_T6}" + set_file_param "${FV3_NML_FP}" "target_lon" "${GFDLgrid_LON_T6_CTR}" + set_file_param "${FV3_NML_FP}" "target_lat" "${GFDLgrid_LAT_T6_CTR}" + set_file_param "${FV3_NML_FP}" "stretch_fac" "${GFDLgrid_STRETCH_FAC}" elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then set_file_param "${FV3_NML_FP}" "target_lon" "${LON_RGNL_CTR}" set_file_param "${FV3_NML_FP}" "target_lat" "${LAT_RGNL_CTR}" + set_file_param "${FV3_NML_FP}" "stretch_fac" "${STRETCH_FAC}" fi -set_file_param "${FV3_NML_FP}" "stretch_fac" "${STRETCH_FAC}" set_file_param "${FV3_NML_FP}" "bc_update_interval" "${LBC_UPDATE_INTVL_HRS}" # # For the GSD_v0 and the GSD_SAR physics suites, set the parameter lsoil diff --git a/ush/set_gridparams_GFDLgrid.sh b/ush/set_gridparams_GFDLgrid.sh index d099050bf..dea252041 100644 --- a/ush/set_gridparams_GFDLgrid.sh +++ b/ush/set_gridparams_GFDLgrid.sh @@ -50,18 +50,18 @@ local func_name="${FUNCNAME[0]}" # puts the index limits of the regional grid on the tile 6 grid, not its # supergrid. These are given by # -# ISTART_RGNL_T6 -# IEND_RGNL_T6 -# JSTART_RGNL_T6 -# JEND_RGNL_T6 +# GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G +# GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G +# GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G +# GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G # # We can obtain the former from the latter by recalling that the super- # grid has twice the resolution of the original grid. Thus, # -# istart_rgnl_T6SG = 2*ISTART_RGNL_T6 - 1 -# iend_rgnl_T6SG = 2*IEND_RGNL_T6 -# jstart_rgnl_T6SG = 2*JSTART_RGNL_T6 - 1 -# jend_rgnl_T6SG = 2*JEND_RGNL_T6 +# istart_rgnl_T6SG = 2*GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G - 1 +# iend_rgnl_T6SG = 2*GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G +# jstart_rgnl_T6SG = 2*GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G - 1 +# jend_rgnl_T6SG = 2*GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G # # These are obtained assuming that grid cells on tile 6 must either be # completely within the regional domain or completely outside of it, @@ -73,10 +73,10 @@ local func_name="${FUNCNAME[0]}" # #----------------------------------------------------------------------- # -istart_rgnl_T6SG=$(( 2*ISTART_RGNL_T6 - 1 )) -iend_rgnl_T6SG=$(( 2*IEND_RGNL_T6 )) -jstart_rgnl_T6SG=$(( 2*JSTART_RGNL_T6 - 1 )) -jend_rgnl_T6SG=$(( 2*JEND_RGNL_T6 )) +istart_rgnl_T6SG=$(( 2*GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G - 1 )) +iend_rgnl_T6SG=$(( 2*GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G )) +jstart_rgnl_T6SG=$(( 2*GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G - 1 )) +jend_rgnl_T6SG=$(( 2*GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G )) # #----------------------------------------------------------------------- # @@ -106,12 +106,12 @@ jend_rgnl_T6SG=$(( 2*JEND_RGNL_T6 )) # cells on the tile 6 grid -- which we denote by nhw_T6 -- we simply di- # vide NHW_T7 by the refinement ratio, i.e. # -# nhw_T6 = NHW_T7/REFINE_RATIO +# nhw_T6 = NHW_T7/GFDLgrid_REFINE_RATIO # # The corresponding halo width on the tile 6 supergrid is then given by # # nhw_T6SG = 2*nhw_T6 -# = 2*NHW_T7/REFINE_RATIO +# = 2*NHW_T7/GFDLgrid_REFINE_RATIO # # Note that nhw_T6SG must be an integer, but the expression for it de- # rived above may not yield an integer. To ensure that the halo has a @@ -119,7 +119,7 @@ jend_rgnl_T6SG=$(( 2*JEND_RGNL_T6 )) # result of the expression above for nhw_T6SG, i.e. we redefine nhw_T6SG # to be # -# nhw_T6SG = ceil(2*NHW_T7/REFINE_RATIO) +# nhw_T6SG = ceil(2*NHW_T7/GFDLgrid_REFINE_RATIO) # # where ceil(...) is the ceiling function, i.e. 
it rounds its floating # point argument up to the next larger integer. Since in bash division @@ -128,7 +128,7 @@ jend_rgnl_T6SG=$(( 2*JEND_RGNL_T6 )) # adding the denominator (of the argument of ceil(...) above) minus 1 to # the original numerator, i.e. by redefining nhw_T6SG to be # -# nhw_T6SG = (2*NHW_T7 + REFINE_RATIO - 1)/REFINE_RATIO +# nhw_T6SG = (2*NHW_T7 + GFDLgrid_REFINE_RATIO - 1)/GFDLgrid_REFINE_RATIO # # This trick works when dividing one positive integer by another. # @@ -145,7 +145,7 @@ jend_rgnl_T6SG=$(( 2*JEND_RGNL_T6 )) #----------------------------------------------------------------------- # NHW_T7=$(( NH4+1 )) -nhw_T6SG=$(( (2*NHW_T7 + REFINE_RATIO - 1)/REFINE_RATIO )) +nhw_T6SG=$(( (2*NHW_T7 + GFDLgrid_REFINE_RATIO - 1)/GFDLgrid_REFINE_RATIO )) # #----------------------------------------------------------------------- # @@ -231,7 +231,7 @@ tile 7 grid are: nhw_T6SG=$(( istart_rgnl_T6SG - istart_rgnl_wide_halo_T6SG )) nhw_T6=$(( nhw_T6SG/2 )) -NHW_T7=$(( nhw_T6*REFINE_RATIO )) +NHW_T7=$(( nhw_T6*GFDLgrid_REFINE_RATIO )) print_info_msg "$VERBOSE" " Values of the halo width on the tile 6 supergrid and on the tile 7 grid @@ -250,11 +250,11 @@ AFTER adjustments are: # nx_rgnl_T6SG=$(( iend_rgnl_T6SG - istart_rgnl_T6SG + 1 )) nx_rgnl_T6=$(( nx_rgnl_T6SG/2 )) -NX_T7=$(( nx_rgnl_T6*REFINE_RATIO )) +NX_T7=$(( nx_rgnl_T6*GFDLgrid_REFINE_RATIO )) ny_rgnl_T6SG=$(( jend_rgnl_T6SG - jstart_rgnl_T6SG + 1 )) ny_rgnl_T6=$(( ny_rgnl_T6SG/2 )) -NY_T7=$(( ny_rgnl_T6*REFINE_RATIO )) +NY_T7=$(( ny_rgnl_T6*GFDLgrid_REFINE_RATIO )) # # The following are set only for informational purposes. # @@ -284,10 +284,10 @@ are: The starting and ending i and j indices on the tile 6 grid used to generate this regional grid are: - ISTART_RGNL_T6 = ${ISTART_RGNL_T6} - IEND_RGNL_T6 = ${IEND_RGNL_T6} - JSTART_RGNL_T6 = ${JSTART_RGNL_T6} - JEND_RGNL_T6 = ${JEND_RGNL_T6} + GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G = ${GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G} + GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G = ${GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G} + GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G = ${GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G} + GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G = ${GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G} The corresponding starting and ending i and j indices on the tile 6 supergrid are: @@ -298,7 +298,7 @@ supergrid are: The refinement ratio (ratio of the number of cells in tile 7 that abut a single cell in tile 6) is: - REFINE_RATIO = ${REFINE_RATIO} + GFDLgrid_REFINE_RATIO = ${GFDLgrid_REFINE_RATIO} The number of cells in the two horizontal directions on the regional tile's/domain's (tile 7) grid WITHOUT A HALO are: @@ -321,11 +321,11 @@ task layout, i.e. 
LAYOUT_X and LAYOUT_Y): # nx_wide_halo_T6SG=$(( iend_rgnl_wide_halo_T6SG - istart_rgnl_wide_halo_T6SG + 1 )) nx_wide_halo_T6=$(( nx_wide_halo_T6SG/2 )) -nx_wide_halo_T7=$(( nx_wide_halo_T6*REFINE_RATIO )) +nx_wide_halo_T7=$(( nx_wide_halo_T6*GFDLgrid_REFINE_RATIO )) ny_wide_halo_T6SG=$(( jend_rgnl_wide_halo_T6SG - jstart_rgnl_wide_halo_T6SG + 1 )) ny_wide_halo_T6=$(( ny_wide_halo_T6SG/2 )) -ny_wide_halo_T7=$(( ny_wide_halo_T6*REFINE_RATIO )) +ny_wide_halo_T7=$(( ny_wide_halo_T6*GFDLgrid_REFINE_RATIO )) print_info_msg "$VERBOSE" " nx_wide_halo_T7 = ${NX_T7} \ diff --git a/ush/set_predef_grid_params.sh b/ush/set_predef_grid_params.sh index bfaa4152f..2aa7f970e 100644 --- a/ush/set_predef_grid_params.sh +++ b/ush/set_predef_grid_params.sh @@ -240,23 +240,23 @@ predefined domain: if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - LON_CTR_T6=-106.0 - LAT_CTR_T6=54.0 - STRETCH_FAC=0.63 + GFDLgrid_LON_T6_CTR=-106.0 + GFDLgrid_LAT_T6_CTR=54.0 + GFDLgrid_STRETCH_FAC=0.63 RES="384" - REFINE_RATIO=3 + GFDLgrid_REFINE_RATIO=3 num_margin_cells_T6_left=10 - ISTART_RGNL_T6=$(( num_margin_cells_T6_left + 1 )) + GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_left + 1 )) num_margin_cells_T6_right=10 - IEND_RGNL_T6=$(( RES - num_margin_cells_T6_right )) + GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=$(( RES - num_margin_cells_T6_right )) num_margin_cells_T6_bottom=10 - JSTART_RGNL_T6=$(( num_margin_cells_T6_bottom + 1 )) + GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_bottom + 1 )) num_margin_cells_T6_top=10 - JEND_RGNL_T6=$(( RES - num_margin_cells_T6_top )) + GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=$(( RES - num_margin_cells_T6_top )) DT_ATMOS="90" @@ -520,23 +520,23 @@ predefined domain: #dlat=0.02 - LON_CTR_T6=-97.5 - LAT_CTR_T6=38.5 - STRETCH_FAC=1.5 + GFDLgrid_LON_T6_CTR=-97.5 + GFDLgrid_LAT_T6_CTR=38.5 + GFDLgrid_STRETCH_FAC=1.5 RES="768" - REFINE_RATIO=3 + GFDLgrid_REFINE_RATIO=3 num_margin_cells_T6_left=61 - ISTART_RGNL_T6=$(( num_margin_cells_T6_left + 1 )) + GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_left + 1 )) num_margin_cells_T6_right=67 - IEND_RGNL_T6=$(( RES - num_margin_cells_T6_right )) + GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=$(( RES - num_margin_cells_T6_right )) num_margin_cells_T6_bottom=165 - JSTART_RGNL_T6=$(( num_margin_cells_T6_bottom + 1 )) + GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_bottom + 1 )) num_margin_cells_T6_top=171 - JEND_RGNL_T6=$(( RES - num_margin_cells_T6_top )) + GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=$(( RES - num_margin_cells_T6_top )) DT_ATMOS="18" @@ -612,23 +612,23 @@ predefined domain: #dlon=0.03 #dlat=0.03 - LON_CTR_T6=-153.0 - LAT_CTR_T6=61.0 - STRETCH_FAC=1.0 # ??? + GFDLgrid_LON_T6_CTR=-153.0 + GFDLgrid_LAT_T6_CTR=61.0 + GFDLgrid_STRETCH_FAC=1.0 # ??? RES="768" - REFINE_RATIO=3 # ??? + GFDLgrid_REFINE_RATIO=3 # ??? 
num_margin_cells_T6_left=61 - ISTART_RGNL_T6=$(( num_margin_cells_T6_left + 1 )) + GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_left + 1 )) num_margin_cells_T6_right=67 - IEND_RGNL_T6=$(( RES - num_margin_cells_T6_right )) + GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=$(( RES - num_margin_cells_T6_right )) num_margin_cells_T6_bottom=165 - JSTART_RGNL_T6=$(( num_margin_cells_T6_bottom + 1 )) + GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_bottom + 1 )) num_margin_cells_T6_top=171 - JEND_RGNL_T6=$(( RES - num_margin_cells_T6_top )) + GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=$(( RES - num_margin_cells_T6_top )) DT_ATMOS="18" From b905a83d624755e05249e986eaf277c72790f0b0 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Mon, 6 Jan 2020 21:44:22 -0700 Subject: [PATCH 153/203] Change the names of the user-specified grid parameters for a JPgrid type of grid to make it easier to distinguish them as valid only for a JPgrid type of grid. More info below. The user-specified variables are renamed as follows: LON_RGNL_CTR --> JPgrid_LON_CTR LAT_RGNL_CTR --> JPgrid_LAT_CTR NX_T7 --> JPgrid_NX NY_T7 --> JPgrid_NY NHW_T7 --> JPgrid_WIDE_HALO_WIDTH DELX --> JPgrid_DELX DELY --> JPgrid_DELY ALPHA_JPGRID_PARAM --> JPgrid_ALPHA_PARAM KAPPA_JPGRID_PARAM --> JPgrid_KAPPA_PARAM Note that: 1) NX_T7, NY_T7, and NHW_T7 were not changed everywhere, since in certain places they're used for both JPgrid and GFDLgrid types. At such places, they will have to be renamed to something else (in a future commit). 2) In addition to the above user-specified variables, the following secondary variables were renamed for clarity (given that there is no concept of tiles for a JPgrid type grid): MNS_NX_T7_PLS_WIDE_HALO --> NEG_NX_OF_DOM_WITH_WIDE_HALO MNS_NY_T7_PLS_WIDE_HALO --> NEG_NY_OF_DOM_WITH_WIDE_HALO --- scripts/exregional_make_grid.sh | 12 +- scripts/exregional_make_orog.sh | 2 +- scripts/exregional_run_post.sh | 8 +- ush/config_defaults.sh | 36 +++--- ush/generate_FV3SAR_wflow.sh | 4 +- ush/set_gridparams_JPgrid.sh | 4 +- ush/set_predef_grid_params.sh | 214 ++++++++++++++++---------------- ush/setup.sh | 26 ++-- 8 files changed, 153 insertions(+), 153 deletions(-) diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index 04b633ad2..57231c23f 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -354,14 +354,14 @@ Setting parameters in file: # # Set parameters. 
# - set_file_param "${RGNL_GRID_NML_FP}" "plon" "${LON_RGNL_CTR}" - set_file_param "${RGNL_GRID_NML_FP}" "plat" "${LAT_RGNL_CTR}" + set_file_param "${RGNL_GRID_NML_FP}" "plon" "${JPgrid_LON_CTR}" + set_file_param "${RGNL_GRID_NML_FP}" "plat" "${JPgrid_LAT_CTR}" set_file_param "${RGNL_GRID_NML_FP}" "delx" "${DEL_ANGLE_X_SG}" set_file_param "${RGNL_GRID_NML_FP}" "dely" "${DEL_ANGLE_Y_SG}" - set_file_param "${RGNL_GRID_NML_FP}" "lx" "${MNS_NX_T7_PLS_WIDE_HALO}" - set_file_param "${RGNL_GRID_NML_FP}" "ly" "${MNS_NY_T7_PLS_WIDE_HALO}" - set_file_param "${RGNL_GRID_NML_FP}" "a" "${ALPHA_JPGRID_PARAM}" - set_file_param "${RGNL_GRID_NML_FP}" "k" "${KAPPA_JPGRID_PARAM}" + set_file_param "${RGNL_GRID_NML_FP}" "lx" "${NEG_NX_OF_DOM_WITH_WIDE_HALO}" + set_file_param "${RGNL_GRID_NML_FP}" "ly" "${NEG_NY_OF_DOM_WITH_WIDE_HALO}" + set_file_param "${RGNL_GRID_NML_FP}" "a" "${JPgrid_ALPHA_PARAM}" + set_file_param "${RGNL_GRID_NML_FP}" "k" "${JPgrid_KAPPA_PARAM}" cd_vrfy $tmpdir diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index 37d618252..11fdbded8 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -333,7 +333,7 @@ Setting orography filtering parameters..." #if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then # RES_eff=$( bc -l <<< "$RES*${GFDLgrid_REFINE_RATIO}" ) #elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then -# grid_size_eff=$( "($DELX + $DELY)/2" ) +# grid_size_eff=$( "(${JPgrid_DELX} + ${JPgrid_DELY})/2" ) #echo "grid_size_eff = $grid_size_eff" # RES_eff=$( bc -l <<< "2*$pi_geom*$radius_Earth/(4*$grid_size_eff)" ) #fi diff --git a/scripts/exregional_run_post.sh b/scripts/exregional_run_post.sh index ed027cec3..a75f45492 100755 --- a/scripts/exregional_run_post.sh +++ b/scripts/exregional_run_post.sh @@ -257,11 +257,11 @@ else refine_str="RR${GFDLgrid_REFINE_RATIO}" grid_name="${grid_name}_${CRES}_${stretch_str}_${refine_str}" elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - nx_T7_str="NX$( printf "%s" "${NX_T7}" | sed "s|\.|p|" )" - ny_T7_str="NY$( printf "%s" "${NY_T7}" | sed "s|\.|p|" )" - alpha_JPgrid_param_str="A"$( printf "%s" "${ALPHA_JPGRID_PARAM}" | \ + nx_T7_str="NX$( printf "%s" "${JPgrid_NX}" | sed "s|\.|p|" )" + ny_T7_str="NY$( printf "%s" "${JPgrid_NY}" | sed "s|\.|p|" )" + alpha_JPgrid_param_str="A"$( printf "%s" "${JPgrid_ALPHA_PARAM}" | \ sed "s|-|mns|" | sed "s|\.|p|" ) - kappa_JPgrid_param_str="K"$( printf "%s" "${KAPPA_JPGRID_PARAM}" | \ + kappa_JPgrid_param_str="K"$( printf "%s" "${JPgrid_KAPPA_PARAM}" | \ sed "s|-|mns|" | sed "s|\.|p|" ) grid_name="${grid_name}_${nx_T7_str}_${ny_T7_str}_${alpha_JPgrid_param_str}_${kappa_JPgrid_param_str}" fi diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index f237de3c6..772107e21 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -434,37 +434,37 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then # without a global parent (i.e. for GRID_GEN_METHOD set to "JPgrid"). # These are: # -# LON_RGNL_CTR: +# JPgrid_LON_CTR: # The longitude of the center of the grid (in degrees). # -# LAT_RGNL_CTR: +# JPgrid_LAT_CTR: # The latitude of the center of the grid (in degrees). # -# DELX: +# JPgrid_DELX: # The cell size in the zonal direction of the regional grid (in meters). # -# DELY: +# JPgrid_DELY: # The cell size in the meridional direction of the regional grid (in me- # ters). # -# NX_T7: +# JPgrid_NX: # The number of cells in the zonal direction on the regional grid. # -# NY_T7: +# JPgrid_NY: # The number of cells in the meridional direction on the regional grid. 
# -# NHW_T7: +# JPgrid_WIDE_HALO_WIDTH: # The width of the wide halo (in units of number of cells) to create # around the regional grid. A grid with a halo of this width will first # be created and stored in a grid specification file. This grid will # then be shaved down to obtain grids with 3-cell-wide and 4-cell-wide # halos. # -# ALPHA_JPGRID_PARAM: +# JPgrid_ALPHA_PARAM: # The alpha parameter used in the Jim Purser map projection/grid gene- # ration method. # -# KAPPA_JPGRID_PARAM: +# JPgrid_KAPPA_PARAM: # The kappa parameter used in the Jim Purser map projection/grid gene- # ration method. # @@ -472,15 +472,15 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then # elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - LON_RGNL_CTR=-97.5 - LAT_RGNL_CTR=35.5 - DELX="3000.0" - DELY="3000.0" - NX_T7=1000 - NY_T7=1000 - NHW_T7=6 - ALPHA_JPGRID_PARAM="0.21423" - KAPPA_JPGRID_PARAM="-0.23209" + JPgrid_LON_CTR=-97.5 + JPgrid_LAT_CTR=35.5 + JPgrid_DELX="3000.0" + JPgrid_DELY="3000.0" + JPgrid_NX=1000 + JPgrid_NY=1000 + JPgrid_WIDE_HALO_WIDTH=6 + JPgrid_ALPHA_PARAM="0.21423" + JPgrid_KAPPA_PARAM="-0.23209" fi # diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index 835f356f6..bcc7e465e 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -653,8 +653,8 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then set_file_param "${FV3_NML_FP}" "target_lat" "${GFDLgrid_LAT_T6_CTR}" set_file_param "${FV3_NML_FP}" "stretch_fac" "${GFDLgrid_STRETCH_FAC}" elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - set_file_param "${FV3_NML_FP}" "target_lon" "${LON_RGNL_CTR}" - set_file_param "${FV3_NML_FP}" "target_lat" "${LAT_RGNL_CTR}" + set_file_param "${FV3_NML_FP}" "target_lon" "${JPgrid_LON_CTR}" + set_file_param "${FV3_NML_FP}" "target_lat" "${JPgrid_LAT_CTR}" set_file_param "${FV3_NML_FP}" "stretch_fac" "${STRETCH_FAC}" fi set_file_param "${FV3_NML_FP}" "bc_update_interval" "${LBC_UPDATE_INTVL_HRS}" diff --git a/ush/set_gridparams_JPgrid.sh b/ush/set_gridparams_JPgrid.sh index 29eced1de..73654e507 100644 --- a/ush/set_gridparams_JPgrid.sh +++ b/ush/set_gridparams_JPgrid.sh @@ -112,10 +112,10 @@ function set_gridparams_JPgrid() { del_angle_y_sg=$( bc -l <<< "(${jpgrid_dely}/(2.0*${radius_Earth}))*${degs_per_radian}" ) del_angle_y_sg=$( printf "%0.10f\n" ${del_angle_y_sg} ) - mns_nx_pls_wide_halo=$( bc -l <<< "-(${NX_T7} + 2*${NHW_T7})" ) + mns_nx_pls_wide_halo=$( bc -l <<< "-(${JPgrid_NX} + 2*${JPgrid_WIDE_HALO_WIDTH})" ) mns_nx_pls_wide_halo=$( printf "%.0f\n" ${mns_nx_pls_wide_halo} ) - mns_ny_pls_wide_halo=$( bc -l <<< "-(${NY_T7} + 2*${NHW_T7})" ) + mns_ny_pls_wide_halo=$( bc -l <<< "-(${JPgrid_NY} + 2*${JPgrid_WIDE_HALO_WIDTH})" ) mns_ny_pls_wide_halo=$( printf "%.0f\n" ${mns_ny_pls_wide_halo} ) # #----------------------------------------------------------------------- diff --git a/ush/set_predef_grid_params.sh b/ush/set_predef_grid_params.sh index 2aa7f970e..e84a9ae80 100644 --- a/ush/set_predef_grid_params.sh +++ b/ush/set_predef_grid_params.sh @@ -92,16 +92,16 @@ predefined domain: elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - LON_RGNL_CTR=-62.0 - LAT_RGNL_CTR=22.0 + JPgrid_LON_CTR=-62.0 + JPgrid_LAT_CTR=22.0 - DELX="3000.0" - DELY="3000.0" + JPgrid_DELX="3000.0" + JPgrid_DELY="3000.0" - NX_T7=2880 - NY_T7=1920 + JPgrid_NX=2880 + JPgrid_NY=1920 - NHW_T7=6 + JPgrid_WIDE_HALO_WIDTH=6 DT_ATMOS="100" @@ -113,7 +113,7 @@ predefined domain: WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="32" WRTCMP_output_grid="regional_latlon" - WRTCMP_cen_lon="${LON_RGNL_CTR}" + 
WRTCMP_cen_lon="${JPgrid_LON_CTR}" WRTCMP_cen_lat="25.0" WRTCMP_lon_lwr_left="-114.5" WRTCMP_lat_lwr_left="-5.0" @@ -144,16 +144,16 @@ predefined domain: elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - LON_RGNL_CTR=-62.0 - LAT_RGNL_CTR=22.0 + JPgrid_LON_CTR=-62.0 + JPgrid_LAT_CTR=22.0 - DELX="13000.0" - DELY="13000.0" + JPgrid_DELX="13000.0" + JPgrid_DELY="13000.0" - NX_T7=665 - NY_T7=444 + JPgrid_NX=665 + JPgrid_NY=444 - NHW_T7=6 + JPgrid_WIDE_HALO_WIDTH=6 DT_ATMOS="180" @@ -165,7 +165,7 @@ predefined domain: WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="32" WRTCMP_output_grid="regional_latlon" - WRTCMP_cen_lon="${LON_RGNL_CTR}" + WRTCMP_cen_lon="${JPgrid_LON_CTR}" WRTCMP_cen_lat="25.0" WRTCMP_lon_lwr_left="-114.5" WRTCMP_lat_lwr_left="-5.0" @@ -196,16 +196,16 @@ predefined domain: elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - LON_RGNL_CTR=-62.0 - LAT_RGNL_CTR=22.0 + JPgrid_LON_CTR=-62.0 + JPgrid_LAT_CTR=22.0 - DELX="25000.0" - DELY="25000.0" + JPgrid_DELX="25000.0" + JPgrid_DELY="25000.0" - NX_T7=345 - NY_T7=230 + JPgrid_NX=345 + JPgrid_NY=230 - NHW_T7=6 + JPgrid_WIDE_HALO_WIDTH=6 DT_ATMOS="300" @@ -217,7 +217,7 @@ predefined domain: WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="32" WRTCMP_output_grid="regional_latlon" - WRTCMP_cen_lon="${LON_RGNL_CTR}" + WRTCMP_cen_lon="${JPgrid_LON_CTR}" WRTCMP_cen_lat="25.0" WRTCMP_lon_lwr_left="-114.5" WRTCMP_lat_lwr_left="-5.0" @@ -268,8 +268,8 @@ predefined domain: WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="14" WRTCMP_output_grid="rotated_latlon" - WRTCMP_cen_lon="${LON_RGNL_CTR}" - WRTCMP_cen_lat="${LAT_RGNL_CTR}" + WRTCMP_cen_lon="${GFDLgrid_LON_T6_CTR}" + WRTCMP_cen_lat="${GFDLgrid_LAT_T6_CTR}" WRTCMP_lon_lwr_left="-57.9926" WRTCMP_lat_lwr_left="-50.74344" WRTCMP_lon_upr_rght="57.99249" @@ -280,16 +280,16 @@ predefined domain: elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - LON_RGNL_CTR=-106.0 - LAT_RGNL_CTR=54.0 + JPgrid_LON_CTR=-106.0 + JPgrid_LAT_CTR=54.0 - DELX="13000.0" - DELY="13000.0" + JPgrid_DELX="13000.0" + JPgrid_DELY="13000.0" - NX_T7=960 - NY_T7=960 + JPgrid_NX=960 + JPgrid_NY=960 - NHW_T7=6 + JPgrid_WIDE_HALO_WIDTH=6 DT_ATMOS="90" @@ -301,8 +301,8 @@ predefined domain: WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="16" WRTCMP_output_grid="rotated_latlon" - WRTCMP_cen_lon="${LON_RGNL_CTR}" - WRTCMP_cen_lat="${LAT_RGNL_CTR}" + WRTCMP_cen_lon="${JPgrid_LON_CTR}" + WRTCMP_cen_lat="${JPgrid_LAT_CTR}" WRTCMP_lon_lwr_left="-57.9926" WRTCMP_lat_lwr_left="-50.74344" WRTCMP_lon_upr_rght="57.99249" @@ -332,16 +332,16 @@ predefined domain: elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - LON_RGNL_CTR=-97.5 - LAT_RGNL_CTR=38.5 + JPgrid_LON_CTR=-97.5 + JPgrid_LAT_CTR=38.5 - DELX="25000.0" - DELY="25000.0" + JPgrid_DELX="25000.0" + JPgrid_DELY="25000.0" - NX_T7=200 - NY_T7=110 + JPgrid_NX=200 + JPgrid_NY=110 - NHW_T7=6 + JPgrid_WIDE_HALO_WIDTH=6 DT_ATMOS="300" @@ -353,16 +353,16 @@ predefined domain: WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="2" WRTCMP_output_grid="lambert_conformal" - WRTCMP_cen_lon="${LON_RGNL_CTR}" - WRTCMP_cen_lat="${LAT_RGNL_CTR}" - WRTCMP_stdlat1="${LAT_RGNL_CTR}" - WRTCMP_stdlat2="${LAT_RGNL_CTR}" + WRTCMP_cen_lon="${JPgrid_LON_CTR}" + WRTCMP_cen_lat="${JPgrid_LAT_CTR}" + WRTCMP_stdlat1="${JPgrid_LAT_CTR}" + WRTCMP_stdlat2="${JPgrid_LAT_CTR}" WRTCMP_nx="191" WRTCMP_ny="97" WRTCMP_lon_lwr_left="-120.72962370" WRTCMP_lat_lwr_left="25.11648583" - WRTCMP_dx="$DELX" - WRTCMP_dy="$DELY" + WRTCMP_dx="${JPgrid_DELX}" + WRTCMP_dy="${JPgrid_DELY}" fi fi @@ -386,16 +386,16 @@ 
predefined domain: elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - LON_RGNL_CTR=-97.5 - LAT_RGNL_CTR=38.5 + JPgrid_LON_CTR=-97.5 + JPgrid_LAT_CTR=38.5 - DELX="13000.0" - DELY="13000.0" + JPgrid_DELX="13000.0" + JPgrid_DELY="13000.0" - NX_T7=390 - NY_T7=210 + JPgrid_NX=390 + JPgrid_NY=210 - NHW_T7=6 + JPgrid_WIDE_HALO_WIDTH=6 DT_ATMOS="180" @@ -407,16 +407,16 @@ predefined domain: WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="10" WRTCMP_output_grid="lambert_conformal" - WRTCMP_cen_lon="${LON_RGNL_CTR}" - WRTCMP_cen_lat="${LAT_RGNL_CTR}" - WRTCMP_stdlat1="${LAT_RGNL_CTR}" - WRTCMP_stdlat2="${LAT_RGNL_CTR}" + WRTCMP_cen_lon="${JPgrid_LON_CTR}" + WRTCMP_cen_lat="${JPgrid_LAT_CTR}" + WRTCMP_stdlat1="${JPgrid_LAT_CTR}" + WRTCMP_stdlat2="${JPgrid_LAT_CTR}" WRTCMP_nx="383" WRTCMP_ny="195" WRTCMP_lon_lwr_left="-121.58647982" WRTCMP_lat_lwr_left="24.36006861" - WRTCMP_dx="$DELX" - WRTCMP_dy="$DELY" + WRTCMP_dx="${JPgrid_DELX}" + WRTCMP_dy="${JPgrid_DELY}" fi fi @@ -440,16 +440,16 @@ predefined domain: elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - LON_RGNL_CTR=-97.5 - LAT_RGNL_CTR=38.5 + JPgrid_LON_CTR=-97.5 + JPgrid_LAT_CTR=38.5 - DELX="3000.0" - DELY="3000.0" + JPgrid_DELX="3000.0" + JPgrid_DELY="3000.0" - NX_T7=1734 - NY_T7=1008 + JPgrid_NX=1734 + JPgrid_NY=1008 - NHW_T7=6 + JPgrid_WIDE_HALO_WIDTH=6 DT_ATMOS="40" @@ -461,16 +461,16 @@ predefined domain: WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="24" WRTCMP_output_grid="lambert_conformal" - WRTCMP_cen_lon="${LON_RGNL_CTR}" - WRTCMP_cen_lat="${LAT_RGNL_CTR}" - WRTCMP_stdlat1="${LAT_RGNL_CTR}" - WRTCMP_stdlat2="${LAT_RGNL_CTR}" + WRTCMP_cen_lon="${JPgrid_LON_CTR}" + WRTCMP_cen_lat="${JPgrid_LAT_CTR}" + WRTCMP_stdlat1="${JPgrid_LAT_CTR}" + WRTCMP_stdlat2="${JPgrid_LAT_CTR}" WRTCMP_nx="1738" WRTCMP_ny="974" WRTCMP_lon_lwr_left="-122.21414225" WRTCMP_lat_lwr_left="22.41403305" - WRTCMP_dx="$DELX" - WRTCMP_dy="$DELY" + WRTCMP_dx="${JPgrid_DELX}" + WRTCMP_dy="${JPgrid_DELY}" fi fi @@ -548,16 +548,16 @@ predefined domain: elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - LON_RGNL_CTR=-97.5 - LAT_RGNL_CTR=38.5 + JPgrid_LON_CTR=-97.5 + JPgrid_LAT_CTR=38.5 - DELX="3000.0" - DELY="3000.0" + JPgrid_DELX="3000.0" + JPgrid_DELY="3000.0" - NX_T7=960 - NY_T7=960 + JPgrid_NX=960 + JPgrid_NY=960 - NHW_T7=6 + JPgrid_WIDE_HALO_WIDTH=6 fi ;; @@ -667,16 +667,16 @@ predefined domain: " elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - LON_RGNL_CTR=-163.5 #HRRR-AK is -163.5 - LAT_RGNL_CTR=62.8 #HRRR-AK is 60.8 + JPgrid_LON_CTR=-163.5 #HRRR-AK is -163.5 + JPgrid_LAT_CTR=62.8 #HRRR-AK is 60.8 - DELX="3000.0" - DELY="3000.0" + JPgrid_DELX="3000.0" + JPgrid_DELY="3000.0" - NX_T7=1230 #HRRR-AK is 1300 - NY_T7=850 #HRRR-AK is 920 + JPgrid_NX=1230 #HRRR-AK is 1300 + JPgrid_NY=850 #HRRR-AK is 920 - NHW_T7=6 + JPgrid_WIDE_HALO_WIDTH=6 DT_ATMOS="50" @@ -688,16 +688,16 @@ predefined domain: WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="2" WRTCMP_output_grid="lambert_conformal" - WRTCMP_cen_lon="${LON_RGNL_CTR}" - WRTCMP_cen_lat="${LAT_RGNL_CTR}" - WRTCMP_stdlat1="${LAT_RGNL_CTR}" - WRTCMP_stdlat2="${LAT_RGNL_CTR}" + WRTCMP_cen_lon="${JPgrid_LON_CTR}" + WRTCMP_cen_lat="${JPgrid_LAT_CTR}" + WRTCMP_stdlat1="${JPgrid_LAT_CTR}" + WRTCMP_stdlat2="${JPgrid_LAT_CTR}" WRTCMP_nx="1169" WRTCMP_ny="762" WRTCMP_lon_lwr_left="172.0" WRTCMP_lat_lwr_left="49.0" - WRTCMP_dx="$DELX" - WRTCMP_dy="$DELY" + WRTCMP_dx="${JPgrid_DELX}" + WRTCMP_dy="${JPgrid_DELY}" fi fi @@ -721,16 +721,16 @@ predefined domain: " elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - 
LON_RGNL_CTR=-163.5 - LAT_RGNL_CTR=62.8 + JPgrid_LON_CTR=-163.5 + JPgrid_LAT_CTR=62.8 - DELX="50000.0" - DELY="50000.0" + JPgrid_DELX="50000.0" + JPgrid_DELY="50000.0" - NX_T7=74 - NY_T7=51 + JPgrid_NX=74 + JPgrid_NY=51 - NHW_T7=6 + JPgrid_WIDE_HALO_WIDTH=6 DT_ATMOS="600" @@ -742,16 +742,16 @@ predefined domain: WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="1" WRTCMP_output_grid="lambert_conformal" - WRTCMP_cen_lon="${LON_RGNL_CTR}" - WRTCMP_cen_lat="${LAT_RGNL_CTR}" - WRTCMP_stdlat1="${LAT_RGNL_CTR}" - WRTCMP_stdlat2="${LAT_RGNL_CTR}" + WRTCMP_cen_lon="${JPgrid_LON_CTR}" + WRTCMP_cen_lat="${JPgrid_LAT_CTR}" + WRTCMP_stdlat1="${JPgrid_LAT_CTR}" + WRTCMP_stdlat2="${JPgrid_LAT_CTR}" WRTCMP_nx="70" WRTCMP_ny="45" WRTCMP_lon_lwr_left="172.0" WRTCMP_lat_lwr_left="49.0" - WRTCMP_dx="$DELX" - WRTCMP_dy="$DELY" + WRTCMP_dx="${JPgrid_DELX}" + WRTCMP_dy="${JPgrid_DELY}" fi fi diff --git a/ush/setup.sh b/ush/setup.sh index 9612226e9..3b9603a03 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -1414,15 +1414,15 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then set_gridparams_JPgrid \ - jpgrid_lon_ctr="${LON_RGNL_CTR}" \ - jpgrid_lat_ctr="${LAT_RGNL_CTR}" \ - jpgrid_nx="${NX_T7}" \ - jpgrid_ny="${NY_T7}" \ - jpgrid_nhw="${NHW_T7}" \ - jpgrid_delx="${DELX}" \ - jpgrid_dely="${DELY}" \ - jpgrid_alpha="${ALPHA_JPGRID_PARAM}" \ - jpgrid_kappa="${KAPPA_JPGRID_PARAM}" \ + jpgrid_lon_ctr="${JPgrid_LON_CTR}" \ + jpgrid_lat_ctr="${JPgrid_LAT_CTR}" \ + jpgrid_nx="${JPgrid_NX}" \ + jpgrid_ny="${JPgrid_NY}" \ + jpgrid_nhw="${JPgrid_WIDE_HALO_WIDTH}" \ + jpgrid_delx="${JPgrid_DELX}" \ + jpgrid_dely="${JPgrid_DELY}" \ + jpgrid_alpha="${JPgrid_ALPHA_PARAM}" \ + jpgrid_kappa="${JPgrid_KAPPA_PARAM}" \ output_varname_lon_ctr="LON_CTR" \ output_varname_lat_ctr="LAT_CTR" \ output_varname_nx="NX" \ @@ -1430,8 +1430,8 @@ elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then output_varname_nhw="NHW" \ output_varname_del_angle_x_sg="DEL_ANGLE_X_SG" \ output_varname_del_angle_y_sg="DEL_ANGLE_Y_SG" \ - output_varname_mns_nx_pls_wide_halo="MNS_NX_T7_PLS_WIDE_HALO" \ - output_varname_mns_ny_pls_wide_halo="MNS_NY_T7_PLS_WIDE_HALO" + output_varname_mns_nx_pls_wide_halo="NEG_NX_OF_DOM_WITH_WIDE_HALO" \ + output_varname_mns_ny_pls_wide_halo="NEG_NY_OF_DOM_WITH_WIDE_HALO" fi # @@ -2099,8 +2099,8 @@ elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then # DEL_ANGLE_X_SG="${DEL_ANGLE_X_SG}" DEL_ANGLE_Y_SG="${DEL_ANGLE_Y_SG}" -MNS_NX_T7_PLS_WIDE_HALO="${MNS_NX_T7_PLS_WIDE_HALO}" -MNS_NY_T7_PLS_WIDE_HALO="${MNS_NY_T7_PLS_WIDE_HALO}" +NEG_NX_OF_DOM_WITH_WIDE_HALO="${NEG_NX_OF_DOM_WITH_WIDE_HALO}" +NEG_NY_OF_DOM_WITH_WIDE_HALO="${NEG_NY_OF_DOM_WITH_WIDE_HALO}" # # The following variables must be set in order to be able to use the # same scripting machinary for the case of GRID_GEN_METHOD set to "JP- From 6d1de154daa3b5c73c2ed1ecbea9f5cce58336f7 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 7 Jan 2020 01:44:37 -0700 Subject: [PATCH 154/203] Improvements to the way JPgrid parameters are calculated: (1) Cleanup function set_gridparams_JPgrid in set_gridparams_JPgrid.sh (details below); (2) In ush/setup.sh, make the corresponding necessary changes in the call to this function; (3) In ush/setup.sh, remove code that sets STRETCH_FAC for a JPgrid (that is now done in the set_gridparams_JPgrid function; see below). 
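For reference, the core geometric computation this function performs is the conversion of the regional grid spacing (in meters) to the angular cell size (in degrees) on the supergrid. A minimal standalone sketch of that conversion follows; the 3000-m spacing matches the default JPgrid_DELX, while the values of radius_Earth and degs_per_radian are assumed approximations (the actual constants are defined elsewhere in the workflow):

  #!/bin/bash
  # Minimal sketch only. Convert a nominal grid spacing (m) to the angular
  # cell size (deg) on the supergrid, using the same bc formula as
  # set_gridparams_JPgrid. The two constants below are assumed values.
  delx="3000.0"                    # nominal cell size in meters (default JPgrid_DELX)
  radius_Earth="6371000.0"         # assumed mean Earth radius (m)
  degs_per_radian="57.2957795131"  # assumed value of 180/pi
  del_angle_x_sg=$( bc -l <<< "(${delx}/(2.0*${radius_Earth}))*${degs_per_radian}" )
  del_angle_x_sg=$( printf "%0.10f\n" ${del_angle_x_sg} )
  echo "del_angle_x_sg = ${del_angle_x_sg} degrees"   # approx 0.0135 degrees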
Changes to set_gridparams_JPgrid.sh: ----------------------------------- * Remove "jpgrid_" from the start of names of local variables since we already know (from its name) that this function deals with JPgrid-related parameters. * For clarity, rename the input argument "nhw" to "halo_width". * For clarity, rename the local variables mns_nx_pls_wide_halo and mns_ny_pls_wide_halo to neg_nx_of_dom_with_wide_halo and neg_ny_of_dom_with_wide_halo. * Use the function arguments nx, ny, and halo_width (which are local variables) instead of the global variables JPgrid_NX, JPgrid_NY, and JPgrid_wide_halo_width, respectively (since that is the point of passing these variables in as arguments). * Make the array valid_args a local variable (ideally, that's what it should be everywhere it is used in functions). * Create and set an output variable name for a "stretch factor". A JPgrid doesn't have a stretch factor, but a value for a stretch factor needs to be set because it is needed in the orography generation step. --- ush/set_gridparams_JPgrid.sh | 85 ++++++++++++++++++++++++------------ ush/setup.sh | 51 ++++++---------------- 2 files changed, 69 insertions(+), 67 deletions(-) diff --git a/ush/set_gridparams_JPgrid.sh b/ush/set_gridparams_JPgrid.sh index 73654e507..c8c52d425 100644 --- a/ush/set_gridparams_JPgrid.sh +++ b/ush/set_gridparams_JPgrid.sh @@ -47,25 +47,26 @@ function set_gridparams_JPgrid() { # #----------------------------------------------------------------------- # - valid_args=( \ -"jpgrid_lon_ctr" \ -"jpgrid_lat_ctr" \ -"jpgrid_nx" \ -"jpgrid_ny" \ -"jpgrid_nhw" \ -"jpgrid_delx" \ -"jpgrid_dely" \ -"jpgrid_alpha" \ -"jpgrid_kappa" \ + local valid_args=( \ +"lon_ctr" \ +"lat_ctr" \ +"nx" \ +"ny" \ +"halo_width" \ +"delx" \ +"dely" \ +"alpha" \ +"kappa" \ "output_varname_lon_ctr" \ "output_varname_lat_ctr" \ "output_varname_nx" \ "output_varname_ny" \ -"output_varname_nhw" \ +"output_varname_halo_width" \ +"output_varname_stretch_factor" \ "output_varname_del_angle_x_sg" \ "output_varname_del_angle_y_sg" \ -"output_varname_mns_nx_pls_wide_halo" \ -"output_varname_mns_ny_pls_wide_halo" \ +"output_varname_neg_nx_of_dom_with_wide_halo" \ +"output_varname_neg_ny_of_dom_with_wide_halo" \ ) process_args valid_args "$@" # @@ -94,10 +95,35 @@ function set_gridparams_JPgrid() { # #----------------------------------------------------------------------- # - local del_angle_x_sg \ + local stretch_factor \ + del_angle_x_sg \ del_angle_y_sg \ - mns_nx_pls_wide_halo \ - mns_ny_pls_wide_halo + neg_nx_of_dom_with_wide_halo \ + neg_ny_of_dom_with_wide_halo +# +#----------------------------------------------------------------------- +# +# For a JPgrid-type grid, the orography filtering is performed by pass- +# ing to the orography filtering the parameters for an "equivalent" glo- +# bal uniform cubed-sphere grid. These are the parameters that a global +# uniform cubed-sphere grid needs to have in order to have a nominal +# grid cell size equal to that of the (average) cell size on the region- +# al grid. These globally-equivalent parameters include a resolution +# (in units of number of cells in each of the two horizontal directions) +# and a stretch factor. The equivalent resolution is calculated in the +# script that generates the grid, and the stretch factor needs to be set +# to 1 because we are considering an equivalent globally UNIFORM grid. 
+# However, it turns out that with a non-symmetric regional grid (one in +# which nx is not equal to ny), setting stretch_factor to 1 fails be- +# cause the orography filtering program is designed for a global cubed- +# sphere grid and thus assumes that nx and ny for a given tile are equal +# when stretch_factor is exactly equal to 1. <-- Why is this? Seems like symmetry btwn x and y should still hold when the stretch factor is not equal to 1. +# It turns out that the program will work if we set stretch_factor to a +# value that is not exactly 1. This is what we do below. +# +#----------------------------------------------------------------------- +# + stretch_factor="0.999" # Check whether the orography program has been fixed so that we can set this to 1... # #----------------------------------------------------------------------- # @@ -106,17 +132,17 @@ function set_gridparams_JPgrid() { # #----------------------------------------------------------------------- # - del_angle_x_sg=$( bc -l <<< "(${jpgrid_delx}/(2.0*${radius_Earth}))*${degs_per_radian}" ) + del_angle_x_sg=$( bc -l <<< "(${delx}/(2.0*${radius_Earth}))*${degs_per_radian}" ) del_angle_x_sg=$( printf "%0.10f\n" ${del_angle_x_sg} ) - del_angle_y_sg=$( bc -l <<< "(${jpgrid_dely}/(2.0*${radius_Earth}))*${degs_per_radian}" ) + del_angle_y_sg=$( bc -l <<< "(${dely}/(2.0*${radius_Earth}))*${degs_per_radian}" ) del_angle_y_sg=$( printf "%0.10f\n" ${del_angle_y_sg} ) - mns_nx_pls_wide_halo=$( bc -l <<< "-(${JPgrid_NX} + 2*${JPgrid_WIDE_HALO_WIDTH})" ) - mns_nx_pls_wide_halo=$( printf "%.0f\n" ${mns_nx_pls_wide_halo} ) + neg_nx_of_dom_with_wide_halo=$( bc -l <<< "-($nx + 2*${halo_width})" ) + neg_nx_of_dom_with_wide_halo=$( printf "%.0f\n" ${neg_nx_of_dom_with_wide_halo} ) - mns_ny_pls_wide_halo=$( bc -l <<< "-(${JPgrid_NY} + 2*${JPgrid_WIDE_HALO_WIDTH})" ) - mns_ny_pls_wide_halo=$( printf "%.0f\n" ${mns_ny_pls_wide_halo} ) + neg_ny_of_dom_with_wide_halo=$( bc -l <<< "-($ny + 2*${halo_width})" ) + neg_ny_of_dom_with_wide_halo=$( printf "%.0f\n" ${neg_ny_of_dom_with_wide_halo} ) # #----------------------------------------------------------------------- # @@ -124,15 +150,16 @@ function set_gridparams_JPgrid() { # #----------------------------------------------------------------------- # - eval ${output_varname_lon_ctr}="${jpgrid_lon_ctr}" - eval ${output_varname_lat_ctr}="${jpgrid_lat_ctr}" - eval ${output_varname_nx}="${jpgrid_nx}" - eval ${output_varname_ny}="${jpgrid_ny}" - eval ${output_varname_nhw}="${jpgrid_nhw}" + eval ${output_varname_lon_ctr}="${lon_ctr}" + eval ${output_varname_lat_ctr}="${lat_ctr}" + eval ${output_varname_nx}="${nx}" + eval ${output_varname_ny}="${ny}" + eval ${output_varname_halo_width}="${halo_width}" + eval ${output_varname_stretch_factor}="${stretch_factor}" eval ${output_varname_del_angle_x_sg}="${del_angle_x_sg}" eval ${output_varname_del_angle_y_sg}="${del_angle_y_sg}" - eval ${output_varname_mns_nx_pls_wide_halo}="${mns_nx_pls_wide_halo}" - eval ${output_varname_mns_ny_pls_wide_halo}="${mns_ny_pls_wide_halo}" + eval ${output_varname_neg_nx_of_dom_with_wide_halo}="${neg_nx_of_dom_with_wide_halo}" + eval ${output_varname_neg_ny_of_dom_with_wide_halo}="${neg_ny_of_dom_with_wide_halo}" # #----------------------------------------------------------------------- # diff --git a/ush/setup.sh b/ush/setup.sh index 3b9603a03..18fcc193b 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -830,32 +830,6 @@ fi # #----------------------------------------------------------------------- # -# For a grid with GRID_GEN_METHOD 
set to "JPgrid", the orography filter- -# is performed by passing to the orography filtering the parameters for -# an "equivalent" global uniform cubed-sphere grid. These are the para- -# meters that a global uniform cubed-sphere grid needs to have in order -# to have a nominal grid cell size equal to that of the (average) cell -# size on the regional grid. These globally-equivalent parameters in- -# clude a resolution (in units of number of cells in each of the two ho- -# rizontal directions) and a stretch factor. The equivalent resolution -# is calculated in the script that generates the grid and orography, and -# the stretch factor needs to be set to 1 because we are considering an -# equivalent globally UNIFORM grid. However, it turns out that with a -# non-symmetric regional grid (one in which nx is not equal to ny), set- -# ting STRETCH_FAC to 1 fails because the orography filtering program is -# designed for a global cubed-sphere grid and thus assumes that nx and -# ny for a given tile are equal when STRETCH_FAC is exactly equal to 1. <-- Why is this? Seems like symmetry btwn x and y should still hold when STRETCH_FAC is not equal to 1. -# It turns out that the program will work if we set STRETCH_FAC that is -# not exactly 1. This is what we do below. -# -#----------------------------------------------------------------------- -# -if [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - STRETCH_FAC="0.999" -fi -# -#----------------------------------------------------------------------- -# # If the base directory (EXPT_BASEDIR) in which the experiment subdirec- # tory (EXPT_SUBDIR) will be located is not set or is set to an empty # string, set it to a default location that is at the same level as the @@ -1414,24 +1388,25 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then set_gridparams_JPgrid \ - jpgrid_lon_ctr="${JPgrid_LON_CTR}" \ - jpgrid_lat_ctr="${JPgrid_LAT_CTR}" \ - jpgrid_nx="${JPgrid_NX}" \ - jpgrid_ny="${JPgrid_NY}" \ - jpgrid_nhw="${JPgrid_WIDE_HALO_WIDTH}" \ - jpgrid_delx="${JPgrid_DELX}" \ - jpgrid_dely="${JPgrid_DELY}" \ - jpgrid_alpha="${JPgrid_ALPHA_PARAM}" \ - jpgrid_kappa="${JPgrid_KAPPA_PARAM}" \ + lon_ctr="${JPgrid_LON_CTR}" \ + lat_ctr="${JPgrid_LAT_CTR}" \ + nx="${JPgrid_NX}" \ + ny="${JPgrid_NY}" \ + halo_width="${JPgrid_WIDE_HALO_WIDTH}" \ + delx="${JPgrid_DELX}" \ + dely="${JPgrid_DELY}" \ + alpha="${JPgrid_ALPHA_PARAM}" \ + kappa="${JPgrid_KAPPA_PARAM}" \ output_varname_lon_ctr="LON_CTR" \ output_varname_lat_ctr="LAT_CTR" \ output_varname_nx="NX" \ output_varname_ny="NY" \ - output_varname_nhw="NHW" \ + output_varname_halo_width="NHW" \ + output_varname_stretch_factor="STRETCH_FAC" \ output_varname_del_angle_x_sg="DEL_ANGLE_X_SG" \ output_varname_del_angle_y_sg="DEL_ANGLE_Y_SG" \ - output_varname_mns_nx_pls_wide_halo="NEG_NX_OF_DOM_WITH_WIDE_HALO" \ - output_varname_mns_ny_pls_wide_halo="NEG_NY_OF_DOM_WITH_WIDE_HALO" + output_varname_neg_nx_of_dom_with_wide_halo="NEG_NX_OF_DOM_WITH_WIDE_HALO" \ + output_varname_neg_ny_of_dom_with_wide_halo="NEG_NY_OF_DOM_WITH_WIDE_HALO" fi # From 63dc6045bbca1badd8dd1cd849826d63ae97fb96 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 7 Jan 2020 01:54:48 -0700 Subject: [PATCH 155/203] Bug fixes in variable names. 
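The renames matter because the workflow reads only the WRTCMP_-prefixed names (for example, the write-component checks in ush/setup.sh), so the unprefixed assignments never took effect. A minimal sketch of the failure mode, with illustrative values:

  #!/bin/bash
  # Sketch only: assigning the unprefixed name does not touch the
  # WRTCMP_-prefixed variable that is actually read downstream.
  WRTCMP_write_tasks_per_group="2"   # value the workflow reads
  write_tasks_per_group="72"         # old, unprefixed assignment (a different variable)
  echo "workflow sees WRTCMP_write_tasks_per_group=${WRTCMP_write_tasks_per_group}"   # still 2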
--- ush/set_predef_grid_params.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ush/set_predef_grid_params.sh b/ush/set_predef_grid_params.sh index e84a9ae80..8f0b81f09 100644 --- a/ush/set_predef_grid_params.sh +++ b/ush/set_predef_grid_params.sh @@ -542,7 +542,7 @@ predefined domain: LAYOUT_X="16" LAYOUT_Y="72" - write_tasks_per_group="72" + WRTCMP_write_tasks_per_group="72" BLOCKSIZE=32 @@ -634,8 +634,8 @@ predefined domain: LAYOUT_X="16" LAYOUT_Y="48" - write_groups="2" - write_tasks_per_group="24" + WRTCMP_write_groups="2" + WRTCMP_write_tasks_per_group="24" BLOCKSIZE=32 elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then From 35515b119f3805cd08fb74467f65e1a5968f972d Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Wed, 8 Jan 2020 05:17:16 -0700 Subject: [PATCH 156/203] Clean up the function set_gridparams_GFDLgrid(.sh) by making it more like set_gridparams_JPgrid(.sh) (details below). Also, make the necessary changes to setup.sh to call the function in a modified way. Improvements to function set_gridparams_GFDLgrid(.sh): *) Do not call the function by simply sourcing the file [which in the original version is done by calling the function at the end of the file (right after its definition)]. Instead, call the function (with arguments) in ush/setup.sh. *) Define arguments to the function (by defining the array valid_args) and process them using the process_args function. Pass in values through this arguments list instead of using global workflow variables directly. *) Declare local variables (many with new, clearer names; see below). *) Require that tile 7 be centered with respect to tile 6 - check for this. *) Use the eval utility to set the function's output arguments. *) Fix indentation. *) Change variable names as follows: istart_rgnl_T6SG --> istart_of_t7_on_t6sg iend_rgnl_T6SG --> iend_of_t7_on_t6sg jstart_rgnl_T6SG --> jstart_of_t7_on_t6sg jend_rgnl_T6SG --> jend_of_t7_on_t6sg istart_rgnl_wide_halo_T6SG --> istart_of_t7_with_halo_on_t6sg iend_rgnl_wide_halo_T6SG --> iend_of_t7_with_halo_on_t6sg jstart_rgnl_wide_halo_T6SG --> jstart_of_t7_with_halo_on_t6sg jend_rgnl_wide_halo_T6SG --> jend_of_t7_with_halo_on_t6sg nhw_T6SG --> halo_width_on_t6sg nhw_T6 --> halo_width_on_t6g NHW_T7 --> halo_width_on_t7g nx_rgnl_T6SG --> nx_of_t7_on_t6sg nx_rgnl_T6 --> nx_of_t7_on_t6g ny_rgnl_T6SG --> ny_of_t7_on_t6sg ny_rgnl_T6 --> ny_of_t7_on_t6g prime_factors_nx_T7 --> prime_factors_nx_of_t7_on_t7g prime_factors_ny_T7 --> prime_factors_ny_of_t7_on_t7g nx_T6SG --> nx_of_t6_on_t6sg ny_T6SG --> ny_of_t6_on_t6sg nx_T6 --> nx_of_t6_on_t6g ny_T6 --> ny_of_t6_on_t6g NX_T7 --> nx_of_t7_on_t7g NY_T7 --> ny_of_t7_on_t7g nx_wide_halo_T6SG --> nx_of_t7_with_halo_on_t6sg nx_wide_halo_T6 --> nx_of_t7_with_halo_on_t6g nx_wide_halo_T7 --> nx_of_t7_with_halo_on_t7g ny_wide_halo_T6SG --> ny_of_t7_with_halo_on_t6sg ny_wide_halo_T6 --> ny_of_t7_with_halo_on_t6g ny_wide_halo_T7 --> ny_of_t7_with_halo_on_t7g *) Instead of using the following global workflow variables (on the left-hand side in the list below) directly, pass them in as function arguments (on the right-hand side in the list below): GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G --> istart_of_t7_on_t6g GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G --> iend_of_t7_on_t6g GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G --> jstart_of_t7_on_t6g GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G --> jend_of_t7_on_t6g GFDLgrid_REFINE_RATIO --> refine_ratio_t6g_to_t7g --- ush/set_gridparams_GFDLgrid.sh | 455 ++++++++++++++++++++++----------- ush/setup.sh | 23 +- 2 files changed, 330 
insertions(+), 148 deletions(-) diff --git a/ush/set_gridparams_GFDLgrid.sh b/ush/set_gridparams_GFDLgrid.sh index dea252041..ad453d030 100644 --- a/ush/set_gridparams_GFDLgrid.sh +++ b/ush/set_gridparams_GFDLgrid.sh @@ -11,15 +11,24 @@ function set_gridparams_GFDLgrid() { # #----------------------------------------------------------------------- # +# Save current shell options (in a global array). Then set new options +# for this script/function. +# +#----------------------------------------------------------------------- +# + { save_shell_opts; set -u +x; } > /dev/null 2>&1 +# +#----------------------------------------------------------------------- +# # Get the full path to the file in which this script/function is located # (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in # which the file is located (scrfunc_dir). # #----------------------------------------------------------------------- # -local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) -local scrfunc_fn=$( basename "${scrfunc_fp}" ) -local scrfunc_dir=$( dirname "${scrfunc_fp}" ) + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) # #----------------------------------------------------------------------- # @@ -27,7 +36,151 @@ local scrfunc_dir=$( dirname "${scrfunc_fp}" ) # #----------------------------------------------------------------------- # -local func_name="${FUNCNAME[0]}" + local func_name="${FUNCNAME[0]}" +# +#----------------------------------------------------------------------- +# +# Specify the set of valid argument names for this script/function. +# Then process the arguments provided to this script/function (which +# should consist of a set of name-value pairs of the form arg1="value1", +# etc). +# +#----------------------------------------------------------------------- +# + local valid_args=( \ +"lon_of_t6_ctr" \ +"lat_of_t6_ctr" \ +"res_of_t6g" \ +"stretch_factor" \ +"refine_ratio_t6g_to_t7g" \ +"istart_of_t7_on_t6g" \ +"iend_of_t7_on_t6g" \ +"jstart_of_t7_on_t6g" \ +"jend_of_t7_on_t6g" \ +"output_varname_lon_of_t7_ctr" \ +"output_varname_lat_of_t7_ctr" \ +"output_varname_nx_of_t7_on_t7g" \ +"output_varname_ny_of_t7_on_t7g" \ +"output_varname_halo_width_on_t7g" \ +"output_varname_stretch_factor" \ +"output_varname_istart_of_t7_with_halo_on_t6sg" \ +"output_varname_iend_of_t7_with_halo_on_t6sg" \ +"output_varname_jstart_of_t7_with_halo_on_t6sg" \ +"output_varname_jend_of_t7_with_halo_on_t6sg" \ + ) + process_args valid_args "$@" +# +#----------------------------------------------------------------------- +# +# Declare local variables. 
+# +#----------------------------------------------------------------------- +# + local nx_of_t6_on_t6g \ + ny_of_t6_on_t6g \ + num_left_margin_cells_on_t6g \ + num_right_margin_cells_on_t6g \ + num_bot_margin_cells_on_t6g \ + num_top_margin_cells_on_t6g \ + lon_of_t7_ctr \ + lat_of_t7_ctr \ + istart_of_t7_on_t6sg \ + iend_of_t7_on_t6sg \ + jstart_of_t7_on_t6sg \ + jend_of_t7_on_t6sg \ + halo_width_on_t7g \ + halo_width_on_t6sg \ + istart_of_t7_with_halo_on_t6sg \ + iend_of_t7_with_halo_on_t6sg \ + jstart_of_t7_with_halo_on_t6sg \ + jend_of_t7_with_halo_on_t6sg \ + halo_width_on_t6sg \ + halo_width_on_t6g \ + halo_width_on_t7g \ + nx_of_t7_on_t6sg \ + nx_of_t7_on_t6g \ + nx_of_t7_on_t7g \ + ny_of_t7_on_t6sg \ + ny_of_t7_on_t6g \ + ny_of_t7_on_t7g \ + nx_of_t6_on_t6sg \ + ny_of_t6_on_t6sg \ + prime_factors_nx_of_t7_on_t7g \ + prime_factors_ny_of_t7_on_t7g \ + nx_of_t7_with_halo_on_t6sg \ + nx_of_t7_with_halo_on_t6g \ + nx_of_t7_with_halo_on_t7g \ + ny_of_t7_with_halo_on_t6sg \ + ny_of_t7_with_halo_on_t6g \ + ny_of_t7_with_halo_on_t7g +# +#----------------------------------------------------------------------- +# +# To simplify the grid setup, we require that tile 7 be centered on tile +# 6. Note that this is not really a restriction because tile 6 can al- +# ways be moved so that it is centered on tile 7 [the location of tile 6 +# doesn't really matter because for a regional setup, the forecast model +# will only run on tile 7 (not on tiles 1-6)]. +# +# We now check that tile 7 is centered on tile 6 by checking (1) that +# the number of cells (on tile 6) between the left boundaries of these +# two tiles is equal to that between their right boundaries and (2) that +# the number of cells (on tile 6) between the bottom boundaries of these +# two tiles is equal to that between their top boundaries. If not, we +# print out an error message and exit. If so, we set the longitude and +# latitude of the center of tile 7 to those of tile 6 and continue. +# +#----------------------------------------------------------------------- +# + nx_of_t6_on_t6g=${res_of_t6g} + ny_of_t6_on_t6g=${res_of_t6g} + + num_left_margin_cells_on_t6g=$(( istart_of_t7_on_t6g - 1 )) + num_right_margin_cells_on_t6g=$(( nx_of_t6_on_t6g - iend_of_t7_on_t6g )) + + if [ ${num_left_margin_cells_on_t6g} -ne ${num_right_margin_cells_on_t6g} ]; then + print_err_msg_exit "\ +In order for tile 7 to be centered in the x direction on tile 6, the x- +direction tile 6 cell indices at which tile 7 starts and ends (given by +istart_of_t7_on_t6g and iend_of_t7_on_t6g, respectively) must be set +such that the number of tile 6 cells in the margin between the left +boundaries of tiles 6 and 7 (given by num_left_margin_cells_on_t6g) is +equal to that in the margin between their right boundaries (given by +num_right_margin_cells_on_t6g): + istart_of_t7_on_t6g = ${istart_of_t7_on_t6g} + iend_of_t7_on_t6g = ${iend_of_t7_on_t6g} + num_left_margin_cells_on_t6g = ${num_left_margin_cells_on_t6g} + num_right_margin_cells_on_t6g = ${num_right_margin_cells_on_t6g} +Note that the total number of cells in the x-direction on tile 6 is gi- +ven by: + nx_of_t6_on_t6g = ${nx_of_t6_on_t6g} +Please reset istart_of_t7_on_t6g and iend_of_t7_on_t6g and rerun." 
+ fi + + num_bot_margin_cells_on_t6g=$(( jstart_of_t7_on_t6g - 1 )) + num_top_margin_cells_on_t6g=$(( ny_of_t6_on_t6g - jend_of_t7_on_t6g )) + + if [ ${num_bot_margin_cells_on_t6g} -ne ${num_top_margin_cells_on_t6g} ]; then + print_err_msg_exit "\ +In order for tile 7 to be centered in the y direction on tile 6, the y- +direction tile 6 cell indices at which tile 7 starts and ends (given by +jstart_of_t7_on_t6g and jend_of_t7_on_t6g, respectively) must be set +such that the number of tile 6 cells in the margin between the left +boundaries of tiles 6 and 7 (given by num_left_margin_cells_on_t6g) is +equal to that in the margin between their right boundaries (given by +num_right_margin_cells_on_t6g): + jstart_of_t7_on_t6g = ${jstart_of_t7_on_t6g} + jend_of_t7_on_t6g = ${jend_of_t7_on_t6g} + num_bot_margin_cells_on_t6g = ${num_bot_margin_cells_on_t6g} + num_top_margin_cells_on_t6g = ${num_top_margin_cells_on_t6g} +Note that the total number of cells in the y-direction on tile 6 is gi- +ven by: + ny_of_t6_on_t6g = ${ny_of_t6_on_t6g} +Please reset jstart_of_t7_on_t6g and jend_of_t7_on_t6g and rerun." + fi + + lon_of_t7_ctr="${lon_of_t6_ctr}" + lat_of_t7_ctr="${lat_of_t6_ctr}" # #----------------------------------------------------------------------- # @@ -40,28 +193,28 @@ local func_name="${FUNCNAME[0]}" # the number of cells in each direction on that tile's grid. We will # denote these index limits by # -# istart_rgnl_T6SG -# iend_rgnl_T6SG -# jstart_rgnl_T6SG -# jend_rgnl_T6SG +# istart_of_t7_on_t6sg +# iend_of_t7_on_t6sg +# jstart_of_t7_on_t6sg +# jend_of_t7_on_t6sg # # The "_T6SG" suffix in these names is used to indicate that the indices # are on the supergrid of tile 6. Recall, however, that we have as in- # puts the index limits of the regional grid on the tile 6 grid, not its # supergrid. These are given by # -# GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G -# GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G -# GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G -# GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G +# istart_of_t7_on_t6g +# iend_of_t7_on_t6g +# jstart_of_t7_on_t6g +# jend_of_t7_on_t6g # # We can obtain the former from the latter by recalling that the super- # grid has twice the resolution of the original grid. 
Thus, # -# istart_rgnl_T6SG = 2*GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G - 1 -# iend_rgnl_T6SG = 2*GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G -# jstart_rgnl_T6SG = 2*GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G - 1 -# jend_rgnl_T6SG = 2*GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G +# istart_of_t7_on_t6sg = 2*istart_of_t7_on_t6g - 1 +# iend_of_t7_on_t6sg = 2*iend_of_t7_on_t6g +# jstart_of_t7_on_t6sg = 2*jstart_of_t7_on_t6g - 1 +# jend_of_t7_on_t6sg = 2*jend_of_t7_on_t6g # # These are obtained assuming that grid cells on tile 6 must either be # completely within the regional domain or completely outside of it, @@ -73,10 +226,10 @@ local func_name="${FUNCNAME[0]}" # #----------------------------------------------------------------------- # -istart_rgnl_T6SG=$(( 2*GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G - 1 )) -iend_rgnl_T6SG=$(( 2*GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G )) -jstart_rgnl_T6SG=$(( 2*GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G - 1 )) -jend_rgnl_T6SG=$(( 2*GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G )) + istart_of_t7_on_t6sg=$(( 2*istart_of_t7_on_t6g - 1 )) + iend_of_t7_on_t6sg=$(( 2*iend_of_t7_on_t6g )) + jstart_of_t7_on_t6sg=$(( 2*jstart_of_t7_on_t6g - 1 )) + jend_of_t7_on_t6sg=$(( 2*jend_of_t7_on_t6g )) # #----------------------------------------------------------------------- # @@ -86,10 +239,10 @@ jend_rgnl_T6SG=$(( 2*GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G )) # we must pass to make_hgrid the index limits (on the tile 6 supergrid) # of the regional grid including a halo. We will let the variables # -# istart_rgnl_wide_halo_T6SG -# iend_rgnl_wide_halo_T6SG -# jstart_rgnl_wide_halo_T6SG -# jend_rgnl_wide_halo_T6SG +# istart_of_t7_with_halo_on_t6sg +# iend_of_t7_with_halo_on_t6sg +# jstart_of_t7_with_halo_on_t6sg +# jend_of_t7_with_halo_on_t6sg # # denote these limits. The reason we include "_wide_halo" in these va- # riable names is that the halo of the grid that we will first generate @@ -99,44 +252,44 @@ jend_rgnl_T6SG=$(( 2*GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G )) # the model needs later on by "shaving" layers of cells from this wide- # halo grid. Next, we describe how to calculate the above indices. # -# Let NHW_T7 denote the width of the "wide" halo in units of number of +# Let halo_width_on_t7g denote the width of the "wide" halo in units of number of # grid cells on the regional grid (i.e. tile 7) that we'd like to have # along all four edges of the regional domain (left, right, bottom, and # top). To obtain the corresponding halo width in units of number of -# cells on the tile 6 grid -- which we denote by nhw_T6 -- we simply di- -# vide NHW_T7 by the refinement ratio, i.e. +# cells on the tile 6 grid -- which we denote by halo_width_on_t6g -- we simply di- +# vide halo_width_on_t7g by the refinement ratio, i.e. # -# nhw_T6 = NHW_T7/GFDLgrid_REFINE_RATIO +# halo_width_on_t6g = halo_width_on_t7g/refine_ratio_t6g_to_t7g # # The corresponding halo width on the tile 6 supergrid is then given by # -# nhw_T6SG = 2*nhw_T6 -# = 2*NHW_T7/GFDLgrid_REFINE_RATIO +# halo_width_on_t6sg = 2*halo_width_on_t6g +# = 2*halo_width_on_t7g/refine_ratio_t6g_to_t7g # -# Note that nhw_T6SG must be an integer, but the expression for it de- +# Note that halo_width_on_t6sg must be an integer, but the expression for it de- # rived above may not yield an integer. To ensure that the halo has a -# width of at least NHW_T7 cells on the regional grid, we round up the -# result of the expression above for nhw_T6SG, i.e. we redefine nhw_T6SG +# width of at least halo_width_on_t7g cells on the regional grid, we round up the +# result of the expression above for halo_width_on_t6sg, i.e. 
we redefine halo_width_on_t6sg # to be # -# nhw_T6SG = ceil(2*NHW_T7/GFDLgrid_REFINE_RATIO) +# halo_width_on_t6sg = ceil(2*halo_width_on_t7g/refine_ratio_t6g_to_t7g) # # where ceil(...) is the ceiling function, i.e. it rounds its floating # point argument up to the next larger integer. Since in bash division # of two integers returns a truncated integer and since bash has no # built-in ceil(...) function, we perform the rounding-up operation by # adding the denominator (of the argument of ceil(...) above) minus 1 to -# the original numerator, i.e. by redefining nhw_T6SG to be +# the original numerator, i.e. by redefining halo_width_on_t6sg to be # -# nhw_T6SG = (2*NHW_T7 + GFDLgrid_REFINE_RATIO - 1)/GFDLgrid_REFINE_RATIO +# halo_width_on_t6sg = (2*halo_width_on_t7g + refine_ratio_t6g_to_t7g - 1)/refine_ratio_t6g_to_t7g # # This trick works when dividing one positive integer by another. # -# In order to calculate nhw_T6G using the above expression, we must -# first specify NHW_T7. Next, we specify an initial value for it by +# In order to calculate halo_width_on_t6g using the above expression, we must +# first specify halo_width_on_t7g. Next, we specify an initial value for it by # setting it to one more than the largest-width halo that the model ac- -# tually needs, which is NH4. We then calculate nhw_T6SG using the -# above expression. Note that these values of NHW_T7 and nhw_T6SG will +# tually needs, which is NH4. We then calculate halo_width_on_t6sg using the +# above expression. Note that these values of halo_width_on_t7g and halo_width_on_t6sg will # likely not be their final values; their final values will be calcula- # ted later below after calculating the starting and ending indices of # the regional grid with wide halo on the tile 6 supergrid and then ad- @@ -144,30 +297,30 @@ jend_rgnl_T6SG=$(( 2*GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G )) # #----------------------------------------------------------------------- # -NHW_T7=$(( NH4+1 )) -nhw_T6SG=$(( (2*NHW_T7 + GFDLgrid_REFINE_RATIO - 1)/GFDLgrid_REFINE_RATIO )) + halo_width_on_t7g=$(( NH4 + 1 )) + halo_width_on_t6sg=$(( (2*halo_width_on_t7g + refine_ratio_t6g_to_t7g - 1)/refine_ratio_t6g_to_t7g )) # #----------------------------------------------------------------------- # -# With an initial value of nhw_T6SG now available, we can obtain the +# With an initial value of halo_width_on_t6sg now available, we can obtain the # tile 6 supergrid index limits of the regional domain (including the # wide halo) from the index limits for the regional domain without a ha- -# lo by simply subtracting nhw_T6SG from the lower index limits and add- -# ing nhw_T6SG to the upper index limits, i.e. +# lo by simply subtracting halo_width_on_t6sg from the lower index limits and add- +# ing halo_width_on_t6sg to the upper index limits, i.e. # -# istart_rgnl_wide_halo_T6SG = istart_rgnl_T6SG - nhw_T6SG -# iend_rgnl_wide_halo_T6SG = iend_rgnl_T6SG + nhw_T6SG -# jstart_rgnl_wide_halo_T6SG = jstart_rgnl_T6SG - nhw_T6SG -# jend_rgnl_wide_halo_T6SG = jend_rgnl_T6SG + nhw_T6SG +# istart_of_t7_with_halo_on_t6sg = istart_of_t7_on_t6sg - halo_width_on_t6sg +# iend_of_t7_with_halo_on_t6sg = iend_of_t7_on_t6sg + halo_width_on_t6sg +# jstart_of_t7_with_halo_on_t6sg = jstart_of_t7_on_t6sg - halo_width_on_t6sg +# jend_of_t7_with_halo_on_t6sg = jend_of_t7_on_t6sg + halo_width_on_t6sg # # We calculate these next. 
# #----------------------------------------------------------------------- # -istart_rgnl_wide_halo_T6SG=$(( istart_rgnl_T6SG - nhw_T6SG )) -iend_rgnl_wide_halo_T6SG=$(( iend_rgnl_T6SG + nhw_T6SG )) -jstart_rgnl_wide_halo_T6SG=$(( jstart_rgnl_T6SG - nhw_T6SG )) -jend_rgnl_wide_halo_T6SG=$(( jend_rgnl_T6SG + nhw_T6SG )) + istart_of_t7_with_halo_on_t6sg=$(( istart_of_t7_on_t6sg - halo_width_on_t6sg )) + iend_of_t7_with_halo_on_t6sg=$(( iend_of_t7_on_t6sg + halo_width_on_t6sg )) + jstart_of_t7_with_halo_on_t6sg=$(( jstart_of_t7_on_t6sg - halo_width_on_t6sg )) + jend_of_t7_with_halo_on_t6sg=$(( jend_of_t7_on_t6sg + halo_width_on_t6sg )) # #----------------------------------------------------------------------- # @@ -178,26 +331,28 @@ jend_rgnl_wide_halo_T6SG=$(( jend_rgnl_T6SG + nhw_T6SG )) # starting indices on the tile 6 supergrid of the grid with wide halo # must be odd while the ending indices must be even. Thus, below, we # subtract 1 from the starting indices if they are even (which ensures -# that there will be at least NHW_T7 halo cells along the left and bot- +# that there will be at least halo_width_on_t7g halo cells along the left and bot- # tom boundaries), and we add 1 to the ending indices if they are odd -# (which ensures that there will be at least NHW_T7 halo cells along the +# (which ensures that there will be at least halo_width_on_t7g halo cells along the # right and top boundaries). # #----------------------------------------------------------------------- # -if [ $(( istart_rgnl_wide_halo_T6SG%2 )) -eq 0 ]; then - istart_rgnl_wide_halo_T6SG=$(( istart_rgnl_wide_halo_T6SG - 1 )) -fi -if [ $(( iend_rgnl_wide_halo_T6SG%2 )) -eq 1 ]; then - iend_rgnl_wide_halo_T6SG=$(( iend_rgnl_wide_halo_T6SG + 1 )) -fi + if [ $(( istart_of_t7_with_halo_on_t6sg%2 )) -eq 0 ]; then + istart_of_t7_with_halo_on_t6sg=$(( istart_of_t7_with_halo_on_t6sg - 1 )) + fi + + if [ $(( iend_of_t7_with_halo_on_t6sg%2 )) -eq 1 ]; then + iend_of_t7_with_halo_on_t6sg=$(( iend_of_t7_with_halo_on_t6sg + 1 )) + fi + + if [ $(( jstart_of_t7_with_halo_on_t6sg%2 )) -eq 0 ]; then + jstart_of_t7_with_halo_on_t6sg=$(( jstart_of_t7_with_halo_on_t6sg - 1 )) + fi -if [ $(( jstart_rgnl_wide_halo_T6SG%2 )) -eq 0 ]; then - jstart_rgnl_wide_halo_T6SG=$(( jstart_rgnl_wide_halo_T6SG - 1 )) -fi -if [ $(( jend_rgnl_wide_halo_T6SG%2 )) -eq 1 ]; then - jend_rgnl_wide_halo_T6SG=$(( jend_rgnl_wide_halo_T6SG + 1 )) -fi + if [ $(( jend_of_t7_with_halo_on_t6sg%2 )) -eq 1 ]; then + jend_of_t7_with_halo_on_t6sg=$(( jend_of_t7_with_halo_on_t6sg + 1 )) + fi # #----------------------------------------------------------------------- # @@ -206,7 +361,7 @@ fi # #----------------------------------------------------------------------- # -{ save_shell_opts; set +x; } > /dev/null 2>&1 + { save_shell_opts; set +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -223,135 +378,143 @@ fi # #----------------------------------------------------------------------- # -print_info_msg "$VERBOSE" " + print_info_msg "$VERBOSE" " Original values of the halo width on the tile 6 supergrid and on the tile 7 grid are: - nhw_T6SG = $nhw_T6SG - NHW_T7 = ${NHW_T7}" + halo_width_on_t6sg = ${halo_width_on_t6sg} + halo_width_on_t7g = ${halo_width_on_t7g}" -nhw_T6SG=$(( istart_rgnl_T6SG - istart_rgnl_wide_halo_T6SG )) -nhw_T6=$(( nhw_T6SG/2 )) -NHW_T7=$(( nhw_T6*GFDLgrid_REFINE_RATIO )) + halo_width_on_t6sg=$(( istart_of_t7_on_t6sg - istart_of_t7_with_halo_on_t6sg )) + halo_width_on_t6g=$(( halo_width_on_t6sg/2 )) + 
halo_width_on_t7g=$(( halo_width_on_t6g*refine_ratio_t6g_to_t7g )) -print_info_msg "$VERBOSE" " + print_info_msg "$VERBOSE" " Values of the halo width on the tile 6 supergrid and on the tile 7 grid AFTER adjustments are: - nhw_T6SG = $nhw_T6SG - NHW_T7 = ${NHW_T7}" + halo_width_on_t6sg = ${halo_width_on_t6sg} + halo_width_on_t7g = ${halo_width_on_t7g}" # #----------------------------------------------------------------------- # # Calculate the number of cells that the regional domain (without halo) # has in each of the two horizontal directions (say x and y). We denote -# these by NX_T7 and NY_T7, respectively. These will be needed in the -# "shave" steps later below. +# these by nx_of_t7_on_t7g and ny_of_t7_on_t7g, respectively. These +# will be needed in the "shave" steps in the grid generation task of the +# workflow. # #----------------------------------------------------------------------- # -nx_rgnl_T6SG=$(( iend_rgnl_T6SG - istart_rgnl_T6SG + 1 )) -nx_rgnl_T6=$(( nx_rgnl_T6SG/2 )) -NX_T7=$(( nx_rgnl_T6*GFDLgrid_REFINE_RATIO )) + nx_of_t7_on_t6sg=$(( iend_of_t7_on_t6sg - istart_of_t7_on_t6sg + 1 )) + nx_of_t7_on_t6g=$(( nx_of_t7_on_t6sg/2 )) + nx_of_t7_on_t7g=$(( nx_of_t7_on_t6g*refine_ratio_t6g_to_t7g )) -ny_rgnl_T6SG=$(( jend_rgnl_T6SG - jstart_rgnl_T6SG + 1 )) -ny_rgnl_T6=$(( ny_rgnl_T6SG/2 )) -NY_T7=$(( ny_rgnl_T6*GFDLgrid_REFINE_RATIO )) + ny_of_t7_on_t6sg=$(( jend_of_t7_on_t6sg - jstart_of_t7_on_t6sg + 1 )) + ny_of_t7_on_t6g=$(( ny_of_t7_on_t6sg/2 )) + ny_of_t7_on_t7g=$(( ny_of_t7_on_t6g*refine_ratio_t6g_to_t7g )) # # The following are set only for informational purposes. # -nx_T6=$RES -ny_T6=$RES -nx_T6SG=$(( $nx_T6*2 )) -ny_T6SG=$(( $ny_T6*2 )) + nx_of_t6_on_t6sg=$(( 2*nx_of_t6_on_t6g )) + ny_of_t6_on_t6sg=$(( 2*ny_of_t6_on_t6g )) -prime_factors_nx_T7=$( factor ${NX_T7} | sed -r -e 's/^[0-9]+: (.*)/\1/' ) -prime_factors_ny_T7=$( factor ${NY_T7} | sed -r -e 's/^[0-9]+: (.*)/\1/' ) + prime_factors_nx_of_t7_on_t7g=$( factor ${nx_of_t7_on_t7g} | sed -r -e 's/^[0-9]+: (.*)/\1/' ) + prime_factors_ny_of_t7_on_t7g=$( factor ${ny_of_t7_on_t7g} | sed -r -e 's/^[0-9]+: (.*)/\1/' ) -print_info_msg "$VERBOSE" " + print_info_msg "$VERBOSE" " The number of cells in the two horizontal directions (x and y) on the parent tile's (tile 6) grid and supergrid are: - nx_T6 = $nx_T6 - ny_T6 = $ny_T6 - nx_T6SG = $nx_T6SG - ny_T6SG = $ny_T6SG + nx_of_t6_on_t6g = ${nx_of_t6_on_t6g} + ny_of_t6_on_t6g = ${ny_of_t6_on_t6g} + nx_of_t6_on_t6sg = ${nx_of_t6_on_t6sg} + ny_of_t6_on_t6sg = ${ny_of_t6_on_t6sg} The number of cells in the two horizontal directions on the tile 6 grid -and supergrid that the regional domain (tile 7) WITHOUT A HALO encompasses -are: - nx_rgnl_T6 = $nx_rgnl_T6 - ny_rgnl_T6 = $ny_rgnl_T6 - nx_rgnl_T6SG = $nx_rgnl_T6SG - ny_rgnl_T6SG = $ny_rgnl_T6SG +and supergrid that the regional domain (tile 7) WITHOUT A HALO encompas- +ses are: + nx_of_t7_on_t6g = ${nx_of_t7_on_t6g} + ny_of_t7_on_t6g = ${ny_of_t7_on_t6g} + nx_of_t7_on_t6sg = ${nx_of_t7_on_t6sg} + ny_of_t7_on_t6sg = ${ny_of_t7_on_t6sg} -The starting and ending i and j indices on the tile 6 grid used to -generate this regional grid are: - GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G = ${GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G} - GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G = ${GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G} - GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G = ${GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G} - GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G = ${GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G} +The starting and ending i and j indices on the tile 6 grid used to gene- +rate this regional grid are: + 
istart_of_t7_on_t6g = ${istart_of_t7_on_t6g} + iend_of_t7_on_t6g = ${iend_of_t7_on_t6g} + jstart_of_t7_on_t6g = ${jstart_of_t7_on_t6g} + jend_of_t7_on_t6g = ${jend_of_t7_on_t6g} -The corresponding starting and ending i and j indices on the tile 6 -supergrid are: - istart_rgnl_T6SG = $istart_rgnl_T6SG - iend_rgnl_T6SG = $iend_rgnl_T6SG - jstart_rgnl_T6SG = $jstart_rgnl_T6SG - jend_rgnl_T6SG = $jend_rgnl_T6SG +The corresponding starting and ending i and j indices on the tile 6 su- +pergrid are: + istart_of_t7_on_t6sg = ${istart_of_t7_on_t6sg} + iend_of_t7_on_t6sg = ${iend_of_t7_on_t6sg} + jstart_of_t7_on_t6sg = ${jstart_of_t7_on_t6sg} + jend_of_t7_on_t6sg = ${jend_of_t7_on_t6sg} The refinement ratio (ratio of the number of cells in tile 7 that abut a single cell in tile 6) is: - GFDLgrid_REFINE_RATIO = ${GFDLgrid_REFINE_RATIO} + refine_ratio_t6g_to_t7g = ${refine_ratio_t6g_to_t7g} -The number of cells in the two horizontal directions on the regional -tile's/domain's (tile 7) grid WITHOUT A HALO are: - NX_T7 = ${NX_T7} - NY_T7 = ${NY_T7} +The number of cells in the two horizontal directions on the regional do- +main's (i.e. tile 7's) grid WITHOUT A HALO are: + nx_of_t7_on_t7g = ${nx_of_t7_on_t7g} + ny_of_t7_on_t7g = ${ny_of_t7_on_t7g} -The prime factors of NX_T7 and NY_T7 are (useful for determining an MPI -task layout, i.e. LAYOUT_X and LAYOUT_Y): - prime_factors_nx_T7: $prime_factors_nx_T7 - prime_factors_ny_T7: $prime_factors_ny_T7" +The prime factors of nx_of_t7_on_t7g and ny_of_t7_on_t7g are (useful for +determining an MPI task layout): + prime_factors_nx_of_t7_on_t7g: ${prime_factors_nx_of_t7_on_t7g} + prime_factors_ny_of_t7_on_t7g: ${prime_factors_ny_of_t7_on_t7g}" # #----------------------------------------------------------------------- # # For informational purposes, calculate the number of cells in each di- -# rection on the regional grid that includes the wide halo (of width -# NHW_T7 cells). We denote these by nx_wide_halo_T7 and ny_wide_halo_- -# T7, respectively. +# rection on the regional grid including the wide halo (of width halo_- +# width_on_t7g cells). We denote these by nx_of_t7_with_halo_on_t7g and +# ny_of_t7_with_halo_on_t7g, respectively. 
# #----------------------------------------------------------------------- # -nx_wide_halo_T6SG=$(( iend_rgnl_wide_halo_T6SG - istart_rgnl_wide_halo_T6SG + 1 )) -nx_wide_halo_T6=$(( nx_wide_halo_T6SG/2 )) -nx_wide_halo_T7=$(( nx_wide_halo_T6*GFDLgrid_REFINE_RATIO )) + nx_of_t7_with_halo_on_t6sg=$(( iend_of_t7_with_halo_on_t6sg - istart_of_t7_with_halo_on_t6sg + 1 )) + nx_of_t7_with_halo_on_t6g=$(( nx_of_t7_with_halo_on_t6sg/2 )) + nx_of_t7_with_halo_on_t7g=$(( nx_of_t7_with_halo_on_t6g*refine_ratio_t6g_to_t7g )) -ny_wide_halo_T6SG=$(( jend_rgnl_wide_halo_T6SG - jstart_rgnl_wide_halo_T6SG + 1 )) -ny_wide_halo_T6=$(( ny_wide_halo_T6SG/2 )) -ny_wide_halo_T7=$(( ny_wide_halo_T6*GFDLgrid_REFINE_RATIO )) + ny_of_t7_with_halo_on_t6sg=$(( jend_of_t7_with_halo_on_t6sg - jstart_of_t7_with_halo_on_t6sg + 1 )) + ny_of_t7_with_halo_on_t6g=$(( ny_of_t7_with_halo_on_t6sg/2 )) + ny_of_t7_with_halo_on_t7g=$(( ny_of_t7_with_halo_on_t6g*refine_ratio_t6g_to_t7g )) -print_info_msg "$VERBOSE" " -nx_wide_halo_T7 = ${NX_T7} \ -(istart_rgnl_wide_halo_T6SG = $istart_rgnl_wide_halo_T6SG, \ -iend_rgnl_wide_halo_T6SG = $iend_rgnl_wide_halo_T6SG)" + print_info_msg "$VERBOSE" " +nx_of_t7_with_halo_on_t7g = ${nx_of_t7_on_t7g} \ +(istart_of_t7_with_halo_on_t6sg = ${istart_of_t7_with_halo_on_t6sg}, \ +iend_of_t7_with_halo_on_t6sg = ${iend_of_t7_with_halo_on_t6sg})" -print_info_msg "$VERBOSE" " -ny_wide_halo_T7 = ${NY_T7} \ -(jstart_rgnl_wide_halo_T6SG = $jstart_rgnl_wide_halo_T6SG, \ -jend_rgnl_wide_halo_T6SG = $jend_rgnl_wide_halo_T6SG)" + print_info_msg "$VERBOSE" " +ny_of_t7_with_halo_on_t7g = ${ny_of_t7_on_t7g} \ +(jstart_of_t7_with_halo_on_t6sg = ${jstart_of_t7_with_halo_on_t6sg}, \ +jend_of_t7_with_halo_on_t6sg = ${jend_of_t7_with_halo_on_t6sg})" # #----------------------------------------------------------------------- # -# Restore the shell options before turning off xtrace. +# Set output variables. # #----------------------------------------------------------------------- # -{ restore_shell_opts; } > /dev/null 2>&1 - -} + eval ${output_varname_lon_of_t7_ctr}="${lon_of_t7_ctr}" + eval ${output_varname_lat_of_t7_ctr}="${lat_of_t7_ctr}" + eval ${output_varname_nx_of_t7_on_t7g}="${nx_of_t7_on_t7g}" + eval ${output_varname_ny_of_t7_on_t7g}="${ny_of_t7_on_t7g}" + eval ${output_varname_halo_width_on_t7g}="${halo_width_on_t7g}" + eval ${output_varname_stretch_factor}="${stretch_factor}" + eval ${output_varname_istart_of_t7_with_halo_on_t6sg}="${istart_of_t7_with_halo_on_t6sg}" + eval ${output_varname_iend_of_t7_with_halo_on_t6sg}="${iend_of_t7_with_halo_on_t6sg}" + eval ${output_varname_jstart_of_t7_with_halo_on_t6sg}="${jstart_of_t7_with_halo_on_t6sg}" + eval ${output_varname_jend_of_t7_with_halo_on_t6sg}="${jend_of_t7_with_halo_on_t6sg}" # #----------------------------------------------------------------------- # -# Call the function defined above. +# Restore the shell options before turning off xtrace. # #----------------------------------------------------------------------- # -set_gridparams_GFDLgrid + { restore_shell_opts; } > /dev/null 2>&1 + +} diff --git a/ush/setup.sh b/ush/setup.sh index 18fcc193b..493d4916e 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -51,7 +51,7 @@ cd_vrfy ${scrfunc_dir} # . ./source_util_funcs.sh -#. $USHDIR/set_gridparams_JPgrid.sh +. ./set_gridparams_GFDLgrid.sh . ./set_gridparams_JPgrid.sh # #----------------------------------------------------------------------- @@ -1377,7 +1377,26 @@ check_var_valid_value \ # if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - . 
$USHDIR/set_gridparams_GFDLgrid.sh + set_gridparams_GFDLgrid \ + lon_of_t6_ctr="${GFDLgrid_LON_T6_CTR}" \ + lat_of_t6_ctr="${GFDLgrid_LAT_T6_CTR}" \ + res_of_t6g="${GFDLgrid_RES}" \ + stretch_factor="${GFDLgrid_STRETCH_FAC}" \ + refine_ratio_t6g_to_t7g="${GFDLgrid_REFINE_RATIO}" \ + istart_of_t7_on_t6g="${GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G}" \ + iend_of_t7_on_t6g="${GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G}" \ + jstart_of_t7_on_t6g="${GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G}" \ + jend_of_t7_on_t6g="${GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G}" \ + output_varname_lon_of_t7_ctr="LON_CTR" \ + output_varname_lat_of_t7_ctr="LAT_CTR" \ + output_varname_nx_of_t7_on_t7g="NX" \ + output_varname_ny_of_t7_on_t7g="NY" \ + output_varname_halo_width_on_t7g="NHW" \ + output_varname_stretch_factor="STRETCH_FAC" \ + output_varname_istart_of_t7_with_halo_on_t6sg="ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG" \ + output_varname_iend_of_t7_with_halo_on_t6sg="IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG" \ + output_varname_jstart_of_t7_with_halo_on_t6sg="JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG" \ + output_varname_jend_of_t7_with_halo_on_t6sg="JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG" # #----------------------------------------------------------------------- # From 8c9c2b7f94b15fd555c78ad8bf9d83d5fb69d7c2 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Wed, 8 Jan 2020 05:19:36 -0700 Subject: [PATCH 157/203] Bug fix. --- ush/set_gridparams_GFDLgrid.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ush/set_gridparams_GFDLgrid.sh b/ush/set_gridparams_GFDLgrid.sh index ad453d030..1b49d3885 100644 --- a/ush/set_gridparams_GFDLgrid.sh +++ b/ush/set_gridparams_GFDLgrid.sh @@ -482,12 +482,12 @@ determining an MPI task layout): ny_of_t7_with_halo_on_t7g=$(( ny_of_t7_with_halo_on_t6g*refine_ratio_t6g_to_t7g )) print_info_msg "$VERBOSE" " -nx_of_t7_with_halo_on_t7g = ${nx_of_t7_on_t7g} \ +nx_of_t7_with_halo_on_t7g = ${nx_of_t7_with_halo_on_t7g} \ (istart_of_t7_with_halo_on_t6sg = ${istart_of_t7_with_halo_on_t6sg}, \ iend_of_t7_with_halo_on_t6sg = ${iend_of_t7_with_halo_on_t6sg})" print_info_msg "$VERBOSE" " -ny_of_t7_with_halo_on_t7g = ${ny_of_t7_on_t7g} \ +ny_of_t7_with_halo_on_t7g = ${ny_of_t7_with_halo_on_t7g} \ (jstart_of_t7_with_halo_on_t6sg = ${jstart_of_t7_with_halo_on_t6sg}, \ jend_of_t7_with_halo_on_t6sg = ${jend_of_t7_with_halo_on_t6sg})" # From b9f52640ebb4692fdb9ac9f0607c13db21060332 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 9 Jan 2020 04:06:33 -0700 Subject: [PATCH 158/203] Remove if-then-else statement around the sets of parameters for GFDLgrid and JPgrid. The if-then-else is not needed because now these two sets of parameters are completely different and distinguishable from each other (because they start with either "GFDLgrid_" or "JPgrid_"). 
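With both prefixed sets always defined, the choice between them is made only where the parameters are consumed (GRID_GEN_METHOD already controls which set_gridparams_* function setup.sh calls). A minimal sketch of that pattern, using default values taken from the diff below; the echo lines are illustrative only:

  #!/bin/bash
  # Sketch only: GFDLgrid_* and JPgrid_* defaults coexist; GRID_GEN_METHOD
  # alone decides which set is read downstream.
  GRID_GEN_METHOD="JPgrid"       # or "GFDLgrid"
  GFDLgrid_STRETCH_FAC=1.5       # GFDLgrid_* defaults (always set)
  GFDLgrid_REFINE_RATIO=3
  JPgrid_DELX="3000.0"           # JPgrid_* defaults (always set)
  JPgrid_DELY="3000.0"
  if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then
    echo "Using GFDLgrid_* parameters (stretch factor = ${GFDLgrid_STRETCH_FAC})"
  elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then
    echo "Using JPgrid_* parameters (delx/dely = ${JPgrid_DELX}/${JPgrid_DELY} m)"
  fi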
--- ush/config_defaults.sh | 42 ++++++++++++++++++------------------------ 1 file changed, 18 insertions(+), 24 deletions(-) diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index 772107e21..ab0fa4d75 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -416,17 +416,15 @@ GRID_GEN_METHOD="JPgrid" # #----------------------------------------------------------------------- # -if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - - RES="384" - GFDLgrid_LON_T6_CTR=-97.5 - GFDLgrid_LAT_T6_CTR=35.5 - GFDLgrid_STRETCH_FAC=1.5 - GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G=10 - GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=374 - GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G=10 - GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=374 - GFDLgrid_REFINE_RATIO=3 +RES="384" +GFDLgrid_LON_T6_CTR=-97.5 +GFDLgrid_LAT_T6_CTR=35.5 +GFDLgrid_STRETCH_FAC=1.5 +GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G=10 +GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=374 +GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G=10 +GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=374 +GFDLgrid_REFINE_RATIO=3 # #----------------------------------------------------------------------- # @@ -470,19 +468,15 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then # #----------------------------------------------------------------------- # -elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - - JPgrid_LON_CTR=-97.5 - JPgrid_LAT_CTR=35.5 - JPgrid_DELX="3000.0" - JPgrid_DELY="3000.0" - JPgrid_NX=1000 - JPgrid_NY=1000 - JPgrid_WIDE_HALO_WIDTH=6 - JPgrid_ALPHA_PARAM="0.21423" - JPgrid_KAPPA_PARAM="-0.23209" - -fi +JPgrid_LON_CTR=-97.5 +JPgrid_LAT_CTR=35.5 +JPgrid_DELX="3000.0" +JPgrid_DELY="3000.0" +JPgrid_NX=1000 +JPgrid_NY=1000 +JPgrid_WIDE_HALO_WIDTH=6 +JPgrid_ALPHA_PARAM="0.21423" +JPgrid_KAPPA_PARAM="-0.23209" # #----------------------------------------------------------------------- # From 1ef10676fcb12337b51a61e625dc32a35d123e0a Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 9 Jan 2020 04:15:53 -0700 Subject: [PATCH 159/203] For clarity, rearrange the order of the user-specified grid parameters. --- ush/config_defaults.sh | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index ab0fa4d75..937620e94 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -338,9 +338,9 @@ GRID_GEN_METHOD="JPgrid" # the global grid tiles varies somewhat as we move across a tile. # # * Tile 6 has arbitrarily been chosen as the tile to use to orient the -# global grid on the sphere (Earth). This is done by specifying -# GFDLgrid_LON_T6_CTR and GFDLgrid_LAT_T6_CTR, which are the longitude -# and latitude (in degrees) of the center of tile 6. +# global grid on the sphere (Earth). This is done by specifying GFDL- +# grid_LON_T6_CTR and GFDLgrid_LAT_T6_CTR, which are the longitude and +# latitude (in degrees) of the center of tile 6. # # * Setting the Schmidt stretching factor GFDLgrid_STRETCH_FAC to a value greater # than 1 shrinks tile 6, while setting it to a value less than 1 (but @@ -382,21 +382,26 @@ GRID_GEN_METHOD="JPgrid" # # Definitions: # -# RES: -# Number of points in each of the two horizontal directions (x and y) -# on each tile of the global grid. Must be "48", "96", "192", "384", -# "768", "1152", or "3072" -# # GFDLgrid_LON_T6_CTR: # Longitude of the center of tile 6 (in degrees). # # GFDLgrid_LAT_T6_CTR: # Latitude of the center of tile 6 (in degrees). # +# RES: +# Number of points in each of the two horizontal directions (x and y) +# on each tile of the global grid. 
Must be "48", "96", "192", "384", +# "768", "1152", or "3072" +# # GFDLgrid_STRETCH_FAC: # Stretching factor used in the Schmidt transformation applied to the # cubed sphere grid. # +# GFDLgrid_REFINE_RATIO: +# Cell refinement ratio for the regional grid, i.e. the number of cells +# in either the x or y direction on the regional grid (tile 7) that abut +# one cell on its parent tile (tile 6). +# # GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G: # i-index on tile 6 at which the regional grid (tile 7) starts. # @@ -409,22 +414,17 @@ # GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G: # j-index on tile 6 at which the regional grid (tile 7) ends. # -# GFDLgrid_REFINE_RATIO: -# Cell refinement ratio for the regional grid, i.e. the number of cells -# in either the x or y direction on the regional grid (tile 7) that abut -# one cell on its parent tile (tile 6). -# #----------------------------------------------------------------------- # -RES="384" GFDLgrid_LON_T6_CTR=-97.5 GFDLgrid_LAT_T6_CTR=35.5 +RES="384" GFDLgrid_STRETCH_FAC=1.5 +GFDLgrid_REFINE_RATIO=3 GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G=10 GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=374 GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G=10 GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=374 -GFDLgrid_REFINE_RATIO=3 # #----------------------------------------------------------------------- # From f987f87c187dabfed849672a891c8f26caaaa2a2 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 9 Jan 2020 04:40:25 -0700 Subject: [PATCH 160/203] Change NX_T7 and NY_T7 in setup.sh to NX and NY, respectively. This is because in the calls to the functions set_gridparams_GFDLgrid() and set_gridparams_JPgrid(), the output variables containing the number of points in the x and y directions on the regional grid (without considering the halo) are set to NX and NY, not NX_T7 and NY_T7. (Note that the "_T7" at the ends of these variables was dropped in certain locations but not everywhere in a previous commit; here, we are making this drop in more locations; eventually, NX_T7 and NY_T7 should not appear anywhere in the scripts).
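The renamed NX and NY feed the domain-decomposition checks shown in the diff below. A minimal standalone sketch of those checks follows; NX and NY are taken from the 3-km CONUS predefined domain earlier in this series, while LAYOUT_X, LAYOUT_Y, BLOCKSIZE, and the write-task count are assumed values chosen only so that the example passes:

  #!/bin/bash
  # Sketch only: the divisibility constraints that ush/setup.sh enforces on
  # the regional grid dimensions. All layout values below are illustrative.
  NX=1734; NY=1008                   # grid cells in x and y (3-km CONUS example)
  LAYOUT_X=17; LAYOUT_Y=63           # assumed MPI task layout
  BLOCKSIZE=32                       # assumed blocksize
  WRTCMP_write_tasks_per_group=24    # assumed write-component tasks per group

  (( NX % LAYOUT_X == 0 )) || { echo "NX=$NX not divisible by LAYOUT_X=$LAYOUT_X"; exit 1; }
  (( NY % LAYOUT_Y == 0 )) || { echo "NY=$NY not divisible by LAYOUT_Y=$LAYOUT_Y"; exit 1; }

  num_cols_per_task=$(( (NX/LAYOUT_X)*(NY/LAYOUT_Y) ))
  (( num_cols_per_task % BLOCKSIZE == 0 )) || \
    { echo "columns per task ($num_cols_per_task) not divisible by BLOCKSIZE=$BLOCKSIZE"; exit 1; }

  (( NY % WRTCMP_write_tasks_per_group == 0 )) || \
    { echo "NY=$NY not divisible by WRTCMP_write_tasks_per_group=$WRTCMP_write_tasks_per_group"; exit 1; }

  echo "Decomposition OK: $(( LAYOUT_X*LAYOUT_Y )) compute tasks, $num_cols_per_task columns per task"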
--- ush/setup.sh | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/ush/setup.sh b/ush/setup.sh index 493d4916e..377cb889c 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -1474,21 +1474,21 @@ component if it is being used) are: # #----------------------------------------------------------------------- # -rem=$(( NX_T7%LAYOUT_X )) +rem=$(( NX%LAYOUT_X )) if [ $rem -ne 0 ]; then print_err_msg_exit "\ -The number of grid cells in the x direction (NX_T7) is not evenly divisible +The number of grid cells in the x direction (NX) is not evenly divisible by the number of MPI tasks in the x direction (LAYOUT_X): - NX_T7 = ${NX_T7} + NX = $NX LAYOUT_X = ${LAYOUT_X}" fi -rem=$(( NY_T7%LAYOUT_Y )) +rem=$(( NY%LAYOUT_Y )) if [ $rem -ne 0 ]; then print_err_msg_exit "\ -The number of grid cells in the y direction (NY_T7) is not evenly divisible +The number of grid cells in the y direction (NY) is not evenly divisible by the number of MPI tasks in the y direction (LAYOUT_Y): - NY_T7 = ${NY_T7} + NY = $NY LAYOUT_Y = ${LAYOUT_Y}" fi @@ -1504,8 +1504,8 @@ The MPI task layout is: # #----------------------------------------------------------------------- # -nx_per_task=$(( NX_T7/LAYOUT_X )) -ny_per_task=$(( NY_T7/LAYOUT_Y )) +nx_per_task=$(( NX/LAYOUT_X )) +ny_per_task=$(( NY/LAYOUT_Y )) num_cols_per_task=$(( $nx_per_task*$ny_per_task )) rem=$(( num_cols_per_task%BLOCKSIZE )) @@ -1514,8 +1514,8 @@ if [ $rem -ne 0 ]; then print_err_msg_exit "\ The number of columns assigned to a given MPI task must be divisible by BLOCKSIZE: - nx_per_task = NX_T7/LAYOUT_X = ${NX_T7}/${LAYOUT_X} = $nx_per_task - ny_per_task = NY_T7/LAYOUT_Y = ${NY_T7}/${LAYOUT_Y} = $ny_per_task + nx_per_task = NX/LAYOUT_X = $NX/${LAYOUT_X} = $nx_per_task + ny_per_task = NY/LAYOUT_Y = $NY/${LAYOUT_Y} = $ny_per_task num_cols_per_task = nx_per_task*ny_per_task = $num_cols_per_task BLOCKSIZE = $BLOCKSIZE rem = num_cols_per_task%%BLOCKSIZE = $rem @@ -1560,11 +1560,11 @@ fi #----------------------------------------------------------------------- # # If the write component is going to be used, make sure that the number -# of grid cells in the y direction (NY_T7) is divisible by the number of -# write tasks per group. This is because the NY_T7 rows of the grid +# of grid cells in the y direction (NY) is divisible by the number of +# write tasks per group. This is because the NY rows of the grid # must be distributed evenly among the write_tasks_per_group tasks in a # given write group, i.e. each task must receive the same number of -# rows. This implies that NY_T7 must be evenly divisible by write_- +# rows. This implies that NY must be evenly divisible by write_- # tasks_per_group. If it isn't, the write component will hang or fail. # We check for this below. 
# @@ -1572,16 +1572,16 @@ fi # if [ "$QUILTING" = "TRUE" ]; then - rem=$(( NY_T7%WRTCMP_write_tasks_per_group )) + rem=$(( NY%WRTCMP_write_tasks_per_group )) if [ $rem -ne 0 ]; then print_err_msg_exit "\ The number of grid points in the y direction on the regional grid (ny_- T7) must be evenly divisible by the number of tasks per write group (WRTCMP_write_tasks_per_group): - NY_T7 = ${NY_T7} + NY = $NY WRTCMP_write_tasks_per_group = $WRTCMP_write_tasks_per_group - NY_T7%%write_tasks_per_group = $rem" + NY%%write_tasks_per_group = $rem" fi fi @@ -2066,8 +2066,8 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then #----------------------------------------------------------------------- # NHW_T7="${NHW_T7}" -NX_T7="${NX_T7}" -NY_T7="${NY_T7}" +NX="$NX" +NY="$NY" istart_rgnl_wide_halo_T6SG="$istart_rgnl_wide_halo_T6SG" iend_rgnl_wide_halo_T6SG="$iend_rgnl_wide_halo_T6SG" jstart_rgnl_wide_halo_T6SG="$jstart_rgnl_wide_halo_T6SG" From 2db31667f3efa1ef149183cdf39b6c8987f5169b Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 9 Jan 2020 04:49:41 -0700 Subject: [PATCH 161/203] Replace NX_T7 and NY_T7 with NX and NY at all remaining locations in the scripts. --- scripts/exregional_make_grid.sh | 4 ++-- scripts/exregional_make_orog.sh | 4 ++-- ush/generate_FV3SAR_wflow.sh | 6 +++--- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index 57231c23f..32cc63918 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -458,7 +458,7 @@ halo..." nml_fn="input.shave.grid.halo${NH3}" shaved_fp="${tmpdir}/${CRES}_grid.tile${TILE_RGNL}.halo${NH3}.nc" printf "%s %s %s %s %s\n" \ - ${NX_T7} ${NY_T7} ${NH3} \"${unshaved_fp}\" \"${shaved_fp}\" \ + $NX $NY ${NH3} \"${unshaved_fp}\" \"${shaved_fp}\" \ > ${nml_fn} $APRUN $EXECDIR/${shave_exec} < ${nml_fn} || \ @@ -482,7 +482,7 @@ halo..." nml_fn="input.shave.grid.halo${NH4}" shaved_fp="${tmpdir}/${CRES}_grid.tile${TILE_RGNL}.halo${NH4}.nc" printf "%s %s %s %s %s\n" \ - ${NX_T7} ${NY_T7} ${NH4} \"${unshaved_fp}\" \"${shaved_fp}\" \ + $NX $NY ${NH4} \"${unshaved_fp}\" \"${shaved_fp}\" \ > ${nml_fn} $APRUN $EXECDIR/${shave_exec} < ${nml_fn} || \ diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index 11fdbded8..e5b71725f 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -505,7 +505,7 @@ ${NH0}-cell-wide halo..." nml_fn="input.shave.orog.halo${NH0}" shaved_fp="${shave_dir}/${CRES}_oro_data.tile${TILE_RGNL}.halo${NH0}.nc" printf "%s %s %s %s %s\n" \ - ${NX_T7} ${NY_T7} ${NH0} \"${unshaved_fp}\" \"${shaved_fp}\" \ + $NX $NY ${NH0} \"${unshaved_fp}\" \"${shaved_fp}\" \ > ${nml_fn} $APRUN $EXECDIR/${shave_exec} < ${nml_fn} || \ @@ -529,7 +529,7 @@ ${NH4}-cell-wide halo..." 
nml_fn="input.shave.orog.halo${NH4}" shaved_fp="${shave_dir}/${CRES}_oro_data.tile${TILE_RGNL}.halo${NH4}.nc" printf "%s %s %s %s %s\n" \ - ${NX_T7} ${NY_T7} ${NH4} \"${unshaved_fp}\" \"${shaved_fp}\" \ + $NX $NY ${NH4} \"${unshaved_fp}\" \"${shaved_fp}\" \ > ${nml_fn} $APRUN $EXECDIR/${shave_exec} < ${nml_fn} || \ diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index bcc7e465e..a79415a6e 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -625,13 +625,13 @@ print_info_msg "$VERBOSE" " Setting parameters in FV3 namelist file (FV3_NML_FP): FV3_NML_FP = \"${FV3_NML_FP}\"" # -# Set npx_T7 and npy_T7, which are just NX_T7 plus 1 and NY_T7 plus 1, +# Set npx_T7 and npy_T7, which are just NX plus 1 and NY plus 1, # respectively. These need to be set in the FV3SAR Fortran namelist # file. They represent the number of cell vertices in the x and y di- # rections on the regional grid (tile 7). # -npx_T7=$((NX_T7+1)) -npy_T7=$((NY_T7+1)) +npx_T7=$((NX+1)) +npy_T7=$((NY+1)) # # Set parameters. # From eb5d754fe0d284dbfbcc5d833972b8fb92f4490c Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 9 Jan 2020 04:51:24 -0700 Subject: [PATCH 162/203] Drop the "_T7" from npx_T7 and npy_T7 since the concept of a tile does not make sense for a JPgrid type of grid (it only makes sense for a GFDLgrid type of grid, so for simplicity we just don't specify a tile in the variable name). --- ush/generate_FV3SAR_wflow.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index a79415a6e..eca15b116 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -625,21 +625,21 @@ print_info_msg "$VERBOSE" " Setting parameters in FV3 namelist file (FV3_NML_FP): FV3_NML_FP = \"${FV3_NML_FP}\"" # -# Set npx_T7 and npy_T7, which are just NX plus 1 and NY plus 1, +# Set npx and npy, which are just NX plus 1 and NY plus 1, # respectively. These need to be set in the FV3SAR Fortran namelist # file. They represent the number of cell vertices in the x and y di- # rections on the regional grid (tile 7). # -npx_T7=$((NX+1)) -npy_T7=$((NY+1)) +npx=$((NX+1)) +npy=$((NY+1)) # # Set parameters. # set_file_param "${FV3_NML_FP}" "blocksize" "$BLOCKSIZE" set_file_param "${FV3_NML_FP}" "ccpp_suite" "\'${CCPP_PHYS_SUITE}\'" set_file_param "${FV3_NML_FP}" "layout" "${LAYOUT_X},${LAYOUT_Y}" -set_file_param "${FV3_NML_FP}" "npx" "${npx_T7}" -set_file_param "${FV3_NML_FP}" "npy" "${npy_T7}" +set_file_param "${FV3_NML_FP}" "npx" "$npx" +set_file_param "${FV3_NML_FP}" "npy" "$npy" if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then # Question: From adaf4f3bc4c93da43ff28e8875dbbf5d08d92f80 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 9 Jan 2020 04:58:28 -0700 Subject: [PATCH 163/203] Just as we dropped the "_T7" in the variables NX_T7 and NY_T7, now drop it in NHW_T7 (which is the number of halo cells around the regional grid for a WIDE halo, i.e. the halo before shaving it down to 4, 3, and/or 0 cells). --- jobs/JREGIONAL_MAKE_GRID | 8 ++++---- scripts/exregional_make_grid.sh | 6 +++--- scripts/exregional_make_orog.sh | 16 ++++++++-------- ush/setup.sh | 2 +- 4 files changed, 16 insertions(+), 16 deletions(-) diff --git a/jobs/JREGIONAL_MAKE_GRID b/jobs/JREGIONAL_MAKE_GRID index a6ca784c3..bf48a6847 100755 --- a/jobs/JREGIONAL_MAKE_GRID +++ b/jobs/JREGIONAL_MAKE_GRID @@ -33,7 +33,7 @@ # wide. 
Thus, the halo in the grid file that the grid_gen_scr # script generates must be greater than 4 since otherwise, the # shave steps would shave off cells from within the interior of -# tile 7. We will let NHW_T7 denote the width of the halo in the +# tile 7. We will let NHW denote the width of the halo in the # grid file generated by grid_gen_scr. The "n" in this variable # name denotes number of cells, the "h" is used to indicate that # it refers to a halo region, the "w" is used to indicate that it @@ -48,7 +48,7 @@ # # a) This script generates an orography file only on tile 7. # -# b) This orography file contains a halo of the same width (NHW_T7) +# b) This orography file contains a halo of the same width (NHW) # as the grid file for tile 7 generated by the grid_gen_scr script # in the previous step. # @@ -59,7 +59,7 @@ # in the temporary directory defined in WORKDIR_FLTR. Note that: # # a) The filtered orography file generated by this script contains a -# halo of the same width (NHW_T7) as the (unfiltered) orography +# halo of the same width (NHW) as the (unfiltered) orography # file generated by script orog_gen_scr (and the grid file genera- # ted by grid_gen_scr). # @@ -74,7 +74,7 @@ # This "shave" executable is called 4 times to generate 4 files from # the tile 7 grid file generated by grid_gen_scr and the tile 7 fil- # tered orography file generated by orog_fltr_scr (both of which have -# a halo of width NHW_T7 cells). The 4 output files are placed in +# a halo of width NHW cells). The 4 output files are placed in # the temporary directory defined in WORKDIR_SHVE. More specifically: # # a) shave_exec is called to shave the halo in the tile 7 grid file diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index 32cc63918..87b90e1c7 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -228,7 +228,7 @@ mkdir_vrfy -p "$tmpdir" # mation on how tiles 1 through 6 are connected or that tile 7 is within # tile 6). All these files will be placed in the directory specified by # GRID_DIR. Note that the file for tile 7 will include a halo of -# width NHW_T7 cells. +# width NHW cells. # # Since tiles 1 through 6 are not needed to run the FV3SAR model and are # not used later on in any other preprocessing steps, it is not clear @@ -414,7 +414,7 @@ fi # cd_vrfy $tmpdir mv_vrfy ${CRES}_grid.tile${TILE_RGNL}.nc \ - ${CRES}_grid.tile${TILE_RGNL}.halo${NHW_T7}.nc + ${CRES}_grid.tile${TILE_RGNL}.halo${NHW}.nc mv_vrfy ${CRES}_mosaic.nc ${GRID_DIR} cd_vrfy - @@ -438,7 +438,7 @@ Grid file generation complete." # wide halo. This is the input grid file for generating both the grid # file with a 3-cell-wide halo and the one with a 4-cell-wide halo. # -unshaved_fp="$tmpdir/${CRES}_grid.tile${TILE_RGNL}.halo${NHW_T7}.nc" +unshaved_fp="$tmpdir/${CRES}_grid.tile${TILE_RGNL}.halo${NHW}.nc" # # We perform the work in tmpdir, so change location to that directory. # Once it is complete, we move the resultant file from tmpdir to GRID_- diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index e5b71725f..ce2920823 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -241,7 +241,7 @@ export exec_dir="$EXECDIR" # oro.${CRES}.tile7.nc # # and will place it in OROG_DIR. Note that this file will include -# orography for a halo of width NHW_T7 cells around tile 7. The follow- +# orography for a halo of width NHW cells around tile 7. 
The follow- # ing will also create a work directory called tile7 under OROG_DIR. # This work directory can be removed after the orography file has been # created (it is currently not deleted). @@ -304,8 +304,8 @@ esac # cd_vrfy ${raw_dir} mv_vrfy oro.${CRES}.tile${TILE_RGNL}.nc \ - oro.${CRES}.tile${TILE_RGNL}.halo${NHW_T7}.nc -ln_vrfy -sf oro.${CRES}.tile${TILE_RGNL}.halo${NHW_T7}.nc \ + oro.${CRES}.tile${TILE_RGNL}.halo${NHW}.nc +ln_vrfy -sf oro.${CRES}.tile${TILE_RGNL}.halo${NHW}.nc \ oro.${CRES}.tile${TILE_RGNL}.nc cd_vrfy - @@ -327,7 +327,7 @@ Setting orography filtering parameters..." #print_err_msg_exit "\ #Call to script that generates grid file (Jim Purser version) returned #with nonzero exit code." -#${CRES}_grid.tile${TILE_RGNL}.halo${NHW_T7}.nc +#${CRES}_grid.tile${TILE_RGNL}.halo${NHW}.nc #if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then @@ -401,7 +401,7 @@ fi #----------------------------------------------------------------------- # # Generate a filtered orography file with a wide halo (i.e. with a halo -# width of NHW_T7 cells) for tile 7 from the corresponding raw orography +# width of NHW cells) for tile 7 from the corresponding raw orography # file. # # The following will create a filtered orography file named @@ -459,8 +459,8 @@ zero exit code." # cd_vrfy ${filter_dir} mv_vrfy oro.${CRES}.tile${TILE_RGNL}.nc \ - oro.${CRES}.tile${TILE_RGNL}.halo${NHW_T7}.nc -#ln_vrfy -sf oro.${CRES}.tile${TILE_RGNL}.halo${NHW_T7}.nc \ + oro.${CRES}.tile${TILE_RGNL}.halo${NHW}.nc +#ln_vrfy -sf oro.${CRES}.tile${TILE_RGNL}.halo${NHW}.nc \ # oro.${CRES}.tile${TILE_RGNL}.nc cd_vrfy - @@ -485,7 +485,7 @@ Filtering of orography complete." # orography file without a halo and the one with a 4-cell-wide halo. # #unshaved_fp="${filter_dir}/oro.${CRES}.tile${TILE_RGNL}.nc" -unshaved_fp="${filter_dir}/oro.${CRES}.tile${TILE_RGNL}.halo${NHW_T7}.nc" +unshaved_fp="${filter_dir}/oro.${CRES}.tile${TILE_RGNL}.halo${NHW}.nc" # # We perform the work in shave_dir, so change location to that directo- # ry. Once it is complete, we move the resultant file from shave_dir to diff --git a/ush/setup.sh b/ush/setup.sh index 377cb889c..9467e8fd0 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -2065,7 +2065,7 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then # #----------------------------------------------------------------------- # -NHW_T7="${NHW_T7}" +NHW="${NHW}" NX="$NX" NY="$NY" istart_rgnl_wide_halo_T6SG="$istart_rgnl_wide_halo_T6SG" From 8cb6ec66ecd50ec40824e79575fec4f27c36c430 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 9 Jan 2020 05:01:32 -0700 Subject: [PATCH 164/203] Remove the _T7 from the names of any remaining local variables that contain it as a suffix. 
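Illustration (not part of the original patch): a suffix rename of this kind can also be done mechanically rather than by hand. The sketch below is hypothetical, assuming bash and GNU sed, and covers only the two local variable names touched in the diff that follows; the authoritative changes are the hunks themselves.

  # Hypothetical helper: drop the "_T7" piece from the two local string
  # variables in exregional_run_post.sh.  Word boundaries (\b) keep the
  # substitution from touching longer, unrelated names.
  for name in nx ny; do
    sed -i "s/\b${name}_T7_str\b/${name}_str/g" scripts/exregional_run_post.sh
  done
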
--- scripts/exregional_run_post.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/exregional_run_post.sh b/scripts/exregional_run_post.sh index a75f45492..5abefdbec 100755 --- a/scripts/exregional_run_post.sh +++ b/scripts/exregional_run_post.sh @@ -257,13 +257,13 @@ else refine_str="RR${GFDLgrid_REFINE_RATIO}" grid_name="${grid_name}_${CRES}_${stretch_str}_${refine_str}" elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - nx_T7_str="NX$( printf "%s" "${JPgrid_NX}" | sed "s|\.|p|" )" - ny_T7_str="NY$( printf "%s" "${JPgrid_NY}" | sed "s|\.|p|" )" + nx_str="NX$( printf "%s" "${JPgrid_NX}" | sed "s|\.|p|" )" + ny_str="NY$( printf "%s" "${JPgrid_NY}" | sed "s|\.|p|" )" alpha_JPgrid_param_str="A"$( printf "%s" "${JPgrid_ALPHA_PARAM}" | \ sed "s|-|mns|" | sed "s|\.|p|" ) kappa_JPgrid_param_str="K"$( printf "%s" "${JPgrid_KAPPA_PARAM}" | \ sed "s|-|mns|" | sed "s|\.|p|" ) - grid_name="${grid_name}_${nx_T7_str}_${ny_T7_str}_${alpha_JPgrid_param_str}_${kappa_JPgrid_param_str}" + grid_name="${grid_name}_${nx_str}_${ny_str}_${alpha_JPgrid_param_str}_${kappa_JPgrid_param_str}" fi fi From ab367abd9614889944860818e632d3ed54c8e240 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 9 Jan 2020 05:14:59 -0700 Subject: [PATCH 165/203] Minor fixes (to comments mostly). --- jobs/JREGIONAL_MAKE_GRID | 10 +++++----- scripts/exregional_make_grid.sh | 4 ++-- ush/setup.sh | 20 ++++++++++---------- 3 files changed, 17 insertions(+), 17 deletions(-) diff --git a/jobs/JREGIONAL_MAKE_GRID b/jobs/JREGIONAL_MAKE_GRID index bf48a6847..2b9981213 100755 --- a/jobs/JREGIONAL_MAKE_GRID +++ b/jobs/JREGIONAL_MAKE_GRID @@ -59,9 +59,9 @@ # in the temporary directory defined in WORKDIR_FLTR. Note that: # # a) The filtered orography file generated by this script contains a -# halo of the same width (NHW) as the (unfiltered) orography -# file generated by script orog_gen_scr (and the grid file genera- -# ted by grid_gen_scr). +# halo of the same width (NHW) as the (unfiltered) orography file +# generated by script orog_gen_scr (and the grid file generated by +# grid_gen_scr). # # b) In analogy with the input grid files, the FV3SAR model needs as # input two (filtered) orography files -- one with no halo cells @@ -74,8 +74,8 @@ # This "shave" executable is called 4 times to generate 4 files from # the tile 7 grid file generated by grid_gen_scr and the tile 7 fil- # tered orography file generated by orog_fltr_scr (both of which have -# a halo of width NHW cells). The 4 output files are placed in -# the temporary directory defined in WORKDIR_SHVE. More specifically: +# a halo of width NHW cells). The 4 output files are placed in the +# temporary directory defined in WORKDIR_SHVE. More specifically: # # a) shave_exec is called to shave the halo in the tile 7 grid file # generated by grid_gen_scr down to a width of 3 cells and store diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index 87b90e1c7..6f2416d7b 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -227,8 +227,8 @@ mkdir_vrfy -p "$tmpdir" # tains information only about tile 7 (i.e. it does not have any infor- # mation on how tiles 1 through 6 are connected or that tile 7 is within # tile 6). All these files will be placed in the directory specified by -# GRID_DIR. Note that the file for tile 7 will include a halo of -# width NHW cells. +# GRID_DIR. Note that the file for tile 7 will include a halo of width +# NHW cells. 
# # Since tiles 1 through 6 are not needed to run the FV3SAR model and are # not used later on in any other preprocessing steps, it is not clear diff --git a/ush/setup.sh b/ush/setup.sh index 9467e8fd0..c26be74c7 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -1514,14 +1514,14 @@ if [ $rem -ne 0 ]; then print_err_msg_exit "\ The number of columns assigned to a given MPI task must be divisible by BLOCKSIZE: - nx_per_task = NX/LAYOUT_X = $NX/${LAYOUT_X} = $nx_per_task - ny_per_task = NY/LAYOUT_Y = $NY/${LAYOUT_Y} = $ny_per_task - num_cols_per_task = nx_per_task*ny_per_task = $num_cols_per_task + nx_per_task = NX/LAYOUT_X = $NX/${LAYOUT_X} = ${nx_per_task} + ny_per_task = NY/LAYOUT_Y = $NY/${LAYOUT_Y} = ${ny_per_task} + num_cols_per_task = nx_per_task*ny_per_task = ${num_cols_per_task} BLOCKSIZE = $BLOCKSIZE rem = num_cols_per_task%%BLOCKSIZE = $rem The prime factors of num_cols_per_task are (useful for determining a va- lid BLOCKSIZE): - prime_factors_num_cols_per_task: $prime_factors_num_cols_per_task" + prime_factors_num_cols_per_task: ${prime_factors_num_cols_per_task}" fi # #----------------------------------------------------------------------- @@ -1561,12 +1561,12 @@ fi # # If the write component is going to be used, make sure that the number # of grid cells in the y direction (NY) is divisible by the number of -# write tasks per group. This is because the NY rows of the grid -# must be distributed evenly among the write_tasks_per_group tasks in a -# given write group, i.e. each task must receive the same number of -# rows. This implies that NY must be evenly divisible by write_- -# tasks_per_group. If it isn't, the write component will hang or fail. -# We check for this below. +# write tasks per group. This is because the NY rows of the grid must +# be distributed evenly among the write_tasks_per_group tasks in a given +# write group, i.e. each task must receive the same number of rows. +# This implies that NY must be evenly divisible by WRTCMP_write_tasks_- +# per_group. If it isn't, the write component will hang or fail. We +# check for this below. # #----------------------------------------------------------------------- # From c4b0b1c335db3494f695338b3cf0d571e8275ddc Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 9 Jan 2020 21:06:07 -0700 Subject: [PATCH 166/203] When specifying the regional domain's center longitude and latitude, use the (non-grid-type-specific) variables LON_CTR and LAT_CTR instead of the grid-type specific variables GFDLgrid_LON_T6_CTR and GFDLgrid_LAT_T6_CTR or JPgrid_LON_CTR and JPgrid_LAT_CTR. Similarly, use STRETCH_FAC to specify the stretch factor instead of the variable GFDLgrid_STRETCH_FAC that is specific to a GFDLgrid type of grid. --- jobs/JREGIONAL_MAKE_GRID | 7 +------ scripts/exregional_make_grid.sh | 8 ++++---- scripts/exregional_run_post.sh | 2 +- ush/generate_FV3SAR_wflow.sh | 21 ++++++--------------- 4 files changed, 12 insertions(+), 26 deletions(-) diff --git a/jobs/JREGIONAL_MAKE_GRID b/jobs/JREGIONAL_MAKE_GRID index 2b9981213..13fe895bf 100755 --- a/jobs/JREGIONAL_MAKE_GRID +++ b/jobs/JREGIONAL_MAKE_GRID @@ -151,12 +151,7 @@ This is the J-job script for the task that generates grid files. 
#----------------------------------------------------------------------- # export gtype="$GTYPE" - -if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - export stretch_fac=${GFDLgrid_STRETCH_FAC} -elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - export stretch_fac=${STRETCH_FAC} -fi +export stretch_fac=${STRETCH_FAC} # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index 6f2416d7b..ecb091629 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -265,9 +265,9 @@ mkdir_vrfy -p "$tmpdir" # --grid_type gnomonic_ed \ # --nlon 2*${RES} \ # --grid_name C${RES}_grid \ -# --do_schmidt --stretch_factor ${GFDLgrid_STRETCH_FAC} \ -# --target_lon ${GFDLgrid_LON_T6_CTR} -# --target_lat ${GFDLgrid_LAT_T6_CTR} \ +# --do_schmidt --stretch_factor ${STRETCH_FAC} \ +# --target_lon ${LON_CTR} +# --target_lat ${LAT_CTR} \ # --nest_grid --parent_tile 6 --refine_ratio ${GFDLgrid_REFINE_RATIO} \ # --istart_nest ${istart_rgnl_wide_halo_T6SG} \ # --jstart_nest ${jstart_rgnl_wide_halo_T6SG} \ @@ -314,7 +314,7 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then $USHDIR/$grid_gen_scr \ $RES \ $tmpdir \ - ${GFDLgrid_STRETCH_FAC} ${GFDLgrid_LON_T6_CTR} ${GFDLgrid_LAT_T6_CTR} ${GFDLgrid_REFINE_RATIO} \ + ${STRETCH_FAC} ${LON_CTR} ${LAT_CTR} ${GFDLgrid_REFINE_RATIO} \ ${istart_rgnl_wide_halo_T6SG} ${jstart_rgnl_wide_halo_T6SG} \ ${iend_rgnl_wide_halo_T6SG} ${jend_rgnl_wide_halo_T6SG} \ 1 $USHDIR || \ diff --git a/scripts/exregional_run_post.sh b/scripts/exregional_run_post.sh index 5abefdbec..6a327ea7b 100755 --- a/scripts/exregional_run_post.sh +++ b/scripts/exregional_run_post.sh @@ -253,7 +253,7 @@ else grid_name="${GRID_GEN_METHOD}" if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - stretch_str="S$( printf "%s" "${GFDLgrid_STRETCH_FAC}" | sed "s|\.|p|" )" + stretch_str="S$( printf "%s" "${STRETCH_FAC}" | sed "s|\.|p|" )" refine_str="RR${GFDLgrid_REFINE_RATIO}" grid_name="${grid_name}_${CRES}_${stretch_str}_${refine_str}" elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index eca15b116..852aec009 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -641,22 +641,13 @@ set_file_param "${FV3_NML_FP}" "layout" "${LAYOUT_X},${LAYOUT_Y}" set_file_param "${FV3_NML_FP}" "npx" "$npx" set_file_param "${FV3_NML_FP}" "npy" "$npy" -if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then +set_file_param "${FV3_NML_FP}" "target_lon" "${LON_CTR}" +set_file_param "${FV3_NML_FP}" "target_lat" "${LAT_CTR}" # Question: -# For a regional grid (i.e. one that only has a tile 7) should the co- -# ordinates that target_lon and target_lat get set to be those of the -# center of tile 6 (of the parent grid) or those of tile 7? These two -# are not necessarily the same [although assuming there is only one re- -# gional domain within tile 6, i.e. assuming there is no tile 8, 9, etc, -# there is no reason not to center tile 7 with respect to tile 6]. 
- set_file_param "${FV3_NML_FP}" "target_lon" "${GFDLgrid_LON_T6_CTR}" - set_file_param "${FV3_NML_FP}" "target_lat" "${GFDLgrid_LAT_T6_CTR}" - set_file_param "${FV3_NML_FP}" "stretch_fac" "${GFDLgrid_STRETCH_FAC}" -elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - set_file_param "${FV3_NML_FP}" "target_lon" "${JPgrid_LON_CTR}" - set_file_param "${FV3_NML_FP}" "target_lat" "${JPgrid_LAT_CTR}" - set_file_param "${FV3_NML_FP}" "stretch_fac" "${STRETCH_FAC}" -fi +# For a JPgrid type grid, what should stretch_fac be set to? This de- +# pends on how the FV3 code uses the stretch_fac parameter in the name- +# list file. +set_file_param "${FV3_NML_FP}" "stretch_fac" "${STRETCH_FAC}" set_file_param "${FV3_NML_FP}" "bc_update_interval" "${LBC_UPDATE_INTVL_HRS}" # # For the GSD_v0 and the GSD_SAR physics suites, set the parameter lsoil From 378eb8e084250599566c2e98141c31005d553af6 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 9 Jan 2020 21:07:58 -0700 Subject: [PATCH 167/203] Clean up comments. --- ush/generate_FV3SAR_wflow.sh | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index 852aec009..ed9998b5d 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -625,10 +625,10 @@ print_info_msg "$VERBOSE" " Setting parameters in FV3 namelist file (FV3_NML_FP): FV3_NML_FP = \"${FV3_NML_FP}\"" # -# Set npx and npy, which are just NX plus 1 and NY plus 1, -# respectively. These need to be set in the FV3SAR Fortran namelist -# file. They represent the number of cell vertices in the x and y di- -# rections on the regional grid (tile 7). +# Set npx and npy, which are just NX plus 1 and NY plus 1, respectively. +# These need to be set in the FV3SAR Fortran namelist file. They repre- +# sent the number of cell vertices in the x and y directions on the re- +# gional grid. # npx=$((NX+1)) npy=$((NY+1)) @@ -646,7 +646,9 @@ set_file_param "${FV3_NML_FP}" "target_lat" "${LAT_CTR}" # Question: # For a JPgrid type grid, what should stretch_fac be set to? This de- # pends on how the FV3 code uses the stretch_fac parameter in the name- -# list file. +# list file. Recall that for a JPgrid, it gets set in the function +# set_gridparams_JPgrid(.sh) to something like 0.9999, but is it ok to +# set it to that here in the FV3 namelist file? set_file_param "${FV3_NML_FP}" "stretch_fac" "${STRETCH_FAC}" set_file_param "${FV3_NML_FP}" "bc_update_interval" "${LBC_UPDATE_INTVL_HRS}" # From 14b1ab9ad058775b3182de7d89b39ab26d5706bf Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 9 Jan 2020 21:20:16 -0700 Subject: [PATCH 168/203] In ush/setup.sh: Write non-grid-specific variables to the variable definitions file since those are the ones that will be used in the task scripts. In scripts/exregional_run_post.sh: (1) Use the non-grid-type-specific variables NX and NY instead of JPgrid_NX and JPgrid_NY; (2) Change local variable names for clarity/consistency. 
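Illustration (not part of the original patch): the grid_name pieces renamed in this commit encode numeric grid parameters by replacing a decimal point with "p" and a leading minus sign with "mns", as in the sed calls visible in the diff below. A minimal stand-alone sketch of that encoding, using made-up parameter values:

  # Hypothetical values, for illustration only.
  NX="1799"; NY="1059"
  JPgrid_ALPHA_PARAM="-0.5"; JPgrid_KAPPA_PARAM="0.21"

  nx_str="NX$( printf "%s" "$NX" | sed "s|\.|p|" )"    # NX1799
  ny_str="NY$( printf "%s" "$NY" | sed "s|\.|p|" )"    # NY1059
  JPgrid_alpha_param_str="A"$( printf "%s" "${JPgrid_ALPHA_PARAM}" | \
                               sed "s|-|mns|" | sed "s|\.|p|" )    # Amns0p5
  JPgrid_kappa_param_str="K"$( printf "%s" "${JPgrid_KAPPA_PARAM}" | \
                               sed "s|-|mns|" | sed "s|\.|p|" )    # K0p21
  echo "JPgrid_${nx_str}_${ny_str}_${JPgrid_alpha_param_str}_${JPgrid_kappa_param_str}"
  # Prints: JPgrid_NX1799_NY1059_Amns0p5_K0p21
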
--- scripts/exregional_run_post.sh | 10 +++++----- ush/setup.sh | 19 +++++++++++-------- 2 files changed, 16 insertions(+), 13 deletions(-) diff --git a/scripts/exregional_run_post.sh b/scripts/exregional_run_post.sh index 6a327ea7b..21bd3248e 100755 --- a/scripts/exregional_run_post.sh +++ b/scripts/exregional_run_post.sh @@ -257,13 +257,13 @@ else refine_str="RR${GFDLgrid_REFINE_RATIO}" grid_name="${grid_name}_${CRES}_${stretch_str}_${refine_str}" elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then - nx_str="NX$( printf "%s" "${JPgrid_NX}" | sed "s|\.|p|" )" - ny_str="NY$( printf "%s" "${JPgrid_NY}" | sed "s|\.|p|" )" - alpha_JPgrid_param_str="A"$( printf "%s" "${JPgrid_ALPHA_PARAM}" | \ + nx_str="NX$( printf "%s" "$NX" | sed "s|\.|p|" )" + ny_str="NY$( printf "%s" "$NY" | sed "s|\.|p|" )" + JPgrid_alpha_param_str="A"$( printf "%s" "${JPgrid_ALPHA_PARAM}" | \ sed "s|-|mns|" | sed "s|\.|p|" ) - kappa_JPgrid_param_str="K"$( printf "%s" "${JPgrid_KAPPA_PARAM}" | \ + JPgrid_kappa_param_str="K"$( printf "%s" "${JPgrid_KAPPA_PARAM}" | \ sed "s|-|mns|" | sed "s|\.|p|" ) - grid_name="${grid_name}_${nx_str}_${ny_str}_${alpha_JPgrid_param_str}_${kappa_JPgrid_param_str}" + grid_name="${grid_name}_${nx_str}_${ny_str}_${JPgrid_alpha_param_str}_${JPgrid_kappa_param_str}" fi fi diff --git a/ush/setup.sh b/ush/setup.sh index c26be74c7..e456dd6f7 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -2037,6 +2037,13 @@ TILE_RGNL="${TILE_RGNL}" NH0="${NH0}" NH3="${NH3}" NH4="${NH4}" + +LON_CTR="${LON_CTR}" +LAT_CTR="${LAT_CTR}" +NX="${NX}" +NY="${NY}" +NHW="${NHW}" +STRETCH_FAC="${STRETCH_FAC}" EOM } || print_err_msg_exit "\ Heredoc (cat) command to append new variable definitions to variable @@ -2065,13 +2072,10 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then # #----------------------------------------------------------------------- # -NHW="${NHW}" -NX="$NX" -NY="$NY" -istart_rgnl_wide_halo_T6SG="$istart_rgnl_wide_halo_T6SG" -iend_rgnl_wide_halo_T6SG="$iend_rgnl_wide_halo_T6SG" -jstart_rgnl_wide_halo_T6SG="$jstart_rgnl_wide_halo_T6SG" -jend_rgnl_wide_halo_T6SG="$jend_rgnl_wide_halo_T6SG" +ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG="${ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}" +IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG="${IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}" +JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG="${JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}" +JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG="${JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}" CRES="$CRES" EOM } || print_err_msg_exit "\ @@ -2102,7 +2106,6 @@ NEG_NY_OF_DOM_WITH_WIDE_HALO="${NEG_NY_OF_DOM_WITH_WIDE_HALO}" # RES="" # This will be set after the grid generation task is complete. CRES="" # This will be set after the grid generation task is complete. -STRETCH_FAC="${STRETCH_FAC}" EOM } || print_err_msg_exit "\ Heredoc (cat) command to append grid parameters to variable definitions From 17b76d8bd94e7d7a38bbcf86ddb01611b051a43e Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Sat, 11 Jan 2020 12:46:26 -0700 Subject: [PATCH 169/203] Add indentation in preparation for converting this script into a function. 
--- ush/link_fix.sh | 186 ++++++++++++++++++++++++------------------------ 1 file changed, 93 insertions(+), 93 deletions(-) diff --git a/ush/link_fix.sh b/ush/link_fix.sh index dc11b8090..f43d782e8 100755 --- a/ush/link_fix.sh +++ b/ush/link_fix.sh @@ -9,9 +9,9 @@ # #----------------------------------------------------------------------- # -scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) + scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + scrfunc_fn=$( basename "${scrfunc_fp}" ) + scrfunc_dir=$( dirname "${scrfunc_fp}" ) # #----------------------------------------------------------------------- # @@ -21,7 +21,7 @@ scrfunc_dir=$( dirname "${scrfunc_fp}" ) # #----------------------------------------------------------------------- # -USHDIR="${scrfunc_dir}" + USHDIR="${scrfunc_dir}" # #----------------------------------------------------------------------- # @@ -31,7 +31,7 @@ USHDIR="${scrfunc_dir}" # #----------------------------------------------------------------------- # -. $USHDIR/source_util_funcs.sh + . $USHDIR/source_util_funcs.sh # #----------------------------------------------------------------------- # @@ -40,7 +40,7 @@ USHDIR="${scrfunc_dir}" # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u +x; } > /dev/null 2>&1 + { save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -50,12 +50,12 @@ USHDIR="${scrfunc_dir}" # #----------------------------------------------------------------------- # -valid_args=( \ + valid_args=( \ "verbose" \ "global_var_defns_fp" \ "file_group" \ -) -process_args valid_args "$@" + ) + process_args valid_args "$@" # #----------------------------------------------------------------------- # @@ -65,7 +65,7 @@ process_args valid_args "$@" # #----------------------------------------------------------------------- # -print_input_args valid_args + print_input_args valid_args # #----------------------------------------------------------------------- # @@ -74,7 +74,7 @@ print_input_args valid_args # #----------------------------------------------------------------------- # -. ${global_var_defns_fp} + . ${global_var_defns_fp} # #----------------------------------------------------------------------- # @@ -89,7 +89,7 @@ print_input_args valid_args # #----------------------------------------------------------------------- # -print_info_msg "$verbose" " + print_info_msg "$verbose" " Creating links in the FIXsar directory to the grid files..." # #----------------------------------------------------------------------- @@ -98,18 +98,18 @@ Creating links in the FIXsar directory to the grid files..." 
# #----------------------------------------------------------------------- # -fns_grid=( \ + fns_grid=( \ "C*_mosaic.nc" \ "C*_grid.tile${TILE_RGNL}.halo${NH3}.nc" \ "C*_grid.tile${TILE_RGNL}.halo${NH4}.nc" \ -) + ) -fns_orog=( \ + fns_orog=( \ "C*_oro_data.tile${TILE_RGNL}.halo${NH0}.nc" \ "C*_oro_data.tile${TILE_RGNL}.halo${NH4}.nc" \ -) + ) -sfc_climo_fields=( \ + sfc_climo_fields=( \ "facsf" \ "maximum_snow_albedo" \ "slope_type" \ @@ -118,14 +118,14 @@ sfc_climo_fields=( \ "substrate_temperature" \ "vegetation_greenness" \ "vegetation_type" \ -) -num_fields=${#sfc_climo_fields[@]} -fns_sfc_climo=() -for (( i=0; i<${num_fields}; i++ )); do - ii=$((2*i)) - fns_sfc_climo[$ii]="C*.${sfc_climo_fields[$i]}.tile${TILE_RGNL}.halo${NH0}.nc" - fns_sfc_climo[$ii+1]="C*.${sfc_climo_fields[$i]}.tile${TILE_RGNL}.halo${NH4}.nc" -done + ) + num_fields=${#sfc_climo_fields[@]} + fns_sfc_climo=() + for (( i=0; i<${num_fields}; i++ )); do + ii=$((2*i)) + fns_sfc_climo[$ii]="C*.${sfc_climo_fields[$i]}.tile${TILE_RGNL}.halo${NH0}.nc" + fns_sfc_climo[$ii+1]="C*.${sfc_climo_fields[$i]}.tile${TILE_RGNL}.halo${NH4}.nc" + done # #----------------------------------------------------------------------- # @@ -134,8 +134,8 @@ done # #----------------------------------------------------------------------- # -valid_vals_file_group=( "grid" "orog" "sfc_climo" ) -check_var_valid_value "file_group" "valid_vals_file_group" + valid_vals_file_group=( "grid" "orog" "sfc_climo" ) + check_var_valid_value "file_group" "valid_vals_file_group" # #----------------------------------------------------------------------- # @@ -144,20 +144,20 @@ check_var_valid_value "file_group" "valid_vals_file_group" # #----------------------------------------------------------------------- # -fps_grid=( "${fns_grid[@]/#/${GRID_DIR}/}" ) -fps_orog=( "${fns_orog[@]/#/${OROG_DIR}/}" ) -fps_sfc_climo=( "${fns_sfc_climo[@]/#/${SFC_CLIMO_DIR}/}" ) + fps_grid=( "${fns_grid[@]/#/${GRID_DIR}/}" ) + fps_orog=( "${fns_orog[@]/#/${OROG_DIR}/}" ) + fps_sfc_climo=( "${fns_sfc_climo[@]/#/${SFC_CLIMO_DIR}/}" ) -if [ "${file_group}" = "grid" ]; then - fps_all=( "${fps_grid[@]}" ) - run_task="${RUN_TASK_MAKE_GRID}" -elif [ "${file_group}" = "orog" ]; then - fps_all=( "${fps_orog[@]}" ) - run_task="${RUN_TASK_MAKE_OROG}" -elif [ "${file_group}" = "sfc_climo" ]; then - fps_all=( "${fps_sfc_climo[@]}" ) - run_task="${RUN_TASK_MAKE_SFC_CLIMO}" -fi + if [ "${file_group}" = "grid" ]; then + fps_all=( "${fps_grid[@]}" ) + run_task="${RUN_TASK_MAKE_GRID}" + elif [ "${file_group}" = "orog" ]; then + fps_all=( "${fps_orog[@]}" ) + run_task="${RUN_TASK_MAKE_OROG}" + elif [ "${file_group}" = "sfc_climo" ]; then + fps_all=( "${fps_sfc_climo[@]}" ) + run_task="${RUN_TASK_MAKE_SFC_CLIMO}" + fi # #----------------------------------------------------------------------- # @@ -166,44 +166,44 @@ fi # #----------------------------------------------------------------------- # -i=0 -res_prev="" -res="" -fp_prev="" + i=0 + res_prev="" + res="" + fp_prev="" -for fp in ${fps_all[@]}; do + for fp in ${fps_all[@]}; do - fn=$( basename $fp ) + fn=$( basename $fp ) printf "i = %s\n" "$i" printf " fn = %s\n" "$fn" - - res=$( printf "%s" $fn | sed -n -r -e "s/^C([0-9]*).*/\1/p" ) - if [ -z $res ]; then - print_err_msg_exit "\ + + res=$( printf "%s" $fn | sed -n -r -e "s/^C([0-9]*).*/\1/p" ) + if [ -z $res ]; then + print_err_msg_exit "\ The C-resolution could not be extracted from the current file's name. 
The full path to the file (fp) is: fp = \"${fp}\" This may be because fp contains the * globbing character, which would imply that no files were found that match the globbing pattern specified in fp." - fi + fi printf " res_prev = %s\n" "${res_prev}" printf " res = %s\n" "${res}" - if [ $i -gt 0 ] && [ ${res} != ${res_prev} ]; then - print_err_msg_exit "\ + if [ $i -gt 0 ] && [ ${res} != ${res_prev} ]; then + print_err_msg_exit "\ The C-resolutions (as obtained from the file names) of the previous and current file (fp_prev and fp, respectively) are different: fp_prev = \"${fp_prev}\" fp = \"${fp}\" Please ensure that all files have the same C-resolution." - fi + fi - i=$((i+1)) - fp_prev="$fp" - res_prev=${res} + i=$((i+1)) + fp_prev="$fp" + res_prev=${res} -done + done # #----------------------------------------------------------------------- # @@ -241,7 +241,7 @@ fi # #----------------------------------------------------------------------- # -fps_all=( "${fps_all[@]/\*/$res}" ) + fps_all=( "${fps_all[@]/\*/$res}" ) echo printf "fps_all = ( \\ \n" @@ -249,25 +249,25 @@ printf "\"%s\" \\ \n" "${fps_all[@]}" printf ")" echo -relative_or_null="" -if [ "${run_task}" = "TRUE" ]; then - relative_or_null="--relative" -fi + relative_or_null="" + if [ "${run_task}" = "TRUE" ]; then + relative_or_null="--relative" + fi echo echo "FIXsar = \"$FIXsar\"" -cd_vrfy $FIXsar -for fp in "${fps_all[@]}"; do - if [ -f "$fp" ]; then - ln_vrfy -sf ${relative_or_null} $fp . -# ln_vrfy -sf $fp . - else - print_err_msg_exit "\ + cd_vrfy $FIXsar + for fp in "${fps_all[@]}"; do + if [ -f "$fp" ]; then + ln_vrfy -sf ${relative_or_null} $fp . +# ln_vrfy -sf $fp . + else + print_err_msg_exit "\ Cannot create symlink because target file (fp) does not exist: fp = \"${fp}\"" - fi -done + fi + done # #----------------------------------------------------------------------- # @@ -276,34 +276,34 @@ done # #----------------------------------------------------------------------- # -if [ "${file_group}" = "grid" ]; then + if [ "${file_group}" = "grid" ]; then # Create link to grid file needed by the make_ic and make_lbc tasks. - filename="${cres}_grid.tile${TILE_RGNL}.halo${NH4}.nc" - ln_vrfy -sf ${relative_or_null} $filename ${cres}_grid.tile${TILE_RGNL}.nc -fi + filename="${cres}_grid.tile${TILE_RGNL}.halo${NH4}.nc" + ln_vrfy -sf ${relative_or_null} $filename ${cres}_grid.tile${TILE_RGNL}.nc + fi # Create links to surface climatology files needed by the make_ic task. 
-if [ "${file_group}" = "sfc_climo" ]; then + if [ "${file_group}" = "sfc_climo" ]; then - tmp=( "${sfc_climo_fields[@]/#/${cres}.}" ) - fns_sfc_climo_with_halo=( "${tmp[@]/%/.tile${TILE_RGNL}.halo${NH4}.nc}" ) - fns_sfc_climo_no_halo=( "${tmp[@]/%/.tile${TILE_RGNL}.nc}" ) + tmp=( "${sfc_climo_fields[@]/#/${cres}.}" ) + fns_sfc_climo_with_halo=( "${tmp[@]/%/.tile${TILE_RGNL}.halo${NH4}.nc}" ) + fns_sfc_climo_no_halo=( "${tmp[@]/%/.tile${TILE_RGNL}.nc}" ) - cd_vrfy $FIXsar - for (( i=0; i<${num_fields}; i++ )); do - target="${fns_sfc_climo_with_halo[$i]}" - symlink="${fns_sfc_climo_no_halo[$i]}" - if [ -f "$target" ]; then -# ln_vrfy -sf ${relative_or_null} $target $symlink - ln_vrfy -sf $target $symlink - else - print_err_msg_exit "\ + cd_vrfy $FIXsar + for (( i=0; i<${num_fields}; i++ )); do + target="${fns_sfc_climo_with_halo[$i]}" + symlink="${fns_sfc_climo_no_halo[$i]}" + if [ -f "$target" ]; then +# ln_vrfy -sf ${relative_or_null} $target $symlink + ln_vrfy -sf $target $symlink + else + print_err_msg_exit "\ Cannot create symlink because target file (target) does not exist: target = \"${target}\"" - fi - done + fi + done -fi + fi # #----------------------------------------------------------------------- # @@ -311,5 +311,5 @@ fi # #----------------------------------------------------------------------- # -{ restore_shell_opts; } > /dev/null 2>&1 + { restore_shell_opts; } > /dev/null 2>&1 From 4ff41875f0e618614e90babfd7360b51a3f4ab69 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Sat, 11 Jan 2020 12:51:41 -0700 Subject: [PATCH 170/203] Convert the link_fix.sh script to a function. --- ush/link_fix.sh | 31 ++++++++++++++----------------- 1 file changed, 14 insertions(+), 17 deletions(-) diff --git a/ush/link_fix.sh b/ush/link_fix.sh index f43d782e8..62a600f89 100755 --- a/ush/link_fix.sh +++ b/ush/link_fix.sh @@ -3,44 +3,40 @@ # #----------------------------------------------------------------------- # -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). +# This file defines and then calls a function that i # #----------------------------------------------------------------------- # - scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) - scrfunc_fn=$( basename "${scrfunc_fp}" ) - scrfunc_dir=$( dirname "${scrfunc_fp}" ) +function link_fix() { # #----------------------------------------------------------------------- # -# This script should be located in USHDIR, so set USHDIR to this -# script's directory (USHDIR is needed in various places below or in -# sourced scripts). +# Save current shell options (in a global array). Then set new options +# for this script/function. # #----------------------------------------------------------------------- # - USHDIR="${scrfunc_dir}" + { save_shell_opts; set -u +x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # -# Source the function definitions file, which should be in the same di- -# rectory as the current script. This is needed in order to be able to -# use the process_args() function below. +# Get the full path to the file in which this script/function is located +# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in +# which the file is located (scrfunc_dir). # #----------------------------------------------------------------------- # - . 
$USHDIR/source_util_funcs.sh + local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) + local scrfunc_fn=$( basename "${scrfunc_fp}" ) + local scrfunc_dir=$( dirname "${scrfunc_fp}" ) # #----------------------------------------------------------------------- # -# Save current shell options (in a global array). Then set new options -# for this script/function. +# Get the name of this function. # #----------------------------------------------------------------------- # - { save_shell_opts; set -u +x; } > /dev/null 2>&1 + local func_name="${FUNCNAME[0]}" # #----------------------------------------------------------------------- # @@ -313,3 +309,4 @@ Cannot create symlink because target file (target) does not exist: # { restore_shell_opts; } > /dev/null 2>&1 +} From eb849946bd11f3dc9255c7fda6a9e9506ae6c0c9 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Wed, 15 Jan 2020 09:35:15 -0700 Subject: [PATCH 171/203] Fix comments. --- jobs/JREGIONAL_GET_EXTRN_FILES | 3 +-- jobs/JREGIONAL_MAKE_GRID | 3 +-- jobs/JREGIONAL_MAKE_ICS | 3 +-- jobs/JREGIONAL_MAKE_LBCS | 3 +-- jobs/JREGIONAL_MAKE_OROG | 3 +-- jobs/JREGIONAL_MAKE_SFC_CLIMO | 3 +-- jobs/JREGIONAL_RUN_FCST | 3 +-- jobs/JREGIONAL_RUN_POST | 3 +-- scripts/exregional_get_extrn_files.sh | 3 +-- scripts/exregional_make_grid.sh | 3 +-- scripts/exregional_make_ics.sh | 3 +-- scripts/exregional_make_lbcs.sh | 3 +-- scripts/exregional_make_orog.sh | 3 +-- scripts/exregional_make_sfc_climo.sh | 3 +-- scripts/exregional_run_fcst.sh | 3 +-- scripts/exregional_run_post.sh | 3 +-- ush/config_defaults.sh | 16 ++++++++-------- ush/get_extrn_mdl_file_dir_info.sh | 3 +-- ush/load_modules_run_task.sh | 3 +-- 19 files changed, 26 insertions(+), 44 deletions(-) diff --git a/jobs/JREGIONAL_GET_EXTRN_FILES b/jobs/JREGIONAL_GET_EXTRN_FILES index 26440f352..5568871f6 100755 --- a/jobs/JREGIONAL_GET_EXTRN_FILES +++ b/jobs/JREGIONAL_GET_EXTRN_FILES @@ -22,8 +22,7 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_MAKE_GRID b/jobs/JREGIONAL_MAKE_GRID index 13fe895bf..718d4af58 100755 --- a/jobs/JREGIONAL_MAKE_GRID +++ b/jobs/JREGIONAL_MAKE_GRID @@ -101,8 +101,7 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_MAKE_ICS b/jobs/JREGIONAL_MAKE_ICS index 5614faaa2..77bf2a532 100755 --- a/jobs/JREGIONAL_MAKE_ICS +++ b/jobs/JREGIONAL_MAKE_ICS @@ -3,8 +3,7 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_MAKE_LBCS b/jobs/JREGIONAL_MAKE_LBCS index 24ff59da6..678bd669f 100755 --- a/jobs/JREGIONAL_MAKE_LBCS +++ b/jobs/JREGIONAL_MAKE_LBCS @@ -3,8 +3,7 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. 
+# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_MAKE_OROG b/jobs/JREGIONAL_MAKE_OROG index 579b0a01e..3e9fff3b7 100755 --- a/jobs/JREGIONAL_MAKE_OROG +++ b/jobs/JREGIONAL_MAKE_OROG @@ -3,8 +3,7 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_MAKE_SFC_CLIMO b/jobs/JREGIONAL_MAKE_SFC_CLIMO index 23771df22..eec9f3488 100755 --- a/jobs/JREGIONAL_MAKE_SFC_CLIMO +++ b/jobs/JREGIONAL_MAKE_SFC_CLIMO @@ -3,8 +3,7 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_RUN_FCST b/jobs/JREGIONAL_RUN_FCST index 634747f51..39e5c17b2 100755 --- a/jobs/JREGIONAL_RUN_FCST +++ b/jobs/JREGIONAL_RUN_FCST @@ -13,8 +13,7 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # diff --git a/jobs/JREGIONAL_RUN_POST b/jobs/JREGIONAL_RUN_POST index 12fa5344c..cde0dd726 100755 --- a/jobs/JREGIONAL_RUN_POST +++ b/jobs/JREGIONAL_RUN_POST @@ -12,8 +12,7 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_get_extrn_files.sh b/scripts/exregional_get_extrn_files.sh index fa1899ed2..d684d1863 100755 --- a/scripts/exregional_get_extrn_files.sh +++ b/scripts/exregional_get_extrn_files.sh @@ -3,8 +3,7 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index ecb091629..fd707fab8 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -3,8 +3,7 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_make_ics.sh b/scripts/exregional_make_ics.sh index b5e7e8057..4c6a5ca15 100755 --- a/scripts/exregional_make_ics.sh +++ b/scripts/exregional_make_ics.sh @@ -3,8 +3,7 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. 
+# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_make_lbcs.sh b/scripts/exregional_make_lbcs.sh index faae5d8f5..fe273bbbb 100755 --- a/scripts/exregional_make_lbcs.sh +++ b/scripts/exregional_make_lbcs.sh @@ -3,8 +3,7 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index ce2920823..0460047b9 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -3,8 +3,7 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_make_sfc_climo.sh b/scripts/exregional_make_sfc_climo.sh index 6433d1158..31e624c58 100755 --- a/scripts/exregional_make_sfc_climo.sh +++ b/scripts/exregional_make_sfc_climo.sh @@ -3,8 +3,7 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index be4381d46..dba2237fd 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -3,8 +3,7 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_run_post.sh b/scripts/exregional_run_post.sh index 21bd3248e..0f278751a 100755 --- a/scripts/exregional_run_post.sh +++ b/scripts/exregional_run_post.sh @@ -3,8 +3,7 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index 937620e94..45d1273fd 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -140,14 +140,14 @@ PTMP="/path/to/temporary/directory/ptmp" # tions (either in terms of actual values or placeholders) of the para- # meters that the write component needs. If the write component is go- # ing to be used, this file is first appended to MODEL_CONFIG_FN, and -# any placeholder values in the variable definitions in the new MODEL_- -# CONFIG_FN file are subsequently replaced by actual values. If a pre- -# defined domain is being used (see PREDEF_GRID_NAME below), WRTCMP_PA- -# RAMS_TEMPLATE_FN may be set to an empty string. In this case, it will -# be reset to the name of the existing template file for that predefined -# domain. 
It is assumed that the file specified by WRTCMP_PARAMS_TEMP- -# LATE_FN is located in the templates directory TEMPLATE_DIR, which is -# in turn defined in the setup script. +# any placeholder values in the variable definitions in the new +# MODEL_CONFIG_FN file are subsequently replaced by actual values. If a +# predefined domain is being used (see PREDEF_GRID_NAME below), +# WRTCMP_PARAMS_TEMPLATE_FN may be set to an empty string. In this +# case, it will be reset to the name of the existing template file for +# that predefined domain. It is assumed that the file specified by +# WRTCMP_PARAMS_TEMPLATE_FN is located in the templates directory +# TEMPLATE_DIR, which is in turn defined in the setup script. # #----------------------------------------------------------------------- # diff --git a/ush/get_extrn_mdl_file_dir_info.sh b/ush/get_extrn_mdl_file_dir_info.sh index 54f2b5846..5e111186c 100755 --- a/ush/get_extrn_mdl_file_dir_info.sh +++ b/ush/get_extrn_mdl_file_dir_info.sh @@ -1,8 +1,7 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # diff --git a/ush/load_modules_run_task.sh b/ush/load_modules_run_task.sh index f38da12d2..fb149f1aa 100755 --- a/ush/load_modules_run_task.sh +++ b/ush/load_modules_run_task.sh @@ -3,8 +3,7 @@ # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Source the variable definitions file and the bash utility functions. # #----------------------------------------------------------------------- # From 62c438152f0076bb857360bd76aa99157f8dfa1a Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Wed, 15 Jan 2020 09:39:00 -0700 Subject: [PATCH 172/203] (1) Fix comments; (2) Fix informational/error messages; (3) Rename variables DEFAULT_CONFIG_FN and CUSTOM_CONFIG_FN to DEFAULT_EXPT_CONFIG_FN and EXPT_CONFIG_F, respectively. --- ush/compare_config_scripts.sh | 44 +++++++++--------- ush/generate_FV3SAR_wflow.sh | 23 ++++++---- ush/setup.sh | 84 ++++++++++++++++++----------------- 3 files changed, 82 insertions(+), 69 deletions(-) diff --git a/ush/compare_config_scripts.sh b/ush/compare_config_scripts.sh index 5685a0996..0fa8697f3 100644 --- a/ush/compare_config_scripts.sh +++ b/ush/compare_config_scripts.sh @@ -2,10 +2,10 @@ #----------------------------------------------------------------------- # # This file defines and then calls a function that checks that all vari- -# ables defined in the local configuration script (whose file name is -# stored in the variable LOCAL_CONFIG_FN) are also assigned a default -# value in the default configuration script (whose file name is stored -# in the variable DEFAULT_CONFIG_FN). +# ables defined in the user-specified experiment/workflow configuration +# file (whose file name is stored in the variable EXPT_CONFIG_FN) are +# also assigned default values in the default configuration file (whose +# file name is stored in the variable DEFAULT_EXPT_CONFIG_FN). # #----------------------------------------------------------------------- # @@ -33,7 +33,7 @@ local func_name="${FUNCNAME[0]}" # #----------------------------------------------------------------------- # -# Source function definition files. +# Source bash utility functions. 
# #----------------------------------------------------------------------- # @@ -51,16 +51,16 @@ local func_name="${FUNCNAME[0]}" #----------------------------------------------------------------------- # # Create a list of variable settings in the default workflow/experiment -# default script by stripping out comments, blank lines, extraneous -# leading whitespace, etc from that script and saving the result in the -# variable var_list_default. Each line of var_list_default will have -# the form +# file (script) by stripping out comments, blank lines, extraneous lead- +# ing whitespace, etc from that file and saving the result in the varia- +# ble var_list_default. Each line of var_list_default will have the +# form # # VAR=... # # where the VAR is a variable name and ... is the value (including any # trailing comments). Then create an equivalent list for the local con- -# figuration script and save the result in var_list_local. +# figuration file and save the result in var_list_local. # #----------------------------------------------------------------------- # @@ -69,7 +69,7 @@ sed -r \ -e "s/^([ ]*)([^ ]+.*)/\2/g" \ -e "/^#.*/d" \ -e "/^$/d" \ - ${DEFAULT_CONFIG_FN} \ + ${DEFAULT_EXPT_CONFIG_FN} \ ) var_list_local=$( \ @@ -77,14 +77,14 @@ sed -r \ -e "s/^([ ]*)([^ ]+.*)/\2/g" \ -e "/^#.*/d" \ -e "/^$/d" \ - ${CUSTOM_CONFIG_FN} \ + ${EXPT_CONFIG_FN} \ ) # #----------------------------------------------------------------------- # # Loop through each line of var_list_local. For each line, extract the # the name of the variable that is being set (say VAR) and check that -# this variable is set somewhere in the default configuration script by +# this variable is set somewhere in the default configuration file by # verifying that a line that starts with "VAR=" exists in var_list_de- # fault. # @@ -100,9 +100,10 @@ while read crnt_line; do if [ -z "${var_name}" ]; then print_info_msg " -Current line (crnt_line) of custom experiment/workflow configuration -script (CUSTOM_CONFIG_FN) does not contain a variable name (var_name): - CUSTOM_CONFIG_FN = \"${CUSTOM_CONFIG_FN}\" +Current line (crnt_line) of user-specified experiment/workflow configu- +ration file (EXPT_CONFIG_FN) does not contain a variable name (i.e. +var_name is empty): + EXPT_CONFIG_FN = \"${EXPT_CONFIG_FN}\" crnt_line = \"${crnt_line}\" var_name = \"${var_name}\" Skipping to next line." @@ -116,13 +117,14 @@ Skipping to next line." # grep "^${var_name}=" <<< "${var_list_default}" > /dev/null 2>&1 || \ print_err_msg_exit "\ -Variable (var_name) in custom configuration script (CUSTOM_CONFIG_FN) -not defined in default configuration script (DEFAULT_CONFIG_FN): - CUSTOM_CONFIG_FN = \"${CUSTOM_CONFIG_FN}\" - DEFAULT_CONFIG_FN = \"${DEFAULT_CONFIG_FN}\" +The variable specified by var_name in the user-specified experiment/ +workflow configuration file (EXPT_CONFIG_FN) does not appear in the de- +fault experiment/workflow configuration file (DEFAULT_EXPT_CONFIG_FN): + EXPT_CONFIG_FN = \"${EXPT_CONFIG_FN}\" + DEFAULT_EXPT_CONFIG_FN = \"${DEFAULT_EXPT_CONFIG_FN}\" var_name = \"${var_name}\" Please assign a default value to this variable in the default configura- -tion script and rerun." +tion file and rerun." fi diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index ed9998b5d..37ad863bd 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -41,7 +41,7 @@ ushdir="${scrfunc_dir}" # #----------------------------------------------------------------------- # -# Source function definition files. 
+# Source bash utility functions. # #----------------------------------------------------------------------- # @@ -58,12 +58,12 @@ ushdir="${scrfunc_dir}" # #----------------------------------------------------------------------- # -# Source the setup script. Note that this in turn sources the configu- -# ration file/script (config.sh) in the current directory. It also cre- -# ates the run and work directories, the INPUT and RESTART subdirecto- -# ries under the run directory, and a variable definitions file/script -# in the run directory. The latter gets sources by each of the scripts -# that run the various workflow tasks. +# Source the file that defines and then calls the setup function. The +# setup function in turn first sources the default configuration file +# (which contains default values for the experiment/workflow parameters) +# and then sources the user-specified configuration file (which contains +# user-specified values for a subset of the experiment/workflow parame- +# ters that override their default values). # #----------------------------------------------------------------------- # @@ -478,6 +478,13 @@ echo "RES = $RES" set_file_param "${GLOBAL_VAR_DEFNS_FP}" "RES" "${RES}" set_file_param "${GLOBAL_VAR_DEFNS_FP}" "CRES" "${CRES}" +# +#----------------------------------------------------------------------- +# +# +# +#----------------------------------------------------------------------- +# else # #----------------------------------------------------------------------- @@ -690,7 +697,7 @@ fi # #----------------------------------------------------------------------- # -cp_vrfy $USHDIR/${CUSTOM_CONFIG_FN} $EXPTDIR +cp_vrfy $USHDIR/${EXPT_CONFIG_FN} $EXPTDIR # #----------------------------------------------------------------------- # diff --git a/ush/setup.sh b/ush/setup.sh index e456dd6f7..ee260c149 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -45,12 +45,18 @@ cd_vrfy ${scrfunc_dir} # #----------------------------------------------------------------------- # -# Source function definition files. +# Source bash utility functions. # #----------------------------------------------------------------------- # . ./source_util_funcs.sh - +# +#----------------------------------------------------------------------- +# +# Source functions for setting grid parameters. +# +#----------------------------------------------------------------------- +# . ./set_gridparams_GFDLgrid.sh . ./set_gridparams_JPgrid.sh # @@ -70,47 +76,44 @@ cd_vrfy ${scrfunc_dir} # #----------------------------------------------------------------------- # -DEFAULT_CONFIG_FN="config_defaults.sh" -CUSTOM_CONFIG_FN="config.sh" +DEFAULT_EXPT_CONFIG_FN="config_defaults.sh" +EXPT_CONFIG_FN="config.sh" # #----------------------------------------------------------------------- # -# Source the configuration script containing default values of experi- -# ment variables. +# Source the default configuration file containing default values for +# the experiment/workflow variables. # #----------------------------------------------------------------------- # -. ./${DEFAULT_CONFIG_FN} +. ./${DEFAULT_EXPT_CONFIG_FN} # #----------------------------------------------------------------------- # -# If a local configuration script exists, source that as well. Here, by -# "local", we mean one that contains variable settings that are relevant -# only to the local environment (e.g. a directory setting that applies -# only to the current user on the current machine). 
Note that this lo- -# cal script is not tracked by the repository, whereas the default con- -# figuration script sourced above is tracked. Any variable settings in -# the local script will override the ones in the default script. The -# purpose of having a local configuration script is to avoid having to -# make changes to the default configuration script that are only appli- -# cable to one user, one machine, etc. +# If a user-specified configuration file exists, source it. This file +# contains user-specified values for a subset of the experiment/workflow +# variables that override their default values. Note that the user- +# specified configuration file is not tracked by the repository, whereas +# the default configuration file is tracked. # #----------------------------------------------------------------------- # -if [ -f "${CUSTOM_CONFIG_FN}" ]; then +if [ -f "${EXPT_CONFIG_FN}" ]; then # -# We require that the variables being set in the local configuration -# script have counterparts in the default configuration script. This is -# so that we do not accidentally introduce new variables in the local -# script without also officially introducing them in the default script. -# Thus, before sourcing the local configuration script, we check for -# this. +# We require that the variables being set in the user-specified configu- +# ration file have counterparts in the default configuration file. This +# is so that we do not introduce new variables in the user-specified +# configuration file without also officially introducing them in the de- +# fault configuration file. Thus, before sourcing the user-specified +# configuration file, we check that all variables in the user-specified +# configuration file are also assigned default values in the default +# configuration file. # . ./compare_config_scripts.sh # -# Now source the local configuration script. +# Now source the user-specified configuration file. # - . ./${CUSTOM_CONFIG_FN} + . ./${EXPT_CONFIG_FN} # fi # @@ -1647,21 +1650,22 @@ fi # #----------------------------------------------------------------------- # -# Generate the shell script that will appear in the run directory (RUN- -# DIR) and will contain definitions of variables needed by the various -# scripts in the workflow. We refer to this as the variable definitions -# file. We will create this file by: +# Generate the shell script that will appear in the experiment directory +# (EXPTDIR) and will contain definitions of variables needed by the va- +# rious scripts in the workflow. We refer to this as the experiment/ +# workflow global variable definitions file. We will create this file +# by: # -# 1) Copying the default workflow/experiment configuration script (spe- -# fied by DEFAULT_CONFIG_FN and located in the shell script directory -# USHDIR) to the run directory and renaming it to the name specified -# by GLOBAL_VAR_DEFNS_FN. +# 1) Copying the default workflow/experiment configuration file (speci- +# fied by DEFAULT_EXPT_CONFIG_FN and located in the shell script di- +# rectory specified by USHDIR) to the experiment directory and rena- +# ming it to the name specified by GLOBAL_VAR_DEFNS_FN. # -# 2) Resetting the original values of the variables defined in this file -# to their current values. This is necessary because these variables -# may have been reset by the local configuration script (if one ex- -# ists in USHDIR) and/or by this setup script, e.g. because predef_- -# domain is set to a valid non-empty value. 
+# 2) Resetting the default variable values in this file to their current +# values. This is necessary because these variables may have been +# reset by the user-specified configuration file (if one exists in +# USHDIR) and/or by this setup script, e.g. because predef_domain is +# set to a valid non-empty value. # # 3) Appending to the variable definitions file any new variables intro- # duced in this setup script that may be needed by the scripts that @@ -1674,7 +1678,7 @@ fi #----------------------------------------------------------------------- # GLOBAL_VAR_DEFNS_FP="$EXPTDIR/$GLOBAL_VAR_DEFNS_FN" -cp_vrfy ./${DEFAULT_CONFIG_FN} ${GLOBAL_VAR_DEFNS_FP} +cp_vrfy ./${DEFAULT_EXPT_CONFIG_FN} ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # From b9a688826b3743676e6fbd95e8e17f7d42a6600d Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Wed, 15 Jan 2020 10:29:55 -0700 Subject: [PATCH 173/203] In setup.sh, create a new local variable named "res_in_fixsar_filenames" that will contain the resolution extracted from any files in the fixsar directory (e.g. grid files, orography files, surface climatology files). The names of all these files start with "C" followed by an integer, e.g "C768". The resolution extracted will be the integer after the "C" (in this case 768). The extraction of this resolution from the file names is done in the function link_fix.sh. Also, change the arguments list of the link_fix function to accept this new variable as an argument as well as to return it as long as the input resolution is the same as the ones in the names of the preexisting files. Note that: 1) The new variable "res_in_fixsar_filenames" gets initialized to a null string. 2) The link_fix function may be called zero, one, two, or three times in setup.sh to set "res_in_fixsar_filenames". It will be called zero times (i.e. it will not be called) if the make_grid, make_orog, and make_sfc_climo tasks are all going to be run; it will be called one time if one of these three tasks will not be run; it will be called two times if two of these tasks will not be run; and it will be called three times if none of the three tasks will be run. (Whether these tasks are run depends on whether the user sets the variables RUN_TASK_MAKE_GRID, RUN_TASK_MAKE_OROG, and RUN_TASK_MAKE_SFC_CLIMO, respectively, to "TRUE" or "FALSE".) 3) If files having the naming convention described above do not exist in the fixsar directory (which means the make_grid, make_orog, and make_sfc_climo tasks must all be run to create the respective files in the fixsar directory), then "res_in_fixsar_filenames" will remain set to a null string in setup.sh. 
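For illustration only (not part of this patch): the "extraction of this
resolution from the file names" that link_fix.sh performs roughly amounts
to stripping the leading "C" from the first component of a pre-generated
file name, e.g.

    fn="C768_grid.tile7.halo4.nc"   # hypothetical pre-generated grid file
    res="${fn%%_*}"                 # keep text before the first "_" -> "C768"
    res="${res#C}"                  # drop the leading "C"           -> "768"

so that after the first call that finds such files, res_in_fixsar_filenames
holds "768", and any later calls only verify that their files carry the
same resolution.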
--- ush/generate_FV3SAR_wflow.sh | 15 ++++++--- ush/link_fix.sh | 62 +++++++++++++++++++++++++----------- 2 files changed, 53 insertions(+), 24 deletions(-) diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index 37ad863bd..ee2871ccc 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -486,6 +486,8 @@ echo "RES = $RES" #----------------------------------------------------------------------- # else + + res_in_fixsar_filenames="" # #----------------------------------------------------------------------- # @@ -498,8 +500,9 @@ else if [ "${RUN_TASK_MAKE_GRID}" = "FALSE" ]; then $USHDIR/link_fix.sh \ verbose="FALSE" \ - global_var_defns_fp="${GLOBAL_VAR_DEFNS_FP}" \ - file_group="grid" || \ + file_group="grid" \ + res_in_existing_fixsar_filenames="${res_in_fixsar_filenames}" \ + output_varname_res="res_in_fixsar_filenames" || \ print_err_msg_exit "\ Call to script to create links to grid files failed." fi @@ -515,8 +518,9 @@ Call to script to create links to grid files failed." if [ "${RUN_TASK_MAKE_OROG}" = "FALSE" ]; then $USHDIR/link_fix.sh \ verbose="FALSE" \ - global_var_defns_fp="${GLOBAL_VAR_DEFNS_FP}" \ - file_group="orog" || \ + file_group="orog" \ + res_in_existing_fixsar_filenames="${res_in_fixsar_filenames}" \ + output_varname_res="res_in_fixsar_filenames" || \ print_err_msg_exit "\ Call to script to create links to orography files failed." fi @@ -533,8 +537,9 @@ Call to script to create links to orography files failed." if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "FALSE" ]; then $USHDIR/link_fix.sh \ verbose="FALSE" \ - global_var_defns_fp="${GLOBAL_VAR_DEFNS_FP}" \ file_group="sfc_climo" || \ + res_in_existing_fixsar_filenames="${res_in_fixsar_filenames}" \ + output_varname_res="res_in_fixsar_filenames" || \ print_err_msg_exit "\ Call to script to create links to surface climatology files failed." fi diff --git a/ush/link_fix.sh b/ush/link_fix.sh index 62a600f89..15f7d4f08 100755 --- a/ush/link_fix.sh +++ b/ush/link_fix.sh @@ -3,7 +3,7 @@ # #----------------------------------------------------------------------- # -# This file defines and then calls a function that i +# This file defines a function that ... # #----------------------------------------------------------------------- # @@ -46,10 +46,11 @@ function link_fix() { # #----------------------------------------------------------------------- # - valid_args=( \ + local valid_args=( \ "verbose" \ -"global_var_defns_fp" \ "file_group" \ +"res_in_existing_fixsar_filenames" \ +"output_varname_res" \ ) process_args valid_args "$@" # @@ -207,26 +208,49 @@ Please ensure that all files have the same C-resolution." # #----------------------------------------------------------------------- # -# Set RES to a null string if it is not already defined in the variable -# defintions file. 
+# if [ "${res_in_existing_fixsar_filenames}" = "" ]; then # -RES=${RES:-""} -if [ "$RES" = "$res" ] || [ "$RES" = "" ]; then - cres="C${res}" - set_file_param "${GLOBAL_VAR_DEFNS_FP}" "RES" "${res}" - set_file_param "${GLOBAL_VAR_DEFNS_FP}" "CRES" "${cres}" -elif [ "$RES" != "$res" ]; then - print_err_msg_exit "\ -The resolution (RES) specified in the variable definitions file -(global_var_defns_fp) does not match the resolution (res) found in this -script for the specified file group (file_group): - global_var_defns_fp = \"${global_var_defns_fp}\" - RES = \"${RES}\" +# print_info_msg "$verbose" " +#Setting variable res_in_existing_fixsar_filenames to the resolution +#(res) extracted from the names of the specifed group of fixed files +#(file_group): +# file_group = \"${file_group}\" +# res = $res" +# +# res_in_existing_fixsar_filenames=$res +# +# elif [ "${res_in_existing_fixsar_filenames}" = "$res" ]; then +# +# print_info_msg "$verbose" " +#As expected, the value of res_in_existing_fixsar_filenames (obtained +#from the names of a previously considered group of fixed files, e.g. +#grid and/or orography files) matches the resolution (res) extracted from +#the names of the specifed group of fixed files (file_group): +# file_group = \"${file_group}\" +# res = $res +# res_in_existing_fixsar_filenames = ${res_in_existing_fixsar_filenames}" + + if [ "${res_in_existing_fixsar_filenames}" = "" ] || \ + [ "${res_in_existing_fixsar_filenames}" = "$res" ]; then +# +# Use the eval function to set the value of the output variable. +# + eval ${output_varname_res}="$res" + + else + + print_err_msg_exit "\ +The value of res_in_existing_fixsar_filenames (obtained from the names +of a previously considered group of fixed files, e.g. grid and/or oro- +graphy files) does not match the resolution (res) extracted the names of +the specifed group of fixed files (file_group): file_group = \"${file_group}\" - res = \"${res}\" + res = $res + res_in_existing_fixsar_filenames = ${res_in_existing_fixsar_filenames} This usually means that one or more of the file groups (grid, orography, and/or surface climatology) are defined on different grids." -fi + + fi # #----------------------------------------------------------------------- # From 0065dce9fb9f324574cdd60fbef6d84e69692088 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Wed, 15 Jan 2020 10:32:19 -0700 Subject: [PATCH 174/203] For clarity, declare local variables in the function link_fix.sh. --- ush/link_fix.sh | 30 +++++++++++++++++++++++++++--- 1 file changed, 27 insertions(+), 3 deletions(-) diff --git a/ush/link_fix.sh b/ush/link_fix.sh index 15f7d4f08..1fcce191b 100755 --- a/ush/link_fix.sh +++ b/ush/link_fix.sh @@ -66,12 +66,36 @@ function link_fix() { # #----------------------------------------------------------------------- # -# Source the variable definitions script and the function definitions -# file. +# Declare local variables. # #----------------------------------------------------------------------- # - . ${global_var_defns_fp} + local fns_grid \ + fns_orog \ + sfc_climo_fields \ + num_fields \ + fns_sfc_climo \ + i \ + ii \ + valid_vals_file_group \ + fps_grid \ + fps_orog \ + fps_sfc_climo \ + fps_all \ + run_task \ + res_prev \ + res \ + fp_prev \ + fp \ + fn \ +# +#----------------------------------------------------------------------- +# +# Source the variable definitions file and the bash utility functions. +# +#----------------------------------------------------------------------- +# +# . 
${global_var_defns_fp} # #----------------------------------------------------------------------- # From fe1477c75464cd2c5f9d648945357f1639a09488 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Wed, 15 Jan 2020 11:02:06 -0700 Subject: [PATCH 175/203] Change the user-specified grid parameter for a GFDLgrid type of grid named RES to GFDLgrid_RES. --- ush/config_defaults.sh | 4 ++-- ush/set_predef_grid_params.sh | 6 +++--- ush/setup.sh | 12 ++++++------ ush/valid_param_vals.sh | 2 +- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index 45d1273fd..96c0007f4 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -388,7 +388,7 @@ GRID_GEN_METHOD="JPgrid" # GFDLgrid_LAT_T6_CTR: # Latitude of the center of tile 6 (in degrees). # -# RES: +# GFDLgrid_RES: # Number of points in each of the two horizontal directions (x and y) # on each tile of the global grid. Must be "48", "96", "192", "384", # "768", "1152", or "3072" @@ -418,7 +418,7 @@ GRID_GEN_METHOD="JPgrid" # GFDLgrid_LON_T6_CTR=-97.5 GFDLgrid_LAT_T6_CTR=35.5 -RES="384" +GFDLgrid_RES="384" GFDLgrid_STRETCH_FAC=1.5 GFDLgrid_REFINE_RATIO=3 GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G=10 diff --git a/ush/set_predef_grid_params.sh b/ush/set_predef_grid_params.sh index 8f0b81f09..ae386335f 100644 --- a/ush/set_predef_grid_params.sh +++ b/ush/set_predef_grid_params.sh @@ -243,7 +243,7 @@ predefined domain: GFDLgrid_LON_T6_CTR=-106.0 GFDLgrid_LAT_T6_CTR=54.0 GFDLgrid_STRETCH_FAC=0.63 - RES="384" + GFDLgrid_RES="384" GFDLgrid_REFINE_RATIO=3 num_margin_cells_T6_left=10 @@ -523,7 +523,7 @@ predefined domain: GFDLgrid_LON_T6_CTR=-97.5 GFDLgrid_LAT_T6_CTR=38.5 GFDLgrid_STRETCH_FAC=1.5 - RES="768" + GFDLgrid_RES="768" GFDLgrid_REFINE_RATIO=3 num_margin_cells_T6_left=61 @@ -615,7 +615,7 @@ predefined domain: GFDLgrid_LON_T6_CTR=-153.0 GFDLgrid_LAT_T6_CTR=61.0 GFDLgrid_STRETCH_FAC=1.0 # ??? - RES="768" + GFDLgrid_RES="768" GFDLgrid_REFINE_RATIO=3 # ??? num_margin_cells_T6_left=61 diff --git a/ush/setup.sh b/ush/setup.sh index ee260c149..ae7294453 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -817,18 +817,18 @@ fi # #----------------------------------------------------------------------- # -# For a "GFDLgrid" type of grid, make sure RES is set to a valid value. -# Then set the C-resolution (CRES). +# For a "GFDLgrid" type of grid, make sure GFDLgrid_RES is set to a va- +# lid value. Then set the C-resolution (CRES). 
# #----------------------------------------------------------------------- # if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then err_msg="\ The number of grid cells per tile in each horizontal direction specified -in RES is not supported: - RES = \"${RES}\"" - check_var_valid_value "RES" "valid_vals_RES" "${err_msg}" - CRES="C${RES}" +in GFLDgrid_RES is not supported: + GFLDgrid_RES = \"${GFLDgrid_RES}\"" + check_var_valid_value "GFDLgrid_RES" "valid_vals_GFDLgrid_RES" "${err_msg}" + CRES="C${GFLDgrid_RES}" fi # #----------------------------------------------------------------------- diff --git a/ush/valid_param_vals.sh b/ush/valid_param_vals.sh index 18c9add6e..22200a9e6 100644 --- a/ush/valid_param_vals.sh +++ b/ush/valid_param_vals.sh @@ -10,7 +10,7 @@ valid_vals_PREDEF_GRID_NAME=( \ valid_vals_EMC_GRID_NAME=("ak" "conus" "conus_orig" "guam" "hi" "pr") valid_vals_USE_CCPP=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_CCPP_PHYS_SUITE=("FV3_GFS_2017_gfdlmp" "FV3_GSD_v0" "FV3_GSD_SAR") -valid_vals_RES=("48" "96" "192" "384" "768" "1152" "3072") +valid_vals_GFDLgrid_RES=("48" "96" "192" "384" "768" "1152" "3072") valid_vals_EXTRN_MDL_NAME_ICS=("GSMGFS" "FV3GFS" "RAPX" "HRRRX") valid_vals_EXTRN_MDL_NAME_LBCS=("GSMGFS" "FV3GFS" "RAPX" "HRRRX") valid_vals_FV3GFS_FILE_FMT_ICS=("nemsio" "grib2") From a61fcea126ee16ab9d73b85d086abe74e8a00354 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 16 Jan 2020 12:11:07 -0700 Subject: [PATCH 176/203] Update workflow variable names to new ones changed to in recent commits; improve comments. --- scripts/exregional_make_grid.sh | 83 ++++++++++++++++++--------------- 1 file changed, 45 insertions(+), 38 deletions(-) diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index fd707fab8..4952bc16d 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -247,12 +247,17 @@ mkdir_vrfy -p "$tmpdir" # size specified by the argument to the --halo flag does not extend be- # yond the boundaries of the parent grid (tile 6). In this case, since # the values passed to the --istart_nest, ..., and --jend_nest flags al- -# ready include a halo (because these arguments are $istart_rgnl_with_- -# halo_T6SG, $iend_rgnl_wide_halo_T6SG, $jstart_rgnl_wide_halo_T6SG, and -# $jend_rgnl_wide_halo_T6SG), it is reasonable to pass as the argument -# to --halo a zero. However, make_hgrid requires that the argument to -# --halo be at least 1, so below, we pass a 1 as the next-to-last argu- -# ment to grid_gen_scr. +# ready include a halo (because these arguments are +# +# ${ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}, +# ${IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}, +# ${JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}, and +# ${JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}, +# +# i.e. they include "WITH_WIDE_HALO_" in their names), it is reasonable +# to pass as the argument to --halo a zero. However, make_hgrid re- +# quires that the argument to --halo be at least 1, so below, we pass a +# 1 as the next-to-last argument to grid_gen_scr. 
# # More information on make_hgrid: # ------------------------------ @@ -268,10 +273,10 @@ mkdir_vrfy -p "$tmpdir" # --target_lon ${LON_CTR} # --target_lat ${LAT_CTR} \ # --nest_grid --parent_tile 6 --refine_ratio ${GFDLgrid_REFINE_RATIO} \ -# --istart_nest ${istart_rgnl_wide_halo_T6SG} \ -# --jstart_nest ${jstart_rgnl_wide_halo_T6SG} \ -# --iend_nest ${iend_rgnl_wide_halo_T6SG} \ -# --jend_nest ${jend_rgnl_wide_halo_T6SG} \ +# --istart_nest ${ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG} \ +# --jstart_nest ${JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG} \ +# --iend_nest ${IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG} \ +# --jend_nest ${JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG} \ # --halo ${NH3} \ # --great_circle_algorithm # @@ -280,10 +285,10 @@ mkdir_vrfy -p "$tmpdir" # the extents of the arrays in that file do not seem to include a halo, # i.e. they are based only on the values passed via the four flags # -# --istart_nest ${istart_rgnl_wide_halo_T6SG} -# --jstart_nest ${jstart_rgnl_wide_halo_T6SG} -# --iend_nest ${iend_rgnl_wide_halo_T6SG} -# --jend_nest ${jend_rgnl_wide_halo_T6SG} +# --istart_nest ${ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG} +# --jstart_nest ${JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG} +# --iend_nest ${IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG} +# --jend_nest ${JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG} # # According to Rusty Benson of GFDL, the flag # @@ -310,12 +315,14 @@ Starting grid file generation..." if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - $USHDIR/$grid_gen_scr \ - $RES \ + $USHDIR/${grid_gen_scr} \ + ${GFDLgrid_RES} \ $tmpdir \ ${STRETCH_FAC} ${LON_CTR} ${LAT_CTR} ${GFDLgrid_REFINE_RATIO} \ - ${istart_rgnl_wide_halo_T6SG} ${jstart_rgnl_wide_halo_T6SG} \ - ${iend_rgnl_wide_halo_T6SG} ${jend_rgnl_wide_halo_T6SG} \ + ${ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG} \ + ${JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG} \ + ${IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG} \ + ${JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG} \ 1 $USHDIR || \ print_err_msg_exit "\ Call to script that generates grid files returned with nonzero exit @@ -333,38 +340,38 @@ sphere resolution returned with nonzero exit code." printf "%s\n" "RES_equiv = $RES_equiv" CRES_equiv="C${RES_equiv}" printf "%s\n" "CRES_equiv = $CRES_equiv" - -elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then # #----------------------------------------------------------------------- # -# Set the full path to the namelist file for the executable that gene- -# rates a regional grid using Jim Purser's method. Then set parameters -# in that file. +# Consider JPgrid-type of grid. # #----------------------------------------------------------------------- # - RGNL_GRID_NML_FP="$tmpdir/${RGNL_GRID_NML_FN}" - cp_vrfy ${TEMPLATE_DIR}/${RGNL_GRID_NML_FN} ${RGNL_GRID_NML_FP} +elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then +# +# Copy the template namelist file for the JPgrid-type grid generation +# code to the temporary subdirectory. Then replace the placeholders in +# that file with actual values. +# + rgnl_grid_nml_fp="$tmpdir/${RGNL_GRID_NML_FN}" + cp_vrfy ${TEMPLATE_DIR}/${RGNL_GRID_NML_FN} ${rgnl_grid_nml_fp} print_info_msg "$VERBOSE" " Setting parameters in file: - RGNL_GRID_NML_FP = \"${RGNL_GRID_NML_FP}\"" -# -# Set parameters. 
-# - set_file_param "${RGNL_GRID_NML_FP}" "plon" "${JPgrid_LON_CTR}" - set_file_param "${RGNL_GRID_NML_FP}" "plat" "${JPgrid_LAT_CTR}" - set_file_param "${RGNL_GRID_NML_FP}" "delx" "${DEL_ANGLE_X_SG}" - set_file_param "${RGNL_GRID_NML_FP}" "dely" "${DEL_ANGLE_Y_SG}" - set_file_param "${RGNL_GRID_NML_FP}" "lx" "${NEG_NX_OF_DOM_WITH_WIDE_HALO}" - set_file_param "${RGNL_GRID_NML_FP}" "ly" "${NEG_NY_OF_DOM_WITH_WIDE_HALO}" - set_file_param "${RGNL_GRID_NML_FP}" "a" "${JPgrid_ALPHA_PARAM}" - set_file_param "${RGNL_GRID_NML_FP}" "k" "${JPgrid_KAPPA_PARAM}" + rgnl_grid_nml_fp = \"${rgnl_grid_nml_fp}\"" + + set_file_param "${rgnl_grid_nml_fp}" "plon" "${LON_CTR}" + set_file_param "${rgnl_grid_nml_fp}" "plat" "${LAT_CTR}" + set_file_param "${rgnl_grid_nml_fp}" "delx" "${DEL_ANGLE_X_SG}" + set_file_param "${rgnl_grid_nml_fp}" "dely" "${DEL_ANGLE_Y_SG}" + set_file_param "${rgnl_grid_nml_fp}" "lx" "${NEG_NX_OF_DOM_WITH_WIDE_HALO}" + set_file_param "${rgnl_grid_nml_fp}" "ly" "${NEG_NY_OF_DOM_WITH_WIDE_HALO}" + set_file_param "${rgnl_grid_nml_fp}" "a" "${JPgrid_ALPHA_PARAM}" + set_file_param "${rgnl_grid_nml_fp}" "k" "${JPgrid_KAPPA_PARAM}" cd_vrfy $tmpdir - $EXECDIR/regional_grid ${RGNL_GRID_NML_FP} || \ + $EXECDIR/regional_grid ${rgnl_grid_nml_fp} || \ print_err_msg_exit "\ Call to executable that generates grid file (Jim Purser version) re- turned with nonzero exit code." From 5c953c434730508511a966f16d186b8382a12a63 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 16 Jan 2020 15:21:12 -0700 Subject: [PATCH 177/203] Fix the way grid parameters are calculated. See below for details. ush/config_defaults.sh: ---------------------- *) Introduce new experiment/workflow variable GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES that, for a GFDLgrid-type grid, determines whether the resolution (i.e. the number of grid points) used in file names should be what would be used for a global cubed-sphere grid (which is the number of grid points in one of the two horizontal directions on any one of the tiles of the global grid on which the regional grid (tile 7) is based) or should be a number based on the equivalent global uniform cubed-sphere grid (which is the number of grid points that a global and UNIFORM (i.e. no stretching) global cubed-sphere grid needs to have in order to have approximately the same grid size as the regional GFDLgrid-type grid). This variable (GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES) will be needed to be able to use the same file naming convention as EMC. ush/setup.sh: ------------ *) Move calculation of the variable res_in_fixsar_filenames from the script ush/generate_FV3SAR_wflow.sh to ush/setup.sh. Also, rename this variable RES_IN_FIXSAR_FILENAMES (i.e. capitalize it) and make it a global experiment/workflow variable (by placing it in the global variable defintions file var_defns.sh). The variable RES_IN_FIXSAR_FILENAMES specifies the resolution appearing in the names of any pre-generated grid, orography, and surface climatology files. If there are no such pre-generated files (which means that all three of the tasks make_grid, make_orog, and make_sfc_climo will be run to produce these files), this variable will be set to a null string. *) Initialize CRES to a null string and calculate a valid value for it only if the make_grid taks is NOT going to be run. (If make_grid task is going to be run, a valid value for CRES will be calculated during the execution of that task.) 
*) Set the experiment/workflow variables RES_IN_FIXSAR_FILENAMES and CRES in the global variable definitions file, var_defns.sh, regardless of what values they're set to (i.e. even if they're set to null strings). *) Stop using/setting a variable named RES as a global variable. That's because it doesn't contain any information that CRES doesn't. ush/generate_FV3SAR_wflow.sh: ---------------------------- *) Move calculation of the variable res_in_fixsar_filenames from the ush/generate_FV3SAR_wflow.sh to ush/setup.sh (see above for more details). scripts/exregional_make_grid.sh: ------------------------------- *) For a GFDLgrid-type of grid, set CRES according to whether the new global variable GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES introduced above is set to "TRUE" or "FALSE". If "TRUE", set CRES based on GFDLgrid_RES (which is the standard way things are done in the global model and the way the regional model at EMC names its files). If "FALSE", set CRES based on the equivalent global uniform cubed-sphere grid resolution (see above). *) If RES_IN_FIXSAR_FILENAMES is not set to a null string (i.e. it has a valid value), it will be set to the resolution appearing in the names of any pregenerated orography or surface climatology files. In this case, make sure that RES_IN_FIXSAR_FILENAMES is equal to the resolution calculated for the grid (in the local variable res). Perform this check for both GFDLgrid- and JPgrid-type grids. --- scripts/exregional_make_grid.sh | 109 ++++++++++++++++++++++-------- ush/config_defaults.sh | 1 + ush/generate_FV3SAR_wflow.sh | 116 ++++++++++++++++---------------- ush/setup.sh | 100 ++++++++++++++++++++++++--- 4 files changed, 231 insertions(+), 95 deletions(-) diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index 4952bc16d..00e22d533 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -313,6 +313,15 @@ mkdir_vrfy -p "$tmpdir" print_info_msg "$VERBOSE" " Starting grid file generation..." +tile_rgnl=7 +res="" +# +#----------------------------------------------------------------------- +# +# Consider a GFDLgrid-type of grid. +# +#----------------------------------------------------------------------- +# if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then $USHDIR/${grid_gen_scr} \ @@ -328,22 +337,39 @@ if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then Call to script that generates grid files returned with nonzero exit code." - tile_rgnl=7 - grid_fp="$tmpdir/${CRES}_grid.tile${tile_rgnl}.nc" - $EXECDIR/global_equiv_resol "${grid_fp}" || \ - print_err_msg_exit "\ + if [ "${GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES}" = "TRUE" ]; then + + res=${GFDLgrid_RES} + CRES="C$res" + + else + + grid_fp="$tmpdir/C${GFDLgrid_RES}_grid.tile${tile_rgnl}.nc" + $EXECDIR/global_equiv_resol "${grid_fp}" || \ + print_err_msg_exit "\ Call to executable that calculates equivalent global uniform cubed sphere resolution returned with nonzero exit code." 
- RES_equiv=$( ncdump -h "${grid_fp}" | grep -o ":RES_equiv = [0-9]\+" | grep -o "[0-9]") - RES_equiv=${RES_equiv//$'\n'/} -printf "%s\n" "RES_equiv = $RES_equiv" - CRES_equiv="C${RES_equiv}" -printf "%s\n" "CRES_equiv = $CRES_equiv" + res=$( ncdump -h "${grid_fp}" | grep -o ":RES_equiv = [0-9]\+" | grep -o "[0-9]" ) || \ + print_err_msg_exit "\ +Attempt to extract the equivalent global uniform cubed-sphere grid reso- +lution from the file specified by grid_fp faild: + grid_fp = \"${grid_fp}\"" + res=${res//$'\n'/} + CRES="C$res" + + grid_fp_orig="${grid_fp}" + grid_fp="$tmpdir/${CRES}_grid.tile${tile_rgnl}.nc" + mv_vrfy ${grid_fp_orig} ${grid_fp} + + fi + +printf "%s\n" "res = $res" +printf "%s\n" "CRES = $CRES" # #----------------------------------------------------------------------- # -# Consider JPgrid-type of grid. +# Consider a JPgrid-type of grid. # #----------------------------------------------------------------------- # @@ -376,42 +402,71 @@ Setting parameters in file: Call to executable that generates grid file (Jim Purser version) re- turned with nonzero exit code." - tile_rgnl=7 grid_fp="$tmpdir/regional_grid.nc" $EXECDIR/global_equiv_resol "${grid_fp}" || \ print_err_msg_exit "\ Call to executable that calculates equivalent global uniform cubed sphere resolution returned with nonzero exit code." - RES_equiv=$( ncdump -h "${grid_fp}" | grep -o ":RES_equiv = [0-9]\+" | grep -o "[0-9]" ) # Need error checking here. - RES_equiv=${RES_equiv//$'\n'/} -printf "%s\n" "RES_equiv = $RES_equiv" - CRES_equiv="C${RES_equiv}" -printf "%s\n" "CRES_equiv = $CRES_equiv" + res=$( ncdump -h "${grid_fp}" | grep -o ":RES_equiv = [0-9]\+" | grep -o "[0-9]" ) || \ + print_err_msg_exit "\ +Attempt to extract the equivalent global uniform cubed-sphere grid reso- +lution from the file specified by grid_fp faild: + grid_fp = \"${grid_fp}\"" + res=${res//$'\n'/} + CRES="C$res" grid_fp_orig="${grid_fp}" - grid_fp="$tmpdir/${CRES_equiv}_grid.tile${tile_rgnl}.nc" + grid_fp="$tmpdir/${CRES}_grid.tile${tile_rgnl}.nc" mv_vrfy ${grid_fp_orig} ${grid_fp} - $EXECDIR/mosaic_file $CRES_equiv || \ +printf "%s\n" "res = $res" +printf "%s\n" "CRES = $CRES" +# +# Create a grid mosaic file that relates the tiles of the cubed-sphere +# grid. Note that there are no "tiles" in the case of a JPgrid-type +# grid, but this file must nevertheless exist because the forecast mo- +# del code looks for it. +# + $EXECDIR/mosaic_file $CRES || \ print_err_msg_exit "\ Call to executable that creates a grid mosaic file returned with nonzero exit code." + +fi # -# RES and CRES need to be set here in order for the rest of the script -# (that was originally written for a grid with GRID_GEN_METHOD set to -# "GFDLgrid") to work for a grid with GRID_GEN_METHOD set to "JPgrid". +#----------------------------------------------------------------------- # - RES="$RES_equiv" - CRES="$CRES_equiv" - - set_file_param "${GLOBAL_VAR_DEFNS_FP}" "RES" "$RES" - set_file_param "${GLOBAL_VAR_DEFNS_FP}" "CRES" "$CRES" - +# If there are pre-existing orography or climatology files we will be +# using (i.e. if RUN_TASK_MAKE_OROG or RUN_TASK_MAKE_SURF_CLIMO is set +# to "FALSE", in which case RES_IN_FIXSAR_FILENAMES will not be set to a +# null string), check that the grid resolution (res) calculated above +# matches the resolution appearing in the names of the preexisting oro- +# graphy or surface climatology files. +# +#----------------------------------------------------------------------- +# +if [ ! 
-z "${RES_IN_FIXSAR_FILENAMES}" ]; then + if [ "$res" -ne "${RES_IN_FIXSAR_FILENAMES}" ]; then + print_err_msg_exit "\ +The resolution (res) calculated for the grid does not match the resolu- +tion (RES_IN_FIXSAR_FILENAMES) appearing in the names of the orography +and/or surface climatology files: + res = $res + RES_IN_FIXSAR_FILENAMES = \"${RES_IN_FIXSAR_FILENAMES}\"" + fi fi # #----------------------------------------------------------------------- # +# Set CRES in the variable definitions file. +# +#----------------------------------------------------------------------- +# +set_file_param "${GLOBAL_VAR_DEFNS_FP}" "CRES" "$CRES" +# +#----------------------------------------------------------------------- +# # For clarity, rename the tile 7 grid file such that its new name con- # tains the halo size. Then create a link whose name doesn't contain # the halo size that points to this file. diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index 96c0007f4..a2b486331 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -425,6 +425,7 @@ GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G=10 GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=374 GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G=10 GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=374 +GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES="TRUE" # #----------------------------------------------------------------------- # diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index ee2871ccc..f6358a11e 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -485,65 +485,65 @@ echo "RES = $RES" # #----------------------------------------------------------------------- # -else - - res_in_fixsar_filenames="" -# -#----------------------------------------------------------------------- -# -# If the grid file generation task in the workflow is going to be -# skipped (because pregenerated files are available), create links in -# the FIXsar directory to the pregenerated grid files. -# -#----------------------------------------------------------------------- -# - if [ "${RUN_TASK_MAKE_GRID}" = "FALSE" ]; then - $USHDIR/link_fix.sh \ - verbose="FALSE" \ - file_group="grid" \ - res_in_existing_fixsar_filenames="${res_in_fixsar_filenames}" \ - output_varname_res="res_in_fixsar_filenames" || \ - print_err_msg_exit "\ -Call to script to create links to grid files failed." - fi -# -#----------------------------------------------------------------------- -# -# If the orography file generation task in the workflow is going to be -# skipped (because pregenerated files are available), create links in -# the FIXsar directory to the pregenerated orography files. +#else +# +# res_in_fixsar_filenames="" +## +##----------------------------------------------------------------------- +## +## If the grid file generation task in the workflow is going to be +## skipped (because pregenerated files are available), create links in +## the FIXsar directory to the pregenerated grid files. +## +##----------------------------------------------------------------------- +## +# if [ "${RUN_TASK_MAKE_GRID}" = "FALSE" ]; then +# $USHDIR/link_fix.sh \ +# verbose="FALSE" \ +# file_group="grid" \ +# res_in_existing_fixsar_filenames="${res_in_fixsar_filenames}" \ +# output_varname_res="res_in_fixsar_filenames" || \ +# print_err_msg_exit "\ +#Call to script to create links to grid files failed." 
+# fi +## +##----------------------------------------------------------------------- +## +## If the orography file generation task in the workflow is going to be +## skipped (because pregenerated files are available), create links in +## the FIXsar directory to the pregenerated orography files. +## +##----------------------------------------------------------------------- +## +# if [ "${RUN_TASK_MAKE_OROG}" = "FALSE" ]; then +# $USHDIR/link_fix.sh \ +# verbose="FALSE" \ +# file_group="orog" \ +# res_in_existing_fixsar_filenames="${res_in_fixsar_filenames}" \ +# output_varname_res="res_in_fixsar_filenames" || \ +# print_err_msg_exit "\ +#Call to script to create links to orography files failed." +# fi +## +##----------------------------------------------------------------------- +## +## If the surface climatology file generation task in the workflow is +## going to be skipped (because pregenerated files are available), create +## links in the FIXsar directory to the pregenerated surface climatology +## files. +## +##----------------------------------------------------------------------- +## +# if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "FALSE" ]; then +# $USHDIR/link_fix.sh \ +# verbose="FALSE" \ +# file_group="sfc_climo" || \ +# res_in_existing_fixsar_filenames="${res_in_fixsar_filenames}" \ +# output_varname_res="res_in_fixsar_filenames" || \ +# print_err_msg_exit "\ +#Call to script to create links to surface climatology files failed." +# fi # -#----------------------------------------------------------------------- -# - if [ "${RUN_TASK_MAKE_OROG}" = "FALSE" ]; then - $USHDIR/link_fix.sh \ - verbose="FALSE" \ - file_group="orog" \ - res_in_existing_fixsar_filenames="${res_in_fixsar_filenames}" \ - output_varname_res="res_in_fixsar_filenames" || \ - print_err_msg_exit "\ -Call to script to create links to orography files failed." - fi -# -#----------------------------------------------------------------------- -# -# If the surface climatology file generation task in the workflow is -# going to be skipped (because pregenerated files are available), create -# links in the FIXsar directory to the pregenerated surface climatology -# files. -# -#----------------------------------------------------------------------- -# - if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "FALSE" ]; then - $USHDIR/link_fix.sh \ - verbose="FALSE" \ - file_group="sfc_climo" || \ - res_in_existing_fixsar_filenames="${res_in_fixsar_filenames}" \ - output_varname_res="res_in_fixsar_filenames" || \ - print_err_msg_exit "\ -Call to script to create links to surface climatology files failed." - fi - fi # #----------------------------------------------------------------------- diff --git a/ush/setup.sh b/ush/setup.sh index ae7294453..17fe77d2d 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -818,7 +818,7 @@ fi #----------------------------------------------------------------------- # # For a "GFDLgrid" type of grid, make sure GFDLgrid_RES is set to a va- -# lid value. Then set the C-resolution (CRES). +# lid value. 
# #----------------------------------------------------------------------- # @@ -828,7 +828,6 @@ The number of grid cells per tile in each horizontal direction specified in GFLDgrid_RES is not supported: GFLDgrid_RES = \"${GFLDgrid_RES}\"" check_var_valid_value "GFDLgrid_RES" "valid_vals_GFDLgrid_RES" "${err_msg}" - CRES="C${GFLDgrid_RES}" fi # #----------------------------------------------------------------------- @@ -1431,6 +1430,87 @@ elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then output_varname_neg_ny_of_dom_with_wide_halo="NEG_NY_OF_DOM_WITH_WIDE_HALO" fi +# +#----------------------------------------------------------------------- +# +# +# +#----------------------------------------------------------------------- +# +RES_IN_FIXSAR_FILENAMES="" +# +#----------------------------------------------------------------------- +# +# If the grid file generation task in the workflow is going to be +# skipped (because pregenerated files are available), create links in +# the FIXsar directory to the pregenerated grid files. +# +#----------------------------------------------------------------------- +# +if [ "${RUN_TASK_MAKE_GRID}" = "FALSE" ]; then + $USHDIR/link_fix.sh \ + verbose="FALSE" \ + file_group="grid" \ + res_in_existing_fixsar_filenames="${RES_IN_FIXSAR_FILENAMES}" \ + output_varname_res="RES_IN_FIXSAR_FILENAMES" || \ + print_err_msg_exit "\ +Call to script to create links to grid files failed." +fi +# +#----------------------------------------------------------------------- +# +# If the orography file generation task in the workflow is going to be +# skipped (because pregenerated files are available), create links in +# the FIXsar directory to the pregenerated orography files. +# +#----------------------------------------------------------------------- +# +if [ "${RUN_TASK_MAKE_OROG}" = "FALSE" ]; then + $USHDIR/link_fix.sh \ + verbose="FALSE" \ + file_group="orog" \ + res_in_existing_fixsar_filenames="${RES_IN_FIXSAR_FILENAMES}" \ + output_varname_res="RES_IN_FIXSAR_FILENAMES" || \ + print_err_msg_exit "\ +Call to script to create links to orography files failed." +fi +# +#----------------------------------------------------------------------- +# +# If the surface climatology file generation task in the workflow is +# going to be skipped (because pregenerated files are available), create +# links in the FIXsar directory to the pregenerated surface climatology +# files. +# +#----------------------------------------------------------------------- +# +if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "FALSE" ]; then + $USHDIR/link_fix.sh \ + verbose="FALSE" \ + file_group="sfc_climo" || \ + res_in_existing_fixsar_filenames="${RES_IN_FIXSAR_FILENAMES}" \ + output_varname_res="RES_IN_FIXSAR_FILENAMES" || \ + print_err_msg_exit "\ +Call to script to create links to surface climatology files failed." +fi +# +#----------------------------------------------------------------------- +# +# The variable CRES is needed in constructing various file names. If +# not running the make_grid task, we can set it here. Otherwise, it +# will get set to a valid value by that task. 
+# +#----------------------------------------------------------------------- +# +CRES="" +if [ "${RUN_TASK_MAKE_GRID}" = "FALSE" ]; then + CRES="C${RES_IN_FIXSAR_FILENAMES}" +fi + + + + + # #----------------------------------------------------------------------- # @@ -2048,6 +2128,14 @@ NX="${NX}" NY="${NY}" NHW="${NHW}" STRETCH_FAC="${STRETCH_FAC}" + +RES_IN_FIXSAR_FILENAMES="${RES_IN_FIXSAR_FILENAMES}" +# +# If running the make_grid task, CRES will be set to a null string du- +# the grid generation step. It will later be set to an actual value af- +# ter the make_grid task is complete. +# +CRES="$CRES" EOM } || print_err_msg_exit "\ Heredoc (cat) command to append new variable definitions to variable @@ -2080,7 +2168,6 @@ ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG="${ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_O IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG="${IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}" JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG="${JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}" JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG="${JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}" -CRES="$CRES" EOM } || print_err_msg_exit "\ Heredoc (cat) command to append grid parameters to variable definitions @@ -2103,13 +2190,6 @@ DEL_ANGLE_X_SG="${DEL_ANGLE_X_SG}" DEL_ANGLE_Y_SG="${DEL_ANGLE_Y_SG}" NEG_NX_OF_DOM_WITH_WIDE_HALO="${NEG_NX_OF_DOM_WITH_WIDE_HALO}" NEG_NY_OF_DOM_WITH_WIDE_HALO="${NEG_NY_OF_DOM_WITH_WIDE_HALO}" -# -# The following variables must be set in order to be able to use the -# same scripting machinary for the case of GRID_GEN_METHOD set to "JP- -# grid" as for GRID_GEN_METHOD set to "GFDLgrid". -# -RES="" # This will be set after the grid generation task is complete. -CRES="" # This will be set after the grid generation task is complete. EOM } || print_err_msg_exit "\ Heredoc (cat) command to append grid parameters to variable definitions From c47eeec462732d907a1b87dcda7f368e46091a15 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 16 Jan 2020 15:46:21 -0700 Subject: [PATCH 178/203] Bug fix (merge conflict wasn't cleared). --- ush/setup.sh | 6 ------ 1 file changed, 6 deletions(-) diff --git a/ush/setup.sh b/ush/setup.sh index 1737a9bca..513718abb 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -1618,13 +1618,7 @@ fi # #----------------------------------------------------------------------- # -<<<<<<< HEAD -WRTCMP_PARAMS_TMPL_FP = "" -======= - -#Initialize WRTCMP_PARAMS_TMPL_FP in case QUILTING="FALSE" WRTCMP_PARAMS_TMPL_FP="" ->>>>>>> refs/remotes/origin/community_develop if [ "$QUILTING" = "TRUE" ]; then From 4e1ce0f6683a9f827f71f96a0ebd7740d73fa276 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 17 Jan 2020 11:43:26 -0700 Subject: [PATCH 179/203] Since link_fix(.sh) was changed in a previous commit from a script to a function, need to source it and call it as a function. Do this everywhere it is used. 
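As a sketch of the before/after call pattern (argument values here are
illustrative, not the literal diff):

    # Before (script): runs in a child shell; it cannot set variables in
    # the caller's environment.
    $USHDIR/link_fix.sh \
      verbose="FALSE" \
      file_group="grid" \
      res_in_existing_fixsar_filenames="${RES_IN_FIXSAR_FILENAMES}" \
      output_varname_res="RES_IN_FIXSAR_FILENAMES"

    # After (function): source the definition once, then call it in the
    # current shell.
    . $USHDIR/link_fix.sh
    link_fix \
      verbose="FALSE" \
      file_group="grid" \
      res_in_existing_fixsar_filenames="${RES_IN_FIXSAR_FILENAMES}" \
      output_varname_res="RES_IN_FIXSAR_FILENAMES"

Calling link_fix as a function is what lets the eval assignment inside it
reach the variable named by output_varname_res in the caller; a script run
in a child shell could not do that.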
--- scripts/exregional_make_grid.sh | 19 ++++++++++--------- scripts/exregional_make_orog.sh | 15 +++++++-------- scripts/exregional_make_sfc_climo.sh | 17 +++++++++++++---- ush/generate_FV3SAR_wflow.sh | 12 ++++++------ ush/link_fix.sh | 2 +- ush/setup.sh | 15 ++++++++------- 6 files changed, 45 insertions(+), 35 deletions(-) diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index 00e22d533..34f36237f 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -12,12 +12,12 @@ # #----------------------------------------------------------------------- # -# Source file containing definitions of mathematical and physical con- -# stants. +# Source other necessary files. # #----------------------------------------------------------------------- # -. ${USHDIR}/constants.sh +. $USHDIR/constants.sh +. $USHDIR/link_fix.sh # #----------------------------------------------------------------------- # @@ -26,7 +26,7 @@ # #----------------------------------------------------------------------- # -{ save_shell_opts; set -u +x; } > /dev/null 2>&1 +{ save_shell_opts; set -u -x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # @@ -463,7 +463,7 @@ fi # #----------------------------------------------------------------------- # -set_file_param "${GLOBAL_VAR_DEFNS_FP}" "CRES" "$CRES" +set_file_param "${GLOBAL_VAR_DEFNS_FP}" "CRES" "\"$CRES\"" # #----------------------------------------------------------------------- # @@ -567,12 +567,13 @@ cd_vrfy - # #----------------------------------------------------------------------- # -$USHDIR/link_fix.sh \ +link_fix \ verbose="FALSE" \ - global_var_defns_fp="${GLOBAL_VAR_DEFNS_FP}" \ - file_group="grid" || \ + file_group="grid" \ + res_in_existing_fixsar_filenames="${RES_IN_FIXSAR_FILENAMES}" \ + output_varname_res="dummy" || \ print_err_msg_exit "\ -Call to script to create links to grid files failed." +Call to function to create links to grid files failed." # #----------------------------------------------------------------------- # diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index 0460047b9..9e29e3138 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -12,12 +12,11 @@ # #----------------------------------------------------------------------- # -# Source file containing definitions of mathematical and physical con- -# stants. +# Source other necessary files. # #----------------------------------------------------------------------- # -. ${USHDIR}/constants.sh +. $USHDIR/link_fix.sh # #----------------------------------------------------------------------- # @@ -555,13 +554,13 @@ cd_vrfy - # #----------------------------------------------------------------------- # - -$USHDIR/link_fix.sh \ +link_fix \ verbose="FALSE" \ - global_var_defns_fp="${GLOBAL_VAR_DEFNS_FP}" \ - file_group="orog" || \ + file_group="orog" \ + res_in_existing_fixsar_filenames="${RES_IN_FIXSAR_FILENAMES}" \ + output_varname_res="dummy" || \ print_err_msg_exit "\ -Call to script to create links to orography files failed." +Call to function to create links to orography files failed." # Moved the following to exregional_make_sfc_climo.sh script since it # needs to be done only if the make_sfc_climo task is run. 
diff --git a/scripts/exregional_make_sfc_climo.sh b/scripts/exregional_make_sfc_climo.sh index 31e624c58..56ec2dec7 100755 --- a/scripts/exregional_make_sfc_climo.sh +++ b/scripts/exregional_make_sfc_climo.sh @@ -12,6 +12,14 @@ # #----------------------------------------------------------------------- # +# Source other necessary files. +# +#----------------------------------------------------------------------- +# +. $USHDIR/link_fix.sh +# +#----------------------------------------------------------------------- +# # Save current shell options (in a global array). Then set new options # for this script/function. # @@ -226,12 +234,13 @@ esac # #----------------------------------------------------------------------- # -$USHDIR/link_fix.sh \ +link_fix \ verbose="FALSE" \ - global_var_defns_fp="${GLOBAL_VAR_DEFNS_FP}" \ - file_group="sfc_climo" || \ + file_group="sfc_climo" \ + res_in_existing_fixsar_filenames="${RES_IN_FIXSAR_FILENAMES}" \ + output_varname_res="dummy" || \ print_err_msg_exit "\ -Call to script to create links to surface climatology files failed." +Call to function to create links to surface climatology files failed." # #----------------------------------------------------------------------- # diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index f6358a11e..91224f1d4 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -498,13 +498,13 @@ echo "RES = $RES" ##----------------------------------------------------------------------- ## # if [ "${RUN_TASK_MAKE_GRID}" = "FALSE" ]; then -# $USHDIR/link_fix.sh \ +# link_fix \ # verbose="FALSE" \ # file_group="grid" \ # res_in_existing_fixsar_filenames="${res_in_fixsar_filenames}" \ # output_varname_res="res_in_fixsar_filenames" || \ # print_err_msg_exit "\ -#Call to script to create links to grid files failed." +#Call to function to create links to grid files failed." # fi ## ##----------------------------------------------------------------------- @@ -516,13 +516,13 @@ echo "RES = $RES" ##----------------------------------------------------------------------- ## # if [ "${RUN_TASK_MAKE_OROG}" = "FALSE" ]; then -# $USHDIR/link_fix.sh \ +# link_fix \ # verbose="FALSE" \ # file_group="orog" \ # res_in_existing_fixsar_filenames="${res_in_fixsar_filenames}" \ # output_varname_res="res_in_fixsar_filenames" || \ # print_err_msg_exit "\ -#Call to script to create links to orography files failed." +#Call to function to create links to orography files failed." # fi ## ##----------------------------------------------------------------------- @@ -535,13 +535,13 @@ echo "RES = $RES" ##----------------------------------------------------------------------- ## # if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "FALSE" ]; then -# $USHDIR/link_fix.sh \ +# link_fix \ # verbose="FALSE" \ # file_group="sfc_climo" || \ # res_in_existing_fixsar_filenames="${res_in_fixsar_filenames}" \ # output_varname_res="res_in_fixsar_filenames" || \ # print_err_msg_exit "\ -#Call to script to create links to surface climatology files failed." +#Call to function to create links to surface climatology files failed." 
# fi # fi diff --git a/ush/link_fix.sh b/ush/link_fix.sh index 1fcce191b..3e06d48e2 100755 --- a/ush/link_fix.sh +++ b/ush/link_fix.sh @@ -16,7 +16,7 @@ function link_fix() { # #----------------------------------------------------------------------- # - { save_shell_opts; set -u +x; } > /dev/null 2>&1 + { save_shell_opts; set -u -x; } > /dev/null 2>&1 # #----------------------------------------------------------------------- # diff --git a/ush/setup.sh b/ush/setup.sh index 513718abb..a65aaeed0 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -53,12 +53,13 @@ cd_vrfy ${scrfunc_dir} # #----------------------------------------------------------------------- # -# Source functions for setting grid parameters. +# Source other necessary files. # #----------------------------------------------------------------------- # . ./set_gridparams_GFDLgrid.sh . ./set_gridparams_JPgrid.sh +. ./link_fix.sh # #----------------------------------------------------------------------- # @@ -1448,13 +1449,13 @@ RES_IN_FIXSAR_FILENAMES="" #----------------------------------------------------------------------- # if [ "${RUN_TASK_MAKE_GRID}" = "FALSE" ]; then - $USHDIR/link_fix.sh \ + link_fix \ verbose="FALSE" \ file_group="grid" \ res_in_existing_fixsar_filenames="${RES_IN_FIXSAR_FILENAMES}" \ output_varname_res="RES_IN_FIXSAR_FILENAMES" || \ print_err_msg_exit "\ -Call to script to create links to grid files failed." +Call to function to create links to grid files failed." fi # #----------------------------------------------------------------------- @@ -1466,13 +1467,13 @@ fi #----------------------------------------------------------------------- # if [ "${RUN_TASK_MAKE_OROG}" = "FALSE" ]; then - $USHDIR/link_fix.sh \ + link_fix \ verbose="FALSE" \ file_group="orog" \ res_in_existing_fixsar_filenames="${RES_IN_FIXSAR_FILENAMES}" \ output_varname_res="RES_IN_FIXSAR_FILENAMES" || \ print_err_msg_exit "\ -Call to script to create links to orography files failed." +Call to function to create links to orography files failed." fi # #----------------------------------------------------------------------- @@ -1485,13 +1486,13 @@ fi #----------------------------------------------------------------------- # if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "FALSE" ]; then - $USHDIR/link_fix.sh \ + link_fix \ verbose="FALSE" \ file_group="sfc_climo" || \ res_in_existing_fixsar_filenames="${RES_IN_FIXSAR_FILENAMES}" \ output_varname_res="RES_IN_FIXSAR_FILENAMES" || \ print_err_msg_exit "\ -Call to script to create links to surface climatology files failed." +Call to function to create links to surface climatology files failed." fi # #----------------------------------------------------------------------- From 96ea2dc2b03e51e11be6b21df677cdef541c8582 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 17 Jan 2020 11:45:16 -0700 Subject: [PATCH 180/203] Since there is no longer a global variable named RES containing the resolution (an integer), where necessary we need to extract this integer from CRES. Do this in scripts/exregional_make_grid.sh. 
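A minimal sketch of the extraction referred to above (CRES is the string
"C" followed by the resolution):

    CRES="C768"        # e.g. as set in var_defns.sh by the make_grid task
    res="${CRES:1}"    # bash substring from offset 1 drops the "C" -> "768"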
--- scripts/exregional_make_orog.sh | 70 ++++++++++++++++++++------------- 1 file changed, 43 insertions(+), 27 deletions(-) diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index 9e29e3138..c39c7e8d2 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -231,6 +231,15 @@ export exec_dir="$EXECDIR" # #----------------------------------------------------------------------- # +# Extract the resolution from CRES and save it in the local variable +# res. +# +#----------------------------------------------------------------------- +# +res="${CRES:1}" +# +#----------------------------------------------------------------------- +# # Generate an orography file corresponding to tile 7 (the regional do- # main) only. # @@ -263,7 +272,7 @@ case $MACHINE in # printf "%s\n" "\ ${ufs_utils_ushdir}/${orog_gen_scr} \ -$RES \ +$res \ ${TILE_RGNL} \ ${FIXsar} \ ${raw_dir} \ @@ -279,7 +288,7 @@ ${tmp_dir}" \ "THEIA" | "HERA" | "JET" | "ODIN") ${ufs_utils_ushdir}/${orog_gen_scr} \ - $RES ${TILE_RGNL} ${FIXsar} ${raw_dir} ${UFS_UTILS_DIR} ${topo_dir} ${tmp_dir} || \ + $res ${TILE_RGNL} ${FIXsar} ${raw_dir} ${UFS_UTILS_DIR} ${topo_dir} ${tmp_dir} || \ print_err_msg_exit "\ Call to script that generates raw orography file returned with nonzero exit code." @@ -329,22 +338,22 @@ Setting orography filtering parameters..." #if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then -# RES_eff=$( bc -l <<< "$RES*${GFDLgrid_REFINE_RATIO}" ) +# res_eff=$( bc -l <<< "$res*${GFDLgrid_REFINE_RATIO}" ) #elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then # grid_size_eff=$( "(${JPgrid_DELX} + ${JPgrid_DELY})/2" ) #echo "grid_size_eff = $grid_size_eff" -# RES_eff=$( bc -l <<< "2*$pi_geom*$radius_Earth/(4*$grid_size_eff)" ) +# res_eff=$( bc -l <<< "2*$pi_geom*$radius_Earth/(4*$grid_size_eff)" ) #fi -#RES_eff=$( printf "%.0f\n" $RES_eff ) +#res_eff=$( printf "%.0f\n" ${res_eff} ) #echo -#echo "RES_eff = $RES_eff" +#echo "res_eff = $res_eff" # This will work for a JPgrid type of grid because for that case, RES # in the variable definitions file gets set to RES_equiv (by the make_- # grid task), but this won't work for a GFDLgrid type of grid because if # the stretch factor is not 1 in that case, RES_equiv will not be the # same as RES (because RES does not account for the stretch factor). -RES_equiv=$RES +RES_equiv=$res # Can also call it the "equivalent" global unstretched resolution. 
@@ -370,28 +379,36 @@ global cubed-sphere resolution (RES_equiv) failed: done - if [ 0 = 1 ]; then -if [ $RES -eq 48 ]; then - export cd4=0.12; export max_slope=0.12; export n_del2_weak=4; export peak_fac=1.1 -elif [ $RES -eq 96 ]; then - export cd4=0.12; export max_slope=0.12; export n_del2_weak=8; export peak_fac=1.1 -elif [ $RES -eq 192 ]; then - export cd4=0.15; export max_slope=0.12; export n_del2_weak=12; export peak_fac=1.05 -elif [ $RES -eq 384 ]; then - export cd4=0.15; export max_slope=0.12; export n_del2_weak=12; export peak_fac=1.0 -elif [ $RES -eq 768 ]; then - export cd4=0.15; export max_slope=0.12; export n_del2_weak=16; export peak_fac=1.0 -elif [ $RES -eq 1152 ]; then - export cd4=0.15; export max_slope=0.16; export n_del2_weak=20; export peak_fac=1.0 -elif [ $RES -eq 3072 ]; then - export cd4=0.15; export max_slope=0.30; export n_del2_weak=24; export peak_fac=1.0 -else +case "$res" in + 48) + export cd4=0.12; export max_slope=0.12; export n_del2_weak=4; export peak_fac=1.1 + ;; + 96) + export cd4=0.12; export max_slope=0.12; export n_del2_weak=8; export peak_fac=1.1 + ;; + 192) + export cd4=0.15; export max_slope=0.12; export n_del2_weak=12; export peak_fac=1.05 + ;; + 384) + export cd4=0.15; export max_slope=0.12; export n_del2_weak=12; export peak_fac=1.0 + ;; + 768) + export cd4=0.15; export max_slope=0.12; export n_del2_weak=16; export peak_fac=1.0 + ;; + 1152) + export cd4=0.15; export max_slope=0.16; export n_del2_weak=20; export peak_fac=1.0 + ;; + 3072) + export cd4=0.15; export max_slope=0.30; export n_del2_weak=24; export peak_fac=1.0 + ;; + *) # This needs to be fixed - i.e. what to do about regional grids that are # not based on a parent global cubed-sphere grid. - export cd4=0.15; export max_slope=0.30; export n_del2_weak=24; export peak_fac=1.0 -fi + export cd4=0.15; export max_slope=0.30; export n_del2_weak=24; export peak_fac=1.0 + ;; +esac fi @@ -429,7 +446,6 @@ fi # print_info_msg "$VERBOSE" " Starting filtering of orography..." -#echo "GTYPE = \"$GTYPE\"" # The script below creates absolute symlinks in $filter_dir. That's # probably necessary for NCO but probably better to create relative @@ -439,7 +455,7 @@ Starting filtering of orography..." # script called below expects it to be in the environment. export gtype="$GTYPE" ${ufs_utils_ushdir}/${orog_fltr_scr} \ - $RES \ + $res \ ${FIXsar} ${raw_dir} ${filter_dir} \ $cd4 ${peak_fac} ${max_slope} ${n_del2_weak} \ ${ufs_utils_ushdir} || \ From a147cf473973c782cf695837b618613dd206e180 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 17 Jan 2020 12:08:00 -0700 Subject: [PATCH 181/203] Bug fixes. --- scripts/exregional_make_grid.sh | 4 ++-- scripts/exregional_make_orog.sh | 4 ++-- scripts/exregional_make_sfc_climo.sh | 4 ++-- ush/link_fix.sh | 2 ++ 4 files changed, 8 insertions(+), 6 deletions(-) diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index 34f36237f..66a47916f 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -570,8 +570,8 @@ cd_vrfy - link_fix \ verbose="FALSE" \ file_group="grid" \ - res_in_existing_fixsar_filenames="${RES_IN_FIXSAR_FILENAMES}" \ - output_varname_res="dummy" || \ + res_in_existing_fixsar_filenames="${RES_IN_FIXSAR_FILENAMES}" \ + output_varname_res="dummy" || \ print_err_msg_exit "\ Call to function to create links to grid files failed." 
# diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index c39c7e8d2..842b9ced7 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -573,8 +573,8 @@ cd_vrfy - link_fix \ verbose="FALSE" \ file_group="orog" \ - res_in_existing_fixsar_filenames="${RES_IN_FIXSAR_FILENAMES}" \ - output_varname_res="dummy" || \ + res_in_existing_fixsar_filenames="${RES_IN_FIXSAR_FILENAMES}" \ + output_varname_res="dummy" || \ print_err_msg_exit "\ Call to function to create links to orography files failed." diff --git a/scripts/exregional_make_sfc_climo.sh b/scripts/exregional_make_sfc_climo.sh index 56ec2dec7..d7a1de01d 100755 --- a/scripts/exregional_make_sfc_climo.sh +++ b/scripts/exregional_make_sfc_climo.sh @@ -237,8 +237,8 @@ esac link_fix \ verbose="FALSE" \ file_group="sfc_climo" \ - res_in_existing_fixsar_filenames="${RES_IN_FIXSAR_FILENAMES}" \ - output_varname_res="dummy" || \ + res_in_existing_fixsar_filenames="${RES_IN_FIXSAR_FILENAMES}" \ + output_varname_res="dummy" || \ print_err_msg_exit "\ Call to function to create links to surface climatology files failed." # diff --git a/ush/link_fix.sh b/ush/link_fix.sh index 3e06d48e2..9b88e80c5 100755 --- a/ush/link_fix.sh +++ b/ush/link_fix.sh @@ -88,6 +88,7 @@ function link_fix() { fp_prev \ fp \ fn \ + cres # #----------------------------------------------------------------------- # @@ -260,6 +261,7 @@ Please ensure that all files have the same C-resolution." # Use the eval function to set the value of the output variable. # eval ${output_varname_res}="$res" + cres="C$res" else From aae8dbe3c3ae6ebf0f9a36351c7a7de541355247 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 17 Jan 2020 15:30:16 -0700 Subject: [PATCH 182/203] Bug fixes: 1) In set_predef_grid_params.sh, change RES to GFDLgrid_RES everywhere. 2) In set_predef_grid_params.sh, set the write-component parameters for the EMC_CONUS grid, but these have not yet been tested (make a note of this fact). 3) Remove the setting of the write-component template file name from set_predef_grid_params.sh and put it in setup.sh. This is because set_predef_grid_params.sh is called only if a predefined grid is specified, but the name of the write-component template file may be needed even if a predefined grid is not specified (it is needed whenever quilting is turned on). 4) In setup.sh, change GDFLgrid_RES to GFDLgrid_RES. 
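The ${parameter:-default} expansion used for WRTCMP_PARAMS_TMPL_FN in point 3 above only fills in the template name when the user has not already set one. A quick illustration, using an assumed output-grid value and a hypothetical user override:

  WRTCMP_output_grid="rotated_latlon"

  unset WRTCMP_PARAMS_TMPL_FN
  WRTCMP_PARAMS_TMPL_FN=${WRTCMP_PARAMS_TMPL_FN:-"wrtcmp_${WRTCMP_output_grid}"}
  echo "${WRTCMP_PARAMS_TMPL_FN}"   # -> wrtcmp_rotated_latlon (default applied)

  WRTCMP_PARAMS_TMPL_FN="my_custom_template"
  WRTCMP_PARAMS_TMPL_FN=${WRTCMP_PARAMS_TMPL_FN:-"wrtcmp_${WRTCMP_output_grid}"}
  echo "${WRTCMP_PARAMS_TMPL_FN}"   # -> my_custom_template (user setting preserved)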
--- ush/set_predef_grid_params.sh | 53 +++++++++++++---------------------- ush/setup.sh | 30 ++++++++++++++++++-- 2 files changed, 48 insertions(+), 35 deletions(-) diff --git a/ush/set_predef_grid_params.sh b/ush/set_predef_grid_params.sh index b08cff312..165570ba7 100644 --- a/ush/set_predef_grid_params.sh +++ b/ush/set_predef_grid_params.sh @@ -250,13 +250,13 @@ predefined domain: GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_left + 1 )) num_margin_cells_T6_right=10 - GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=$(( RES - num_margin_cells_T6_right )) + GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_right )) num_margin_cells_T6_bottom=10 GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_bottom + 1 )) num_margin_cells_T6_top=10 - GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=$(( RES - num_margin_cells_T6_top )) + GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_top )) DT_ATMOS="90" @@ -530,21 +530,34 @@ predefined domain: GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_left + 1 )) num_margin_cells_T6_right=67 - GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=$(( RES - num_margin_cells_T6_right )) + GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_right )) num_margin_cells_T6_bottom=165 GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_bottom + 1 )) num_margin_cells_T6_top=171 - GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=$(( RES - num_margin_cells_T6_top )) + GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_top )) DT_ATMOS="18" LAYOUT_X="16" LAYOUT_Y="72" - WRTCMP_write_tasks_per_group="72" BLOCKSIZE=32 + if [ "$QUILTING" = "TRUE" ]; then + WRTCMP_write_groups="1" + WRTCMP_write_tasks_per_group=$(( 1*LAYOUT_Y )) + WRTCMP_output_grid="rotated_latlon" + WRTCMP_cen_lon="${GFDLgrid_LON_T6_CTR}" + WRTCMP_cen_lat="${GFDLgrid_LAT_T6_CTR}" +# GSK - The following have not been tested... + WRTCMP_lon_lwr_left="-25.0" + WRTCMP_lat_lwr_left="-15.0" + WRTCMP_lon_upr_rght="25.0" + WRTCMP_lat_upr_rght="15.0" + WRTCMP_dlon="0.02" + WRTCMP_dlat="0.02" + fi elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then @@ -622,13 +635,13 @@ predefined domain: GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_left + 1 )) num_margin_cells_T6_right=67 - GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=$(( RES - num_margin_cells_T6_right )) + GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_right )) num_margin_cells_T6_bottom=165 GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_bottom + 1 )) num_margin_cells_T6_top=171 - GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=$(( RES - num_margin_cells_T6_top )) + GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_top )) DT_ATMOS="18" @@ -758,32 +771,6 @@ predefined domain: ;; # esac -# -#----------------------------------------------------------------------- -# -# Set the name of the template file containing placeholder values for -# write-component parameters (if this file name is not already set). -# This file will be appended to the model_configure file, and place- -# holder values will be replaced with actual ones. -# -#----------------------------------------------------------------------- -# -if [ "$QUILTING" = "TRUE" ]; then -# -# First, make sure that WRTCMP_output_grid is set to a valid value. 
-# - err_msg="\ -The coordinate system used by the write-component output grid specified -in WRTCMP_output_grid is not supported: - WRTCMP_output_grid = \"${WRTCMP_output_grid}\"" - check_var_valid_value \ - "WRTCMP_output_grid" "valid_vals_WRTCMP_output_grid" "${err_msg}" -# -# Now set the name of the write-component template file. -# - WRTCMP_PARAMS_TMPL_FN=${WRTCMP_PARAMS_TMPL_FN:-"wrtcmp_${WRTCMP_output_grid}"} - -fi } # diff --git a/ush/setup.sh b/ush/setup.sh index a65aaeed0..36b46b24b 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -818,6 +818,32 @@ fi # #----------------------------------------------------------------------- # +# If quilting is enabled, set the name of the template file containing +# placeholder values for write-component parameters (if this file name +# is not already set). This file will be appended to the model_confi- +# gure file, and placeholder values will be replaced with actual ones. +# +#----------------------------------------------------------------------- +# +if [ "$QUILTING" = "TRUE" ]; then +# +# First, make sure that WRTCMP_output_grid is set to a valid value. +# + err_msg="\ +The coordinate system used by the write-component output grid specified +in WRTCMP_output_grid is not supported: + WRTCMP_output_grid = \"${WRTCMP_output_grid}\"" + check_var_valid_value \ + "WRTCMP_output_grid" "valid_vals_WRTCMP_output_grid" "${err_msg}" +# +# Now set the name of the write-component template file. +# + WRTCMP_PARAMS_TMPL_FN=${WRTCMP_PARAMS_TMPL_FN:-"wrtcmp_${WRTCMP_output_grid}"} + +fi +# +#----------------------------------------------------------------------- +# # For a "GFDLgrid" type of grid, make sure GFDLgrid_RES is set to a va- # lid value. # @@ -826,8 +852,8 @@ fi if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then err_msg="\ The number of grid cells per tile in each horizontal direction specified -in GFLDgrid_RES is not supported: - GFLDgrid_RES = \"${GFLDgrid_RES}\"" +in GFDLgrid_RES is not supported: + GFDLgrid_RES = \"${GFDLgrid_RES}\"" check_var_valid_value "GFDLgrid_RES" "valid_vals_GFDLgrid_RES" "${err_msg}" fi # From d68443de119f5833e1fe061379ca341cf0eba6fb Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 17 Jan 2020 15:33:50 -0700 Subject: [PATCH 183/203] Bug fixes. --- scripts/exregional_make_grid.sh | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index 66a47916f..a025047e9 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -147,7 +147,7 @@ case $MACHINE in # { restore_shell_opts; } > /dev/null 2>&1 # # export APRUN="time" - APRUN="time" + export APRUN="time" topo_dir="/scratch1/NCEPDEV/global/glopara/fix/fix_orog" # # ulimit -s unlimited @@ -362,6 +362,10 @@ lution from the file specified by grid_fp faild: grid_fp="$tmpdir/${CRES}_grid.tile${tile_rgnl}.nc" mv_vrfy ${grid_fp_orig} ${grid_fp} + mosaic_fp_orig="$tmpdir/C${GFDLgrid_RES}_mosaic.nc" + mosaic_fp="$tmpdir/${CRES}_mosaic.nc" + mv_vrfy ${mosaic_fp_orig} ${mosaic_fp} + fi printf "%s\n" "res = $res" From 21aa8a8cda280fb7b74ea463282eab2f08894bab Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 21 Jan 2020 00:33:10 -0700 Subject: [PATCH 184/203] Clean up link_fix function and make necessary changes to files in which it is called. Details below. ush/link_fix.sh: --------------- * Remove the input argument res_in_existing_fixsar_filenames. 
This was being used to compare the resolution extracted from file names in previous calls to link_fix to the one extracted from file names in the current call, but this approach is confusing. For clarity, we now perform this comparison in setup.sh, after each call to link_fix (see below). * For clarity, rename the argument output_varname_res to output_varname_res_in_filenames. This specifies the name of the variable in which to save the resolution extracted from the file names. * Put in an if-statement that checks whether output_varname_res_in_filenames is set to a null string, and if so, do not try to use the eval function to set the variable specified by output_varname_res_in_filenames to the resolution extracted from the file names. This is useful because we don't always need the resolution returned in a variable. * Update declaration list of local variables. * Remove commented-out code. * Update comments. * Reduce the number of local variables used for storing file names in the specified file group. * Update informational and error messages. * Remove debugging print statements. * After changing directory to FIXsar, change back to the original directory that link_fix was called from. ush/setup.sh: ------------ * Move creation of FIXsar directory to somewhere before the calls to the link_fix function. Otherwise, the calls to link_fix will fail. * Use local variables for storing resolutions extracted from the names of the grid, orography, and surface climatology files. Then compare them to make sure they are the same, and if so, store this resolution in the global variable RES_IN_FIXSAR_FILENAMES. Note that if all three tasks make_grid, make_orog, and make_sfc_climo are to be run, then RES_IN_FIXSAR_FILENAMES remain set to a null string (which is what it was initialized to). * Change arguments in calls to the link_fix function to match the new argument list in link_fix.sh. scripts/exregional_make_[grid,orog,sfc_climo].sh: ------------------------------------------------ * Change arguments in calls to the link_fix function in order to match the new arguments list in link_fix.sh. * In the call to link_fix function, don't specify the argument that is the output variable name for the resolution in the file names (output_varname_res_in_filenames). This means that output_varname_res_in_filenames will get set to a null string (by the function process_args), which (because of one of the changes made in link_fix.sh listed above) means that there will be no variable that gets set to the resolution that link_fix extracts from the file names. We do this because we do not need this resolution saved in an output variable. --- scripts/exregional_make_grid.sh | 6 +- scripts/exregional_make_orog.sh | 6 +- scripts/exregional_make_sfc_climo.sh | 6 +- ush/link_fix.sh | 260 +++++++++++++-------------- ush/setup.sh | 63 +++++-- 5 files changed, 173 insertions(+), 168 deletions(-) diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index a025047e9..1e962576c 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -572,10 +572,8 @@ cd_vrfy - #----------------------------------------------------------------------- # link_fix \ - verbose="FALSE" \ - file_group="grid" \ - res_in_existing_fixsar_filenames="${RES_IN_FIXSAR_FILENAMES}" \ - output_varname_res="dummy" || \ + verbose="$VERBOSE" \ + file_group="grid" || \ print_err_msg_exit "\ Call to function to create links to grid files failed." 
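The optional-output-variable behavior described in the commit message for this patch (set the caller-named variable via eval only when a name was actually supplied) boils down to a few lines. This is a pared-down sketch, not the real link_fix function; it takes the variable name as a positional argument instead of going through process_args, and the resolution value is a stand-in.

  set_res() {
    local output_varname_res_in_filenames="$1"
    local res="403"    # stand-in for the resolution parsed from the file names
    if [ ! -z "${output_varname_res_in_filenames}" ]; then
      eval ${output_varname_res_in_filenames}="$res"
    fi
  }

  res_in_grid_fns=""
  set_res "res_in_grid_fns"
  echo "res_in_grid_fns = ${res_in_grid_fns}"   # -> 403
  set_res ""                                    # no output variable requested; nothing is set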
# diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index 842b9ced7..2495b79de 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -571,10 +571,8 @@ cd_vrfy - #----------------------------------------------------------------------- # link_fix \ - verbose="FALSE" \ - file_group="orog" \ - res_in_existing_fixsar_filenames="${RES_IN_FIXSAR_FILENAMES}" \ - output_varname_res="dummy" || \ + verbose="$VERBOSE" \ + file_group="orog" || \ print_err_msg_exit "\ Call to function to create links to orography files failed." diff --git a/scripts/exregional_make_sfc_climo.sh b/scripts/exregional_make_sfc_climo.sh index d7a1de01d..9dcfe5486 100755 --- a/scripts/exregional_make_sfc_climo.sh +++ b/scripts/exregional_make_sfc_climo.sh @@ -235,10 +235,8 @@ esac #----------------------------------------------------------------------- # link_fix \ - verbose="FALSE" \ - file_group="sfc_climo" \ - res_in_existing_fixsar_filenames="${RES_IN_FIXSAR_FILENAMES}" \ - output_varname_res="dummy" || \ + verbose="$VERBOSE" \ + file_group="sfc_climo" || \ print_err_msg_exit "\ Call to function to create links to surface climatology files failed." # diff --git a/ush/link_fix.sh b/ush/link_fix.sh index 9b88e80c5..a943994b7 100755 --- a/ush/link_fix.sh +++ b/ush/link_fix.sh @@ -49,8 +49,7 @@ function link_fix() { local valid_args=( \ "verbose" \ "file_group" \ -"res_in_existing_fixsar_filenames" \ -"output_varname_res" \ +"output_varname_res_in_filenames" \ ) process_args valid_args "$@" # @@ -70,33 +69,40 @@ function link_fix() { # #----------------------------------------------------------------------- # - local fns_grid \ - fns_orog \ + local valid_vals_verbose \ + valid_vals_file_group \ + fns \ + fps \ + run_task \ sfc_climo_fields \ num_fields \ - fns_sfc_climo \ i \ ii \ - valid_vals_file_group \ - fps_grid \ - fps_orog \ - fps_sfc_climo \ - fps_all \ - run_task \ res_prev \ res \ fp_prev \ fp \ fn \ - cres + relative_or_null \ + cres \ + tmp \ + fns_sfc_climo_with_halo_in_fn \ + fns_sfc_climo_no_halo_in_fn \ + target \ + symlink # #----------------------------------------------------------------------- # -# Source the variable definitions file and the bash utility functions. +# Set the valid values that various input arguments can take on and then +# ensure that the values passed in are one of these valid values. # #----------------------------------------------------------------------- # -# . ${global_var_defns_fp} + valid_vals_verbose=( "TRUE" "FALSE" ) + check_var_valid_value "verbose" "valid_vals_verbose" + + valid_vals_file_group=( "grid" "orog" "sfc_climo" ) + check_var_valid_value "file_group" "valid_vals_file_group" # #----------------------------------------------------------------------- # @@ -116,22 +122,34 @@ Creating links in the FIXsar directory to the grid files..." # #----------------------------------------------------------------------- # -# Create globbing patterns for grid, orography, and surface climo files. +# Create globbing patterns for grid, orography, and surface climatology +# files. 
# #----------------------------------------------------------------------- # - fns_grid=( \ + case "${file_group}" in +# + "grid") + fns=( \ "C*_mosaic.nc" \ "C*_grid.tile${TILE_RGNL}.halo${NH3}.nc" \ "C*_grid.tile${TILE_RGNL}.halo${NH4}.nc" \ - ) - - fns_orog=( \ + ) + fps=( "${fns[@]/#/${GRID_DIR}/}" ) + run_task="${RUN_TASK_MAKE_GRID}" + ;; +# + "orog") + fns=( \ "C*_oro_data.tile${TILE_RGNL}.halo${NH0}.nc" \ "C*_oro_data.tile${TILE_RGNL}.halo${NH4}.nc" \ - ) - - sfc_climo_fields=( \ + ) + fps=( "${fns[@]/#/${OROG_DIR}/}" ) + run_task="${RUN_TASK_MAKE_OROG}" + ;; +# + "sfc_climo") + sfc_climo_fields=( \ "facsf" \ "maximum_snow_albedo" \ "slope_type" \ @@ -140,51 +158,24 @@ Creating links in the FIXsar directory to the grid files..." "substrate_temperature" \ "vegetation_greenness" \ "vegetation_type" \ - ) - num_fields=${#sfc_climo_fields[@]} - fns_sfc_climo=() - for (( i=0; i<${num_fields}; i++ )); do - ii=$((2*i)) - fns_sfc_climo[$ii]="C*.${sfc_climo_fields[$i]}.tile${TILE_RGNL}.halo${NH0}.nc" - fns_sfc_climo[$ii+1]="C*.${sfc_climo_fields[$i]}.tile${TILE_RGNL}.halo${NH4}.nc" - done -# -#----------------------------------------------------------------------- -# -# Set the valid values that file_group can take on and then check whe- -# ther it is in fact set to one of these valid values. -# -#----------------------------------------------------------------------- -# - valid_vals_file_group=( "grid" "orog" "sfc_climo" ) - check_var_valid_value "file_group" "valid_vals_file_group" -# -#----------------------------------------------------------------------- -# -# Prepend appropriate directory to each set of file name globbing pat- -# terns. -# -#----------------------------------------------------------------------- -# - fps_grid=( "${fns_grid[@]/#/${GRID_DIR}/}" ) - fps_orog=( "${fns_orog[@]/#/${OROG_DIR}/}" ) - fps_sfc_climo=( "${fns_sfc_climo[@]/#/${SFC_CLIMO_DIR}/}" ) - - if [ "${file_group}" = "grid" ]; then - fps_all=( "${fps_grid[@]}" ) - run_task="${RUN_TASK_MAKE_GRID}" - elif [ "${file_group}" = "orog" ]; then - fps_all=( "${fps_orog[@]}" ) - run_task="${RUN_TASK_MAKE_OROG}" - elif [ "${file_group}" = "sfc_climo" ]; then - fps_all=( "${fps_sfc_climo[@]}" ) + ) + num_fields=${#sfc_climo_fields[@]} + fns=() + for (( i=0; i<${num_fields}; i++ )); do + ii=$((2*i)) + fns[$ii]="C*.${sfc_climo_fields[$i]}.tile${TILE_RGNL}.halo${NH0}.nc" + fns[$ii+1]="C*.${sfc_climo_fields[$i]}.tile${TILE_RGNL}.halo${NH4}.nc" + done + fps=( "${fns[@]/#/${SFC_CLIMO_DIR}/}" ) run_task="${RUN_TASK_MAKE_SFC_CLIMO}" - fi + ;; +# + esac # #----------------------------------------------------------------------- # # Find all files matching the globbing patterns and make sure that they -# all have the same C-resolution in their names. +# all have the same resolution (an integer) in their names. # #----------------------------------------------------------------------- # @@ -193,32 +184,28 @@ Creating links in the FIXsar directory to the grid files..." res="" fp_prev="" - for fp in ${fps_all[@]}; do + for fp in ${fps[@]}; do fn=$( basename $fp ) -printf "i = %s\n" "$i" -printf " fn = %s\n" "$fn" res=$( printf "%s" $fn | sed -n -r -e "s/^C([0-9]*).*/\1/p" ) if [ -z $res ]; then print_err_msg_exit "\ -The C-resolution could not be extracted from the current file's name. -The full path to the file (fp) is: +The resolution could not be extracted from the current file's name. 
The +full path to the file (fp) is: fp = \"${fp}\" This may be because fp contains the * globbing character, which would imply that no files were found that match the globbing pattern specified in fp." fi -printf " res_prev = %s\n" "${res_prev}" -printf " res = %s\n" "${res}" if [ $i -gt 0 ] && [ ${res} != ${res_prev} ]; then print_err_msg_exit "\ -The C-resolutions (as obtained from the file names) of the previous and +The resolutions (as obtained from the file names) of the previous and current file (fp_prev and fp, respectively) are different: fp_prev = \"${fp_prev}\" fp = \"${fp}\" -Please ensure that all files have the same C-resolution." +Please ensure that all files have the same resolution." fi i=$((i+1)) @@ -229,85 +216,53 @@ Please ensure that all files have the same C-resolution." # #----------------------------------------------------------------------- # +# If the output variable name is not set to a null string, set it. This +# variable is just the resolution extracted from the file names in the +# specified file group. Note that if the output variable name is not +# specified in the call to this function, the process_args function will +# set it to a null string, in which case no output variable will be set. # +#----------------------------------------------------------------------- +# + if [ ! -z "${output_varname_res_in_filenames}" ]; then + eval ${output_varname_res_in_filenames}="$res" + fi # #----------------------------------------------------------------------- # -# if [ "${res_in_existing_fixsar_filenames}" = "" ]; then +# Replace the * globbing character in the set of globbing patterns with +# the resolution. This will result in a set of (full paths to) specific +# files. # -# print_info_msg "$verbose" " -#Setting variable res_in_existing_fixsar_filenames to the resolution -#(res) extracted from the names of the specifed group of fixed files -#(file_group): -# file_group = \"${file_group}\" -# res = $res" +#----------------------------------------------------------------------- # -# res_in_existing_fixsar_filenames=$res + fps=( "${fps[@]/\*/$res}" ) # -# elif [ "${res_in_existing_fixsar_filenames}" = "$res" ]; then +#----------------------------------------------------------------------- # -# print_info_msg "$verbose" " -#As expected, the value of res_in_existing_fixsar_filenames (obtained -#from the names of a previously considered group of fixed files, e.g. -#grid and/or orography files) matches the resolution (res) extracted from -#the names of the specifed group of fixed files (file_group): -# file_group = \"${file_group}\" -# res = $res -# res_in_existing_fixsar_filenames = ${res_in_existing_fixsar_filenames}" - - if [ "${res_in_existing_fixsar_filenames}" = "" ] || \ - [ "${res_in_existing_fixsar_filenames}" = "$res" ]; then +# In creating the various symlinks below, it is convenient to work in +# the FIXsar directory. We will change directory back to the original +# later below. # -# Use the eval function to set the value of the output variable. +#----------------------------------------------------------------------- # - eval ${output_varname_res}="$res" - cres="C$res" - - else - - print_err_msg_exit "\ -The value of res_in_existing_fixsar_filenames (obtained from the names -of a previously considered group of fixed files, e.g. 
grid and/or oro- -graphy files) does not match the resolution (res) extracted the names of -the specifed group of fixed files (file_group): - file_group = \"${file_group}\" - res = $res - res_in_existing_fixsar_filenames = ${res_in_existing_fixsar_filenames} -This usually means that one or more of the file groups (grid, orography, -and/or surface climatology) are defined on different grids." - - fi + cd_vrfy "$FIXsar" # #----------------------------------------------------------------------- # -# Replace the * globbing character in the set of globbing patterns with -# the C-resolution. This will result in a set of (full paths to) speci- -# fic files. Use these as the link targets to create symlinks in the -# FIXsar directory. +# Use the set of full file paths generated above as the link targets to +# create symlinks to these files in the FIXsar directory. # #----------------------------------------------------------------------- # - fps_all=( "${fps_all[@]/\*/$res}" ) - -echo -printf "fps_all = ( \\ \n" -printf "\"%s\" \\ \n" "${fps_all[@]}" -printf ")" -echo - relative_or_null="" if [ "${run_task}" = "TRUE" ]; then relative_or_null="--relative" fi -echo -echo "FIXsar = \"$FIXsar\"" - - cd_vrfy $FIXsar - for fp in "${fps_all[@]}"; do + for fp in "${fps[@]}"; do if [ -f "$fp" ]; then ln_vrfy -sf ${relative_or_null} $fp . -# ln_vrfy -sf $fp . else print_err_msg_exit "\ Cannot create symlink because target file (fp) does not exist: @@ -317,30 +272,47 @@ Cannot create symlink because target file (fp) does not exist: # #----------------------------------------------------------------------- # -# Create links locally (in the FIXsar directory) needed by the forecast -# task. These are "files" that the FV3 executable looks for. +# Set the C-resolution based on the resolution appearing in the file +# names. +# +#----------------------------------------------------------------------- +# + cres="C$res" +# +#----------------------------------------------------------------------- +# +# If considering grid files, create a symlink to the halo4 grid file +# that does not contain the halo size in its name. This is needed by +# the tasks that generate the initial and lateral boundary condition +# files. # #----------------------------------------------------------------------- # if [ "${file_group}" = "grid" ]; then -# Create link to grid file needed by the make_ic and make_lbc tasks. - filename="${cres}_grid.tile${TILE_RGNL}.halo${NH4}.nc" - ln_vrfy -sf ${relative_or_null} $filename ${cres}_grid.tile${TILE_RGNL}.nc + target="${cres}_grid.tile${TILE_RGNL}.halo${NH4}.nc" + symlink="${cres}_grid.tile${TILE_RGNL}.nc" + ln_vrfy -sf $target $symlink fi - -# Create links to surface climatology files needed by the make_ic task. +# +#----------------------------------------------------------------------- +# +# If considering surface climatology files, create symlinks to the sur- +# face climatology files that do not contain the halo size in their +# names. These are needed by the task that generates the initial condi- +# tion files. 
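The surface climatology file names built in the block that follows rely on the ${array[@]/#/prefix} and ${array[@]/%/suffix} expansions, which prepend or append text to every element of an array. A small standalone example, with the field list shortened and the resolution value assumed:

  sfc_climo_fields=( "facsf" "slope_type" "vegetation_type" )
  cres="C403"; TILE_RGNL="7"; NH4="4"

  tmp=( "${sfc_climo_fields[@]/#/${cres}.}" )                        # prepend "C403." to each element
  fns_with_halo=( "${tmp[@]/%/.tile${TILE_RGNL}.halo${NH4}.nc}" )    # append the halo suffix
  fns_no_halo=( "${tmp[@]/%/.tile${TILE_RGNL}.nc}" )                 # append the no-halo suffix

  printf "%s\n" "${fns_with_halo[@]}"
  # C403.facsf.tile7.halo4.nc
  # C403.slope_type.tile7.halo4.nc
  # C403.vegetation_type.tile7.halo4.nc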
+# +#----------------------------------------------------------------------- +# if [ "${file_group}" = "sfc_climo" ]; then tmp=( "${sfc_climo_fields[@]/#/${cres}.}" ) - fns_sfc_climo_with_halo=( "${tmp[@]/%/.tile${TILE_RGNL}.halo${NH4}.nc}" ) - fns_sfc_climo_no_halo=( "${tmp[@]/%/.tile${TILE_RGNL}.nc}" ) + fns_sfc_climo_with_halo_in_fn=( "${tmp[@]/%/.tile${TILE_RGNL}.halo${NH4}.nc}" ) + fns_sfc_climo_no_halo_in_fn=( "${tmp[@]/%/.tile${TILE_RGNL}.nc}" ) - cd_vrfy $FIXsar for (( i=0; i<${num_fields}; i++ )); do - target="${fns_sfc_climo_with_halo[$i]}" - symlink="${fns_sfc_climo_no_halo[$i]}" + target="${fns_sfc_climo_with_halo_in_fn[$i]}" + symlink="${fns_sfc_climo_no_halo_in_fn[$i]}" if [ -f "$target" ]; then -# ln_vrfy -sf ${relative_or_null} $target $symlink ln_vrfy -sf $target $symlink else print_err_msg_exit "\ @@ -348,6 +320,14 @@ Cannot create symlink because target file (target) does not exist: target = \"${target}\"" fi done +# +#----------------------------------------------------------------------- +# +# Change directory back to original one. +# +#----------------------------------------------------------------------- +# + cd_vrfy - fi # diff --git a/ush/setup.sh b/ush/setup.sh index 36b46b24b..13c956ea5 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -1465,6 +1465,10 @@ fi #----------------------------------------------------------------------- # RES_IN_FIXSAR_FILENAMES="" + +if [ "${RUN_ENVIR}" != "nco" ]; then + mkdir_vrfy -p "$FIXsar" +fi # #----------------------------------------------------------------------- # @@ -1474,14 +1478,18 @@ RES_IN_FIXSAR_FILENAMES="" # #----------------------------------------------------------------------- # +res_in_grid_fns="" if [ "${RUN_TASK_MAKE_GRID}" = "FALSE" ]; then + link_fix \ - verbose="FALSE" \ + verbose="$VERBOSE" \ file_group="grid" \ - res_in_existing_fixsar_filenames="${RES_IN_FIXSAR_FILENAMES}" \ - output_varname_res="RES_IN_FIXSAR_FILENAMES" || \ + output_varname_res_in_filenames="res_in_grid_fns" || \ print_err_msg_exit "\ Call to function to create links to grid files failed." + + RES_IN_FIXSAR_FILENAMES="${res_in_grid_fns}" + fi # #----------------------------------------------------------------------- @@ -1492,14 +1500,28 @@ fi # #----------------------------------------------------------------------- # +res_in_orog_fns="" if [ "${RUN_TASK_MAKE_OROG}" = "FALSE" ]; then + link_fix \ - verbose="FALSE" \ + verbose="$VERBOSE" \ file_group="orog" \ - res_in_existing_fixsar_filenames="${RES_IN_FIXSAR_FILENAMES}" \ - output_varname_res="RES_IN_FIXSAR_FILENAMES" || \ + output_varname_res_in_filenames="res_in_orog_fns" || \ print_err_msg_exit "\ Call to function to create links to orography files failed." + + if [ ! 
-z "${RES_IN_FIXSAR_FILENAMES}" ] && \ + [ "${res_in_orog_fns}" -ne "${RES_IN_FIXSAR_FILENAMES}" ]; then + print_err_msg_exit "\ +The resolution extracted from the orography file names (res_in_orog_fns) +does not match the resolution in other groups of files already consi- +dered (RES_IN_FIXSAR_FILENAMES): + res_in_orog_fns = ${res_in_orog_fns} + RES_IN_FIXSAR_FILENAMES = ${RES_IN_FIXSAR_FILENAMES}" + else + RES_IN_FIXSAR_FILENAMES="${res_in_orog_fns}" + fi + fi # #----------------------------------------------------------------------- @@ -1511,14 +1533,28 @@ fi # #----------------------------------------------------------------------- # +res_in_sfc_climo_fns="" if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "FALSE" ]; then + link_fix \ - verbose="FALSE" \ - file_group="sfc_climo" || \ - res_in_existing_fixsar_filenames="${RES_IN_FIXSAR_FILENAMES}" \ - output_varname_res="RES_IN_FIXSAR_FILENAMES" || \ + verbose="$VERBOSE" \ + file_group="sfc_climo" \ + output_varname_res_in_filenames="res_in_sfc_climo_fns" || \ print_err_msg_exit "\ Call to function to create links to surface climatology files failed." + + if [ ! -z "${RES_IN_FIXSAR_FILENAMES}" ] && \ + [ "${res_in_sfc_climo_fns}" -ne "${RES_IN_FIXSAR_FILENAMES}" ]; then + print_err_msg_exit "\ +The resolution extracted from the surface climatology file names (res_- +in_sfc_climo_fns) does not match the resolution in other groups of files +already considered (RES_IN_FIXSAR_FILENAMES): + res_in_sfc_climo_fns = ${res_in_sfc_climo_fns} + RES_IN_FIXSAR_FILENAMES = ${RES_IN_FIXSAR_FILENAMES}" + else + RES_IN_FIXSAR_FILENAMES="${res_in_sfc_climo_fns}" + fi + fi # #----------------------------------------------------------------------- @@ -1749,11 +1785,6 @@ fi #----------------------------------------------------------------------- # mkdir_vrfy -p "$EXPTDIR" - -# Maybe do the following later? Not sure yet... -if [ "${RUN_ENVIR}" != "nco" ]; then - mkdir_vrfy -p $FIXsar -fi # #----------------------------------------------------------------------- # @@ -1785,7 +1816,7 @@ fi #----------------------------------------------------------------------- # GLOBAL_VAR_DEFNS_FP="$EXPTDIR/$GLOBAL_VAR_DEFNS_FN" -cp_vrfy ./${DEFAULT_EXPT_CONFIG_FN} ${GLOBAL_VAR_DEFNS_FP} +cp_vrfy $USHDIR/${DEFAULT_EXPT_CONFIG_FN} ${GLOBAL_VAR_DEFNS_FP} # #----------------------------------------------------------------------- # From ef8aea9bf3299c770218c550ebcfcf4d0eda1735 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 21 Jan 2020 10:03:22 -0700 Subject: [PATCH 185/203] Delete commented-out code and fix comments. --- scripts/exregional_run_fcst.sh | 10 +++--- ush/config.nco.sh | 1 + ush/generate_FV3SAR_wflow.sh | 66 ---------------------------------- 3 files changed, 6 insertions(+), 71 deletions(-) diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index dba2237fd..0fc09d41c 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -345,11 +345,11 @@ if [ "${RUN_ENVIR}" = "nco" ]; then # # If not running in "nco" mode, FIXam is an actual directory (not a sym- # link) in the experiment directory that contains the same files as the -# system fix directory except that the files have renamed to the file -# names that FV3 looks for. Thus, when creating links to the files in -# this directory, both the target and symlink names should be the ones -# specified in the FIXam_FILES_EXPTDIR array (because that array con- -# tains the file names that FV3 looks for). 
+# system fix directory except that the files have been renamed to the +# file names that FV3 looks for. Thus, when creating links to the files +# in this directory, both the target and symlink names should be the +# ones specified in the FIXam_FILES_EXPTDIR array (because that array +# contains the file names that FV3 looks for). # else diff --git a/ush/config.nco.sh b/ush/config.nco.sh index ac9ea1598..a7fc33ba9 100644 --- a/ush/config.nco.sh +++ b/ush/config.nco.sh @@ -13,6 +13,7 @@ PREEXISTING_DIR_METHOD="rename" EMC_GRID_NAME="conus" GRID_GEN_METHOD="JPgrid" + QUILTING="TRUE" USE_CCPP="TRUE" CCPP_PHYS_SUITE="FV3_GFS_2017_gfdlmp" diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index 91224f1d4..151738106 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -478,72 +478,6 @@ echo "RES = $RES" set_file_param "${GLOBAL_VAR_DEFNS_FP}" "RES" "${RES}" set_file_param "${GLOBAL_VAR_DEFNS_FP}" "CRES" "${CRES}" -# -#----------------------------------------------------------------------- -# -# -# -#----------------------------------------------------------------------- -# -#else -# -# res_in_fixsar_filenames="" -## -##----------------------------------------------------------------------- -## -## If the grid file generation task in the workflow is going to be -## skipped (because pregenerated files are available), create links in -## the FIXsar directory to the pregenerated grid files. -## -##----------------------------------------------------------------------- -## -# if [ "${RUN_TASK_MAKE_GRID}" = "FALSE" ]; then -# link_fix \ -# verbose="FALSE" \ -# file_group="grid" \ -# res_in_existing_fixsar_filenames="${res_in_fixsar_filenames}" \ -# output_varname_res="res_in_fixsar_filenames" || \ -# print_err_msg_exit "\ -#Call to function to create links to grid files failed." -# fi -## -##----------------------------------------------------------------------- -## -## If the orography file generation task in the workflow is going to be -## skipped (because pregenerated files are available), create links in -## the FIXsar directory to the pregenerated orography files. -## -##----------------------------------------------------------------------- -## -# if [ "${RUN_TASK_MAKE_OROG}" = "FALSE" ]; then -# link_fix \ -# verbose="FALSE" \ -# file_group="orog" \ -# res_in_existing_fixsar_filenames="${res_in_fixsar_filenames}" \ -# output_varname_res="res_in_fixsar_filenames" || \ -# print_err_msg_exit "\ -#Call to function to create links to orography files failed." -# fi -## -##----------------------------------------------------------------------- -## -## If the surface climatology file generation task in the workflow is -## going to be skipped (because pregenerated files are available), create -## links in the FIXsar directory to the pregenerated surface climatology -## files. -## -##----------------------------------------------------------------------- -## -# if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "FALSE" ]; then -# link_fix \ -# verbose="FALSE" \ -# file_group="sfc_climo" || \ -# res_in_existing_fixsar_filenames="${res_in_fixsar_filenames}" \ -# output_varname_res="res_in_fixsar_filenames" || \ -# print_err_msg_exit "\ -#Call to function to create links to surface climatology files failed." 
-# fi -# fi # #----------------------------------------------------------------------- From 0e1fadfa27136a7d0d5ac5bfd0235ea4a1fe199b Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 21 Jan 2020 11:09:59 -0700 Subject: [PATCH 186/203] Use consistent method of creating symlinks (first defining the variables "target" and "symlink" variables, then checking for the existence of "target", then creating the symlink). --- scripts/exregional_run_fcst.sh | 41 ++++++++++++++++++++++++++++------ 1 file changed, 34 insertions(+), 7 deletions(-) diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index 0fc09d41c..d4d133129 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -212,8 +212,9 @@ fi # Symlink to mosaic file with a completely different name. target="${FIXsar}/${CRES}_mosaic.nc" +symlink="grid_spec.nc" if [ -f "${target}" ]; then - ln_vrfy -sf ${relative_or_null} $target grid_spec.nc + ln_vrfy -sf ${relative_or_null} $target $symlink else print_err_msg_exit "\ Cannot create symlink because target does not exist: @@ -222,8 +223,9 @@ fi # Symlink to halo-3 grid file with "halo4" stripped from name. target="${FIXsar}/${CRES}_grid.tile${TILE_RGNL}.halo${NH3}.nc" +symlink="${CRES}_grid.tile${TILE_RGNL}.nc" if [ -f "${target}" ]; then - ln_vrfy -sf ${relative_or_null} $target ${CRES}_grid.tile${TILE_RGNL}.nc + ln_vrfy -sf ${relative_or_null} $target $symlink else print_err_msg_exit "\ Cannot create symlink because target does not exist: @@ -243,8 +245,9 @@ fi # sumes core-hours. # target="${FIXsar}/${CRES}_grid.tile${TILE_RGNL}.halo${NH4}.nc" +symlink="grid.tile${TILE_RGNL}.halo${NH4}.nc" if [ -f "${target}" ]; then - ln_vrfy -sf $target ${relative_or_null} grid.tile${TILE_RGNL}.halo${NH4}.nc + ln_vrfy -sf ${relative_or_null} $target $symlink else print_err_msg_exit "\ Cannot create symlink because target does not exist: @@ -260,8 +263,9 @@ fi # Symlink to halo-0 orography file with "${CRES}_" and "halo0" stripped from name. target="${FIXsar}/${CRES}_oro_data.tile${TILE_RGNL}.halo${NH0}.nc" +symlink="oro_data.nc" if [ -f "${target}" ]; then - ln_vrfy -sf ${relative_or_null} $target oro_data.nc + ln_vrfy -sf ${relative_or_null} $target $symlink else print_err_msg_exit "\ Cannot create symlink because target does not exist: @@ -282,8 +286,9 @@ fi # sumes core-hours. # target="${FIXsar}/${CRES}_oro_data.tile${TILE_RGNL}.halo${NH4}.nc" +symlink="oro_data.tile${TILE_RGNL}.halo${NH4}.nc" if [ -f "${target}" ]; then - ln_vrfy -sf $target ${relative_or_null} oro_data.tile${TILE_RGNL}.halo${NH4}.nc + ln_vrfy -sf ${relative_or_null} $target $symlink else print_err_msg_exit "\ Cannot create symlink because target does not exist: @@ -313,8 +318,30 @@ Creating links with names that FV3 looks for in the INPUT subdirectory of the current cycle's run directory (CYCLE_DIR)..." 
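The target/symlink/existence-check pattern that this commit applies throughout the script could also be factored into a small helper. The function below is only a sketch of that idea; it is not part of the repository, and it uses plain ln and an echo to stderr in place of the workflow's ln_vrfy and print_err_msg_exit wrappers. The file names in the example call are illustrative.

  create_symlink_to_file() {
    local target="$1"
    local symlink="$2"
    local relative_or_null="${3:-}"     # left unquoted below so an empty value disappears
    if [ -f "${target}" ]; then
      ln -sf ${relative_or_null} "${target}" "${symlink}"
    else
      echo "Cannot create symlink because target does not exist: ${target}" >&2
      return 1
    fi
  }

  create_symlink_to_file "gfs_data.tile7.halo0.nc" "gfs_data.nc"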
cd_vrfy ${CYCLE_DIR}/INPUT -ln_vrfy -sf gfs_data.tile${TILE_RGNL}.halo${NH0}.nc gfs_data.nc -ln_vrfy -sf sfc_data.tile${TILE_RGNL}.halo${NH0}.nc sfc_data.nc +#ln_vrfy -sf gfs_data.tile${TILE_RGNL}.halo${NH0}.nc gfs_data.nc +#ln_vrfy -sf sfc_data.tile${TILE_RGNL}.halo${NH0}.nc sfc_data.nc + +relative_or_null="" + +target="gfs_data.tile${TILE_RGNL}.halo${NH0}.nc" +symlink="gfs_data.nc" +if [ -f "${target}" ]; then + ln_vrfy -sf ${relative_or_null} $target $symlink +else + print_err_msg_exit "\ +Cannot create symlink because target does not exist: + target = \"$target}\"" +fi + +target="sfc_data.tile${TILE_RGNL}.halo${NH0}.nc" +symlink="sfc_data.nc" +if [ -f "${target}" ]; then + ln_vrfy -sf ${relative_or_null} $target $symlink +else + print_err_msg_exit "\ +Cannot create symlink because target does not exist: + target = \"$target}\"" +fi # #----------------------------------------------------------------------- # From 7442cb38a0fefe4c6fbfc27ddfbba41cedb01412 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 21 Jan 2020 11:12:22 -0700 Subject: [PATCH 187/203] Minor changes. --- scripts/exregional_run_fcst.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index d4d133129..020c629bd 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -367,7 +367,8 @@ files in the FIXam directory..." if [ "${RUN_ENVIR}" = "nco" ]; then for (( i=0; i<${NUM_FIXam_FILES}; i++ )); do - ln_vrfy -sf $FIXam/${FIXam_FILES_SYSDIR[$i]} ${CYCLE_DIR}/${FIXam_FILES_EXPTDIR[$i]} + ln_vrfy -sf $FIXam/${FIXam_FILES_SYSDIR[$i]} \ + ${CYCLE_DIR}/${FIXam_FILES_EXPTDIR[$i]} done # # If not running in "nco" mode, FIXam is an actual directory (not a sym- From cc9fda786aff906ef47acef35aaceaaf95a4291f Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 21 Jan 2020 11:15:41 -0700 Subject: [PATCH 188/203] Fixes to make the workflow work for the case of a GFDLgrid type of grid (i.e. GRID_GEN_METHOD set to "GFDLgrid") using a resolution in the file names that is NOT the number of grid points per direction per tile but the equivalent global uniform cubed-sphere grid resolution (i.e. GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES set to "FALSE"). --- scripts/exregional_run_fcst.sh | 17 +++++++---------- ush/link_fix.sh | 19 +++++++++++++++++++ 2 files changed, 26 insertions(+), 10 deletions(-) diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index 020c629bd..475feef2e 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -181,15 +181,6 @@ esac # #----------------------------------------------------------------------- # -# Change location to the INPUT subdirectory of the current cycle's run -# directory. -# -#----------------------------------------------------------------------- -# -#cd_vrfy ${CYCLE_DIR}/INPUT -# -#----------------------------------------------------------------------- -# # Create links in the INPUT subdirectory of the current cycle's run di- # rectory to the grid and (filtered) orography files. # @@ -223,7 +214,13 @@ fi # Symlink to halo-3 grid file with "halo4" stripped from name. 
target="${FIXsar}/${CRES}_grid.tile${TILE_RGNL}.halo${NH3}.nc" -symlink="${CRES}_grid.tile${TILE_RGNL}.nc" +if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "TRUE" ] && \ + [ "${GRID_GEN_METHOD}" = "GFDLgrid" ] && \ + [ "${GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES}" = "FALSE" ]; then + symlink="C${GFDLgrid_RES}_grid.tile${TILE_RGNL}.nc" +else + symlink="${CRES}_grid.tile${TILE_RGNL}.nc" +fi if [ -f "${target}" ]; then ln_vrfy -sf ${relative_or_null} $target $symlink else diff --git a/ush/link_fix.sh b/ush/link_fix.sh index a943994b7..b1d9275eb 100755 --- a/ush/link_fix.sh +++ b/ush/link_fix.sh @@ -292,6 +292,25 @@ Cannot create symlink because target file (fp) does not exist: target="${cres}_grid.tile${TILE_RGNL}.halo${NH4}.nc" symlink="${cres}_grid.tile${TILE_RGNL}.nc" ln_vrfy -sf $target $symlink +# +# The surface climatology file generation code looks for a grid file ha- +# ving a name of the form "C${GFDLgrid_RES}_tile7.halo4.nc" (i.e. the +# resolution used in this file is that of the number of grid points per +# horizontal direction per tile, just like in the global model). Thus, +# if we are running this code, if the grid is of GFDLgrid type, and if +# we are not using GFDLgrid_RES in filenames (i.e. we are using the +# equivalent global uniform grid resolution instead), then create a +# link whose name uses the GFDLgrid_RES that points to the link whose +# name uses the equivalent global uniform resolution. +# + if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "TRUE" ] && \ + [ "${GRID_GEN_METHOD}" = "GFDLgrid" ] && \ + [ "${GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES}" = "FALSE" ]; then + target="${cres}_grid.tile${TILE_RGNL}.halo${NH4}.nc" + symlink="C${GFDLgrid_RES}_grid.tile${TILE_RGNL}.nc" + ln_vrfy -sf $target $symlink + fi + fi # #----------------------------------------------------------------------- From 09c09ef2ff27faa833f7c90c6d16823772cbd8da Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 21 Jan 2020 12:27:20 -0700 Subject: [PATCH 189/203] Update directory location for FV3GFS files on hera (for generating ICs and LBCs). --- ush/set_extrn_mdl_params.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ush/set_extrn_mdl_params.sh b/ush/set_extrn_mdl_params.sh index de6073fae..4749e23b5 100644 --- a/ush/set_extrn_mdl_params.sh +++ b/ush/set_extrn_mdl_params.sh @@ -91,7 +91,7 @@ this machine and external model combination: EXTRN_MDL_FILES_SYSBASEDIR_ICS="/scratch4/NCEPDEV/rstprod/com/gfs/prod" ;; "HERA") - EXTRN_MDL_FILES_SYSBASEDIR_ICS="/scratch2/NCEPDEV/fv3-cam/noscrub/Eric.Rogers/prfv3rt1" + EXTRN_MDL_FILES_SYSBASEDIR_ICS="/scratch1/NCEPDEV/hwrf/noscrub/hafs-input/COMGFS" ;; "JET") EXTRN_MDL_FILES_SYSBASEDIR_ICS="/lfs3/projects/hpc-wof1/ywang/regional_fv3/gfs" @@ -241,7 +241,7 @@ this machine and external model combination: EXTRN_MDL_FILES_SYSBASEDIR_LBCS="/scratch4/NCEPDEV/rstprod/com/gfs/prod" ;; "HERA") - EXTRN_MDL_FILES_SYSBASEDIR_LBCS="/scratch2/NCEPDEV/fv3-cam/noscrub/Eric.Rogers/prfv3rt1" + EXTRN_MDL_FILES_SYSBASEDIR_LBCS="/scratch1/NCEPDEV/hwrf/noscrub/hafs-input/COMGFS" ;; "JET") EXTRN_MDL_FILES_SYSBASEDIR_LBCS="/lfs3/projects/hpc-wof1/ywang/regional_fv3/gfs" From 40426a771b3384ac757931f6a260e8fa6ca7ebe3 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 21 Jan 2020 12:31:02 -0700 Subject: [PATCH 190/203] Wherever a predefined grid is of type GFDLgrid, set the new parameter GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES that determines what integer resolution to use in the file names. 
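To make the effect of the new flag concrete, the selection it implies might look roughly like the sketch below. This is an assumed simplification for illustration only, not code from the repository; the resolution values are made up, and the workflow derives the equivalent global resolution elsewhere.

  GRID_GEN_METHOD="GFDLgrid"
  GFDLgrid_RES="96"
  RES_equiv="130"     # equivalent global uniform resolution (illustrative value)
  GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES="FALSE"

  if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ] && \
     [ "${GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES}" = "TRUE" ]; then
    CRES="C${GFDLgrid_RES}"      # file names carry the number of points per tile
  else
    CRES="C${RES_equiv}"         # file names carry the equivalent global resolution
  fi
  echo "CRES = $CRES"            # -> C130 with the settings above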
--- ush/set_predef_grid_params.sh | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/ush/set_predef_grid_params.sh b/ush/set_predef_grid_params.sh index 165570ba7..6342d3662 100644 --- a/ush/set_predef_grid_params.sh +++ b/ush/set_predef_grid_params.sh @@ -258,6 +258,8 @@ predefined domain: num_margin_cells_T6_top=10 GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_top )) + GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES="FALSE" + DT_ATMOS="90" LAYOUT_X="14" @@ -538,6 +540,8 @@ predefined domain: num_margin_cells_T6_top=171 GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_top )) + GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES="TRUE" + DT_ATMOS="18" LAYOUT_X="16" @@ -643,6 +647,8 @@ predefined domain: num_margin_cells_T6_top=171 GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_top )) + GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES="TRUE" + DT_ATMOS="18" LAYOUT_X="16" From d662cc0cc5f79a21d7bfeb20c4ae129f2836adb3 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 21 Jan 2020 12:36:53 -0700 Subject: [PATCH 191/203] To make testing easier, add a new coarse EMC CONUS grid (and update the name of the old EMC_CONUS grid to EMC_CONUS_3km). --- ush/set_predef_grid_params.sh | 67 +++++++++++++++++++++++++++++++++-- ush/valid_param_vals.sh | 2 +- 2 files changed, 66 insertions(+), 3 deletions(-) diff --git a/ush/set_predef_grid_params.sh b/ush/set_predef_grid_params.sh index 6342d3662..753115b13 100644 --- a/ush/set_predef_grid_params.sh +++ b/ush/set_predef_grid_params.sh @@ -480,11 +480,11 @@ predefined domain: # #----------------------------------------------------------------------- # -# EMC's CONUS grid. +# EMC's 3km CONUS grid. # #----------------------------------------------------------------------- # -"EMC_CONUS") +"EMC_CONUS_3km") if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then # Values from an EMC script. @@ -581,6 +581,69 @@ predefined domain: # #----------------------------------------------------------------------- # +# EMC's coarse (?? km) CONUS grid. +# +#----------------------------------------------------------------------- +# +"EMC_CONUS_coarse") + + if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then + + GFDLgrid_LON_T6_CTR=-97.5 + GFDLgrid_LAT_T6_CTR=38.5 + GFDLgrid_STRETCH_FAC=1.5 + GFDLgrid_RES="96" + GFDLgrid_REFINE_RATIO=2 + + num_margin_cells_T6_left=9 + GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_left + 1 )) + + num_margin_cells_T6_right=9 + GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_right )) + + num_margin_cells_T6_bottom=9 + GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_bottom + 1 )) + + num_margin_cells_T6_top=9 + GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_top )) + + GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES="TRUE" + + DT_ATMOS="100" + + LAYOUT_X="6" + LAYOUT_Y="6" + BLOCKSIZE="26" + + if [ "$QUILTING" = "TRUE" ]; then + WRTCMP_write_groups="1" + WRTCMP_write_tasks_per_group=$(( 1*LAYOUT_Y )) + WRTCMP_output_grid="rotated_latlon" + WRTCMP_cen_lon="${GFDLgrid_LON_T6_CTR}" + WRTCMP_cen_lat="${GFDLgrid_LAT_T6_CTR}" +# GSK - The following have not been tested... 
+ WRTCMP_lon_lwr_left="-25.0" + WRTCMP_lat_lwr_left="-15.0" + WRTCMP_lon_upr_rght="25.0" + WRTCMP_lat_upr_rght="15.0" + WRTCMP_dlon="0.24" + WRTCMP_dlat="0.24" + fi + + elif [ "${GRID_GEN_METHOD}" = "JPgrid" ]; then + + print_err_msg_exit "\ +The parameters for a \"${GRID_GEN_METHOD}\" type grid have not yet been specified for this +predefined domain: + PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" + GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\"" + + fi + ;; + +# +#----------------------------------------------------------------------- +# # EMC's Alaska grid. # #----------------------------------------------------------------------- diff --git a/ush/valid_param_vals.sh b/ush/valid_param_vals.sh index 22200a9e6..0a4fd93c0 100644 --- a/ush/valid_param_vals.sh +++ b/ush/valid_param_vals.sh @@ -2,7 +2,7 @@ valid_vals_RUN_ENVIR=("nco" "community") valid_vals_VERBOSE=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_MACHINE=("WCOSS_C" "WCOSS" "DELL" "THEIA" "HERA" "JET" "ODIN" "CHEYENNE") valid_vals_PREDEF_GRID_NAME=( \ -"EMC_CONUS" "EMC_AK" \ +"EMC_CONUS_3km" "EMC_CONUS_coarse" "EMC_AK" \ "GSD_HAFSV0.A3km" "GSD_HAFSV0.A13km" "GSD_HAFSV0.A25km" \ "GSD_HRRR_AK_3km" "GSD_HRRR_AK_50km" \ "GSD_HRRR3km" "GSD_HRRR13km" "GSD_HRRR25km" \ From 6382cc498f9aa1de4dbd564fbca26c3eccbe0529 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Wed, 22 Jan 2020 10:32:31 -0700 Subject: [PATCH 192/203] Changes to make the workflow work with RUN_ENVIR set to "nco", using one of EMC's grids. --- ush/config.nco.sh | 39 ++----- ush/generate_FV3SAR_wflow.sh | 67 +++++------ ush/setup.sh | 208 +++++++++++++++++++++++++---------- 3 files changed, 188 insertions(+), 126 deletions(-) diff --git a/ush/config.nco.sh b/ush/config.nco.sh index a7fc33ba9..3b37a43aa 100644 --- a/ush/config.nco.sh +++ b/ush/config.nco.sh @@ -11,9 +11,8 @@ VERBOSE="TRUE" RUN_ENVIR="nco" PREEXISTING_DIR_METHOD="rename" -EMC_GRID_NAME="conus" -GRID_GEN_METHOD="JPgrid" - +EMC_GRID_NAME="conus" # For now, this is maps to PREDEF_GRID_NAME="EMC_CONUS_coarse". +GRID_GEN_METHOD="GFDLgrid" QUILTING="TRUE" USE_CCPP="TRUE" CCPP_PHYS_SUITE="FV3_GFS_2017_gfdlmp" @@ -27,36 +26,12 @@ CYCL_HRS=( "18" ) EXTRN_MDL_NAME_ICS="FV3GFS" EXTRN_MDL_NAME_LBCS="FV3GFS" -RUN_TASK_MAKE_GRID="TRUE" -RUN_TASK_MAKE_OROG="TRUE" -RUN_TASK_MAKE_SFC_CLIMO="TRUE" +#RUN_TASK_MAKE_GRID="FALSE" +#RUN_TASK_MAKE_OROG="FALSE" +#RUN_TASK_MAKE_SFC_CLIMO="FALSE" RUN="an_experiment" COMINgfs="/scratch1/NCEPDEV/hwrf/noscrub/hafs-input/COMGFS" # Path to files from external model (FV3GFS). -STMP="/path/to/temporary/directory/stmp" # Path to temporary directory STMP. - -LAYOUT_X=50 -LAYOUT_Y=50 -BLOCKSIZE=20 - -WRTCMP_write_groups="1" -WRTCMP_write_tasks_per_group="${LAYOUT_Y}" - -WRTCMP_output_grid="lambert_conformal" -WRTCMP_PARAMS_TMPL_FN=${WRTCMP_PARAMS_TMPL_FN:-"wrtcmp_${WRTCMP_output_grid}"} - -WRTCMP_cen_lon="-97.5" -WRTCMP_cen_lat="38.5" -WRTCMP_lon_lwr_left="-122.21414225" -WRTCMP_lat_lwr_left="22.41403305" -# -# The following are used only for the case of WRTCMP_output_grid set to -# "'lambert_conformal'". -# -WRTCMP_stdlat1="${WRTCMP_cen_lat}" -WRTCMP_stdlat2="${WRTCMP_cen_lat}" -WRTCMP_nx="1738" -WRTCMP_ny="974" -WRTCMP_dx="3000.0" -WRTCMP_dy="3000.0" +#STMP="/path/to/temporary/directory/stmp" # Path to temporary directory STMP. +STMP="/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/stmp" # Path to temporary directory STMP. 
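Plugging the EMC_CONUS_coarse numbers above into the margin-cell arithmetic makes the index bounds easy to check by hand (the j-direction works out the same way, since all four margins are 9 cells):

  GFDLgrid_RES="96"
  num_margin_cells_T6_left=9
  num_margin_cells_T6_right=9
  LAYOUT_Y="6"

  GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_left + 1 ))             # = 10
  GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_right ))   # = 87
  WRTCMP_write_tasks_per_group=$(( 1*LAYOUT_Y ))                                     # = 6

  echo "i-range on tile 6: ${GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G}..${GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G}"
  echo "write tasks per group: ${WRTCMP_write_tasks_per_group}"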
diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index 151738106..ad3a078eb 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -446,39 +446,40 @@ fi # #----------------------------------------------------------------------- # -if [ "${RUN_ENVIR}" = "nco" ]; then - - glob_pattern="C*_mosaic.nc" - cd_vrfy $FIXsar - num_files=$( ls -1 ${glob_pattern} 2>/dev/null | wc -l ) - - if [ "${num_files}" -ne "1" ]; then - print_err_msg_exit "\ -Exactly one file must exist in directory FIXsar matching the globbing -pattern glob_pattern: - FIXsar = \"${FIXsar}\" - glob_pattern = \"${glob_pattern}\" - num_files = ${num_files}" - fi - - fn=$( ls -1 ${glob_pattern} ) - RES=$( printf "%s" $fn | sed -n -r -e "s/^C([0-9]*)_mosaic.nc/\1/p" ) - CRES="C$RES" -echo "RES = $RES" - -# RES_equiv=$( ncdump -h "${grid_fn}" | grep -o ":RES_equiv = [0-9]\+" | grep -o "[0-9]") -# RES_equiv=${RES_equiv//$'\n'/} -#printf "%s\n" "RES_equiv = $RES_equiv" -# CRES_equiv="C${RES_equiv}" -#printf "%s\n" "CRES_equiv = $CRES_equiv" +## Is this if-statement still necessary? +#if [ "${RUN_ENVIR}" = "nco" ]; then # -# RES="$RES_equiv" -# CRES="$CRES_equiv" - - set_file_param "${GLOBAL_VAR_DEFNS_FP}" "RES" "${RES}" - set_file_param "${GLOBAL_VAR_DEFNS_FP}" "CRES" "${CRES}" - -fi +# glob_pattern="C*_mosaic.nc" +# cd_vrfy $FIXsar +# num_files=$( ls -1 ${glob_pattern} 2>/dev/null | wc -l ) +# +# if [ "${num_files}" -ne "1" ]; then +# print_err_msg_exit "\ +#Exactly one file must exist in directory FIXsar matching the globbing +#pattern glob_pattern: +# FIXsar = \"${FIXsar}\" +# glob_pattern = \"${glob_pattern}\" +# num_files = ${num_files}" +# fi +# +# fn=$( ls -1 ${glob_pattern} ) +# RES=$( printf "%s" $fn | sed -n -r -e "s/^C([0-9]*)_mosaic.nc/\1/p" ) +# CRES="C$RES" +#echo "RES = $RES" +# +## RES_equiv=$( ncdump -h "${grid_fn}" | grep -o ":RES_equiv = [0-9]\+" | grep -o "[0-9]") +## RES_equiv=${RES_equiv//$'\n'/} +##printf "%s\n" "RES_equiv = $RES_equiv" +## CRES_equiv="C${RES_equiv}" +##printf "%s\n" "CRES_equiv = $CRES_equiv" +## +## RES="$RES_equiv" +## CRES="$CRES_equiv" +# +# set_file_param "${GLOBAL_VAR_DEFNS_FP}" "RES" "${RES}" +# set_file_param "${GLOBAL_VAR_DEFNS_FP}" "CRES" "${CRES}" +# +#fi # #----------------------------------------------------------------------- # @@ -774,7 +775,7 @@ rm -f "${log_fp}" # temporary file and read them in outside the subshell later below. # { -generate_FV3SAR_wflow 2>&1 +generate_FV3SAR_wflow 2>&1 # If this exits with an error, the whole {...} group quits, so things don't work... retval=$? echo "$EXPTDIR" >> "${tmp_fp}" echo "$retval" >> "${tmp_fp}" diff --git a/ush/setup.sh b/ush/setup.sh index 13c956ea5..6a5405bd0 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -372,32 +372,55 @@ check_var_valid_value "GTYPE" "valid_vals_GTYPE" # #----------------------------------------------------------------------- # -# Make sure PREDEF_GRID_NAME is set to a valid value. +# If running in NCO mode, a valid EMC grid must be specified. Make sure +# EMC_GRID_NAME is set to a valid value. +# +# Note: It is probably best to eventually eliminate EMC_GRID_NAME as a +# user-specified variable and just go with PREDEF_GRID_NAME. # #----------------------------------------------------------------------- # -if [ ! 
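The caveat added in the comment above ("If this exits with an error, the whole {...} group quits") comes down to the difference between a function that returns a nonzero status and one that calls exit. A self-contained demonstration with made-up function names:

  tmp_fp=$(mktemp)

  fails_by_returning() { return 1; }
  fails_by_exiting()   { exit 1; }

  {
  fails_by_returning 2>&1
  retval=$?
  echo "$retval" >> "${tmp_fp}"     # reached: "1" gets recorded
  }

  (
  {
  fails_by_exiting 2>&1
  retval=$?
  echo "$retval" >> "${tmp_fp}"     # never reached: the exit terminates this (sub)shell first
  }
  )

  cat "${tmp_fp}"                   # prints only the "1" from the returning case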
-z ${PREDEF_GRID_NAME} ]; then +if [ "${RUN_ENVIR}" = "nco" ]; then err_msg="\ -The predefined regional grid specified in PREDEF_GRID_NAME is not sup- -ported: - PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\"" +The EMC grid specified in EMC_GRID_NAME is not supported: + EMC_GRID_NAME = \"${EMC_GRID_NAME}\"" check_var_valid_value \ - "PREDEF_GRID_NAME" "valid_vals_PREDEF_GRID_NAME" "${err_msg}" + "EMC_GRID_NAME" "valid_vals_EMC_GRID_NAME" "${err_msg}" fi # +# Map the specified EMC grid to one of the predefined grids. +# +case "${EMC_GRID_NAME}" in + "ak") + PREDEF_GRID_NAME="EMC_AK" + ;; + "conus") + PREDEF_GRID_NAME="EMC_CONUS_coarse" + ;; + "conus_orig") + PREDEF_GRID_NAME="EMC_CONUS_3km" + ;; + "guam"|"hi"|"pr") + print_err_msg_exit "\ +A predefined grid (PREDEF_GRID_NAME) has not yet been defined for this +EMC grid (EMC_GRID_NAME): + EMC_GRID_NAME = \"${EMC_GRID_NAME}\"" + ;; +esac +# #----------------------------------------------------------------------- # -# If running in NCO mode, a valid EMC grid must be specified. Make sure -# EMC_GRID_NAME is set to a valid value. +# Make sure PREDEF_GRID_NAME is set to a valid value. # #----------------------------------------------------------------------- # -if [ "${RUN_ENVIR}" = "nco" ]; then +if [ ! -z ${PREDEF_GRID_NAME} ]; then err_msg="\ -The EMC grid specified in EMC_GRID_NAME is not supported: - EMC_GRID_NAME = \"${EMC_GRID_NAME}\"" +The predefined regional grid specified in PREDEF_GRID_NAME is not sup- +ported: + PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\"" check_var_valid_value \ - "EMC_GRID_NAME" "valid_vals_EMC_GRID_NAME" "${err_msg}" + "PREDEF_GRID_NAME" "valid_vals_PREDEF_GRID_NAME" "${err_msg}" fi # #----------------------------------------------------------------------- @@ -906,7 +929,22 @@ LOGDIR="${EXPTDIR}/log" # if [ "${RUN_ENVIR}" = "nco" ]; then FIXam="${FIXrrfs}/fix_am" - FIXsar="${FIXrrfs}/fix_sar" +# +# Important note: +# In "nco" mode, it is assumed that in the build step, a symlink is cre- +# ated at ${FIXrrfs}/fix_sar whose target is the system disk under +# which the fixed grid, orography, and surface climatology files are +# located. For example, from the ${FIXrrfs}/fix_sar directory, an +# "ls -alF fix_sar" might show +# +# > ls -alF fix_sar +# lrwxrwxrwx 1 Gerard.Ketefian det 62 Dec 20 15:43 fix_sar -> /scratch2/NCEPDEV/fv3-cam/emc.campara/fix_fv3cam/fix_sar/ +# +# FIXsar="${FIXrrfs}/fix_sar" + FIXsar="${FIXrrfs}/fix_sar/${EMC_GRID_NAME}" +echo "AAAAAAAAAAAAAAAAAAAAAAAA" +echo "FIXsar = \"${FIXsar}\"" +echo "BBBBBBBBBBBBBBBBBBBBBBBB" COMROOT="$PTMP/com" else FIXam="${EXPTDIR}/fix_am" @@ -1472,25 +1510,71 @@ fi # #----------------------------------------------------------------------- # +# +# +#----------------------------------------------------------------------- +# +# Is this if-statement still necessary? 
+if [ "${RUN_ENVIR}" = "nco" ]; then + + glob_pattern="C*_mosaic.nc" + cd_vrfy $FIXsar + num_files=$( ls -1 ${glob_pattern} 2>/dev/null | wc -l ) + + if [ "${num_files}" -ne "1" ]; then + print_err_msg_exit "\ +Exactly one file must exist in directory FIXsar matching the globbing +pattern glob_pattern: + FIXsar = \"${FIXsar}\" + glob_pattern = \"${glob_pattern}\" + num_files = ${num_files}" + fi + + fn=$( ls -1 ${glob_pattern} ) + RES_IN_FIXSAR_FILENAMES=$( printf "%s" $fn | sed -n -r -e "s/^C([0-9]*)_mosaic.nc/\1/p" ) +echo "RES_IN_FIXSAR_FILENAMES = ${RES_IN_FIXSAR_FILENAMES}" + + if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ] && \ + [ "${GFDLgrid_RES}" -ne "${RES_IN_FIXSAR_FILENAMES}" ]; then + print_err_msg_exit "\ +The resolution extracted from the fixed file names (RES_IN_FIXSAR_FILENAMES) +does not match the resolution specified by GFDLgrid_RES: + GFDLgrid_RES = ${GFDLgrid_RES} + RES_IN_FIXSAR_FILENAMES = ${RES_IN_FIXSAR_FILENAMES}" + fi + +# RES_equiv=$( ncdump -h "${grid_fn}" | grep -o ":RES_equiv = [0-9]\+" | grep -o "[0-9]") +# RES_equiv=${RES_equiv//$'\n'/} +#printf "%s\n" "RES_equiv = $RES_equiv" +# CRES_equiv="C${RES_equiv}" +#printf "%s\n" "CRES_equiv = $CRES_equiv" +# +# RES="$RES_equiv" +# CRES="$CRES_equiv" + +else +# +#----------------------------------------------------------------------- +# # If the grid file generation task in the workflow is going to be # skipped (because pregenerated files are available), create links in # the FIXsar directory to the pregenerated grid files. # #----------------------------------------------------------------------- # -res_in_grid_fns="" -if [ "${RUN_TASK_MAKE_GRID}" = "FALSE" ]; then + res_in_grid_fns="" + if [ "${RUN_TASK_MAKE_GRID}" = "FALSE" ]; then - link_fix \ - verbose="$VERBOSE" \ - file_group="grid" \ - output_varname_res_in_filenames="res_in_grid_fns" || \ - print_err_msg_exit "\ -Call to function to create links to grid files failed." + link_fix \ + verbose="$VERBOSE" \ + file_group="grid" \ + output_varname_res_in_filenames="res_in_grid_fns" || \ + print_err_msg_exit "\ + Call to function to create links to grid files failed." - RES_IN_FIXSAR_FILENAMES="${res_in_grid_fns}" + RES_IN_FIXSAR_FILENAMES="${res_in_grid_fns}" -fi + fi # #----------------------------------------------------------------------- # @@ -1500,29 +1584,29 @@ fi # #----------------------------------------------------------------------- # -res_in_orog_fns="" -if [ "${RUN_TASK_MAKE_OROG}" = "FALSE" ]; then - - link_fix \ - verbose="$VERBOSE" \ - file_group="orog" \ - output_varname_res_in_filenames="res_in_orog_fns" || \ - print_err_msg_exit "\ -Call to function to create links to orography files failed." + res_in_orog_fns="" + if [ "${RUN_TASK_MAKE_OROG}" = "FALSE" ]; then - if [ ! -z "${RES_IN_FIXSAR_FILENAMES}" ] && \ - [ "${res_in_orog_fns}" -ne "${RES_IN_FIXSAR_FILENAMES}" ]; then + link_fix \ + verbose="$VERBOSE" \ + file_group="orog" \ + output_varname_res_in_filenames="res_in_orog_fns" || \ print_err_msg_exit "\ -The resolution extracted from the orography file names (res_in_orog_fns) -does not match the resolution in other groups of files already consi- -dered (RES_IN_FIXSAR_FILENAMES): - res_in_orog_fns = ${res_in_orog_fns} - RES_IN_FIXSAR_FILENAMES = ${RES_IN_FIXSAR_FILENAMES}" - else - RES_IN_FIXSAR_FILENAMES="${res_in_orog_fns}" - fi + Call to function to create links to orography files failed." -fi + if [ ! 
-z "${RES_IN_FIXSAR_FILENAMES}" ] && \ + [ "${res_in_orog_fns}" -ne "${RES_IN_FIXSAR_FILENAMES}" ]; then + print_err_msg_exit "\ + The resolution extracted from the orography file names (res_in_orog_fns) + does not match the resolution in other groups of files already consi- + dered (RES_IN_FIXSAR_FILENAMES): + res_in_orog_fns = ${res_in_orog_fns} + RES_IN_FIXSAR_FILENAMES = ${RES_IN_FIXSAR_FILENAMES}" + else + RES_IN_FIXSAR_FILENAMES="${res_in_orog_fns}" + fi + + fi # #----------------------------------------------------------------------- # @@ -1533,26 +1617,28 @@ fi # #----------------------------------------------------------------------- # -res_in_sfc_climo_fns="" -if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "FALSE" ]; then - - link_fix \ - verbose="$VERBOSE" \ - file_group="sfc_climo" \ - output_varname_res_in_filenames="res_in_sfc_climo_fns" || \ - print_err_msg_exit "\ -Call to function to create links to surface climatology files failed." + res_in_sfc_climo_fns="" + if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "FALSE" ]; then - if [ ! -z "${RES_IN_FIXSAR_FILENAMES}" ] && \ - [ "${res_in_sfc_climo_fns}" -ne "${RES_IN_FIXSAR_FILENAMES}" ]; then + link_fix \ + verbose="$VERBOSE" \ + file_group="sfc_climo" \ + output_varname_res_in_filenames="res_in_sfc_climo_fns" || \ print_err_msg_exit "\ -The resolution extracted from the surface climatology file names (res_- -in_sfc_climo_fns) does not match the resolution in other groups of files -already considered (RES_IN_FIXSAR_FILENAMES): - res_in_sfc_climo_fns = ${res_in_sfc_climo_fns} - RES_IN_FIXSAR_FILENAMES = ${RES_IN_FIXSAR_FILENAMES}" - else - RES_IN_FIXSAR_FILENAMES="${res_in_sfc_climo_fns}" + Call to function to create links to surface climatology files failed." + + if [ ! -z "${RES_IN_FIXSAR_FILENAMES}" ] && \ + [ "${res_in_sfc_climo_fns}" -ne "${RES_IN_FIXSAR_FILENAMES}" ]; then + print_err_msg_exit "\ + The resolution extracted from the surface climatology file names (res_- + in_sfc_climo_fns) does not match the resolution in other groups of files + already considered (RES_IN_FIXSAR_FILENAMES): + res_in_sfc_climo_fns = ${res_in_sfc_climo_fns} + RES_IN_FIXSAR_FILENAMES = ${RES_IN_FIXSAR_FILENAMES}" + else + RES_IN_FIXSAR_FILENAMES="${res_in_sfc_climo_fns}" + fi + fi fi From d03eeac04c2f2c80baecb8b79d6b17907d183fcf Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 23 Jan 2020 13:48:23 -0700 Subject: [PATCH 193/203] Update the task/script description at the beginning of the script. --- jobs/JREGIONAL_GET_EXTRN_FILES | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/jobs/JREGIONAL_GET_EXTRN_FILES b/jobs/JREGIONAL_GET_EXTRN_FILES index 5568871f6..5b447d7af 100755 --- a/jobs/JREGIONAL_GET_EXTRN_FILES +++ b/jobs/JREGIONAL_GET_EXTRN_FILES @@ -4,17 +4,19 @@ #----------------------------------------------------------------------- # # This script gets either from the system directory or from mass store -# (HPSS) the files generated by the external model (specified by the va- -# riable EXTRN_MDL_NAME) for either the initial conditions (ICs) or the +# (HPSS) the files generated by the external model (specified by the +# variable EXTRN_MDL_NAME) for either the initial conditions (ICs) or the # lateral boundary conditions (LBCs). Which of these we are considering -# depends on the value of the variable ICS_OR_LBCS. Also, when we -# refer to ICs, we are also referring to the surface fields and the 0-th -# hour LBC, and when we refer to LBCs, we are referring to the LBCs ex- -# cluding the one at 0-th hour. 
If considering ICs, this script places -# these external model files in a subdirectory under the one specified -# by the variable EXTRN_MDL_FILES_BASEDIR_ICS, and if considering -# LBCs, it places the files in a subdirectory under the one specified by -# the variable EXTRN_MDL_FILES_BASEDIR_LBCS. +# depends on the value of the variable ICS_OR_LBCS, which should be +# defined in the environment (when calling this script from a rocoto +# workflow, the workflow should define this variable, e.g. using rocoto's +# tag). +# +# Note that when we refer to ICs, we are referring to not only the +# atmospheric fields at the initial time but also various surface fields +# (which are for now time-independent) as well as the 0-th forecast hour +# LBCs. Also, when we refer to LBCs, we are referring to the LBCs excluding +# the one at the 0-th hour. # #----------------------------------------------------------------------- # From 7ebc8ad57b748ad61bd657f60b9591df3975c923 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 23 Jan 2020 13:58:36 -0700 Subject: [PATCH 194/203] For clarity, rename the variables FIXam_FILES_SYSDIR and FIXam_FILES_EXPTDIR to FIXgsm_FILENAMES and FIXam_FILENAMES, respectively. --- scripts/exregional_run_fcst.sh | 18 +++++++++------ ush/config_defaults.sh | 4 ++-- ush/generate_FV3SAR_wflow.sh | 42 +++++++++++++++++----------------- ush/setup.sh | 18 +++++++-------- 4 files changed, 43 insertions(+), 39 deletions(-) diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index 475feef2e..dd9f88575 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -356,16 +356,20 @@ files in the FIXam directory..." # If running in "nco" mode, FIXam is simply a symlink under the workflow # directory that points to the system directory containing the fix # files. The files in this system directory are named as listed in the -# FIXam_FILES_SYSDIR array. Thus, that is the array to use to form the -# names of the link targets, but the names of the symlinks themselves -# must be as specified in the FIXam_FILES_EXPTDIR array (because that +# FIXgsm_FILENAMES array. Thus, that is the array to use to form the +# names of the targets of the symlinks, but the names of the symlinks themselves +# must be as specified in the FIXam_FILENAMES array (because that # array contains the file names that FV3 looks for). # if [ "${RUN_ENVIR}" = "nco" ]; then for (( i=0; i<${NUM_FIXam_FILES}; i++ )); do - ln_vrfy -sf $FIXam/${FIXam_FILES_SYSDIR[$i]} \ - ${CYCLE_DIR}/${FIXam_FILES_EXPTDIR[$i]} +# Note: Can link directly to files in FIXgsm without needing a local +# FIXam directory, i.e. use +# ln_vrfy -sf $FIXgsm/${FIXgsm_FILENAMES[$i]} \ +# ${CYCLE_DIR}/${FIXam_FILENAMES[$i]} + ln_vrfy -sf $FIXam/${FIXgsm_FILENAMES[$i]} \ + ${CYCLE_DIR}/${FIXam_FILENAMES[$i]} done # # If not running in "nco" mode, FIXam is an actual directory (not a sym- @@ -373,13 +377,13 @@ if [ "${RUN_ENVIR}" = "nco" ]; then # system fix directory except that the files have been renamed to the # file names that FV3 looks for. Thus, when creating links to the files # in this directory, both the target and symlink names should be the -# ones specified in the FIXam_FILES_EXPTDIR array (because that array +# ones specified in the FIXam_FILENAMES array (because that array # contains the file names that FV3 looks for). 
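# For example (illustrative only), for the first file listed in the
# FIXam_FILENAMES array in config_defaults.sh, the loop below would create a
# link along the lines of
#
#   ln_vrfy -sf --relative $FIXam/CFSR.SEAICE.1982.2012.monthly.clim.grb ${CYCLE_DIR}
#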
# else for (( i=0; i<${NUM_FIXam_FILES}; i++ )); do - ln_vrfy -sf --relative $FIXam/${FIXam_FILES_EXPTDIR[$i]} ${CYCLE_DIR} + ln_vrfy -sf --relative $FIXam/${FIXam_FILENAMES[$i]} ${CYCLE_DIR} done fi diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index a2b486331..d8e4f5632 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -661,7 +661,7 @@ SFC_CLIMO_DIR="/path/to/pregenerated/surface/climo/files" # #----------------------------------------------------------------------- # -FIXam_FILES_SYSDIR=( \ +FIXgsm_FILENAMES=( \ "CFSR.SEAICE.1982.2012.monthly.clim.grb" \ "RTGSST.1982.2012.monthly.clim.grb" \ "seaice_newland.grb" \ @@ -699,7 +699,7 @@ FIXam_FILES_SYSDIR=( \ ) # "global_o3prdlos.f77" \ -FIXam_FILES_EXPTDIR=( \ +FIXam_FILENAMES=( \ "CFSR.SEAICE.1982.2012.monthly.clim.grb" \ "RTGSST.1982.2012.monthly.clim.grb" \ "seaice_newland.grb" \ diff --git a/ush/generate_FV3SAR_wflow.sh b/ush/generate_FV3SAR_wflow.sh index ad3a078eb..d3dc0dcdd 100755 --- a/ush/generate_FV3SAR_wflow.sh +++ b/ush/generate_FV3SAR_wflow.sh @@ -282,7 +282,7 @@ cd_vrfy - #----------------------------------------------------------------------- # # Make sure that the correct ozone production/loss fixed file is speci- -# fied in the array FIXam_FILES_SYSDIR. There should be two such files +# fied in the array FIXgsm_FILENAMES. There should be two such files # on disk in the system directory specified in FIXgsm. They are named # # ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77 @@ -299,31 +299,31 @@ cd_vrfy - # (CCPP_PHYS_SUITE). The GFS physics suite uses the after-2015 parame- # terization, while the GSD physics suite uses the 2015 parameteriza- # tion. Thus, we must ensure that the ozone production/loss fixed file -# listed in the array FIXam_FILES_SYSDIR is the correct one for the gi- +# listed in the array FIXgsm_FILENAMES is the correct one for the gi- # ven physics suite. We do this below as follows. # -# First, note that FIXam_FILES_SYSDIR should contain the name of exactly +# First, note that FIXgsm_FILENAMES should contain the name of exactly # one of the ozone production/loss fixed files listed above. We verify # this by trying to obtain the indices of the elements of FIXam_FILES_- # SYSDIR that contain the two files. One of these indices should not # exist while the other one should. If the 2015 file is the one that is -# found in FIXam_FILES_SYSDIR, then if we're using GFS physics, we -# change that element in FIXam_FILES_SYSDIR to the name of the after- +# found in FIXgsm_FILENAMES, then if we're using GFS physics, we +# change that element in FIXgsm_FILENAMES to the name of the after- # 2015 file. Similarly, if the after-2015 file is the one that is found -# in FIXam_FILES_SYSDIR, then if we're using GSD physics, we change that -# element in FIXam_FILES_SYSDIR to the name of the 2015 file. If +# in FIXgsm_FILENAMES, then if we're using GSD physics, we change that +# element in FIXgsm_FILENAMES to the name of the 2015 file. If # neither file or more than one ozone production/loss file is found in -# FIXam_FILES_SYSDIR, we print out an error message and exit. +# FIXgsm_FILENAMES, we print out an error message and exit. 
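#
# In other words, the correspondence that the checks below are meant to
# enforce is:
#
#   CCPP_PHYS_SUITE = "FV3_GFS_2017_gfdlmp"           --> "global_o3prdlos.f77"
#   CCPP_PHYS_SUITE = "FV3_GSD_v0" or "FV3_GSD_SAR"   --> "ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77"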
# #----------------------------------------------------------------------- # ozphys_2015_fn="ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77" -indx_ozphys_2015=$( get_elem_inds "FIXam_FILES_SYSDIR" "${ozphys_2015_fn}" ) +indx_ozphys_2015=$( get_elem_inds "FIXgsm_FILENAMES" "${ozphys_2015_fn}" ) read -a indx_ozphys_2015 <<< ${indx_ozphys_2015} num_files_ozphys_2015=${#indx_ozphys_2015[@]} ozphys_after2015_fn="global_o3prdlos.f77" -indx_ozphys_after2015=$( get_elem_inds "FIXam_FILES_SYSDIR" "${ozphys_after2015_fn}" ) +indx_ozphys_after2015=$( get_elem_inds "FIXgsm_FILENAMES" "${ozphys_after2015_fn}" ) read -a indx_ozphys_after2015 <<< ${indx_ozphys_after2015} num_files_ozphys_after2015=${#indx_ozphys_after2015[@]} @@ -331,7 +331,7 @@ if [ ${num_files_ozphys_2015} -eq 1 ] && \ [ ${num_files_ozphys_after2015} -eq 0 ]; then if [ "${CCPP_PHYS_SUITE}" = "FV3_GFS_2017_gfdlmp" ]; then - FIXam_FILES_SYSDIR[${indx_ozphys_2015}]="${ozphys_after2015_fn}" + FIXgsm_FILENAMES[${indx_ozphys_2015}]="${ozphys_after2015_fn}" fi elif [ ${num_files_ozphys_2015} -eq 0 ] && \ @@ -339,14 +339,14 @@ elif [ ${num_files_ozphys_2015} -eq 0 ] && \ if [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_v0" ] || \ [ "${CCPP_PHYS_SUITE}" = "FV3_GSD_SAR" ]; then - FIXam_FILES_SYSDIR[${indx_ozphys_after2015}]="${ozphys_2015_fn}" + FIXgsm_FILENAMES[${indx_ozphys_after2015}]="${ozphys_2015_fn}" fi else - FIXam_FILES_SYSDIR_str=$( printf "\"%s\"\n" "${FIXam_FILES_SYSDIR[@]}" ) + FIXgsm_FILENAMES_str=$( printf "\"%s\"\n" "${FIXgsm_FILENAMES[@]}" ) print_err_msg_exit "\ -The array FIXam_FILES_SYSDIR containing the names of the fixed files in +The array FIXgsm_FILENAMES containing the names of the fixed files in the system directory (FIXgsm) to copy or link to has been specified in- correctly because it contains no or more than one occurrence of the ozone production/loss file(s) (whose names are specified in the varia- @@ -356,11 +356,11 @@ bles ozphys_2015_fn and ozphys_after2015_fn): num_files_ozphys_2015_fn = \"${num_files_ozphys_2015_fn}\" ozphys_after2015_fn = \"${ozphys_after2015_fn}\" num_files_ozphys_after2015_fn = \"${num_files_ozphys_after2015_fn}\" - FIXam_FILES_SYSDIR = + FIXgsm_FILENAMES = ( -${FIXam_FILES_SYSDIR_str} +${FIXgsm_FILENAMES_str} ) -Please check the contents of the FIXam_FILES_SYSDIR array and rerun." +Please check the contents of the FIXgsm_FILENAMES array and rerun." fi # @@ -490,8 +490,8 @@ fi #----------------------------------------------------------------------- # -# For nco, we assume the following copy operation is done beforehand, but -# that can be changed. +# In NCO mode, we assume the following copy operation is done beforehand, +# but that can be changed. if [ "${RUN_ENVIR}" != "nco" ]; then print_info_msg "$VERBOSE" " @@ -502,8 +502,8 @@ Copying fixed files from system directory to the experiment directory..." 
cp_vrfy $FIXgsm/global_hyblev.l65.txt $FIXam for (( i=0; i<${NUM_FIXam_FILES}; i++ )); do - cp_vrfy $FIXgsm/${FIXam_FILES_SYSDIR[$i]} \ - $FIXam/${FIXam_FILES_EXPTDIR[$i]} + cp_vrfy $FIXgsm/${FIXgsm_FILENAMES[$i]} \ + $FIXam/${FIXam_FILENAMES[$i]} done fi diff --git a/ush/setup.sh b/ush/setup.sh index 6a5405bd0..ae7868868 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -1850,18 +1850,18 @@ NUM_NODES=$(( (PE_MEMBER01 + NCORES_PER_NODE - 1)/NCORES_PER_NODE )) # #----------------------------------------------------------------------- # -num_fixam_files_sysdir="${#FIXam_FILES_SYSDIR[@]}" -num_fixam_files_exptdir="${#FIXam_FILES_EXPTDIR[@]}" -if [ "${num_fixam_files_sysdir}" -ne "${num_fixam_files_exptdir}" ]; then +num_fixgsm_files="${#FIXgsm_FILENAMES[@]}" +num_fixam_files="${#FIXam_FILENAMES[@]}" +if [ "${num_fixgsm_files}" -ne "${num_fixam_files}" ]; then print_err_msg_exit "\ -The number of fixed files specified in FIXam_FILES_SYSDIR must be equal -to that specified in FIXam_FILES_EXPTDIR: - num_fixam_files_sysdir = ${num_fixam_files_sysdir} - num_fixam_files_exptdir = ${num_fixam_files_exptdir}" +The number of fixed files specified in the array FIXgsm_FILENAMES +(num_fixgsm_files) must be equal to that specified in the array FIXam_FILENAMES +(num_fixam_files): + num_fixgsm_files = ${num_fixgsm_files} + num_fixam_files = ${num_fixam_files}" else - NUM_FIXam_FILES="${num_fixam_files_sysdir}" + NUM_FIXam_FILES="${num_fixam_files}" fi - # #----------------------------------------------------------------------- # From 332e8346099c7223be44eb0040f434e768c410e0 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 23 Jan 2020 14:05:16 -0700 Subject: [PATCH 195/203] Change the valid values that PRINT_ESMF can take on so that there is no "." at the beginning and end. The "."s are now added in the exregional_run_fcst.sh script before writing the value to the forecast model namelist file. Also, make sure in setup.sh that PRINT_ESMF has been set to a valid value. --- scripts/exregional_run_fcst.sh | 3 ++- ush/config_defaults.sh | 2 +- ush/setup.sh | 20 ++++++++++++++++++++ ush/valid_param_vals.sh | 1 + 4 files changed, 24 insertions(+), 2 deletions(-) diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index dd9f88575..2c34ec2f7 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -489,6 +489,7 @@ Setting parameters in file: model_config_fp = \"${model_config_fp}\"" dot_quilting_dot="."${QUILTING,,}"." +dot_print_esmf_dot="."${PRINT_ESMF,,}"." set_file_param "${model_config_fp}" "PE_MEMBER01" "${PE_MEMBER01}" set_file_param "${model_config_fp}" "dt_atmos" "${DT_ATMOS}" @@ -499,7 +500,7 @@ set_file_param "${model_config_fp}" "start_hour" "$HH" set_file_param "${model_config_fp}" "nhours_fcst" "${FCST_LEN_HRS}" set_file_param "${model_config_fp}" "ncores_per_node" "${NCORES_PER_NODE}" set_file_param "${model_config_fp}" "quilting" "${dot_quilting_dot}" -set_file_param "${model_config_fp}" "print_esmf" "${PRINT_ESMF}" +set_file_param "${model_config_fp}" "print_esmf" "${dot_print_esmf_dot}" # #----------------------------------------------------------------------- # diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index d8e4f5632..40ffca5a3 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -602,7 +602,7 @@ BLOCKSIZE="24" #----------------------------------------------------------------------- # QUILTING="TRUE" -PRINT_ESMF=".false." 
+PRINT_ESMF="FALSE" WRTCMP_write_groups="1" WRTCMP_write_tasks_per_group="20" diff --git a/ush/setup.sh b/ush/setup.sh index ae7868868..d47a3e561 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -1683,6 +1683,26 @@ fi # #----------------------------------------------------------------------- # +# Make sure that PRINT_ESMF is set to a valid value. +# +#----------------------------------------------------------------------- +# +check_var_valid_value "PRINT_ESMF" "valid_vals_PRINT_ESMF" +# +# Set PRINT_ESMF to either "TRUE" or "FALSE" so we don't have to consider +# other valid values later on. +# +PRINT_ESMF=${PRINT_ESMF^^} +if [ "${PRINT_ESMF}" = "TRUE" ] || \ + [ "${PRINT_ESMF}" = "YES" ]; then + PRINT_ESMF="TRUE" +elif [ "${PRINT_ESMF}" = "FALSE" ] || \ + [ "${PRINT_ESMF}" = "NO" ]; then + PRINT_ESMF="FALSE" +fi +# +#----------------------------------------------------------------------- +# # Calculate PE_MEMBER01. This is the number of MPI tasks used for the # forecast, including those for the write component if QUILTING is set # to "TRUE". diff --git a/ush/valid_param_vals.sh b/ush/valid_param_vals.sh index 0a4fd93c0..9de33e4e6 100644 --- a/ush/valid_param_vals.sh +++ b/ush/valid_param_vals.sh @@ -23,4 +23,5 @@ valid_vals_RUN_TASK_MAKE_GRID=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "n valid_vals_RUN_TASK_MAKE_OROG=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_RUN_TASK_MAKE_SFC_CLIMO=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_QUILTING=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") +valid_vals_PRINT_ESMF=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_USE_CRON_TO_RELAUNCH=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") From 9e5a77765d6c74697c173a7a74cc4708f47eee37 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 23 Jan 2020 14:10:25 -0700 Subject: [PATCH 196/203] Eliminate WRTCMP_PARAMS_TMPL_FN as a user-configurable experiment variable. Since we only need this in order to set WRTCMP_PARAMS_TMPL_FP (note the FP at the end, not FN), make WRTCMP_PARAMS_FN a local variable (in lowercase) in setup.sh. --- ush/config_defaults.sh | 1 - ush/setup.sh | 70 +++++++++++++++--------------------------- 2 files changed, 24 insertions(+), 47 deletions(-) diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index 40ffca5a3..c8296421e 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -162,7 +162,6 @@ NEMS_CONFIG_FN="nems.configure" WFLOW_XML_FN="FV3SAR_wflow.xml" GLOBAL_VAR_DEFNS_FN="var_defns.sh" -WRTCMP_PARAMS_TMPL_FN="" WFLOW_LAUNCH_SCRIPT_FN="launch_FV3SAR_wflow.sh" WFLOW_LAUNCH_LOG_FN="log.launch_FV3SAR_wflow" # diff --git a/ush/setup.sh b/ush/setup.sh index d47a3e561..3df4c4e9d 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -841,32 +841,6 @@ fi # #----------------------------------------------------------------------- # -# If quilting is enabled, set the name of the template file containing -# placeholder values for write-component parameters (if this file name -# is not already set). This file will be appended to the model_confi- -# gure file, and placeholder values will be replaced with actual ones. -# -#----------------------------------------------------------------------- -# -if [ "$QUILTING" = "TRUE" ]; then -# -# First, make sure that WRTCMP_output_grid is set to a valid value. 
-# - err_msg="\ -The coordinate system used by the write-component output grid specified -in WRTCMP_output_grid is not supported: - WRTCMP_output_grid = \"${WRTCMP_output_grid}\"" - check_var_valid_value \ - "WRTCMP_output_grid" "valid_vals_WRTCMP_output_grid" "${err_msg}" -# -# Now set the name of the write-component template file. -# - WRTCMP_PARAMS_TMPL_FN=${WRTCMP_PARAMS_TMPL_FN:-"wrtcmp_${WRTCMP_output_grid}"} - -fi -# -#----------------------------------------------------------------------- -# # For a "GFDLgrid" type of grid, make sure GFDLgrid_RES is set to a va- # lid value. # @@ -942,9 +916,6 @@ if [ "${RUN_ENVIR}" = "nco" ]; then # # FIXsar="${FIXrrfs}/fix_sar" FIXsar="${FIXrrfs}/fix_sar/${EMC_GRID_NAME}" -echo "AAAAAAAAAAAAAAAAAAAAAAAA" -echo "FIXsar = \"${FIXsar}\"" -echo "BBBBBBBBBBBBBBBBBBBBBBBB" COMROOT="$PTMP/com" else FIXam="${EXPTDIR}/fix_am" @@ -1778,29 +1749,36 @@ fi # #----------------------------------------------------------------------- # -# If the write component is going to be used to write output files (i.e. -# if QUILTING is set to "TRUE"), first make sure that a name is speci- -# fied for the template file containing the write-component output grid -# parameters. (This template file will be concatenated to the NEMS con- -# figuration file specified in MODEL_CONFIG_FN.) If so, set the full -# path to the file and make sure that the file exists. +# Initialize the full path to the template file containing placeholder +# values for the write component parameters. Then, if the write component +# is going to be used to write output files to disk (i.e. if QUILTING is +# set to "TRUE"), set the full path to this file. This file will be +# appended to the NEMS configuration file (MODEL_CONFIG_FN), and placeholder +# values will be replaced with actual ones. # #----------------------------------------------------------------------- # WRTCMP_PARAMS_TMPL_FP="" if [ "$QUILTING" = "TRUE" ]; then - - if [ -z "${WRTCMP_PARAMS_TMPL_FN}" ]; then - print_err_msg_exit "\ -The write-component template file name (WRTCMP_PARAMS_TMPL_FN) must be -set to a non-empty value when quilting (i.e. the write-component) is -enabled: - QUILTING = \"$QUILTING\" - WRTCMP_PARAMS_TMPL_FN = \"${WRTCMP_PARAMS_TMPL_FN}\"" - fi - - WRTCMP_PARAMS_TMPL_FP="${TEMPLATE_DIR}/${WRTCMP_PARAMS_TMPL_FN}" +# +# First, make sure that WRTCMP_output_grid is set to a valid value. +# + err_msg="\ +The coordinate system used by the write-component output grid specified +in WRTCMP_output_grid is not supported: + WRTCMP_output_grid = \"${WRTCMP_output_grid}\"" + check_var_valid_value \ + "WRTCMP_output_grid" "valid_vals_WRTCMP_output_grid" "${err_msg}" +# +# Now set the name of the write-component template file. +# + wrtcmp_params_tmpl_fn=${wrtcmp_params_tmpl_fn:-"wrtcmp_${WRTCMP_output_grid}"} +# +# Finally, set the full path to the write component template file and +# make sure that the file exists. +# + WRTCMP_PARAMS_TMPL_FP="${TEMPLATE_DIR}/${wrtcmp_params_tmpl_fn}" if [ ! -f "${WRTCMP_PARAMS_TMPL_FP}" ]; then print_err_msg_exit "\ The write-component template file does not exist or is not a file: From 69fc8c3d9f96e790b59aa0e54078c81578e64f7c Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Thu, 23 Jan 2020 14:15:04 -0700 Subject: [PATCH 197/203] Update experiment variable descriptions in config_defaults.sh. 
--- ush/config_defaults.sh | 707 ++++++++++++++++++++++++----------------- 1 file changed, 409 insertions(+), 298 deletions(-) diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index c8296421e..c5d38bfe6 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -1,11 +1,35 @@ -# This file is always sourced by another script (i.e. it's never run in -# its own shell), so there's no need to put the #!/bin/some_shell on the -# first line. +# +#----------------------------------------------------------------------- +# +# This file sets the experiment's configuration variables (which are +# global shell variables) to their default values. For many of these +# variables, the valid values that they may take on are defined in the +# file $USHDIR/valid_param_vals.sh. +# +#----------------------------------------------------------------------- +# # #----------------------------------------------------------------------- # +# Set the RUN_ENVIR variable that is listed and described in the WCOSS +# Implementation Standards document: +# +# NCEP Central Operations +# WCOSS Implementation Standards +# April 17, 2019 +# Version 10.2.0 +# +# RUN_ENVIR is described in this document as follows: # +# Set to "nco" if running in NCO's production environment. Used to +# distinguish between organizations. +# +# Valid values are "nco" and "community". Here, we use it to generate +# and run the experiment either in NCO mode (if RUN_ENVIR is set to "nco") +# or in community mode (if RUN_ENVIR is set to "community"). This has +# implications on the experiment variables that need to be set and the +# the directory structure used. # #----------------------------------------------------------------------- # @@ -17,33 +41,29 @@ RUN_ENVIR="nco" # Set machine and queue parameters. Definitions: # # MACHINE: -# Machine on which the workflow will run. Valid values are "WCOSS_C", -# "WCOSS", "DELL", "THEIA","HERA","JET", "ODIN", and "CHEYENNE". New values -# may be added as the workflow is ported to additional machines. +# Machine on which the workflow will run. # # ACCOUNT: # The account under which to submit jobs to the queue. # # QUEUE_DEFAULT: # The default queue to which workflow tasks are submitted. If a task -# does not have a specific variable in which its queue is defined (e.g. -# QUEUE_HPSS, QUEUE_FCST; see below), it is submitted to this -# queue. If this is not set or set to an empty string, it will be reset -# to a machine-dependent value in the setup script (setup.sh). +# does not have a specific variable that specifies the queue to which it +# will be submitted (e.g. QUEUE_HPSS, QUEUE_FCST; see below), it will be +# submitted to the queue specified by this variable. If this is not set +# or is set to an empty string, it will be (re)set to a machine-dependent +# value. # # QUEUE_HPSS: -# The queue to which the tasks that get or link to external model files -# (needed to generate ICs and LBCs) are submitted. This task either co- -# pies the GFS analysis and forecast files from a system direc- -# tory or fetches them from HPSS. In either case, it places the files -# in a temporary directory. If this is not set or set to an empty -# string, it will be reset to a machine-dependent value in the setup -# script (setup.sh). +# The queue to which the tasks that get or create links to external model +# files [which are needed to generate initial conditions (ICs) and lateral +# boundary conditions (LBCs)] are submitted. 
If this is not set or is +# set to an empty string, it will be (re)set to a machine-dependent value. # # QUEUE_FCST: -# The queue to which the run_FV3 task is submitted. This task runs -# the forecast. If this is not set or set to an empty string, it will -# be reset to a machine-dependent value in the setup script (setup.sh). +# The queue to which the task that runs a forecast is submitted. If this +# is not set or set to an empty string, it will be (re)set to a machine- +# dependent value. # # mach_doc_end # @@ -57,7 +77,17 @@ QUEUE_FCST="production_queue" # #----------------------------------------------------------------------- # +# Set cron-related parameters. Definitions: +# +# USE_CRON_TO_RELAUNCH: +# Flag that determines whether or not to add a line to the user's cron +# table to call the experiment launch script every CRON_RELAUNCH_INTVL_MNTS +# minutes. # +# CRON_RELAUNCH_INTVL_MNTS: +# The interval (in minutes) between successive calls of the experiment +# launch script by a cron job to (re)launch the experiment (so that the +# workflow for the experiment kicks off where it left off). # #----------------------------------------------------------------------- # @@ -72,21 +102,28 @@ CRON_RELAUNCH_INTVL_MNTS="03" # EXPT_BASEDIR: # The base directory in which the experiment directory will be created. # If this is not specified or if it is set to an empty string, it will -# default to $BASEDIR/expt_dirs. The full path to the experiment di- -# rectory, which we will denote by EXPTDIR, will be set to $EXPT_BASEDIR -# /$EXPT_SUBDIR (also see definition of EXPT_SUBDIR). +# default to ${HOMErrfs}/../expt_dirs. # # EXPT_SUBDIR: -# The name that the experiment directory (without the full path) will -# have. The full path to the experiment directory, which we will denote -# by EXPTDIR, will be set to ${EXPT_BASEDIR}/${EXPT_SUBDIR} (also see -# definition of EXPT_BASEDIR). +# The name that the experiment directory (without the full path) will +# have. The full path to the experiment directory, which will be contained +# in the variable EXPTDIR, will be: +# +# EXPTDIR="${EXPT_BASEDIR}/${EXPT_SUBDIR}" +# +# This cannot be empty. If set to a null string here, it must be set to +# a (non-empty) value in the user-defined experiment configuration file. +# +# NET, envir, RUN, COMINgfs, STMP, PTMP: +# Directories or variables used to create directory names that are needed +# when generating and running an experiment in NCO mode (see the description +# of the RUN_ENVIR variable above). These are defined in the WCOSS +# Implementation Standards document and thus will not be described here. +# # dir_doc_end # #----------------------------------------------------------------------- # -#EXPT_BASEDIR="/path/to/directory/in/which/experiment/subdirs/will/exist" -#EXPT_SUBDIR="my_test" EXPT_BASEDIR="" EXPT_SUBDIR="" @@ -99,23 +136,22 @@ PTMP="/path/to/temporary/directory/ptmp" # #----------------------------------------------------------------------- # -# File names. Definitions: +# Set file names. Definitions: # # RGNL_GRID_NML_FN: -# Name of file containing the namelist settings for the utility that ge- -# nerates a "JPgrid" type of regional grid. +# Name of file containing the namelist settings for the code that generates +# a "JPgrid" type of regional grid. # # FV3_NML_FN: -# Name of file containing the FV3SAR namelist settings. +# Name of file containing the forecast model's namelist settings. 
# # DIAG_TABLE_FN: -# Name of file that specifies the fields that the FV3SAR will output for -# a forecast that uses GFS physics. The forecast can be either with -# CCPP enabled or disabled. +# Name of file that specifies the fields that the forecast model will +# output. # # FIELD_TABLE_FN: -# Name of file that specifies the traces that the FV3SAR will read in -# from the IC/BC files. +# Name of file that specifies the tracers that the forecast model will +# read in from the IC/LBC files. # # DATA_TABLE_FN: # Name of file that specifies ??? @@ -127,27 +163,25 @@ PTMP="/path/to/temporary/directory/ptmp" # Name of file that specifies ??? # # WFLOW_XML_FN: -# Name of the workflow XML file to be passed to rocoto. +# Name of the rocoto workflow XML file that the experiment generation +# script creates and that defines the workflow for the experiment. # # GLOBAL_VAR_DEFNS_FN: -# Name of file that is sourced by the worflow scripts to set variable -# values. -# -# WRTCMP_PARAMS_TMPL_FN: -# Name of the template file that needs to be appended to the model con- -# figuration file (MODEL_CONFIG_FN) if the write component (QUILTING) is -# going to be used to write output files. This file contains defini- -# tions (either in terms of actual values or placeholders) of the para- -# meters that the write component needs. If the write component is go- -# ing to be used, this file is first appended to MODEL_CONFIG_FN, and -# any placeholder values in the variable definitions in the new -# MODEL_CONFIG_FN file are subsequently replaced by actual values. If a -# predefined domain is being used (see PREDEF_GRID_NAME below), -# WRTCMP_PARAMS_TEMPLATE_FN may be set to an empty string. In this -# case, it will be reset to the name of the existing template file for -# that predefined domain. It is assumed that the file specified by -# WRTCMP_PARAMS_TEMPLATE_FN is located in the templates directory -# TEMPLATE_DIR, which is in turn defined in the setup script. +# Name of file containing the defintions of the primary experiment variables +# (parameters) defined in this default configuration script and in the +# user-specified configuration as well as secondary experiment variables +# generated by the experiment generation script. This file is sourced +# by many scripts (e.g. the J-job scripts corresponding to each workflow +# task) in order to make all the experiment variables available in those +# scripts. +# +# WFLOW_LAUNCH_SCRIPT_FN: +# Name of the script that can be used to (re)launch the experiment's rocoto +# workflow. +# +# WFLOW_LAUNCH_LOG_FN: +# Name of the log file that contains the output from successive calls to +# the workflow launch script (WFLOW_LAUNCH_SCRIPT_FN). # #----------------------------------------------------------------------- # @@ -171,13 +205,11 @@ WFLOW_LAUNCH_LOG_FN="log.launch_FV3SAR_wflow" # # DATE_FIRST_CYCL: # Starting date of the first forecast in the set of forecasts to run. -# Format is "YYYYMMDD". Note that this does not include the hour-of- -# day. +# Format is "YYYYMMDD". Note that this does not include the hour-of-day. # # DATE_LAST_CYCL: # Starting date of the last forecast in the set of forecasts to run. -# Format is "YYYYMMDD". Note that this does not include the hour-of- -# day. +# Format is "YYYYMMDD". Note that this does not include the hour-of-day. # # CYCL_HRS: # An array containing the hours of the day at which to launch forecasts. 
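#
# For example (illustrative values only; the array syntax follows that of the
# other array variables in this file), launching 00z and 12z forecasts for
# each day from 20190615 through 20190617 would correspond to:
#
#   DATE_FIRST_CYCL="20190615"
#   DATE_LAST_CYCL="20190617"
#   CYCL_HRS=( "00" "12" )
#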
@@ -198,112 +230,97 @@ FCST_LEN_HRS="24" # #----------------------------------------------------------------------- # -# Set initial and lateral boundary condition generation parameters. De- -# finitions: +# Set initial and lateral boundary condition generation parameters. +# Definitions: # -# EXTRN_MDL_NAME_ICS +# EXTRN_MDL_NAME_ICS: #`The name of the external model that will provide fields from which -# initial condition (IC) and surface files will be generated for input -# into the FV3SAR. +# initial condition (including and surface) files will be generated for +# input into the forecast model. # -# EXTRN_MDL_NAME_LBCS +# EXTRN_MDL_NAME_LBCS: #`The name of the external model that will provide fields from which -# lateral boundary condition (LBC) files will be generated for input in- -# to the FV3SAR. +# lateral boundary condition (LBC) files will be generated for input into +# the forecast model. # # LBC_UPDATE_INTVL_HRS: -# The frequency (in integer hours) with which lateral boundary data will -# be provided to the FV3SAR model. We will refer to this as the bound- -# ary update interval. If the boundary data is obtained from GFS fore- -# cast files in nemsio format stored in HPSS (mass store), then LBC_UP- -# DATE_INTVL_HRS must be greater than or equal to 6 because these fore- -# cast files are available only every 6 hours. -# -# EXTRN_MDL_INFO_FN: -# Name of sourceable file (not including the full path) defining the va- -# riables specified in EXTRN_MDL_INFO_VAR_NAMES (see below). -# -# EXTRN_MDL_INFO_VAR_NAMES: -# Names to use for the following parameters (for a given cycle of the -# FV3SAR): -# * The date and hour-of-day (in YYYYMMDDHH format) of the start time of -# the external model. -# * Array containing the forecast hours (relative to the -# * Array containing the names of the external model output files. -# * The system directory in which the external model output files may be -# found (if the cycle start time is not too old). -# * The format of the archive file (e.g. "tar", "zip", etc) on HPSS that -# may contain the external model output files. Note that this archive -# file will exist only if the cycle start time is old enough. -# * The name of the archive file on HPSS that may contain the external -# model output files. -# * The full path to the archive file on HPSS that may contain the ex- -# ternal model output files. -# * The directory "within" the archive file in which the external model -# output files are stored. +# The interval (in integer hours) with which LBC files will be generated. +# We will refer to this as the boundary update interval. Note that the +# model specified in EXTRN_MDL_NAME_LBCS must have data available at a +# frequency greater than or equal to that implied by LBC_UPDATE_INTVL_HRS. +# For example, if LBC_UPDATE_INTVL_HRS is set to 6, then the model must +# have data availble at least every 6 hours. It is up to the user to +# ensure that this is the case. +# +# FV3GFS_FILE_FMT_ICS: +# If using the FV3GFS model as the source of the ICs (i.e. if EXTRN_MDL_NAME_ICS +# is set to "FV3GFS"), this variable specifies the format of the model +# files to use when generating the ICs. +# +# FV3GFS_FILE_FMT_LBCS: +# If using the FV3GFS model as the source of the LBCs (i.e. if +# EXTRN_MDL_NAME_LBCS is set to "FV3GFS"), this variable specifies the +# format of the model files to use when generating the LBCs. 
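#
# For example (illustrative settings only; the valid values for these
# variables are listed in valid_param_vals.sh), generating both the ICs and
# the LBCs from FV3GFS grib2 files with a 6-hourly boundary update interval
# would correspond to:
#
#   EXTRN_MDL_NAME_ICS="FV3GFS"
#   EXTRN_MDL_NAME_LBCS="FV3GFS"
#   LBC_UPDATE_INTVL_HRS="6"
#   FV3GFS_FILE_FMT_ICS="grib2"
#   FV3GFS_FILE_FMT_LBCS="grib2"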
# #----------------------------------------------------------------------- # EXTRN_MDL_NAME_ICS="FV3GFS" EXTRN_MDL_NAME_LBCS="FV3GFS" +LBC_UPDATE_INTVL_HRS="6" FV3GFS_FILE_FMT_ICS="nemsio" FV3GFS_FILE_FMT_LBCS="nemsio" -LBC_UPDATE_INTVL_HRS="6" # #----------------------------------------------------------------------- # -# Flag controlling whether or not a CCPP-enabled version of the FV3SAR -# will be run. This must be set to "TRUE" or "FALSE". Setting this -# flag to "TRUE" will cause the workflow to stage the appropriate CCPP- -# enabled versions of the FV3SAR executable and various input files -# (e.g. the FV3SAR namelist file, the diagnostics table file, the field -# table file, etc) that have settings that correspond to EMC's CCPP-ena- -# bled FV3SAR regression test. It will also cause additional files -# (i.e. in addition to the ones for the non-CCPP enabled version of the -# FV3SAR) to be staged in the experiment directory (e.g. module setup -# scripts, module load files). +# Set CCPP related parameters. Definitions: # -#----------------------------------------------------------------------- +# USE_CCPP: +# Flag controlling whether or not a CCPP-enabled version of the forecast +# model will be run. Note that the user is responsible for ensuring that +# a CCPP-enabled forecast model executable is built and placed at the +# correct location (that is part of the build process). # -USE_CCPP="FALSE" +# CCPP_PHYS_SUITE: +# If USE_CCPP has been set to "TRUE", this variable defines the physics +# suite that will run using CCPP. The choice of physics suite determines +# the forecast model's namelist file, the diagnostics table file, the +# field table file, and the XML physics suite definition file that are +# staged in the experiment directory or the cycle directories under it. +# If USE_CCPP is set to "FALSE", the only physics suite that can be run +# is the GFS. # -#----------------------------------------------------------------------- -# -# If CCPP has been set to "TRUE", the CCPP_PHYS_SUITE variable defines -# the physics suite that will run using CCPP. This affects the FV3SAR -# namelist file, the diagnostics table file, the field table file, and -# the XML physics suite definition file that are staged in the experi- -# ment directory and/or the run directories under it. -# -# Note that with CCPP set to "FALSE", the only physics suite that can be -# run is the GFS. -# -# IMPORTANT NOTE: -# It is up to the user to ensure that the CCPP FV3 executable is com- -# piled with either the dynamic build or the static build with the cor- -# rect physics package. If using a static build, the run will fail if -# there is a mismatch between the physics package specified in this con- -# figuration file and the physics package used for the static build. +# Note that it is up to the user to ensure that the CCPP-enabled forecast +# model executable is built with either the dynamic build (which can +# handle any CCPP physics package but is slower to run) or the static +# build with the correct physics package. If using a static build, the +# forecast will fail if the physics package specified in the experiment's +# variable defintions file (GLOBAL_VAR_DEFNS_FN) is not the same as the +# one that was used for the static build. 
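#
# For example (illustrative settings only), running the CCPP-enabled forecast
# model with the GSD physics suite on a "JPgrid" type of regional grid would
# correspond to:
#
#   USE_CCPP="TRUE"
#   CCPP_PHYS_SUITE="FV3_GSD_v0"
#   GRID_GEN_METHOD="JPgrid"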
# #----------------------------------------------------------------------- # -#CCPP_PHYS_SUITE="FV3_GFS_2017_gfdlmp" +USE_CCPP="FALSE" CCPP_PHYS_SUITE="FV3_GSD_v0" -#CCPP_PHYS_SUITE="FV3_GSD_SAR" # #----------------------------------------------------------------------- # -# Set GRID_GEN_METHOD. This variable specifies the method to use to ge- -# nerate a regional grid in the horizontal. The values that grid_gen_- -# method can take on are: +# Set GRID_GEN_METHOD. This variable specifies the method to use to +# generate a regional grid in the horizontal, or, if using pregenerated +# grid files instead of running the grid generation task, the grid generation +# method that was used to generate those files. The values that +# GRID_GEN_METHOD can take on are: # # * "GFDLgrid": -# This will generate a regional grid by first generating a parent glo- -# bal cubed-sphere grid using GFDL's grid generator. +# This setting will generate a regional grid by first generating a +# "parent" global cubed-sphere grid and then taking a portion of tile +# 6 of that global grid -- referred to in the grid generation scripts +# as "tile 7" even though it doesn't correspond to a complete tile -- +# and using it as the regional grid. Note that the forecast is run on +# only on the regional grid (i.e. tile 7, not tiles 1 through 6). # # * "JPgrid": -# This will generate a regional grid using the map projection deve- -# loped by Jim Purser of EMC. +# This will generate a regional grid using the map projection developed +# by Jim Purser of EMC. # #----------------------------------------------------------------------- # @@ -311,75 +328,81 @@ GRID_GEN_METHOD="JPgrid" # #----------------------------------------------------------------------- # -# Set parameters specific to the method for generating a regional grid -# WITH a global parent (i.e. for GRID_GEN_METHOD set to "GFDLgrid"). -# Note that for this method: +# Set parameters specific to the "GFDLgrid" method of generating a regional +# grid (i.e. for GRID_GEN_METHOD set to "GFDLgrid"). The following +# parameters will be used only if GRID_GEN_METHOD is set to "GFDLgrid". +# In this grid generation method: # -# * The regional grid is defined with respect to a global cubed-sphere -# grid. Thus, the parameters for a global cubed-sphere grid must be -# specified even though the model equations are not integrated on this -# global grid (they are integrated only on the regional grid). +# * The regional grid is defined with respect to a "parent" global cubed- +# sphere grid. Thus, all the parameters for a global cubed-sphere grid +# must be specified in order to define this parent global grid even +# though the model equations are not integrated on (they are integrated +# only on the regional grid). # -# * RES is the number of grid cells in either one of the two horizontal -# directions x and y on any one of the 6 tiles of the global cubed- -# sphere grid. RES must be one of "48", "96", "192", "384", "768", -# "1152", and "3072". The mapping from RES to nominal resolution -# (cell size) for a uniform global grid (i.e. Schmidt stretch factor -# GFDLgrid_STRETCH_FAC set to 1) is as follows: +# * GFDLgrid_RES is the number of grid cells in either one of the two +# horizontal directions x and y on any one of the 6 tiles of the parent +# global cubed-sphere grid. The mapping from GFDLgrid_RES to a nominal +# resolution (grid cell size) for a uniform global grid (i.e. 
Schmidt +# stretch factor GFDLgrid_STRETCH_FAC set to 1) for several values of +# GFDLgrid_RES is as follows: # -# C192 --> 50km -# C384 --> 25km -# C768 --> 13km -# C1152 --> 8.5km -# C3072 --> 3.2km +# GFDLgrid_RES typical cell size +# ------------ ----------------- +# 192 50 km +# 384 25 km +# 768 13 km +# 1152 8.5 km +# 3072 3.2 km # -# Note that these are nominal resolutions. The actual cell size on +# Note that these are only typical cell sizes. The actual cell size on # the global grid tiles varies somewhat as we move across a tile. # # * Tile 6 has arbitrarily been chosen as the tile to use to orient the -# global grid on the sphere (Earth). This is done by specifying GFDL- -# grid_LON_T6_CTR and GFDLgrid_LAT_T6_CTR, which are the longitude and -# latitude (in degrees) of the center of tile 6. +# global parent grid on the sphere (Earth). This is done by specifying +# GFDLgrid_LON_T6_CTR and GFDLgrid_LAT_T6_CTR, which are the longitude +# and latitude (in degrees) of the center of tile 6. # -# * Setting the Schmidt stretching factor GFDLgrid_STRETCH_FAC to a value greater -# than 1 shrinks tile 6, while setting it to a value less than 1 (but -# still greater than 0) expands tile 6. The remaining 5 tiles change +# * Setting the Schmidt stretching factor GFDLgrid_STRETCH_FAC to a value +# greater than 1 shrinks tile 6, while setting it to a value less than +# 1 (but still greater than 0) expands it. The remaining 5 tiles change # shape as necessary to maintain global coverage of the grid. # -# * The cell size on a given global tile depends on both RES and -# GFDLgrid_STRETCH_FAC (since changing RES changes the number of cells in the -# tile, and changing GFDLgrid_STRETCH_FAC modifies the shape and size of the -# tile). +# * The cell size on a given global tile depends on both GFDLgrid_RES and +# GFDLgrid_STRETCH_FAC (since changing GFDLgrid_RES changes the number +# of cells in the tile, and changing GFDLgrid_STRETCH_FAC modifies the +# shape and size of the tile). # # * The regional grid is embedded within tile 6 (i.e. it doesn't extend # beyond the boundary of tile 6). Its exact location within tile 6 is -# is determined by the starting and ending i and j indices +# is determined by specifying the starting and ending i and j indices +# of the regional grid on tile 6, where i is the grid index in the x +# direction and j is the grid index in the y direction. These indices +# are stored in the variables # # GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G # GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G # GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G # GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G # -# where i is the grid index in the x direction and j is the grid index -# in the y direction. -# -# * In the FV3SAR code, for convenience the regional grid is denoted as -# "tile 7" even though it doesn't map back to one of the 6 faces of -# the cube from which the global grid is generated (it maps back to -# only a subregion on face 6 since it is wholly confined within tile -# 6). Tile 6 is often referred to as the "parent" tile of the region- -# al grid. -# -# * GFDLgrid_REFINE_RATIO is the refinement ratio of the regional grid (tile 7) -# with respect to the grid on its parent tile (tile 6), i.e. it is the -# number of grid cells along the boundary of the regional grid that -# abut one cell on tile 6. Thus, the cell size on the regional grid -# depends not only on RES and GFDLgrid_STRETCH_FAC (because the cell size on -# tile 6 depends on these two parameters) but also on GFDLgrid_REFINE_RATIO. 
-# Note that as on the tiles of the global grid, the cell size on the -# regional grid is not uniform but varies as we move across the grid. -# -# Definitions: +# * In the forecast model code and in the experiment generation and workflow +# scripts, for convenience the regional grid is denoted as "tile 7" even +# though it doesn't map back to one of the 6 faces of the cube from +# which the parent global grid is generated (it maps back to only a +# subregion on face 6 since it is wholly confined within tile 6). Tile +# 6 may be referred to as the "parent" tile of the regional grid. +# +# * GFDLgrid_REFINE_RATIO is the refinement ratio of the regional grid +# (tile 7) with respect to the grid on its parent tile (tile 6), i.e. +# it is the number of grid cells along the boundary of the regional grid +# that abut one cell on tile 6. Thus, the cell size on the regional +# grid depends not only on GFDLgrid_RES and GFDLgrid_STRETCH_FAC (because +# the cell size on tile 6 depends on these two parameters) but also on +# GFDLgrid_REFINE_RATIO. Note that as on the tiles of the global grid, +# the cell size on the regional grid is not uniform but varies as we +# move across the grid. +# +# Definitions of parameters that need to be specified when GRID_GEN_METHOD +# is set to "GFDLgrid": # # GFDLgrid_LON_T6_CTR: # Longitude of the center of tile 6 (in degrees). @@ -388,13 +411,17 @@ GRID_GEN_METHOD="JPgrid" # Latitude of the center of tile 6 (in degrees). # # GFDLgrid_RES: -# Number of points in each of the two horizontal directions (x and y) -# on each tile of the global grid. Must be "48", "96", "192", "384", -# "768", "1152", or "3072" +# Number of points in each of the two horizontal directions (x and y) on +# each tile of the parent global grid. Note that the name of this parameter +# is really a misnomer because although it has the stirng "RES" (for +# "resolution") in its name, it specifies number of grid cells, not grid +# size (in say meters or kilometers). However, we keep this name in order +# to remain consistent with the usage of the word "resolution" in the +# global forecast model and other auxiliary codes. # # GFDLgrid_STRETCH_FAC: # Stretching factor used in the Schmidt transformation applied to the -# cubed sphere grid. +# parent cubed-sphere grid. # # GFDLgrid_REFINE_RATIO: # Cell refinement ratio for the regional grid, i.e. the number of cells @@ -413,6 +440,30 @@ GRID_GEN_METHOD="JPgrid" # GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G: # j-index on tile 6 at which the regional grid (tile 7) ends. # +# GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES: +# Flag that determines the file naming convention to use for grid, orography, +# and surface climatology files (or, if using pregenerated files, the +# naming convention that was used to name these files). These files +# usually start with the string "C${RES}_", where RES is an integer. +# In the global forecast model, RES is the number of points in each of +# the two horizontal directions (x and y) on each tile of the global grid +# (defined here as GFDLgrid_RES). If this flag is set to "TRUE", RES will +# be set to GFDLgrid_RES just as in the global forecast model. If it is +# set to "FALSE", we calculate (in the grid generation task) an "equivalent +# global uniform cubed-sphere resolution" -- call it RES_EQUIV -- and +# then set RES equal to it. RES_EQUIV is the number of grid points in +# each of the x and y directions on each tile that a global UNIFORM (i.e. 
+# stretch factor of 1) cubed-sphere grid would have to have in order to +# have the same average grid size as the regional grid. This is a more +# useful indicator of the grid size because it takes into account the +# effects of GFDLgrid_RES, GFDLgrid_STRETCH_FAC, and GFDLgrid_REFINE_RATIO +# in determining the regional grid's typical grid size, whereas simply +# setting RES to GFDLgrid_RES doesn't take into account the effects of +# GFDLgrid_STRETCH_FAC and GFDLgrid_REFINE_RATIO on the regional grid's +# resolution. Nevertheless, some users still prefer to use GFDLgrid_RES +# in the file names, so we allow for that here by setting this flag to +# "TRUE". +# #----------------------------------------------------------------------- # GFDLgrid_LON_T6_CTR=-97.5 @@ -428,9 +479,8 @@ GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES="TRUE" # #----------------------------------------------------------------------- # -# Set parameters specific to the method for generating a regional grid -# without a global parent (i.e. for GRID_GEN_METHOD set to "JPgrid"). -# These are: +# Set parameters specific to the "JPgrid" method of generating a regional +# grid (i.e. for GRID_GEN_METHOD set to "JPgrid"). Definitions: # # JPgrid_LON_CTR: # The longitude of the center of the grid (in degrees). @@ -442,8 +492,8 @@ GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES="TRUE" # The cell size in the zonal direction of the regional grid (in meters). # # JPgrid_DELY: -# The cell size in the meridional direction of the regional grid (in me- -# ters). +# The cell size in the meridional direction of the regional grid (in +# meters). # # JPgrid_NX: # The number of cells in the zonal direction on the regional grid. @@ -452,113 +502,65 @@ GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES="TRUE" # The number of cells in the meridional direction on the regional grid. # # JPgrid_WIDE_HALO_WIDTH: -# The width of the wide halo (in units of number of cells) to create -# around the regional grid. A grid with a halo of this width will first -# be created and stored in a grid specification file. This grid will -# then be shaved down to obtain grids with 3-cell-wide and 4-cell-wide -# halos. +# The width (in units of number of grid cells) of the halo to add around +# the regional grid before shaving the halo down to the width(s) expected +# by the forecast model. +# +# In order to generate grid files containing halos that are 3-cell and +# 4-cell wide and orography files with halos that are 0-cell and 3-cell +# wide (all of which are required as inputs to the forecast model), the +# grid and orography tasks first create files with halos around the regional +# domain of width JPgrid_WIDE_HALO_WIDTH cells. These are first stored +# in files. The files are then read in and "shaved" down to obtain grid +# files with 3-cell-wide and 4-cell-wide halos and orography files with +# 0-cell-wide (i.e. no halo) and 3-cell-wide halos. For this reason, we +# refer to the original halo that then gets shaved down as the "wide" +# halo, i.e. because it is wider than the 0-cell-wide, 3-cell-wide, and +# 4-cell-wide halos that we will eventually end up with. Note that the +# grid and orography files with the wide halo are only needed as intermediates +# in generating the files with 0-cell-, 3-cell-, and 4-cell-wide halos; +# they are not needed by the forecast model. Usually, there is no reason +# to change this parameter from its default value set here. +# +# NOTE: Probably don't need to make this a user-specified variable. +# Just set it in the function set_gridparams_JPgrid.sh. 
# # JPgrid_ALPHA_PARAM: -# The alpha parameter used in the Jim Purser map projection/grid gene- -# ration method. +# The alpha parameter used in the Jim Purser map projection/grid generation +# method. # # JPgrid_KAPPA_PARAM: -# The kappa parameter used in the Jim Purser map projection/grid gene- -# ration method. +# The kappa parameter used in the Jim Purser map projection/grid generation +# method. # #----------------------------------------------------------------------- # -JPgrid_LON_CTR=-97.5 -JPgrid_LAT_CTR=35.5 +JPgrid_LON_CTR="-97.5" +JPgrid_LAT_CTR="35.5" JPgrid_DELX="3000.0" JPgrid_DELY="3000.0" -JPgrid_NX=1000 -JPgrid_NY=1000 -JPgrid_WIDE_HALO_WIDTH=6 +JPgrid_NX="1000" +JPgrid_NY="1000" +JPgrid_WIDE_HALO_WIDTH="6" JPgrid_ALPHA_PARAM="0.21423" JPgrid_KAPPA_PARAM="-0.23209" # #----------------------------------------------------------------------- # -# Set PREDEF_GRID_NAME. This variable specifies a predefined (regional) -# domain, as follows: -# -# * If PREDEF_GRID_NAME is set to an empty string, the grid configuration -# parameters set below are used to generate a grid. -# -# * If PREDEF_GRID_NAME is set to a valid non-empty string, the grid confi- -# guration parameters set below are overwritten by predefined values -# in order to generate a predefined grid. Valid non-empty values for -# PREDEF_GRID_NAME currently consist of: -# -# "RAP" -# "HRRR" -# "EMCCONUS" -# -# These result in regional grids that cover (as closely as possible) -# the domains used in the WRF/ARW-based RAP and HRRR models, respec- -# tively. -# -#----------------------------------------------------------------------- -# -PREDEF_GRID_NAME="" -EMC_GRID_NAME="" -# -#----------------------------------------------------------------------- -# -# Set the model integraton time step DT_ATMOS. This is the time step -# for the largest atmosphere model loop. It corresponds to the frequen- -# cy with which the top level routine in the dynamics is called as well -# as the frequency with which the physics is called. -# -#----------------------------------------------------------------------- -# -DT_ATMOS=18 #Preliminary values: 18 for 3-km runs, 90 for 13-km runs -# -#----------------------------------------------------------------------- -# -# Set PREEXISTING_DIR_METHOD. This variable determines the strategy to -# use to deal with preexisting experiment and/or work directories (e.g -# ones generated by previous experiments). This variable must be set to -# one of "delete", "rename", and "quit". The resulting behavior for -# each of these values is as follows: -# -# * "delete": -# The preexisting directory is deleted and a new directory (having the -# same name as the original preexisting directory) is created. -# -# * "rename": -# The preexisting directory is renamed and a new directory (having the -# same name as the original preexisting directory) is created. The -# new name of the preexisting directory consists of its original name -# and the suffix "_oldNNN", where NNN is a 3-digit integer chosen to -# make the new name unique. -# -# * "quit": -# The preexisting directory is left unchanged, but execution of the -# currently running script is terminated. In this case, the preexist- -# ing directory must be dealt with manually before rerunning the -# script. 
-#
-#-----------------------------------------------------------------------
-#
-PREEXISTING_DIR_METHOD="delete"
-#PREEXISTING_DIR_METHOD="rename"
-#PREEXISTING_DIR_METHOD="quit"
-#
-#-----------------------------------------------------------------------
-#
-# Set the flag that determines whether or not the workflow scripts tend
-# to be more verbose. This must be set to "TRUE" or "FALSE".
+# Set DT_ATMOS. This is the main forecast model integration time step.
+# As described in the forecast model documentation, "It corresponds to
+# the frequency with which the top level routine in the dynamics is called
+# as well as the frequency with which the physics is called."
 #
 #-----------------------------------------------------------------------
 #
-VERBOSE="TRUE"
-#VERBOSE="FALSE"
+DT_ATMOS="18"
 #
 #-----------------------------------------------------------------------
 #
-# Set the number of MPI tasks to use in the x and y directions.
+# Set LAYOUT_X and LAYOUT_Y. These are the number of MPI tasks (processes)
+# to use in the two horizontal directions (x and y) of the regional grid
+# when running the forecast model.
 #
 #-----------------------------------------------------------------------
 #
@@ -567,10 +569,15 @@ LAYOUT_Y="20"
 #
 #-----------------------------------------------------------------------
 #
-# Set the BLOCKSIZE to use. This is the amount of data that is passed
-# into the cache at a time. The number of vertical columns per MPI task
-# needs to be divisible by BLOCKSIZE; otherwise, unexpected results may
-# occur.
+# Set BLOCKSIZE. This is the amount of data that is passed into the cache
+# at a time. The number of vertical columns per MPI task needs to be
+# divisible by BLOCKSIZE; otherwise, unexpected results may occur.
+#
+# GSK: IMPORTANT NOTE:
+# I think Dom fixed the code so that the number of columns per MPI task
+# no longer needs to be divisible by BLOCKSIZE. If so, remove the check
+# on blocksize in the experiment generation scripts. Note that BLOCKSIZE
+# still needs to be set to a value (probably machine-dependent).
 #
 #-----------------------------------------------------------------------
 #
@@ -581,7 +588,8 @@ BLOCKSIZE="24"
 # Set write-component (quilting) parameters. Definitions:
 #
 # QUILTING:
-# Flag for whether or not to use the write component for output.
+# Flag that determines whether or not to use the write component for
+# writing output files to disk.
 #
 # WRTCMP_write_groups:
 # The number of write groups (i.e. groups of MPI tasks) to use in the
@@ -594,7 +602,7 @@ BLOCKSIZE="24"
 # Flag for whether or not to output extra (debugging) information from
 # ESMF routines. Must be ".true." or ".false.". Note that the write
 # component uses ESMF library routines to interpolate from the native
-# FV3SAR grid to the user-specified output grid (which is defined in the
+# forecast model grid to the user-specified output grid (which is defined in the
 # model configuration file MODEL_CONFIG_FN in the forecast's run direc-
 # tory).
 #
@@ -629,34 +637,137 @@ WRTCMP_nx=""
 WRTCMP_ny=""
 WRTCMP_dx=""
 WRTCMP_dy=""
-
-
 #
 #-----------------------------------------------------------------------
 #
+# Set PREDEF_GRID_NAME. This parameter specifies a predefined regional
+# grid, as follows:
+#
+# * If PREDEF_GRID_NAME is set to an empty string, the grid parameters,
+# time step (DT_ATMOS), computational parameters (e.g. LAYOUT_X, LAYOUT_Y),
+# and write component parameters set above (and possibly overwritten by
+# values in the user-specified configuration file) are used.
+#
+# * If PREDEF_GRID_NAME is set to a valid grid name, the grid parameters,
+# time step (DT_ATMOS), computational parameters (e.g. LAYOUT_X, LAYOUT_Y),
+# and write component parameters set above (and possibly overwritten by
+# values in the user-specified configuration file) are overwritten by
+# predefined values for the specified grid.
+#
+# This is simply a convenient way to quickly specify a set of parameters
+# that depend on the grid.
#
#-----------------------------------------------------------------------
#
-RUN_TASK_MAKE_GRID="TRUE"
-GRID_DIR="/path/to/pregenerated/grid/files"
-
-RUN_TASK_MAKE_OROG="TRUE"
-OROG_DIR="/path/to/pregenerated/orog/files"
+PREDEF_GRID_NAME=""
+#
+#-----------------------------------------------------------------------
+#
+# Set EMC_GRID_NAME. This is a convenience parameter to allow EMC to use
+# its original grid names. It is simply used to determine a value for
+# PREDEF_GRID_NAME. Once EMC starts using PREDEF_GRID_NAME, this variable
+# can be eliminated.
+#
+#-----------------------------------------------------------------------
+#
+EMC_GRID_NAME=""
+#
+#-----------------------------------------------------------------------
+#
+# Set PREEXISTING_DIR_METHOD. This variable determines the method to use
+# to deal with preexisting directories [e.g. ones generated by previous
+# calls to the experiment generation script using the same experiment name
+# (EXPT_SUBDIR) as the current experiment]. This variable must be set to
+# one of "delete", "rename", and "quit". The resulting behavior for each
+# of these values is as follows:
+#
+# * "delete":
+# The preexisting directory is deleted and a new directory (having the
+# same name as the original preexisting directory) is created.
+#
+# * "rename":
+# The preexisting directory is renamed and a new directory (having the
+# same name as the original preexisting directory) is created. The new
+# name of the preexisting directory consists of its original name and
+# the suffix "_oldNNN", where NNN is a 3-digit integer chosen to make
+# the new name unique.
+#
+# * "quit":
+# The preexisting directory is left unchanged, but execution of the
+# currently running script is terminated. In this case, the preexisting
+# directory must be dealt with manually before rerunning the script.
+#
+#-----------------------------------------------------------------------
+#
+PREEXISTING_DIR_METHOD="delete"
 #
 #-----------------------------------------------------------------------
 #
+# Set VERBOSE. This is a flag that determines whether or not the experiment
+# generation and workflow task scripts print out more informational
+# messages.
+#
+#-----------------------------------------------------------------------
#
+VERBOSE="TRUE"
 #
 #-----------------------------------------------------------------------
 #
+# Set flags (and related directories) that determine whether the grid,
+# orography, and/or surface climatology file generation tasks should be
+# run. Note that these are all cycle-independent tasks, i.e. if they are
+# to be run, they do so only once at the beginning of the workflow before
+# any cycles are run. Definitions:
+#
+# RUN_TASK_MAKE_GRID:
+# Flag that determines whether the grid file generation task is to be run.
+# If this is set to "TRUE", the grid generation task is run and new grid
+# files are generated. If it is set to "FALSE", then the scripts look
+# for pregenerated grid files in the directory specified by GRID_DIR (see
+# below).
+# +# GRID_DIR: +# The directory in which to look for pregenerated grid files if +# RUN_TASK_MAKE_GRID is set to "FALSE". +# +# RUN_TASK_MAKE_OROG: +# Same as RUN_TASK_MAKE_GRID but for the orography generation task. +# +# OROG_DIR: +# Same as GRID_DIR but for the orogrpahy generation task. +# +# RUN_TASK_MAKE_SFC_CLIMO: +# Same as RUN_TASK_MAKE_GRID but for the surface climatology generation +# task. +# +# SFC_CLIMO_DIR: +# Same as GRID_DIR but for the surface climatology generation task. +# +#----------------------------------------------------------------------- +# +RUN_TASK_MAKE_GRID="TRUE" +GRID_DIR="/path/to/pregenerated/grid/files" + +RUN_TASK_MAKE_OROG="TRUE" +OROG_DIR="/path/to/pregenerated/orog/files" + RUN_TASK_MAKE_SFC_CLIMO="TRUE" SFC_CLIMO_DIR="/path/to/pregenerated/surface/climo/files" - # #----------------------------------------------------------------------- # -# +# Set the arrays that specify the file names in the system and experiment's +# FIXam directories. Definitions: +# +# FIXgsm_FILENAMES: +# This array contains the names of the fixed files in the system's FIXgsm +# directory that the experiment generation script will either copy or +# create links to. +# +# FIXam_FILENAMES: +# This array contains the names of the files in the local FIXam directory +# that are either copies of or symlinks to the files listed in the +# FIXgsm_FILENAMES array in the FIXgsm directory. # #----------------------------------------------------------------------- # From c3f8d40d3ec8d52482158f7451a821926061960f Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 24 Jan 2020 09:06:20 -0700 Subject: [PATCH 198/203] Change hashes to latest for which most end-to-end tests pass. --- Externals.cfg | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Externals.cfg b/Externals.cfg index 5efce4a7f..7f92ef5b9 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -3,7 +3,7 @@ protocol = git repo_url = https://github.com/NCAR/UFS_UTILS # Specify either a branch name or a hash but not both. #branch = dtc/develop -hash = 0244c839 +hash = 22d9e6ad local_path = sorc/UFS_UTILS_develop required = True @@ -12,7 +12,7 @@ protocol = git repo_url = https://github.com/NCAR/UFS_UTILS # Specify either a branch name or a hash but not both. #branch = feature/chgres_grib2 -hash = 9dca4141 +hash = b47bc84c local_path = sorc/UFS_UTILS_chgres_grib2 required = True @@ -30,7 +30,7 @@ protocol = git repo_url = https://github.com/NOAA-EMC/EMC_post # Specify either a branch name or a hash but not both. #branch = develop -hash = 3b42c2a +hash = 78078f62 local_path = sorc/EMC_post required = True From 2c8d398555c20a10b5574e481426416745d1340f Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Fri, 24 Jan 2020 09:08:00 -0700 Subject: [PATCH 199/203] Add three more end-to-end baseline tests: (1) 006 is to test NOT running the grid, orography, and surface climatology tasks; (2) 007 is to test using a GFDLgrid without specifying a predefined grid (i.e. user specifies all the grid parameters) and using the global model's file-naming convention (where the resolution in the file names is the number of grid points on any one of the tiles 1 through 6); and (3) 008 is the same as 007 except using the file naming convention in which the "equivalent global UNIFORM cubed-sphere grid" resolution is used. 
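
The hard-coded tile-6 index values that appear in config.regional_007.sh and config.regional_008.sh below come from the margin-cell arithmetic left in those files as comments. As a stand-alone illustration (the 9-cell margin and GFDLgrid_RES=96 are simply the values used in those two configs):

    # Illustration only: reproduce the commented-out margin-cell arithmetic
    # from config.regional_007.sh / config.regional_008.sh.
    GFDLgrid_RES="96"
    num_margin_cells_T6_left=9
    GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_left + 1 ))            # -> 10
    num_margin_cells_T6_right=9
    GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_right ))  # -> 87
    num_margin_cells_T6_bottom=9
    GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_bottom + 1 ))          # -> 10
    num_margin_cells_T6_top=9
    GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_top ))    # -> 87

As the commit message states, test 008 then differs from 007 only in GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES ("TRUE" in 007, "FALSE" in 008), i.e. in whether GFDLgrid_RES or the equivalent uniform cubed-sphere resolution is used in the grid, orography, and surface climatology file names.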
--- tests/baseline_configs/config.regional_006.sh | 62 +++++++++++ tests/baseline_configs/config.regional_007.sh | 103 ++++++++++++++++++ tests/baseline_configs/config.regional_008.sh | 103 ++++++++++++++++++ 3 files changed, 268 insertions(+) create mode 100644 tests/baseline_configs/config.regional_006.sh create mode 100644 tests/baseline_configs/config.regional_007.sh create mode 100644 tests/baseline_configs/config.regional_008.sh diff --git a/tests/baseline_configs/config.regional_006.sh b/tests/baseline_configs/config.regional_006.sh new file mode 100644 index 000000000..757c94cad --- /dev/null +++ b/tests/baseline_configs/config.regional_006.sh @@ -0,0 +1,62 @@ +# +# MACHINE will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +MACHINE="" +# +# ACCOUNT will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +ACCOUNT="" +# +# EXPT_SUBDIR will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to a value obtained from the name of this file. +# +EXPT_SUBDIR="" +# +# USE_CRON_TO_RELAUNCH may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. +# +USE_CRON_TO_RELAUNCH="TRUE" +# +# CRON_RELAUNCH_INTVL_MNTS may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. +# +CRON_RELAUNCH_INTVL_MNTS="02" + + +QUEUE_DEFAULT="batch" +QUEUE_HPSS="service" +QUEUE_FCST="batch" + +VERBOSE="TRUE" + +RUN_ENVIR="community" +PREEXISTING_DIR_METHOD="rename" + +PREDEF_GRID_NAME="GSD_HRRR25km" +GRID_GEN_METHOD="JPgrid" +QUILTING="TRUE" +USE_CCPP="TRUE" +CCPP_PHYS_SUITE="FV3_GFS_2017_gfdlmp" +FCST_LEN_HRS="06" +LBC_UPDATE_INTVL_HRS="6" + +DATE_FIRST_CYCL="20190701" +DATE_LAST_CYCL="20190701" +CYCL_HRS=( "00" ) + +EXTRN_MDL_NAME_ICS="FV3GFS" +EXTRN_MDL_NAME_LBCS="FV3GFS" + +RUN_TASK_MAKE_GRID="FALSE" +GRID_DIR="/scratch2/BMC/det/FV3SAR_pregen/grid/GSD_HRRR25km" + +RUN_TASK_MAKE_OROG="FALSE" +OROG_DIR="/scratch2/BMC/det/FV3SAR_pregen/orog/GSD_HRRR25km" + +RUN_TASK_MAKE_SFC_CLIMO="FALSE" +SFC_CLIMO_DIR="/scratch2/BMC/det/FV3SAR_pregen/sfc_climo/GSD_HRRR25km" + diff --git a/tests/baseline_configs/config.regional_007.sh b/tests/baseline_configs/config.regional_007.sh new file mode 100644 index 000000000..be1698c7e --- /dev/null +++ b/tests/baseline_configs/config.regional_007.sh @@ -0,0 +1,103 @@ +# +# MACHINE will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +MACHINE="" +# +# ACCOUNT will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +ACCOUNT="" +# +# EXPT_SUBDIR will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to a value obtained from the name of this file. +# +EXPT_SUBDIR="" +# +# USE_CRON_TO_RELAUNCH may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. +# +USE_CRON_TO_RELAUNCH="TRUE" +# +# CRON_RELAUNCH_INTVL_MNTS may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. 
+# +CRON_RELAUNCH_INTVL_MNTS="02" + + +QUEUE_DEFAULT="batch" +QUEUE_HPSS="service" +QUEUE_FCST="batch" + +VERBOSE="TRUE" + +RUN_ENVIR="community" +PREEXISTING_DIR_METHOD="rename" + +GRID_GEN_METHOD="GFDLgrid" + +GFDLgrid_LON_T6_CTR=-97.5 +GFDLgrid_LAT_T6_CTR=38.5 +GFDLgrid_STRETCH_FAC=1.5 +GFDLgrid_RES="96" +GFDLgrid_REFINE_RATIO=2 + +#num_margin_cells_T6_left=9 +#GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_left + 1 )) +GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G="10" + +#num_margin_cells_T6_right=9 +#GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_right )) +GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G="87" + +#num_margin_cells_T6_bottom=9 +#GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_bottom + 1 )) +GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G="10" + +#num_margin_cells_T6_top=9 +#GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_top )) +GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G="87" + +GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES="TRUE" + +DT_ATMOS="100" + +LAYOUT_X="6" +LAYOUT_Y="6" +BLOCKSIZE="26" + +QUILTING="TRUE" + +if [ "$QUILTING" = "TRUE" ]; then + WRTCMP_write_groups="1" + WRTCMP_write_tasks_per_group=$(( 1*LAYOUT_Y )) + WRTCMP_output_grid="rotated_latlon" + WRTCMP_cen_lon="${GFDLgrid_LON_T6_CTR}" + WRTCMP_cen_lat="${GFDLgrid_LAT_T6_CTR}" +# The following have not been tested... + WRTCMP_lon_lwr_left="-25.0" + WRTCMP_lat_lwr_left="-15.0" + WRTCMP_lon_upr_rght="25.0" + WRTCMP_lat_upr_rght="15.0" + WRTCMP_dlon="0.24" + WRTCMP_dlat="0.24" +fi + +USE_CCPP="TRUE" +CCPP_PHYS_SUITE="FV3_GFS_2017_gfdlmp" +FCST_LEN_HRS="06" +LBC_UPDATE_INTVL_HRS="6" + +DATE_FIRST_CYCL="20190701" +DATE_LAST_CYCL="20190701" +CYCL_HRS=( "00" ) + +EXTRN_MDL_NAME_ICS="FV3GFS" +EXTRN_MDL_NAME_LBCS="FV3GFS" + +RUN_TASK_MAKE_GRID="TRUE" +RUN_TASK_MAKE_OROG="TRUE" +RUN_TASK_MAKE_SFC_CLIMO="TRUE" + diff --git a/tests/baseline_configs/config.regional_008.sh b/tests/baseline_configs/config.regional_008.sh new file mode 100644 index 000000000..0a3cbbaf4 --- /dev/null +++ b/tests/baseline_configs/config.regional_008.sh @@ -0,0 +1,103 @@ +# +# MACHINE will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +MACHINE="" +# +# ACCOUNT will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to value passed in as an argument to that script. +# +ACCOUNT="" +# +# EXPT_SUBDIR will be set by the workflow launch script (launch_FV3SAR_- +# wflow.sh) to a value obtained from the name of this file. +# +EXPT_SUBDIR="" +# +# USE_CRON_TO_RELAUNCH may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. +# +USE_CRON_TO_RELAUNCH="TRUE" +# +# CRON_RELAUNCH_INTVL_MNTS may be reset by the workflow launch script +# (launch_FV3SAR_wflow.sh) to value passed in as an argument to that +# script, but in case it is not, we give it a default value here. 
+# +CRON_RELAUNCH_INTVL_MNTS="02" + + +QUEUE_DEFAULT="batch" +QUEUE_HPSS="service" +QUEUE_FCST="batch" + +VERBOSE="TRUE" + +RUN_ENVIR="community" +PREEXISTING_DIR_METHOD="rename" + +GRID_GEN_METHOD="GFDLgrid" + +GFDLgrid_LON_T6_CTR=-97.5 +GFDLgrid_LAT_T6_CTR=38.5 +GFDLgrid_STRETCH_FAC=1.5 +GFDLgrid_RES="96" +GFDLgrid_REFINE_RATIO=2 + +#num_margin_cells_T6_left=9 +#GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_left + 1 )) +GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G="10" + +#num_margin_cells_T6_right=9 +#GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_right )) +GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G="87" + +#num_margin_cells_T6_bottom=9 +#GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G=$(( num_margin_cells_T6_bottom + 1 )) +GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G="10" + +#num_margin_cells_T6_top=9 +#GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G=$(( GFDLgrid_RES - num_margin_cells_T6_top )) +GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G="87" + +GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES="FALSE" + +DT_ATMOS="100" + +LAYOUT_X="6" +LAYOUT_Y="6" +BLOCKSIZE="26" + +QUILTING="TRUE" + +if [ "$QUILTING" = "TRUE" ]; then + WRTCMP_write_groups="1" + WRTCMP_write_tasks_per_group=$(( 1*LAYOUT_Y )) + WRTCMP_output_grid="rotated_latlon" + WRTCMP_cen_lon="${GFDLgrid_LON_T6_CTR}" + WRTCMP_cen_lat="${GFDLgrid_LAT_T6_CTR}" +# The following have not been tested... + WRTCMP_lon_lwr_left="-25.0" + WRTCMP_lat_lwr_left="-15.0" + WRTCMP_lon_upr_rght="25.0" + WRTCMP_lat_upr_rght="15.0" + WRTCMP_dlon="0.24" + WRTCMP_dlat="0.24" +fi + +USE_CCPP="TRUE" +CCPP_PHYS_SUITE="FV3_GFS_2017_gfdlmp" +FCST_LEN_HRS="06" +LBC_UPDATE_INTVL_HRS="6" + +DATE_FIRST_CYCL="20190701" +DATE_LAST_CYCL="20190701" +CYCL_HRS=( "00" ) + +EXTRN_MDL_NAME_ICS="FV3GFS" +EXTRN_MDL_NAME_LBCS="FV3GFS" + +RUN_TASK_MAKE_GRID="TRUE" +RUN_TASK_MAKE_OROG="TRUE" +RUN_TASK_MAKE_SFC_CLIMO="TRUE" + From 2290c13af22f6afdc3629795f2b5024b085faec7 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 28 Jan 2020 10:09:34 -0700 Subject: [PATCH 200/203] Use the -L flag to check if a file is a symlink instead of the -h flag (since the latter may be deprecated according to a StackOverflow post). --- ush/cmp_rundirs_ncfiles.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ush/cmp_rundirs_ncfiles.sh b/ush/cmp_rundirs_ncfiles.sh index bbe5ecd57..c65045487 100755 --- a/ush/cmp_rundirs_ncfiles.sh +++ b/ush/cmp_rundirs_ncfiles.sh @@ -23,12 +23,12 @@ function cmp_ncfiles_one_dir() { for fn in *.$fileext; do fn1="$fn" - if [ -f "$fn1" ] && [ ! -h "$fn1" ]; then # Check if regular file and not a symlink. + if [ -f "$fn1" ] && [ ! -L "$fn1" ]; then # Check if regular file and not a symlink. fn2="$dir2/$subdir/$fn" if [ -e "$fn2" ]; then # Check if file exists. - if [ -f "$fn2" ] && [ ! -h "$fn2" ]; then # Check if regular file and not a symlink. + if [ -f "$fn2" ] && [ ! -L "$fn2" ]; then # Check if regular file and not a symlink. printf "\nComparing file \"$fn\" in subdirectory \"$subdir\" ...\n" nccmp -d $fn1 $fn2 From b112e100761986a0d4d188b61916d4852f42d987 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 28 Jan 2020 11:39:38 -0700 Subject: [PATCH 201/203] Bug fix for changing directory back to original. 
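
The link_fix.sh hunk that follows moves the closing "fi" above the "cd_vrfy -" call, so the change back to the previous directory is no longer nested inside the conditional block. A minimal stand-alone sketch of that corrected pattern, using plain "cd" in place of the workflow's cd_vrfy wrapper and purely illustrative variable names:

    # Sketch only: do conditional work in another directory, then always return.
    cd "${work_dir}"
    if [ "${create_symlinks}" = "TRUE" ]; then
      ln -sf "${target}" "${symlink}"
    fi
    cd -    # outside the if-block, so it runs whether or not links were created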
--- ush/link_fix.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ush/link_fix.sh b/ush/link_fix.sh index b1d9275eb..0aff351b6 100755 --- a/ush/link_fix.sh +++ b/ush/link_fix.sh @@ -339,6 +339,8 @@ Cannot create symlink because target file (target) does not exist: target = \"${target}\"" fi done + + fi # #----------------------------------------------------------------------- # @@ -347,8 +349,6 @@ Cannot create symlink because target file (target) does not exist: #----------------------------------------------------------------------- # cd_vrfy - - - fi # #----------------------------------------------------------------------- # From d71d07d9b2e81a11d756d881b40b2cd8eb037727 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 28 Jan 2020 11:47:43 -0700 Subject: [PATCH 202/203] Add "conus_c96" as a valid value of EMC_GRID_NAME. Also, change mapping from EMC_GRID_NAME to PREDEF_GRID_NAME so that if EMC_GRID_NAME is set to "conus" or "conus_c96 map", PREDEF_GRID_NAME gets set to "EMC_CONUS_3km" and "EMC_CONUS_coarse", respectively. --- ush/setup.sh | 63 +++++++++++++++++++++++++++++++---------- ush/valid_param_vals.sh | 2 +- 2 files changed, 49 insertions(+), 16 deletions(-) diff --git a/ush/setup.sh b/ush/setup.sh index 3df4c4e9d..1d418b659 100755 --- a/ush/setup.sh +++ b/ush/setup.sh @@ -395,12 +395,12 @@ case "${EMC_GRID_NAME}" in PREDEF_GRID_NAME="EMC_AK" ;; "conus") - PREDEF_GRID_NAME="EMC_CONUS_coarse" - ;; - "conus_orig") PREDEF_GRID_NAME="EMC_CONUS_3km" ;; - "guam"|"hi"|"pr") + "conus_c96") + PREDEF_GRID_NAME="EMC_CONUS_coarse" + ;; + "conus_orig"|"guam"|"hi"|"pr") print_err_msg_exit "\ A predefined grid (PREDEF_GRID_NAME) has not yet been defined for this EMC grid (EMC_GRID_NAME): @@ -902,25 +902,58 @@ LOGDIR="${EXPTDIR}/log" #----------------------------------------------------------------------- # if [ "${RUN_ENVIR}" = "nco" ]; then + FIXam="${FIXrrfs}/fix_am" + FIXsar="${FIXrrfs}/fix_sar/${EMC_GRID_NAME}" + COMROOT="$PTMP/com" # -# Important note: -# In "nco" mode, it is assumed that in the build step, a symlink is cre- -# ated at ${FIXrrfs}/fix_sar whose target is the system disk under -# which the fixed grid, orography, and surface climatology files are -# located. For example, from the ${FIXrrfs}/fix_sar directory, an -# "ls -alF fix_sar" might show +# In NCO mode (i.e. if RUN_ENVIR set to "nco"), it is assumed that before +# running the experiment generation script, the path specified in FIXam +# already exists and is either itself the directory in which various fixed +# files (but not the ones containing the regional grid and the orography +# and surface climatology on that grid) are located, or it is a symlink +# to such a directory. Resolve any symlinks in the path specified by +# FIXam and check that this is the case. # -# > ls -alF fix_sar -# lrwxrwxrwx 1 Gerard.Ketefian det 62 Dec 20 15:43 fix_sar -> /scratch2/NCEPDEV/fv3-cam/emc.campara/fix_fv3cam/fix_sar/ + path_resolved=$( readlink -m "$FIXam" ) + if [ ! -d "${path_resolved}" ]; then + print_err_msg_exit "\ +In NCO mode (RUN_ENVIR set to \"nco\"), the path specified by FIXam after +resolving all symlinks (path_resolved) must point to an existing directory +before an experiment can be generated. In this case, path_resolved is +not a directory or does not exist: + RUN_ENVIR = \"${RUN_ENVIR}\" + FIXam = \"$FIXam\" + path_resolved = \"${path_resolved}\" +Please correct and then rerun the experiment generation script." 
+ fi # -# FIXsar="${FIXrrfs}/fix_sar" - FIXsar="${FIXrrfs}/fix_sar/${EMC_GRID_NAME}" - COMROOT="$PTMP/com" +# In NCO mode (i.e. if RUN_ENVIR set to "nco"), it is assumed that before +# running the experiment generation script, the path specified in FIXsar +# already exists and is either itself the directory in which the fixed +# grid, orography, and surface climatology files are located, or it is a +# symlink to such a directory. Resolve any symlinks in the path specified +# by FIXsar and check that this is the case. +# + path_resolved=$( readlink -m "$FIXsar" ) + if [ ! -d "${path_resolved}" ]; then + print_err_msg_exit "\ +In NCO mode (RUN_ENVIR set to \"nco\"), the path specified by FIXsar after +resolving all symlinks (path_resolved) must point to an existing directory +before an experiment can be generated. In this case, path_resolved is +not a directory or does not exist: + RUN_ENVIR = \"${RUN_ENVIR}\" + FIXsar = \"$FIXsar\" + path_resolved = \"${path_resolved}\" +Please correct and then rerun the experiment generation script." + fi + else + FIXam="${EXPTDIR}/fix_am" FIXsar="${EXPTDIR}/fix_sar" COMROOT="" + fi # #----------------------------------------------------------------------- diff --git a/ush/valid_param_vals.sh b/ush/valid_param_vals.sh index 9de33e4e6..129d48ee1 100644 --- a/ush/valid_param_vals.sh +++ b/ush/valid_param_vals.sh @@ -7,7 +7,7 @@ valid_vals_PREDEF_GRID_NAME=( \ "GSD_HRRR_AK_3km" "GSD_HRRR_AK_50km" \ "GSD_HRRR3km" "GSD_HRRR13km" "GSD_HRRR25km" \ "GSD_RAP13km" ) -valid_vals_EMC_GRID_NAME=("ak" "conus" "conus_orig" "guam" "hi" "pr") +valid_vals_EMC_GRID_NAME=("ak" "conus" "conus_c96" "conus_orig" "guam" "hi" "pr") valid_vals_USE_CCPP=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no") valid_vals_CCPP_PHYS_SUITE=("FV3_GFS_2017_gfdlmp" "FV3_GSD_v0" "FV3_GSD_SAR") valid_vals_GFDLgrid_RES=("48" "96" "192" "384" "768" "1152" "3072") From 22915e15109b7516781afb2be9222d0e4f9851f4 Mon Sep 17 00:00:00 2001 From: gerard ketefian Date: Tue, 28 Jan 2020 16:03:14 -0700 Subject: [PATCH 203/203] Update the configuration file for generating experiments in NCO mode on a coarse conus grid. Still need resolving of certain inconsistencies between EMC and community workflow to get the NCO-mode test to work (currently fails at the beginning of the forecast task due to inconsistency in ozone physics). --- ush/config.nco.sh | 29 +++++++++++++++++++++++++---- 1 file changed, 25 insertions(+), 4 deletions(-) diff --git a/ush/config.nco.sh b/ush/config.nco.sh index 3b37a43aa..ef63ec9b8 100644 --- a/ush/config.nco.sh +++ b/ush/config.nco.sh @@ -11,8 +11,9 @@ VERBOSE="TRUE" RUN_ENVIR="nco" PREEXISTING_DIR_METHOD="rename" -EMC_GRID_NAME="conus" # For now, this is maps to PREDEF_GRID_NAME="EMC_CONUS_coarse". +EMC_GRID_NAME="conus_c96" # For now, this is maps to PREDEF_GRID_NAME="EMC_CONUS_coarse". GRID_GEN_METHOD="GFDLgrid" + QUILTING="TRUE" USE_CCPP="TRUE" CCPP_PHYS_SUITE="FV3_GFS_2017_gfdlmp" @@ -26,12 +27,32 @@ CYCL_HRS=( "18" ) EXTRN_MDL_NAME_ICS="FV3GFS" EXTRN_MDL_NAME_LBCS="FV3GFS" +# +# In NCO mode, the following don't need to be explicitly set to "FALSE" +# in this configuration file because the experiment generation script +# will do this (along with printing out an informational message). +# #RUN_TASK_MAKE_GRID="FALSE" #RUN_TASK_MAKE_OROG="FALSE" #RUN_TASK_MAKE_SFC_CLIMO="FALSE" RUN="an_experiment" -COMINgfs="/scratch1/NCEPDEV/hwrf/noscrub/hafs-input/COMGFS" # Path to files from external model (FV3GFS). -#STMP="/path/to/temporary/directory/stmp" # Path to temporary directory STMP. 
-STMP="/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/stmp" # Path to temporary directory STMP. +COMINgfs="/scratch1/NCEPDEV/hwrf/noscrub/hafs-input/COMGFS" # Path to directory containing files from the external model (FV3GFS). +STMP="/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/NCO_dirs/stmp" # Path to directory STMP that mostly contains input files. +PTMP="/scratch2/BMC/det/Gerard.Ketefian/UFS_CAM/NCO_dirs/ptmp" # Path to directory PTMP in which the experiment's output files will be placed. + +# +# In NCO mode, the user must manually (e.g. after doing the build step) +# create the symlink "${FIXrrfs}/fix_sar" that points to EMC's FIXsar +# directory on the machine. For example, on hera, the symlink's target +# needs to be +# +# /scratch2/NCEPDEV/fv3-cam/emc.campara/fix_fv3cam/fix_sar +# +# The experiment generation script will then set FIXsar to +# +# FIXsar="${FIXrrfs}/fix_sar/${EMC_GRID_NAME}" +# +# where EMC_GRID_NAME has the value set above. +#
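
As a hypothetical illustration of the manual step described above (the target shown is the hera path quoted in the comment; on other machines the target differs, and FIXrrfs is the workflow's fix directory referenced in setup.sh):

    # One-time manual step before generating an NCO-mode experiment (hera example):
    ln -sf /scratch2/NCEPDEV/fv3-cam/emc.campara/fix_fv3cam/fix_sar "${FIXrrfs}/fix_sar"
    # With EMC_GRID_NAME="conus_c96" as set above, setup.sh will then resolve
    # FIXsar="${FIXrrfs}/fix_sar/conus_c96".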