diff --git a/.github/workflows/ci_run_scm_DEPHY.yml b/.github/workflows/ci_run_scm_DEPHY.yml index a07f1698a..0f0463c77 100644 --- a/.github/workflows/ci_run_scm_DEPHY.yml +++ b/.github/workflows/ci_run_scm_DEPHY.yml @@ -1,6 +1,6 @@ name: CI test to run the SCM with DEPHY v1 data -on: [push, pull_request] +on: [pull_request,workflow_dispatch] jobs: run-scm-DEPHY: diff --git a/.github/workflows/ci_run_scm_ufs_replay.yml b/.github/workflows/ci_run_scm_ufs_replay.yml new file mode 100644 index 000000000..0bd6a161d --- /dev/null +++ b/.github/workflows/ci_run_scm_ufs_replay.yml @@ -0,0 +1,75 @@ +name: CI test to create SCM UFS-replay cases from UWM regression tests + +on: [pull_request,workflow_dispatch] + +jobs: + run_scm_ufs_replay: + + # The type of runner that the job will run on + runs-on: ubuntu-22.04 + defaults: + run: + shell: bash -el {0} + + # Environmental variables + env: + dir_rt_cache: /home/runner/ufs_rts + SCM_ROOT: ${{ github.workspace }} + + steps: + + ####################################################################################### + # Checkout SCM code, setup python. + ####################################################################################### + + - name: Checkout SCM. + uses: actions/checkout@v3 + + - name: Initialize SCM submodules. + run: git submodule update --init --recursive + + - name: Update system packages. + run: sudo apt-get update + + - name: Cache conda + uses: actions/cache@v3 + with: + path: ~/conda_pkgs_dir + key: conda-pkgs + + - name: Setup python. 
+ uses: conda-incubator/setup-miniconda@v2 + with: + activate-environment: env_ufsreplay + environment-file: environment-ufsreplay.yml + use-only-tar-bz2: true + auto-activate-base: true + auto-update-conda: true + + ####################################################################################### + # Create UFS-replay case for SCM using UWM Regression Tests + ####################################################################################### + + - name: Cache UWM regression test output. + uses: actions/cache@v3 + with: + path: ${dir_rt_cache} + key: ufs-rt-files + + - name: Download UWM regression test output from NCAR-DTC FTP site, if not cached. + run: | + if test ! -d "${dir_rt_cache}"; then + mkdir -p ${dir_rt_cache} && cd ${dir_rt_cache} + wget -q ftp://ftp.rap.ucar.edu:/pub/ccpp-scm/ufs_rts_scmreplay_ci.tar + tar -xvf ufs_rts_scmreplay_ci.tar + ls ${dir_rt_cache} + fi + + - name: Create UFS-replay case. + run: | + cd ${SCM_ROOT}/scm/etc/scripts/ + ./UFS_forcing_ensemble_generator.py -d ${dir_rt_cache}/ --C_RES 192 -dt 360 -n control_c192 -lons 300 -lats 34 -sc + + ####################################################################################### + # Done + ####################################################################################### \ No newline at end of file diff --git a/.github/workflows/ci_scm_ccpp_prebuild.yml b/.github/workflows/ci_scm_ccpp_prebuild.yml index 40beaa2c0..bb9e3bd17 100644 --- a/.github/workflows/ci_scm_ccpp_prebuild.yml +++ b/.github/workflows/ci_scm_ccpp_prebuild.yml @@ -1,6 +1,6 @@ name: CI test to run SCM ccpp_prebuild script -on: [push, pull_request] +on: [push, pull_request, workflow_dispatch] jobs: build-linux: diff --git a/environment-ufsreplay.yml b/environment-ufsreplay.yml new file mode 100644 index 000000000..fc76fe24b --- /dev/null +++ b/environment-ufsreplay.yml @@ -0,0 +1,10 @@ +name: env_ufsreplay + +dependencies: + - conda-forge::python=3.8.5 + - conda-forge::netcdf4 + - conda-forge::f90nml + - 
conda-forge::xarray + - conda-forge::numpy + - conda-forge::shapely + - conda-forge::xesmf diff --git a/environment.yml b/environment.yml new file mode 100644 index 000000000..02b6eb2f4 --- /dev/null +++ b/environment.yml @@ -0,0 +1,6 @@ +name: scm_py37 + +dependencies: + - conda-forge::python=3.7 + - conda-forge::netcdf4 + - conda-forge::f90nml diff --git a/scm/doc/TechGuide/chap_cases.tex b/scm/doc/TechGuide/chap_cases.tex index eacb6123b..e12b75ead 100644 --- a/scm/doc/TechGuide/chap_cases.tex +++ b/scm/doc/TechGuide/chap_cases.tex @@ -132,7 +132,7 @@ \section{Included Cases} \item UFS initial conditions for 38.1 N, 98.5 W (central Kansas) for 00Z on Oct. 3, 2016 with Noah variables on the C96 FV3 grid (\execout{fv3\_model\_point\_noah.nc}) \item UFS initial conditions for 38.1 N, 98.5 W (central Kansas) for 00Z on Oct. 3, 2016 with NoahMP variables on the C96 FV3 grid (\execout{fv3\_model\_point\_noahmp.nc}) \end{itemize} -See \ref{sec:UFS ICs} for information on how to generate these files for other locations and dates, given appropriate UFS Atmosphere initial conditions. +See \ref{sec:UFSreplay} for information on how to generate these files for other locations and dates, given appropriate UFS Atmosphere initial conditions and output. \section{How to set up new cases} @@ -196,15 +196,36 @@ \section{Using other LASSO cases} \item Create a new case configuration file (or copy and modify an existing one) in \execout{ccpp-scm/scm/etc/case\_config}. Be sure that the \execout{case\_name} variable points to the newly created/processed case input file from above. \end{enumerate} -\section{Using UFS Initial Conditions} -\label{sec:UFS ICs} +\section{Using UFS Output to Create SCM Cases: UFS-Replay} +\label{sec:UFSreplay} + + +\subsection{Python Dependencies} +\label{subsection: pydepend} +The scripts here require a few python packages that may not be found by default in all python installations. 
There is a YAML file with the python environment needed to run the script in \execout{ccpp-scm/environment-ufsreplay.yml}. To create and activate this environment using conda: + +Create environment (only once): + +\execout{> conda env create -f environment-ufsreplay.yml} + +This will create the conda environment \execout{env\_ufsreplay} + + +Activate environment: + +\execout{> conda activate env\_ufsreplay} + + +\subsection{UFS\_IC\_generator.py} +\label{subsection: ufsicgenerator} +A script exists in \execout{scm/etc/scripts/UFS\_IC\_generator.py} to read in UFS history (output) files and their initial conditions to generate a SCM case input data file, in DEPHY format. -A script exists in \execout{scm/etc/scripts/UFS\_IC\_generator.py} to read in UFS Atmosphere cold start initial conditions and generate a case input data file that the SCM can use. Note that the script requires a few python packages that may not be found by default in all python installations: \exec{argparse}, \exec{fnmatch}, \exec{logging}, \exec{NetCDF4}, \exec{numpy}, \exec{shapely}, \exec{f90nml}, and \exec{re}. 
\begin{lstlisting}[language=bash] ./UFS_IC_generator.py [-h] (-l LOCATION LOCATION | -ij INDEX INDEX) -d -DATE -i IN_DIR -g GRID_DIR [-t {1,2,3,4,5,6}] -[-a AREA] -n CASE_NAME [-oc] +DATE -i IN_DIR -g GRID_DIR -f FORCING_DIR -n +CASE_NAME [-t {1,2,3,4,5,6,7}] [-a AREA] [-oc] +[-lam] [-sc] [-near] \end{lstlisting} Mandatory arguments: @@ -214,10 +235,11 @@ \section{Using UFS Initial Conditions} \item -l 261.51 38.2 (two floating point values separated by a space) \item -ij 8 49 (two integer values separated by a space; this option must also use the \exec{-{}-tile (-t)} argument to specify the tile number) \end{itemize} -\item \exec{-{}-date (-d)} YYYYMMDDHHMM: date corresponding to the UFS initial conditions -\item \exec{-{}-in\_dir (-i)}: path to the directory containing UFS initial conditions +\item \exec{-{}-date (-d)} YYYYMMDDHHMMSS: date corresponding to the UFS initial conditions +\item \exec{-{}-in\_dir (-i)}: path to the directory containing the UFS initial conditions \item \exec{-{}-grid\_dir (-g)}: path to the directory containing the UFS supergrid files (AKA "fix" directory) -\item \exec{-{}-case\_name (-n)}: what to call the output NetCDF file +\item \exec{-{}-forcing\_dir (-f)}: path to the directory containing the UFS history files +\item \exec{-{}-case\_name (-n)}: name of case \end{enumerate} Optional arguments: @@ -225,18 +247,118 @@ \section{Using UFS Initial Conditions} \item \exec{-{}-tile (-t)}: if one already knows the correct tile for the given longitude and latitude OR one is specifying the UFS grid index (\exec{-{}-index} argument) \item \exec{-{}-area (-a)}: area of grid cell in $m^2$ (if known or different than the value calculated from the supergrid file) \item \exec{-{}-old\_chgres (-oc)}: flag if UFS initial conditions were generated using older version of chgres (global\_chgres); might be the case for pre-2018 data +\item \exec{-{}-lam (-lam)}: flag to signal that the ICs and forcing is from a limited-area model run +\item 
\exec{-{}-save\_comp (-sc)}: flag to create UFS reference file for comparison +\item \exec{-{}-use\_nearest (-near)}: flag to indicate using the nearest UFS history file gridpoint +\end{enumerate} + +\subsection{UFS\_forcing\_ensemble\_generator.py} +\label{subsection: ufsforcingensemblegenerator} +There is an additional script in \execout{scm/etc/scripts/UFS\_forcing\_ensemble\_generator.py} to create UFS-replay case(s) starting with output from UFS Weather Model (UWM) Regression Tests (RTs). + +\begin{lstlisting}[language=bash] +UFS_forcing_ensemble_generator.py [-h] -d DIR -n CASE_NAME +(-lonl LON_1 LON_2 -latl LAT_1 LAT_2 -nens NENSMEMBERS | +-lons [LON_LIST] -lats [LAT_LIST]) +[-dt TIMESTEP] [-cres C_RES] [-sdf SUITE] [-sc] [-near] +\end{lstlisting} + +Mandatory arguments: +\begin{enumerate} +\item \exec{-{}-dir (-d)}: path to UFS Regression Test output +\item \exec{-{}-case\_name (-n)}: name of cases +\item Either: (see examples below) + \begin{itemize} + \item \exec{-{}-lon\_limits (-lonl)} AND \exec{-{}-lat\_limits (-latl)} AND \exec{-{}-nensmembers (-nens)}: longitude range, latitude range, and number of cases to create + \item \exec{-{}-lon\_list (-lons)} AND \exec{-{}-lat\_list (-lats)}: longitude and latitude of cases + \end{itemize} \end{enumerate} -The following commands were used from within the \exec{scm/etc/scripts} directory to generate the example UFS Atmosphere initial condition case input file: +Optional arguments: +\begin{enumerate} +\item \exec{-{}-timestep (-dt)}: SCM timestep, in seconds +\item \exec{-{}-C\_res (-cres)}: UFS spatial resolution +\item \exec{-{}-suite (-sdf)}: CCPP suite definition file to use for ensemble +\item \exec{-{}-save\_comp (-sc)}: flag to create UFS reference file for comparison +\item \exec{-{}-use\_nearest (-near)}: flag to indicate using the nearest UFS history file gridpoint +\end{enumerate} + +Examples to run from within the \exec{scm/etc/scripts} directory to create SCM cases starting with the output from a 
UFS Weather Model regression test(s): + +On the supported platforms Cheyenne (NCAR) and Hera (NOAA), there are staged UWM RTs located at: +\begin{itemize} +\item \execout{Cheyenne /glade/scratch/epicufsrt/GMTB/CCPP-SCM/UFS\_RTs} +\item \execout{Hera /scratch1/BMC/gmtb/CCPP-SCM/UFS\_RTs} +\end{itemize} + +\subsection{Example 1: UFS-replay for single point} +\label{subsection: example1} + +UFS regression test, \execout{control\_c192}, for single point. +\begin{lstlisting}[language=bash] +./UFS_forcing_ensemble_generator.py -d /glade/scratch/epicufsrt/GMTB/CCPP-SCM/UFS_RTs/control_c192/ -sc --C_RES 192 -dt 360 -n control_c192 -lons 300 -lats 34 +\end{lstlisting} + +Upon successful completion of the script, the command to run the case(s) will print to the screen. For example, + +\begin{lstlisting}[language=bash] +./run_scm.py --npz_type gfs --file scm_ufsens_control_c192.py --timestep 360 +\end{lstlisting} + +The file \exec{scm\_ufsens\_control\_c192.py} is created in \exec{ccpp-scm/scm/bin/}, where the SCM run script is to be executed. + +\subsection{Example 2: UFS-replay for list of points} +\label{subsection: example2} + +UFS regression test, \execout{control\_c384}, for multiple points. +\begin{lstlisting}[language=bash] +./UFS_forcing_ensemble_generator.py -d /glade/scratch/epicufsrt/GMTB/CCPP-SCM/UFS_RTs/control_c384/ -sc --C_RES 384 -dt 225 -n control_c384 -lons 300 300 300 300 -lats 34 35 35 37 +\end{lstlisting} + +Upon successful completion of the script, the command to run the case(s) will print to the screen. For example, + +\begin{lstlisting}[language=bash] +./run_scm.py --npz_type gfs --file scm_ufsens_control_c384.py --timestep 225 +\end{lstlisting} + +The file \exec{scm\_ufsens\_control\_c384.py} contains \exec{ALL} of the cases created. Each case created will have the naming convention \exec{case\_name\_nXXX}, where the suffix \exec{XXX} is the case number from 0 to the number of points provided. 
The contents of the file should look like: \begin{lstlisting}[language=bash] -./UFS_IC_generator.py -l 261.51 38.2 -d 201610030000 -i ../../data/raw_case_input/FV3_C96_example_ICs -g ../../data/raw_case_input/FV3_C96_example_ICs -n fv3_model_point_noah -oc +run_list = [{"case": "control_c384_n000", "suite": "SCM_GFS_v16"}, + {"case": "control_c384_n001", "suite": "SCM_GFS_v16"}, + {"case": "control_c384_n002", "suite": "SCM_GFS_v16"}, + {"case": "control_c384_n003", "suite": "SCM_GFS_v16"}] \end{lstlisting} -Note that the \exec{-{}-in\_dir (-i)} and \exec{-{}-grid\_dir (-g)} arguments are the same in this case (since the supergrid files were copied to the same directory as the initial conditions files for point of example), but they will not in general be the same. Also note that the default behavior of the script is to expect that the NetCDF initial condition files were generated from \execout{chgres\_cube} and not the older \execout{global\_chgres}. If they were generated from the older version (which is likely for pre-2018 data), they will have a slightly different format requiring the \exec{-{}-old\_chgres (-oc)} option to be set in order for the files to be read properly by the script. If you try without the \exec{-{}-old\_chgres (-oc)} flag and receive a ``IndexError: t not found'' error, try the script again with the flag. +\subsection{Example 3: UFS-replay for an ensemble of points} +\label{subsection: example3} + +UFS regression test, \execout{control\_p8}, for an ensemble (10) of randomly selected points over a specified longitude ($300-320^oW$) and latitude ($40-50^oN$) range + +But first, to use the \execout{control\_p8} test we need to rerun the regression test to generate UFS history files with a denser and constant output interval. 
First, in \execout{control\_p8/model\_configure}, change \exec{-{}-output\_fh} to \execout{"interval -1"}, where \execout{interval} is the UFS history file output frequency (in hours), see \href{https://ufs-weather-model.readthedocs.io/en/latest/InputsOutputs.html}{UFS Weather Model Users Guide} for more details. + +For the purposes of this example the \execout{control\_p8} test has already been rerun, but if starting from your own UWM RTs, you can rerun the UWM regression test, on Cheyenne for example, by running the following command in the RT directory: \execout{qsub job\_card} -In addition to the case input files generated by this script, one will need appropriate case configuration files. Make sure that the \exec{model\_ics} variable is set to \exec{.true.} and that the \exec{C\_RES}, \exec{year}, \exec{month}, \exec{day}, and \exec{hour} are all set to the appropriate values that match the UFS Atmosphere initial conditions used. See \execout{scm/etc/case\_config/fv3\_model\_point\_noah.nml} for an example. +Now the cases can be generated with the following command: +\begin{lstlisting}[language=bash] +./UFS_forcing_ensemble_generator.py -d /glade/scratch/epicufsrt/GMTB/CCPP-SCM/UFS_RTs/control_p8/ -sc --C_RES 96 -dt 720 -n control_p8 -lonl 300 320 -latl 40 50 -nens 10 -sdf SCM_GFS_v17_p8 +\end{lstlisting} + +Upon successful completion of the script, the command to run the case(s) will print to the screen. For example, + +\begin{lstlisting}[language=bash] +./run_scm.py --npz_type gfs --file scm_ufsens_control_p8.py --timestep 720 +\end{lstlisting} -Running the model is the same as for observational field campaign cases: +The file \exec{scm\_ufsens\_control\_p8.py} contains ten cases (n000-n009) to be run. 
The contents of the file should look like: \begin{lstlisting}[language=bash] -./run_scm.py -c fv3_model_point_noah -s SCM_GFS_v16 +run_list = [{"case": "control_p8_n000", "suite": "SCM_GFS_v17_p8"}, + {"case": "control_p8_n001", "suite": "SCM_GFS_v17_p8"}, + {"case": "control_p8_n002", "suite": "SCM_GFS_v17_p8"}, + {"case": "control_p8_n003", "suite": "SCM_GFS_v17_p8"}, + {"case": "control_p8_n004", "suite": "SCM_GFS_v17_p8"}, + {"case": "control_p8_n005", "suite": "SCM_GFS_v17_p8"}, + {"case": "control_p8_n006", "suite": "SCM_GFS_v17_p8"}, + {"case": "control_p8_n007", "suite": "SCM_GFS_v17_p8"}, + {"case": "control_p8_n008", "suite": "SCM_GFS_v17_p8"}, + {"case": "control_p8_n009", "suite": "SCM_GFS_v17_p8"}] \end{lstlisting} diff --git a/scm/doc/TechGuide/main.pdf b/scm/doc/TechGuide/main.pdf index 13ae9ed1a..0f50aceba 100644 Binary files a/scm/doc/TechGuide/main.pdf and b/scm/doc/TechGuide/main.pdf differ diff --git a/scm/etc/scripts/UFS_IC_generator.py b/scm/etc/scripts/UFS_IC_generator.py index b6db23e87..6fdbd13fd 100755 --- a/scm/etc/scripts/UFS_IC_generator.py +++ b/scm/etc/scripts/UFS_IC_generator.py @@ -11,6 +11,9 @@ import math import f90nml import re +import fv3_remap +import xesmf +from datetime import datetime, timedelta ############################################################################### # Global settings # @@ -20,11 +23,18 @@ earth_radius = 6371000.0 #m rdgas = 287.05 rvgas = 461.50 +cp = 1004.6 zvir = rvgas/rdgas - 1. 
+rocp = rdgas/cp grav = 9.80665 +deg_to_rad = math.pi/180.0 +kappa = rdgas/cp +p0 = 100000.0 missing_value = -9999.0 #9.99e20 +n_lam_halo_points = 3 + missing_variable_snow_layers = 3 missing_variable_soil_layers = 4 missing_variable_ice_layers = 2 @@ -32,8 +42,8 @@ # Path to the directory containing processed case input files PROCESSED_CASE_DIR = '../../data/processed_case_input' -# Path to the directory containing NoahMP table files (need MPTABLE.TBL and SOILPARM.TBL) -NOAHMP_TABLES_DIR = '../../data/raw_case_input/NoahMP_tables' +# Path to the directory containing comparison data files +COMPARISON_DATA_DIR = '../../data/comparison_data' # For developers: set logging level to DEBUG for additional output #LOGLEVEL = logging.DEBUG @@ -45,34 +55,39 @@ parser = argparse.ArgumentParser() group1 = parser.add_mutually_exclusive_group(required=True) -group1.add_argument('-l', '--location', help='longitude and latitude in degress E and N, respectively, separated by a space', nargs=2, type=float) -group1.add_argument('-ij','--index', help='i,j indices within the tile (if known - bypasses search for closest model point to lon/lat location)', nargs=2, type=int) -parser.add_argument('-d', '--date', help='date corresponding to initial conditions in YYYYMMDDHHMM format', required=True) -parser.add_argument('-i', '--in_dir', help='input directory path containing FV3 input files', required=True) -parser.add_argument('-g', '--grid_dir', help='directory path containing FV3 tile supergrid files', required=True) -parser.add_argument('-t', '--tile', help='tile of desired point (if known - bypasses tile search if present)', type=int, choices=range(1,7)) -parser.add_argument('-a', '--area', help='area of grid cell in m^2', type=float) -parser.add_argument('-mp','--noahmp', help='flag to generate cold-start ICs for NoahMP LSM from Noah LSM ICs', action='store_true') -parser.add_argument('-n', '--case_name', help='name of case', required=True) -parser.add_argument('-oc','--old_chgres', 
help='flag to denote that the initial conditions use an older data format (pre-chgres_cube)', action='store_true') - -############################################################################### -# Functions and subroutines # -############################################################################### +group1.add_argument('-l', '--location', help='longitude and latitude in degrees E and N, respectively, separated by a space', nargs=2, type=float) +group1.add_argument('-ij', '--index', help='i,j indices within the tile (if known - bypasses search for closest model point to lon/lat location)', nargs=2, type=int) +parser.add_argument('-d', '--date', help='date corresponding to initial conditions in YYYYMMDDHHMMSS format', required=False) +parser.add_argument('-i', '--in_dir', help='path to the directory containing the UFS initial conditions', required=True) +parser.add_argument('-g', '--grid_dir', help='path to the directory containing the UFS supergrid files (AKA "fix" directory)', required=True) +parser.add_argument('-f', '--forcing_dir', help='path to the directory containing the UFS history files', required=True) +parser.add_argument('-n', '--case_name', help='name of case', required=True) +parser.add_argument('-t', '--tile', help='tile of desired point (if known - bypasses tile search if present)', type=int, choices=range(1,8)) +parser.add_argument('-a', '--area', help='area of grid cell in m^2', type=float) +parser.add_argument('-oc', '--old_chgres', help='flag to denote that the initial conditions use an older data format (pre-chgres_cube)', action='store_true') +parser.add_argument('-lam', '--lam', help='flag to signal that the ICs and forcing is from a limited-area model run', action='store_true') +parser.add_argument('-sc', '--save_comp', help='flag to create UFS reference file for comparison', action='store_true') +parser.add_argument('-near','--use_nearest', help='flag to indicate using the nearest UFS history file gridpoint',action='store_true') 
+######################################################################################## +# +######################################################################################## def parse_arguments(): """Parse command line arguments""" - args = parser.parse_args() - location = args.location - index = args.index - date = args.date - in_dir = args.in_dir - grid_dir = args.grid_dir - tile = args.tile - area = args.area - case_name = args.case_name - noahmp = args.noahmp - old_chgres = args.old_chgres + args = parser.parse_args() + location = args.location + index = args.index + date = args.date + in_dir = args.in_dir + grid_dir = args.grid_dir + forcing_dir = args.forcing_dir + tile = args.tile + area = args.area + case_name = args.case_name + old_chgres = args.old_chgres + lam = args.lam + save_comp = args.save_comp + use_nearest = args.use_nearest #validate args if not os.path.exists(in_dir): @@ -92,24 +107,39 @@ def parse_arguments(): raise Exception(message) date_dict = {} - if len(date) != 12: - message = 'The entered date {0} does not have the 12 characters expected in the format YYYYMMDDHHMM'.format(date) - logging.critical(message) - raise Exception(message) - else: - date_dict["year"] = np.int(date[0:4]) - date_dict["month"] = np.int(date[4:6]) - date_dict["day"] = np.int(date[6:8]) - date_dict["hour"] = np.int(date[8:10]) - date_dict["minute"] = np.int(date[10:]) - - return (location, index, date_dict, in_dir, grid_dir, tile, area, noahmp, case_name, old_chgres) + if date: + if len(date) != 14: + message = 'The entered date {0} does not have the 14 characters expected in the format YYYYMMDDHHMMSS'.format(date) + logging.critical(message) + raise Exception(message) + else: + date_dict["year"] = int(date[0:4]) + date_dict["month"] = int(date[4:6]) + date_dict["day"] = int(date[6:8]) + date_dict["hour"] = int(date[8:10]) + date_dict["minute"] = int(date[10:12]) + date_dict["second"] = int(date[12:]) + + if tile: + if (not lam and tile > 6): + message = 
'The entered tile {0} is not compatible with the global cubed-sphere grid'.format(tile) + logging.critical(message) + raise Exception(message) + + return (location, index, date_dict, in_dir, grid_dir, forcing_dir, tile, \ + area, case_name, old_chgres, lam, save_comp, use_nearest) +######################################################################################## +# +######################################################################################## def setup_logging(): """Sets up the logging module.""" logging.basicConfig(format='%(levelname)s: %(message)s', level=LOGLEVEL) - -def find_tile(loc, dir): + +######################################################################################## +# +######################################################################################## +def find_tile(loc, dir, lam): """Find the FV3 tile with the given lon/lat""" #returns the integer tile number @@ -133,8 +163,8 @@ def find_tile(loc, dir): for f_name in grid_fnames: if not found_tile: nc_file = Dataset('{0}/{1}'.format(dir,f_name)) - longitude = np.array(nc_file['x']).swapaxes(0,1) - latitude = np.array(nc_file['y']).swapaxes(0,1) + longitude = np.asarray(nc_file['x']).swapaxes(0,1) + latitude = np.asarray(nc_file['y']).swapaxes(0,1) nc_file.close() adj_long = False @@ -149,20 +179,20 @@ def find_tile(loc, dir): edge_1_lon = longitude[0,:] edge_1_lat = latitude[0,:] - edge_1 = zip(edge_1_lon, edge_1_lat) + edge_1 = list(zip(edge_1_lon, edge_1_lat)) edge_2_lon = longitude[:,-1] edge_2_lat = latitude[:,-1] - edge_2 = zip(edge_2_lon, edge_2_lat) + edge_2 = list(zip(edge_2_lon, edge_2_lat)) edge_3_lon = longitude[-1,:] edge_3_lat = latitude[-1,:] - edge_3 = zip(edge_3_lon, edge_3_lat) + edge_3 = list(zip(edge_3_lon, edge_3_lat)) edge_3.reverse() #need to reverse the direction of this edge to form a regular polygon edge_4_lon = longitude[:,0] edge_4_lat = latitude[:,0] - edge_4 = zip(edge_4_lon, edge_4_lat) + edge_4 = list(zip(edge_4_lon, edge_4_lat)) 
edge_4.reverse() #need to reverse the direction of this edge to form a regular polygon polygon_points = edge_1 + edge_2 + edge_3 + edge_4 @@ -178,28 +208,44 @@ def find_tile(loc, dir): loc_point = Point(temp_loc) if tile_polygon.contains(loc_point): found_tile = True - return f_name.split('tile')[1].split('.nc')[0] + if (lam): + return f_name.split('tile')[1].split('.halo')[0] + else: + return f_name.split('tile')[1].split('.nc')[0] else: polar_tile_filenames.append(f_name) #if the tile hasn't been found by this point, it must be contained within a polar tile for f_name in polar_tile_filenames: nc_file = Dataset('{0}/{1}'.format(dir,f_name)) - latitude = np.array(nc_file['y']).swapaxes(0,1) + latitude = np.asarray(nc_file['y']).swapaxes(0,1) nc_file.close() #if the sign of the mean latitude of the tile is the same as that of the point, the tile has been found if np.sign(np.mean(latitude)) == np.sign(loc[1]): found_tile = True - return f_name.split('tile')[1].split('.nc')[0] + if (lam): + return f_name.split('tile')[1].split('.halo')[0] + else: + return f_name.split('tile')[1].split('.nc')[0] return -1 -def find_loc_indices(loc, dir, tile): +######################################################################################## +# +######################################################################################## +def find_loc_indices(loc, dir, tile, lam): """Find the nearest neighbor FV3 grid point given a lon/lat pair and the tile number""" #returns the indices of the nearest neighbor point in the given tile, the lon/lat of the nearest neighbor, #and the distance (m) from the given point to the nearest neighbor grid cell - filename_pattern = '*grid.tile{0}.nc'.format(tile) + if (tile >= 0): + if (lam): + filename_pattern = '*grid.tile7.halo{}.nc'.format(n_lam_halo_points) + else: + filename_pattern = '*grid.tile{0}.nc'.format(tile) + else: + filename_pattern = 'atmf000.nc' + for f_name in os.listdir(dir): if fnmatch.fnmatch(f_name, filename_pattern): 
filename = f_name @@ -209,13 +255,29 @@ def find_loc_indices(loc, dir, tile): raise Exception(message) nc_file = Dataset('{0}/{1}'.format(dir,filename)) - #read in supergrid longitude and latitude - lon_super = np.array(nc_file['x']) #[lat,lon] or [y,x] #.swapaxes(0,1) - lat_super = np.array(nc_file['y']) #[lat,lon] or [y,x] #.swapaxes(0,1) - #get the longitude and latitude data for the grid centers by slicing the supergrid - #and taking only odd-indexed values - longitude = lon_super[1::2,1::2] - latitude = lat_super[1::2,1::2] + + if (tile >= 0): + #read in supergrid longitude and latitude + lon_super = np.asarray(nc_file['x']) #[lat,lon] or [y,x] #.swapaxes(0,1) + lat_super = np.asarray(nc_file['y']) #[lat,lon] or [y,x] #.swapaxes(0,1) + if (lam): + #strip ghost/halo points and return central (A-grid) points + #assuming n_lam_halo_points + lon_super_no_halo = lon_super[2*n_lam_halo_points:lon_super.shape[0]-2*n_lam_halo_points,2*n_lam_halo_points:lon_super.shape[1]-2*n_lam_halo_points] + lat_super_no_halo = lat_super[2*n_lam_halo_points:lat_super.shape[0]-2*n_lam_halo_points,2*n_lam_halo_points:lat_super.shape[1]-2*n_lam_halo_points] + #get the longitude and latitude data for the grid centers by slicing the supergrid + #and taking only odd-indexed values + longitude = lon_super_no_halo[1::2,1::2] + latitude = lat_super_no_halo[1::2,1::2] + else: + #get the longitude and latitude data for the grid centers by slicing the supergrid + #and taking only odd-indexed values + longitude = lon_super[1::2,1::2] + latitude = lat_super[1::2,1::2] + else: + longitude = np.asarray(nc_file['lon']) + latitude = np.asarray(nc_file['lat']) + nc_file.close() adj_long = False @@ -233,12 +295,12 @@ def find_loc_indices(loc, dir, tile): eucl_dist = np.zeros((longitude.shape[0],longitude.shape[1])) #get the Cartesian location of the given point - cart_loc = np.array(sph2cart(math.radians(temp_loc[0]), math.radians(temp_loc[1]), earth_radius)) + cart_loc = 
np.asarray(sph2cart(math.radians(temp_loc[0]), math.radians(temp_loc[1]), earth_radius)) for i in range(len(longitude)): for j in range(len(longitude[i])): #get the Cartesian location of all grid points - cart_cell = np.array(sph2cart(math.radians(longitude[i,j]), math.radians(latitude[i,j]), earth_radius)) + cart_cell = np.asarray(sph2cart(math.radians(longitude[i,j]), math.radians(latitude[i,j]), earth_radius)) #calculate the euclidean distance from the given point to the current grid cell eucl_dist[i,j] = np.linalg.norm(cart_loc - cart_cell) @@ -248,10 +310,17 @@ def find_loc_indices(loc, dir, tile): return (i,j,longitude[i,j]%360.0, latitude[i,j], eucl_dist[i,j]) -def find_lon_lat_of_indices(indices, dir, tile): +######################################################################################## +# +######################################################################################## +def find_lon_lat_of_indices(indices, dir, tile, lam): """Find the longitude and latitude of the given indices within the given tile.""" - filename_pattern = '*grid.tile{0}.nc'.format(tile) + if (lam): + filename_pattern = '*grid.tile{0}.halo{1}.nc'.format(tile, n_lam_halo_points) + else: + filename_pattern = '*grid.tile{0}.nc'.format(tile) + for f_name in os.listdir(dir): if fnmatch.fnmatch(f_name, filename_pattern): filename = f_name @@ -261,17 +330,73 @@ def find_lon_lat_of_indices(indices, dir, tile): raise Exception(message) nc_file = Dataset('{0}/{1}'.format(dir,filename)) + #read in supergrid longitude and latitude - lon_super = np.array(nc_file['x']) #[lat,lon] or [y,x] #.swapaxes(0,1) - lat_super = np.array(nc_file['y']) #[lat,lon] or [y,x] #.swapaxes(0,1) - #get the longitude and latitude data for the grid centers by slicing the supergrid - #and taking only odd-indexed values - longitude = lon_super[1::2,1::2] - latitude = lat_super[1::2,1::2] + lon_super = np.asarray(nc_file['x']) #[lat,lon] or [y,x] #.swapaxes(0,1) + lat_super = np.asarray(nc_file['y']) 
#[lat,lon] or [y,x] #.swapaxes(0,1) + if (lam): + #strip ghost/halo points and return central (A-grid) points + #assuming n_lam_halo_points + lon_super_no_halo = lon_super[2*n_lam_halo_points:lon_super.shape[0]-2*n_lam_halo_points,2*n_lam_halo_points:lon_super.shape[1]-2*n_lam_halo_points] + lat_super_no_halo = lat_super[2*n_lam_halo_points:lat_super.shape[0]-2*n_lam_halo_points,2*n_lam_halo_points:lat_super.shape[1]-2*n_lam_halo_points] + #get the longitude and latitude data for the grid centers by slicing the supergrid + #and taking only odd-indexed values + longitude = lon_super_no_halo[1::2,1::2] + latitude = lat_super_no_halo[1::2,1::2] + else: + #get the longitude and latitude data for the grid centers by slicing the supergrid + #and taking only odd-indexed values + longitude = lon_super[1::2,1::2] + latitude = lat_super[1::2,1::2] + nc_file.close() return (longitude[indices[1],indices[0]], latitude[indices[1],indices[0]]) + +######################################################################################## +# +######################################################################################## +def get_initial_lon_lat_grid(dir, tile, lam): + if (lam): + filename_pattern = '*grid.tile{0}.halo{1}.nc'.format(tile, n_lam_halo_points) + else: + filename_pattern = '*grid.tile{0}.nc'.format(tile) + + for f_name in os.listdir(dir): + if fnmatch.fnmatch(f_name, filename_pattern): + filename = f_name + if not filename: + message = 'No filenames matching the pattern {0} found in {1}'.format(filename_pattern,dir) + logging.critical(message) + raise Exception(message) + + nc_file = Dataset('{0}/{1}'.format(dir,filename)) + #read in supergrid longitude and latitude + lon_super = np.asarray(nc_file['x']) #[lat,lon] or [y,x] #.swapaxes(0,1) + lat_super = np.asarray(nc_file['y']) #[lat,lon] or [y,x] #.swapaxes(0,1) + if (lam): + #strip ghost/halo points and return central (A-grid) points + #assuming n_lam_halo_points + lon_super_no_halo = 
lon_super[2*n_lam_halo_points:lon_super.shape[0]-2*n_lam_halo_points,2*n_lam_halo_points:lon_super.shape[1]-2*n_lam_halo_points] + lat_super_no_halo = lat_super[2*n_lam_halo_points:lat_super.shape[0]-2*n_lam_halo_points,2*n_lam_halo_points:lat_super.shape[1]-2*n_lam_halo_points] + #get the longitude and latitude data for the grid centers by slicing the supergrid + #and taking only odd-indexed values + longitude = lon_super_no_halo[1::2,1::2] + latitude = lat_super_no_halo[1::2,1::2] + else: + #get the longitude and latitude data for the grid centers by slicing the supergrid + #and taking only odd-indexed values + longitude = lon_super[1::2,1::2] + latitude = lat_super[1::2,1::2] + + nc_file.close() + + return (longitude, latitude) + +######################################################################################## +# +######################################################################################## def sph2cart(az, el, r): """Calculate the Cartesian coordiates from spherical coordinates""" @@ -282,6 +407,9 @@ def sph2cart(az, el, r): return (x, y, z) +######################################################################################## +# +######################################################################################## def read_NetCDF_var(nc_file, var_name, i, j): try: var = nc_file[var_name][j,i] @@ -291,6 +419,9 @@ def read_NetCDF_var(nc_file, var_name, i, j): var = missing_value return var +######################################################################################## +# +######################################################################################## def read_NetCDF_surface_var(nc_file, var_name, i, j, old_chgres, vert_dim): if old_chgres: if vert_dim > 0: @@ -325,251 +456,727 @@ def read_NetCDF_surface_var(nc_file, var_name, i, j, old_chgres, vert_dim): var = missing_value return var -def get_UFS_IC_data(dir, grid_dir, tile, i, j, old_chgres): 
+######################################################################################## +# +######################################################################################## +def get_UFS_IC_data(dir, grid_dir, tile, i, j, old_chgres, lam): """Get the state, surface, and orographic data for the given tile and indices""" #returns dictionaries with the data - state_data = get_UFS_state_data(dir, tile, i, j, old_chgres) - surface_data = get_UFS_surface_data(dir, tile, i, j, old_chgres) - oro_data = get_UFS_oro_data(dir, tile, i, j) vgrid_data = get_UFS_vgrid_data(grid_dir) #only needed for ak, bk to calculate pressure - - #calculate derived quantities - if old_chgres: - #temperature - nlevs = state_data["nlevs"] - gz=state_data["z"]*grav - pn1=np.zeros([nlevs+1]) - temp=np.zeros([nlevs]) - for k in range(nlevs+1): - pn1[k]=np.log(vgrid_data["ak"][k]+state_data["p_surf"]*vgrid_data["bk"][k]) - for k in range(nlevs): - temp[k] = (gz[k]-gz[k+1])/( rdgas*(pn1[k+1]-pn1[k])*(1.+zvir*state_data["qv"][k]) ) - state_data["T"] = temp - state_data["pres"] = np.exp(pn1[0:nlevs]) + state_data = get_UFS_state_data(vgrid_data, dir, tile, i, j, old_chgres, lam) + surface_data = get_UFS_surface_data(dir, tile, i, j, old_chgres, lam) + oro_data = get_UFS_oro_data(dir, tile, i, j, lam) return (state_data, surface_data, oro_data) - -def get_UFS_state_data(dir, tile, i, j, old_chgres): + +######################################################################################## +# +######################################################################################## +def get_UFS_state_data(vgrid, dir, tile, i, j, old_chgres, lam): """Get the state data for the given tile and indices""" - nc_file = Dataset('{0}/{1}'.format(dir,'gfs_data.tile{0}.nc'.format(tile))) - - #the majority of this routine is from Phil Pegion (NOAA PSD) - - # assume model contains one less level than the cold start spectral GFS initial conditions - nlevs=len(nc_file.dimensions['lev'])-1 - - # upper air 
fields from initial conditions - zh=nc_file['zh'][::-1,j,i] - uw1=nc_file['u_w'][::-1,j,i] - uw2=nc_file['u_w'][::-1,j,i+1] - us1=nc_file['u_s'][::-1,j,i] - us2=nc_file['u_s'][::-1,j+1,i] - vw1=nc_file['v_w'][::-1,j,i] - vw2=nc_file['v_w'][::-1,j,i+1] - vs1=nc_file['v_s'][::-1,j,i] - vs2=nc_file['v_s'][::-1,j+1,i] - ucomp=0.25*(uw1+uw2+us1+us2) # estimate u winds on the A grid - vcomp=0.25*(vw1+vw2+vs1+vs2) # estimate v winds on the A grid - sphum=nc_file['sphum'][::-1,j,i] + if lam: + nc_file_data = Dataset('{0}/{1}'.format(dir,'gfs_data.nc')) + else: + nc_file_data = Dataset('{0}/{1}'.format(dir,'gfs_data.tile{0}.nc'.format(tile))) + + # get nlevs from the gfs_ctrl.nc data + nlevs_model=vgrid["nlevs"] + + # upper air fields from initial conditions (all data are top-first) + zh_rev=nc_file_data['zh'][:,j,i] + sphum_rev=nc_file_data['sphum'][:,j,i] # o3 and qv are taken from ics. - o3=nc_file['o3mr'][::-1,j,i] - liqwat=nc_file['liq_wat'][::-1,j,i] - - # surface pressure - ps=nc_file['ps'][j,i] + o3_rev=nc_file_data['o3mr'][:,j,i] + liqwat_rev=nc_file_data['liq_wat'][:,j,i] + ps_data = nc_file_data['ps'][j,i] - if not old_chgres: - #gfs_data.tileX.nc files created from chgres_cube already containt temperature and pressure profiles(well, surface pressure and delp); use those - #older version of global_chgres did not include these vars - t = nc_file['t'][::-1,j,i] - delp = nc_file['delp'][::-1,j,i] + #The 3D fields above are apparently on grid vertical interfaces. In the file external_ic.F90/get_nggps_ic subroutine in FV3, these fields + #are further processed to get to the vertical grid centers/means. 
+ + # following remap_scalar_nggps in external_ic.F90 + levp_data = len(sphum_rev) - p = np.zeros(nlevs) - p[0] = ps - for k in range(1, nlevs): - p[k] = p[k-1] - delp[k-1] + ak_rev = vgrid["ak"][::-1] + bk_rev = vgrid["bk"][::-1] + ak_rev[0] = np.max([1.0E-9, ak_rev[0]]) + + ptop_data = ak_rev[1] + + pressure_from_data_rev = ak_rev + bk_rev*ps_data + log_pressure_from_data_rev = np.log(pressure_from_data_rev) + + gz_rev = np.zeros(2*levp_data +1) + pn_rev = np.zeros(2*levp_data +1) - nc_file.close() + for k in range(0,levp_data+1): + gz_rev[k] = zh_rev[k]*grav + pn_rev[k] = log_pressure_from_data_rev[k] + k2 = int(np.max([10, levp_data/2])) + for k in range(levp_data+1,levp_data+k2): + #do k=km+2, km+k2 + l = 2*(levp_data) - k + gz_rev[k] = 2.*gz_rev[levp_data] - gz_rev[l] + pn_rev[k] = 2.*pn_rev[levp_data] - pn_rev[l] + + phis = zh_rev[-1]*grav + + for k in range(levp_data+k2-2,0,-1): + #do k=km+k2-1, 2, -1 + if (phis <= gz_rev[k] and phis >= gz_rev[k+1]): + log_ps_calc = pn_rev[k] + (pn_rev[k+1]-pn_rev[k])*(gz_rev[k]-phis)/(gz_rev[k]-gz_rev[k+1]) + break + + ps_calc = np.exp(log_ps_calc) + + pressure_model_interfaces_rev = np.zeros(nlevs_model+1) + log_pressure_model_interfaces_rev = np.zeros(nlevs_model+1) + pressure_model_interfaces_rev[0] = ak_rev[1] + log_pressure_model_interfaces_rev[0] = np.log(pressure_model_interfaces_rev[0]) + for k in range(1,nlevs_model+1): + pressure_model_interfaces_rev[k] = ak_rev[k+1] + bk_rev[k+1]*ps_calc + log_pressure_model_interfaces_rev[k] = np.log(pressure_model_interfaces_rev[k]) + + pressure_thickness_model_rev = np.zeros(nlevs_model) + for k in range(0,nlevs_model): + pressure_thickness_model_rev[k] = pressure_model_interfaces_rev[k+1] - pressure_model_interfaces_rev[k] + + sphum_model_rev = fv3_remap.mappm(levp_data, pressure_from_data_rev[np.newaxis, :], sphum_rev[np.newaxis, :], nlevs_model, pressure_model_interfaces_rev[np.newaxis, :], 1, 1, 0, 8, ptop_data) + sphum_model_rev_3d = fv3_remap.fillq(1, nlevs_model, 1, 
np.expand_dims(sphum_model_rev, axis=2), pressure_thickness_model_rev[np.newaxis, :]) + sphum_model_rev = sphum_model_rev_3d[:,:,0] + + o3_model_rev = fv3_remap.mappm(levp_data, pressure_from_data_rev[np.newaxis, :], o3_rev[np.newaxis, :], nlevs_model, pressure_model_interfaces_rev[np.newaxis, :], 1, 1, 0, 8, ptop_data) + o3_model_rev_3d = fv3_remap.fillz(1, nlevs_model, 1, np.expand_dims(o3_model_rev, axis=2), pressure_thickness_model_rev[np.newaxis, :]) + o3_model_rev = o3_model_rev_3d[:,:,0] + + liqwat_model_rev = fv3_remap.mappm(levp_data, pressure_from_data_rev[np.newaxis, :], liqwat_rev[np.newaxis, :], nlevs_model, pressure_model_interfaces_rev[np.newaxis, :], 1, 1, 0, 8, ptop_data) + liqwat_model_rev_3d = fv3_remap.fillz(1, nlevs_model, 1, np.expand_dims(liqwat_model_rev, axis=2), pressure_thickness_model_rev[np.newaxis, :]) + liqwat_model_rev = liqwat_model_rev_3d[:,:,0] - #put data in a dictionary if old_chgres: - state = { - "nlevs": nlevs, - "z": zh, - "u": ucomp, - "v": vcomp, - "qv": sphum, - "o3": o3, - "ql": liqwat, - "p_surf": ps - } + gz_fv = np.zeros(nlevs_model+1) + gz_fv[-1] = phis + m = 0 + for k in range(0,nlevs_model): + for l in range(m, levp_data+k2-1): + if ( (log_pressure_model_interfaces_rev[k] <= pn_rev[l+1]) and (log_pressure_model_interfaces_rev[k] >= pn_rev[l]) ): + gz_fv[k] = gz_rev[l] + (gz_rev[l+1]-gz_rev[l])*(log_pressure_model_interfaces_rev[k]-pn_rev[l])/(pn_rev[l+1]-pn_rev[l]) + break + m = l + + temp_model_rev = np.zeros((1,nlevs_model)) + for k in range(0, nlevs_model): + temp_model_rev[0,k] = (gz_fv[k]-gz_fv[k+1])/(rdgas*(log_pressure_model_interfaces_rev[k+1]-log_pressure_model_interfaces_rev[k])*(1.+zvir*sphum_model_rev[0,k]) ) else: - state = { - "nlevs": nlevs, - "z": zh, - "u": ucomp, - "v": vcomp, - "qv": sphum, - "o3": o3, - "ql": liqwat, - "p_surf": ps, - "T": t, - "pres": p - } + temp_rev = nc_file_data['t'][:,j,i] + + temp_model_rev = fv3_remap.mappm(levp_data, pressure_from_data_rev[np.newaxis, :], 
temp_rev[np.newaxis, :], nlevs_model, pressure_model_interfaces_rev[np.newaxis, :], 1, 1, 2, 4, ptop_data) + + + icewat_model_rev = np.zeros(nlevs_model) + all_liquid_threshold = 273.16 + all_ice_threshold = 233.16 + intermediate_threshold = 258.16 + cloud_ice_mixing_ratio_threshold = 1.0E-5 + for k in range(0, nlevs_model): + cloud_water = liqwat_model_rev[0,k] + if (temp_model_rev[0,k] > all_liquid_threshold): + liqwat_model_rev[0,k] = cloud_water + icewat_model_rev[k] = 0.0 + elif (temp_model_rev[0,k] < all_ice_threshold): + liqwat_model_rev[0,k] = 0.0 + icewat_model_rev[k] = cloud_water + else: + if k == 0: + liqwat_model_rev[0,k] = cloud_water*(temp_model_rev[0,k]-all_ice_threshold)/(all_liquid_threshold - all_ice_threshold) + icewat_model_rev[k] = cloud_water - liqwat_model_rev[0,k] + else: + if (temp_model_rev[0,k] < intermediate_threshold and icewat_model_rev[k-1] > cloud_ice_mixing_ratio_threshold): + liqwat_model_rev[0,k] = 0.0 + icewat_model_rev[k] = cloud_water + else: + liqwat_model_rev[0,k] = cloud_water*(temp_model_rev[0,k]-all_ice_threshold)/(all_liquid_threshold - all_ice_threshold) + icewat_model_rev[k] = cloud_water - liqwat_model_rev[0,k] + (liqwat_model_rev[0,k], dummy_rain, icewat_model_rev[k], dummy_snow) = fv3_remap.mp_auto_conversion(liqwat_model_rev[0,k], icewat_model_rev[k]) + + [u_s, u_n, v_w, v_e] = get_zonal_and_meridional_winds_on_cd_grid(tile, dir, i, j, nc_file_data, lam) + + #put C/D grid zonal/meridional winds on model pressure levels + u_s_model_rev = fv3_remap.mappm(levp_data, pressure_from_data_rev[np.newaxis, :], u_s[np.newaxis, :], nlevs_model, pressure_model_interfaces_rev[np.newaxis, :], 1, 1, -1, 8, ptop_data) + u_n_model_rev = fv3_remap.mappm(levp_data, pressure_from_data_rev[np.newaxis, :], u_n[np.newaxis, :], nlevs_model, pressure_model_interfaces_rev[np.newaxis, :], 1, 1, -1, 8, ptop_data) + v_w_model_rev = fv3_remap.mappm(levp_data, pressure_from_data_rev[np.newaxis, :], v_w[np.newaxis, :], nlevs_model, 
pressure_model_interfaces_rev[np.newaxis, :], 1, 1, -1, 8, ptop_data) + v_e_model_rev = fv3_remap.mappm(levp_data, pressure_from_data_rev[np.newaxis, :], v_e[np.newaxis, :], nlevs_model, pressure_model_interfaces_rev[np.newaxis, :], 1, 1, -1, 8, ptop_data) + + #put C/D grid zonal/meridional winds on A grid (simple averaging for now, but FV3 has more complex methods that should be implemented) + + u_model_rev = np.zeros(nlevs_model) + v_model_rev = np.zeros(nlevs_model) + u_model_rev = 0.5*(u_s_model_rev + u_n_model_rev) + v_model_rev = 0.5*(v_w_model_rev + v_e_model_rev) + + nc_file_data.close() + + pressure_model_interfaces = pressure_model_interfaces_rev[::-1] + pressure_model = np.zeros(nlevs_model) + for k in range(0,nlevs_model): + #from gmtb_scm_vgrid + pressure_model[k] = ((1.0/(rocp+1.0))*(pressure_model_interfaces[k]**(rocp+1.0) - pressure_model_interfaces[k+1]**(rocp+1.0))/(pressure_model_interfaces[k] - pressure_model_interfaces[k+1]))**(1.0/rocp) + #put data in a dictionary + state = { + "nlevs": nlevs_model, + "zh": zh_rev[nlevs_model:0:-1], + "ua": u_model_rev[0][::-1], + "va": v_model_rev[0][::-1], + "qv": sphum_model_rev[0][::-1], + "o3": o3_model_rev[0][::-1], + "ql": liqwat_model_rev[0][::-1], + "qi": icewat_model_rev[::-1], + "ps": np.asarray(ps_calc), + "ta": temp_model_rev[0,::-1], + "pa": pressure_model, + "pa_i": pressure_model_interfaces + } return state -def get_UFS_surface_data(dir, tile, i, j, old_chgres): +######################################################################################## +# +######################################################################################## +def get_zonal_and_meridional_winds_on_cd_grid(tile, dir, i, j, nc_file_data, lam): + if lam: + filename_pattern = '*grid.tile{0}.halo{1}.nc'.format(tile, n_lam_halo_points) + for f_name in os.listdir(dir): + if fnmatch.fnmatch(f_name, filename_pattern): + filename = f_name + if not filename: + message = 'No filenames matching the pattern {0} found in 
{1}'.format(filename_pattern,dir) + logging.critical(message) + raise Exception(message) + else: + filename_pattern = '*grid.tile{0}.nc'.format(tile) + + for f_name in os.listdir(dir): + if fnmatch.fnmatch(f_name, filename_pattern): + filename = f_name + if not filename: + message = 'No filenames matching the pattern {0} found in {1}'.format(filename_pattern,dir) + logging.critical(message) + raise Exception(message) + + nc_file_grid = Dataset('{0}/{1}'.format(dir,filename)) + + if (lam): + #strip ghost/halo points and return supergrid + lon_super_data = np.asarray(nc_file_grid['x']) + lat_super_data = np.asarray(nc_file_grid['y']) + #assuming n_lam_halo_points + lon_super = lon_super_data[2*n_lam_halo_points:lon_super_data.shape[0]-2*n_lam_halo_points,2*n_lam_halo_points:lon_super_data.shape[1]-2*n_lam_halo_points] + lat_super = lat_super_data[2*n_lam_halo_points:lat_super_data.shape[0]-2*n_lam_halo_points,2*n_lam_halo_points:lat_super_data.shape[1]-2*n_lam_halo_points] + else: + lon_super = np.asarray(nc_file_grid['x']) #[lat,lon] or [y,x] #.swapaxes(0,1) + lat_super = np.asarray(nc_file_grid['y']) #[lat,lon] or [y,x] #.swapaxes(0,1) + + num_agrid_x = int(0.5*(lon_super.shape[1]-1)) + num_agrid_y = int(0.5*(lon_super.shape[0]-1)) + + #find orientation + #A-grid point + agrid_super_i_index = 2*i + 1 + agrid_super_j_index = 2*j + 1 + point_on_agrid = np.asarray((lon_super[agrid_super_j_index,agrid_super_i_index],lat_super[agrid_super_j_index,agrid_super_i_index])) + + test_dgrid_points = [(lon_super[agrid_super_j_index,agrid_super_i_index+1],lat_super[agrid_super_j_index,agrid_super_i_index+1]),\ + (lon_super[agrid_super_j_index,agrid_super_i_index-1],lat_super[agrid_super_j_index,agrid_super_i_index-1]),\ + (lon_super[agrid_super_j_index+1,agrid_super_i_index],lat_super[agrid_super_j_index+1,agrid_super_i_index]),\ + (lon_super[agrid_super_j_index-1,agrid_super_i_index],lat_super[agrid_super_j_index-1,agrid_super_i_index])] + + test_lon_diff = [p[0] - 
point_on_agrid[0] for p in test_dgrid_points] + test_lat_diff = [p[1] - point_on_agrid[1] for p in test_dgrid_points] + + east_test_point = np.argmax(test_lon_diff) + north_test_point = np.argmax(test_lat_diff) + + if east_test_point == 0: + #longitude increases most along the positive i axis + if north_test_point == 2: + #latitude increases most along the positive j axis + # ---> j+ north + # | + # V + # i+ east + + #calculation of zonal wind on first (south) D-grid point + p1 = np.asarray((lon_super[2*j,2*i],lat_super[2*j,2*i])) + p2 = np.asarray((lon_super[2*j,2*(i+1)],lat_super[2*j,2*(i+1)])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + u_s = nc_file_data['u_s'][:,j,i]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_s'][:,j,i]*fv3_remap.inner_prod(e1, ey) + + #calculation of zonal wind on second (north) D-grid point + p1 = np.asarray((lon_super[2*(j+1),2*i],lat_super[2*(j+1),2*i,])) + p2 = np.asarray((lon_super[2*(j+1),2*(i+1)],lat_super[2*(j+1),2*(i+1)])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + u_n = nc_file_data['u_s'][:,j+1,i]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_s'][:,j+1,i]*fv3_remap.inner_prod(e1, ey) + + #calculation of meridionial wind on first (west) D-grid point + p1 = np.asarray((lon_super[2*j,2*i],lat_super[2*j,2*i])) + p2 = np.asarray((lon_super[2*(j+1),2*i],lat_super[2*(j+1),2*i])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + v_w = nc_file_data['u_w'][:,j,i]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_w'][:,j,i]*fv3_remap.inner_prod(e1, ey) + + #calculation of meridionial wind on second (east) D-grid point + p1 = 
np.asarray((lon_super[2*j,2*(i+1)],lat_super[2*j,2*(i+1)])) + p2 = np.asarray((lon_super[2*(j+1),2*(i+1)],lat_super[2*(j+1),2*(i+1)])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + v_e = nc_file_data['u_w'][:,j,i+1]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_w'][:,j,i+1]*fv3_remap.inner_prod(e1, ey) + elif north_test_point == 3: + #latitude increases most along the negative j axis + # <--- j- north + # | + # V + # i+ east + #calculation of zonal wind on first (south) D-grid point + p1 = np.asarray((lon_super[2*j,2*i],lat_super[2*j,2*i])) + p2 = np.asarray((lon_super[2*j,2*(i+1)],lat_super[2*j,2*(i+1)])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + u_s = nc_file_data['u_s'][:,j,i]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_s'][:,j,i]*fv3_remap.inner_prod(e1, ey) + + #calculation of zonal wind on second (north) D-grid point + p1 = np.asarray((lon_super[2*(j-1),2*i],lat_super[2*(j-1),2*i])) + p2 = np.asarray((lon_super[2*(j-1),2*(i+1)],lat_super[2*(j-1),2*(i+1)])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + u_n = nc_file_data['u_s'][:,j-1,i]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_s'][:,j-1,i]*fv3_remap.inner_prod(e1, ey) + + #calculation of meridionial wind on first (west) D-grid point + p1 = np.asarray((lon_super[2*j,2*i],lat_super[2*j,2*i])) + p2 = np.asarray((lon_super[2*(j-1),2*i],lat_super[2*(j-1),2*i])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + v_w = nc_file_data['u_w'][:,j,i]*fv3_remap.inner_prod(e1, ex) + 
nc_file_data['v_w'][:,j,i]*fv3_remap.inner_prod(e1, ey) + + #calculation of meridionial wind on second (east) D-grid point + p1 = np.asarray((lon_super[2*j,2*(i+1)],lat_super[2*j,2*(i+1)])) + p2 = np.asarray((lon_super[2*(j-1),2*(i+1)],lat_super[2*(j-1),2*(i+1)])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + v_e = nc_file_data['u_w'][:,j,i+1]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_w'][:,j,i+1]*fv3_remap.inner_prod(e1, ey) + else: + print('unknown grid orientation') + elif east_test_point == 1: + #longitude increases most along the negative i axis + if north_test_point == 2: + #latitude increases most along the positive j axis + # i- east + # ^ + # | + # ---> j+ north + #calculation of zonal wind on first (south) D-grid point + p1 = np.asarray((lon_super[2*j,2*i],lat_super[2*j,2*i])) + p2 = np.asarray((lon_super[2*j,2*(i-1)],lat_super[2*j,2*(i-1)])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + u_s = nc_file_data['u_s'][:,j,i]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_s'][:,j,i]*fv3_remap.inner_prod(e1, ey) + + #calculation of zonal wind on second (north) D-grid point + p1 = np.asarray((lon_super[2*(j+1),2*i],lat_super[2*(j+1),2*i])) + p2 = np.asarray((lon_super[2*(j+1),2*(i-1)],lat_super[2*(j+1),2*(i-1)])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + u_n = nc_file_data['u_s'][:,j+1,i]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_s'][:,j+1,i]*fv3_remap.inner_prod(e1, ey) + + #calculation of meridionial wind on first (west) D-grid point + p1 = np.asarray((lon_super[2*j,2*i],lat_super[2*j,2*i])) + p2 = np.asarray((lon_super[2*(j+1),2*i],lat_super[2*(j+1),2*i])) + p3 = 
fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + v_w = nc_file_data['u_w'][:,j,i]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_w'][:,j,i]*fv3_remap.inner_prod(e1, ey) + + #calculation of meridionial wind on second (east) D-grid point + p1 = np.asarray((lon_super[2*j,2*(i-1)],lat_super[2*j,2*(i-1)])) + p2 = np.asarray((lon_super[2*(j+1),2*(i-1)],lat_super[2*(j+1),2*(i-1)])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + v_e = nc_file_data['u_w'][:,j,i-1]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_w'][:,j,i-1]*fv3_remap.inner_prod(e1, ey) + elif north_test_point == 3: + #latitude increases most along the negative j axis + # i- east + # ^ + # | + # <--- j- north + #calculation of zonal wind on first (south) D-grid point + p1 = np.asarray((lon_super[2*j,2*i],lat_super[2*j,2*i])) + p2 = np.asarray((lon_super[2*j,2*(i-1)],lat_super[2*j,2*(i-1)])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + u_s = nc_file_data['u_s'][:,j,i]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_s'][:,j,i]*fv3_remap.inner_prod(e1, ey) + + #calculation of zonal wind on second (north) D-grid point + p1 = np.asarray((lon_super[2*(j-1),2*i],lat_super[2*(j-1),2*i])) + p2 = np.asarray((lon_super[2*(j-1),2*(i-1)],lat_super[2*(j-1),2*(i-1)])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + u_n = nc_file_data['u_s'][:,j-1,i]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_s'][:,j-1,i]*fv3_remap.inner_prod(e1, ey) + + #calculation of meridionial wind on first (west) D-grid point + p1 = 
np.asarray((lon_super[2*j,2*i],lat_super[2*j,2*i])) + p2 = np.asarray((lon_super[2*(j-1),2*i],lat_super[2*(j-1),2*i])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + v_w = nc_file_data['u_w'][:,j,i]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_w'][:,j,i]*fv3_remap.inner_prod(e1, ey) + + #calculation of meridionial wind on second (east) D-grid point + p1 = np.asarray((lon_super[2*j,2*(i-1)],lat_super[2*j,2*(i-1)])) + p2 = np.asarray((lon_super[2*(j-1),2*(i-1)],lat_super[2*(j-1),2*(i-1)])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + v_e = nc_file_data['u_w'][:,j,i-1]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_w'][:,j,i-1]*fv3_remap.inner_prod(e1, ey) + else: + print('unknown grid orientation') + elif east_test_point == 2: + #longitude increases most along the positive j axis + if north_test_point == 0: + #latitude increases most along the positive i axis + # ---> j+ east + # | + # V + # i+ north + #calculation of zonal wind on first (south) D-grid point + p1 = np.asarray((lon_super[2*j,2*i],lat_super[2*j,2*i])) + p2 = np.asarray((lon_super[2*(j+1),2*i],lat_super[2*(j+1),2*i])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + u_s = nc_file_data['u_s'][:,j,i]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_s'][:,j,i]*fv3_remap.inner_prod(e1, ey) + + #calculation of zonal wind on second (north) D-grid point + p1 = np.asarray((lon_super[2*j,2*(i+1)],lat_super[2*j,2*(i+1)])) + p2 = np.asarray((lon_super[2*(j+1),2*(i+1)],lat_super[2*(j+1),2*(i+1)])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = 
fv3_remap.get_latlon_vector(p3) + u_n = nc_file_data['u_s'][:,j,i+1]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_s'][:,j,i+1]*fv3_remap.inner_prod(e1, ey) + + #calculation of meridionial wind on first (west) D-grid point + p1 = np.asarray((lon_super[2*j,2*i],lat_super[2*j,2*i])) + p2 = np.asarray((lon_super[2*j,2*(i+1)],lat_super[2*j,2*(i+1)])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + v_w = nc_file_data['u_w'][:,j,i]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_w'][:,j,i]*fv3_remap.inner_prod(e1, ey) + + #calculation of meridionial wind on second (east) D-grid point + p1 = np.asarray((lon_super[2*(j+1),2*i],lat_super[2*(j+1),2*i])) + p2 = np.asarray((lon_super[2*(j+1),2*(i+1)],lat_super[2*(j+1),2*(i+1)])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + v_e = nc_file_data['u_w'][:,j+1,i]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_w'][:,j+1,i]*fv3_remap.inner_prod(e1, ey) + elif north_test_point == 1: + #latitude increases most along the negative i axis + # i- north + # ^ + # | + # ---> j+ east + #calculation of zonal wind on first (south) D-grid point + p1 = np.asarray((lon_super[2*j,2*i],lat_super[2*j,2*i])) + p2 = np.asarray((lon_super[2*(j+1),2*i],lat_super[2*(j+1),2*i])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + u_s = nc_file_data['u_s'][:,j,i]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_s'][:,j,i]*fv3_remap.inner_prod(e1, ey) + + #calculation of zonal wind on second (north) D-grid point + p1 = np.asarray((lon_super[2*j,2*(i-1)],lat_super[2*j,2*(i-1)])) + p2 = np.asarray((lon_super[2*(j+1),2*(i-1)],lat_super[2*(j+1),2*(i-1)])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, 
p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + u_n = nc_file_data['u_s'][:,j,i-1]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_s'][:,j,i-1]*fv3_remap.inner_prod(e1, ey) + + #calculation of meridionial wind on first (west) D-grid point + p1 = np.asarray((lon_super[2*j,2*i],lat_super[2*j,2*i])) + p2 = np.asarray((lon_super[2*j,2*(i-1)],lat_super[2*j,2*(i-1)])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + v_w = nc_file_data['u_w'][:,j,i]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_w'][:,j,i]*fv3_remap.inner_prod(e1, ey) + + #calculation of meridionial wind on second (east) D-grid point + p1 = np.asarray((lon_super[2*(j+1),2*i],lat_super[2*(j+1),2*i])) + p2 = np.asarray((lon_super[2*(j+1),2*(i-1)],lat_super[2*(j+1),2*(i-1)])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + v_e = nc_file_data['u_w'][:,j+1,i]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_w'][:,j+1,i]*fv3_remap.inner_prod(e1, ey) + else: + print('unknown grid orientation') + elif east_test_point == 3: + #longitude increases most along the negative j axis + if north_test_point == 0: + #latitude increases most along the positive i axis + # <--- j- east + # | + # V + # i+ north + #calculation of zonal wind on first (south) D-grid point + p1 = np.asarray((lon_super[2*j,2*i],lat_super[2*j,2*i])) + p2 = np.asarray((lon_super[2*(j-1),2*i],lat_super[2*(j-1),2*i])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + u_s = nc_file_data['u_s'][:,j,i]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_s'][:,j,i]*fv3_remap.inner_prod(e1, ey) + + #calculation of zonal wind on second 
(north) D-grid point + p1 = np.asarray((lon_super[2*j,2*(i+1)],lat_super[2*j,2*(i+1)])) + p2 = np.asarray((lon_super[2*(j-1),2*(i+1)],lat_super[2*(j-1),2*(i+1)])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + u_n = nc_file_data['u_s'][:,j,i+1]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_s'][:,j,i+1]*fv3_remap.inner_prod(e1, ey) + + #calculation of meridionial wind on first (west) D-grid point + p1 = np.asarray((lon_super[2*j,2*i],lat_super[2*j,2*i])) + p2 = np.asarray((lon_super[2*j,2*(i+1)],lat_super[2*j,2*(i+1)])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + v_w = nc_file_data['u_w'][:,j,i]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_w'][:,j,i]*fv3_remap.inner_prod(e1, ey) + + #calculation of meridionial wind on second (east) D-grid point + p1 = np.asarray((lon_super[2*(j-1),2*i],lat_super[2*(j-1),2*i])) + p2 = np.asarray((lon_super[2*(j-1),2*(i+1)],lat_super[2*(j-1),2*(i+1)])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + v_e = nc_file_data['u_w'][:,j-1,i]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_w'][:,j-1,i]*fv3_remap.inner_prod(e1, ey) + elif north_test_point == 1: + #latitude increases most along the negative i axis + # i- north + # ^ + # | + # <--- j- east + #calculation of zonal wind on first (south) D-grid point + p1 = np.asarray((lon_super[2*j,2*i],lat_super[2*j,2*i])) + p2 = np.asarray((lon_super[2*(j-1),2*i],lat_super[2*(j-1),2*i])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + u_s = nc_file_data['u_s'][:,j,i]*fv3_remap.inner_prod(e1, ex) + 
nc_file_data['v_s'][:,j,i]*fv3_remap.inner_prod(e1, ey) + + #calculation of zonal wind on second (north) D-grid point + p1 = np.asarray((lon_super[2*j,2*(i-1)],lat_super[2*j,2*(i-1)])) + p2 = np.asarray((lon_super[2*(j-1),2*(i-1)],lat_super[2*(j-1),2*(i-1)])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + u_n = nc_file_data['u_s'][:,j,i-1]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_s'][:,j,i-1]*fv3_remap.inner_prod(e1, ey) + + #calculation of meridionial wind on first (west) D-grid point + p1 = np.asarray((lon_super[2*j,2*i],lat_super[2*j,2*i])) + p2 = np.asarray((lon_super[2*j,2*(i-1)],lat_super[2*j,2*(i-1)])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + v_w = nc_file_data['u_w'][:,j,i]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_w'][:,j,i]*fv3_remap.inner_prod(e1, ey) + + #calculation of meridionial wind on second (east) D-grid point + p1 = np.asarray((lon_super[2*(j-1),2*i],lat_super[2*(j-1),2*i])) + p2 = np.asarray((lon_super[2*(j-1),2*(i-1)],lat_super[2*(j-1),2*(i-1)])) + p3 = fv3_remap.mid_pt_sphere(p1*deg_to_rad, p2*deg_to_rad) + e1 = fv3_remap.get_unit_vect2(p1*deg_to_rad, p2*deg_to_rad) + (ex, ey) = fv3_remap.get_latlon_vector(p3) + v_e = nc_file_data['u_w'][:,j-1,i]*fv3_remap.inner_prod(e1, ex) + nc_file_data['v_w'][:,j-1,i]*fv3_remap.inner_prod(e1, ey) + else: + print('unknown grid orientation') + + + nc_file_grid.close() + + return [u_s, u_n, v_w, v_e] + +######################################################################################## +# +######################################################################################## +def get_UFS_surface_data(dir, tile, i, j, old_chgres, lam): """Get the surface data for the given tile and indices""" - nc_file = 
Dataset('{0}/{1}'.format(dir,'sfc_data.tile{0}.nc'.format(tile))) + if lam: + nc_file = Dataset('{0}/{1}'.format(dir,'sfc_data.nc')) + else: + nc_file = Dataset('{0}/{1}'.format(dir,'sfc_data.tile{0}.nc'.format(tile))) #FV3/io/FV3GFS_io.F90/sfc_prop_restart_read was used as reference for variables that can be read in - #read in scalars (would be 2D variables in a 3D model) - - # surface properties (assuming Noah LSM; may contain variables needed for fractional land fraction) - tsfco_in = read_NetCDF_surface_var(nc_file, 'tsea', i, j, old_chgres, 0) - tg3_in = read_NetCDF_surface_var(nc_file, 'tg3', i, j, old_chgres, 0) + # + # read in scalars (2D) + # + + # Noah LSM ( may contain variables needed for fractional land fraction) + tsfco_in = read_NetCDF_surface_var(nc_file, 'tsea', i, j, old_chgres, 0) + tg3_in = read_NetCDF_surface_var(nc_file, 'tg3', i, j, old_chgres, 0) uustar_in = read_NetCDF_surface_var(nc_file, 'uustar', i, j, old_chgres, 0) - alvsf_in = read_NetCDF_surface_var(nc_file, 'alvsf', i, j, old_chgres, 0) - alvwf_in = read_NetCDF_surface_var(nc_file, 'alvwf', i, j, old_chgres, 0) - alnsf_in = read_NetCDF_surface_var(nc_file, 'alnsf', i, j, old_chgres, 0) - alnwf_in = read_NetCDF_surface_var(nc_file, 'alnwf', i, j, old_chgres, 0) - facsf_in = read_NetCDF_surface_var(nc_file, 'facsf', i, j, old_chgres, 0) - facwf_in = read_NetCDF_surface_var(nc_file, 'facwf', i, j, old_chgres, 0) - styp_in = read_NetCDF_surface_var(nc_file, 'stype', i, j, old_chgres, 0) - slope_in = read_NetCDF_surface_var(nc_file, 'slope', i, j, old_chgres, 0) - vtyp_in = read_NetCDF_surface_var(nc_file, 'vtype', i, j, old_chgres, 0) - vfrac_in = read_NetCDF_surface_var(nc_file, 'vfrac', i, j, old_chgres, 0) + alvsf_in = read_NetCDF_surface_var(nc_file, 'alvsf', i, j, old_chgres, 0) + alvwf_in = read_NetCDF_surface_var(nc_file, 'alvwf', i, j, old_chgres, 0) + alnsf_in = read_NetCDF_surface_var(nc_file, 'alnsf', i, j, old_chgres, 0) + alnwf_in = read_NetCDF_surface_var(nc_file, 'alnwf', 
i, j, old_chgres, 0) + facsf_in = read_NetCDF_surface_var(nc_file, 'facsf', i, j, old_chgres, 0) + facwf_in = read_NetCDF_surface_var(nc_file, 'facwf', i, j, old_chgres, 0) + styp_in = read_NetCDF_surface_var(nc_file, 'stype', i, j, old_chgres, 0) + slope_in = read_NetCDF_surface_var(nc_file, 'slope', i, j, old_chgres, 0) + vtyp_in = read_NetCDF_surface_var(nc_file, 'vtype', i, j, old_chgres, 0) + vfrac_in = read_NetCDF_surface_var(nc_file, 'vfrac', i, j, old_chgres, 0) shdmin_in = read_NetCDF_surface_var(nc_file, 'shdmin', i, j, old_chgres, 0) shdmax_in = read_NetCDF_surface_var(nc_file, 'shdmax', i, j, old_chgres, 0) - zorlw_in = read_NetCDF_surface_var(nc_file, 'zorl', i, j, old_chgres, 0) - slmsk_in = read_NetCDF_surface_var(nc_file, 'slmsk', i, j, old_chgres, 0) + zorlo_in = read_NetCDF_surface_var(nc_file, 'zorl', i, j, old_chgres, 0) + slmsk_in = read_NetCDF_surface_var(nc_file, 'slmsk', i, j, old_chgres, 0) canopy_in = read_NetCDF_surface_var(nc_file, 'canopy', i, j, old_chgres, 0) - hice_in = read_NetCDF_surface_var(nc_file, 'hice', i, j, old_chgres, 0) - fice_in = read_NetCDF_surface_var(nc_file, 'fice', i, j, old_chgres, 0) - tisfc_in = read_NetCDF_surface_var(nc_file, 'tisfc', i, j, old_chgres, 0) + hice_in = read_NetCDF_surface_var(nc_file, 'hice', i, j, old_chgres, 0) + fice_in = read_NetCDF_surface_var(nc_file, 'fice', i, j, old_chgres, 0) + tisfc_in = read_NetCDF_surface_var(nc_file, 'tisfc', i, j, old_chgres, 0) snwdph_in = read_NetCDF_surface_var(nc_file, 'snwdph', i, j, old_chgres, 0) snoalb_in = read_NetCDF_surface_var(nc_file, 'snoalb', i, j, old_chgres, 0) sheleg_in = read_NetCDF_surface_var(nc_file, 'sheleg', i, j, old_chgres, 0) - f10m_in = read_NetCDF_surface_var(nc_file, 'f10m', i, j, old_chgres, 0) - t2m_in = read_NetCDF_surface_var(nc_file, 't2m', i, j, old_chgres, 0) - q2m_in = read_NetCDF_surface_var(nc_file, 'q2m', i, j, old_chgres, 0) - ffmm_in = read_NetCDF_surface_var(nc_file, 'ffmm', i, j, old_chgres, 0) - ffhh_in = 
read_NetCDF_surface_var(nc_file, 'ffhh', i, j, old_chgres, 0) - tprcp_in = read_NetCDF_surface_var(nc_file, 'tprcp', i, j, old_chgres, 0) + f10m_in = read_NetCDF_surface_var(nc_file, 'f10m', i, j, old_chgres, 0) + t2m_in = read_NetCDF_surface_var(nc_file, 't2m', i, j, old_chgres, 0) + q2m_in = read_NetCDF_surface_var(nc_file, 'q2m', i, j, old_chgres, 0) + ffmm_in = read_NetCDF_surface_var(nc_file, 'ffmm', i, j, old_chgres, 0) + ffhh_in = read_NetCDF_surface_var(nc_file, 'ffhh', i, j, old_chgres, 0) + tprcp_in = read_NetCDF_surface_var(nc_file, 'tprcp', i, j, old_chgres, 0) srflag_in = read_NetCDF_surface_var(nc_file, 'srflag', i, j, old_chgres, 0) sncovr_in = read_NetCDF_surface_var(nc_file, 'sncovr', i, j, old_chgres, 0) - tsfcl_in = read_NetCDF_surface_var(nc_file, 'tsfcl', i, j, old_chgres, 0) - zorll_in = read_NetCDF_surface_var(nc_file, 'zorll', i, j, old_chgres, 0) - zorli_in = read_NetCDF_surface_var(nc_file, 'zorli', i, j, old_chgres, 0) - - #present when cplwav = T - zorlwav_in = read_NetCDF_surface_var(nc_file, 'zorlwav', i, j, old_chgres, 0) - - #NSST variables that may be in the surface file - tref_in = read_NetCDF_surface_var(nc_file, 'tref', i, j, old_chgres, 0) - z_c_in = read_NetCDF_surface_var(nc_file, 'z_c', i, j, old_chgres, 0) - c_0_in = read_NetCDF_surface_var(nc_file, 'c_0', i, j, old_chgres, 0) - c_d_in = read_NetCDF_surface_var(nc_file, 'c_d', i, j, old_chgres, 0) - w_0_in = read_NetCDF_surface_var(nc_file, 'w_0', i, j, old_chgres, 0) - w_d_in = read_NetCDF_surface_var(nc_file, 'w_d', i, j, old_chgres, 0) - xt_in = read_NetCDF_surface_var(nc_file, 'xt', i, j, old_chgres, 0) - xs_in = read_NetCDF_surface_var(nc_file, 'xs', i, j, old_chgres, 0) - xu_in = read_NetCDF_surface_var(nc_file, 'xu', i, j, old_chgres, 0) - xv_in = read_NetCDF_surface_var(nc_file, 'xv', i, j, old_chgres, 0) - xz_in = read_NetCDF_surface_var(nc_file, 'xz', i, j, old_chgres, 0) - zm_in = read_NetCDF_surface_var(nc_file, 'zm', i, j, old_chgres, 0) - xtts_in = 
read_NetCDF_surface_var(nc_file, 'xtts', i, j, old_chgres, 0) - xzts_in = read_NetCDF_surface_var(nc_file, 'xzts', i, j, old_chgres, 0) - d_conv_in = read_NetCDF_surface_var(nc_file, 'd_conv', i, j, old_chgres, 0) - ifd_in = read_NetCDF_surface_var(nc_file, 'ifd', i, j, old_chgres, 0) + tsfcl_in = read_NetCDF_surface_var(nc_file, 'tsfcl', i, j, old_chgres, 0) + zorll_in = read_NetCDF_surface_var(nc_file, 'zorll', i, j, old_chgres, 0) + zorli_in = read_NetCDF_surface_var(nc_file, 'zorli', i, j, old_chgres, 0) + + # present when cplwav = T + zorlw_in = read_NetCDF_surface_var(nc_file, 'zorlw', i, j, old_chgres, 0) + + # NSST + tref_in = read_NetCDF_surface_var(nc_file, 'tref', i, j, old_chgres, 0) + z_c_in = read_NetCDF_surface_var(nc_file, 'z_c', i, j, old_chgres, 0) + c_0_in = read_NetCDF_surface_var(nc_file, 'c_0', i, j, old_chgres, 0) + c_d_in = read_NetCDF_surface_var(nc_file, 'c_d', i, j, old_chgres, 0) + w_0_in = read_NetCDF_surface_var(nc_file, 'w_0', i, j, old_chgres, 0) + w_d_in = read_NetCDF_surface_var(nc_file, 'w_d', i, j, old_chgres, 0) + xt_in = read_NetCDF_surface_var(nc_file, 'xt', i, j, old_chgres, 0) + xs_in = read_NetCDF_surface_var(nc_file, 'xs', i, j, old_chgres, 0) + xu_in = read_NetCDF_surface_var(nc_file, 'xu', i, j, old_chgres, 0) + xv_in = read_NetCDF_surface_var(nc_file, 'xv', i, j, old_chgres, 0) + xz_in = read_NetCDF_surface_var(nc_file, 'xz', i, j, old_chgres, 0) + zm_in = read_NetCDF_surface_var(nc_file, 'zm', i, j, old_chgres, 0) + xtts_in = read_NetCDF_surface_var(nc_file, 'xtts', i, j, old_chgres, 0) + xzts_in = read_NetCDF_surface_var(nc_file, 'xzts', i, j, old_chgres, 0) + d_conv_in = read_NetCDF_surface_var(nc_file, 'd_conv', i, j, old_chgres, 0) + ifd_in = read_NetCDF_surface_var(nc_file, 'ifd', i, j, old_chgres, 0) dt_cool_in = read_NetCDF_surface_var(nc_file, 'dt_cool', i, j, old_chgres, 0) - qrain_in = read_NetCDF_surface_var(nc_file, 'qrain', i, j, old_chgres, 0) - - #NoahMP variables that may be in the surface file - 
snowxy_in = read_NetCDF_surface_var(nc_file, 'snowxy', i, j, old_chgres, 0) - tvxy_in = read_NetCDF_surface_var(nc_file, 'tvxy', i, j, old_chgres, 0) - tgxy_in = read_NetCDF_surface_var(nc_file, 'tgxy', i, j, old_chgres, 0) - canicexy_in = read_NetCDF_surface_var(nc_file, 'canicexy', i, j, old_chgres, 0) - canliqxy_in = read_NetCDF_surface_var(nc_file, 'canliqxy', i, j, old_chgres, 0) - eahxy_in = read_NetCDF_surface_var(nc_file, 'eahxy', i, j, old_chgres, 0) - tahxy_in = read_NetCDF_surface_var(nc_file, 'tahxy', i, j, old_chgres, 0) - cmxy_in = read_NetCDF_surface_var(nc_file, 'cmxy', i, j, old_chgres, 0) - chxy_in = read_NetCDF_surface_var(nc_file, 'chxy', i, j, old_chgres, 0) - fwetxy_in = read_NetCDF_surface_var(nc_file, 'fwetxy', i, j, old_chgres, 0) - sneqvoxy_in = read_NetCDF_surface_var(nc_file, 'sneqvoxy', i, j, old_chgres, 0) - alboldxy_in = read_NetCDF_surface_var(nc_file, 'alboldxy', i, j, old_chgres, 0) - qsnowxy_in = read_NetCDF_surface_var(nc_file, 'qsnowxy', i, j, old_chgres, 0) - wslakexy_in = read_NetCDF_surface_var(nc_file, 'wslakexy', i, j, old_chgres, 0) - zwtxy_in = read_NetCDF_surface_var(nc_file, 'zwtxy', i, j, old_chgres, 0) - waxy_in = read_NetCDF_surface_var(nc_file, 'waxy', i, j, old_chgres, 0) - wtxy_in = read_NetCDF_surface_var(nc_file, 'wtxy', i, j, old_chgres, 0) - lfmassxy_in = read_NetCDF_surface_var(nc_file, 'lfmassxy', i, j, old_chgres, 0) - rtmassxy_in = read_NetCDF_surface_var(nc_file, 'rtmassxy', i, j, old_chgres, 0) - stmassxy_in = read_NetCDF_surface_var(nc_file, 'stmassxy', i, j, old_chgres, 0) - woodxy_in = read_NetCDF_surface_var(nc_file, 'woodxy', i, j, old_chgres, 0) - stblcpxy_in = read_NetCDF_surface_var(nc_file, 'stblcpxy', i, j, old_chgres, 0) - fastcpxy_in = read_NetCDF_surface_var(nc_file, 'fastcpxy', i, j, old_chgres, 0) - xsaixy_in = read_NetCDF_surface_var(nc_file, 'xsaixy', i, j, old_chgres, 0) - xlaixy_in = read_NetCDF_surface_var(nc_file, 'xlaixy', i, j, old_chgres, 0) - taussxy_in = 
read_NetCDF_surface_var(nc_file, 'taussxy', i, j, old_chgres, 0) - smcwtdxy_in = read_NetCDF_surface_var(nc_file, 'smcwtdxy', i, j, old_chgres, 0) + qrain_in = read_NetCDF_surface_var(nc_file, 'qrain', i, j, old_chgres, 0) + + # NoahMP LSM + snowxy_in = read_NetCDF_surface_var(nc_file, 'snowxy', i, j, old_chgres, 0) + tvxy_in = read_NetCDF_surface_var(nc_file, 'tvxy', i, j, old_chgres, 0) + tgxy_in = read_NetCDF_surface_var(nc_file, 'tgxy', i, j, old_chgres, 0) + canicexy_in = read_NetCDF_surface_var(nc_file, 'canicexy', i, j, old_chgres, 0) + canliqxy_in = read_NetCDF_surface_var(nc_file, 'canliqxy', i, j, old_chgres, 0) + eahxy_in = read_NetCDF_surface_var(nc_file, 'eahxy', i, j, old_chgres, 0) + tahxy_in = read_NetCDF_surface_var(nc_file, 'tahxy', i, j, old_chgres, 0) + cmxy_in = read_NetCDF_surface_var(nc_file, 'cmxy', i, j, old_chgres, 0) + chxy_in = read_NetCDF_surface_var(nc_file, 'chxy', i, j, old_chgres, 0) + fwetxy_in = read_NetCDF_surface_var(nc_file, 'fwetxy', i, j, old_chgres, 0) + sneqvoxy_in = read_NetCDF_surface_var(nc_file, 'sneqvoxy', i, j, old_chgres, 0) + alboldxy_in = read_NetCDF_surface_var(nc_file, 'alboldxy', i, j, old_chgres, 0) + qsnowxy_in = read_NetCDF_surface_var(nc_file, 'qsnowxy', i, j, old_chgres, 0) + wslakexy_in = read_NetCDF_surface_var(nc_file, 'wslakexy', i, j, old_chgres, 0) + zwtxy_in = read_NetCDF_surface_var(nc_file, 'zwtxy', i, j, old_chgres, 0) + waxy_in = read_NetCDF_surface_var(nc_file, 'waxy', i, j, old_chgres, 0) + wtxy_in = read_NetCDF_surface_var(nc_file, 'wtxy', i, j, old_chgres, 0) + lfmassxy_in = read_NetCDF_surface_var(nc_file, 'lfmassxy', i, j, old_chgres, 0) + rtmassxy_in = read_NetCDF_surface_var(nc_file, 'rtmassxy', i, j, old_chgres, 0) + stmassxy_in = read_NetCDF_surface_var(nc_file, 'stmassxy', i, j, old_chgres, 0) + woodxy_in = read_NetCDF_surface_var(nc_file, 'woodxy', i, j, old_chgres, 0) + stblcpxy_in = read_NetCDF_surface_var(nc_file, 'stblcpxy', i, j, old_chgres, 0) + fastcpxy_in = 
read_NetCDF_surface_var(nc_file, 'fastcpxy', i, j, old_chgres, 0) + xsaixy_in = read_NetCDF_surface_var(nc_file, 'xsaixy', i, j, old_chgres, 0) + xlaixy_in = read_NetCDF_surface_var(nc_file, 'xlaixy', i, j, old_chgres, 0) + taussxy_in = read_NetCDF_surface_var(nc_file, 'taussxy', i, j, old_chgres, 0) + smcwtdxy_in = read_NetCDF_surface_var(nc_file, 'smcwtdxy', i, j, old_chgres, 0) deeprechxy_in = read_NetCDF_surface_var(nc_file, 'deeprechxy', i, j, old_chgres, 0) - rechxy_in = read_NetCDF_surface_var(nc_file, 'rechxy', i, j, old_chgres, 0) - albdvis_in = read_NetCDF_surface_var(nc_file, 'albdvis', i, j, old_chgres, 0) - albdnir_in = read_NetCDF_surface_var(nc_file, 'albdnir', i, j, old_chgres, 0) - albivis_in = read_NetCDF_surface_var(nc_file, 'albivis', i, j, old_chgres, 0) - albinir_in = read_NetCDF_surface_var(nc_file, 'albinir', i, j, old_chgres, 0) - emiss_in = read_NetCDF_surface_var(nc_file, 'emiss', i, j, old_chgres, 0) - - #RUC LSM variables that may be in the surface file - wetness_in = read_NetCDF_surface_var(nc_file, 'wetness', i, j, old_chgres, 0) - clw_surf_land_in = read_NetCDF_surface_var(nc_file, 'clw_surf_land', i, j, old_chgres, 0) - clw_surf_ice_in = read_NetCDF_surface_var(nc_file, 'clw_surf_ice', i, j, old_chgres, 0) - qwv_surf_land_in = read_NetCDF_surface_var(nc_file, 'qwv_surf_land', i, j, old_chgres, 0) - qwv_surf_ice_in = read_NetCDF_surface_var(nc_file, 'qwv_surf_ice', i, j, old_chgres, 0) - tsnow_land_in = read_NetCDF_surface_var(nc_file, 'tsnow_land', i, j, old_chgres, 0) - tsnow_ice_in = read_NetCDF_surface_var(nc_file, 'tsnow_ice', i, j, old_chgres, 0) + rechxy_in = read_NetCDF_surface_var(nc_file, 'rechxy', i, j, old_chgres, 0) + + # RUC LSM + wetness_in = read_NetCDF_surface_var(nc_file, 'wetness', i, j, old_chgres, 0) + clw_surf_land_in = read_NetCDF_surface_var(nc_file, 'clw_surf_land', i, j, old_chgres, 0) + clw_surf_ice_in = read_NetCDF_surface_var(nc_file, 'clw_surf_ice', i, j, old_chgres, 0) + qwv_surf_land_in = 
read_NetCDF_surface_var(nc_file, 'qwv_surf_land', i, j, old_chgres, 0) + qwv_surf_ice_in = read_NetCDF_surface_var(nc_file, 'qwv_surf_ice', i, j, old_chgres, 0) + tsnow_land_in = read_NetCDF_surface_var(nc_file, 'tsnow_land', i, j, old_chgres, 0) + tsnow_ice_in = read_NetCDF_surface_var(nc_file, 'tsnow_ice', i, j, old_chgres, 0) snowfall_acc_land_in = read_NetCDF_surface_var(nc_file, 'snowfall_acc_land', i, j, old_chgres, 0) - snowfall_acc_ice_in = read_NetCDF_surface_var(nc_file, 'snowfall_acc_ice', i, j, old_chgres, 0) - sncovr_ice_in = read_NetCDF_surface_var(nc_file, 'sncovr_ice', i, j, old_chgres, 0) - lai_in = read_NetCDF_surface_var(nc_file, 'lai', i, j, old_chgres, 0) - - #read in profiles (would be 3D variables in a 3D model) - - #land_state + snowfall_acc_ice_in = read_NetCDF_surface_var(nc_file, 'snowfall_acc_ice', i, j, old_chgres, 0) + sfalb_lnd_in = read_NetCDF_surface_var(nc_file, 'sfalb_lnd', i, j, old_chgres, 0) + sfalb_lnd_bck_in = read_NetCDF_surface_var(nc_file, 'sfalb_lnd_bck', i, j, old_chgres, 0) + sfalb_ice_in = read_NetCDF_surface_var(nc_file, 'sfalb_ice', i, j, old_chgres, 0) + lai_in = read_NetCDF_surface_var(nc_file, 'lai', i, j, old_chgres, 0) + albdirvis_ice_in = read_NetCDF_surface_var(nc_file, 'albdirvis_ice', i, j, old_chgres, 0) + albdirnir_ice_in = read_NetCDF_surface_var(nc_file, 'albdirnir_ice', i, j, old_chgres, 0) + albdifvis_ice_in = read_NetCDF_surface_var(nc_file, 'albdifvis_ice', i, j, old_chgres, 0) + albdifnir_ice_in = read_NetCDF_surface_var(nc_file, 'albdifnir_ice', i, j, old_chgres, 0) + emis_lnd_in = read_NetCDF_surface_var(nc_file, 'emis_lnd', i, j, old_chgres, 0) + emis_ice_in = read_NetCDF_surface_var(nc_file, 'emis_ice', i, j, old_chgres, 0) + + # + # read in profiles (3D) + # + + # land_state stc_in = read_NetCDF_surface_var(nc_file, 'stc', i, j, old_chgres, missing_variable_soil_layers) smc_in = read_NetCDF_surface_var(nc_file, 'smc', i, j, old_chgres, missing_variable_soil_layers) slc_in = 
read_NetCDF_surface_var(nc_file, 'slc', i, j, old_chgres, missing_variable_soil_layers) - #NoahMP 3D variables + # NoahMP LSM snicexy_in = read_NetCDF_surface_var(nc_file, 'snicexy', i, j, old_chgres, missing_variable_snow_layers) snliqxy_in = read_NetCDF_surface_var(nc_file, 'snliqxy', i, j, old_chgres, missing_variable_snow_layers) - tsnoxy_in = read_NetCDF_surface_var(nc_file, 'tsnoxy', i, j, old_chgres, missing_variable_snow_layers) + tsnoxy_in = read_NetCDF_surface_var(nc_file, 'tsnoxy', i, j, old_chgres, missing_variable_snow_layers) smoiseq_in = read_NetCDF_surface_var(nc_file, 'smoiseq', i, j, old_chgres, missing_variable_soil_layers) zsnsoxy_in = read_NetCDF_surface_var(nc_file, 'zsnsoxy', i, j, old_chgres, missing_variable_soil_layers + missing_variable_snow_layers) - #RUC LSM 3D variables - tslb_in = read_NetCDF_surface_var(nc_file, 'tslb', i, j, old_chgres, missing_variable_soil_layers) + # RUC LSM + tslb_in = read_NetCDF_surface_var(nc_file, 'tslb', i, j, old_chgres, missing_variable_soil_layers) smois_in = read_NetCDF_surface_var(nc_file, 'smois', i, j, old_chgres, missing_variable_soil_layers) - sh2o_in = read_NetCDF_surface_var(nc_file, 'sh2o', i, j, old_chgres, missing_variable_soil_layers) - smfr_in = read_NetCDF_surface_var(nc_file, 'smfr', i, j, old_chgres, missing_variable_soil_layers) - flfr_in = read_NetCDF_surface_var(nc_file, 'flfr', i, j, old_chgres, missing_variable_soil_layers) + sh2o_in = read_NetCDF_surface_var(nc_file, 'sh2o', i, j, old_chgres, missing_variable_soil_layers) + smfr_in = read_NetCDF_surface_var(nc_file, 'smfr', i, j, old_chgres, missing_variable_soil_layers) + flfr_in = read_NetCDF_surface_var(nc_file, 'flfr', i, j, old_chgres, missing_variable_soil_layers) - #fractional grid 3D variables + # fractional grid tiice_in = read_NetCDF_surface_var(nc_file, 'tiice', i, j, old_chgres, missing_variable_ice_layers) - #print("zorlwav_in = {}".format(zorlwav_in)) - + # nc_file.close() #put data in a dictionary surface = { - #Noah 
LSM + # Noah LSM "tsfco": tsfco_in, "tg3": tg3_in, "uustar": uustar_in, @@ -579,21 +1186,21 @@ def get_UFS_surface_data(dir, tile, i, j, old_chgres): "alnwf": alnwf_in, "facsf": facsf_in, "facwf": facwf_in, - "styp": styp_in, - "slope": slope_in, - "vtyp": vtyp_in, - "vfrac": vfrac_in, + "soiltyp": styp_in, + "slopetyp": slope_in, + "vegtyp": vtyp_in, + "vegfrac": vfrac_in, "shdmin": shdmin_in, "shdmax": shdmax_in, - "zorlw": zorlw_in, + "z0": zorlo_in, "slmsk": slmsk_in, "canopy": canopy_in, "hice": hice_in, "fice": fice_in, "tisfc": tisfc_in, - "snwdph": snwdph_in, + "snowd": snwdph_in, "snoalb": snoalb_in, - "sheleg": sheleg_in, + "weasd": sheleg_in, "f10m": f10m_in, "t2m": t2m_in, "q2m": q2m_in, @@ -606,7 +1213,7 @@ def get_UFS_surface_data(dir, tile, i, j, old_chgres): "zorll": zorll_in, "zorli": zorli_in, #cplwav - "zorlwav": zorlwav_in, + "zorlw": zorlw_in, #NSST "tref": tref_in, "z_c": z_c_in, @@ -656,12 +1263,7 @@ def get_UFS_surface_data(dir, tile, i, j, old_chgres): "smcwtdxy": smcwtdxy_in, "deeprechxy": deeprechxy_in, "rechxy": rechxy_in, - "albdvis": albdvis_in, - "albdnir": albdnir_in, - "albivis": albivis_in, - "albinir": albinir_in, - "emiss": emiss_in, - #RUC LSM + # RUC LSM 2D "wetness": wetness_in, "clw_surf_land": clw_surf_land_in, "clw_surf_ice": clw_surf_ice_in, @@ -671,8 +1273,16 @@ def get_UFS_surface_data(dir, tile, i, j, old_chgres): "tsnow_ice": tsnow_ice_in, "snowfall_acc_land": snowfall_acc_land_in, "snowfall_acc_ice": snowfall_acc_ice_in, - "sncovr_ice": sncovr_ice_in, + "sfalb_lnd": sfalb_lnd_in, + "sfalb_lnd_bck": sfalb_lnd_bck_in, + "sfalb_ice": sfalb_ice_in, "lai": lai_in, + "albdirvis_ice": albdirvis_ice_in, + "albdirnir_ice": albdirnir_ice_in, + "albdifvis_ice": albdifvis_ice_in, + "albdifnir_ice": albdifnir_ice_in, + "emis_lnd": emis_lnd_in, + "emis_ice": emis_ice_in, #Noah LSM 3D "stc": stc_in, "smc": smc_in, @@ -694,99 +1304,128 @@ def get_UFS_surface_data(dir, tile, i, j, old_chgres): } return surface -def 
get_UFS_oro_data(dir, tile, i, j): +######################################################################################## +# +######################################################################################## +def get_UFS_oro_data(dir, tile, i, j, lam): """Get the orographic data for the given tile and indices""" - filename_pattern = 'oro_data.tile{0}.nc'.format(tile) - for f_name in os.listdir(dir): - if fnmatch.fnmatch(f_name, filename_pattern): - filename = f_name - - nc_file = Dataset('{0}/{1}'.format(dir,filename)) + if lam: + nc_file = Dataset('{0}/{1}'.format(dir,'oro_data.nc')) + else: + filename_pattern = 'oro_data.tile{0}.nc'.format(tile) + for f_name in os.listdir(dir): + if fnmatch.fnmatch(f_name, filename_pattern): + filename = f_name + + nc_file = Dataset('{0}/{1}'.format(dir,filename)) # orographyic properties - stddev_in = read_NetCDF_var(nc_file, "stddev", i, j) - convexity_in = read_NetCDF_var(nc_file, "convexity", i, j) - oa1_in = read_NetCDF_var(nc_file, "oa1", i, j) - oa2_in = read_NetCDF_var(nc_file, "oa2", i, j) - oa3_in = read_NetCDF_var(nc_file, "oa3", i, j) - oa4_in = read_NetCDF_var(nc_file, "oa4", i, j) - ol1_in = read_NetCDF_var(nc_file, "ol1", i, j) - ol2_in = read_NetCDF_var(nc_file, "ol2", i, j) - ol3_in = read_NetCDF_var(nc_file, "ol3", i, j) - ol4_in = read_NetCDF_var(nc_file, "ol4", i, j) - theta_in = read_NetCDF_var(nc_file, "theta", i, j) - gamma_in = read_NetCDF_var(nc_file, "gamma", i, j) - sigma_in = read_NetCDF_var(nc_file, "sigma", i, j) - elvmax_in = read_NetCDF_var(nc_file, "elvmax", i, j) - orog_filt_in = read_NetCDF_var(nc_file, "orog_filt", i, j) - orog_raw_in = read_NetCDF_var(nc_file, "orog_raw", i, j) + stddev_in = read_NetCDF_var(nc_file, "stddev", i, j) + convexity_in = read_NetCDF_var(nc_file, "convexity", i, j) + oa1_in = read_NetCDF_var(nc_file, "oa1", i, j) + oa2_in = read_NetCDF_var(nc_file, "oa2", i, j) + oa3_in = read_NetCDF_var(nc_file, "oa3", i, j) + oa4_in = read_NetCDF_var(nc_file, "oa4", i, j) 
+ ol1_in = read_NetCDF_var(nc_file, "ol1", i, j) + ol2_in = read_NetCDF_var(nc_file, "ol2", i, j) + ol3_in = read_NetCDF_var(nc_file, "ol3", i, j) + ol4_in = read_NetCDF_var(nc_file, "ol4", i, j) + theta_in = read_NetCDF_var(nc_file, "theta", i, j) + gamma_in = read_NetCDF_var(nc_file, "gamma", i, j) + sigma_in = read_NetCDF_var(nc_file, "sigma", i, j) + elvmax_in = read_NetCDF_var(nc_file, "elvmax", i, j) + orog_filt_in = read_NetCDF_var(nc_file, "orog_filt", i, j) + orog_raw_in = read_NetCDF_var(nc_file, "orog_raw", i, j) #fractional landmask variables - land_frac_in = read_NetCDF_var(nc_file, "land_frac", i, j) - #lake variables - lake_frac_in = read_NetCDF_var(nc_file, "lake_frac", i, j) + land_frac_in = read_NetCDF_var(nc_file, "land_frac", i, j) + #lake variables (optional) + lake_frac_in = read_NetCDF_var(nc_file, "lake_frac", i, j) lake_depth_in = read_NetCDF_var(nc_file, "lake_depth", i, j) - + + # nc_file.close() - #put data in a dictionary - oro = { - "stddev": stddev_in, - "convexity": convexity_in, - "oa1": oa1_in, - "oa2": oa2_in, - "oa3": oa3_in, - "oa4": oa4_in, - "ol1": ol1_in, - "ol2": ol2_in, - "ol3": ol3_in, - "ol4": ol4_in, - "theta": theta_in, - "gamma": gamma_in, - "sigma": sigma_in, - "elvmax": elvmax_in, - "orog_filt": orog_filt_in, - "orog_raw": orog_raw_in, - "land_frac": land_frac_in, - "lake_frac": lake_frac_in, - "lake_depth": lake_depth_in - } + # Store data in a dictionary + oro = {"stddev": stddev_in, + "convexity": convexity_in, + "oa1": oa1_in, + "oa2": oa2_in, + "oa3": oa3_in, + "oa4": oa4_in, + "ol1": ol1_in, + "ol2": ol2_in, + "ol3": ol3_in, + "ol4": ol4_in, + "theta": theta_in, + "gamma": gamma_in, + "sigma": sigma_in, + "elvmax": elvmax_in, + "oro": orog_filt_in, + "oro_uf": orog_raw_in, + "landfrac": land_frac_in, + "lakefrac": lake_frac_in, + "lakedepth": lake_depth_in} + return oro +######################################################################################## +# 
+######################################################################################## def get_UFS_vgrid_data(dir): """Get the vertical grid data for resolution of the data within the IC directory""" nc_file = Dataset('{0}/{1}'.format(dir,'gfs_ctrl.nc')) # vertical coordinate definition + # GJF: it looks like there is an extra level on top that represents 0 Pa, otherwise these values are for vertical grid interfaces ak=nc_file['vcoord'][0,::-1] bk=nc_file['vcoord'][1,::-1] + #GJF: in external_ic.F90, when external_eta is true (which it apparently is for FV3GFS runs), the top value is ignored + #ak = ak[0:len(ak)-1] + #bk = bk[0:len(bk)-1] + nc_file.close() vgrid = { "ak": ak, - "bk": bk + "bk": bk, + "nlevs": len(ak)-2 #full grid levels are interfaces - 1 and there is an extra level on top (subtract 2) } return vgrid -def get_UFS_grid_area(dir, tile, i, j): +######################################################################################## +# +######################################################################################## +def get_UFS_grid_area(dir, tile, i, j, lam): """Get the horizontal grid cell area for the given tile and indices""" #this information is in the supergrid files - filename_pattern = '*grid.tile{0}.nc'.format(tile) - - for f_name in os.listdir(dir): - if fnmatch.fnmatch(f_name, filename_pattern): - filename = f_name - if not filename: - message = 'No filenames matching the pattern {0} found in {1}'.format(filename_pattern,dir) - logging.critical(message) - raise Exception(message) - + if lam: + filename_pattern = '*grid.tile{0}.halo{1}.nc'.format(tile, n_lam_halo_points) + for f_name in os.listdir(dir): + if fnmatch.fnmatch(f_name, filename_pattern): + filename = f_name + if not filename: + message = 'No filenames matching the pattern {0} found in {1}'.format(filename_pattern,dir) + logging.critical(message) + raise Exception(message) + else: + filename_pattern = '*grid.tile{0}.nc'.format(tile) + + for f_name in os.listdir(dir): + 
if fnmatch.fnmatch(f_name, filename_pattern): + filename = f_name + if not filename: + message = 'No filenames matching the pattern {0} found in {1}'.format(filename_pattern,dir) + logging.critical(message) + raise Exception(message) + nc_file = Dataset('{0}/{1}'.format(dir,filename)) + # extract out area of grid cell #calculate supergrid indices from regular grid indices @@ -794,1291 +1433,1307 @@ def get_UFS_grid_area(dir, tile, i, j): ipt2 = i*2+1 #from Phil Pegion: the area is calculated by adding up the 4 components of the contained supergrid cells - area_in=nc_file['area'][jpt2-1:jpt2+1,ipt2-1:ipt2+1] + if lam: + area_data = nc_file['area'][:,:] + area_data_no_halo = area_data[2*n_lam_halo_points:area_data.shape[0]-2*n_lam_halo_points,2*n_lam_halo_points:area_data.shape[1]-2*n_lam_halo_points] + area_in = area_data_no_halo[jpt2-1:jpt2+1,ipt2-1:ipt2+1] + else: + area_in=nc_file['area'][jpt2-1:jpt2+1,ipt2-1:ipt2+1] return area_in.sum() -def get_UFS_forcing_data(nlevs): +######################################################################################## +# +######################################################################################## +def search_in_dict(listin,name): + for count, dictionary in enumerate(listin): + if dictionary["name"] == name: + return count + +######################################################################################## +# +######################################################################################## +def get_UFS_forcing_data(nlevs, state_IC, location, use_nearest, forcing_dir, grid_dir, + tile, i, j, lam, save_comp_data): """Get the horizontal and vertical advective tendencies for the given tile and indices""" - - #Note: this is a placeholder function that sets forcing to 0, but will need to be filled out in the future from custom FV3 output - - ntimes = 1 - - time = np.zeros(ntimes) - w_ls = np.zeros((nlevs,ntimes),dtype=float) - omega = np.zeros((nlevs,ntimes),dtype=float) - u_g = 
np.zeros((nlevs,ntimes),dtype=float) - v_g = np.zeros((nlevs,ntimes),dtype=float) - u_nudge = np.zeros((nlevs,ntimes),dtype=float) - v_nudge = np.zeros((nlevs,ntimes),dtype=float) - T_nudge = np.zeros((nlevs,ntimes),dtype=float) - thil_nudge = np.zeros((nlevs,ntimes),dtype=float) - qt_nudge = np.zeros((nlevs,ntimes),dtype=float) - rad_heating = np.zeros((nlevs,ntimes),dtype=float) - h_advec_thil = np.zeros((nlevs,ntimes),dtype=float) - v_advec_thil = np.zeros((nlevs,ntimes),dtype=float) - h_advec_qt = np.zeros((nlevs,ntimes),dtype=float) - v_advec_qt = np.zeros((nlevs,ntimes),dtype=float) - - forcing = { - "time": time, - "w_ls": w_ls, - "omega": omega, - "u_g": u_g, - "v_g": v_g, - "u_nudge": u_nudge, - "v_nudge": v_nudge, - "T_nudge": T_nudge, - "thil_nudge": thil_nudge, - "qt_nudge": qt_nudge, - "rad_heating": rad_heating, - "h_advec_thil": h_advec_thil, - "v_advec_thil": v_advec_thil, - "h_advec_qt": h_advec_qt, - "v_advec_qt": v_advec_qt - } - - return forcing - -def add_noahmp_coldstart(surface, date): - """Add cold-start ICs for the NoahMP LSM from Noah LSM variables""" - - #use cold start section of FV3/io/FV3GFS_io.F90 to initialize NoahMP-specific variables (this is a python port of the Fortran code in that file) - - #MPTABLE.TBL uses a namelist format, so can use f90nml to read it in - mptable_nml_all = f90nml.read(os.path.join(NOAHMP_TABLES_DIR, 'MPTABLE.TBL')) - #MPTABLE.TBL contains data (with distinct namelists) for USGS and MODIS data; looks like MODIS is the operational - mptable_nml_active = mptable_nml_all['noah_mp_modis_parameters'] #alternative is mptable_nml_all['noah_mp_usgs_parameters'] - - #operational values; change if necessary (or read from somewhere?) 
- n_snow_layers = 3 - n_soil_layers = 4 - - #thickness of each soil level - dzs = np.array([0.1,0.3,0.6,1.0]) - - #bottom depth of each soil level - zsoil = np.array([-0.1,-0.4,-1.0,-2.0]) - - #initialize all NoahMP vars as missing - surface["tvxy"] = missing_value - surface["tgxy"] = missing_value - surface["tahxy"] = missing_value - surface["canicexy"] = missing_value - surface["canliqxy"] = missing_value - surface["eahxy"] = missing_value - surface["cmxy"] = missing_value - surface["chxy"] = missing_value - surface["fwetxy"] = missing_value - surface["sneqvoxy"] = missing_value - surface["alboldxy"] = missing_value - surface["qsnowxy"] = missing_value - surface["wslakexy"] = missing_value - surface["taussxy"] = missing_value - surface["waxy"] = missing_value - surface["wtxy"] = missing_value - surface["zwtxy"] = missing_value - surface["xlaixy"] = missing_value - surface["xsaixy"] = missing_value - - surface["lfmassxy"] = missing_value - surface["stmassxy"] = missing_value - surface["rtmassxy"] = missing_value - surface["woodxy"] = missing_value - surface["stblcpxy"] = missing_value - surface["fastcpxy"] = missing_value - surface["smcwtdxy"] = missing_value - surface["deeprechxy"] = missing_value - surface["rechxy"] = missing_value - - surface["snowxy"] = missing_value - surface["snicexy"] = np.ones(n_snow_layers)*missing_value - surface["snliqxy"] = np.ones(n_snow_layers)*missing_value - surface["tsnoxy"] = np.ones(n_snow_layers)*missing_value - surface["smoiseq"] = np.ones(n_soil_layers)*missing_value - surface["zsnsoxy"] = np.ones(n_snow_layers + n_soil_layers)*missing_value - - if surface["slmsk"] > 0.01: - surface["tvxy"] = surface["tsfco"] - surface["tgxy"] = surface["tsfco"] - surface["tahxy"] = surface["tsfco"] - - if (surface["snwdph"] > 0.01 and surface["tsfco"] > 273.15 ): - surface["tvxy"] = 273.15 - surface["tgxy"] = 273.15 - surface["tahxy"]= 273.15 - - surface["canicexy"] = 0.0 - surface["canliqxy"] = surface["canopy"] - surface["eahxy"] = 2000.0 
- - # eahxy = psfc*qv/(0.622+qv); qv is mixing ratio, converted from sepcific - # humidity specific humidity /(1.0 - specific humidity) + + # Determine UFS history file format (tiled/quilted) + if lam: + atm_ftag = 'atmf*.tile{0}.nc'.format(tile) + sfc_ftag = 'sfcf*.tile{0}.nc'.format(tile) + else: + atm_ftag = 'atmf*.nc' + sfc_ftag = 'sfcf*.nc' + + # Get list of UFS history files with 3D ATMospheric state variables. + atm_filenames = [] + for f_name in os.listdir(forcing_dir): + if fnmatch.fnmatch(f_name, atm_ftag): + atm_filenames.append(f_name) + if not atm_filenames: + message = 'No filenames matching the pattern {0} found in {1}'. \ + format(atm_ftag,forcing_dir) + logging.critical(message) + raise Exception(message) + atm_filenames = sorted(atm_filenames) + n_filesA = len(atm_filenames) + + # Get list of UFS history files with 2D fields. + sfc_filenames = [] + for f_name in os.listdir(forcing_dir): + if fnmatch.fnmatch(f_name, sfc_ftag): + sfc_filenames.append(f_name) + if not sfc_filenames: + message = 'No filenames matching the pattern {0} found in {1}'. \ + format(sfc_ftag,forcing_dir) + logging.critical(message) + raise Exception(message) + sfc_filenames = sorted(sfc_filenames) + n_filesS = len(sfc_filenames) + + if (n_filesS == n_filesA): + n_files = n_filesA + else: + message = 'Number of UFS 2D/3D history files is inconsistent' + logging.critical(message) + raise Exception(message) + + # Physical constants (used by FV3 remapping functions) + kord_tm = -9 + kord_tr = 9 + t_min = 184.0 + q_min = 0.0 + sec_in_hr = 3600. + + #################################################################################### + # + # Read in atmospheric state, atmf*.nc history files. + # + #################################################################################### + + # Find nearest point on UFS history file (quilted) grid. 
+ if use_nearest: + (tile_jj, tile_ii, point_lon, point_lat, dist_min) = find_loc_indices(location, forcing_dir, -999, lam) + print('The closest point has indices [{0},{1}]'.format(tile_ii,tile_jj)) + print('This index has a central longitude/latitude of [{0},{1}]'.format(point_lon,point_lat)) + print('This grid cell is approximately {0} km away from the desired location of {1} {2}'.format(dist_min/1.0E3,location[0],location[1])) + + # + ps = [] + p_lev = [] + p_lay = [] + t_lay = [] + qv_lay = [] + u_lay = [] + v_lay = [] + time_hr = [] + + # Get grid from UFS IC data + (ic_grid_lon, ic_grid_lat) = get_initial_lon_lat_grid(grid_dir, tile, lam) + + # Read in 3D UFS history files + for count, filename in enumerate(atm_filenames, start=1): + nc_file = Dataset('{0}/{1}'.format(forcing_dir,filename)) + nc_file.set_always_mask(False) - surface["cmxy"] = 0.0 - surface["chxy"] = 0.0 - surface["fwetxy"] = 0.0 - surface["sneqvoxy"] = surface["sheleg"] # mm - surface["alboldxy"] = 0.65 - surface["qsnowxy"] = 0.0 + # Check if UFS history file grid is different than UFS initial-condition grid. + if not use_nearest: + try: + data_grid_lon = nc_file['lon'][:,:] + data_grid_lat = nc_file['lat'][:,:] + except: + data_grid_lon = nc_file['grid_xt'][:,:] + data_grid_lat = nc_file['grid_yt'][:,:] + equal_grids = False + if (ic_grid_lon.shape == data_grid_lon.shape and \ + ic_grid_lat.shape == ic_grid_lat.shape): + if (np.equal(ic_grid_lon,data_grid_lon).all() and \ + np.equal(ic_grid_lat,data_grid_lat).all()): + equal_grids = True + + # If necessary, remap history file (data_grid) to IC file (ic_grid). + if (not equal_grids): + print('Regridding {} onto native grid: regridding progress = {}%'. 
\ + format(filename, 100.0*count/(2*n_files))) + + grid_in = {'lon': data_grid_lon, 'lat': data_grid_lat} + grid_out = {'lon': np.reshape(ic_grid_lon[j,i],(-1,1)), 'lat': \ + np.reshape(ic_grid_lat[j,i],(-1,1))} + regridder = xesmf.Regridder(grid_in, grid_out, 'bilinear') + ps_data = regridder(nc_file['pressfc'][0,:,:]) + t_data = regridder(nc_file['tmp'][0,::-1,:,:]) + qv_data = regridder(nc_file['spfh'][0,::-1,:,:]) + u_data = regridder(nc_file['ugrd'][0,::-1,:,:]) + v_data = regridder(nc_file['vgrd'][0,::-1,:,:]) + i_get = 0 + j_get = 0 + # Same grids for history file (data_grid) to IC file (ic_grid). + else: + ps_data = nc_file['pressfc'][0,:,:] + t_data = nc_file['tmp'][0,::-1,:,:] + qv_data = nc_file['spfh'][0,::-1,:,:] + u_data = nc_file['ugrd'][0,::-1,:,:] + v_data = nc_file['vgrd'][0,::-1,:,:] + i_get = i + j_get = j + else: + print('Using nearest UFS point {} progress = {}%'.format(filename, 100.0*count/(2*n_files))) + ps_data = nc_file['pressfc'][0,:,:] + t_data = nc_file['tmp'][0,::-1,:,:] + qv_data = nc_file['spfh'][0,::-1,:,:] + u_data = nc_file['ugrd'][0,::-1,:,:] + v_data = nc_file['vgrd'][0,::-1,:,:] + j_get = tile_jj + i_get = tile_ii + + # Compute and store vertical grid information. + ak = getattr(nc_file, "ak")[::-1] + bk = getattr(nc_file, "bk")[::-1] + ps.append(ps_data[j_get,i_get]) + nlevs = len(nc_file.dimensions['pfull']) + p_interface = np.zeros(nlevs+1) + for k in range(nlevs+1): + p_interface[k]=ak[k]+ps[-1]*bk[k] + p_lev.append(p_interface) + p_layer = np.zeros(nlevs) + for k in range(nlevs): + p_layer[k] = ((1.0/(rocp+1.0))*(p_interface[k]**(rocp+1.0) - \ + p_interface[k+1]**(rocp+1.0))/(p_interface[k] - \ + p_interface[k+1]))**(1.0/rocp) + p_lay.append(p_layer) + + # Store state variables. 
+ t_lay.append(t_data[:,j_get,i_get]) + qv_lay.append(qv_data[:,j_get,i_get]) + u_lay.append(u_data[:,j_get,i_get]) + v_lay.append(v_data[:,j_get,i_get]) + time_hr.append(nc_file['time'][0]) - surface["wslakexy"] = 0.0 - surface["taussxy"] = 0.0 - surface["waxy"] = 4900.0 - surface["wtxy"] = surface["waxy"] - surface["zwtxy"] = (25.0 + 2.0) - surface["waxy"] / 1000.0 /0.2 + # Close file + nc_file.close() + + # Convert from python list to numpy array + ps = np.asarray(ps) + p_lev = np.asarray(p_lev) + p_lay = np.asarray(p_lay) + t_lay = np.asarray(t_lay) + qv_lay = np.asarray(qv_lay) + u_lay = np.asarray(u_lay) + v_lay = np.asarray(v_lay) + tv_lay = t_lay*(1.0 + zvir*qv_lay) + time_hr = np.asarray(time_hr) + + # Read in 2D UFS history files + vars2d =[{"name":"spfh2m"}, {"name":"tmp2m"}, \ + {"name":"dswrf_ave"}, {"name":"ulwrf_ave"},\ + {"name":"lhtfl_ave"}, {"name":"shtfl_ave"},\ + {"name":"dswrf"}, {"name":"ulwrf"},\ + {"name":"lhtfl"}, {"name":"shtfl"},\ + {"name":"pwat"}, {"name":"vgrd10m"},\ + {"name":"ugrd10m"}] + for var2d in vars2d: var2d["values"] = [] + + for count, filename in enumerate(sfc_filenames, start=1): + nc_file = Dataset('{0}/{1}'.format(forcing_dir,filename)) + nc_file.set_always_mask(False) - vegtyp = np.int(surface['vtyp']) - if (vegtyp == 0): - vegtyp = 7 - if ((vegtyp == mptable_nml_active['ISBARREN']) or (vegtyp == mptable_nml_active['ISSNOW']) or (vegtyp == mptable_nml_active['ISURBAN']) or (vegtyp == mptable_nml_active['ISWATER'])) : - surface["xlaixy"] = 0.0 - surface["xsaixy"] = 0.0 - - surface["lfmassxy"] = 0.0 - surface["stmassxy"] = 0.0 - surface["rtmassxy"] = 0.0 - - surface["woodxy"] = 0.0 - surface["stblcpxy"] = 0.0 - surface["fastcpxy"] = 0.0 + # Check if UFS history file grid is different than UFS initial-condition grid. 
+ if not use_nearest: + try: + data_grid_lon = nc_file['lon'][:,:] + data_grid_lat = nc_file['lat'][:,:] + except: + data_grid_lon = nc_file['grid_xt'][:,:] + data_grid_lat = nc_file['grid_yt'][:,:] + equal_grids = False + if (ic_grid_lon.shape == data_grid_lon.shape and \ + ic_grid_lat.shape == ic_grid_lat.shape): + if (np.equal(ic_grid_lon,data_grid_lon).all() and \ + np.equal(ic_grid_lat,data_grid_lat).all()): + equal_grids = True + + # If necessary, remap history file (data_grid) to IC file (ic_grid). + if (not equal_grids): + print('Regridding {} onto native grid: regridding progress = {}%'. \ + format(filename, 50+50.0*count/(n_files))) + + grid_in = {'lon': data_grid_lon, 'lat': data_grid_lat} + grid_out = {'lon': np.reshape(ic_grid_lon[j,i],(-1,1)), 'lat': \ + np.reshape(ic_grid_lat[j,i],(-1,1))} + regridder = xesmf.Regridder(grid_in, grid_out, 'bilinear') + i_get = 0 + j_get = 0 + # Same grids for history file (data_grid) to IC file (ic_grid). + else: + i_get = i + j_get = j else: - #laim gives monthly values for each of the vegetation types - laim = np.array(mptable_nml_active['LAIM']).reshape(12,20) - - #be sure to use month-1, vegtyp-1 since python is 0-indexed - surface["xlaixy"] = np.amax([laim[date["month"]-1,vegtyp-1],0.05]) - surface["xsaixy"] = np.amax([surface["xlaixy"]*0.1,0.05]) - - sla = np.array(mptable_nml_active['SLA']) - masslai = 1000.0 / np.amax([sla[vegtyp-1],1.0]) - surface["lfmassxy"] = surface["xlaixy"]*masslai - masssai = 1000.0 / 3.0 - surface["stmassxy"] = surface["xsaixy"]*masssai - - surface["rtmassxy"] = 500.0 + print('Using nearest UFS point {} progress = {}%'.format(filename, 50+50.0*count/(n_files))) + j_get = tile_jj + i_get = tile_ii - surface["woodxy"] = 500.0 - surface["stblcpxy"] = 1000.0 - surface["fastcpxy"] = 1000.0 - - if ( vegtyp == mptable_nml_active['ISSNOW'] ): - for k in range(n_soil_layers): - surface["stc"][k] = np.amin([surface["stc"][k],np.amin([surface["tg3"],263.15])]) - surface["smc"][k] = 1 - 
surface["slc"][k] = 0 + for var2d in vars2d: + if not use_nearest: + data = regridder(nc_file[var2d["name"]][0,:,:]) + else: + data = nc_file[var2d["name"]][0,:,:] + var2d["values"].append(data[j_get,i_get]) + var2d["units"] = nc_file[var2d["name"]].getncattr(name="units") + var2d["long_name"] = nc_file[var2d["name"]].getncattr(name="long_name") + + nc_file.close() + + # Convert to numpy arrays + for var2d in vars2d: + var2d["values"] = np.asarray(var2d["values"]) + + # + # Create dictionary with full "Native" state (IC@t=0,ATMF*@t>0) + # + ps_IC = np.zeros([1]) + ps_IC[0] = state_IC["ps"] + pi_IC = np.zeros([1,nlevs+1]) + pi_IC[0,:] = state_IC["pa_i"] + p_IC = np.zeros([1,nlevs]) + p_IC[0,:] = state_IC["pa"] + t_IC = np.zeros([1,nlevs]) + t_IC[0,:] = state_IC["ta"] + qv_IC = np.zeros([1,nlevs]) + qv_IC[0,:] = state_IC["qv"] + u_IC = np.zeros([1,nlevs]) + u_IC[0,:] = state_IC["ua"] + v_IC = np.zeros([1,nlevs]) + v_IC[0,:] = state_IC["va"] + tv_IC = t_IC*(1.0 + zvir*qv_IC) + stateNATIVE = {"time": np.concatenate((np.asarray([0.]), time_hr[:])), \ + "ps": np.concatenate((ps_IC, ps), axis=0), \ + "p_lev": np.concatenate((pi_IC, p_lev), axis=0), \ + "p_lay": np.concatenate((p_IC, p_lay), axis=0), \ + "t_lay": np.concatenate((t_IC, t_lay), axis=0), \ + "qv_lay": np.concatenate((qv_IC, qv_lay), axis=0), \ + "u_lay": np.concatenate((u_IC, u_lay), axis=0), \ + "v_lay": np.concatenate((v_IC, v_lay), axis=0), \ + "tv_lay": np.concatenate((tv_IC, tv_lay), axis=0)} + + #################################################################################### + # + # The "total" advection, where "total" = "advective + remapping", can be computed + # directly by taking the difference across time. + # The advective part is the piece needed to force the SCM, which we can "back-out" + # by computing the "remapping" component, and removing it from the total. + # + # At initial-time (ICs) + # - Remap IC state to vertical grid of first UFS history file. 
+ # + # At subsequent time(s) + # - Remap UFS state at current time-step to vertical grid of subsequent time-step + # (used for differencing) + # + # This "REGRID"ed state is used to remove the impact vertical remapping on the + # change of state. + # + # *NOTE* The first UFS history file is AFTER the first physics timestep. Subsequent + # history files are controlled by "output_fh" in the UFS. + # + #################################################################################### + + # + nlevs = len(p_lay[0,:]) + dummy = np.zeros(1) + from_p = np.zeros([1,nlevs+1]) + to_p = np.zeros([1,nlevs+1]) + log_from_p = np.zeros([1,nlevs+1]) + log_to_p = np.zeros([1,nlevs+1]) + dp2 = np.zeros([1,nlevs]) + tv_rev = np.zeros([1,nlevs]) + qv_rev = np.zeros([1,nlevs]) + u_rev = np.zeros([1,nlevs]) + v_rev = np.zeros([1,nlevs]) + tv_layr = np.zeros([n_files+1,nlevs]) + qv_layr = np.zeros([n_files+1,nlevs]) + u_layr = np.zeros([n_files+1,nlevs]) + v_layr = np.zeros([n_files+1,nlevs]) + p_layr = np.zeros([n_files+1,nlevs]) + p_levr = np.zeros([n_files+1,nlevs+1]) + + # + # First timestep... + # + + # Interpolation range + from_p[0,:] = state_IC["pa_i"][::-1] + to_p[0,:] = p_lev[0,::-1] + log_from_p[0,:] = np.log(state_IC["pa_i"][::-1]) + log_to_p[0,:] = np.log(p_lev[0,::-1]) + + # IC Virtual Temperature on vertical-grid of first UFS history file. + tv_init = state_IC["ta"]*(1.0 + zvir*state_IC["qv"]) + tv_init_rev = tv_init[::-1] + tv_rev_new = fv3_remap.map_scalar(nlevs, log_from_p, tv_init_rev[np.newaxis, :], \ + dummy, nlevs, log_to_p, 0, 0, 1, \ + np.abs(kord_tm), t_min) + + # IC Specific humidity on vertical-grid of first UFS history file. + qv_init_rev = state_IC["qv"][::-1] + for k in range(0,nlevs): dp2[0,k] = from_p[0,k+1] - from_p[0,k] + qv_rev_new = fv3_remap.map1_q2(nlevs, from_p, qv_init_rev[np.newaxis, :], \ + nlevs, to_p, dp2, 0, 0, 0, kord_tr, q_min) + + # IC Zonal wind on vertical-grid of first UFS history file. 
+ u_init_rev = state_IC["ua"][::-1] + u_rev_new = fv3_remap.map1_ppm(nlevs, from_p, u_init_rev[np.newaxis, :], 0.0, \ + nlevs, to_p, 0, 0, -1, kord_tm ) + + # IC Meridional wind on vertical-grid of first UFS history file. + v_init_rev = state_IC["va"][::-1] + v_rev_new = fv3_remap.map1_ppm(nlevs, from_p, v_init_rev[np.newaxis, :], 0.0, \ + nlevs, to_p, 0, 0, -1, kord_tm ) + + # Store + p_layr[0,:] = p_lay[0,:] + p_levr[0,:] = p_lev[0,:] + v_layr[0,:] = v_rev_new[0,::-1] + u_layr[0,:] = u_rev_new[0,::-1] + tv_layr[0,:] = tv_rev_new[0,::-1] + qv_layr[0,:] = qv_rev_new[0,::-1] + + # + # Subsequent timestep(s)... + # (current state on vertical grid of subsequent time-step(s). Used for differencing) + # + + # + for t in range(n_files-1): + # + from_p[0,:] = p_lev[t,::-1] + to_p[0,:] = p_lev[t+1,::-1] + log_from_p[0,:] = np.log(p_lev[t,::-1]) + log_to_p[0,:] = np.log(p_lev[t+1,::-1]) + + # Virtual Temperature @ time > 0 + tv_rev[0,:] = tv_lay[t,::-1] + tv_rev_new = fv3_remap.map_scalar(nlevs, log_from_p, tv_rev, dummy, nlevs, \ + log_to_p, 0, 0, 1, np.abs(kord_tm), t_min) + # Specific humidity @ time > 0 + qv_rev[0,:] = qv_lay[t,::-1] + for k in range(0,nlevs): dp2[0,k] = to_p[0,k+1] - to_p[0,k] + qv_rev_new = fv3_remap.map1_q2(nlevs, from_p, qv_rev, nlevs, to_p, dp2, \ + 0, 0, 0, kord_tr, q_min) + # Zonal wind @ time > 0 + u_rev[0,:] = u_lay[t,::-1] + u_rev_new = fv3_remap.map1_ppm(nlevs, from_p, u_rev, 0.0, nlevs, to_p, \ + 0, 0, -1, kord_tm ) + # Meridional wind @ time > 0 + v_rev[0,:] = v_lay[t,::-1] + v_rev_new = fv3_remap.map1_ppm(nlevs, from_p, v_rev, 0.0, nlevs, to_p, \ + 0, 0, -1, kord_tm ) + + # Store + p_layr[t+1,:] = p_lay[t+1,:] + p_levr[t+1,:] = p_lev[t+1,:] + tv_layr[t+1,:] = tv_rev_new[0,::-1] + qv_layr[t+1,:] = qv_rev_new[0,::-1] + u_layr[t+1,:] = u_rev_new[0,::-1] + v_layr[t+1,:] = v_rev_new[0,::-1] + + # + p_layr[t+2,:] = p_layr[t+1,:] + p_levr[t+2,:] = p_levr[t+1,:] + tv_layr[t+2,:] = tv_layr[t+1,:] + qv_layr[t+2,:] = qv_layr[t+1,:] + u_layr[t+2,:] = 
u_layr[t+1,:] + v_layr[t+2,:] = v_layr[t+1,:] + + # Temperature + t_layr = tv_layr/(1.0 + zvir*qv_layr) + + # Create dictionary with "Regridded" state + stateREGRID = {"time": np.concatenate((np.asarray([0.]), time_hr[:])), \ + "ps": np.concatenate((ps[0:1], ps), axis=0), \ + "p_lev": p_levr, \ + "p_lay": p_layr, \ + "t_lay": t_layr, \ + "qv_lay": qv_layr, \ + "u_lay": u_layr, \ + "v_lay": v_layr, \ + "tv_lay": tv_layr } + + #################################################################################### + # + # Compute tendencies advective = total - remapping + # + #################################################################################### + + # + dtdt_adv = np.zeros([n_files+1,nlevs]) + dqvdt_adv = np.zeros([n_files+1,nlevs]) + dudt_adv = np.zeros([n_files+1,nlevs]) + dvdt_adv = np.zeros([n_files+1,nlevs]) + pres_adv = np.zeros([n_files+1,nlevs]) + pres_i_adv = np.zeros([n_files+1,nlevs+1]) + tend_remap = np.zeros([1,nlevs]) + tend_total = np.zeros([1,nlevs]) + + # + for t in range(n_files): + # + dtime_sec = (stateNATIVE["time"][t+1] - stateNATIVE["time"][t])*sec_in_hr + # + pres_adv[t,:] = stateNATIVE["p_lay"][t,:] + pres_i_adv[t,:] = stateNATIVE["p_lev"][t,:] + # + tend_total[0,:] = stateREGRID["t_lay"][t+1,:] - stateREGRID["t_lay"][t,:] + tend_remap[0,:] = stateREGRID["t_lay"][t,:] - stateNATIVE["t_lay"][t,:] + dtdt_adv[t,:] = (tend_total[0,:] - tend_remap[0,:]) / dtime_sec + # + tend_total[0,:] = stateREGRID["qv_lay"][t+1,:] - stateREGRID["qv_lay"][t,:] + tend_remap[0,:] = stateREGRID["qv_lay"][t,:] - stateNATIVE["qv_lay"][t,:] + dqvdt_adv[t,:] = (tend_total[0,:] - tend_remap[0,:]) / dtime_sec + # + tend_total[0,:] = stateREGRID["u_lay"][t+1,:] - stateREGRID["u_lay"][t,:] + tend_remap[0,:] = stateREGRID["u_lay"][t,:] - stateNATIVE["u_lay"][t,:] + dudt_adv[t,:] = (tend_total[0,:] - tend_remap[0,:]) / dtime_sec + # + tend_total[0,:] = stateREGRID["v_lay"][t+1,:] - stateREGRID["v_lay"][t,:] + tend_remap[0,:] = stateREGRID["v_lay"][t,:] - 
stateNATIVE["v_lay"][t,:] + dvdt_adv[t,:] = (tend_total[0,:] - tend_remap[0,:]) / dtime_sec + + # + dtdt_adv[t+1,:] = dtdt_adv[t,:] + dqvdt_adv[t+1,:] = dqvdt_adv[t,:] + dudt_adv[t+1,:] = dudt_adv[t,:] + dvdt_adv[t+1,:] = dvdt_adv[t,:] + pres_adv[t+1,:] = pres_adv[t,:] + pres_i_adv[t+1,:] = pres_i_adv[t,:] + + if save_comp_data: + # + t_layr = np.zeros([n_files+1,nlevs]) + qv_layr = np.zeros([n_files+1,nlevs]) + u_layr = np.zeros([n_files+1,nlevs]) + v_layr = np.zeros([n_files+1,nlevs]) + p_layr = np.zeros([n_files+1,nlevs]) + + # + for t in range(0,n_files): + from_p[0,:] = stateNATIVE["p_lev"][t,::-1] + to_p[0,:] = stateNATIVE["p_lev"][1,::-1] + log_from_p[0,:] = np.log(from_p[0,:]) + log_to_p[0,:] = np.log(to_p[0,:]) + p_layr[t,:] = stateNATIVE["p_lay"][1,::-1] + for k in range(0,nlevs): dp2[0,k] = to_p[0,k+1] - to_p[0,k] + t_layr[t,:] = fv3_remap.map_scalar(nlevs, log_from_p, stateNATIVE["t_lay"][t:t+1,::-1], \ + dummy, nlevs, log_to_p, 0, 0, 1, np.abs(kord_tm), t_min) + qv_layr[t,:] = fv3_remap.map1_q2(nlevs, from_p, stateNATIVE["qv_lay"][t:t+1,::-1], \ + nlevs, to_p, dp2, 0, 0, 0, kord_tr, q_min) + u_layr[t,:] = fv3_remap.map1_ppm(nlevs, from_p, stateNATIVE["u_lay"][t:t+1,::-1], \ + 0.0, nlevs, to_p, 0, 0, -1, kord_tm) + v_layr[t,:] = fv3_remap.map1_ppm(nlevs, from_p, stateNATIVE["v_lay"][t:t+1,::-1], \ + 0.0, nlevs, to_p, 0, 0, -1, kord_tm) + + t_layr[t+1,:] = t_layr[t,:] + qv_layr[t+1,:] = qv_layr[t,:] + u_layr[t+1,:] = u_layr[t,:] + v_layr[t+1,:] = v_layr[t,:] + p_layr[t+1,:] = p_layr[t,:] + + #################################################################################### + # + # if we had atmf,sfcf files at every timestep (and the SCM timestep is made to match + # the UFS), then dqvdt_adv should be applied uninterpolated for each time step. 
If + # atmf and sfcf files represent time averages over the previous diagnostic period, + # and if forcing terms are interpolatd in time in the SCM, then dqvdt_adv should + # represent the forcing values in the middle of time[t] and time[t+1] from atmf/sfcf. + # That way, the time-averaged applied forcing from time[t] to time[t+1] in the SCM + # will be equal to what is derived from atmf/sfcf. (preference should be to have + # option to remove time-interpolation of forcing such that the constant forcing + # applied converged to time-step values as the diag interval approaches the time + # step) + # + #################################################################################### + time_method = 'constant_simple' #this is not implemented in the SCM code yet + #time_method = 'constant_interp' + #time_method = 'gradient' #this produced wonky results in the SCM; avoid until investigated more + + if (time_method == 'constant_simple'): + print('Forcing should not be interpolated in time. 
Rather, forcing should held constant at their current values until the next forcing interval is reached.') + ntimes = n_files + time = np.zeros(ntimes) - snd = surface["snwdph"]/1000.0 # go to m from snwdph + p_s = np.zeros((ntimes),dtype=float) + pressure_forc = np.zeros((nlevs,ntimes),dtype=float) + tot_advec_T = np.zeros((nlevs,ntimes),dtype=float) + tot_advec_qv = np.zeros((nlevs,ntimes),dtype=float) + tot_advec_u = np.zeros((nlevs,ntimes),dtype=float) + tot_advec_v = np.zeros((nlevs,ntimes),dtype=float) - if (surface["sheleg"] != 0.0 and snd == 0.0 ): - snd = surface["sheleg"]/1000.0 - - if (vegtyp == 15): # land ice in MODIS/IGBP - if ( surface["sheleg"] < 0.1): - surface["sheleg"] = 0.1 - snd = 0.01 + p_s[0] = ps[0] + pressure_forc[:,0] = pres_adv[0,:] + tot_advec_T[:,0] = dtdt_adv[0,:] + tot_advec_qv[:,0] = dqvdt_adv[0,:] + tot_advec_u[:,0] = dudt_adv[0,:] + tot_advec_v[:,0] = dvdt_adv[0,:] - dzsno = np.zeros(n_snow_layers) - if (snd < 0.025 ): - surface["snowxy"] = 0.0 - dzsno[:] = 0.0 - elif (snd >= 0.025 and snd <= 0.05 ): - surface["snowxy"] = -1.0 - dzsno[-1] = snd - elif (snd > 0.05 and snd <= 0.10 ): - surface["snowxy"] = -2.0 - dzsno[-2] = 0.5*snd - dzsno[-1] = 0.5*snd - elif (snd > 0.10 and snd <= 0.25 ): - surface["snowxy"] = -2.0 - dzsno[-2] = 0.05 - dzsno[-1] = snd - 0.05 - elif (snd > 0.25 and snd <= 0.45 ): - surface["snowxy"] = -3.0 - dzsno[-3] = 0.05 - dzsno[-2] = 0.5*(snd-0.05) - dzsno[-1] = 0.5*(snd-0.05) - elif (snd > 0.45): - surface["snowxy"] = -3.0 - dzsno[-3] = 0.05 - dzsno[-2] = 0.20 - dzsno[-1] = snd - 0.05 - 0.20 - else: - message = 'problem with the logic assigning snow layers.' 
- logging.critical(message) - raise Exception(message) + for t in range(1,n_files): + time[t] = sec_in_hr*time_hr[t-1] + p_s[t] = ps[t] + pressure_forc[:,t] = pres_adv[t,:] + tot_advec_T[:,t] = dtdt_adv[t,:] + tot_advec_qv[:,t] = dqvdt_adv[t,:] + tot_advec_u[:,t] = dudt_adv[t,:] + tot_advec_v[:,t] = dvdt_adv[t,:] + elif (time_method == 'constant_interp'): + print('Forcing can be interpolated in time, but the time values are chosen such that forcing will effectively be held consant during a diagnostic time interval.') + ntimes = 2*n_files - surface["tsnoxy"][:] = 0.0 - surface["snicexy"][:] = 0.0 - surface["snliqxy"][:] = 0.0 - surface["zsnsoxy"][:] = 0.0 + time_setback = 1.0 #s - isnow = np.int(surface["snowxy"] + n_snow_layers) - dzsnso = np.zeros(n_snow_layers + n_soil_layers) - for k in range(isnow, n_snow_layers): - surface["tsnoxy"][k] = surface["tgxy"] - surface["snliqxy"][k] = 0.0 - surface["snicexy"][k] = 1.00 * dzsno[k] * surface["sheleg"]/snd #this line causes a warning + time = np.zeros(ntimes) + p_s = np.zeros((ntimes),dtype=float) + pressure_forc = np.zeros((nlevs,ntimes),dtype=float) + tot_advec_T = np.zeros((nlevs,ntimes),dtype=float) + tot_advec_qv = np.zeros((nlevs,ntimes),dtype=float) + tot_advec_u = np.zeros((nlevs,ntimes),dtype=float) + tot_advec_v = np.zeros((nlevs,ntimes),dtype=float) - dzsnso[k] = -dzsno[k] - - for k in range(n_snow_layers, n_snow_layers + n_soil_layers): - dzsnso[k] = -dzs[k - n_snow_layers] + time[0] = 0.0 + time[1] = sec_in_hr*time_hr[0] - time_setback #forcing period should extend from beginning of diagnostic period to right BEFORE the next one + p_s[0] = ps[0] + p_s[1] = p_s[0] + pressure_forc[:,0] = pres_adv[0,:] + pressure_forc[:,1] = pressure_forc[:,0] + tot_advec_T[:,0] = dtdt_adv[0,:] + tot_advec_T[:,1] = tot_advec_T[:,0] + tot_advec_qv[:,0] = dqvdt_adv[0,:] + tot_advec_qv[:,1] = tot_advec_qv[:,0] + tot_advec_u[:,0] = dudt_adv[0,:] + tot_advec_u[:,1] = tot_advec_u[:,0] + tot_advec_v[:,0] = dvdt_adv[0,:] + 
tot_advec_v[:,1] = tot_advec_v[:,0] - surface["zsnsoxy"][isnow] = dzsnso[isnow] - for k in range(isnow+1,n_snow_layers + n_soil_layers): - surface["zsnsoxy"][k] = surface["zsnsoxy"][k-1] + dzsnso[k] - - soilparm = read_noahmp_soil_table() + for t in range(1,n_files): + time[2*t] = sec_in_hr*time_hr[t-1] + time[2*t+1] = sec_in_hr*time_hr[t] - time_setback + p_s[2*t] = ps[t] + p_s[2*t+1] = p_s[2*t] + pressure_forc[:,2*t] = pres_adv[t,:] + pressure_forc[:,2*t+1] = pressure_forc[:,2*t] + tot_advec_T[:,2*t] = dtdt_adv[t,:] + tot_advec_T[:,2*t+1] = tot_advec_T[:,2*t] + tot_advec_qv[:,2*t] = dqvdt_adv[t,:] + tot_advec_qv[:,2*t+1] = tot_advec_qv[:,2*t] + tot_advec_u[:,2*t] = dudt_adv[t,:] + tot_advec_u[:,2*t+1] = tot_advec_u[:,2*t] + tot_advec_v[:,2*t] = dvdt_adv[t,:] + tot_advec_v[:,2*t+1] = tot_advec_v[:,2*t] + # + #p_s[2*t-1] = 0.5*(p_s[2*t] + p_s[2*t-1]) + #pressure_forc[:,2*t-1] = 0.5*(pressure_forc[:,2*t] + pressure_forc[:,2*t-1]) + #tot_advec_T[:,2*t-1] = 0.5*(tot_advec_T[:,2*t] + tot_advec_T[:,2*t-1]) + #tot_advec_qv[:,2*t-1] = 0.5*(tot_advec_qv[:,2*t] + tot_advec_qv[:,2*t-1]) + #tot_advec_u[:,2*t-1] = 0.5*(tot_advec_u[:,2*t] + tot_advec_u[:,2*t-1]) + #tot_advec_v[:,2*t-1] = 0.5*(tot_advec_v[:,2*t] + tot_advec_v[:,2*t-1]) + + + elif (time_method == 'gradient'): #this produced wonky results in the SCM; avoid until investigated more + print('Forcing can be interpolated in time since the forcing terms are assumed to follow a constant time-gradient.') - soiltyp = int(surface["styp"]) - if (soiltyp != 0): - #find the index of the soiltype from the "index" field - index = soilparm["index"].index(soiltyp) - bexp = soilparm["bb"][index] - smcmax = soilparm["maxsmc"][index] - smcwlt = soilparm["wltsmc"][index] - dwsat = soilparm["satdw"][index] - dksat = soilparm["satdk"][index] - psisat = -soilparm["satpsi"][index] + ntimes = 2*n_files + 1 + time = np.zeros(ntimes) + p_s = np.zeros((ntimes),dtype=float) + pressure_forc = np.zeros((nlevs,ntimes),dtype=float) + tot_advec_T = 
np.zeros((nlevs,ntimes),dtype=float) + tot_advec_qv = np.zeros((nlevs,ntimes),dtype=float) + tot_advec_u = np.zeros((nlevs,ntimes),dtype=float) + tot_advec_v = np.zeros((nlevs,ntimes),dtype=float) - if (vegtyp == mptable_nml_active['ISURBAN']): - smcmax = 0.45 - smcwlt = 0.40 + p_s[0] = state_IC['ps'] + pressure_forc[:,0] = state_IC['pa'] + tot_advec_T[:,0] = 0.0 + tot_advec_qv[:,0] = 0.0 + tot_advec_u[:,0] = 0.0 + tot_advec_v[:,0] = 0.0 - if ((bexp > 0.0) and (smcmax > 0.0) and (-psisat > 0.0 )): - for k in range(n_soil_layers): - if ( k == 0 ): - ddz = -zsoil[k+1] * 0.5 - elif ( k < n_soil_layers-1 ): - ddz = (zsoil[k-1] - zsoil[k+1] ) * 0.5 - else: - ddz = zsoil[k-1] - zsoil[k] -# ! -# ! Use newton-raphson method to find eq soil moisture -# ! - expon = bexp +1. - aa = dwsat/ddz - bb = dksat / smcmax ** expon - - smc = 0.5 * smcmax - for iter in range(100): - func = (smc - smcmax) * aa + bb * smc ** expon - dfunc = aa + bb * expon * smc ** bexp - dx = func/dfunc - smc = smc - dx - if ( abs (dx) < 1.e-6): - break - - surface["smoiseq"][k] = np.amin([np.amax([smc,1.e-4]),smcmax*0.99]) - else: - surface["smoiseq"][:] = smcmax + for t in range(n_files): + time[2*t + 1] = time[2*t] + 0.5*(sec_in_hr*time_hr[t] - time[2*t]) + time[2*t + 2] = sec_in_hr*time_hr[t] + + p_s[2*t+1] = ps[t] + pressure_forc[:,2*t+1] = pres_adv[t,:] + tot_advec_T[:,2*t+1] = dtdt_adv[t,:] + tot_advec_qv[:,2*t+1] = dqvdt_adv[t,:] + tot_advec_u[:,2*t+1] = dudt_adv[t,:] + tot_advec_v[:,2*t+1] = dvdt_adv[t,:] + + #calculate gradient in time and extrapolate for time (2t + 2) + grad = (p_s[2*t + 1] - p_s[2*t])/(time[2*t + 1] - time[2*t]) + p_s[2*t + 2] = p_s[2*t + 1] + grad*(time[2*t + 2] - time[2*t + 1]) + + for k in range(nlevs): + grad = (pressure_forc[k,2*t + 1] - pressure_forc[k, 2*t])/(time[2*t + 1] - time[2*t]) + pressure_forc[k,2*t + 2] = pressure_forc[k,2*t+1] + grad*(time[2*t + 2] - time[2*t + 1]) + grad = (tot_advec_T[k,2*t + 1] - tot_advec_T[k, 2*t])/(time[2*t + 1] - time[2*t]) + 
tot_advec_T[k,2*t + 2] = tot_advec_T[k,2*t+1] + grad*(time[2*t + 2] - time[2*t + 1]) + grad = (tot_advec_qv[k,2*t + 1] - tot_advec_qv[k, 2*t])/(time[2*t + 1] - time[2*t]) + tot_advec_qv[k,2*t + 2] = tot_advec_qv[k,2*t+1] + grad*(time[2*t + 2] - time[2*t + 1]) + grad = (tot_advec_u[k,2*t + 1] - tot_advec_u[k, 2*t])/(time[2*t + 1] - time[2*t]) + tot_advec_u[k,2*t + 2] = tot_advec_u[k,2*t+1] + grad*(time[2*t + 2] - time[2*t + 1]) + grad = (tot_advec_v[k,2*t + 1] - tot_advec_v[k, 2*t])/(time[2*t + 1] - time[2*t]) + tot_advec_v[k,2*t + 2] = tot_advec_v[k,2*t+1] + grad*(time[2*t + 2] - time[2*t + 1]) + else: + print('Unrecognized forcing time method. Exiting.') + exit() - surface["smcwtdxy"] = smcmax - surface["deeprechxy"] = 0.0 - surface["rechxy"] = 0.0 - - return surface + # + w_ls = np.zeros((nlevs,ntimes),dtype=float) + omega = np.zeros((nlevs,ntimes),dtype=float) + rad_heating = np.zeros((nlevs,ntimes),dtype=float) -def read_noahmp_soil_table(): - """Read values from SOILPARM.TBL for NoahMP LSM ICs""" - #returns a dictionary with data - - #two different datasets are included in the table - choices = ["STAS","STAS-RUC"] - - #get all lines of the file - with open(os.path.join(NOAHMP_TABLES_DIR, 'SOILPARM.TBL'), 'r') as f: - lineList = f.readlines() - f.close() - - #find the line where the desired data starts - line_index = 0 - for line in lineList: - line_index += 1 - #hardcoded to look for choices[0]; swap choices[0] for choices[1] to use choices[1] below - m = re.match(choices[0],line) and not re.match(choices[1],line) - if m: - start_index = line_index - break - - #get the data for each variable from the lines - n_soil_types = int(lineList[start_index].split()[0].split(',')[0]) - soil_index = [] - bb = [] - drysmc = [] - f11 = [] - maxsmc = [] - refsmc = [] - satpsi = [] - satdk = [] - satdw = [] - wltsmc = [] - qtz = [] - name = [] - for line in lineList[start_index+1:start_index+n_soil_types+1]: - values = line.strip().split(',') - 
soil_index.append(int(values[0])) - bb.append(float(values[1])) - drysmc.append(float(values[2])) - f11.append(float(values[3])) - maxsmc.append(float(values[4])) - refsmc.append(float(values[5])) - satpsi.append(float(values[6])) - satdk.append(float(values[7])) - satdw.append(float(values[8])) - wltsmc.append(float(values[9])) - qtz.append(float(values[10])) - name.append(values[11].strip()) - - soilparm = { - "index": soil_index, - "bb": bb, - "drysmc": drysmc, - "f11": f11, - "maxsmc": maxsmc, - "refsmc": refsmc, - "satpsi": satpsi, - "satdk": satdk, - "satdw": satdw, - "wltsmc": wltsmc, - "qtz": qtz, - "name": name + forcing = { + "time": time, + "wa": w_ls.swapaxes(0,1), + "wap": omega.swapaxes(0,1), + "tnta_rad": rad_heating.swapaxes(0,1), + "ps_forc": np.ones(ntimes)*ps[0], + "pa_forc": pressure_forc.swapaxes(0,1), + "tnta_adv": tot_advec_T.swapaxes(0,1), + "tnqv_adv": tot_advec_qv.swapaxes(0,1), + "tnua_adv": tot_advec_u.swapaxes(0,1), + "tnva_adv": tot_advec_v.swapaxes(0,1) } - return soilparm + if (save_comp_data): + comp_data = { + "time": stateNATIVE["time"]*sec_in_hr, + "pa" : p_layr[:,::-1], + "ta" : t_layr[:,::-1], + "qv" : qv_layr[:,::-1], + "ua" : u_layr[:,::-1], + "va" : v_layr[:,::-1], + "vars2d":vars2d} + else: + comp_data = {} -def write_SCM_case_file(state, surface, oro, forcing, case, date): - """Write all data to a netCDF file that the SCM can read""" - #expects the data to write, the name of the generated file, and the date corresponding to the ICs - - real_type = np.float64 - int_type = np.int32 - - nlevs = state["nlevs"] - nsoil = len(surface["stc"]) - nsnow = len(surface["snicexy"]) - nice = len(surface["tiice"]) - - nc_file = Dataset(os.path.join(PROCESSED_CASE_DIR, case + '.nc'), 'w', format='NETCDF4') - nc_file.description = "FV3GFS model profile input (no forcing)" - nc_file.missing_value = missing_value - - #create groups for scalars, intitialization, and forcing + return (forcing, comp_data, stateREGRID) - scalar_grp = 
nc_file.createGroup("scalars") - initial_grp = nc_file.createGroup("initial") - forcing_grp = nc_file.createGroup("forcing") - - #create dimensions and write them out +######################################################################################## +# +######################################################################################## +def write_SCM_case_file(state, surface, oro, forcing, case, date, stateREGRID): + """Write all data to a netCDF file in the DEPHY-SCM format""" - time_dim = nc_file.createDimension('time', None) - time_var = nc_file.createVariable('time', real_type, ('time',)) - time_var[:] = forcing["time"] - time_var.units = 's' - time_var.description = 'elapsed time since the beginning of the simulation' + # Working types + wp = np.float64 + wi = np.int32 - levels_dim = nc_file.createDimension('levels', None) - levels_var = nc_file.createVariable('levels', real_type, ('levels',)) - levels_var[:] = state["pres"] - levels_var.units = 'Pa' - levels_var.description = 'pressure levels' - - soil_dim = nc_file.createDimension('nsoil',None) - soil_depth_var = nc_file.createVariable('soil_depth', real_type, ('nsoil',)) - soil_depth_var[:] = [0.1,0.4,1.0,2.0] - soil_depth_var.units = 'm' - soil_depth_var.description = 'depth of bottom of soil layers' - - snow_dim = nc_file.createDimension('nsnow',None) - soil_plus_snow_dim = nc_file.createDimension('nsoil_plus_nsnow',None) - ice_dim = nc_file.createDimension('nice',None) - - #initial group + # Local switches + forcing_on = 1 + forcing_off = 0 + + # Output file + com = 'mkdir -p ' + PROCESSED_CASE_DIR + print(com) + os.system(com) + fileOUT = os.path.join(PROCESSED_CASE_DIR, case + '_SCM_driver.nc') + + nc_file = Dataset(fileOUT, 'w', format='NETCDF3_CLASSIC') + nc_file.description = "FV3GFS model profile input (UFS forcings)" + + nc_file.missing_value = missing_value + + start_date = datetime(date["year"],date["month"],date["day"],date["hour"],date["minute"],date["second"]) + + # + # Create 
surface type string (Saved as GLOBAL attribute) + # + if surface["slmsk"] > 1.5: + surface_string = 'ice' + elif surface["slmsk"] > 0.5: + surface_string = 'land' + else: + surface_string = 'ocean' + + # + # Global file attributes. + # + runtime = timedelta(seconds=forcing['time'][-1]) + end_date = start_date + runtime + end_date_string = end_date.strftime("%Y-%m-%d %H:%M:%S") + start_date_string = start_date.strftime("%Y-%m-%d %H:%M:%S") + # + loc_string = str(round(surface["lon"],2)) + "E" + str(round(surface["lat"],2)) + "N" + case_string = 'UFS_' + start_date_string + '_' + loc_string + # + nc_file.case = case_string + nc_file.title = 'Forcing and Initial Conditions for ' + case_string + nc_file.reference = 'https://dtcenter.org/sites/default/files/paragraph/scm-ccpp-guide-v6-0-0.pdf' + nc_file.author = 'Grant J. Firl and Dustin Swales' + nc_file.version = 'Created on ' + datetime.today().strftime('%Y-%m-%d %H:%M:%S') + nc_file.format_version = 'DEPHY SCM format version 1' + nc_file.modifications = '' + nc_file.script = os.path.basename(__file__) + nc_file.comment = '' + nc_file.start_date = start_date_string + nc_file.end_date = end_date_string + nc_file.forcing_scale = -1 + nc_file.radiation = "off" + nc_file.adv_ta = forcing_off + nc_file.adv_qv = forcing_off + nc_file.adv_ua = forcing_off + nc_file.adv_va = forcing_off + nc_file.adv_theta = forcing_off + nc_file.adv_thetal = forcing_off + nc_file.adv_qt = forcing_off + nc_file.adv_rv = forcing_off + nc_file.adv_rt = forcing_off + nc_file.forc_wa = forcing_off + nc_file.forc_wap = forcing_off + nc_file.forc_geo = forcing_off + nc_file.nudging_ua = forcing_off + nc_file.nudging_va = forcing_off + nc_file.nudging_ta = forcing_off + nc_file.nudging_theta = forcing_off + nc_file.nudging_thetal = forcing_off + nc_file.nudging_qv = forcing_off + nc_file.nudging_qt = forcing_off + nc_file.nudging_rv = forcing_off + nc_file.nudging_rt = forcing_off + nc_file.zh_nudging_ta = forcing_off + nc_file.zh_nudging_theta = 
forcing_off + nc_file.zh_nudging_thetal = forcing_off + nc_file.zh_nudging_qv = forcing_off + nc_file.zh_nudging_qt = forcing_off + nc_file.zh_nudging_rv = forcing_off + nc_file.zh_nudging_rt = forcing_off + nc_file.zh_nudging_ua = forcing_off + nc_file.zh_nudging_va = forcing_off + nc_file.pa_nudging_ta = forcing_off + nc_file.pa_nudging_theta = forcing_off + nc_file.pa_nudging_thetal = forcing_off + nc_file.pa_nudging_qv = forcing_off + nc_file.pa_nudging_qt = forcing_off + nc_file.pa_nudging_rv = forcing_off + nc_file.pa_nudging_rt = forcing_off + nc_file.pa_nudging_ua = forcing_off + nc_file.pa_nudging_va = forcing_off + # + nc_file.surface_type = surface_string + # + nc_file.adv_ta = forcing_on + nc_file.adv_qv = forcing_on + nc_file.adv_ua = forcing_on + nc_file.adv_va = forcing_on + # + nc_file.surface_forcing_temp = 'none' + nc_file.surface_forcing_moisture = 'none' + nc_file.surface_forcing_wind = 'none' + nc_file.surface_forcing_lsm = 'none' #'noah' #'noahmp' #'ruc' + nc_file.surface_forcing_lsm = 'lsm' + # Set file dimension + time_dim = nc_file.createDimension('time', len(forcing['time'])) + timei_dim = nc_file.createDimension('t0', 1) + lev_dim = nc_file.createDimension('lev', state["nlevs"]) + soil_dim = nc_file.createDimension('nsoil', len(surface["stc"])) + snow_dim = nc_file.createDimension('nsnow', len(surface["snicexy"])) + nslsnw_dim = nc_file.createDimension('nsoil_plus_nsnow',len(surface["snicexy"]) + len(surface["stc"])) + ice_dim = nc_file.createDimension('nice', len(surface["tiice"])) + + # + timei_var = nc_file.createVariable('t0', wp, ('t0')) + timei_var.units = 'seconds since ' + start_date_string + timei_var.standard_name = 'Initial time' + timei_var.calendar = 'gregorian' + timei_var[:] = 0.0 + # + timef_var = nc_file.createVariable('time', wp, ('time')) + timef_var.units = 'seconds since ' + start_date_string + timef_var.standard_name = 'Forcing time' + timef_var.calendar = 'gregorian' + timef_var[:] = forcing['time'] + # + lev_var = 
nc_file.createVariable('lev', wp, ('lev')) + lev_var.units = 'm' + lev_var.standard_name = 'height' + lev_var[:] = 0.0 + + # + lon_var = nc_file.createVariable('lon', wp, ('time')) + lon_var.units = 'degrees_east' + lon_var.standard_name = 'longitude' + lon_var[:] = surface["lon"] + + # + lat_var = nc_file.createVariable('lat', wp, ('time')) + lat_var.units = 'degrees_north' + lat_var.standard_name = 'latitude' + lat_var[:] = surface["lat"] + + # + soil_depth_var = nc_file.createVariable('soil_depth', wp, ('nsoil')) + soil_depth_var.units = 'm' + soil_depth_var.standard_name = 'depth of bottom of soil layers' + soil_depth_var[:] = [0.1,0.4,1.0,2.0] + # + theta_oro = nc_file.createVariable('theta_oro',wp, ('t0')) + theta_oro.units = "deg" + theta_oro.standard_name = "angle with respect to east of maximum subgrid orographic variations" + theta_oro[:] = oro["theta"] + # + z0_var = nc_file.createVariable('zorl', wp, ('time')) + z0_var.units = "cm" + z0_var.standard_name = 'surface_roughness_length_for_momentum_in_air' + z0_var[:] = surface["z0"] + # + zorlw_var = nc_file.createVariable('zorlw', wp, ('t0')) + zorlw_var.units = "cm" + zorlw_var.standard_name = "surface roughness length over ocean" + zorlw_var[:] = surface["z0"] + # + zorll_var = nc_file.createVariable('zorll', wp, ('t0')) + zorll_var.units = "cm" + zorll_var.standard_name = "surface roughness length over land" + zorll_var[:] = surface["zorll"] + # + zorli_var = nc_file.createVariable('zorli', wp, ('t0')) + zorli_var.units = "cm" + zorli_var.standard_name = "surface roughness length over ice" + zorli_var[:] = surface["zorli"] + # + zorlwav_var = nc_file.createVariable('zorlwav', wp, ('time')) + zorlwav_var.units = "cm" + zorlwav_var.standard_name = 'surface_roughness_length_from_wave_model' + zorlwav_var[:] = surface["zorlw"] + + # + # Variables to be output to SCM input file. Only fields that come directly from forcing, + # surface, state, and oro. Fields that get renamed are done above. 
+ # + dict = {} + dict.update(date) + dict.update(surface) + dict.update(state) + dict.update(oro) + dict.update(forcing) + + ######################################################################################## + # + # Dictonary format: + # {"name": "", "type", "dimd": (), "units": "", "desc": ""} + # + ######################################################################################## + var_dict = [{"name": "orog", "type":wp, "dimd": ('t0' ), "units": "m", "desc": "surface_altitude"},\ + {"name": "zh", "type":wp, "dimd": ('t0', 'lev'), "units": "m", "desc": "height"},\ + {"name": "pa", "type":wp, "dimd": ('t0', 'lev'), "units": "Pa", "desc": "air_ressure"}, \ + {"name": "ta", "type":wp, "dimd": ('t0', 'lev'), "units": "K", "desc": "air_temperature","default_value": stateREGRID["t_lay"][1,:], "override": True}, \ + {"name": "theta", "type":wp, "dimd": ('t0', 'lev'), "units": "K", "desc": "air_potential_temperature"}, \ + {"name": "thetal", "type":wp, "dimd": ('t0', 'lev'), "units": "K", "desc": "air_liquid_potential_temperature"}, \ + {"name": "rv", "type":wp, "dimd": ('t0', 'lev'), "units": "kg kg-1", "desc": "humidity_mixing_ratio"}, \ + {"name": "rl", "type":wp, "dimd": ('t0', 'lev'), "units": "kg kg-1", "desc": "cloud_liquid_water_mixing_ratio"}, \ + {"name": "ri", "type":wp, "dimd": ('t0', 'lev'), "units": "kg kg-1", "desc": "cloud_ice_water_mixing_ratio"}, \ + {"name": "rt", "type":wp, "dimd": ('t0', 'lev'), "units": "kg kg-1", "desc": "water_mixing_ratio"}, \ + {"name": "qv", "type":wp, "dimd": ('t0', 'lev'), "units": "kg kg-1", "desc": "specific_humidity","default_value": stateREGRID["qv_lay"][1,:], "override": True}, \ + {"name": "ql", "type":wp, "dimd": ('t0', 'lev'), "units": "kg kg-1", "desc": "mass_fraction_of_cloud_liquid_water_in_air"}, \ + {"name": "qi", "type":wp, "dimd": ('t0', 'lev'), "units": "kg kg-1", "desc": "mass_fraction_of_cloud_ice_water_in_air", "default_value": 0.0}, \ + {"name": "qt", "type":wp, "dimd": ('t0', 'lev'), "units": 
"kg kg-1", "desc": "mass_fraction_of_water_in_air"}, \ + {"name": "hur", "type":wp, "dimd": ('t0', 'lev'), "units": "%", "desc": "relative_humidity"}, \ + {"name": "tke", "type":wp, "dimd": ('t0', 'lev'), "units": "m2 s-2", "desc": "specific_turbulen_kinetic_energy", "default_value": 0.0}, \ + {"name": "ua", "type":wp, "dimd": ('t0', 'lev'), "units": "m s-1", "desc": "eastward_wind", "default_value": stateREGRID["u_lay"][1,:], "override": True}, \ + {"name": "va", "type":wp, "dimd": ('t0', 'lev'), "units": "m s-1", "desc": "northward_wind", "default_value": stateREGRID["v_lay"][1,:], "override": True}, \ + {"name": "ts", "type":wp, "dimd": ('t0' ), "units": "K", "desc": "surface_temperature"},\ + {"name": "tskin", "type":wp, "dimd": ('t0' ), "units": "K", "desc": "surface_skin_pressure"}, \ + {"name": "ps", "type":wp, "dimd": ('t0' ), "units": "Pa", "desc": "surface_air_pressure"}, \ + {"name": "beta", "type":wp, "dimd": ('t0' ), "units": "m", "desc": "soil_water_stress_factor"}, \ + {"name": "mrsos", "type":wp, "dimd": ('t0' ), "units": "kg m-2", "desc": "mass_content_of_water_in_soil_layer"}, \ + {"name": "o3", "type":wp, "dimd": ('t0', 'lev'), "units": "kg kg-1", "desc": "mole_fraction_of_ozone_in_air"}, \ + {"name": "sza", "type":wp, "dimd": ('t0' ), "units": "degree", "desc": "solar_zenith_angle"}, \ + {"name": "io", "type":wp, "dimd": ('t0' ), "units": "W m-2", "desc": "solar_irradiance"}, \ + {"name": "alb", "type":wp, "dimd": ('t0' ), "units": "1", "desc": "surface_albedo"}, \ + {"name": "emis", "type":wp, "dimd": ('t0' ), "units": "1", "desc": "surface_longwave_emissivity"}, \ + {"name": "slmsk", "type":wp, "dimd": ('t0' ), "units": "none", "desc": "land_sea_ice_mask"}] + # + var_frc = [{"name": "zh_forc", "type":wp, "dimd": ('time', 'lev'), "units": "m", "desc": "height_forcing","default_value": 1.},\ + {"name": "pa_forc", "type":wp, "dimd": ('time', 'lev'), "units": "Pa", "desc": "air_pressure_forcing"}, \ + {"name": "wa", "type":wp, "dimd": ('time', 
'lev'), "units": "m s-1", "desc": "upward_air_velocity"}, \ + {"name": "wap", "type":wp, "dimd": ('time', 'lev'), "units": "Pa s-1", "desc": "lagrangian_tendency_of_air_pressure"}, \ + {"name": "ug", "type":wp, "dimd": ('time', 'lev'), "units": "m s-1", "desc": "geostrophic_eastward_wind"}, \ + {"name": "vg", "type":wp, "dimd": ('time', 'lev'), "units": "m s-1", "desc": "geostrophic_northward_wind"}, \ + {"name": "tnua_adv", "type":wp, "dimd": ('time', 'lev'), "units": "m s-2", "desc": "tendency_of_eastward_wind_due_to_advection"},\ + {"name": "tnva_adv", "type":wp, "dimd": ('time', 'lev'), "units": "m s-2", "desc": "tendency_of_northward_wind_due_to_advection"}, \ + {"name": "tnta_adv", "type":wp, "dimd": ('time', 'lev'), "units": "K s-1", "desc": "tendency_of_air_temperature_due_to_advection"}, \ + {"name": "tntheta_adv", "type":wp, "dimd": ('time', 'lev'), "units": "K s-1", "desc": "tendency_of_air_potential_temperature_due_to_advection"}, \ + {"name": "tnthetal_adv", "type":wp, "dimd": ('time', 'lev'), "units": "K s-1", "desc": "tendency_of_air_liquid_potential_temperature_due_to_advection"}, \ + {"name": "tnqv_adv", "type":wp, "dimd": ('time', 'lev'), "units": "kg kg-1 s-1", "desc": "tendency_of_specific_humidity_due_to_advection"},\ + {"name": "tnqt_adv", "type":wp, "dimd": ('time', 'lev'), "units": "kg kg-1 s-1", "desc": "tendency_of_mass_fraction_of_water_in_air_due_to_advection"},\ + {"name": "tnrv_adv", "type":wp, "dimd": ('time', 'lev'), "units": "kg kg-1 s-1", "desc": "tendency_of_humidity_mixing_ratio_due_to_advection"},\ + {"name": "tnrt_adv", "type":wp, "dimd": ('time', 'lev'), "units": "kg kg-1 s-1", "desc": "tendency_of_water_mixing_ratio_due_to_advection"},\ + {"name": "tnta_rad", "type":wp, "dimd": ('time', 'lev'), "units": "K s-1", "desc": "tendency_of_air_temperature_due_to_radiative_heating"}, \ + {"name": "tntheta_rad", "type":wp, "dimd": ('time', 'lev'), "units": "K s-1", "desc": 
"tendency_of_potential_air_temperature_due_to_radiative_heating"}, \ + {"name": "tnthetal_rad", "type":wp, "dimd": ('time', 'lev'), "units": "K s-1", "desc": "tendency_of_air_liquid_potential_temperature_due_to_radiative_heating"}, \ + {"name": "ta_nud", "type":wp, "dimd": ('time', 'lev'), "units": "K", "desc": "nudging_air_temperature"}, \ + {"name": "theta_nud", "type":wp, "dimd": ('time', 'lev'), "units": "K", "desc": "nudging_air_potential_temperature"}, \ + {"name": "thetal_nud", "type":wp, "dimd": ('time', 'lev'), "units": "K", "desc": "nudging_air_liquid_potential_temperature"}, \ + {"name": "qt_nud", "type":wp, "dimd": ('time', 'lev'), "units": "kg kg-1", "desc": "nudging_mass_fraction_of_water_in_air"}, \ + {"name": "rv_nud", "type":wp, "dimd": ('time', 'lev'), "units": "m s-1", "desc": "nudging_humidity_mixing_ratio"}, \ + {"name": "rt_nud", "type":wp, "dimd": ('time', 'lev'), "units": "m s-1", "desc": "nudging_water_mixing_ratio"}, \ + {"name": "ua_nud", "type":wp, "dimd": ('time', 'lev'), "units": "m s-1", "desc": "nudging_eastward_wind"}, \ + {"name": "va_nud", "type":wp, "dimd": ('time', 'lev'), "units": "m s-1", "desc": "nudging_northward_wind"}, \ + {"name": "hfss", "type":wp, "dimd": ('time' ), "units": "W m-2", "desc": "surface_upward_sensible_heat_flux"}, \ + {"name": "hfls", "type":wp, "dimd": ('time' ), "units": "W m-2", "desc": "surface_upward_latent_heat_flux"}, \ + {"name": "wpthetap_s", "type":wp, "dimd": ('time' ), "units": "K m s-1", "desc": "surface_upward_potential_temperature_flux"}, \ + {"name": "wpqvp_s", "type":wp, "dimd": ('time' ), "units": "m s-1", "desc": "surface_upward_specific_humidity_flux"}, \ + {"name": "wpqtp_s", "type":wp, "dimd": ('time' ), "units": "m s-1", "desc": "surface_upward_water_mass_fraction_flux"}, \ + {"name": "wprvp_s", "type":wp, "dimd": ('time' ), "units": "m s-1", "desc": "surface_upward_humidity_mixing_ratio_flux"}, \ + {"name": "wprtp_s", "type":wp, "dimd": ('time' ), "units": "m s-1", "desc": 
"surface_upward_water_mixing_ratio_flux"}, \ + {"name": "ts_forc", "type":wp, "dimd": ('time' ), "units": "K", "desc": "forcing_surface_temperature"},\ + {"name": "ps_forc", "type":wp, "dimd": ('time' ), "units": "Pa", "desc": "forcing_surface_air_pressure"},\ + {"name": "uustar", "type":wp, "dimd": ('time' ), "units": "m s-1", "desc": "surface_friction_velocity"}, \ + {"name": "z0h", "type":wp, "dimd": ('time' ), "units": "m", "desc": "surface_roughness_length_for_heat_in_air"}, \ + {"name": "z0q", "type":wp, "dimd": ('time' ), "units": "m", "desc": "surface_roughness_length_for_humidity_in_air"}, \ + {"name": "mrsos_forc", "type":wp, "dimd": ('time' ), "units": "kg m-2", "desc": "forcing_mass_content_of_water_in_soil_layer"}] + + # + var_oro = [{"name": "area", "type":wp, "dimd": ('t0'), "units": "m 2-1", "desc": "grid_cell_area"},\ + {"name": "stddev", "type":wp, "dimd": ('t0'), "units": "m", "desc": "standard deviation of subgrid orography"}, \ + {"name": "convexity", "type":wp, "dimd": ('t0'), "units": "none", "desc": "convexity of subgrid orography"}, \ + {"name": "oa1", "type":wp, "dimd": ('t0'), "units": "none", "desc": "assymetry of subgrid orography 1"}, \ + {"name": "oa2", "type":wp, "dimd": ('t0'), "units": "none", "desc": "assymetry of subgrid orography 2"}, \ + {"name": "oa3", "type":wp, "dimd": ('t0'), "units": "none", "desc": "assymetry of subgrid orography 3"}, \ + {"name": "oa4", "type":wp, "dimd": ('t0'), "units": "none", "desc": "assymetry of subgrid orography 4"}, \ + {"name": "ol1", "type":wp, "dimd": ('t0'), "units": "none", "desc": "fraction of grid box with subgrid orography higher than critical height 1"}, \ + {"name": "ol2", "type":wp, "dimd": ('t0'), "units": "none", "desc": "fraction of grid box with subgrid orography higher than critical height 2"}, \ + {"name": "ol3", "type":wp, "dimd": ('t0'), "units": "none", "desc": "fraction of grid box with subgrid orography higher than critical height 3"}, \ + {"name": "ol4", "type":wp, "dimd": 
('t0'), "units": "none", "desc": "fraction of grid box with subgrid orography higher than critical height 4"}, \ + {"name": "sigma", "type":wp, "dimd": ('t0'), "units": "none", "desc": "slope of subgrid orography"}, \ + {"name": "gamma", "type":wp, "dimd": ('t0'), "units": "none", "desc": "anisotropy of subgrid orography"}, \ + {"name": "elvmax", "type":wp, "dimd": ('t0'), "units": "m", "desc": "maximum of subgrid orography"}, \ + {"name": "oro", "type":wp, "dimd": ('t0'), "units": "m", "desc": "orography"}, \ + {"name": "oro_uf", "type":wp, "dimd": ('t0'), "units": "m", "desc": "unfiltered orography"}, \ + {"name": "landfrac", "type":wp, "dimd": ('t0'), "units": "none", "desc": "fraction of horizontal grid area occupied by land"}, \ + {"name": "lakefrac", "type":wp, "dimd": ('t0'), "units": "none", "desc": "fraction of horizontal grid area occupied by lake", "default_value":0}, \ + {"name": "lakedepth", "type":wp, "dimd": ('t0'), "units": "none", "desc": "lake depth", "default_value":0}] + # + var_nsst = [{"name": "tref", "type":wp, "dimd": ('t0'), "units": "K", "desc": "sea surface reference temperature for NSST"}, \ + {"name": "z_c", "type":wp, "dimd": ('t0'), "units": "m", "desc": "sub-layer cooling thickness for NSST"}, \ + {"name": "c_0", "type":wp, "dimd": ('t0'), "units": "none", "desc": "coefficient 1 to calculate d(Tz)/d(Ts) for NSST"}, \ + {"name": "c_d", "type":wp, "dimd": ('t0'), "units": "nonw", "desc": "coefficient 2 to calculate d(Tz)/d(Ts) for NSST"}, \ + {"name": "w_0", "type":wp, "dimd": ('t0'), "units": "none", "desc": "coefficient 3 to calculate d(Tz)/d(Ts) for NSST"}, \ + {"name": "w_d", "type":wp, "dimd": ('t0'), "units": "none", "desc": "coefficient 4 to calculate d(Tz)/d(Ts) for NSST"}, \ + {"name": "xt", "type":wp, "dimd": ('t0'), "units": "K m", "desc": "heat content in diurnal thermocline layer for NSST"}, \ + {"name": "xs", "type":wp, "dimd": ('t0'), "units": "ppt m", "desc": "salinity content in diurnal thermocline layer for NSST"}, \ 
+ {"name": "xu", "type":wp, "dimd": ('t0'), "units": "m2 s-1", "desc": "u-current in diurnal thermocline layer for NSST"}, \ + {"name": "xv", "type":wp, "dimd": ('t0'), "units": "m2 s-1", "desc": "v-current in diurnal thermocline layer for NSST"}, \ + {"name": "xz", "type":wp, "dimd": ('t0'), "units": "m", "desc": "thickness of diurnal thermocline layer for NSST"}, \ + {"name": "zm" , "type":wp, "dimd": ('t0'), "units": "m", "desc": "thickness of ocean mixed layer for NSST"}, \ + {"name": "xtts", "type":wp, "dimd": ('t0'), "units": "m", "desc": "sensitivity of diurnal thermocline layer heat content to surface temperature [d(xt)/d(ts)] for NSST"},\ + {"name": "xzts", "type":wp, "dimd": ('t0'), "units": "m K-1", "desc": "sensitivity of diurnal thermocline layer thickness to surface temperature [d(xz)/d(ts)] for NSST"}, \ + {"name": "d_conv", "type":wp, "dimd": ('t0'), "units": "m", "desc": "thickness of free convection layer for NSST"}, \ + {"name": "ifd", "type":wp, "dimd": ('t0'), "units": "none", "desc": "index to start DTM run for NSST"}, \ + {"name": "dt_cool", "type":wp, "dimd": ('t0'), "units": "K", "desc": "sub-layer cooling amount for NSST"}, \ + {"name": "qrain", "type":wp, "dimd": ('t0'), "units": "W m-2", "desc": "sensible heat due to rainfall for NSST"}] + # + var_frgd = [{"name": "tiice", "type":wp, "dimd": ('t0','nice'), "units": "K", "desc": "sea ice internal temperature"}] + # + var_noah = [{"name": "vegsrc", "type":wi, "dimd": ('t0'), "units": "none", "desc": "vegetation source (1-2)", "default_value": 1}, \ + {"name": "tsfco", "type":wp, "dimd": ('t0'), "units": "K", "desc": "sea/skin/ice surface temperature"}, \ + {"name": "weasd", "type":wp, "dimd": ('t0'), "units": "mm", "desc": "water equivalent accumulated snow depth"}, \ + {"name": "tg3", "type":wp, "dimd": ('t0'), "units": "K", "desc": "deep soil temperature"}, \ + {"name": "alvsf", "type":wp, "dimd": ('t0'), "units": "none", "desc": "60 degree vis albedo with strong cosz dependency"}, \ + 
{"name": "alnsf", "type":wp, "dimd": ('t0'), "units": "none", "desc": "60 degree nir albedo with strong cosz dependency"}, \ + {"name": "alvwf", "type":wp, "dimd": ('t0'), "units": "none", "desc": "60 degree vis albedo with weak cosz dependency"}, \ + {"name": "alnwf", "type":wp, "dimd": ('t0'), "units": "none", "desc": "60 degree nir albedo with weak cosz dependency"}, \ + {"name": "facsf", "type":wp, "dimd": ('t0'), "units": "none", "desc": "fractional coverage with strong cosz dependency"}, \ + {"name": "facwf", "type":wp, "dimd": ('t0'), "units": "none", "desc": "fractional coverage with weak cosz dependency"}, \ + {"name": "vegfrac", "type":wp, "dimd": ('t0'), "units": "none", "desc": "vegetation fraction"}, \ + {"name": "canopy", "type":wp, "dimd": ('t0'), "units": "kg m-2", "desc": "amount of water stored in canopy"}, \ + {"name": "f10m", "type":wp, "dimd": ('t0'), "units": "none", "desc": "ratio of sigma level 1 wind and 10m wind"}, \ + {"name": "t2m", "type":wp, "dimd": ('t0'), "units": "K", "desc": "2-meter absolute temperature"}, \ + {"name": "q2m", "type":wp, "dimd": ('t0'), "units": "kg kg-1", "desc": "2-meter specific humidity"}, \ + {"name": "vegtyp", "type":wi, "dimd": ('t0'), "units": "none", "desc": "vegetation type (1-12)"}, \ + {"name": "soiltyp", "type":wi, "dimd": ('t0'), "units": "none", "desc": "soil type (1-12)"}, \ + {"name": "ffmm", "type":wp, "dimd": ('t0'), "units": "none", "desc": "Monin-Obukhov similarity function for momentum"}, \ + {"name": "ffhh", "type":wp, "dimd": ('t0'), "units": "none", "desc": "Monin-Obukhov similarity function for heat"}, \ + {"name": "hice", "type":wp, "dimd": ('t0'), "units": "m", "desc": "sea ice thickness"}, \ + {"name": "fice", "type":wp, "dimd": ('t0'), "units": "none", "desc": "ice fraction"}, \ + {"name": "tisfc", "type":wp, "dimd": ('t0'), "units": "K", "desc": "ice surface temperature"}, \ + {"name": "tprcp", "type":wp, "dimd": ('t0'), "units": "m", "desc": "instantaneous total precipitation 
amount"}, \ + {"name": "srflag", "type":wp, "dimd": ('t0'), "units": "none", "desc": "snow/rain flag for precipitation"}, \ + {"name": "snowd", "type":wp, "dimd": ('t0'), "units": "mm", "desc": "water equivalent snow depth"}, \ + {"name": "shdmin", "type":wp, "dimd": ('t0'), "units": "none", "desc": "minimum vegetation fraction"}, \ + {"name": "shdmax", "type":wp, "dimd": ('t0'), "units": "none", "desc": "maximum vegetation fraction"}, \ + {"name": "slopetyp", "type":wi, "dimd": ('t0'), "units": "none", "desc": "slope type (1-9)"}, \ + {"name": "snoalb", "type":wp, "dimd": ('t0'), "units": "none", "desc": "maximum snow albedo"}, \ + {"name": "sncovr", "type":wp, "dimd": ('t0'), "units": "none", "desc": "surface snow area fraction"}, \ + {"name": "tsfcl", "type":wp, "dimd": ('t0'), "units": "K", "desc": "surface skin temperature over land"}, \ + {"name": "stc", "type":wp, "dimd": ('t0','nsoil'), "units": "K", "desc": "initial profile of soil liquid moisture"}, \ + {"name": "smc", "type":wp, "dimd": ('t0','nsoil'), "units": "kg", "desc": "initial profile of soil moisture"}, \ + {"name": "slc", "type":wp, "dimd": ('t0','nsoil'), "units": "kg", "desc": "initial profile of soil temperature"}] + # + var_noahmp=[{"name": "tvxy", "type":wp, "dimd": ('t0'), "units": "K", "desc": "vegetation temperature for NoahMP"}, \ + {"name": "tgxy", "type":wp, "dimd": ('t0'), "units": "K", "desc": "ground temperature for NoahMP"}, \ + {"name": "tahxy", "type":wp, "dimd": ('t0'), "units": "K", "desc": "canopy air temperature for NoahMP"}, \ + {"name": "canicexy", "type":wp, "dimd": ('t0'), "units": "mm", "desc": "canopy intercepted ice mass for NoahMP"}, \ + {"name": "canliqxy", "type":wp, "dimd": ('t0'), "units": "mm", "desc": "canopy intercepted liquid water for NoahMP"}, \ + {"name": "eahxy", "type":wp, "dimd": ('t0'), "units": "Pa", "desc": "canopy air vapor pressure for NoahMP"}, \ + {"name": "cmxy", "type":wp, "dimd": ('t0'), "units": "none", "desc": "surface drag coefficient for 
momentum for NoahMP"}, \ + {"name": "chxy", "type":wp, "dimd": ('t0'), "units": "none", "desc": "surface exchange coeff heat & moisture for NoahMP"}, \ + {"name": "fwetxy", "type":wp, "dimd": ('t0'), "units": "none", "desc": "area fraction of canopy that is wetted/snowed for NoahMP"}, \ + {"name": "sneqvoxy", "type":wp, "dimd": ('t0'), "units": "mm", "desc": "snow mass at previous time step for NoahMP"}, \ + {"name": "alboldxy", "type":wp, "dimd": ('t0'), "units": "none", "desc": "snow albedo at previous time step for NoahMP"}, \ + {"name": "qsnowxy", "type":wp, "dimd": ('t0'), "units": "mm s-1", "desc": "snow precipitation rate at surface for NoahMP"}, \ + {"name": "wslakexy", "type":wp, "dimd": ('t0'), "units": "mm", "desc": "lake water storage for NoahMP"}, \ + {"name": "taussxy", "type":wp, "dimd": ('t0'), "units": "none", "desc": "non-dimensional snow age for NoahMP"}, \ + {"name": "waxy", "type":wp, "dimd": ('t0'), "units": "mm", "desc": "water storage in aquifer for NoahMP"}, \ + {"name": "wtxy", "type":wp, "dimd": ('t0'), "units": "mm", "desc": "water storage in aquifer and saturated soil for NoahMP"}, \ + {"name": "zwtxy", "type":wp, "dimd": ('t0'), "units": "m", "desc": "water table depth for NoahMP"}, \ + {"name": "xlaixy", "type":wp, "dimd": ('t0'), "units": "none", "desc": "leaf area index for NoahMP"}, \ + {"name": "xsaixy", "type":wp, "dimd": ('t0'), "units": "none", "desc": "stem area index for NoahMP"}, \ + {"name": "lfmassxy", "type":wp, "dimd": ('t0'), "units": "g m-2", "desc": "leaf mass for NoahMP"}, \ + {"name": "stmassxy", "type":wp, "dimd": ('t0'), "units": "g m-2", "desc": "stem mass for NoahMP"}, \ + {"name": "rtmassxy", "type":wp, "dimd": ('t0'), "units": "g m-2", "desc": "fine root mass for NoahMP"}, \ + {"name": "woodxy", "type":wp, "dimd": ('t0'), "units": "g m-2", "desc": "wood mass including woody roots for NoahMP"}, \ + {"name": "stblcpxy", "type":wp, "dimd": ('t0'), "units": "g m-2", "desc": "stable carbon in deep soil for 
NoahMP"}, \ + {"name": "fastcpxy", "type":wp, "dimd": ('t0'), "units": "g m-2", "desc": "short-lived carbon in shallow soil for NoahMP"}, \ + {"name": "smcwtdxy", "type":wp, "dimd": ('t0'), "units": "m3 m-3", "desc": "soil water content between the bottom of the soil and the water table for NoahMP"}, \ + {"name": "deeprechxy", "type":wp, "dimd": ('t0'), "units": "m", "desc": "recharge to or from the water table when deep for NoahMP"}, \ + {"name": "rechxy", "type":wp, "dimd": ('t0'), "units": "m", "desc": "recharge to or from the water table when shallow for NoahMP"}, \ + {"name": "snowxy", "type":wp, "dimd": ('t0'), "units": "none", "desc": "number of snow layers for NoahMP"}, \ + {"name": "snicexy", "type":wp, "dimd": ('t0','nsnow'), "units": "mm", "desc": "initial profile of snow layer ice"}, \ + {"name": "snliqxy", "type":wp, "dimd": ('t0','nsnow'), "units": "mm", "desc": "initial profile of snow layer liquid"}, \ + {"name": "tsnoxy", "type":wp, "dimd": ('t0','nsnow'), "units": "K", "desc": "initial profile of snow layer temperature"}, \ + {"name": "smoiseq", "type":wp, "dimd": ('t0','nsoil'), "units": "m3 m-3", "desc": "initial profile of equilibrium soil water content"}, \ + {"name": "zsnsoxy", "type":wp, "dimd": ('t0','nsoil_plus_nsnow'), "units": "m","desc": "layer bottom depth from snow surface"}] + # + var_ruc = [{"name": "wetness", "type":wp, "dimd": ('t0'), "units": "none", "desc": "normalized soil wetness for RUC LSM"}, \ + {"name": "lai", "type":wp, "dimd": ('t0'), "units": "none", "desc": "leaf area index for RUC LSM"}, \ + {"name": "clw_surf_land", "type":wp, "dimd": ('t0'), "units": "kg kg-1", "desc": "cloud condensed water mixing ratio at surface over land for RUC LSM"},\ + {"name": "clw_surf_ice", "type":wp, "dimd": ('t0'), "units": "kg kg-1", "desc": "cloud condensed water mixing ratio at surface over ice for RUC LSM"},\ + {"name": "qwv_surf_land", "type":wp, "dimd": ('t0'), "units": "kg kg-1", "desc": "water vapor mixing ratio at surface over 
land for RUC LSM"},\ + {"name": "qwv_surf_ice", "type":wp, "dimd": ('t0'), "units": "kg kg-1", "desc": "water vapor mixing ratio at surface over ice for RUC LSM"},\ + {"name": "tsnow_land", "type":wp, "dimd": ('t0'), "units": "K", "desc": "snow temperature at the bottom of the first snow layer over land for RUC LSM"},\ + {"name": "tsnow_ice", "type":wp, "dimd": ('t0'), "units": "K", "desc": "snow temperature at the bottom of the first snow layer over ice for RUC LSM"},\ + {"name": "snowfall_acc_land","type":wp, "dimd": ('t0'), "units": "kg m-2", "desc": "run-total snow accumulation on the ground over land for RUC LSM"},\ + {"name": "snowfall_acc_ice", "type":wp, "dimd": ('t0'), "units": "kg m-2", "desc": "run-total snow accumulation on the ground over ice for RUC LSM"},\ + {"name": "sfalb_lnd", "type":wp, "dimd": ('t0'), "units": "none", "desc": "surface albedo over land for RUC LSM"},\ + {"name": "sfalb_lnd_bck", "type":wp, "dimd": ('t0'), "units": "none", "desc": "surface snow-free albedo over land for RUC LSM"},\ + {"name": "sfalb_ice", "type":wp, "dimd": ('t0'), "units": "none", "desc": "surface albedo over ice for RUC LSM"},\ + {"name": "emis_lnd", "type":wp, "dimd": ('t0'), "units": "none", "desc": "surface emissivity over land for RUC LSM"},\ + {"name": "emis_ice", "type":wp, "dimd": ('t0'), "units": "none", "desc": "surface emissivity over ice for RUC LSM"}, \ + {"name": "tslb", "type":wp, "dimd": ('t0','nsoil'), "units": "K", "desc": "soil temperature for RUC LSM"}, \ + {"name": "smois", "type":wp, "dimd": ('t0','nsoil'), "units": "none", "desc": "volume fraction of soil moisture for RUC LSM"}, \ + {"name": "sh2o", "type":wp, "dimd": ('t0','nsoil'), "units": "none", "desc": "volume fraction of unfrozen soil moisture for RUC LSM"}, \ + {"name": "smfr", "type":wp, "dimd": ('t0','nsoil'), "units": "none", "desc": "volume fraction of frozen soil moisture for RUC LSM"}, + {"name": "flfr", "type":wp, "dimd": ('t0','nsoil'), "units": "none", "desc": "flag for 
frozen soil physics for RUC LSM"}] + + # + var_dict.extend(var_frc) + var_dict.extend(var_oro) + var_dict.extend(var_nsst) + var_dict.extend(var_frgd) + var_dict.extend(var_ruc) + var_dict.extend(var_noah) + var_dict.extend(var_noahmp) + + # + for var in var_dict: + if (var["name"] in dict): + var_temp = nc_file.createVariable(var["name"], var["type"], var["dimd"]) + var_temp.units = var["units"] + var_temp.standard_name = var["desc"] + var_temp[:] = dict[var["name"]] + elif "default_value" in var: + var_temp = nc_file.createVariable(var["name"], var["type"], var["dimd"]) + var_temp.units = var["units"] + var_temp.standard_name = var["desc"] + var_temp[:] = var["default_value"] + if "override" in var: + var_temp[:] = var["default_value"] + # + # Close file + # + nc_file.close() + + return(fileOUT) + +######################################################################################## +def write_comparison_file(comp_data, case_name, date, surface): + """Write UFS history file data to netCDF file for comparison""" + + wp = np.float64 + wi = np.int32 + + nlevs = comp_data["pa"].shape[1] + ntime = comp_data["pa"].shape[0] + + start_date = datetime(date["year"],date["month"],date["day"],date["hour"],date["minute"],date["second"]) + start_date_string = start_date.strftime("%Y%m%d%H%M%S") + + loc_string = str(round(surface["lon"],2)) + "E" + str(round(surface["lat"],2)) + "N" + case_string = 'UFS_' + start_date_string + '_' + loc_string + + com = 'mkdir -p ' + COMPARISON_DATA_DIR + print(com) + os.system(com) + nc_file = Dataset(os.path.join(COMPARISON_DATA_DIR, case_name + '_comp_data.nc'), 'w', format='NETCDF3_CLASSIC') + nc_file.case = case_string + nc_file.title = 'UFS history file data for ' + case_string + nc_file.reference = '' + nc_file.author = 'Grant J. 
Firl' + nc_file.version = 'Created on ' + datetime.today().strftime('%Y-%m-%d-%H:%M:%S') + nc_file.script = os.path.basename(__file__) + nc_file.startDate = start_date_string + + # Dimensions + lev_dim = nc_file.createDimension('lev', size=nlevs) + time_dim = nc_file.createDimension('time', size=ntime) + time_ufs_history_dim = nc_file.createDimension('time_ufs_history', size=ntime-1) + + # Varaibles + time_var = nc_file.createVariable('time', wp, ('time',)) + time_var.units = 'second' + time_var.long_name = 'history file time' + time_var[:] = comp_data['time'] + + time2_var = nc_file.createVariable('time_ufs_history', wp, ('time_ufs_history',)) + time2_var.units = 'second' + time2_var.long_name = 'UFS history file time' + time2_var[:] = comp_data['time'][1::] - temperature_var = initial_grp.createVariable('temp', real_type, ('levels',)) - temperature_var[:] = state["T"][0:nlevs] + lev_var = nc_file.createVariable('levs', wp, ('time','lev',)) + lev_var.units = 'Pa' + lev_var.long_name = 'pressure' + lev_var[:,:] = comp_data["pa"] + + temperature_var = nc_file.createVariable('temp', wp, ('time', 'lev',)) temperature_var.units = 'K' - temperature_var.description = 'initial profile of absolute temperature' - - qt_var = initial_grp.createVariable('qt', real_type, ('levels',)) - qt_var[:] = state["qv"][0:nlevs] - qt_var.units = 'kg kg^-1' - qt_var.description = 'initial profile of total water specific humidity' - - ql_var = initial_grp.createVariable('ql', real_type, ('levels',)) - ql_var[:] = state["ql"][0:nlevs] - ql_var.units = 'kg kg^-1' - ql_var.description = 'initial profile of liquid water specific humidity' - - qi_var = initial_grp.createVariable('qi', real_type, ('levels',)) - qi_var[:] = 0.0 - qi_var.units = 'kg kg^-1' - qi_var.description = 'initial profile of ice water specific humidity' - - u_var = initial_grp.createVariable('u', real_type, ('levels',)) - u_var[:] = state["u"][0:nlevs] - u_var.units = 'm s^-1' - u_var.description = 'initial profile of E-W 
horizontal wind' - - v_var = initial_grp.createVariable('v', real_type, ('levels',)) - v_var[:] = state["v"][0:nlevs] - v_var.units = 'm s^-1' - v_var.description = 'initial profile of N-S horizontal wind' - - tke_var = initial_grp.createVariable('tke', real_type, ('levels',)) - tke_var[:] = 0.0 - tke_var.units = 'm^2 s^-2' - tke_var.description = 'initial profile of turbulence kinetic energy' - - ozone_var = initial_grp.createVariable('ozone', real_type, ('levels',)) - ozone_var[:] = state["o3"][0:nlevs] - ozone_var.units = 'kg kg^-1' - ozone_var.description = 'initial profile of ozone mass mixing ratio' - - stc_var = initial_grp.createVariable('stc',real_type,('nsoil',)) - stc_var[:] = surface["stc"][0:nsoil] - stc_var.units = "K" - stc_var.description = "initial profile of soil temperature" - - smc_var = initial_grp.createVariable('smc',real_type,('nsoil',)) - smc_var[:] = surface["smc"][0:nsoil] - smc_var.units = "kg" - smc_var.description = "initial profile of soil moisture" - - slc_var = initial_grp.createVariable('slc',real_type,('nsoil',)) - slc_var[:] = surface["slc"][0:nsoil] - slc_var.units = "kg" - slc_var.description = "initial profile of soil liquid moisture" - - snicexy_var = initial_grp.createVariable('snicexy',real_type,('nsnow',)) - snicexy_var[:] = surface["snicexy"][0:nsnow] - snicexy_var.units = "mm" - snicexy_var.description = "initial profile of snow layer ice" - - snliqxy_var = initial_grp.createVariable('snliqxy',real_type,('nsnow',)) - snliqxy_var[:] = surface["snliqxy"][0:nsnow] - snliqxy_var.units = "mm" - snliqxy_var.description = "initial profile of snow layer liquid" - - tsnoxy_var = initial_grp.createVariable('tsnoxy',real_type,('nsnow',)) - tsnoxy_var[:] = surface["tsnoxy"][0:nsnow] - tsnoxy_var.units = "K" - tsnoxy_var.description = "initial profile of snow layer temperature" - - smoiseq_var = initial_grp.createVariable('smoiseq',real_type,('nsoil',)) - smoiseq_var[:] = surface["smoiseq"][0:nsoil] - smoiseq_var.units = "m3 m-3" - 
smoiseq_var.description = "initial profile of equilibrium soil water content" - - zsnsoxy_var = initial_grp.createVariable('zsnsoxy',real_type,('nsoil_plus_nsnow',)) - zsnsoxy_var[:] = surface["zsnsoxy"][0:nsoil + nsnow] - zsnsoxy_var.units = "m" - zsnsoxy_var.description = "layer bottom depth from snow surface" - - tiice_var = initial_grp.createVariable('tiice',real_type,('nice',)) - tiice_var[:] = surface["tiice"][0:nice] - tiice_var.units = "K" - tiice_var.description = "sea ice internal temperature" - - tslb_var = initial_grp.createVariable('tslb',real_type,('nsoil',)) - tslb_var[:] = surface["tslb"][0:nsoil] - tslb_var.units = "K" - tslb_var.description = "soil temperature for RUC LSM" - - smois_var = initial_grp.createVariable('smois',real_type,('nsoil',)) - smois_var[:] = surface["smois"][0:nsoil] - smois_var.units = "None" - smois_var.description = "volume fraction of soil moisture for RUC LSM" - - sh2o_var = initial_grp.createVariable('sh2o',real_type,('nsoil',)) - sh2o_var[:] = surface["sh2o"][0:nsoil] - sh2o_var.units = "None" - sh2o_var.description = "volume fraction of unfrozen soil moisture for RUC LSM" - - smfr_var = initial_grp.createVariable('smfr',real_type,('nsoil',)) - smfr_var[:] = surface["smfr"][0:nsoil] - smfr_var.units = "None" - smfr_var.description = "volume fraction of frozen soil moisture for RUC LSM" - - flfr_var = initial_grp.createVariable('flfr',real_type,('nsoil',)) - flfr_var[:] = surface["flfr"][0:nsoil] - flfr_var.units = "None" - flfr_var.description = "flag for frozen soil physics for RUC LSM" - - #forcing group - - p_surf_var = forcing_grp.createVariable('p_surf', real_type, ('time',)) - p_surf_var[:] = state["p_surf"] - p_surf_var.units = 'Pa' - p_surf_var.description = 'surface pressure' - - T_surf_var = forcing_grp.createVariable('T_surf', real_type, ('time',)) - T_surf_var[:] = missing_value - T_surf_var.units = 'K' - T_surf_var.description = 'surface absolute temperature forcing' - - w_ls_var = 
forcing_grp.createVariable('w_ls', real_type, ('levels','time',)) - w_ls_var[:] = forcing["w_ls"] - w_ls_var.units = 'm s^-1' - w_ls_var.description = 'large scale vertical velocity' - - omega_var = forcing_grp.createVariable('omega', real_type, ('levels','time',)) - omega_var[:] = forcing["omega"] - omega_var.units = 'Pa s^-1' - omega_var.description = 'large scale pressure vertical velocity' - - u_g_var = forcing_grp.createVariable('u_g', real_type, ('levels','time',)) - u_g_var[:] = forcing["u_g"] - u_g_var.units = 'm s^-1' - u_g_var.description = 'large scale geostrophic E-W wind' - - v_g_var = forcing_grp.createVariable('v_g', real_type, ('levels','time',)) - v_g_var[:] = forcing["v_g"] - v_g_var.units = 'm s^-1' - v_g_var.description = 'large scale geostrophic N-S wind' - - u_nudge_var = forcing_grp.createVariable('u_nudge', real_type, ('levels','time',)) - u_nudge_var[:] = forcing["u_nudge"] - u_nudge_var.units = 'm s^-1' - u_nudge_var.description = 'E-W wind to nudge toward' - - v_nudge_var = forcing_grp.createVariable('v_nudge', real_type, ('levels','time',)) - v_nudge_var[:] = forcing["v_nudge"] - v_nudge_var.units = 'm s^-1' - v_nudge_var.description = 'N-S wind to nudge toward' - - T_nudge_var = forcing_grp.createVariable('T_nudge', real_type, ('levels','time',)) - T_nudge_var[:] = forcing["T_nudge"] - T_nudge_var.units = 'K' - T_nudge_var.description = 'absolute temperature to nudge toward' - - thil_nudge_var = forcing_grp.createVariable('thil_nudge', real_type, ('levels','time',)) - thil_nudge_var[:] = forcing["thil_nudge"] - thil_nudge_var.units = 'K' - thil_nudge_var.description = 'potential temperature to nudge toward' - - qt_nudge_var = forcing_grp.createVariable('qt_nudge', real_type, ('levels','time',)) - qt_nudge_var[:] = forcing["qt_nudge"] - qt_nudge_var.units = 'kg kg^-1' - qt_nudge_var.description = 'q_t to nudge toward' - - rad_heating_var = forcing_grp.createVariable('dT_dt_rad', real_type, ('levels','time',)) - rad_heating_var[:] = 
forcing["rad_heating"] - rad_heating_var.units = 'K s^-1' - rad_heating_var.description = 'prescribed radiative heating rate' - - h_advec_thil_var = forcing_grp.createVariable('h_advec_thetail', real_type, ('levels','time',)) - h_advec_thil_var[:] = forcing["h_advec_thil"] - h_advec_thil_var.units = 'K s^-1' - h_advec_thil_var.description = 'prescribed theta_il tendency due to horizontal advection' - - v_advec_thil_var = forcing_grp.createVariable('v_advec_thetail', real_type, ('levels','time',)) - v_advec_thil_var[:] = forcing["v_advec_thil"] - v_advec_thil_var.units = 'K s^-1' - v_advec_thil_var.description = 'prescribed theta_il tendency due to vertical advection' - - h_advec_qt_var = forcing_grp.createVariable('h_advec_qt', real_type, ('levels','time',)) - h_advec_qt_var[:] = forcing["h_advec_qt"] - h_advec_qt_var.units = 'kg kg^-1 s^-1' - h_advec_qt_var.description = 'prescribed q_t tendency due to horizontal advection' - - v_advec_qt_var = forcing_grp.createVariable('v_advec_qt', real_type, ('levels','time',)) - v_advec_qt_var[:] = forcing["v_advec_qt"] - v_advec_qt_var.units = 'kg kg^-1 s^-1' - v_advec_qt_var.description = 'prescribed q_t tendency due to vertical advection' - - #scalar group - year_var = scalar_grp.createVariable('init_year',int_type) - year_var[:] = date["year"] - year_var.units = "years" - year_var.description = "year at time of initial values" - - month_var = scalar_grp.createVariable('init_month',int_type) - month_var[:] = date["month"] - month_var.units = "months" - month_var.description = "month at time of initial values" - - day_var = scalar_grp.createVariable('init_day',int_type) - day_var[:] = date["day"] - day_var.units = "days" - day_var.description = "day at time of initial values" - - hour_var = scalar_grp.createVariable('init_hour',int_type) - hour_var[:] = date["hour"] - hour_var.units = "hours" - hour_var.description = "hour at time of initial values" - - minute_var = scalar_grp.createVariable('init_minute',int_type) - 
minute_var[:] = date["minute"] - minute_var.units = "minutes" - minute_var.description = "minute at time of initial values" - - second_var = scalar_grp.createVariable('init_second',int_type) - second_var[:] = 0.0 - second_var.units = "seconds" - second_var.description = "second at time of initial values" - - lat_var = scalar_grp.createVariable('lat', real_type) - lat_var[:] = surface["lat"] - lat_var.units = 'degrees N' - lat_var.description = 'latitude of column' - - lon_var = scalar_grp.createVariable('lon', real_type) - lon_var[:] = surface["lon"] - lon_var.units = 'degrees E' - lon_var.description = 'longitude of column' - - area = scalar_grp.createVariable('area', real_type) - area[:] = surface["area"] - area.units = "m^2" - area.description = "grid cell area" - - #Noah initial parameters - - tsfco = scalar_grp.createVariable('tsfco',real_type) - tsfco[:] = surface["tsfco"] - tsfco.units = "K" - tsfco.description = "sea surface temperature OR surface skin temperature over land OR sea ice surface skin temperature (depends on value of slmsk)" - - vegsrc = scalar_grp.createVariable('vegsrc',int_type) - vegsrc[:] = 1 #when would this be 2? 
- vegsrc.description = "vegetation soure (1-2)" - - vegtyp = scalar_grp.createVariable('vegtyp',int_type) - vegtyp[:] = surface["vtyp"] - vegtyp.description = "vegetation type (1-12)" - - soiltyp = scalar_grp.createVariable('soiltyp',int_type) - soiltyp[:] = surface["styp"] - soiltyp.description = "soil type (1-12)" - - slopetyp = scalar_grp.createVariable('slopetyp',int_type) - slopetyp[:] = surface["slope"] - slopetyp.description = "slope type (1-9)" - - vegfrac = scalar_grp.createVariable('vegfrac',real_type) - vegfrac[:] = surface["vfrac"] - vegfrac.description = "vegetation fraction" - - shdmin = scalar_grp.createVariable('shdmin',real_type) - shdmin[:] = surface["shdmin"] - shdmin.description = "minimum vegetation fraction" - - shdmax = scalar_grp.createVariable('shdmax',real_type) - shdmax[:] = surface["shdmax"] - shdmax.description = "maximum vegetation fraction" - - zorlw = scalar_grp.createVariable('zorlw',real_type) - zorlw[:] = surface["zorlw"] - zorlw.units = "cm" - zorlw.description = "surface roughness length over ocean" - - islmsk = scalar_grp.createVariable('slmsk',real_type) - islmsk[:] = surface["slmsk"] - islmsk.description = "land-sea-ice mask" - - canopy = scalar_grp.createVariable('canopy',real_type) - canopy[:] = surface["canopy"] - canopy.units = "kg m-2" - canopy.description = "amount of water stored in canopy" - - hice = scalar_grp.createVariable('hice',real_type) - hice[:] = surface["hice"] - hice.units = "m" - hice.description = "sea ice thickness" - - fice = scalar_grp.createVariable('fice',real_type) - fice[:] = surface["fice"] - fice.description = "ice fraction" - - tisfc = scalar_grp.createVariable('tisfc',real_type) - tisfc[:] = surface["tisfc"] - tisfc.units = "K" - tisfc.description = "ice surface temperature" - - snwdph = scalar_grp.createVariable('snwdph',real_type) - snwdph[:] = surface["snwdph"] - snwdph.units = "mm" - snwdph.description = "water equivalent snow depth" - - snoalb = 
scalar_grp.createVariable('snoalb',real_type) - snoalb[:] = surface["snoalb"] - snoalb.description = "maximum snow albedo" - - tg3 = scalar_grp.createVariable('tg3',real_type) - tg3[:] = surface["tg3"] - tg3.units = "K" - tg3.description = "deep soil temperature" - - uustar = scalar_grp.createVariable('uustar',real_type) - uustar[:] = surface["uustar"] - uustar.units = "m s-1" - uustar.description = "friction velocity" - - alvsf = scalar_grp.createVariable('alvsf',real_type) - alvsf[:] = surface["alvsf"] - alvsf.units = "None" - alvsf.description = "60 degree vis albedo with strong cosz dependency" - - alnsf = scalar_grp.createVariable('alnsf',real_type) - alnsf[:] = surface["alnsf"] - alnsf.units = "None" - alnsf.description = "60 degree nir albedo with strong cosz dependency" - - alvwf = scalar_grp.createVariable('alvwf',real_type) - alvwf[:] = surface["alvwf"] - alvwf.units = "None" - alvwf.description = "60 degree vis albedo with weak cosz dependency" - - alnwf = scalar_grp.createVariable('alnwf',real_type) - alnwf[:] = surface["alnwf"] - alnwf.units = "None" - alnwf.description = "60 degree nir albedo with weak cosz dependency" - - facsf = scalar_grp.createVariable('facsf',real_type) - facsf[:] = surface["facsf"] - facsf.units = "None" - facsf.description = "fractional coverage with strong cosz dependency" - - facwf = scalar_grp.createVariable('facwf',real_type) - facwf[:] = surface["facwf"] - facwf.units = "None" - facwf.description = "fractional coverage with weak cosz dependency" - - weasd = scalar_grp.createVariable('weasd',real_type) - weasd[:] = surface["sheleg"] - weasd.units = "mm" - weasd.description = "water equivalent accumulated snow depth" - - f10m = scalar_grp.createVariable('f10m',real_type) - f10m[:] = surface["f10m"] - f10m.units = "None" - f10m.description = "ratio of sigma level 1 wind and 10m wind" - - t2m = scalar_grp.createVariable('t2m',real_type) - t2m[:] = surface["t2m"] - t2m.units = "K" - t2m.description = "2-meter absolute 
temperature" - - q2m = scalar_grp.createVariable('q2m',real_type) - q2m[:] = surface["q2m"] - q2m.units = "kg kg-1" - q2m.description = "2-meter specific humidity" - - ffmm = scalar_grp.createVariable('ffmm',real_type) - ffmm[:] = surface["ffmm"] - ffmm.units = "None" - ffmm.description = "Monin-Obukhov similarity function for momentum" - - ffhh = scalar_grp.createVariable('ffhh',real_type) - ffhh[:] = surface["ffhh"] - ffhh.units = "None" - ffhh.description = "Monin-Obukhov similarity function for heat" - - tprcp = scalar_grp.createVariable('tprcp',real_type) - tprcp[:] = surface["tprcp"] - tprcp.units = "m" - tprcp.description = "instantaneous total precipitation amount" - - srflag = scalar_grp.createVariable('srflag',real_type) - srflag[:] = surface["srflag"] - srflag.units = "None" - srflag.description = "snow/rain flag for precipitation" - - sncovr = scalar_grp.createVariable('sncovr',real_type) - sncovr[:] = surface["sncovr"] - sncovr.units = "None" - sncovr.description = "surface snow area fraction" - - tsfcl = scalar_grp.createVariable('tsfcl',real_type) - tsfcl[:] = surface["tsfcl"] - tsfcl.units = "K" - tsfcl.description = "surface skin temperature over land" - - zorll = scalar_grp.createVariable('zorll',real_type) - zorll[:] = surface["zorll"] - zorll.units = "cm" - zorll.description = "surface roughness length over land" - - zorli = scalar_grp.createVariable('zorli',real_type) - zorli[:] = surface["zorli"] - zorli.units = "cm" - zorli.description = "surface roughness length over ice" - - zorlwav = scalar_grp.createVariable('zorlwav',real_type) - zorlwav[:] = surface["zorlwav"] - zorlwav.units = "cm" - zorlwav.description = "surface roughness length from wave model" - - #Orography initial parameters - - stddev = scalar_grp.createVariable('stddev',real_type) - stddev[:] = oro["stddev"] - stddev.units = "m" - stddev.description = "standard deviation of subgrid orography" - - convexity = scalar_grp.createVariable('convexity',real_type) - convexity[:] = 
oro["convexity"] - convexity.units = "" - convexity.description = "convexity of subgrid orography" - - oa1 = scalar_grp.createVariable('oa1',real_type) - oa1[:] = oro["oa1"] - oa1.units = "" - oa1.description = "assymetry of subgrid orography 1" - - oa2 = scalar_grp.createVariable('oa2',real_type) - oa2[:] = oro["oa2"] - oa2.units = "" - oa2.description = "assymetry of subgrid orography 2" - - oa3 = scalar_grp.createVariable('oa3',real_type) - oa3[:] = oro["oa3"] - oa3.units = "" - oa3.description = "assymetry of subgrid orography 3" - - oa4 = scalar_grp.createVariable('oa4',real_type) - oa4[:] = oro["oa4"] - oa4.units = "" - oa4.description = "assymetry of subgrid orography 4" - - ol1 = scalar_grp.createVariable('ol1',real_type) - ol1[:] = oro["ol1"] - ol1.units = "" - ol1.description = "fraction of grid box with subgrid orography higher than critical height 1" - - ol2 = scalar_grp.createVariable('ol2',real_type) - ol2[:] = oro["ol2"] - ol2.units = "" - ol2.description = "fraction of grid box with subgrid orography higher than critical height 2" - - ol3 = scalar_grp.createVariable('ol3',real_type) - ol3[:] = oro["ol3"] - ol3.units = "" - ol3.description = "fraction of grid box with subgrid orography higher than critical height 3" - - ol4 = scalar_grp.createVariable('ol4',real_type) - ol4[:] = oro["ol4"] - ol4.units = "" - ol4.description = "fraction of grid box with subgrid orography higher than critical height 4" - - theta = scalar_grp.createVariable('theta',real_type) - theta[:] = oro["theta"] - theta.units = "deg" - theta.description = "angle with respect to east of maximum subgrid orographic variations" - - gamma = scalar_grp.createVariable('gamma',real_type) - gamma[:] = oro["gamma"] - gamma.units = "" - gamma.description = "anisotropy of subgrid orography" - - sigma = scalar_grp.createVariable('sigma',real_type) - sigma[:] = oro["sigma"] - sigma.units = "" - sigma.description = "slope of subgrid orography" - - elvmax = 
scalar_grp.createVariable('elvmax',real_type) - elvmax[:] = oro["elvmax"] - elvmax.units = "m" - elvmax.description = "maximum of subgrid orography" - - orog_filt = scalar_grp.createVariable('oro',real_type) - orog_filt[:] = oro["orog_filt"] - orog_filt.units = "m" - orog_filt.description = "orography" - - orog_raw = scalar_grp.createVariable('oro_uf',real_type) - orog_raw[:] = oro["orog_raw"] - orog_raw.units = "m" - orog_raw.description = "unfiltered orography" - - land_frac = scalar_grp.createVariable('landfrac',real_type) - land_frac[:] = oro["land_frac"] - land_frac.units = "None" - land_frac.description = "fraction of horizontal grid area occupied by land" - - lake_frac = scalar_grp.createVariable('lakefrac',real_type) - lake_frac[:] = oro["lake_frac"] - lake_frac.units = "None" - lake_frac.description = "fraction of horizontal grid area occupied by lake" - - lake_depth = scalar_grp.createVariable('lakedepth',real_type) - lake_depth[:] = oro["lake_depth"] - lake_depth.units = "m" - lake_depth.description = "lake depth" - - #NoahMP initial scalar parameters - tvxy = scalar_grp.createVariable('tvxy',real_type) - tvxy[:] = surface["tvxy"] - tvxy.units = "K" - tvxy.description = "vegetation temperature" - - tgxy = scalar_grp.createVariable('tgxy',real_type) - tgxy[:] = surface["tgxy"] - tgxy.units = "K" - tgxy.description = "ground temperature for NoahMP" - - tahxy = scalar_grp.createVariable('tahxy',real_type) - tahxy[:] = surface["tahxy"] - tahxy.units = "K" - tahxy.description = "canopy air temperature" - - canicexy = scalar_grp.createVariable('canicexy',real_type) - canicexy[:] = surface["canicexy"] - canicexy.units = "mm" - canicexy.description = "canopy intercepted ice mass" - - canliqxy = scalar_grp.createVariable('canliqxy',real_type) - canliqxy[:] = surface["canliqxy"] - canliqxy.units = "mm" - canliqxy.description = "canopy intercepted liquid water" - - eahxy = scalar_grp.createVariable('eahxy',real_type) - eahxy[:] = surface["eahxy"] - eahxy.units = 
"Pa" - eahxy.description = "canopy air vapor pressure" - - cmxy = scalar_grp.createVariable('cmxy',real_type) - cmxy[:] = surface["cmxy"] - cmxy.units = "" - cmxy.description = "surface drag coefficient for momentum for NoahMP" - - chxy = scalar_grp.createVariable('chxy',real_type) - chxy[:] = surface["chxy"] - chxy.units = "" - chxy.description = "surface exchange coeff heat & moisture for NoahMP" - - fwetxy = scalar_grp.createVariable('fwetxy',real_type) - fwetxy[:] = surface["fwetxy"] - fwetxy.units = "" - fwetxy.description = "area fraction of canopy that is wetted/snowed" - - sneqvoxy = scalar_grp.createVariable('sneqvoxy',real_type) - sneqvoxy[:] = surface["sneqvoxy"] - sneqvoxy.units = "mm" - sneqvoxy.description = "snow mass at previous time step" - - alboldxy = scalar_grp.createVariable('alboldxy',real_type) - alboldxy[:] = surface["alboldxy"] - alboldxy.units = "" - alboldxy.description = "snow albedo at previous time step" - - qsnowxy = scalar_grp.createVariable('qsnowxy',real_type) - qsnowxy[:] = surface["qsnowxy"] - qsnowxy.units = "mm s-1" - qsnowxy.description = "snow precipitation rate at surface" - - wslakexy = scalar_grp.createVariable('wslakexy',real_type) - wslakexy[:] = surface["wslakexy"] - wslakexy.units = "mm" - wslakexy.description = "lake water storage" - - taussxy = scalar_grp.createVariable('taussxy',real_type) - taussxy[:] = surface["taussxy"] - taussxy.units = "" - taussxy.description = "non-dimensional snow age" - - waxy = scalar_grp.createVariable('waxy',real_type) - waxy[:] = surface["waxy"] - waxy.units = "mm" - waxy.description = "water storage in aquifer" - - wtxy = scalar_grp.createVariable('wtxy',real_type) - wtxy[:] = surface["wtxy"] - wtxy.units = "mm" - wtxy.description = "water storage in aquifer and saturated soil" - - zwtxy = scalar_grp.createVariable('zwtxy',real_type) - zwtxy[:] = surface["zwtxy"] - zwtxy.units = "m" - zwtxy.description = "water table depth" - - xlaixy = scalar_grp.createVariable('xlaixy',real_type) - 
xlaixy[:] = surface["xlaixy"] - xlaixy.units = "" - xlaixy.description = "leaf area index" - - xsaixy = scalar_grp.createVariable('xsaixy',real_type) - xsaixy[:] = surface["xsaixy"] - xsaixy.units = "" - xsaixy.description = "stem area index" - - lfmassxy = scalar_grp.createVariable('lfmassxy',real_type) - lfmassxy[:] = surface["lfmassxy"] - lfmassxy.units = "g m-2" - lfmassxy.description = "leaf mass" - - stmassxy = scalar_grp.createVariable('stmassxy',real_type) - stmassxy[:] = surface["stmassxy"] - stmassxy.units = "g m-2" - stmassxy.description = "stem mass" - - rtmassxy = scalar_grp.createVariable('rtmassxy',real_type) - rtmassxy[:] = surface["rtmassxy"] - rtmassxy.units = "g m-2" - rtmassxy.description = "fine root mass" - - woodxy = scalar_grp.createVariable('woodxy',real_type) - woodxy[:] = surface["woodxy"] - woodxy.units = "g m-2" - woodxy.description = "wood mass including woody roots" - - stblcpxy = scalar_grp.createVariable('stblcpxy',real_type) - stblcpxy[:] = surface["stblcpxy"] - stblcpxy.units = "g m-2" - stblcpxy.description = "stable carbon in deep soil" - - fastcpxy = scalar_grp.createVariable('fastcpxy',real_type) - fastcpxy[:] = surface["fastcpxy"] - fastcpxy.units = "g m-2" - fastcpxy.description = "short-lived carbon in shallow soil" - - smcwtdxy = scalar_grp.createVariable('smcwtdxy',real_type) - smcwtdxy[:] = surface["smcwtdxy"] - smcwtdxy.units = "m3 m-3" - smcwtdxy.description = "soil water content between the bottom of the soil and the water table" - - deeprechxy = scalar_grp.createVariable('deeprechxy',real_type) - deeprechxy[:] = surface["deeprechxy"] - deeprechxy.units = "m" - deeprechxy.description = "recharge to or from the water table when deep" - - rechxy = scalar_grp.createVariable('rechxy',real_type) - rechxy[:] = surface["rechxy"] - rechxy.units = "m" - rechxy.description = "recharge to or from the water table when shallow" - - snowxy = scalar_grp.createVariable('snowxy',real_type) - snowxy[:] = surface["snowxy"] - 
snowxy.units = "" - snowxy.description = "number of snow layers" - - albdvis = scalar_grp.createVariable('albdvis',real_type) - albdvis[:] = surface["albdvis"] - albdvis.units = "" - albdvis.description = "surface albedo direct visible" - - albdnir = scalar_grp.createVariable('albdnir',real_type) - albdnir[:] = surface["albdnir"] - albdnir.units = "" - albdnir.description = "surface albedo direct near-infrared" - - albivis = scalar_grp.createVariable('albivis',real_type) - albivis[:] = surface["albivis"] - albivis.units = "" - albivis.description = "surface albedo diffuse visible" - - albinir = scalar_grp.createVariable('albinir',real_type) - albinir[:] = surface["albinir"] - albinir.units = "" - albinir.description = "surface albedo diffuse near-infrared" - - emiss = scalar_grp.createVariable('emiss',real_type) - emiss[:] = surface["emiss"] - emiss.units = "" - emiss.description = "surface emissivity" - - #NSST initial scalar parameters - tref = scalar_grp.createVariable('tref',real_type) - tref[:] = surface["tref"] - tref.units = "K" - tref.description = "sea surface reference temperature for NSST" - - z_c = scalar_grp.createVariable('z_c',real_type) - z_c[:] = surface["z_c"] - z_c.units = "m" - z_c.description = "sub-layer cooling thickness for NSST" - - c_0 = scalar_grp.createVariable('c_0',real_type) - c_0[:] = surface["c_0"] - c_0.units = "" - c_0.description = "coefficient 1 to calculate d(Tz)/d(Ts) for NSST" - - c_d = scalar_grp.createVariable('c_d',real_type) - c_d[:] = surface["c_d"] - c_d.units = "" - c_d.description = "coefficient 2 to calculate d(Tz)/d(Ts) for NSST" - - w_0 = scalar_grp.createVariable('w_0',real_type) - w_0[:] = surface["w_0"] - w_0.units = "" - w_0.description = "coefficient 3 to calculate d(Tz)/d(Ts) for NSST" - - w_d = scalar_grp.createVariable('w_d',real_type) - w_d[:] = surface["w_d"] - w_d.units = "" - w_d.description = "coefficient 4 to calculate d(Tz)/d(Ts) for NSST" - - xt = scalar_grp.createVariable('xt',real_type) - xt[:] = 
surface["xt"] - xt.units = "K m" - xt.description = "heat content in diurnal thermocline layer for NSST" - - xs = scalar_grp.createVariable('xs',real_type) - xs[:] = surface["xs"] - xs.units = "ppt m" - xs.description = "salinity content in diurnal thermocline layer for NSST" - - xu = scalar_grp.createVariable('xu',real_type) - xu[:] = surface["xu"] - xu.units = "m2 s-1" - xu.description = "u-current in diurnal thermocline layer for NSST" - - xv = scalar_grp.createVariable('xv',real_type) - xv[:] = surface["xv"] - xv.units = "m2 s-1" - xv.description = "v-current in diurnal thermocline layer for NSST" - - xz = scalar_grp.createVariable('xz',real_type) - xz[:] = surface["xz"] - xz.units = "m" - xz.description = "thickness of diurnal thermocline layer for NSST" - - zm = scalar_grp.createVariable('zm',real_type) - zm[:] = surface["zm"] - zm.units = "m" - zm.description = "thickness of ocean mixed layer for NSST" - - xtts = scalar_grp.createVariable('xtts',real_type) - xtts[:] = surface["xtts"] - xtts.units = "m" - xtts.description = "sensitivity of diurnal thermocline layer heat content to surface temperature [d(xt)/d(ts)] for NSST" - - xzts = scalar_grp.createVariable('xzts',real_type) - xzts[:] = surface["xzts"] - xzts.units = "m K-1" - xzts.description = "sensitivity of diurnal thermocline layer thickness to surface temperature [d(xz)/d(ts)] for NSST" - - d_conv = scalar_grp.createVariable('d_conv',real_type) - d_conv[:] = surface["d_conv"] - d_conv.units = "m" - d_conv.description = "thickness of free convection layer for NSST" - - ifd = scalar_grp.createVariable('ifd',real_type) - ifd[:] = surface["ifd"] - ifd.units = "" - ifd.description = "index to start DTM run for NSST" - - dt_cool = scalar_grp.createVariable('dt_cool',real_type) - dt_cool[:] = surface["dt_cool"] - dt_cool.units = "K" - dt_cool.description = "sub-layer cooling amount for NSST" - - qrain = scalar_grp.createVariable('qrain',real_type) - qrain[:] = surface["qrain"] - qrain.units = "W" - 
qrain.description = "sensible heat due to rainfall for NSST" - - #RUC LSM - wetness = scalar_grp.createVariable('wetness',real_type) - wetness[:] = surface["wetness"] - wetness.units = "" - wetness.description = "normalized soil wetness for RUC LSM" - - clw_surf_land = scalar_grp.createVariable('clw_surf_land',real_type) - clw_surf_land[:] = surface["clw_surf_land"] - clw_surf_land.units = "kg kg-1" - clw_surf_land.description = "cloud condensed water mixing ratio at surface over land for RUC LSM" - - clw_surf_ice = scalar_grp.createVariable('clw_surf_ice',real_type) - clw_surf_ice[:] = surface["clw_surf_ice"] - clw_surf_ice.units = "kg kg-1" - clw_surf_ice.description = "cloud condensed water mixing ratio at surface over ice for RUC LSM" - - qwv_surf_land = scalar_grp.createVariable('qwv_surf_land',real_type) - qwv_surf_land[:] = surface["qwv_surf_land"] - qwv_surf_land.units = "kg kg-1" - qwv_surf_land.description = "water vapor mixing ratio at surface over land for RUC LSM" - - qwv_surf_ice = scalar_grp.createVariable('qwv_surf_ice',real_type) - qwv_surf_ice[:] = surface["qwv_surf_ice"] - qwv_surf_ice.units = "kg kg-1" - qwv_surf_ice.description = "water vapor mixing ratio at surface over ice for RUC LSM" - - tsnow_land = scalar_grp.createVariable('tsnow_land',real_type) - tsnow_land[:] = surface["tsnow_land"] - tsnow_land.units = "K" - tsnow_land.description = "snow temperature at the bottom of the first snow layer over land for RUC LSM" - - tsnow_ice = scalar_grp.createVariable('tsnow_ice',real_type) - tsnow_ice[:] = surface["tsnow_ice"] - tsnow_ice.units = "K" - tsnow_ice.description = "snow temperature at the bottom of the first snow layer over ice for RUC LSM" - - snowfall_acc_land = scalar_grp.createVariable('snowfall_acc_land',real_type) - snowfall_acc_land[:] = surface["snowfall_acc_land"] - snowfall_acc_land.units = "kg m-2" - snowfall_acc_land.description = "run-total snow accumulation on the ground over land for RUC LSM" - - snowfall_acc_ice = 
scalar_grp.createVariable('snowfall_acc_ice',real_type) - snowfall_acc_ice[:] = surface["snowfall_acc_ice"] - snowfall_acc_ice.units = "kg m-2" - snowfall_acc_ice.description = "run-total snow accumulation on the ground over ice for RUC LSM" - - sncovr_ice = scalar_grp.createVariable('sncovr_ice',real_type) - sncovr_ice[:] = surface["sncovr_ice"] - sncovr_ice.units = "" - sncovr_ice.description = "surface snow area fraction over ice for RUC LSM" - - lai = scalar_grp.createVariable('lai',real_type) - lai[:] = surface["lai"] - lai.units = "" - lai.description = "leaf area index for RUC LSM" + temperature_var.long_name = 'Temperature' + temperature_var[:,:] = comp_data["ta"] + + qv_var = nc_file.createVariable('qv', wp, ('time', 'lev',)) + qv_var.units = 'kg kg-1' + qv_var.long_name = 'specific humidity' + qv_var[:,:] = comp_data["qv"] + + u_var = nc_file.createVariable('u', wp, ('time', 'lev',)) + u_var.units = 'm s-1' + u_var.long_name = 'zonal wind' + u_var[:,:] = comp_data["ua"] + + v_var = nc_file.createVariable('v', wp, ('time', 'lev',)) + v_var.units = 'm s-1' + v_var.long_name = 'meridional wind' + v_var[:,:] = comp_data["va"] + + for var2d in comp_data["vars2d"]: + tempVar = nc_file.createVariable(var2d["name"], wp, ('time_ufs_history')) + tempVar.units = var2d["units"] + tempVar.long_name = var2d["long_name"] + tempVar[:] = var2d["values"] + + nc_file.close() + + return + +######################################################################################## +# +######################################################################################## +def find_date(forcing_dir): + + atm_ftag = 'atmf*.nc' + + atm_filenames = [] + for f_name in os.listdir(forcing_dir): + if fnmatch.fnmatch(f_name, atm_ftag): + atm_filenames.append(f_name) + if not atm_filenames: + message = 'No filenames matching the pattern {0} found in {1}'.format(atm_ftag,forcing_dir) + logging.critical(message) + raise Exception(message) + atm_filenames = sorted(atm_filenames) + + nc_file 
= Dataset('{0}/{1}'.format(forcing_dir,atm_filenames[0])) + + #starting date is in the units attribute of time + + date_string = nc_file['time'].getncattr('units').split('since ')[1] #should be in format YYYY-MM-DD HH:MM:SS nc_file.close() + + date_dict = {} + date_dict["year"] = int(date_string[0:4]) + date_dict["month"] = int(date_string[5:7]) + date_dict["day"] = int(date_string[8:10]) + date_dict["hour"] = int(date_string[11:13]) + date_dict["minute"] = int(date_string[14:16]) + date_dict["second"] = int(date_string[17:]) + + return date_dict +######################################################################################## +# +######################################################################################## def main(): setup_logging() #read in arguments - (location, indices, date, in_dir, grid_dir, tile, area, noahmp, case_name, old_chgres) = parse_arguments() + (location, indices, date, in_dir, grid_dir, forcing_dir, tile, area, case_name, + old_chgres, lam, save_comp, use_nearest) = parse_arguments() #find tile containing the point using the supergrid if no tile is specified + #if not tile and not lam: if not tile: - tile = find_tile(location, grid_dir) + tile = int(find_tile(location, grid_dir, lam)) if tile < 0: message = 'No tile was found for location {0}'.format(location) logging.critical(message) raise Exception(message) - print 'Tile found: {0}'.format(tile) + print('Tile found: {0}'.format(tile)) #find index of closest point in the tile if indices are not specified if not indices: - (tile_j, tile_i, point_lon, point_lat, dist_min) = find_loc_indices(location, grid_dir, tile) - print 'The closest point in tile {0} has indices [{1},{2}]'.format(tile,tile_i,tile_j) - print 'This index has a central longitude/latitude of [{0},{1}]'.format(point_lon,point_lat) - print 'This grid cell is approximately {0} km away from the desired location of {1} {2}'.format(dist_min/1.0E3,location[0],location[1]) + (tile_j, tile_i, point_lon, point_lat, 
dist_min) = find_loc_indices(location, grid_dir, tile, lam) + print('The closest point in tile {0} has indices [{1},{2}]'.format(tile,tile_i,tile_j)) + print('This index has a central longitude/latitude of [{0},{1}]'.format(point_lon,point_lat)) + print('This grid cell is approximately {0} km away from the desired location of {1} {2}'.format(dist_min/1.0E3,location[0],location[1])) else: tile_i = indices[0] tile_j = indices[1] #still need to grab the lon/lat if the tile and indices are supplied - (point_lon, point_lat) = find_lon_lat_of_indices(indices, grid_dir, tile) + (point_lon, point_lat) = find_lon_lat_of_indices(indices, grid_dir, tile, lam) - print 'This index has a central longitude/latitude of [{0},{1}]'.format(point_lon,point_lat) - - #get UFS IC data (TODO: flag to read in RESTART data rather than IC data and implement different file reads) - (state_data, surface_data, oro_data) = get_UFS_IC_data(in_dir, grid_dir, tile, tile_i, tile_j, old_chgres) + print('This index has a central longitude/latitude of [{0},{1}]'.format(point_lon,point_lat)) - #cold start NoahMP variables - if (noahmp): - surface_data = add_noahmp_coldstart(surface_data, date) + # get UFS IC data (TODO: flag to read in RESTART data rather than IC data and implement + # different file reads) + (state_data, surface_data, oro_data) = get_UFS_IC_data(in_dir, grid_dir, tile, tile_i,\ + tile_j, old_chgres, lam) + + if not date: + # date was not included on command line; look in atmf* file for initial date + date = find_date(forcing_dir) + #get grid cell area if not given if not area: - area = get_UFS_grid_area(grid_dir, tile, tile_i, tile_j) + area = get_UFS_grid_area(grid_dir, tile, tile_i, tile_j, lam) surface_data["area"] = area - - surface_data["lon"] = point_lon - surface_data["lat"] = point_lat - - #get UFS forcing data (zeros for now; only placeholder) - forcing_data = get_UFS_forcing_data(state_data["nlevs"]) - - #write SCM case file - write_SCM_case_file(state_data, surface_data, 
oro_data, forcing_data, case_name, date) - + surface_data["lon"] = point_lon + surface_data["lat"] = point_lat + + # Get UFS forcing data + (forcing_data, comp_data, stateREGRID) = get_UFS_forcing_data(state_data["nlevs"], state_data, \ + location, use_nearest, forcing_dir, \ + grid_dir, tile, tile_i, tile_j, lam,\ + save_comp) + + # Write SCM case file + fileOUT = write_SCM_case_file(state_data, surface_data, oro_data, forcing_data, case_name, date, \ + stateREGRID) + + # read in and remap the state variables to the first history file pressure profile and + # write them out to compare SCM output to (atmf for state variables and sfcf for physics + # tendencies) + if (save_comp): + write_comparison_file(comp_data, case_name, date, surface_data) if __name__ == '__main__': main() diff --git a/scm/etc/scripts/UFS_LSM_param.py b/scm/etc/scripts/UFS_LSM_param.py index de6efd964..f17372e1b 100755 --- a/scm/etc/scripts/UFS_LSM_param.py +++ b/scm/etc/scripts/UFS_LSM_param.py @@ -31,6 +31,7 @@ group1.add_argument('-l', '--location', help='longitude and latitude in degress E and N, respectively, separated by a space', nargs=2, type=float) group1.add_argument('-ij','--index', help='i,j indices within the tile (if known - bypasses search for closest model point to lon/lat location)', nargs=2, type=int) parser.add_argument('-g', '--grid_dir', help='directory path containing FV3 tile supergrid files', required=True) +parser.add_argument('-ic','--lsm_dir', help='directory path containing FV3 tile IC files', required=True) parser.add_argument('-t', '--tile', help='tile of desired point (if known - bypasses tile search if present)', type=int, choices=range(1,7)) ############################################################################### @@ -43,6 +44,7 @@ def parse_arguments(): location = args.location index = args.index grid_dir = args.grid_dir + lsm_dir = args.lsm_dir tile = args.tile #validate args @@ -50,6 +52,10 @@ def parse_arguments(): message = 'The directory {0} does not 
exist'.format(grid_dir) logging.critical(message) raise Exception(message) + if not os.path.exists(lsm_dir): + message = 'The directory {0} does not exist'.format(lsm_dir) + logging.critical(message) + raise Exception(message) if not index: if not 0 <= location[0] <= 360 : @@ -62,7 +68,7 @@ def parse_arguments(): logging.critical(message) raise Exception(message) - return (location, index, grid_dir, tile) + return (location, index, grid_dir, lsm_dir, tile) def setup_logging(): """Sets up the logging module.""" @@ -108,20 +114,20 @@ def find_tile(loc, dir): edge_1_lon = longitude[0,:] edge_1_lat = latitude[0,:] - edge_1 = zip(edge_1_lon, edge_1_lat) + edge_1 = list(zip(edge_1_lon, edge_1_lat)) edge_2_lon = longitude[:,-1] edge_2_lat = latitude[:,-1] - edge_2 = zip(edge_2_lon, edge_2_lat) + edge_2 = list(zip(edge_2_lon, edge_2_lat)) edge_3_lon = longitude[-1,:] edge_3_lat = latitude[-1,:] - edge_3 = zip(edge_3_lon, edge_3_lat) + edge_3 = list(zip(edge_3_lon, edge_3_lat)) edge_3.reverse() #need to reverse the direction of this edge to form a regular polygon edge_4_lon = longitude[:,0] edge_4_lat = latitude[:,0] - edge_4 = zip(edge_4_lon, edge_4_lat) + edge_4 = list(zip(edge_4_lon, edge_4_lat)) edge_4.reverse() #need to reverse the direction of this edge to form a regular polygon polygon_points = edge_1 + edge_2 + edge_3 + edge_4 @@ -231,9 +237,9 @@ def find_lon_lat_of_indices(indices, dir, tile): return (longitude[indices[1],indices[0]], latitude[indices[1],indices[0]]) -def get_UFS_surface_fix_data(dir, tile, i, j): +def get_UFS_surface_fix_data(fix_dir, tile, i, j): - fix_dir = dir + '/fix_sfc' + #fix_dir = dir + '/fix_sfc' filename_pattern = '*facsf.tile{0}.nc'.format(tile) for f_name in os.listdir(fix_dir): @@ -388,36 +394,36 @@ def main(): setup_logging() #read in arguments - (location, indices, grid_dir, tile) = parse_arguments() + (location, indices, grid_dir, lsm_dir, tile) = parse_arguments() #find tile containing the point using the supergrid if no tile is 
specified if not tile: - tile = find_tile(location, grid_dir) + tile = int(find_tile(location, grid_dir)) if tile < 0: message = 'No tile was found for location {0}'.format(location) logging.critical(message) raise Exception(message) - print 'Tile found: {0}'.format(tile) + print('Tile found: {0}'.format(tile)) #find index of closest point in the tile if indices are not specified if not indices: (tile_j, tile_i, point_lon, point_lat, dist_min) = find_loc_indices(location, grid_dir, tile) - print 'The closest point in tile {0} has indices [{1},{2}]'.format(tile,tile_i,tile_j) - print 'This index has a central longitude/latitude of [{0},{1}]'.format(point_lon,point_lat) - print 'This grid cell is approximately {0} km away from the desired location of {1} {2}'.format(dist_min/1.0E3,location[0],location[1]) + print('The closest point in tile {0} has indices [{1},{2}]'.format(tile,tile_i,tile_j)) + print('This index has a central longitude/latitude of [{0},{1}]'.format(point_lon,point_lat)) + print('This grid cell is approximately {0} km away from the desired location of {1} {2}'.format(dist_min/1.0E3,location[0],location[1])) else: tile_i = indices[0] tile_j = indices[1] #still need to grab the lon/lat if the tile and indices are supplied (point_lon, point_lat) = find_lon_lat_of_indices(indices, grid_dir, tile) - print 'This index has a central longitude/latitude of [{0},{1}]'.format(point_lon,point_lat) + print('This index has a central longitude/latitude of [{0},{1}]'.format(point_lon,point_lat)) #get grid cell area if not given #area = get_UFS_grid_area(grid_dir, tile, tile_i, tile_j) - (facsf, facwf, max_snow_alb, alvsf, alvwf, alnsf, alnwf, substrate_t, veg_greenness, max_veg_greenness, min_veg_greenness, slope_type, soil_type, veg_type) = get_UFS_surface_fix_data(grid_dir, tile, tile_i, tile_j) + (facsf, facwf, max_snow_alb, alvsf, alvwf, alnsf, alnwf, substrate_t, veg_greenness, max_veg_greenness, min_veg_greenness, slope_type, soil_type, veg_type) = 
get_UFS_surface_fix_data(lsm_dir, tile, tile_i, tile_j) print("facsf,facwf={0},{1}".format(facsf,facwf)) print("maximum_snow_albedo={}".format(max_snow_alb)) diff --git a/scm/etc/scripts/UFS_forcing_ensemble_generator.py b/scm/etc/scripts/UFS_forcing_ensemble_generator.py new file mode 100755 index 000000000..d057dc6cf --- /dev/null +++ b/scm/etc/scripts/UFS_forcing_ensemble_generator.py @@ -0,0 +1,190 @@ +#!/usr/bin/env python + +############################################################################### +# Dependencies +############################################################################### +import argparse +import os +from netCDF4 import Dataset +import xarray as xr +import numpy as np +import random + +############################################################################### +# Argument list +############################################################################### +parser = argparse.ArgumentParser() +parser.add_argument('-d', '--dir', help='path to UFS Regression Test output', required=True) +parser.add_argument('-n', '--case_name', help='name of case', required=True) +parser.add_argument('-lonl', '--lon_limits', help='longitude range, separated by a space', nargs=2, type=float, required=False) +parser.add_argument('-latl', '--lat_limits', help='latitude range, separated by a space', nargs=2, type=float, required=False) +parser.add_argument('-lons', '--lon_list', help='longitudes, separated by a space', nargs='*', type=float, required=False) +parser.add_argument('-lats', '--lat_list', help='latitudes, separated by a space', nargs='*', type=float, required=False) +parser.add_argument('-nens', '--nensmembers', help='number of SCM UFS ensemble memebers to create', type=int, required=False) +parser.add_argument('-dt', '--timestep', help='SCM timestep, in seconds', type=int, default = 3600) +parser.add_argument('-cres', '--C_RES', help='UFS spatial resolution', type=int, default = 96) +parser.add_argument('-sdf', '--suite', help='CCPP suite 
definition file to use for ensemble', default = 'SCM_GFS_v16') +parser.add_argument('-sc', '--save_comp', help='flag to save a file with UFS data for comparisons', action='store_true') +parser.add_argument('-near', '--use_nearest', help='flag to indicate using the nearest UFS history file gridpoint, no regridding',action='store_true') + +############################################################################### +# Main program +############################################################################### +def main(): + # Get command line arguments + args = parser.parse_args() + + # This asssumes using UFS Weather Model Regression Test output. + args.dir_ic = args.dir + "/INPUT/" + args.dir_grid = args.dir + "/INPUT/" + args.dir_forcing = args.dir + + # Error checking + if (args.lon_limits and args.lon_list): + print("ERROR: Can't provide explicit longitude(s) AND a longitude range") + exit() + if (args.lat_limits and args.lat_list): + print("ERROR: Can't provide explicit latitude(s) AND a latitude range") + exit() + if (args.lon_limits or args.lat_limits) and not args.nensmembers: + print("ERROR: Longitude/Latitude range provided, but NOT ensemble count.") + exit() + + if (args.nensmembers): + npts = args.nensmembers + if (args.lat_list or args.lon_list): + print("ERROR: Can't provide explicit lon/lat range AND number of points for ensemble generation.") + exit() + else: + if (args.lon_list and args.lat_list): + if (len(args.lon_list) == len(args.lat_list)): + npts = len(args.lon_list) + else: + print("ERROR: Number of longitude/latitudes are inconsistent") + exit() + + ########################################################################### + # + # Set longitude/latitude + # + ########################################################################### + if (args.nensmembers): + rng1 = np.zeros((npts), dtype=float) + rng2 = np.zeros((npts), dtype=float) + lons = np.zeros((npts), dtype=float) + lats = np.zeros((npts), dtype=float) + for ipt in range(npts): 
+ # Here the seed is set to give the same set of points each time. + random.seed(ipt) + rng1[ipt] = random.randint(1,1000)*0.001 + rng2[ipt] = random.randint(2,1000)*0.001 + # + if args.lat_limits: + lats[ipt] = args.lat_limits[0] + (args.lat_limits[1]-args.lat_limits[0])*rng1[ipt] + else: + lats[ipt] = rng1[ipt]*180-90 + if args.lon_limits: + lons[ipt] = args.lon_limits[0] + (args.lon_limits[1]-args.lon_limits[0])*rng2[ipt] + else: + lons[ipt] = rng2[ipt]*360 + ########################################################################### + # + # Use longitude and latitude provided + # + ########################################################################### + else: + lons = np.asarray(args.lon_list) + lats = np.asarray(args.lat_list) + + ########################################################################### + # + # Create SCM case configuration (etc/case_config) file. + # + ########################################################################### + # + case_config =[{"name": "input_type", "values": str(1)}, \ + {"name": "dt", "values": str(args.timestep)}, \ + {"name": "C_RES", "values": str(args.C_RES)}] + + # What, if any, options neeed to be passsed to UFS_IC_generator.py? + com_config = '' + if args.save_comp: com_config = com_config + ' -sc' + if args.use_nearest: com_config = com_config + ' -near' + + # Create inputs to SCM + case_list = "" + case_list_nf = "" + count = 0 + run_list = [] + for pt in range(0,npts): + # Call UFS_IC_generator.py + case_name = args.case_name +"_n" + str(pt).zfill(3) + file_scminput = "../../data/processed_case_input/"+case_name+"_SCM_driver.nc" + com = "./UFS_IC_generator.py -l " +str(lons[pt]) + " " + str(lats[pt]) + \ + " -i " + args.dir_ic + " -g " + args.dir_grid + " -f " + args.dir_forcing + " -n " + case_name + com_config + print(com) + os.system(com) + + # Add case to ensemble list. + case_list = case_list + '"'+case_name+'"' + if (count != npts-1): case_list = case_list + ', ' + + # What is the surface type? 
(get from SCM input file) + dataset = xr.open_dataset(file_scminput) + sfc_type = int(np.round_(dataset.slmsk.values[0])) + + # Create case_config file(s) + fileOUT = "../../etc/case_config/"+case_name+".nml" + fileID = open(fileOUT, 'w') + fileID.write('$case_config') + fileID.write('\n') + fileID.write('case_name = ' + "'" + case_name + "',") + fileID.write('\n') + fileID.write('sfc_type = ' + str(sfc_type) + ",") + fileID.write('\n') + for opts in case_config: + fileID.write(opts["name"] + ' = ' + opts["values"] + ",") + fileID.write('\n') + fileID.write('$end') + fileID.write('\n') + fileID.close() + + # Add case to dictionary to be used by run_scm.py + run_list.append({"case": case_name, "suite": args.suite}) + + # + count = count + 1 + + ########################################################################### + # + # Create "multirun file list" needed by run_scm.py + # + ########################################################################### + com = "mkdir -p ../../bin/" + print(com) + os.system(com) + fileOUT = "scm_ufsens_"+args.case_name+".py" + fileID = open("../../bin/"+fileOUT, 'w') + fileID.write('run_list = [') + fileID.write('\n') + for run in run_list: + #print(' {"case": "' , run["case"] , '", "suite": "' , run["suite"] , '"},') + fileID.write(' {"case": "' + run["case"] + '", "suite": "' + run["suite"] + '"},') + fileID.write('\n') + fileID.write(' ]') + fileID.close() + + ########################################################################### + # + # Display run commands + # + ########################################################################### + print("-------------------------------------------------------------------------------------------") + print("Command(s) to execute in ccpp-scm/scm/bin/: ") + print(" ") + print("./run_scm.py --npz_type gfs --file " + fileOUT + " --timestep " + str(args.timestep)) + print("") + print("-------------------------------------------------------------------------------------------") + +if 
__name__ == '__main__': + main() diff --git a/scm/etc/scripts/fv3_remap.py b/scm/etc/scripts/fv3_remap.py new file mode 100755 index 000000000..ab384fa0a --- /dev/null +++ b/scm/etc/scripts/fv3_remap.py @@ -0,0 +1,1681 @@ +#!/usr/bin/env python + +import numpy as np +import math + +r3 = 1./3. +r23 = 2./3. +r12 = 1./12. + +def ppm_limiters(dm, a4, itot, lmt): + # INPUT PARAMETERS: + #real , intent(in):: dm(*) !< Linear slope + #integer, intent(in) :: itot !< Total Longitudes + #integer, intent(in) :: lmt !< 0: Standard PPM constraint 1: Improved full monotonicity constraint + # !< (Lin) 2: Positive definite constraint + # !< 3: do nothing (return immediately) + #! INPUT/OUTPUT PARAMETERS: + # real , intent(inout) :: a4(4,*) !< PPM array AA <-- a4(1,i) AL <-- a4(2,i) AR <-- a4(3,i) A6 <-- a4(4,i) + # ! LOCAL VARIABLES: + # real qmp + # real da1, da2, a6da + # real fmin + # integer i + + #! Developer: S.-J. Lin + if (lmt == 3): + return a4 + + if (lmt == 0): + #! Standard PPM constraint + for i in range(0,itot): + if(dm[i] == 0.): + a4[1,i] = a4[0,i] + a4[2,i] = a4[0,i] + a4[3,i] = 0. + else: + da1 = a4[2,i] - a4[1,i] + da2 = da1*da1 + a6da = a4[3,i]*da1 + if(a6da < -da2): + a4[3,i] = 3.*(a4[1,i]-a4[0,i]) + a4[2,i] = a4[1,i] - a4[3,i] + elif(a6da > da2): + a4[3,i] = 3.*(a4[2,i]-a4[0,i]) + a4[1,i] = a4[2,i] - a4[3,i] + elif (lmt == 1): + #! Improved full monotonicity constraint (Lin 2004) + #! Note: no need to provide first guess of A6 <-- a4(4,i) + for i in range(0,itot): + qmp = 2.*dm[i] + a4[1,i] = a4[0,i]-np.sign(qmp)*np.abs(np.min([np.abs(qmp),np.abs(a4[1,i]-a4[0,i])])) + a4[2,i] = a4[0,i]+np.sign(qmp)*np.abs(np.min([np.abs(qmp),np.abs(a4[2,i]-a4[0,i])])) + a4[3,i] = 3.*( 2.*a4[0,i] - (a4[1,i]+a4[2,i]) ) + elif (lmt == 2): + #! 
Positive definite constraint + for i in range(0,itot): + if( np.abs(a4[2,i]-a4[1,i]) < -a4[3,i] ): + fmin = a4[0,i]+0.25*(a4[2,i]-a4[1,i])**2/a4[3,i]+a4[3,i]*r12 + if( fmin < 0.): + if(a4[0,i] < a4[2,i] and a4[0,i] < a4[1,i]): + a4[2,i] = a4[0,i] + a4[1,i] = a4[0,i] + a4[3,i] = 0. + elif(a4[2,i] > a4[1,i]): + a4[3,i] = 3.*(a4[1,i]-a4[0,i]) + a4[2,i] = a4[1,i] - a4[3,i] + else: + a4[3,i] = 3.*(a4[2,i]-a4[0,i]) + a4[1,i] = a4[2,i] - a4[3,i] + return a4 + +def cs_limiters(im, extm, a4, iv): + #integer, intent(in) :: im + #integer, intent(in) :: iv + #logical, intent(in) :: extm(im) + #real , intent(inout) :: a4(4,im) !< PPM array + #! LOCAL VARIABLES: + #real da1, da2, a6da + #integer i + + if (iv == 0): + #! Positive definite constraint + for i in range(0,im): + if (a4[0,i] <= 0.): + a4[1,i] = a4[0,i] + a4[2,i] = a4[0,i] + a4[3,i] = 0. + else: + if (np.abs(a4[2,i]-a4[1,i]) < -a4[3,i]): + if ((a4[0,i]+0.25*(a4[2,i]-a4[1,i])**2/a4[3,i]+a4[3,i]*r12) < 0.): + #! local minimum is negative + if (a4[0,i] < a4[2,i] and a4[0,i] < a4[1,i]): + a4[2,i] = a4[0,i] + a4[1,i] = a4[0,i] + a4[3,i] = 0. + elif (a4[2,i] > a4[1,i]): + a4[3,i] = 3.*(a4[1,i]-a4[0,i]) + a4[2,i] = a4[1,i] - a4[3,i] + else: + a4[3,i] = 3.*(a4[2,i]-a4[0,i]) + a4[1,i] = a4[2,i] - a4[3,i] + elif (iv == 1): + for i in range(0,im): + if ((a4[0,i]-a4[1,i])*(a4[0,i]-a4[2,i]) >= 0.): + a4[1,i] = a4[0,i] + a4[2,i] = a4[0,i] + a4[3,i] = 0. + else: + da1 = a4[2,i] - a4[1,i] + da2 = da1**2 + a6da = a4[3,i]*da1 + if (a6da < -da2): + a4[3,i] = 3.*(a4[1,i]-a4[0,i]) + a4[2,i] = a4[1,i] - a4[3,i] + elif (a6da > da2): + a4[3,i] = 3.*(a4[2,i]-a4[0,i]) + a4[1,i] = a4[2,i] - a4[3,i] + else: + #! Standard PPM constraint + for i in range(0,im): + if (extm[i]): + a4[1,i] = a4[0,i] + a4[2,i] = a4[0,i] + a4[3,i] = 0. 
+ else: + da1 = a4[2,i] - a4[1,i] + da2 = da1**2 + a6da = a4[3,i]*da1 + if (a6da < -da2): + a4[3,i] = 3.*(a4[1,i]-a4[0,i]) + a4[2,i] = a4[1,i] - a4[3,i] + elif (a6da > da2): + a4[3,i] = 3.*(a4[2,i]-a4[0,i]) + a4[1,i] = a4[2,i] - a4[3,i] + return a4 + +def ppm_profile(a4, delp, km, i1, i2, iv, kord): + + #! INPUT PARAMETERS: + #integer, intent(in):: iv !< iv =-1: winds iv = 0: positive definite scalars iv = 1: others iv = 2: temp (if remap_t) and w (iv=-2) + #integer, intent(in):: i1 !< Starting longitude + #integer, intent(in):: i2 !< Finishing longitude + #integer, intent(in):: km !< Vertical dimension + #integer, intent(in):: kord !< Order (or more accurately method no.): + #real , intent(in):: delp(i1:i2,km) !< Layer pressure thickness + #!INPUT/OUTPUT PARAMETERS: + #real , intent(inout):: a4(4,i1:i2,km) !< Interpolated values + #! DESCRIPTION: + #! + #! Perform the piecewise parabolic reconstruction + #! + #! !REVISION HISTORY: + #! S.-J. Lin revised at GFDL 2007 + #!----------------------------------------------------------------------- + #! local arrays: + it = i2 - i1 + 1 + + dc = np.zeros((it,km)) + h2 = np.zeros((it,km)) + delq = np.zeros((it,km)) + df2 = np.zeros((it,km)) + d4 = np.zeros((it,km)) + #real dc(i1:i2,km) + #real h2(i1:i2,km) + #real delq(i1:i2,km) + #real df2(i1:i2,km) + #real d4(i1:i2,km) + + #! 
local scalars: + #integer i, k, km1, lmt, it + #real fac + #real a1, a2, c1, c2, c3, d1, d2 + #real qm, dq, lac, qmp, pmp + + + + km1 = km - 1 + + for k in range(1,km): + for i in range(i1-1,i2): + delq[i,k-1] = a4[0,i,k] - a4[0,i,k-1] + d4[i,k ] = delp[i,k-1] + delp[i,k] + + for k in range(1,km1): + for i in range(i1-1,i2): + c1 = (delp[i,k-1]+0.5*delp[i,k])/d4[i,k+1] + c2 = (delp[i,k+1]+0.5*delp[i,k])/d4[i,k] + df2[i,k] = delp[i,k]*(c1*delq[i,k] + c2*delq[i,k-1]) / (d4[i,k]+delp[i,k+1]) + dc[i,k] = np.sign(df2[i,k])*np.abs(np.min([np.abs(df2[i,k]), np.max([a4[0,i,k-1],a4[0,i,k],a4[0,i,k+1]])-a4[0,i,k], a4[0,i,k]-np.min([a4[0,i,k-1],a4[0,i,k],a4[0,i,k+1]])])) + + #!----------------------------------------------------------- + #! 4th order interpolation of the provisional cell edge value + #!----------------------------------------------------------- + + for k in range(2,km1): + for i in range(i1-1,i2): + c1 = delq[i,k-1]*delp[i,k-1] / d4[i,k] + a1 = d4[i,k-1] / (d4[i,k] + delp[i,k-1]) + a2 = d4[i,k+1] / (d4[i,k] + delp[i,k]) + a4[1,i,k] = a4[0,i,k-1] + c1 + 2./(d4[i,k-1]+d4[i,k+1]) * (delp[i,k]*(c1*(a1 - a2)+a2*dc[i,k-1]) - delp[i,k-1]*a1*dc[i,k]) + + #! Area preserving cubic with 2nd deriv. = 0 at the boundaries + #! Top + for i in range(i1-1,i2): + d1 = delp[i,1] + d2 = delp[i,2] + qm = (d2*a4[0,i,0]+d1*a4[0,i,1]) / (d1+d2) + dq = 2.*(a4[0,i,1]-a4[0,i,0]) / (d1+d2) + c1 = 4.*(a4[1,i,2]-qm-d2*dq) / ( d2*(2.*d2*d2+d1*(d2+3.*d1)) ) + c3 = dq - 0.5*c1*(d2*(5.*d1+d2)-3.*d1*d1) + a4[1,i,1] = qm - 0.25*c1*d1*d2*(d2+3.*d1) + #! Top edge: + #!------------------------------------------------------- + a4[1,i,0] = d1*(2.*c1*d1**2-c3) + a4[1,i,1] + #!------------------------------------------------------- + #! a4[2,i,1] = (12./7.)*a4[1,i,1]-(13./14.)*a4[1,i,2]+(3./14.)*a4[1,i,3] + #!------------------------------------------------------- + #! 
No over- and undershoot condition + a4[1,i,1] = np.max([a4[1,i,1], np.min([a4[0,i,0], a4[0,i,1]])]) + a4[1,i,1] = np.min([a4[1,i,1], np.max([a4[0,i,0], a4[0,i,1]])]) + dc[i,0] = 0.5*(a4[1,i,1] - a4[0,i,0]) + + #! Enforce monotonicity within the top layer + + if (iv == 0): + for i in range(i1-1,i2): + a4[1,i,0] = np.max([0., a4[1,i,0]]) + a4[1,i,1] = np.max([0., a4[1,i,1]]) + elif (iv == -1): + for i in range(i1-1,i2): + if (a4[1,i,0]*a4[0,i,0] <= 0. ): + a4[1,i,0] = 0. + elif (np.abs(iv) == 2): + for i in range(i1-1,i2): + a4[1,i,0] = a4[0,i,0] + a4[2,i,0] = a4[0,i,0] + + #! Bottom + #! Area preserving cubic with 2nd deriv. = 0 at the surface + for i in range(i1-1,i2): + d1 = delp[i,km-1] + d2 = delp[i,km1-1] + qm = (d2*a4[0,i,km-1]+d1*a4[0,i,km1-1]) / (d1+d2) + dq = 2.*(a4[0,i,km1-1]-a4[0,i,km-1]) / (d1+d2) + c1 = (a4[1,i,km1-1]-qm-d2*dq) / (d2*(2.*d2*d2+d1*(d2+3.*d1))) + c3 = dq - 2.0*c1*(d2*(5.*d1+d2)-3.*d1*d1) + a4[1,i,km-1] = qm - c1*d1*d2*(d2+3.*d1) + #! Bottom edge: + #!----------------------------------------------------- + a4[2,i,km-1] = d1*(8.*c1*d1**2-c3) + a4[1,i,km-1] + #! dc[i,km] = 0.5*(a4[3,i,km] - a4[1,i,km]) + #!----------------------------------------------------- + #! a4[3,i,km] = (12./7.)*a4[1,i,km]-(13./14.)*a4[1,i,km-1]+(3./14.)*a4[1,i,km-2] + #! No over- and under-shoot condition + a4[1,i,km-1] = np.max([a4[1,i,km-1], np.min([a4[0,i,km-1], a4[0,i,km1-1]])]) + a4[1,i,km-1] = np.min([a4[1,i,km-1], np.max([a4[0,i,km-1], a4[0,i,km1-1]])]) + dc[i,km-1] = 0.5*(a4[0,i,km-1] - a4[1,i,km-1]) + + #! Enforce constraint on the "slope" at the surface + + ##ifdef BOT_MONO + # do i=i1,i2 + # a4(4,i,km) = 0 + # if( a4(3,i,km) * a4(1,i,km) <= 0. ) a4(3,i,km) = 0. + # d1 = a4(1,i,km) - a4(2,i,km) + # d2 = a4(3,i,km) - a4(1,i,km) + # if ( d1*d2 < 0. 
) then + # a4(2,i,km) = a4(1,i,km) + # a4(3,i,km) = a4(1,i,km) + # else + # dq = sign(min(abs(d1),abs(d2),0.5*abs(delq(i,km-1))), d1) + # a4(2,i,km) = a4(1,i,km) - dq + # a4(3,i,km) = a4(1,i,km) + dq + # endif + # enddo + ##else + if (iv == 0): + for i in range(i1-1,i2): + a4[1,i,km-1] = np.max([0.,a4[1,i,km-1]]) + a4[2,i,km-1] = np.max([0.,a4[2,i,km-1]]) + elif (iv < 0): + for i in range(i1-1,i2): + if (a4[0,i,km-1]*a4[2,i,km-1] <= 0.): + a4[2,i,km-1] = 0. + ##endif + + for k in range(0,km1): + for i in range(i1-1,i2): + a4[2,i,k] = a4[1,i,k+1] + + #!----------------------------------------------------------- + #! f(s) = AL + s*[(AR-AL) + A6*(1-s)] ( 0 <= s <= 1 ) + #!----------------------------------------------------------- + #! Top 2 and bottom 2 layers always use monotonic mapping + for k in range(0,2): + for i in range(i1-1,i2): + a4[3,i,k] = 3.*(2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k])) + a4[:,:,k] = ppm_limiters(dc[:,k], a4[:,:,k], it, 0) + + if (kord >= 7): + #!----------------------- + #! Huynh's 2nd constraint + #!----------------------- + for k in range(1,km1): + for i in range(i1-1,i2): + #! Method#1 + #! h2[i,k] = delq[i,k] - delq[i,k-1] + #! Method#2 - better + h2[i,k] = 2.*(dc[i,k+1]/delp[i,k+1] - dc[i,k-1]/delp[i,k-1]) / (delp[i,k]+0.5*(delp[i,k-1]+delp[i,k+1])) * delp[i,k]**2 + #! Method#3 + #!!! h2[i,k] = dc[i,k+1] - dc[i,k-1] + fac = 1.5 #! original quasi-monotone + + for k in range(2,km-2): + for i in range(i1-1,i2): + #! Right edges + #! qmp = a4[1,i,k] + 2.0*delq[i,k-1] + #! lac = a4[1,i,k] + fac*h2[i,k-1] + 0.5*delq[i,k-1] + pmp = 2.*dc[i,k] + qmp = a4[0,i,k] + pmp + lac = a4[0,i,k] + fac*h2[i,k-1] + dc[i,k] + a4[2,i,k] = np.min([np.max([a4[2,i,k], np.min([a4[0,i,k], qmp, lac])]), np.max([a4[0,i,k], qmp, lac])]) + #! Left edges + #! qmp = a4[1,i,k] - 2.0*delq[i,k] + #! lac = a4[1,i,k] + fac*h2[i,k+1] - 0.5*delq[i,k] + #! 
+ qmp = a4[0,i,k] - pmp + lac = a4[0,i,k] + fac*h2[i,k+1] - dc[i,k] + a4[1,i,k] = np.min([np.max([a4[1,i,k], np.min([a4[0,i,k], qmp, lac])]), np.max([a4[0,i,k], qmp, lac])]) + #!------------- + #! Recompute A6 + #!------------- + a4[3,i,k] = 3.*(2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k])) + #! Additional constraint to ensure positivity when kord=7 + if (iv == 0 and kord >= 6): + a4[:,:,k] = ppm_limiters(dc[:,k], a4[:,:,k], it, 2) + else: + lmt = kord - 3 + lmt = np.max([0, lmt]) + if (iv == 0): + lmt = np.min([2, lmt]) + + for k in range(2,km-2): + if( kord != 4): + for i in range(i1-1,i2): + a4[3,i,k] = 3.*(2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k])) + + if(kord != 6): + a4[:,:,k] = ppm_limiters(dc[:,k], a4[:,:,k], it, lmt) + + for k in range(km1-1,km): + for i in range(i1-1,i2): + a4[3,i,k] = 3.*(2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k])) + a4[:,:,k] = ppm_limiters(dc[:,k], a4[:,:,k], it, 0) + + return a4 + +def scalar_profile(qs, a4, delp, km, i1, i2, iv, kord, qmin): + #! Optimized vertical profile reconstruction: + #! Latest: Apr 2008 S.-J. 
Lin, NOAA/GFDL + #integer, intent(in):: i1, i2 + #integer, intent(in):: km !< vertical dimension + #integer, intent(in):: iv !< iv =-1: winds iv = 0: positive definite scalars iv = 1: others + #integer, intent(in):: kord + #real, intent(in) :: qs(i1:i2) + #real, intent(in) :: delp(i1:i2,km) !< Layer pressure thickness + #real, intent(inout):: a4(4,i1:i2,km) !< Interpolated values + #real, intent(in):: qmin + #!----------------------------------------------------------------------- + im = i2 - i1 + 1 + extm = np.zeros([im,km],dtype=bool) + ext5 = np.zeros([im,km],dtype=bool) + ext6 = np.zeros([im,km],dtype=bool) + + gam = np.zeros([im,km]) + q = np.zeros([im,km+1]) + d4 = np.zeros([im]) + + #logical, dimension(i1:i2,km):: extm, ext5, ext6 + #real gam(i1:i2,km) + #real q(i1:i2,km+1) + #real d4(i1:i2) + #real bet, a_bot, grat + #real pmp_1, lac_1, pmp_2, lac_2, x0, x1 + #integer i, k, im + + if (iv == -2): + for i in range(0,im): + gam[i,1] = 0.5 + q[i,0] = 1.5*a4[0,i,0] + for k in range(1,km-1): + for i in range(0,im): + grat = delp[i,k-1] / delp[i,k] + bet = 2. + grat + grat - gam[i,k] + q[i,k] = (3.*(a4[0,i,k-1]+a4[0,i,k]) - q[i,k-1])/bet + gam[i,k+1] = grat / bet + for i in range(0,im): + grat = delp[i,km-2] / delp[i,km-1] + q[i,km-1] = (3.*(a4[0,i,km-2]+a4[0,i,km-1]) - grat*qs[i] - q[i,km-2]) / (2. + grat + grat - gam[i,km-1]) + q[i,km] = qs[i] + for k in range(km-2,-1,-1): + for i in range(0,im): + q[i,k] = q[i,k] - gam[i,k+1]*q[i,k+1] + else: + for i in range(0,im): + grat = delp[i,1] / delp[i,0] #! grid ratio + bet = grat*(grat+0.5) + q[i,0] = ((grat+grat)*(grat+1.)*a4[0,i,0] + a4[0,i,1]) / bet + gam[i,0] = ( 1. + grat*(grat+1.5) ) / bet + for k in range(1,km): + for i in range(0,im): + d4[i] = delp[i,k-1] / delp[i,k] + bet = 2. + d4[i] + d4[i] - gam[i,k-1] + q[i,k] = ( 3.*(a4[0,i,k-1]+d4[i]*a4[0,i,k]) - q[i,k-1] )/bet + gam[i,k] = d4[i] / bet + for i in range(0,im): + a_bot = 1. 
+ d4[i]*(d4[i]+1.5) + q[i,km] = (2.*d4[i]*(d4[i]+1.)*a4[0,i,km-1]+a4[0,i,km-2]-a_bot*q[i,km-1]) / ( d4[i]*(d4[i]+0.5) - a_bot*gam[i,km-1]) + for k in range(km-1,-1,-1): + for i in range(0,im): + q[i,k] = q[i,k] - gam[i,k]*q[i,k+1] + + + #!----- Perfectly linear scheme -------------------------------- + if (np.abs(kord) > 16): + for k in range(0,km): + for i in range(0,im): + a4[1,i,k] = q[i,k] + a4[2,i,k] = q[i,k+1] + a4[3,i,k] = 3.*(2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k])) + return a4 + + #!----- Perfectly linear scheme -------------------------------- + #!------------------ + #! Apply constraints + #!------------------ + + #! Apply *large-scale* constraints + for i in range(0,im): + q[i,1] = np.min([q[i,1], np.max([a4[0,i,0], a4[0,i,1]])]) + q[i,1] = np.max([q[i,1], np.min([a4[0,i,0], a4[0,i,1]])]) + + for k in range(1,km): + for i in range(0,im): + gam[i,k] = a4[0,i,k] - a4[0,i,k-1] + + #! Interior: + for k in range(2,km-1): + for i in range(0,im): + if (gam[i,k-1]*gam[i,k+1] > 0.): + #! Apply large-scale constraint to ALL fields if not local max/min + q[i,k] = np.min([q[i,k], np.max([a4[0,i,k-1],a4[0,i,k]])]) + q[i,k] = np.max([q[i,k], np.min([a4[0,i,k-1],a4[0,i,k]])]) + else: + if (gam[i,k-1] > 0): + #! There exists a local max + q[i,k] = np.max([q[i,k], np.min([a4[0,i,k-1],a4[0,i,k]])]) + else: + #! There exists a local min + q[i,k] = np.min([q[i,k], np.max([a4[0,i,k-1],a4[0,i,k]])]) + if (iv == 0): + q[i,k] = np.max([0., q[i,k]]) + + #! Bottom: + for i in range(0,im): + q[i,km-1] = np.min([q[i,km-1], np.max([a4[0,i,km-2], a4[0,i,km-1]])]) + q[i,km-1] = np.max([q[i,km-1], np.min([a4[0,i,km-2], a4[0,i,km-1]])]) + + for k in range(0,km): + for i in range(0,im): + a4[1,i,k] = q[i,k ] + a4[2,i,k] = q[i,k+1] + + for k in range(0,km): + if (k == 0 or k == km-1): + for i in range(0,im): + extm[i,k] = (a4[1,i,k]-a4[0,i,k]) * (a4[2,i,k]-a4[0,i,k]) > 0. + else: + for i in range(0,im): + extm[i,k] = gam[i,k]*gam[i,k+1] < 0. 
+ if ( np.abs(kord) > 9 ): + for i in range(0,im): + x0 = 2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k]) + x1 = np.abs(a4[1,i,k]-a4[2,i,k]) + a4[3,i,k] = 3.*x0 + ext5[i,k] = np.abs(x0) > x1 + ext6[i,k] = np.abs(a4[3,i,k]) > x1 + + #!--------------------------- + #! Apply subgrid constraints: + #!--------------------------- + #! f(s) = AL + s*[(AR-AL) + A6*(1-s)] ( 0 <= s <= 1 ) + #! Top 2 and bottom 2 layers always use monotonic mapping + + if (iv == 0): + for i in range(0,im): + a4[1,i,0] = np.max([0., a4[1,i,0]]) + elif (iv == -1): + for i in range(0,im): + if ( a4[1,i,0]*a4[0,i,0] <= 0. ): + a4[1,i,0] = 0. + elif (iv == 2): + for i in range(0,im): + a4[1,i,0] = a4[0,i,0] + a4[2,i,0] = a4[0,i,0] + a4[3,i,0] = 0. + + if (iv != 2): + for i in range(0,im): + a4[3,i,0] = 3.*(2.*a4[0,i,0] - (a4[1,i,0]+a4[2,i,0])) + a4[:,:,0] = cs_limiters(im, extm[:,0], a4[:,:,0], 1) + + #! k=1 + for i in range(0,im): + a4[3,i,1] = 3.*(2.*a4[0,i,1] - (a4[1,i,1]+a4[2,i,1])) + a4[:,:,1] = cs_limiters(im, extm[:,1], a4[:,:,1], 2) + + #!------------------------------------- + #! Huynh's 2nd constraint for interior: + #!------------------------------------- + for k in range(2,km-2): + if (np.abs(kord) < 9): + for i in range(0,im): + #! Left edges + pmp_1 = a4[0,i,k] - 2.*gam[i,k+1] + lac_1 = pmp_1 + 1.5*gam[i,k+2] + a4[1,i,k] = np.min([np.max([a4[1,i,k], np.min([a4[0,i,k], pmp_1, lac_1])]), np.max([a4[0,i,k], pmp_1, lac_1])]) + #! Right edges + pmp_2 = a4[0,i,k] + 2.*gam[i,k] + lac_2 = pmp_2 - 1.5*gam[i,k-1] + a4[2,i,k] = np.min([np.max([a4[2,i,k], np.min([a4[0,i,k], pmp_2, lac_2])]), np.max([a4[0,i,k], pmp_2, lac_2])]) + + a4[3,i,k] = 3.*(2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k])) + elif (np.abs(kord) == 9): + for i in range(0,im): + if (extm[i,k] and extm[i,k-1]): + #! grid-scale 2-delta-z wave detected + a4[1,i,k] = a4[0,i,k] + a4[2,i,k] = a4[0,i,k] + a4[3,i,k] = 0. + elif (extm[i,k] and extm[i,k+1]): + #! 
grid-scale 2-delta-z wave detected + a4[1,i,k] = a4[0,i,k] + a4[2,i,k] = a4[0,i,k] + a4[3,i,k] = 0. + elif (extm[i,k] and a4[0,i,k] < qmin): + #! grid-scale 2-delta-z wave detected + a4[1,i,k] = a4[0,i,k] + a4[2,i,k] = a4[0,i,k] + a4[3,i,k] = 0. + else: + a4[3,i,k] = 3.*(2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k])) + #! Check within the smooth region if subgrid profile is non-monotonic + if(np.abs(a4[3,i,k]) > np.abs(a4[1,i,k]-a4[2,i,k])): + pmp_1 = a4[0,i,k] - 2.*gam[i,k+1] + lac_1 = pmp_1 + 1.5*gam[i,k+2] + a4[1,i,k] = np.min([np.max([a4[1,i,k], np.min([a4[0,i,k], pmp_1, lac_1])]), np.max([a4[0,i,k], pmp_1, lac_1])]) + pmp_2 = a4[0,i,k] + 2.*gam[i,k] + lac_2 = pmp_2 - 1.5*gam[i,k-1] + a4[2,i,k] = np.min([np.max([a4[2,i,k], np.min([a4[0,i,k], pmp_2, lac_2])]), np.max([a4[0,i,k], pmp_2, lac_2])]) + a4[3,i,k] = 3.*(2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k])) + + elif (np.abs(kord) == 10): + for i in range(0,im): + if (ext5[i,k]): + if (ext5[i,k-1] or ext5[i,k+1]): + a4[1,i,k] = a4[0,i,k] + a4[2,i,k] = a4[0,i,k] + elif (ext6[i,k-1] or ext6[i,k+1]): + pmp_1 = a4[0,i,k] - 2.*gam[i,k+1] + lac_1 = pmp_1 + 1.5*gam[i,k+2] + a4[1,i,k] = np.min([np.max([a4[1,i,k], np.min([a4[0,i,k], pmp_1, lac_1])]), np.max([a4[0,i,k], pmp_1, lac_1])]) + pmp_2 = a4[1,i,k] + 2.*gam[i,k] + lac_2 = pmp_2 - 1.5*gam[i,k-1] + a4[2,i,k] = np.min([np.max([a4[2,i,k], np.min([a4[0,i,k], pmp_2, lac_2])]), np.max([a4[0,i,k], pmp_2, lac_2])]) + elif (ext6[i,k]): + if (ext5[i,k-1] or ext5[i,k+1]): + pmp_1 = a4[0,i,k] - 2.*gam[i,k+1] + lac_1 = pmp_1 + 1.5*gam[i,k+2] + a4[1,i,k] = np.min([np.max([a4[1,i,k], np.min([a4[0,i,k], pmp_1, lac_1])]), np.max([a4[0,i,k], pmp_1, lac_1])]) + pmp_2 = a4[0,i,k] + 2.*gam[i,k] + lac_2 = pmp_2 - 1.5*gam[i,k-1] + a4[2,i,k] = np.min([np.max([a4[2,i,k], np.min([a4[0,i,k], pmp_2, lac_2])]), np.max([a4[0,i,k], pmp_2, lac_2])]) + for i in range(0,im): + a4[3,i,k] = 3.*(2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k])) + elif (np.abs(kord) == 12): + for i in range(0,im): + if (extm[i,k]): + a4[1,i,k] = 
a4[0,i,k] + a4[2,i,k] = a4[0,i,k] + a4[3,i,k] = 0. + else: #! not a local extremum + a4[3,i,k] = 6.*a4[0,i,k] - 3.*(a4[1,i,k]+a4[2,i,k]) + #! Check within the smooth region if subgrid profile is non-monotonic + if (np.abs(a4[3,i,k]) > np.abs(a4[1,i,k]-a4[2,i,k])): + pmp_1 = a4[0,i,k] - 2.*gam[i,k+1] + lac_1 = pmp_1 + 1.5*gam[i,k+2] + a4[1,i,k] = np.min([np.max([a4[1,i,k], np.min([a4[0,i,k], pmp_1, lac_1])]), np.max([a4[0,i,k], pmp_1, lac_1])]) + pmp_2 = a4[0,i,k] + 2.*gam[i,k] + lac_2 = pmp_2 - 1.5*gam[i,k-1] + a4[2,i,k] = np.min([np.max([a4[2,i,k], np.min([a4[0,i,k], pmp_2, lac_2])]), np.max([a4[0,i,k], pmp_2, lac_2])]) + a4[3,i,k] = 6.*a4[0,i,k] - 3.*(a4[1,i,k]+a4[2,i,k]) + elif (np.abs(kord) == 13): + for i in range(0,im): + if (ext6[i,k]): + if (ext6[i,k-1] and ext6[i,k+1]): + #! grid-scale 2-delta-z wave detected + a4[1,i,k] = a4[0,i,k] + a4[2,i,k] = a4[0,i,k] + for i in range(0,im): + a4[3,i,k] = 3.*(2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k])) + elif (np.abs(kord) == 14): + for i in range(0,im): + a4[3,i,k] = 3.*(2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k])) + elif (np.abs(kord) == 15): #! 
Revised abs(kord)=9 scheme + for i in range(0,im): + if (ext5[i,k] and ext5[i,k-1]): + a4[1,i,k] = a4[0,i,k] + a4[2,i,k] = a4[0,i,k] + elif (ext5[i,k] and ext5[i,k+1]): + a4[1,i,k] = a4[0,i,k] + a4[2,i,k] = a4[0,i,k] + elif (ext5[i,k] and a4[0,i,k] < qmin): + a4[1,i,k] = a4[0,i,k] + a4[2,i,k] = a4[0,i,k] + elif (ext6[i,k]): + pmp_1 = a4[0,i,k] - 2.*gam[i,k+1] + lac_1 = pmp_1 + 1.5*gam[i,k+2] + a4[1,i,k] = np.min([np.max([a4[1,i,k], np.min([a4[0,i,k], pmp_1, lac_1])]), np.max([a4[0,i,k], pmp_1, lac_1])]) + pmp_2 = a4[0,i,k] + 2.*gam[i,k] + lac_2 = pmp_2 - 1.5*gam[i,k-1] + a4[2,i,k] = np.min([np.max([a4[2,i,k], np.min([a4[0,i,k], pmp_2, lac_2])]), np.max([a4[0,i,k], pmp_2, lac_2])]) + for i in range(0,im): + a4[3,i,k] = 3.*(2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k])) + elif (np.abs(kord) == 16): + for i in range(0,im): + if (ext5[i,k]): + if (ext5[i,k-1] or ext5[i,k+1]): + a4[1,i,k] = a4[0,i,k] + a4[2,i,k] = a4[0,i,k] + elif (ext6[i,k-1] or ext6[i,k+1]): + #! Left edges + pmp_1 = a4[0,i,k] - 2.*gam[i,k+1] + lac_1 = pmp_1 + 1.5*gam[i,k+2] + a4[1,i,k] = np.min([np.max([a4[1,i,k], np.min([a4[0,i,k], pmp_1, lac_1])]), np.max([a4[0,i,k], pmp_1, lac_1])]) + #! Right edges + pmp_2 = a4[0,i,k] + 2.*gam[i,k] + lac_2 = pmp_2 - 1.5*gam[i,k-1] + a4[2,i,k] = np.min([np.max([a4[2,i,k], np.min([a4[0,i,k], pmp_2, lac_2])]), np.max([a4[0,i,k], pmp_2, lac_2])]) + for i in range(0,im): + a4[3,i,k] = 3.*(2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k])) + else: #! kord = 11, 13 + for i in range(0,im): + if (ext5[i,k] and (ext5[i,k-1] or ext5[i,k+1] or a4[0,i,k] < qmin)): + #! Noisy region: + a4[1,i,k] = a4[0,i,k] + a4[2,i,k] = a4[0,i,k] + a4[3,i,k] = 0. + else: + a4[3,i,k] = 3.*(2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k])) + + #! Additional constraint to ensure positivity + if (iv == 0): + a4[:,:,k] = cs_limiters(im, extm[:,k], a4[:,:,k], 0) + + ####end for k in range(3,km-2) + + #!---------------------------------- + #! 
Bottom layer subgrid constraints: + #!---------------------------------- + if (iv == 0): + for i in range(0,im): + a4[2,i,km-1] = np.max([0., a4[2,i,km-1]]) + elif (iv == -1): + for i in range(0,im): + if (a4[2,i,km-1]*a4[0,i,km-1] <= 0.): + a4[2,i,km-1] = 0. + + for k in range(km-2,km): + for i in range(0,im): + a4[3,i,k] = 3.*(2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k])) + if (k == (km-2)): + a4[:,:,k] = cs_limiters(im, extm[:,k], a4[:,:,k], 2) + if (k == km-1): + a4[:,:,k] = cs_limiters(im, extm[:,k], a4[:,:,k], 1) + + return a4 + +def map_scalar(km, pe1, q1, qs, kn, pe2, i1, i2, iv, kord, q_min): + #! iv=1 + #integer, intent(in) :: i1 !< Starting longitude + #integer, intent(in) :: i2 !< Finishing longitude + #integer, intent(in) :: iv !< Mode: 0 == constituents 1 == temp 2 == remap temp with cs scheme + #integer, intent(in) :: kord !< Method order + #integer, intent(in) :: km !< Original vertical dimension + #integer, intent(in) :: kn !< Target vertical dimension + #real, intent(in) :: qs(i1:i2) !< bottom BC + #real, intent(in) :: pe1(i1:i2,km+1) !< pressure at layer edges from model top to bottom surface in the original vertical coordinate + #real, intent(in) :: pe2(i1:i2,kn+1) !< pressure at layer edges from model top to bottom surface in the new vertical coordinate + #real, intent(in) :: q1(ibeg:iend,km) !< Field input + #! INPUT/OUTPUT PARAMETERS: + #real, intent(inout):: q2(ibeg:iend,kn) !< Field output + + im = i2 - i1 + 1 + q2 = np.zeros([im,kn]) + #real, intent(in):: q_min + + #! DESCRIPTION: + #! IV = 0: constituents + #! pe1: pressure at layer edges (from model top to bottom surface) + #! in the original vertical coordinate + #! pe2: pressure at layer edges (from model top to bottom surface) + #! in the new vertical coordinate + #! LOCAL VARIABLES: + dp1 = np.zeros([im,km]) + q4 = np.zeros([4,im,km]) + #real dp1(i1:i2,km) + #real q4(4,i1:i2,km) + #real pl, pr, qsum, dp, esl + #integer i, k, l, m, k0 + qsum = 0. 
+ + for k in range(0,km): + for i in range(0,im): + dp1[i,k] = pe1[i,k+1] - pe1[i,k] + q4[0,i,k] = q1[i,k] + + #! Compute vertical subgrid distribution + if (kord > 7): + #print qs, q4, dp1, km, i1, i2, iv, kord, q_min + q4 = scalar_profile(qs, q4, dp1, km, i1, i2, iv, kord, q_min) + else: + q4 = ppm_profile(q4, dp1, km, i1, i2, iv, kord) + + for i in range(0,im): + k0 = 0 + for k in range(0,kn): + next_k = False + for l in range(k0,km): #AKA l-loop + #! locate the top edge: pe2(i,k) + if (pe2[i,k] >= pe1[i,l] and pe2[i,k] <= pe1[i,l+1]): + pl = (pe2[i,k]-pe1[i,l]) / dp1[i,l] + if (pe2[i,k+1] <= pe1[i,l+1]): + #! entire new grid is within the original grid + pr = (pe2[i,k+1]-pe1[i,l]) / dp1[i,l] + q2[i,k] = q4[1,i,l] + 0.5*(q4[3,i,l]+q4[2,i,l]-q4[1,i,l]) * (pr+pl)-q4[3,i,l]*r3*(pr*(pr+pl)+pl**2) + k0 = l + next_k = True + break + #goto 555 #(next iteration of "for k in range(0,kn):" loop) + else: + #! Fractional area... + qsum = (pe1[i,l+1]-pe2[i,k])*(q4[1,i,l]+0.5*(q4[3,i,l]+q4[2,i,l]-q4[1,i,l])*(1.+pl)-q4[3,i,l]*(r3*(1.+pl*(1.+pl)))) + for m in range(l+1,km): #AKA m-loop + #! locate the bottom edge: pe2(i,k+1) + if (pe2[i,k+1] > pe1[i,m+1]): + #! 
Whole layer + qsum = qsum + dp1[i,m]*q4[0,i,m] + else: + dp = pe2[i,k+1]-pe1[i,m] + esl = dp / dp1[i,m] + qsum = qsum + dp*(q4[1,i,m]+0.5*esl*(q4[2,i,m]-q4[1,i,m]+q4[3,i,m]*(1.-r23*esl))) + k0 = m + #goto 123 #(exit out of l-loop) + break + else: + #GJF: the following if statement is not in the fv_mapz, but it captures the case where pe2[kn] > pe1[km] where the m loop is not entered; without this, the lowest layer values are weird + if (l+1 == km): + dp = pe2[i,kn]-pe1[i,km] + esl = dp / dp1[i,km-1] + qsum = qsum + dp*(q4[1,i,km-1]+0.5*esl*(q4[2,i,km-1]-q4[1,i,km-1]+q4[3,i,km-1]*(1.-r23*esl))) + break #handles goto 123 statement below (exits out of l-loop even if m-loop successfully completes) + #continue + break + #goto 123 #(right before going to next iteration of "for k in range(1,kn):" loop) + if not next_k: + q2[i,k] = qsum / (pe2[i,k+1] - pe2[i,k]) #AKA label 123 + + return q2 + +def map1_q2 (km, pe1, q1, kn, pe2, dp2, i1, i2, iv, kord, q_min): + #! INPUT PARAMETERS: + #integer, intent(in) :: i1, i2 + #integer, intent(in) :: iv !< Mode: 0 == constituents 1 == ??? + #integer, intent(in) :: kord + #integer, intent(in) :: km !< Original vertical dimension + #integer, intent(in) :: kn !< Target vertical dimension + #real, intent(in) :: pe1(i1:i2,km+1) !< pressure at layer edges from model top to bottom surface in the original vertical coordinate + #real, intent(in) :: pe2(i1:i2,kn+1) !< pressure at layer edges from model top to bottom surface in the new vertical coordinate + #real, intent(in) :: q1(i1:i2,km) !< Field input + #real, intent(in) :: dp2(i1:i2,kn) + #real, intent(in) :: q_min + #! INPUT/OUTPUT PARAMETERS: + im = i2 - i1 + 1 + q2 = np.zeros([im,kn]) + #real, intent(inout):: q2(i1:i2,kn) !< Field output + #! LOCAL VARIABLES: + im = i2 - i1 + 1 + qs = np.zeros([im]) + dp1 = np.zeros([im,km]) + q4 = np.zeros([4,im,km]) + #real qs(i1:i2) + #real dp1(i1:i2,km) + #real q4(4,i1:i2,km) + #real pl, pr, qsum, dp, esl + #integer i, k, l, m, k0 + qsum = 0. 
+ for k in range(0,km): + for i in range(0,im): + dp1[i,k] = pe1[i,k+1] - pe1[i,k] + q4[0,i,k] = q1[i,k] + + #! Compute vertical subgrid distribution + if (kord > 7): + q4 = scalar_profile (qs, q4, dp1, km, i1, i2, iv, kord, q_min) + else: + q4 = ppm_profile (q4, dp1, km, i1, i2, iv, kord) + + #! Mapping + for i in range(0,im): + k0 = 0 + for k in range(0,kn): + next_k = False + #print 'k new = ',k + for l in range(k0,km): + #print 'l old = ',l + #! locate the top edge: pe2(i,k) + if (pe2[i,k] >= pe1[i,l] and pe2[i,k] <= pe1[i,l+1]): + pl = (pe2[i,k]-pe1[i,l]) / dp1[i,l] + if (pe2[i,k+1] <= pe1[i,l+1]): + #! entire new grid is within the original grid + pr = (pe2[i,k+1]-pe1[i,l]) / dp1[i,l] + q2[i,k] = q4[1,i,l] + 0.5*(q4[3,i,l]+q4[2,i,l]-q4[1,i,l])*(pr+pl)-q4[3,i,l]*r3*(pr*(pr+pl)+pl**2) + k0 = l + next_k = True + #print 'new grid within old; q2 = ', q2[i,k] + break + #goto 555 #next k-loop iteration + else: + #! Fractional area... + #print k, (pe1[i,l+1]-pe2[i,k]), (q4[1,i,l]+0.5*(q4[3,i,l]+q4[2,i,l]-q4[1,i,l])*(1.+pl)-q4[3,i,l]*(r3*(1.+pl*(1.+pl)))), dp2[i,k] + qsum = (pe1[i,l+1]-pe2[i,k])*(q4[1,i,l]+0.5*(q4[3,i,l]+q4[2,i,l]-q4[1,i,l])*(1.+pl)-q4[3,i,l]*(r3*(1.+pl*(1.+pl)))) + for m in range(l+1,km): + #! locate the bottom edge: pe2(i,k+1) + if (pe2[i,k+1] > pe1[i,m+1]): + #! Whole layer.. 
+ qsum = qsum + dp1[i,m]*q4[0,i,m] + #print 'whole layer, m = ',m + else: + dp = pe2[i,k+1]-pe1[i,m] + esl = dp / dp1[i,m] + qsum = qsum + dp*(q4[1,i,m]+0.5*esl*(q4[2,i,m]-q4[1,i,m]+q4[3,i,m]*(1.-r23*esl))) + k0 = m + #print 'partial layer, m = ',m + #goto 123 #end l-loop + break + else: + #GJF: the following if statement is not in the fv_mapz, but it captures the case where pe2[kn] > pe1[km] where the m loop is not entered; without this, the lowest layer values are weird + if (l+1 == km): + dp = pe2[i,kn]-pe1[i,km] + esl = dp / dp1[i,km-1] + qsum = qsum + dp*(q4[1,i,km-1]+0.5*esl*(q4[2,i,km-1]-q4[1,i,km-1]+q4[3,i,km-1]*(1.-r23*esl))) + break + + break + #goto 123 #end l-loop + if not next_k: + q2[i,k] = qsum / dp2[i,k] #formerly labeled 123 + #print 'result q2 ', q2[i,k] + #print q2 + #exit() + return q2 + +def cs_profile(qs, a4, delp, km, i1, i2, iv, kord): + #! Optimized vertical profile reconstruction: + #! Latest: Apr 2008 S.-J. Lin, NOAA/GFDL + #integer, intent(in):: i1, i2 + #integer, intent(in):: km !< vertical dimension + #integer, intent(in):: iv !< iv =-1: winds iv = 0: positive definite scalars iv = 1: others + #integer, intent(in):: kord + #real, intent(in) :: qs(i1:i2) + #real, intent(in) :: delp(i1:i2,km) !< Layer pressure thickness + #real, intent(inout):: a4(4,i1:i2,km) !< Interpolated values + #real, intent(in):: qmin + #!----------------------------------------------------------------------- + im = i2 - i1 + 1 + extm = np.zeros([im,km],dtype=bool) + ext5 = np.zeros([im,km],dtype=bool) + ext6 = np.zeros([im,km],dtype=bool) + + gam = np.zeros([im,km]) + q = np.zeros([im,km+1]) + d4 = np.zeros([im]) + + #logical, dimension(i1:i2,km):: extm, ext5, ext6 + #real gam(i1:i2,km) + #real q(i1:i2,km+1) + #real d4(i1:i2) + #real bet, a_bot, grat + #real pmp_1, lac_1, pmp_2, lac_2, x0, x1 + #integer i, k, im + + if (iv == -2): + for i in range(0,im): + gam[i,1] = 0.5 + q[i,0] = 1.5*a4[0,i,0] + for k in range(1,km-1): + for i in range(0,im): + grat = 
delp[i,k-1] / delp[i,k] + bet = 2. + grat + grat - gam[i,k] + q[i,k] = (3.*(a4[0,i,k-1]+a4[0,i,k]) - q[i,k-1])/bet + gam[i,k+1] = grat / bet + for i in range(0,im): + grat = delp[i,km-2] / delp[i,km-1] + q[i,km-1] = (3.*(a4[0,i,km-2]+a4[0,i,km-1]) - grat*qs[i] - q[i,km-2]) / (2. + grat + grat - gam[i,km-1]) + q[i,km] = qs[i] + for k in range(km-2,-1,-1): + for i in range(0,im): + q[i,k] = q[i,k] - gam[i,k+1]*q[i,k+1] + else: + for i in range(0,im): + grat = delp[i,1] / delp[i,0] #! grid ratio + bet = grat*(grat+0.5) + q[i,0] = ((grat+grat)*(grat+1.)*a4[0,i,0] + a4[0,i,1]) / bet + gam[i,0] = ( 1. + grat*(grat+1.5) ) / bet + for k in range(1,km): + for i in range(0,im): + d4[i] = delp[i,k-1] / delp[i,k] + bet = 2. + d4[i] + d4[i] - gam[i,k-1] + q[i,k] = ( 3.*(a4[0,i,k-1]+d4[i]*a4[0,i,k]) - q[i,k-1] )/bet + gam[i,k] = d4[i] / bet + for i in range(0,im): + a_bot = 1. + d4[i]*(d4[i]+1.5) + q[i,km] = (2.*d4[i]*(d4[i]+1.)*a4[0,i,km-1]+a4[0,i,km-2]-a_bot*q[i,km-1]) / ( d4[i]*(d4[i]+0.5) - a_bot*gam[i,km-1]) + for k in range(km-1,-1,-1): + for i in range(0,im): + q[i,k] = q[i,k] - gam[i,k]*q[i,k+1] + + + #!----- Perfectly linear scheme -------------------------------- + if (np.abs(kord) > 16): + for k in range(0,km): + for i in range(0,im): + a4[1,i,k] = q[i,k] + a4[2,i,k] = q[i,k+1] + a4[3,i,k] = 3.*(2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k])) + return a4 + + #!----- Perfectly linear scheme -------------------------------- + #!------------------ + #! Apply constraints + #!------------------ + + #! Apply *large-scale* constraints + for i in range(0,im): + q[i,1] = np.min([q[i,1], np.max([a4[0,i,0], a4[0,i,1]])]) + q[i,1] = np.max([q[i,1], np.min([a4[0,i,0], a4[0,i,1]])]) + + for k in range(1,km): + for i in range(0,im): + gam[i,k] = a4[0,i,k] - a4[0,i,k-1] + + #! Interior: + for k in range(2,km-1): + for i in range(0,im): + if (gam[i,k-1]*gam[i,k+1] > 0.): + #! 
Apply large-scale constraint to ALL fields if not local max/min + q[i,k] = np.min([q[i,k], np.max([a4[0,i,k-1],a4[0,i,k]])]) + q[i,k] = np.max([q[i,k], np.min([a4[0,i,k-1],a4[0,i,k]])]) + else: + if (gam[i,k-1] > 0): + #! There exists a local max + q[i,k] = np.max([q[i,k], np.min([a4[0,i,k-1],a4[0,i,k]])]) + else: + #! There exists a local min + q[i,k] = np.min([q[i,k], np.max([a4[0,i,k-1],a4[0,i,k]])]) + if (iv == 0): + q[i,k] = np.max([0., q[i,k]]) + + #! Bottom: + for i in range(0,im): + q[i,km-1] = np.min([q[i,km-1], np.max([a4[0,i,km-2], a4[0,i,km-1]])]) + q[i,km-1] = np.max([q[i,km-1], np.min([a4[0,i,km-2], a4[0,i,km-1]])]) + + for k in range(0,km): + for i in range(0,im): + a4[1,i,k] = q[i,k ] + a4[2,i,k] = q[i,k+1] + + for k in range(0,km): + if (k == 0 or k == km-1): + for i in range(0,im): + extm[i,k] = (a4[1,i,k]-a4[0,i,k]) * (a4[2,i,k]-a4[0,i,k]) > 0. + else: + for i in range(0,im): + extm[i,k] = gam[i,k]*gam[i,k+1] < 0. + if ( np.abs(kord) > 9 ): + for i in range(0,im): + x0 = 2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k]) + x1 = np.abs(a4[1,i,k]-a4[2,i,k]) + a4[3,i,k] = 3.*x0 + ext5[i,k] = np.abs(x0) > x1 + ext6[i,k] = np.abs(a4[3,i,k]) > x1 + + #!--------------------------- + #! Apply subgrid constraints: + #!--------------------------- + #! f(s) = AL + s*[(AR-AL) + A6*(1-s)] ( 0 <= s <= 1 ) + #! Top 2 and bottom 2 layers always use monotonic mapping + + if (iv == 0): + for i in range(0,im): + a4[1,i,0] = np.max([0., a4[1,i,0]]) + elif (iv == -1): + for i in range(0,im): + if ( a4[1,i,0]*a4[0,i,0] <= 0. ): + a4[1,i,0] = 0. + elif (iv == 2): + for i in range(0,im): + a4[1,i,0] = a4[0,i,0] + a4[2,i,0] = a4[0,i,0] + a4[3,i,0] = 0. + + if (iv != 2): + for i in range(0,im): + a4[3,i,0] = 3.*(2.*a4[0,i,0] - (a4[1,i,0]+a4[2,i,0])) + a4[:,:,0] = cs_limiters(im, extm[:,0], a4[:,:,0], 1) + + #! 
k=1 + for i in range(0,im): + a4[3,i,1] = 3.*(2.*a4[0,i,1] - (a4[1,i,1]+a4[2,i,1])) + a4[:,:,1] = cs_limiters(im, extm[:,1], a4[:,:,1], 2) + + #!------------------------------------- + #! Huynh's 2nd constraint for interior: + #!------------------------------------- + for k in range(2,km-2): + if (np.abs(kord) < 9): + for i in range(0,im): + #! Left edges + pmp_1 = a4[0,i,k] - 2.*gam[i,k+1] + lac_1 = pmp_1 + 1.5*gam[i,k+2] + a4[1,i,k] = np.min([np.max([a4[1,i,k], np.min([a4[0,i,k], pmp_1, lac_1])]), np.max([a4[0,i,k], pmp_1, lac_1])]) + #! Right edges + pmp_2 = a4[0,i,k] + 2.*gam[i,k] + lac_2 = pmp_2 - 1.5*gam[i,k-1] + a4[2,i,k] = np.min([np.max([a4[2,i,k], np.min([a4[0,i,k], pmp_2, lac_2])]), np.max([a4[0,i,k], pmp_2, lac_2])]) + + a4[3,i,k] = 3.*(2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k])) + elif (np.abs(kord) == 9): + for i in range(0,im): + if (extm[i,k] and extm[i,k-1]): + #! grid-scale 2-delta-z wave detected + a4[1,i,k] = a4[0,i,k] + a4[2,i,k] = a4[0,i,k] + a4[3,i,k] = 0. + elif (extm[i,k] and extm[i,k+1]): + #! grid-scale 2-delta-z wave detected + a4[1,i,k] = a4[0,i,k] + a4[2,i,k] = a4[0,i,k] + a4[3,i,k] = 0. + else: + a4[3,i,k] = 6.*a4[0,i,k] - 3.*(a4[1,i,k]+a4[2,i,k]) + #! 
Check within the smooth region if subgrid profile is non-monotonic + if(np.abs(a4[3,i,k]) > np.abs(a4[1,i,k]-a4[2,i,k])): + pmp_1 = a4[0,i,k] - 2.*gam[i,k+1] + lac_1 = pmp_1 + 1.5*gam[i,k+2] + a4[1,i,k] = np.min([np.max([a4[1,i,k], np.min([a4[0,i,k], pmp_1, lac_1])]), np.max([a4[0,i,k], pmp_1, lac_1])]) + pmp_2 = a4[0,i,k] + 2.*gam[i,k] + lac_2 = pmp_2 - 1.5*gam[i,k-1] + a4[2,i,k] = np.min([np.max([a4[2,i,k], np.min([a4[0,i,k], pmp_2, lac_2])]), np.max([a4[0,i,k], pmp_2, lac_2])]) + a4[3,i,k] = 6.*a4[0,i,k] - 3.*(a4[1,i,k]+a4[2,i,k]) + + elif (np.abs(kord) == 10): + for i in range(0,im): + if (ext5[i,k]): + if (ext5[i,k-1] or ext5[i,k+1]): + a4[1,i,k] = a4[0,i,k] + a4[2,i,k] = a4[0,i,k] + elif (ext6[i,k-1] or ext6[i,k+1]): + pmp_1 = a4[0,i,k] - 2.*gam[i,k+1] + lac_1 = pmp_1 + 1.5*gam[i,k+2] + a4[1,i,k] = np.min([np.max([a4[1,i,k], np.min([a4[0,i,k], pmp_1, lac_1])]), np.max([a4[0,i,k], pmp_1, lac_1])]) + pmp_2 = a4[1,i,k] + 2.*gam[i,k] + lac_2 = pmp_2 - 1.5*gam[i,k-1] + a4[2,i,k] = np.min([np.max([a4[2,i,k], np.min([a4[0,i,k], pmp_2, lac_2])]), np.max([a4[0,i,k], pmp_2, lac_2])]) + elif (ext6[i,k]): + if (ext5[i,k-1] or ext5[i,k+1]): + pmp_1 = a4[0,i,k] - 2.*gam[i,k+1] + lac_1 = pmp_1 + 1.5*gam[i,k+2] + a4[1,i,k] = np.min([np.max([a4[1,i,k], np.min([a4[0,i,k], pmp_1, lac_1])]), np.max([a4[0,i,k], pmp_1, lac_1])]) + pmp_2 = a4[0,i,k] + 2.*gam[i,k] + lac_2 = pmp_2 - 1.5*gam[i,k-1] + a4[2,i,k] = np.min([np.max([a4[2,i,k], np.min([a4[0,i,k], pmp_2, lac_2])]), np.max([a4[0,i,k], pmp_2, lac_2])]) + for i in range(0,im): + a4[3,i,k] = 3.*(2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k])) + elif (np.abs(kord) == 12): + for i in range(0,im): + if (extm[i,k]): + a4[1,i,k] = a4[0,i,k] + a4[2,i,k] = a4[0,i,k] + a4[3,i,k] = 0. + else: #! not a local extremum + a4[3,i,k] = 6.*a4[0,i,k] - 3.*(a4[1,i,k]+a4[2,i,k]) + #! 
Check within the smooth region if subgrid profile is non-monotonic + if (np.abs(a4[3,i,k]) > np.abs(a4[1,i,k]-a4[2,i,k])): + pmp_1 = a4[0,i,k] - 2.*gam[i,k+1] + lac_1 = pmp_1 + 1.5*gam[i,k+2] + a4[1,i,k] = np.min([np.max([a4[1,i,k], np.min([a4[0,i,k], pmp_1, lac_1])]), np.max([a4[0,i,k], pmp_1, lac_1])]) + pmp_2 = a4[0,i,k] + 2.*gam[i,k] + lac_2 = pmp_2 - 1.5*gam[i,k-1] + a4[2,i,k] = np.min([np.max([a4[2,i,k], np.min([a4[0,i,k], pmp_2, lac_2])]), np.max([a4[0,i,k], pmp_2, lac_2])]) + a4[3,i,k] = 6.*a4[0,i,k] - 3.*(a4[1,i,k]+a4[2,i,k]) + elif (np.abs(kord) == 13): + for i in range(0,im): + if (ext6[i,k]): + if (ext6[i,k-1] and ext6[i,k+1]): + #! grid-scale 2-delta-z wave detected + a4[1,i,k] = a4[0,i,k] + a4[2,i,k] = a4[0,i,k] + for i in range(0,im): + a4[3,i,k] = 3.*(2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k])) + elif (np.abs(kord) == 14): + for i in range(0,im): + a4[3,i,k] = 3.*(2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k])) + elif (np.abs(kord) == 15): #! Revised abs(kord)=9 scheme + for i in range(0,im): + if (ext5[i,k] ): + if (ext5[i,k-1] or ext5[i,k+1]): + a4[1,i,k] = a4[0,i,k] + a4[2,i,k] = a4[0,i,k] + elif (ext6[i,k]): + #! Check within the smooth region if subgrid profile is non-monotonic + pmp_1 = a4[0,i,k] - 2.*gam[i,k+1] + lac_1 = pmp_1 + 1.5*gam[i,k+2] + a4[1,i,k] = np.min([np.max([a4[1,i,k], np.min([a4[0,i,k], pmp_1, lac_1])]), np.max([a4[0,i,k], pmp_1, lac_1])]) + pmp_2 = a4[0,i,k] + 2.*gam[i,k] + lac_2 = pmp_2 - 1.5*gam[i,k-1] + a4[2,i,k] = np.min([np.max([a4[2,i,k], np.min([a4[0,i,k], pmp_2, lac_2])]), np.max([a4[0,i,k], pmp_2, lac_2])]) + for i in range(0,im): + a4[3,i,k] = 3.*(2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k])) + elif (np.abs(kord) == 16): + for i in range(0,im): + if (ext5[i,k]): + if (ext5[i,k-1] or ext5[i,k+1]): + a4[1,i,k] = a4[0,i,k] + a4[2,i,k] = a4[0,i,k] + elif (ext6[i,k-1] or ext6[i,k+1]): + #! 
Left edges + pmp_1 = a4[0,i,k] - 2.*gam[i,k+1] + lac_1 = pmp_1 + 1.5*gam[i,k+2] + a4[1,i,k] = np.min([np.max([a4[1,i,k], np.min([a4[0,i,k], pmp_1, lac_1])]), np.max([a4[0,i,k], pmp_1, lac_1])]) + #! Right edges + pmp_2 = a4[0,i,k] + 2.*gam[i,k] + lac_2 = pmp_2 - 1.5*gam[i,k-1] + a4[2,i,k] = np.min([np.max([a4[2,i,k], np.min([a4[0,i,k], pmp_2, lac_2])]), np.max([a4[0,i,k], pmp_2, lac_2])]) + for i in range(0,im): + a4[3,i,k] = 3.*(2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k])) + else: #! kord = 11 + for i in range(0,im): + if (ext5[i,k] and (ext5[i,k-1] or ext5[i,k+1])): + #! Noisy region: + a4[1,i,k] = a4[0,i,k] + a4[2,i,k] = a4[0,i,k] + a4[3,i,k] = 0. + else: + a4[3,i,k] = 3.*(2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k])) + + #! Additional constraint to ensure positivity + if (iv == 0): + a4[:,:,k] = cs_limiters(im, extm[:,k], a4[:,:,k], 0) + + ####end for k in range(3,km-2) + + #!---------------------------------- + #! Bottom layer subgrid constraints: + #!---------------------------------- + if (iv == 0): + for i in range(0,im): + a4[2,i,km-1] = np.max([0., a4[2,i,km-1]]) + elif (iv == -1): + for i in range(0,im): + if (a4[2,i,km-1]*a4[0,i,km-1] <= 0.): + a4[2,i,km-1] = 0. + + for k in range(km-2,km): + for i in range(0,im): + a4[3,i,k] = 3.*(2.*a4[0,i,k] - (a4[1,i,k]+a4[2,i,k])) + if (k == (km-2)): + a4[:,:,k] = cs_limiters(im, extm[:,k], a4[:,:,k], 2) + if (k == km-1): + a4[:,:,k] = cs_limiters(im, extm[:,k], a4[:,:,k], 1) + + return a4 + +def mappm (km, pe1, q1, kn, pe2, i1, i2, iv, kord, ptop): + #! IV = 0: constituents + #! IV = 1: potential temp + #! IV =-1: winds + + #! Mass flux preserving mapping: q1(im,km) -> q2(im,kn) + + #! pe1: pressure at layer edges (from model top to bottom surface) + #! in the original vertical coordinate + #! pe2: pressure at layer edges (from model top to bottom surface) + #! 
in the new vertical coordinate + + # integer, intent(in):: i1, i2, km, kn, kord, iv + # real, intent(in ):: pe1(i1:i2,km+1), pe2(i1:i2,kn+1) !< pe1: pressure at layer edges from model top to bottom + # !! surface in the ORIGINAL vertical coordinate + # !< pe2: pressure at layer edges from model top to bottom + # !! surface in the NEW vertical coordinate + #! Mass flux preserving mapping: q1(im,km) -> q2(im,kn) + # real, intent(in ):: q1(i1:i2,km) + # real, intent(out):: q2(i1:i2,kn) + # real, intent(IN) :: ptop + #! local + # real qs(i1:i2) + # real dp1(i1:i2,km) + # real a4(4,i1:i2,km) + # integer i, k, l + # integer k0, k1 + # real pl, pr, tt, delp, qsum, dpsum, esl + im = i2 - i1 + 1 + dp1 = np.zeros([im,km]) + a4 = np.zeros([4,im,km]) + q2 = np.zeros([im,kn]) + qs = np.zeros(im) + + for k in range(0,km): + for i in range(0,im): + dp1[i,k] = pe1[i,k+1] - pe1[i,k] + a4[0,i,k] = q1[i,k] + + if ( kord > 7 ): + a4 = cs_profile( qs, a4, dp1, km, i1, i2, iv, kord ) + else: + a4 = ppm_profile( a4, dp1, km, i1, i2, iv, kord ) + + #!------------------------------------ + #! Lowest layer: constant distribution + #!------------------------------------ + ##ifdef NGGPS_SUBMITTED + # do i=i1,i2 + # a4(2,i,km) = q1(i,km) + # a4(3,i,km) = q1(i,km) + # a4(4,i,km) = 0. + # enddo + #endif + qsum = 0. + for i in range(0,im): + k0 = 0 + for k in range(0,kn): + next_k = False + if (pe2[i,k] <= pe1[i,0]): + #! above old ptop + q2[i,k] = q1[i,0] + elif (pe2[i,k] >= pe1[i,km]): + #! Entire grid below old ps + ##ifdef NGGPS_SUBMITTED + # q2(i,k) = a4(3,i,km) ! this is not good. + #else + q2[i,k] = q1[i,km-1] + #endif + else: + for l in range(k0,km): + #! locate the top edge at pe2(i,k) + if ( pe2[i,k] >= pe1[i,l] and pe2[i,k] <= pe1[i,l+1]): + k0 = l + pl = (pe2[i,k]-pe1[i,l]) / dp1[i,l] + if (pe2[i,k+1] <= pe1[i,l+1]): + #! 
entire new grid is within the original grid + pr = (pe2[i,k+1]-pe1[i,l]) / dp1[i,l] + tt = r3*(pr*(pr+pl)+pl**2) + q2[i,k] = a4[1,i,l] + 0.5*(a4[3,i,l]+a4[2,i,l]-a4[1,i,l])*(pr+pl)-a4[3,i,l]*tt + next_k = True + break + #goto 555 + else: + #! Fractional area... + delp = pe1[i,l+1] - pe2[i,k] + tt = r3*(1.+pl*(1.+pl)) + qsum = delp*(a4[1,i,l]+0.5*(a4[3,i,l]+a4[2,i,l]-a4[1,i,l])*(1.+pl)-a4[3,i,l]*tt) + dpsum = delp + k1 = l + 1 + break + #goto 111 + if not next_k: + #labeled 111 + for l in range(k1,km): + if( pe2[i,k+1] > pe1[i,l+1] ): + #! Whole layer.. + qsum = qsum + dp1[i,l]*q1[i,l] + dpsum = dpsum + dp1[i,l] + else: + delp = pe2[i,k+1]-pe1[i,l] + esl = delp / dp1[i,l] + qsum = qsum + delp * (a4[1,i,l]+0.5*esl*(a4[2,i,l]-a4[1,i,l]+a4[3,i,l]*(1.-r23*esl))) + dpsum = dpsum + delp + k0 = l + break #goto 123 + else: #when l-loop completes without breaking + delp = pe2[i,k+1] - pe1[i,km] #should this be km? + if (delp > 0.): + #! Extended below old ps + ##ifdef NGGPS_SUBMITTED + #qsum = qsum + delp * a4(3,i,km) ! not good. + ##else + qsum = qsum + delp * q1[i,km-1] # should this be km-1? + ##endif + dpsum = dpsum + delp + q2[i,k] = qsum / dpsum #formerly labeled 123 + + return q2 + +def map1_ppm(km, pe1, q1, qs, kn, pe2, i1, i2, iv, kord): +# subroutine map1_ppm( km, pe1, q1, qs, & +# kn, pe2, q2, i1, i2, & +# j, ibeg, iend, jbeg, jend, iv, kord) +# integer, intent(in) :: i1 !< Starting longitude +# integer, intent(in) :: i2 !< Finishing longitude +# integer, intent(in) :: iv !< Mode: 0 == constituents 1 == ??? 
2 == remap temp with cs scheme +# integer, intent(in) :: kord !< Method order +# integer, intent(in) :: j !< Current latitude +# integer, intent(in) :: ibeg, iend, jbeg, jend +# integer, intent(in) :: km !< Original vertical dimension +# integer, intent(in) :: kn !< Target vertical dimension +# real, intent(in) :: qs(i1:i2) !< bottom BC +# real, intent(in) :: pe1(i1:i2,km+1) !< pressure at layer edges from model top to bottom surface in the original vertical coordinate +# real, intent(in) :: pe2(i1:i2,kn+1) !< pressure at layer edges from model top to bottom surface in the new vertical coordinate +# real, intent(in) :: q1(ibeg:iend,jbeg:jend,km) !< Field input +# ! INPUT/OUTPUT PARAMETERS: +# real, intent(inout):: q2(ibeg:iend,jbeg:jend,kn) !< Field output +# +# ! DESCRIPTION: +# ! IV = 0: constituents +# ! pe1: pressure at layer edges (from model top to bottom surface) +# ! in the original vertical coordinate +# ! pe2: pressure at layer edges (from model top to bottom surface) +# ! in the new vertical coordinate +# +# ! LOCAL VARIABLES: +# real dp1(i1:i2,km) +# real q4(4,i1:i2,km) +# real pl, pr, qsum, dp, esl +# integer i, k, l, m, k0 +# + im = i2 - i1 + 1 + q2 = np.zeros([im,kn]) + + qs = np.zeros([im]) + dp1 = np.zeros([im,km]) + q4 = np.zeros([4,im,km]) + qsum = 0. + for k in range(0,km): + for i in range(i1-1,i2): + dp1[i,k] = pe1[i,k+1] - pe1[i,k] + q4[0,i,k] = q1[i,k] +# ! Compute vertical subgrid distribution + if (kord > 7): + q4 = cs_profile( qs, q4, dp1, km, i1, i2, iv, kord ) + else: + q4 = ppm_profile( q4, dp1, km, i1, i2, iv, kord ) + + for i in range(i1-1,i2): + k0 = 0 + for k in range(0,kn): + next_k = False + for l in range(k0,km): + # ! locate the top edge: pe2(i,k) + if( pe2[i,k] >= pe1[i,l] and pe2[i,k] <= pe1[i,l+1] ): + pl = (pe2[i,k]-pe1[i,l]) / dp1[i,l] + if( pe2[i,k+1] <= pe1[i,l+1] ): + # ! 
entire new grid is within the original grid + pr = (pe2[i,k+1]-pe1[i,l]) / dp1[i,l] + q2[i,k] = q4[1,i,l] + 0.5*(q4[3,i,l]+q4[2,i,l]-q4[1,i,l])*(pr+pl)-q4[3,i,l]*r3*(pr*(pr+pl)+pl**2) + k0 = l + next_k = True + #print 'new grid within old; q2 = ', q2[i,k] + break + #goto 555 #next k-loop iteration + else: + # ! Fractional area... + qsum = (pe1[i,l+1]-pe2[i,k])*(q4[1,i,l]+0.5*(q4[3,i,l]+q4[2,i,l]-q4[1,i,l])*(1.+pl)-q4[3,i,l]*(r3*(1.+pl*(1.+pl)))) + for m in range(l+1,km): # was do m = l+1,km + # ! locate the bottom edge: pe2(i,k+1) + if( pe2[i,k+1] > pe1[i,m+1] ): + # ! Whole layer + qsum = qsum + dp1[i,m]*q4[0,i,m] + else: + dp = pe2[i,k+1]-pe1[i,m] + esl = dp / dp1[i,m] + qsum = qsum + dp*(q4[1,i,m]+0.5*esl*(q4[2,i,m]-q4[1,i,m]+q4[3,i,m]*(1.-r23*esl))) + k0 = m + # goto 123 + break + else: + #GJF: the following if statement is not in the fv_mapz, but it captures the case where pe2[kn] > pe1[km] where the m loop is not entered; without this, the lowest layer values are weird + if (l+1 == km): + dp = pe2[i,kn]-pe1[i,km] + esl = dp / dp1[i,km-1] + qsum = qsum + dp*(q4[1,i,km-1]+0.5*esl*(q4[2,i,km-1]-q4[1,i,km-1]+q4[3,i,km-1]*(1.-r23*esl))) + break + + break + #goto 123 #end l-loop + if not next_k: + q2[i,k] = qsum / ( pe2[i,k+1] - pe2[i,k] ) #formerly labeled 123 + + return q2 + + +def fillq(im, km, nq, q, dp): + + for ic in range(0,nq): + for k in range(km-1,0,-1): + k1 = k-1 + for i in range(0,im): + if( q[i,k,ic] < 0. ): + q[i,k1,ic] = q[i,k1,ic] + q[i,k,ic]*dp[i,k]/dp[i,k1] + q[i,k ,ic] = 0. + + #! Top down: + for k in range(0,km-1): + k1 = k+1 + for i in range(0,im): + if( q[i,k,ic] < 0. ): + q[i,k1,ic] = q[i,k1,ic] + q[i,k,ic]*dp[i,k]/dp[i,k1] + q[i,k ,ic] = 0. + + return q + +def fillz(im, km, nq, q, dp): + #integer, intent(in):: im !< No. of longitudes + #integer, intent(in):: km !< No. 
of levels + #integer, intent(in):: nq !< Total number of tracers + #real , intent(in):: dp(im,km) !< pressure thickness + #real , intent(inout) :: q(im,km,nq) !< tracer mixing ratio + #! LOCAL VARIABLES: + #logical:: zfix(im) + #real :: dm(km) + #integer i, k, ic, k1 + #real qup, qly, dup, dq, sum0, sum1, fac + + dm = np.zeros([km]) + + #print ('orig q') + #print q + + for ic in range(0,nq): + for i in range(0,im): + #top layer + if( q[i,0,ic] < 0. ): + q[i,1,ic] = q[i,1,ic] + q[i,0,ic]*dp[i,0]/dp[i,1] + q[i,0,ic] = 0. + #! Interior + zfix = [False] * im + for k in range(1,km-1): + for i in range(0,im): + if( q[i,k,ic] < 0. ): + #print('neg in layer',k,q[i,k,ic]) + zfix[i] = True + if ( q[i,k-1,ic] > 0. ): + #print('borrow from above') + #! Borrow from above + dq = np.min( [q[i,k-1,ic]*dp[i,k-1], -q[i,k,ic]*dp[i,k]] ) + q[i,k-1,ic] = q[i,k-1,ic] - dq/dp[i,k-1] + q[i,k ,ic] = q[i,k ,ic] + dq/dp[i,k ] + if ( q[i,k,ic] < 0.0 and q[i,k+1,ic] > 0. ): + #! Borrow from below: + #print('borrow from below') + dq = np.min ( [q[i,k+1,ic]*dp[i,k+1], -q[i,k,ic]*dp[i,k]] ) + q[i,k+1,ic] = q[i,k+1,ic] - dq/dp[i,k+1] + q[i,k ,ic] = q[i,k ,ic] + dq/dp[i,k ] + #print ('new q',q[i,k ,ic]) + #! Bottom layer + k = km-1 + for i in range(0,im): + if( q[i,k,ic] < 0. and q[i,k-1,ic] > 0.): + zfix[i] = True + #! Borrow from above + qup = q[i,k-1,ic]*dp[i,k-1] + qly = -q[i,k ,ic]*dp[i,k ] + dup = np.min([qly, qup]) + q[i,k-1,ic] = q[i,k-1,ic] - dup/dp[i,k-1] + q[i,k, ic] = q[i,k, ic] + dup/dp[i,k ] + + #! Perform final check and non-local fix if needed + for i in range(0,im): + if ( zfix[i] ): + sum0 = 0. + for k in range(1,km): + dm[k] = q[i,k,ic]*dp[i,k] + sum0 = sum0 + dm[k] + #print('sum0',sum0) + if ( sum0 > 0. ): + sum1 = 0. 
+ for k in range(1,km): + sum1 = sum1 + np.max([0., dm[k]]) + fac = sum0 / sum1 + #print('fac',fac) + for k in range(1,km): + q[i,k,ic] = np.max([0., fac*dm[k]/dp[i,k]]) + + return q + +def mp_auto_conversion(ql, qi): + qi0_max = 2.0E-3 + ql0_max = 2.5E-3 + qr = 0.0 + qs = 0.0 + + #! Convert excess cloud water into rain: + if ( ql > ql0_max ): + qr = ql - ql0_max + ql = ql0_max + #! Convert excess cloud ice into snow: + if ( qi > qi0_max ): + qs = qi - qi0_max + qi = qi0_max + + return (ql, qr, qi, qs) + +def latlon2xyz(p): + +#real(kind=R_GRID), intent(in) :: p(2) +#real(kind=R_GRID), intent(out):: e(3) + +#integer n +#real (f_p):: q(2) +#real (f_p):: e1, e2, e3 + + e = np.zeros(3) + + e1 = math.cos(p[1]) * math.cos(p[0]) + e2 = math.cos(p[1]) * math.sin(p[0]) + e3 = math.sin(p[1]) +#!----------------------------------- +#! Truncate to the desired precision: +#!----------------------------------- + e = [e1, e2, e3] + + return e + +def mid_pt3_cart(p1, p2): +# real(kind=R_GRID), intent(IN) :: p1(3), p2(3) +# real(kind=R_GRID), intent(OUT) :: e(3) +#! +# real (f_p):: q1(3), q2(3) +# real (f_p):: dd, e1, e2, e3 +# integer k + + e = np.zeros(3) + + # do k=1,3 + # q1(k) = p1(k) + # q2(k) = p2(k) + # enddo + + e1 = p1[0] + p2[0] + e2 = p1[1] + p2[1] + e3 = p1[2] + p2[2] + + dd = math.sqrt( e1**2 + e2**2 + e3**2 ) + e1 = e1 / dd + e2 = e2 / dd + e3 = e3 / dd + + e = [e1, e2, e3] + + return e + +def cart_to_latlon(q): +#! vector version of cart_to_latlon1 +#integer, intent(in):: np +#real(kind=R_GRID), intent(inout):: q(3,np) +#real(kind=R_GRID), intent(inout):: xs(np), ys(np) +#! local +#real(kind=R_GRID), parameter:: esl=1.d-10 +#real (f_p):: p(3) +#real (f_p):: dist, lat, lon +#integer i,k + esl = 1.0E-10 + + dist = math.sqrt(q[0]**2 + q[1]**2 + q[2]**2) + q = np.divide(q,dist) + + if ( (abs(q[0])+abs(q[1])) < esl ): + lon = 0.0 + else: + lon = math.atan2( q[1], q[0] ) #! range [-pi,pi] + + + if ( lon < 0.): + lon = 2*math.pi + lon + + #! 
RIGHT_HAND system: + lat = math.asin(q[2]) + + return (lon, lat) + +def mid_pt_sphere(p1, p2): +# real(kind=R_GRID) , intent(IN) :: p1(2), p2(2) +# real(kind=R_GRID) , intent(OUT) :: pm(2) +#!------------------------------------------ +# real(kind=R_GRID) e1(3), e2(3), e3(3) + + pm = np.zeros(2) + + e1 = latlon2xyz(p1) + e2 = latlon2xyz(p2) + e3 = mid_pt3_cart(e1, e2) + (pm[0], pm[1]) = cart_to_latlon(e3) + + return pm + +def vect_cross(p1, p2): + #real(kind=R_GRID), intent(in) :: p1(3), p2(3) + #real(kind=R_GRID), intent(out):: e(3) + e = np.zeros(3) + + e[0] = p1[1]*p2[2] - p1[2]*p2[1] + e[1] = p1[2]*p2[0] - p1[0]*p2[2] + e[2] = p1[0]*p2[1] - p1[1]*p2[0] + + return e + +def normalize_vect(e): + + #real(kind=R_GRID), intent(inout):: e(3) + #real(f_p):: pdot + #integer k + + pdot = e[0]**2 + e[1]**2 + e[2]**2 + pdot = math.sqrt( pdot ) + e = e/pdot + + return e + +def get_unit_vect2( e1, e2): + #real(kind=R_GRID), intent(in) :: e1(2), e2(2) + #real(kind=R_GRID), intent(out):: uc(3) !< unit vector e1--->e2 +#! Local: + #real(kind=R_GRID), dimension(3):: pc, p1, p2, p3 + uc = np.zeros(3) + +#! RIGHT_HAND system: + p1 = latlon2xyz(e1) + p2 = latlon2xyz(e2) + + pc = mid_pt3_cart(p1, p2) + p3 = vect_cross(p2, p1) + uc = vect_cross(pc, p3) + uc = normalize_vect( uc ) + + return uc + +def get_latlon_vector(pp): + #real(kind=R_GRID), intent(IN) :: pp(2) + #real(kind=R_GRID), intent(OUT) :: elon(3), elat(3) + elon = np.zeros(3) + elat = np.zeros(3) + + elon[0] = -math.sin(pp[0]) + elon[1] = math.cos(pp[0]) + elon[2] = 0.0 + elat[0] = -math.sin(pp[1])*math.cos(pp[0]) + elat[1] = -math.sin(pp[1])*math.sin(pp[0]) +#!!! RIGHT_HAND + elat[2] = math.cos(pp[1]) +#! Left-hand system needed to be consistent with rest of the codes +#! 
elat[2] = -math.cos(pp[1]) + + return (elon, elat) + +def inner_prod(v1, v2): + #real(kind=R_GRID),intent(in):: v1(3), v2(3) + #real (f_p) :: vp1(3), vp2(3), prod16 + #integer k + + return v1[0]*v2[0] + v1[1]*v2[1] + v1[2]*v2[2] diff --git a/scm/src/fv_eta.h b/scm/src/fv_eta.h index 7df22b11b..66660fb96 100644 --- a/scm/src/fv_eta.h +++ b/scm/src/fv_eta.h @@ -10,7 +10,7 @@ !* (at your option) any later version. !* !* The FV3 dynamical core is distributed in the hope that it will be -!* useful, but WITHOUT ANYWARRANTY; without even the implied warranty +!* useful, but WITHOUT ANY WARRANTY; without even the implied warranty !* of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. !* See the GNU General Public License for more details. !* @@ -18,6 +18,7 @@ !* License along with the FV3 dynamical core. !* If not, see . !*********************************************************************** + #ifndef _FV_ETA_ #define _FV_ETA__ @@ -42,11 +43,15 @@ real a63meso(64),b63meso(64) real a64(65),b64(65) real a64gfs(65),b64gfs(65) + real a65(66),b65(66) ! kgao: L65 with enhanced surface resolution by xi chen real a68(69),b68(69) ! cjg: grid with enhanced PBL resolution real a96(97),b96(97) ! cjg: grid with enhanced PBL resolution + real a88(89),b88(89) ! kgao: grid with enhanced PBL resolution + real a75(76),b75(76) ! kgao: emc grid with enhanced PBL resolution real a100(101),b100(101) real a104(105),b104(105) real a125(126),b125(126) + real a127(128),b127(128) !----------------------------------------------- ! GFDL AM2-L24: modified by SJL at the model top @@ -455,39 +460,53 @@ 0.94565, 0.95762, 0.96827, & 0.97771, 0.98608, 0.99347, 1./ -! 
NAM levels - data a60/200., 1311.4934, 2424.6044, 3541.7594,& - 4662.9584, 5790.2234, 6932.6534, 8095.3034,& - 9278.1734, 10501.4834, 11755.1234, 13049.2034,& - 14403.9434, 15809.2334, 17315.6234, 18953.4434,& - 20783.3534, 22815.4634, 25059.8834, 27567.1634,& - 30148.42896047, 32193.91776039, 33237.35176644, 33332.15200668,& - 32747.34688095, 31710.06232008, 30381.0344269, 28858.71577772,& - 27218.00439794, 25500.31691133, 23734.52294749, 21947.3406187,& - 20167.06984021, 18396.08144096, 16688.20978135, 15067.73749198,& - 13564.49530178, 12183.34512952, 10928.24869364, 9815.02787644,& - 8821.38325756, 7943.05793658, 7181.90985128, 6500.94645341,& - 5932.84856135, 5420.87683616, 4959.15585353, 4522.15047657,& - 4103.63596619, 3703.72540955, 3322.52525084, 2953.65688391,& - 2597.18532669, 2253.10764634, 1915.10585833, 1583.14516612,& - 1257.18953818, 937.3977544 , 623.60136981, 311.11085215,& - 0. / - data b60/0., 0., 0., 0., 0.,& - 0. , 0. , 0. , 0. , 0. ,& - 0. , 0. , 0. , 0. , 0. ,& - 0. , 0. , 0. , 0. , 0. ,& - 0.0014653 , 0.01021565, 0.0301554 , 0.06025816, 0.09756877,& - 0.13994493, 0.18550048, 0.23318371, 0.2819159 , 0.33120838,& - 0.38067633, 0.42985641, 0.47816985, 0.52569303, 0.57109611,& - 0.61383996, 0.6532309 , 0.68922093, 0.72177094, 0.75052515,& - 0.77610288, 0.79864598, 0.81813309, 0.83553022, 0.85001773,& - 0.86305395, 0.8747947 , 0.88589325, 0.89650986, 0.9066434 ,& - 0.91629284, 0.92562094, 0.93462705, 0.94331221, 0.95183659,& - 0.96020153, 0.96840839, 0.97645359, 0.98434181, 0.99219119, 1. 
/ + data a60/ 1.7861000000e-01, 1.0805100000e+00, 3.9647100000e+00, & + 9.7516000000e+00, 1.9816580000e+01, 3.6695950000e+01, & + 6.2550570000e+01, 9.9199620000e+01, 1.4792505000e+02, & + 2.0947487000e+02, 2.8422571000e+02, 3.7241721000e+02, & + 4.7437835000e+02, 5.9070236000e+02, 7.2236063000e+02, & + 8.7076746000e+02, 1.0378138800e+03, 1.2258877300e+03, & + 1.4378924600e+03, 1.6772726600e+03, 1.9480506400e+03, & + 2.2548762700e+03, 2.6030909400e+03, 2.9988059200e+03, & + 3.4489952300e+03, 3.9616028900e+03, 4.5456641600e+03, & + 5.2114401700e+03, 5.9705644000e+03, 6.8361981800e+03, & + 7.8231906000e+03, 8.9482351000e+03, 1.0230010660e+04, & + 1.1689289750e+04, 1.3348986860e+04, 1.5234111060e+04, & + 1.7371573230e+04, 1.9789784580e+04, 2.2005564550e+04, & + 2.3550115120e+04, 2.4468583320e+04, 2.4800548800e+04, & + 2.4582445070e+04, 2.3849999620e+04, 2.2640519740e+04, & + 2.0994737150e+04, 1.8957848730e+04, 1.6579413230e+04, & + 1.4080071030e+04, 1.1753630920e+04, 9.6516996300e+03, & + 7.7938009300e+03, 6.1769062800e+03, 4.7874276000e+03, & + 3.6050497500e+03, 2.6059860700e+03, 1.7668328200e+03, & + 1.0656131200e+03, 4.8226201000e+02, 0.0000000000e+00, & + 0.0000000000e+00 / + + data b60/ 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 5.0600000000e-03, & + 2.0080000000e-02, 4.4900000000e-02, 
7.9360000000e-02, & + 1.2326000000e-01, 1.7634000000e-01, 2.3820000000e-01, & + 3.0827000000e-01, 3.8581000000e-01, 4.6989000000e-01, & + 5.5393000000e-01, 6.2958000000e-01, 6.9642000000e-01, & + 7.5458000000e-01, 8.0463000000e-01, 8.4728000000e-01, & + 8.8335000000e-01, 9.1368000000e-01, 9.3905000000e-01, & + 9.6020000000e-01, 9.7775000000e-01, 9.9223000000e-01, & + 1.0000000000e+00 / ! This is activated by USE_GFSL63 -! Thfollowing L63 setting is the same as NCEP GFS's L64 except the top +! The following L63 setting is the same as NCEP GFS's L64 except the top ! 3 layers data a63/64.247, 137.790, 221.958, & 318.266, 428.434, 554.424, & @@ -668,6 +687,51 @@ 0.87352, 0.91502, 0.95235, & 0.98511, 1.00000 / + data a65/1.00000000, 5.13470268, 14.04240036, & + 30.72783852, 53.79505539, 82.45489502, & + 117.05598450, 158.62843323, 208.79000854, & + 270.02725220, 345.50848389, 438.41940308, & + 551.85266113, 689.25054932, 854.40936279, & + 1051.47802734, 1284.95031738, 1559.65148926, & + 1880.71691895, 2253.56542969, 2683.86547852, & + 3177.49560547, 3740.49951172, 4379.03613281, & + 5099.32617188, 5907.59326172, 6810.00781250, & + 7812.62353516, 8921.31933594, 10141.73632812,& + 11285.93066406, 12188.79101562, 12884.30078125,& + 13400.11523438, 13758.84960938, 13979.10351562,& + 14076.26074219, 14063.13085938, 13950.45507812,& + 13747.31445312, 13461.45410156, 13099.54199219,& + 12667.38183594, 12170.08203125, 11612.18847656,& + 10997.79980469, 10330.65039062, 9611.05468750, & + 8843.30371094, 8045.85009766, 7236.31152344, & + 6424.55712891, 5606.50927734, 4778.05908203, & + 3944.97241211, 3146.77514648, 2416.63354492, & + 1778.22607422, 1246.21462402, 826.51950684, & + 511.21385254, 290.74072876, 150.00000000, & + 68.89300000, 14.99899865, 0.00000000 / + + data b65/0.00000000, 0.00000000, 0.00000000, & + 0.00000000, 0.00000000, 0.00000000, & + 0.00000000, 0.00000000, 0.00000000, & + 0.00000000, 0.00000000, 0.00000000, & + 0.00000000, 0.00000000, 0.00000000, & + 
0.00000000, 0.00000000, 0.00000000, & + 0.00000000, 0.00000000, 0.00000000, & + 0.00000000, 0.00000000, 0.00000000, & + 0.00000000, 0.00000000, 0.00000000, & + 0.00000000, 0.00000000, 0.00000000, & + 0.00193294, 0.00749994, 0.01640714, & + 0.02841953, 0.04334756, 0.06103661, & + 0.08135860, 0.10420541, 0.12948355, & + 0.15711005, 0.18700911, 0.21910952, & + 0.25334257, 0.28964061, 0.32793567, & + 0.36815873, 0.41023913, 0.45429301, & + 0.50016892, 0.54688859, 0.59356427, & + 0.63976413, 0.68518244, 0.72950502, & + 0.77231618, 0.81251526, 0.84921405, & + 0.88174411, 0.90978803, 0.93327247, & + 0.95249488, 0.96783525, 0.97980107, & + 0.98896214, 0.99575002, 1.00000000 / !-->cjg data a68/1.00000, 2.68881, 5.15524, & 8.86683, 14.20349, 22.00278, & @@ -785,6 +849,129 @@ 0.97918, 0.98723, 0.99460, & 1.00000 / !<--cjg + +!---> kgao: remove top layers from l96 + data a88/65.28397, & + 95.48274, 137.90344, 196.76073, & + 277.45330, 386.81095, 533.37018, & + 727.67600, 982.60677, 1313.71685, & + 1739.59104, 2282.20281, 2967.26766, & + 3824.58158, 4888.33404, 6197.38450, & + 7795.49158, 9731.48414, 11969.71024, & + 14502.88894, 17304.52434, 20134.76139, & + 22536.63814, 24252.54459, 25230.65591, & + 25585.72044, 25539.91412, 25178.87141, & + 24644.84493, 23978.98781, 23245.49366, & + 22492.11600, 21709.93990, 20949.64473, & + 20225.94258, 19513.31158, 18829.32485, & + 18192.62250, 17589.39396, 17003.45386, & + 16439.01774, 15903.91204, 15396.39758, & + 14908.02140, 14430.65897, 13967.88643, & + 13524.16667, 13098.30227, 12687.56457, & + 12287.08757, 11894.41553, 11511.54106, & + 11139.22483, 10776.01912, 10419.75711, & + 10067.11881, 9716.63489, 9369.61967, & + 9026.69066, 8687.29884, 8350.04978, & + 8013.20925, 7677.12187, 7343.12994, & + 7011.62844, 6681.98102, 6353.09764, & + 6025.10535, 5699.10089, 5375.54503, & + 5053.63074, 4732.62740, 4413.38037, & + 4096.62775, 3781.79777, 3468.45371, & + 3157.19882, 2848.25306, 2541.19150, & + 2236.21942, 1933.50628, 1632.83741, & + 
1334.35954, 1038.16655, 744.22318, & + 452.71094, 194.91899, 0.00000, & + 0.00000 / + + data b88/0.00000, & + 0.00000, 0.00000, 0.00000, & + 0.00000, 0.00000, 0.00000, & + 0.00000, 0.00000, 0.00000, & + 0.00000, 0.00000, 0.00000, & + 0.00000, 0.00000, 0.00000, & + 0.00000, 0.00000, 0.00000, & + 0.00000, 0.00000, 0.00193, & + 0.00974, 0.02538, 0.04876, & + 0.07817, 0.11081, 0.14514, & + 0.18007, 0.21486, 0.24866, & + 0.28088, 0.31158, 0.34030, & + 0.36701, 0.39210, 0.41554, & + 0.43733, 0.45774, 0.47707, & + 0.49540, 0.51275, 0.52922, & + 0.54495, 0.56007, 0.57459, & + 0.58850, 0.60186, 0.61471, & + 0.62715, 0.63922, 0.65095, & + 0.66235, 0.67348, 0.68438, & + 0.69510, 0.70570, 0.71616, & + 0.72651, 0.73675, 0.74691, & + 0.75700, 0.76704, 0.77701, & + 0.78690, 0.79672, 0.80649, & + 0.81620, 0.82585, 0.83542, & + 0.84492, 0.85437, 0.86375, & + 0.87305, 0.88229, 0.89146, & + 0.90056, 0.90958, 0.91854, & + 0.92742, 0.93623, 0.94497, & + 0.95364, 0.96223, 0.97074, & + 0.97918, 0.98723, 0.99460, & + 1.00000 / +!<--- kgao: end of a88/b88 + +!---> kgao: EMC L75 config + + data a75/200.0, 572.419, 1104.437, & + 1760.239, 2499.052, 3300.438, & + 4161.36, 5090.598, 6114.272, & + 7241.963, 8489.481, 9855.825, & + 11338.34, 12682.56, 13688.97, & + 14422.61, 14934.2, 15263.88, & + 15443.77, 15499.9, 15453.61, & + 15322.6, 15121.64, 14863.23, & + 14557.97, 14214.93, 13841.91, & + 13445.62, 13031.86, 12605.65, & + 12171.31, 11732.57, 11292.65, & + 10854.29, 10419.82, 9991.243, & + 9570.207, 9158.088, 8756.019, & + 8364.893, 7985.424, 7618.15, & + 7263.452, 6921.581, 6592.674, & + 6276.763, 5963.31, 5652.806, & + 5345.765, 5042.658, 4743.966, & + 4450.172, 4161.769, 3879.194, & + 3602.911, 3333.365, 3071.016, & + 2816.274, 2569.556, 2331.264, & + 2101.816, 1881.57, 1670.887, & + 1470.119, 1279.627, 1099.702, & + 930.651, 772.757, 626.305, & + 491.525, 368.641, 257.862, & + 159.399, 73.396, 0.001, & + 0.0/ + data b75/0.0, 0.0, 0.0, & + 0.0, 0.0, 0.0, & + 0.0, 0.0, 0.0, & + 0.0, 0.0, 
0.0, & + 0.0, 0.00250213, 0.00944449,& + 0.02010732, 0.03390246, 0.0503391, & + 0.06899972, 0.08952269, 0.1115907, & + 0.134922, 0.1592647, 0.1843923, & + 0.2101002, 0.2362043, 0.2625384, & + 0.2889538, 0.3153166, 0.3415084, & + 0.3674242, 0.3929729, 0.4180741, & + 0.4426602, 0.4666739, 0.4900666, & + 0.5127994, 0.5348418, 0.5561699, & + 0.5767674, 0.5966232, 0.6157322, & + 0.6340936, 0.6517111, 0.668592, & + 0.6847468, 0.7007225, 0.7164985, & + 0.7320531, 0.7473667, 0.7624187, & + 0.7771889, 0.7916558, 0.8058007, & + 0.819604, 0.8330461, 0.8461072, & + 0.8587694, 0.8710147, 0.8828254, & + 0.8941834, 0.9050727, 0.9154776, & + 0.9253828, 0.9347721, 0.9436326, & + 0.9519511, 0.9597148, 0.966911, & + 0.9735298, 0.9795609, 0.9849954, & + 0.9898235, 0.9940391, 0.9976355, & + 1.0/ +! <--- kgao: end of a75/b75 + ! ! Ultra high troposphere resolution data a100/100.00000, 300.00000, 800.00000, & @@ -859,79 +1046,79 @@ 0.99223, 1.00000 / data a104/ & - 1.8827062944e-01, 7.7977549145e-01, 2.1950593583e+00, & - 4.9874566624e+00, 9.8041418997e+00, 1.7019717163e+01, & - 2.7216579591e+01, 4.0518628401e+01, 5.6749646818e+01, & - 7.5513868331e+01, 9.6315093333e+01, 1.1866706195e+02, & - 1.4216835396e+02, 1.6653733709e+02, 1.9161605772e+02, & - 2.1735580129e+02, 2.4379516604e+02, 2.7103771847e+02, & - 2.9923284173e+02, 3.2856100952e+02, 3.5922338766e+02, & - 3.9143507908e+02, 4.2542117983e+02, 4.6141487902e+02, & - 4.9965698106e+02, 5.4039638379e+02, 5.8389118154e+02, & - 6.3041016829e+02, 6.8023459505e+02, 7.3366009144e+02, & - 7.9099869949e+02, 8.5258099392e+02, 9.1875827946e+02, & - 9.8990486716e+02, 1.0664204381e+03, 1.1487325074e+03, & - 1.2372990044e+03, 1.3326109855e+03, 1.4351954993e+03, & - 1.5456186222e+03, 1.6644886848e+03, 1.7924597105e+03, & - 1.9302350870e+03, 2.0785714934e+03, 2.2382831070e+03, & - 2.4102461133e+03, 2.5954035462e+03, 2.7947704856e+03, & - 3.0094396408e+03, 3.2405873512e+03, 3.4894800360e+03, & - 3.7574811281e+03, 4.0460585279e+03, 4.3567926151e+03, & 
- 4.6913848588e+03, 5.0516670674e+03, 5.4396113207e+03, & - 5.8573406270e+03, 6.3071403487e+03, 6.7914704368e+03, & - 7.3129785102e+03, 7.8745138115e+03, 8.4791420557e+03, & - 9.1301611750e+03, 9.8311179338e+03, 1.0585825354e+04, & - 1.1398380836e+04, 1.2273184781e+04, 1.3214959424e+04, & - 1.4228767429e+04, 1.5320029596e+04, 1.6494540743e+04, & - 1.7758482452e+04, 1.9118430825e+04, 2.0422798801e+04, & - 2.1520147587e+04, 2.2416813461e+04, 2.3118184510e+04, & - 2.3628790785e+04, 2.3952411814e+04, 2.4092209011e+04, & - 2.4050892106e+04, 2.3830930156e+04, 2.3434818358e+04, & - 2.2865410898e+04, 2.2126326004e+04, 2.1222420323e+04, & - 2.0160313690e+04, 1.8948920926e+04, 1.7599915822e+04, & - 1.6128019809e+04, 1.4550987232e+04, 1.2889169132e+04, & - 1.1164595563e+04, 9.4227665517e+03, 7.7259097899e+03, & - 6.1538244381e+03, 4.7808126007e+03, 3.5967415552e+03, & - 2.5886394104e+03, 1.7415964865e+03, 1.0393721271e+03, & - 4.6478852032e+02, 7.0308342481e-13, 0.0000000000e+00 / + 1.8827062944e-01, 7.7977549145e-01, 2.1950593583e+00, & + 4.9874566624e+00, 9.8041418997e+00, 1.7019717163e+01, & + 2.7216579591e+01, 4.0518628401e+01, 5.6749646818e+01, & + 7.5513868331e+01, 9.6315093333e+01, 1.1866706195e+02, & + 1.4216835396e+02, 1.6653733709e+02, 1.9161605772e+02, & + 2.1735580129e+02, 2.4379516604e+02, 2.7103771847e+02, & + 2.9923284173e+02, 3.2856100952e+02, 3.5922338766e+02, & + 3.9143507908e+02, 4.2542117983e+02, 4.6141487902e+02, & + 4.9965698106e+02, 5.4039638379e+02, 5.8389118154e+02, & + 6.3041016829e+02, 6.8023459505e+02, 7.3366009144e+02, & + 7.9099869949e+02, 8.5258099392e+02, 9.1875827946e+02, & + 9.8990486716e+02, 1.0664204381e+03, 1.1487325074e+03, & + 1.2372990044e+03, 1.3326109855e+03, 1.4351954993e+03, & + 1.5456186222e+03, 1.6644886848e+03, 1.7924597105e+03, & + 1.9302350870e+03, 2.0785714934e+03, 2.2382831070e+03, & + 2.4102461133e+03, 2.5954035462e+03, 2.7947704856e+03, & + 3.0094396408e+03, 3.2405873512e+03, 3.4894800360e+03, & + 3.7574811281e+03, 
4.0460585279e+03, 4.3567926151e+03, & + 4.6913848588e+03, 5.0516670674e+03, 5.4396113207e+03, & + 5.8573406270e+03, 6.3071403487e+03, 6.7914704368e+03, & + 7.3129785102e+03, 7.8745138115e+03, 8.4791420557e+03, & + 9.1301611750e+03, 9.8311179338e+03, 1.0585825354e+04, & + 1.1398380836e+04, 1.2273184781e+04, 1.3214959424e+04, & + 1.4228767429e+04, 1.5320029596e+04, 1.6494540743e+04, & + 1.7758482452e+04, 1.9118430825e+04, 2.0422798801e+04, & + 2.1520147587e+04, 2.2416813461e+04, 2.3118184510e+04, & + 2.3628790785e+04, 2.3952411814e+04, 2.4092209011e+04, & + 2.4050892106e+04, 2.3830930156e+04, 2.3434818358e+04, & + 2.2865410898e+04, 2.2126326004e+04, 2.1222420323e+04, & + 2.0160313690e+04, 1.8948920926e+04, 1.7599915822e+04, & + 1.6128019809e+04, 1.4550987232e+04, 1.2889169132e+04, & + 1.1164595563e+04, 9.4227665517e+03, 7.7259097899e+03, & + 6.1538244381e+03, 4.7808126007e+03, 3.5967415552e+03, & + 2.5886394104e+03, 1.7415964865e+03, 1.0393721271e+03, & + 4.6478852032e+02, 7.0308342481e-13, 0.0000000000e+00 / data b104/ & - 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & - 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & - 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & - 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & - 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & - 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & - 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & - 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & - 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & - 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & - 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & - 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & - 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & - 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & - 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & - 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & - 0.0000000000e+00, 
0.0000000000e+00, 0.0000000000e+00, & - 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & - 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & - 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & - 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & - 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & - 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & - 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & - 0.0000000000e+00, 0.0000000000e+00, 1.5648447298e-03, & - 6.2617046389e-03, 1.4104157933e-02, 2.5118187415e-02, & - 3.9340510972e-02, 5.6816335609e-02, 7.7596328431e-02, & - 1.0173255472e-01, 1.2927309709e-01, 1.6025505622e-01, & - 1.9469566981e-01, 2.3258141217e-01, 2.7385520518e-01, & - 3.1840233814e-01, 3.6603639170e-01, 4.1648734767e-01, & - 4.6939496013e-01, 5.2431098738e-01, 5.8071350676e-01, & - 6.3803478105e-01, 6.9495048840e-01, 7.4963750338e-01, & - 7.9975208897e-01, 8.4315257576e-01, 8.8034012292e-01, & - 9.1184389721e-01, 9.3821231526e-01, 9.6000677644e-01, & - 9.7779792223e-01, 9.9216315122e-01, 1.0000000000e+00 / + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 
0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 0.0000000000e+00, & + 0.0000000000e+00, 0.0000000000e+00, 1.5648447298e-03, & + 6.2617046389e-03, 1.4104157933e-02, 2.5118187415e-02, & + 3.9340510972e-02, 5.6816335609e-02, 7.7596328431e-02, & + 1.0173255472e-01, 1.2927309709e-01, 1.6025505622e-01, & + 1.9469566981e-01, 2.3258141217e-01, 2.7385520518e-01, & + 3.1840233814e-01, 3.6603639170e-01, 4.1648734767e-01, & + 4.6939496013e-01, 5.2431098738e-01, 5.8071350676e-01, & + 6.3803478105e-01, 6.9495048840e-01, 7.4963750338e-01, & + 7.9975208897e-01, 8.4315257576e-01, 8.8034012292e-01, & + 9.1184389721e-01, 9.3821231526e-01, 9.6000677644e-01, & + 9.7779792223e-01, 9.9216315122e-01, 1.0000000000e+00 / ! 
IFS-like L125(top 12 levels removed from IFSL137) data a125/ 64., & @@ -980,5 +1167,74 @@ 0.984542, 0.988500, 0.991984, 0.995003, 0.997630, 1.000000 / + data a127/ & + 0.99900, 1.60500, 2.53200, 3.92400, & + 5.97600, 8.94700, 13.17700, 19.09600, & + 27.24300, 38.27600, 52.98400, 72.29300, & + 97.26900, 129.11000, 169.13500, 218.76700, & + 279.50600, 352.89400, 440.48100, 543.78200, & + 664.23600, 803.16400, 961.73400, 1140.93100, & + 1341.53800, 1564.11900, 1809.02800, 2076.41500, & + 2366.25200, 2678.37200, 3012.51000, 3368.36300, & + 3745.64600, 4144.16400, 4563.88100, 5004.99500, & + 5468.01700, 5953.84800, 6463.86400, 7000.00000, & + 7563.49400, 8150.66100, 8756.52900, 9376.14100, & + 10004.55300, 10636.85100, 11268.15700, 11893.63900, & + 12508.51900, 13108.09100, 13687.72700, 14242.89000, & + 14769.15300, 15262.20200, 15717.85900, 16132.09000, & + 16501.01800, 16820.93800, 17088.32400, 17299.85200, & + 17453.08400, 17548.35000, 17586.77100, 17569.69700, & + 17498.69700, 17375.56100, 17202.29900, 16981.13700, & + 16714.50400, 16405.02000, 16055.48500, 15668.86000, & + 15248.24700, 14796.86800, 14318.04000, 13815.15000, & + 13291.62900, 12750.92400, 12196.46800, 11631.65900, & + 11059.82700, 10484.20800, 9907.92700, 9333.96700, & + 8765.15500, 8204.14200, 7653.38700, 7115.14700, & + 6591.46800, 6084.17600, 5594.87600, 5124.94900, & + 4675.55400, 4247.63300, 3841.91800, 3458.93300, & + 3099.01000, 2762.29700, 2448.76800, 2158.23800, & + 1890.37500, 1644.71200, 1420.66100, 1217.52800, & + 1034.52400, 870.77800, 725.34800, 597.23500, & + 485.39200, 388.73400, 306.14900, 236.50200, & + 178.65100, 131.44700, 93.74000, 64.39200, & + 42.27400, 26.27400, 15.30200, 8.28700, & + 4.19000, 1.99400, 0.81000, 0.23200, & + 0.02900, 0.00000, 0.00000, 0.00000 / + + + data b127/ & + 0.000000000, 0.000000000, 0.000000000, 0.000000000, & + 0.000000000, 0.000000000, 0.000000000, 0.000000000, & + 0.000000000, 0.000000000, 0.000000000, 0.000000000, & + 0.000000000, 0.000000000, 
0.000000000, 0.000000000, & + 0.000000000, 0.000000000, 0.000000000, 0.000000000, & + 0.000000000, 0.000000000, 0.000000000, 0.000000000, & + 0.000000000, 0.000000000, 0.000000000, 0.000000000, & + 0.000000000, 0.000000000, 0.000000000, 0.000000000, & + 0.000000000, 0.000000000, 0.000000000, 0.000000000, & + 0.000000000, 0.000000000, 0.000000000, 0.000000000, & + 0.000010180, 0.000081410, 0.000274690, 0.000650780, & + 0.001270090, 0.002192480, 0.003477130, 0.005182280, & + 0.007365040, 0.010081200, 0.013384920, 0.017328570, & + 0.021962390, 0.027334280, 0.033489540, 0.040470560, & + 0.048316610, 0.057063580, 0.066743720, 0.077385480, & + 0.089006290, 0.101593970, 0.115126180, 0.129576220, & + 0.144912940, 0.161100800, 0.178099890, 0.195866050, & + 0.214351120, 0.233503070, 0.253266330, 0.273582160, & + 0.294388980, 0.315622900, 0.337218050, 0.359107230, & + 0.381222370, 0.403495070, 0.425857160, 0.448241260, & + 0.470581260, 0.492812960, 0.514874340, 0.536706210, & + 0.558252450, 0.579460500, 0.600281540, 0.620670740, & + 0.640587510, 0.659995680, 0.678863350, 0.697163110, & + 0.714872000, 0.731971260, 0.748446460, 0.764287110, & + 0.779486660, 0.794042170, 0.807954130, 0.821226300, & + 0.833865170, 0.845880090, 0.857282640, 0.868086640, & + 0.878307700, 0.887963240, 0.897071780, 0.905653240, & + 0.913728360, 0.921318710, 0.928446350, 0.935133760, & + 0.941403690, 0.947278860, 0.952782090, 0.957935990, & + 0.962762950, 0.967285100, 0.971524000, 0.975500880, & + 0.979236420, 0.982750770, 0.986062530, 0.989185090, & + 0.992129920, 0.994907680, 0.997528200, 1.000000000 / + #endif _FV_ETA_ diff --git a/scm/src/run_scm.py b/scm/src/run_scm.py index c7a7b63f2..0d04edd25 100755 --- a/scm/src/run_scm.py +++ b/scm/src/run_scm.py @@ -9,7 +9,7 @@ import subprocess import sys import time -from suite_info import suite, suite_list +from suite_info import suite_list from netCDF4 import Dataset import importlib @@ -208,7 +208,7 @@ def find_gdb(): class Experiment(object): - def 
__init__(self, case, suite, runtime, runtime_mult, levels, npz_type, vert_coord_file, case_data_dir, n_itt_out, n_itt_diag): + def __init__(self, case, suite, runtime, runtime_mult, levels, npz_type, vert_coord_file, case_data_dir, n_itt_out, n_itt_diag, timestep): """Initialize experiment. This routine does most of the work, including setting and checking the experiment configuration (namelist).""" @@ -309,11 +309,11 @@ def __init__(self, case, suite, runtime, runtime_mult, levels, npz_type, vert_co self._n_itt_diag = n_itt_diag else: self._n_itt_diag = DEFAULT_DIAG_PERIOD - - if suite.timestep is not None: - self._timestep = suite.timestep + + if timestep: + self._timestep = timestep else: - self._timestep = None + self._timestep = suite.timestep @property def name(self): @@ -866,7 +866,7 @@ def main(): irun, len(run_list), run["case"], run["suite"], active_suite.namelist)) # exp = Experiment(run["case"], active_suite, runtime, runtime_mult, levels, \ - npz_type, vert_coord_file, case_data_dir, n_itt_out, n_itt_diag) + npz_type, vert_coord_file, case_data_dir, n_itt_out, n_itt_diag, timestep) # exp_dir = exp.setup_rundir() if len(run_list) > 1: diff --git a/scm/src/scm_input.F90 b/scm/src/scm_input.F90 index dd4f4df38..dcb18fc21 100644 --- a/scm/src/scm_input.F90 +++ b/scm/src/scm_input.F90 @@ -1302,7 +1302,8 @@ subroutine get_case_init_DEPHY(scm_state, scm_input) if (trim(input_surfaceForcingLSM) == "lsm") then !if model ICs are included in the file - + scm_state%lsm_ics = .true. + !variables with vertical extent allocate(input_ozone (input_n_lev, input_n_init_times), & input_stc (input_n_soil, input_n_init_times), & diff --git a/scm/src/scm_vgrid.F90 b/scm/src/scm_vgrid.F90 index bde37775d..f827f89f9 100644 --- a/scm/src/scm_vgrid.F90 +++ b/scm/src/scm_vgrid.F90 @@ -350,6 +350,14 @@ subroutine get_FV3_vgrid(scm_input, scm_state) scm_state%b_k(k) = b64(k) enddo endif + ! 
xi chen's l65 + case (65) + ks = 29 + do k=1,km+1 + scm_state%a_k(k) = a65(k) + scm_state%b_k(k) = b65(k) + enddo + !-->cjg case (68) ks = 27 @@ -361,11 +369,23 @@ subroutine get_FV3_vgrid(scm_input, scm_state) ptop = 1. stretch_fac = 1.03 auto_routine = 1 - case (75) ! HS-SGO test configuration - pint = 100.E2 - ptop = 10.E2 - stretch_fac = 1.035 - auto_routine = 6 + ! kgao: introduce EMC's L75 config + case (75) + if (trim(scm_state%npz_type) == 'emc') then + ! EMC's L75 config + ks = 12 + do k=1,km+1 + scm_state%a_k(k) = a75(k) + scm_state%b_k(k) = b75(k) + enddo + else + ! HS-SGO test configuration + pint = 100.E2 + ptop = 10.E2 + stretch_fac = 1.035 + auto_routine = 6 + endif + case (79) ! N = 10, M=5 if (trim(scm_state%npz_type) == 'gcrm') then pint = 100.E2 @@ -377,7 +397,14 @@ subroutine get_FV3_vgrid(scm_input, scm_state) stretch_fac = 1.03 auto_routine = 1 endif - case (90) ! super-duper cell + ! kgao L88 + case (88) + ks = 20 !19 bug fix + do k=1,km+1 + scm_state%a_k(k) = a88(k) + scm_state%b_k(k) = b88(k) + enddo + case (90) ! super-duper cell ptop = 40.e2 stretch_fac = 1.025 auto_routine = 2 @@ -421,16 +448,24 @@ subroutine get_FV3_vgrid(scm_input, scm_state) scm_state%b_k(k) = b125(k) enddo case (127) ! N = 10, M=5 - if (trim(scm_state%npz_type) == 'hitop') then - ptop = 1. - stretch_fac = 1.03 - auto_routine = 2 - else - ptop = 1. - pint = 75.E2 - stretch_fac = 1.028 - auto_routine = 6 - endif + if (trim(scm_state%npz_type) == 'hitop') then + ptop = 1. + stretch_fac = 1.03 + auto_routine = 2 + elseif (trim(scm_state%npz_type) == 'gfs') then + ks = 39 + ptop = a127(1) + pint = a127(ks+1) + do k=1,km+1 + scm_state%a_k(k) = a127(k) + scm_state%b_k(k) = b127(k) + enddo + else + ptop = 1. + pint = 75.E2 + stretch_fac = 1.028 + auto_routine = 6 + endif case (151) !LES applications ptop = 75.e2