diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 1fccc357f..4c1eb33e9 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -360,6 +360,8 @@ jobs: - jobid: 'job3' tests: 'climatology_2.5deg' - jobid: 'job4' + tests: 'climatology_mixed' + - jobid: 'job5' tests: 'python point2grid plot_data_plane mode mode_analysis perc_thresh hira plot_point_obs quality_filter obs_summary duplicate_flag' fail-fast: false steps: diff --git a/data/table_files/met_header_columns_V12.0.txt b/data/table_files/met_header_columns_V12.0.txt index 3d98f0220..5a7f27978 100644 --- a/data/table_files/met_header_columns_V12.0.txt +++ b/data/table_files/met_header_columns_V12.0.txt @@ -5,7 +5,7 @@ V12.0 : STAT : FHO : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID V12.0 : STAT : ISC : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL TILE_DIM TILE_XLL TILE_YLL NSCALE ISCALE MSE ISC FENERGY2 OENERGY2 BASER FBIAS V12.0 : STAT : MCTC : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL (N_CAT) F[0-9]*_O[0-9]* EC_VALUE V12.0 : STAT : MCTS : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL N_CAT ACC ACC_NCL ACC_NCU ACC_BCL ACC_BCU HK HK_BCL HK_BCU HSS HSS_BCL HSS_BCU GER GER_BCL GER_BCU HSS_EC HSS_EC_BCL HSS_EC_BCU EC_VALUE -V12.0 : STAT : MPR : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR 
FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL INDEX OBS_SID OBS_LAT OBS_LON OBS_LVL OBS_ELV FCST OBS OBS_QC CLIMO_MEAN CLIMO_STDEV CLIMO_CDF +V12.0 : STAT : MPR : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL INDEX OBS_SID OBS_LAT OBS_LON OBS_LVL OBS_ELV FCST OBS OBS_QC OBS_CLIMO_MEAN OBS_CLIMO_STDEV OBS_CLIMO_CDF FCST_CLIMO_MEAN FCST_CLIMO_STDEV V12.0 : STAT : SEEPS : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL S12 S13 S21 S23 S31 S32 PF1 PF2 PF3 PV1 PV2 PV3 MEAN_FCST MEAN_OBS SEEPS V12.0 : STAT : SEEPS_MPR : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE OBS_SID OBS_LAT OBS_LON FCST OBS OBS_QC FCST_CAT OBS_CAT P1 P2 T1 T2 SEEPS V12.0 : STAT : NBRCNT : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL FBS FBS_BCL FBS_BCU FSS FSS_BCL FSS_BCU AFSS AFSS_BCL AFSS_BCU UFSS UFSS_BCL UFSS_BCU F_RATE F_RATE_BCL F_RATE_BCU O_RATE O_RATE_BCL O_RATE_BCU @@ -13,7 +13,7 @@ V12.0 : STAT : NBRCTC : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID V12.0 : STAT : NBRCTS : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR 
OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL BASER BASER_NCL BASER_NCU BASER_BCL BASER_BCU FMEAN FMEAN_NCL FMEAN_NCU FMEAN_BCL FMEAN_BCU ACC ACC_NCL ACC_NCU ACC_BCL ACC_BCU FBIAS FBIAS_BCL FBIAS_BCU PODY PODY_NCL PODY_NCU PODY_BCL PODY_BCU PODN PODN_NCL PODN_NCU PODN_BCL PODN_BCU POFD POFD_NCL POFD_NCU POFD_BCL POFD_BCU FAR FAR_NCL FAR_NCU FAR_BCL FAR_BCU CSI CSI_NCL CSI_NCU CSI_BCL CSI_BCU GSS GSS_BCL GSS_BCU HK HK_NCL HK_NCU HK_BCL HK_BCU HSS HSS_BCL HSS_BCU ODDS ODDS_NCL ODDS_NCU ODDS_BCL ODDS_BCU LODDS LODDS_NCL LODDS_NCU LODDS_BCL LODDS_BCU ORSS ORSS_NCL ORSS_NCU ORSS_BCL ORSS_BCU EDS EDS_NCL EDS_NCU EDS_BCL EDS_BCU SEDS SEDS_NCL SEDS_NCU SEDS_BCL SEDS_BCU EDI EDI_NCL EDI_NCU EDI_BCL EDI_BCU SEDI SEDI_NCL SEDI_NCU SEDI_BCL SEDI_BCU BAGSS BAGSS_BCL BAGSS_BCU V12.0 : STAT : GRAD : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL FGBAR OGBAR MGBAR EGBAR S1 S1_OG FGOG_RATIO DX DY V12.0 : STAT : DMAP : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL FY OY FBIAS BADDELEY HAUSDORFF MED_FO MED_OF MED_MIN MED_MAX MED_MEAN FOM_FO FOM_OF FOM_MIN FOM_MAX FOM_MEAN ZHU_FO ZHU_OF ZHU_MIN ZHU_MAX ZHU_MEAN G GBETA BETA_VALUE -V12.0 : STAT : ORANK : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL INDEX OBS_SID OBS_LAT OBS_LON OBS_LVL OBS_ELV OBS PIT RANK N_ENS_VLD (N_ENS) ENS_[0-9]* OBS_QC ENS_MEAN CLIMO_MEAN SPREAD ENS_MEAN_OERR 
SPREAD_OERR SPREAD_PLUS_OERR CLIMO_STDEV +V12.0 : STAT : ORANK : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL INDEX OBS_SID OBS_LAT OBS_LON OBS_LVL OBS_ELV OBS PIT RANK N_ENS_VLD (N_ENS) ENS_[0-9]* OBS_QC ENS_MEAN OBS_CLIMO_MEAN SPREAD ENS_MEAN_OERR SPREAD_OERR SPREAD_PLUS_OERR OBS_CLIMO_STDEV FCST_CLIMO_MEAN FCST_CLIMO_STDEV V12.0 : STAT : PCT : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL (N_THRESH) THRESH_[0-9]* OY_[0-9]* ON_[0-9]* V12.0 : STAT : PJC : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL (N_THRESH) THRESH_[0-9]* OY_TP_[0-9]* ON_TP_[0-9]* CALIBRATION_[0-9]* REFINEMENT_[0-9]* LIKELIHOOD_[0-9]* BASER_[0-9]* V12.0 : STAT : PRC : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL (N_THRESH) THRESH_[0-9]* PODY_[0-9]* POFD_[0-9]* diff --git a/docs/Users_Guide/appendixA.rst b/docs/Users_Guide/appendixA.rst index 4dbf57100..f39c96913 100644 --- a/docs/Users_Guide/appendixA.rst +++ b/docs/Users_Guide/appendixA.rst @@ -1801,8 +1801,11 @@ Q. What are MET's conventions for latitude, longitude, azimuth and bearing angle .. dropdown:: Answer - MET considers north latitude and east longitude positive. Latitudes - have range from :math:`-90^\circ` to :math:`+90^\circ`. 
Longitudes have + MET considers north latitude and east longitude positive. However, + internally MET considers east longitude negative so users may encounter + DEBUG statements with longitude of a different sign than they provided + (e.g. for observation locations or grid metadata). Latitudes have + range from :math:`-90^\circ` to :math:`+90^\circ`. Longitudes have range from :math:`-180^\circ` to :math:`+180^\circ`. Plane angles such as azimuths and bearing (example: horizontal wind direction) have range :math:`0^\circ` to :math:`360^\circ` and are measured clockwise diff --git a/docs/Users_Guide/appendixC.rst b/docs/Users_Guide/appendixC.rst index 15c3ab5c2..a6bbb0fe5 100644 --- a/docs/Users_Guide/appendixC.rst +++ b/docs/Users_Guide/appendixC.rst @@ -616,23 +616,23 @@ Anomaly Correlation Coefficient Called "ANOM_CORR" and "ANOM_CORR_UNCNTR" for centered and uncentered versions in CNT output :numref:`table_PS_format_info_CNT` -The anomaly correlation coefficient is equivalent to the Pearson correlation coefficient, except that both the forecasts and observations are first adjusted according to a climatology value. The anomaly is the difference between the individual forecast or observation and the typical situation, as measured by a climatology (**c**) of some variety. It measures the strength of linear association between the forecast anomalies and observed anomalies. The anomaly correlation coefficient is defined as: +The anomaly correlation coefficient is equivalent to the Pearson correlation coefficient, except that both the forecasts and observations are first adjusted by subtracting their corresponding climatology value. The anomaly is the difference between the individual forecast or observation and the typical situation, as measured by a forecast climatology (:math:`c_f`) and observation climatology (:math:`c_o`). It measures the strength of linear association between the forecast anomalies and observed anomalies. 
The anomaly correlation coefficient is defined as: -.. math:: \text{Anomaly Correlation} = \frac{\sum(f_i - c)(o_i - c)}{\sqrt{\sum(f_i - c)^2} \sqrt{\sum(o_i -c)^2}} . +.. math:: \text{Anomaly Correlation} = \frac{\sum(f_i - {c_f}_i)(o_i - {c_o}_i)}{\sqrt{\sum(f_i - {c_f}_i)^2} \sqrt{\sum(o_i - {c_o}_i)^2}} . The centered anomaly correlation coefficient (ANOM_CORR) which includes the mean error is defined as: .. only:: latex - .. math:: \text{ANOM\_CORR } = \frac{ \overline{[(f - c) - \overline{(f - c)}][(a - c) - \overline{(a - c)}]}}{ \sqrt{ \overline{( (f - c) - \overline{(f - c)})^2} \overline{( (a - c) - \overline{(a - c)})^2}}} + .. math:: \text{ANOM\_CORR } = \frac{ \overline{[(f - c_f) - \overline{(f - c_f)}][(o - c_o) - \overline{(o - c_o)}]}}{ \sqrt{ \overline{( (f - c_f) - \overline{(f - c_f)})^2} \overline{( (o - c_o) - \overline{(o - c_o)})^2}}} .. only:: html - .. math:: \text{ANOM_CORR } = \frac{ \overline{[(f - c) - \overline{(f - c)}][(a - c) - \overline{(a - c)}]}}{ \sqrt{ \overline{( (f - c) - \overline{(f - c)})^2} \overline{( (a - c) - \overline{(a - c)})^2}}} + .. math:: \text{ANOM_CORR } = \frac{ \overline{[(f - c_f) - \overline{(f - c_f)}][(o - c_o) - \overline{(o - c_o)}]}}{ \sqrt{ \overline{( (f - c_f) - \overline{(f - c_f)})^2} \overline{( (o - c_o) - \overline{(o - c_o)})^2}}} The uncentered anomaly correlation coefficient (ANOM_CORR_UNCNTR) which does not include the mean errors is defined as: -.. math:: \text{Anomaly Correlation Raw } = \frac{ \overline{(f - c)(a - c)}}{ \sqrt{\overline{(f - c)^2} \overline{(a - c)^2}}} +.. math:: \text{Anomaly Correlation Raw } = \frac{ \overline{(f - c_f)(o - c_o)}}{ \sqrt{\overline{(f - c_f)^2} \overline{(o - c_o)^2}}} Anomaly correlation can range between -1 and 1; a value of 1 indicates perfect correlation and a value of -1 indicates perfect negative correlation. A value of 0 indicates that the forecast and observed anomalies are not correlated. 
@@ -650,56 +650,60 @@ The partial sums can be accumulated over individual cases to produce statistics Scalar L1 and L2 Values ----------------------- -Called "FBAR", "OBAR", "FOBAR", "FFBAR", and "OOBAR" in SL1L2 output :numref:`table_PS_format_info_SL1L2` +Called "FBAR", "OBAR", "FOBAR", "FFBAR", "OOBAR", and "MAE" in SL1L2 output :numref:`table_PS_format_info_SL1L2` These statistics are simply the 1st and 2nd moments of the forecasts, observations and errors: .. math:: - \text{FBAR} = \text{Mean}(f) = \bar{f} = \frac{1}{n} \sum_{i=1}^n f_i + \text{FBAR} = \text{Mean}(f) = \frac{1}{n} \sum_{i=1}^n f_i - \text{OBAR} = \text{Mean}(o) = \bar{o} = \frac{1}{n} \sum_{i=1}^n o_i + \text{OBAR} = \text{Mean}(o) = \frac{1}{n} \sum_{i=1}^n o_i - \text{FOBAR} = \text{Mean}(fo) = \bar{fo} = \frac{1}{n} \sum_{i=1}^n f_i o_i + \text{FOBAR} = \text{Mean}(fo) = \frac{1}{n} \sum_{i=1}^n f_i o_i - \text{FFBAR} = \text{Mean}(f^2) = \bar{f}^2 = \frac{1}{n} \sum_{i=1}^n f_i^2 + \text{FFBAR} = \text{Mean}(f^2) = \frac{1}{n} \sum_{i=1}^n f_i^2 - \text{OOBAR} = \text{Mean}(o^2) = \bar{o}^2 = \frac{1}{n} \sum_{i=1}^n o_i^2 + \text{OOBAR} = \text{Mean}(o^2) = \frac{1}{n} \sum_{i=1}^n o_i^2 + + \text{MAE} = \text{Mean}(|f - o|) = \frac{1}{n} \sum_{i=1}^n |f_i - o_i| Some of the other statistics for continuous forecasts (e.g., RMSE) can be derived from these moments. Scalar Anomaly L1 and L2 Values ------------------------------- -Called "FABAR", "OABAR", "FOABAR", "FFABAR", "OOABAR" in SAL1L2 output :numref:`table_PS_format_info_SAL1L2` +Called "FABAR", "OABAR", "FOABAR", "FFABAR", "OOABAR", and "MAE" in SAL1L2 output :numref:`table_PS_format_info_SAL1L2` -Computation of these statistics requires a climatological value, c. These statistics are the 1st and 2nd moments of the scalar anomalies. The moments are defined as: +Computation of these statistics requires climatological values, where :math:`c_f` is the forecast climatology value and :math:`c_o` is the observation climatology value. 
These statistics are the 1st and 2nd moments of the scalar anomalies. The moments are defined as: .. math:: - \text{FABAR} = \text{Mean}(f - c) = \bar{f - c} = \frac{1}{n} \sum_{i=1}^n (f_i - c) + \text{FABAR} = \text{Mean}(f - c_f) = \frac{1}{n} \sum_{i=1}^n (f_i - {c_f}_i) + + \text{OABAR} = \text{Mean}(o - c_o) = \frac{1}{n} \sum_{i=1}^n (o_i - {c_o}_i) - \text{OABAR} = \text{Mean}(o - c) = \bar{o - c} = \frac{1}{n} \sum_{i=1}^n (o_i - c) + \text{FOABAR} = \text{Mean}[(f - c_f)(o - c_o)] = \frac{1}{n} \sum_{i=1}^n (f_i - {c_f}_i)(o_i - {c_o}_i) - \text{FOABAR} = \text{Mean}[(f - c)(o - c)] = \bar{(f - c)(o - c)} = \frac{1}{n} \sum_{i=1}^n (f_i - c)(o_i - c) + \text{FFABAR} = \text{Mean}[(f - c_f)^2] = \frac{1}{n} \sum_{i=1}^n (f_i - {c_f}_i)^2 - \text{FFABAR} = \text{Mean}[(f - c)^2] = \bar{(f - c)}^2 = \frac{1}{n} \sum_{i=1}^n (f_i - c)^2 + \text{OOABAR} = \text{Mean}[(o - c_o)^2] = \frac{1}{n} \sum_{i=1}^n (o_i - {c_o}_i)^2 - \text{OOABAR} = \text{Mean}[(o - c)^2] = \bar{(o - c)}^2 = \frac{1}{n} \sum_{i=1}^n (o_i - c)^2 + \text{MAE} = \text{Mean}(|(f - c_f) - (o - c_o)|) = \frac{1}{n} \sum_{i=1}^n |(f_i - {c_f}_i) - (o_i - {c_o}_i)| Vector L1 and L2 Values ----------------------- -Called "UFBAR", "VFBAR", "UOBAR", "VOBAR", "UVFOBAR", "UVFFBAR", "UVOOBAR" in VL1L2 output :numref:`table_PS_format_info_VL1L2` +Called "UFBAR", "VFBAR", "UOBAR", "VOBAR", "UVFOBAR", "UVFFBAR", and "UVOOBAR" in VL1L2 output :numref:`table_PS_format_info_VL1L2` -These statistics are the moments for wind vector values, where **u** is the E-W wind component and **v** is the N-S wind component ( :math:`u_f` is the forecast E-W wind component; :math:`u_o` is the observed E-W wind component; :math:`v_f` is the forecast N-S wind component; and :math:`v_o` is the observed N-S wind component). 
The following measures are computed: +These statistics are the moments for wind vector values, where :math:`u` is the E-W wind component and :math:`v` is the N-S wind component ( :math:`u_f` is the forecast E-W wind component; :math:`u_o` is the observed E-W wind component; :math:`v_f` is the forecast N-S wind component; and :math:`v_o` is the observed N-S wind component). The following measures are computed: .. math:: - \text{UFBAR} = \text{Mean}(u_f) = \bar{u}_f = \frac{1}{n} \sum_{i=1}^n u_{fi} + \text{UFBAR} = \text{Mean}(u_f) = \frac{1}{n} \sum_{i=1}^n u_{fi} - \text{VFBAR} = \text{Mean}(v_f) = \bar{v}_f = \frac{1}{n} \sum_{i=1}^n v_{fi} + \text{VFBAR} = \text{Mean}(v_f) = \frac{1}{n} \sum_{i=1}^n v_{fi} - \text{UOBAR} = \text{Mean}(u_o) = \bar{u}_o = \frac{1}{n} \sum_{i=1}^n u_{oi} + \text{UOBAR} = \text{Mean}(u_o) = \frac{1}{n} \sum_{i=1}^n u_{oi} - \text{VOBAR} = \text{Mean}(v_o) = \bar{v}_o = \frac{1}{n} \sum_{i=1}^n v_{oi} + \text{VOBAR} = \text{Mean}(v_o) = \frac{1}{n} \sum_{i=1}^n v_{oi} \text{UVFOBAR} = \text{Mean}(u_f u_o + v_f v_o) = \frac{1}{n} \sum_{i=1}^n (u_{fi} u_{oi} + v_{fi} v_{oi}) @@ -710,25 +714,27 @@ These statistics are the moments for wind vector values, where **u** is the E-W Vector Anomaly L1 and L2 Values ------------------------------- -Called "UFABAR", "VFABAR", "UOABAR", "VOABAR", "UVFOABAR", "UVFFABAR", "UVOOABAR" in VAL1L2 output :numref:`table_PS_format_info_VAL1L2` +Called "UFABAR", "VFABAR", "UOABAR", "VOABAR", "UVFOABAR", "UVFFABAR", and "UVOOABAR" in VAL1L2 output :numref:`table_PS_format_info_VAL1L2` -These statistics require climatological values for the wind vector components, :math:`u_c \text{ and } v_c`. The measures are defined below: +These statistics require climatological values for the wind vector components, where :math:`{u_c}_f` and :math:`{v_c}_f` are the forecast climatology vectors and :math:`{u_c}_o` and :math:`{v_c}_o` are the observation climatology vectors. The measures are defined below: .. 
math:: - \text{UFABAR} = \text{Mean}(u_f - u_c) = \frac{1}{n} \sum_{i=1}^n (u_{fi} - u_c) + \text{UFABAR} = \text{Mean}(u_f - {u_c}_f) = \frac{1}{n} \sum_{i=1}^n ({u_f}_i - {{u_c}_f}_i) - \text{VFBAR} = \text{Mean}(v_f - v_c) = \frac{1}{n} \sum_{i=1}^n (v_{fi} - v_c) + \text{VFBAR} = \text{Mean}(v_f - {v_c}_f) = \frac{1}{n} \sum_{i=1}^n ({v_f}_i - {{v_c}_f}_i) - \text{UOABAR} = \text{Mean}(u_o - u_c) = \frac{1}{n} \sum_{i=1}^n (u_{oi} - u_c) + \text{UOABAR} = \text{Mean}(u_o - {u_c}_o) = \frac{1}{n} \sum_{i=1}^n ({u_o}_i - {{u_c}_o}_i) - \text{VOABAR} = \text{Mean}(v_o - v_c) = \frac{1}{n} \sum_{i=1}^n (v_{oi} - v_c) + \text{VOABAR} = \text{Mean}(v_o - {v_c}_o) = \frac{1}{n} \sum_{i=1}^n ({v_o}_i - {{v_c}_o}_i) - \text{UVFOABAR} &= \text{Mean}[(u_f - u_c)(u_o - u_c) + (v_f - v_c)(v_o - v_c)] \\ - &= \frac{1}{n} \sum_{i=1}^n (u_{fi} - u_c) + (u_{oi} - u_c) + (v_{fi} - v_c)(v_{oi} - v_c) + \text{UVFOABAR} &= \text{Mean}[(u_f - {u_c}_f)(u_o - {u_c}_o) + (v_f - {v_c}_f)(v_o - {v_c}_o)] \\ + &= \frac{1}{n} \sum_{i=1}^n ({u_f}_i - {{u_c}_f}_i) + ({u_o}_i - {{u_c}_o}_i) + ({v_f}_i - {{v_c}_f}_i)({v_o}_i - {{v_c}_o}_i) - \text{UVFFABAR} = \text{Mean}[(u_f - u_c)^2 + (v_f - v_c)^2] = \frac{1}{n} \sum_{i=1}^n ((u_{fi} - u_c)^2 + (v_{fi} - v_c)^2) + \text{UVFFABAR} &= \text{Mean}[(u_f - {u_c}_f)^2 + (v_f - {v_c}_f)^2] \\ + &= \frac{1}{n} \sum_{i=1}^n (({u_f}_i - {{u_c}_f}_i)^2 + ({v_f}_i - {{v_c}_f}_i)^2) - \text{UVOOABAR} = \text{Mean}[(u_o - u_c)^2 + (v_o - v_c)^2] = \frac{1}{n} \sum_{i=1}^n ((u_{oi} - u_c)^2 + (v_{oi} - v_c)^2) + \text{UVOOABAR} &= \text{Mean}[(u_o - {u_c}_o)^2 + (v_o - {v_c}_o)^2] \\ + &= \frac{1}{n} \sum_{i=1}^n (({u_o}_i - {{u_c}_o}_i)^2 + ({v_o}_i - {{v_c}_o}_i)^2) Gradient Values --------------- diff --git a/docs/Users_Guide/config_options.rst b/docs/Users_Guide/config_options.rst index de538bd7c..3d892808e 100644 --- a/docs/Users_Guide/config_options.rst +++ b/docs/Users_Guide/config_options.rst @@ -158,15 +158,15 @@ The configuration file language 
supports the following data types: .. note:: - Prior to MET version 12.0.0, forecast climatological inputs were not - supported. The observation climatological inputs were used to process - threshold types named "SCP" and "CDP". + Prior to MET version 12.0.0, forecast climatological inputs were not + supported. The observation climatological inputs were used to process + threshold types named "SCP" and "CDP". - For backward compatibility, the "SCP" threshold type is processed the same - as "SOCP" and "CDP" the same as "OCDP". + For backward compatibility, the "SCP" threshold type is processed the same + as "SOCP" and "CDP" the same as "OCDP". - Users are encouraged to replace the deprecated "SCP" and "CDP" threshold - types with the updated "SOCP" and "OCDP" types, respectively. + Users are encouraged to replace the deprecated "SCP" and "CDP" threshold + types with the updated "SOCP" and "OCDP" types, respectively. * Piecewise-Linear Function (currently used only by MODE): @@ -324,8 +324,10 @@ To run this utility: -e EXISTING_FILE, --existing=EXISTING_FILE Save the text into the named file (optional, default: ../../../data/table_files/ndbc_stations.xml) -NOTE: The downloaded files are written to a subdirectory ndbc_temp_data which -can be deleted once the final output file is created. +.. note:: + + The downloaded files are written to a subdirectory ndbc_temp_data which + can be deleted once the final output file is created. MET_BASE -------- @@ -1508,8 +1510,11 @@ the climatology file names and fields to be used. with 6 and 12 being common choices. Use "NA" if the timing of the climatology data should not be checked. -* The "day_interval" and "hour_interval" entries replace the deprecated - entries "match_month", "match_day", and "time_step". +.. note:: + + As of MET version 11.0.0, the "day_interval" and "hour_interval" entries + replace the "match_month", "match_day", and "time_step" entries, which are + now deprecated. .. 
code-block:: none @@ -1563,6 +1568,27 @@ over the "climo_mean" setting and then updating the "file_name" entry. file_name = [ "/path/to/climatological/standard/deviation/files" ]; } +Prior to MET version 12.0.0, forecast climatological inputs were not supported. +If the "climo_mean" and "climo_stdev" dictionaries are defined at the top-level +configuration file context, the same data is used for both the forecast and +observation climatologies. To specify separate forecast and observation +climatologies, define "climo_mean" and "climo_stdev" inside the "fcst" and "obs" +dictionaries, as shown below. + +.. code-block:: none + + fcst = { + field = [ ... ]; + climo_mean = { ... }; + climo_stdev = { ... }; + } + + obs = { + field = [ ... ]; + climo_mean = { ... }; + climo_stdev = { ... }; + } + climo_cdf --------- @@ -1729,7 +1755,7 @@ Point-Stat and Ensemble-Stat, the reference time is the forecast valid time. .. _config_options-mask: mask ---- +---- The "mask" entry is a dictionary that specifies the verification masking regions to be used when computing statistics. Each mask defines a @@ -1749,11 +1775,18 @@ in the following ways: * The "poly" entry contains a comma-separated list of files that define verification masking regions. These masking regions may be specified in - two ways: in an ASCII file containing lat/lon points defining the mask polygon, - or using a gridded data file such as the NetCDF output of the Gen-Vx-Mask tool. - Some details for each of these options are described below: + three ways: + + 1. An ASCII polyline file containing lat/lon points defining the mask polygon. + 2. The NetCDF output of the Gen-Vx-Mask tool. + 3. Any gridded data file followed by a configuration string describing the + data to be read and an optional threshold to be applied to that data. 
+ + These three options are described below: - * If providing an ASCII file containing the lat/lon points defining the mask + * Option 1 - ASCII polyline file: + + If providing an ASCII file containing the lat/lon points defining the mask polygon, the file must contain a name for the region followed by the latitude (degrees north) and longitude (degrees east) for each vertex of the polygon. The values are separated by whitespace (e.g. spaces or newlines), and the @@ -1781,17 +1814,38 @@ in the following ways: observation point falls within the polygon defined is done in x/y grid space. - * The NetCDF output of the gen_vx_mask tool. Please see :numref:`masking` + .. code-block:: none + + mask = { poly = [ "share/met/poly/CONUS.poly" ]; } + + * Option 2 - Gen-Vx-Mask output: + + The NetCDF output of the gen_vx_mask tool. Please see :numref:`masking` for more details. - * Any gridded data file that MET can read may be used to define a + .. code-block:: none + + mask = { poly = [ "/path/to/gen_vx_mask_output.nc" ]; } + + * Option 3 - Any gridded data file: + + Any gridded data file that MET can read may be used to define a verification masking region. Users must specify a description of the field to be used from the input file and, optionally, may specify a threshold to be applied to that field. Once this threshold is applied, any grid point where the resulting field is 0, the mask is turned off. Any grid point where it is non-zero, the mask is turned on. - For example, "sample.grib {name = \"TMP\"; level = \"Z2\";} >273" + + .. code-block:: none + + mask = { poly = [ "/path/to/sample.grib {name = \"TMP\"; level = \"Z2\";} >273" ]; } + + .. note:: + + The syntax for Option 3 is complicated since it includes quotes + embedded within another quoted string. Any such embedded quotes must + be escaped using a preceding backslash character. * The "sid" entry is an array of strings which define groups of observation station ID's over which to compute statistics. 
Each entry @@ -2269,8 +2323,10 @@ For example: | nc_pairs_var_suffix = "FREEZING"; (for the freezing level height) | -NOTE: This option was previously named "nc_pairs_var_str", which is -now deprecated. +.. note:: + + Prior to MET version 9.0.0, this option was named "nc_pairs_var_str", + which is now deprecated. .. code-block:: none diff --git a/docs/Users_Guide/grid-diag.rst b/docs/Users_Guide/grid-diag.rst index f2fd55e78..59bbb3861 100644 --- a/docs/Users_Guide/grid-diag.rst +++ b/docs/Users_Guide/grid-diag.rst @@ -27,7 +27,9 @@ The following sections describe the usage statement, required arguments, and opt [-v level] [-compress level] - NOTE: The "-data" option can be used once to read all fields from each input file or once for each field to be processed. +.. note:: + + The "-data" option can be used once to read all fields from each input file or once for each field to be processed. grid_diag has required arguments and can accept several optional arguments. diff --git a/docs/Users_Guide/mode-td.rst b/docs/Users_Guide/mode-td.rst index c8c07c111..f7dd558a7 100644 --- a/docs/Users_Guide/mode-td.rst +++ b/docs/Users_Guide/mode-td.rst @@ -217,7 +217,9 @@ Required Arguments for mtd Optional Arguments for mtd ^^^^^^^^^^^^^^^^^^^^^^^^^^ -4. **-single file\_list** may be used instead of **-fcst** and **-obs** to define objects in a single field. +4. **-single file\_list** command line option may be used instead of the **-fcst** and **-obs** command line options to define objects in a single field. + +.. note:: When the **-single** command line option is used, data specified in the **fcst** configuration file entry is read from those input files. 5. **-log file** gives the name of a file where a log of this MTD run will be written. All output that appears on the screen during a MTD run will be duplicated in the log file. 
diff --git a/docs/Users_Guide/mode.rst b/docs/Users_Guide/mode.rst index 491b45200..bb59cfee3 100644 --- a/docs/Users_Guide/mode.rst +++ b/docs/Users_Guide/mode.rst @@ -913,7 +913,7 @@ The contents of the columns in this ASCII file are summarized in :numref:`MODE_o * - 50 - AREA_RATIO - The forecast object area divided by the observation object area (unitless) :raw-html:`
` - **NOTE:** Prior to met-10.0.0, defined as the lesser of the two object areas divided by the greater of the two + **NOTE:** Prior to MET version 10.0.0, the AREA_RATIO was defined as the lesser of the two object areas divided by the greater of the two. * - 51 - INTERSECTION :raw-html:`
` \_AREA - Intersection area of two objects (in grid squares) diff --git a/docs/Users_Guide/point-stat.rst b/docs/Users_Guide/point-stat.rst index 6c9849511..70e3847b7 100644 --- a/docs/Users_Guide/point-stat.rst +++ b/docs/Users_Guide/point-stat.rst @@ -1204,7 +1204,7 @@ The first set of header columns are common to all of the output files generated - Mean(o²) * - 31 - MAE - - Mean Absolute Error + - Mean(\|f-o\|) .. _table_PS_format_info_SAL1L2: @@ -1223,25 +1223,25 @@ The first set of header columns are common to all of the output files generated - Scalar Anomaly L1L2 line type * - 25 - TOTAL - - Total number of matched triplets of forecast (f), observation (o), and climatological value (c) + - Total number of matched pairs of forecast (f), observation (o), forecast climatology (cf), and observation climatology (co) * - 26 - FABAR - - Mean(f-c) + - Mean(f-cf) * - 27 - OABAR - - Mean(o-c) + - Mean(o-co) * - 28 - FOABAR - - Mean((f-c)*(o-c)) + - Mean((f-cf)*(o-co)) * - 29 - FFABAR - - Mean((f-c)²) + - Mean((f-cf)²) * - 30 - OOABAR - - Mean((o-c)²) + - Mean((o-co)²) * - 31 - MAE - - Mean Absolute Error + - Mean(\|(f-cf)-(o-co)\|) .. 
_table_PS_format_info_VL1L2: @@ -1318,28 +1318,28 @@ The first set of header columns are common to all of the output files generated - Vector Anomaly L1L2 line type * - 25 - TOTAL - - Total number of matched triplets of forecast winds (uf, vf), observation winds (uo, vo), and climatological winds (uc, vc) + - Total number of matched pairs of forecast winds (uf, vf), observation winds (uo, vo), forecast climatology winds (ucf, vcf), and observation climatology winds (uco, vco) * - 26 - UFABAR - - Mean(uf-uc) + - Mean(uf-ucf) * - 27 - VFABAR - - Mean(vf-vc) + - Mean(vf-vcf) * - 28 - UOABAR - - Mean(uo-uc) + - Mean(uo-uco) * - 29 - VOABAR - - Mean(vo-vc) + - Mean(vo-vco) * - 30 - UVFOABAR - - Mean((uf-uc)*(uo-uc)+(vf-vc)*(vo-vc)) + - Mean((uf-ucf)*(uo-uco)+(vf-vcf)*(vo-vco)) * - 31 - UVFFABAR - - Mean((uf-uc)²+(vf-vc)²) + - Mean((uf-ucf)²+(vf-vcf)²) * - 32 - UVOOABAR - - Mean((uo-uc)²+(vo-vc)²) + - Mean((uo-uco)²+(vo-vco)²) * - 33 - FA_SPEED_BAR - Mean forecast wind speed anomaly @@ -1348,7 +1348,7 @@ The first set of header columns are common to all of the output files generated - Mean observed wind speed anomaly * - 35 - TOTAL_DIR - - Total number of matched triplets for which the forecast, observation, and climatological wind directions are well-defined (i.e. non-zero vectors) + - Total number of matched pairs for which the forecast, observation, forecast climatology, and observation climatology wind directions are well-defined (i.e. 
non-zero vectors) * - 36 - DIRA_ME - Mean wind direction anomaly difference, from -180 to 180 degrees diff --git a/internal/scripts/installation/compile_MET_all.wcoss_beta5.sh b/internal/scripts/installation/compile_MET_all.wcoss_beta5.sh new file mode 100755 index 000000000..5065643c8 --- /dev/null +++ b/internal/scripts/installation/compile_MET_all.wcoss_beta5.sh @@ -0,0 +1,959 @@ +#!/bin/bash +# +# Compile and install MET +# (Model Evaluation Tools) +#================================================ +# +# This compile_MET_all.sh script expects certain environment +# variables to be set: +# TEST_BASE, COMPILER (or COMPILER_FAMILY and COMPILER_VERSION), +# MET_SUBDIR, MET_TARBALL, and USE_MODULES. +# +# If compiling support for Python embedding, users will need to +# set MET_PYTHON, MET_PYTHON_BIN_EXE, MET_PYTHON_CC, and MET_PYTHON_LD. +# Users can directly set the python module to be loaded by setting +# either PYTHON_MODULE or by setting PYTHON_NAME and PYTHON_VERSION: +# - PYTHON_MODULE (only used if USE_MODULES=TRUE) - format is the name +# of the Python module to load followed by an underscore and then the +# version number (e.g. python_3.10.4, The script will then run "module +# load python/3.10.4") +# - PYTHON_NAME = python (or e.g. python3, etc.) +# - PYTHON_VERSION = 3.10.4 +# +# For a description of these and other variables, visit the MET +# downloads page under "Sample Script For Compiling External +# Libraries And MET": +# https://dtcenter.org/community-code/model-evaluation-tools-met/download +# +# An easy way to set these necessary environment variables is +# in an environment configuration file (for example, +# install_met_env.). This script and example +# environment config files for various machines can be found in +# the MET GitHub repository in the scripts/installation directory: +# https://github.com/dtcenter/MET +# +# USAGE: compile_MET_all.sh install_met_env. 
+# +# The compile_MET_all.sh script will compile and install MET and its +# external library dependencies, if needed, including: +# PROJ (with dependency SQLITE >= 3.11), GSL, BUFRLIB, +# GRIB2C (with dependencies Z, PNG, JASPER, JPEG), HDF5, NETCDF (C and CXX), +# HDF4 (optional for MODIS-Regrid and lidar2nc), HDFEOS (optional for +# MODIS-Regrid and lidar2nc), FREETYPE (optional for MODE Graphics), +# and CAIRO (optional for MODE Graphics). +# +# If these libraries have already been installed and don't need to be +# reinstalled or if you are compiling on a machine that uses modulefiles +# and you'd like to make use of the existing dependent libraries on +# your machine, there are more environment variables that you will +# need to set to let MET know where the library and header files are. +# Please supply values for the following environment variables +# in the input environment configuration file (install_met_env.: +# MET_GRIB2CLIB, MET_GRIB2CINC, GRIB2CLIB_NAME, MET_BUFRLIB, BUFRLIB_NAME, +# MET_HDF5, MET_NETCDF, MET_PROJ, MET_GSL, LIB_JASPER, LIB_LIBPNG, LIB_Z, +# LIB_JPEG, SQLITE_INCLUDE_DIR, SQLITE_LIB_DIR, TIFF_INCLUDE_DIR, TIFF_LIB_DIR. +# +# The optional libraries ecKit and atlas offer support for unstructured +# grids. The optional libraries HDF4, HDFEOS, FREETYPE, and CAIRO are +# used for the following, not widely used tools, MODIS-Regrid, +# lidar2nc, and MODE Graphics. To enable building of these libraries, +# set the compile flags for the library (e.g. COMPILE_ECKIT, COMPILE_ATLAS, +# COMPILE_HDF, COMPILE_HDFEOS) to any value in the environment config +# file. If these libraries have already been installed and don't need +# to be reinstalled, please supply values for the following environment +# variables in the input environment configuration file +# (install_met_env.): MET_ECKIT, MET_ATLAS, MET_HDF, +# MET_HDFEOS, MET_FREETYPEINC, MET_FREETYPELIB, MET_CAIROINC, +# MET_CAIROLIB. 
+# +# Users can speed up the compilation of MET and its dependent libraries +# by adding the following to their environment configuration file: +# export MAKE_ARGS=-j # +# replacing the # with the number of cores to use (integer) or simply +# specifying: +# export MAKE_ARGS=-j +# with no integer argument to start as many processes in parallel as +# possible. +#================================================ + +# print command, run it, then error and exit if non-zero value is returned +function run_cmd { + echo $* + eval "$@" + ret=$? + if [ $ret != 0 ]; then + echo "ERROR: Command returned with non-zero ($ret) status: $*" + exit $ret + fi +} + +if [ -z $1 ]; then + echo + echo "No environment configuration file provided (e.g. install_met_env.). Starting compilation with current environment." +else + if [ ! -f "$1" ]; then + echo "The file \"$1\" does not exist!" + exit 1 + fi + + source $1 +fi + +echo +echo "TEST_BASE = ${TEST_BASE? "ERROR: TEST_BASE must be set"}" +echo "MET_SUBDIR = ${MET_SUBDIR? "ERROR: MET_SUBDIR must be set"}" +echo "MET_TARBALL = ${MET_TARBALL? "ERROR: MET_TARBALL must be set"}" +echo "USE_MODULES = ${USE_MODULES? "ERROR: USE_MODULES must be set to TRUE if using modules or FALSE otherwise"}" +if [[ -z "$COMPILER" ]] && [[ -z "$COMPILER_FAMILY" && -z "$COMPILER_VERSION" ]]; then + echo "ERROR: COMPILER or COMPILER_FAMILY and COMPILER_VERSION must be set" + exit 1 +fi +echo ${MAKE_ARGS:+MAKE_ARGS = $MAKE_ARGS} + + +LIB_DIR=${TEST_BASE}/external_libs +MET_DIR=${MET_SUBDIR} + +if [ -z "${BIN_DIR_PATH}" ]; then + if [ -z "${MET_INSTALL_DIR}" ]; then + BIN_DIR_PATH=${TEST_BASE}/bin + else + BIN_DIR_PATH=${MET_INSTALL_DIR}/bin + fi +fi + +if [ -z "${MET_INSTALL_DIR}" ]; then + MET_INSTALL_DIR=${MET_DIR} +else + LIB_DIR=${MET_INSTALL_DIR} +fi + +TAR_DIR=${TEST_BASE}/tar_files +MET_TARBALL=${TAR_DIR}/${MET_TARBALL} + +# Create directory for libraries +mkdir -p ${LIB_DIR} + +# Check that tar files exist +if [ ! 
-e $TAR_DIR ]; then + echo "TAR File directory doesn't exist: ${TAR_DIR}" + exit 1 +fi + +# If MET_PYTHON_LIB is not set in the environment file, set it to the +# lib directory so it can be used to install MET with Python Embedding +# support +if [[ -z "$MET_PYTHON_LIB" ]]; then + MET_PYTHON_LIB=${MET_PYTHON}/lib +fi + + +# Print library linker path +echo "LD_LIBRARY_PATH = ${LD_LIBRARY_PATH}" + +# if LIB_Z is not set in the environment file, set it to the +# lib directory so it can be used to install HDF5 with zlib support +if [[ -z "$LIB_Z" ]]; then + LIB_Z=${LIB_DIR}/lib +fi + +# if TIFF is not defined in the environment file, enable its compilation +if [[ -z ${TIFF_INCLUDE_DIR} ]] && [[ -z ${TIFF_LIB_DIR} ]]; then + COMPILE_TIFF=1 +else + COMPILE_TIFF=0 +fi + +# if SQLITE is not defined in the environment file, enable its compilation +if [[ -z ${SQLITE_INCLUDE_DIR} ]] && [[ -z ${SQLITE_LIB_DIR} ]]; then + COMPILE_SQLITE=1 +else + COMPILE_SQLITE=0 +fi + +# Constants +if [[ -z ${MET_GRIB2CLIB} ]] && [[ -z ${MET_GRIB2C} ]]; then + COMPILE_ZLIB=1 + COMPILE_LIBPNG=1 + COMPILE_JASPER=1 + COMPILE_JPEG=1 + COMPILE_G2CLIB=1 +else + COMPILE_ZLIB=0 + COMPILE_LIBPNG=0 + COMPILE_JASPER=0 + COMPILE_JPEG=0 + COMPILE_G2CLIB=0 +fi + +if [ -z ${MET_BUFRLIB} ]; then COMPILE_BUFRLIB=1; else COMPILE_BUFRLIB=0; fi + +if [ -z ${MET_NETCDF} ]; then COMPILE_NETCDF=1; else COMPILE_NETCDF=0; fi + +if [ -z ${MET_PROJ} ]; then COMPILE_PROJ=1; else COMPILE_PROJ=0; fi + +if [ -z ${MET_GSL} ]; then COMPILE_GSL=1; else COMPILE_GSL=0; fi + +# Only set COMPILE_ECKIT and COMPILE_ATLAS if you want to compile and enable support for unstructured grids +if [ ! -z "${COMPILE_ECKIT}" ]; then COMPILE_ECKIT=1; else COMPILE_ECKIT=0; fi +if [ !
-z "${COMPILE_ATLAS}" ]; then COMPILE_ATLAS=1; else COMPILE_ATLAS=0; fi + +if [[ -z ${MET_ECKIT} ]] && [[ -z ${MET_ATLAS} ]]; then + if [[ $COMPILE_ECKIT -eq 1 && $COMPILE_ATLAS -eq 1 ]]; then + export MET_ECKIT=${LIB_DIR} + export MET_ATLAS=${LIB_DIR} + fi +else + # Only set COMPILE_ECKIT and COMPILE_ATLAS to 1 if you have already compiled ECKIT and ATLAS, + # have set MET_ECKIT and MET_ATLAS in your configuration file, and want to enable + # unstructured grids + COMPILE_ECKIT=0 + COMPILE_ATLAS=0 +fi + +# Only set COMPILE_HDF and COMPILE_HDFEOS if you want to compile and enable MODIS-Regrid (not widely used) +if [ ! -z "${COMPILE_HDF}" ]; then COMPILE_HDF=1; else COMPILE_HDF=0; fi +if [ ! -z "${COMPILE_HDFEOS}" ]; then COMPILE_HDFEOS=1; else COMPILE_HDFEOS=0; fi + +if [[ -z ${MET_HDF} ]] && [[ -z ${MET_HDFEOS} ]]; then + if [[ $COMPILE_HDF -eq 1 && $COMPILE_HDFEOS -eq 1 ]]; then + export MET_HDF=${LIB_DIR} + export MET_HDFEOS=${LIB_DIR} + fi +else + # Only set COMPILE_HDF and COMPILE_HDFEOS to 1 if you have already compiled HDF4 and HDFEOS, + # have set MET_HDF and MET_HDFEOS in your configuration file, and want to enable + # MODIS-Regrid (not widely used) + COMPILE_HDF=0 + COMPILE_HDFEOS=0 +fi + +# Only set COMPILE_FREETYPE and COMPILE_CAIRO if you want to compile and enable MODE Graphics (not widely used) +if [ ! -z "${COMPILE_FREETYPE}" ]; then COMPILE_FREETYPE=1; else COMPILE_FREETYPE=0; fi +if [ ! -z "${COMPILE_CAIRO}" ]; then COMPILE_CAIRO=1; else COMPILE_CAIRO=0; fi + + +if [[ ! -z ${MET_FREETYPE} ]]; then + echo "ERROR: MET_FREETYPEINC and MET_FREETYPELIB must be set instead of MET_FREETYPE" + exit 1 +fi + +if [[ ! 
-z ${MET_CAIRO} ]]; then + echo "ERROR: MET_CAIROINC and MET_CAIROLIB must be set instead of MET_CAIRO" + exit 1 +fi + +if [[ -z ${MET_FREETYPEINC} && -z ${MET_FREETYPELIB} && -z ${MET_CAIROINC} && -z ${MET_CAIROLIB} ]]; then + if [[ $COMPILE_CAIRO -eq 1 && $COMPILE_FREETYPE -eq 1 ]]; then + export MET_CAIROINC=${LIB_DIR}/include/cairo + export MET_CAIROLIB=${LIB_DIR}/lib + export MET_FREETYPEINC=${LIB_DIR}/include/freetype2 + export MET_FREETYPELIB=${LIB_DIR}/lib + fi +else + # Only set COMPILE_FREETYPE and COMPILE_CAIRO to 1 if you have compiled FREETYPE and CAIRO, + # have set MET_FREETYPEINC, MET_FREETYPELIB, MET_CAIROINC, and MET_CAIROLIB in your + # configuration file, and want to enable MODE Graphics (not widely used) + COMPILE_FREETYPE=0 + COMPILE_CAIRO=0 +fi + +COMPILE_MET=1 + +# skip compilation of MET if SKIP_MET is set +if [ ! -z "${SKIP_MET}" ]; then COMPILE_MET=0; fi + +# skip compilation of external libraries if SKIP_LIBS is set +if [ ! -z "${SKIP_LIBS}" ]; then + COMPILE_PROJ=0 + COMPILE_GSL=0 + COMPILE_BUFRLIB=0 + COMPILE_ZLIB=0 + COMPILE_LIBPNG=0 + COMPILE_JASPER=0 + COMPILE_JPEG=0 + COMPILE_G2CLIB=0 + COMPILE_ECKIT=0 + COMPILE_ATLAS=0 + COMPILE_HDF=0 + COMPILE_HDFEOS=0 + COMPILE_NETCDF=0 + COMPILE_FREETYPE=0 + COMPILE_CAIRO=0 +fi + +if [ -z ${BIN_DIR_PATH} ]; then + BIN_DIR_PATH=${TEST_BASE}/bin +fi + +if [ -z ${USE_MET_TAR_FILE} ]; then + export USE_MET_TAR_FILE=TRUE +fi + +echo +echo "Compiling libraries into: ${LIB_DIR}" + +if [ ! -e ${LIB_DIR}/include ]; then + mkdir ${LIB_DIR}/include +fi + +if [ ! 
-e ${LIB_DIR}/lib ]; then + mkdir ${LIB_DIR}/lib +fi + +# Load compiler version +if [ -z ${COMPILER_FAMILY} ]; then + COMPILER_FAMILY=` echo $COMPILER | cut -d'_' -f1` +fi + +# Check for "oneapi" in compiler family name +#if echo ${COMPILER_FAMILY} | grep -E "^intel"; then +if [[ ${COMPILER_FAMILY} == *intel* ]]; then + COMPILER_FAMILY_SUFFIX=` echo $COMPILER_FAMILY | cut -d'-' -f2` +fi + +if [ -z ${COMPILER_VERSION} ]; then + COMPILER_VERSION=`echo $COMPILER | cut -d'_' -f2` +fi + +echo "COMPILER = $COMPILER" +echo "COMPILER_FAMILY = $COMPILER_FAMILY" +echo "COMPILER_FAMILY_SUFFIX = $COMPILER_FAMILY_SUFFIX" +echo "COMPILER_VERSION = $COMPILER_VERSION" +COMPILER_MAJOR_VERSION=`echo $COMPILER_VERSION | cut -d'.' -f1` +COMPILER_MINOR_VERSION=`echo $COMPILER_VERSION | cut -d'.' -f2` + +echo +echo "USE_MODULES = ${USE_MODULES}" +echo + +if [ ${USE_MODULES} = "TRUE" ]; then + echo "module load ${COMPILER_FAMILY}/${COMPILER_VERSION}" + echo ${COMPILER_FAMILY}/${COMPILER_VERSION} + + module load ${COMPILER_FAMILY}/${COMPILER_VERSION} + if [ ${COMPILER_FAMILY} = "PrgEnv-intel" ]; then + module load craype + module switch craype craype-sandybridge + fi +fi + +# After loading the compiler module, strip any extra +# characters off of "gnu" (e.g. 
"gnu9") +if [[ ${COMPILER_FAMILY} == *gnu* ]]; then + export COMPILER_FAMILY="gnu" +fi + +if [ ${COMPILER_FAMILY} = "gnu" ]; then + if [ -z ${CC} ]; then export CC=`which gcc`; fi + if [ -z ${CXX} ]; then export CXX=`which g++`; fi + if [ -z ${FC} ]; then export FC=`which gfortran`; fi + if [ -z ${F77} ]; then export F77=`which gfortran`; fi + if [ -z ${F90} ]; then export F90=`which gfortran`; fi +elif [ ${COMPILER_FAMILY} = "pgi" ]; then + if [ -z ${CC} ]; then export CC=`which pgcc`; fi + if [ -z ${CXX} ]; then export CXX=`which pgc++`; fi + if [ -z ${FC} ]; then export FC=`which pgf90`; fi + if [ -z ${F77} ]; then export F77=`which pgf90`; fi + if [ -z ${F90} ]; then export F90=`which pgf90`; fi +elif [[ ${COMPILER_FAMILY} == *intel* && ${CC} == "icc" ]] || \ + [[ ${COMPILER_FAMILY} == "ics" ]] || \ + [[ ${COMPILER_FAMILY} == "ips" ]] || \ + [[ ${COMPILER_FAMILY} == "intel-classic" ]] || \ + [[ ${COMPILER_FAMILY} == "PrgEnv-intel" ]]; then + if [ -z ${CC} ]; then export CC=`which icc`; fi + if [ -z ${CXX} ]; then export CXX=`which icpc`; fi + if [ -z ${FC} ]; then export FC=`which ifort`; fi + if [ -z ${F77} ]; then export F77=`which ifort`; fi + if [ -z ${F90} ]; then export F90=`which ifort`; fi +elif [[ ${COMPILER_FAMILY} == *intel* ]] && [[ ${CC} == *icx* ]]; then + export CXX=`which icpx` + export FC=`which ifx` + export F77=`which ifx` + export F90=`which ifx` +elif [[ ${COMPILER_FAMILY_SUFFIX} == oneapi ]]; then + export CC=`which icx` + export CXX=`which icpx` + export FC=`which ifx` + export F77=`which ifx` + export F90=`which ifx` +else + echo "ERROR: \${COMPILER} must start with gnu, intel, ics, ips, intel-classic, PrgEnv-intel, or pgi" + exit +fi + +echo "export CC=${CC}" +echo "export CXX=${CXX}" +echo "export FC=${FC}" +echo "export F77=${F77}" +echo "export F90=${F90}" +echo + +# Figure out what kind of OS is being used +unameOut="$(uname -s)" +case "${unameOut}" in + Linux*) machine=Linux;; + Darwin*) machine=Mac;; + CYGWIN*) machine=Cygwin;; + 
MINGW*) machine=MinGw;; + *) machine="UNKNOWN:${unameOut}" +esac + +# change sed command and extension for dynamic library files +if [[ $machine == "Mac" ]]; then + sed_inline="sed -i ''" +else + sed_inline="sed -i''" +fi + +if [[ "$(uname -m)" == "arm64" ]]; then + dynamic_lib_ext="dylib" +else + dynamic_lib_ext="so" +fi + +# Load Python module + +if [ ${USE_MODULES} = "TRUE" ]; then + if [ ! -z ${PYTHON_MODULE} ]; then + PYTHON_NAME=`echo $PYTHON_MODULE | cut -d'_' -f1` + PYTHON_VERSION_NUM=`echo $PYTHON_MODULE | cut -d'_' -f2` + echo "module load ${PYTHON_NAME}/${PYTHON_VERSION_NUM}" + echo ${PYTHON_NAME}/${PYTHON_VERSION_NUM} + module load ${PYTHON_NAME}/${PYTHON_VERSION_NUM} + # Allow the user to specify the name and version of the module to load + elif [[ ! -z ${PYTHON_NAME} && ! -z ${PYTHON_VERSION_NUM} ]]; then + echo "module load ${PYTHON_NAME}/${PYTHON_VERSION_NUM}" + echo ${PYTHON_NAME}/${PYTHON_VERSION_NUM} + module load ${PYTHON_NAME}/${PYTHON_VERSION_NUM} + fi +fi + +if [[ ${MET_PYTHON}/bin/python3 ]]; then + echo "Using python version: " + ${MET_PYTHON}/bin/python3 --version +fi + +# Compile Proj +if [ $COMPILE_PROJ -eq 1 ]; then + + + if [ $COMPILE_TIFF -eq 1 ]; then + echo + echo "Compiling TIFF at `date`" + mkdir -p ${LIB_DIR}/tiff + rm -rf ${LIB_DIR}/tiff/tiff* + tar -xzf ${TAR_DIR}/tiff*.tar.gz -C ${LIB_DIR}/tiff + cd ${LIB_DIR}/tiff/tiff* + echo "cd `pwd`" + run_cmd "./configure --prefix=${LIB_DIR} > $(pwd)/tiff.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > $(pwd)/tiff.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/tiff.make_install.log 2>&1" + export TIFF_INCLUDE_DIR=${LIB_DIR}/include + export TIFF_LIB_DIR=${LIB_DIR}/lib + fi + + if [ $COMPILE_SQLITE -eq 1 ]; then + echo + echo "Compiling SQLITE at `date`" + mkdir -p ${LIB_DIR}/sqlite + rm -rf ${LIB_DIR}/sqlite/sqlite* + tar -xf ${TAR_DIR}/sqlite*.tar.gz -C ${LIB_DIR}/sqlite > /dev/null 2>&1 + cd ${LIB_DIR}/sqlite/sqlite* + echo "cd `pwd`" + run_cmd "./configure 
--enable-shared --prefix=${LIB_DIR} > $(pwd)/sqlite.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > $(pwd)/sqlite.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/sqlite.make_install.log 2>&1" + export SQLITE_INCLUDE_DIR=${LIB_DIR}/include + export SQLITE_LIB_DIR=${LIB_DIR}/lib + fi + + vrs="7.1.0" + + echo + echo "Compiling PROJ_${vrs} at `date`" + echo "cmake version `cmake --version`" + mkdir -p ${LIB_DIR}/proj + rm -rf ${LIB_DIR}/proj/proj* + tar -xf ${TAR_DIR}/proj-${vrs}.tar.gz -C ${LIB_DIR}/proj + cd ${LIB_DIR}/proj/proj* + echo "cd `pwd`" + export PATH=${LIB_DIR}/bin:${PATH} + run_cmd "mkdir build; cd build" + + tiff_arg="" + # add tiff library and include arguments if necessary + if [[ ! -z "$TIFF_LIB_DIR" ]]; then + tiff_arg+="-DTIFF_LIBRARY_RELEASE=${TIFF_LIB_DIR}/libtiff.${dynamic_lib_ext}" + fi + if [[ ! -z "$TIFF_INCLUDE_DIR" ]]; then + tiff_arg+=" -DTIFF_INCLUDE_DIR=${TIFF_INCLUDE_DIR}" + fi + + cmd="cmake -DCMAKE_INSTALL_PREFIX=${LIB_DIR} -DSQLITE3_INCLUDE_DIR=${SQLITE_INCLUDE_DIR} -DSQLITE3_LIBRARY=${SQLITE_LIB_DIR}/libsqlite3.${dynamic_lib_ext} ${tiff_arg} .. > $(pwd)/proj.cmake.log 2>&1" + run_cmd ${cmd} + run_cmd "cmake --build . > $(pwd)/proj.cmake_build.log 2>&1" + run_cmd "cmake --build . 
--target install > $(pwd)/proj.cmake_install.log 2>&1" + +fi + +# Compile GSL +if [ $COMPILE_GSL -eq 1 ]; then + + if [ ${COMPILER_FAMILY} = "pgi" ]; then + vrs="1.11" + else + vrs="2.7.1" + fi + + echo + echo "Compiling GSL_${vrs} at `date`" + mkdir -p ${LIB_DIR}/gsl + rm -rf ${LIB_DIR}/gsl/gsl* + tar -xf ${TAR_DIR}/gsl-${vrs}.tar.gz -C ${LIB_DIR}/gsl + cd ${LIB_DIR}/gsl/gsl* + echo "cd `pwd`" + run_cmd "./configure --prefix=${LIB_DIR} > $(pwd)/gsl.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > $(pwd)/gsl.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/gsl.make_install.log 2>&1" +fi + +# Compile BUFRLIB +if [ $COMPILE_BUFRLIB -eq 1 ]; then + + vrs="v11.6.0" + + echo + echo "Compiling bufr_${vrs} at `date`" + mkdir -p ${LIB_DIR}/bufrlib + rm -rf ${LIB_DIR}/bufrlib/NCEPLIBS-bufr-bufr_${vrs} + tar -xf ${TAR_DIR}/bufr_${vrs}.tar.gz -C ${LIB_DIR}/bufrlib + export SOURCE_DIR=${LIB_DIR}/bufrlib/NCEPLIBS-bufr-bufr_${vrs} + cd $SOURCE_DIR + echo "cd `pwd`" + run_cmd "mkdir build" + export BUILD_DIR=${SOURCE_DIR}/build + run_cmd "cmake -H${SOURCE_DIR} -B${BUILD_DIR} -DCMAKE_INSTALL_PREFIX=${LIB_DIR} -DCMAKE_BUILD_TYPE=Debug > $(pwd)/bufr.cmake.log 2>&1" + run_cmd "cd ${BUILD_DIR}" + run_cmd "make ${MAKE_ARGS} > $(pwd)/bufr.make.log 2>&1" + run_cmd "ctest > $(pwd)/bufr.ctest.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/bufr.make_install.log 2>&1" +fi + + +# Compile ZLIB +if [ $COMPILE_ZLIB -eq 1 ]; then + echo + echo "Compiling ZLIB at `date`" + mkdir -p ${LIB_DIR}/zlib + rm -rf ${LIB_DIR}/zlib/zlib* + tar -xzf ${TAR_DIR}/zlib*.tar.gz -C ${LIB_DIR}/zlib + cd ${LIB_DIR}/zlib/zlib* + echo "cd `pwd`" + run_cmd "./configure --prefix=${LIB_DIR} > $(pwd)/zlib.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > $(pwd)/zlib.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/zlib.make_install.log 2>&1" + + # GPM: why is this removed? Could we add a comment to + # describe why this is needed? 
+ run_cmd "rm ${LIB_DIR}/lib/libz.a" +fi + +# Compile LIBPNG +if [[ $COMPILE_LIBPNG -eq 1 && $HOST != ys* ]]; then + echo + echo "Compiling LIBPNG at `date`" + mkdir -p ${LIB_DIR}/libpng + rm -rf ${LIB_DIR}/libpng/libpng* + tar -xzf ${TAR_DIR}/libpng*.tar.gz -C ${LIB_DIR}/libpng + cd ${LIB_DIR}/libpng/libpng* + echo "cd `pwd`" + run_cmd "./configure --prefix=${LIB_DIR} LDFLAGS=-L${LIB_DIR}/lib CPPFLAGS=-I${LIB_DIR}/include > $(pwd)/libpng.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > $(pwd)/libpng.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/libpng.make_install.log 2>&1" +fi + + +# Compile JASPER +if [ $COMPILE_JASPER -eq 1 ]; then + + vrs="2.0.25" + + echo + echo "Compiling JASPER at `date`" + mkdir -p ${LIB_DIR}/jasper + rm -rf ${LIB_DIR}/jasper/jasper* + tar -xf ${TAR_DIR}/jasper-${vrs}.tar.gz -C ${LIB_DIR}/jasper + cd ${LIB_DIR}/jasper/jasper-version-${vrs} + export CPPFLAGS="-I${LIB_DIR}/include" + export SOURCE_DIR=${LIB_DIR}/jasper/jasper-version-${vrs} + echo "cd `pwd`" + export BUILD_DIR=${LIB_DIR}/jasper/jasper-version-${vrs}/build + run_cmd "cmake -G \"Unix Makefiles\" -H${SOURCE_DIR} -B${BUILD_DIR} -DCMAKE_INSTALL_PREFIX=${LIB_DIR} -DJAS_ENABLE_DOC=false > $(pwd)/jasper.cmake.log 2>&1" + run_cmd "cd ${BUILD_DIR}" + run_cmd "make clean all > $(pwd)/jasper.make.log 2>&1" + # Commented out due to “which: no opj2_compress in …” error, which causes one of four tests to fail + # This is a known problem, so skipping tests for now: https://github.com/AAROC/CODE-RADE/issues/36#issuecomment-359744351 + #run_cmd "make ${MAKE_ARGS} test > $(pwd)/jasper.make_test.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/jasper.make_install.log 2>&1" +fi + +# Compile JPEG +if [ $COMPILE_JPEG -eq 1 ]; then + + vrs="9e" + + echo + echo "Compiling JPEG at `date`" + mkdir -p ${LIB_DIR}/jpeg + rm -rf ${LIB_DIR}/jpeg/jpeg* + tar -xf ${TAR_DIR}/jpegsrc.v${vrs}.tar.gz -C ${LIB_DIR}/jpeg + cd ${LIB_DIR}/jpeg/jpeg-${vrs} + echo "cd `pwd`" + run_cmd 
"./configure --prefix=${LIB_DIR} LDFLAGS=-L${LIB_DIR}/lib CPPFLAGS=-I${LIB_DIR}/include > $(pwd)/libjpeg.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > $(pwd)/libjpeg.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/libjpeg.make_install.log 2>&1" +fi + + +# Compile G2CLIB +if [ $COMPILE_G2CLIB -eq 1 ]; then + + vrs="1.6.4" + + echo + echo "Compiling G2CLIB at `date`" + mkdir -p ${LIB_DIR}/g2clib + rm -rf ${LIB_DIR}/g2clib/NCEP* + tar -xf ${TAR_DIR}/g2clib-${vrs}.tar.gz -C ${LIB_DIR}/g2clib + cd ${LIB_DIR}/g2clib/NCEP* + echo "cd `pwd`" + run_cmd "mkdir build; cd build" + run_cmd "cmake -DCMAKE_INSTALL_PREFIX=${LIB_DIR} -DCMAKE_PREFIX_PATH=${LIB_DIR} .. > $(pwd)/g2c.cmake.log 2>&1" + run_cmd "make ${MAKE_ARGS} > $(pwd)/g2c.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} test > $(pwd)/g2c.make_test.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/g2c.make_install.log 2>&1" +fi + +# Compile ECKIT +if [ $COMPILE_ECKIT -eq 1 ]; then + + # Need to obtain ecbuild before installing eckit + + vrs="3.5.0" + + echo + echo "Compiling ECBUILD at `date`" + mkdir -p ${LIB_DIR}/ecbuild + rm -rf ${LIB_DIR}/ecbuild/ecbuild* + tar -xf ${TAR_DIR}/ecbuild-${vrs}.tar.gz -C ${LIB_DIR}/ecbuild + cd ${LIB_DIR}/ecbuild/ecbuild* + echo "cd `pwd`" + run_cmd "mkdir build; cd build" + run_cmd "cmake ../ -DCMAKE_INSTALL_PREFIX=${LIB_DIR} > $(pwd)/ecbuild.cmake.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/ecbuild.make_install.log 2>&1" + + vrs="1.20.2" + + echo + echo "Compiling ECKIT at `date`" + mkdir -p ${LIB_DIR}/eckit + rm -rf ${LIB_DIR}/eckit/eckit* + tar -xf ${TAR_DIR}/eckit-${vrs}.tar.gz -C ${LIB_DIR}/eckit + cd ${LIB_DIR}/eckit/eckit* + echo "cd `pwd`" + run_cmd "mkdir build; cd build" + run_cmd "cmake ../ -DCMAKE_INSTALL_PREFIX=${LIB_DIR} -DCMAKE_PREFIX_PATH=${LIB_DIR} > $(pwd)/eckit.cmake.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/eckit.make_install.log 2>&1" + +fi + +# Compile ATLAS +if [ $COMPILE_ATLAS -eq 1 ]; then + + vrs="0.30.0" + + echo 
+ echo "Compiling ATLAS at `date`" + mkdir -p ${LIB_DIR}/atlas + rm -rf ${LIB_DIR}/atlas/atlas* + tar -xf ${TAR_DIR}/atlas-${vrs}.tar.gz -C ${LIB_DIR}/atlas + cd ${LIB_DIR}/atlas/atlas* + echo "cd `pwd`" + run_cmd "mkdir build; cd build" + run_cmd "cmake ../ -DCMAKE_INSTALL_PREFIX=${LIB_DIR} -DCMAKE_PREFIX_PATH=${LIB_DIR} > $(pwd)/atlas.cmake.log 2>&1" + run_cmd "make ${MAKE_ARGS} > $(pwd)/atlas.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/atlas.make_install.log 2>&1" + +fi + +# Compile HDF +# Depends on jpeg +# Edit 'mfhdf/hdiff/Makefile' as follows: +# From: LIBS = -ljpeg -lz +# To: LIBS = -ljpeg -lz -lm +if [ $COMPILE_HDF -eq 1 ]; then + echo + echo "Compiling HDF at `date`" + mkdir -p ${LIB_DIR}/hdf + rm -rf ${LIB_DIR}/hdf/HDF* + tar -xf ${TAR_DIR}/HDF4.2*.tar.gz -C ${LIB_DIR}/hdf + cd ${LIB_DIR}/hdf/HDF* + echo "cd `pwd`" + run_cmd "./configure --prefix=${LIB_DIR} --disable-netcdf --with-jpeg=${LIB_DIR} --with-zlib=${LIB_DIR} CPPFLAGS=-I/usr/include/tirpc LIBS='-lm -ltirpc' > $(pwd)/hdf4.configure.log 2>&1" + if [[ ${COMPILER_MAJOR_VERSION} -ge 10 ]]; then + cat hdf/src/Makefile | \ + sed 's/FFLAGS = -O2/FFLAGS = -w -fallow-argument-mismatch -O2/g' \ + > Makefile_new + elif [[ ${COMPILER_MAJOR_VERSION} -lt 10 ]]; then + cat hdf/src/Makefile | \ + sed 's/FFLAGS = -O2/FFLAGS = -w -Wno-argument-mismatch -O2/g' \ + > Makefile_new + fi + mv Makefile_new hdf/src/Makefile + run_cmd "make ${MAKE_ARGS} > $(pwd)/hdf4.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/hdf4.make_install.log 2>&1" +fi + +# Compile HDFEOS +# Depends on HDF +if [ $COMPILE_HDFEOS -eq 1 ]; then + echo + echo "Compiling HDFEOS at `date`" + mkdir -p ${LIB_DIR}/hdfeos + rm -rf ${LIB_DIR}/hdfeos/HDF-EOS* + tar -xzf ${TAR_DIR}/HDF-EOS*.tar.* -C ${LIB_DIR}/hdfeos + cd ${LIB_DIR}/hdfeos/hdfeos + echo "cd `pwd`" + run_cmd "./configure --prefix=${LIB_DIR} --with-hdf4=${LIB_DIR} --with-jpeg=${LIB_DIR} > $(pwd)/hdf-eos.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > 
$(pwd)/hed-eos.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/hsf-eos.make_install.log 2>&1" + + cp include/*.h ${LIB_DIR}/include/ +fi + +# Compile NetCDF +if [ $COMPILE_NETCDF -eq 1 ]; then + + echo + echo "Compiling HDF5 at `date`" + mkdir -p ${LIB_DIR}/hdf5 + rm -rf ${LIB_DIR}/hdf5/hdf5* + tar -xzf ${TAR_DIR}/hdf5*.tar.gz -C ${LIB_DIR}/hdf5 + cd ${LIB_DIR}/hdf5/hdf5* + echo "cd `pwd`" + run_cmd "./configure --prefix=${LIB_DIR} --with-zlib=${LIB_Z} CFLAGS=-fPIC CXXFLAGS=-fPIC FFLAGS=-fPIC LDFLAGS=-L${LIB_DIR}/lib:${LIB_Z} CPPFLAGS=-I${LIB_DIR}/include > $(pwd)/hdf5.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/hdf5.make_install.log 2>&1" + + echo + echo "Compiling NetCDF-C at `date`" + mkdir -p ${LIB_DIR}/netcdf + rm -rf ${LIB_DIR}/netcdf/netcdf* + tar -xzf ${TAR_DIR}/netcdf-4*.tar.gz -C ${LIB_DIR}/netcdf > /dev/null 2>&1 || unzip ${TAR_DIR}/netcdf-4*.zip -d ${LIB_DIR}/netcdf + cd ${LIB_DIR}/netcdf/netcdf-* + export FC='' + export F90='' + echo "cd `pwd`" + run_cmd "./configure --prefix=${LIB_DIR} CFLAGS=-fPIC CXXFLAGS=-fPIC LDFLAGS=-L${LIB_DIR}/lib CPPFLAGS=-I${LIB_DIR}/include > $(pwd)/netcdf-c.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/netcdf-c.make_install.log 2>&1" + + echo + echo "Compiling NetCDF-CXX at `date`" + tar -xzf ${TAR_DIR}/netcdf-cxx*.tar.gz -C ${LIB_DIR}/netcdf + cd ${LIB_DIR}/netcdf/netcdf-cxx* + echo "cd `pwd`" + configure_lib_args="" + if [[ $machine == "Mac" ]]; then + configure_lib_args="-lnetcdf -lhdf5_hl -lhdf5 -lz" + fi + run_cmd "./configure --prefix=${LIB_DIR} LDFLAGS=-L${LIB_DIR}/lib CPPFLAGS=-I${LIB_DIR}/include LIBS=\"${LIBS} ${configure_lib_args}\" > $(pwd)/netcdf-cxx.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/netcdf-cxx.make_install.log 2>&1" +fi + +# Compile FREETYPE +if [ $COMPILE_FREETYPE -eq 1 ]; then + echo + echo "Compiling FREETYPE at `date`" + mkdir -p ${LIB_DIR}/freetype + rm -rf ${LIB_DIR}/freetype/freetype* + tar -xzf 
${TAR_DIR}/freetype*.tar.gz -C ${LIB_DIR}/freetype + cd ${LIB_DIR}/freetype/freetype* + echo "cd `pwd`" + run_cmd "./configure --prefix=${LIB_DIR} --with-png=yes > $(pwd)/freetype.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > $(pwd)/freetype.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/freetype.make_install.log 2>&1" +fi + + +# Compile CAIRO +if [ $COMPILE_CAIRO -eq 1 ]; then + + # If on Cray, compile PIXMAN + if [ ${COMPILER_FAMILY} = "PrgEnv-intel" ]; then + echo + echo "Compiling pixman at `date`" + mkdir -p ${LIB_DIR}/pixman + rm -rf ${LIB_DIR}/pixman/pixman* + tar -xzf ${TAR_DIR}/pixman*.tar.gz -C ${LIB_DIR}/pixman + cd ${LIB_DIR}/pixman/pixman* + echo "cd `pwd`" + run_cmd "./configure --prefix=${LIB_DIR} > $(pwd)/pixman.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > $(pwd)/pixman.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/pixman.make_install.log 2>&1" + fi + + echo + echo "Compiling CAIRO at `date`" + mkdir -p ${LIB_DIR}/cairo + rm -rf ${LIB_DIR}/cairo/cairo* + tar -xf ${TAR_DIR}/cairo*.tar* -C ${LIB_DIR}/cairo + cd ${LIB_DIR}/cairo/cairo* + export PKG_CONFIG=`which pkg-config` + if [ ${COMPILER_FAMILY} = "PrgEnv-intel" ]; then + export PKG_CONFIG_PATH=${LIB_DIR}/lib/pkgconfig/ + fi + echo "cd `pwd`" + run_cmd "./configure --prefix=${LIB_DIR} ax_cv_c_float_words_bigendian=no LDFLAGS=-L${LIB_DIR}/lib CPPFLAGS=-I${LIB_DIR}/include > $(pwd)/cairo.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > $(pwd)/cairo.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/cairo.make_install.log 2>&1" +fi + +# Compile MET +if [ $COMPILE_MET -eq 0 ]; then + echo Skipping MET compilation + echo "Finished compiling at `date`" + exit 0 +fi + +echo +echo "Compiling MET at `date`" +# If using source from a tar file remove everything and unpack the tar file +# FALSE = compiling from github repo and we don't want to overwrite the files +if [ ${USE_MET_TAR_FILE} = "TRUE" ]; then + rm -rf ${MET_DIR}/MET* + tar -xzf ${MET_TARBALL} -C 
${MET_DIR} +fi +cd ${MET_DIR}/MET* + +echo "Modifying configure" +cat configure | \ + sed 's/C11/C17/g' | \ + sed 's/c11/c17/g' | \ + sed 's/cxx11/cxx17/g' | \ + sed 's/c++11/c++17/g' | \ + sed 's/gnu11/gnu17/g' \ + > configure_new + +mv configure_new configure +chmod 755 configure + +if [ -z ${MET_BUFRLIB} ]; then + export MET_BUFRLIB=${LIB_DIR}/lib + export BUFRLIB_NAME=-lbufr_4 +fi + +if [ -z ${MET_GRIB2CLIB} ]; then + export MET_GRIB2CLIB=${LIB_DIR}/lib + export MET_GRIB2CINC=${LIB_DIR}/include + export LIB_JASPER=${LIB_DIR}/lib + export LIB_LIBPNG=${LIB_DIR}/lib + export LIB_Z=${LIB_DIR}/lib + export GRIB2CLIB_NAME=-lg2c +fi + +if [ -z ${MET_NETCDF} ]; then + export MET_NETCDF=${LIB_DIR} + export MET_HDF5=${LIB_DIR} +fi + +if [ -z ${MET_GSL} ]; then + export MET_GSL=${LIB_DIR} +fi + +if [ -z ${MET_PROJ} ]; then + export MET_PROJ=${LIB_DIR} +fi + +export MET_PYTHON_BIN_EXE=${MET_PYTHON_BIN_EXE:=${MET_PYTHON}/bin/python3} +export MET_PYTHON_LD +export MET_PYTHON_CC + +# add flags to user-defined LDFLAGS for MacOS +if [[ $machine != "Mac" ]]; then + LDFLAGS="${LDFLAGS} -Wl,--disable-new-dtags" +fi + +# https://www.gnu.org/software/bash/manual/html_node/Shell-Parameter-Expansion.html +# ${parameter:+word} +# If parameter is null or unset, nothing is substituted, otherwise the expansion of word is substituted. 
+ +# add LIB_DIR/lib and LIB_DIR/lib64 to rpath and -L +LDFLAGS="${LDFLAGS} -Wl,-rpath,${LIB_DIR}/lib -L${LIB_DIR}/lib -Wl,-rpath,${LIB_DIR}/lib64 -L${LIB_DIR}/lib64" + +# if variables are set, add /lib to rpath and -L +for x in $MET_CAIRO $MET_FREETYPE $MET_GSL $MET_HDF $MET_HDF5 $MET_NETCDF; do + arg="${x:+-Wl,-rpath,$x/lib -L$x/lib}" + if [[ "$LDFLAGS" != *"$arg"* ]]; then + LDFLAGS+=" $arg" + fi +done + +# if variables are set, add /lib64 to rpath and -L +for x in $MET_ATLAS $MET_BUFR $MET_ECKIT $MET_GRIB2C $MET_PROJ $LIB_JASPER; do + arg="${x:+-Wl,-rpath,$x/lib64 -L$x/lib64}" + if [[ "$LDFLAGS" != *"$arg"* ]]; then + LDFLAGS+=" $arg" + fi +done + +# if variables are set, add to rpath and -L +for x in $MET_ATLASLIB $MET_BUFRLIB $MET_CAIROLIB $MET_ECKITLIB $MET_FREETYPELIB $MET_GRIB2CLIB $MET_GSLLIB $MET_HDF5LIB $MET_HDFLIB $MET_NETCDFLIB $MET_PROJLIB $MET_PYTHON_LIB $LIB_JASPER $LIB_LIBPNG $LIB_Z $ADDTL_DIR; do + arg="${x:+-Wl,-rpath,$x -L$x}" + if [[ "$LDFLAGS" != *"$arg"* ]]; then + LDFLAGS+=" $arg" + fi +done + +export LDFLAGS + +export LIBS="${LIBS} -lhdf5_hl -lhdf5 -lz" +export MET_FONT_DIR=${TEST_BASE}/fonts + + +echo "MET Configuration settings..." +printenv | egrep "^MET_" | sed -r 's/^/export /g' +echo "LDFLAGS = ${LDFLAGS}" +export OPT_ARGS='' +if [[ $COMPILER_FAMILY == "pgi" ]]; then + export OPT_ARGS="${OPT_ARGS} FFLAGS=-lpgf90" +fi + +configure_cmd="./configure --prefix=${MET_INSTALL_DIR} --bindir=${BIN_DIR_PATH}" +configure_cmd="${configure_cmd} BUFRLIB_NAME=${BUFRLIB_NAME}" +configure_cmd="${configure_cmd} GRIB2CLIB_NAME=${GRIB2CLIB_NAME} --enable-grib2" +if [[ ! -z ${MET_FREETYPEINC} && ! -z ${MET_FREETYPELIB} && \ + ! -z ${MET_CAIROINC} && ! -z ${MET_CAIROLIB} ]]; then + configure_cmd="${configure_cmd} --enable-mode_graphics" +fi + +if [[ ! -z $MET_ECKIT && ! -z $MET_ATLAS ]]; then + configure_cmd="${configure_cmd} --enable-ugrid" +fi + +if [[ ! -z $MET_HDF && ! 
-z $MET_HDFEOS ]]; then + configure_cmd="${configure_cmd} --enable-modis --enable-lidar2nc" +fi + +if [[ ! -z ${MET_PYTHON_CC} || ! -z ${MET_PYTHON_LD} ]]; then + configure_cmd="${configure_cmd} --enable-python" +fi + +configure_cmd="${configure_cmd} ${OPT_ARGS}" + +echo "cd `pwd`" +run_cmd "${configure_cmd} > $(pwd)/configure.log 2>&1" +run_cmd "make ${MAKE_ARGS} > $(pwd)/met.make.log 2>&1" +run_cmd "make install > $(pwd)/met.make_install.log 2>&1" +run_cmd "make test > $(pwd)/met.make_test.log 2>&1" + +echo "Finished compiling at `date`" diff --git a/internal/scripts/installation/config/install_met_env.cactus b/internal/scripts/installation/config/install_met_env.cactus new file mode 100644 index 000000000..63ac85192 --- /dev/null +++ b/internal/scripts/installation/config/install_met_env.cactus @@ -0,0 +1,50 @@ +module reset +module use /apps/dev/modulefiles/ +module load ve/evs/2.0 +module use /apps/ops/para/libs/modulefiles/compiler/intel/19.1.3.304 +module load netcdf/4.7.4 +module load hdf5/1.10.6 +module load bufr/11.5.0 +module load zlib/1.2.11 +module load jasper/2.0.25 +module load libpng/1.6.37 +module load gsl/2.7 +module load g2c/1.6.4 +module load proj/7.1.0 +module use /apps/dev/modulefiles +module load fckit/0.11.0 +module load atlas/0.35.0 +module load eckit/1.24.4 + +export FC=ifort +export F77=ifort +export F90=ifort +export CC=icc +export CXX=icpc +export TEST_BASE=/lfs/h2/users/julie.prestopnik/12.0.0-beta5 +export LIB_DIR=${TEST_BASE}/external_libs +export COMPILER=intel_19.1.3.304 +export MET_SUBDIR=${TEST_BASE} +export MET_TARBALL=v12.0.0-beta5.tar.gz +export USE_MODULES=TRUE +export ADDTL_DIR=/apps/spack/gettext/0.21/intel/19.1.3.304/at2kdo4edvuhyzrt5g6zhwrdb7bdui4s/lib64/ +export PYTHON_MODULE=python_3.10.4 +export MET_PYTHON=/apps/spack/python/3.10.4/intel/19.1.3.304/xqft4d45h4dp4xnbz2ue3nbxv65i6bgp/ +export MET_PYTHON_LIB=/apps/spack/python/3.10.4/intel/19.1.3.304/xqft4d45h4dp4xnbz2ue3nbxv65i6bgp/lib64 +export 
MET_PYTHON_CC=-I/apps/spack/python/3.10.4/intel/19.1.3.304/xqft4d45h4dp4xnbz2ue3nbxv65i6bgp/include/python3.10 +export MET_PYTHON_LD=-L/apps/spack/python/3.10.4/intel/19.1.3.304/xqft4d45h4dp4xnbz2ue3nbxv65i6bgp/lib64\ -lpython3.10\ -lintl\ -lcrypt\ -ldl\ -lutil\ -lm\ -lm +export MET_NETCDF=/apps/prod/hpc-stack/intel-19.1.3.304/netcdf/4.7.4 +export MET_HDF5=/apps/prod/hpc-stack/intel-19.1.3.304/hdf5/1.10.6 +export MET_BUFRLIB=/apps/ops/prod/libs/intel/19.1.3.304/bufr/11.5.0/lib64 +export MET_GRIB2CLIB=/apps/ops/prod/libs/intel/19.1.3.304/g2c/1.6.4/lib64 +export MET_GRIB2CINC=/apps/ops/prod/libs/intel/19.1.3.304/g2c/1.6.4/include +export MET_GSL=/apps/spack/gsl/2.7/intel/19.1.3.304/xks7dxbowrdxhjck5zxc4rompopocevb +export MET_PROJ=/apps/spack/proj/7.1.0/intel/19.1.3.304/cjbmc7tacv5qcfatslqmcrzo5kb4raaq +export MET_ATLAS=/apps/dev/atlas/install-0.35.0 +export MET_ECKIT=/apps/dev/eckit/install-1.24.4 +export BUFRLIB_NAME=-lbufr_4 +export GRIB2CLIB_NAME=-lg2c +export LIB_JASPER=/apps/spack/jasper/2.0.25/intel/19.1.3.304/sjib74krrorkyczqpqah4tvewmlnqdx4/lib64 +export LIB_LIBPNG=/apps/spack/libpng/1.6.37/intel/19.1.3.304/4ohkronuhlyherusoszzrmur5ewvlwzh/lib +export LIB_Z=/apps/spack/zlib/1.2.11/intel/19.1.3.304/hjotqkckeoyt6j6tibalwzrlfljcjtdh/lib +export MAKE_ARGS=-j diff --git a/internal/scripts/installation/config/install_met_env.wcoss2 b/internal/scripts/installation/config/install_met_env.wcoss2 index 86b73e006..2d02be1f8 100644 --- a/internal/scripts/installation/config/install_met_env.wcoss2 +++ b/internal/scripts/installation/config/install_met_env.wcoss2 @@ -1,33 +1,32 @@ module reset -module use /apps/ops/para/libs/modulefiles/compiler/intel/19.1.3.304 -export HPC_OPT=/apps/ops/para/libs module use /apps/dev/modulefiles/ module load ve/evs/2.0 +module use /apps/ops/para/libs/modulefiles/compiler/intel/19.1.3.304 module load netcdf/4.7.4 module load hdf5/1.10.6 -module load bufr/11.6.0 +module load bufr/11.5.0 module load zlib/1.2.11 module load jasper/2.0.25 
module load libpng/1.6.37 module load gsl/2.7 module load g2c/1.6.4 module load proj/7.1.0 -module use /apps/dev/lmodules/intel/19.1.3.304/ -module load atlas/0.30.0 -module load eckit/1.20.2 +module use /apps/dev/modulefiles +module load fckit/0.11.0 +module load atlas/0.35.0 +module load eckit/1.24.4 export FC=ifort export F77=ifort export F90=ifort export CC=icc export CXX=icpc -#export TEST_BASE=/apps/ops/para/libs/intel/19.1.3.304/met/12.0.0-beta3 export TEST_BASE=$(pwd) export LIB_DIR=${TEST_BASE}/external_libs export BIN_DIR_PATH=${TEST_BASE}/bin export COMPILER=intel_19.1.3.304 export MET_SUBDIR=${TEST_BASE} -export MET_TARBALL=v12.0.0-beta3.tar.gz +export MET_TARBALL=v12.0.0-beta5.tar.gz export USE_MODULES=TRUE export ADDTL_DIR=/apps/spack/gettext/0.21/intel/19.1.3.304/at2kdo4edvuhyzrt5g6zhwrdb7bdui4s/lib64 export PYTHON_MODULE=python_3.10.4 @@ -45,10 +44,10 @@ export MET_GRIB2CLIB=${g2c_ROOT}/lib64 export MET_GRIB2CINC=${G2C_INC} export MET_GSL=/apps/spack/gsl/2.7/intel/19.1.3.304/xks7dxbowrdxhjck5zxc4rompopocevb export MET_PROJ=/apps/spack/proj/7.1.0/intel/19.1.3.304/cjbmc7tacv5qcfatslqmcrzo5kb4raaq -export MET_ATLASLIB=/apps/dev/intel-19.1.3.304/cray-mpich-8.1.9/atlas/0.30.0/lib64 -export MET_ATLASINC=/apps/dev/intel-19.1.3.304/cray-mpich-8.1.9/atlas/0.30.0/include/atlas -export MET_ECKITLIB=/apps/dev/intel-19.1.3.304/cray-mpich-8.1.9/eckit/1.20.2/lib64 -export MET_ECKITINC=/apps/dev/intel-19.1.3.304/cray-mpich-8.1.9/eckit/1.20.2/include/eckit +export MET_ATLASLIB=/apps/dev/atlas/install-0.35.0/lib64 +export MET_ATLASINC=/apps/dev/atlas/install-0.35.0/include/atlas +export MET_ECKITLIB=/apps/dev/eckit/install-1.24.4/lib64 +export MET_ECKITINC=/apps/dev/eckit/install-1.24.4/include/eckit export BUFRLIB_NAME=-lbufr_4 export GRIB2CLIB_NAME=-lg2c # JY export LIB_JASPER=/apps/spack/jasper/2.0.25/intel/19.1.3.304/sjib74krrorkyczqpqah4tvewmlnqdx4/lib64 @@ -58,4 +57,3 @@ export LIB_LIBPNG=${LIBPNG_LIBDIR} # JY export 
LIB_Z=/apps/spack/zlib/1.2.11/intel/19.1.3.304/hjotqkckeoyt6j6tibalwzrlfljcjtdh/lib export LIB_Z=${ZLIB_LIBDIR} export MAKE_ARGS=-j -export CXXFLAGS="-std=c++11" diff --git a/internal/test_unit/bin/unit_test.sh b/internal/test_unit/bin/unit_test.sh index 0e2579464..e3bc29e18 100755 --- a/internal/test_unit/bin/unit_test.sh +++ b/internal/test_unit/bin/unit_test.sh @@ -86,6 +86,7 @@ UNIT_XML="unit_ascii2nc.xml \ unit_climatology_1.0deg.xml \ unit_climatology_1.5deg.xml \ unit_climatology_2.5deg.xml \ + unit_climatology_mixed.xml \ unit_grib_tables.xml \ unit_grid_weight.xml \ unit_netcdf.xml \ diff --git a/internal/test_unit/config/GridStatConfig_climo_FCST_NCEP_1.0DEG_OBS_WMO_1.5DEG b/internal/test_unit/config/GridStatConfig_climo_FCST_NCEP_1.0DEG_OBS_WMO_1.5DEG new file mode 100644 index 000000000..ab1cdd836 --- /dev/null +++ b/internal/test_unit/config/GridStatConfig_climo_FCST_NCEP_1.0DEG_OBS_WMO_1.5DEG @@ -0,0 +1,277 @@ +//////////////////////////////////////////////////////////////////////////////// +// +// Grid-Stat configuration file. +// +// For additional information, please see the MET User's Guide. 
+// +//////////////////////////////////////////////////////////////////////////////// + +// +// Output model name to be written +// +model = "GFS"; + +// +// Output description to be written +// May be set separately in each "obs.field" entry +// +desc = "NA"; + +// +// Output observation type to be written +// +obtype = "GFSANL"; + +//////////////////////////////////////////////////////////////////////////////// + +// +// Verification grid +// +regrid = { + to_grid = "${OBS_CLIMO_DIR}/mslp_mean.grib"; + method = BILIN; + width = 2; + vld_thresh = 0.5; + shape = SQUARE; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// May be set separately in each "field" entry +// +censor_thresh = []; +censor_val = []; +mpr_column = []; +mpr_thresh = []; +cat_thresh = []; +cnt_thresh = [ NA ]; +cnt_logic = UNION; +wind_thresh = [ NA ]; +wind_logic = UNION; +eclv_points = 0.05; +nc_pairs_var_name = ""; +nc_pairs_var_suffix = ""; +hss_ec_value = NA; +rank_corr_flag = FALSE; + +// +// Forecast and observation fields to be verified +// + +field_list = [ + { name = "TMP"; level = [ "P500" ]; }, + { name = "UGRD"; level = [ "P500" ]; }, + { name = "VGRD"; level = [ "P500" ]; }, + { name = "TMP"; level = [ "P850" ]; cat_thresh = [ >FCDP75, >OCDP75 ]; }, + { name = "UGRD"; level = [ "P850" ]; cat_thresh = [ >FCDP75, >OCDP75 ]; }, + { name = "VGRD"; level = [ "P850" ]; cat_thresh = [ >FCDP75, >OCDP75 ]; } +]; + +fcst = { + + field = field_list; + + climo_mean = { + field = field_list; + file_name = [ "${FCST_CLIMO_DIR}/cmean_1d.19590410" ]; + }; + + climo_stdev = { + field = field_list; + file_name = [ "${FCST_CLIMO_DIR}/cstdv_1d.19590410" ]; + }; + + time_interp_method = DW_MEAN; + day_interval = 1; + hour_interval = 6; +} + +obs = { + + field = field_list; + + climo_mean = { + field = field_list; + file_name = [ "${OBS_CLIMO_DIR}/t500hPa_mean.grib", + "${OBS_CLIMO_DIR}/t850hPa_mean.grib", + "${OBS_CLIMO_DIR}/u500hPa_mean.grib", + 
"${OBS_CLIMO_DIR}/u850hPa_mean.grib", + "${OBS_CLIMO_DIR}/v500hPa_mean.grib", + "${OBS_CLIMO_DIR}/v850hPa_mean.grib" ]; + }; + + climo_stdev = { + field = field_list; + file_name = [ "${OBS_CLIMO_DIR}/t850hPa_stdev.grib", + "${OBS_CLIMO_DIR}/u850hPa_stdev.grib", + "${OBS_CLIMO_DIR}/v850hPa_stdev.grib" ]; + }; + + time_interp_method = DW_MEAN; + day_interval = 1; + hour_interval = 12; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// May be set separately in each "obs.field" entry +// +climo_cdf = { + cdf_bins = 1; + center_bins = TRUE; + write_bins = FALSE; + direct_prob = FALSE; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Verification masking regions +// +mask = { + grid = [ "FULL" ]; + poly = []; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Confidence interval settings +// +ci_alpha = [ 0.05 ]; + +boot = { + interval = PCTILE; + rep_prop = 1.0; + n_rep = 0; + rng = "mt19937"; + seed = ""; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Data smoothing methods +// +interp = { + field = BOTH; + vld_thresh = 1.0; + shape = SQUARE; + + type = [ + { + method = NEAREST; + width = 1; + } + ]; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Neighborhood methods +// +nbrhd = { + width = [ 1 ]; + cov_thresh = [ >=0.5 ]; + vld_thresh = 1.0; + shape = SQUARE; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Fourier decomposition +// +fourier = { + wave_1d_beg = []; + wave_1d_end = []; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Gradient statistics +// May be set separately in each "obs.field" entry +// +gradient = { + dx = []; + dy = []; +} + +//////////////////////////////////////////////////////////////////////////////// + +// 
+// Distance Map statistics +// May be set separately in each "obs.field" entry +// +distance_map = { + baddeley_p = 2; + baddeley_max_dist = NA; + fom_alpha = 0.1; + zhu_weight = 0.5; + beta_value(n) = n * n / 2.0; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Statistical output types +// +output_flag = { + fho = NONE; + ctc = NONE; + cts = NONE; + mctc = NONE; + mcts = NONE; + cnt = STAT; + sl1l2 = STAT; + sal1l2 = STAT; + vl1l2 = STAT; + val1l2 = STAT; + vcnt = STAT; + pct = NONE; + pstd = NONE; + pjc = NONE; + prc = NONE; + eclv = NONE; + nbrctc = NONE; + nbrcts = NONE; + nbrcnt = NONE; + grad = NONE; + dmap = NONE; + seeps = NONE; +} + +// +// NetCDF matched pairs output file +// +nc_pairs_flag = { + latlon = TRUE; + raw = TRUE; + diff = TRUE; + climo = TRUE; + climo_cdp = TRUE; + weight = TRUE; + nbrhd = FALSE; + fourier = FALSE; + gradient = FALSE; + distance_map = FALSE; + apply_mask = FALSE; +} + +//////////////////////////////////////////////////////////////////////////////// +// Threshold for SEEPS p1 (Probability of being dry) + +seeps_p1_thresh = NA; + +//////////////////////////////////////////////////////////////////////////////// + +grid_weight_flag = COS_LAT; +tmp_dir = "/tmp"; +output_prefix = "${OUTPUT_PREFIX}"; +version = "V12.0.0"; + +//////////////////////////////////////////////////////////////////////////////// diff --git a/internal/test_unit/config/GridStatConfig_mpr_thresh b/internal/test_unit/config/GridStatConfig_mpr_thresh index 01a9037fd..e99405be7 100644 --- a/internal/test_unit/config/GridStatConfig_mpr_thresh +++ b/internal/test_unit/config/GridStatConfig_mpr_thresh @@ -79,32 +79,18 @@ fcst = { desc = "ABS_OBS_FCST_DIFF"; nc_pairs_var_suffix = desc; }, -// MET #2924 Replace this section -// { -// mpr_column = [ "ABS(OBS-OBS_CLIMO_MEAN)" ]; -// mpr_thresh = [ <=5 ]; -// desc = "ABS_OBS_CLIMO_MEAN_DIFF"; -// nc_pairs_var_suffix = desc; -// }, -// { -// mpr_column = [ "OBS_CLIMO_CDF" 
]; -// mpr_thresh = [ >=0.25&&<=0.75 ]; -// desc = "OBS_CLIMO_CDF_IQR"; -// nc_pairs_var_suffix = desc; -// } { - mpr_column = [ "ABS(OBS-CLIMO_MEAN)" ]; + mpr_column = [ "ABS(OBS-OBS_CLIMO_MEAN)" ]; mpr_thresh = [ <=5 ]; desc = "ABS_OBS_CLIMO_MEAN_DIFF"; nc_pairs_var_suffix = desc; }, { - mpr_column = [ "CLIMO_CDF" ]; + mpr_column = [ "OBS_CLIMO_CDF" ]; mpr_thresh = [ >=0.25&&<=0.75 ]; - desc = "CLIMO_CDF_IQR"; + desc = "OBS_CLIMO_CDF_IQR"; nc_pairs_var_suffix = desc; } -// MET #2924 End replace ]; } obs = fcst; diff --git a/internal/test_unit/config/PointStatConfig_mpr_thresh b/internal/test_unit/config/PointStatConfig_mpr_thresh index e0d54b921..675b0696c 100644 --- a/internal/test_unit/config/PointStatConfig_mpr_thresh +++ b/internal/test_unit/config/PointStatConfig_mpr_thresh @@ -68,28 +68,16 @@ fcst = { mpr_thresh = [ <=5 ]; desc = "ABS_OBS_FCST_DIFF"; }, -// MET #2924 Replace this section -// { -// mpr_column = [ "ABS(OBS-OBS_CLIMO_MEAN)" ]; -// mpr_thresh = [ <=5 ]; -// desc = "ABS_OBS_CLIMO_MEAN_DIFF"; -// }, -// { -// mpr_column = [ "OBS_CLIMO_CDF" ]; -// mpr_thresh = [ >=0.25&&<=0.75 ]; -// desc = "OBS_CLIMO_CDF_IQR"; -// } { - mpr_column = [ "ABS(OBS-CLIMO_MEAN)" ]; + mpr_column = [ "ABS(OBS-OBS_CLIMO_MEAN)" ]; mpr_thresh = [ <=5 ]; desc = "ABS_OBS_CLIMO_MEAN_DIFF"; }, { - mpr_column = [ "CLIMO_CDF" ]; + mpr_column = [ "OBS_CLIMO_CDF" ]; mpr_thresh = [ >=0.25&&<=0.75 ]; - desc = "CLIMO_CDF_IQR"; + desc = "OBS_CLIMO_CDF_IQR"; } -// MET #2924 End replace ]; } obs = fcst; diff --git a/internal/test_unit/hdr/met_12_0.hdr b/internal/test_unit/hdr/met_12_0.hdr index b7d835c72..f3785b7b4 100644 --- a/internal/test_unit/hdr/met_12_0.hdr +++ b/internal/test_unit/hdr/met_12_0.hdr @@ -5,7 +5,7 @@ FHO : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_L ISC : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS 
FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL TILE_DIM TILE_XLL TILE_YLL NSCALE ISCALE MSE ISC FENERGY2 OENERGY2 BASER FBIAS MCTC : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL N_CAT _VAR_ MCTS : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL N_CAT ACC ACC_NCL ACC_NCU ACC_BCL ACC_BCU HK HK_BCL HK_BCU HSS HSS_BCL HSS_BCU GER GER_BCL GER_BCU HSS_EC HSS_EC_BCL HSS_EC_BCU EC_VALUE -MPR : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL INDEX OBS_SID OBS_LAT OBS_LON OBS_LVL OBS_ELV FCST OBS OBS_QC CLIMO_MEAN CLIMO_STDEV CLIMO_CDF +MPR : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL INDEX OBS_SID OBS_LAT OBS_LON OBS_LVL OBS_ELV FCST OBS OBS_QC OBS_CLIMO_MEAN OBS_CLIMO_STDEV OBS_CLIMO_CDF FCST_CLIMO_MEAN FCST_CLIMO_STDEV SEEPS : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL S12 S13 S21 S23 S31 S32 PF1 PF2 PF3 PV1 PV2 PV3 MEAN_FCST MEAN_OBS SEEPS SEEPS_MPR : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV 
OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE OBS_SID OBS_LAT OBS_LON FCST OBS OBS_QC FCST_CAT OBS_CAT P1 P2 T1 T2 SEEPS NBRCNT : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL FBS FBS_BCL FBS_BCU FSS FSS_BCL FSS_BCU AFSS AFSS_BCL AFSS_BCU UFSS UFSS_BCL UFSS_BCU F_RATE F_RATE_BCL F_RATE_BCU O_RATE O_RATE_BCL O_RATE_BCU diff --git a/internal/test_unit/xml/unit_climatology_1.5deg.xml b/internal/test_unit/xml/unit_climatology_1.5deg.xml index 278df4a8b..de22d9567 100644 --- a/internal/test_unit/xml/unit_climatology_1.5deg.xml +++ b/internal/test_unit/xml/unit_climatology_1.5deg.xml @@ -82,19 +82,11 @@ OUTPUT_DIR &OUTPUT_DIR;/climatology_1.5deg - - \ - -lookin &OUTPUT_DIR;/climatology_1.5deg/point_stat_WMO_CLIMO_1.5DEG_120000L_20120409_120000V.stat \ - -job filter -line_type MPR -column_thresh CLIMO_CDF 'lt0.1||gt0.9' \ - -dump_row &OUTPUT_DIR;/climatology_1.5deg/stat_analysis_WMO_1.5DEG_FILTER_CDF_dump.stat - - &OUTPUT_DIR;/climatology_1.5deg/stat_analysis_WMO_1.5DEG_FILTER_CDF_dump.stat diff --git a/internal/test_unit/xml/unit_climatology_mixed.xml b/internal/test_unit/xml/unit_climatology_mixed.xml new file mode 100644 index 000000000..0433b39e8 --- /dev/null +++ b/internal/test_unit/xml/unit_climatology_mixed.xml @@ -0,0 +1,39 @@ + + + + + + + + + + +]> + + + + + + &TEST_DIR; + true + + + &MET_BIN;/grid_stat + + OUTPUT_PREFIX FCST_NCEP_1.0DEG_OBS_WMO_1.5DEG + FCST_CLIMO_DIR &DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg + OBS_CLIMO_DIR &DATA_DIR_CLIMO;/ERA_DAILY_1.5deg + + \ + &DATA_DIR_MODEL;/grib2/gfs/gfs_2012040900_F024.grib2 \ + &DATA_DIR_MODEL;/grib2/gfsanl/gfsanl_4_20120410_0000_000.grb2 \ + &CONFIG_DIR;/GridStatConfig_climo_FCST_NCEP_1.0DEG_OBS_WMO_1.5DEG \ + -outdir &OUTPUT_DIR;/climatology_mixed -v 2 
+ + + &OUTPUT_DIR;/climatology_mixed/grid_stat_FCST_NCEP_1.0DEG_OBS_WMO_1.5DEG_240000L_20120410_000000V.stat + &OUTPUT_DIR;/climatology_mixed/grid_stat_FCST_NCEP_1.0DEG_OBS_WMO_1.5DEG_240000L_20120410_000000V_pairs.nc + + + diff --git a/src/basic/vx_config/threshold.cc b/src/basic/vx_config/threshold.cc index bfff7a008..cbf0a3cb7 100644 --- a/src/basic/vx_config/threshold.cc +++ b/src/basic/vx_config/threshold.cc @@ -103,9 +103,8 @@ for (auto const& x : perc_thresh_info_map) { } // - // MET #2924: For backward compatibility support SCP and CDP - // threshold types - // + // MET #2924 Backward compatible support for SCP and CDP types + // if ( !match && (search_cs.startswith(scp_perc_thresh_type_str.c_str()) || diff --git a/src/basic/vx_util/stat_column_defs.h b/src/basic/vx_util/stat_column_defs.h index 5ae3a64b1..a7b542714 100644 --- a/src/basic/vx_util/stat_column_defs.h +++ b/src/basic/vx_util/stat_column_defs.h @@ -197,7 +197,6 @@ static const char * const eclv_columns [] = { "N_PNT", "CL_", "VALUE_" }; -/* MET #2924 Replace this change static const char * const mpr_columns [] = { "TOTAL", "INDEX", "OBS_SID", "OBS_LAT", "OBS_LON", "OBS_LVL", @@ -206,17 +205,6 @@ static const char * const mpr_columns [] = { "OBS_CLIMO_MEAN", "OBS_CLIMO_STDEV", "OBS_CLIMO_CDF", "FCST_CLIMO_MEAN", "FCST_CLIMO_STDEV" }; -*/ - -static const char * const mpr_columns [] = { - "TOTAL", "INDEX", "OBS_SID", - "OBS_LAT", "OBS_LON", "OBS_LVL", - "OBS_ELV", "FCST", "OBS", - "OBS_QC", - "CLIMO_MEAN", "CLIMO_STDEV", "CLIMO_CDF" -}; - -// MET #2924 End replace static const char * const nbrctc_columns [] = { "TOTAL", "FY_OY", "FY_ON", @@ -308,7 +296,6 @@ static const char * const phist_columns [] = { "BIN_" }; -/* MET #2924 Replace this section static const char * const orank_columns [] = { "TOTAL", "INDEX", "OBS_SID", "OBS_LAT", "OBS_LON", "OBS_LVL", @@ -319,19 +306,6 @@ static const char * const orank_columns [] = { "SPREAD_OERR", "SPREAD_PLUS_OERR", "OBS_CLIMO_STDEV", "FCST_CLIMO_MEAN", 
"FCST_CLIMO_STDEV" }; -*/ - -static const char * const orank_columns [] = { - "TOTAL", "INDEX", "OBS_SID", - "OBS_LAT", "OBS_LON", "OBS_LVL", - "OBS_ELV", "OBS", "PIT", - "RANK", "N_ENS_VLD", "N_ENS", - "ENS_", "OBS_QC", "ENS_MEAN", - "CLIMO_MEAN", "SPREAD", "ENS_MEAN_OERR", - "SPREAD_OERR", "SPREAD_PLUS_OERR", "CLIMO_STDEV" -}; - -// MET #2924 End replace static const char * const ssvar_columns [] = { "TOTAL", "N_BIN", "BIN_i", diff --git a/src/libcode/vx_data2d_grib/data2d_grib_utils.cc b/src/libcode/vx_data2d_grib/data2d_grib_utils.cc index f948b2625..cc3e81f82 100644 --- a/src/libcode/vx_data2d_grib/data2d_grib_utils.cc +++ b/src/libcode/vx_data2d_grib/data2d_grib_utils.cc @@ -669,7 +669,8 @@ void read_pds(const GribRecord &r, int &bms_flag, // // Set the valid and accumulation times based on the - // contents of the time range indicator + // contents of the time range indicator in GRIB1 Table 5: + // https://www.nco.ncep.noaa.gov/pmb/docs/on388/table5.html // switch((int) pds->tri) { @@ -725,6 +726,11 @@ void read_pds(const GribRecord &r, int &bms_flag, accum = 0; break; + case 123: // Average of N uninitialized analyses, starting at the reference time, at intervals of P2. 
+ valid_ut = init_ut; + accum = 0; + break; + case 136: // Climatological Standard Deviation valid_ut = init_ut; accum = 0; diff --git a/src/libcode/vx_stat_out/stat_columns.cc b/src/libcode/vx_stat_out/stat_columns.cc index 330a74d82..51992d7b5 100644 --- a/src/libcode/vx_stat_out/stat_columns.cc +++ b/src/libcode/vx_stat_out/stat_columns.cc @@ -488,10 +488,8 @@ void write_orank_header_row(int hdr_flag, int n_ens, AsciiTable &at, at.set_entry(r, c+17+n_ens, (string)orank_columns[18]); at.set_entry(r, c+18+n_ens, (string)orank_columns[19]); at.set_entry(r, c+19+n_ens, (string)orank_columns[20]); - /* MET #2924 Uncomment this section at.set_entry(r, c+20+n_ens, (string)orank_columns[21]); at.set_entry(r, c+21+n_ens, (string)orank_columns[22]); - */ return; } @@ -2927,7 +2925,7 @@ void write_sl1l2_cols(const SL1L2Info &sl1l2_info, sl1l2_info.oobar); at.set_entry(r, c+6, // MAE - sl1l2_info.mae); + sl1l2_info.smae); return; } @@ -2963,7 +2961,7 @@ void write_sal1l2_cols(const SL1L2Info &sl1l2_info, sl1l2_info.ooabar); at.set_entry(r, c+6, // MAE - sl1l2_info.mae); + sl1l2_info.samae); return; } @@ -4126,13 +4124,11 @@ void write_mpr_cols(const PairDataPoint *pd_ptr, int i, at.set_entry(r, c+12, // Observation Climatological CDF Value pd_ptr->ocdf_na[i]); -/* MET #2924 Uncomment this section at.set_entry(r, c+13, // Forecast Climatological Mean Value pd_ptr->fcmn_na[i]); at.set_entry(r, c+14, // Forecast Climatological Standard Deviation Value pd_ptr->fcsd_na[i]); -*/ return; } @@ -4589,7 +4585,6 @@ void write_orank_cols(const PairDataEnsemble *pd_ptr, int i, at.set_entry(r, c+19+pd_ptr->n_ens, pd_ptr->ocsd_na[i]); -/* MET #2924 Uncomment this section // Forecast climatology mean values at.set_entry(r, c+20+pd_ptr->n_ens, pd_ptr->fcmn_na[i]); @@ -4597,7 +4592,6 @@ void write_orank_cols(const PairDataEnsemble *pd_ptr, int i, // Forecast climatology standard deviation values at.set_entry(r, c+21+pd_ptr->n_ens, pd_ptr->fcsd_na[i]); -*/ return; } diff --git 
a/src/libcode/vx_statistics/compute_stats.cc b/src/libcode/vx_statistics/compute_stats.cc index 40c4e8258..bbc9e0ac1 100644 --- a/src/libcode/vx_statistics/compute_stats.cc +++ b/src/libcode/vx_statistics/compute_stats.cc @@ -101,7 +101,7 @@ void compute_cntinfo(const SL1L2Info &s, bool aflag, CNTInfo &cnt_info) { cnt_info.me2.v = cnt_info.me.v * cnt_info.me.v; // Compute mean absolute error - cnt_info.mae.v = s.mae; + cnt_info.mae.v = s.smae; // Compute mean squared error cnt_info.mse.v = ffbar + oobar - 2.0*fobar; @@ -1111,7 +1111,7 @@ void compute_sl1l2_mean(const SL1L2Info *sl1l2_info, int n, sl1l2_mean.obar += sl1l2_info[i].obar; sl1l2_mean.ffbar += sl1l2_info[i].ffbar; sl1l2_mean.oobar += sl1l2_info[i].oobar; - sl1l2_mean.mae += sl1l2_info[i].mae; + sl1l2_mean.smae += sl1l2_info[i].smae; } if(sl1l2_info[i].sacount > 0) { @@ -1121,6 +1121,7 @@ void compute_sl1l2_mean(const SL1L2Info *sl1l2_info, int n, sl1l2_mean.oabar += sl1l2_info[i].oabar; sl1l2_mean.ffabar += sl1l2_info[i].ffabar; sl1l2_mean.ooabar += sl1l2_info[i].ooabar; + sl1l2_mean.samae += sl1l2_info[i].samae; } } // end for i @@ -1130,13 +1131,14 @@ void compute_sl1l2_mean(const SL1L2Info *sl1l2_info, int n, sl1l2_mean.obar /= n_sl1l2; sl1l2_mean.ffbar /= n_sl1l2; sl1l2_mean.oobar /= n_sl1l2; - sl1l2_mean.mae /= n_sl1l2; + sl1l2_mean.smae /= n_sl1l2; } if(sl1l2_mean.sacount > 0) { sl1l2_mean.fabar /= n_sal1l2; sl1l2_mean.oabar /= n_sal1l2; sl1l2_mean.ffabar /= n_sal1l2; sl1l2_mean.ooabar /= n_sal1l2; + sl1l2_mean.samae /= n_sal1l2; } return; diff --git a/src/libcode/vx_statistics/met_stats.cc b/src/libcode/vx_statistics/met_stats.cc index 9312867e4..4c679aed8 100644 --- a/src/libcode/vx_statistics/met_stats.cc +++ b/src/libcode/vx_statistics/met_stats.cc @@ -1124,11 +1124,11 @@ SL1L2Info & SL1L2Info::operator+=(const SL1L2Info &c) { s_info.ffbar = (ffbar*scount + c.ffbar*c.scount)/s_info.scount; s_info.oobar = (oobar*scount + c.oobar*c.scount)/s_info.scount; - if(is_bad_data(mae) || 
is_bad_data(c.mae)) { - s_info.mae = bad_data_double; + if(is_bad_data(smae) || is_bad_data(c.smae)) { + s_info.smae = bad_data_double; } else { - s_info.mae = (mae*scount + c.mae*c.scount)/s_info.scount; + s_info.smae = (smae*scount + c.smae*c.scount)/s_info.scount; } } @@ -1141,11 +1141,11 @@ SL1L2Info & SL1L2Info::operator+=(const SL1L2Info &c) { s_info.ffabar = (ffabar*sacount + c.ffabar*c.sacount)/s_info.sacount; s_info.ooabar = (ooabar*sacount + c.ooabar*c.sacount)/s_info.sacount; - if(is_bad_data(mae) || is_bad_data(c.mae)) { - s_info.mae = bad_data_double; + if(is_bad_data(samae) || is_bad_data(c.samae)) { + s_info.samae = bad_data_double; } else { - s_info.mae = (mae*sacount + c.mae*c.sacount)/s_info.sacount; + s_info.samae = (samae*sacount + c.samae*c.sacount)/s_info.sacount; } } @@ -1170,15 +1170,15 @@ void SL1L2Info::zero_out() { // SL1L2 Quantities fbar = obar = 0.0; fobar = ffbar = oobar = 0.0; + smae = 0.0; scount = 0; // SAL1L2 Quantities fabar = oabar = 0.0; foabar = ffabar = ooabar = 0.0; + samae = 0.0; sacount = 0; - mae = 0.0; - return; } @@ -1211,6 +1211,7 @@ void SL1L2Info::assign(const SL1L2Info &c) { fobar = c.fobar; ffbar = c.ffbar; oobar = c.oobar; + smae = c.smae; scount = c.scount; // SAL1L2 Quantities @@ -1219,10 +1220,9 @@ void SL1L2Info::assign(const SL1L2Info &c) { foabar = c.foabar; ffabar = c.ffabar; ooabar = c.ooabar; + samae = c.samae; sacount = c.sacount; - mae = c.mae; - return; } @@ -1272,7 +1272,7 @@ void SL1L2Info::set(const PairDataPoint &pd_all) { fobar += wgt*f*o; ffbar += wgt*f*f; oobar += wgt*o*o; - mae += wgt*fabs(f-o); + smae += wgt*fabs(f-o); scount++; // SAL1L2 sums @@ -1282,6 +1282,7 @@ void SL1L2Info::set(const PairDataPoint &pd_all) { foabar += wgt*(f-fc)*(o-oc); ffabar += wgt*(f-fc)*(f-fc); ooabar += wgt*(o-oc)*(o-oc); + samae += wgt*fabs((f-fc)-(o-oc)); sacount++; } } diff --git a/src/libcode/vx_statistics/met_stats.h b/src/libcode/vx_statistics/met_stats.h index b053266c3..f3bef1a90 100644 --- 
a/src/libcode/vx_statistics/met_stats.h +++ b/src/libcode/vx_statistics/met_stats.h @@ -224,17 +224,16 @@ class SL1L2Info { double fbar, obar; double fobar; double ffbar, oobar; + double smae; int scount; // SAL1L2 Quantities double fabar, oabar; double foabar; double ffabar, ooabar; + double samae; int sacount; - // Mean absolute error - double mae; - // Compute sums void set(const PairDataPoint &); diff --git a/src/libcode/vx_statistics/pair_data_point.cc b/src/libcode/vx_statistics/pair_data_point.cc index 0d29dda9e..0e7f6203e 100644 --- a/src/libcode/vx_statistics/pair_data_point.cc +++ b/src/libcode/vx_statistics/pair_data_point.cc @@ -820,7 +820,6 @@ double get_mpr_column_value(double f, double o, const ClimoPntInfo &cpi, const char *s) { double v; -/* #MET #2924 Replace this section if(strcasecmp(s, "FCST") == 0) v = f; else if(strcasecmp(s, "OBS") == 0) v = o; else if(strcasecmp(s, "FCST_CLIMO_MEAN") == 0) v = cpi.fcmn; @@ -831,16 +830,6 @@ double get_mpr_column_value(double f, double o, const ClimoPntInfo &cpi, v = (is_bad_data(cpi.ocmn) || is_bad_data(cpi.ocsd) ? bad_data_double : normal_cdf(o, cpi.ocmn, cpi.ocsd)); } -*/ - if(strcasecmp(s, "FCST") == 0) v = f; - else if(strcasecmp(s, "OBS") == 0) v = o; - else if(strcasecmp(s, "CLIMO_MEAN") == 0) v = cpi.ocmn; - else if(strcasecmp(s, "CLIMO_STDEV") == 0) v = cpi.ocsd; - else if(strcasecmp(s, "CLIMO_CDF") == 0) { - v = (is_bad_data(cpi.ocmn) || is_bad_data(cpi.ocsd) ? 
- bad_data_double : normal_cdf(o, cpi.ocmn, cpi.ocsd)); - } -// MET #2924 End replace else { mlog << Error << "\nget_mpr_column_value() -> " << "unsupported matched pair column name requested in \"" diff --git a/src/libcode/vx_statistics/read_climo.cc b/src/libcode/vx_statistics/read_climo.cc index 8e43749a8..f5a0f2db7 100644 --- a/src/libcode/vx_statistics/read_climo.cc +++ b/src/libcode/vx_statistics/read_climo.cc @@ -29,7 +29,7 @@ using namespace std; static void read_climo_file( const char *, GrdFileType, Dictionary *, unixtime, int, int, const Grid &, const RegridInfo &, - DataPlaneArray &dpa); + DataPlaneArray &dpa, const char *); static DataPlaneArray climo_time_interp( const DataPlaneArray &, int, unixtime, InterpMthd); @@ -40,7 +40,8 @@ static DataPlane climo_hms_interp( //////////////////////////////////////////////////////////////////////// DataPlane read_climo_data_plane(Dictionary *dict, int i_vx, - unixtime vld_ut, const Grid &vx_grid) { + unixtime vld_ut, const Grid &vx_grid, + const char *desc) { DataPlane dp; DataPlaneArray dpa; @@ -48,13 +49,13 @@ DataPlane read_climo_data_plane(Dictionary *dict, int i_vx, if(!dict) return dp; // Read array of climatology fields - dpa = read_climo_data_plane_array(dict, i_vx, vld_ut, vx_grid); + dpa = read_climo_data_plane_array(dict, i_vx, vld_ut, vx_grid, desc); // Check for multiple matches if(dpa.n_planes() > 1) { mlog << Warning << "\nread_climo_data_plane() -> " - << "Found " << dpa.n_planes() << " matching climatology " - << "fields. Using the first match found.\n\n"; + << "Found " << dpa.n_planes() << " matching " << desc + << " fields. 
Using the first match found.\n\n"; } // Store the first match found @@ -67,7 +68,8 @@ DataPlane read_climo_data_plane(Dictionary *dict, int i_vx, DataPlaneArray read_climo_data_plane_array(Dictionary *dict, int i_vx, unixtime vld_ut, - const Grid &vx_grid) { + const Grid &vx_grid, + const char *desc) { DataPlaneArray dpa; StringArray climo_files; RegridInfo regrid_info; @@ -100,7 +102,7 @@ DataPlaneArray read_climo_data_plane_array(Dictionary *dict, int i_vx, // Range check day_interval if(!is_bad_data(day_interval) && day_interval < 1) { mlog << Error << "\nread_climo_data_plane_array() -> " - << "The \"" << conf_key_day_interval << "\" entry (" + << "The " << conf_key_day_interval << " entry (" << day_interval << ") can be set to " << na_str << " or a value of at least 1.\n\n"; exit(1); @@ -113,7 +115,7 @@ DataPlaneArray read_climo_data_plane_array(Dictionary *dict, int i_vx, if(!is_bad_data(hour_interval) && (hour_interval <= 0 || hour_interval > 24)) { mlog << Error << "\nread_climo_data_plane_array() -> " - << "The \"" << conf_key_hour_interval << "\" entry (" + << "The " << conf_key_hour_interval << " entry (" << hour_interval << ") can be set to " << na_str << " or a value between 0 and 24.\n\n"; exit(1); @@ -131,14 +133,15 @@ DataPlaneArray read_climo_data_plane_array(Dictionary *dict, int i_vx, // Search the files for the requested records for(i=0; i " - << "Trouble reading climatology file \"" - << climo_file << "\"\n\n"; + << "Trouble reading climatology file " + << climo_file << "\n\n"; return; } @@ -191,21 +194,21 @@ void read_climo_file(const char *climo_file, GrdFileType ctype, // Check the day time step if(!is_bad_data(day_ts) && abs(day_diff_sec) >= day_ts) { - mlog << Debug(3) << "Skipping " << clm_ut_cs << " \"" << info->magic_str() - << "\" climatology field with " << day_diff_sec / sec_per_day + mlog << Debug(3) << "Skipping " << clm_ut_cs << " " << info->magic_str() + << " climatology field with " << day_diff_sec / sec_per_day << " day offset 
(" << conf_key_day_interval << " = " - << day_ts / sec_per_day << ") from file \"" - << climo_file << "\".\n"; + << day_ts / sec_per_day << ") from file " + << climo_file << ".\n"; continue; } // Check the hour time step if(!is_bad_data(hour_ts) && abs(hms_diff_sec) >= hour_ts) { - mlog << Debug(3) << "Skipping " << clm_ut_cs << " \"" << info->magic_str() - << "\" climatology field with " << (double) hms_diff_sec / sec_per_hour + mlog << Debug(3) << "Skipping " << clm_ut_cs << " " << info->magic_str() + << " climatology field with " << (double) hms_diff_sec / sec_per_hour << " hour offset (" << conf_key_hour_interval << " = " - << hour_ts / sec_per_hour << ") from file \"" - << climo_file << "\".\n"; + << hour_ts / sec_per_hour << ") from file " + << climo_file << ".\n"; continue; } @@ -213,17 +216,17 @@ void read_climo_file(const char *climo_file, GrdFileType ctype, unixtime clm_vld_ut = vld_ut + day_diff_sec + hms_diff_sec; // Print log message for matching record - mlog << Debug(3) << "Storing " << clm_ut_cs << " \"" << info->magic_str() - << "\" climatology field with " << day_diff_sec / sec_per_day + mlog << Debug(3) << "Storing " << clm_ut_cs << " " << info->magic_str() + << " climatology field with " << day_diff_sec / sec_per_day << " day, " << (double) hms_diff_sec / sec_per_hour << " hour offset as time " - << unix_to_yyyymmdd_hhmmss(clm_vld_ut) << " from file \"" - << climo_file << "\".\n"; + << unix_to_yyyymmdd_hhmmss(clm_vld_ut) << " from file " + << climo_file << ".\n"; // Regrid, if needed if(!(mtddf->grid() == vx_grid)) { - mlog << Debug(2) << "Regridding " << clm_ut_cs << " \"" - << info->magic_str() - << "\" climatology field to the verification grid.\n"; + mlog << Debug(2) << "Regridding " << clm_ut_cs << " " + << desc << " field " << info->magic_str() + << " to the verification grid.\n"; dp = met_regrid(clm_dpa[i], mtddf->grid(), vx_grid, regrid_info); } @@ -343,8 +346,8 @@ DataPlaneArray climo_time_interp(const DataPlaneArray &dpa, int day_ts, 
// This should only occur when day_interval > 1. if(day_ts <= 3600*24) { mlog << Error << "\nclimo_time_interp() -> " - << "Expecting 1 or 2 climatology fields when \"" - << conf_key_day_interval << "\" <= 1 but found " + << "Expecting 1 or 2 climatology fields when " + << conf_key_day_interval << " <= 1 but found " << it->second.n() << "\n\n"; exit(1); } diff --git a/src/libcode/vx_statistics/read_climo.h b/src/libcode/vx_statistics/read_climo.h index a017df638..362efa3fc 100644 --- a/src/libcode/vx_statistics/read_climo.h +++ b/src/libcode/vx_statistics/read_climo.h @@ -19,10 +19,12 @@ //////////////////////////////////////////////////////////////////////// extern DataPlane read_climo_data_plane(Dictionary *, int, - unixtime, const Grid &); + unixtime, const Grid &, + const char *); extern DataPlaneArray read_climo_data_plane_array(Dictionary *, int, - unixtime, const Grid &); + unixtime, const Grid &, + const char *); //////////////////////////////////////////////////////////////////////// diff --git a/src/tools/core/ensemble_stat/ensemble_stat.cc b/src/tools/core/ensemble_stat/ensemble_stat.cc index 3dedd76ef..826b8eaf7 100644 --- a/src/tools/core/ensemble_stat/ensemble_stat.cc +++ b/src/tools/core/ensemble_stat/ensemble_stat.cc @@ -784,18 +784,22 @@ void process_point_vx() { // Read forecast climatology data fcmn_dpa = read_climo_data_plane_array( conf_info.conf.lookup_array(conf_key_fcst_climo_mean_field, false), - i, ens_valid_ut, grid); + i, ens_valid_ut, grid, + "forecast climatology mean"); fcsd_dpa = read_climo_data_plane_array( - conf_info.conf.lookup_array(conf_key_fcst_climo_stdev_field, false), - i, ens_valid_ut, grid); + conf_info.conf.lookup_array(conf_key_fcst_climo_stdev_field, false), + i, ens_valid_ut, grid, + "forecast climatology standard deviation"); // Read observation climatology data ocmn_dpa = read_climo_data_plane_array( conf_info.conf.lookup_array(conf_key_obs_climo_mean_field, false), - i, ens_valid_ut, grid); + i, ens_valid_ut, grid, 
+ "observation climatology mean"); ocsd_dpa = read_climo_data_plane_array( - conf_info.conf.lookup_array(conf_key_obs_climo_stdev_field, false), - i, ens_valid_ut, grid); + conf_info.conf.lookup_array(conf_key_obs_climo_stdev_field, false), + i, ens_valid_ut, grid, + "observation climatology standard deviation"); mlog << Debug(3) << "For " << conf_info.vx_opt[i].vx_pd.fcst_info->magic_str() << ", found " @@ -1424,18 +1428,22 @@ void process_grid_vx() { // Read forecast climatology data fcmn_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_fcst_climo_mean_field, false), - i, ens_valid_ut, grid); + i, ens_valid_ut, grid, + "forecast climatology mean"); fcsd_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_fcst_climo_stdev_field, false), - i, ens_valid_ut, grid); + i, ens_valid_ut, grid, + "forecast climatology standard deviation"); // Read observation climatology data ocmn_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_obs_climo_mean_field, false), - i, ens_valid_ut, grid); + i, ens_valid_ut, grid, + "observation climatology mean"); ocsd_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_obs_climo_stdev_field, false), - i, ens_valid_ut, grid); + i, ens_valid_ut, grid, + "observation climatology standard deviation"); mlog << Debug(3) << "For " << conf_info.vx_opt[i].vx_pd.fcst_info->magic_str() << ", found " diff --git a/src/tools/core/grid_stat/grid_stat.cc b/src/tools/core/grid_stat/grid_stat.cc index fcd0bc33a..33f3e14df 100644 --- a/src/tools/core/grid_stat/grid_stat.cc +++ b/src/tools/core/grid_stat/grid_stat.cc @@ -196,8 +196,8 @@ static void clean_up(); static void usage(); static void set_outdir(const StringArray &); static void set_compress(const StringArray &); -static bool read_data_plane(VarInfo* info, DataPlane& dp, Met2dDataFile* mtddf, - const ConcatString &filename); +static bool read_data_plane(VarInfo *info, DataPlane &dp, Met2dDataFile *mtddf, + const ConcatString &filename, const char 
*desc); #ifdef WITH_UGRID static void set_ugrid_config(const StringArray &); #endif @@ -725,7 +725,8 @@ void process_scores() { // Read the gridded data from the input forecast file if(!read_data_plane(conf_info.vx_opt[i].fcst_info, - fcst_dp, fcst_mtddf, fcst_file)) continue; + fcst_dp, fcst_mtddf, fcst_file, + "forecast")) continue; mlog << Debug(3) << "Reading forecast data for " @@ -740,7 +741,8 @@ void process_scores() { // Read the gridded data from the input observation file if(!read_data_plane(conf_info.vx_opt[i].obs_info, - obs_dp, obs_mtddf, obs_file)) continue; + obs_dp, obs_mtddf, obs_file, + "observation")) continue; mlog << Debug(3) << "Reading observation data for " @@ -790,18 +792,22 @@ void process_scores() { // Read forecast climatology data fcmn_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_fcst_climo_mean_field, false), - i, fcst_dp.valid(), grid); + i, fcst_dp.valid(), grid, + "forecast climatology mean"); fcsd_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_fcst_climo_stdev_field, false), - i, fcst_dp.valid(), grid); + i, fcst_dp.valid(), grid, + "forecast climatology standard deviation"); // Read observation climatology data ocmn_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_obs_climo_mean_field, false), - i, fcst_dp.valid(), grid); + i, fcst_dp.valid(), grid, + "observation climatology mean"); ocsd_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_obs_climo_stdev_field, false), - i, fcst_dp.valid(), grid); + i, fcst_dp.valid(), grid, + "observation climatology standard deviation"); mlog << Debug(3) << "For " << conf_info.vx_opt[i].fcst_info->magic_str() << ", found " @@ -1048,27 +1054,33 @@ void process_scores() { // Read forecast data for UGRD if(!read_data_plane(conf_info.vx_opt[ui].fcst_info, - fu_dp, fcst_mtddf, fcst_file)) continue; + fu_dp, fcst_mtddf, fcst_file, + "U-wind forecast")) continue; // Read observation data for UGRD 
if(!read_data_plane(conf_info.vx_opt[ui].obs_info, - ou_dp, obs_mtddf, obs_file)) continue; + ou_dp, obs_mtddf, obs_file, + "U-wind observation")) continue; // Read the forecast climatology data for UGRD fcmnu_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_fcst_climo_mean_field, false), - ui, fcst_dp.valid(), grid); + ui, fcst_dp.valid(), grid, + "forecast U-wind climatology mean"); fcsdu_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_fcst_climo_stdev_field, false), - ui, fcst_dp.valid(), grid); + ui, fcst_dp.valid(), grid, + "forecast U-wind climatology standard deviation"); // Read the observation climatology data for UGRD ocmnu_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_obs_climo_mean_field, false), - ui, fcst_dp.valid(), grid); + ui, fcst_dp.valid(), grid, + "observation U-wind climatology mean"); ocsdu_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_obs_climo_stdev_field, false), - ui, fcst_dp.valid(), grid); + ui, fcst_dp.valid(), grid, + "observation U-wind climatology standard deviation"); // If requested in the config file, smooth the forecast // and climatology U-wind fields @@ -1178,7 +1190,6 @@ void process_scores() { i, mthd, pnts, conf_info.vx_opt[i].interp_info.field); } - /* MET #2924 Replace this section if(conf_info.vx_opt[i].nc_info.do_climo && !fcmn_dp.is_empty()) { write_nc((string)"FCST_CLIMO_MEAN", fcmn_dp, @@ -1209,26 +1220,6 @@ void process_scores() { i, mthd, pnts, conf_info.vx_opt[i].interp_info.field); } - */ - if(conf_info.vx_opt[i].nc_info.do_climo && - !ocmn_dp.is_empty()) { - write_nc((string)"CLIMO_MEAN", ocmn_dp, - i, mthd, pnts, - conf_info.vx_opt[i].interp_info.field); - } - if(conf_info.vx_opt[i].nc_info.do_climo && - !ocsd_dp.is_empty()) { - write_nc((string)"CLIMO_STDEV", fcsd_dp, - i, mthd, pnts, - conf_info.vx_opt[i].interp_info.field); - } - if(conf_info.vx_opt[i].nc_info.do_climo && - !ocmn_dp.is_empty() && !ocsd_dp.is_empty()) { - 
write_nc((string)"CLIMO_CDF", normal_cdf(obs_dp, ocmn_dp, ocsd_dp), - i, mthd, pnts, - conf_info.vx_opt[i].interp_info.field); - } - // MET #2924 End replace // Write out the fields of requested climo distribution percentile threshold values if(conf_info.vx_opt[i].nc_info.do_climo_cdp && @@ -1249,7 +1240,6 @@ void process_scores() { // Process all CDP thresholds except 0 and 100 for(vector::iterator it = simp.begin(); it != simp.end(); it++) { - /* MET #2924 Replace this section if(it->ptype() == perc_thresh_fcst_climo_dist && !is_eq(it->pvalue(), 0.0) && !is_eq(it->pvalue(), 100.0)) { @@ -1266,16 +1256,6 @@ void process_scores() { i, mthd, pnts, conf_info.vx_opt[i].interp_info.field); } - */ - if(it->ptype() == perc_thresh_obs_climo_dist && - !is_eq(it->pvalue(), 0.0) && - !is_eq(it->pvalue(), 100.0)) { - cs << cs_erase << "CLIMO_CDP" << nint(it->pvalue()); - write_nc(cs, normal_cdf_inv(it->pvalue()/100.0, ocmn_dp, ocsd_dp), - i, mthd, pnts, - conf_info.vx_opt[i].interp_info.field); - } - // MET #2924 End replace } // end for it } @@ -1912,19 +1892,23 @@ void process_scores() { // Read forecast data for UGRD if(!read_data_plane(conf_info.vx_opt[ui].fcst_info, - fu_dp, fcst_mtddf, fcst_file)) continue; + fu_dp, fcst_mtddf, fcst_file, + "U-wind forecast")) continue; // Read observation data for UGRD if(!read_data_plane(conf_info.vx_opt[ui].obs_info, - ou_dp, obs_mtddf, obs_file)) continue; + ou_dp, obs_mtddf, obs_file, + "U-wind observation")) continue; // Read climatology data for UGRD fcmnu_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_fcst_climo_mean_field, false), - ui, fcst_dp.valid(), grid); + ui, fcst_dp.valid(), grid, + "forecast U-wind climatology mean"); ocmnu_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_obs_climo_mean_field, false), - ui, fcst_dp.valid(), grid); + ui, fcst_dp.valid(), grid, + "observation U-wind climatology mean"); // Apply Fourier decomposition to the U-wind fields fu_dp_smooth = fu_dp; @@ -2037,7 
+2021,6 @@ void process_scores() { i, shc.get_interp_mthd(), bad_data_int, FieldType::Both); } - /* MET #2924 Replace this change if(conf_info.vx_opt[i].nc_info.do_climo && !fcmn_dp_smooth.is_empty()) { write_nc((string)"FCST_CLIMO_MEAN", fcmn_dp_smooth, @@ -2050,14 +2033,6 @@ void process_scores() { i, shc.get_interp_mthd(), bad_data_int, FieldType::Both); } - */ - if(conf_info.vx_opt[i].nc_info.do_climo && - !ocmn_dp_smooth.is_empty()) { - write_nc((string)"CLIMO_MEAN", ocmn_dp_smooth, - i, shc.get_interp_mthd(), - bad_data_int, FieldType::Both); - } - // MET #2924 End replace } // end if } // end for j @@ -2838,7 +2813,6 @@ void write_nc(const ConcatString &field_name, const DataPlane &dp, level_att = shc.get_fcst_lev(); units_att = conf_info.vx_opt[i_vx].fcst_info->units_attr(); } - /* MET #2924 Replace this section else if(field_name == "OBS_CLIMO_MEAN") { var_name << cs_erase << field_name << "_" << obs_name << var_suffix << "_" << mask_str; @@ -2895,52 +2869,6 @@ void write_nc(const ConcatString &field_name, const DataPlane &dp, level_att = shc.get_obs_lev(); units_att = conf_info.vx_opt[i_vx].obs_info->units_attr(); } - */ - else if(field_name == "CLIMO_MEAN") { - var_name << cs_erase << field_name << "_" - << obs_name << var_suffix << "_" << mask_str; - - // Append interpolation string for Fourier decomposition - if(interp_str.nonempty()) { - if(interp_str.startswith("_WV")) var_name << interp_str; - } - long_att << cs_erase - << "Climatology mean for " - << obs_long_name; - level_att = shc.get_obs_lev(); - units_att = conf_info.vx_opt[i_vx].obs_info->units_attr(); - } - else if(field_name == "CLIMO_STDEV") { - var_name << cs_erase << field_name << "_" - << obs_name << var_suffix << "_" << mask_str; - long_att << cs_erase - << "Climatology standard deviation for " - << obs_long_name; - level_att = shc.get_obs_lev(); - units_att = conf_info.vx_opt[i_vx].obs_info->units_attr(); - } - else if(field_name == "CLIMO_CDF") { - var_name << cs_erase << field_name 
<< "_" - << obs_name << var_suffix << "_" << mask_str; - long_att << cs_erase - << "Climatology cumulative distribution function for " - << obs_long_name; - level_att = shc.get_obs_lev(); - units_att = conf_info.vx_opt[i_vx].obs_info->units_attr(); - } - else if(field_name.startswith("CLIMO_CDP")) { - var_name << cs_erase - << field_name << "_" - << conf_info.vx_opt[i_vx].obs_info->name_attr() << "_" - << conf_info.vx_opt[i_vx].obs_info->level_attr() - << var_suffix << "_" << mask_str; - long_att << cs_erase - << "Climatology distribution percentile thresholds for " - << obs_long_name; - level_att = shc.get_obs_lev(); - units_att = conf_info.vx_opt[i_vx].obs_info->units_attr(); - } - // MET #2924 end replace else if(check_reg_exp("FCST_XGRAD_", field_name.c_str()) || check_reg_exp("FCST_YGRAD_", field_name.c_str())) { var_name << cs_erase << field_name << "_" @@ -3394,15 +3322,15 @@ void set_compress(const StringArray & a) { //////////////////////////////////////////////////////////////////////// -bool read_data_plane(VarInfo* info, DataPlane& dp, Met2dDataFile* mtddf, - const ConcatString &filename) { +bool read_data_plane(VarInfo *info, DataPlane &dp, Met2dDataFile *mtddf, + const ConcatString &filename, const char *desc) { bool status = mtddf->data_plane(*info, dp); if(!status) { mlog << Warning << "\nread_data_plane() -> " << info->magic_str() - << " not found in file: " << filename + << " not found in " << desc << " file: " << filename << "\n\n"; return false; } @@ -3410,7 +3338,7 @@ bool read_data_plane(VarInfo* info, DataPlane& dp, Met2dDataFile* mtddf, // Regrid, if necessary if(!(mtddf->grid() == grid)) { mlog << Debug(1) - << "Regridding field " + << "Regridding " << desc << " field " << info->magic_str() << " to the verification grid.\n"; dp = met_regrid(dp, mtddf->grid(), grid, info->regrid()); diff --git a/src/tools/core/point_stat/point_stat.cc b/src/tools/core/point_stat/point_stat.cc index 5532ea7e1..bda41ddf2 100644 --- 
a/src/tools/core/point_stat/point_stat.cc +++ b/src/tools/core/point_stat/point_stat.cc @@ -668,18 +668,22 @@ void process_fcst_climo_files() { // Read forecast climatology data fcmn_dpa = read_climo_data_plane_array( conf_info.conf.lookup_array(conf_key_fcst_climo_mean_field, false), - i, fcst_dpa[0].valid(), grid); + i, fcst_dpa[0].valid(), grid, + "forecast climatology mean"); fcsd_dpa = read_climo_data_plane_array( conf_info.conf.lookup_array(conf_key_fcst_climo_stdev_field, false), - i, fcst_dpa[0].valid(), grid); + i, fcst_dpa[0].valid(), grid, + "forecast climatology standard deviation"); // Read observation climatology data ocmn_dpa = read_climo_data_plane_array( conf_info.conf.lookup_array(conf_key_obs_climo_mean_field, false), - i, fcst_dpa[0].valid(), grid); + i, fcst_dpa[0].valid(), grid, + "observation climatology mean"); ocsd_dpa = read_climo_data_plane_array( conf_info.conf.lookup_array(conf_key_obs_climo_stdev_field, false), - i, fcst_dpa[0].valid(), grid); + i, fcst_dpa[0].valid(), grid, + "observation climatology standard deviation"); // Store data for the current verification task conf_info.vx_opt[i].vx_pd.set_fcst_dpa(fcst_dpa); diff --git a/src/tools/core/series_analysis/series_analysis.cc b/src/tools/core/series_analysis/series_analysis.cc index 9373c0e04..a2e8cdf5c 100644 --- a/src/tools/core/series_analysis/series_analysis.cc +++ b/src/tools/core/series_analysis/series_analysis.cc @@ -92,17 +92,18 @@ static void do_cnt (int, const PairDataPoint *); static void do_sl1l2 (int, const PairDataPoint *); static void do_pct (int, const PairDataPoint *); -static void store_stat_fho (int, const ConcatString &, const CTSInfo &); -static void store_stat_ctc (int, const ConcatString &, const CTSInfo &); -static void store_stat_cts (int, const ConcatString &, const CTSInfo &); -static void store_stat_mctc (int, const ConcatString &, const MCTSInfo &); -static void store_stat_mcts (int, const ConcatString &, const MCTSInfo &); -static void store_stat_cnt 
(int, const ConcatString &, const CNTInfo &); -static void store_stat_sl1l2(int, const ConcatString &, const SL1L2Info &); -static void store_stat_pct (int, const ConcatString &, const PCTInfo &); -static void store_stat_pstd (int, const ConcatString &, const PCTInfo &); -static void store_stat_pjc (int, const ConcatString &, const PCTInfo &); -static void store_stat_prc (int, const ConcatString &, const PCTInfo &); +static void store_stat_fho (int, const ConcatString &, const CTSInfo &); +static void store_stat_ctc (int, const ConcatString &, const CTSInfo &); +static void store_stat_cts (int, const ConcatString &, const CTSInfo &); +static void store_stat_mctc (int, const ConcatString &, const MCTSInfo &); +static void store_stat_mcts (int, const ConcatString &, const MCTSInfo &); +static void store_stat_cnt (int, const ConcatString &, const CNTInfo &); +static void store_stat_sl1l2 (int, const ConcatString &, const SL1L2Info &); +static void store_stat_sal1l2(int, const ConcatString &, const SL1L2Info &); +static void store_stat_pct (int, const ConcatString &, const PCTInfo &); +static void store_stat_pstd (int, const ConcatString &, const PCTInfo &); +static void store_stat_pjc (int, const ConcatString &, const PCTInfo &); +static void store_stat_prc (int, const ConcatString &, const PCTInfo &); static void setup_nc_file(const VarInfo *, const VarInfo *); static void add_nc_var(const ConcatString &, const ConcatString &, @@ -737,19 +738,25 @@ void process_scores() { << min(i_point + conf_info.block_size, nxy) << ".\n"; } - // Read climatology data for the current series entry + // Read forecast climatology data fcmn_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_fcst_climo_mean_field, false), - i_fcst, fcst_dp.valid(), grid); + i_fcst, fcst_dp.valid(), grid, + "forecast climatology mean"); fcsd_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_fcst_climo_stdev_field, false), - i_fcst, fcst_dp.valid(), grid); + i_fcst, 
fcst_dp.valid(), grid, + "forecast climatology standard deviation"); + + // Read observation climatology data ocmn_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_obs_climo_mean_field, false), - i_fcst, fcst_dp.valid(), grid); + i_fcst, fcst_dp.valid(), grid, + "observation climatology mean"); ocsd_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_obs_climo_stdev_field, false), - i_fcst, fcst_dp.valid(), grid); + i_fcst, fcst_dp.valid(), grid, + "observation climatology standard deviation"); bool fcmn_flag = (fcmn_dp.nx() == fcst_dp.nx() && fcmn_dp.ny() == fcst_dp.ny()); @@ -860,8 +867,8 @@ void process_scores() { // Compute partial sums if(!conf_info.fcst_info[0]->is_prob() && - (conf_info.output_stats[STATLineType::sl1l2].n() > 0 || - conf_info.output_stats[STATLineType::sal1l2].n() > 0)) { + (conf_info.output_stats[STATLineType::sl1l2].n() + + conf_info.output_stats[STATLineType::sal1l2].n()) > 0) { do_sl1l2(i_point+i, &pd_ptr[i]); } @@ -1128,6 +1135,11 @@ void do_sl1l2(int n, const PairDataPoint *pd_ptr) { for(j=0; j " << "unsupported column name requested \"" << c @@ -1822,6 +1828,62 @@ void store_stat_sl1l2(int n, const ConcatString &col, //////////////////////////////////////////////////////////////////////// +void store_stat_sal1l2(int n, const ConcatString &col, + const SL1L2Info &s_info) { + double v; + + // Set the column name to all upper case + ConcatString c = to_upper(col); + + // Get the column value + if(c == "TOTAL") { v = (double) s_info.sacount; } + else if(c == "FABAR") { v = s_info.fabar; } + else if(c == "OABAR") { v = s_info.oabar; } + else if(c == "FOABAR") { v = s_info.foabar; } + else if(c == "FFABAR") { v = s_info.ffabar; } + else if(c == "OOABAR") { v = s_info.ooabar; } + else if(c == "MAE") { v = s_info.samae; } + else { + mlog << Error << "\nstore_stat_sal1l2() -> " + << "unsupported column name requested \"" << c + << "\"\n\n"; + exit(1); + } + + // Construct the NetCDF variable name + ConcatString 
var_name("series_sal1l2_"); + var_name << c; + + // Append threshold information, if supplied + if(s_info.fthresh.get_type() != thresh_na || + s_info.othresh.get_type() != thresh_na) { + var_name << "_fcst" << s_info.fthresh.get_abbr_str() + << "_" << setlogic_to_abbr(conf_info.cnt_logic) + << "_obs" << s_info.othresh.get_abbr_str(); + } + + // Add map for this variable name + if(stat_data.count(var_name) == 0) { + + // Build key + ConcatString lty_stat("SAL1L2_"); + lty_stat << c; + + // Add new map entry + add_nc_var(var_name, c, stat_long_name[lty_stat], + s_info.fthresh.get_str(), + s_info.othresh.get_str(), + bad_data_double); + } + + // Store the statistic value + put_nc_val(n, var_name, (float) v); + + return; +} + +//////////////////////////////////////////////////////////////////////// + void store_stat_pct(int n, const ConcatString &col, const PCTInfo &pct_info) { int i = 0; diff --git a/src/tools/core/stat_analysis/aggr_stat_line.cc b/src/tools/core/stat_analysis/aggr_stat_line.cc index 75a9f6041..441cbe5e0 100644 --- a/src/tools/core/stat_analysis/aggr_stat_line.cc +++ b/src/tools/core/stat_analysis/aggr_stat_line.cc @@ -3999,12 +3999,6 @@ void mpr_to_cnt(STATAnalysisJob &job, const AggrMPRInfo &info, void mpr_to_psum(STATAnalysisJob &job, const AggrMPRInfo &info, int i_thresh, SL1L2Info &s_info) { - int i; - int scount, sacount; - double f, o, fc, oc; - double f_sum, o_sum, ff_sum, oo_sum, fo_sum; - double fa_sum, oa_sum, ffa_sum, ooa_sum, foa_sum; - double abs_err_sum; PairDataPoint pd_thr; // @@ -4034,31 +4028,41 @@ void mpr_to_psum(STATAnalysisJob &job, const AggrMPRInfo &info, // // Initialize counts // - scount = sacount = 0; - f_sum = o_sum = ff_sum = oo_sum = fo_sum = 0.0; - fa_sum = oa_sum = ffa_sum = ooa_sum = foa_sum = 0.0; - abs_err_sum = 0.0; + int scount = 0; + int sacount = 0; + double f_sum = 0.0; + double o_sum = 0.0; + double ff_sum = 0.0; + double oo_sum = 0.0; + double fo_sum = 0.0; + double smae_sum = 0.0; + double fa_sum = 0.0; + 
double oa_sum = 0.0; + double ffa_sum = 0.0; + double ooa_sum = 0.0; + double foa_sum = 0.0; + double samae_sum = 0.0; // // Update the partial sums // - for(i=0; in_use); // N_USE @@ -711,10 +709,8 @@ void write_orank_row_rad(AsciiTable &at, int row, int i_obs) { at.set_entry(row, col++, bad_data_double); // SPREAD_OERR at.set_entry(row, col++, bad_data_double); // SPREAD_PLUS_OERR at.set_entry(row, col++, bad_data_double); // OBS_CLIMO_STDEV - /* MET #2924 Uncomment this section at.set_entry(row, col++, bad_data_double); // FCST_CLIMO_MEAN at.set_entry(row, col++, bad_data_double); // FCST_CLIMO_STDEV - */ // Write extra columns at.set_entry(row, col++, d->n_use); // N_USE