From a9a3a778b53e2e55ff87f08a3427b7f71ac48eaf Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 10:32:29 -0600 Subject: [PATCH] Update develop-ref after dtcenter/MET#2942 (#2950) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 2673 Moved dvariable declaration after include * #2673 Move down namespace below include * Feature #2395 wdir (#2820) * Per #2395, add new columns to VL1L2, VAL1L2, and VCNT line types for wind direction statistics. Work still in progress. * Per #2395, write the new VCNT columns to the output and document the additions to the VL1L2, VAL1L2, and VCNT columns. * Per #2395, add the definition of new statistics to Appendix G. * Per #2395, update file version history. * Per #2395, tweak warning message about zero wind vectors and update grid-stat and point-stat to log calls to the do_vl1l2() function. * Per #2395, refine the weights for wind direction stats, ignoring the undefined directions. * Update src/tools/core/stat_analysis/aggr_stat_line.cc * Update src/tools/core/stat_analysis/parse_stat_line.cc * Update src/tools/core/stat_analysis/aggr_stat_line.cc * Recent changes to branch protection rules for the develop branch have broken the logic of the update_truth.yml GHA workflow. Instead of submitting a PR to merge develop into develop-ref directly, use an intermediate update_truth_for_develop branch. * Feature #2280 ens_prob (#2823) * Per #2280, update to support probability threshold strings like ==8, where 8 is the number of ensemble members, to create probability bins centered on the n/8 for n = 0 ... 8. * Per #2280, update docs about probability threshold settings. * Per #2280, use a loose tolerance when checking for consistent bin widths. * Per #2280, add a new unit test for grid_stat to demonstrate processing the output from gen_ens_prod. 
* Per #2280, when verifying NMEP probability forecasts, smooth the obs data first. * Per #2280, only request STAT output for the PCT line type to match unit_grid_stat.xml and minimize the new output files. * Per #2280, update config option docs. * Per #2280, update config option docs. * #2673 Change 0 to nullptr * #2673 Change 0 to nullptr * #2673 Change 0 to nullptr * #2673 Change 0 to nullptr * #2673 Change 0 to nullptr * #2673 Removed the redundant parentheses with return * #2673 Removed the redundant parentheses with return * #2673 Removed the redundant parentheses with return * #2673 Removed the redundant parentheses with return * #2673 Removed the redundant parentheses with return * #2673 restored return statement * #2673 Added std namespace * #2673 Moved down 'using namespace' statement. Removed trailing spaces * #2673 Moved down 'using namespace' statement. * #2673 Moved down 'using namespace' statement. * #2673 Moved down 'using namespace' statement. * #2673 Moved down 'using namespace' statement. * #2673 Added std namespace * #2673 Added std namespace * #2673 Added std namespace * #2673 Changed literal 1 to boolean value, true * Feature #2673 enum_to_string (#2835) * Feature #2583 ecnt (#2825) * Unrelated to #2583, fix typo in code comments. * Per #2583, add hooks write 3 new ECNT columns for observation error data. * Per #2583, make error messages about mis-matched array lengths more informative. * Per #2583, switch to more concise variable naming conventions of ign_oerr_cnv, ign_oerr_cor, and dawid_seb. * Per #2583, fix typo to enable compilation * Per #2583, define the 5 new ECNT column names. * Per #2583, add 5 new columns to the ECNT table in the Ensemble-Stat chapter * Per #2583, update stat_columns.cc to write these 5 new ECNT columns * Per #2583, update ECNTInfo class to compute the 5 new ECNT statistics. * Per #2583, update stat-analysis to parse the 5 new ECNT columns. * Per #2583, update aggregate_stat logic for 5 new ECNT columns. 
* Per #2583, update PairDataEnsemble logic for 5 new ECNT columns * Per #2583, update vx_statistics library with obs_error handling logic for the 5 new ECNT columns * Per #2583, changes to make it compile * Per #2583, changes to make it compile * Per #2583, switch to a consistent ECNT column naming convention with OERR at the end. Using IGN_CONV_OERR and IGN_CORR_OERR. * Per #2583, define ObsErrorEntry::variance() with a call to the dist_var() utility function. * Per #2583, update PairDataEnsemble::compute_pair_vals() to compute the 5 new stats with the correct inputs. * Per #2583, add DEBUG(10) log messages about computing these new stats. * Per #2583, update Stat-Analysis to compute these 5 new stats from the ORANK line type. * Per #2583, whitespace and comments. * Per #2583, update the User's Guide. * Per #2583, remove the DS_ADD_OERR and DS_MULT_OERR ECNT columns and rename DS_OERR as DSS, since observation error is not actually involved in its computation. * Per #2583, minor update to Appendix C * Per #2583, rename ECNT line type statistic DSS to IDSS. * Per #2583, fix a couple of typos * Per #2583, more error checking. * Per #2583, remove the ECNT IDSS column since its just 2*pi*IGN, the existing ignorance score, and only provides meaningful information when combined with the other Dawid-Sebastiani statistics that have already been removed. * Per #2583, add Eric's documentation of these new stats to Appendix C. Along the way, update the DOI links in the references based on this APA style guide: https://apastyle.apa.org/style-grammar-guidelines/references/dois-urls#:~:text=Include%20a%20DOI%20for%20all,URL%2C%20include%20only%20the%20DOI. * Per #2583, fix new equations with embedded underscores for PDF by defining both html and pdf formatting options. * Per #2583, update the ign_conv_oerr equation to include a 2 *pi multiplier for consistency with the existing ignorance score. Also, fix the documented equations. 
* Per #2583, remove log file that was inadvertently added on this branch. * Per #2583, simplify ObsErrorEntry::variance() implementation. For the distribution type of NONE, return a variance of 0.0 rather than bad data, as discussed with @michelleharrold and @JeffBeck-NOAA on 3/8/2024. --------- Co-authored-by: MET Tools Test Account * Revert #2825 since more documentation and testing is needed (#2837) This reverts commit 108a8958b206d6712197823a083666ab039bf818. * Feature #2583 ecnt fix IGN_OERR_CORR (#2838) * Unrelated to #2583, fix typo in code comments. * Per #2583, add hooks write 3 new ECNT columns for observation error data. * Per #2583, make error messages about mis-matched array lengths more informative. * Per #2583, switch to more concise variable naming conventions of ign_oerr_cnv, ign_oerr_cor, and dawid_seb. * Per #2583, fix typo to enable compilation * Per #2583, define the 5 new ECNT column names. * Per #2583, add 5 new columns to the ECNT table in the Ensemble-Stat chapter * Per #2583, update stat_columns.cc to write these 5 new ECNT columns * Per #2583, update ECNTInfo class to compute the 5 new ECNT statistics. * Per #2583, update stat-analysis to parse the 5 new ECNT columns. * Per #2583, update aggregate_stat logic for 5 new ECNT columns. * Per #2583, update PairDataEnsemble logic for 5 new ECNT columns * Per #2583, update vx_statistics library with obs_error handling logic for the 5 new ECNT columns * Per #2583, changes to make it compile * Per #2583, changes to make it compile * Per #2583, switch to a consistent ECNT column naming convention with OERR at the end. Using IGN_CONV_OERR and IGN_CORR_OERR. * Per #2583, define ObsErrorEntry::variance() with a call to the dist_var() utility function. * Per #2583, update PairDataEnsemble::compute_pair_vals() to compute the 5 new stats with the correct inputs. * Per #2583, add DEBUG(10) log messages about computing these new stats. 
* Per #2583, update Stat-Analysis to compute these 5 new stats from the ORANK line type. * Per #2583, whitespace and comments. * Per #2583, update the User's Guide. * Per #2583, remove the DS_ADD_OERR and DS_MULT_OERR ECNT columns and rename DS_OERR as DSS, since observation error is not actually involved in its computation. * Per #2583, minor update to Appendix C * Per #2583, rename ECNT line type statistic DSS to IDSS. * Per #2583, fix a couple of typos * Per #2583, more error checking. * Per #2583, remove the ECNT IDSS column since its just 2*pi*IGN, the existing ignorance score, and only provides meaningful information when combined with the other Dawid-Sebastiani statistics that have already been removed. * Per #2583, add Eric's documentation of these new stats to Appendix C. Along the way, update the DOI links in the references based on this APA style guide: https://apastyle.apa.org/style-grammar-guidelines/references/dois-urls#:~:text=Include%20a%20DOI%20for%20all,URL%2C%20include%20only%20the%20DOI. * Per #2583, fix new equations with embedded underscores for PDF by defining both html and pdf formatting options. * Per #2583, update the ign_conv_oerr equation to include a 2 *pi multiplier for consistency with the existing ignorance score. Also, fix the documented equations. * Per #2583, remove log file that was inadvertently added on this branch. * Per #2583, simplify ObsErrorEntry::variance() implementation. For the distribution type of NONE, return a variance of 0.0 rather than bad data, as discussed with @michelleharrold and @JeffBeck-NOAA on 3/8/2024. * Per #2583, updates to ensemble-stat.rst recommended by @michelleharrold and @JeffBeck-NOAA. * Per #2583, implement changes to the IGN_CORR_OERR corrected as directed by @ericgilleland. --------- Co-authored-by: MET Tools Test Account * Update the pull request template to include a question about expected impacts to existing METplus Use Cases. 
* #2830 Changed enum Builtin to enum class * #2830 Converted enum to enum class at config_constants.h * Feature #2830 bootstrap enum (#2843) * Bugfix #2833 develop azimuth (#2840) * Per #2833, fix n-1 bug when defining the azimuth delta for range/azimuth grids. * Per #2833, when defining TcrmwData:range_max_km, divide by n_range - 1 since the range values start at 0. * Per #2833, remove max_range_km from the TC-RMW config file. Set the default rmw_scale to NA so that it's not used by default. And update the documentation. Still actually need to make the logic of the code work as it should. * Per #2833, update tc_rmw to define the range as either a function of rmw or using explicit spacing in km. * Per #2833, update the TCRMW Config files to remove the max_range_km entry, and update the unit test for one call to use RMW ranges and the other to use ranges defined in kilometers. * Per #2833, just correct code comments. * Per #2833, divide by n - 1 when computing the range delta, rather than n. * Per #2833, correct the handling of the maximum range in the tc-rmw tool. For fixed delta km, need to define the max range when setting up the grid at the beginning. --------- Co-authored-by: MET Tools Test Account * #2830 Changed enum PadSize to enum class * #2830 Removed redundant parentheses * #2830 Removed commented-out code * #2830 Use auto * #2830 Changed enum to enum class for DistType, InterpMthd, GridTemplates, and NormalizeType * #2830 Moved enum_class_as_integer from header file to cc files * #2830 Added enum_as_int.hpp * #2830 Added enum_as_int.hpp * Deleted enum_class_as_integer and renamed it to enum_class_as_int * Removed redundant parentheses * #2830 Changed enum to enum class * #2830 Changed enum_class_as_integer to enum_class_as_int * Feature #2379 sonarqube gha (#2847) * Per #2379, testing initial GHA SonarQube setup. * Per #2379, switch to only analyzing the src directory. * Per #2379, move more config logic from sonar-project.properties into the workflow. 
#ci-skip-all * Per #2379, try removing + symbols * Per #2379, move projectKey into xml workflow and remove sonar-project.properties. * Per #2379, try following the instructions at https://github.com/sonarsource-cfamily-examples/linux-autotools-gh-actions-sq/blob/main/.github/workflows/build.yml ci-skip-all * Per #2379, see details of progress described in this issue comment: https://github.com/dtcenter/MET/issues/2379#issuecomment-2000242425 * Unrelated to #2379, just removing spurious space that gets flagged as a diff when re-running enum_to_string on seneca. * Per #2379, try running SonarQube through GitHub. * Per #2379, remove empty env section and also disable the testing workflow temporarily during sonarqube development. * Per #2379, fix docker image name. * Per #2379, delete unneeded script. * Per #2379, update GHA to scan Python code and push to the correct SonarQube projects. * Per #2379, update GHA SonarQube project names * Per #2379, update the build job name * Per #2379, update the compile step name * Per #2379, switch to consistent SONAR variable names. * Per #2379, fix typo in sed expressions. * Per #2379, just rename the log artifact * Per #2379, use time_command wrapper instead of run_command. * Per #2379, fix bad env var name * Per #2379, switch from egrep to grep. * Per #2379, just try cat-ting the logfile * Per #2379, test whether cat-ting the log file actually works. * Per #2379, revert back * Per #2379, mention SonarQube in the PR template. Make workflow name more succinct. * Per #2379, add SONAR_REFERENCE_BRANCH setting to define the sonar.newCode.referenceBranch property. The goal is to define the comparison reference branch for each SonarQube scan. * Per #2379, have the sonarqube.yml job print the reference branch it's using * Per #2379, intentionally introduce a new code smell to see if SonarQube correctly flags it as appearing in new code. * Per #2379, try adding the SonarQube quality gate check. 
* Per #2379, add logic for using the report-task.txt output files to check the quality gate status for both the python and cxx scans. * Per #2379 must use unique GHA id's * Per #2379, working on syntax for quality gate checks * Per #2379, try again. * Per #2379, try again * Per #2379, try again * Per #2379, try again * Per #2379, try again * Per #2379, try again * Per #2379, try yet again * Per #2379 * Per #2379, add more debug * Per #2379, remove -it option from docker run commands * Per #2379, again * Per #2379, now that the scan works as expected, remove the intentional SonarQube code smell as well as debug logging. * Hotfix related to #2379. The sonar.newCode.referenceBranch and sonar.branch.name cannot be set to the same string! Only add the newCode definition when they differ. * #2830 Changed enum STATJobType to enum class * #2830 Changed STATLineType to enum class * #2830 Changed Action to enum class * #2830 Changed ModeDataType to enum class * #2830 Changed StepCase to enum class * #2830 Changed enum to enum class * #2830 Changed GenesisPairCategory to enum class * #2830 Removed redundant parentheses * #2830 Reduced same if checking * #2830 Cleanup * #2830 Use empty() instead of length checking * #2830 Adjusted indentations * Feature #2379 develop sonarqube updates (#2850) * Per #2379, move rgb2ctable.py into the python utility scripts directory for better organization and to enable convenient SonarQube scanning. * Per #2379, remove point.py from the vx_python3_utils directory which clearly was inadvertently added during development 4 years ago. As far as I can tell it isn't being called by any other code and doesn't belong in the repository. Note that scripts/python/met/point.py has the same name but is entirely different. * Per #2379, update the GHA SonarQube scan to do a single one with Python and C++ combined. The nightly build script is still doing 2 separate scans for now. If this all works well, they could also be combined into a single one. 
* Per #2379, eliminate MET_CONFIG_OPTIONS from the SonarQube workflow since it doesn't need to be and probably shouldn't be configurable. * Per #2379, trying to copy report-task.txt out of the image * Per #2379, update build_met_sonarqube.sh to check the scan return status * Per #2379, fix bash assignment syntax * Per #2379, remove unused SCRIPT_DIR envvar * Per #2379, switch to a single SonarQube scan for MET's nightly build as well * Feature 2654 ascii2nc polar buoy support (#2846) * Added iabp data type, and modified file_handler to filter based on time range, which was added as a command line option * handle time using input year, hour, min, and doy * cleanup and switch to position day of year for time computations * Added an ascii2nc unit test for iabp data * Added utility scripts to pull iabp data from the web and find files in a time range * Modified iabp_handler to always output a placeholder 'location' observation with value 1 * added description of IABP data python utility scripts * Fixed syntax error * Fixed Another syntax error. * Slight reformat of documentation * Per #2654, update the Makefiles in scripts/python/utility to include all the python scripts that should be installed. * Per #2654, remove unused code from get_iabp_from_web.py that is getting flagged as a bug by SonarQube. * Per #2654, fix typo in docs --------- Co-authored-by: John Halley Gotway Co-authored-by: MET Tools Test Account * Feature #2786 rpss_from_prob (#2861) * Per #2786, small change to a an error message unrelated to this development. * Per #2786, add RPSInfo::set_climo_prob() function to derive the RPS line type from climatology probability bins. And update Ensemble-Stat to call it. * Per #2786, minor change to clarify error log message. * Per #2786, for is_prob = TRUE input, the RPS line type is the only output option. Still need to update docs! 
* Per #2786, add new call to Ensemble-Stat to test computing RPS from climo probabilities * Per #2786, use name rps_climo_bin_prob to be very explicit. * Per #2786, redefine logic of RPSInfo::set_climo_bin_prob() to match the CPC definition. Note that reliability, resolution, uncertainty, and RPSS based on the sample climatology are all set to bad data. Need to investigate whether they can be computed using these inputs. * Per #2786, remove the requirement that any fcst.prob_cat_thresh thresholds must be defined. If they are defined, pass them through to the FCST_THRESH output column. If not, write NA. Add check to make sure the event occurs in exactly 1 category. * Per #2786, don't enforce fcst.prob_cat_thresh == obs.prob_cat_thresh for probabilistic inputs. And add more is_prob checks so that only the RPS line type can be written when given probabilistic inputs. * updated documentation * Per #2786, call rescale_probability() function to convert from 0-100 probs to 0-1 probs. --------- Co-authored-by: j-opatz * Feature #2862 v12.0.0-beta4 (#2864) * Feature #2379 develop single_sq_project (#2865) * Hotfix to the documentation in the develop branch. Issue #2858 was closed as a duplicate of #2857. I had included it in the MET-12.0.0-beta4 release notes, but the work is not yet actually complete. * Feature 2842 ugrid config (#2852) * #2842 Removed UGrid related setting * #2842 Corrected vertical level for data_plane_array * #2842 Do not allow the time range * #2842 The UGridConfig file can be passed as ugrid_dataset * #2842 Changed -config option to -ugrid_config * #2842 Deleted UGrid configurations * 2842 Fix a compile error when UGrid is disabled * #2842 Cleanup * #2842 Added an unittest point_stat_ugrid_mpas_config * #2842 Added a PointStatConfig without UGrid dataset. * #2842 Corrected ty[po at the variable name * Switched from time_centered to time_instant. 
I think time_centered is the center of the forecast lead window and time_instant is the time the forecast is valid (end of forecast window). * #2842 Removed ugrid_max_distance_km and unused metadata names * #2842 Restored time variable time_instant for LFric * #2842 Adjust lon between -180 and 180 * #2842 Adjust lon between -180 and 180 * #2842 Adjust lon between -180 and 180 * #2842 Adjusted lon to between -180 to 180 * #2842 Changed variable names * Per #2842, switch from degrees east to west right when the longitudes are read. * #2842, switch from degrees east to west right when the longitudes are read * #2842 Cleanup debug messages --------- Co-authored-by: Howard Soh Co-authored-by: Daniel Adriaansen Co-authored-by: John Halley Gotway * Feature 2753 comp script config (#2868) * set dynamic library file extension to .dylib if running on MacOS and .so otherwise * Added disabling of jasper documentation for compiliation on Hera * Updated * remove extra export of compiler env vars * include full path to log file so it is easier to file the log file to examine when a command fails * send cmake output to a log file * remove redundant semi-colon * use full path to log file so it is easier to examine on failure * use run_cmd to catch if rm command fails * Modifications for compilation on hera, gaea, and orion * Updating * fixed variable name * clean up if/else statements * set TIFF_LIBRARY_RELEASE argument to use full path to dynamic library file to prevent failure installing proj library * set LDFLAGS so that LDFLAGS value set in the user's environment will also be used * Updated based on gaea, orion, and hera installs * Updated * change extension of dynamic library files only if architecture is arm64 because older Macs still use .so * added netcdf library to args to prevent error installing NetCDF-CXX when PROJ has been installed in the same run of the script -- PATH is set in the COMPILE_PROJ if block that causes this flag from being added automatically * clean up 
how rpath and -L are added to LDFLAGS so that each entry is separate -- prevents errors installing on Mac arm64 because multiple rpath values aren't read using :. Also use MET_PROJLIB * Updated * removed -ltiff from MET libs * only add path to rpath and -L arguments if they are not already included in LDFLAGS * changed from using LIB_TIFF (full path to tiff lib file) to use TIFF_LIB_DIR (dir containing tiff lib file). Added TIFF_INCLUDE_DIR to proj compilation and -DJAS_ENABLE_DOC to jasper compliation taken from @jprestop branch * update comments * ensure all MET_* and MET_*LIB variables are added to the rpath for consistency * remove unnecessary if block and only export LDFLAGS at the end of setting locally * Updated * Added section for adding /lib64 and rearranged placement of ADDTL_DIR * Commenting out the running of the Jasper lib tests * Updating and/or removing files * Updating and/or removing files * Latest udpates which include the addition of the tiff library for proj * Remove commented out line. Co-authored-by: John Halley Gotway * Make indentation consistent. Co-authored-by: John Halley Gotway * Make indentation consistent. Co-authored-by: John Halley Gotway * Make indentation consistent. 
Co-authored-by: John Halley Gotway * Per 2753, added -lm to configure_lib_args for NetCDF-CXX * Per #2753 updating acorn files * Per #2753, update wcoss2 files * Per #2753, updating acorn file to include MET_PYTHON_EXE * Per #2753, updated files for 12.0.0 for derecho * Per #2753, updated derecho file adding MET_PYTHON_EXE and made corrections * Updating config files * Updating orion files * Updates for gaea's files * Updating gaea modulefile * Removing modulefile for cheyenne * Added MET_PYTHON_EXE * Added MET_PYTHON_EXE to hera too * Adding file for hercules * Removing equals sign from setenv * Adding file for hercules * Updated script to add libjpeg installation for grib2c * Per #2753, Adding file for casper --------- Co-authored-by: George McCabe <23407799+georgemccabe@users.noreply.github.com> Co-authored-by: John Halley Gotway * Feature #2795 level_mismatch_warning (#2873) * Per #2795, move the warning message about level mismatch from the config validation step to when the forecast files are being processed. Only check this when the number of forecast fields is greater than 1, but no longer limit the check to pressure levels only. * Per #2795, add comments * Whitespace * Per #2795, port level mismatch fix over to Ensemble-Stat. Check it for each verification task, but only print it once for each task, rather than once for each task * ensemble member. * Feature #2870 removing_MISSING_warning (#2872) * Per #2870, define utility functions for parsing the file type from a file list and for logging missing files, checking for the MISSING keyword. Also, update Ensemble-Stat and Gen-Ens-Prod to call these functions. * Per #2870, update the gen_ens_prod tests to demonstrate the use of the MISSING keyword for missing files. METplus uses this keyword for Ensemble-Stat and Gen-Ens-Prod. 
* Feature 2842 ugrid config (#2875) * #2842 Removed UGrid related setting * #2842 Corrected vertical level for data_plane_array * #2842 Do not allow the time range * #2842 The UGridConfig file can be passed as ugrid_dataset * #2842 Changed -config option to -ugrid_config * #2842 Deleted UGrid configurations * 2842 Fix a compile error when UGrid is disabled * #2842 Cleanup * #2842 Added an unittest point_stat_ugrid_mpas_config * #2842 Added a PointStatConfig without UGrid dataset. * #2842 Corrected ty[po at the variable name * Switched from time_centered to time_instant. I think time_centered is the center of the forecast lead window and time_instant is the time the forecast is valid (end of forecast window). * #2842 Removed ugrid_max_distance_km and unused metadata names * #2842 Restored time variable time_instant for LFric * #2842 Adjust lon between -180 and 180 * #2842 Adjust lon between -180 and 180 * #2842 Adjust lon between -180 and 180 * #2842 Adjusted lon to between -180 to 180 * #2842 Changed variable names * Per #2842, switch from degrees east to west right when the longitudes are read. * #2842, switch from degrees east to west right when the longitudes are read * #2842 Cleanup debug messages * #2842 Disabled output types except STAT for sl1l2 * #2842 Disabled output types except STAT for sl1l2 and MPR * #2842 Reduced output files for UGrid --------- Co-authored-by: Howard Soh Co-authored-by: Daniel Adriaansen Co-authored-by: John Halley Gotway * Hotfix to develop branch to remove duplicate test named 'point_stat_ugrid_mpas_config'. That was causing unit_ugrid.xml to fail because it was still looking for .txt output files that are no longer being generated. * Feature 2748 document ugrid (#2869) * Initial documentation of the UGRID capability. * Fixes error in references, adds appendix to index, and adds sub-section for configuration entries and a table for metadata map items. 
* Corrects LFRic, rewords section on UGRID conventions, updates description of using GridStat, and removes mention of nodes. * Forgot one more mention of UGRID conventions. * Incorporates more suggestions from @willmayfield. * Switches to numerical table reference. * Feature #2781 Convert MET NetCDF point obs to Pandas DataFrame (#2877) * Per #2781, added function to convert MET NetCDF point observation data to pandas so it can be read and modified in a python embedding script. Added example python embedding script * ignore python cache files * fixed function call * reduce cognitive complexity to satisfy SonarQube and add boolean return value to catch if function fails to read data * clean up script and add comments * replace call to object function that doesn't exist, handle exception when file passed to script cannot be read by the NetCDF library * rename example script * add new example script to makefiles * fix logic to build pandas DataFrame to properly get header information from observation header IDs * Per #2781, add unit test to demonstrate python embedding script that reads MET NetCDF point observation file and converts it to a pandas DataFrame * Per #2781, added init function for nc_point_obs to take an input filename. Also raise TypeError exception from nc_point_obs.read_data() if input file cannot be read * call parent class init function to properly initialize nc_point_obs * Feature #2833 pcp_combine_missing (#2886) * Per #2883, add -input_thresh command line option to configure allowable missing input files. * Per #2883, update pcp_combine usage statement. * Per #2883, update existing pcp_combine -derive unit test example by adding 3 new missing file inputs at the beginning, middle, and end of the file list. The first two are ignored since they include the MISSING keyword, but the third without that keyword triggers a warning message as desired. The -input_thresh option is added to only require 70% of the input files be present. 
This should produce the exact same output data. * Per #2883, update the pcp_combine logic for the sum command to allow missing data files based on the -input_thresh threshold. Add a test in unit_pcp_combine.xml to demonstrate. * Update docs/Users_Guide/reformat_grid.rst Co-authored-by: George McCabe <23407799+georgemccabe@users.noreply.github.com> * Per #2883, update pcp_combine usage statement in the code to be more simliar to the User's Guide. * Per #2883, switch to using derive_file_list_missing as the one containing missing files and recreate derive_file_list as it had existed for the test named pcp_combine_derive_VLD_THRESH. * Per #2883, move initialization inside the same loop to resolve SonarQube issues. * Per #2883, update sum_data_files() to switch from allocating memory to using STL vectors to satisfy SonarQube. * Per #2883, changes to declarations of variables to satisfy SonarQube. * Per #2883, address more SonarQube issues * Per #2883, backing out an unintended change I made to tcrmw_grid.cc. This change belongs on a different branch. * Per #2883, update logic of parse_file_list_type() function to handle python input strings. Also update pcp_combine to parse the type of input files being read and log non-missing python input files expected. --------- Co-authored-by: George McCabe <23407799+georgemccabe@users.noreply.github.com> * Per #2888, update STATAnalysisJob::dump_stat_line() to support dumping stat line types VCNT, RPS, DMAP, and SSIDX. (#2891) * Per #2659, making updates as proposed at the 20240516 MET Eng. Mtg. (#2895) * Feature #2395 TOTAL_DIR (#2892) * Per #2395, remove the n_dir_undef and n_dira_undef variables that are superceded by the new dcount and dacount VL1L2Info members to keep track of the number of valid wind direction vectors. * Per #2395, add TOTAL_DIR columns to the VL1L2, VAL1L2, and VCNT line types and update the header column tables. 
* Per #2395, update the User's Guide to list the new TOTAL_DIR columns in the VL1L2, VAL1L2, and VCNT line types. * Per #2395, update stat_analysis to parse the new TOTAL_DIR columns and use the values to aggregate results when needed. * Per #2395, for SonarQube change 'const char *' to 'const char * const' to satisfy the finding that 'Global variables should be const.' Should probably switch from 'const char *' to strings eventually. But for now, I'm just making up for some SonarQube technical debt. * Per #2395, fix typo in placement of the DIR_ME column name in the met_header_columns_V12.0.txt file * Per #2395, add 2 new Stat-Analysis jobs to demonstrate the processing of VL1L2 lines. * Per #2395, update logic of is_vector_dir_stat(). Instead of just checking 'DIR_', check 'DIR_ME', 'DIR_MAE', and 'DIR_MSE' to avoid a false positive match for the 'DIR_ERR' column which is computed from the vector partial sums rather than the individual direction differences. * Bugfix #2897 develop python_valid_time (#2899) * Per #2897, fix typos in 2 log messages. Also fix the bug in storing the valid time strings. The time string in vld_array should exactly correspond to the numeric unixtime values in vld_num_array. Therefore they need to be updated inside the same if block. The bug is that we were storing only the unique unixtime values but storing ALL of the valid time strings, not just the unique ones. * Per #2897, minor change to formatting of log message * MET #2897, don’t waste time searching, just set the index to n - 1 * Per #2897, remove unused add_prec_point_obs(...) function * Per #2897, update add_point_obs(...) logic for DEBUG(9) to print very detailed log messages about what obs are being rejected and which are being used for each verification task. 
* Per #2897, refine the 'using' log message to make the wording consistent with the summary rejection reason counts log message * Per #2897, update the User's Guide about -v 9 for Point-Stat --------- Co-authored-by: j-opatz Co-authored-by: MET Tools Test Account * Bugfix 2867 point2grid qc flag (#2890) * #2867 Added compute_adp_qc_flag and adjusted ADP QC flags * #2867 Added point2grid_GOES_16_ADP_Enterprise_high. Changed AOD QC flags to 0,1,2 (was 1,2,3) * #2867 Added get_nc_att_values_ * #2867 Added get_nc_att_values. Added the argument allow_conversion to get_nc_data(netCDF::NcVar *, uchar *data) * #2867 Read the ADP QC flag values and meanings attributes from DQF variable and set the QC high, medium, low values to support Enterprise algorithm. Adjusted the ADP QC values by using AOD qc values * #2867 Cleanup * #2867 Corrected indent * #2867 Changed log message * #2867 Removed unused argument * #2867 Removed unused argument * Cleanup * #2867 Fix SonarQube findings * #2867 Deleted protected section with no members * #2867 Cleanup * #2867 Fixed SonarQube findings; unused local variables, declare as const, etc * #2867 Moved include directives to top * #2867 Changed some arguments with references to avoid copying objects * #2867 Do not filter by QC flag if -qc is not given * #2867 Use enum class for GOES QC: HIGH, MEDIUM, and LOW * #2867 Added log message back which was deleted accidentally * #2867 Changed static const to constexpr * #2867 Initial release. Separated from nc_utils.h * @2867 Added nc_utils_core.h * #2867 Moved some blocks to nc_utils_core.h * #2867 Include nc_utils_core.h * #2867 Added const references * Per #2867, fixing typo in comments. --------- Co-authored-by: Howard Soh Co-authored-by: j-opatz * Hotfix to develop to fix the update_truth.yml workflow logic. This testing workflow run failed (https://github.com/dtcenter/MET/actions/runs/9209471209). Here we switch to a unique update truth branch name to avoid conflicts. 
* Avoid pushing directly to the develop or main_vX.Y branches since that is not necessary for the automation logic in MET. * #2904 Changed R path to R-4.4.0 (#2905) Co-authored-by: Howard Soh * Feature #2912 pb2nc error (#2914) * Feature 2717 convert unit.pl to unit.py (#2871) * created unit.py module in new internal/test_unit/python directory * added xml parsing to unit.py * added repl_env function * added reading of the remaining xml tags in build_tests function * progress on main function (putting together test commands) * a few more lines in the main function * minor updates * fixed how the test command was being run * added if name/main and command line parsing * fixed handling of no 'env' in cmd_only mode * handle params from xml that have \ after filename without space in between * added logging * added some more pieces to unit * more updates to unit.py, including running checks on output files * bug fixes, improved handling of output file names, improved handling of env vars, improved logging output * fixed how shell commands are run, and other minor fixes * added last bits from the perl script, fixed some bugs * created unit.py module in new internal/test_unit/python directory * added xml parsing to unit.py * added repl_env function * added reading of the remaining xml tags in build_tests function * progress on main function (putting together test commands) * a few more lines in the main function * minor updates * update scripts to call python unit test script instead of the old perl script * fix she-bang line to allow script to be run without python3 before it * add missing test_dir and exit_on_fail tags that are found in the rest of the unit test xml files * fix call to logger.warning * change tags named 'exists' to 'exist' to match the rest of the xml files * added logger to function * removed tab at end of line that was causing output file path to be excluded from the command * fix broken checks for output files * incorporated george's recommended 
changes * changed default to overwrite logs; allow for more than one xml file to be passed in command --------- Co-authored-by: Natalie babij Co-authored-by: Natalie babij Co-authored-by: Natalie babij Co-authored-by: Natalie Babij Co-authored-by: John Halley Gotway Co-authored-by: George McCabe <23407799+georgemccabe@users.noreply.github.com> Co-authored-by: j-opatz * Bugfix 2867 point2grid qc unittest (#2913) * #2867 Added compute_adp_qc_flag and adjusted ADP QC flags * #2867 Added point2grid_GOES_16_ADP_Enterprise_high. Changed AOD QC flags to 0,1,2 (was 1,2,3) * #2867 Added get_nc_att_values_ * #2867 Added get_nc_att_values. Added the argument allow_conversion to get_nc_data(netCDF::NcVar *, uchar *data) * #2867 Read the ADP QC flag values and meanings attributes from DQF variable and set the QC high, meduium, low values to support Enterprise algorithm. Adjusted the ADP QC values by using AOD qc values * #2867 Cleanup * #2867 Corrected indent * #2867 Changed log message * #2867 Removed unused argument * #2867 Removed unused argument * Cleanup * #2867 Fix SonarQube findings * #2867 Deleted protected section with no members * #2867 Cleanup * #2867 FIxed SonarQube findings; unused local variables, decalare as const, etc * #2867 MOved include directives to top * #2867 Changed some argumenmt with references to avoid copying objects * #2867 Do not filter by QC flag if -qc is not given * #2867 Use enumj class for GOES QC: HIGH, MEDIUM, and LOW * #2867 Added log message back which were deleted accidently * #2867 Chaned statci const to constexpr * #2867 Initial release. 
Separated from nc_utils.h * @2867 Added nc_utils_core.h * #2867 Moved some blocks to nc_utils_core.h * #2867 Include nc_utils_core.h * #2867 Added const references * #2867 Some 'static const' were chnaged to constexpr * #2867 Changed -qc options (1,2,3 to 0,1 - high & medium) for AOD * #2867 Merged develop branch * #2867 Corrected the unit test name --------- Co-authored-by: Howard Soh * Feature #2911 tc_stat_set_hdr (#2916) * Per #2911, no real changes for Stat-Analysis. Just changing order of variables for consistency. * Per #2911, add StatHdrColumns::apply_set_hdr_opts(...) function to be used by TC-Stat. * Per #2911, move ByColumn to the TCStatJob base class and add HdrName and HdrValue to support the -set_hdr job command. * Per #2911, update GSI tools to call the newly added StatHdrColumns::apply_set_hdr_opts(...) function. * Per #2911, update logic of Stat-Analysis for consistency to make use of common apply_set_hdr_opts() function. * Per #2911, add DataLine::set_item() function to support -set_hdr options. * Per #2911, just update contents of error message * Per #2911, add TCStatLine member functions for has() and get_offset(). * Per #2911, update tc_stat to support applying -set_hdr to TC-Stat filter jobs. * Per #2911, revise TC-Stat config files to exercise the -set_hdr job command option * Per #2911, update TC-Stat documentation to mention the -set_hdr job command option * Per #2911, add note * Per #2911, as recommended by SonarQube, make some of these member functions const. * Bugfix #2856 develop ens_climo (#2918) * Per #2856, port over fixes from main_v11.1 to develop. * Per #2856, correct conditionals in set_job_controls.sh and tweak existing Ensemble-Stat configuration file to exercise the logic that's being impacted here. 
* Bugfix #2841 develop tang_rad_winds (#2921) * Per #2841, port over fixes from bugfix_2841_main_v11.1_tang_rad_winds for the develop branch * Per #2841, clarify in the docs that azimuths are defined in degrees counter-clockwise from due east. * Per #2841, just updating with output from enum_to_string. * Per #2841, tweak the documentation. * Per #2841, correct the location of using namespace lines. * Per #2841, update compute_tc_diag.py to no longer skip writing the radial and tangential wind diagnostics. * Per #2841, update compute_tc_diag.py to no longer skip writing radial and tangential wind diagnostics. * Revert "Per #2841, update compute_tc_diag.py to no longer skip writing radial and tangential wind diagnostics." This reverts commit f097345bedcfcca663e8fb4322eed5b5e00e19fd. * Revert "Per #2841, update compute_tc_diag.py to no longer skip writing the radial and tangential wind diagnostics." This reverts commit c0402151b038c59efab99c060cc5c390edf002f6. * Per #2841, update comp_dir.sh logic to include .dat in the files that are diffed * Replace tab with spaces * Per #2841, correct the units for the azimuth netcdf output variable * Per #2841, reverse the x dimension of the rotated latlon grid to effectively switch from counterclockwise rotation to clockwise. 
--------- Co-authored-by: MET Tools Test Account * Feature #2601 seeps climo config (#2927) * #2601 Added seeps_grid_climo_name and seeps_point_climo_name * #2601 Added seeps_grid_climo_name * #2601 Removed SEEPS settings * #2601 Initial release * #2601 Changed to set the SEEPS climo by using the configuration * #2601 Removed SEESP settings at PointStatConfig_APCP and use PointStatConfig_SEEPS for SEEPSm testing * #2601 Updated descryption for seeps_grid_climo_name * #2601 Added a argument for the SEEPS clomo file * #2601 Added conf_key_seeps_grid_climo_name and conf_key_seeps_point_climo_name * #2601 Support the climo filename from the configuration * #2601 Corrected key for climo name * Removing duplicate word --------- Co-authored-by: Howard Soh Co-authored-by: Julie Prestopnik * Feature 2673 sonarqube beta5 redundant parentheses (#2930) * #2673 Removed redundant_parentheses * #2673 Removed redundant_parentheses * #2673 Removed redundant parentheses * #2673 Removed redundant parentheses --------- Co-authored-by: Howard Soh * Fix release checksum action (#2929) * Feature 2857 tripolar coordinates (#2928) * #2857 Added MetNcCFDataFile::build_grid_from_lat_lon_vars * #2857 Added NcCfFile::build_grid_from_lat_lon_vars * #2857 Check the coordinates attribute to find latitude, longitude, and time variables * #2857 Get the lat/lon variables from coordinates attribute if exists * #2857 Added two constants * #2857 Deleted debug messages * #2857 Added lat_vname and lon_vname for var_name_map * #2857 Added two unit tests: point2grid_sea_ice_tripolar and point2grid_sea_ice_tripolar_config * #2857 Initial release * #2857 Correct dictinary to get file_type * #2857 DO not check the time variable for point2grid * #2857 Added point2grid_tripolar_rtofs --------- Co-authored-by: Howard Soh * Feature 2932 v12.0.0-beta5 (#2933) * Per #2932, updating version and release notes * Per #2932, updating date on release notes * Per #2932, fixed formatting and links * Update 
release-notes.rst * Update release-notes.rst Removing inline backticks since they do not format the way I expected, especially when put inside bolded release notes. --------- Co-authored-by: John Halley Gotway * Feature fix release notes (#2934) * Fixing up release notes * Update release-notes.rst --------- Co-authored-by: John Halley Gotway * Per dtcenter/METplus#2643 discussion, add more detail about the budget interpolation method. * Feature #2924 fcst climo, PR 1 of 2 (#2939) * Per #2924, Update the MPR and ORANK output line types to just write duplicate existing climo values, update the header tables and MPR/ORANK documentation tables. * Per #2924, update get_n_orank_columns() logic * Per #2924, update the Stat-Analysis parsing logic to parse the new MPR and ORANK climatology columns. * Per #2924, making some changes to the vx_statistics library to store climo data... but more work to come. Committing this first set of changes that are incomplete but do compile. * Per #2924, this big set of changes does compile but make test produces a segfault for ensemble-stat * Per #2924, fix return value for is_keeper_obs() * Per #2924, move fcst_info/obs_info into the VxPairBase pointer. * Per #2924, update Ensemble-Stat to set the VxPairBase::fcst_info pointer * Per #2924 udpate handling of fcst_info and obs_info pointers in Ensemble-Stat * Per #2924, update the GSI tools to handle the new fcst climo columns. * Per #2924, add backward compatibility logic so that when old climo column names are requested, the new ones are used. * Per #2924, print a DEBUG(2) log message if old column names are used. * Per #2924, switch the unit tests to reference the updated MPR column names rather than the old ones. * Per #2924, working progress. Not fully compiling yet * Per #2924, another round of changes. Removing MPR:FCST_CLIMO_CDF output column. This compiles but not sure if it actually runs yet * Per #2924, work in progress * Per #2924, work in progress. Almost compiling again. 
* Per #2924, get it compiling * Per #2924, add back in support for SCP and CDP which are interpreted as SOCP and OCDP, resp * Per #2924, update docs about SCP and CDP threshold types * Per #2924, minor whitespace changes * Per #2924, fix an uninitialized pointer bug by defining/calling SeepsClimoGrid::init_from_scratch() member function. The constructor had been calling clear() to delete pointers that weren't properly initialized to nullptr. Also, simplify some map processing logic. * Per #2924, rename SeepsAggScore from seeps to seeps_agg for clarity and to avoid conflicts in member function implementations. * Per #2924, fix seeps compilation error in Point-Stat * Per #2924, fix bug in the boolean logic for handling the do_climo_cdp NetCDF output option. * Per #2924, add missing exit statement. * Per #2924, tweak threshold.h * Per #2924, define one perc_thresh_info entry for each enumerated PercThreshType value * Per #2924, simplify the logic for handling percentile threshold types and print a log message once when the old versions are still used. * Per #2924, update the string comparison return value logic * Per #2924, fix the perc thresh string parsing logic by calling ConcatString::startswith() * Per #2924, switch all instances of CDP to OCDP. Gen-Ens-Prod was writing NetCDF files with OCDP in the output variable names, but Grid-Stat was requesting that the wrong variable name be read. So the unit tests failed. * Per #2924, add more doc details * Per #2924, update default config file to indicate when climo_mean and climo_stdev can be set seperately in the fcst and obs dictionaries. * Per #2924, update the MET tools to parse climo_mean and climo_stdev separately from the fcst and obs dictionaries. * Per #2924, backing out new/modified columns to minimize reg test diffs * Per #2924, one more section to be commented out later. 
* Per #2924, replace several calls to strncmp() with ConcatString::startswith() to simplify the code * Per #2924, strip out some more references to OBS_CLIMO_... in the unit tests. * Per #2924, delete accidental file * Per #2924 fix broken XML comments * Per #2924, fix comments * Per #2924, address SonarQube findings * Per #2924, tweak a Point-Stat and Grid-Stat unit test config file to make the output more comparable to develop. * Per #2924, fix bug in the logic of PairDataPoint and PairDataEnsemble, when looping over the 3-dim array do not return when checking the climo and fcst values. Instead we need to continue to the next loop iteration. * Per #2924, address more SonarQube code smells to reduce the overall number in MET for this PR. * Per #2924, correct the logic for parsing climo data from MPR lines. * Per #2924, cleanup grid_stat.cc source code by making calls to DataPlane::is_empty() and Grid::nxy(). * Per #2924, remove unneeded ==0 * Hotfix to the develop branch for a copy/paste bug introduced by PR #2939 * Feature #2924 sal1l2_mae, PR 3 of 3 (#2943) * Per #2924, track SL1L2 and SAL1L2 MAE scores with separate variables since they are no longer the same value. I renamed the existing 'mae' as 'smae' and added a new 'samae' variable. Renaming the existing lets me use the compiler help find all references to it throughout the code. * Per #2924, update the User's Guide climatology details and equations. * Per #2924, some changes to aggr_stat_line.cc and series_analysis.cc to satisfy some SonarQube code smells. * Update develop to clarify masking poly options based on METplus Discussion dtcenter/METplus#2650 * Remove two semi-colons that are not actually necessary to avoid confusion. * Per dtcenter/METplus#2653 discussion, update the MTD usage statement to clarify that data specified in the fcst dictionary is read from the -single input files. 
* Feature #2924 fcst climo, PR 2 of 3 (#2942) * Per #2924, Update the MPR and ORANK output line types to just write duplicate existing climo values, update the header tables and MPR/ORANK documentation tables. * Per #2924, update get_n_orank_columns() logic * Per #2924, update the Stat-Analysis parsing logic to parse the new MPR and ORANK climatology columns. * Per #2924, making some changes to the vx_statistics library to store climo data... but more work to come. Committing this first set of changes that are incomplete but do compile. * Per #2924, this big set of changes does compile but make test produces a segfault for ensemble-stat * Per #2924, fix return value for is_keeper_obs() * Per #2924, move fcst_info/obs_info into the VxPairBase pointer. * Per #2924, update Ensemble-Stat to set the VxPairBase::fcst_info pointer * Per #2924 udpate handling of fcst_info and obs_info pointers in Ensemble-Stat * Per #2924, update the GSI tools to handle the new fcst climo columns. * Per #2924, add backward compatibility logic so that when old climo column names are requested, the new ones are used. * Per #2924, print a DEBUG(2) log message if old column names are used. * Per #2924, switch the unit tests to reference the updated MPR column names rather than the old ones. * Per #2924, working progress. Not fully compiling yet * Per #2924, another round of changes. Removing MPR:FCST_CLIMO_CDF output column. This compiles but not sure if it actually runs yet * Per #2924, work in progress * Per #2924, work in progress. Almost compiling again. * Per #2924, get it compiling * Per #2924, add back in support for SCP and CDP which are interpreted as SOCP and OCDP, resp * Per #2924, update docs about SCP and CDP threshold types * Per #2924, minor whitespace changes * Per #2924, fix an uninitialized pointer bug by defining/calling SeepsClimoGrid::init_from_scratch() member function. 
The constructor had been calling clear() to delete pointers that weren't properly initialized to nullptr. Also, simplify some map processing logic. * Per #2924, rename SeepsAggScore from seeps to seeps_agg for clarity and to avoid conflicts in member function implementations. * Per #2924, fix seeps compilation error in Point-Stat * Per #2924, fix bug in the boolean logic for handling the do_climo_cdp NetCDF output option. * Per #2924, add missing exit statement. * Per #2924, tweak threshold.h * Per #2924, define one perc_thresh_info entry for each enumerated PercThreshType value * Per #2924, simplify the logic for handling percentile threshold types and print a log message once when the old versions are still used. * Per #2924, update the string comparison return value logic * Per #2924, fix the perc thresh string parsing logic by calling ConcatString::startswith() * Per #2924, switch all instances of CDP to OCDP. Gen-Ens-Prod was writing NetCDF files with OCDP in the output variable names, but Grid-Stat was requesting that the wrong variable name be read. So the unit tests failed. * Per #2924, add more doc details * Per #2924, update default config file to indicate when climo_mean and climo_stdev can be set seperately in the fcst and obs dictionaries. * Per #2924, update the MET tools to parse climo_mean and climo_stdev separately from the fcst and obs dictionaries. * Per #2924, backing out new/modified columns to minimize reg test diffs * Per #2924, one more section to be commented out later. * Per #2924, replace several calls to strncmp() with ConcatString::startswith() to simplify the code * Per #2924, strip out some more references to OBS_CLIMO_... in the unit tests. * Per #2924, delete accidental file * Per #2924 fix broken XML comments * Per #2924, fix comments * Per #2924, address SonarQube findings * Per #2924, tweak a Point-Stat and Grid-Stat unit test config file to make the output more comparable to develop. 
* Per #2924, fix bug in the logic of PairDataPoint and PairDataEnsemble, when looping over the 3-dim array do not return when checking the climo and fcst values. Instead we need to continue to the next loop iteration. * Per #2924, address more SonarQube code smells to reduce the overall number in MET for this PR. * Per #2924, correct the logic for parsing climo data from MPR lines. * Per #2924, update MPR and ORANK line types to update/add FCST/OBS_CLIMO_MEAN/STDEV/CDF columns. * Per #2924, cleanup grid_stat.cc source code by making calls to DataPlane::is_empty() and Grid::nxy(). * Per #2924, remove unneeded ==0 * Per #2924, working on PR2. * Per #2924, update User's Guide with notional example of specifying climo_mean and climo_stdev separately in the fcst and obs dicts. * Per #2924, adding a new unit test. It does NOT yet run as expected. Will debug on seneca * Per #2924, pass the description string to the read_climo_data_plane*() function to provide better log messages * Per #2924, more work on consistent log messages * Per #2924, tweak the configuration to define both field, climo_mean, and climo_stdev in both the fcst and obs dictionaries * Per #2924, tweak the unit_climatology_mixed.xml test * Per #2924, only whitespace changes. * Per #2924, missed swapping MET #2924 changes in 3 test files * Per #2924, delete accidentally committed file * Per #2924, delete accidentally committed files * Per #2924, add support for GRIB1 time range indicator value of 123 used for the corresponding METplus Use Case. Note that there are 22 other TRI values not currently supported. * Adds caveat regarding longitudes appearing in DEBUG statements with a… (#2947) * Adds caveat regarding longitudes appearing in DEBUG statements with a different sign to the FAQ. 
* Update appendixA.rst Missing paren * Create install_met_env.cactus * Adding special script for installing beta5 on wcoss2 * Modifying script, including updates to eckit and atlas --------- Co-authored-by: Howard Soh Co-authored-by: John Halley Gotway Co-authored-by: Howard Soh Co-authored-by: MET Tools Test Account Co-authored-by: davidalbo Co-authored-by: j-opatz Co-authored-by: Daniel Adriaansen Co-authored-by: Julie Prestopnik Co-authored-by: George McCabe <23407799+georgemccabe@users.noreply.github.com> Co-authored-by: natalieb-noaa <146213121+natalieb-noaa@users.noreply.github.com> Co-authored-by: Natalie babij Co-authored-by: Natalie babij Co-authored-by: Natalie babij Co-authored-by: Natalie Babij Co-authored-by: metplus-bot <97135045+metplus-bot@users.noreply.github.com> --- .github/workflows/testing.yml | 2 + data/table_files/met_header_columns_V12.0.txt | 4 +- docs/Users_Guide/appendixA.rst | 7 +- docs/Users_Guide/appendixC.rst | 74 +- docs/Users_Guide/config_options.rst | 98 +- docs/Users_Guide/grid-diag.rst | 4 +- docs/Users_Guide/mode-td.rst | 4 +- docs/Users_Guide/mode.rst | 2 +- docs/Users_Guide/point-stat.rst | 34 +- .../compile_MET_all.wcoss_beta5.sh | 959 ++++++++++++++++++ .../config/install_met_env.cactus | 50 + .../config/install_met_env.wcoss2 | 24 +- internal/test_unit/bin/unit_test.sh | 1 + ...nfig_climo_FCST_NCEP_1.0DEG_OBS_WMO_1.5DEG | 277 +++++ .../config/GridStatConfig_mpr_thresh | 20 +- .../config/PointStatConfig_mpr_thresh | 18 +- internal/test_unit/hdr/met_12_0.hdr | 2 +- .../test_unit/xml/unit_climatology_1.5deg.xml | 8 - .../test_unit/xml/unit_climatology_mixed.xml | 39 + src/basic/vx_config/threshold.cc | 5 +- src/basic/vx_util/stat_column_defs.h | 26 - .../vx_data2d_grib/data2d_grib_utils.cc | 8 +- src/libcode/vx_stat_out/stat_columns.cc | 10 +- src/libcode/vx_statistics/compute_stats.cc | 8 +- src/libcode/vx_statistics/met_stats.cc | 23 +- src/libcode/vx_statistics/met_stats.h | 5 +- src/libcode/vx_statistics/pair_data_point.cc 
| 11 - src/libcode/vx_statistics/read_climo.cc | 63 +- src/libcode/vx_statistics/read_climo.h | 6 +- src/tools/core/ensemble_stat/ensemble_stat.cc | 28 +- src/tools/core/grid_stat/grid_stat.cc | 148 +-- src/tools/core/point_stat/point_stat.cc | 12 +- .../core/series_analysis/series_analysis.cc | 124 ++- .../core/stat_analysis/aggr_stat_line.cc | 66 +- .../core/stat_analysis/parse_stat_line.cc | 4 +- src/tools/other/gen_ens_prod/gen_ens_prod.cc | 6 +- src/tools/other/gsi_tools/gsid2mpr.cc | 4 - src/tools/other/gsi_tools/gsidens2orank.cc | 4 - 38 files changed, 1760 insertions(+), 428 deletions(-) create mode 100755 internal/scripts/installation/compile_MET_all.wcoss_beta5.sh create mode 100644 internal/scripts/installation/config/install_met_env.cactus create mode 100644 internal/test_unit/config/GridStatConfig_climo_FCST_NCEP_1.0DEG_OBS_WMO_1.5DEG create mode 100644 internal/test_unit/xml/unit_climatology_mixed.xml diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 1fccc357fa..4c1eb33e9c 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -360,6 +360,8 @@ jobs: - jobid: 'job3' tests: 'climatology_2.5deg' - jobid: 'job4' + tests: 'climatology_mixed' + - jobid: 'job5' tests: 'python point2grid plot_data_plane mode mode_analysis perc_thresh hira plot_point_obs quality_filter obs_summary duplicate_flag' fail-fast: false steps: diff --git a/data/table_files/met_header_columns_V12.0.txt b/data/table_files/met_header_columns_V12.0.txt index 3d98f02206..5a7f27978e 100644 --- a/data/table_files/met_header_columns_V12.0.txt +++ b/data/table_files/met_header_columns_V12.0.txt @@ -5,7 +5,7 @@ V12.0 : STAT : FHO : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID V12.0 : STAT : ISC : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA 
LINE_TYPE TOTAL TILE_DIM TILE_XLL TILE_YLL NSCALE ISCALE MSE ISC FENERGY2 OENERGY2 BASER FBIAS V12.0 : STAT : MCTC : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL (N_CAT) F[0-9]*_O[0-9]* EC_VALUE V12.0 : STAT : MCTS : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL N_CAT ACC ACC_NCL ACC_NCU ACC_BCL ACC_BCU HK HK_BCL HK_BCU HSS HSS_BCL HSS_BCU GER GER_BCL GER_BCU HSS_EC HSS_EC_BCL HSS_EC_BCU EC_VALUE -V12.0 : STAT : MPR : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL INDEX OBS_SID OBS_LAT OBS_LON OBS_LVL OBS_ELV FCST OBS OBS_QC CLIMO_MEAN CLIMO_STDEV CLIMO_CDF +V12.0 : STAT : MPR : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL INDEX OBS_SID OBS_LAT OBS_LON OBS_LVL OBS_ELV FCST OBS OBS_QC OBS_CLIMO_MEAN OBS_CLIMO_STDEV OBS_CLIMO_CDF FCST_CLIMO_MEAN FCST_CLIMO_STDEV V12.0 : STAT : SEEPS : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL S12 S13 S21 S23 S31 S32 PF1 PF2 PF3 PV1 PV2 PV3 MEAN_FCST MEAN_OBS SEEPS V12.0 : STAT : SEEPS_MPR : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END 
OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE OBS_SID OBS_LAT OBS_LON FCST OBS OBS_QC FCST_CAT OBS_CAT P1 P2 T1 T2 SEEPS V12.0 : STAT : NBRCNT : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL FBS FBS_BCL FBS_BCU FSS FSS_BCL FSS_BCU AFSS AFSS_BCL AFSS_BCU UFSS UFSS_BCL UFSS_BCU F_RATE F_RATE_BCL F_RATE_BCU O_RATE O_RATE_BCL O_RATE_BCU @@ -13,7 +13,7 @@ V12.0 : STAT : NBRCTC : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID V12.0 : STAT : NBRCTS : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL BASER BASER_NCL BASER_NCU BASER_BCL BASER_BCU FMEAN FMEAN_NCL FMEAN_NCU FMEAN_BCL FMEAN_BCU ACC ACC_NCL ACC_NCU ACC_BCL ACC_BCU FBIAS FBIAS_BCL FBIAS_BCU PODY PODY_NCL PODY_NCU PODY_BCL PODY_BCU PODN PODN_NCL PODN_NCU PODN_BCL PODN_BCU POFD POFD_NCL POFD_NCU POFD_BCL POFD_BCU FAR FAR_NCL FAR_NCU FAR_BCL FAR_BCU CSI CSI_NCL CSI_NCU CSI_BCL CSI_BCU GSS GSS_BCL GSS_BCU HK HK_NCL HK_NCU HK_BCL HK_BCU HSS HSS_BCL HSS_BCU ODDS ODDS_NCL ODDS_NCU ODDS_BCL ODDS_BCU LODDS LODDS_NCL LODDS_NCU LODDS_BCL LODDS_BCU ORSS ORSS_NCL ORSS_NCU ORSS_BCL ORSS_BCU EDS EDS_NCL EDS_NCU EDS_BCL EDS_BCU SEDS SEDS_NCL SEDS_NCU SEDS_BCL SEDS_BCU EDI EDI_NCL EDI_NCU EDI_BCL EDI_BCU SEDI SEDI_NCL SEDI_NCU SEDI_BCL SEDI_BCU BAGSS BAGSS_BCL BAGSS_BCU V12.0 : STAT : GRAD : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH 
COV_THRESH ALPHA LINE_TYPE TOTAL FGBAR OGBAR MGBAR EGBAR S1 S1_OG FGOG_RATIO DX DY V12.0 : STAT : DMAP : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL FY OY FBIAS BADDELEY HAUSDORFF MED_FO MED_OF MED_MIN MED_MAX MED_MEAN FOM_FO FOM_OF FOM_MIN FOM_MAX FOM_MEAN ZHU_FO ZHU_OF ZHU_MIN ZHU_MAX ZHU_MEAN G GBETA BETA_VALUE -V12.0 : STAT : ORANK : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL INDEX OBS_SID OBS_LAT OBS_LON OBS_LVL OBS_ELV OBS PIT RANK N_ENS_VLD (N_ENS) ENS_[0-9]* OBS_QC ENS_MEAN CLIMO_MEAN SPREAD ENS_MEAN_OERR SPREAD_OERR SPREAD_PLUS_OERR CLIMO_STDEV +V12.0 : STAT : ORANK : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL INDEX OBS_SID OBS_LAT OBS_LON OBS_LVL OBS_ELV OBS PIT RANK N_ENS_VLD (N_ENS) ENS_[0-9]* OBS_QC ENS_MEAN OBS_CLIMO_MEAN SPREAD ENS_MEAN_OERR SPREAD_OERR SPREAD_PLUS_OERR OBS_CLIMO_STDEV FCST_CLIMO_MEAN FCST_CLIMO_STDEV V12.0 : STAT : PCT : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL (N_THRESH) THRESH_[0-9]* OY_[0-9]* ON_[0-9]* V12.0 : STAT : PJC : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH 
COV_THRESH ALPHA LINE_TYPE TOTAL (N_THRESH) THRESH_[0-9]* OY_TP_[0-9]* ON_TP_[0-9]* CALIBRATION_[0-9]* REFINEMENT_[0-9]* LIKELIHOOD_[0-9]* BASER_[0-9]* V12.0 : STAT : PRC : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL (N_THRESH) THRESH_[0-9]* PODY_[0-9]* POFD_[0-9]* diff --git a/docs/Users_Guide/appendixA.rst b/docs/Users_Guide/appendixA.rst index 4dbf571008..f39c96913a 100644 --- a/docs/Users_Guide/appendixA.rst +++ b/docs/Users_Guide/appendixA.rst @@ -1801,8 +1801,11 @@ Q. What are MET's conventions for latitude, longitude, azimuth and bearing angle .. dropdown:: Answer - MET considers north latitude and east longitude positive. Latitudes - have range from :math:`-90^\circ` to :math:`+90^\circ`. Longitudes have + MET considers north latitude and east longitude positive. However, + internally MET considers east longitude negative so users may encounter + DEBUG statements with longitude of a different sign than they provided + (e.g. for observation locations or grid metadata). Latitudes have + range from :math:`-90^\circ` to :math:`+90^\circ`. Longitudes have range from :math:`-180^\circ` to :math:`+180^\circ`. 
Plane angles such as azimuths and bearing (example: horizontal wind direction) have range :math:`0^\circ` to :math:`360^\circ` and are measured clockwise diff --git a/docs/Users_Guide/appendixC.rst b/docs/Users_Guide/appendixC.rst index 15c3ab5c2d..a6bbb0fe51 100644 --- a/docs/Users_Guide/appendixC.rst +++ b/docs/Users_Guide/appendixC.rst @@ -616,23 +616,23 @@ Anomaly Correlation Coefficient Called "ANOM_CORR" and "ANOM_CORR_UNCNTR" for centered and uncentered versions in CNT output :numref:`table_PS_format_info_CNT` -The anomaly correlation coefficient is equivalent to the Pearson correlation coefficient, except that both the forecasts and observations are first adjusted according to a climatology value. The anomaly is the difference between the individual forecast or observation and the typical situation, as measured by a climatology (**c**) of some variety. It measures the strength of linear association between the forecast anomalies and observed anomalies. The anomaly correlation coefficient is defined as: +The anomaly correlation coefficient is equivalent to the Pearson correlation coefficient, except that both the forecasts and observations are first adjusted by subtracting their corresponding climatology value. The anomaly is the difference between the individual forecast or observation and the typical situation, as measured by a forecast climatology (:math:`c_f`) and observation climatology (:math:`c_o`). It measures the strength of linear association between the forecast anomalies and observed anomalies. The anomaly correlation coefficient is defined as: -.. math:: \text{Anomaly Correlation} = \frac{\sum(f_i - c)(o_i - c)}{\sqrt{\sum(f_i - c)^2} \sqrt{\sum(o_i -c)^2}} . +.. math:: \text{Anomaly Correlation} = \frac{\sum(f_i - {c_f}_i)(o_i - {c_o}_i)}{\sqrt{\sum(f_i - {c_f}_i)^2} \sqrt{\sum(o_i - {c_o}_i)^2}} . The centered anomaly correlation coefficient (ANOM_CORR) which includes the mean error is defined as: .. only:: latex - .. 
math:: \text{ANOM\_CORR } = \frac{ \overline{[(f - c) - \overline{(f - c)}][(a - c) - \overline{(a - c)}]}}{ \sqrt{ \overline{( (f - c) - \overline{(f - c)})^2} \overline{( (a - c) - \overline{(a - c)})^2}}} + .. math:: \text{ANOM\_CORR } = \frac{ \overline{[(f - c_f) - \overline{(f - c_f)}][(o - c_o) - \overline{(o - c_o)}]}}{ \sqrt{ \overline{( (f - c_f) - \overline{(f - c_f)})^2} \overline{( (o - c_o) - \overline{(o - c_o)})^2}}} .. only:: html - .. math:: \text{ANOM_CORR } = \frac{ \overline{[(f - c) - \overline{(f - c)}][(a - c) - \overline{(a - c)}]}}{ \sqrt{ \overline{( (f - c) - \overline{(f - c)})^2} \overline{( (a - c) - \overline{(a - c)})^2}}} + .. math:: \text{ANOM_CORR } = \frac{ \overline{[(f - c_f) - \overline{(f - c_f)}][(o - c_o) - \overline{(o - c_o)}]}}{ \sqrt{ \overline{( (f - c_f) - \overline{(f - c_f)})^2} \overline{( (o - c_o) - \overline{(o - c_o)})^2}}} The uncentered anomaly correlation coefficient (ANOM_CORR_UNCNTR) which does not include the mean errors is defined as: -.. math:: \text{Anomaly Correlation Raw } = \frac{ \overline{(f - c)(a - c)}}{ \sqrt{\overline{(f - c)^2} \overline{(a - c)^2}}} +.. math:: \text{Anomaly Correlation Raw } = \frac{ \overline{(f - c_f)(o - c_o)}}{ \sqrt{\overline{(f - c_f)^2} \overline{(o - c_o)^2}}} Anomaly correlation can range between -1 and 1; a value of 1 indicates perfect correlation and a value of -1 indicates perfect negative correlation. A value of 0 indicates that the forecast and observed anomalies are not correlated. @@ -650,56 +650,60 @@ The partial sums can be accumulated over individual cases to produce statistics Scalar L1 and L2 Values ----------------------- -Called "FBAR", "OBAR", "FOBAR", "FFBAR", and "OOBAR" in SL1L2 output :numref:`table_PS_format_info_SL1L2` +Called "FBAR", "OBAR", "FOBAR", "FFBAR", "OOBAR", and "MAE" in SL1L2 output :numref:`table_PS_format_info_SL1L2` These statistics are simply the 1st and 2nd moments of the forecasts, observations and errors: .. 
math:: - \text{FBAR} = \text{Mean}(f) = \bar{f} = \frac{1}{n} \sum_{i=1}^n f_i + \text{FBAR} = \text{Mean}(f) = \frac{1}{n} \sum_{i=1}^n f_i - \text{OBAR} = \text{Mean}(o) = \bar{o} = \frac{1}{n} \sum_{i=1}^n o_i + \text{OBAR} = \text{Mean}(o) = \frac{1}{n} \sum_{i=1}^n o_i - \text{FOBAR} = \text{Mean}(fo) = \bar{fo} = \frac{1}{n} \sum_{i=1}^n f_i o_i + \text{FOBAR} = \text{Mean}(fo) = \frac{1}{n} \sum_{i=1}^n f_i o_i - \text{FFBAR} = \text{Mean}(f^2) = \bar{f}^2 = \frac{1}{n} \sum_{i=1}^n f_i^2 + \text{FFBAR} = \text{Mean}(f^2) = \frac{1}{n} \sum_{i=1}^n f_i^2 - \text{OOBAR} = \text{Mean}(o^2) = \bar{o}^2 = \frac{1}{n} \sum_{i=1}^n o_i^2 + \text{OOBAR} = \text{Mean}(o^2) = \frac{1}{n} \sum_{i=1}^n o_i^2 + + \text{MAE} = \text{Mean}(|f - o|) = \frac{1}{n} \sum_{i=1}^n |f_i - o_i| Some of the other statistics for continuous forecasts (e.g., RMSE) can be derived from these moments. Scalar Anomaly L1 and L2 Values ------------------------------- -Called "FABAR", "OABAR", "FOABAR", "FFABAR", "OOABAR" in SAL1L2 output :numref:`table_PS_format_info_SAL1L2` +Called "FABAR", "OABAR", "FOABAR", "FFABAR", "OOABAR", and "MAE" in SAL1L2 output :numref:`table_PS_format_info_SAL1L2` -Computation of these statistics requires a climatological value, c. These statistics are the 1st and 2nd moments of the scalar anomalies. The moments are defined as: +Computation of these statistics requires climatological values, where :math:`c_f` is the forecast climatology value and :math:`c_o` is the observation climatology value. These statistics are the 1st and 2nd moments of the scalar anomalies. The moments are defined as: .. 
math:: - \text{FABAR} = \text{Mean}(f - c) = \bar{f - c} = \frac{1}{n} \sum_{i=1}^n (f_i - c) + \text{FABAR} = \text{Mean}(f - c_f) = \frac{1}{n} \sum_{i=1}^n (f_i - {c_f}_i) + + \text{OABAR} = \text{Mean}(o - c_o) = \frac{1}{n} \sum_{i=1}^n (o_i - {c_o}_i) - \text{OABAR} = \text{Mean}(o - c) = \bar{o - c} = \frac{1}{n} \sum_{i=1}^n (o_i - c) + \text{FOABAR} = \text{Mean}[(f - c_f)(o - c_o)] = \frac{1}{n} \sum_{i=1}^n (f_i - {c_f}_i)(o_i - {c_o}_i) - \text{FOABAR} = \text{Mean}[(f - c)(o - c)] = \bar{(f - c)(o - c)} = \frac{1}{n} \sum_{i=1}^n (f_i - c)(o_i - c) + \text{FFABAR} = \text{Mean}[(f - c_f)^2] = \frac{1}{n} \sum_{i=1}^n (f_i - {c_f}_i)^2 - \text{FFABAR} = \text{Mean}[(f - c)^2] = \bar{(f - c)}^2 = \frac{1}{n} \sum_{i=1}^n (f_i - c)^2 + \text{OOABAR} = \text{Mean}[(o - c_o)^2] = \frac{1}{n} \sum_{i=1}^n (o_i - {c_o}_i)^2 - \text{OOABAR} = \text{Mean}[(o - c)^2] = \bar{(o - c)}^2 = \frac{1}{n} \sum_{i=1}^n (o_i - c)^2 + \text{MAE} = \text{Mean}(|(f - c_f) - (o - c_o)|) = \frac{1}{n} \sum_{i=1}^n |(f_i - {c_f}_i) - (o_i - {c_o}_i)| Vector L1 and L2 Values ----------------------- -Called "UFBAR", "VFBAR", "UOBAR", "VOBAR", "UVFOBAR", "UVFFBAR", "UVOOBAR" in VL1L2 output :numref:`table_PS_format_info_VL1L2` +Called "UFBAR", "VFBAR", "UOBAR", "VOBAR", "UVFOBAR", "UVFFBAR", and "UVOOBAR" in VL1L2 output :numref:`table_PS_format_info_VL1L2` -These statistics are the moments for wind vector values, where **u** is the E-W wind component and **v** is the N-S wind component ( :math:`u_f` is the forecast E-W wind component; :math:`u_o` is the observed E-W wind component; :math:`v_f` is the forecast N-S wind component; and :math:`v_o` is the observed N-S wind component). 
The following measures are computed: +These statistics are the moments for wind vector values, where :math:`u` is the E-W wind component and :math:`v` is the N-S wind component ( :math:`u_f` is the forecast E-W wind component; :math:`u_o` is the observed E-W wind component; :math:`v_f` is the forecast N-S wind component; and :math:`v_o` is the observed N-S wind component). The following measures are computed: .. math:: - \text{UFBAR} = \text{Mean}(u_f) = \bar{u}_f = \frac{1}{n} \sum_{i=1}^n u_{fi} + \text{UFBAR} = \text{Mean}(u_f) = \frac{1}{n} \sum_{i=1}^n u_{fi} - \text{VFBAR} = \text{Mean}(v_f) = \bar{v}_f = \frac{1}{n} \sum_{i=1}^n v_{fi} + \text{VFBAR} = \text{Mean}(v_f) = \frac{1}{n} \sum_{i=1}^n v_{fi} - \text{UOBAR} = \text{Mean}(u_o) = \bar{u}_o = \frac{1}{n} \sum_{i=1}^n u_{oi} + \text{UOBAR} = \text{Mean}(u_o) = \frac{1}{n} \sum_{i=1}^n u_{oi} - \text{VOBAR} = \text{Mean}(v_o) = \bar{v}_o = \frac{1}{n} \sum_{i=1}^n v_{oi} + \text{VOBAR} = \text{Mean}(v_o) = \frac{1}{n} \sum_{i=1}^n v_{oi} \text{UVFOBAR} = \text{Mean}(u_f u_o + v_f v_o) = \frac{1}{n} \sum_{i=1}^n (u_{fi} u_{oi} + v_{fi} v_{oi}) @@ -710,25 +714,27 @@ These statistics are the moments for wind vector values, where **u** is the E-W Vector Anomaly L1 and L2 Values ------------------------------- -Called "UFABAR", "VFABAR", "UOABAR", "VOABAR", "UVFOABAR", "UVFFABAR", "UVOOABAR" in VAL1L2 output :numref:`table_PS_format_info_VAL1L2` +Called "UFABAR", "VFABAR", "UOABAR", "VOABAR", "UVFOABAR", "UVFFABAR", and "UVOOABAR" in VAL1L2 output :numref:`table_PS_format_info_VAL1L2` -These statistics require climatological values for the wind vector components, :math:`u_c \text{ and } v_c`. The measures are defined below: +These statistics require climatological values for the wind vector components, where :math:`{u_c}_f` and :math:`{v_c}_f` are the forecast climatology vectors and :math:`{u_c}_o` and :math:`{v_c}_o` are the observation climatology vectors. The measures are defined below: .. 
math:: - \text{UFABAR} = \text{Mean}(u_f - u_c) = \frac{1}{n} \sum_{i=1}^n (u_{fi} - u_c) + \text{UFABAR} = \text{Mean}(u_f - {u_c}_f) = \frac{1}{n} \sum_{i=1}^n ({u_f}_i - {{u_c}_f}_i) - \text{VFBAR} = \text{Mean}(v_f - v_c) = \frac{1}{n} \sum_{i=1}^n (v_{fi} - v_c) + \text{VFABAR} = \text{Mean}(v_f - {v_c}_f) = \frac{1}{n} \sum_{i=1}^n ({v_f}_i - {{v_c}_f}_i) - \text{UOABAR} = \text{Mean}(u_o - u_c) = \frac{1}{n} \sum_{i=1}^n (u_{oi} - u_c) + \text{UOABAR} = \text{Mean}(u_o - {u_c}_o) = \frac{1}{n} \sum_{i=1}^n ({u_o}_i - {{u_c}_o}_i) - \text{VOABAR} = \text{Mean}(v_o - v_c) = \frac{1}{n} \sum_{i=1}^n (v_{oi} - v_c) + \text{VOABAR} = \text{Mean}(v_o - {v_c}_o) = \frac{1}{n} \sum_{i=1}^n ({v_o}_i - {{v_c}_o}_i) - \text{UVFOABAR} &= \text{Mean}[(u_f - u_c)(u_o - u_c) + (v_f - v_c)(v_o - v_c)] \\ - &= \frac{1}{n} \sum_{i=1}^n (u_{fi} - u_c) + (u_{oi} - u_c) + (v_{fi} - v_c)(v_{oi} - v_c) + \text{UVFOABAR} &= \text{Mean}[(u_f - {u_c}_f)(u_o - {u_c}_o) + (v_f - {v_c}_f)(v_o - {v_c}_o)] \\ + &= \frac{1}{n} \sum_{i=1}^n ({u_f}_i - {{u_c}_f}_i)({u_o}_i - {{u_c}_o}_i) + ({v_f}_i - {{v_c}_f}_i)({v_o}_i - {{v_c}_o}_i) - \text{UVFFABAR} = \text{Mean}[(u_f - u_c)^2 + (v_f - v_c)^2] = \frac{1}{n} \sum_{i=1}^n ((u_{fi} - u_c)^2 + (v_{fi} - v_c)^2) + \text{UVFFABAR} &= \text{Mean}[(u_f - {u_c}_f)^2 + (v_f - {v_c}_f)^2] \\ + &= \frac{1}{n} \sum_{i=1}^n (({u_f}_i - {{u_c}_f}_i)^2 + ({v_f}_i - {{v_c}_f}_i)^2) - \text{UVOOABAR} = \text{Mean}[(u_o - u_c)^2 + (v_o - v_c)^2] = \frac{1}{n} \sum_{i=1}^n ((u_{oi} - u_c)^2 + (v_{oi} - v_c)^2) + \text{UVOOABAR} &= \text{Mean}[(u_o - {u_c}_o)^2 + (v_o - {v_c}_o)^2] \\ + &= \frac{1}{n} \sum_{i=1}^n (({u_o}_i - {{u_c}_o}_i)^2 + ({v_o}_i - {{v_c}_o}_i)^2) Gradient Values --------------- diff --git a/docs/Users_Guide/config_options.rst b/docs/Users_Guide/config_options.rst index de538bd7cb..3d892808e0 100644 --- a/docs/Users_Guide/config_options.rst +++ b/docs/Users_Guide/config_options.rst @@ -158,15 +158,15 @@ The configuration file language
supports the following data types: .. note:: - Prior to MET version 12.0.0, forecast climatological inputs were not - supported. The observation climatological inputs were used to process - threshold types named "SCP" and "CDP". + Prior to MET version 12.0.0, forecast climatological inputs were not + supported. The observation climatological inputs were used to process + threshold types named "SCP" and "CDP". - For backward compatibility, the "SCP" threshold type is processed the same - as "SOCP" and "CDP" the same as "OCDP". + For backward compatibility, the "SCP" threshold type is processed the same + as "SOCP" and "CDP" the same as "OCDP". - Users are encouraged to replace the deprecated "SCP" and "CDP" threshold - types with the updated "SOCP" and "OCDP" types, respectively. + Users are encouraged to replace the deprecated "SCP" and "CDP" threshold + types with the updated "SOCP" and "OCDP" types, respectively. * Piecewise-Linear Function (currently used only by MODE): @@ -324,8 +324,10 @@ To run this utility: -e EXISTING_FILE, --existing=EXISTING_FILE Save the text into the named file (optional, default: ../../../data/table_files/ndbc_stations.xml) -NOTE: The downloaded files are written to a subdirectory ndbc_temp_data which -can be deleted once the final output file is created. +.. note:: + + The downloaded files are written to a subdirectory ndbc_temp_data which + can be deleted once the final output file is created. MET_BASE -------- @@ -1508,8 +1510,11 @@ the climatology file names and fields to be used. with 6 and 12 being common choices. Use "NA" if the timing of the climatology data should not be checked. -* The "day_interval" and "hour_interval" entries replace the deprecated - entries "match_month", "match_day", and "time_step". +.. note:: + + As of MET version 11.0.0, the "day_interval" and "hour_interval" entries + replace the "match_month", "match_day", and "time_step" entries, which are + now deprecated. .. 
code-block:: none @@ -1563,6 +1568,27 @@ over the "climo_mean" setting and then updating the "file_name" entry. file_name = [ "/path/to/climatological/standard/deviation/files" ]; } +Prior to MET version 12.0.0, forecast climatological inputs were not supported. +If the "climo_mean" and "climo_stdev" dictionaries are defined at the top-level +configuration file context, the same data is used for both the forecast and +observation climatologies. To specify separate forecast and observation +climatologies, define "climo_mean" and "climo_stdev" inside the "fcst" and "obs" +dictionaries, as shown below. + +.. code-block:: none + + fcst = { + field = [ ... ]; + climo_mean = { ... }; + climo_stdev = { ... }; + } + + obs = { + field = [ ... ]; + climo_mean = { ... }; + climo_stdev = { ... }; + } + climo_cdf --------- @@ -1729,7 +1755,7 @@ Point-Stat and Ensemble-Stat, the reference time is the forecast valid time. .. _config_options-mask: mask ---- +---- The "mask" entry is a dictionary that specifies the verification masking regions to be used when computing statistics. Each mask defines a @@ -1749,11 +1775,18 @@ in the following ways: * The "poly" entry contains a comma-separated list of files that define verification masking regions. These masking regions may be specified in - two ways: in an ASCII file containing lat/lon points defining the mask polygon, - or using a gridded data file such as the NetCDF output of the Gen-Vx-Mask tool. - Some details for each of these options are described below: + three ways: + + 1. An ASCII polyline file containing lat/lon points defining the mask polygon. + 2. The NetCDF output of the Gen-Vx-Mask tool. + 3. Any gridded data file followed by a configuration string describing the + data to be read and an optional threshold to be applied to that data. 
+ + These three options are described below: - * If providing an ASCII file containing the lat/lon points defining the mask + * Option 1 - ASCII polyline file: + + If providing an ASCII file containing the lat/lon points defining the mask polygon, the file must contain a name for the region followed by the latitude (degrees north) and longitude (degrees east) for each vertex of the polygon. The values are separated by whitespace (e.g. spaces or newlines), and the @@ -1781,17 +1814,38 @@ in the following ways: observation point falls within the polygon defined is done in x/y grid space. - * The NetCDF output of the gen_vx_mask tool. Please see :numref:`masking` + .. code-block:: none + + mask = { poly = [ "share/met/poly/CONUS.poly" ]; } + + * Option 2 - Gen-Vx-Mask output: + + The NetCDF output of the gen_vx_mask tool. Please see :numref:`masking` for more details. - * Any gridded data file that MET can read may be used to define a + .. code-block:: none + + mask = { poly = [ "/path/to/gen_vx_mask_output.nc" ]; } + + * Option 3 - Any gridded data file: + + Any gridded data file that MET can read may be used to define a verification masking region. Users must specify a description of the field to be used from the input file and, optionally, may specify a threshold to be applied to that field. Once this threshold is applied, any grid point where the resulting field is 0, the mask is turned off. Any grid point where it is non-zero, the mask is turned on. - For example, "sample.grib {name = \"TMP\"; level = \"Z2\";} >273" + + .. code-block:: none + + mask = { poly = [ "/path/to/sample.grib {name = \"TMP\"; level = \"Z2\";} >273" ]; } + + .. note:: + + The syntax for Option 3 is complicated since it includes quotes + embedded within another quoted string. Any such embedded quotes must + be escaped using a preceding backslash character. * The "sid" entry is an array of strings which define groups of observation station ID's over which to compute statistics.
Each entry @@ -2269,8 +2323,10 @@ For example: | nc_pairs_var_suffix = "FREEZING"; (for the freezing level height) | -NOTE: This option was previously named "nc_pairs_var_str", which is -now deprecated. +.. note:: + + Prior to MET version 9.0.0, this option was named "nc_pairs_var_str", + which is now deprecated. .. code-block:: none diff --git a/docs/Users_Guide/grid-diag.rst b/docs/Users_Guide/grid-diag.rst index f2fd55e78c..59bbb38616 100644 --- a/docs/Users_Guide/grid-diag.rst +++ b/docs/Users_Guide/grid-diag.rst @@ -27,7 +27,9 @@ The following sections describe the usage statement, required arguments, and opt [-v level] [-compress level] - NOTE: The "-data" option can be used once to read all fields from each input file or once for each field to be processed. +.. note:: + + The "-data" option can be used once to read all fields from each input file or once for each field to be processed. grid_diag has required arguments and can accept several optional arguments. diff --git a/docs/Users_Guide/mode-td.rst b/docs/Users_Guide/mode-td.rst index c8c07c1117..f7dd558a7c 100644 --- a/docs/Users_Guide/mode-td.rst +++ b/docs/Users_Guide/mode-td.rst @@ -217,7 +217,9 @@ Required Arguments for mtd Optional Arguments for mtd ^^^^^^^^^^^^^^^^^^^^^^^^^^ -4. **-single file\_list** may be used instead of **-fcst** and **-obs** to define objects in a single field. +4. **-single file\_list** command line option may be used instead of the **-fcst** and **-obs** command line options to define objects in a single field. + +.. note:: When the **-single** command line option is used, data specified in the **fcst** configuration file entry is read from those input files. 5. **-log file** gives the name of a file where a log of this MTD run will be written. All output that appears on the screen during a MTD run will be duplicated in the log file. 
diff --git a/docs/Users_Guide/mode.rst b/docs/Users_Guide/mode.rst index 491b452002..bb59cfee3e 100644 --- a/docs/Users_Guide/mode.rst +++ b/docs/Users_Guide/mode.rst @@ -913,7 +913,7 @@ The contents of the columns in this ASCII file are summarized in :numref:`MODE_o * - 50 - AREA_RATIO - The forecast object area divided by the observation object area (unitless) :raw-html:`
` - **NOTE:** Prior to met-10.0.0, defined as the lesser of the two object areas divided by the greater of the two + **NOTE:** Prior to MET version 10.0.0, the AREA_RATIO was defined as the lesser of the two object areas divided by the greater of the two. * - 51 - INTERSECTION :raw-html:`
` \_AREA - Intersection area of two objects (in grid squares) diff --git a/docs/Users_Guide/point-stat.rst b/docs/Users_Guide/point-stat.rst index 6c9849511e..70e3847b79 100644 --- a/docs/Users_Guide/point-stat.rst +++ b/docs/Users_Guide/point-stat.rst @@ -1204,7 +1204,7 @@ The first set of header columns are common to all of the output files generated - Mean(o²) * - 31 - MAE - - Mean Absolute Error + - Mean(\|f-o\|) .. _table_PS_format_info_SAL1L2: @@ -1223,25 +1223,25 @@ The first set of header columns are common to all of the output files generated - Scalar Anomaly L1L2 line type * - 25 - TOTAL - - Total number of matched triplets of forecast (f), observation (o), and climatological value (c) + - Total number of matched pairs of forecast (f), observation (o), forecast climatology (cf), and observation climatology (co) * - 26 - FABAR - - Mean(f-c) + - Mean(f-cf) * - 27 - OABAR - - Mean(o-c) + - Mean(o-co) * - 28 - FOABAR - - Mean((f-c)*(o-c)) + - Mean((f-cf)*(o-co)) * - 29 - FFABAR - - Mean((f-c)²) + - Mean((f-cf)²) * - 30 - OOABAR - - Mean((o-c)²) + - Mean((o-co)²) * - 31 - MAE - - Mean Absolute Error + - Mean(\|(f-cf)-(o-co)\|) .. 
_table_PS_format_info_VL1L2: @@ -1318,28 +1318,28 @@ The first set of header columns are common to all of the output files generated - Vector Anomaly L1L2 line type * - 25 - TOTAL - - Total number of matched triplets of forecast winds (uf, vf), observation winds (uo, vo), and climatological winds (uc, vc) + - Total number of matched pairs of forecast winds (uf, vf), observation winds (uo, vo), forecast climatology winds (ucf, vcf), and observation climatology winds (uco, vco) * - 26 - UFABAR - - Mean(uf-uc) + - Mean(uf-ucf) * - 27 - VFABAR - - Mean(vf-vc) + - Mean(vf-vcf) * - 28 - UOABAR - - Mean(uo-uc) + - Mean(uo-uco) * - 29 - VOABAR - - Mean(vo-vc) + - Mean(vo-vco) * - 30 - UVFOABAR - - Mean((uf-uc)*(uo-uc)+(vf-vc)*(vo-vc)) + - Mean((uf-ucf)*(uo-uco)+(vf-vcf)*(vo-vco)) * - 31 - UVFFABAR - - Mean((uf-uc)²+(vf-vc)²) + - Mean((uf-ucf)²+(vf-vcf)²) * - 32 - UVOOABAR - - Mean((uo-uc)²+(vo-vc)²) + - Mean((uo-uco)²+(vo-vco)²) * - 33 - FA_SPEED_BAR - Mean forecast wind speed anomaly @@ -1348,7 +1348,7 @@ The first set of header columns are common to all of the output files generated - Mean observed wind speed anomaly * - 35 - TOTAL_DIR - - Total number of matched triplets for which the forecast, observation, and climatological wind directions are well-defined (i.e. non-zero vectors) + - Total number of matched pairs for which the forecast, observation, forecast climatology, and observation climatology wind directions are well-defined (i.e. 
non-zero vectors) * - 36 - DIRA_ME - Mean wind direction anomaly difference, from -180 to 180 degrees diff --git a/internal/scripts/installation/compile_MET_all.wcoss_beta5.sh b/internal/scripts/installation/compile_MET_all.wcoss_beta5.sh new file mode 100755 index 0000000000..5065643c84 --- /dev/null +++ b/internal/scripts/installation/compile_MET_all.wcoss_beta5.sh @@ -0,0 +1,959 @@ +#!/bin/bash +# +# Compile and install MET +# (Model Evaluation Tools) +#================================================ +# +# This compile_MET_all.sh script expects certain environment +# variables to be set: +# TEST_BASE, COMPILER (or COMPILER_FAMILY and COMPILER_VERSION), +# MET_SUBDIR, MET_TARBALL, and USE_MODULES. +# +# If compiling support for Python embedding, users will need to +# set MET_PYTHON, MET_PYTHON_BIN_EXE, MET_PYTHON_CC, and MET_PYTHON_LD. +# Users can directly set the python module to be loaded by setting +# either PYTHON_MODULE or by setting PYTHON_NAME and PYTHON_VERSION: +# - PYTHON_MODULE (only used if USE_MODULES=TRUE) - format is the name +# of the Python module to load followed by an underscore and then the +# version number (e.g. python_3.10.4, The script will then run "module +# load python/3.10.4") +# - PYTHON_NAME = python (or e.g. python3, etc.) +# - PYTHON_VERSION = 3.10.4 +# +# For a description of these and other variables, visit the MET +# downloads page under "Sample Script For Compiling External +# Libraries And MET": +# https://dtcenter.org/community-code/model-evaluation-tools-met/download +# +# An easy way to set these necessary environment variables is +# in an environment configuration file (for example, +# install_met_env.). This script and example +# environment config files for various machines can be found in +# the MET GitHub repository in the scripts/installation directory: +# https://github.com/dtcenter/MET +# +# USAGE: compile_MET_all.sh install_met_env. 
+# +# The compile_MET_all.sh script will compile and install MET and its +# external library dependencies, if needed, including: +# PROJ (with dependency SQLITE >= 3.11), GSL, BUFRLIB, +# GRIB2C (with dependencies Z, PNG, JASPER, JPEG), HDF5, NETCDF (C and CXX), +# HDF4 (optional for MODIS-Regrid and lidar2nc), HDFEOS (optional for +# MODIS-Regrid and lidar2nc), FREETYPE (optional for MODE Graphics), +# and CAIRO (optional for MODE Graphics). +# +# If these libraries have already been installed and don't need to be +# reinstalled or if you are compiling on a machine that uses modulefiles +# and you'd like to make use of the existing dependent libraries on +# your machine, there are more environment variables that you will +# need to set to let MET know where the library and header files are. +# Please supply values for the following environment variables +# in the input environment configuration file (install_met_env.): +# MET_GRIB2CLIB, MET_GRIB2CINC, GRIB2CLIB_NAME, MET_BUFRLIB, BUFRLIB_NAME, +# MET_HDF5, MET_NETCDF, MET_PROJ, MET_GSL, LIB_JASPER, LIB_LIBPNG, LIB_Z, +# LIB_JPEG, SQLITE_INCLUDE_DIR, SQLITE_LIB_DIR, TIFF_INCLUDE_DIR, TIFF_LIB_DIR. +# +# The optional libraries ecKit and atlas offer support for unstructured +# grids. The optional libraries HDF4, HDFEOS, FREETYPE, and CAIRO are +# used for the following, not widely used tools, MODIS-Regrid, +# lidar2nc, and MODE Graphics. To enable building of these libraries, +# set the compile flags for the library (e.g. COMPILE_ECKIT, COMPILE_ATLAS, +# COMPILE_HDF, COMPILE_HDFEOS) to any value in the environment config +# file. If these libraries have already been installed and don't need +# to be reinstalled, please supply values for the following environment +# variables in the input environment configuration file +# (install_met_env.): MET_ECKIT, MET_ATLAS, MET_HDF, +# MET_HDFEOS, MET_FREETYPEINC, MET_FREETYPELIB, MET_CAIROINC, +# MET_CAIROLIB. 
+# +# Users can speed up the compilation of MET and its dependent libraries +# by adding the following to their environment configuration file: +# export MAKE_ARGS=-j # +# replacing the # with the number of cores to use (integer) or simply +# specifying: +# export MAKE_ARGS=-j +# with no integer argument to start as many processes in parallel as +# possible. +#================================================ + +# print command, run it, then error and exit if non-zero value is returned +function run_cmd { + echo $* + eval "$@" + ret=$? + if [ $ret != 0 ]; then + echo "ERROR: Command returned with non-zero ($ret) status: $*" + exit $ret + fi +} + +if [ -z $1 ]; then + echo + echo "No environment configuration file provided (e.g. install_met_env.). Starting compilation with current environment." +else + if [ ! -f "$1" ]; then + echo "The file \"$1\" does not exist!" + exit 1 + fi + + source $1 +fi + +echo +echo "TEST_BASE = ${TEST_BASE? "ERROR: TEST_BASE must be set"}" +echo "MET_SUBDIR = ${MET_SUBDIR? "ERROR: MET_SUBDIR must be set"}" +echo "MET_TARBALL = ${MET_TARBALL? "ERROR: MET_TARBALL must be set"}" +echo "USE_MODULES = ${USE_MODULES? "ERROR: USE_MODULES must be set to TRUE if using modules or FALSE otherwise"}" +if [[ -z "$COMPILER" ]] && [[ -z "$COMPILER_FAMILY" && -z "$COMPILER_VERSION" ]]; then + echo "ERROR: COMPILER or COMPILER_FAMILY and COMPILER_VERSION must be set" + exit 1 +fi +echo ${MAKE_ARGS:+MAKE_ARGS = $MAKE_ARGS} + + +LIB_DIR=${TEST_BASE}/external_libs +MET_DIR=${MET_SUBDIR} + +if [ -z "${BIN_DIR_PATH}" ]; then + if [ -z "${MET_INSTALL_DIR}" ]; then + BIN_DIR_PATH=${TEST_BASE}/bin + else + BIN_DIR_PATH=${MET_INSTALL_DIR}/bin + fi +fi + +if [ -z "${MET_INSTALL_DIR}" ]; then + MET_INSTALL_DIR=${MET_DIR} +else + LIB_DIR=${MET_INSTALL_DIR} +fi + +TAR_DIR=${TEST_BASE}/tar_files +MET_TARBALL=${TAR_DIR}/${MET_TARBALL} + +# Create directory for libraries +mkdir -p ${LIB_DIR} + +# Check that tar files exist +if [ ! 
-e $TAR_DIR ]; then + echo "TAR File directory doesn't exist: ${TAR_DIR}" + exit 1 +fi + +# If MET_PYTHON_LIB is not set in the environment file, set it to the +# lib directory so it can be used to install MET with Python Embedding +# support +if [[ -z "$MET_PYTHON_LIB" ]]; then + MET_PYTHON_LIB=${MET_PYTHON}/lib +fi + + +# Print library linker path +echo "LD_LIBRARY_PATH = ${LD_LIBRARY_PATH}" + +# if LIB_Z is not set in the environment file, set it to the +# lib directory so it can be used to install HDF5 with zlib support +if [[ -z "$LIB_Z" ]]; then + LIB_Z=${LIB_DIR}/lib +fi + +# if TIFF is not defined in the environment file, enable its compilation +if [[ -z ${TIFF_INCLUDE_DIR} ]] && [[ -z ${TIFF_LIB_DIR} ]]; then + COMPILE_TIFF=1 +else + COMPILE_TIFF=0 +fi + +# if SQLITE is not defined in the environment file, enable its compilation +if [[ -z ${SQLITE_INCLUDE_DIR} ]] && [[ -z ${SQLITE_LIB_DIR} ]]; then + COMPILE_SQLITE=1 +else + COMPILE_SQLITE=0 +fi + +# Constants +if [[ -z ${MET_GRIB2CLIB} ]] && [[ -z ${MET_GRIB2C} ]]; then + COMPILE_ZLIB=1 + COMPILE_LIBPNG=1 + COMPILE_JASPER=1 + COMPILE_JPEG=1 + COMPILE_G2CLIB=1 +else + COMPILE_ZLIB=0 + COMPILE_LIBPNG=0 + COMPILE_JASPER=0 + COMPILE_JPEG=0 + COMPILE_G2CLIB=0 +fi + +if [ -z ${MET_BUFRLIB} ]; then COMPILE_BUFRLIB=1; else COMPILE_BUFRLIB=0; fi + +if [ -z ${MET_NETCDF} ]; then COMPILE_NETCDF=1; else COMPILE_NETCDF=0; fi + +if [ -z ${MET_PROJ} ]; then COMPILE_PROJ=1; else COMPILE_PROJ=0; fi + +if [ -z ${MET_GSL} ]; then COMPILE_GSL=1; else COMPILE_GSL=0; fi + +# Only set COMPILE_ECKIT and COMPILE_ATLAS if you want to compile and enable support for unstructured grids +if [ ! -z "${COMPILE_ECKIT}" ]; then COMPILE_ECKIT=1; else COMPILE_ECKIT=0; fi +if [ !
-z "${COMPILE_ATLAS}" ]; then COMPILE_ATLAS=1; else COMPILE_ATLAS=0; fi + +if [[ -z ${MET_ECKIT} ]] && [[ -z ${MET_ATLAS} ]]; then + if [[ $COMPILE_ECKIT -eq 1 && $COMPILE_ATLAS -eq 1 ]]; then + export MET_ECKIT=${LIB_DIR} + export MET_ATLAS=${LIB_DIR} + fi +else + # Only set COMPILE_ECKIT and COMPILE_ATLAS to 1 if you have already compiled ECKIT and ATLAS, + # have set MET_ECKIT and MET_ATLAS in your configuration file, and want to enable + # unstructured grids + COMPILE_ECKIT=0 + COMPILE_ATLAS=0 +fi + +# Only set COMPILE_HDF and COMPILE_HDFEOS if you want to compile and enable MODIS-Regrid (not widely used) +if [ ! -z "${COMPILE_HDF}" ]; then COMPILE_HDF=1; else COMPILE_HDF=0; fi +if [ ! -z "${COMPILE_HDFEOS}" ]; then COMPILE_HDFEOS=1; else COMPILE_HDFEOS=0; fi + +if [[ -z ${MET_HDF} ]] && [[ -z ${MET_HDFEOS} ]]; then + if [[ $COMPILE_HDF -eq 1 && $COMPILE_HDFEOS -eq 1 ]]; then + export MET_HDF=${LIB_DIR} + export MET_HDFEOS=${LIB_DIR} + fi +else + # Only set COMPILE_HDF and COMPILE_HDFEOS to 1 if you have already compiled HDF4 and HDFEOS, + # have set MET_HDF and MET_HDFEOS in your configuration file, and want to enable + # MODIS-Regrid (not widely used) + COMPILE_HDF=0 + COMPILE_HDFEOS=0 +fi + +# Only set COMPILE_FREETYPE and COMPILE_CAIRO if you want to compile and enable MODE Graphics (not widely used) +if [ ! -z "${COMPILE_FREETYPE}" ]; then COMPILE_FREETYPE=1; else COMPILE_FREETYPE=0; fi +if [ ! -z "${COMPILE_CAIRO}" ]; then COMPILE_CAIRO=1; else COMPILE_CAIRO=0; fi + + +if [[ ! -z ${MET_FREETYPE} ]]; then + echo "ERROR: MET_FREETYPEINC and MET_FREETYPELIB must be set instead of MET_FREETYPE" + exit 1 +fi + +if [[ ! 
-z ${MET_CAIRO} ]]; then + echo "ERROR: MET_CAIROINC and MET_CAIROLIB must be set instead of MET_CAIRO" + exit 1 +fi + +if [[ -z ${MET_FREETYPEINC} && -z ${MET_FREETYPELIB} && -z ${MET_CAIROINC} && -z ${MET_CAIROLIB} ]]; then + if [[ $COMPILE_CAIRO -eq 1 && $COMPILE_FREETYPE -eq 1 ]]; then + export MET_CAIROINC=${LIB_DIR}/include/cairo + export MET_CAIROLIB=${LIB_DIR}/lib + export MET_FREETYPEINC=${LIB_DIR}/include/freetype2 + export MET_FREETYPELIB=${LIB_DIR}/lib + fi +else + # Only set COMPILE_FREETYPE and COMPILE_CAIRO to 1 if you have compiled FREETYPE and CAIRO, + # have set MET_FREETYPEINC, MET_FREETYPELIB, MET_CAIROINC, and MET_CAIROLIB in your + # configuration file, and want to enable MODE Graphics (not widely used) + COMPILE_FREETYPE=0 + COMPILE_CAIRO=0 +fi + +COMPILE_MET=1 + +# skip compilation of MET if SKIP_MET is set +if [ ! -z "${SKIP_MET}" ]; then COMPILE_MET=0; fi + +# skip compilation of external libraries if SKIP_LIBS is set +if [ ! -z "${SKIP_LIBS}" ]; then + COMPILE_PROJ=0 + COMPILE_GSL=0 + COMPILE_BUFRLIB=0 + COMPILE_ZLIB=0 + COMPILE_LIBPNG=0 + COMPILE_JASPER=0 + COMPILE_JPEG=0 + COMPILE_G2CLIB=0 + COMPILE_ECKIT=0 + COMPILE_ATLAS=0 + COMPILE_HDF=0 + COMPILE_HDFEOS=0 + COMPILE_NETCDF=0 + COMPILE_FREETYPE=0 + COMPILE_CAIRO=0 +fi + +if [ -z ${BIN_DIR_PATH} ]; then + BIN_DIR_PATH=${TEST_BASE}/bin +fi + +if [ -z ${USE_MET_TAR_FILE} ]; then + export USE_MET_TAR_FILE=TRUE +fi + +echo +echo "Compiling libraries into: ${LIB_DIR}" + +if [ ! -e ${LIB_DIR}/include ]; then + mkdir ${LIB_DIR}/include +fi + +if [ ! 
-e ${LIB_DIR}/lib ]; then + mkdir ${LIB_DIR}/lib +fi + +# Load compiler version +if [ -z ${COMPILER_FAMILY} ]; then + COMPILER_FAMILY=` echo $COMPILER | cut -d'_' -f1` +fi + +# Check for "oneapi" in compiler family name +#if echo ${COMPILER_FAMILY} | grep -E "^intel"; then +if [[ ${COMPILER_FAMILY} == *intel* ]]; then + COMPILER_FAMILY_SUFFIX=` echo $COMPILER_FAMILY | cut -d'-' -f2` +fi + +if [ -z ${COMPILER_VERSION} ]; then + COMPILER_VERSION=`echo $COMPILER | cut -d'_' -f2` +fi + +echo "COMPILER = $COMPILER" +echo "COMPILER_FAMILY = $COMPILER_FAMILY" +echo "COMPILER_FAMILY_SUFFIX = $COMPILER_FAMILY_SUFFIX" +echo "COMPILER_VERSION = $COMPILER_VERSION" +COMPILER_MAJOR_VERSION=`echo $COMPILER_VERSION | cut -d'.' -f1` +COMPILER_MINOR_VERSION=`echo $COMPILER_VERSION | cut -d'.' -f2` + +echo +echo "USE_MODULES = ${USE_MODULES}" +echo + +if [ ${USE_MODULES} = "TRUE" ]; then + echo "module load ${COMPILER_FAMILY}/${COMPILER_VERSION}" + echo ${COMPILER_FAMILY}/${COMPILER_VERSION} + + module load ${COMPILER_FAMILY}/${COMPILER_VERSION} + if [ ${COMPILER_FAMILY} = "PrgEnv-intel" ]; then + module load craype + module switch craype craype-sandybridge + fi +fi + +# After loading the compiler module, strip any extra +# characters off of "gnu" (e.g. 
"gnu9") +if [[ ${COMPILER_FAMILY} == *gnu* ]]; then + export COMPILER_FAMILY="gnu" +fi + +if [ ${COMPILER_FAMILY} = "gnu" ]; then + if [ -z ${CC} ]; then export CC=`which gcc`; fi + if [ -z ${CXX} ]; then export CXX=`which g++`; fi + if [ -z ${FC} ]; then export FC=`which gfortran`; fi + if [ -z ${F77} ]; then export F77=`which gfortran`; fi + if [ -z ${F90} ]; then export F90=`which gfortran`; fi +elif [ ${COMPILER_FAMILY} = "pgi" ]; then + if [ -z ${CC} ]; then export CC=`which pgcc`; fi + if [ -z ${CXX} ]; then export CXX=`which pgc++`; fi + if [ -z ${FC} ]; then export FC=`which pgf90`; fi + if [ -z ${F77} ]; then export F77=`which pgf90`; fi + if [ -z ${F90} ]; then export F90=`which pgf90`; fi +elif [[ ${COMPILER_FAMILY} == *intel* && ${CC} == "icc" ]] || \ + [[ ${COMPILER_FAMILY} == "ics" ]] || \ + [[ ${COMPILER_FAMILY} == "ips" ]] || \ + [[ ${COMPILER_FAMILY} == "intel-classic" ]] || \ + [[ ${COMPILER_FAMILY} == "PrgEnv-intel" ]]; then + if [ -z ${CC} ]; then export CC=`which icc`; fi + if [ -z ${CXX} ]; then export CXX=`which icpc`; fi + if [ -z ${FC} ]; then export FC=`which ifort`; fi + if [ -z ${F77} ]; then export F77=`which ifort`; fi + if [ -z ${F90} ]; then export F90=`which ifort`; fi +elif [[ ${COMPILER_FAMILY} == *intel* ]] && [[ ${CC} == *icx* ]]; then + export CXX=`which icpx` + export FC=`which ifx` + export F77=`which ifx` + export F90=`which ifx` +elif [[ ${COMPILER_FAMILY_SUFFIX} == oneapi ]]; then + export CC=`which icx` + export CXX=`which icpx` + export FC=`which ifx` + export F77=`which ifx` + export F90=`which ifx` +else + echo "ERROR: \${COMPILER} must start with gnu, intel, ics, ips, intel-classic, PrgEnv-intel, or pgi" + exit +fi + +echo "export CC=${CC}" +echo "export CXX=${CXX}" +echo "export FC=${FC}" +echo "export F77=${F77}" +echo "export F90=${F90}" +echo + +# Figure out what kind of OS is being used +unameOut="$(uname -s)" +case "${unameOut}" in + Linux*) machine=Linux;; + Darwin*) machine=Mac;; + CYGWIN*) machine=Cygwin;; + 
MINGW*) machine=MinGw;; + *) machine="UNKNOWN:${unameOut}" +esac + +# change sed command and extension for dynamic library files +if [[ $machine == "Mac" ]]; then + sed_inline="sed -i ''" +else + sed_inline="sed -i''" +fi + +if [[ "$(uname -m)" == "arm64" ]]; then + dynamic_lib_ext="dylib" +else + dynamic_lib_ext="so" +fi + +# Load Python module + +if [ ${USE_MODULES} = "TRUE" ]; then + if [ ! -z ${PYTHON_MODULE} ]; then + PYTHON_NAME=`echo $PYTHON_MODULE | cut -d'_' -f1` + PYTHON_VERSION_NUM=`echo $PYTHON_MODULE | cut -d'_' -f2` + echo "module load ${PYTHON_NAME}/${PYTHON_VERSION_NUM}" + echo ${PYTHON_NAME}/${PYTHON_VERSION_NUM} + module load ${PYTHON_NAME}/${PYTHON_VERSION_NUM} + # Allow the user to specify the name and version of the module to load + elif [[ ! -z ${PYTHON_NAME} && ! -z ${PYTHON_VERSION_NUM} ]]; then + echo "module load ${PYTHON_NAME}/${PYTHON_VERSION_NUM}" + echo ${PYTHON_NAME}/${PYTHON_VERSION_NUM} + module load ${PYTHON_NAME}/${PYTHON_VERSION_NUM} + fi +fi + +if [[ -e ${MET_PYTHON}/bin/python3 ]]; then + echo "Using python version: " + ${MET_PYTHON}/bin/python3 --version +fi + +# Compile Proj +if [ $COMPILE_PROJ -eq 1 ]; then + + + if [ $COMPILE_TIFF -eq 1 ]; then + echo + echo "Compiling TIFF at `date`" + mkdir -p ${LIB_DIR}/tiff + rm -rf ${LIB_DIR}/tiff/tiff* + tar -xzf ${TAR_DIR}/tiff*.tar.gz -C ${LIB_DIR}/tiff + cd ${LIB_DIR}/tiff/tiff* + echo "cd `pwd`" + run_cmd "./configure --prefix=${LIB_DIR} > $(pwd)/tiff.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > $(pwd)/tiff.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/tiff.make_install.log 2>&1" + export TIFF_INCLUDE_DIR=${LIB_DIR}/include + export TIFF_LIB_DIR=${LIB_DIR}/lib + fi + + if [ $COMPILE_SQLITE -eq 1 ]; then + echo + echo "Compiling SQLITE at `date`" + mkdir -p ${LIB_DIR}/sqlite + rm -rf ${LIB_DIR}/sqlite/sqlite* + tar -xf ${TAR_DIR}/sqlite*.tar.gz -C ${LIB_DIR}/sqlite > /dev/null 2>&1 + cd ${LIB_DIR}/sqlite/sqlite* + echo "cd `pwd`" + run_cmd "./configure 
--enable-shared --prefix=${LIB_DIR} > $(pwd)/sqlite.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > $(pwd)/sqlite.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/sqlite.make_install.log 2>&1" + export SQLITE_INCLUDE_DIR=${LIB_DIR}/include + export SQLITE_LIB_DIR=${LIB_DIR}/lib + fi + + vrs="7.1.0" + + echo + echo "Compiling PROJ_${vrs} at `date`" + echo "cmake version `cmake --version`" + mkdir -p ${LIB_DIR}/proj + rm -rf ${LIB_DIR}/proj/proj* + tar -xf ${TAR_DIR}/proj-${vrs}.tar.gz -C ${LIB_DIR}/proj + cd ${LIB_DIR}/proj/proj* + echo "cd `pwd`" + export PATH=${LIB_DIR}/bin:${PATH} + run_cmd "mkdir build; cd build" + + tiff_arg="" + # add tiff library and include arguments if necessary + if [[ ! -z "$TIFF_LIB_DIR" ]]; then + tiff_arg+="-DTIFF_LIBRARY_RELEASE=${TIFF_LIB_DIR}/libtiff.${dynamic_lib_ext}" + fi + if [[ ! -z "$TIFF_INCLUDE_DIR" ]]; then + tiff_arg+=" -DTIFF_INCLUDE_DIR=${TIFF_INCLUDE_DIR}" + fi + + cmd="cmake -DCMAKE_INSTALL_PREFIX=${LIB_DIR} -DSQLITE3_INCLUDE_DIR=${SQLITE_INCLUDE_DIR} -DSQLITE3_LIBRARY=${SQLITE_LIB_DIR}/libsqlite3.${dynamic_lib_ext} ${tiff_arg} .. > $(pwd)/proj.cmake.log 2>&1" + run_cmd ${cmd} + run_cmd "cmake --build . > $(pwd)/proj.cmake_build.log 2>&1" + run_cmd "cmake --build . 
--target install > $(pwd)/proj.cmake_install.log 2>&1" + +fi + +# Compile GSL +if [ $COMPILE_GSL -eq 1 ]; then + + if [ ${COMPILER_FAMILY} = "pgi" ]; then + vrs="1.11" + else + vrs="2.7.1" + fi + + echo + echo "Compiling GSL_${vrs} at `date`" + mkdir -p ${LIB_DIR}/gsl + rm -rf ${LIB_DIR}/gsl/gsl* + tar -xf ${TAR_DIR}/gsl-${vrs}.tar.gz -C ${LIB_DIR}/gsl + cd ${LIB_DIR}/gsl/gsl* + echo "cd `pwd`" + run_cmd "./configure --prefix=${LIB_DIR} > $(pwd)/gsl.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > $(pwd)/gsl.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/gsl.make_install.log 2>&1" +fi + +# Compile BUFRLIB +if [ $COMPILE_BUFRLIB -eq 1 ]; then + + vrs="v11.6.0" + + echo + echo "Compiling bufr_${vrs} at `date`" + mkdir -p ${LIB_DIR}/bufrlib + rm -rf ${LIB_DIR}/bufrlib/NCEPLIBS-bufr-bufr_${vrs} + tar -xf ${TAR_DIR}/bufr_${vrs}.tar.gz -C ${LIB_DIR}/bufrlib + export SOURCE_DIR=${LIB_DIR}/bufrlib/NCEPLIBS-bufr-bufr_${vrs} + cd $SOURCE_DIR + echo "cd `pwd`" + run_cmd "mkdir build" + export BUILD_DIR=${SOURCE_DIR}/build + run_cmd "cmake -H${SOURCE_DIR} -B${BUILD_DIR} -DCMAKE_INSTALL_PREFIX=${LIB_DIR} -DCMAKE_BUILD_TYPE=Debug > $(pwd)/bufr.cmake.log 2>&1" + run_cmd "cd ${BUILD_DIR}" + run_cmd "make ${MAKE_ARGS} > $(pwd)/bufr.make.log 2>&1" + run_cmd "ctest > $(pwd)/bufr.ctest.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/bufr.make_install.log 2>&1" +fi + + +# Compile ZLIB +if [ $COMPILE_ZLIB -eq 1 ]; then + echo + echo "Compiling ZLIB at `date`" + mkdir -p ${LIB_DIR}/zlib + rm -rf ${LIB_DIR}/zlib/zlib* + tar -xzf ${TAR_DIR}/zlib*.tar.gz -C ${LIB_DIR}/zlib + cd ${LIB_DIR}/zlib/zlib* + echo "cd `pwd`" + run_cmd "./configure --prefix=${LIB_DIR} > $(pwd)/zlib.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > $(pwd)/zlib.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/zlib.make_install.log 2>&1" + + # GPM: why is this removed? Could we add a comment to + # describe why this is needed? 
+ run_cmd "rm ${LIB_DIR}/lib/libz.a" +fi + +# Compile LIBPNG +if [[ $COMPILE_LIBPNG -eq 1 && $HOST != ys* ]]; then + echo + echo "Compiling LIBPNG at `date`" + mkdir -p ${LIB_DIR}/libpng + rm -rf ${LIB_DIR}/libpng/libpng* + tar -xzf ${TAR_DIR}/libpng*.tar.gz -C ${LIB_DIR}/libpng + cd ${LIB_DIR}/libpng/libpng* + echo "cd `pwd`" + run_cmd "./configure --prefix=${LIB_DIR} LDFLAGS=-L${LIB_DIR}/lib CPPFLAGS=-I${LIB_DIR}/include > $(pwd)/libpng.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > $(pwd)/libpng.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/libpng.make_install.log 2>&1" +fi + + +# Compile JASPER +if [ $COMPILE_JASPER -eq 1 ]; then + + vrs="2.0.25" + + echo + echo "Compiling JASPER at `date`" + mkdir -p ${LIB_DIR}/jasper + rm -rf ${LIB_DIR}/jasper/jasper* + tar -xf ${TAR_DIR}/jasper-${vrs}.tar.gz -C ${LIB_DIR}/jasper + cd ${LIB_DIR}/jasper/jasper-version-${vrs} + export CPPFLAGS="-I${LIB_DIR}/include" + export SOURCE_DIR=${LIB_DIR}/jasper/jasper-version-${vrs} + echo "cd `pwd`" + export BUILD_DIR=${LIB_DIR}/jasper/jasper-version-${vrs}/build + run_cmd "cmake -G \"Unix Makefiles\" -H${SOURCE_DIR} -B${BUILD_DIR} -DCMAKE_INSTALL_PREFIX=${LIB_DIR} -DJAS_ENABLE_DOC=false > $(pwd)/jasper.cmake.log 2>&1" + run_cmd "cd ${BUILD_DIR}" + run_cmd "make clean all > $(pwd)/jasper.make.log 2>&1" + # Commented out due to “which: no opj2_compress in …” error, which causes one of four tests to fail + # This is a known problem, so skipping tests for now: https://github.com/AAROC/CODE-RADE/issues/36#issuecomment-359744351 + #run_cmd "make ${MAKE_ARGS} test > $(pwd)/jasper.make_test.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/jasper.make_install.log 2>&1" +fi + +# Compile JPEG +if [ $COMPILE_JPEG -eq 1 ]; then + + vrs="9e" + + echo + echo "Compiling JPEG at `date`" + mkdir -p ${LIB_DIR}/jpeg + rm -rf ${LIB_DIR}/jpeg/jpeg* + tar -xf ${TAR_DIR}/jpegsrc.v${vrs}.tar.gz -C ${LIB_DIR}/jpeg + cd ${LIB_DIR}/jpeg/jpeg-${vrs} + echo "cd `pwd`" + run_cmd 
"./configure --prefix=${LIB_DIR} LDFLAGS=-L${LIB_DIR}/lib CPPFLAGS=-I${LIB_DIR}/include > $(pwd)/libjpeg.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > $(pwd)/libjpeg.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/libjpeg.make_install.log 2>&1" +fi + + +# Compile G2CLIB +if [ $COMPILE_G2CLIB -eq 1 ]; then + + vrs="1.6.4" + + echo + echo "Compiling G2CLIB at `date`" + mkdir -p ${LIB_DIR}/g2clib + rm -rf ${LIB_DIR}/g2clib/NCEP* + tar -xf ${TAR_DIR}/g2clib-${vrs}.tar.gz -C ${LIB_DIR}/g2clib + cd ${LIB_DIR}/g2clib/NCEP* + echo "cd `pwd`" + run_cmd "mkdir build; cd build" + run_cmd "cmake -DCMAKE_INSTALL_PREFIX=${LIB_DIR} -DCMAKE_PREFIX_PATH=${LIB_DIR} .. > $(pwd)/g2c.cmake.log 2>&1" + run_cmd "make ${MAKE_ARGS} > $(pwd)/g2c.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} test > $(pwd)/g2c.make_test.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/g2c.make_install.log 2>&1" +fi + +# Compile ECKIT +if [ $COMPILE_ECKIT -eq 1 ]; then + + # Need to obtain ecbuild before installing eckit + + vrs="3.5.0" + + echo + echo "Compiling ECBUILD at `date`" + mkdir -p ${LIB_DIR}/ecbuild + rm -rf ${LIB_DIR}/ecbuild/ecbuild* + tar -xf ${TAR_DIR}/ecbuild-${vrs}.tar.gz -C ${LIB_DIR}/ecbuild + cd ${LIB_DIR}/ecbuild/ecbuild* + echo "cd `pwd`" + run_cmd "mkdir build; cd build" + run_cmd "cmake ../ -DCMAKE_INSTALL_PREFIX=${LIB_DIR} > $(pwd)/ecbuild.cmake.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/ecbuild.make_install.log 2>&1" + + vrs="1.20.2" + + echo + echo "Compiling ECKIT at `date`" + mkdir -p ${LIB_DIR}/eckit + rm -rf ${LIB_DIR}/eckit/eckit* + tar -xf ${TAR_DIR}/eckit-${vrs}.tar.gz -C ${LIB_DIR}/eckit + cd ${LIB_DIR}/eckit/eckit* + echo "cd `pwd`" + run_cmd "mkdir build; cd build" + run_cmd "cmake ../ -DCMAKE_INSTALL_PREFIX=${LIB_DIR} -DCMAKE_PREFIX_PATH=${LIB_DIR} > $(pwd)/eckit.cmake.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/eckit.make_install.log 2>&1" + +fi + +# Compile ATLAS +if [ $COMPILE_ATLAS -eq 1 ]; then + + vrs="0.30.0" + + echo 
+ echo "Compiling ATLAS at `date`" + mkdir -p ${LIB_DIR}/atlas + rm -rf ${LIB_DIR}/atlas/atlas* + tar -xf ${TAR_DIR}/atlas-${vrs}.tar.gz -C ${LIB_DIR}/atlas + cd ${LIB_DIR}/atlas/atlas* + echo "cd `pwd`" + run_cmd "mkdir build; cd build" + run_cmd "cmake ../ -DCMAKE_INSTALL_PREFIX=${LIB_DIR} -DCMAKE_PREFIX_PATH=${LIB_DIR} > $(pwd)/atlas.cmake.log 2>&1" + run_cmd "make ${MAKE_ARGS} > $(pwd)/atlas.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/atlas.make_install.log 2>&1" + +fi + +# Compile HDF +# Depends on jpeg +# Edit 'mfhdf/hdiff/Makefile' as follows: +# From: LIBS = -ljpeg -lz +# To: LIBS = -ljpeg -lz -lm +if [ $COMPILE_HDF -eq 1 ]; then + echo + echo "Compiling HDF at `date`" + mkdir -p ${LIB_DIR}/hdf + rm -rf ${LIB_DIR}/hdf/HDF* + tar -xf ${TAR_DIR}/HDF4.2*.tar.gz -C ${LIB_DIR}/hdf + cd ${LIB_DIR}/hdf/HDF* + echo "cd `pwd`" + run_cmd "./configure --prefix=${LIB_DIR} --disable-netcdf --with-jpeg=${LIB_DIR} --with-zlib=${LIB_DIR} CPPFLAGS=-I/usr/include/tirpc LIBS='-lm -ltirpc' > $(pwd)/hdf4.configure.log 2>&1" + if [[ ${COMPILER_MAJOR_VERSION} -ge 10 ]]; then + cat hdf/src/Makefile | \ + sed 's/FFLAGS = -O2/FFLAGS = -w -fallow-argument-mismatch -O2/g' \ + > Makefile_new + elif [[ ${COMPILER_MAJOR_VERSION} -lt 10 ]]; then + cat hdf/src/Makefile | \ + sed 's/FFLAGS = -O2/FFLAGS = -w -Wno-argument-mismatch -O2/g' \ + > Makefile_new + fi + mv Makefile_new hdf/src/Makefile + run_cmd "make ${MAKE_ARGS} > $(pwd)/hdf4.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/hdf4.make_install.log 2>&1" +fi + +# Compile HDFEOS +# Depends on HDF +if [ $COMPILE_HDFEOS -eq 1 ]; then + echo + echo "Compiling HDFEOS at `date`" + mkdir -p ${LIB_DIR}/hdfeos + rm -rf ${LIB_DIR}/hdfeos/HDF-EOS* + tar -xzf ${TAR_DIR}/HDF-EOS*.tar.* -C ${LIB_DIR}/hdfeos + cd ${LIB_DIR}/hdfeos/hdfeos + echo "cd `pwd`" + run_cmd "./configure --prefix=${LIB_DIR} --with-hdf4=${LIB_DIR} --with-jpeg=${LIB_DIR} > $(pwd)/hdf-eos.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > 
$(pwd)/hdf-eos.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/hdf-eos.make_install.log 2>&1" + + cp include/*.h ${LIB_DIR}/include/ +fi + +# Compile NetCDF +if [ $COMPILE_NETCDF -eq 1 ]; then + + echo + echo "Compiling HDF5 at `date`" + mkdir -p ${LIB_DIR}/hdf5 + rm -rf ${LIB_DIR}/hdf5/hdf5* + tar -xzf ${TAR_DIR}/hdf5*.tar.gz -C ${LIB_DIR}/hdf5 + cd ${LIB_DIR}/hdf5/hdf5* + echo "cd `pwd`" + run_cmd "./configure --prefix=${LIB_DIR} --with-zlib=${LIB_Z} CFLAGS=-fPIC CXXFLAGS=-fPIC FFLAGS=-fPIC LDFLAGS=-L${LIB_DIR}/lib:${LIB_Z} CPPFLAGS=-I${LIB_DIR}/include > $(pwd)/hdf5.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/hdf5.make_install.log 2>&1" + + echo + echo "Compiling NetCDF-C at `date`" + mkdir -p ${LIB_DIR}/netcdf + rm -rf ${LIB_DIR}/netcdf/netcdf* + tar -xzf ${TAR_DIR}/netcdf-4*.tar.gz -C ${LIB_DIR}/netcdf > /dev/null 2>&1 || unzip ${TAR_DIR}/netcdf-4*.zip -d ${LIB_DIR}/netcdf + cd ${LIB_DIR}/netcdf/netcdf-* + export FC='' + export F90='' + echo "cd `pwd`" + run_cmd "./configure --prefix=${LIB_DIR} CFLAGS=-fPIC CXXFLAGS=-fPIC LDFLAGS=-L${LIB_DIR}/lib CPPFLAGS=-I${LIB_DIR}/include > $(pwd)/netcdf-c.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/netcdf-c.make_install.log 2>&1" + + echo + echo "Compiling NetCDF-CXX at `date`" + tar -xzf ${TAR_DIR}/netcdf-cxx*.tar.gz -C ${LIB_DIR}/netcdf + cd ${LIB_DIR}/netcdf/netcdf-cxx* + echo "cd `pwd`" + configure_lib_args="" + if [[ $machine == "Mac" ]]; then + configure_lib_args="-lnetcdf -lhdf5_hl -lhdf5 -lz" + fi + run_cmd "./configure --prefix=${LIB_DIR} LDFLAGS=-L${LIB_DIR}/lib CPPFLAGS=-I${LIB_DIR}/include LIBS=\"${LIBS} ${configure_lib_args}\" > $(pwd)/netcdf-cxx.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/netcdf-cxx.make_install.log 2>&1" +fi + +# Compile FREETYPE +if [ $COMPILE_FREETYPE -eq 1 ]; then + echo + echo "Compiling FREETYPE at `date`" + mkdir -p ${LIB_DIR}/freetype + rm -rf ${LIB_DIR}/freetype/freetype* + tar -xzf 
${TAR_DIR}/freetype*.tar.gz -C ${LIB_DIR}/freetype + cd ${LIB_DIR}/freetype/freetype* + echo "cd `pwd`" + run_cmd "./configure --prefix=${LIB_DIR} --with-png=yes > $(pwd)/freetype.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > $(pwd)/freetype.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/freetype.make_install.log 2>&1" +fi + + +# Compile CAIRO +if [ $COMPILE_CAIRO -eq 1 ]; then + + # If on Cray, compile PIXMAN + if [ ${COMPILER_FAMILY} = "PrgEnv-intel" ]; then + echo + echo "Compiling pixman at `date`" + mkdir -p ${LIB_DIR}/pixman + rm -rf ${LIB_DIR}/pixman/pixman* + tar -xzf ${TAR_DIR}/pixman*.tar.gz -C ${LIB_DIR}/pixman + cd ${LIB_DIR}/pixman/pixman* + echo "cd `pwd`" + run_cmd "./configure --prefix=${LIB_DIR} > $(pwd)/pixman.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > $(pwd)/pixman.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/pixman.make_install.log 2>&1" + fi + + echo + echo "Compiling CAIRO at `date`" + mkdir -p ${LIB_DIR}/cairo + rm -rf ${LIB_DIR}/cairo/cairo* + tar -xf ${TAR_DIR}/cairo*.tar* -C ${LIB_DIR}/cairo + cd ${LIB_DIR}/cairo/cairo* + export PKG_CONFIG=`which pkg-config` + if [ ${COMPILER_FAMILY} = "PrgEnv-intel" ]; then + export PKG_CONFIG_PATH=${LIB_DIR}/lib/pkgconfig/ + fi + echo "cd `pwd`" + run_cmd "./configure --prefix=${LIB_DIR} ax_cv_c_float_words_bigendian=no LDFLAGS=-L${LIB_DIR}/lib CPPFLAGS=-I${LIB_DIR}/include > $(pwd)/cairo.configure.log 2>&1" + run_cmd "make ${MAKE_ARGS} > $(pwd)/cairo.make.log 2>&1" + run_cmd "make ${MAKE_ARGS} install > $(pwd)/cairo.make_install.log 2>&1" +fi + +# Compile MET +if [ $COMPILE_MET -eq 0 ]; then + echo Skipping MET compilation + echo "Finished compiling at `date`" + exit 0 +fi + +echo +echo "Compiling MET at `date`" +# If using source from a tar file remove everything and unpack the tar file +# FALSE = compiling from github repo and we don't want to overwrite the files +if [ ${USE_MET_TAR_FILE} = "TRUE" ]; then + rm -rf ${MET_DIR}/MET* + tar -xzf ${MET_TARBALL} -C 
${MET_DIR} +fi +cd ${MET_DIR}/MET* + +echo "Modifying configure" +cat configure | \ + sed 's/C11/C17/g' | \ + sed 's/c11/c17/g' | \ + sed 's/cxx11/cxx17/g' | \ + sed 's/c++11/c++17/g' | \ + sed 's/gnu11/gnu17/g' \ + > configure_new + +mv configure_new configure +chmod 755 configure + +if [ -z ${MET_BUFRLIB} ]; then + export MET_BUFRLIB=${LIB_DIR}/lib + export BUFRLIB_NAME=-lbufr_4 +fi + +if [ -z ${MET_GRIB2CLIB} ]; then + export MET_GRIB2CLIB=${LIB_DIR}/lib + export MET_GRIB2CINC=${LIB_DIR}/include + export LIB_JASPER=${LIB_DIR}/lib + export LIB_LIBPNG=${LIB_DIR}/lib + export LIB_Z=${LIB_DIR}/lib + export GRIB2CLIB_NAME=-lg2c +fi + +if [ -z ${MET_NETCDF} ]; then + export MET_NETCDF=${LIB_DIR} + export MET_HDF5=${LIB_DIR} +fi + +if [ -z ${MET_GSL} ]; then + export MET_GSL=${LIB_DIR} +fi + +if [ -z ${MET_PROJ} ]; then + export MET_PROJ=${LIB_DIR} +fi + +export MET_PYTHON_BIN_EXE=${MET_PYTHON_BIN_EXE:=${MET_PYTHON}/bin/python3} +export MET_PYTHON_LD +export MET_PYTHON_CC + +# add flags to user-defined LDFLAGS for MacOS +if [[ $machine != "Mac" ]]; then + LDFLAGS="${LDFLAGS} -Wl,--disable-new-dtags" +fi + +# https://www.gnu.org/software/bash/manual/html_node/Shell-Parameter-Expansion.html +# ${parameter:+word} +# If parameter is null or unset, nothing is substituted, otherwise the expansion of word is substituted. 
+ +# add LIB_DIR/lib and LIB_DIR/lib64 to rpath and -L +LDFLAGS="${LDFLAGS} -Wl,-rpath,${LIB_DIR}/lib -L${LIB_DIR}/lib -Wl,-rpath,${LIB_DIR}/lib64 -L${LIB_DIR}/lib64" + +# if variables are set, add /lib to rpath and -L +for x in $MET_CAIRO $MET_FREETYPE $MET_GSL $MET_HDF $MET_HDF5 $MET_NETCDF; do + arg="${x:+-Wl,-rpath,$x/lib -L$x/lib}" + if [[ "$LDFLAGS" != *"$arg"* ]]; then + LDFLAGS+=" $arg" + fi +done + +# if variables are set, add /lib64 to rpath and -L +for x in $MET_ATLAS $MET_BUFR $MET_ECKIT $MET_GRIB2C $MET_PROJ $LIB_JASPER; do + arg="${x:+-Wl,-rpath,$x/lib64 -L$x/lib64}" + if [[ "$LDFLAGS" != *"$arg"* ]]; then + LDFLAGS+=" $arg" + fi +done + +# if variables are set, add to rpath and -L +for x in $MET_ATLASLIB $MET_BUFRLIB $MET_CAIROLIB $MET_ECKITLIB $MET_FREETYPELIB $MET_GRIB2CLIB $MET_GSLLIB $MET_HDF5LIB $MET_HDFLIB $MET_NETCDFLIB $MET_PROJLIB $MET_PYTHON_LIB $LIB_JASPER $LIB_LIBPNG $LIB_Z $ADDTL_DIR; do + arg="${x:+-Wl,-rpath,$x -L$x}" + if [[ "$LDFLAGS" != *"$arg"* ]]; then + LDFLAGS+=" $arg" + fi +done + +export LDFLAGS + +export LIBS="${LIBS} -lhdf5_hl -lhdf5 -lz" +export MET_FONT_DIR=${TEST_BASE}/fonts + + +echo "MET Configuration settings..." +printenv | egrep "^MET_" | sed -r 's/^/export /g' +echo "LDFLAGS = ${LDFLAGS}" +export OPT_ARGS='' +if [[ $COMPILER_FAMILY == "pgi" ]]; then + export OPT_ARGS="${OPT_ARGS} FFLAGS=-lpgf90" +fi + +configure_cmd="./configure --prefix=${MET_INSTALL_DIR} --bindir=${BIN_DIR_PATH}" +configure_cmd="${configure_cmd} BUFRLIB_NAME=${BUFRLIB_NAME}" +configure_cmd="${configure_cmd} GRIB2CLIB_NAME=${GRIB2CLIB_NAME} --enable-grib2" +if [[ ! -z ${MET_FREETYPEINC} && ! -z ${MET_FREETYPELIB} && \ + ! -z ${MET_CAIROINC} && ! -z ${MET_CAIROLIB} ]]; then + configure_cmd="${configure_cmd} --enable-mode_graphics" +fi + +if [[ ! -z $MET_ECKIT && ! -z $MET_ATLAS ]]; then + configure_cmd="${configure_cmd} --enable-ugrid" +fi + +if [[ ! -z $MET_HDF && ! 
-z $MET_HDFEOS ]]; then + configure_cmd="${configure_cmd} --enable-modis --enable-lidar2nc" +fi + +if [[ ! -z ${MET_PYTHON_CC} || ! -z ${MET_PYTHON_LD} ]]; then + configure_cmd="${configure_cmd} --enable-python" +fi + +configure_cmd="${configure_cmd} ${OPT_ARGS}" + +echo "cd `pwd`" +run_cmd "${configure_cmd} > $(pwd)/configure.log 2>&1" +run_cmd "make ${MAKE_ARGS} > $(pwd)/met.make.log 2>&1" +run_cmd "make install > $(pwd)/met.make_install.log 2>&1" +run_cmd "make test > $(pwd)/met.make_test.log 2>&1" + +echo "Finished compiling at `date`" diff --git a/internal/scripts/installation/config/install_met_env.cactus b/internal/scripts/installation/config/install_met_env.cactus new file mode 100644 index 0000000000..63ac85192a --- /dev/null +++ b/internal/scripts/installation/config/install_met_env.cactus @@ -0,0 +1,50 @@ +module reset +module use /apps/dev/modulefiles/ +module load ve/evs/2.0 +module use /apps/ops/para/libs/modulefiles/compiler/intel/19.1.3.304 +module load netcdf/4.7.4 +module load hdf5/1.10.6 +module load bufr/11.5.0 +module load zlib/1.2.11 +module load jasper/2.0.25 +module load libpng/1.6.37 +module load gsl/2.7 +module load g2c/1.6.4 +module load proj/7.1.0 +module use /apps/dev/modulefiles +module load fckit/0.11.0 +module load atlas/0.35.0 +module load eckit/1.24.4 + +export FC=ifort +export F77=ifort +export F90=ifort +export CC=icc +export CXX=icpc +export TEST_BASE=/lfs/h2/users/julie.prestopnik/12.0.0-beta5 +export LIB_DIR=${TEST_BASE}/external_libs +export COMPILER=intel_19.1.3.304 +export MET_SUBDIR=${TEST_BASE} +export MET_TARBALL=v12.0.0-beta5.tar.gz +export USE_MODULES=TRUE +export ADDTL_DIR=/apps/spack/gettext/0.21/intel/19.1.3.304/at2kdo4edvuhyzrt5g6zhwrdb7bdui4s/lib64/ +export PYTHON_MODULE=python_3.10.4 +export MET_PYTHON=/apps/spack/python/3.10.4/intel/19.1.3.304/xqft4d45h4dp4xnbz2ue3nbxv65i6bgp/ +export MET_PYTHON_LIB=/apps/spack/python/3.10.4/intel/19.1.3.304/xqft4d45h4dp4xnbz2ue3nbxv65i6bgp/lib64 +export 
MET_PYTHON_CC=-I/apps/spack/python/3.10.4/intel/19.1.3.304/xqft4d45h4dp4xnbz2ue3nbxv65i6bgp/include/python3.10 +export MET_PYTHON_LD=-L/apps/spack/python/3.10.4/intel/19.1.3.304/xqft4d45h4dp4xnbz2ue3nbxv65i6bgp/lib64\ -lpython3.10\ -lintl\ -lcrypt\ -ldl\ -lutil\ -lm\ -lm +export MET_NETCDF=/apps/prod/hpc-stack/intel-19.1.3.304/netcdf/4.7.4 +export MET_HDF5=/apps/prod/hpc-stack/intel-19.1.3.304/hdf5/1.10.6 +export MET_BUFRLIB=/apps/ops/prod/libs/intel/19.1.3.304/bufr/11.5.0/lib64 +export MET_GRIB2CLIB=/apps/ops/prod/libs/intel/19.1.3.304/g2c/1.6.4/lib64 +export MET_GRIB2CINC=/apps/ops/prod/libs/intel/19.1.3.304/g2c/1.6.4/include +export MET_GSL=/apps/spack/gsl/2.7/intel/19.1.3.304/xks7dxbowrdxhjck5zxc4rompopocevb +export MET_PROJ=/apps/spack/proj/7.1.0/intel/19.1.3.304/cjbmc7tacv5qcfatslqmcrzo5kb4raaq +export MET_ATLAS=/apps/dev/atlas/install-0.35.0 +export MET_ECKIT=/apps/dev/eckit/install-1.24.4 +export BUFRLIB_NAME=-lbufr_4 +export GRIB2CLIB_NAME=-lg2c +export LIB_JASPER=/apps/spack/jasper/2.0.25/intel/19.1.3.304/sjib74krrorkyczqpqah4tvewmlnqdx4/lib64 +export LIB_LIBPNG=/apps/spack/libpng/1.6.37/intel/19.1.3.304/4ohkronuhlyherusoszzrmur5ewvlwzh/lib +export LIB_Z=/apps/spack/zlib/1.2.11/intel/19.1.3.304/hjotqkckeoyt6j6tibalwzrlfljcjtdh/lib +export MAKE_ARGS=-j diff --git a/internal/scripts/installation/config/install_met_env.wcoss2 b/internal/scripts/installation/config/install_met_env.wcoss2 index 86b73e0064..2d02be1f87 100644 --- a/internal/scripts/installation/config/install_met_env.wcoss2 +++ b/internal/scripts/installation/config/install_met_env.wcoss2 @@ -1,33 +1,32 @@ module reset -module use /apps/ops/para/libs/modulefiles/compiler/intel/19.1.3.304 -export HPC_OPT=/apps/ops/para/libs module use /apps/dev/modulefiles/ module load ve/evs/2.0 +module use /apps/ops/para/libs/modulefiles/compiler/intel/19.1.3.304 module load netcdf/4.7.4 module load hdf5/1.10.6 -module load bufr/11.6.0 +module load bufr/11.5.0 module load zlib/1.2.11 module load jasper/2.0.25 
module load libpng/1.6.37 module load gsl/2.7 module load g2c/1.6.4 module load proj/7.1.0 -module use /apps/dev/lmodules/intel/19.1.3.304/ -module load atlas/0.30.0 -module load eckit/1.20.2 +module use /apps/dev/modulefiles +module load fckit/0.11.0 +module load atlas/0.35.0 +module load eckit/1.24.4 export FC=ifort export F77=ifort export F90=ifort export CC=icc export CXX=icpc -#export TEST_BASE=/apps/ops/para/libs/intel/19.1.3.304/met/12.0.0-beta3 export TEST_BASE=$(pwd) export LIB_DIR=${TEST_BASE}/external_libs export BIN_DIR_PATH=${TEST_BASE}/bin export COMPILER=intel_19.1.3.304 export MET_SUBDIR=${TEST_BASE} -export MET_TARBALL=v12.0.0-beta3.tar.gz +export MET_TARBALL=v12.0.0-beta5.tar.gz export USE_MODULES=TRUE export ADDTL_DIR=/apps/spack/gettext/0.21/intel/19.1.3.304/at2kdo4edvuhyzrt5g6zhwrdb7bdui4s/lib64 export PYTHON_MODULE=python_3.10.4 @@ -45,10 +44,10 @@ export MET_GRIB2CLIB=${g2c_ROOT}/lib64 export MET_GRIB2CINC=${G2C_INC} export MET_GSL=/apps/spack/gsl/2.7/intel/19.1.3.304/xks7dxbowrdxhjck5zxc4rompopocevb export MET_PROJ=/apps/spack/proj/7.1.0/intel/19.1.3.304/cjbmc7tacv5qcfatslqmcrzo5kb4raaq -export MET_ATLASLIB=/apps/dev/intel-19.1.3.304/cray-mpich-8.1.9/atlas/0.30.0/lib64 -export MET_ATLASINC=/apps/dev/intel-19.1.3.304/cray-mpich-8.1.9/atlas/0.30.0/include/atlas -export MET_ECKITLIB=/apps/dev/intel-19.1.3.304/cray-mpich-8.1.9/eckit/1.20.2/lib64 -export MET_ECKITINC=/apps/dev/intel-19.1.3.304/cray-mpich-8.1.9/eckit/1.20.2/include/eckit +export MET_ATLASLIB=/apps/dev/atlas/install-0.35.0/lib64 +export MET_ATLASINC=/apps/dev/atlas/install-0.35.0/include/atlas +export MET_ECKITLIB=/apps/dev/eckit/install-1.24.4/lib64 +export MET_ECKITINC=/apps/dev/eckit/install-1.24.4/include/eckit export BUFRLIB_NAME=-lbufr_4 export GRIB2CLIB_NAME=-lg2c # JY export LIB_JASPER=/apps/spack/jasper/2.0.25/intel/19.1.3.304/sjib74krrorkyczqpqah4tvewmlnqdx4/lib64 @@ -58,4 +57,3 @@ export LIB_LIBPNG=${LIBPNG_LIBDIR} # JY export 
LIB_Z=/apps/spack/zlib/1.2.11/intel/19.1.3.304/hjotqkckeoyt6j6tibalwzrlfljcjtdh/lib export LIB_Z=${ZLIB_LIBDIR} export MAKE_ARGS=-j -export CXXFLAGS="-std=c++11" diff --git a/internal/test_unit/bin/unit_test.sh b/internal/test_unit/bin/unit_test.sh index 0e2579464d..e3bc29e181 100755 --- a/internal/test_unit/bin/unit_test.sh +++ b/internal/test_unit/bin/unit_test.sh @@ -86,6 +86,7 @@ UNIT_XML="unit_ascii2nc.xml \ unit_climatology_1.0deg.xml \ unit_climatology_1.5deg.xml \ unit_climatology_2.5deg.xml \ + unit_climatology_mixed.xml \ unit_grib_tables.xml \ unit_grid_weight.xml \ unit_netcdf.xml \ diff --git a/internal/test_unit/config/GridStatConfig_climo_FCST_NCEP_1.0DEG_OBS_WMO_1.5DEG b/internal/test_unit/config/GridStatConfig_climo_FCST_NCEP_1.0DEG_OBS_WMO_1.5DEG new file mode 100644 index 0000000000..ab1cdd8362 --- /dev/null +++ b/internal/test_unit/config/GridStatConfig_climo_FCST_NCEP_1.0DEG_OBS_WMO_1.5DEG @@ -0,0 +1,277 @@ +//////////////////////////////////////////////////////////////////////////////// +// +// Grid-Stat configuration file. +// +// For additional information, please see the MET User's Guide. 
+// +//////////////////////////////////////////////////////////////////////////////// + +// +// Output model name to be written +// +model = "GFS"; + +// +// Output description to be written +// May be set separately in each "obs.field" entry +// +desc = "NA"; + +// +// Output observation type to be written +// +obtype = "GFSANL"; + +//////////////////////////////////////////////////////////////////////////////// + +// +// Verification grid +// +regrid = { + to_grid = "${OBS_CLIMO_DIR}/mslp_mean.grib"; + method = BILIN; + width = 2; + vld_thresh = 0.5; + shape = SQUARE; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// May be set separately in each "field" entry +// +censor_thresh = []; +censor_val = []; +mpr_column = []; +mpr_thresh = []; +cat_thresh = []; +cnt_thresh = [ NA ]; +cnt_logic = UNION; +wind_thresh = [ NA ]; +wind_logic = UNION; +eclv_points = 0.05; +nc_pairs_var_name = ""; +nc_pairs_var_suffix = ""; +hss_ec_value = NA; +rank_corr_flag = FALSE; + +// +// Forecast and observation fields to be verified +// + +field_list = [ + { name = "TMP"; level = [ "P500" ]; }, + { name = "UGRD"; level = [ "P500" ]; }, + { name = "VGRD"; level = [ "P500" ]; }, + { name = "TMP"; level = [ "P850" ]; cat_thresh = [ >FCDP75, >OCDP75 ]; }, + { name = "UGRD"; level = [ "P850" ]; cat_thresh = [ >FCDP75, >OCDP75 ]; }, + { name = "VGRD"; level = [ "P850" ]; cat_thresh = [ >FCDP75, >OCDP75 ]; } +]; + +fcst = { + + field = field_list; + + climo_mean = { + field = field_list; + file_name = [ "${FCST_CLIMO_DIR}/cmean_1d.19590410" ]; + }; + + climo_stdev = { + field = field_list; + file_name = [ "${FCST_CLIMO_DIR}/cstdv_1d.19590410" ]; + }; + + time_interp_method = DW_MEAN; + day_interval = 1; + hour_interval = 6; +} + +obs = { + + field = field_list; + + climo_mean = { + field = field_list; + file_name = [ "${OBS_CLIMO_DIR}/t500hPa_mean.grib", + "${OBS_CLIMO_DIR}/t850hPa_mean.grib", + "${OBS_CLIMO_DIR}/u500hPa_mean.grib", + 
"${OBS_CLIMO_DIR}/u850hPa_mean.grib", + "${OBS_CLIMO_DIR}/v500hPa_mean.grib", + "${OBS_CLIMO_DIR}/v850hPa_mean.grib" ]; + }; + + climo_stdev = { + field = field_list; + file_name = [ "${OBS_CLIMO_DIR}/t850hPa_stdev.grib", + "${OBS_CLIMO_DIR}/u850hPa_stdev.grib", + "${OBS_CLIMO_DIR}/v850hPa_stdev.grib" ]; + }; + + time_interp_method = DW_MEAN; + day_interval = 1; + hour_interval = 12; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// May be set separately in each "obs.field" entry +// +climo_cdf = { + cdf_bins = 1; + center_bins = TRUE; + write_bins = FALSE; + direct_prob = FALSE; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Verification masking regions +// +mask = { + grid = [ "FULL" ]; + poly = []; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Confidence interval settings +// +ci_alpha = [ 0.05 ]; + +boot = { + interval = PCTILE; + rep_prop = 1.0; + n_rep = 0; + rng = "mt19937"; + seed = ""; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Data smoothing methods +// +interp = { + field = BOTH; + vld_thresh = 1.0; + shape = SQUARE; + + type = [ + { + method = NEAREST; + width = 1; + } + ]; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Neighborhood methods +// +nbrhd = { + width = [ 1 ]; + cov_thresh = [ >=0.5 ]; + vld_thresh = 1.0; + shape = SQUARE; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Fourier decomposition +// +fourier = { + wave_1d_beg = []; + wave_1d_end = []; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Gradient statistics +// May be set separately in each "obs.field" entry +// +gradient = { + dx = []; + dy = []; +} + +//////////////////////////////////////////////////////////////////////////////// + +// 
+// Distance Map statistics +// May be set separately in each "obs.field" entry +// +distance_map = { + baddeley_p = 2; + baddeley_max_dist = NA; + fom_alpha = 0.1; + zhu_weight = 0.5; + beta_value(n) = n * n / 2.0; +} + +//////////////////////////////////////////////////////////////////////////////// + +// +// Statistical output types +// +output_flag = { + fho = NONE; + ctc = NONE; + cts = NONE; + mctc = NONE; + mcts = NONE; + cnt = STAT; + sl1l2 = STAT; + sal1l2 = STAT; + vl1l2 = STAT; + val1l2 = STAT; + vcnt = STAT; + pct = NONE; + pstd = NONE; + pjc = NONE; + prc = NONE; + eclv = NONE; + nbrctc = NONE; + nbrcts = NONE; + nbrcnt = NONE; + grad = NONE; + dmap = NONE; + seeps = NONE; +} + +// +// NetCDF matched pairs output file +// +nc_pairs_flag = { + latlon = TRUE; + raw = TRUE; + diff = TRUE; + climo = TRUE; + climo_cdp = TRUE; + weight = TRUE; + nbrhd = FALSE; + fourier = FALSE; + gradient = FALSE; + distance_map = FALSE; + apply_mask = FALSE; +} + +//////////////////////////////////////////////////////////////////////////////// +// Threshold for SEEPS p1 (Probability of being dry) + +seeps_p1_thresh = NA; + +//////////////////////////////////////////////////////////////////////////////// + +grid_weight_flag = COS_LAT; +tmp_dir = "/tmp"; +output_prefix = "${OUTPUT_PREFIX}"; +version = "V12.0.0"; + +//////////////////////////////////////////////////////////////////////////////// diff --git a/internal/test_unit/config/GridStatConfig_mpr_thresh b/internal/test_unit/config/GridStatConfig_mpr_thresh index 01a9037fd0..e99405be7a 100644 --- a/internal/test_unit/config/GridStatConfig_mpr_thresh +++ b/internal/test_unit/config/GridStatConfig_mpr_thresh @@ -79,32 +79,18 @@ fcst = { desc = "ABS_OBS_FCST_DIFF"; nc_pairs_var_suffix = desc; }, -// MET #2924 Replace this section -// { -// mpr_column = [ "ABS(OBS-OBS_CLIMO_MEAN)" ]; -// mpr_thresh = [ <=5 ]; -// desc = "ABS_OBS_CLIMO_MEAN_DIFF"; -// nc_pairs_var_suffix = desc; -// }, -// { -// mpr_column = [ "OBS_CLIMO_CDF" 
]; -// mpr_thresh = [ >=0.25&&<=0.75 ]; -// desc = "OBS_CLIMO_CDF_IQR"; -// nc_pairs_var_suffix = desc; -// } { - mpr_column = [ "ABS(OBS-CLIMO_MEAN)" ]; + mpr_column = [ "ABS(OBS-OBS_CLIMO_MEAN)" ]; mpr_thresh = [ <=5 ]; desc = "ABS_OBS_CLIMO_MEAN_DIFF"; nc_pairs_var_suffix = desc; }, { - mpr_column = [ "CLIMO_CDF" ]; + mpr_column = [ "OBS_CLIMO_CDF" ]; mpr_thresh = [ >=0.25&&<=0.75 ]; - desc = "CLIMO_CDF_IQR"; + desc = "OBS_CLIMO_CDF_IQR"; nc_pairs_var_suffix = desc; } -// MET #2924 End replace ]; } obs = fcst; diff --git a/internal/test_unit/config/PointStatConfig_mpr_thresh b/internal/test_unit/config/PointStatConfig_mpr_thresh index e0d54b9219..675b0696c2 100644 --- a/internal/test_unit/config/PointStatConfig_mpr_thresh +++ b/internal/test_unit/config/PointStatConfig_mpr_thresh @@ -68,28 +68,16 @@ fcst = { mpr_thresh = [ <=5 ]; desc = "ABS_OBS_FCST_DIFF"; }, -// MET #2924 Replace this section -// { -// mpr_column = [ "ABS(OBS-OBS_CLIMO_MEAN)" ]; -// mpr_thresh = [ <=5 ]; -// desc = "ABS_OBS_CLIMO_MEAN_DIFF"; -// }, -// { -// mpr_column = [ "OBS_CLIMO_CDF" ]; -// mpr_thresh = [ >=0.25&&<=0.75 ]; -// desc = "OBS_CLIMO_CDF_IQR"; -// } { - mpr_column = [ "ABS(OBS-CLIMO_MEAN)" ]; + mpr_column = [ "ABS(OBS-OBS_CLIMO_MEAN)" ]; mpr_thresh = [ <=5 ]; desc = "ABS_OBS_CLIMO_MEAN_DIFF"; }, { - mpr_column = [ "CLIMO_CDF" ]; + mpr_column = [ "OBS_CLIMO_CDF" ]; mpr_thresh = [ >=0.25&&<=0.75 ]; - desc = "CLIMO_CDF_IQR"; + desc = "OBS_CLIMO_CDF_IQR"; } -// MET #2924 End replace ]; } obs = fcst; diff --git a/internal/test_unit/hdr/met_12_0.hdr b/internal/test_unit/hdr/met_12_0.hdr index b7d835c72c..f3785b7b48 100644 --- a/internal/test_unit/hdr/met_12_0.hdr +++ b/internal/test_unit/hdr/met_12_0.hdr @@ -5,7 +5,7 @@ FHO : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_L ISC : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS 
FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL TILE_DIM TILE_XLL TILE_YLL NSCALE ISCALE MSE ISC FENERGY2 OENERGY2 BASER FBIAS MCTC : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL N_CAT _VAR_ MCTS : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL N_CAT ACC ACC_NCL ACC_NCU ACC_BCL ACC_BCU HK HK_BCL HK_BCU HSS HSS_BCL HSS_BCU GER GER_BCL GER_BCU HSS_EC HSS_EC_BCL HSS_EC_BCU EC_VALUE -MPR : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL INDEX OBS_SID OBS_LAT OBS_LON OBS_LVL OBS_ELV FCST OBS OBS_QC CLIMO_MEAN CLIMO_STDEV CLIMO_CDF +MPR : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL INDEX OBS_SID OBS_LAT OBS_LON OBS_LVL OBS_ELV FCST OBS OBS_QC OBS_CLIMO_MEAN OBS_CLIMO_STDEV OBS_CLIMO_CDF FCST_CLIMO_MEAN FCST_CLIMO_STDEV SEEPS : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL S12 S13 S21 S23 S31 S32 PF1 PF2 PF3 PV1 PV2 PV3 MEAN_FCST MEAN_OBS SEEPS SEEPS_MPR : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV 
OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE OBS_SID OBS_LAT OBS_LON FCST OBS OBS_QC FCST_CAT OBS_CAT P1 P2 T1 T2 SEEPS NBRCNT : VERSION MODEL DESC FCST_LEAD FCST_VALID_BEG FCST_VALID_END OBS_LEAD OBS_VALID_BEG OBS_VALID_END FCST_VAR FCST_UNITS FCST_LEV OBS_VAR OBS_UNITS OBS_LEV OBTYPE VX_MASK INTERP_MTHD INTERP_PNTS FCST_THRESH OBS_THRESH COV_THRESH ALPHA LINE_TYPE TOTAL FBS FBS_BCL FBS_BCU FSS FSS_BCL FSS_BCU AFSS AFSS_BCL AFSS_BCU UFSS UFSS_BCL UFSS_BCU F_RATE F_RATE_BCL F_RATE_BCU O_RATE O_RATE_BCL O_RATE_BCU diff --git a/internal/test_unit/xml/unit_climatology_1.5deg.xml b/internal/test_unit/xml/unit_climatology_1.5deg.xml index 278df4a8be..de22d95673 100644 --- a/internal/test_unit/xml/unit_climatology_1.5deg.xml +++ b/internal/test_unit/xml/unit_climatology_1.5deg.xml @@ -82,19 +82,11 @@ OUTPUT_DIR &OUTPUT_DIR;/climatology_1.5deg - - \ - -lookin &OUTPUT_DIR;/climatology_1.5deg/point_stat_WMO_CLIMO_1.5DEG_120000L_20120409_120000V.stat \ - -job filter -line_type MPR -column_thresh CLIMO_CDF 'lt0.1||gt0.9' \ - -dump_row &OUTPUT_DIR;/climatology_1.5deg/stat_analysis_WMO_1.5DEG_FILTER_CDF_dump.stat - - &OUTPUT_DIR;/climatology_1.5deg/stat_analysis_WMO_1.5DEG_FILTER_CDF_dump.stat diff --git a/internal/test_unit/xml/unit_climatology_mixed.xml b/internal/test_unit/xml/unit_climatology_mixed.xml new file mode 100644 index 0000000000..0433b39e8d --- /dev/null +++ b/internal/test_unit/xml/unit_climatology_mixed.xml @@ -0,0 +1,39 @@ + + + + + + + + + + +]> + + + + + + &TEST_DIR; + true + + + &MET_BIN;/grid_stat + + OUTPUT_PREFIX FCST_NCEP_1.0DEG_OBS_WMO_1.5DEG + FCST_CLIMO_DIR &DATA_DIR_CLIMO;/NCEP_NCAR_40YR_1.0deg + OBS_CLIMO_DIR &DATA_DIR_CLIMO;/ERA_DAILY_1.5deg + + \ + &DATA_DIR_MODEL;/grib2/gfs/gfs_2012040900_F024.grib2 \ + &DATA_DIR_MODEL;/grib2/gfsanl/gfsanl_4_20120410_0000_000.grb2 \ + &CONFIG_DIR;/GridStatConfig_climo_FCST_NCEP_1.0DEG_OBS_WMO_1.5DEG \ + -outdir &OUTPUT_DIR;/climatology_mixed 
-v 2 + + + &OUTPUT_DIR;/climatology_mixed/grid_stat_FCST_NCEP_1.0DEG_OBS_WMO_1.5DEG_240000L_20120410_000000V.stat + &OUTPUT_DIR;/climatology_mixed/grid_stat_FCST_NCEP_1.0DEG_OBS_WMO_1.5DEG_240000L_20120410_000000V_pairs.nc + + + diff --git a/src/basic/vx_config/threshold.cc b/src/basic/vx_config/threshold.cc index bfff7a0089..cbf0a3cb7d 100644 --- a/src/basic/vx_config/threshold.cc +++ b/src/basic/vx_config/threshold.cc @@ -103,9 +103,8 @@ for (auto const& x : perc_thresh_info_map) { } // - // MET #2924: For backward compatibility support SCP and CDP - // threshold types - // + // MET #2924 Backward compatible support for SCP and CDP types + // if ( !match && (search_cs.startswith(scp_perc_thresh_type_str.c_str()) || diff --git a/src/basic/vx_util/stat_column_defs.h b/src/basic/vx_util/stat_column_defs.h index 5ae3a64b1a..a7b5427143 100644 --- a/src/basic/vx_util/stat_column_defs.h +++ b/src/basic/vx_util/stat_column_defs.h @@ -197,7 +197,6 @@ static const char * const eclv_columns [] = { "N_PNT", "CL_", "VALUE_" }; -/* MET #2924 Replace this change static const char * const mpr_columns [] = { "TOTAL", "INDEX", "OBS_SID", "OBS_LAT", "OBS_LON", "OBS_LVL", @@ -206,17 +205,6 @@ static const char * const mpr_columns [] = { "OBS_CLIMO_MEAN", "OBS_CLIMO_STDEV", "OBS_CLIMO_CDF", "FCST_CLIMO_MEAN", "FCST_CLIMO_STDEV" }; -*/ - -static const char * const mpr_columns [] = { - "TOTAL", "INDEX", "OBS_SID", - "OBS_LAT", "OBS_LON", "OBS_LVL", - "OBS_ELV", "FCST", "OBS", - "OBS_QC", - "CLIMO_MEAN", "CLIMO_STDEV", "CLIMO_CDF" -}; - -// MET #2924 End replace static const char * const nbrctc_columns [] = { "TOTAL", "FY_OY", "FY_ON", @@ -308,7 +296,6 @@ static const char * const phist_columns [] = { "BIN_" }; -/* MET #2924 Replace this section static const char * const orank_columns [] = { "TOTAL", "INDEX", "OBS_SID", "OBS_LAT", "OBS_LON", "OBS_LVL", @@ -319,19 +306,6 @@ static const char * const orank_columns [] = { "SPREAD_OERR", "SPREAD_PLUS_OERR", "OBS_CLIMO_STDEV", 
"FCST_CLIMO_MEAN", "FCST_CLIMO_STDEV" }; -*/ - -static const char * const orank_columns [] = { - "TOTAL", "INDEX", "OBS_SID", - "OBS_LAT", "OBS_LON", "OBS_LVL", - "OBS_ELV", "OBS", "PIT", - "RANK", "N_ENS_VLD", "N_ENS", - "ENS_", "OBS_QC", "ENS_MEAN", - "CLIMO_MEAN", "SPREAD", "ENS_MEAN_OERR", - "SPREAD_OERR", "SPREAD_PLUS_OERR", "CLIMO_STDEV" -}; - -// MET #2924 End replace static const char * const ssvar_columns [] = { "TOTAL", "N_BIN", "BIN_i", diff --git a/src/libcode/vx_data2d_grib/data2d_grib_utils.cc b/src/libcode/vx_data2d_grib/data2d_grib_utils.cc index f948b26253..cc3e81f82c 100644 --- a/src/libcode/vx_data2d_grib/data2d_grib_utils.cc +++ b/src/libcode/vx_data2d_grib/data2d_grib_utils.cc @@ -669,7 +669,8 @@ void read_pds(const GribRecord &r, int &bms_flag, // // Set the valid and accumulation times based on the - // contents of the time range indicator + // contents of the time range indicator in GRIB1 Table 5: + // https://www.nco.ncep.noaa.gov/pmb/docs/on388/table5.html // switch((int) pds->tri) { @@ -725,6 +726,11 @@ void read_pds(const GribRecord &r, int &bms_flag, accum = 0; break; + case 123: // Average of N uninitialized analyses, starting at the reference time, at intervals of P2. 
+ valid_ut = init_ut; + accum = 0; + break; + case 136: // Climatological Standard Deviation valid_ut = init_ut; accum = 0; diff --git a/src/libcode/vx_stat_out/stat_columns.cc b/src/libcode/vx_stat_out/stat_columns.cc index 330a74d827..51992d7b50 100644 --- a/src/libcode/vx_stat_out/stat_columns.cc +++ b/src/libcode/vx_stat_out/stat_columns.cc @@ -488,10 +488,8 @@ void write_orank_header_row(int hdr_flag, int n_ens, AsciiTable &at, at.set_entry(r, c+17+n_ens, (string)orank_columns[18]); at.set_entry(r, c+18+n_ens, (string)orank_columns[19]); at.set_entry(r, c+19+n_ens, (string)orank_columns[20]); - /* MET #2924 Uncomment this section at.set_entry(r, c+20+n_ens, (string)orank_columns[21]); at.set_entry(r, c+21+n_ens, (string)orank_columns[22]); - */ return; } @@ -2927,7 +2925,7 @@ void write_sl1l2_cols(const SL1L2Info &sl1l2_info, sl1l2_info.oobar); at.set_entry(r, c+6, // MAE - sl1l2_info.mae); + sl1l2_info.smae); return; } @@ -2963,7 +2961,7 @@ void write_sal1l2_cols(const SL1L2Info &sl1l2_info, sl1l2_info.ooabar); at.set_entry(r, c+6, // MAE - sl1l2_info.mae); + sl1l2_info.samae); return; } @@ -4126,13 +4124,11 @@ void write_mpr_cols(const PairDataPoint *pd_ptr, int i, at.set_entry(r, c+12, // Observation Climatological CDF Value pd_ptr->ocdf_na[i]); -/* MET #2924 Uncomment this section at.set_entry(r, c+13, // Forecast Climatological Mean Value pd_ptr->fcmn_na[i]); at.set_entry(r, c+14, // Forecast Climatological Standard Deviation Value pd_ptr->fcsd_na[i]); -*/ return; } @@ -4589,7 +4585,6 @@ void write_orank_cols(const PairDataEnsemble *pd_ptr, int i, at.set_entry(r, c+19+pd_ptr->n_ens, pd_ptr->ocsd_na[i]); -/* MET #2924 Uncomment this section // Forecast climatology mean values at.set_entry(r, c+20+pd_ptr->n_ens, pd_ptr->fcmn_na[i]); @@ -4597,7 +4592,6 @@ void write_orank_cols(const PairDataEnsemble *pd_ptr, int i, // Forecast climatology standard deviation values at.set_entry(r, c+21+pd_ptr->n_ens, pd_ptr->fcsd_na[i]); -*/ return; } diff --git 
a/src/libcode/vx_statistics/compute_stats.cc b/src/libcode/vx_statistics/compute_stats.cc index 40c4e82589..bbc9e0ac1a 100644 --- a/src/libcode/vx_statistics/compute_stats.cc +++ b/src/libcode/vx_statistics/compute_stats.cc @@ -101,7 +101,7 @@ void compute_cntinfo(const SL1L2Info &s, bool aflag, CNTInfo &cnt_info) { cnt_info.me2.v = cnt_info.me.v * cnt_info.me.v; // Compute mean absolute error - cnt_info.mae.v = s.mae; + cnt_info.mae.v = s.smae; // Compute mean squared error cnt_info.mse.v = ffbar + oobar - 2.0*fobar; @@ -1111,7 +1111,7 @@ void compute_sl1l2_mean(const SL1L2Info *sl1l2_info, int n, sl1l2_mean.obar += sl1l2_info[i].obar; sl1l2_mean.ffbar += sl1l2_info[i].ffbar; sl1l2_mean.oobar += sl1l2_info[i].oobar; - sl1l2_mean.mae += sl1l2_info[i].mae; + sl1l2_mean.smae += sl1l2_info[i].smae; } if(sl1l2_info[i].sacount > 0) { @@ -1121,6 +1121,7 @@ void compute_sl1l2_mean(const SL1L2Info *sl1l2_info, int n, sl1l2_mean.oabar += sl1l2_info[i].oabar; sl1l2_mean.ffabar += sl1l2_info[i].ffabar; sl1l2_mean.ooabar += sl1l2_info[i].ooabar; + sl1l2_mean.samae += sl1l2_info[i].samae; } } // end for i @@ -1130,13 +1131,14 @@ void compute_sl1l2_mean(const SL1L2Info *sl1l2_info, int n, sl1l2_mean.obar /= n_sl1l2; sl1l2_mean.ffbar /= n_sl1l2; sl1l2_mean.oobar /= n_sl1l2; - sl1l2_mean.mae /= n_sl1l2; + sl1l2_mean.smae /= n_sl1l2; } if(sl1l2_mean.sacount > 0) { sl1l2_mean.fabar /= n_sal1l2; sl1l2_mean.oabar /= n_sal1l2; sl1l2_mean.ffabar /= n_sal1l2; sl1l2_mean.ooabar /= n_sal1l2; + sl1l2_mean.samae /= n_sal1l2; } return; diff --git a/src/libcode/vx_statistics/met_stats.cc b/src/libcode/vx_statistics/met_stats.cc index 9312867e49..4c679aed83 100644 --- a/src/libcode/vx_statistics/met_stats.cc +++ b/src/libcode/vx_statistics/met_stats.cc @@ -1124,11 +1124,11 @@ SL1L2Info & SL1L2Info::operator+=(const SL1L2Info &c) { s_info.ffbar = (ffbar*scount + c.ffbar*c.scount)/s_info.scount; s_info.oobar = (oobar*scount + c.oobar*c.scount)/s_info.scount; - if(is_bad_data(mae) || 
is_bad_data(c.mae)) { - s_info.mae = bad_data_double; + if(is_bad_data(smae) || is_bad_data(c.smae)) { + s_info.smae = bad_data_double; } else { - s_info.mae = (mae*scount + c.mae*c.scount)/s_info.scount; + s_info.smae = (smae*scount + c.smae*c.scount)/s_info.scount; } } @@ -1141,11 +1141,11 @@ SL1L2Info & SL1L2Info::operator+=(const SL1L2Info &c) { s_info.ffabar = (ffabar*sacount + c.ffabar*c.sacount)/s_info.sacount; s_info.ooabar = (ooabar*sacount + c.ooabar*c.sacount)/s_info.sacount; - if(is_bad_data(mae) || is_bad_data(c.mae)) { - s_info.mae = bad_data_double; + if(is_bad_data(samae) || is_bad_data(c.samae)) { + s_info.samae = bad_data_double; } else { - s_info.mae = (mae*sacount + c.mae*c.sacount)/s_info.sacount; + s_info.samae = (samae*sacount + c.samae*c.sacount)/s_info.sacount; } } @@ -1170,15 +1170,15 @@ void SL1L2Info::zero_out() { // SL1L2 Quantities fbar = obar = 0.0; fobar = ffbar = oobar = 0.0; + smae = 0.0; scount = 0; // SAL1L2 Quantities fabar = oabar = 0.0; foabar = ffabar = ooabar = 0.0; + samae = 0.0; sacount = 0; - mae = 0.0; - return; } @@ -1211,6 +1211,7 @@ void SL1L2Info::assign(const SL1L2Info &c) { fobar = c.fobar; ffbar = c.ffbar; oobar = c.oobar; + smae = c.smae; scount = c.scount; // SAL1L2 Quantities @@ -1219,10 +1220,9 @@ void SL1L2Info::assign(const SL1L2Info &c) { foabar = c.foabar; ffabar = c.ffabar; ooabar = c.ooabar; + samae = c.samae; sacount = c.sacount; - mae = c.mae; - return; } @@ -1272,7 +1272,7 @@ void SL1L2Info::set(const PairDataPoint &pd_all) { fobar += wgt*f*o; ffbar += wgt*f*f; oobar += wgt*o*o; - mae += wgt*fabs(f-o); + smae += wgt*fabs(f-o); scount++; // SAL1L2 sums @@ -1282,6 +1282,7 @@ void SL1L2Info::set(const PairDataPoint &pd_all) { foabar += wgt*(f-fc)*(o-oc); ffabar += wgt*(f-fc)*(f-fc); ooabar += wgt*(o-oc)*(o-oc); + samae += wgt*fabs((f-fc)-(o-oc)); sacount++; } } diff --git a/src/libcode/vx_statistics/met_stats.h b/src/libcode/vx_statistics/met_stats.h index b053266c33..f3bef1a90c 100644 --- 
a/src/libcode/vx_statistics/met_stats.h +++ b/src/libcode/vx_statistics/met_stats.h @@ -224,17 +224,16 @@ class SL1L2Info { double fbar, obar; double fobar; double ffbar, oobar; + double smae; int scount; // SAL1L2 Quantities double fabar, oabar; double foabar; double ffabar, ooabar; + double samae; int sacount; - // Mean absolute error - double mae; - // Compute sums void set(const PairDataPoint &); diff --git a/src/libcode/vx_statistics/pair_data_point.cc b/src/libcode/vx_statistics/pair_data_point.cc index 0d29dda9ed..0e7f6203e7 100644 --- a/src/libcode/vx_statistics/pair_data_point.cc +++ b/src/libcode/vx_statistics/pair_data_point.cc @@ -820,7 +820,6 @@ double get_mpr_column_value(double f, double o, const ClimoPntInfo &cpi, const char *s) { double v; -/* #MET #2924 Replace this section if(strcasecmp(s, "FCST") == 0) v = f; else if(strcasecmp(s, "OBS") == 0) v = o; else if(strcasecmp(s, "FCST_CLIMO_MEAN") == 0) v = cpi.fcmn; @@ -831,16 +830,6 @@ double get_mpr_column_value(double f, double o, const ClimoPntInfo &cpi, v = (is_bad_data(cpi.ocmn) || is_bad_data(cpi.ocsd) ? bad_data_double : normal_cdf(o, cpi.ocmn, cpi.ocsd)); } -*/ - if(strcasecmp(s, "FCST") == 0) v = f; - else if(strcasecmp(s, "OBS") == 0) v = o; - else if(strcasecmp(s, "CLIMO_MEAN") == 0) v = cpi.ocmn; - else if(strcasecmp(s, "CLIMO_STDEV") == 0) v = cpi.ocsd; - else if(strcasecmp(s, "CLIMO_CDF") == 0) { - v = (is_bad_data(cpi.ocmn) || is_bad_data(cpi.ocsd) ? 
- bad_data_double : normal_cdf(o, cpi.ocmn, cpi.ocsd)); - } -// MET #2924 End replace else { mlog << Error << "\nget_mpr_column_value() -> " << "unsupported matched pair column name requested in \"" diff --git a/src/libcode/vx_statistics/read_climo.cc b/src/libcode/vx_statistics/read_climo.cc index 8e43749a8d..f5a0f2db71 100644 --- a/src/libcode/vx_statistics/read_climo.cc +++ b/src/libcode/vx_statistics/read_climo.cc @@ -29,7 +29,7 @@ using namespace std; static void read_climo_file( const char *, GrdFileType, Dictionary *, unixtime, int, int, const Grid &, const RegridInfo &, - DataPlaneArray &dpa); + DataPlaneArray &dpa, const char *); static DataPlaneArray climo_time_interp( const DataPlaneArray &, int, unixtime, InterpMthd); @@ -40,7 +40,8 @@ static DataPlane climo_hms_interp( //////////////////////////////////////////////////////////////////////// DataPlane read_climo_data_plane(Dictionary *dict, int i_vx, - unixtime vld_ut, const Grid &vx_grid) { + unixtime vld_ut, const Grid &vx_grid, + const char *desc) { DataPlane dp; DataPlaneArray dpa; @@ -48,13 +49,13 @@ DataPlane read_climo_data_plane(Dictionary *dict, int i_vx, if(!dict) return dp; // Read array of climatology fields - dpa = read_climo_data_plane_array(dict, i_vx, vld_ut, vx_grid); + dpa = read_climo_data_plane_array(dict, i_vx, vld_ut, vx_grid, desc); // Check for multiple matches if(dpa.n_planes() > 1) { mlog << Warning << "\nread_climo_data_plane() -> " - << "Found " << dpa.n_planes() << " matching climatology " - << "fields. Using the first match found.\n\n"; + << "Found " << dpa.n_planes() << " matching " << desc + << " fields. 
Using the first match found.\n\n"; } // Store the first match found @@ -67,7 +68,8 @@ DataPlane read_climo_data_plane(Dictionary *dict, int i_vx, DataPlaneArray read_climo_data_plane_array(Dictionary *dict, int i_vx, unixtime vld_ut, - const Grid &vx_grid) { + const Grid &vx_grid, + const char *desc) { DataPlaneArray dpa; StringArray climo_files; RegridInfo regrid_info; @@ -100,7 +102,7 @@ DataPlaneArray read_climo_data_plane_array(Dictionary *dict, int i_vx, // Range check day_interval if(!is_bad_data(day_interval) && day_interval < 1) { mlog << Error << "\nread_climo_data_plane_array() -> " - << "The \"" << conf_key_day_interval << "\" entry (" + << "The " << conf_key_day_interval << " entry (" << day_interval << ") can be set to " << na_str << " or a value of at least 1.\n\n"; exit(1); @@ -113,7 +115,7 @@ DataPlaneArray read_climo_data_plane_array(Dictionary *dict, int i_vx, if(!is_bad_data(hour_interval) && (hour_interval <= 0 || hour_interval > 24)) { mlog << Error << "\nread_climo_data_plane_array() -> " - << "The \"" << conf_key_hour_interval << "\" entry (" + << "The " << conf_key_hour_interval << " entry (" << hour_interval << ") can be set to " << na_str << " or a value between 0 and 24.\n\n"; exit(1); @@ -131,14 +133,15 @@ DataPlaneArray read_climo_data_plane_array(Dictionary *dict, int i_vx, // Search the files for the requested records for(i=0; i " - << "Trouble reading climatology file \"" - << climo_file << "\"\n\n"; + << "Trouble reading climatology file " + << climo_file << "\n\n"; return; } @@ -191,21 +194,21 @@ void read_climo_file(const char *climo_file, GrdFileType ctype, // Check the day time step if(!is_bad_data(day_ts) && abs(day_diff_sec) >= day_ts) { - mlog << Debug(3) << "Skipping " << clm_ut_cs << " \"" << info->magic_str() - << "\" climatology field with " << day_diff_sec / sec_per_day + mlog << Debug(3) << "Skipping " << clm_ut_cs << " " << info->magic_str() + << " climatology field with " << day_diff_sec / sec_per_day << " day offset 
(" << conf_key_day_interval << " = " - << day_ts / sec_per_day << ") from file \"" - << climo_file << "\".\n"; + << day_ts / sec_per_day << ") from file " + << climo_file << ".\n"; continue; } // Check the hour time step if(!is_bad_data(hour_ts) && abs(hms_diff_sec) >= hour_ts) { - mlog << Debug(3) << "Skipping " << clm_ut_cs << " \"" << info->magic_str() - << "\" climatology field with " << (double) hms_diff_sec / sec_per_hour + mlog << Debug(3) << "Skipping " << clm_ut_cs << " " << info->magic_str() + << " climatology field with " << (double) hms_diff_sec / sec_per_hour << " hour offset (" << conf_key_hour_interval << " = " - << hour_ts / sec_per_hour << ") from file \"" - << climo_file << "\".\n"; + << hour_ts / sec_per_hour << ") from file " + << climo_file << ".\n"; continue; } @@ -213,17 +216,17 @@ void read_climo_file(const char *climo_file, GrdFileType ctype, unixtime clm_vld_ut = vld_ut + day_diff_sec + hms_diff_sec; // Print log message for matching record - mlog << Debug(3) << "Storing " << clm_ut_cs << " \"" << info->magic_str() - << "\" climatology field with " << day_diff_sec / sec_per_day + mlog << Debug(3) << "Storing " << clm_ut_cs << " " << info->magic_str() + << " climatology field with " << day_diff_sec / sec_per_day << " day, " << (double) hms_diff_sec / sec_per_hour << " hour offset as time " - << unix_to_yyyymmdd_hhmmss(clm_vld_ut) << " from file \"" - << climo_file << "\".\n"; + << unix_to_yyyymmdd_hhmmss(clm_vld_ut) << " from file " + << climo_file << ".\n"; // Regrid, if needed if(!(mtddf->grid() == vx_grid)) { - mlog << Debug(2) << "Regridding " << clm_ut_cs << " \"" - << info->magic_str() - << "\" climatology field to the verification grid.\n"; + mlog << Debug(2) << "Regridding " << clm_ut_cs << " " + << desc << " field " << info->magic_str() + << " to the verification grid.\n"; dp = met_regrid(clm_dpa[i], mtddf->grid(), vx_grid, regrid_info); } @@ -343,8 +346,8 @@ DataPlaneArray climo_time_interp(const DataPlaneArray &dpa, int day_ts, 
// This should only occur when day_interval > 1. if(day_ts <= 3600*24) { mlog << Error << "\nclimo_time_interp() -> " - << "Expecting 1 or 2 climatology fields when \"" - << conf_key_day_interval << "\" <= 1 but found " + << "Expecting 1 or 2 climatology fields when " + << conf_key_day_interval << " <= 1 but found " << it->second.n() << "\n\n"; exit(1); } diff --git a/src/libcode/vx_statistics/read_climo.h b/src/libcode/vx_statistics/read_climo.h index a017df6388..362efa3fce 100644 --- a/src/libcode/vx_statistics/read_climo.h +++ b/src/libcode/vx_statistics/read_climo.h @@ -19,10 +19,12 @@ //////////////////////////////////////////////////////////////////////// extern DataPlane read_climo_data_plane(Dictionary *, int, - unixtime, const Grid &); + unixtime, const Grid &, + const char *); extern DataPlaneArray read_climo_data_plane_array(Dictionary *, int, - unixtime, const Grid &); + unixtime, const Grid &, + const char *); //////////////////////////////////////////////////////////////////////// diff --git a/src/tools/core/ensemble_stat/ensemble_stat.cc b/src/tools/core/ensemble_stat/ensemble_stat.cc index 3dedd76ef5..826b8eaf7a 100644 --- a/src/tools/core/ensemble_stat/ensemble_stat.cc +++ b/src/tools/core/ensemble_stat/ensemble_stat.cc @@ -784,18 +784,22 @@ void process_point_vx() { // Read forecast climatology data fcmn_dpa = read_climo_data_plane_array( conf_info.conf.lookup_array(conf_key_fcst_climo_mean_field, false), - i, ens_valid_ut, grid); + i, ens_valid_ut, grid, + "forecast climatology mean"); fcsd_dpa = read_climo_data_plane_array( - conf_info.conf.lookup_array(conf_key_fcst_climo_stdev_field, false), - i, ens_valid_ut, grid); + conf_info.conf.lookup_array(conf_key_fcst_climo_stdev_field, false), + i, ens_valid_ut, grid, + "forecast climatology standard deviation"); // Read observation climatology data ocmn_dpa = read_climo_data_plane_array( conf_info.conf.lookup_array(conf_key_obs_climo_mean_field, false), - i, ens_valid_ut, grid); + i, ens_valid_ut, 
grid, + "observation climatology mean"); ocsd_dpa = read_climo_data_plane_array( - conf_info.conf.lookup_array(conf_key_obs_climo_stdev_field, false), - i, ens_valid_ut, grid); + conf_info.conf.lookup_array(conf_key_obs_climo_stdev_field, false), + i, ens_valid_ut, grid, + "observation climatology standard deviation"); mlog << Debug(3) << "For " << conf_info.vx_opt[i].vx_pd.fcst_info->magic_str() << ", found " @@ -1424,18 +1428,22 @@ void process_grid_vx() { // Read forecast climatology data fcmn_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_fcst_climo_mean_field, false), - i, ens_valid_ut, grid); + i, ens_valid_ut, grid, + "forecast climatology mean"); fcsd_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_fcst_climo_stdev_field, false), - i, ens_valid_ut, grid); + i, ens_valid_ut, grid, + "forecast climatology standard deviation"); // Read observation climatology data ocmn_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_obs_climo_mean_field, false), - i, ens_valid_ut, grid); + i, ens_valid_ut, grid, + "observation climatology mean"); ocsd_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_obs_climo_stdev_field, false), - i, ens_valid_ut, grid); + i, ens_valid_ut, grid, + "observation climatology standard deviation"); mlog << Debug(3) << "For " << conf_info.vx_opt[i].vx_pd.fcst_info->magic_str() << ", found " diff --git a/src/tools/core/grid_stat/grid_stat.cc b/src/tools/core/grid_stat/grid_stat.cc index fcd0bc33a9..33f3e14dfc 100644 --- a/src/tools/core/grid_stat/grid_stat.cc +++ b/src/tools/core/grid_stat/grid_stat.cc @@ -196,8 +196,8 @@ static void clean_up(); static void usage(); static void set_outdir(const StringArray &); static void set_compress(const StringArray &); -static bool read_data_plane(VarInfo* info, DataPlane& dp, Met2dDataFile* mtddf, - const ConcatString &filename); +static bool read_data_plane(VarInfo *info, DataPlane &dp, Met2dDataFile *mtddf, + const ConcatString &filename, 
const char *desc); #ifdef WITH_UGRID static void set_ugrid_config(const StringArray &); #endif @@ -725,7 +725,8 @@ void process_scores() { // Read the gridded data from the input forecast file if(!read_data_plane(conf_info.vx_opt[i].fcst_info, - fcst_dp, fcst_mtddf, fcst_file)) continue; + fcst_dp, fcst_mtddf, fcst_file, + "forecast")) continue; mlog << Debug(3) << "Reading forecast data for " @@ -740,7 +741,8 @@ void process_scores() { // Read the gridded data from the input observation file if(!read_data_plane(conf_info.vx_opt[i].obs_info, - obs_dp, obs_mtddf, obs_file)) continue; + obs_dp, obs_mtddf, obs_file, + "observation")) continue; mlog << Debug(3) << "Reading observation data for " @@ -790,18 +792,22 @@ void process_scores() { // Read forecast climatology data fcmn_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_fcst_climo_mean_field, false), - i, fcst_dp.valid(), grid); + i, fcst_dp.valid(), grid, + "forecast climatology mean"); fcsd_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_fcst_climo_stdev_field, false), - i, fcst_dp.valid(), grid); + i, fcst_dp.valid(), grid, + "forecast climatology standard deviation"); // Read observation climatology data ocmn_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_obs_climo_mean_field, false), - i, fcst_dp.valid(), grid); + i, fcst_dp.valid(), grid, + "observation climatology mean"); ocsd_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_obs_climo_stdev_field, false), - i, fcst_dp.valid(), grid); + i, fcst_dp.valid(), grid, + "observation climatology standard deviation"); mlog << Debug(3) << "For " << conf_info.vx_opt[i].fcst_info->magic_str() << ", found " @@ -1048,27 +1054,33 @@ void process_scores() { // Read forecast data for UGRD if(!read_data_plane(conf_info.vx_opt[ui].fcst_info, - fu_dp, fcst_mtddf, fcst_file)) continue; + fu_dp, fcst_mtddf, fcst_file, + "U-wind forecast")) continue; // Read observation data for UGRD 
if(!read_data_plane(conf_info.vx_opt[ui].obs_info, - ou_dp, obs_mtddf, obs_file)) continue; + ou_dp, obs_mtddf, obs_file, + "U-wind observation")) continue; // Read the forecast climatology data for UGRD fcmnu_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_fcst_climo_mean_field, false), - ui, fcst_dp.valid(), grid); + ui, fcst_dp.valid(), grid, + "forecast U-wind climatology mean"); fcsdu_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_fcst_climo_stdev_field, false), - ui, fcst_dp.valid(), grid); + ui, fcst_dp.valid(), grid, + "forecast U-wind climatology standard deviation"); // Read the observation climatology data for UGRD ocmnu_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_obs_climo_mean_field, false), - ui, fcst_dp.valid(), grid); + ui, fcst_dp.valid(), grid, + "observation U-wind climatology mean"); ocsdu_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_obs_climo_stdev_field, false), - ui, fcst_dp.valid(), grid); + ui, fcst_dp.valid(), grid, + "observation U-wind climatology standard deviation"); // If requested in the config file, smooth the forecast // and climatology U-wind fields @@ -1178,7 +1190,6 @@ void process_scores() { i, mthd, pnts, conf_info.vx_opt[i].interp_info.field); } - /* MET #2924 Replace this section if(conf_info.vx_opt[i].nc_info.do_climo && !fcmn_dp.is_empty()) { write_nc((string)"FCST_CLIMO_MEAN", fcmn_dp, @@ -1209,26 +1220,6 @@ void process_scores() { i, mthd, pnts, conf_info.vx_opt[i].interp_info.field); } - */ - if(conf_info.vx_opt[i].nc_info.do_climo && - !ocmn_dp.is_empty()) { - write_nc((string)"CLIMO_MEAN", ocmn_dp, - i, mthd, pnts, - conf_info.vx_opt[i].interp_info.field); - } - if(conf_info.vx_opt[i].nc_info.do_climo && - !ocsd_dp.is_empty()) { - write_nc((string)"CLIMO_STDEV", fcsd_dp, - i, mthd, pnts, - conf_info.vx_opt[i].interp_info.field); - } - if(conf_info.vx_opt[i].nc_info.do_climo && - !ocmn_dp.is_empty() && !ocsd_dp.is_empty()) { - 
write_nc((string)"CLIMO_CDF", normal_cdf(obs_dp, ocmn_dp, ocsd_dp), - i, mthd, pnts, - conf_info.vx_opt[i].interp_info.field); - } - // MET #2924 End replace // Write out the fields of requested climo distribution percentile threshold values if(conf_info.vx_opt[i].nc_info.do_climo_cdp && @@ -1249,7 +1240,6 @@ void process_scores() { // Process all CDP thresholds except 0 and 100 for(vector::iterator it = simp.begin(); it != simp.end(); it++) { - /* MET #2924 Replace this section if(it->ptype() == perc_thresh_fcst_climo_dist && !is_eq(it->pvalue(), 0.0) && !is_eq(it->pvalue(), 100.0)) { @@ -1266,16 +1256,6 @@ void process_scores() { i, mthd, pnts, conf_info.vx_opt[i].interp_info.field); } - */ - if(it->ptype() == perc_thresh_obs_climo_dist && - !is_eq(it->pvalue(), 0.0) && - !is_eq(it->pvalue(), 100.0)) { - cs << cs_erase << "CLIMO_CDP" << nint(it->pvalue()); - write_nc(cs, normal_cdf_inv(it->pvalue()/100.0, ocmn_dp, ocsd_dp), - i, mthd, pnts, - conf_info.vx_opt[i].interp_info.field); - } - // MET #2924 End replace } // end for it } @@ -1912,19 +1892,23 @@ void process_scores() { // Read forecast data for UGRD if(!read_data_plane(conf_info.vx_opt[ui].fcst_info, - fu_dp, fcst_mtddf, fcst_file)) continue; + fu_dp, fcst_mtddf, fcst_file, + "U-wind forecast")) continue; // Read observation data for UGRD if(!read_data_plane(conf_info.vx_opt[ui].obs_info, - ou_dp, obs_mtddf, obs_file)) continue; + ou_dp, obs_mtddf, obs_file, + "U-wind observation")) continue; // Read climatology data for UGRD fcmnu_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_fcst_climo_mean_field, false), - ui, fcst_dp.valid(), grid); + ui, fcst_dp.valid(), grid, + "forecast U-wind climatology mean"); ocmnu_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_obs_climo_mean_field, false), - ui, fcst_dp.valid(), grid); + ui, fcst_dp.valid(), grid, + "observation U-wind climatology mean"); // Apply Fourier decomposition to the U-wind fields fu_dp_smooth = fu_dp; @@ -2037,7 
+2021,6 @@ void process_scores() { i, shc.get_interp_mthd(), bad_data_int, FieldType::Both); } - /* MET #2924 Replace this change if(conf_info.vx_opt[i].nc_info.do_climo && !fcmn_dp_smooth.is_empty()) { write_nc((string)"FCST_CLIMO_MEAN", fcmn_dp_smooth, @@ -2050,14 +2033,6 @@ void process_scores() { i, shc.get_interp_mthd(), bad_data_int, FieldType::Both); } - */ - if(conf_info.vx_opt[i].nc_info.do_climo && - !ocmn_dp_smooth.is_empty()) { - write_nc((string)"CLIMO_MEAN", ocmn_dp_smooth, - i, shc.get_interp_mthd(), - bad_data_int, FieldType::Both); - } - // MET #2924 End replace } // end if } // end for j @@ -2838,7 +2813,6 @@ void write_nc(const ConcatString &field_name, const DataPlane &dp, level_att = shc.get_fcst_lev(); units_att = conf_info.vx_opt[i_vx].fcst_info->units_attr(); } - /* MET #2924 Replace this section else if(field_name == "OBS_CLIMO_MEAN") { var_name << cs_erase << field_name << "_" << obs_name << var_suffix << "_" << mask_str; @@ -2895,52 +2869,6 @@ void write_nc(const ConcatString &field_name, const DataPlane &dp, level_att = shc.get_obs_lev(); units_att = conf_info.vx_opt[i_vx].obs_info->units_attr(); } - */ - else if(field_name == "CLIMO_MEAN") { - var_name << cs_erase << field_name << "_" - << obs_name << var_suffix << "_" << mask_str; - - // Append interpolation string for Fourier decomposition - if(interp_str.nonempty()) { - if(interp_str.startswith("_WV")) var_name << interp_str; - } - long_att << cs_erase - << "Climatology mean for " - << obs_long_name; - level_att = shc.get_obs_lev(); - units_att = conf_info.vx_opt[i_vx].obs_info->units_attr(); - } - else if(field_name == "CLIMO_STDEV") { - var_name << cs_erase << field_name << "_" - << obs_name << var_suffix << "_" << mask_str; - long_att << cs_erase - << "Climatology standard deviation for " - << obs_long_name; - level_att = shc.get_obs_lev(); - units_att = conf_info.vx_opt[i_vx].obs_info->units_attr(); - } - else if(field_name == "CLIMO_CDF") { - var_name << cs_erase << field_name 
<< "_" - << obs_name << var_suffix << "_" << mask_str; - long_att << cs_erase - << "Climatology cumulative distribution function for " - << obs_long_name; - level_att = shc.get_obs_lev(); - units_att = conf_info.vx_opt[i_vx].obs_info->units_attr(); - } - else if(field_name.startswith("CLIMO_CDP")) { - var_name << cs_erase - << field_name << "_" - << conf_info.vx_opt[i_vx].obs_info->name_attr() << "_" - << conf_info.vx_opt[i_vx].obs_info->level_attr() - << var_suffix << "_" << mask_str; - long_att << cs_erase - << "Climatology distribution percentile thresholds for " - << obs_long_name; - level_att = shc.get_obs_lev(); - units_att = conf_info.vx_opt[i_vx].obs_info->units_attr(); - } - // MET #2924 end replace else if(check_reg_exp("FCST_XGRAD_", field_name.c_str()) || check_reg_exp("FCST_YGRAD_", field_name.c_str())) { var_name << cs_erase << field_name << "_" @@ -3394,15 +3322,15 @@ void set_compress(const StringArray & a) { //////////////////////////////////////////////////////////////////////// -bool read_data_plane(VarInfo* info, DataPlane& dp, Met2dDataFile* mtddf, - const ConcatString &filename) { +bool read_data_plane(VarInfo *info, DataPlane &dp, Met2dDataFile *mtddf, + const ConcatString &filename, const char *desc) { bool status = mtddf->data_plane(*info, dp); if(!status) { mlog << Warning << "\nread_data_plane() -> " << info->magic_str() - << " not found in file: " << filename + << " not found in " << desc << " file: " << filename << "\n\n"; return false; } @@ -3410,7 +3338,7 @@ bool read_data_plane(VarInfo* info, DataPlane& dp, Met2dDataFile* mtddf, // Regrid, if necessary if(!(mtddf->grid() == grid)) { mlog << Debug(1) - << "Regridding field " + << "Regridding " << desc << " field " << info->magic_str() << " to the verification grid.\n"; dp = met_regrid(dp, mtddf->grid(), grid, info->regrid()); diff --git a/src/tools/core/point_stat/point_stat.cc b/src/tools/core/point_stat/point_stat.cc index 5532ea7e1d..bda41ddf26 100644 --- 
a/src/tools/core/point_stat/point_stat.cc +++ b/src/tools/core/point_stat/point_stat.cc @@ -668,18 +668,22 @@ void process_fcst_climo_files() { // Read forecast climatology data fcmn_dpa = read_climo_data_plane_array( conf_info.conf.lookup_array(conf_key_fcst_climo_mean_field, false), - i, fcst_dpa[0].valid(), grid); + i, fcst_dpa[0].valid(), grid, + "forecast climatology mean"); fcsd_dpa = read_climo_data_plane_array( conf_info.conf.lookup_array(conf_key_fcst_climo_stdev_field, false), - i, fcst_dpa[0].valid(), grid); + i, fcst_dpa[0].valid(), grid, + "forecast climatology standard deviation"); // Read observation climatology data ocmn_dpa = read_climo_data_plane_array( conf_info.conf.lookup_array(conf_key_obs_climo_mean_field, false), - i, fcst_dpa[0].valid(), grid); + i, fcst_dpa[0].valid(), grid, + "observation climatology mean"); ocsd_dpa = read_climo_data_plane_array( conf_info.conf.lookup_array(conf_key_obs_climo_stdev_field, false), - i, fcst_dpa[0].valid(), grid); + i, fcst_dpa[0].valid(), grid, + "observation climatology standard deviation"); // Store data for the current verification task conf_info.vx_opt[i].vx_pd.set_fcst_dpa(fcst_dpa); diff --git a/src/tools/core/series_analysis/series_analysis.cc b/src/tools/core/series_analysis/series_analysis.cc index 9373c0e040..a2e8cdf5c1 100644 --- a/src/tools/core/series_analysis/series_analysis.cc +++ b/src/tools/core/series_analysis/series_analysis.cc @@ -92,17 +92,18 @@ static void do_cnt (int, const PairDataPoint *); static void do_sl1l2 (int, const PairDataPoint *); static void do_pct (int, const PairDataPoint *); -static void store_stat_fho (int, const ConcatString &, const CTSInfo &); -static void store_stat_ctc (int, const ConcatString &, const CTSInfo &); -static void store_stat_cts (int, const ConcatString &, const CTSInfo &); -static void store_stat_mctc (int, const ConcatString &, const MCTSInfo &); -static void store_stat_mcts (int, const ConcatString &, const MCTSInfo &); -static void 
store_stat_cnt (int, const ConcatString &, const CNTInfo &); -static void store_stat_sl1l2(int, const ConcatString &, const SL1L2Info &); -static void store_stat_pct (int, const ConcatString &, const PCTInfo &); -static void store_stat_pstd (int, const ConcatString &, const PCTInfo &); -static void store_stat_pjc (int, const ConcatString &, const PCTInfo &); -static void store_stat_prc (int, const ConcatString &, const PCTInfo &); +static void store_stat_fho (int, const ConcatString &, const CTSInfo &); +static void store_stat_ctc (int, const ConcatString &, const CTSInfo &); +static void store_stat_cts (int, const ConcatString &, const CTSInfo &); +static void store_stat_mctc (int, const ConcatString &, const MCTSInfo &); +static void store_stat_mcts (int, const ConcatString &, const MCTSInfo &); +static void store_stat_cnt (int, const ConcatString &, const CNTInfo &); +static void store_stat_sl1l2 (int, const ConcatString &, const SL1L2Info &); +static void store_stat_sal1l2(int, const ConcatString &, const SL1L2Info &); +static void store_stat_pct (int, const ConcatString &, const PCTInfo &); +static void store_stat_pstd (int, const ConcatString &, const PCTInfo &); +static void store_stat_pjc (int, const ConcatString &, const PCTInfo &); +static void store_stat_prc (int, const ConcatString &, const PCTInfo &); static void setup_nc_file(const VarInfo *, const VarInfo *); static void add_nc_var(const ConcatString &, const ConcatString &, @@ -737,19 +738,25 @@ void process_scores() { << min(i_point + conf_info.block_size, nxy) << ".\n"; } - // Read climatology data for the current series entry + // Read forecast climatology data fcmn_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_fcst_climo_mean_field, false), - i_fcst, fcst_dp.valid(), grid); + i_fcst, fcst_dp.valid(), grid, + "forecast climatology mean"); fcsd_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_fcst_climo_stdev_field, false), - i_fcst, fcst_dp.valid(), grid); + 
i_fcst, fcst_dp.valid(), grid, + "forecast climatology standard deviation"); + + // Read observation climatology data ocmn_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_obs_climo_mean_field, false), - i_fcst, fcst_dp.valid(), grid); + i_fcst, fcst_dp.valid(), grid, + "observation climatology mean"); ocsd_dp = read_climo_data_plane( conf_info.conf.lookup_array(conf_key_obs_climo_stdev_field, false), - i_fcst, fcst_dp.valid(), grid); + i_fcst, fcst_dp.valid(), grid, + "observation climatology standard deviation"); bool fcmn_flag = (fcmn_dp.nx() == fcst_dp.nx() && fcmn_dp.ny() == fcst_dp.ny()); @@ -860,8 +867,8 @@ void process_scores() { // Compute partial sums if(!conf_info.fcst_info[0]->is_prob() && - (conf_info.output_stats[STATLineType::sl1l2].n() > 0 || - conf_info.output_stats[STATLineType::sal1l2].n() > 0)) { + (conf_info.output_stats[STATLineType::sl1l2].n() + + conf_info.output_stats[STATLineType::sal1l2].n()) > 0) { do_sl1l2(i_point+i, &pd_ptr[i]); } @@ -1128,6 +1135,11 @@ void do_sl1l2(int n, const PairDataPoint *pd_ptr) { for(j=0; j " << "unsupported column name requested \"" << c @@ -1822,6 +1828,62 @@ void store_stat_sl1l2(int n, const ConcatString &col, //////////////////////////////////////////////////////////////////////// +void store_stat_sal1l2(int n, const ConcatString &col, + const SL1L2Info &s_info) { + double v; + + // Set the column name to all upper case + ConcatString c = to_upper(col); + + // Get the column value + if(c == "TOTAL") { v = (double) s_info.sacount; } + else if(c == "FABAR") { v = s_info.fabar; } + else if(c == "OABAR") { v = s_info.oabar; } + else if(c == "FOABAR") { v = s_info.foabar; } + else if(c == "FFABAR") { v = s_info.ffabar; } + else if(c == "OOABAR") { v = s_info.ooabar; } + else if(c == "MAE") { v = s_info.samae; } + else { + mlog << Error << "\nstore_stat_sal1l2() -> " + << "unsupported column name requested \"" << c + << "\"\n\n"; + exit(1); + } + + // Construct the NetCDF variable name + 
ConcatString var_name("series_sal1l2_"); + var_name << c; + + // Append threshold information, if supplied + if(s_info.fthresh.get_type() != thresh_na || + s_info.othresh.get_type() != thresh_na) { + var_name << "_fcst" << s_info.fthresh.get_abbr_str() + << "_" << setlogic_to_abbr(conf_info.cnt_logic) + << "_obs" << s_info.othresh.get_abbr_str(); + } + + // Add map for this variable name + if(stat_data.count(var_name) == 0) { + + // Build key + ConcatString lty_stat("SAL1L2_"); + lty_stat << c; + + // Add new map entry + add_nc_var(var_name, c, stat_long_name[lty_stat], + s_info.fthresh.get_str(), + s_info.othresh.get_str(), + bad_data_double); + } + + // Store the statistic value + put_nc_val(n, var_name, (float) v); + + return; +} + +//////////////////////////////////////////////////////////////////////// + void store_stat_pct(int n, const ConcatString &col, const PCTInfo &pct_info) { int i = 0; diff --git a/src/tools/core/stat_analysis/aggr_stat_line.cc b/src/tools/core/stat_analysis/aggr_stat_line.cc index 75a9f60417..441cbe5e07 100644 --- a/src/tools/core/stat_analysis/aggr_stat_line.cc +++ b/src/tools/core/stat_analysis/aggr_stat_line.cc @@ -3999,12 +3999,6 @@ void mpr_to_cnt(STATAnalysisJob &job, const AggrMPRInfo &info, void mpr_to_psum(STATAnalysisJob &job, const AggrMPRInfo &info, int i_thresh, SL1L2Info &s_info) { - int i; - int scount, sacount; - double f, o, fc, oc; - double f_sum, o_sum, ff_sum, oo_sum, fo_sum; - double fa_sum, oa_sum, ffa_sum, ooa_sum, foa_sum; - double abs_err_sum; PairDataPoint pd_thr; // @@ -4034,31 +4028,41 @@ void mpr_to_psum(STATAnalysisJob &job, const AggrMPRInfo &info, // // Initialize counts // - scount = sacount = 0; - f_sum = o_sum = ff_sum = oo_sum = fo_sum = 0.0; - fa_sum = oa_sum = ffa_sum = ooa_sum = foa_sum = 0.0; - abs_err_sum = 0.0; + int scount = 0; + int sacount = 0; + double f_sum = 0.0; + double o_sum = 0.0; + double ff_sum = 0.0; + double oo_sum = 0.0; + double fo_sum = 0.0; + double smae_sum = 0.0; + double 
fa_sum = 0.0; + double oa_sum = 0.0; + double ffa_sum = 0.0; + double ooa_sum = 0.0; + double foa_sum = 0.0; + double samae_sum = 0.0; // // Update the partial sums // - for(i=0; in_use); // N_USE @@ -711,10 +709,8 @@ void write_orank_row_rad(AsciiTable &at, int row, int i_obs) { at.set_entry(row, col++, bad_data_double); // SPREAD_OERR at.set_entry(row, col++, bad_data_double); // SPREAD_PLUS_OERR at.set_entry(row, col++, bad_data_double); // OBS_CLIMO_STDEV - /* MET #2924 Uncomment this section at.set_entry(row, col++, bad_data_double); // FCST_CLIMO_MEAN at.set_entry(row, col++, bad_data_double); // FCST_CLIMO_STDEV - */ // Write extra columns at.set_entry(row, col++, d->n_use); // N_USE