diff --git a/MATScommon b/MATScommon
index c788590257..f8879eb7b6 160000
--- a/MATScommon
+++ b/MATScommon
@@ -1 +1 @@
-Subproject commit c7885902576929b6799aecc04f44bd47f62ac4d5
+Subproject commit f8879eb7b694cb06875ed677103860223876cd3c
diff --git a/METexpress b/METexpress
index 142da60b5a..a48ff25570 160000
--- a/METexpress
+++ b/METexpress
@@ -1 +1 @@
-Subproject commit 142da60b5ae471ceab980ede7d2cf255067b8d09
+Subproject commit a48ff2557011d9eebee2fdaa74c7cea91bba31cd
diff --git a/apps/upperair/server/dataFunctions/data_contour.js b/apps/upperair/server/dataFunctions/data_contour.js
index 6b10881d02..257c68d5cb 100644
--- a/apps/upperair/server/dataFunctions/data_contour.js
+++ b/apps/upperair/server/dataFunctions/data_contour.js
@@ -151,10 +151,12 @@ dataContour = function (plotParams, plotFunction) {
     } else {
       queryTableClause = `${queryTableClause}, ${databaseRef.sumsDB}.GFS_Areg${region} as m1`;
     }
+  } else if (database.includes("Vapor")) {
+    queryTableClause = queryTableClause.replace("_sums", "_vapor_sums");
   }

   let phaseClause = "";
-  if (database === "AMDAR") {
+  if (database.includes("AMDAR")) {
     const phaseStr = curve.phase;
     const phaseOptionsMap = matsCollections.phase.findOne(
       { name: "phase" },
@@ -208,7 +210,7 @@ dataContour = function (plotParams, plotFunction) {
   statement = statement.replace("{{phaseClause}}", phaseClause);
   statement = statement.replace("{{dateClause}}", dateClause);
   statement = statement.split("{{dateString}}").join(dateString);
-  if (database === "AMDAR") {
+  if (database.includes("AMDAR")) {
     // AMDAR tables have all partial sums so we can get them all from the main table
     statement = statement.split("m1").join("m0");
   }
diff --git a/apps/upperair/server/dataFunctions/data_contour_diff.js b/apps/upperair/server/dataFunctions/data_contour_diff.js
index e0bf4030b5..82ad4ef0a1 100644
--- a/apps/upperair/server/dataFunctions/data_contour_diff.js
+++ b/apps/upperair/server/dataFunctions/data_contour_diff.js
@@ -159,10 +159,12 @@ dataContourDiff = function (plotParams, plotFunction) {
     } else {
       queryTableClause = `${queryTableClause}, ${databaseRef.sumsDB}.GFS_Areg${region} as m1`;
     }
+  } else if (database.includes("Vapor")) {
+    queryTableClause = queryTableClause.replace("_sums", "_vapor_sums");
   }

   let phaseClause = "";
-  if (database === "AMDAR") {
+  if (database.includes("AMDAR")) {
     const phaseStr = curve.phase;
     const phaseOptionsMap = matsCollections.phase.findOne(
       { name: "phase" },
@@ -218,7 +220,7 @@ dataContourDiff = function (plotParams, plotFunction) {
   statement = statement.replace("{{phaseClause}}", phaseClause);
   statement = statement.replace("{{dateClause}}", dateClause);
   statement = statement.split("{{dateString}}").join(dateString);
-  if (database === "AMDAR") {
+  if (database.includes("AMDAR")) {
     // AMDAR tables have all partial sums so we can get them all from the main table
     statement = statement.split("m1").join("m0");
   }
diff --git a/apps/upperair/server/dataFunctions/data_dailymodelcycle.js b/apps/upperair/server/dataFunctions/data_dailymodelcycle.js
index 50d1f7af6f..ca2f80dbe2 100644
--- a/apps/upperair/server/dataFunctions/data_dailymodelcycle.js
+++ b/apps/upperair/server/dataFunctions/data_dailymodelcycle.js
@@ -139,9 +139,11 @@ dataDailyModelCycle = function (plotParams, plotFunction) {
       } else {
         queryTableClause = `${queryTableClause}, ${databaseRef.sumsDB}.GFS_Areg${region} as m1`;
       }
+    } else if (database.includes("Vapor")) {
+      queryTableClause = queryTableClause.replace("_sums", "_vapor_sums");
     }
   } else {
-    if (database === "AMDAR") {
+    if (database.includes("AMDAR")) {
       throw new Error(
         "Single/multi-station plotting is not supported by the AMDAR database."
       );
@@ -195,7 +197,7 @@ dataDailyModelCycle = function (plotParams, plotFunction) {
     `group_concat(unix_timestamp(m0.date)+3600*m0.hour, ';', ${levelVar}, ';', ${variable[0]}, ';', ${NClause}, ';', ${variable[2]}, ';', ${variable[3]}, ';', ${variable[4]}, ';', ${variable[5]} order by unix_timestamp(m0.date)+3600*m0.hour, ${levelVar}) as sub_data, count(${variable[0]}) as N0`;

   let phaseClause = "";
-  if (database === "AMDAR") {
+  if (database.includes("AMDAR")) {
     const phaseStr = curve.phase;
     const phaseOptionsMap = matsCollections.phase.findOne(
       { name: "phase" },
@@ -256,7 +258,7 @@ dataDailyModelCycle = function (plotParams, plotFunction) {
   statement = statement.replace("{{phaseClause}}", phaseClause);
   statement = statement.replace("{{dateClause}}", dateClause);
   statement = statement.replace("{{siteDateClause}}", siteDateClause);
-  if (database === "AMDAR") {
+  if (database.includes("AMDAR")) {
     // AMDAR tables have all partial sums so we can get them all from the main table
     statement = statement.split("m1").join("m0");
   }
diff --git a/apps/upperair/server/dataFunctions/data_dieoff.js b/apps/upperair/server/dataFunctions/data_dieoff.js
index 8fa9c3079d..349851a2a5 100644
--- a/apps/upperair/server/dataFunctions/data_dieoff.js
+++ b/apps/upperair/server/dataFunctions/data_dieoff.js
@@ -143,9 +143,11 @@ dataDieoff = function (plotParams, plotFunction) {
       } else {
         queryTableClause = `${queryTableClause}, ${databaseRef.sumsDB}.GFS_Areg${region} as m1`;
       }
+    } else if (database.includes("Vapor")) {
+      queryTableClause = queryTableClause.replace("_sums", "_vapor_sums");
     }
   } else {
-    if (database === "AMDAR") {
+    if (database.includes("AMDAR")) {
       throw new Error(
         "Single/multi-station plotting is not supported by the AMDAR database."
       );
@@ -214,7 +216,7 @@ dataDieoff = function (plotParams, plotFunction) {
     `group_concat(unix_timestamp(m0.date)+3600*m0.hour, ';', ${levelVar}, ';', ${variable[0]}, ';', ${NClause}, ';', ${variable[2]}, ';', ${variable[3]}, ';', ${variable[4]}, ';', ${variable[5]} order by unix_timestamp(m0.date)+3600*m0.hour, ${levelVar}) as sub_data, count(${variable[0]}) as N0`;

   let phaseClause = "";
-  if (database === "AMDAR") {
+  if (database.includes("AMDAR")) {
     const phaseStr = curve.phase;
     const phaseOptionsMap = matsCollections.phase.findOne(
       { name: "phase" },
@@ -277,7 +279,7 @@ dataDieoff = function (plotParams, plotFunction) {
   statement = statement.replace("{{phaseClause}}", phaseClause);
   statement = statement.replace("{{dateClause}}", dateClause);
   statement = statement.replace("{{siteDateClause}}", siteDateClause);
-  if (database === "AMDAR") {
+  if (database.includes("AMDAR")) {
     // AMDAR tables have all partial sums so we can get them all from the main table
     statement = statement.split("m1").join("m0");
   }
diff --git a/apps/upperair/server/dataFunctions/data_histogram.js b/apps/upperair/server/dataFunctions/data_histogram.js
index da23f10c6e..9ff6f7625a 100644
--- a/apps/upperair/server/dataFunctions/data_histogram.js
+++ b/apps/upperair/server/dataFunctions/data_histogram.js
@@ -138,9 +138,11 @@ dataHistogram = function (plotParams, plotFunction) {
       } else {
         queryTableClause = `${queryTableClause}, ${databaseRef.sumsDB}.GFS_Areg${region} as m1`;
       }
+    } else if (database.includes("Vapor")) {
+      queryTableClause = queryTableClause.replace("_sums", "_vapor_sums");
     }
   } else {
-    if (database === "AMDAR") {
+    if (database.includes("AMDAR")) {
       throw new Error(
         "Single/multi-station plotting is not supported by the AMDAR database."
       );
@@ -194,7 +196,7 @@ dataHistogram = function (plotParams, plotFunction) {
     `group_concat(unix_timestamp(m0.date)+3600*m0.hour, ';', ${levelVar}, ';', ${variable[0]}, ';', ${NClause}, ';', ${variable[2]}, ';', ${variable[3]}, ';', ${variable[4]}, ';', ${variable[5]} order by unix_timestamp(m0.date)+3600*m0.hour, ${levelVar}) as sub_data, count(${variable[0]}) as N0`;

   let phaseClause = "";
-  if (database === "AMDAR") {
+  if (database.includes("AMDAR")) {
     const phaseStr = curve.phase;
     const phaseOptionsMap = matsCollections.phase.findOne(
       { name: "phase" },
@@ -258,7 +260,7 @@ dataHistogram = function (plotParams, plotFunction) {
   statement = statement.replace("{{phaseClause}}", phaseClause);
   statement = statement.replace("{{dateClause}}", dateClause);
   statement = statement.replace("{{siteDateClause}}", siteDateClause);
-  if (database === "AMDAR") {
+  if (database.includes("AMDAR")) {
     // AMDAR tables have all partial sums so we can get them all from the main table
     statement = statement.split("m1").join("m0");
   }
diff --git a/apps/upperair/server/dataFunctions/data_map.js b/apps/upperair/server/dataFunctions/data_map.js
index 6aaf63ecdd..be6896005d 100644
--- a/apps/upperair/server/dataFunctions/data_map.js
+++ b/apps/upperair/server/dataFunctions/data_map.js
@@ -79,7 +79,7 @@ dataMap = function (plotParams, plotFunction) {

   let sitesClause = "";
-  if (database === "AMDAR") {
+  if (database.includes("AMDAR")) {
     throw new Error(
       "Single/multi-station plotting is not supported by the AMDAR database."
     );
diff --git a/apps/upperair/server/dataFunctions/data_profile.js b/apps/upperair/server/dataFunctions/data_profile.js
index f4e063cd6f..f029983185 100644
--- a/apps/upperair/server/dataFunctions/data_profile.js
+++ b/apps/upperair/server/dataFunctions/data_profile.js
@@ -136,9 +136,11 @@ dataProfile = function (plotParams, plotFunction) {
       } else {
         queryTableClause = `${queryTableClause}, ${databaseRef.sumsDB}.GFS_Areg${region} as m1`;
       }
+    } else if (database.includes("Vapor")) {
+      queryTableClause = queryTableClause.replace("_sums", "_vapor_sums");
     }
   } else {
-    if (database === "AMDAR") {
+    if (database.includes("AMDAR")) {
       throw new Error(
         "Single/multi-station plotting is not supported by the AMDAR database."
       );
@@ -192,7 +194,7 @@ dataProfile = function (plotParams, plotFunction) {
     `group_concat(unix_timestamp(m0.date)+3600*m0.hour, ';', ${levelVar}, ';', ${variable[0]}, ';', ${NClause}, ';', ${variable[2]}, ';', ${variable[3]}, ';', ${variable[4]}, ';', ${variable[5]} order by unix_timestamp(m0.date)+3600*m0.hour, ${levelVar}) as sub_data, count(${variable[0]}) as N0`;

   let phaseClause = "";
-  if (database === "AMDAR") {
+  if (database.includes("AMDAR")) {
     const phaseStr = curve.phase;
     const phaseOptionsMap = matsCollections.phase.findOne(
       { name: "phase" },
@@ -254,7 +256,7 @@ dataProfile = function (plotParams, plotFunction) {
   statement = statement.replace("{{phaseClause}}", phaseClause);
   statement = statement.replace("{{dateClause}}", dateClause);
   statement = statement.replace("{{siteDateClause}}", siteDateClause);
-  if (database === "AMDAR") {
+  if (database.includes("AMDAR")) {
     // AMDAR tables have all partial sums so we can get them all from the main table
     statement = statement.split("m1").join("m0");
   }
diff --git a/apps/upperair/server/dataFunctions/data_series.js b/apps/upperair/server/dataFunctions/data_series.js
index 3303e0bfa7..25586d4c18 100644
--- a/apps/upperair/server/dataFunctions/data_series.js
+++ b/apps/upperair/server/dataFunctions/data_series.js
@@ -144,9 +144,11 @@ dataSeries = function (plotParams, plotFunction) {
       } else {
         queryTableClause = `${queryTableClause}, ${databaseRef.sumsDB}.GFS_Areg${region} as m1`;
       }
+    } else if (database.includes("Vapor")) {
+      queryTableClause = queryTableClause.replace("_sums", "_vapor_sums");
     }
   } else {
-    if (database === "AMDAR") {
+    if (database.includes("AMDAR")) {
       throw new Error(
         "Single/multi-station plotting is not supported by the AMDAR database."
       );
@@ -200,7 +202,7 @@ dataSeries = function (plotParams, plotFunction) {
     `group_concat(unix_timestamp(m0.date)+3600*m0.hour, ';', ${levelVar}, ';', ${variable[0]}, ';', ${NClause}, ';', ${variable[2]}, ';', ${variable[3]}, ';', ${variable[4]}, ';', ${variable[5]} order by unix_timestamp(m0.date)+3600*m0.hour, ${levelVar}) as sub_data, count(${variable[0]}) as N0`;

   let phaseClause = "";
-  if (database === "AMDAR") {
+  if (database.includes("AMDAR")) {
     const phaseStr = curve.phase;
     const phaseOptionsMap = matsCollections.phase.findOne(
       { name: "phase" },
@@ -262,7 +264,7 @@ dataSeries = function (plotParams, plotFunction) {
   statement = statement.replace("{{phaseClause}}", phaseClause);
   statement = statement.replace("{{dateClause}}", dateClause);
   statement = statement.replace("{{siteDateClause}}", siteDateClause);
-  if (database === "AMDAR") {
+  if (database.includes("AMDAR")) {
     // AMDAR tables have all partial sums so we can get them all from the main table
     statement = statement.split("m1").join("m0");
   }
diff --git a/apps/upperair/server/dataFunctions/data_simple_scatter.js b/apps/upperair/server/dataFunctions/data_simple_scatter.js
index 2bf4bfd8fd..eeb918290c 100644
--- a/apps/upperair/server/dataFunctions/data_simple_scatter.js
+++ b/apps/upperair/server/dataFunctions/data_simple_scatter.js
@@ -143,7 +143,7 @@ dataSimpleScatter = function (plotParams, plotFunction) {
   }

   let phaseClause = "";
-  if (database === "AMDAR") {
+  if (database.includes("AMDAR")) {
     const phaseStr = curve.phase;
     const phaseOptionsMap = matsCollections.phase.findOne(
       { name: "phase" },
@@ -174,6 +174,8 @@ dataSimpleScatter = function (plotParams, plotFunction) {
     } else {
       queryTableClause = `${queryTableClause}, ${databaseRef.sumsDB}.GFS_Areg${region} as m1`;
     }
+  } else if (database.includes("Vapor")) {
+    queryTableClause = queryTableClause.replace("_sums", "_vapor_sums");
   }

   const { statVarUnitMap } = matsCollections.variable.findOne(
@@ -218,7 +220,7 @@ dataSimpleScatter = function (plotParams, plotFunction) {
   statement = statement.replace("{{phaseClause}}", phaseClause);
   statement = statement.replace("{{dateClause}}", dateClause);
   statement = statement.split("{{dateString}}").join(dateString);
-  if (database === "AMDAR") {
+  if (database.includes("AMDAR")) {
     // AMDAR tables have all partial sums so we can get them all from the main table
     statement = statement.split("m1").join("m0");
   }
diff --git a/apps/upperair/server/dataFunctions/data_validtime.js b/apps/upperair/server/dataFunctions/data_validtime.js
index 2660cd244f..2699d23146 100644
--- a/apps/upperair/server/dataFunctions/data_validtime.js
+++ b/apps/upperair/server/dataFunctions/data_validtime.js
@@ -131,9 +131,11 @@ dataValidTime = function (plotParams, plotFunction) {
       } else {
         queryTableClause = `${queryTableClause}, ${databaseRef.sumsDB}.GFS_Areg${region} as m1`;
       }
+    } else if (database.includes("Vapor")) {
+      queryTableClause = queryTableClause.replace("_sums", "_vapor_sums");
     }
   } else {
-    if (database === "AMDAR") {
+    if (database.includes("AMDAR")) {
       throw new Error(
         "Single/multi-station plotting is not supported by the AMDAR database."
       );
@@ -187,7 +189,7 @@ dataValidTime = function (plotParams, plotFunction) {
     `group_concat(unix_timestamp(m0.date)+3600*m0.hour, ';', ${levelVar}, ';', ${variable[0]}, ';', ${NClause}, ';', ${variable[2]}, ';', ${variable[3]}, ';', ${variable[4]}, ';', ${variable[5]} order by unix_timestamp(m0.date)+3600*m0.hour, ${levelVar}) as sub_data, count(${variable[0]}) as N0`;

   let phaseClause = "";
-  if (database === "AMDAR") {
+  if (database.includes("AMDAR")) {
     const phaseStr = curve.phase;
     const phaseOptionsMap = matsCollections.phase.findOne(
       { name: "phase" },
@@ -246,7 +248,7 @@ dataValidTime = function (plotParams, plotFunction) {
   statement = statement.replace("{{phaseClause}}", phaseClause);
   statement = statement.replace("{{dateClause}}", dateClause);
   statement = statement.replace("{{siteDateClause}}", siteDateClause);
-  if (database === "AMDAR") {
+  if (database.includes("AMDAR")) {
     // AMDAR tables have all partial sums so we can get them all from the main table
     statement = statement.split("m1").join("m0");
   }
diff --git a/apps/upperair/server/main.js b/apps/upperair/server/main.js
index fcb1985d7b..a3932e2340 100644
--- a/apps/upperair/server/main.js
+++ b/apps/upperair/server/main.js
@@ -20,7 +20,12 @@ import {
 const dbNames = {
   "RAOBs (Traditional)": { modelDB: "ruc_ua", sumsDB: "ruc_ua_sums2" },
   "RAOBs (GDAS)": { modelDB: "ruc_ua_pb", sumsDB: "ruc_ua_pb_sums2" },
-  AMDAR: { modelDB: "acars_RR2", sumsDB: "acars_RR2" },
+  "AMDAR (Traditional)": { modelDB: "acars_RR2", sumsDB: "acars_RR2" },
+  "AMDAR (GDAS)": { modelDB: "pb_amdar", sumsDB: "pb_amdar" },
+  "AMDAR (GDAS; Only Obs That Include Vapor)": {
+    modelDB: "pb_amdar",
+    sumsDB: "pb_amdar",
+  },
 };
 const dbs = Object.keys(dbNames);
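This dbNames change is what motivates the includes() tests above. A small Python transliteration of the new lookup (the dict mirrors the JavaScript object; the variable names are illustrative):

db_names = {
    "RAOBs (Traditional)": {"modelDB": "ruc_ua", "sumsDB": "ruc_ua_sums2"},
    "RAOBs (GDAS)": {"modelDB": "ruc_ua_pb", "sumsDB": "ruc_ua_pb_sums2"},
    "AMDAR (Traditional)": {"modelDB": "acars_RR2", "sumsDB": "acars_RR2"},
    "AMDAR (GDAS)": {"modelDB": "pb_amdar", "sumsDB": "pb_amdar"},
    "AMDAR (GDAS; Only Obs That Include Vapor)": {"modelDB": "pb_amdar", "sumsDB": "pb_amdar"},
}

database = "AMDAR (GDAS; Only Obs That Include Vapor)"
database_ref = db_names[database]   # modelDB and sumsDB both resolve to pb_amdar
# All three AMDAR flavors pass the substring test, so the phase clause and the
# m1 -> m0 substitution apply to each of them without further changes:
print("AMDAR" in database, "Vapor" in database)   # True True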
diff --git a/scripts/matsMetaDataForApps/createMetaData/mysql/mats/make_regions_per_model_mats_all_categories_aircraft_prepbufr.py b/scripts/matsMetaDataForApps/createMetaData/mysql/mats/make_regions_per_model_mats_all_categories_aircraft_prepbufr.py
new file mode 100755
index 0000000000..c14552b8b5
--- /dev/null
+++ b/scripts/matsMetaDataForApps/createMetaData/mysql/mats/make_regions_per_model_mats_all_categories_aircraft_prepbufr.py
@@ -0,0 +1,405 @@
+#!/usr/bin/env python
+#
+# Creates a regions_per_model_mats_all_categories table for all models in pb_amdar
+
+# __future__ must come first
+from __future__ import print_function
+from datetime import datetime
+
+import time
+import re
+import sys
+import ast
+import MySQLdb
+
+
+############################################################################
+
+def update_rpm_record(cnx, cursor, table_name, display_text, regions, fcst_lens, display_category, display_order, mindate, maxdate, numrecs):
+
+    # see if this record already exists (it shouldn't, because this script cleaned the tables when it started)
+    find_rpm_rec = "SELECT id FROM regions_per_model_mats_all_categories_build WHERE model = '" + \
+        str(table_name) + "'"
+    cursor.execute(find_rpm_rec)
+    record_id = int(0)
+    for row in cursor:
+        val = list(row.values())[0]
+        record_id = int(val)
+
+    if len(regions) > int(0) and len(fcst_lens) > int(0):
+        qd = []
+        updated_utc = datetime.utcnow().strftime('%s')
+        # if it's a new record (it should be) add it
+        if record_id == 0:
+            insert_rpm_rec = "INSERT INTO regions_per_model_mats_all_categories_build (model, display_text, regions, fcst_lens, display_category, display_order, id, mindate, maxdate, numrecs, updated) values( %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
+            qd.append(str(table_name))
+            qd.append(str(display_text))
+            qd.append(str(regions))
+            qd.append(str(fcst_lens))
+            qd.append(display_category)
+            qd.append(display_order)
+            qd.append(record_id)
+            qd.append(mindate)
+            qd.append(maxdate)
+            qd.append(numrecs)
+            qd.append(updated_utc)
+            cursor.execute(insert_rpm_rec, qd)
+            cnx.commit()
+        else:
+            # if there's a pre-existing record, update it
+            update_rpm_rec = "UPDATE regions_per_model_mats_all_categories_build SET regions = %s, fcst_lens = %s, display_category = %s, display_order = %s, mindate = %s, maxdate = %s, numrecs = %s, updated = %s WHERE id = %s"
+            qd.append(str(regions))
+            qd.append(str(fcst_lens))
+            qd.append(display_category)
+            qd.append(display_order)
+            qd.append(mindate)
+            qd.append(maxdate)
+            qd.append(numrecs)
+            qd.append(updated_utc)
+            qd.append(record_id)
+            cursor.execute(update_rpm_rec, qd)
+            cnx.commit()
+
+
+############################################################################
+
+def regions_per_model_mats_all_categories(mode):
+    # connect to database
+    try:
+        # location of cnf file on Hera; edit if running locally
+        cnx = MySQLdb.connect(read_default_file="/home/role.amb-verif/.my.cnf")
+        cnx.autocommit = True
+        cursor = cnx.cursor(MySQLdb.cursors.DictCursor)
+        cursor.execute("set session wait_timeout=28800")
+        cursor.execute("set session interactive_timeout=28800")
+    except MySQLdb.Error as e:
+        print("Error: " + str(e))
+        sys.exit(1)
+
+    try:
+        cnx3 = MySQLdb.connect(
+            read_default_file="/home/role.amb-verif/.my.cnf")
+        cnx3.autocommit = True
+        cursor3 = cnx3.cursor(MySQLdb.cursors.DictCursor)
+        cursor3.execute("set session wait_timeout=28800")
+        cursor3.execute("set session interactive_timeout=28800")
+    except MySQLdb.Error as e:
+        print("Error: " + str(e))
+        sys.exit(1)
+
+    db = "pb_amdar"
+    usedb = "use " + db
+    cursor.execute(usedb)
+
+    db3 = "mats_common"
+    usedb3 = "use " + db3
+    cursor3.execute(usedb3)
+
+    # get a map of valid MATS regions
+    valid_regions = {}
+    valid_region_orders = {}
+    get_valid_regions = "select id,short_name from region_descriptions"
+    cursor3.execute(get_valid_regions)
+    for row in cursor3:
+        region_name = row['short_name']
+        region_id = row['id']
+        valid_regions[region_name] = region_id
+    for region_name in valid_regions.keys():
+        get_region_order = "select region_order from region_orders where id=" + \
+            str(valid_regions[region_name]) + ";"
+        cursor3.execute(get_region_order)
+        for row in cursor3:
+            region_order = int(row['region_order'])
+            valid_region_orders[region_name] = region_order
+
+    cursor3.close()
+    cnx3.close()
+
+    # clean TABLESTATS_build in order to get updated data source information. If nothing has changed, you can set
+    # TScleaned to False and just use the old data source info.
+    clean_tablestats = "delete from " + db + ".TABLESTATS_build"
+    # TScleaned = False
+    TScleaned = True
+    if TScleaned:
+        cursor.execute(clean_tablestats)
+    else:
+        print("NOT executing: " + str(clean_tablestats))
+
+    # string of tables not to include in our search for metadata
+    skiptables = " regions_per_model_mats_all_categories regions_per_model_mats_all_categories_build template TABLESTATS_build 7day acars limits tail "
+
+    # get an array of all relevant data sources in this db
+    all_data_sources = []
+    per_table = {}
+
+    show_tables = "show tables like '%_sums';"
+    cursor.execute(show_tables)
+    for row in cursor:
+        tablename = str(list(row.values())[0])
+        # print( "tablename is " + tablename)
+        if " " + tablename + " " not in skiptables and "vapor" not in tablename:
+            # parse the data sources, forecast lengths, and regions from the table names
+            model = re.sub('_[A-Za-z]*_sums$', '', tablename)
+            if model not in all_data_sources:
+                all_data_sources.append(model)
+            per_table[tablename] = {}
+            per_table[tablename]['model'] = model
+            temp = "^" + model + "_"
+            region = re.sub(temp, "", tablename)
+            region = re.sub("_sums", "", region)
+            per_table[tablename]['region'] = region
+            # print("model is " + model + ", region is " + region)
+
+    # sys.exit(-1)
+
+    # parse the other metadata contained in the tables
+    if TScleaned:
+        for tablename in per_table.keys():
+            # get forecast lengths from this table
+            get_fcst_lens = (
+                "SELECT DISTINCT fcst_len FROM " + tablename + ";")
+            cursor.execute(get_fcst_lens)
+            per_table[tablename]['fcst_lens'] = []
+            this_fcst_lens = []
+            for row in cursor:
+                val = list(row.values())[0]
+                this_fcst_lens.append(int(val))
+            this_fcst_lens.sort(key=int)
+            per_table[tablename]['fcst_lens'] = this_fcst_lens
+            # print(tablename + " fcst_lens: " + str(per_table[tablename]['fcst_lens']) )
+
+            # get statistics for this table
+            get_tablestats = "SELECT min(date) AS mindate, max(date) AS maxdate, count(date) AS numrecs FROM " + tablename + ";"
+            cursor.execute(get_tablestats)
+            stats = {}
+            for row in cursor:
+                rowkeys = row.keys()
+                for rowkey in rowkeys:
+                    val = str(row[rowkey])
+                    stats[rowkey] = val
+
+            if stats['mindate'] != 'None':
+                get_minhour = "SELECT min(hour) AS minhour FROM " + \
+                    tablename + " WHERE date = '" + stats['mindate'] + "' ;"
+                cursor.execute(get_minhour)
+                for row in cursor:
+                    minhour = str(row['minhour'])
+                stats['mindate'] = int(time.mktime(time.strptime(
+                    stats['mindate'] + ' ' + minhour, '%Y-%m-%d %H')))
+
+            if stats['maxdate'] != 'None':
+                get_maxhour = "SELECT max(hour) AS maxhour FROM " + \
+                    tablename + " WHERE date = '" + stats['maxdate'] + "' ;"
+                cursor.execute(get_maxhour)
+                for row in cursor:
+                    maxhour = str(row['maxhour'])
+                stats['maxdate'] = int(time.mktime(time.strptime(
+                    stats['maxdate'] + ' ' + maxhour, '%Y-%m-%d %H')))
+
+            # print(tablename + " stats:\n" + str(stats) )
+
+            replace_tablestats_rec = "REPLACE INTO TABLESTATS_build (tablename, mindate, maxdate, model, region, fcst_lens, numrecs) values( %s, %s, %s, %s, %s, %s, %s )"
+            qd = []
+            qd.append(str(tablename))
+            qd.append(str(stats['mindate']))
+            qd.append(str(stats['maxdate']))
+            qd.append(str(per_table[tablename]['model']))
+            qd.append(str(per_table[tablename]['region']))
+            qd.append(str(per_table[tablename]['fcst_lens']))
+            qd.append(str(stats['numrecs']))
+            cursor.execute(replace_tablestats_rec, qd)
+            cnx.commit()
+        # sys.exit(-1)
+    else:
+        print("TScleaned is " + str(TScleaned) +
+              " skipped populating TABLESTATS_build")
+
+    # sys.exit(-1)
+
+    # refresh database connection
+    cursor.close()
+    cnx.close()
+
+    try:
+        cnx = MySQLdb.connect(read_default_file="/home/role.amb-verif/.my.cnf")
+        cnx.autocommit = True
+        cursor = cnx.cursor(MySQLdb.cursors.DictCursor)
+        cursor.execute("set session wait_timeout=28800")
+        cursor.execute("set session interactive_timeout=28800")
+    except MySQLdb.Error as e:
+        print("Error: " + str(e))
+        sys.exit(1)
+
+    db = "pb_amdar"
+    usedb = "use " + db
+    cursor.execute(usedb)
+
+    # use standardized model names
+    try:
+        cnx4 = MySQLdb.connect(
+            read_default_file="/home/role.amb-verif/.my.cnf")
+        cnx4.autocommit = True
+        cursor4 = cnx4.cursor(MySQLdb.cursors.DictCursor)
+        cursor4.execute("set session wait_timeout=28800")
+        cursor4.execute("set session interactive_timeout=28800")
+    except MySQLdb.Error as e:
+        print("Error: " + str(e))
+        sys.exit(1)
+
+    usedb = "use mats_common"
+    cursor4.execute(usedb)
+
+    get_model_keys_vals = "select old_model,new_model from standardized_model_list;"
+    cursor4.execute(get_model_keys_vals)
+
+    main_model_keys = []
+    main_models = {}
+    for row in cursor4:
+        old_model = str(row['old_model'])
+        new_model = str(row['new_model'])
+        if old_model in all_data_sources:
+            main_model_keys.append(old_model)
+            main_models[old_model] = new_model
+
+    get_model_orders = "select model,m_order from primary_model_orders order by m_order;"
+    cursor4.execute(get_model_orders)
+
+    new_model_list = list(main_models.values())
+    main_model_order_keys = []
+    main_model_orders = {}
+    for row in cursor4:
+        new_model = str(row['model'])
+        m_order = int(row['m_order'])
+        if new_model in new_model_list:
+            main_model_order_keys.append(new_model)
+            main_model_orders[new_model] = m_order
+
+    cursor4.close()
+    cnx4.close()
+
+    # sys.exit(-1)
+
+    # clean metadata build table
+    clean_rpmmac = "delete from regions_per_model_mats_all_categories_build"
+    cursor.execute(clean_rpmmac)
+    cnx.commit()
+    set_ai = "alter table regions_per_model_mats_all_categories_build auto_increment = 1"
+    cursor.execute(set_ai)
+    cnx.commit()
+
+    # sort the data sources into groups
+    data_sources_in_this_app = all_data_sources
+    data_sources_in_this_app.sort(key=str.lower)
+    data_source_cats = {}
+    data_source_key_cats = {}
+
+    ds_idx = 2
+
+    for model in data_sources_in_this_app:
+        if model in main_model_keys and main_models[model] in main_model_order_keys:
+            data_source_cats[model] = 1
+        else:
+            sub_idx = model.find('_', 0)
+            model_key = model[0:sub_idx]
+            if model_key in data_source_key_cats.keys():
+                data_source_cats[model] = data_source_key_cats[model_key]
+            else:
+                data_source_key_cats[model_key] = ds_idx
+                data_source_cats[model] = ds_idx
+                ds_idx = ds_idx + 1
+
+    # combine the metadata per table into metadata per data source
+    do_non_main = 0
+    for model in all_data_sources:
+        if model in main_model_keys and main_models[model] in main_model_order_keys:
+            cat = 1
+            display_text = main_models[model]
+            do = main_model_orders[display_text]
+        else:
+            cat = data_source_cats[model]
+            display_text = str(model)
+            do = do_non_main + 1
+            do_non_main = do_non_main + 1
+
+        # get regions for all tables pertaining to this model
+        get_these_regions = "select distinct(region) as region from " + db + \
+            ".TABLESTATS_build where tablename like '" + model + \
+            "%' and model = '" + model + "' and numrecs > 0;"
+        cursor.execute(get_these_regions)
+        these_regions_raw = []
+        these_regions_orders = []
+        for row in cursor:
+            val = str(list(row.values())[0])
+            these_regions_raw.append(val)
+            these_regions_orders.append(valid_region_orders[val])
+        these_regions = [x for _, x in sorted(
+            zip(these_regions_orders, these_regions_raw))]
+        # print( "these_regions:\n" + str(these_regions) )
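The region list above is ordered by pairing each region with its MATS display order and sorting on the order. The zip/sort idiom in isolation (the region names and orders here are illustrative values, not real pb_amdar contents):

# Order region names by an external ordering, as the script does with
# valid_region_orders.
these_regions_raw = ['GLBL', 'CONUS', 'TROP']
these_regions_orders = [3, 1, 2]   # display order for each region, by position
these_regions = [x for _, x in sorted(zip(these_regions_orders, these_regions_raw))]
print(these_regions)               # ['CONUS', 'TROP', 'GLBL']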
+        # get forecast lengths for all tables pertaining to this model
+        get_these_fcst_lens = "select distinct(fcst_lens) as fcst_lens from " + db + ".TABLESTATS_build where tablename like '" + \
+            model + "%' and fcst_lens != '[]' and model = '" + model + \
+            "' and numrecs > 0 order by length(fcst_lens) desc;"
+        cursor.execute(get_these_fcst_lens)
+        these_fcst_lens = []
+        for row in cursor:
+            val_array = ast.literal_eval(list(row.values())[0])
+            for val in val_array:
+                if val not in these_fcst_lens:
+                    these_fcst_lens.append(val)
+        these_fcst_lens.sort(key=int)
+        # print( "these_fcst_lens:\n" + str(these_fcst_lens) )
+
+        # get statistics for all tables pertaining to this model
+        get_cat_stats = "select min(mindate) as mindate, max(maxdate) as maxdate, sum(numrecs) as numrecs from " + \
+            db + ".TABLESTATS_build where tablename like '" + model + \
+            "%' and model = '" + model + "' and numrecs > 0"
+        cursor.execute(get_cat_stats)
+        catstats = cursor.fetchall()[0]
+        # print( "catstats:\n" + str(catstats) )
+
+        # update the metadata for this data source in the build table
+        if len(these_regions) > 0 and len(these_fcst_lens) > 0:
+            update_rpm_record(cnx, cursor, model, display_text, these_regions, these_fcst_lens,
+                              cat, do, catstats['mindate'], catstats['maxdate'], catstats['numrecs'])
+
+    # clean metadata publication table and add the build data into it
+    updated_utc = datetime.utcnow().strftime('%Y/%m/%d %H:%M')
+    if 'deploy' in mode:
+        clean_rpmmac = "delete from regions_per_model_mats_all_categories"
+        cursor.execute(clean_rpmmac)
+        cnx.commit()
+        set_ai = "alter table regions_per_model_mats_all_categories auto_increment = 1"
+        cursor.execute(set_ai)
+        cnx.commit()
+        sync_rpm = "insert into regions_per_model_mats_all_categories select * from regions_per_model_mats_all_categories_build"
+        cursor.execute(sync_rpm)
+        cnx.commit()
+        print("deploy " + db +
+              ".regions_per_model_mats_all_categories complete at " + str(updated_utc))
+    else:
+        print("skipping deployment at " + str(updated_utc))
+
+    cursor.close()
+    cnx.close()
+
+
+##################### regions_per_model_mats_all_categories ####################################
+
+if __name__ == '__main__':
+    def selftest(mode):
+        regions_per_model_mats_all_categories(mode)
+
+    if len(sys.argv) == 2:
+        if sys.argv[1] == 'selftest':
+            selftest('selftest')
+
+    if len(sys.argv) == 2:
+        if sys.argv[1] == 'deploy':
+            utcnow = str(datetime.now())
+            msg = 'PB AIRCRAFT MATS METADATA START: ' + utcnow
+            print(msg)
+            regions_per_model_mats_all_categories('deploy')
+            utcnow = str(datetime.now())
+            msg = 'PB AIRCRAFT MATS METADATA END: ' + utcnow
+            print(msg)
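The script derives the data source and region from each sums-table name, which follows the <model>_<region>_sums convention; "vapor" tables are skipped here and reached by the apps through the _vapor_sums substitution instead. The same two-step parse, run standalone (the table name is illustrative):

import re

tablename = "RAP_GSL_GLBL_sums"                       # illustrative table name
model = re.sub('_[A-Za-z]*_sums$', '', tablename)     # -> "RAP_GSL"
region = re.sub("^" + model + "_", "", tablename)     # -> "GLBL_sums"
region = re.sub("_sums", "", region)                  # -> "GLBL"
print(model, region)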
diff --git a/scripts/matsMetaDataForApps/createMetaData/mysql/mats/run_mats_metadata_pt1.tcsh b/scripts/matsMetaDataForApps/createMetaData/mysql/mats/run_mats_metadata_pt1.tcsh
index 591b878418..645cf11622 100755
--- a/scripts/matsMetaDataForApps/createMetaData/mysql/mats/run_mats_metadata_pt1.tcsh
+++ b/scripts/matsMetaDataForApps/createMetaData/mysql/mats/run_mats_metadata_pt1.tcsh
@@ -25,6 +25,7 @@ conda activate avid_verify_py3
 #/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_echotop.py deploy
 #/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_anomalycor.py deploy
 #/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_aircraft.py deploy
+#/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_aircraft_prepbufr.py deploy
 /home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_vgtyp.py deploy
 #/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_surfrad.py deploy
 #/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_raobamdar.py deploy
diff --git a/scripts/matsMetaDataForApps/createMetaData/mysql/mats/run_mats_metadata_pt2.tcsh b/scripts/matsMetaDataForApps/createMetaData/mysql/mats/run_mats_metadata_pt2.tcsh
index 6f0e6abf99..a439257532 100755
--- a/scripts/matsMetaDataForApps/createMetaData/mysql/mats/run_mats_metadata_pt2.tcsh
+++ b/scripts/matsMetaDataForApps/createMetaData/mysql/mats/run_mats_metadata_pt2.tcsh
@@ -25,6 +25,7 @@ conda activate avid_verify_py3
 #/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_echotop.py deploy
 #/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_anomalycor.py deploy
 /home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_aircraft.py deploy
+/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_aircraft_prepbufr.py deploy
 #/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_vgtyp.py deploy
 #/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_surfrad.py deploy
 #/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_raobamdar.py deploy
diff --git a/scripts/matsMetaDataForApps/createMetaData/mysql/mats/run_mats_metadata_pt3.tcsh b/scripts/matsMetaDataForApps/createMetaData/mysql/mats/run_mats_metadata_pt3.tcsh
index 9bf95a881d..e38173cf2f 100755
--- a/scripts/matsMetaDataForApps/createMetaData/mysql/mats/run_mats_metadata_pt3.tcsh
+++ b/scripts/matsMetaDataForApps/createMetaData/mysql/mats/run_mats_metadata_pt3.tcsh
@@ -25,6 +25,7 @@ conda activate avid_verify_py3
 /home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_echotop.py deploy
 #/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_anomalycor.py deploy
 #/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_aircraft.py deploy
+#/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_aircraft_prepbufr.py deploy
 #/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_vgtyp.py deploy
 #/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_surfrad.py deploy
 #/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_raobamdar.py deploy
diff --git a/scripts/matsMetaDataForApps/createMetaData/mysql/mats/run_mats_metadata_pt4.tcsh b/scripts/matsMetaDataForApps/createMetaData/mysql/mats/run_mats_metadata_pt4.tcsh
index 3f8ae42586..3bc5d26422 100755
--- a/scripts/matsMetaDataForApps/createMetaData/mysql/mats/run_mats_metadata_pt4.tcsh
+++ b/scripts/matsMetaDataForApps/createMetaData/mysql/mats/run_mats_metadata_pt4.tcsh
@@ -25,6 +25,7 @@ conda activate avid_verify_py3
 #/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_echotop.py deploy
 #/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_anomalycor.py deploy
 #/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_aircraft.py deploy
+#/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_aircraft_prepbufr.py deploy
 #/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_vgtyp.py deploy
 /home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_surfrad.py deploy
 #/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_raobamdar.py deploy
diff --git a/scripts/matsMetaDataForApps/createMetaData/mysql/mats/run_mats_metadata_pt5.tcsh b/scripts/matsMetaDataForApps/createMetaData/mysql/mats/run_mats_metadata_pt5.tcsh
index 605c32c895..af9e2afc33 100755
--- a/scripts/matsMetaDataForApps/createMetaData/mysql/mats/run_mats_metadata_pt5.tcsh
+++ b/scripts/matsMetaDataForApps/createMetaData/mysql/mats/run_mats_metadata_pt5.tcsh
@@ -25,6 +25,7 @@ conda activate avid_verify_py3
 #/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_echotop.py deploy
 # /home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_anomalycor.py deploy
 #/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_aircraft.py deploy
+#/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_aircraft_prepbufr.py deploy
 #/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_vgtyp.py deploy
 # /home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_surfrad.py deploy
 #/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_raobamdar.py deploy
diff --git a/scripts/matsMetaDataForApps/createMetaData/mysql/mats/update_metadata_aircraft_prepbufr.py b/scripts/matsMetaDataForApps/createMetaData/mysql/mats/update_metadata_aircraft_prepbufr.py
new file mode 100755
index 0000000000..5c90a1522a
--- /dev/null
+++ b/scripts/matsMetaDataForApps/createMetaData/mysql/mats/update_metadata_aircraft_prepbufr.py
@@ -0,0 +1,346 @@
+#!/scratch1/BMC/amb-verif/miniconda/miniconda3/envs/avid_verify_py3/bin/python
+#
+# The reason I am hardcoding the python path above is that this script is usually run by model developers
+# without guidance from us, and I don't want them to be tripped up by the fact that the default python on
+# Hera is python 2, while this script requires python 3. There's also an error to that effect below, but
+# I'm trying to cut down on the number of confused emails we get. Our main scripts are all environment-agnostic,
+# because they are run by verification team members who know which conda environment to use.
+#
+# Updates the regions_per_model_mats_all_categories table for all models in pb_amdar
+
+# __future__ must come first
+from __future__ import print_function
+from datetime import datetime
+
+import time
+import re
+import sys
+
+try:
+    import MySQLdb
+except ImportError:
+    raise ImportError('--------------------IMPORTANT: This script now requires python 3 to run. \
+        You can do this in the amb-verif conda environment by running "conda activate \
+        avid_verify_py3" and then trying this script again.-------------------------')
+
+
+############################################################################
+
+def update_rpm_record(cnx, cursor, table_name, display_text, regions, fcst_lens, display_category, display_order, mindate, maxdate, numrecs):
+
+    # see if this record already exists in the build table
+    # (does not guarantee the result will be the same for the prod table)
+    find_rpm_rec = "SELECT id FROM regions_per_model_mats_all_categories_build WHERE model = '" + \
+        str(table_name) + "'"
+    cursor.execute(find_rpm_rec)
+    build_record_id = int(0)
+    for row in cursor:
+        val = list(row.values())[0]
+        build_record_id = int(val)
+
+    # see if this record already exists in the prod table
+    # (does not guarantee the result will be the same for the build table)
+    find_rpm_rec = "SELECT id FROM regions_per_model_mats_all_categories WHERE model = '" + \
+        str(table_name) + "'"
+    cursor.execute(find_rpm_rec)
+    prod_record_id = int(0)
+    for row in cursor:
+        val = list(row.values())[0]
+        prod_record_id = int(val)
+
+    if len(regions) > int(0) and len(fcst_lens) > int(0):
+        qd = []
+        updated_utc = datetime.utcnow().strftime('%s')
+        # if it's a new record for the build table, add it
+        if build_record_id == 0:
+            insert_rpm_rec = "INSERT INTO regions_per_model_mats_all_categories_build (model, display_text, regions, fcst_lens, display_category, display_order, id, mindate, maxdate, numrecs, updated) values( %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
+            qd.append(str(table_name))
+            qd.append(str(display_text))
+            qd.append(str(regions))
+            qd.append(str(fcst_lens))
+            qd.append(display_category)
+            qd.append(display_order)
+            qd.append(build_record_id)
+            qd.append(mindate)
+            qd.append(maxdate)
+            qd.append(numrecs)
+            qd.append(updated_utc)
+            cursor.execute(insert_rpm_rec, qd)
+            cnx.commit()
+        else:
+            # if there's a pre-existing record for the build table, update it
+            update_rpm_rec = "UPDATE regions_per_model_mats_all_categories_build SET regions = %s, fcst_lens = %s, display_category = %s, display_order = %s, mindate = %s, maxdate = %s, numrecs = %s, updated = %s WHERE id = %s"
+            qd.append(str(regions))
+            qd.append(str(fcst_lens))
+            qd.append(display_category)
+            qd.append(display_order)
+            qd.append(mindate)
+            qd.append(maxdate)
+            qd.append(numrecs)
+            qd.append(updated_utc)
+            qd.append(build_record_id)
+            cursor.execute(update_rpm_rec, qd)
+            cnx.commit()
+
+        # reset qd array
+        qd = []
+        # if it's a new record for the prod table, add it
+        if prod_record_id == 0:
+            insert_rpm_rec = "INSERT INTO regions_per_model_mats_all_categories (model, display_text, regions, fcst_lens, display_category, display_order, id, mindate, maxdate, numrecs, updated) values( %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
+            qd.append(str(table_name))
+            qd.append(str(display_text))
+            qd.append(str(regions))
+            qd.append(str(fcst_lens))
+            qd.append(display_category)
+            qd.append(display_order)
+            qd.append(prod_record_id)
+            qd.append(mindate)
+            qd.append(maxdate)
+            qd.append(numrecs)
+            qd.append(updated_utc)
+            cursor.execute(insert_rpm_rec, qd)
+            cnx.commit()
+        else:
+            # if there's a pre-existing record for the prod table, update it
+            update_rpm_rec = "UPDATE regions_per_model_mats_all_categories SET regions = %s, fcst_lens = %s, display_category = %s, display_order = %s, mindate = %s, maxdate = %s, numrecs = %s, updated = %s WHERE id = %s"
+            qd.append(str(regions))
+            qd.append(str(fcst_lens))
+            qd.append(display_category)
+            qd.append(display_order)
+            qd.append(mindate)
+            qd.append(maxdate)
+            qd.append(numrecs)
+            qd.append(updated_utc)
+            qd.append(prod_record_id)
+            cursor.execute(update_rpm_rec, qd)
+            cnx.commit()
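update_rpm_record above is a read-then-write upsert applied to both the build and prod tables. The core pattern, reduced to a single hypothetical table (the table and column names below are placeholders, and MySQL's native INSERT ... ON DUPLICATE KEY UPDATE would be the usual single-statement alternative):

def upsert_metadata(cnx, cursor, model, regions):
    # SELECT first: does a row for this model already exist?
    cursor.execute("SELECT id FROM metadata_build WHERE model = %s", (model,))
    row = cursor.fetchone()
    if row is None:
        # no row yet: INSERT, with placeholders so values are escaped safely
        cursor.execute(
            "INSERT INTO metadata_build (model, regions) VALUES (%s, %s)",
            (model, str(regions)))
    else:
        # existing row: UPDATE it in place, keyed by the id just read
        cursor.execute(
            "UPDATE metadata_build SET regions = %s WHERE id = %s",
            (str(regions), list(row.values())[0]))
    cnx.commit()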
+
+
+############################################################################
+
+def reprocess_specific_metadata(models_to_reprocess):
+    # connect to database
+    try:
+        # location of cnf file on Hera; edit if running locally
+        cnx = MySQLdb.connect(read_default_file="/home/role.amb-verif/.my.cnf")
+        cnx.autocommit = True
+        cursor = cnx.cursor(MySQLdb.cursors.DictCursor)
+    except MySQLdb.Error as e:
+        print("Error: " + str(e))
+        sys.exit(1)
+
+    try:
+        cnx2 = MySQLdb.connect(
+            read_default_file="/home/role.amb-verif/.my.cnf")
+        cnx2.autocommit = True
+        cursor2 = cnx2.cursor(MySQLdb.cursors.DictCursor)
+    except MySQLdb.Error as e:
+        print("Error: " + str(e))
+        sys.exit(1)
+
+    try:
+        cnx3 = MySQLdb.connect(
+            read_default_file="/home/role.amb-verif/.my.cnf")
+        cnx3.autocommit = True
+        cursor3 = cnx3.cursor(MySQLdb.cursors.DictCursor)
+    except MySQLdb.Error as e:
+        print("Error: " + str(e))
+        sys.exit(1)
+
+    db = "pb_amdar"
+    usedb = "use " + db
+    cursor.execute(usedb)
+    cursor2.execute(usedb)
+
+    db3 = "mats_common"
+    usedb3 = "use " + db3
+    cursor3.execute(usedb3)
+
+    # get a map of valid MATS regions
+    valid_regions = {}
+    valid_region_orders = {}
+    get_valid_regions = "select id,short_name from region_descriptions"
+    cursor3.execute(get_valid_regions)
+    for row in cursor3:
+        region_name = row['short_name']
+        region_id = row['id']
+        valid_regions[region_name] = region_id
+    for region_name in valid_regions.keys():
+        get_region_order = "select region_order from region_orders where id=" + \
+            str(valid_regions[region_name]) + ";"
+        cursor3.execute(get_region_order)
+        for row in cursor3:
+            region_order = int(row['region_order'])
+            valid_region_orders[region_name] = region_order
+
+    # get common MATS model names
+    get_model_keys_vals = "select old_model,new_model from standardized_model_list;"
+    cursor3.execute(get_model_keys_vals)
+
+    main_model_keys = []
+    main_models = {}
+    for row in cursor3:
+        old_model = str(row['old_model'])
+        new_model = str(row['new_model'])
+        main_model_keys.append(old_model)
+        main_models[old_model] = new_model
+
+    get_model_orders = "select model,m_order from primary_model_orders order by m_order;"
+    cursor3.execute(get_model_orders)
+
+    new_model_list = list(main_models.values())
+    main_model_order_keys = []
+    main_model_orders = {}
+    for row in cursor3:
+        new_model = str(row['model'])
+        m_order = int(row['m_order'])
+        if new_model in new_model_list:
+            main_model_order_keys.append(new_model)
+            main_model_orders[new_model] = m_order
+
+    # get max category used so far
+    cursor3.execute(usedb)
+    cursor3.execute(
+        "select max(display_category) from regions_per_model_mats_all_categories;")
+    for row in cursor3:
+        max_display_category = list(row.values())[0]
+    curr_model_order = 1
+
+    cursor3.close()
+    cnx3.close()
+
+    per_model = {}
+    for model in models_to_reprocess:
+        # initialize output object
+        per_model[model] = {}
+        per_model[model]['region'] = []
+        per_model[model]['fcst_len'] = []
+        per_model[model]['mindate'] = sys.float_info.max
+        per_model[model]['maxdate'] = 0
+        per_model[model]['numrecs'] = 0
+
+        if model in main_model_keys and main_models[model] in main_model_order_keys:
+            per_model[model]['display_text'] = main_models[model]
+            per_model[model]['display_category'] = 1
+            per_model[model]['display_order'] = main_model_orders[per_model[model]['display_text']]
+        else:
+            get_display_params = "select display_category,display_order from regions_per_model_mats_all_categories where model = '" + model + "';"
+            cursor2.execute(get_display_params)
+            per_model[model]['display_text'] = model
+            if cursor2.rowcount == 0:
+                per_model[model]['display_category'] = int(
+                    max_display_category) + 1
+                per_model[model]['display_order'] = curr_model_order
+                curr_model_order = curr_model_order + 1
+            else:
+                for row in cursor2:
+                    per_model[model]['display_category'] = row['display_category']
+                    per_model[model]['display_order'] = row['display_order']
+
+        # get all tables that remotely resemble this model name
+        show_tables = ("show tables like '" + model + "_%_sums';")
+        cursor.execute(show_tables)
+        for row in cursor:
+            tablename = str(list(row.values())[0])
+            table_model = re.sub('_[A-Za-z]*_sums$', '', tablename)
+            if table_model == model and "vapor" not in tablename:
+                # this is a table that does belong to this model
+                get_tablestats = "SELECT min(date) AS mindate, max(date) AS maxdate, count(date) AS numrecs FROM " + tablename + ";"
+                cursor2.execute(get_tablestats)
+                stats = {}
+                for row2 in cursor2:
+                    rowkeys = row2.keys()
+                    for rowkey in rowkeys:
+                        val = str(row2[rowkey])
+                        stats[rowkey] = val
+
+                if int(stats['numrecs']) > 0:
+                    # make sure the table actually has data
+                    if stats['mindate'] != 'None':
+                        get_minhour = "SELECT min(hour) AS minhour FROM " + \
+                            tablename + " WHERE date = '" + \
+                            stats['mindate'] + "' ;"
+                        cursor2.execute(get_minhour)
+                        for row2 in cursor2:
+                            minhour = str(row2['minhour'])
+                        stats['mindate'] = int(time.mktime(time.strptime(
+                            stats['mindate'] + ' ' + minhour, '%Y-%m-%d %H')))
+                        per_model[model]['mindate'] = stats['mindate'] if stats[
+                            'mindate'] < per_model[model]['mindate'] else per_model[model]['mindate']
+
+                    if stats['maxdate'] != 'None':
+                        get_maxhour = "SELECT max(hour) AS maxhour FROM " + \
+                            tablename + " WHERE date = '" + \
+                            stats['maxdate'] + "' ;"
+                        cursor2.execute(get_maxhour)
+                        for row2 in cursor2:
+                            maxhour = str(row2['maxhour'])
+                        stats['maxdate'] = int(time.mktime(time.strptime(
+                            stats['maxdate'] + ' ' + maxhour, '%Y-%m-%d %H')))
+                        per_model[model]['maxdate'] = stats['maxdate'] if stats[
+                            'maxdate'] > per_model[model]['maxdate'] else per_model[model]['maxdate']
+
+                    per_model[model]['numrecs'] = per_model[model]['numrecs'] + \
+                        int(stats['numrecs'])
+
+                    temp = "^" + model + "_"
+                    region1 = re.sub(temp, "", tablename)
+                    region = re.sub("_sums", "", region1)
+                    if region not in per_model[model]['region']:
+                        per_model[model]['region'].append(region)
+
+                    get_fcst_lens = (
+                        "SELECT DISTINCT fcst_len FROM " + tablename + ";")
+                    cursor2.execute(get_fcst_lens)
+                    thisfcst_lens = []
+                    for row2 in cursor2:
+                        val = list(row2.values())[0]
+                        thisfcst_lens.append(int(val))
+                    per_model[model]['fcst_len'] = list(
+                        set(per_model[model]['fcst_len']) | set(thisfcst_lens))
+                    per_model[model]['fcst_len'].sort(key=int)
+
+        if per_model[model]['mindate'] == sys.float_info.max:
+            per_model[model]['mindate'] = str(datetime.now().strftime('%s'))
+        if per_model[model]['maxdate'] == 0:
+            per_model[model]['maxdate'] = str(datetime.now().strftime('%s'))
+
+        if len(per_model[model]['region']) > 0:
+            region_orders = []
+            for region in per_model[model]['region']:
+                region_orders.append(valid_region_orders[region])
+            per_model[model]['region'] = [x for _, x in sorted(
+                zip(region_orders, per_model[model]['region']))]
+
+    print(per_model)
+
+    # sys.exit(-1)
+
+    usedb = "use " + db
+    cursor.execute(usedb)
+    for model in models_to_reprocess:
+        if len(per_model[model]['region']) > 0 and len(per_model[model]['fcst_len']) > 0:
+            update_rpm_record(cnx, cursor, model, per_model[model]['display_text'], per_model[model]['region'], per_model[model]['fcst_len'], per_model[model]
+                              ['display_category'], per_model[model]['display_order'], per_model[model]['mindate'], per_model[model]['maxdate'], per_model[model]['numrecs'])
+
+    updated_utc = datetime.utcnow().strftime('%Y/%m/%d %H:%M')
+    print("deploy " + db +
+          ".regions_per_model_mats_all_categories complete at " + str(updated_utc))
+
+    cursor.close()
+    cnx.close()
+    cursor2.close()
+    cnx2.close()
+
+
+if __name__ == '__main__':
+    # args[1] should be a comma-separated list of models to reprocess
+    if len(sys.argv) == 2:
+        utcnow = str(datetime.now())
+        msg = 'AIRCRAFT MATS METADATA START: ' + utcnow
+        print(msg)
+        models_to_reprocess = sys.argv[1].strip().split(',')
+        reprocess_specific_metadata(models_to_reprocess)
+        utcnow = str(datetime.now())
+        msg = 'AIRCRAFT MATS METADATA END: ' + utcnow
+        print(msg)
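Both metadata scripts reduce each table's date range to unix epochs by pairing the min/max date with the min/max hour recorded on that date. The conversion in isolation (the date and hour values are illustrative):

import time

# Convert "YYYY-MM-DD" plus an hour-of-day into a unix timestamp, as the
# scripts do for TABLESTATS mindate/maxdate.
mindate = "2023-07-01"
minhour = "0"
epoch = int(time.mktime(time.strptime(mindate + ' ' + minhour, '%Y-%m-%d %H')))
print(epoch)
# note: time.mktime interprets the struct_time in the server's local timezone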
"DailyModelCycle" - When I change the "database" parameter to "AMDAR" - Then the "database" parameter value matches "AMDAR" + When I change the "database" parameter to "AMDAR (Traditional)" + Then the "database" parameter value matches "AMDAR (Traditional)" When I change the "data-source" parameter to "RAP_GSL_iso" Then the "data-source" parameter value matches "RAP_GSL_iso" When I set the dates to "09/21/2019 00:00 - 09/24/2019 00:00" diff --git a/tests/src/features/upperair/basic/matchUnmatchDiffCurvesValidTime.feature b/tests/src/features/upperair/basic/matchUnmatchDiffCurvesValidTime.feature index 392344c295..209862a62f 100644 --- a/tests/src/features/upperair/basic/matchUnmatchDiffCurvesValidTime.feature +++ b/tests/src/features/upperair/basic/matchUnmatchDiffCurvesValidTime.feature @@ -15,8 +15,8 @@ Feature: Match Unmatch Diff Curves Valid Time Scenario: matchUnmatchDiffCurvesValidTime When I set the plot type to "ValidTime" Then the plot type should be "ValidTime" - When I change the "database" parameter to "AMDAR" - Then the "database" parameter value matches "AMDAR" + When I change the "database" parameter to "AMDAR (Traditional)" + Then the "database" parameter value matches "AMDAR (Traditional)" When I change the "data-source" parameter to "RAP_GSL_iso" Then the "data-source" parameter value matches "RAP_GSL_iso" When I set the curve-dates to "09/21/2019 00:00 - 09/24/2019 00:00"