diff --git a/MATScommon b/MATScommon
index 0d0e5f3c7c..d031cfd652 160000
--- a/MATScommon
+++ b/MATScommon
@@ -1 +1 @@
-Subproject commit 0d0e5f3c7c8178baead73825ba9181c4c3da645d
+Subproject commit d031cfd652cc603f72af87e72a91edee6a342c2a
diff --git a/METexpress b/METexpress
index bcb4f38fa9..febc9402c3 160000
--- a/METexpress
+++ b/METexpress
@@ -1 +1 @@
-Subproject commit bcb4f38fa93fbf65090af1570cde8d35c2d7d99e
+Subproject commit febc9402c34fbecbf8b2de7f8eadbd7308d6eb32
diff --git a/apps/ceil-vis15/server/main.js b/apps/ceil-vis15/server/main.js
index 8244262664..79c7f2a594 100644
--- a/apps/ceil-vis15/server/main.js
+++ b/apps/ceil-vis15/server/main.js
@@ -457,7 +457,7 @@ const doCurveParams = function () {
.split(",")
.map(Function.prototype.call, String.prototype.trim)
.map(function (fhr) {
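+          // forecast lengths are stored in minutes; convert to hours and return them as string options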
- return Number(fhr.replace(/'|\[|\]/g, "")) / 60;
+ return (Number(fhr.replace(/'|\[|\]/g, "")) / 60).toString();
});
const thresholds = rows[i].trsh;
diff --git a/apps/surfrad/server/dataFunctions/data_contour.js b/apps/surfrad/server/dataFunctions/data_contour.js
index e8a5839d46..5ad2fee9e0 100644
--- a/apps/surfrad/server/dataFunctions/data_contour.js
+++ b/apps/surfrad/server/dataFunctions/data_contour.js
@@ -62,7 +62,6 @@ dataContour = function (plotParams, plotFunction) {
{ name: "variable" },
{ optionsMap: 1 }
).optionsMap;
- const variable = variableOptionsMap[variableStr];
let scaleClause = "";
const scaleStr = curve.scale;
@@ -105,13 +104,9 @@ dataContour = function (plotParams, plotFunction) {
{ name: "statistic" },
{ optionsMap: 1 }
).optionsMap;
- const statisticClause =
- `sum(${variable[0]}) as square_diff_sum, count(${variable[1]}) as N_sum, sum(${variable[2]}) as obs_model_diff_sum, sum(${variable[3]}) as model_sum, sum(${variable[4]}) as obs_sum, sum(${variable[5]}) as abs_sum, ` +
- `group_concat(m0.secs, ';', ${variable[0]}, ';', 1, ';', ${variable[2]}, ';', ${variable[3]}, ';', ${variable[4]}, ';', ${variable[5]} order by m0.secs) as sub_data, count(${variable[0]}) as N0`;
let dateString = "";
let dateClause = "";
- let matchClause = "";
if (
(xAxisParam === "Init Date" || yAxisParam === "Init Date") &&
xAxisParam !== "Valid Date" &&
@@ -121,8 +116,7 @@ dataContour = function (plotParams, plotFunction) {
} else {
dateString = "m0.secs";
}
- dateClause = `and o.secs >= ${fromSecs} and o.secs <= ${toSecs} and ${dateString} >= ${fromSecs} and ${dateString} <= ${toSecs}`;
- matchClause = "and m0.id = o.id and m0.secs = o.secs";
+ dateClause = `and ${dateString} >= ${fromSecs} and ${dateString} <= ${toSecs}`;
const regionStr = curve.region;
const region = Object.keys(
@@ -131,18 +125,36 @@ dataContour = function (plotParams, plotFunction) {
(key) =>
matsCollections.region.findOne({ name: "region" }).valuesMap[key] === regionStr
);
- let regionClause;
+
+ let queryTableClause;
+ let NAggregate;
+ let NClause;
+ let variable;
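+  // Predefined regions query pre-aggregated sums tables, where variable[1] names a stored N column to sum;
+  // individual stations query the per-site table, where every matching row counts as 1.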
if (region === "all_stat") {
- regionClause = "";
+ variable = variableOptionsMap[variableStr]["Predefined region"];
+ queryTableClause = `from ${model}_all_site_sums as m0`;
+ NAggregate = "sum";
+ [, NClause] = variable;
} else if (region === "all_surf") {
- regionClause = "and m0.id in(1,2,3,4,5,6,7) ";
+ variable = variableOptionsMap[variableStr]["Predefined region"];
+ queryTableClause = `from ${model}_all_surfrad_sums as m0`;
+ NAggregate = "sum";
+ [, NClause] = variable;
} else if (region === "all_sol") {
- regionClause = "and m0.id in(8,9,10,11,12,13,14) ";
+ variable = variableOptionsMap[variableStr]["Predefined region"];
+ queryTableClause = `from ${model}_all_solrad_sums as m0`;
+ NAggregate = "sum";
+ [, NClause] = variable;
} else {
- regionClause = `and m0.id in(${region}) `;
+ variable = variableOptionsMap[variableStr]["Select stations"];
+ queryTableClause = `from ${model}_site_${region} as m0`;
+ NAggregate = "count";
+ NClause = "1";
}
- const queryTableClause = `from surfrad as o, ${model} as m0`;
+ const statisticClause =
+ `sum(${variable[0]}) as square_diff_sum, ${NAggregate}(${variable[1]}) as N_sum, sum(${variable[2]}) as obs_model_diff_sum, sum(${variable[3]}) as model_sum, sum(${variable[4]}) as obs_sum, sum(${variable[5]}) as abs_sum, ` +
+ `group_concat(m0.secs, ';', ${variable[0]}, ';', ${NClause}, ';', ${variable[2]}, ';', ${variable[3]}, ';', ${variable[4]}, ';', ${variable[5]} order by m0.secs) as sub_data, count(${variable[0]}) as N0`;
// For contours, this functions as the colorbar label.
const { statVarUnitMap } = matsCollections.variable.findOne(
@@ -168,12 +180,10 @@ dataContour = function (plotParams, plotFunction) {
"{{statisticClause}} " +
"{{queryTableClause}} " +
"where 1=1 " +
- "{{matchClause}} " +
"{{dateClause}} " +
"{{validTimeClause}} " +
"{{forecastLengthClause}} " +
"{{scaleClause}} " +
- "{{regionClause}} " +
"group by xVal,yVal " +
"order by xVal,yVal" +
";";
@@ -185,22 +195,10 @@ dataContour = function (plotParams, plotFunction) {
statement = statement.replace("{{validTimeClause}}", validTimeClause);
statement = statement.replace("{{forecastLengthClause}}", forecastLengthClause);
statement = statement.replace("{{scaleClause}}", scaleClause);
- statement = statement.replace("{{regionClause}}", regionClause);
- statement = statement.replace("{{matchClause}}", matchClause);
statement = statement.replace("{{dateClause}}", dateClause);
statement = statement.split("{{dateString}}").join(dateString);
dataRequests[label] = statement;
- if (
- model !== "HRRR" &&
- variableStr !== "dswrf" &&
- statisticSelect !== "Obs average"
- ) {
- throw new Error(
- `INFO: The statistic/variable combination [${statisticSelect} and ${variableStr}] is only available for the HRRR data-source.`
- );
- }
-
// send the query statement to the query function
queryResult = matsDataQueryUtils.queryDBContour(
sumPool, // eslint-disable-line no-undef
diff --git a/apps/surfrad/server/dataFunctions/data_contour_diff.js b/apps/surfrad/server/dataFunctions/data_contour_diff.js
index cfe5c6d01f..3420d6d264 100644
--- a/apps/surfrad/server/dataFunctions/data_contour_diff.js
+++ b/apps/surfrad/server/dataFunctions/data_contour_diff.js
@@ -71,7 +71,6 @@ dataContourDiff = function (plotParams, plotFunction) {
{ name: "variable" },
{ optionsMap: 1 }
).optionsMap;
- const variable = variableOptionsMap[variableStr];
let scaleClause = "";
const scaleStr = curve.scale;
@@ -114,13 +113,9 @@ dataContourDiff = function (plotParams, plotFunction) {
{ name: "statistic" },
{ optionsMap: 1 }
).optionsMap;
- const statisticClause =
- `sum(${variable[0]}) as square_diff_sum, count(${variable[1]}) as N_sum, sum(${variable[2]}) as obs_model_diff_sum, sum(${variable[3]}) as model_sum, sum(${variable[4]}) as obs_sum, sum(${variable[5]}) as abs_sum, ` +
- `group_concat(m0.secs, ';', ${variable[0]}, ';', 1, ';', ${variable[2]}, ';', ${variable[3]}, ';', ${variable[4]}, ';', ${variable[5]} order by m0.secs) as sub_data, count(${variable[0]}) as N0`;
let dateString = "";
let dateClause = "";
- let matchClause = "";
if (
(xAxisParam === "Init Date" || yAxisParam === "Init Date") &&
xAxisParam !== "Valid Date" &&
@@ -130,8 +125,7 @@ dataContourDiff = function (plotParams, plotFunction) {
} else {
dateString = "m0.secs";
}
- dateClause = `and o.secs >= ${fromSecs} and o.secs <= ${toSecs} and ${dateString} >= ${fromSecs} and ${dateString} <= ${toSecs}`;
- matchClause = "and m0.id = o.id and m0.secs = o.secs";
+ dateClause = `and ${dateString} >= ${fromSecs} and ${dateString} <= ${toSecs}`;
const regionStr = curve.region;
const region = Object.keys(
@@ -140,18 +134,36 @@ dataContourDiff = function (plotParams, plotFunction) {
(key) =>
matsCollections.region.findOne({ name: "region" }).valuesMap[key] === regionStr
);
- let regionClause;
+
+ let queryTableClause;
+ let NAggregate;
+ let NClause;
+ let variable;
if (region === "all_stat") {
- regionClause = "";
+ variable = variableOptionsMap[variableStr]["Predefined region"];
+ queryTableClause = `from ${model}_all_site_sums as m0`;
+ NAggregate = "sum";
+ [, NClause] = variable;
} else if (region === "all_surf") {
- regionClause = "and m0.id in(1,2,3,4,5,6,7) ";
+ variable = variableOptionsMap[variableStr]["Predefined region"];
+ queryTableClause = `from ${model}_all_surfrad_sums as m0`;
+ NAggregate = "sum";
+ [, NClause] = variable;
} else if (region === "all_sol") {
- regionClause = "and m0.id in(8,9,10,11,12,13,14) ";
+ variable = variableOptionsMap[variableStr]["Predefined region"];
+ queryTableClause = `from ${model}_all_solrad_sums as m0`;
+ NAggregate = "sum";
+ [, NClause] = variable;
} else {
- regionClause = `and m0.id in(${region}) `;
+ variable = variableOptionsMap[variableStr]["Select stations"];
+ queryTableClause = `from ${model}_site_${region} as m0`;
+ NAggregate = "count";
+ NClause = "1";
}
- const queryTableClause = `from surfrad as o, ${model} as m0`;
+ const statisticClause =
+ `sum(${variable[0]}) as square_diff_sum, ${NAggregate}(${variable[1]}) as N_sum, sum(${variable[2]}) as obs_model_diff_sum, sum(${variable[3]}) as model_sum, sum(${variable[4]}) as obs_sum, sum(${variable[5]}) as abs_sum, ` +
+ `group_concat(m0.secs, ';', ${variable[0]}, ';', ${NClause}, ';', ${variable[2]}, ';', ${variable[3]}, ';', ${variable[4]}, ';', ${variable[5]} order by m0.secs) as sub_data, count(${variable[0]}) as N0`;
// For contours, this functions as the colorbar label.
const { statVarUnitMap } = matsCollections.variable.findOne(
@@ -179,12 +191,10 @@ dataContourDiff = function (plotParams, plotFunction) {
"{{statisticClause}} " +
"{{queryTableClause}} " +
"where 1=1 " +
- "{{matchClause}} " +
"{{dateClause}} " +
"{{validTimeClause}} " +
"{{forecastLengthClause}} " +
"{{scaleClause}} " +
- "{{regionClause}} " +
"group by xVal,yVal " +
"order by xVal,yVal" +
";";
@@ -196,22 +206,10 @@ dataContourDiff = function (plotParams, plotFunction) {
statement = statement.replace("{{validTimeClause}}", validTimeClause);
statement = statement.replace("{{forecastLengthClause}}", forecastLengthClause);
statement = statement.replace("{{scaleClause}}", scaleClause);
- statement = statement.replace("{{regionClause}}", regionClause);
- statement = statement.replace("{{matchClause}}", matchClause);
statement = statement.replace("{{dateClause}}", dateClause);
statement = statement.split("{{dateString}}").join(dateString);
dataRequests[label] = statement;
- if (
- model !== "HRRR" &&
- variableStr !== "dswrf" &&
- statisticSelect !== "Obs average"
- ) {
- throw new Error(
- `INFO: The statistic/variable combination [${statisticSelect} and ${variableStr}] is only available for the HRRR data-source.`
- );
- }
-
// send the query statement to the query function
queryResult = matsDataQueryUtils.queryDBContour(
sumPool, // eslint-disable-line no-undef
diff --git a/apps/surfrad/server/dataFunctions/data_dailymodelcycle.js b/apps/surfrad/server/dataFunctions/data_dailymodelcycle.js
index f33c0ac8c1..12e10af28a 100644
--- a/apps/surfrad/server/dataFunctions/data_dailymodelcycle.js
+++ b/apps/surfrad/server/dataFunctions/data_dailymodelcycle.js
@@ -65,7 +65,6 @@ dataDailyModelCycle = function (plotParams, plotFunction) {
{ name: "variable" },
{ optionsMap: 1 }
).optionsMap;
- const variable = variableOptionsMap[variableStr];
const scaleStr = curve.scale;
const scale = Object.keys(
@@ -87,12 +86,8 @@ dataDailyModelCycle = function (plotParams, plotFunction) {
{ name: "statistic" },
{ optionsMap: 1 }
).optionsMap;
- const statisticClause =
- `sum(${variable[0]}) as square_diff_sum, count(${variable[1]}) as N_sum, sum(${variable[2]}) as obs_model_diff_sum, sum(${variable[3]}) as model_sum, sum(${variable[4]}) as obs_sum, sum(${variable[5]}) as abs_sum, ` +
- `group_concat(m0.secs, ';', ${variable[0]}, ';', 1, ';', ${variable[2]}, ';', ${variable[3]}, ';', ${variable[4]}, ';', ${variable[5]} order by m0.secs) as sub_data, count(${variable[0]}) as N0`;
- const dateClause = `and o.secs >= ${fromSecs} and o.secs <= ${toSecs} and m0.secs >= ${fromSecs} and m0.secs <= ${toSecs}`;
- const matchClause = "and m0.id = o.id and m0.secs = o.secs";
+ const dateClause = `and m0.secs >= ${fromSecs} and m0.secs <= ${toSecs}`;
const regionStr = curve.region;
const region = Object.keys(
@@ -101,18 +96,36 @@ dataDailyModelCycle = function (plotParams, plotFunction) {
(key) =>
matsCollections.region.findOne({ name: "region" }).valuesMap[key] === regionStr
);
- let regionClause;
+
+ let queryTableClause;
+ let NAggregate;
+ let NClause;
+ let variable;
if (region === "all_stat") {
- regionClause = "";
+ variable = variableOptionsMap[variableStr]["Predefined region"];
+ queryTableClause = `from ${model}_all_site_sums as m0`;
+ NAggregate = "sum";
+ [, NClause] = variable;
} else if (region === "all_surf") {
- regionClause = "and m0.id in(1,2,3,4,5,6,7) ";
+ variable = variableOptionsMap[variableStr]["Predefined region"];
+ queryTableClause = `from ${model}_all_surfrad_sums as m0`;
+ NAggregate = "sum";
+ [, NClause] = variable;
} else if (region === "all_sol") {
- regionClause = "and m0.id in(8,9,10,11,12,13,14) ";
+ variable = variableOptionsMap[variableStr]["Predefined region"];
+ queryTableClause = `from ${model}_all_solrad_sums as m0`;
+ NAggregate = "sum";
+ [, NClause] = variable;
} else {
- regionClause = `and m0.id in(${region}) `;
+ variable = variableOptionsMap[variableStr]["Select stations"];
+ queryTableClause = `from ${model}_site_${region} as m0`;
+ NAggregate = "count";
+ NClause = "1";
}
- const queryTableClause = `from surfrad as o, ${model} as m0`;
+ const statisticClause =
+ `sum(${variable[0]}) as square_diff_sum, ${NAggregate}(${variable[1]}) as N_sum, sum(${variable[2]}) as obs_model_diff_sum, sum(${variable[3]}) as model_sum, sum(${variable[4]}) as obs_sum, sum(${variable[5]}) as abs_sum, ` +
+ `group_concat(m0.secs, ';', ${variable[0]}, ';', ${NClause}, ';', ${variable[2]}, ';', ${variable[3]}, ';', ${variable[4]}, ';', ${variable[5]} order by m0.secs) as sub_data, count(${variable[0]}) as N0`;
// axisKey is used to determine which axis a curve should use.
// This axisKeySet object is used like a set and if a curve has the same
@@ -141,12 +154,10 @@ dataDailyModelCycle = function (plotParams, plotFunction) {
"{{statisticClause}} " +
"{{queryTableClause}} " +
"where 1=1 " +
- "{{matchClause}} " +
"{{dateClause}} " +
"{{utcCycleStartClause}} " +
"{{forecastLengthClause}} " +
"{{scaleClause}} " +
- "{{regionClause}} " +
"group by avtime " +
"order by avtime" +
";";
@@ -156,21 +167,9 @@ dataDailyModelCycle = function (plotParams, plotFunction) {
statement = statement.replace("{{utcCycleStartClause}}", utcCycleStartClause);
statement = statement.replace("{{forecastLengthClause}}", forecastLengthClause);
statement = statement.replace("{{scaleClause}}", scaleClause);
- statement = statement.replace("{{regionClause}}", regionClause);
- statement = statement.replace("{{matchClause}}", matchClause);
statement = statement.replace("{{dateClause}}", dateClause);
dataRequests[label] = statement;
- if (
- model !== "HRRR" &&
- variableStr !== "dswrf" &&
- statisticSelect !== "Obs average"
- ) {
- throw new Error(
- `INFO: The statistic/variable combination [${statisticSelect} and ${variableStr}] is only available for the HRRR data-source.`
- );
- }
-
// send the query statement to the query function
queryResult = matsDataQueryUtils.queryDBSpecialtyCurve(
sumPool, // eslint-disable-line no-undef
diff --git a/apps/surfrad/server/dataFunctions/data_dieoff.js b/apps/surfrad/server/dataFunctions/data_dieoff.js
new file mode 100644
index 0000000000..ea7e7d99b8
--- /dev/null
+++ b/apps/surfrad/server/dataFunctions/data_dieoff.js
@@ -0,0 +1,317 @@
+/*
+ * Copyright (c) 2021 Colorado State University and Regents of the University of Colorado. All rights reserved.
+ */
+
+import {
+ matsCollections,
+ matsTypes,
+ matsDataUtils,
+ matsDataQueryUtils,
+ matsDataDiffUtils,
+ matsDataCurveOpsUtils,
+ matsDataProcessUtils,
+} from "meteor/randyp:mats-common";
+import { moment } from "meteor/momentjs:moment";
+
+// eslint-disable-next-line no-undef
+dataDieoff = function (plotParams, plotFunction) {
+ // initialize variables common to all curves
+ const appParams = {
+ plotType: matsTypes.PlotTypes.dieoff,
+ matching: plotParams.plotAction === matsTypes.PlotActions.matched,
+ completeness: plotParams.completeness,
+ outliers: plotParams.outliers,
+ hideGaps: plotParams.noGapsCheck,
+ hasLevels: false,
+ };
+
+ const totalProcessingStart = moment();
+ const dataRequests = {}; // used to store data queries
+ let dataFoundForCurve = true;
+ let dataFoundForAnyCurve = false;
+
+ const curves = JSON.parse(JSON.stringify(plotParams.curves));
+ const curvesLength = curves.length;
+
+ const axisMap = Object.create(null);
+ let xmax = -1 * Number.MAX_VALUE;
+ let ymax = -1 * Number.MAX_VALUE;
+ let xmin = Number.MAX_VALUE;
+ let ymin = Number.MAX_VALUE;
+
+ let statType;
+ const allStatTypes = [];
+ const utcCycleStarts = [];
+ const idealValues = [];
+
+ let statement = "";
+ let error = "";
+ const dataset = [];
+
+ for (let curveIndex = 0; curveIndex < curvesLength; curveIndex += 1) {
+ // initialize variables specific to each curve
+ const curve = curves[curveIndex];
+ const { label } = curve;
+ const { diffFrom } = curve;
+ const model = matsCollections["data-source"].findOne({ name: "data-source" })
+ .optionsMap[curve["data-source"]][0];
+
+ const variableStr = curve.variable;
+ const variableOptionsMap = matsCollections.variable.findOne(
+ { name: "variable" },
+ { optionsMap: 1 }
+ ).optionsMap;
+
+ const scaleStr = curve.scale;
+ const scale = Object.keys(
+ matsCollections.scale.findOne({ name: "scale" }).valuesMap
+ ).find(
+ (key) =>
+ matsCollections.scale.findOne({ name: "scale" }).valuesMap[key] === scaleStr
+ );
+ const scaleClause = `and m0.scale = ${scale}`;
+
+ let validTimeClause = "";
+ let validTimes;
+
+ let utcCycleStartClause = "";
+ let utcCycleStart;
+
+ const forecastLengthStr = curve["dieoff-type"];
+ const forecastLengthOptionsMap = matsCollections["dieoff-type"].findOne(
+ { name: "dieoff-type" },
+ { optionsMap: 1 }
+ ).optionsMap;
+ const forecastLength = forecastLengthOptionsMap[forecastLengthStr][0];
+
+ const statisticSelect = curve.statistic;
+ const statisticOptionsMap = matsCollections.statistic.findOne(
+ { name: "statistic" },
+ { optionsMap: 1 }
+ ).optionsMap;
+
+ const dateRange = matsDataUtils.getDateRange(curve["curve-dates"]);
+ const fromSecs = dateRange.fromSeconds;
+ const toSecs = dateRange.toSeconds;
+ let dateClause;
+
+ const regionStr = curve.region;
+ const region = Object.keys(
+ matsCollections.region.findOne({ name: "region" }).valuesMap
+ ).find(
+ (key) =>
+ matsCollections.region.findOne({ name: "region" }).valuesMap[key] === regionStr
+ );
+
+ let queryTableClause;
+ let NAggregate;
+ let NClause;
+ let variable;
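+    // The region choice picks both the summary table and how N is accumulated:
+    // predefined regions sum a stored N column, single stations count rows directly.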
+ if (region === "all_stat") {
+ variable = variableOptionsMap[variableStr]["Predefined region"];
+ queryTableClause = `from ${model}_all_site_sums as m0`;
+ NAggregate = "sum";
+ [, NClause] = variable;
+ } else if (region === "all_surf") {
+ variable = variableOptionsMap[variableStr]["Predefined region"];
+ queryTableClause = `from ${model}_all_surfrad_sums as m0`;
+ NAggregate = "sum";
+ [, NClause] = variable;
+ } else if (region === "all_sol") {
+ variable = variableOptionsMap[variableStr]["Predefined region"];
+ queryTableClause = `from ${model}_all_solrad_sums as m0`;
+ NAggregate = "sum";
+ [, NClause] = variable;
+ } else {
+ variable = variableOptionsMap[variableStr]["Select stations"];
+ queryTableClause = `from ${model}_site_${region} as m0`;
+ NAggregate = "count";
+ NClause = "1";
+ }
+
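+    // sub_data packs the per-time components (secs;square_diff;N;obs-model_diff;model;obs;abs)
+    // so downstream matching and statistic routines can recompute stats from the raw sums.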
+ const statisticClause =
+ `sum(${variable[0]}) as square_diff_sum, ${NAggregate}(${variable[1]}) as N_sum, sum(${variable[2]}) as obs_model_diff_sum, sum(${variable[3]}) as model_sum, sum(${variable[4]}) as obs_sum, sum(${variable[5]}) as abs_sum, ` +
+ `group_concat(m0.secs, ';', ${variable[0]}, ';', ${NClause}, ';', ${variable[2]}, ';', ${variable[3]}, ';', ${variable[4]}, ';', ${variable[5]} order by m0.secs) as sub_data, count(${variable[0]}) as N0`;
+
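+    // The dieoff type controls the time filtering: a full dieoff filters on valid time,
+    // a UTC-cycle dieoff filters on init hour, and a single-cycle forecast pins the first init time in the range.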
+ if (forecastLength === matsTypes.ForecastTypes.dieoff) {
+ validTimes = curve["valid-time"] === undefined ? [] : curve["valid-time"];
+ if (validTimes.length !== 0 && validTimes !== matsTypes.InputTypes.unused) {
+ validTimeClause = `and floor(m0.secs%(24*3600)/3600) IN(${validTimes})`;
+ }
+ dateClause = `and m0.secs >= ${fromSecs} and m0.secs <= ${toSecs}`;
+ } else if (forecastLength === matsTypes.ForecastTypes.utcCycle) {
+ utcCycleStart =
+ curve["utc-cycle-start"] === undefined ? [] : curve["utc-cycle-start"];
+ if (utcCycleStart.length !== 0 && utcCycleStart !== matsTypes.InputTypes.unused) {
+ utcCycleStartClause = `and floor((m0.secs - m0.fcst_len*3600)%(24*3600)/3600) IN(${utcCycleStart})`;
+ }
+ dateClause = `and m0.secs-m0.fcst_len*3600 >= ${fromSecs} and m0.secs-m0.fcst_len*3600 <= ${toSecs}`;
+ } else {
+ dateClause = `and m0.secs-m0.fcst_len*3600 = ${fromSecs}`;
+ }
+
+ // axisKey is used to determine which axis a curve should use.
+ // This axisKeySet object is used like a set and if a curve has the same
+ // units (axisKey) it will use the same axis.
+ // The axis number is assigned to the axisKeySet value, which is the axisKey.
+ const { statVarUnitMap } = matsCollections.variable.findOne(
+ { name: "variable" },
+ { statVarUnitMap: 1 }
+ );
+ statType = statisticOptionsMap[statisticSelect];
+ allStatTypes.push(statType);
+ const axisKey = statVarUnitMap[statisticSelect][variableStr];
+ curves[curveIndex].axisKey = axisKey; // stash the axisKey to use it later for axis options
+
+ let d;
+ if (!diffFrom) {
+ let queryResult;
+ const startMoment = moment();
+ let finishMoment;
+ try {
+ statement =
+ "select m0.fcst_len/60 as fcst_lead, " +
+ "count(distinct m0.secs) as N_times, " +
+ "min(m0.secs) as min_secs, " +
+ "max(m0.secs) as max_secs, " +
+ "{{statisticClause}} " +
+ "{{queryTableClause}} " +
+ "where 1=1 " +
+ "{{dateClause}} " +
+ "{{validTimeClause}} " +
+ "{{utcCycleStartClause}} " +
+ "{{scaleClause}} " +
+ "group by fcst_lead " +
+ "order by fcst_lead" +
+ ";";
+
+ statement = statement.replace("{{statisticClause}}", statisticClause);
+ statement = statement.replace("{{queryTableClause}}", queryTableClause);
+ statement = statement.replace("{{validTimeClause}}", validTimeClause);
+ statement = statement.replace("{{utcCycleStartClause}}", utcCycleStartClause);
+ statement = statement.replace("{{scaleClause}}", scaleClause);
+ statement = statement.replace("{{dateClause}}", dateClause);
+ dataRequests[label] = statement;
+
+ // send the query statement to the query function
+ queryResult = matsDataQueryUtils.queryDBSpecialtyCurve(
+ sumPool, // eslint-disable-line no-undef
+ statement,
+ appParams,
+ `${statisticSelect}_${variableStr}`
+ );
+
+ finishMoment = moment();
+ dataRequests[label] = statement;
+ dataRequests[`data retrieval (query) time - ${label}`] = {
+ begin: startMoment.format(),
+ finish: finishMoment.format(),
+ duration: `${moment
+ .duration(finishMoment.diff(startMoment))
+ .asSeconds()} seconds`,
+ recordCount: queryResult.data.x.length,
+ };
+ // get the data back from the query
+ d = queryResult.data;
+ } catch (e) {
+ // this is an error produced by a bug in the query function, not an error returned by the mysql database
+ e.message = `Error in queryDB: ${e.message} for statement: ${statement}`;
+ throw new Error(e.message);
+ }
+
+ if (queryResult.error !== undefined && queryResult.error !== "") {
+ if (queryResult.error === matsTypes.Messages.NO_DATA_FOUND) {
+ // this is NOT an error just a no data condition
+ dataFoundForCurve = false;
+ } else {
+ // this is an error returned by the mysql database
+          error += `Error from verification query: <br>${queryResult.error}<br>query: <br>${statement}<br>`;
+ throw new Error(error);
+ }
+ } else {
+ dataFoundForAnyCurve = true;
+ }
+
+ // set axis limits based on returned data
+ if (dataFoundForCurve) {
+ xmin = xmin < d.xmin ? xmin : d.xmin;
+ xmax = xmax > d.xmax ? xmax : d.xmax;
+ ymin = ymin < d.ymin ? ymin : d.ymin;
+ ymax = ymax > d.ymax ? ymax : d.ymax;
+ }
+ } else {
+ // this is a difference curve
+ const diffResult = matsDataDiffUtils.getDataForDiffCurve(
+ dataset,
+ diffFrom,
+ appParams,
+ allStatTypes
+ );
+ d = diffResult.dataset;
+ xmin = xmin < d.xmin ? xmin : d.xmin;
+ xmax = xmax > d.xmax ? xmax : d.xmax;
+ ymin = ymin < d.ymin ? ymin : d.ymin;
+ ymax = ymax > d.ymax ? ymax : d.ymax;
+ }
+
+ // set curve annotation to be the curve mean -- may be recalculated later
+ // also pass previously calculated axis stats to curve options
+ const postQueryStartMoment = moment();
+ const mean = d.sum / d.x.length;
+ const annotation =
+ mean === undefined
+ ? `${label}- mean = NoData`
+ : `${label}- mean = ${mean.toPrecision(4)}`;
+ curve.annotation = annotation;
+ curve.xmin = d.xmin;
+ curve.xmax = d.xmax;
+ curve.ymin = d.ymin;
+ curve.ymax = d.ymax;
+ curve.axisKey = axisKey;
+ const cOptions = matsDataCurveOpsUtils.generateSeriesCurveOptions(
+ curve,
+ curveIndex,
+ axisMap,
+ d,
+ appParams
+ ); // generate plot with data, curve annotation, axis labels, etc.
+ dataset.push(cOptions);
+ const postQueryFinishMoment = moment();
+ dataRequests[`post data retrieval (query) process time - ${label}`] = {
+ begin: postQueryStartMoment.format(),
+ finish: postQueryFinishMoment.format(),
+ duration: `${moment
+ .duration(postQueryFinishMoment.diff(postQueryStartMoment))
+ .asSeconds()} seconds`,
+ };
+ } // end for curves
+
+ if (!dataFoundForAnyCurve) {
+ // we found no data for any curves so don't bother proceeding
+ throw new Error("INFO: No valid data for any curves.");
+ }
+
+ // process the data returned by the query
+ const curveInfoParams = {
+ curves,
+ curvesLength,
+ idealValues,
+ utcCycleStarts,
+ statType,
+ axisMap,
+ xmax,
+ xmin,
+ };
+ const bookkeepingParams = {
+ dataRequests,
+ totalProcessingStart,
+ };
+ const result = matsDataProcessUtils.processDataXYCurve(
+ dataset,
+ appParams,
+ curveInfoParams,
+ plotParams,
+ bookkeepingParams
+ );
+ plotFunction(result);
+};
diff --git a/apps/surfrad/server/dataFunctions/data_histogram.js b/apps/surfrad/server/dataFunctions/data_histogram.js
index 333884b0fe..d6ebaad052 100644
--- a/apps/surfrad/server/dataFunctions/data_histogram.js
+++ b/apps/surfrad/server/dataFunctions/data_histogram.js
@@ -61,7 +61,6 @@ dataHistogram = function (plotParams, plotFunction) {
{ name: "variable" },
{ optionsMap: 1 }
).optionsMap;
- const variable = variableOptionsMap[variableStr];
const scaleStr = curve.scale;
const scale = Object.keys(
@@ -86,15 +85,11 @@ dataHistogram = function (plotParams, plotFunction) {
{ name: "statistic" },
{ optionsMap: 1 }
).optionsMap;
- const statisticClause =
- `sum(${variable[0]}) as square_diff_sum, count(${variable[1]}) as N_sum, sum(${variable[2]}) as obs_model_diff_sum, sum(${variable[3]}) as model_sum, sum(${variable[4]}) as obs_sum, sum(${variable[5]}) as abs_sum, ` +
- `group_concat(m0.secs, ';', ${variable[0]}, ';', 1, ';', ${variable[2]}, ';', ${variable[3]}, ';', ${variable[4]}, ';', ${variable[5]} order by m0.secs) as sub_data, count(${variable[0]}) as N0`;
const dateRange = matsDataUtils.getDateRange(curve["curve-dates"]);
const fromSecs = dateRange.fromSeconds;
const toSecs = dateRange.toSeconds;
- const dateClause = `and o.secs >= ${fromSecs} and o.secs <= ${toSecs} and m0.secs >= ${fromSecs} and m0.secs <= ${toSecs}`;
- const matchClause = "and m0.id = o.id and m0.secs = o.secs";
+ const dateClause = `and m0.secs >= ${fromSecs} and m0.secs <= ${toSecs}`;
const regionStr = curve.region;
const region = Object.keys(
@@ -103,18 +98,36 @@ dataHistogram = function (plotParams, plotFunction) {
(key) =>
matsCollections.region.findOne({ name: "region" }).valuesMap[key] === regionStr
);
- let regionClause;
+
+ let queryTableClause;
+ let NAggregate;
+ let NClause;
+ let variable;
if (region === "all_stat") {
- regionClause = "";
+ variable = variableOptionsMap[variableStr]["Predefined region"];
+ queryTableClause = `from ${model}_all_site_sums as m0`;
+ NAggregate = "sum";
+ [, NClause] = variable;
} else if (region === "all_surf") {
- regionClause = "and m0.id in(1,2,3,4,5,6,7) ";
+ variable = variableOptionsMap[variableStr]["Predefined region"];
+ queryTableClause = `from ${model}_all_surfrad_sums as m0`;
+ NAggregate = "sum";
+ [, NClause] = variable;
} else if (region === "all_sol") {
- regionClause = "and m0.id in(8,9,10,11,12,13,14) ";
+ variable = variableOptionsMap[variableStr]["Predefined region"];
+ queryTableClause = `from ${model}_all_solrad_sums as m0`;
+ NAggregate = "sum";
+ [, NClause] = variable;
} else {
- regionClause = `and m0.id in(${region}) `;
+ variable = variableOptionsMap[variableStr]["Select stations"];
+ queryTableClause = `from ${model}_site_${region} as m0`;
+ NAggregate = "count";
+ NClause = "1";
}
- const queryTableClause = `from surfrad as o, ${model} as m0`;
+ const statisticClause =
+ `sum(${variable[0]}) as square_diff_sum, ${NAggregate}(${variable[1]}) as N_sum, sum(${variable[2]}) as obs_model_diff_sum, sum(${variable[3]}) as model_sum, sum(${variable[4]}) as obs_sum, sum(${variable[5]}) as abs_sum, ` +
+ `group_concat(m0.secs, ';', ${variable[0]}, ';', ${NClause}, ';', ${variable[2]}, ';', ${variable[3]}, ';', ${variable[4]}, ';', ${variable[5]} order by m0.secs) as sub_data, count(${variable[0]}) as N0`;
// axisKey is used to determine which axis a curve should use.
// This axisKeySet object is used like a set and if a curve has the same
@@ -147,12 +160,10 @@ dataHistogram = function (plotParams, plotFunction) {
"{{statisticClause}} " +
"{{queryTableClause}} " +
"where 1=1 " +
- "{{matchClause}} " +
"{{dateClause}} " +
"{{validTimeClause}} " +
"{{forecastLengthClause}} " +
"{{scaleClause}} " +
- "{{regionClause}} " +
"group by avtime " +
"order by avtime" +
";";
@@ -162,21 +173,9 @@ dataHistogram = function (plotParams, plotFunction) {
statement = statement.replace("{{validTimeClause}}", validTimeClause);
statement = statement.replace("{{forecastLengthClause}}", forecastLengthClause);
statement = statement.replace("{{scaleClause}}", scaleClause);
- statement = statement.replace("{{regionClause}}", regionClause);
- statement = statement.replace("{{matchClause}}", matchClause);
statement = statement.replace("{{dateClause}}", dateClause);
dataRequests[label] = statement;
- if (
- model !== "HRRR" &&
- variableStr !== "dswrf" &&
- statisticSelect !== "Obs average"
- ) {
- throw new Error(
- `INFO: The statistic/variable combination [${statisticSelect} and ${variableStr}] is only available for the HRRR data-source.`
- );
- }
-
// send the query statement to the query function
queryResult = matsDataQueryUtils.queryDBSpecialtyCurve(
sumPool, // eslint-disable-line no-undef
diff --git a/apps/surfrad/server/dataFunctions/data_series.js b/apps/surfrad/server/dataFunctions/data_series.js
index ca4e1ee11f..429c5de5c0 100644
--- a/apps/surfrad/server/dataFunctions/data_series.js
+++ b/apps/surfrad/server/dataFunctions/data_series.js
@@ -65,7 +65,6 @@ dataSeries = function (plotParams, plotFunction) {
{ name: "variable" },
{ optionsMap: 1 }
).optionsMap;
- const variable = variableOptionsMap[variableStr];
const scaleStr = curve.scale;
const scale = Object.keys(
@@ -90,9 +89,6 @@ dataSeries = function (plotParams, plotFunction) {
{ name: "statistic" },
{ optionsMap: 1 }
).optionsMap;
- const statisticClause =
- `sum(${variable[0]}) as square_diff_sum, count(${variable[1]}) as N_sum, sum(${variable[2]}) as obs_model_diff_sum, sum(${variable[3]}) as model_sum, sum(${variable[4]}) as obs_sum, sum(${variable[5]}) as abs_sum, ` +
- `group_concat(m0.secs, ';', ${variable[0]}, ';', 1, ';', ${variable[2]}, ';', ${variable[3]}, ';', ${variable[4]}, ';', ${variable[5]} order by m0.secs) as sub_data, count(${variable[0]}) as N0`;
const averageStr = curve.average;
const averageOptionsMap = matsCollections.average.findOne(
@@ -101,8 +97,7 @@ dataSeries = function (plotParams, plotFunction) {
).optionsMap;
const average = averageOptionsMap[averageStr][0];
- const dateClause = `and o.secs >= ${fromSecs} and o.secs <= ${toSecs} and m0.secs >= ${fromSecs} and m0.secs <= ${toSecs}`;
- const matchClause = "and m0.id = o.id and m0.secs = o.secs";
+ const dateClause = `and m0.secs >= ${fromSecs} and m0.secs <= ${toSecs}`;
const regionStr = curve.region;
const region = Object.keys(
@@ -111,18 +106,36 @@ dataSeries = function (plotParams, plotFunction) {
(key) =>
matsCollections.region.findOne({ name: "region" }).valuesMap[key] === regionStr
);
- let regionClause;
+
+ let queryTableClause;
+ let NAggregate;
+ let NClause;
+ let variable;
if (region === "all_stat") {
- regionClause = "";
+ variable = variableOptionsMap[variableStr]["Predefined region"];
+ queryTableClause = `from ${model}_all_site_sums as m0`;
+ NAggregate = "sum";
+ [, NClause] = variable;
} else if (region === "all_surf") {
- regionClause = "and m0.id in(1,2,3,4,5,6,7) ";
+ variable = variableOptionsMap[variableStr]["Predefined region"];
+ queryTableClause = `from ${model}_all_surfrad_sums as m0`;
+ NAggregate = "sum";
+ [, NClause] = variable;
} else if (region === "all_sol") {
- regionClause = "and m0.id in(8,9,10,11,12,13,14) ";
+ variable = variableOptionsMap[variableStr]["Predefined region"];
+ queryTableClause = `from ${model}_all_solrad_sums as m0`;
+ NAggregate = "sum";
+ [, NClause] = variable;
} else {
- regionClause = `and m0.id in(${region}) `;
+ variable = variableOptionsMap[variableStr]["Select stations"];
+ queryTableClause = `from ${model}_site_${region} as m0`;
+ NAggregate = "count";
+ NClause = "1";
}
- const queryTableClause = `from surfrad as o, ${model} as m0`;
+ const statisticClause =
+ `sum(${variable[0]}) as square_diff_sum, ${NAggregate}(${variable[1]}) as N_sum, sum(${variable[2]}) as obs_model_diff_sum, sum(${variable[3]}) as model_sum, sum(${variable[4]}) as obs_sum, sum(${variable[5]}) as abs_sum, ` +
+ `group_concat(m0.secs, ';', ${variable[0]}, ';', ${NClause}, ';', ${variable[2]}, ';', ${variable[3]}, ';', ${variable[4]}, ';', ${variable[5]} order by m0.secs) as sub_data, count(${variable[0]}) as N0`;
// axisKey is used to determine which axis a curve should use.
// This axisKeySet object is used like a set and if a curve has the same
@@ -151,12 +164,10 @@ dataSeries = function (plotParams, plotFunction) {
"{{statisticClause}} " +
"{{queryTableClause}} " +
"where 1=1 " +
- "{{matchClause}} " +
"{{dateClause}} " +
"{{validTimeClause}} " +
"{{forecastLengthClause}} " +
"{{scaleClause}} " +
- "{{regionClause}} " +
"group by avtime " +
"order by avtime" +
";";
@@ -167,21 +178,9 @@ dataSeries = function (plotParams, plotFunction) {
statement = statement.replace("{{validTimeClause}}", validTimeClause);
statement = statement.replace("{{forecastLengthClause}}", forecastLengthClause);
statement = statement.replace("{{scaleClause}}", scaleClause);
- statement = statement.replace("{{regionClause}}", regionClause);
- statement = statement.replace("{{matchClause}}", matchClause);
statement = statement.replace("{{dateClause}}", dateClause);
dataRequests[label] = statement;
- if (
- model !== "HRRR" &&
- variableStr !== "dswrf" &&
- statisticSelect !== "Obs average"
- ) {
- throw new Error(
- `INFO: The statistic/variable combination [${statisticSelect} and ${variableStr}] is only available for the HRRR data-source.`
- );
- }
-
// math is done on forecastLength later on -- set all analyses to 0
if (forecastLength === "-99") {
forecastLength = "0";
diff --git a/apps/surfrad/server/dataFunctions/data_simple_scatter.js b/apps/surfrad/server/dataFunctions/data_simple_scatter.js
index 4193a83304..a210acc2af 100644
--- a/apps/surfrad/server/dataFunctions/data_simple_scatter.js
+++ b/apps/surfrad/server/dataFunctions/data_simple_scatter.js
@@ -66,8 +66,6 @@ dataSimpleScatter = function (plotParams, plotFunction) {
{ name: "variable" },
{ optionsMap: 1 }
).optionsMap;
- const variableX = variableOptionsMap[variableXStr];
- const variableY = variableOptionsMap[variableYStr];
const scaleStr = curve.scale;
const scale = Object.keys(
@@ -103,24 +101,18 @@ dataSimpleScatter = function (plotParams, plotFunction) {
{ name: "statistic" },
{ optionsMap: 1 }
).optionsMap;
- const statisticClause =
- `sum(${variableX[0]}) as square_diff_sumX, count(${variableX[1]}) as N_sumX, sum(${variableX[2]}) as obs_model_diff_sumX, sum(${variableX[3]}) as model_sumX, sum(${variableX[4]}) as obs_sumX, sum(${variableX[5]}) as abs_sumX, ` +
- `sum(${variableY[0]}) as square_diff_sumY, count(${variableY[1]}) as N_sumY, sum(${variableY[2]}) as obs_model_diff_sumY, sum(${variableY[3]}) as model_sumY, sum(${variableY[4]}) as obs_sumY, sum(${variableY[5]}) as abs_sumY, ` +
- `group_concat(m0.secs, ';', ${variableX[0]}, ';', 1, ';', ${variableX[2]}, ';', ${variableX[3]}, ';', ${variableX[4]}, ';', ${variableX[5]}, ';', ${variableY[0]}, ';', 1, ';', ${variableY[2]}, ';', ${variableY[3]}, ';', ${variableY[4]}, ';', ${variableY[5]} order by m0.secs) as sub_data, count(${variableX[0]}) as N0`;
const dateRange = matsDataUtils.getDateRange(curve["curve-dates"]);
const fromSecs = dateRange.fromSeconds;
const toSecs = dateRange.toSeconds;
let dateString = "";
let dateClause = "";
- let matchClause = "";
if (binParam === "Init Date" && binParam !== "Valid Date") {
dateString = "m0.secs-m0.fcst_len*60";
} else {
dateString = "m0.secs";
}
- dateClause = `and o.secs >= ${fromSecs} and o.secs <= ${toSecs} and ${dateString} >= ${fromSecs} and ${dateString} <= ${toSecs}`;
- matchClause = "and m0.id = o.id and m0.secs = o.secs";
+ dateClause = `and ${dateString} >= ${fromSecs} and ${dateString} <= ${toSecs}`;
const regionStr = curve.region;
const region = Object.keys(
@@ -129,18 +121,47 @@ dataSimpleScatter = function (plotParams, plotFunction) {
(key) =>
matsCollections.region.findOne({ name: "region" }).valuesMap[key] === regionStr
);
- let regionClause;
+
+ let queryTableClause;
+ let NAggregate;
+ let NClauseX;
+ let NClauseY;
+ let variableX;
+ let variableY;
if (region === "all_stat") {
- regionClause = "";
+ variableX = variableOptionsMap[variableXStr]["Predefined region"];
+ variableY = variableOptionsMap[variableYStr]["Predefined region"];
+ queryTableClause = `from ${model}_all_site_sums as m0`;
+ NAggregate = "sum";
+ [, NClauseX] = variableX;
+ [, NClauseY] = variableY;
} else if (region === "all_surf") {
- regionClause = "and m0.id in(1,2,3,4,5,6,7) ";
+ variableX = variableOptionsMap[variableXStr]["Predefined region"];
+ variableY = variableOptionsMap[variableYStr]["Predefined region"];
+ queryTableClause = `from ${model}_all_surfrad_sums as m0`;
+ NAggregate = "sum";
+ [, NClauseX] = variableX;
+ [, NClauseY] = variableY;
} else if (region === "all_sol") {
- regionClause = "and m0.id in(8,9,10,11,12,13,14) ";
+ variableX = variableOptionsMap[variableXStr]["Predefined region"];
+ variableY = variableOptionsMap[variableYStr]["Predefined region"];
+ queryTableClause = `from ${model}_all_solrad_sums as m0`;
+ NAggregate = "sum";
+ [, NClauseX] = variableX;
+ [, NClauseY] = variableY;
} else {
- regionClause = `and m0.id in(${region}) `;
+ variableX = variableOptionsMap[variableXStr]["Select stations"];
+ variableY = variableOptionsMap[variableYStr]["Select stations"];
+ queryTableClause = `from ${model}_site_${region} as m0`;
+ NAggregate = "count";
+ NClauseX = "1";
+ NClauseY = "1";
}
- const queryTableClause = `from surfrad as o, ${model} as m0`;
+ const statisticClause =
+ `sum(${variableX[0]}) as square_diff_sumX, ${NAggregate}(${variableX[1]}) as N_sumX, sum(${variableX[2]}) as obs_model_diff_sumX, sum(${variableX[3]}) as model_sumX, sum(${variableX[4]}) as obs_sumX, sum(${variableX[5]}) as abs_sumX, ` +
+ `sum(${variableY[0]}) as square_diff_sumY, ${NAggregate}(${variableY[1]}) as N_sumY, sum(${variableY[2]}) as obs_model_diff_sumY, sum(${variableY[3]}) as model_sumY, sum(${variableY[4]}) as obs_sumY, sum(${variableY[5]}) as abs_sumY, ` +
+ `group_concat(m0.secs, ';', ${variableX[0]}, ';', ${NClauseX}, ';', ${variableX[2]}, ';', ${variableX[3]}, ';', ${variableX[4]}, ';', ${variableX[5]}, ';', ${variableY[0]}, ';', ${NClauseY}, ';', ${variableY[2]}, ';', ${variableY[3]}, ';', ${variableY[4]}, ';', ${variableY[5]} order by m0.secs) as sub_data, count(${variableX[0]}) as N0`;
const { statVarUnitMap } = matsCollections.variable.findOne(
{ name: "variable" },
@@ -164,12 +185,10 @@ dataSimpleScatter = function (plotParams, plotFunction) {
"{{statisticClause}} " +
"{{queryTableClause}} " +
"where 1=1 " +
- "{{matchClause}} " +
"{{dateClause}} " +
"{{validTimeClause}} " +
"{{forecastLengthClause}} " +
"{{scaleClause}} " +
- "{{regionClause}} " +
"group by binVal " +
"order by binVal" +
";";
@@ -180,30 +199,10 @@ dataSimpleScatter = function (plotParams, plotFunction) {
statement = statement.replace("{{validTimeClause}}", validTimeClause);
statement = statement.replace("{{forecastLengthClause}}", forecastLengthClause);
statement = statement.replace("{{scaleClause}}", scaleClause);
- statement = statement.replace("{{regionClause}}", regionClause);
- statement = statement.replace("{{matchClause}}", matchClause);
statement = statement.replace("{{dateClause}}", dateClause);
statement = statement.split("{{dateString}}").join(dateString);
dataRequests[label] = statement;
- if (
- model !== "HRRR" &&
- variableXStr !== "dswrf" &&
- statisticXSelect !== "Obs average"
- ) {
- throw new Error(
- `INFO: The statistic/variable combination [${statisticXSelect} and ${variableXStr}] is only available for the HRRR data-source.`
- );
- } else if (
- model !== "HRRR" &&
- variableYStr !== "dswrf" &&
- statisticYSelect !== "Obs average"
- ) {
- throw new Error(
- `INFO: The statistic/variable combination [${statisticYSelect} and ${variableYStr}] is only available for the HRRR data-source.`
- );
- }
-
// send the query statement to the query function
queryResult = matsDataQueryUtils.queryDBSimpleScatter(
sumPool, // eslint-disable-line no-undef
diff --git a/apps/surfrad/server/dataFunctions/data_validtime.js b/apps/surfrad/server/dataFunctions/data_validtime.js
index 232fdf97a5..e953377240 100644
--- a/apps/surfrad/server/dataFunctions/data_validtime.js
+++ b/apps/surfrad/server/dataFunctions/data_validtime.js
@@ -61,7 +61,6 @@ dataValidTime = function (plotParams, plotFunction) {
{ name: "variable" },
{ optionsMap: 1 }
).optionsMap;
- const variable = variableOptionsMap[variableStr];
const scaleStr = curve.scale;
const scale = Object.keys(
@@ -80,15 +79,11 @@ dataValidTime = function (plotParams, plotFunction) {
{ name: "statistic" },
{ optionsMap: 1 }
).optionsMap;
- const statisticClause =
- `sum(${variable[0]}) as square_diff_sum, count(${variable[1]}) as N_sum, sum(${variable[2]}) as obs_model_diff_sum, sum(${variable[3]}) as model_sum, sum(${variable[4]}) as obs_sum, sum(${variable[5]}) as abs_sum, ` +
- `group_concat(m0.secs, ';', ${variable[0]}, ';', 1, ';', ${variable[2]}, ';', ${variable[3]}, ';', ${variable[4]}, ';', ${variable[5]} order by m0.secs) as sub_data, count(${variable[0]}) as N0`;
const dateRange = matsDataUtils.getDateRange(curve["curve-dates"]);
const fromSecs = dateRange.fromSeconds;
const toSecs = dateRange.toSeconds;
- const dateClause = `and o.secs >= ${fromSecs} and o.secs <= ${toSecs} and m0.secs >= ${fromSecs} and m0.secs <= ${toSecs}`;
- const matchClause = "and m0.id = o.id and m0.secs = o.secs";
+ const dateClause = `and m0.secs >= ${fromSecs} and m0.secs <= ${toSecs}`;
const regionStr = curve.region;
const region = Object.keys(
@@ -97,18 +92,36 @@ dataValidTime = function (plotParams, plotFunction) {
(key) =>
matsCollections.region.findOne({ name: "region" }).valuesMap[key] === regionStr
);
- let regionClause;
+
+ let queryTableClause;
+ let NAggregate;
+ let NClause;
+ let variable;
if (region === "all_stat") {
- regionClause = "";
+ variable = variableOptionsMap[variableStr]["Predefined region"];
+ queryTableClause = `from ${model}_all_site_sums as m0`;
+ NAggregate = "sum";
+ [, NClause] = variable;
} else if (region === "all_surf") {
- regionClause = "and m0.id in(1,2,3,4,5,6,7) ";
+ variable = variableOptionsMap[variableStr]["Predefined region"];
+ queryTableClause = `from ${model}_all_surfrad_sums as m0`;
+ NAggregate = "sum";
+ [, NClause] = variable;
} else if (region === "all_sol") {
- regionClause = "and m0.id in(8,9,10,11,12,13,14) ";
+ variable = variableOptionsMap[variableStr]["Predefined region"];
+ queryTableClause = `from ${model}_all_solrad_sums as m0`;
+ NAggregate = "sum";
+ [, NClause] = variable;
} else {
- regionClause = `and m0.id in(${region}) `;
+ variable = variableOptionsMap[variableStr]["Select stations"];
+ queryTableClause = `from ${model}_site_${region} as m0`;
+ NAggregate = "count";
+ NClause = "1";
}
- const queryTableClause = `from surfrad as o, ${model} as m0`;
+ const statisticClause =
+ `sum(${variable[0]}) as square_diff_sum, ${NAggregate}(${variable[1]}) as N_sum, sum(${variable[2]}) as obs_model_diff_sum, sum(${variable[3]}) as model_sum, sum(${variable[4]}) as obs_sum, sum(${variable[5]}) as abs_sum, ` +
+ `group_concat(m0.secs, ';', ${variable[0]}, ';', ${NClause}, ';', ${variable[2]}, ';', ${variable[3]}, ';', ${variable[4]}, ';', ${variable[5]} order by m0.secs) as sub_data, count(${variable[0]}) as N0`;
// axisKey is used to determine which axis a curve should use.
// This axisKeySet object is used like a set and if a curve has the same
@@ -137,11 +150,9 @@ dataValidTime = function (plotParams, plotFunction) {
"{{statisticClause}} " +
"{{queryTableClause}} " +
"where 1=1 " +
- "{{matchClause}} " +
"{{dateClause}} " +
"{{forecastLengthClause}} " +
"{{scaleClause}} " +
- "{{regionClause}} " +
"group by hr_of_day " +
"order by hr_of_day" +
";";
@@ -150,21 +161,9 @@ dataValidTime = function (plotParams, plotFunction) {
statement = statement.replace("{{queryTableClause}}", queryTableClause);
statement = statement.replace("{{forecastLengthClause}}", forecastLengthClause);
statement = statement.replace("{{scaleClause}}", scaleClause);
- statement = statement.replace("{{regionClause}}", regionClause);
- statement = statement.replace("{{matchClause}}", matchClause);
statement = statement.replace("{{dateClause}}", dateClause);
dataRequests[label] = statement;
- if (
- model !== "HRRR" &&
- variableStr !== "dswrf" &&
- statisticSelect !== "Obs average"
- ) {
- throw new Error(
- `INFO: The statistic/variable combination [${statisticSelect} and ${variableStr}] is only available for the HRRR data-source.`
- );
- }
-
// send the query statement to the query function
queryResult = matsDataQueryUtils.queryDBSpecialtyCurve(
sumPool, // eslint-disable-line no-undef
diff --git a/apps/surfrad/server/main.js b/apps/surfrad/server/main.js
index 6d020e2238..ee71e9b96d 100644
--- a/apps/surfrad/server/main.js
+++ b/apps/surfrad/server/main.js
@@ -378,7 +378,7 @@ const doCurveParams = function () {
.split(",")
.map(Function.prototype.call, String.prototype.trim)
.map(function (fhr) {
- return Number(fhr.replace(/'|\[|\]/g, "")) / 60;
+ return (Number(fhr.replace(/'|\[|\]/g, "")) / 60).toString();
});
const scales = rows[i].scle;
@@ -576,97 +576,47 @@ const doCurveParams = function () {
// 3: sum of model values
// 4: sum of obs values
// 5: sum of absolute obs-model difference (|bias_0| + |bias_1| + |bias_2| + ... + |bias_n|)
- dswrf: [
- "pow(o.direct + o.diffuse - m0.dswrf,2)",
- "(o.direct + o.diffuse - m0.dswrf)",
- "(o.direct + o.diffuse - m0.dswrf)",
- "(if(o.direct + o.diffuse is not null,m0.dswrf,null))",
- "(if(m0.dswrf is not null,o.direct + o.diffuse,null))",
- "(abs(o.direct + o.diffuse - m0.dswrf))",
- ],
- "direct (experimental HRRR only)": [
- "pow(o.direct - m0.direct,2)",
- "(o.direct - m0.direct)",
- "(o.direct - m0.direct)",
- "(if(o.direct is not null,m0.direct,null))",
- "(if(m0.direct is not null,o.direct,null))",
- "(abs(o.direct - m0.direct))",
- ],
- "diffuse (experimental HRRR only)": [
- "pow(o.diffuse - m0.diffuse,2)",
- "(o.diffuse - m0.diffuse)",
- "(o.diffuse - m0.diffuse)",
- "(if(o.diffuse is not null,m0.diffuse,null))",
- "(if(m0.diffuse is not null,o.diffuse,null))",
- "(abs(o.diffuse - m0.diffuse))",
- ],
- "15 min avg dswrf (experimental HRRR only)": [
- "pow(o.direct + o.diffuse - m0.dswrf15,2)",
- "(o.direct + o.diffuse - m0.dswrf15)",
- "(o.direct + o.diffuse - m0.dswrf15)",
- "(if(o.direct + o.diffuse is not null,m0.dswrf15,null))",
- "(if(m0.dswrf15 is not null,o.direct + o.diffuse,null))",
- "(abs(o.direct + o.diffuse - m0.dswrf15))",
- ],
- "15 min avg direct (experimental HRRR only)": [
- "pow(o.direct - m0.direct15,2)",
- "(o.direct - m0.direct15)",
- "(o.direct - m0.direct15)",
- "(if(o.direct is not null,m0.direct15,null))",
- "(if(m0.direct15 is not null,o.direct,null))",
- "(abs(o.direct - m0.direct15))",
- ],
+ "Downward Shortwave Radiation Flux (W/m2)": {
+ "Predefined region": [
+ "m0.sum2_d_dswrf",
+ "m0.N_d_dswrf",
+ "m0.sum_d_dswrf",
+ "-1 * (m0.sum_d_dswrf-m0.sum_ob_dswrf)",
+ "m0.sum_ob_dswrf",
+ "m0.sum_ad_dswrf",
+ ],
+ "Select stations": [
+ "pow(m0.obs_direct + m0.obs_diffuse - m0.model_dswrf,2)",
+ "(m0.obs_direct + m0.obs_diffuse - m0.model_dswrf)",
+ "(m0.obs_direct + m0.obs_diffuse - m0.model_dswrf)",
+ "(if(m0.obs_direct + m0.obs_diffuse is not null,m0.model_dswrf,null))",
+ "(if(m0.model_dswrf is not null,m0.obs_direct + m0.obs_diffuse,null))",
+ "(abs(m0.obs_direct + m0.obs_diffuse - m0.model_dswrf))",
+ ],
+ },
};
const statVarUnitMap = {
RMSE: {
- dswrf: "W/m2",
- "direct (experimental HRRR only)": "W/m2",
- "diffuse (experimental HRRR only)": "W/m2",
- "15 min avg dswrf (experimental HRRR only)": "W/m2",
- "15 min avg direct (experimental HRRR only)": "W/m2",
+ "Downward Shortwave Radiation Flux (W/m2)": "W/m2",
},
"Bias (Model - Obs)": {
- dswrf: "W/m2",
- "direct (experimental HRRR only)": "W/m2",
- "diffuse (experimental HRRR only)": "W/m2",
- "15 min avg dswrf (experimental HRRR only)": "W/m2",
- "15 min avg direct (experimental HRRR only)": "W/m2",
+ "Downward Shortwave Radiation Flux (W/m2)": "W/m2",
},
N: {
- dswrf: "Number",
- "direct (experimental HRRR only)": "Number",
- "diffuse (experimental HRRR only)": "Number",
- "15 min avg dswrf (experimental HRRR only)": "Number",
- "15 min avg direct (experimental HRRR only)": "Number",
+ "Downward Shortwave Radiation Flux (W/m2)": "Number",
},
"Model average": {
- dswrf: "W/m2",
- "direct (experimental HRRR only)": "W/m2",
- "diffuse (experimental HRRR only)": "W/m2",
- "15 min avg dswrf (experimental HRRR only)": "W/m2",
- "15 min avg direct (experimental HRRR only)": "W/m2",
+ "Downward Shortwave Radiation Flux (W/m2)": "W/m2",
},
"Obs average": {
- dswrf: "W/m2",
- "direct (experimental HRRR only)": "W/m2",
- "diffuse (experimental HRRR only)": "W/m2",
- "15 min avg dswrf (experimental HRRR only)": "W/m2",
- "15 min avg direct (experimental HRRR only)": "W/m2",
+ "Downward Shortwave Radiation Flux (W/m2)": "W/m2",
},
"Std deviation": {
- dswrf: "W/m2",
- "direct (experimental HRRR only)": "W/m2",
- "diffuse (experimental HRRR only)": "W/m2",
- "15 min avg dswrf (experimental HRRR only)": "W/m2",
- "15 min avg direct (experimental HRRR only)": "W/m2",
+ "Downward Shortwave Radiation Flux (W/m2)": "W/m2",
},
MAE: {
- dswrf: "W/m2",
- "direct (experimental HRRR only)": "W/m2",
- "diffuse (experimental HRRR only)": "W/m2",
- "15 min avg dswrf (experimental HRRR only)": "W/m2",
- "15 min avg direct (experimental HRRR only)": "W/m2",
+ "Downward Shortwave Radiation Flux (W/m2)": "W/m2",
},
};
@@ -730,7 +680,7 @@ const doCurveParams = function () {
superiorNames: ["data-source"],
controlButtonCovered: true,
unique: false,
- default: scaleModelOptionsMap[Object.keys(scaleModelOptionsMap)[0]][1],
+ default: scaleModelOptionsMap[Object.keys(scaleModelOptionsMap)[0]][0],
controlButtonVisibility: "block",
displayOrder: 3,
displayPriority: 1,
@@ -799,6 +749,41 @@ const doCurveParams = function () {
}
}
+ if (matsCollections["dieoff-type"].findOne({ name: "dieoff-type" }) === undefined) {
+ const dieoffOptionsMap = {
+ Dieoff: [matsTypes.ForecastTypes.dieoff],
+ "Dieoff for a specified UTC cycle init hour": [matsTypes.ForecastTypes.utcCycle],
+ "Single cycle forecast (uses first date in range)": [
+ matsTypes.ForecastTypes.singleCycle,
+ ],
+ };
+ matsCollections["dieoff-type"].insert({
+ name: "dieoff-type",
+ type: matsTypes.InputTypes.select,
+ optionsMap: dieoffOptionsMap,
+ options: Object.keys(dieoffOptionsMap),
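+      // hide whichever selector (valid-time or utc-cycle-start) does not apply to the chosen dieoff type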
+ hideOtherFor: {
+ "valid-time": [
+ "Dieoff for a specified UTC cycle init hour",
+ "Single cycle forecast (uses first date in range)",
+ ],
+ "utc-cycle-start": [
+ "Dieoff",
+ "Single cycle forecast (uses first date in range)",
+ ],
+ },
+ selected: "",
+ controlButtonCovered: true,
+ unique: false,
+ default: Object.keys(dieoffOptionsMap)[0],
+ controlButtonVisibility: "block",
+ controlButtonText: "dieoff type",
+ displayOrder: 2,
+ displayPriority: 1,
+ displayGroup: 4,
+ });
+ }
+
  if (matsCollections["valid-time"].findOne({ name: "valid-time" }) === undefined) {
const optionsArrRaw = [...Array(96).keys()].map((x) => x / 4);
const optionsArr = optionsArrRaw.map(String);
@@ -1065,6 +1050,34 @@ const doCurveTextPatterns = function () {
],
groupSize: 6,
});
+ matsCollections.CurveTextPatterns.insert({
+ plotType: matsTypes.PlotTypes.dieoff,
+ textPattern: [
+ ["", "label", ": "],
+ ["", "data-source", " in "],
+ ["", "region", ", "],
+ ["", "scale", ", "],
+ ["", "variable", " "],
+ ["", "statistic", ", "],
+ ["", "dieoff-type", ", "],
+ ["valid-time: ", "valid-time", ", "],
+ ["start utc: ", "utc-cycle-start", ", "],
+ ["", "curve-dates", ""],
+ ],
+ displayParams: [
+ "label",
+ "data-source",
+ "region",
+ "statistic",
+ "variable",
+ "dieoff-type",
+ "scale",
+ "valid-time",
+ "utc-cycle-start",
+ "curve-dates",
+ ],
+ groupSize: 6,
+ });
matsCollections.CurveTextPatterns.insert({
plotType: matsTypes.PlotTypes.validtime,
textPattern: [
@@ -1246,6 +1259,12 @@ const doPlotGraph = function () {
dataFunction: "dataSeries",
checked: true,
});
+ matsCollections.PlotGraphFunctions.insert({
+ plotType: matsTypes.PlotTypes.dieoff,
+ graphFunction: "graphPlotly",
+ dataFunction: "dataDieoff",
+ checked: false,
+ });
matsCollections.PlotGraphFunctions.insert({
plotType: matsTypes.PlotTypes.validtime,
graphFunction: "graphPlotly",
@@ -1366,7 +1385,7 @@ Meteor.startup(function () {
}
// create list of tables we need to monitor for update
- const mdr = new matsTypes.MetaDataDBRecord("sumPool", "surfrad3", [
+ const mdr = new matsTypes.MetaDataDBRecord("sumPool", "surfrad4", [
"scale_descriptions",
"station_descriptions",
"regions_per_model_mats_all_categories",
diff --git a/scripts/matsMetaDataForApps/createMetaData/mysql/mats/make_regions_per_model_mats_all_categories_surfrad4.py b/scripts/matsMetaDataForApps/createMetaData/mysql/mats/make_regions_per_model_mats_all_categories_surfrad4.py
new file mode 100755
index 0000000000..2f680be387
--- /dev/null
+++ b/scripts/matsMetaDataForApps/createMetaData/mysql/mats/make_regions_per_model_mats_all_categories_surfrad4.py
@@ -0,0 +1,370 @@
+#!/usr/bin/env python
+#
+# Creates a regions_per_model_mats_all_categories table for all models in surfrad4
+
+# __future__ must come first
+from __future__ import print_function
+from datetime import datetime, timedelta
+
+import re
+import sys
+import ast
+import MySQLdb
+
+
+############################################################################
+
+def update_rpm_record(cnx, cursor, table_name, display_text, regions, fcst_lens, scales, display_category, display_order, mindate, maxdate, numrecs):
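+    """Insert or update a single model's record in regions_per_model_mats_all_categories_build."""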
+
+ # see if this record already exists (it shouldn't, because this script cleaned the tables when it started)
+ find_rpm_rec = "SELECT id FROM regions_per_model_mats_all_categories_build WHERE model = '" + \
+ str(table_name) + "'"
+ cursor.execute(find_rpm_rec)
+ record_id = int(0)
+ for row in cursor:
+ val = list(row.values())[0]
+ record_id = int(val)
+
+ if len(regions) > int(0) and len(fcst_lens) > int(0) and len(scales) > int(0):
+ qd = []
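+        # note: '%s' (seconds since the epoch) is a platform-specific strftime extension, not portable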
+ updated_utc = datetime.utcnow().strftime('%s')
+ # if it's a new record (it should be) add it
+ if record_id == 0:
+ insert_rpm_rec = "INSERT INTO regions_per_model_mats_all_categories_build (model, display_text, regions, fcst_lens, scle, display_category, display_order, id, mindate, maxdate, numrecs, updated) values( %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s )"
+ qd.append(str(table_name))
+ qd.append(str(display_text))
+ qd.append(str(regions))
+ qd.append(str(fcst_lens))
+ qd.append(str(scales))
+ qd.append(display_category)
+ qd.append(display_order)
+ qd.append(record_id)
+ qd.append(mindate)
+ qd.append(maxdate)
+ qd.append(numrecs)
+ qd.append(updated_utc)
+ cursor.execute(insert_rpm_rec, qd)
+ cnx.commit()
+ else:
+ # if there's a pre-existing record, update it
+ update_rpm_rec = "UPDATE regions_per_model_mats_all_categories_build SET regions = %s, fcst_lens = %s, scle = %s, display_category = %s, display_order = %s, mindate = %s, maxdate = %s, numrecs = %s, updated = %s WHERE id = %s"
+ qd.append(str(regions))
+ qd.append(str(fcst_lens))
+ qd.append(str(scales))
+ qd.append(display_category)
+ qd.append(display_order)
+ qd.append(mindate)
+ qd.append(maxdate)
+ qd.append(numrecs)
+ qd.append(updated_utc)
+ qd.append(record_id)
+ cursor.execute(update_rpm_rec, qd)
+ cnx.commit()
+
+
+############################################################################
+
+def regions_per_model_mats_all_categories(mode):
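+    # scan every <model>_site_<region> table in surfrad4, cache per-table stats
+    # in TABLESTATS_build, then roll them up into one metadata record per model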
+ # connect to database
+ try:
+ # location of cnf file on Hera; edit if running locally
+ cnx = MySQLdb.connect(read_default_file="/home/role.amb-verif/.my.cnf")
+        cnx.autocommit(True)  # autocommit is a method in MySQLdb, not a writable attribute
+ cursor = cnx.cursor(MySQLdb.cursors.DictCursor)
+ cursor.execute("set session wait_timeout=28800")
+ cursor.execute("set session interactive_timeout=28800")
+ except MySQLdb.Error as e:
+ print("Error: " + str(e))
+ sys.exit(1)
+
+ db = "surfrad4"
+ usedb = "use " + db
+ cursor.execute(usedb)
+
+ # clean TABLESTATS_build in order to get updated data source information. If nothing has changed, you can set
+ # TScleaned to False and just use the old data source info.
+ clean_tablestats = "delete from " + db + ".TABLESTATS_build"
+ # TScleaned = False
+ TScleaned = True
+ if TScleaned:
+ cursor.execute(clean_tablestats)
+ else:
+ print("NOT executing: " + str(clean_tablestats))
+
+ # string of tables not to include in our search for metadata
+ skiptables = " all_display_categories all_display_categories_build all_display_categories_dev regions_per_model_mats_all_categories regions_per_model_mats_all_categories_dev regions_per_model_mats_all_categories_build template TABLESTATS_build stations surfrad scale_descriptions station_descriptions tables_to_backup template_HRRR "
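+    # each name in skiptables is padded with surrounding spaces so the
+    # membership test below (" " + tablename + " ") only matches whole names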
+
+ # get an array of all relevant data sources in this db
+ all_data_sources = []
+ per_table = {}
+
+ show_tables = "show tables;"
+ cursor.execute(show_tables)
+ for row in cursor:
+ tablename = str(list(row.values())[0])
+ # print( "tablename is " + tablename)
+ if " " + tablename + " " not in skiptables and "all" not in tablename:
+ # parse the data sources from the table names
+ model = re.sub("_site_.*", "", tablename)
+ if model not in all_data_sources:
+ all_data_sources.append(model)
+ per_table[tablename] = {}
+ per_table[tablename]['model'] = model
+ region = re.sub(model + "_site_", "", tablename)
+ per_table[tablename]['region'] = region
+ # print("model is " + model + ", region is " + region)
+
+ # sys.exit(-1)
+
+ # parse the other metadata contained in the tables
+ if TScleaned:
+ for tablename in per_table.keys():
+ # get forecast lengths from this table
+ get_fcst_lens = (
+ "SELECT DISTINCT fcst_len FROM " + tablename + ";")
+ cursor.execute(get_fcst_lens)
+ per_table[tablename]['fcst_lens'] = []
+ this_fcst_lens = []
+ for row in cursor:
+ val = list(row.values())[0]
+ this_fcst_lens.append(int(val))
+ this_fcst_lens.sort(key=int)
+ per_table[tablename]['fcst_lens'] = this_fcst_lens
+ # print(tablename + " fcst_lens: " + str(per_table[tablename]['fcst_lens']) )
+
+ # get scales from this table
+ get_scales = ("SELECT DISTINCT scale FROM " + tablename + ";")
+ cursor.execute(get_scales)
+ per_table[tablename]['scales'] = []
+ this_scales = []
+ for row in cursor:
+ val = list(row.values())[0]
+ this_scales.append(int(val))
+ this_scales.sort(key=int)
+ per_table[tablename]['scales'] = this_scales
+ # print(tablename + " scales: " + str(per_table[tablename]['scales']) )
+
+ # get statistics for this table
+ get_tablestats = "SELECT min(secs) AS mindate, max(secs) AS maxdate, count(secs) AS numrecs FROM " + tablename + ";"
+
+ cursor.execute(get_tablestats)
+ stats = cursor.fetchall()[0]
+ # print(tablename + " stats:\n" + str(stats) )
+
+ replace_tablestats_rec = "REPLACE INTO TABLESTATS_build (tablename, mindate, maxdate, model, region, fcst_lens, scle, numrecs) values( %s, %s, %s, %s, %s, %s, %s, %s )"
+ qd = []
+ qd.append(str(tablename))
+ qd.append(str(stats['mindate']))
+ qd.append(str(stats['maxdate']))
+ qd.append(str(per_table[tablename]['model']))
+ qd.append(str(per_table[tablename]['region']))
+ qd.append(str(per_table[tablename]['fcst_lens']))
+ qd.append(str(per_table[tablename]['scales']))
+ qd.append(str(stats['numrecs']))
+ cursor.execute(replace_tablestats_rec, qd)
+ cnx.commit()
+ # sys.exit(-1)
+ else:
+ print("TScleaned is " + str(TScleaned) +
+ " skipped populating TABLESTATS_build")
+
+ # sys.exit(-1)
+
+ # refresh database connection
+ cursor.close()
+ cnx.close()
+
+ try:
+ cnx = MySQLdb.connect(read_default_file="/home/role.amb-verif/.my.cnf")
+        cnx.autocommit(True)
+ cursor = cnx.cursor(MySQLdb.cursors.DictCursor)
+ cursor.execute("set session wait_timeout=28800")
+ cursor.execute("set session interactive_timeout=28800")
+ except MySQLdb.Error as e:
+ print("Error: " + str(e))
+ sys.exit(1)
+
+ db = "surfrad4"
+ usedb = "use " + db
+ cursor.execute(usedb)
+
+ # use standardized model names
+ try:
+ cnx4 = MySQLdb.connect(
+ read_default_file="/home/role.amb-verif/.my.cnf")
+        cnx4.autocommit(True)
+ cursor4 = cnx4.cursor(MySQLdb.cursors.DictCursor)
+ cursor4.execute("set session wait_timeout=28800")
+ cursor4.execute("set session interactive_timeout=28800")
+ except MySQLdb.Error as e:
+ print("Error: " + str(e))
+ sys.exit(1)
+
+ usedb = "use mats_common"
+ cursor4.execute(usedb)
+
+ get_model_keys_vals = "select old_model,new_model from standardized_model_list;"
+ cursor4.execute(get_model_keys_vals)
+
+ main_model_keys = []
+ main_models = {}
+ for row in cursor4:
+ old_model = str(row['old_model'])
+ new_model = str(row['new_model'])
+ if old_model in all_data_sources:
+ main_model_keys.append(old_model)
+ main_models[old_model] = new_model
+
+ get_model_orders = "select model,m_order from primary_model_orders order by m_order;"
+ cursor4.execute(get_model_orders)
+
+ new_model_list = list(main_models.values())
+ main_model_order_keys = []
+ main_model_orders = {}
+ for row in cursor4:
+ new_model = str(row['model'])
+ m_order = int(row['m_order'])
+ if new_model in new_model_list:
+ main_model_order_keys.append(new_model)
+ main_model_orders[new_model] = m_order
+
+ cursor4.close()
+ cnx4.close()
+
+ # sys.exit(-1)
+
+ # clean metadata build table
+ clean_rpmmac = "delete from regions_per_model_mats_all_categories_build"
+ cursor.execute(clean_rpmmac)
+ cnx.commit()
+ set_ai = "alter table regions_per_model_mats_all_categories_build auto_increment = 1"
+ cursor.execute(set_ai)
+ cnx.commit()
+
+ # sort the data sources into groups
+ data_sources_in_this_app = all_data_sources
+ data_sources_in_this_app.sort(key=str.lower)
+ data_source_cats = {}
+ data_source_key_cats = {}
+
+ ds_idx = 2
+
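+    # models on the standardized list fall into display category 1; all others are
+    # grouped by the prefix before their first underscore, starting at category 2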
+ for model in data_sources_in_this_app:
+ if model in main_model_keys and main_models[model] in main_model_order_keys:
+ data_source_cats[model] = 1
+ else:
+ sub_idx = model.find('_', 0)
+ model_key = model[0:sub_idx]
+ if model_key in data_source_key_cats.keys():
+ data_source_cats[model] = data_source_key_cats[model_key]
+ else:
+ data_source_key_cats[model_key] = ds_idx
+ data_source_cats[model] = ds_idx
+ ds_idx = ds_idx + 1
+
+ # combine the metadata per table into metadata per data source
+ do_non_main = 0
+ for model in all_data_sources:
+ if model in main_model_keys and main_models[model] in main_model_order_keys:
+ cat = 1
+ display_text = main_models[model]
+ do = main_model_orders[display_text]
+ else:
+ cat = data_source_cats[model]
+ display_text = str(model)
+ do = do_non_main + 1
+ do_non_main = do_non_main + 1
+
+ # get regions for all tables pertaining to this model
+ get_these_regions = "select distinct(region) as region from " + db + \
+ ".TABLESTATS_build where tablename like '" + model + \
+ "%' and model = '" + model + "' and numrecs > 0;"
+ cursor.execute(get_these_regions)
+ these_regions = []
+ for row in cursor:
+ val = str(list(row.values())[0])
+ these_regions.append(val)
+ these_regions.sort(key=int)
+ # print( "these_regions:\n" + str(these_regions) )
+
+ # get forecast lengths for all tables pertaining to this model
+ get_these_fcst_lens = "select distinct(fcst_lens) as fcst_lens from " + db + ".TABLESTATS_build where tablename like '" + \
+ model + "%' and fcst_lens != '[]' and model = '" + model + \
+ "' and numrecs > 0 order by length(fcst_lens) desc;"
+ cursor.execute(get_these_fcst_lens)
+ these_fcst_lens = []
+ for row in cursor:
+ val_array = ast.literal_eval(list(row.values())[0])
+ for val in val_array:
+ if val not in these_fcst_lens:
+ these_fcst_lens.append(val)
+ these_fcst_lens.sort(key=int)
+ # print( "these_fcst_lens:\n" + str(these_fcst_lens) )
+
+ # get scales for all tables pertaining to this model
+ get_these_scales = "select distinct(scle) as scle from " + db + ".TABLESTATS_build where tablename like '" + \
+ model + "%' and model = '" + model + \
+ "' and numrecs > 0 order by length(scle) desc;"
+ cursor.execute(get_these_scales)
+ these_scales = []
+ for row in cursor:
+ val_array = ast.literal_eval(list(row.values())[0])
+ for val in val_array:
+ if val not in these_scales:
+ these_scales.append(val)
+ these_scales.sort(key=int)
+ # print( " thesescales:\n" + str(thesescales) )
+
+ # get statistics for all tables pertaining to this model
+ get_cat_stats = "select min(mindate) as mindate, max(maxdate) as maxdate, sum(numrecs) as numrecs from " + \
+ db + ".TABLESTATS_build where tablename like '" + model + \
+ "%' and model = '" + model + "' and numrecs > 0"
+ cursor.execute(get_cat_stats)
+ catstats = cursor.fetchall()[0]
+ # print( "catstats:\n" + str(catstats) )
+
+ # update the metadata for this data source in the build table
+ if len(these_regions) > 0 and len(these_fcst_lens) > 0 and len(these_scales) > 0:
+ update_rpm_record(cnx, cursor, model, display_text, these_regions, these_fcst_lens,
+ these_scales, cat, do, catstats['mindate'], catstats['maxdate'], catstats['numrecs'])
+
+ # clean metadata publication table and add the build data into it
+ updated_utc = datetime.utcnow().strftime('%Y/%m/%d %H:%M')
+ if 'deploy' in mode:
+ clean_rpmmac = "delete from regions_per_model_mats_all_categories"
+ cursor.execute(clean_rpmmac)
+ cnx.commit()
+ set_ai = "alter table regions_per_model_mats_all_categories auto_increment = 1"
+ cursor.execute(set_ai)
+ cnx.commit()
+ sync_rpm = "insert into regions_per_model_mats_all_categories select * from regions_per_model_mats_all_categories_build"
+ cursor.execute(sync_rpm)
+ cnx.commit()
+ print("deploy " + db +
+ ".regions_per_model_mats_all_categories complete at " + str(updated_utc))
+ else:
+ print("skipping deployment at " + str(updated_utc))
+
+ cursor.close()
+ cnx.close()
+
+
+##################### regions_per_model_mats_all_categories ####################################
+
+if __name__ == '__main__':
+    # argv[1] selects the mode: 'selftest' runs the full scan without deploying,
+    # while 'deploy' also publishes the build table to production
+    if len(sys.argv) == 2:
+        if sys.argv[1] == 'selftest':
+            regions_per_model_mats_all_categories('selftest')
+        elif sys.argv[1] == 'deploy':
+            utcnow = str(datetime.now())
+            msg = 'SURFRAD MATS METADATA START: ' + utcnow
+            print(msg)
+            regions_per_model_mats_all_categories('deploy')
+            utcnow = str(datetime.now())
+            msg = 'SURFRAD MATS METADATA END: ' + utcnow
+            print(msg)
diff --git a/scripts/matsMetaDataForApps/createMetaData/mysql/mats/run_mats_metadata_pt4.tcsh b/scripts/matsMetaDataForApps/createMetaData/mysql/mats/run_mats_metadata_pt4.tcsh
index 3bc5d26422..67af42a1af 100755
--- a/scripts/matsMetaDataForApps/createMetaData/mysql/mats/run_mats_metadata_pt4.tcsh
+++ b/scripts/matsMetaDataForApps/createMetaData/mysql/mats/run_mats_metadata_pt4.tcsh
@@ -28,6 +28,7 @@ conda activate avid_verify_py3
#/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_aircraft_prepbufr.py deploy
#/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_vgtyp.py deploy
/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_surfrad.py deploy
+/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_surfrad4.py deploy
#/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_raobamdar.py deploy
#/home/role.amb-verif/mats_metadata/make_regions_per_model_mats_all_categories_upperair_prepbufr.py deploy
#/home/role.amb-verif/mats_metadata/update_metadata_upperair_prepbufr.py persis
diff --git a/scripts/matsMetaDataForApps/createMetaData/mysql/mats/update_metadata_surfrad4.py b/scripts/matsMetaDataForApps/createMetaData/mysql/mats/update_metadata_surfrad4.py
new file mode 100755
index 0000000000..703ac15a07
--- /dev/null
+++ b/scripts/matsMetaDataForApps/createMetaData/mysql/mats/update_metadata_surfrad4.py
@@ -0,0 +1,314 @@
+#!/scratch1/BMC/amb-verif/miniconda/miniconda3/envs/avid_verify_py3/bin/python
+#
+# The reason I am hardcoding the python path above is that this script is usually run by model developers
+# without guidance from us, and I don't want them to be tripped up by the fact that the default python on
+# Hera is python 2, while this script requires python 3. There's also an error to that effect below, but
+# I'm trying to cut down on the number of confused emails we get. Our main scripts are all environment-agnostic,
+# because they are run by verification team members who know which conda environment to use.
+#
+# Updates the regions_per_model_mats_all_categories table for all models in surfrad4
+
+# __future__ must come first
+from __future__ import print_function
+from datetime import datetime
+
+import re
+import sys
+
+try:
+ import MySQLdb
+except ImportError:
+    raise ImportError('--------------------IMPORTANT: This script now requires python 3 to run. \
+        You can get python 3 in the amb-verif conda environment by running "conda activate \
+        avid_verify_py3" and then trying this script again.-------------------------')
+
+
+############################################################################
+
+def update_rpm_record(cnx, cursor, table_name, display_text, regions, fcst_lens, scales, display_category, display_order, mindate, maxdate, numrecs):
+
+ # see if this record already exists in the build table
+ # (does not guarantee the result will be the same for the prod table)
+ find_rpm_rec = "SELECT id FROM regions_per_model_mats_all_categories_build WHERE model = '" + \
+ str(table_name) + "'"
+ cursor.execute(find_rpm_rec)
+    build_record_id = 0
+ for row in cursor:
+ val = list(row.values())[0]
+ build_record_id = int(val)
+
+ # see if this record already exists in the prod table
+ # (does not guarantee the result will be the same for the build table)
+ find_rpm_rec = "SELECT id FROM regions_per_model_mats_all_categories WHERE model = '" + \
+ str(table_name) + "'"
+ cursor.execute(find_rpm_rec)
+    prod_record_id = 0
+ for row in cursor:
+ val = list(row.values())[0]
+ prod_record_id = int(val)
+
+    if len(regions) > 0 and len(fcst_lens) > 0 and len(scales) > 0:
+ qd = []
+ updated_utc = datetime.utcnow().strftime('%s')
+ # if it's a new record for the build table, add it
+ if build_record_id == 0:
+ insert_rpm_rec = "INSERT INTO regions_per_model_mats_all_categories_build (model, display_text, regions, fcst_lens, scle, display_category, display_order, id, mindate, maxdate, numrecs, updated) values( %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s )"
+ qd.append(str(table_name))
+ qd.append(str(display_text))
+ qd.append(str(regions))
+ qd.append(str(fcst_lens))
+ qd.append(str(scales))
+ qd.append(display_category)
+ qd.append(display_order)
+ qd.append(build_record_id)
+ qd.append(mindate)
+ qd.append(maxdate)
+ qd.append(numrecs)
+ qd.append(updated_utc)
+ cursor.execute(insert_rpm_rec, qd)
+ cnx.commit()
+ else:
+ # if there's a pre-existing record for the build table, update it
+ update_rpm_rec = "UPDATE regions_per_model_mats_all_categories_build SET regions = %s, fcst_lens = %s, scle = %s, display_category = %s, display_order = %s, mindate = %s, maxdate = %s, numrecs = %s, updated = %s WHERE id = %s"
+ qd.append(str(regions))
+ qd.append(str(fcst_lens))
+ qd.append(str(scales))
+ qd.append(display_category)
+ qd.append(display_order)
+ qd.append(mindate)
+ qd.append(maxdate)
+ qd.append(numrecs)
+ qd.append(updated_utc)
+ qd.append(build_record_id)
+ cursor.execute(update_rpm_rec, qd)
+ cnx.commit()
+
+ # reset qd array
+ qd = []
+ # if it's a new record for the prod table, add it
+ if prod_record_id == 0:
+ insert_rpm_rec = "INSERT INTO regions_per_model_mats_all_categories (model, display_text, regions, fcst_lens, scle, display_category, display_order, id, mindate, maxdate, numrecs, updated) values( %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s )"
+ qd.append(str(table_name))
+ qd.append(str(display_text))
+ qd.append(str(regions))
+ qd.append(str(fcst_lens))
+ qd.append(str(scales))
+ qd.append(display_category)
+ qd.append(display_order)
+ qd.append(prod_record_id)
+ qd.append(mindate)
+ qd.append(maxdate)
+ qd.append(numrecs)
+ qd.append(updated_utc)
+ cursor.execute(insert_rpm_rec, qd)
+ cnx.commit()
+ else:
+ # if there's a pre-existing record for the prod table, update it
+ update_rpm_rec = "UPDATE regions_per_model_mats_all_categories SET regions = %s, fcst_lens = %s, scle = %s, display_category = %s, display_order = %s, mindate = %s, maxdate = %s, numrecs = %s, updated = %s WHERE id = %s"
+ qd.append(str(regions))
+ qd.append(str(fcst_lens))
+ qd.append(str(scales))
+ qd.append(display_category)
+ qd.append(display_order)
+ qd.append(mindate)
+ qd.append(maxdate)
+ qd.append(numrecs)
+ qd.append(updated_utc)
+ qd.append(prod_record_id)
+ cursor.execute(update_rpm_rec, qd)
+ cnx.commit()
+
+
+############################################################################
+
+def reprocess_specific_metadata(models_to_reprocess):
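+    # rebuild metadata for just the listed models, writing the results to both the
+    # _build table and the production regions_per_model_mats_all_categories table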
+ # connect to database
+ try:
+ # location of cnf file on Hera; edit if running locally
+ cnx = MySQLdb.connect(read_default_file="/home/role.amb-verif/.my.cnf")
+        cnx.autocommit(True)
+ cursor = cnx.cursor(MySQLdb.cursors.DictCursor)
+ except MySQLdb.Error as e:
+ print("Error: " + str(e))
+ sys.exit(1)
+
+ try:
+ cnx2 = MySQLdb.connect(
+ read_default_file="/home/role.amb-verif/.my.cnf")
+        cnx2.autocommit(True)
+ cursor2 = cnx2.cursor(MySQLdb.cursors.DictCursor)
+ except MySQLdb.Error as e:
+ print("Error: " + str(e))
+ sys.exit(1)
+
+ try:
+ cnx3 = MySQLdb.connect(
+ read_default_file="/home/role.amb-verif/.my.cnf")
+        cnx3.autocommit(True)
+ cursor3 = cnx3.cursor(MySQLdb.cursors.DictCursor)
+ except MySQLdb.Error as e:
+ print("Error: " + str(e))
+ sys.exit(1)
+
+ db = "surfrad4"
+ usedb = "use " + db
+ cursor.execute(usedb)
+ cursor2.execute(usedb)
+
+ db3 = "mats_common"
+ usedb3 = "use " + db3
+ cursor3.execute(usedb3)
+
+ # get common MATS model names
+ get_model_keys_vals = "select old_model,new_model from standardized_model_list;"
+ cursor3.execute(get_model_keys_vals)
+
+ main_model_keys = []
+ main_models = {}
+ for row in cursor3:
+ old_model = str(row['old_model'])
+ new_model = str(row['new_model'])
+ main_model_keys.append(old_model)
+ main_models[old_model] = new_model
+
+ get_model_orders = "select model,m_order from primary_model_orders order by m_order;"
+ cursor3.execute(get_model_orders)
+
+ new_model_list = list(main_models.values())
+ main_model_order_keys = []
+ main_model_orders = {}
+ for row in cursor3:
+ new_model = str(row['model'])
+ m_order = int(row['m_order'])
+ if new_model in new_model_list:
+ main_model_order_keys.append(new_model)
+ main_model_orders[new_model] = m_order
+
+ # get max category used so far
+ cursor3.execute(usedb)
+ cursor3.execute(
+ "select max(display_category) from regions_per_model_mats_all_categories;")
+ for row in cursor3:
+ max_display_category = list(row.values())[0]
+ curr_model_order = 1
+
+ cursor3.close()
+ cnx3.close()
+
+ per_model = {}
+ for model in models_to_reprocess:
+ # initialize output object
+ per_model[model] = {}
+ per_model[model]['display_text'] = ""
+ per_model[model]['region'] = []
+ per_model[model]['fcst_len'] = []
+ per_model[model]['scales'] = []
+ per_model[model]['mindate'] = sys.float_info.max
+ per_model[model]['maxdate'] = 0
+ per_model[model]['numrecs'] = 0
+
+ if model in main_model_keys and main_models[model] in main_model_order_keys:
+ per_model[model]['display_text'] = main_models[model]
+ per_model[model]['display_category'] = 1
+ per_model[model]['display_order'] = main_model_orders[per_model[model]['display_text']]
+ else:
+ get_display_params = "select display_category,display_order from regions_per_model_mats_all_categories where model = '" + model + "';"
+ cursor2.execute(get_display_params)
+ per_model[model]['display_text'] = model
+ if cursor2.rowcount == 0:
+ per_model[model]['display_category'] = int(
+ max_display_category) + 1
+ per_model[model]['display_order'] = curr_model_order
+ curr_model_order = curr_model_order + 1
+ else:
+ for row in cursor2:
+ per_model[model]['display_category'] = row['display_category']
+ per_model[model]['display_order'] = row['display_order']
+
+ # get all tables that remotely resemble this model name
+ show_tables = ("show tables like '" + model + "_site_%';")
+ cursor.execute(show_tables)
+ for row in cursor:
+ tablename = str(list(row.values())[0])
+ table_model = re.sub("_site_.*", "", tablename)
+ if table_model == model:
+ # this is a table that does belong to this model
+ get_tablestats = "SELECT min(secs) AS mindate, max(secs) AS maxdate, count(secs) AS numrecs FROM " + tablename + ";"
+ cursor2.execute(get_tablestats)
+ stats = {}
+ for row2 in cursor2:
+ rowkeys = row2.keys()
+ for rowkey in rowkeys:
+ val = str(row2[rowkey])
+ stats[rowkey] = val
+
+ if int(stats['numrecs']) > 0:
+ # make sure the table actually has data
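+                        # keep the earliest mindate and latest maxdate seen across
+                        # this model's tables, and accumulate the total record count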
+ per_model[model]['mindate'] = int(stats['mindate']) if stats['mindate'] != 'None' and int(
+ stats['mindate']) < per_model[model]['mindate'] else per_model[model]['mindate']
+ per_model[model]['maxdate'] = int(stats['maxdate']) if stats['maxdate'] != 'None' and int(
+ stats['maxdate']) > per_model[model]['maxdate'] else per_model[model]['maxdate']
+ per_model[model]['numrecs'] = per_model[model]['numrecs'] + \
+ int(stats['numrecs'])
+
+ region = re.sub(".*_site_", "", tablename)
+ per_model[model]['region'].append(region)
+
+ get_fcst_lens = (
+ "SELECT DISTINCT fcst_len FROM " + tablename + ";")
+ cursor2.execute(get_fcst_lens)
+ thisfcst_lens = []
+ for row2 in cursor2:
+ val = list(row2.values())[0]
+ thisfcst_lens.append(int(val))
+ per_model[model]['fcst_len'] = list(
+ set(per_model[model]['fcst_len']) | set(thisfcst_lens))
+ per_model[model]['fcst_len'].sort(key=int)
+
+ get_scales = ("SELECT DISTINCT scale FROM " + tablename + ";")
+ cursor2.execute(get_scales)
+ thisscales = []
+ for row2 in cursor2:
+ val = list(row2.values())[0]
+ thisscales.append(int(val))
+ per_model[model]['scales'] = list(
+ set(per_model[model]['scales']) | set(thisscales))
+ per_model[model]['scales'].sort(key=int)
+
+ if per_model[model]['mindate'] == sys.float_info.max:
+ per_model[model]['mindate'] = str(datetime.now().strftime('%s'))
+ if per_model[model]['maxdate'] == 0:
+ per_model[model]['maxdate'] = str(datetime.now().strftime('%s'))
+
+ per_model[model]['region'].sort(key=int)
+
+ print(per_model)
+
+ # sys.exit(-1)
+
+ for model in models_to_reprocess:
+ if len(per_model[model]['region']) > 0 and len(per_model[model]['fcst_len']) > 0 and len(per_model[model]['scales']) > 0:
+ update_rpm_record(cnx, cursor, model, per_model[model]['display_text'], per_model[model]['region'], per_model[model]['fcst_len'], per_model[model]['scales'],
+ per_model[model]['display_category'], per_model[model]['display_order'], per_model[model]['mindate'], per_model[model]['maxdate'], per_model[model]['numrecs'])
+
+ updated_utc = datetime.utcnow().strftime('%Y/%m/%d %H:%M')
+ print("deploy " + db +
+ ".regions_per_model_mats_all_categories complete at " + str(updated_utc))
+
+ cursor.close()
+ cnx.close()
+ cursor2.close()
+ cnx2.close()
+
+
+if __name__ == '__main__':
+ # args[1] should be a comma-separated list of models to reprocess
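+    # e.g. ./update_metadata_surfrad4.py HRRR_OPS,RAP_OPS_130
+    # (model names are illustrative; pass whichever surfrad4 data sources changed)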
+ if len(sys.argv) == 2:
+ utcnow = str(datetime.now())
+ msg = 'SURFRAD MATS METADATA START: ' + utcnow
+ print(msg)
+ models_to_reprocess = sys.argv[1].strip().split(',')
+ reprocess_specific_metadata(models_to_reprocess)
+ utcnow = str(datetime.now())
+ msg = 'SURFRAD MATS METADATA END: ' + utcnow
+ print(msg)
diff --git a/tests/src/features/surfrad/basic/addRemoveContour.feature b/tests/src/features/surfrad/basic/addRemoveContour.feature
index 405c2c4680..3c222b67cf 100644
--- a/tests/src/features/surfrad/basic/addRemoveContour.feature
+++ b/tests/src/features/surfrad/basic/addRemoveContour.feature
@@ -16,8 +16,8 @@ Feature: Add Remove Contour
Scenario: addRemoveContour
When I set the plot type to "Contour"
Then the plot type should be "Contour"
- When I change the "data-source" parameter to "RAP_GSL_130"
- Then the "data-source" parameter value matches "RAP_GSL_130"
+ When I change the "data-source" parameter to "HRRR_OPS"
+ Then the "data-source" parameter value matches "HRRR_OPS"
When I set the dates to "09/21/2019 00:00 - 09/24/2019 00:00"
Then the dates value is "09/21/2019 00:00 - 09/24/2019 00:00"
Then I click the "Add Curve" button
diff --git a/tests/src/features/surfrad/basic/addRemoveContourDiff.feature b/tests/src/features/surfrad/basic/addRemoveContourDiff.feature
index 312abdf96c..61361a0596 100644
--- a/tests/src/features/surfrad/basic/addRemoveContourDiff.feature
+++ b/tests/src/features/surfrad/basic/addRemoveContourDiff.feature
@@ -16,8 +16,8 @@ Feature: Add Remove ContourDiff
Scenario: addRemoveContourDiff
When I set the plot type to "ContourDiff"
Then the plot type should be "ContourDiff"
- When I change the "data-source" parameter to "RAP_GSL_130"
- Then the "data-source" parameter value matches "RAP_GSL_130"
+ When I change the "data-source" parameter to "HRRR_OPS"
+ Then the "data-source" parameter value matches "HRRR_OPS"
When I set the dates to "09/21/2019 00:00 - 09/24/2019 00:00"
Then the dates value is "09/21/2019 00:00 - 09/24/2019 00:00"
Then I click the "Add Curve" button
diff --git a/tests/src/features/surfrad/basic/addRemoveCurve.feature b/tests/src/features/surfrad/basic/addRemoveCurve.feature
index f33222c21a..c134051449 100644
--- a/tests/src/features/surfrad/basic/addRemoveCurve.feature
+++ b/tests/src/features/surfrad/basic/addRemoveCurve.feature
@@ -15,8 +15,8 @@ Feature: Add Remove Curve
Scenario: addRemoveCurve
When I set the plot type to "TimeSeries"
Then the plot type should be "TimeSeries"
- When I change the "data-source" parameter to "RAP_GSL_130"
- Then the "data-source" parameter value matches "RAP_GSL_130"
+ When I change the "data-source" parameter to "HRRR_OPS"
+ Then the "data-source" parameter value matches "HRRR_OPS"
When I set the dates to "09/21/2019 00:00 - 09/24/2019 00:00"
Then the dates value is "09/21/2019 00:00 - 09/24/2019 00:00"
Then I click the "Add Curve" button
diff --git a/tests/src/features/surfrad/basic/addRemoveDailyModelCycleCurve.feature b/tests/src/features/surfrad/basic/addRemoveDailyModelCycleCurve.feature
index df9046f7e8..270b824051 100644
--- a/tests/src/features/surfrad/basic/addRemoveDailyModelCycleCurve.feature
+++ b/tests/src/features/surfrad/basic/addRemoveDailyModelCycleCurve.feature
@@ -16,8 +16,8 @@ Feature: Add Remove DailyModelCycle Curve
Scenario: addRemoveDailyModelCycleCurve
When I set the plot type to "DailyModelCycle"
Then the plot type should be "DailyModelCycle"
- When I change the "data-source" parameter to "RAP_GSL_130"
- Then the "data-source" parameter value matches "RAP_GSL_130"
+ When I change the "data-source" parameter to "HRRR_OPS"
+ Then the "data-source" parameter value matches "HRRR_OPS"
When I set the dates to "09/21/2019 00:00 - 09/24/2019 00:00"
Then the dates value is "09/21/2019 00:00 - 09/24/2019 00:00"
Then I click the "Add Curve" button
diff --git a/tests/src/features/surfrad/basic/addRemoveDieOffCurve.feature b/tests/src/features/surfrad/basic/addRemoveDieOffCurve.feature
new file mode 100644
index 0000000000..a7bd5e8178
--- /dev/null
+++ b/tests/src/features/surfrad/basic/addRemoveDieOffCurve.feature
@@ -0,0 +1,39 @@
+Feature: Add Remove Dieoff Curve
+
+ As an unauthenticated user to the app,
+ with the app in its default state,
+    I want to click the dieoff radio button,
+    I want to set the forecast-length selector to dieoff,
+    I want to add one curve,
+ then plot that curve and see the graph,
+ then go back to the curve management page,
+ then delete that curve.
+
+ Background:
+ Given I load the app "/surfrad"
+ Then I expect the app title to be "Surface Radiation"
+
+ @watch
+ Scenario: addRemoveDieoffCurve
+ When I set the plot type to "Dieoff"
+ Then the plot type should be "Dieoff"
+ When I change the "data-source" parameter to "HRRR_OPS"
+ Then the "data-source" parameter value matches "HRRR_OPS"
+ When I set the curve-dates to "09/21/2019 00:00 - 09/24/2019 00:00"
+ Then the curve-dates value is "09/21/2019 00:00 - 09/24/2019 00:00"
+ Then I click the "Add Curve" button
+ Then "Curve0" is added
+ And I should see a list of curves containing "Curve0"
+
+ When I click the "Plot Unmatched" button
+ Then I should be on the graph page
+ And I should have a "Dieoff" plot
+
+ When I click the "Back" button
+ Then I should be on the main page
+ And the "Plot Unmatched" button should be visible
+
+ Then I click the "Remove Curve0" button
+ And the "Remove curve Curve0" button should be visible
+ Then I click the "Remove curve Curve0" button
+ Then I should have 0 curves
diff --git a/tests/src/features/surfrad/basic/addRemoveHistogram.feature b/tests/src/features/surfrad/basic/addRemoveHistogram.feature
index d67cbc4de9..dbb9eb22c1 100644
--- a/tests/src/features/surfrad/basic/addRemoveHistogram.feature
+++ b/tests/src/features/surfrad/basic/addRemoveHistogram.feature
@@ -16,8 +16,8 @@ Feature: Add Remove Histogram
Scenario: addRemoveHistogram
When I set the plot type to "Histogram"
Then the plot type should be "Histogram"
- When I change the "data-source" parameter to "RAP_GSL_130"
- Then the "data-source" parameter value matches "RAP_GSL_130"
+ When I change the "data-source" parameter to "HRRR_OPS"
+ Then the "data-source" parameter value matches "HRRR_OPS"
When I set the curve-dates to "09/21/2019 00:00 - 09/24/2019 00:00"
Then the curve-dates value is "09/21/2019 00:00 - 09/24/2019 00:00"
Then I click the "Add Curve" button
diff --git a/tests/src/features/surfrad/basic/addRemoveScatter.feature b/tests/src/features/surfrad/basic/addRemoveScatter.feature
index 8f9309a383..44a349f0a4 100644
--- a/tests/src/features/surfrad/basic/addRemoveScatter.feature
+++ b/tests/src/features/surfrad/basic/addRemoveScatter.feature
@@ -15,16 +15,16 @@ Feature: Add Remove Scatter
Scenario: addRemoveScatter
When I set the plot type to "SimpleScatter"
Then the plot type should be "SimpleScatter"
- When I change the "data-source" parameter to "RAP_GSL_130"
- Then the "data-source" parameter value matches "RAP_GSL_130"
+ When I change the "data-source" parameter to "HRRR_OPS"
+ Then the "data-source" parameter value matches "HRRR_OPS"
When I change the "x-statistic" parameter to "Bias (Model - Obs)"
Then the "x-statistic" parameter value matches "Bias (Model - Obs)"
- When I change the "x-variable" parameter to "dswrf"
- Then the "x-variable" parameter value matches "dswrf"
+ When I change the "x-variable" parameter to "Downward Shortwave Radiation Flux (W/m2)"
+ Then the "x-variable" parameter value matches "Downward Shortwave Radiation Flux (W/m2)"
When I change the "y-statistic" parameter to "Obs average"
Then the "y-statistic" parameter value matches "Obs average"
- When I change the "y-variable" parameter to "dswrf"
- Then the "y-variable" parameter value matches "dswrf"
+ When I change the "y-variable" parameter to "Downward Shortwave Radiation Flux (W/m2)"
+ Then the "y-variable" parameter value matches "Downward Shortwave Radiation Flux (W/m2)"
When I set the curve-dates to "09/21/2019 00:00 - 09/24/2019 00:00"
Then the curve-dates value is "09/21/2019 00:00 - 09/24/2019 00:00"
Then I click the "Add Curve" button
diff --git a/tests/src/features/surfrad/basic/addRemoveTwoCurves.feature b/tests/src/features/surfrad/basic/addRemoveTwoCurves.feature
index 60c6809e72..b067945acd 100644
--- a/tests/src/features/surfrad/basic/addRemoveTwoCurves.feature
+++ b/tests/src/features/surfrad/basic/addRemoveTwoCurves.feature
@@ -18,8 +18,8 @@ Feature: Add Remove Two Curves
Scenario: addRemoveTwoCurves
When I set the plot type to "TimeSeries"
Then the plot type should be "TimeSeries"
- When I change the "data-source" parameter to "RAP_GSL_130"
- Then the "data-source" parameter value matches "RAP_GSL_130"
+ When I change the "data-source" parameter to "HRRR_OPS"
+ Then the "data-source" parameter value matches "HRRR_OPS"
When I set the dates to "09/21/2019 00:00 - 09/24/2019 00:00"
Then the dates value is "09/21/2019 00:00 - 09/24/2019 00:00"
Then I click the "Add Curve" button
diff --git a/tests/src/features/surfrad/basic/addRemoveTwoScatters.feature b/tests/src/features/surfrad/basic/addRemoveTwoScatters.feature
index fb24c88482..906333a248 100644
--- a/tests/src/features/surfrad/basic/addRemoveTwoScatters.feature
+++ b/tests/src/features/surfrad/basic/addRemoveTwoScatters.feature
@@ -18,16 +18,16 @@ Feature: Add Remove Two Scatters
Scenario: addRemoveTwoScatters
When I set the plot type to "SimpleScatter"
Then the plot type should be "SimpleScatter"
- When I change the "data-source" parameter to "RAP_GSL_130"
- Then the "data-source" parameter value matches "RAP_GSL_130"
+ When I change the "data-source" parameter to "HRRR_OPS"
+ Then the "data-source" parameter value matches "HRRR_OPS"
When I change the "x-statistic" parameter to "Bias (Model - Obs)"
Then the "x-statistic" parameter value matches "Bias (Model - Obs)"
- When I change the "x-variable" parameter to "dswrf"
- Then the "x-variable" parameter value matches "dswrf"
+ When I change the "x-variable" parameter to "Downward Shortwave Radiation Flux (W/m2)"
+ Then the "x-variable" parameter value matches "Downward Shortwave Radiation Flux (W/m2)"
When I change the "y-statistic" parameter to "Obs average"
Then the "y-statistic" parameter value matches "Obs average"
- When I change the "y-variable" parameter to "dswrf"
- Then the "y-variable" parameter value matches "dswrf"
+ When I change the "y-variable" parameter to "Downward Shortwave Radiation Flux (W/m2)"
+ Then the "y-variable" parameter value matches "Downward Shortwave Radiation Flux (W/m2)"
When I set the curve-dates to "09/21/2019 00:00 - 09/24/2019 00:00"
Then the curve-dates value is "09/21/2019 00:00 - 09/24/2019 00:00"
Then I click the "Add Curve" button
diff --git a/tests/src/features/surfrad/basic/addRemoveValidTimeCurve.feature b/tests/src/features/surfrad/basic/addRemoveValidTimeCurve.feature
index 274179ed98..3d796d18a5 100644
--- a/tests/src/features/surfrad/basic/addRemoveValidTimeCurve.feature
+++ b/tests/src/features/surfrad/basic/addRemoveValidTimeCurve.feature
@@ -16,8 +16,8 @@ Feature: Add Remove Valid Time Curve
Scenario: addRemoveValidTimeCurve
When I set the plot type to "ValidTime"
Then the plot type should be "ValidTime"
- When I change the "data-source" parameter to "RAP_GSL_130"
- Then the "data-source" parameter value matches "RAP_GSL_130"
+ When I change the "data-source" parameter to "HRRR_OPS"
+ Then the "data-source" parameter value matches "HRRR_OPS"
When I set the curve-dates to "09/21/2019 00:00 - 09/24/2019 00:00"
Then the curve-dates value is "09/21/2019 00:00 - 09/24/2019 00:00"
Then I click the "Add Curve" button
diff --git a/tests/src/features/surfrad/basic/addThreeCurvesRemoveOneCurveAddAnotherCurve.feature b/tests/src/features/surfrad/basic/addThreeCurvesRemoveOneCurveAddAnotherCurve.feature
index bb130410c3..fa6dc9f077 100644
--- a/tests/src/features/surfrad/basic/addThreeCurvesRemoveOneCurveAddAnotherCurve.feature
+++ b/tests/src/features/surfrad/basic/addThreeCurvesRemoveOneCurveAddAnotherCurve.feature
@@ -18,8 +18,8 @@ Feature: addThreeCurvesRemoveOneCurveAddAnotherCurve
Scenario: addThreeCurvesRemoveOneCurveAddAnotherCurve
When I set the plot type to "TimeSeries"
Then the plot type should be "TimeSeries"
- When I change the "data-source" parameter to "RAP_GSL_130"
- Then the "data-source" parameter value matches "RAP_GSL_130"
+ When I change the "data-source" parameter to "HRRR_OPS"
+ Then the "data-source" parameter value matches "HRRR_OPS"
When I set the dates to "09/21/2019 00:00 - 09/24/2019 00:00"
Then the dates value is "09/21/2019 00:00 - 09/24/2019 00:00"
Then I click the "Add Curve" button
@@ -30,8 +30,8 @@ Feature: addThreeCurvesRemoveOneCurveAddAnotherCurve
Then I click the "Add Curve" button
Then "Curve1" is added
- When I change the "data-source" parameter to "HRRR_OPS"
- Then the "data-source" parameter value matches "HRRR_OPS"
+ When I change the "data-source" parameter to "NAM"
+ Then the "data-source" parameter value matches "NAM"
When I click the "Add Curve" button
Then "Curve2" is added
And I should see a list of curves containing "Curve0,Curve1,Curve2"
diff --git a/tests/src/features/surfrad/basic/histogramBinSpacings.feature b/tests/src/features/surfrad/basic/histogramBinSpacings.feature
index a9f0cadd04..18cddb3d14 100644
--- a/tests/src/features/surfrad/basic/histogramBinSpacings.feature
+++ b/tests/src/features/surfrad/basic/histogramBinSpacings.feature
@@ -17,8 +17,8 @@ Feature: Histogram Bin Spacings
Scenario: histogramBinSpacings
When I set the plot type to "Histogram"
Then the plot type should be "Histogram"
- When I change the "data-source" parameter to "RAP_GSL_130"
- Then the "data-source" parameter value matches "RAP_GSL_130"
+ When I change the "data-source" parameter to "HRRR_OPS"
+ Then the "data-source" parameter value matches "HRRR_OPS"
When I set the curve-dates to "09/21/2019 00:00 - 09/24/2019 00:00"
Then the curve-dates value is "09/21/2019 00:00 - 09/24/2019 00:00"
Then I click the "Add Curve" button
diff --git a/tests/src/features/surfrad/basic/matchUnmatchDiffCurvesDailyModelCycle.feature b/tests/src/features/surfrad/basic/matchUnmatchDiffCurvesDailyModelCycle.feature
index bd0c7a03bc..2f1cd8e5df 100644
--- a/tests/src/features/surfrad/basic/matchUnmatchDiffCurvesDailyModelCycle.feature
+++ b/tests/src/features/surfrad/basic/matchUnmatchDiffCurvesDailyModelCycle.feature
@@ -15,8 +15,8 @@ Feature: Match Unmatch Diff Curves DailyModelCycle
Scenario: matchUnmatchDiffCurvesDailyModelCycle
When I set the plot type to "DailyModelCycle"
Then the plot type should be "DailyModelCycle"
- When I change the "data-source" parameter to "RAP_GSL_130"
- Then the "data-source" parameter value matches "RAP_GSL_130"
+ When I change the "data-source" parameter to "HRRR_OPS"
+ Then the "data-source" parameter value matches "HRRR_OPS"
When I set the dates to "09/21/2019 00:00 - 09/24/2019 00:00"
Then the dates value is "09/21/2019 00:00 - 09/24/2019 00:00"
When I click the "Add Curve" button
diff --git a/tests/src/features/surfrad/basic/matchUnmatchDiffCurvesDieoff.feature b/tests/src/features/surfrad/basic/matchUnmatchDiffCurvesDieoff.feature
new file mode 100644
index 0000000000..4368f129ed
--- /dev/null
+++ b/tests/src/features/surfrad/basic/matchUnmatchDiffCurvesDieoff.feature
@@ -0,0 +1,97 @@
+Feature: Match Unmatch Diff Curves Dieoff
+
+ As an unauthenticated user to the app,
+    with the plot type set to Dieoff,
+ I want to add two curves, plot unmatched, and then return to the main page.
+ I then want to add a matched difference curve, plot unmatched, return to the main page, plot matched, and then return to the main page.
+    I then want to add a pairwise difference curve, plot unmatched, return to the main page, plot matched, and then return to the main page.
+ I want to end by removing all of the curves.
+
+ Background:
+ Given I load the app "/surfrad"
+ Then I expect the app title to be "Surface Radiation"
+
+ @watch
+ Scenario: matchUnmatchDiffCurvesDieoff
+ When I set the plot type to "Dieoff"
+ Then the plot type should be "Dieoff"
+ When I change the "data-source" parameter to "HRRR_OPS"
+ Then the "data-source" parameter value matches "HRRR_OPS"
+ When I set the curve-dates to "09/21/2019 00:00 - 09/24/2019 00:00"
+ Then the curve-dates value is "09/21/2019 00:00 - 09/24/2019 00:00"
+ When I change the "dieoff-type" parameter to "Dieoff for a specified UTC cycle init hour"
+ Then the "dieoff-type" parameter value matches "Dieoff for a specified UTC cycle init hour"
+ When I click the "Add Curve" button
+ Then "Curve0" is added
+
+ When I change the "data-source" parameter to "RAP_OPS_130"
+ Then the "data-source" parameter value matches "RAP_OPS_130"
+ When I click the "Add Curve" button
+ Then "Curve1" is added
+ And I should see a list of curves containing "Curve0,Curve1"
+
+ When I click the "Plot Unmatched" button
+ Then I should be on the graph page
+ And I should have a "Dieoff" plot
+
+ When I click the "Back" button
+ Then I should be on the main page
+ And the "Plot Unmatched" button should be visible
+
+ When I click the "Plot Matched" button
+ Then I should be on the graph page
+ And I should have a "Dieoff" plot
+
+ When I click the "Back" button
+ Then I should be on the main page
+ And the "Plot Matched" button should be visible
+
+ When I click the "matching diffs" radio button
+ Then "Curve1-Curve0" is added
+ And I should see a list of curves containing "Curve0,Curve1,Curve1-Curve0"
+
+ When I click the "Plot Unmatched" button
+ Then I should be on the graph page
+ And I should have a "Dieoff" plot
+
+ When I click the "Back" button
+ Then I should be on the main page
+ And the "Plot Unmatched" button should be visible
+
+ When I click the "Plot Matched" button
+ Then I should be on the graph page
+ And I should have a "Dieoff" plot
+
+ When I click the "Back" button
+ Then I should be on the main page
+ And the "Plot Matched" button should be visible
+
+ When I click the "pairwise diffs" radio button
+ Then the plot format should be "pairwise"
+ Then I should see a list of curves containing "Curve0,Curve1,Curve1-Curve0"
+ And I should have 3 curves
+
+ When I click the "Plot Unmatched" button
+ Then I should be on the graph page
+ And I should have a "Dieoff" plot
+
+ When I click the "Back" button
+ Then I should be on the main page
+ And the "Plot Unmatched" button should be visible
+
+ When I click the "Plot Matched" button
+ Then I should be on the graph page
+ And I should have a "Dieoff" plot
+
+ When I click the "Back" button
+ Then I should be on the main page
+ And the "Plot Matched" button should be visible
+
+ When I click the "no diffs" radio button
+ Then I should see a list of curves containing "Curve0,Curve1"
+ And I should have 2 curves
+
+ When I click the "Remove All" button
+ And the "Remove all the curves" button should be visible
+ Then I click the "Remove all the curves" button
+ Then I should have 0 curves
diff --git a/tests/src/features/surfrad/basic/matchUnmatchDiffCurvesHistogram.feature b/tests/src/features/surfrad/basic/matchUnmatchDiffCurvesHistogram.feature
index da8c0e3819..a61bfb3cfe 100644
--- a/tests/src/features/surfrad/basic/matchUnmatchDiffCurvesHistogram.feature
+++ b/tests/src/features/surfrad/basic/matchUnmatchDiffCurvesHistogram.feature
@@ -15,8 +15,8 @@ Feature: Match Unmatch Diff Curves Histogram
Scenario: matchUnmatchDiffCurvesHistogram
When I set the plot type to "Histogram"
Then the plot type should be "Histogram"
- When I change the "data-source" parameter to "RAP_GSL_130"
- Then the "data-source" parameter value matches "RAP_GSL_130"
+ When I change the "data-source" parameter to "HRRR_OPS"
+ Then the "data-source" parameter value matches "HRRR_OPS"
When I set the curve-dates to "09/21/2019 00:00 - 09/24/2019 00:00"
Then the curve-dates value is "09/21/2019 00:00 - 09/24/2019 00:00"
When I click the "Add Curve" button
diff --git a/tests/src/features/surfrad/basic/matchUnmatchDiffCurvesTimeseries.feature b/tests/src/features/surfrad/basic/matchUnmatchDiffCurvesTimeseries.feature
index e3c1945c5a..e341ec83c7 100644
--- a/tests/src/features/surfrad/basic/matchUnmatchDiffCurvesTimeseries.feature
+++ b/tests/src/features/surfrad/basic/matchUnmatchDiffCurvesTimeseries.feature
@@ -15,8 +15,8 @@ Feature: Match Unmatch Diff Curves Timeseries
Scenario: matchUnmatchDiffCurvesTimeseries
When I set the plot type to "TimeSeries"
Then the plot type should be "TimeSeries"
- When I change the "data-source" parameter to "RAP_GSL_130"
- Then the "data-source" parameter value matches "RAP_GSL_130"
+ When I change the "data-source" parameter to "HRRR_OPS"
+ Then the "data-source" parameter value matches "HRRR_OPS"
When I set the dates to "09/21/2019 00:00 - 09/24/2019 00:00"
Then the dates value is "09/21/2019 00:00 - 09/24/2019 00:00"
When I click the "Add Curve" button
diff --git a/tests/src/features/surfrad/basic/matchUnmatchDiffCurvesValidTime.feature b/tests/src/features/surfrad/basic/matchUnmatchDiffCurvesValidTime.feature
index 6aa5204a8c..ba5cfe781a 100644
--- a/tests/src/features/surfrad/basic/matchUnmatchDiffCurvesValidTime.feature
+++ b/tests/src/features/surfrad/basic/matchUnmatchDiffCurvesValidTime.feature
@@ -15,8 +15,8 @@ Feature: Match Unmatch Diff Curves Valid Time
Scenario: matchUnmatchDiffCurvesValidTime
When I set the plot type to "ValidTime"
Then the plot type should be "ValidTime"
- When I change the "data-source" parameter to "RAP_GSL_130"
- Then the "data-source" parameter value matches "RAP_GSL_130"
+ When I change the "data-source" parameter to "HRRR_OPS"
+ Then the "data-source" parameter value matches "HRRR_OPS"
When I set the curve-dates to "09/21/2019 00:00 - 09/24/2019 00:00"
Then the curve-dates value is "09/21/2019 00:00 - 09/24/2019 00:00"
When I click the "Add Curve" button
diff --git a/tests/src/features/surfrad/exception/noDataFoundException_invalid_times.feature b/tests/src/features/surfrad/exception/noDataFoundException_invalid_times.feature
index 1131aa548f..61f700075e 100644
--- a/tests/src/features/surfrad/exception/noDataFoundException_invalid_times.feature
+++ b/tests/src/features/surfrad/exception/noDataFoundException_invalid_times.feature
@@ -19,8 +19,8 @@ Feature: No Data Found Exception: invalid_times
Scenario: noDataFoundException_invalid_times
When I set the plot type to "TimeSeries"
Then the plot type should be "TimeSeries"
- When I change the "data-source" parameter to "RAP_GSL_130"
- Then the "data-source" parameter value matches "RAP_GSL_130"
+ When I change the "data-source" parameter to "HRRR_OPS"
+ Then the "data-source" parameter value matches "HRRR_OPS"
When I set the dates to "01/19/1995 12:00 - 06/19/1996 12:00"
Then the dates value is "01/19/1995 12:00 - 06/19/1996 12:00"
When I click the "Add Curve" button