Merge pull request #348 from aodn/1427-failure-emails-missing-collection-name

1427 failure emails missing collection name
craigrose authored Nov 21, 2023
2 parents 8af791d + 2ff6222 commit 777d5d6
Showing 14 changed files with 117 additions and 5 deletions.
@@ -20,7 +20,6 @@
import au.org.aodn.aws.geoserver.client.HttpIndexReader;
import au.org.aodn.aws.geoserver.client.SubsetParameters;
import au.org.aodn.aws.geoserver.client.TimeNotSupportedException;
import au.org.emii.util.IntegerHelper;
import au.org.emii.util.NumberRange;
import au.org.emii.util.ProvenanceWriter;
import com.amazonaws.AmazonServiceException;
@@ -546,6 +545,15 @@ public void start() {

// Send failed job email to user
if (contactEmail != null) {
if ("".equals(collectionTitle)) {
// The metadata has not been retrieved yet
try {
collectionTitle = getCollectionTitle(statusFileManager);
} catch (Exception ex) {
logger.error(ex.getMessage(), ex);
collectionTitle = "Could not retrieve collection metadata.";
}
}
try {
emailService.sendFailedJobEmail(contactEmail,
administratorEmail,
@@ -568,6 +576,38 @@ public void start() {
}
}

private String getCollectionTitle(JobFileManager jobFileManager) throws Exception {

String collectionTitle = "";

// Get the layer name from the request
String requestXML = jobFileManager.read(requestFilename);
XmlRequestParser parser = new XmlRequestParser();
Execute request = (Execute) parser.parse(requestXML);
ExecuteRequestHelper requestHelper = new ExecuteRequestHelper(request);
String layer = requestHelper.getLiteralInputValue(LITERAL_INPUT_IDENTIFIER_LAYER);

// Search for the metadata record for the layer by layer name
String catalogueURL = WpsConfig.getProperty(GEONETWORK_CATALOGUE_URL_CONFIG_KEY);
String layerSearchField = WpsConfig.getProperty(GEONETWORK_CATALOGUE_LAYER_FIELD_CONFIG_KEY);
CatalogueReader catalogueReader = new CatalogueReader(catalogueURL, layerSearchField);

String metadataResponseXML = catalogueReader.getMetadataSummaryXML(layer);

if (metadataResponseXML != null && metadataResponseXML.length() > 0) {

// We only need the <metadata> tag and its contents
String metadataSummary = catalogueReader.getMetadataNodeContent(metadataResponseXML);

if (metadataSummary != null) {
collectionTitle = catalogueReader.getCollectionTitle(metadataSummary);
logger.info("Metadata collection title: " + collectionTitle);
}
}

return collectionTitle;
}

private void checkLoggingConfiguration() {
// If we don't have a Sumo endpoint defined but we do have a Sumo log appender configured,
// we should remove the appender, because it isn't configured properly anyway.
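
For readers outside the codebase, a minimal, self-contained sketch of the fallback pattern the failed-job email path above now follows: resolve the collection title late, and substitute a placeholder when the catalogue lookup fails, so the email is never sent without some collection context. The helper class and lambda below are hypothetical and not part of this commit.

import java.util.concurrent.Callable;

public class CollectionTitleFallbackSketch {

    // Resolve a title via the supplied lookup; fall back to a placeholder on failure or empty result.
    static String resolveTitle(Callable<String> lookup, String placeholder) {
        try {
            String title = lookup.call();
            return (title == null || title.isEmpty()) ? placeholder : title;
        } catch (Exception ex) {
            // Mirrors the runner: substitute rather than let the failure email itself fail.
            return placeholder;
        }
    }

    public static void main(String[] args) {
        String title = resolveTitle(
                () -> { throw new Exception("catalogue unreachable"); },
                "Could not retrieve collection metadata.");
        System.out.println(title); // prints the placeholder text
    }
}
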
@@ -18,6 +18,7 @@
import java.util.UUID;

import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertThrows;

@@ -98,4 +99,21 @@ public void verifyNoRequestShowExceptionTextInStatus() {
x.contains("Exception occurred during aggregation : jakarta.xml.bind.UnmarshalException"));
});
}

@Test
public void requestFails() throws IOException {
// This test depends on a request run against a faulty geoserver layer.
// Since we don't normally have such layers, the test will be skipped if the request does not fail.
// For debugging layers that are failing, update the request_fails.xml file to generate a failed request.
String uuid = UUID.randomUUID().toString();
System.setProperty(WpsConfig.AWS_BATCH_JOB_ID_CONFIG_KEY, uuid);

File f = ResourceUtils.getFile("classpath:request_fails.xml");
writeRequestXml(f);
runner.start();

String statusXml = getStatusXml();
assumeTrue("Assuming the process failed", statusXml.contains("ProcessFailed"));

}
}
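
The new test relies on JUnit 4's Assume mechanism rather than an assertion, so a request that happens to succeed marks the test as skipped instead of failed. A standalone illustration of that behaviour follows; the class name and status value are hypothetical, not part of this commit.

import static org.junit.Assume.assumeTrue;

import org.junit.Test;

public class AssumeExampleTest {

    @Test
    public void skippedWhenPreconditionDoesNotHold() {
        String statusXml = "<status>ProcessSucceeded</status>"; // stand-in status document
        // When the condition is false, JUnit reports the test as skipped (ignored), not failed.
        assumeTrue("Assuming the process failed", statusXml.contains("ProcessFailed"));
        // Assertions placed here only run when the precondition holds.
    }
}
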
@@ -45,6 +45,9 @@ public class YmlToSystemEnvConfigRunner implements CommandLineRunner {
@Value("${aggregationWorker.GEONETWORK_CATALOGUE_URL}")
protected Optional<String> geoNetworkUrl;

@Value("${aggregationWorker.GEONETWORK_LAYER_SEARCH_FIELD}")
protected Optional<String> geonetworkLayerSearchField;

@Value("${aggregationWorker.GEOSERVER_CATALOGUE_ENDPOINT_URL}")
protected Optional<String> geoserverUrl;

@@ -89,6 +92,7 @@ public void run(String... args) {
System.setProperty(WpsConfig.REQUEST_S3_FILENAME_CONFIG_KEY, requestFileName.orElse(null));
System.setProperty(WpsConfig.WPS_ENDPOINT_URL_CONFIG_KEY, endPointUrl.orElse(null));
System.setProperty(WpsConfig.GEONETWORK_CATALOGUE_URL_CONFIG_KEY, geoNetworkUrl.orElse(null));
System.setProperty(WpsConfig.GEONETWORK_CATALOGUE_LAYER_FIELD_CONFIG_KEY, geonetworkLayerSearchField.orElse(null));
System.setProperty(WpsConfig.ADMINISTRATOR_EMAIL, administratorEmail.orElse(null));
System.setProperty(WpsConfig.OUTPUT_S3_BUCKET_CONFIG_KEY, outputBucket.orElse(null));
System.setProperty(WpsConfig.AWS_REGION_SES_CONFIG_KEY, region.orElse(null));
@@ -11,6 +11,7 @@ aggregationWorker:
# Need enhance to point to some test server for development
GEOSERVER_CATALOGUE_ENDPOINT_URL: 'https://geoserver-123.aodn.org.au/geoserver/imos/ows'
GEONETWORK_CATALOGUE_URL: 'https://catalogue.aodn.org.au/geonetwork'
GEONETWORK_LAYER_SEARCH_FIELD: 'layer'
DATA_DOWNLOAD_URL_PREFIX: 'http://data.aodn.org.au/'
AGGREGATOR_TEMPLATE_FILE_URL: 'https://raw.githubusercontent.com/aodn/geoserver-config/production/wps/templates.xml'
administratorEmail: '[email protected]'
@@ -24,4 +25,4 @@

logging:
level:
root: debug
root: info
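
Taken together, the YAML entry and the runner change thread the new search field from configuration through to the catalogue lookup. Below is a minimal, self-contained sketch of that flow; the class and the literal system-property key are illustrative, while the YAML key mirrors the diff above.

import java.util.Optional;

public class LayerSearchFieldConfigSketch {

    // Stand-in for the @Value("${aggregationWorker.GEONETWORK_LAYER_SEARCH_FIELD}") binding.
    static Optional<String> geonetworkLayerSearchField = Optional.of("layer");

    public static void main(String[] args) {
        // YmlToSystemEnvConfigRunner copies the bound value into a system property at startup.
        geonetworkLayerSearchField.ifPresent(
                value -> System.setProperty("GEONETWORK_LAYER_SEARCH_FIELD", value));

        // WpsConfig.getProperty(...) later reads it back when the CatalogueReader is built.
        String layerSearchField = System.getProperty("GEONETWORK_LAYER_SEARCH_FIELD");
        System.out.println("Catalogue layer search field: " + layerSearchField);
    }
}

The sketch uses ifPresent rather than orElse(null) because System.setProperty rejects a null value; the runner's orElse(null) form assumes the property is always present in the YAML.
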
37 changes: 37 additions & 0 deletions aggregation-worker/src/test/resources/request_fails.xml
@@ -0,0 +1,37 @@
<?xml version="1.0" encoding="utf-8" ?>
<ns3:Execute service="WPS" version="1.0.0" xmlns:ns2="http://www.w3.org/1999/xlink" xmlns:ns1="http://www.opengis.net/ows/1.1" xmlns:ns3="http://www.opengis.net/wps/1.0.0">
<ns1:Identifier>gs:GoGoDuck</ns1:Identifier>
<ns3:DataInputs>
<ns3:Input>
<ns1:Identifier>layer</ns1:Identifier>
<ns3:Data>
<ns3:LiteralData>gsla_fv02_nrt_timeseries_url</ns3:LiteralData>
</ns3:Data>
</ns3:Input>
<ns3:Input>
<ns1:Identifier>TestMode</ns1:Identifier>
<ns3:Data>
<ns3:LiteralData>false</ns3:LiteralData>
</ns3:Data>
</ns3:Input>
<ns3:Input>
<ns1:Identifier>subset</ns1:Identifier>
<ns3:Data>
<ns3:LiteralData>TIME,2021-09-01T00:00:00.000Z,2021-10-01T00:00:00.000Z;LATITUDE,-36.958984375,-37.75;LONGITUDE,129.375,130.78125</ns3:LiteralData>
</ns3:Data>
</ns3:Input>
<ns3:Input>
<ns1:Identifier>callbackParams</ns1:Identifier>
<ns3:Data>
<ns3:LiteralData>[email protected]</ns3:LiteralData>
</ns3:Data>
</ns3:Input>
</ns3:DataInputs>
<ns3:ResponseForm>
<ns3:ResponseDocument storeExecuteResponse="true" status="true">
<ns3:Output asReference="true" mimeType="text/csv">
<ns1:Identifier>result</ns1:Identifier>
</ns3:Output>
</ns3:ResponseDocument>
</ns3:ResponseForm>
</ns3:Execute>
@@ -209,7 +209,11 @@ outputFile, new AggregationOverrides(), null, bbox, null, null
netcdfAggregator.add(resourcePath("au/org/emii/aggregator/projection-2.nc"));
}

assertNetcdfFilesEqual(resourcePath("au/org/emii/aggregator/projection-expected.nc"), outputFile);
if (libraryVersion.startsWith("4.8")) {
assertNetcdfFilesEqual(resourcePath("au/org/emii/aggregator/projection-expected-4.8.nc"), outputFile);
} else {
assertNetcdfFilesEqual(resourcePath("au/org/emii/aggregator/projection-expected.nc"), outputFile);
}
}

@Test
@@ -223,7 +227,11 @@ outputFile, new AggregationOverrides(), null, bbox, null, null
netcdfAggregator.add(resourcePath("au/org/emii/aggregator/projection-2.nc"));
}

assertNetcdfFilesEqual(resourcePath("au/org/emii/aggregator/projection-point-within-expected.nc"), outputFile);
if (libraryVersion.startsWith("4.8")) {
assertNetcdfFilesEqual(resourcePath("au/org/emii/aggregator/projection-point-within-expected-4.8.nc"), outputFile);
} else {
assertNetcdfFilesEqual(resourcePath("au/org/emii/aggregator/projection-point-within-expected.nc"), outputFile);
}
}

@Test
@@ -237,7 +245,11 @@ outputFile, new AggregationOverrides(), null, bbox, null, null
netcdfAggregator.add(resourcePath("au/org/emii/aggregator/projection-2.nc"));
}

assertNetcdfFilesEqual(resourcePath("au/org/emii/aggregator/projection-point-outside-expected.nc"), outputFile);
if (libraryVersion.startsWith("4.8")) {
assertNetcdfFilesEqual(resourcePath("au/org/emii/aggregator/projection-point-outside-expected-4.8.nc"), outputFile);
} else {
assertNetcdfFilesEqual(resourcePath("au/org/emii/aggregator/projection-point-outside-expected.nc"), outputFile);
}
}

@Test(expected = AggregationException.class)
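
The aggregator tests now pick a version-specific expected file when the netCDF library version starts with 4.8. An isolated sketch of that selection follows; the helper is illustrative only.

public class ExpectedFileSelectionSketch {

    // Mirrors the branching added to the aggregator tests: choose the "-4.8" expected file
    // when the library version carries a 4.8 prefix, otherwise the original expected file.
    static String expectedFileFor(String libraryVersion, String baseName) {
        return libraryVersion.startsWith("4.8")
                ? baseName + "-4.8.nc"
                : baseName + ".nc";
    }

    public static void main(String[] args) {
        System.out.println(expectedFileFor("4.8.1", "projection-expected")); // projection-expected-4.8.nc
        System.out.println(expectedFileFor("5.5.3", "projection-expected")); // projection-expected.nc
    }
}
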
Binary file modified lambda/job-status-service-lambda-package.zip
Binary file modified lambda/request-handler-lambda-package.zip
