diff --git a/api/dataimport/for-trigger_test.go b/api/dataimport/for-trigger_test.go
index 39ce1297..784933e2 100644
--- a/api/dataimport/for-trigger_test.go
+++ b/api/dataimport/for-trigger_test.go
@@ -492,6 +492,24 @@ func Example_ImportForTrigger_Manual_SBU_NoAutoShare() {
 	// |{"id":"test1234sbu","title":"test1234sbu","dataTypes":[{"dataType":"SD_XRF","count":2520}],"instrument":"SBU_BREADBOARD","instrumentConfig":"StonyBrookBreadboard","meta":{"DriveID":"0","RTT":"","SCLK":"0","SOL":"","Site":"","SiteID":"0","Target":"","TargetID":"0"},"contentCounts":{"BulkSpectra":2,"DwellSpectra":0,"MaxSpectra":2,"NormalSpectra":2520,"PseudoIntensities":0},"creatorUserId":"SBUImport"}
 }
 
+/* Didn't get this working when the above was changed. The problem is that this still generates the user name SBUImport, so the
+ premise of the test fails because it doesn't end up with no user id at that point!
+func Test_ImportForTrigger_Manual_SBU_NoAutoShare_FailForPipeline(t *testing.T) {
+	remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Manual_OK2", "", "")
+
+	trigger := `{
+		"datasetID": "test1234sbu",
+		"jobID": "dataimport-unittest123sbu"
+}`
+
+	_, err := ImportForTrigger([]byte(trigger), envName, configBucket, datasetBucket, manualBucket, db, log, remoteFS)
+
+	// Make sure we got the error
+	if !strings.HasSuffix(err.Error(), "Cannot work out groups to auto-share imported dataset with") {
+		t.Errorf("ImportForTrigger didn't return expected error")
+	}
+}
+*/
 
 // Import a breadboard dataset from manual uploaded zip file
 func Example_importForTrigger_Manual_EM() {
 	remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("ManualEM_OK", specialUserIds.PIXLISESystemUserId, "PIXLFMGroupId")
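A note on the commented-out test above: as written, it calls err.Error() without first checking that err is non-nil, so if ImportForTrigger ever succeeded the test would panic rather than fail with a message. If the test is revived once the SBUImport default user is no longer generated, a nil-guarded assertion is safer. A minimal sketch of the inner assertion (illustrative only, not part of the patch; same expected-error suffix as in the test):

```go
// Guard err before reading its message: an unexpected nil error should
// fail the test cleanly instead of panicking on err.Error().
if err == nil || !strings.HasSuffix(err.Error(), "Cannot work out groups to auto-share imported dataset with") {
	t.Errorf("ImportForTrigger didn't return expected error, got: %v", err)
}
```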
diff --git a/api/dataimport/internal/converterSelector/selector.go b/api/dataimport/internal/converterSelector/selector.go
index d75a1c12..c15c6356 100644
--- a/api/dataimport/internal/converterSelector/selector.go
+++ b/api/dataimport/internal/converterSelector/selector.go
@@ -50,6 +50,7 @@ func SelectDataConverter(localFS fileaccess.FileAccess, remoteFS fileaccess.File
 	}
 	*/
 	// Check if it's a PIXL FM style dataset
+	log.Infof("Checking path \"%v\" for PIXL FM structure...", importPath)
 	pathType, err := pixlfm.DetectPIXLFMStructure(importPath)
 	if len(pathType) > 0 && err == nil {
 		// We know it's a PIXL FM type dataset... it'll later be determined which one
@@ -75,10 +76,14 @@ func SelectDataConverter(localFS fileaccess.FileAccess, remoteFS fileaccess.File
 	var detectorFile dataimportModel.DetectorChoice
 	err = localFS.ReadJSON(detPath, "", &detectorFile, false)
 	if err == nil {
+		log.Infof("Loaded detector.json...")
+
 		// We found it, work out based on what's in there
 		if strings.HasSuffix(detectorFile.Detector, "-breadboard") {
+			log.Infof("Assuming breadboard dataset...")
 			return jplbreadboard.MSATestData{}, nil
 		} else if detectorFile.Detector == "pixl-em" {
+			log.Infof("Assuming PIXL EM dataset...")
 			return pixlem.PIXLEM{}, nil
 		}
 	} else {
diff --git a/api/dataimport/internal/converters/pixlfm/import.go b/api/dataimport/internal/converters/pixlfm/import.go
index 78f515aa..acb117f7 100644
--- a/api/dataimport/internal/converters/pixlfm/import.go
+++ b/api/dataimport/internal/converters/pixlfm/import.go
@@ -85,11 +85,13 @@ func (p PIXLFM) Import(importPath string, pseudoIntensityRangesPath string, data
 	rgbuImgDir := fileStructure{}
 	discoImgDir := fileStructure{}
 
+	log.Infof("Checking path \"%v\" for FM dataset type", importPath)
 	pathType, err := DetectPIXLFMStructure(importPath)
 	if err != nil {
 		return nil, "", err
 	}
 
+	log.Infof("Found path \"%v\" is of type %v", importPath, pathType)
 	if pathType == "DataDrive" {
 		// This is the official way we receive PIXL FM data from Mars
 		// We expect these directories to exist...
diff --git a/api/endpoints/Scan.go b/api/endpoints/Scan.go
index 84c49d05..34f1439a 100644
--- a/api/endpoints/Scan.go
+++ b/api/endpoints/Scan.go
@@ -51,6 +51,19 @@ func PutScanData(params apiRouter.ApiHandlerGenericParams) error {
 	s3PathStart := path.Join(filepaths.DatasetUploadRoot, scanId)
 
 	// NOTE: We overwrite any previous attempts without worry!
+	existing, err := params.Svcs.FS.ListObjects(destBucket, s3PathStart+"/")
+	if err == nil && len(existing) > 0 {
+		// Delete all that exists
+		msg := fmt.Sprintf("PutScan for \"%v\": Deleting existing files...\n", scanId)
+		for _, existingItem := range existing {
+			msg += existingItem + "\n"
+			if err := params.Svcs.FS.DeleteObject(destBucket, existingItem); err != nil {
+				return fmt.Errorf("Failed to delete: \"%v\", error: %v", existingItem, err)
+			}
+		}
+
+		params.Svcs.Log.Infof(msg)
+	}
 
 	// Read in body
 	zippedData, err := io.ReadAll(params.Request.Body)
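For context on the Scan.go change: PutScanData now clears any previous upload under the scan's S3 prefix before accepting new data, rather than leaving stale objects from an earlier attempt alongside the new ones. The pattern in isolation looks like the following — a minimal sketch assuming an S3-backed interface with the ListObjects/DeleteObject methods seen in the diff (the FileAccess type and clearPrefix name here are illustrative, not the actual pixlise API):

```go
package cleanup

import "fmt"

// FileAccess is a stand-in for the S3-backed file interface used in the
// patch; only the two methods the cleanup path needs are sketched here.
type FileAccess interface {
	ListObjects(bucket string, prefix string) ([]string, error)
	DeleteObject(bucket string, objPath string) error
}

// clearPrefix deletes every object under prefix so a re-upload starts clean.
// On failure it names the individual object that could not be deleted.
func clearPrefix(fs FileAccess, bucket string, prefix string) error {
	existing, err := fs.ListObjects(bucket, prefix)
	if err != nil {
		return err // listing failed, nothing was deleted
	}
	for _, item := range existing {
		if err := fs.DeleteObject(bucket, item); err != nil {
			return fmt.Errorf("failed to delete %q: %v", item, err)
		}
	}
	return nil
}
```

Returning on the first failed delete, as the patch does, can leave a partial cleanup behind; a caller needing all-or-nothing semantics would have to collect errors and continue instead.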