Merge pull request #193 from pixlise/feature/v4-post-release-fixes
Feature/v4 post release fixes
pnemere authored Mar 26, 2024
2 parents 3c6df23 + e0398db commit 5cae72e
Showing 4 changed files with 38 additions and 0 deletions.
18 changes: 18 additions & 0 deletions api/dataimport/for-trigger_test.go
@@ -492,6 +492,24 @@ func Example_ImportForTrigger_Manual_SBU_NoAutoShare() {
// <nil>|{"id":"test1234sbu","title":"test1234sbu","dataTypes":[{"dataType":"SD_XRF","count":2520}],"instrument":"SBU_BREADBOARD","instrumentConfig":"StonyBrookBreadboard","meta":{"DriveID":"0","RTT":"","SCLK":"0","SOL":"","Site":"","SiteID":"0","Target":"","TargetID":"0"},"contentCounts":{"BulkSpectra":2,"DwellSpectra":0,"MaxSpectra":2,"NormalSpectra":2520,"PseudoIntensities":0},"creatorUserId":"SBUImport"}
}

/* Didn't get this working when the above was changed. The problem is that this still generates the user name SBUImport, so the
premise of the test fails because it doesn't end up with no user id at that point!
func Test_ImportForTrigger_Manual_SBU_NoAutoShare_FailForPipeline(t *testing.T) {
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Manual_OK2", "", "")
trigger := `{
"datasetID": "test1234sbu",
"jobID": "dataimport-unittest123sbu"
}`
_, err := ImportForTrigger([]byte(trigger), envName, configBucket, datasetBucket, manualBucket, db, log, remoteFS)
// Make sure we got the error
if !strings.HasSuffix(err.Error(), "Cannot work out groups to auto-share imported dataset with") {
t.Errorf("ImportForTrigger didn't return expected error")
}
}
*/
// Import a breadboard dataset from manual uploaded zip file
func Example_importForTrigger_Manual_EM() {
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("ManualEM_OK", specialUserIds.PIXLISESystemUserId, "PIXLFMGroupId")
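
For reference, the disabled test above feeds ImportForTrigger a small JSON trigger payload. Below is a minimal sketch of how such a payload decodes in Go, assuming a struct shape inferred from the "datasetID"/"jobID" keys in the test; this is not the importer's real trigger type.

package main

import (
    "encoding/json"
    "fmt"
)

// importTrigger mirrors the JSON keys used in the disabled test above;
// the struct name and field tags are assumptions for illustration.
type importTrigger struct {
    DatasetID string `json:"datasetID"`
    JobID     string `json:"jobID"`
}

func main() {
    trigger := `{"datasetID": "test1234sbu", "jobID": "dataimport-unittest123sbu"}`
    var t importTrigger
    if err := json.Unmarshal([]byte(trigger), &t); err != nil {
        panic(err)
    }
    fmt.Printf("dataset=%v, job=%v\n", t.DatasetID, t.JobID)
}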
5 changes: 5 additions & 0 deletions api/dataimport/internal/converterSelector/selector.go
@@ -50,6 +50,7 @@ func SelectDataConverter(localFS fileaccess.FileAccess, remoteFS fileaccess.File
}
*/
// Check if it's a PIXL FM style dataset
log.Infof("Checking path \"%v\" for PIXL FM structure...", importPath)
pathType, err := pixlfm.DetectPIXLFMStructure(importPath)
if len(pathType) > 0 && err == nil {
// We know it's a PIXL FM type dataset... it'll later be determined which one
@@ -75,10 +76,14 @@ func SelectDataConverter(localFS fileaccess.FileAccess, remoteFS fileaccess.File
var detectorFile dataimportModel.DetectorChoice
err = localFS.ReadJSON(detPath, "", &detectorFile, false)
if err == nil {
log.Infof("Loaded detector.json...")

// We found it, work out based on what's in there
if strings.HasSuffix(detectorFile.Detector, "-breadboard") {
log.Infof("Assuming breadboard dataset...")
return jplbreadboard.MSATestData{}, nil
} else if detectorFile.Detector == "pixl-em" {
log.Infof("Assuming PIXL EM dataset...")
return pixlem.PIXLEM{}, nil
}
} else {
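
The new log lines above trace SelectDataConverter's decision path: detect a PIXL FM directory structure first, then fall back to reading detector.json and branching on its Detector value. A minimal sketch of that branch logic follows; the JSON field tag is an assumption, and converter names are returned as plain strings here, whereas the real function returns converter instances (jplbreadboard.MSATestData, pixlem.PIXLEM) as shown in the diff.

package main

import (
    "fmt"
    "strings"
)

// DetectorChoice matches the struct read from detector.json above;
// the JSON tag is an assumption.
type DetectorChoice struct {
    Detector string `json:"detector"`
}

// pickConverter mirrors the branch logic in SelectDataConverter: any
// "*-breadboard" detector selects the JPL breadboard importer, "pixl-em"
// selects the PIXL EM importer, and anything else is unrecognised.
func pickConverter(c DetectorChoice) (string, error) {
    if strings.HasSuffix(c.Detector, "-breadboard") {
        return "jplbreadboard.MSATestData", nil
    }
    if c.Detector == "pixl-em" {
        return "pixlem.PIXLEM", nil
    }
    return "", fmt.Errorf("unknown detector: %v", c.Detector)
}

func main() {
    for _, d := range []string{"jpl-breadboard", "pixl-em", "something-else"} {
        name, err := pickConverter(DetectorChoice{Detector: d})
        fmt.Println(d, "->", name, err)
    }
}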
2 changes: 2 additions & 0 deletions api/dataimport/internal/converters/pixlfm/import.go
@@ -85,11 +85,13 @@ func (p PIXLFM) Import(importPath string, pseudoIntensityRangesPath string, data
rgbuImgDir := fileStructure{}
discoImgDir := fileStructure{}

log.Infof("Checking path \"%v\" for FM dataset type", importPath)
pathType, err := DetectPIXLFMStructure(importPath)
if err != nil {
return nil, "", err
}

log.Infof("Found path \"%v\" is of type %v", importPath, pathType)
if pathType == "DataDrive" {
// This is the official way we receive PIXL FM data from Mars
// We expect these directories to exist...
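
DetectPIXLFMStructure's body isn't part of this diff, but both call sites rely on the same contract: a pathType string (e.g. "DataDrive") on success, plus an error. A minimal stand-in illustrating that contract; the marker-directory check here is hypothetical, for illustration only.

package main

import (
    "fmt"
    "os"
    "path/filepath"
)

// detectStructure is a stand-in for DetectPIXLFMStructure: it returns a
// pathType string on success, or an error for unrecognised layouts. The
// "DataDrive" marker-directory check is a hypothetical example.
func detectStructure(importPath string) (string, error) {
    if info, err := os.Stat(filepath.Join(importPath, "DataDrive")); err == nil && info.IsDir() {
        return "DataDrive", nil
    }
    return "", fmt.Errorf("unrecognised dataset structure under \"%v\"", importPath)
}

func main() {
    pathType, err := detectStructure(os.TempDir())
    fmt.Println(pathType, err)
}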
13 changes: 13 additions & 0 deletions api/endpoints/Scan.go
@@ -51,6 +51,19 @@ func PutScanData(params apiRouter.ApiHandlerGenericParams) error {
s3PathStart := path.Join(filepaths.DatasetUploadRoot, scanId)

// NOTE: We overwrite any previous attempts without worry!
existing, err := params.Svcs.FS.ListObjects(destBucket, s3PathStart+"/")
if err == nil && len(existing) > 0 {
// Delete all that exists
msg := fmt.Sprintf("PutScan for \"%v\": Deleting existing files...\n", scanId)
for _, existingItem := range existing {
msg += existingItem + "\n"
if err := params.Svcs.FS.DeleteObject(destBucket, existingItem); err != nil {
return fmt.Errorf("Failed to delete: \"%v\", error: %v", existingItem, err)
}
}

params.Svcs.Log.Infof(msg)
}

// Read in body
zippedData, err := io.ReadAll(params.Request.Body)
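
The added block above lists everything under the scan's upload prefix and deletes it before accepting the new upload, so stale files from a previous attempt can't linger. A self-contained sketch of that delete-before-overwrite pattern, against a pared-down stand-in for the fileaccess interface; PIXLISE's real fileaccess.FileAccess API is larger, and the names here are assumptions.

package main

import "fmt"

// FileAccess captures just the two calls PutScanData makes above;
// the real fileaccess.FileAccess interface is larger.
type FileAccess interface {
    ListObjects(bucket string, prefix string) ([]string, error)
    DeleteObject(bucket string, path string) error
}

// clearPrefix mirrors the diff's behaviour: a listing error is treated as
// "nothing to delete", while any delete error aborts the whole operation.
func clearPrefix(fs FileAccess, bucket, prefix string) error {
    existing, err := fs.ListObjects(bucket, prefix)
    if err != nil || len(existing) == 0 {
        return nil
    }
    for _, item := range existing {
        if err := fs.DeleteObject(bucket, item); err != nil {
            return fmt.Errorf("failed to delete \"%v\": %v", item, err)
        }
    }
    return nil
}

// memFS is a tiny in-memory stand-in so the sketch runs on its own;
// it ignores bucket and prefix for brevity.
type memFS map[string]bool

func (m memFS) ListObjects(bucket string, prefix string) ([]string, error) {
    items := []string{}
    for k := range m {
        items = append(items, k)
    }
    return items, nil
}

func (m memFS) DeleteObject(bucket string, path string) error {
    delete(m, path)
    return nil
}

func main() {
    fs := memFS{"Uploads/scan1/old.zip": true}
    fmt.Println(clearPrefix(fs, "dest-bucket", "Uploads/scan1/"), "remaining:", len(fs))
}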
