Merge pull request #352 from pixlise/feature/em-import
Feature/em import
pnemere authored Nov 11, 2024
2 parents 47cc3db + ed68cff commit 3228489
Showing 6 changed files with 80 additions and 77 deletions.
13 changes: 10 additions & 3 deletions api/dataimport/internal/converters/pixlem/import.go
@@ -136,13 +136,16 @@ func (p PIXLEM) Import(importPath string, pseudoIntensityRangesPath string, data
}

beamPath := filepath.Join(importPath, beamName)
data, err := importEMData(rttStr, beamPath, imageList, bulkMaxList, msaList, &fs, log)
// HK file should be here too...
hkPath := filepath.Join(importPath, "housekeeping-"+rttStr+".csv")
data, err := importEMData(rttStr, beamPath, hkPath, imageList, bulkMaxList, msaList, &fs, log)
if err != nil {
log.Errorf("Import failed for %v: %v", beamName, err)
continue
}

log.Infof("Imported scan with RTT: %v", rtt)
data.DatasetID += "_em" // To ensure we don't overwrite real datasets
return data, filepath.Join(importPath, zipName, zipName), nil
}

@@ -176,7 +179,7 @@ func extractZipName(files []string) (string, error) {

return zipName, nil
}
func importEMData(rtt string, beamLocPath string, imagePathList []string, bulkMaxList []string, msaList []string, fs fileaccess.FileAccess, logger logger.ILogger) (*dataConvertModels.OutputData, error) {
func importEMData(rtt string, beamLocPath string, hkPath string, imagePathList []string, bulkMaxList []string, msaList []string, fs fileaccess.FileAccess, logger logger.ILogger) (*dataConvertModels.OutputData, error) {
// Read MSAs
locSpectraLookup, err := jplbreadboard.MakeSpectraLookup("", msaList, true, false, "", false, logger)
if err != nil {
@@ -198,8 +201,12 @@ func importEMData(rtt string, beamLocPath string, imagePathList []string, bulkMa
return nil, err
}

hkData, err := importerutils.ReadHousekeepingFile(hkPath, 1, logger)
if err != nil {
return nil, err
}

// We don't have everything a full FM dataset would have...
var hkData dataConvertModels.HousekeepingData
var pseudoIntensityData dataConvertModels.PseudoIntensities
var pseudoIntensityRanges []dataConvertModels.PseudoIntensityRange
var matchedAlignedImages []dataConvertModels.MatchedAlignedImageMeta
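The importer now expects a housekeeping-<rtt>.csv next to the beam file and hands it to importerutils.ReadHousekeepingFile(hkPath, 1, logger). The snippet below is a minimal, self-contained sketch only (not part of this commit) of reading such a CSV with the standard encoding/csv package, assuming the "1" above means a single header row to skip; the real reader's column handling lives in importerutils.

package main

import (
	"encoding/csv"
	"fmt"
	"os"
)

// readHKCSV loads every data row from a housekeeping CSV, skipping one assumed header row.
func readHKCSV(path string) ([][]string, error) {
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()

	r := csv.NewReader(f)
	r.TrimLeadingSpace = true // generated values are written ", "-separated

	rows, err := r.ReadAll()
	if err != nil {
		return nil, err
	}
	if len(rows) < 2 {
		return nil, fmt.Errorf("%v contained no data rows", path)
	}
	return rows[1:], nil // drop the assumed single header row
}

func main() {
	// File name follows the housekeeping-<rtt>.csv convention above; the RTT here is hypothetical.
	rows, err := readHKCSV("housekeeping-208601602.csv")
	if err != nil {
		fmt.Println("read failed:", err)
		return
	}
	fmt.Println("housekeeping rows:", len(rows))
}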
43 changes: 30 additions & 13 deletions api/dataimport/sdfToRSI/readHousekeeping.go
@@ -129,22 +129,27 @@
SDF-Raw:
// 2022-302T00:22:08 : 1604 hk raw -------->> 0673085B 074006BF 08470890 00000000 04640000 DEADDEAD DEADDEAD 190425F4
// 2022-302T00:22:08 : 1604 hk raw -------->> 2AEE8631 99800668 F80039F1 00238D00 0CD4000D 25000036 000D816F 4AE4F2CA
// ]
func processHousekeeping(lineNo int, lineData string, lines []string, sclk string, rtt int64, pmc int) (int64, string, error) {
func processHousekeeping(lineNo int, lineData string, lines []string, sclk string, rtt int64, pmc int) (int64, string, string, error) {
if len(lines) != 23 {
return 0, "", fmt.Errorf("hk line count invalid on line %v", lineNo)
return 0, "", "", fmt.Errorf("hk line count invalid on line %v", lineNo)
}

hktime, _, _, err := readNumBetween(lineData, "HK Time: 0x", " ", read_int_hex)
if err != nil || hktime <= 0 {
return 0, "", fmt.Errorf("hk start didn't contain hk time on line %v", lineNo)
return 0, "", "", fmt.Errorf("hk start didn't contain hk time on line %v", lineNo)
}

fcnt, _, _, err := readNumBetween(lineData, "fcnt:", " ", read_int)
if err != nil || fcnt <= 0 {
return 0, "", "", fmt.Errorf("hk start didn't contain fcnt on line %v", lineNo)
}

// Snip all lines so they start after mcc_trn
tok := fmt.Sprintf("%v hk", pmc)
for c := 0; c < 23; c++ {
pos := strings.Index(lines[c], tok)
if pos < 0 {
return 0, "", fmt.Errorf("%v not found on line %v", tok, lineNo)
return 0, "", "", fmt.Errorf("%v not found on line %v", tok, lineNo)
}

lines[c] = strings.Trim(lines[c][pos+len(tok):], " ")
@@ -156,14 +161,14 @@ func processHousekeeping(lineNo int, lineData string, lines []string, sclk strin

tok, lines[15], ok = takeToken(lines[15], ":")
if !ok || tok != "Motor Pos" {
return 0, "", fmt.Errorf("Expected Motor Pos, got %v on line %v", tok, lineNo)
return 0, "", "", fmt.Errorf("Expected Motor Pos, got %v on line %v", tok, lineNo)
}

for c := 0; c < 6; c++ {
var p int64
p, lines[15], err = readInt(lines[15])
if err != nil {
return 0, "", fmt.Errorf("Failed to read Motor Pos %v on line %v", c, lineNo)
return 0, "", "", fmt.Errorf("Failed to read Motor Pos %v on line %v", c, lineNo)
}
motorPos = append(motorPos, int(p))
}
@@ -178,10 +183,10 @@ func processHousekeeping(lineNo int, lineData string, lines []string, sclk strin
for c := 0; c < len(names); c++ {
_, f, pos, err = readNumBetween(lines[c+lineOffset], names[c], " ", read_float)
if err != nil {
return 0, "", err
return 0, "", "", err
}
if pos < 0 {
return 0, "", fmt.Errorf("Missing value: %v", names[c])
return 0, "", "", fmt.Errorf("Missing value: %v", names[c])
}
fVal = append(fVal, f)

@@ -190,16 +195,28 @@ func processHousekeeping(lineNo int, lineData string, lines []string, sclk strin
}
}

// DataDrive RSI format has table headers:
// HK Frame
// SCLK,PMC,hk_fcnt,f_pixl_analog_fpga,f_pixl_chassis_top,f_pixl_chassis_bottom,f_pixl_aft_low_cal,f_pixl_aft_high_cal,f_pixl_motor_v_plus,f_pixl_motor_v_minus,f_pixl_sdd_1,f_pixl_sdd_2,f_pixl_3_3_volt,f_pixl_1_8_volt,f_pixl_dspc_v_plus,f_pixl_dspc_v_minus,f_pixl_prt_curr,f_pixl_arm_resist,f_head_sdd_1,f_head_sdd_2,f_head_afe,f_head_lvcm,f_head_hvmm,f_head_bipod1,f_head_bipod2,f_head_bipod3,f_head_cover,f_head_hop,f_head_flie,f_head_tec1,f_head_tec2,f_head_xray,f_head_yellow_piece,f_head_mcc,f_hvps_fvmon,f_hvps_fimon,f_hvps_hvmon,f_hvps_himon,f_hvps_13v_plus,f_hvps_13v_minus,f_hvps_5v_plus,f_hvps_lvcm,i_valid_cmds,i_crf_retry,i_sdf_retry,i_rejected_cmds,i_hk_side,i_motor_1,i_motor_2,i_motor_3,i_motor_4,i_motor_5,i_motor_6,i_motor_cover,i_hes_sense,i_flash_status,u_hk_version,u_hk_time,u_hk_power,u_fsw_0,u_fsw_1,u_fsw_2,u_fsw_3,u_fsw_4,u_fsw_5,f_pixl_analog_fpga_conv,f_pixl_chassis_top_conv,f_pixl_chassis_bottom_conv,f_pixl_aft_low_cal_conv,f_pixl_aft_high_cal_conv,f_pixl_motor_v_plus_conv,f_pixl_motor_v_minus_conv,f_pixl_sdd_1_conv,f_pixl_sdd_2_conv,f_pixl_3_3_volt_conv,f_pixl_1_8_volt_conv,f_pixl_dspc_v_plus_conv,f_pixl_dspc_v_minus_conv,f_pixl_prt_curr_conv,f_pixl_arm_resist_conv,f_head_sdd_1_conv,f_head_sdd_2_conv,f_head_afe_conv,f_head_lvcm_conv,f_head_hvmm_conv,f_head_bipod1_conv,f_head_bipod2_conv,f_head_bipod3_conv,f_head_cover_conv,f_head_hop_conv,f_head_flie_conv,f_head_tec1_conv,f_head_tec2_conv,f_head_xray_conv,f_head_yellow_piece_conv,f_head_mcc_conv,f_hvps_fvmon_conv,f_hvps_fimon_conv,f_hvps_hvmon_conv,f_hvps_himon_conv,f_hvps_13v_plus_conv,f_hvps_13v_minus_conv,f_hvps_5v_plus_conv,f_hvps_lvcm_conv,i_valid_cmds_conv,i_crf_retry_conv,i_sdf_retry_conv,i_rejected_cmds_conv,i_hk_side_conv,i_motor_1_conv,i_motor_2_conv,i_motor_3_conv,i_motor_4_conv,i_motor_5_conv,i_motor_6_conv,i_motor_cover_conv,i_hes_sense_conv,i_flash_status_conv,RTT

// Outputs:
// 2AEE898E, C6F0202, 1658, 8, HK Frame, 1957, 1967, 2040, 1958, 1966, 2098, -146.50, -146.47, 7.64, -30.04, -30.02, -10.47, -11.04, 2.17, 8.87, -8.83, -0.04, 3.92, 0.70, 27.79, 20.05
hk := fmt.Sprintf("%v, %X, %v, 8, HK Frame, %d, %d, %d, %d, %d, %d, %v, %v, %v, %v, %v, -1, -1, -1, -1, -1, -1, %v, %v, %v, %v\n",
makeWriteSCLK(sclk), rtt, pmc,
motorPos[0], motorPos[1], motorPos[2], motorPos[3], motorPos[4], motorPos[5],
fVal[0], fVal[1], fVal[2], fVal[3], fVal[4], fVal[5], fVal[6], fVal[7], fVal[8])

return hktime, hk, nil
// We also output housekeeping data in a different "RSI" format that's compatible with the files output by the pipeline, which PIXLISE reads actual housekeeping
// values from. This differs from the above and doesn't have all the columns of the "real" files, but PIXLISE already gets much of what it needs this way. If
// specific data is required, we'll have to add it here.

// DataDrive RSI format has table headers:
// HK Frame
// SCLK,PMC,hk_fcnt,f_pixl_analog_fpga,f_pixl_chassis_top,f_pixl_chassis_bottom,f_pixl_aft_low_cal,f_pixl_aft_high_cal,f_pixl_motor_v_plus,f_pixl_motor_v_minus,f_pixl_sdd_1,f_pixl_sdd_2,f_pixl_3_3_volt,f_pixl_1_8_volt,f_pixl_dspc_v_plus,f_pixl_dspc_v_minus,f_pixl_prt_curr,f_pixl_arm_resist,f_head_sdd_1,f_head_sdd_2,f_head_afe,f_head_lvcm,f_head_hvmm,f_head_bipod1,f_head_bipod2,f_head_bipod3,f_head_cover,f_head_hop,f_head_flie,f_head_tec1,f_head_tec2,f_head_xray,f_head_yellow_piece,f_head_mcc,f_hvps_fvmon,f_hvps_fimon,f_hvps_hvmon,f_hvps_himon,f_hvps_13v_plus,f_hvps_13v_minus,f_hvps_5v_plus,f_hvps_lvcm,i_valid_cmds,i_crf_retry,i_sdf_retry,i_rejected_cmds,i_hk_side,i_motor_1,i_motor_2,i_motor_3,i_motor_4,i_motor_5,i_motor_6,i_motor_cover,i_hes_sense,i_flash_status,u_hk_version,u_hk_time,u_hk_power,u_fsw_0,u_fsw_1,u_fsw_2,u_fsw_3,u_fsw_4,u_fsw_5,f_pixl_analog_fpga_conv,f_pixl_chassis_top_conv,f_pixl_chassis_bottom_conv,f_pixl_aft_low_cal_conv,f_pixl_aft_high_cal_conv,f_pixl_motor_v_plus_conv,f_pixl_motor_v_minus_conv,f_pixl_sdd_1_conv,f_pixl_sdd_2_conv,f_pixl_3_3_volt_conv,f_pixl_1_8_volt_conv,f_pixl_dspc_v_plus_conv,f_pixl_dspc_v_minus_conv,f_pixl_prt_curr_conv,f_pixl_arm_resist_conv,f_head_sdd_1_conv,f_head_sdd_2_conv,f_head_afe_conv,f_head_lvcm_conv,f_head_hvmm_conv,f_head_bipod1_conv,f_head_bipod2_conv,f_head_bipod3_conv,f_head_cover_conv,f_head_hop_conv,f_head_flie_conv,f_head_tec1_conv,f_head_tec2_conv,f_head_xray_conv,f_head_yellow_piece_conv,f_head_mcc_conv,f_hvps_fvmon_conv,f_hvps_fimon_conv,f_hvps_hvmon_conv,f_hvps_himon_conv,f_hvps_13v_plus_conv,f_hvps_13v_minus_conv,f_hvps_5v_plus_conv,f_hvps_lvcm_conv,i_valid_cmds_conv,i_crf_retry_conv,i_sdf_retry_conv,i_rejected_cmds_conv,i_hk_side_conv,i_motor_1_conv,i_motor_2_conv,i_motor_3_conv,i_motor_4_conv,i_motor_5_conv,i_motor_6_conv,i_motor_cover_conv,i_hes_sense_conv,i_flash_status_conv,RTT
// 720274993,1604,10329,8840,8625,8624,6823,9492,4925,60015,58755,58754,3172,1777,8940,56119,3112,10763,2398,2400,8439,7902,8181,6313,6317,6323,6946,6867,6871,7864,7839,7810,8241,7856,3201,542,3455,3266,3590,61946,1364,2210,42,0,0,0,0,1651,2139,1856,1727,2119,2192,0,1124,0,0x190425F4,720274993,0x99800668,0xF80039F1,0x00238D00,0x0CD4000D,0x25000036,0x000D816F,0x4AE4F2CA,21.06158,14.166139999999999,14.13406,-43.62744,41.97247,4.925,-4.9094,-146.53015,-146.51488999999998,3.172,1.777,8.94,-8.83681,3.112,7.63,-29.934690000000003,-29.948140000000002,8.20074,-9.02187,-0.07379,-59.9841,-59.855819999999994,-59.66339,-39.6826,-42.21628,-42.08798,-10.24059,-11.04239,-11.97247,1.8505200000000002,-10.497160000000001,3.90843,0.66178,27.84249,19.93895,13.150179999999999,-13.150179999999999,4.99634,-3.31345,42,0,0,0,0,1651,2139,1856,1727,2119,2192,0,1124,0,208601602

// We output:
// SCLK,PMC,hk_fcnt,f_pixl_sdd_1_conv,f_pixl_sdd_2_conv,f_pixl_arm_resist_conv,f_head_sdd_1_conv,f_head_sdd_2_conv,f_hvps_fvmon_conv,f_hvps_fimon_conv,f_hvps_hvmon_conv,f_hvps_himon_conv,i_motor_1_conv,i_motor_2_conv,i_motor_3_conv,i_motor_4_conv,i_motor_5_conv,i_motor_6_conv

hk2 := fmt.Sprintf("%v, %v, %v, %v, %v, %v, %v, %v, %v, %v, %v, %v, %v, %v, %v, %v, %v, %v\n",
makeWriteSCLK(sclk), pmc, fcnt, fVal[1], fVal[0], fVal[2], fVal[3], fVal[4], fVal[5], fVal[6], fVal[7], fVal[8],
motorPos[0], motorPos[1], motorPos[2], motorPos[3], motorPos[4], motorPos[5])

return hktime, hk, hk2, nil
}
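For illustration only (not part of the commit), here is a minimal sketch assembling one row of the simplified HK-<rtt>.csv format described in the comment above. The values are invented; the verb list and argument order mirror the hk2 fmt.Sprintf call in processHousekeeping.

package main

import "fmt"

func main() {
	// Invented sample values; real ones come from the parsed SDF housekeeping frame.
	sclk := "720274993"
	pmc := 1604
	fcnt := int64(10329)
	fVal := []float64{-146.53, -146.51, 7.63, -29.93, -29.95, 3.91, 0.66, 27.84, 19.94}
	motorPos := []int{1651, 2139, 1856, 1727, 2119, 2192}

	// Same 18 verbs and argument order as the hk2 line built above.
	row := fmt.Sprintf("%v, %v, %v, %v, %v, %v, %v, %v, %v, %v, %v, %v, %v, %v, %v, %v, %v, %v\n",
		sclk, pmc, fcnt,
		fVal[1], fVal[0], fVal[2], fVal[3], fVal[4], fVal[5], fVal[6], fVal[7], fVal[8],
		motorPos[0], motorPos[1], motorPos[2], motorPos[3], motorPos[4], motorPos[5])
	fmt.Print(row)
}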
29 changes: 21 additions & 8 deletions api/dataimport/sdfToRSI/sdfToRSI.go
@@ -34,12 +34,13 @@ func ConvertSDFtoRSIs(sdfPath string, outPath string) ([]string, []int64, error)
} else if ref.Value != "end" {
return files, rtts, fmt.Errorf("End not found for science RTT: %v", rtt)
} else {
name := fmt.Sprintf("RSI-%v.csv", rtt)
err = sdfToRSI(sdfPath, rtt, startLine, ref.Line, path.Join(outPath, name))
nameRSI := fmt.Sprintf("RSI-%v.csv", rtt)
nameHK := fmt.Sprintf("HK-%v.csv", rtt)
err = sdfToRSI(sdfPath, rtt, startLine, ref.Line, path.Join(outPath, nameRSI), path.Join(outPath, nameHK))
if err != nil {
return files, rtts, fmt.Errorf("Failed to generate %v: %v", name, err)
return files, rtts, fmt.Errorf("Failed to generate files %v, %v: %v", nameRSI, nameHK, err)
}
files = append(files, name)
files = append(files, nameRSI, nameHK)
rtts = append(rtts, rtt)
}
}
@@ -48,7 +49,7 @@ func ConvertSDFtoRSIs(sdfPath string, outPath string) ([]string, []int64, error)
return files, rtts, nil
}

func sdfToRSI(sdfPath string, rtt int64, startLine int, endLine int, outPath string) error {
func sdfToRSI(sdfPath string, rtt int64, startLine int, endLine int, outPath string, outPath_Housekeeping string) error {
file, err := os.Open(sdfPath)
if err != nil {
return fmt.Errorf("Failed to open SDF %v: %v", sdfPath, err)
@@ -59,7 +60,12 @@ func sdfToRSI(sdfPath string, rtt int64, startLine int, endLine int, outPath str

fout, err := os.Create(outPath)
if err != nil {
return fmt.Errorf("Failed to create output CSV %v: %v", outPath, err)
return fmt.Errorf("Failed to create output RSI CSV %v: %v", outPath, err)
}

fout_hk, err := os.Create(outPath_Housekeeping)
if err != nil {
return fmt.Errorf("Failed to create output housekeeping CSV %v: %v", outPath_Housekeeping, err)
}

_, err = fout.WriteString(fmt.Sprintf("Spatial information from PIXL SDF or dat files %v for RTT: %v\n", sdfPath, rtt) +
@@ -163,7 +169,7 @@ func sdfToRSI(sdfPath string, rtt int64, startLine int, endLine int, outPath str
return fmt.Errorf("hk: %v", err)
}

hktime, hkline, err := processHousekeeping(lineNo, lineData, hkLines, sclk, rtt, pmc)
hktime, hkline, hkline_RSI, err := processHousekeeping(lineNo, lineData, hkLines, sclk, rtt, pmc)
if hktime == lastHKTime {
// We overwrite in this case!
hklinesSaved := len(outLinesByType[tok])
@@ -173,6 +179,9 @@ func sdfToRSI(sdfPath string, rtt int64, startLine int, endLine int, outPath str
// just let it get written like anything else
out.WriteString(hkline)

// Also write to the HK file
fout_hk.WriteString(hkline_RSI)

lastHKTime = hktime
lineNo += 23
} else if tok == "scanlog" {
@@ -247,7 +256,11 @@ func sdfToRSI(sdfPath string, rtt int64, startLine int, endLine int, outPath str

writeOutput(outLinesByType, fout)

return fout.Close()
err = fout.Close()
if err != nil {
return err
}
return fout_hk.Close()
}

func writeOutput(outLinesByType map[string][]string, fout *os.File) {
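sdfToRSI now creates and explicitly closes two output files (fout and fout_hk). Purely as a sketch of an alternative design choice — not what this commit does — the same cleanup can be expressed with deferred Close calls and a named return that keeps the first close error:

package main

import (
	"fmt"
	"os"
)

// writeTwo sketches the defer-based cleanup pattern for a pair of output files.
func writeTwo(rsiPath, hkPath string) (err error) {
	fout, err := os.Create(rsiPath)
	if err != nil {
		return fmt.Errorf("Failed to create output RSI CSV %v: %v", rsiPath, err)
	}
	defer func() {
		if cerr := fout.Close(); cerr != nil && err == nil {
			err = cerr
		}
	}()

	foutHK, err := os.Create(hkPath)
	if err != nil {
		return fmt.Errorf("Failed to create output housekeeping CSV %v: %v", hkPath, err)
	}
	defer func() {
		if cerr := foutHK.Close(); cerr != nil && err == nil {
			err = cerr
		}
	}()

	// ... write RSI lines to fout and housekeeping lines to foutHK here ...
	return nil
}

func main() {
	if err := writeTwo("RSI-example.csv", "HK-example.csv"); err != nil {
		fmt.Println(err)
	}
}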
2 changes: 1 addition & 1 deletion api/dataimport/sdfToRSI/sdfToRSI_test.go
@@ -22,7 +22,7 @@ func Example_ConvertSDFtoRSI() {
// Output:
// mkdir worked: true
// Getwd: true
// [RSI-208536069.csv RSI-208601602.csv], [208536069 208601602]: <nil>
// [RSI-208536069.csv HK-208536069.csv RSI-208601602.csv HK-208601602.csv], [208536069 208601602]: <nil>
}

func ensureSDFRawExists() {
68 changes: 17 additions & 51 deletions api/ws/handlers/scan.go
@@ -550,29 +550,38 @@ func processEM(importId string, zipReader *zip.Reader, zippedData []byte, destBu
}

// Create an RSI file from the sdf_raw file
rsis, rtts, err := sdfToRSI.ConvertSDFtoRSIs(sdfLocalPath, localTemp)
genFiles, rtts, err := sdfToRSI.ConvertSDFtoRSIs(sdfLocalPath, localTemp)

if err != nil {
return fmt.Errorf("Failed to scan %v for RSI creation: %v", sdfLocalPath, err)
}

logger.Infof("Generated RSI files:")
for _, rsi := range rsis {
logger.Infof(" %v", rsi)
for _, f := range genFiles {
logger.Infof(" %v", f)
}

rsiUploaded := 0
for c, rsi := range rsis {
rxlPath, logPath, surfPath, err := createBeamLocation(filepath.Join(localTemp, rsi), rtts[c], localTemp, logger)
for c := 0; c < len(genFiles); c += 2 {
f := genFiles[c]
hkFile := genFiles[c+1]

// Every second file is an HK file, not an actual RSI file... make sure we have the right prefixes here
if !strings.HasPrefix(f, "RSI-") || !strings.HasPrefix(hkFile, "HK-") {
logger.Errorf("ConvertSDFtoRSIs generated unexpected file names: %v, %v", f, hkFile)
continue
}

rxlPath, logPath, surfPath, err := createBeamLocation(filepath.Join(localTemp, f), rtts[c/2], localTemp, logger)
if err != nil {
// Don't fail on errors for these - we may have run beam location tool on some incomplete scan, so failure isn't terrible!
logger.Errorf("Beam location generation failed for RSI: %v. Error: %v", rsi, err)
logger.Errorf("Beam location generation failed for RSI: %v. Error: %v", f, err)
continue
}

// Upload the output files (beam locations, log and surface)
files := []string{rxlPath, logPath, surfPath}
name := []string{"beam location", "log", "surface"}
files := []string{filepath.Join(localTemp, hkFile), rxlPath, logPath, surfPath}
name := []string{"housekeeping", "beam location", "log", "surface"}
for _, file := range files {
data, err := os.ReadFile(file)
if err != nil {
@@ -612,38 +621,6 @@ func processEM(importId string, zipReader *zip.Reader, zippedData []byte, destBu
return fmt.Errorf("Failed to write MSA list: %v", err)
}

/*
// Upload the images
for _, image := range images {
p := filepath.Join(localImagesPath, image)
b, err := os.ReadFile(p)
if err != nil {
return fmt.Errorf("Failed to read image: %v. Error: %v", p, err)
}
savePath := path.Join(s3PathStart, image)
err = fs.WriteObject(destBucket, savePath, b)
if err != nil {
return err
}
logger.Infof(" Uploaded: s3://%v/%v", destBucket, savePath)
}
// Zip up the MSA's
msaData, err := utils.ZipDirectory(localMSAPath)
if err != nil {
return fmt.Errorf("Failed to zip MSA files from: %v. Error: %v", localMSAPath, err)
}
// Upload the MSA zip
savePath := path.Join(s3PathStart, "spectra.zip")
err = fs.WriteObject(destBucket, savePath, msaData)
if err != nil {
return err
}
logger.Infof(" Uploaded: s3://%v/%v", destBucket, savePath)
*/
// Process each RSI file generated
return nil
}

@@ -659,17 +636,6 @@ func createBeamLocation(rsiPath string, rtt int64, outputBeamLocationPath string
if _, err := os.Stat(bgtPath + "BGT"); err != nil {
// Try the path used in local testing
bgtPath = ".." + string(os.PathSeparator) + ".." + string(os.PathSeparator) + "beam-tool" + string(os.PathSeparator)

/*if _, err := os.Stat(bgtPath + "BGT"); err != nil {
// Try the path used in local testing
fmt.Println("PATH WONT BE FOUND")
d, _ := os.Getwd()
fmt.Println(d)
// Try get it backwards
bgtPath = filepath.Dir(filepath.Dir(d)) + string(os.PathSeparator)
}*/
}

if _, err := os.Stat(bgtPath + "Geometry_PIXL_EM_Landing_25Jan2021.csv"); err != nil {
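ConvertSDFtoRSIs now appends the RSI file and then the HK file for each RTT, so processEM walks genFiles in pairs. The sketch below (not part of the commit) only illustrates that pairing assumption; the file names are copied from the test expectation in sdfToRSI_test.go, and each pair shares the single RTT entry at index c/2.

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Ordering produced by ConvertSDFtoRSIs: RSI then HK per RTT (see sdfToRSI_test.go).
	genFiles := []string{"RSI-208536069.csv", "HK-208536069.csv", "RSI-208601602.csv", "HK-208601602.csv"}
	rtts := []int64{208536069, 208601602}

	for c := 0; c < len(genFiles); c += 2 {
		rsi, hk := genFiles[c], genFiles[c+1]
		if !strings.HasPrefix(rsi, "RSI-") || !strings.HasPrefix(hk, "HK-") {
			fmt.Println("unexpected file pair:", rsi, hk)
			continue
		}
		// One RTT per file pair, hence the c/2 index.
		fmt.Printf("RTT %v -> RSI %v, housekeeping %v\n", rtts[c/2], rsi, hk)
	}
}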
2 changes: 1 addition & 1 deletion internal/cmd-line-tools/dataset-archive-optimiser/main.go
@@ -182,7 +182,7 @@ func optimise(rtts map[string]string, remoteFS fileaccess.FileAccess, iLog logge
if !strings.HasSuffix(localArchivePath, zipFile) {
zipPath := path.Join("Archive", zipFile)

iLog.Infof("Deleting from S3: %v", zip.ErrInsecurePath)
iLog.Infof("Deleting from S3: %v", zipPath)
err = remoteFS.DeleteObject(dataBucket, zipPath)
if err != nil {
iLog.Errorf("Error deleting archive file %v: %v\n", zipPath, err)
