Merge pull request #306 from pixlise/development
Release 4.33.0
pnemere authored Sep 16, 2024
2 parents 8b6f9d7 + bc057db commit 8baac19
Showing 42 changed files with 4,315 additions and 2,099 deletions.
13 changes: 7 additions & 6 deletions .github/workflows/main.yml
@@ -90,17 +90,18 @@ jobs:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
- name: Integration Test
# NOTE: integration-test-config.json must have EnvironmentName as its last field, so we can append a suffix
# to its value, making DB names unique so concurrent builds don't clash!
run: |
make integrationtest
truncate -s-3 ./integration-test-config.json
UNIX_MS=$(date +%s%N | cut -b1-13)
echo "_${UNIX_MS}\"}" >> ./integration-test-config.json
./api-service -customConfigPath ./integration-test-config.json &
sleep 5
#pwd
#ls
cd ./internal/cmd-line-tools/api-integration-test
#pwd
#ls
#echo ./tester -apiHost localhost:8080 -datasetBucket "integration-test-data-pixlise" -usersBucket "integration-test-users-pixlise" -auth0Domain ${{ secrets.PIXLISE_API_TEST_AUTH0_DOMAIN }} -auth0ClientId ${{ secrets.PIXLISE_API_TEST_AUTH0_CLIENT_ID }} -auth0Secret ${{ secrets.PIXLISE_API_TEST_AUTH0_SECRET }} -auth0Audience "pixlise-backend" -testType "ci" -test1Username "[email protected]" -test1Password ${{ secrets.TEST_USER_1_PASSWORD }} -test2Username "[email protected]" -test2Password ${{ secrets.TEST_USER_2_PASSWORD }}
./tester -apiHost localhost:8080 -datasetBucket "integration-test-data-pixlise" -usersBucket "integration-test-users-pixlise" -auth0Domain ${{ secrets.PIXLISE_API_TEST_AUTH0_DOMAIN }} -auth0ClientId ${{ secrets.PIXLISE_API_TEST_AUTH0_CLIENT_ID }} -auth0Secret ${{ secrets.PIXLISE_API_TEST_AUTH0_SECRET }} -auth0Audience "pixlise-backend" -testType "ci" -test1Username "[email protected]" -test1Password ${{ secrets.TEST_USER_1_PASSWORD }} -test2Username "[email protected]" -test2Password ${{ secrets.TEST_USER_2_PASSWORD }}
echo "unittest_${UNIX_MS}"
./tester -apiHost localhost:8080 -envName "unittest_${UNIX_MS}" -datasetBucket "integration-test-data-pixlise" -usersBucket "integration-test-users-pixlise" -auth0Domain ${{ secrets.PIXLISE_API_TEST_AUTH0_DOMAIN }} -auth0ClientId ${{ secrets.PIXLISE_API_TEST_AUTH0_CLIENT_ID }} -auth0Secret ${{ secrets.PIXLISE_API_TEST_AUTH0_SECRET }} -auth0Audience "pixlise-backend" -testType "ci" -test1Username "[email protected]" -test1Password ${{ secrets.TEST_USER_1_PASSWORD }} -test2Username "[email protected]" -test2Password ${{ secrets.TEST_USER_2_PASSWORD }}
env:
AWS_DEFAULT_REGION: us-east-1
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
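The truncate/echo pair in the Integration Test step deserves a note: `truncate -s-3` strips the file's final three bytes (the closing `"}` plus newline, which is why EnvironmentName must be the last field), and the echo re-closes the JSON with a millisecond timestamp spliced into the value. A minimal Go sketch of the same idea done as a JSON round-trip, which does not depend on field order (the path and the EnvironmentName key come from the workflow; everything else is illustrative):

```go
package main

import (
	"encoding/json"
	"fmt"
	"os"
	"time"
)

func main() {
	const path = "./integration-test-config.json"

	raw, err := os.ReadFile(path)
	if err != nil {
		panic(err)
	}

	var cfg map[string]any
	if err := json.Unmarshal(raw, &cfg); err != nil {
		panic(err)
	}

	// Same effect as the truncate/echo trick: suffix EnvironmentName with a
	// millisecond timestamp so each CI run talks to its own database.
	cfg["EnvironmentName"] = fmt.Sprintf("%v_%d", cfg["EnvironmentName"], time.Now().UnixMilli())

	out, err := json.MarshalIndent(cfg, "", "  ")
	if err != nil {
		panic(err)
	}
	if err := os.WriteFile(path, out, 0644); err != nil {
		panic(err)
	}
}
```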
7 changes: 7 additions & 0 deletions api/config/config.go
@@ -104,6 +104,13 @@ type APIConfig struct {

// The GroupId of the group that a new user is added to (as a member) by default
DefaultUserGroupId string

// PIXLISE backup & restore settings
DataBackupBucket string
BackupEnabled bool
RestoreEnabled bool

ImpersonateEnabled bool
}

func homeDir() string {
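With these fields on APIConfig, a custom config file passed via -customConfigPath could enable backups along the lines of the hypothetical fragment below. The bucket name is invented, and the flat key-per-field layout assumes the JSON unmarshals directly into APIConfig:

```json
{
  "DataBackupBucket": "example-pixlise-backup",
  "BackupEnabled": true,
  "RestoreEnabled": false,
  "ImpersonateEnabled": false
}
```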
1 change: 0 additions & 1 deletion api/dataimport/for-trigger.go
@@ -54,7 +54,6 @@ type ImportResult struct {
// Error (or nil)
func ImportForTrigger(
triggerMessage []byte,
envName string,
configBucket string,
datasetBucket string,
manualBucket string,
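Dropping envName changes every call site: the environment now travels inside the *mongo.Database handle rather than as a separate string. A sketch of the new call shape, using only types visible in this diff (ImportForTrigger's exact parameter types may differ slightly):

```go
// Sketch: the envName argument is gone; the db handle passed in is
// already environment-scoped (see GetDBWithEnvironment in the tests).
func runImport(msg []byte, configBucket, datasetBucket, manualBucket string,
	db *mongo.Database, log *logger.StdOutLoggerForTest, fs fileaccess.FileAccess) {
	result, err := ImportForTrigger(msg, configBucket, datasetBucket, manualBucket, db, log, fs)
	fmt.Printf("Errors: %v, changes: %v, isUpdate: %v\n", err, result.WhatChanged, result.IsUpdate)
}
```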
51 changes: 25 additions & 26 deletions api/dataimport/for-trigger_test.go
@@ -36,15 +36,14 @@ import (
"google.golang.org/protobuf/encoding/protojson"
)

func initTest(testName string, testDir string, autoShareCreatorId string, autoShareCreatorGroupEditor string) (fileaccess.FileAccess, *logger.StdOutLoggerForTest, string, string, string, string, *mongo.Database) {
func initTest(testName string, testDir string, autoShareCreatorId string, autoShareCreatorGroupEditor string) (fileaccess.FileAccess, *logger.StdOutLoggerForTest, string, string, string, *mongo.Database) {
remoteFS := &fileaccess.FSAccess{}
log := &logger.StdOutLoggerForTest{}
envName := testName
configBucket := "./test-data/" + testDir + "/config-bucket"
datasetBucket := "./test-data/" + testDir + "/dataset-bucket"
manualBucket := "./test-data/" + testDir + "/manual-bucket"

db := wstestlib.GetDB()
db := wstestlib.GetDBWithEnvironment("unittest_" + testName)
ctx := context.TODO()

// Clear relevant collections
@@ -65,7 +64,7 @@ func initTest(testName string, testDir string, autoShareCreatorId string, autoSh
db.Collection(dbCollections.ScanAutoShareName).InsertOne(ctx, &item)
}

return remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db
return remoteFS, log, configBucket, datasetBucket, manualBucket, db
}

/*
@@ -78,7 +77,7 @@ func startTestWithMockMongo(name string, t *testing.T, testFunc func(mt *mtest.T
*/
// Import unknown dataset (simulate trigger by OCS pipeline), file goes to archive, then all files downloaded from archive, dataset create fails due to unknown data type
func Example_importForTrigger_OCS_Archive_BadData() {
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("OCS_Archive_BadData", "Archive_BadData", specialUserIds.PIXLISESystemUserId, "PIXLFMGroupId")
remoteFS, log, configBucket, datasetBucket, manualBucket, db := initTest("OCS_Archive_BadData", "Archive_BadData", specialUserIds.PIXLISESystemUserId, "PIXLFMGroupId")

// In case it ran before, delete the file from dataset bucket, otherwise we will fail for the wrong reason
os.Remove(datasetBucket + "/Archive/70000_069-02-09-2021-06-25-13.zip")
@@ -122,7 +121,7 @@ func Example_importForTrigger_OCS_Archive_BadData() {
]
}`

result, err := ImportForTrigger([]byte(trigger), envName, configBucket, datasetBucket, manualBucket, db, log, remoteFS)
result, err := ImportForTrigger([]byte(trigger), configBucket, datasetBucket, manualBucket, db, log, remoteFS)

fmt.Printf("Errors: %v, changes: %v, isUpdate: %v\n", err, result.WhatChanged, result.IsUpdate)

@@ -152,7 +151,7 @@ func Example_importForTrigger_OCS_Archive_BadData() {

// Import FM-style (simulate trigger by OCS pipeline), file already in archive, so should do nothing
func Example_importForTrigger_OCS_Archive_Exists() {
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("OCS_Archive_Exists", "Archive_Exists", specialUserIds.PIXLISESystemUserId, "PIXLFMGroupId")
remoteFS, log, configBucket, datasetBucket, manualBucket, db := initTest("OCS_Archive_Exists", "Archive_Exists", specialUserIds.PIXLISESystemUserId, "PIXLFMGroupId")
trigger := `{
"Records": [
{
Expand Down Expand Up @@ -192,7 +191,7 @@ func Example_importForTrigger_OCS_Archive_Exists() {
]
}`

result, err := ImportForTrigger([]byte(trigger), envName, configBucket, datasetBucket, manualBucket, db, log, remoteFS)
result, err := ImportForTrigger([]byte(trigger), configBucket, datasetBucket, manualBucket, db, log, remoteFS)

fmt.Printf("Errors: %v, changes: %v, isUpdate: %v\n", err, result.WhatChanged, result.IsUpdate)

@@ -245,7 +244,7 @@ func printArchiveOKLogOutput(logger *logger.StdOutLoggerForTest, db *mongo.Datab

// Import FM-style (simulate trigger by OCS pipeline), file goes to archive, then all files downloaded from archive and dataset created
func Example_importForTrigger_OCS_Archive_OK() {
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("OCS_Archive_OK", "Archive_OK", specialUserIds.PIXLISESystemUserId, "PIXLFMGroupId")
remoteFS, log, configBucket, datasetBucket, manualBucket, db := initTest("OCS_Archive_OK", "Archive_OK", specialUserIds.PIXLISESystemUserId, "PIXLFMGroupId")
// In case it ran before, delete the file from dataset bucket, otherwise we will fail for the wrong reason
os.Remove(datasetBucket + "/Archive/048300551-27-06-2021-09-52-25.zip")

@@ -288,7 +287,7 @@ func Example_importForTrigger_OCS_Archive_OK() {
]
}`

result, err := ImportForTrigger([]byte(trigger), envName, configBucket, datasetBucket, manualBucket, db, log, remoteFS)
result, err := ImportForTrigger([]byte(trigger), configBucket, datasetBucket, manualBucket, db, log, remoteFS)

fmt.Printf("Errors: %v, changes: %v, isUpdate: %v\n", err, result.WhatChanged, result.IsUpdate)

@@ -311,7 +310,7 @@ func Example_importForTrigger_OCS_Archive_OK() {

// Import FM-style (simulate trigger by dataset edit screen), should create dataset with custom name+image
func Example_importForTrigger_OCS_DatasetEdit() {
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("OCS_DatasetEdit", "Archive_OK", specialUserIds.PIXLISESystemUserId, "PIXLFMGroupId")
remoteFS, log, configBucket, datasetBucket, manualBucket, db := initTest("OCS_DatasetEdit", "Archive_OK", specialUserIds.PIXLISESystemUserId, "PIXLFMGroupId")

// To avoid checking in two sets of the same zip files for this and Example_ImportForTrigger_OCS_Archive_OK, here we copy
// the archive files from the Archive_OK test.
@@ -341,7 +340,7 @@
"jobID": "dataimport-unittest123"
}`

result, err := ImportForTrigger([]byte(trigger), envName, configBucket, datasetBucket, manualBucket, db, log, remoteFS)
result, err := ImportForTrigger([]byte(trigger), configBucket, datasetBucket, manualBucket, db, log, remoteFS)

fmt.Printf("Errors: %v, changes: %v, isUpdate: %v\n", err, result.WhatChanged, result.IsUpdate)

@@ -407,14 +406,14 @@ func printManualOKLogOutput(log *logger.StdOutLoggerForTest, db *mongo.Database,

// Import a breadboard dataset from manual uploaded zip file
func Example_importForTrigger_Manual_JPL() {
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Manual_JPL", "Manual_OK", specialUserIds.JPLImport, "JPLTestUserGroupId")
remoteFS, log, configBucket, datasetBucket, manualBucket, db := initTest("Manual_JPL", "Manual_OK", specialUserIds.JPLImport, "JPLTestUserGroupId")

trigger := `{
"datasetID": "test1234",
"jobID": "dataimport-unittest123"
}`

result, err := ImportForTrigger([]byte(trigger), envName, configBucket, datasetBucket, manualBucket, db, log, remoteFS)
result, err := ImportForTrigger([]byte(trigger), configBucket, datasetBucket, manualBucket, db, log, remoteFS)

fmt.Printf("Errors: %v, changes: %v, isUpdate: %v\n", err, result.WhatChanged, result.IsUpdate)

@@ -440,14 +439,14 @@ func Example_importForTrigger_Manual_JPL() {

// Import a breadboard dataset from manual uploaded zip file
func Example_importForTrigger_Manual_SBU() {
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Manual_JPL", "Manual_OK2", specialUserIds.SBUImport, "SBUTestUserGroupId")
remoteFS, log, configBucket, datasetBucket, manualBucket, db := initTest("Manual_JPL", "Manual_OK2", specialUserIds.SBUImport, "SBUTestUserGroupId")

trigger := `{
"datasetID": "test1234sbu",
"jobID": "dataimport-unittest123sbu"
}`

result, err := ImportForTrigger([]byte(trigger), envName, configBucket, datasetBucket, manualBucket, db, log, remoteFS)
result, err := ImportForTrigger([]byte(trigger), configBucket, datasetBucket, manualBucket, db, log, remoteFS)

fmt.Printf("Errors: %v, changes: %v, isUpdate: %v\n", err, result.WhatChanged, result.IsUpdate)

@@ -473,14 +472,14 @@ func Example_importForTrigger_Manual_SBU() {

// Import a breadboard dataset from manual uploaded zip file
func Example_ImportForTrigger_Manual_SBU_NoAutoShare() {
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Manual_SBU_NoAutoShare", "Manual_OK2", specialUserIds.JPLImport, "JPLTestUserGroupId")
remoteFS, log, configBucket, datasetBucket, manualBucket, db := initTest("Manual_SBU_NoAutoShare", "Manual_OK2", specialUserIds.JPLImport, "JPLTestUserGroupId")

trigger := `{
"datasetID": "test1234sbu",
"jobID": "dataimport-unittest123sbu"
}`

result, err := ImportForTrigger([]byte(trigger), envName, configBucket, datasetBucket, manualBucket, db, log, remoteFS)
result, err := ImportForTrigger([]byte(trigger), configBucket, datasetBucket, manualBucket, db, log, remoteFS)

fmt.Printf("Errors: %v, changes: %v, isUpdate: %v\n", err, result.WhatChanged, result.IsUpdate)

@@ -507,14 +506,14 @@ func Example_ImportForTrigger_Manual_SBU_NoAutoShare() {
/* Didn't get this working when the above was changed. The problem is that this still generates the user name SBUImport, so the
premise of the test fails because it doesn't end up with no user id at that point!
func Test_ImportForTrigger_Manual_SBU_NoAutoShare_FailForPipeline(t *testing.T) {
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Manual_OK2", "", "")
remoteFS, log, configBucket, datasetBucket, manualBucket, db := initTest("Manual_OK2", "", "")
trigger := `{
"datasetID": "test1234sbu",
"jobID": "dataimport-unittest123sbu"
}`
_, err := ImportForTrigger([]byte(trigger), envName, configBucket, datasetBucket, manualBucket, db, log, remoteFS)
_, err := ImportForTrigger([]byte(trigger), configBucket, datasetBucket, manualBucket, db, log, remoteFS)
// Make sure we got the error
if !strings.HasSuffix(err.Error(), "Cannot work out groups to auto-share imported dataset with") {
@@ -524,14 +523,14 @@ func Test_ImportForTrigger_Manual_SBU_NoAutoShare_FailForPipeline(t *testing.T)
*/
// Import a breadboard dataset from manual uploaded zip file
func Example_importForTrigger_Manual_EM() {
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Manual_EM", "ManualEM_OK", specialUserIds.PIXLISESystemUserId, "PIXLFMGroupId")
remoteFS, log, configBucket, datasetBucket, manualBucket, db := initTest("Manual_EM", "ManualEM_OK", specialUserIds.PIXLISESystemUserId, "PIXLFMGroupId")

trigger := `{
"datasetID": "048300551",
"jobID": "dataimport-unittest048300551"
}`

result, err := ImportForTrigger([]byte(trigger), envName, configBucket, datasetBucket, manualBucket, db, log, remoteFS)
result, err := ImportForTrigger([]byte(trigger), configBucket, datasetBucket, manualBucket, db, log, remoteFS)

fmt.Printf("Errors: %v, changes: %v, isUpdate: %v\n", err, result.WhatChanged, result.IsUpdate)

@@ -558,14 +557,14 @@ func Example_importForTrigger_Manual_EM() {

// Import a breadboard dataset from manual uploaded zip file
func Example_importForTrigger_Manual_EM_WithBeamV2() {
remoteFS, log, envName, configBucket, datasetBucket, manualBucket, db := initTest("Manual_EM", "ManualEM_Beamv2_OK", specialUserIds.PIXLISESystemUserId, "PIXLFMGroupId")
remoteFS, log, configBucket, datasetBucket, manualBucket, db := initTest("Manual_EM", "ManualEM_Beamv2_OK", specialUserIds.PIXLISESystemUserId, "PIXLFMGroupId")

trigger := `{
"datasetID": "048300551",
"jobID": "dataimport-unittest048300551"
}`

result, err := ImportForTrigger([]byte(trigger), envName, configBucket, datasetBucket, manualBucket, db, log, remoteFS)
result, err := ImportForTrigger([]byte(trigger), configBucket, datasetBucket, manualBucket, db, log, remoteFS)

fmt.Printf("Errors: %v, changes: %v, isUpdate: %v\n", err, result.WhatChanged, result.IsUpdate)

@@ -594,14 +593,14 @@ func Example_importForTrigger_Manual_EM_WithBeamV2() {
// Import a breadboard dataset from manual uploaded zip file, including custom name+image
func Example_importForTrigger_Manual_DatasetEdit() {
remoteFS, log, envName, configBucket, datasetBucket, manualBucket := initTest("Manual_Edit")
remoteFS, log, configBucket, datasetBucket, manualBucket := initTest("Manual_Edit")
trigger := `{
"datasetID": "test1234",
"logID": "dataimport-unittest123"
}`
err := ImportForTrigger([]byte(trigger), envName, configBucket, datasetBucket, manualBucket, log, remoteFS)
err := ImportForTrigger([]byte(trigger), configBucket, datasetBucket, manualBucket, log, remoteFS)
fmt.Printf("Errors: %v\n", err)
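The initTest change above is the test-side half of the envName removal: wstestlib.GetDBWithEnvironment("unittest_" + testName) hands each test its own environment-named database, mirroring the unittest_<ms> suffix the CI workflow bakes into its config. The helper itself isn't shown in this diff; the sketch below is hypothetical and only illustrates the isolation idea (the URI and database-naming scheme are assumptions):

```go
package wstestlib // placement assumed

import (
	"context"

	"go.mongodb.org/mongo-driver/mongo"
	"go.mongodb.org/mongo-driver/mongo/options"
)

// Hypothetical sketch: derive the database name from the environment
// string so no two tests (or CI builds) ever share collections.
func GetDBWithEnvironment(envName string) *mongo.Database {
	client, err := mongo.Connect(context.TODO(),
		options.Client().ApplyURI("mongodb://localhost:27017")) // assumed local test instance
	if err != nil {
		panic(err)
	}
	return client.Database("pixlise-" + envName) // naming scheme assumed
}
```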
2 changes: 2 additions & 0 deletions api/dbCollections/collections.go
@@ -37,8 +37,10 @@ const UserGroupsName = "userGroups"
const UserROIDisplaySettings = "userROIDisplaySettings"
const UserExpressionDisplaySettings = "userExpressionDisplaySettings"
const UsersName = "users"
const UserImpersonatorsName = "userImpersonators"
const ViewStatesName = "viewStates"
const WidgetDataName = "widgetData"
const ConnectTempTokensName = "connectTempTokens"

func GetAllCollections() []string {
return []string{
7 changes: 7 additions & 0 deletions api/services/apiServices.go
@@ -36,6 +36,7 @@ import (
"github.com/pixlise/core/v4/core/idgen"
"github.com/pixlise/core/v4/core/jwtparser"
"github.com/pixlise/core/v4/core/logger"
"github.com/pixlise/core/v4/core/mongoDBConnection"
"github.com/pixlise/core/v4/core/timestamper"
"go.mongodb.org/mongo-driver/mongo"
)
@@ -81,5 +82,11 @@ type APIServices struct {
// Our mongo db connection
MongoDB *mongo.Database

// And how we connected to it (so we can run mongodump later if needed)
MongoDetails mongoDBConnection.MongoConnectionDetails

Notifier INotifier

// The unique identifier of this API instance (so we can log/debug issues that are cross-instance!)
InstanceId string
}
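The MongoDetails comment ties into the backup settings added in config.go: by retaining how it connected, the API can later shell out to mongodump with the same details. A hypothetical sketch follows; the diff shows only that MongoConnectionDetails exists, so its fields are redeclared locally here as assumptions:

```go
package main // illustrative only

import "os/exec"

// Stand-in for mongoDBConnection.MongoConnectionDetails; the real
// struct isn't shown in this diff, so these fields are assumptions.
type MongoConnectionDetails struct {
	Host     string
	User     string
	Password string
}

// runMongoDump shows why the connection details are kept around: a
// backup job can reuse them to dump the live database to outDir.
func runMongoDump(details MongoConnectionDetails, dbName, outDir string) error {
	cmd := exec.Command("mongodump",
		"--host", details.Host,
		"--username", details.User,
		"--password", details.Password,
		"--db", dbName, // e.g. svcs.MongoDB.Name()
		"--out", outDir,
	)
	return cmd.Run()
}
```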
49 changes: 48 additions & 1 deletion api/ws/handlers/screen-configuration.go
@@ -57,6 +57,10 @@ func HandleScreenConfigurationListReq(req *protos.ScreenConfigurationListReq, hc
return nil, err
}

if req.SnapshotParentId != "" {
filter["snapshotParentId"] = req.SnapshotParentId
}

opts := options.Find()

cursor, err := hctx.Svcs.MongoDB.Collection(dbCollections.ScreenConfigurationName).Find(context.TODO(), filter, opts)
@@ -162,11 +166,54 @@ func writeScreenConfiguration(screenConfig *protos.ScreenConfiguration, hctx wsH
for i, layout := range screenConfig.Layouts {
if layout.TabId == "" {
layout.TabId = hctx.Svcs.IDGen.GenObjectID()
layout.TabName = "Tab " + fmt.Sprint(i+1)
if layout.TabName == "" {
layout.TabName = "Tab " + fmt.Sprint(i+1)
}
}
for _, widget := range layout.Widgets {
if widget.Id == "" {
widget.Id = formWidgetId(widget, screenConfig.Id, i)
if widget.Data != nil {
// We have widget data, but no ID, so write it to the database with a new ID
_, err := hctx.Svcs.MongoDB.Collection(dbCollections.WidgetDataName).UpdateOne(sessCtx, bson.M{
"_id": widget.Id,
}, bson.M{
"$set": widget.Data,
}, options.Update().SetUpsert(true))
if err != nil {
return nil, err
}
}
} else if widget.Id != "" {
// We have a widget ID, so copy its existing widget data record across under a new ID
var oldWidgetId = widget.Id
widget.Id = formWidgetId(widget, screenConfig.Id, i)
// Fetch old widget data
result := hctx.Svcs.MongoDB.Collection(dbCollections.WidgetDataName).FindOne(sessCtx, bson.M{
"_id": oldWidgetId,
})
if result.Err() != nil {
// We can't get the data, so we'll just continue
continue
}

widgetData := &protos.WidgetData{}
err = result.Decode(&widgetData)
if err != nil {
continue
}

widgetData.Id = widget.Id

// Write the data to the new widget ID
_, err := hctx.Svcs.MongoDB.Collection(dbCollections.WidgetDataName).UpdateOne(sessCtx, bson.M{
"_id": widget.Id,
}, bson.M{
"$set": widgetData,
}, options.Update().SetUpsert(true))
if err != nil {
return nil, err
}
}
}
}
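The else branch above is effectively copy-on-snapshot: a widget that already has an ID keeps its data, but the record is re-keyed under a fresh ID so the snapshot and its parent never share mutable state. Distilled to its essentials, using the same driver calls as the handler (the import path for the generated protos is assumed):

```go
import (
	"context"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"
	"go.mongodb.org/mongo-driver/mongo/options"

	protos "github.com/pixlise/core/v4/generated-protos" // import path assumed
)

// cloneWidgetData re-keys one widget-data document under a new ID,
// mirroring the handler's FindOne/Decode/UpdateOne-with-upsert sequence.
func cloneWidgetData(ctx context.Context, coll *mongo.Collection, oldId, newId string) error {
	widgetData := &protos.WidgetData{}
	result := coll.FindOne(ctx, bson.M{"_id": oldId})
	if result.Err() != nil {
		return result.Err() // the handler skips the widget in this case
	}
	if err := result.Decode(widgetData); err != nil {
		return err
	}

	widgetData.Id = newId
	_, err := coll.UpdateOne(ctx,
		bson.M{"_id": newId},
		bson.M{"$set": widgetData},
		options.Update().SetUpsert(true))
	return err
}
```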