From 92cef850abf5734bbcfb037ed101d6faf7aeec8c Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Thu, 14 Nov 2024 16:23:30 -0500 Subject: [PATCH 01/25] initial stuff --- app2/dataset_client.go | 156 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 156 insertions(+) create mode 100644 app2/dataset_client.go diff --git a/app2/dataset_client.go b/app2/dataset_client.go new file mode 100644 index 00000000000..20a3c4f303c --- /dev/null +++ b/app2/dataset_client.go @@ -0,0 +1,156 @@ +// Package dataset contains a gRPC based dataset client. +package dataset + +import ( + "context" + "time" + + pb "go.viam.com/api/app/datasync/v1" + "go.viam.com/rdk/logging" + "go.viam.com/rdk/protoutils" + "go.viam.com/utils/rpc" +) + +// Client implements the DataSyncServiceClient interface. +type Client struct { + client pb.DataSyncServiceClient + logger logging.Logger +} + +type DataType int32 + +const ( + Unspecified DataType = iota + BinarySensor + TabularSensor + File +) + +type MimeType int32 +const ( + Unspecified MimeType = iota + JPEG //can i name things this??? + PNG + PCD +) + +// BoundingBox represents a labeled bounding box on an image. +// x and y values are normalized ratios between 0 and 1. +type BoundingBox struct { + ID string + Label string + XMinNormalized float64 + YMinNormalized float64 + XMaxNormalized float64 + YMaxNormalized float64 +} + +// Annotations are data annotations used for machine learning. +type Annotations struct { + Bboxes []BoundingBox +} +type UploadMetadata struct { + PartID string + ComponentType string + ComponentName string + MethodName string + Type DataType + FileName string + MethodParameters map[string]string + FileExtension string + Tags []string +} + +type SensorMetadata struct { + TimeRequested time.Time + TimeReceived time.Time + MimeType MimeType + //annotations lives in the data client file...so maybe make a shared situation later on?? 
+ Annotations Annotations +} + +type TabularData struct { + Data map[string]interface{} + MetadataIndex uint32 + Metadata UploadMetadata //its usually capturemetadata and idk if this will work or do anything (probs remove this) + TimeRequested time.Time + TimeReceived time.Time +} + +type SensorData struct { + //this is what can be filled by either tabular or binary data!! + Metadata SensorMetadata + //its one of, either binary or tabular ==> this needs help + Binary []byte + Tabular TabularData //??? feels wrong +} + +// NewDataClient constructs a new DataClient using the connection passed in by the viamClient and the provided logger. +func NewDataSyncClient( + channel rpc.ClientConn, + logger logging.Logger, +) (*Client, error) { + d := pb.NewDataSyncServiceClient(channel) + return &Client{ + client: d, + logger: logger, + }, nil +} + +func uploadMetadataToProto(metadata UploadMetadata) *pb.UploadMetadata { + methodParms, err := protoutils.ConvertStringMapToAnyPBMap(metadata.MethodParameters) + if err != nil { + return nil + } + return &pb.UploadMetadata{ + PartId: metadata.PartID, + ComponentType: metadata.ComponentType, + ComponentName: metadata.ComponentName, + MethodName: metadata.MethodName, + Type: pb.DataType(metadata.Type), + MethodParameters: methodParms, + FileExtension: metadata.FileExtension, + Tags: metadata.Tags, + } +} + + +// DataCaptureUpload uploads the contents and metadata for tabular data. +/* +notes: + +Metadata *UploadMetadata +SensorContents []*SensorData + +*/ +func (d *Client) DataCaptureUpload(ctx context.Context, metadata UploadMetadata, sensorContents []SensorData) error { + resp, err := d.client.DataCaptureUpload(ctx, &pb.DataCaptureUploadRequest{ + Metadata: uploadMetadataToProto(metadata), //should be in proto form !! 
+ SensorContents: //sensorContents needs to go here or something, + }) + if err != nil { + return err + } + return resp + +} + +// FileUpload uploads the contents and metadata for binary (image + file) data, +// where the first packet must be the UploadMetadata. +func (d *Client) FileUpload(ctx context.Context) error { + resp, err := d.client.FileUpload(ctx, &pb.FileUploadRequest{}) + if err != nil { + return err + } + return nil +} + +// StreamingDataCaptureUpload uploads the streaming contents and metadata for streaming binary (image + file) data, +// where the first packet must be the UploadMetadata. +func (d *Client) StreamingDataCaptureUpload(ctx context.Context) error { + resp, err := d.client.FileUpload(ctx, &pb.StreamingDataCaptureUploadRequest{}) + if err != nil { + return err + } + return nil +} From a512c8dc644a4ff7791c9cccea31dad57eeeb117 Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Fri, 15 Nov 2024 16:28:25 -0500 Subject: [PATCH 02/25] more changes --- app2/dataset_client.go | 171 +++++++++++++++++++++++++++++++++-------- 1 file changed, 139 insertions(+), 32 deletions(-) diff --git a/app2/dataset_client.go b/app2/dataset_client.go index 20a3c4f303c..7c10a4709ff 100644 --- a/app2/dataset_client.go +++ b/app2/dataset_client.go @@ -7,8 +7,13 @@ import ( pb "go.viam.com/api/app/datasync/v1" "go.viam.com/rdk/logging" - "go.viam.com/rdk/protoutils" + + // "go.viam.com/rdk/protoutils" + "go.viam.com/utils/rpc" + "google.golang.org/protobuf/types/known/anypb" + "google.golang.org/protobuf/types/known/structpb" + "google.golang.org/protobuf/types/known/timestamppb" ) // Client implements the DataSyncServiceClient interface. @@ -20,18 +25,19 @@ type Client struct { type DataType int32 const ( - Unspecified DataType = iota - BinarySensor - TabularSensor - File + DataTypeUnspecified DataType = iota + DataTypeBinarySensor + DataTypeTabularSensor + DataTypeFile ) type MimeType int32 + const ( - Unspecified MimeType = iota - JPEG //can i name things this??? 
- PNG - PCD + MimeTypeUnspecified MimeType = iota + MimeTypeJPEG //can i name things this??? + MimeTypePNG + MimeTypePCD ) // BoundingBox represents a labeled bounding box on an image. @@ -56,19 +62,11 @@ type UploadMetadata struct { MethodName string Type DataType FileName string - MethodParameters map[string]string + MethodParameters map[string]interface{} //or map[string]string?? FileExtension string Tags []string } -type SensorMetadata struct { - TimeRequested time.Time - TimeReceived time.Time - MimeType MimeType - //annotations lives in the data client file...so maybe make a shared situation later on?? - Annotations Annotations -} - type TabularData struct { Data map[string]interface{} MetadataIndex uint32 @@ -77,12 +75,20 @@ type TabularData struct { TimeReceived time.Time } +// figure out if mimetype and annotations should be included or not +type SensorMetadata struct { + TimeRequested time.Time + TimeReceived time.Time + // MimeType MimeType + //annotations lives in the data client file...so maybe make a shared situation later on?? + // Annotations Annotations +} type SensorData struct { //this is what can be filled by either tabular or binary data!! Metadata SensorMetadata //its one of, either binary or tabular ==> this needs help - Binary []byte - Tabular TabularData //??? feels wrong + SDStruct map[string]interface{} //or should it be TabularData.data ?? + SDBinary []byte } // NewDataClient constructs a new DataClient using the connection passed in by the viamClient and the provided logger. 
@@ -97,8 +103,30 @@ func NewDataSyncClient( }, nil } +// ConvertMapToProtobufAny converts a map[string]interface{} to a map[string]*anypb.Any +func ConvertMapToProtoAny(input map[string]interface{}) (map[string]*anypb.Any, error) { + protoMap := make(map[string]*anypb.Any) + for key, value := range input { + // Convert the value to a protobuf Struct-compatible type + structValue, err := structpb.NewValue(value) + if err != nil { + return nil, err + } + // Pack the structpb.Value into an anypb.Any + anyValue, err := anypb.New(structValue) + if err != nil { + return nil, err + } + // Assign the packed value to the map + protoMap[key] = anyValue + } + return protoMap, nil +} + func uploadMetadataToProto(metadata UploadMetadata) *pb.UploadMetadata { - methodParms, err := protoutils.ConvertStringMapToAnyPBMap(metadata.MethodParameters) + // methodParms, err := protoutils.ConvertStringMapToAnyPBMap(metadata.MethodParameters) + methodParams, err := ConvertMapToProtoAny(metadata.MethodParameters) + if err != nil { return nil } @@ -108,30 +136,109 @@ func uploadMetadataToProto(metadata UploadMetadata) *pb.UploadMetadata { ComponentName: metadata.ComponentName, MethodName: metadata.MethodName, Type: pb.DataType(metadata.Type), - MethodParameters: methodParms, + MethodParameters: methodParams, FileExtension: metadata.FileExtension, Tags: metadata.Tags, } } +// why doesnt this protoype have mime type and annotations with it?? +func sensorMetadataToProto(metadata SensorMetadata) *pb.SensorMetadata { + return &pb.SensorMetadata{ + TimeRequested: timestamppb.New(metadata.TimeRequested), + TimeReceived: timestamppb.New(metadata.TimeReceived), + } +} -// DataCaptureUpload uploads the contents and metadata for tabular data. 
-/* -notes: +func sensorDataToProto(sensorData SensorData) *pb.SensorData { + protoSensorData := &pb.SensorData{ + Metadata: sensorMetadataToProto(sensorData.Metadata), + } + if sensorData.SDBinary != nil && len(sensorData.SDBinary) > 0 { + protoSensorData.Data = &pb.SensorData_Binary{ + Binary: sensorData.SDBinary, + } + } else if sensorData.SDStruct != nil { + pbStruct, _ := structpb.NewStruct(sensorData.SDStruct) + protoSensorData.Data = &pb.SensorData_Struct{ + Struct: pbStruct, + } + } else { + return nil //should an error message be set instead?? + } + return protoSensorData +} +func sensorContentsToProto(sensorContents []SensorData) []*pb.SensorData { + var protoSensorContents []*pb.SensorData + for _, item := range sensorContents { + protoSensorContents = append(protoSensorContents, sensorDataToProto(item)) + } + return protoSensorContents +} -Metadata *UploadMetadata -SensorContents []*SensorData +func (d *Client) BinaryDataCaptureUpload( + ctx context.Context, + binaryData []byte, + partID string, + componentType string, + componentName string, + methodName string, + fileExtension string, + methodParameters map[string]interface{}, //or map[string]interface{}??? + tags []string, + timeRequested time.Time, + TimeReceived time.Time, + // dataRequestTimes *[2]time.Time, //check the type on this!!! +) (string, error) { + // Validate file extension + if fileExtension != "" && fileExtension[0] != '.' { + fileExtension = "." + fileExtension + } + //create sensor metadata + sensorMetadata := SensorMetadata{ + TimeRequested: timeRequested, + TimeReceived: TimeReceived, + } + // Create SensorData + sensorData := SensorData{ + Metadata: sensorMetadata, + SDStruct: nil, + SDBinary: binaryData, + } + //create metadata + metadata := UploadMetadata{ + PartID: partID, + ComponentType: componentType, + ComponentName: componentName, + MethodName: methodName, + Type: DataTypeBinarySensor, //is this right?? 
+ FileName: "", //not given + MethodParameters: methodParameters, + FileExtension: fileExtension, + Tags: tags, + } + sensorContents := []SensorData{sensorData} + // Call the upload method + response, err := d.DataCaptureUpload(ctx, metadata, sensorContents) + if err != nil { + return "", err + } + return response, nil +} -*/ -func (d *Client) DataCaptureUpload(ctx context.Context, metadata UploadMetadata, sensorContents []SensorData) error { +func tabularDataCaptureUpload() {} + +// DataCaptureUpload uploads the metadata and contents for either tabular or binary data, +// and returns the file ID associated with the uploaded data and metadata. +func (d *Client) DataCaptureUpload(ctx context.Context, metadata UploadMetadata, sensorContents []SensorData) (string, error) { resp, err := d.client.DataCaptureUpload(ctx, &pb.DataCaptureUploadRequest{ Metadata: uploadMetadataToProto(metadata), //should be in proto form !! - SensorContents: //sensorContents needs to go here or something, + SensorContents: sensorContentsToProto(sensorContents), }) if err != nil { - return err + return "", err } - return resp + return resp.FileId, nil } From e8c6cfda605749887230029d0619e7877d104f7f Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Mon, 18 Nov 2024 09:23:42 -0500 Subject: [PATCH 03/25] some more changes to data sync --- app2/dataset_client.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app2/dataset_client.go b/app2/dataset_client.go index 7c10a4709ff..cbe69bfcadf 100644 --- a/app2/dataset_client.go +++ b/app2/dataset_client.go @@ -104,7 +104,7 @@ func NewDataSyncClient( } // ConvertMapToProtobufAny converts a map[string]interface{} to a map[string]*anypb.Any -func ConvertMapToProtoAny(input map[string]interface{}) (map[string]*anypb.Any, error) { +func convertMapToProtoAny(input map[string]interface{}) (map[string]*anypb.Any, error) { protoMap := make(map[string]*anypb.Any) for key, value := range input { // Convert the value to a protobuf 
Struct-compatible type @@ -125,7 +125,7 @@ func ConvertMapToProtoAny(input map[string]interface{}) (map[string]*anypb.Any, func uploadMetadataToProto(metadata UploadMetadata) *pb.UploadMetadata { // methodParms, err := protoutils.ConvertStringMapToAnyPBMap(metadata.MethodParameters) - methodParams, err := ConvertMapToProtoAny(metadata.MethodParameters) + methodParams, err := convertMapToProtoAny(metadata.MethodParameters) if err != nil { return nil From 639ed919120ef1839dbb3c68f3c759867af341cf Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Mon, 18 Nov 2024 09:24:49 -0500 Subject: [PATCH 04/25] rename to datasync --- app2/{dataset_client.go => datasync_client.go} | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) rename app2/{dataset_client.go => datasync_client.go} (99%) diff --git a/app2/dataset_client.go b/app2/datasync_client.go similarity index 99% rename from app2/dataset_client.go rename to app2/datasync_client.go index cbe69bfcadf..295651394fd 100644 --- a/app2/dataset_client.go +++ b/app2/datasync_client.go @@ -1,5 +1,5 @@ -// Package dataset contains a gRPC based dataset client. -package dataset +// Package datasync contains a gRPC based datasync client. +package datasync import ( "context" From ed16e5a3b6189c0b5d6962a408b9520d66a8555a Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Mon, 18 Nov 2024 13:48:46 -0500 Subject: [PATCH 05/25] move file to app folder --- {app2 => app}/datasync_client.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) rename {app2 => app}/datasync_client.go (99%) diff --git a/app2/datasync_client.go b/app/datasync_client.go similarity index 99% rename from app2/datasync_client.go rename to app/datasync_client.go index 295651394fd..edf221d583a 100644 --- a/app2/datasync_client.go +++ b/app/datasync_client.go @@ -1,5 +1,5 @@ -// Package datasync contains a gRPC based datasync client. -package datasync +// Package app contains a gRPC based datasync client. 
+package app import ( "context" From da1737ddbad246d3b90d2fa0a6f5389c59b0e8fb Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Mon, 18 Nov 2024 14:42:30 -0500 Subject: [PATCH 06/25] add some comments --- app/datasync_client.go | 44 +++++++++++++++++++++++++++--------------- 1 file changed, 28 insertions(+), 16 deletions(-) diff --git a/app/datasync_client.go b/app/datasync_client.go index edf221d583a..9403585a860 100644 --- a/app/datasync_client.go +++ b/app/datasync_client.go @@ -186,44 +186,56 @@ func (d *Client) BinaryDataCaptureUpload( fileExtension string, methodParameters map[string]interface{}, //or map[string]interface{}??? tags []string, - timeRequested time.Time, - TimeReceived time.Time, - // dataRequestTimes *[2]time.Time, //check the type on this!!! + // timeRequested time.Time, + // TimeReceived time.Time, + dataRequestTimes [2]time.Time, // Assuming two time values, [0] is timeRequested, [1] is timeReceived ) (string, error) { // Validate file extension if fileExtension != "" && fileExtension[0] != '.' { fileExtension = "." + fileExtension } - //create sensor metadata - sensorMetadata := SensorMetadata{ - TimeRequested: timeRequested, - TimeReceived: TimeReceived, + + // Create SensorMetadata based on the provided times + /* + + if len(dataRequestTimes) == 0 { + // No times provided ?? + } + */ + var sensorMetadata SensorMetadata + if dataRequestTimes != [2]time.Time{} { // Check if times are provided + sensorMetadata = SensorMetadata{ + TimeRequested: dataRequestTimes[0], // Convert time to timestamp??? + TimeReceived: dataRequestTimes[1], // Convert time to timestamp??? 
+ } } + // Create SensorData sensorData := SensorData{ Metadata: sensorMetadata, - SDStruct: nil, - SDBinary: binaryData, + SDStruct: nil, // Assuming no struct is needed for binary data + SDBinary: binaryData, // Attach the binary data } - //create metadata + + // Create UploadMetadata metadata := UploadMetadata{ PartID: partID, ComponentType: componentType, ComponentName: componentName, MethodName: methodName, - Type: DataTypeBinarySensor, //is this right?? - FileName: "", //not given + Type: DataTypeBinarySensor, // assuming this is the correct type?? MethodParameters: methodParameters, - FileExtension: fileExtension, Tags: tags, } - sensorContents := []SensorData{sensorData} - // Call the upload method - response, err := d.DataCaptureUpload(ctx, metadata, sensorContents) + + // Upload the data (assuming DataCaptureUpload is your method for uploading) + response, err := d.DataCaptureUpload(ctx, metadata, []SensorData{sensorData}) if err != nil { return "", err } + return response, nil + } func tabularDataCaptureUpload() {} From ad7b156fecb791abccd53ff39d3b2b23151591b9 Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Mon, 18 Nov 2024 15:41:22 -0500 Subject: [PATCH 07/25] did tabularupload func --- app/datasync_client.go | 74 ++++++++++++++++++++++++++++++------------ 1 file changed, 54 insertions(+), 20 deletions(-) diff --git a/app/datasync_client.go b/app/datasync_client.go index 9403585a860..b291c695649 100644 --- a/app/datasync_client.go +++ b/app/datasync_client.go @@ -3,6 +3,7 @@ package app import ( "context" + "errors" "time" pb "go.viam.com/api/app/datasync/v1" @@ -184,39 +185,28 @@ func (d *Client) BinaryDataCaptureUpload( componentName string, methodName string, fileExtension string, - methodParameters map[string]interface{}, //or map[string]interface{}??? 
+ methodParameters map[string]interface{}, tags []string, - // timeRequested time.Time, - // TimeReceived time.Time, dataRequestTimes [2]time.Time, // Assuming two time values, [0] is timeRequested, [1] is timeReceived ) (string, error) { // Validate file extension if fileExtension != "" && fileExtension[0] != '.' { fileExtension = "." + fileExtension } - // Create SensorMetadata based on the provided times - /* - - if len(dataRequestTimes) == 0 { - // No times provided ?? - } - */ var sensorMetadata SensorMetadata - if dataRequestTimes != [2]time.Time{} { // Check if times are provided + if len(dataRequestTimes) == 2 { //can i have a better check here? like if dataRequestTimes != [2]time.Time{} sensorMetadata = SensorMetadata{ - TimeRequested: dataRequestTimes[0], // Convert time to timestamp??? - TimeReceived: dataRequestTimes[1], // Convert time to timestamp??? + TimeRequested: dataRequestTimes[0], + TimeReceived: dataRequestTimes[1], } } - // Create SensorData sensorData := SensorData{ Metadata: sensorMetadata, SDStruct: nil, // Assuming no struct is needed for binary data SDBinary: binaryData, // Attach the binary data } - // Create UploadMetadata metadata := UploadMetadata{ PartID: partID, @@ -227,18 +217,62 @@ func (d *Client) BinaryDataCaptureUpload( MethodParameters: methodParameters, Tags: tags, } - - // Upload the data (assuming DataCaptureUpload is your method for uploading) response, err := d.DataCaptureUpload(ctx, metadata, []SensorData{sensorData}) if err != nil { return "", err } - return response, nil - } -func tabularDataCaptureUpload() {} +func (d *Client) tabularDataCaptureUpload( + ctx context.Context, + tabularData []map[string]interface{}, + partID string, + componentType string, + componentName string, + methodName string, + dataRequestTimes [][2]time.Time, // Assuming two time values, [0] is timeRequested, [1] is timeReceived + // fileExtension string, + methodParameters map[string]interface{}, + tags []string, +) (string, error) { + if 
len(dataRequestTimes) != len(tabularData) { + errors.New("dataRequestTimes and tabularData lengths must be equal") + } + var sensorContents []SensorData + // Iterate through the tabular data + for i, tabData := range tabularData { + sensorMetadata := SensorMetadata{} + dates := dataRequestTimes[i] + if len(dates) == 2 { + sensorMetadata.TimeRequested = dates[0] + sensorMetadata.TimeReceived = dates[1] + } + // Create SensorData + sensorData := SensorData{ + Metadata: sensorMetadata, + SDStruct: tabData, + SDBinary: nil, + } + sensorContents = append(sensorContents, sensorData) + } + + // Create UploadMetadata + metadata := UploadMetadata{ + PartID: partID, + ComponentType: componentType, + ComponentName: componentName, + MethodName: methodName, + Type: DataTypeTabularSensor, // assuming this is the correct type?? + MethodParameters: methodParameters, + Tags: tags, + } + response, err := d.DataCaptureUpload(ctx, metadata, sensorContents) + if err != nil { + return "", err + } + return response, nil +} // DataCaptureUpload uploads the metadata and contents for either tabular or binary data, // and returns the file ID associated with the uploaded data and metadata. From 61040858f18dcd05e887388606af360b0616e698 Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Mon, 18 Nov 2024 16:17:34 -0500 Subject: [PATCH 08/25] commit before i try to merge --- app/datasync_client.go | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/app/datasync_client.go b/app/datasync_client.go index b291c695649..c9251ba0f69 100644 --- a/app/datasync_client.go +++ b/app/datasync_client.go @@ -93,15 +93,11 @@ type SensorData struct { } // NewDataClient constructs a new DataClient using the connection passed in by the viamClient and the provided logger. 
-func NewDataSyncClient( - channel rpc.ClientConn, - logger logging.Logger, -) (*Client, error) { - d := pb.NewDataSyncServiceClient(channel) +func NewDataSyncClient(conn rpc.ClientConn) *Client { + d := pb.NewDataSyncServiceClient(conn) return &Client{ client: d, - logger: logger, - }, nil + } } // ConvertMapToProtobufAny converts a map[string]interface{} to a map[string]*anypb.Any @@ -298,6 +294,16 @@ func (d *Client) FileUpload(ctx context.Context) error { return nil } +// FileUpload uploads the contents and metadata for binary (image + file) data, +// where the first packet must be the UploadMetadata. +func (d *Client) FileUploadFromPath(ctx context.Context) error { + // resp, err := d.client.FileUpload(ctx, &pb.FileUploadRequest{}) + // if err != nil { + // return err + // } + return nil +} + // StreamingDataCaptureUpload uploads the streaming contents and metadata for streaming binary (image + file) data, // where the first packet must be the UploadMetadata. func (d *Client) StreamingDataCaptureUpload(ctx context.Context) error { From 118cb4aa65bf2f0f77c6283bf3ca2b95fa26b4be Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Mon, 18 Nov 2024 16:55:22 -0500 Subject: [PATCH 09/25] moved to dataClient file --- app/data_client.go | 257 ++++++++++++++++++++++++++++++++- app/datasync_client.go | 315 ----------------------------------------- 2 files changed, 252 insertions(+), 320 deletions(-) delete mode 100644 app/datasync_client.go diff --git a/app/data_client.go b/app/data_client.go index 8578eefc9e2..3bad82b9838 100644 --- a/app/data_client.go +++ b/app/data_client.go @@ -3,11 +3,14 @@ package app import ( "context" + "errors" "time" "go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/bson/primitive" pb "go.viam.com/api/app/data/v1" + syncPb "go.viam.com/api/app/datasync/v1" + "go.viam.com/utils/rpc" "google.golang.org/protobuf/types/known/anypb" "google.golang.org/protobuf/types/known/structpb" @@ -18,7 +21,8 @@ import ( // DataClient implements 
the DataServiceClient interface. type DataClient struct { - client pb.DataServiceClient + client pb.DataServiceClient + dataSyncClient syncPb.DataSyncServiceClient } // Order specifies the order in which data is returned. @@ -177,13 +181,61 @@ type DatabaseConnReturn struct { HasDatabaseUser bool } +// :::::******NEW struct/variable ADDITIONS FOR DATASYNC START HERE!!!!**************** +type SensorMetadata struct { + // figure out if mimetype and annotations should be included or not + TimeRequested time.Time + TimeReceived time.Time + // MimeType MimeType + //annotations lives in the data client file...so maybe make a shared situation later on?? + // Annotations Annotations +} +type SensorData struct { + //this is what can be filled by either tabular or binary data!! + Metadata SensorMetadata + //its one of, either binary or tabular ==> this needs help + SDStruct map[string]interface{} //or should it be TabularData.data ?? + SDBinary []byte +} +type DataType int32 + +const ( + DataTypeUnspecified DataType = iota + DataTypeBinarySensor + DataTypeTabularSensor + DataTypeFile +) + +type MimeType int32 + +const ( + MimeTypeUnspecified MimeType = iota + MimeTypeJPEG //can i name things this??? + MimeTypePNG + MimeTypePCD +) + +type UploadMetadata struct { + PartID string + ComponentType string + ComponentName string + MethodName string + Type DataType + FileName string + MethodParameters map[string]interface{} //or map[string]string?? + FileExtension string + Tags []string +} + +//:::::******NEW struct/variable ADDITIONS FOR DATASYNC END HERE!!!!**************** + // NewDataClient constructs a new DataClient using the connection passed in by the viamClient. 
-func NewDataClient( - conn rpc.ClientConn, -) *DataClient { +func NewDataClient(conn rpc.ClientConn) *DataClient { d := pb.NewDataServiceClient(conn) + s := syncPb.NewDataSyncServiceClient(conn) return &DataClient{ - client: d, + client: d, + dataSyncClient: s, } } @@ -761,3 +813,198 @@ func (d *DataClient) RemoveBinaryDataFromDatasetByIDs( }) return err } + +// !!!!!!!!!! ******** ALL NEW ADDED FOR DATASYNC CLIENT + +func uploadMetadataToProto(metadata UploadMetadata) *syncPb.UploadMetadata { + // methodParms, err := protoutils.ConvertStringMapToAnyPBMap(metadata.MethodParameters) + methodParams, err := protoutils.ConvertMapToProtoAny(metadata.MethodParameters) + + if err != nil { + return nil + } + return &syncPb.UploadMetadata{ + PartId: metadata.PartID, + ComponentType: metadata.ComponentType, + ComponentName: metadata.ComponentName, + MethodName: metadata.MethodName, + Type: syncPb.DataType(metadata.Type), + MethodParameters: methodParams, + FileExtension: metadata.FileExtension, + Tags: metadata.Tags, + } +} + +// why doesnt this protoype have mime type and annotations with it?? +func sensorMetadataToProto(metadata SensorMetadata) *syncPb.SensorMetadata { + return &syncPb.SensorMetadata{ + TimeRequested: timestamppb.New(metadata.TimeRequested), + TimeReceived: timestamppb.New(metadata.TimeReceived), + } +} + +func sensorDataToProto(sensorData SensorData) *syncPb.SensorData { + protoSensorData := &syncPb.SensorData{ + Metadata: sensorMetadataToProto(sensorData.Metadata), + } + if sensorData.SDBinary != nil && len(sensorData.SDBinary) > 0 { + protoSensorData.Data = &syncPb.SensorData_Binary{ + Binary: sensorData.SDBinary, + } + } else if sensorData.SDStruct != nil { + pbStruct, _ := structpb.NewStruct(sensorData.SDStruct) + protoSensorData.Data = &syncPb.SensorData_Struct{ + Struct: pbStruct, + } + } else { + return nil //should an error message be set instead?? 
+ } + return protoSensorData +} +func sensorContentsToProto(sensorContents []SensorData) []*syncPb.SensorData { + var protoSensorContents []*syncPb.SensorData + for _, item := range sensorContents { + protoSensorContents = append(protoSensorContents, sensorDataToProto(item)) + } + return protoSensorContents +} + +func (d *DataClient) BinaryDataCaptureUpload( + ctx context.Context, + binaryData []byte, + partID string, + componentType string, + componentName string, + methodName string, + fileExtension string, + methodParameters map[string]interface{}, + tags []string, + dataRequestTimes [2]time.Time, // Assuming two time values, [0] is timeRequested, [1] is timeReceived +) (string, error) { + // Validate file extension + if fileExtension != "" && fileExtension[0] != '.' { + fileExtension = "." + fileExtension + } + // Create SensorMetadata based on the provided times + var sensorMetadata SensorMetadata + if len(dataRequestTimes) == 2 { //can i have a better check here? like if dataRequestTimes != [2]time.Time{} + sensorMetadata = SensorMetadata{ + TimeRequested: dataRequestTimes[0], + TimeReceived: dataRequestTimes[1], + } + } + // Create SensorData + sensorData := SensorData{ + Metadata: sensorMetadata, + SDStruct: nil, // Assuming no struct is needed for binary data + SDBinary: binaryData, // Attach the binary data + } + // Create UploadMetadata + metadata := UploadMetadata{ + PartID: partID, + ComponentType: componentType, + ComponentName: componentName, + MethodName: methodName, + Type: DataTypeBinarySensor, // assuming this is the correct type?? 
+ MethodParameters: methodParameters, + Tags: tags, + } + response, err := d.DataCaptureUpload(ctx, metadata, []SensorData{sensorData}) + if err != nil { + return "", err + } + return response, nil +} + +func (d *DataClient) tabularDataCaptureUpload( + ctx context.Context, + tabularData []map[string]interface{}, + partID string, + componentType string, + componentName string, + methodName string, + dataRequestTimes [][2]time.Time, // Assuming two time values, [0] is timeRequested, [1] is timeReceived + // fileExtension string, + methodParameters map[string]interface{}, + tags []string, +) (string, error) { + if len(dataRequestTimes) != len(tabularData) { + errors.New("dataRequestTimes and tabularData lengths must be equal") + } + var sensorContents []SensorData + // Iterate through the tabular data + for i, tabData := range tabularData { + sensorMetadata := SensorMetadata{} + dates := dataRequestTimes[i] + if len(dates) == 2 { + sensorMetadata.TimeRequested = dates[0] + sensorMetadata.TimeReceived = dates[1] + } + // Create SensorData + sensorData := SensorData{ + Metadata: sensorMetadata, + SDStruct: tabData, + SDBinary: nil, + } + sensorContents = append(sensorContents, sensorData) + } + + // Create UploadMetadata + metadata := UploadMetadata{ + PartID: partID, + ComponentType: componentType, + ComponentName: componentName, + MethodName: methodName, + Type: DataTypeTabularSensor, // assuming this is the correct type?? + MethodParameters: methodParameters, + Tags: tags, + } + response, err := d.DataCaptureUpload(ctx, metadata, sensorContents) + if err != nil { + return "", err + } + return response, nil +} + +// DataCaptureUpload uploads the metadata and contents for either tabular or binary data, +// and returns the file ID associated with the uploaded data and metadata. 
+func (d *DataClient) DataCaptureUpload(ctx context.Context, metadata UploadMetadata, sensorContents []SensorData) (string, error) { + resp, err := d.dataSyncClient.DataCaptureUpload(ctx, &syncPb.DataCaptureUploadRequest{ + Metadata: uploadMetadataToProto(metadata), //should be in proto form !! + SensorContents: sensorContentsToProto(sensorContents), + }) + if err != nil { + return "", err + } + return resp.FileId, nil +} + +// FileUpload uploads the contents and metadata for binary (image + file) data, +// where the first packet must be the UploadMetadata. +func (d *DataClient) FileUpload(ctx context.Context) error { + // resp, err := d.dataSyncClient.FileUpload(ctx, &pb.FileUploadRequest{}) + // if err != nil { + // return err + // } + return nil +} + +// FileUpload uploads the contents and metadata for binary (image + file) data, +// where the first packet must be the UploadMetadata. +func (d *DataClient) FileUploadFromPath(ctx context.Context) error { + // resp, err := d.client.FileUpload(ctx, &pb.FileUploadRequest{}) + // if err != nil { + // return err + // } + return nil +} + +// StreamingDataCaptureUpload uploads the streaming contents and metadata for streaming binary (image + file) data, +// where the first packet must be the UploadMetadata. +func (d *DataClient) StreamingDataCaptureUpload(ctx context.Context) error { + // resp, err := d.dataSyncClient.StreamingDataCaptureUpload(ctx, &pb.StreamingDataCaptureUploadRequest{}) + // if err != nil { + // return err + // } + return nil +} diff --git a/app/datasync_client.go b/app/datasync_client.go deleted file mode 100644 index c9251ba0f69..00000000000 --- a/app/datasync_client.go +++ /dev/null @@ -1,315 +0,0 @@ -// Package app contains a gRPC based datasync client. 
-package app - -import ( - "context" - "errors" - "time" - - pb "go.viam.com/api/app/datasync/v1" - "go.viam.com/rdk/logging" - - // "go.viam.com/rdk/protoutils" - - "go.viam.com/utils/rpc" - "google.golang.org/protobuf/types/known/anypb" - "google.golang.org/protobuf/types/known/structpb" - "google.golang.org/protobuf/types/known/timestamppb" -) - -// Client implements the DataSyncServiceClient interface. -type Client struct { - client pb.DataSyncServiceClient - logger logging.Logger -} - -type DataType int32 - -const ( - DataTypeUnspecified DataType = iota - DataTypeBinarySensor - DataTypeTabularSensor - DataTypeFile -) - -type MimeType int32 - -const ( - MimeTypeUnspecified MimeType = iota - MimeTypeJPEG //can i name things this??? - MimeTypePNG - MimeTypePCD -) - -// BoundingBox represents a labeled bounding box on an image. -// x and y values are normalized ratios between 0 and 1. -type BoundingBox struct { - ID string - Label string - XMinNormalized float64 - YMinNormalized float64 - XMaxNormalized float64 - YMaxNormalized float64 -} - -// Annotations are data annotations used for machine learning. -type Annotations struct { - Bboxes []BoundingBox -} -type UploadMetadata struct { - PartID string - ComponentType string - ComponentName string - MethodName string - Type DataType - FileName string - MethodParameters map[string]interface{} //or map[string]string?? - FileExtension string - Tags []string -} - -type TabularData struct { - Data map[string]interface{} - MetadataIndex uint32 - Metadata UploadMetadata //its usually capturemetadata and idk if this will work or do anything (probs remove this) - TimeRequested time.Time - TimeReceived time.Time -} - -// figure out if mimetype and annotations should be included or not -type SensorMetadata struct { - TimeRequested time.Time - TimeReceived time.Time - // MimeType MimeType - //annotations lives in the data client file...so maybe make a shared situation later on?? 
- // Annotations Annotations -} -type SensorData struct { - //this is what can be filled by either tabular or binary data!! - Metadata SensorMetadata - //its one of, either binary or tabular ==> this needs help - SDStruct map[string]interface{} //or should it be TabularData.data ?? - SDBinary []byte -} - -// NewDataClient constructs a new DataClient using the connection passed in by the viamClient and the provided logger. -func NewDataSyncClient(conn rpc.ClientConn) *Client { - d := pb.NewDataSyncServiceClient(conn) - return &Client{ - client: d, - } -} - -// ConvertMapToProtobufAny converts a map[string]interface{} to a map[string]*anypb.Any -func convertMapToProtoAny(input map[string]interface{}) (map[string]*anypb.Any, error) { - protoMap := make(map[string]*anypb.Any) - for key, value := range input { - // Convert the value to a protobuf Struct-compatible type - structValue, err := structpb.NewValue(value) - if err != nil { - return nil, err - } - // Pack the structpb.Value into an anypb.Any - anyValue, err := anypb.New(structValue) - if err != nil { - return nil, err - } - // Assign the packed value to the map - protoMap[key] = anyValue - } - return protoMap, nil -} - -func uploadMetadataToProto(metadata UploadMetadata) *pb.UploadMetadata { - // methodParms, err := protoutils.ConvertStringMapToAnyPBMap(metadata.MethodParameters) - methodParams, err := convertMapToProtoAny(metadata.MethodParameters) - - if err != nil { - return nil - } - return &pb.UploadMetadata{ - PartId: metadata.PartID, - ComponentType: metadata.ComponentType, - ComponentName: metadata.ComponentName, - MethodName: metadata.MethodName, - Type: pb.DataType(metadata.Type), - MethodParameters: methodParams, - FileExtension: metadata.FileExtension, - Tags: metadata.Tags, - } -} - -// why doesnt this protoype have mime type and annotations with it?? 
-func sensorMetadataToProto(metadata SensorMetadata) *pb.SensorMetadata { - return &pb.SensorMetadata{ - TimeRequested: timestamppb.New(metadata.TimeRequested), - TimeReceived: timestamppb.New(metadata.TimeReceived), - } -} - -func sensorDataToProto(sensorData SensorData) *pb.SensorData { - protoSensorData := &pb.SensorData{ - Metadata: sensorMetadataToProto(sensorData.Metadata), - } - if sensorData.SDBinary != nil && len(sensorData.SDBinary) > 0 { - protoSensorData.Data = &pb.SensorData_Binary{ - Binary: sensorData.SDBinary, - } - } else if sensorData.SDStruct != nil { - pbStruct, _ := structpb.NewStruct(sensorData.SDStruct) - protoSensorData.Data = &pb.SensorData_Struct{ - Struct: pbStruct, - } - } else { - return nil //should an error message be set instead?? - } - return protoSensorData -} -func sensorContentsToProto(sensorContents []SensorData) []*pb.SensorData { - var protoSensorContents []*pb.SensorData - for _, item := range sensorContents { - protoSensorContents = append(protoSensorContents, sensorDataToProto(item)) - } - return protoSensorContents -} - -func (d *Client) BinaryDataCaptureUpload( - ctx context.Context, - binaryData []byte, - partID string, - componentType string, - componentName string, - methodName string, - fileExtension string, - methodParameters map[string]interface{}, - tags []string, - dataRequestTimes [2]time.Time, // Assuming two time values, [0] is timeRequested, [1] is timeReceived -) (string, error) { - // Validate file extension - if fileExtension != "" && fileExtension[0] != '.' { - fileExtension = "." + fileExtension - } - // Create SensorMetadata based on the provided times - var sensorMetadata SensorMetadata - if len(dataRequestTimes) == 2 { //can i have a better check here? 
like if dataRequestTimes != [2]time.Time{} - sensorMetadata = SensorMetadata{ - TimeRequested: dataRequestTimes[0], - TimeReceived: dataRequestTimes[1], - } - } - // Create SensorData - sensorData := SensorData{ - Metadata: sensorMetadata, - SDStruct: nil, // Assuming no struct is needed for binary data - SDBinary: binaryData, // Attach the binary data - } - // Create UploadMetadata - metadata := UploadMetadata{ - PartID: partID, - ComponentType: componentType, - ComponentName: componentName, - MethodName: methodName, - Type: DataTypeBinarySensor, // assuming this is the correct type?? - MethodParameters: methodParameters, - Tags: tags, - } - response, err := d.DataCaptureUpload(ctx, metadata, []SensorData{sensorData}) - if err != nil { - return "", err - } - return response, nil -} - -func (d *Client) tabularDataCaptureUpload( - ctx context.Context, - tabularData []map[string]interface{}, - partID string, - componentType string, - componentName string, - methodName string, - dataRequestTimes [][2]time.Time, // Assuming two time values, [0] is timeRequested, [1] is timeReceived - // fileExtension string, - methodParameters map[string]interface{}, - tags []string, -) (string, error) { - if len(dataRequestTimes) != len(tabularData) { - errors.New("dataRequestTimes and tabularData lengths must be equal") - } - var sensorContents []SensorData - // Iterate through the tabular data - for i, tabData := range tabularData { - sensorMetadata := SensorMetadata{} - dates := dataRequestTimes[i] - if len(dates) == 2 { - sensorMetadata.TimeRequested = dates[0] - sensorMetadata.TimeReceived = dates[1] - } - // Create SensorData - sensorData := SensorData{ - Metadata: sensorMetadata, - SDStruct: tabData, - SDBinary: nil, - } - sensorContents = append(sensorContents, sensorData) - } - - // Create UploadMetadata - metadata := UploadMetadata{ - PartID: partID, - ComponentType: componentType, - ComponentName: componentName, - MethodName: methodName, - Type: DataTypeTabularSensor, // 
assuming this is the correct type?? - MethodParameters: methodParameters, - Tags: tags, - } - response, err := d.DataCaptureUpload(ctx, metadata, sensorContents) - if err != nil { - return "", err - } - return response, nil -} - -// DataCaptureUpload uploads the metadata and contents for either tabular or binary data, -// and returns the file ID associated with the uploaded data and metadata. -func (d *Client) DataCaptureUpload(ctx context.Context, metadata UploadMetadata, sensorContents []SensorData) (string, error) { - resp, err := d.client.DataCaptureUpload(ctx, &pb.DataCaptureUploadRequest{ - Metadata: uploadMetadataToProto(metadata), //should be in proto form !! - SensorContents: sensorContentsToProto(sensorContents), - }) - if err != nil { - return "", err - } - return resp.FileId, nil - -} - -// FileUpload uploads the contents and metadata for binary (image + file) data, -// where the first packet must be the UploadMetadata. -func (d *Client) FileUpload(ctx context.Context) error { - resp, err := d.client.FileUpload(ctx, &pb.FileUploadRequest{}) - if err != nil { - return err - } - return nil -} - -// FileUpload uploads the contents and metadata for binary (image + file) data, -// where the first packet must be the UploadMetadata. -func (d *Client) FileUploadFromPath(ctx context.Context) error { - // resp, err := d.client.FileUpload(ctx, &pb.FileUploadRequest{}) - // if err != nil { - // return err - // } - return nil -} - -// StreamingDataCaptureUpload uploads the streaming contents and metadata for streaming binary (image + file) data, -// where the first packet must be the UploadMetadata. 
-func (d *Client) StreamingDataCaptureUpload(ctx context.Context) error { - resp, err := d.client.FileUpload(ctx, &pb.StreamingDataCaptureUploadRequest{}) - if err != nil { - return err - } - return nil -} From 53180bfd6b99d117a26a01b70f5b62f1adcaf400 Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Mon, 18 Nov 2024 17:39:10 -0500 Subject: [PATCH 10/25] more changes --- app/data_client.go | 42 +++++++++--------- app/data_client_test.go | 49 ++++++++++++++++++++- testutils/inject/datasync_service_client.go | 48 ++++++++++++++++++++ 3 files changed, 117 insertions(+), 22 deletions(-) create mode 100644 testutils/inject/datasync_service_client.go diff --git a/app/data_client.go b/app/data_client.go index 3bad82b9838..e84eb443b59 100644 --- a/app/data_client.go +++ b/app/data_client.go @@ -295,26 +295,26 @@ func captureMetadataFromProto(proto *pb.CaptureMetadata) CaptureMetadata { } } -func captureMetadataToProto(metadata CaptureMetadata) *pb.CaptureMetadata { - methodParams, err := protoutils.ConvertMapToProtoAny(metadata.MethodParameters) - if err != nil { - return nil - } - return &pb.CaptureMetadata{ - OrganizationId: metadata.OrganizationID, - LocationId: metadata.LocationID, - RobotName: metadata.RobotName, - RobotId: metadata.RobotID, - PartName: metadata.PartName, - PartId: metadata.PartID, - ComponentType: metadata.ComponentType, - ComponentName: metadata.ComponentName, - MethodName: metadata.MethodName, - MethodParameters: methodParams, - Tags: metadata.Tags, - MimeType: metadata.MimeType, - } -} +// func captureMetadataToProto(metadata CaptureMetadata) *pb.CaptureMetadata { +// methodParams, err := protoutils.ConvertMapToProtoAny(metadata.MethodParameters) +// if err != nil { +// return nil +// } +// return &pb.CaptureMetadata{ +// OrganizationId: metadata.OrganizationID, +// LocationId: metadata.LocationID, +// RobotName: metadata.RobotName, +// RobotId: metadata.RobotID, +// PartName: metadata.PartName, +// PartId: metadata.PartID, +// ComponentType: 
metadata.ComponentType, +// ComponentName: metadata.ComponentName, +// MethodName: metadata.MethodName, +// MethodParameters: methodParams, +// Tags: metadata.Tags, +// MimeType: metadata.MimeType, +// } +// } func binaryDataFromProto(proto *pb.BinaryData) BinaryData { return BinaryData{ @@ -847,7 +847,7 @@ func sensorDataToProto(sensorData SensorData) *syncPb.SensorData { protoSensorData := &syncPb.SensorData{ Metadata: sensorMetadataToProto(sensorData.Metadata), } - if sensorData.SDBinary != nil && len(sensorData.SDBinary) > 0 { + if len(sensorData.SDBinary) > 0 { protoSensorData.Data = &syncPb.SensorData_Binary{ Binary: sensorData.SDBinary, } diff --git a/app/data_client_test.go b/app/data_client_test.go index 5a3a22468a0..23838cae778 100644 --- a/app/data_client_test.go +++ b/app/data_client_test.go @@ -7,11 +7,14 @@ import ( "go.mongodb.org/mongo-driver/bson" pb "go.viam.com/api/app/data/v1" + syncPb "go.viam.com/api/app/datasync/v1" + "go.viam.com/test" utils "go.viam.com/utils/protoutils" "google.golang.org/grpc" "google.golang.org/protobuf/types/known/timestamppb" + "go.viam.com/rdk/protoutils" "go.viam.com/rdk/testutils/inject" ) @@ -130,6 +133,27 @@ func binaryDataToProto(binaryData BinaryData) *pb.BinaryData { } } +func captureMetadataToProto(metadata CaptureMetadata) *pb.CaptureMetadata { + methodParams, err := protoutils.ConvertMapToProtoAny(metadata.MethodParameters) + if err != nil { + return nil + } + return &pb.CaptureMetadata{ + OrganizationId: metadata.OrganizationID, + LocationId: metadata.LocationID, + RobotName: metadata.RobotName, + RobotId: metadata.RobotID, + PartName: metadata.PartName, + PartId: metadata.PartID, + ComponentType: metadata.ComponentType, + ComponentName: metadata.ComponentName, + MethodName: metadata.MethodName, + MethodParameters: methodParams, + Tags: metadata.Tags, + MimeType: metadata.MimeType, + } +} + func binaryMetadataToProto(binaryMetadata BinaryMetadata) *pb.BinaryMetadata { return &pb.BinaryMetadata{ Id: 
binaryMetadata.ID, @@ -156,7 +180,9 @@ func dataRequestToProto(dataRequest DataRequest) *pb.DataRequest { func createGrpcClient() *inject.DataServiceClient { return &inject.DataServiceClient{} } - +func createGrpcDataSyncClient() *inject.DataSyncServiceClient { + return &inject.DataSyncServiceClient{} +} func TestDataClient(t *testing.T) { grpcClient := createGrpcClient() client := DataClient{client: grpcClient} @@ -579,3 +605,24 @@ func TestDataClient(t *testing.T) { client.RemoveBinaryDataFromDatasetByIDs(context.Background(), binaryIDs, datasetID) }) } + +// ***********Added this below for new dataSync !!!****** +func TestDataSyncClient(t *testing.T) { + grpcClient := createGrpcDataSyncClient() + client := DataClient{dataSyncClient: grpcClient} + + t.Run("DataCaptureUpload", func(t *testing.T) { + grpcClient.DataCaptureUploadFunc = func(ctx context.Context, in *syncPb.DataCaptureUploadRequest, + opts ...grpc.CallOption, + ) (*syncPb.DataCaptureUploadResponse, error) { + //test.That(t, in._, test.ShouldResemble, toProto(something)) //toProto + return &syncPb.DataCaptureUploadResponse{ + //fill all variables w prototype-types + + }, nil + } + resp, _ := client.DataCaptureUpload(context.Background()) //not proto-types, regular types u expect to recieve in the function + //test.That(t, resp._, test.ShouldResemble, fromProto(something if needed)) //compare response with regular expected types + }) + +} diff --git a/testutils/inject/datasync_service_client.go b/testutils/inject/datasync_service_client.go new file mode 100644 index 00000000000..e61d23c1e95 --- /dev/null +++ b/testutils/inject/datasync_service_client.go @@ -0,0 +1,48 @@ +package inject + +import ( + "context" + + datapb "go.viam.com/api/app/datasync/v1" + "google.golang.org/grpc" +) + +// DataServiceClient represents a fake instance of a data service client. 
+type DataSyncServiceClient struct {
+	datapb.DataSyncServiceClient
+	DataCaptureUploadFunc func(ctx context.Context, in *datapb.DataCaptureUploadRequest,
+		opts ...grpc.CallOption) (*datapb.DataCaptureUploadResponse, error)
+	FileUploadFunc func(ctx context.Context,
+		opts ...grpc.CallOption) (datapb.DataSyncService_FileUploadClient, error)
+	StreamingDataCaptureUploadFunc func (ctx context.Context,
+		opts ...grpc.CallOption) (datapb.DataSyncService_StreamingDataCaptureUploadClient, error)
+}
+
+// DataCaptureUpload uploads the contents and metadata for tabular data.
+func (client *DataSyncServiceClient) DataCaptureUpload(ctx context.Context, in *datapb.DataCaptureUploadRequest,
+	opts ...grpc.CallOption,
+) (*datapb.DataCaptureUploadResponse, error) {
+	if client.DataCaptureUploadFunc == nil {
+		return client.DataSyncServiceClient.DataCaptureUpload(ctx, in, opts...)
+	}
+	return client.DataCaptureUploadFunc(ctx, in, opts...)
+}
+// FileUpload uploads the contents and metadata for binary (image + file) data,
+// where the first packet must be the UploadMetadata.
+func (client *DataSyncServiceClient) FileUpload(ctx context.Context,
+	opts ...grpc.CallOption,
+) (datapb.DataSyncService_FileUploadClient, error) {
+	if client.FileUploadFunc == nil {
+		return client.DataSyncServiceClient.FileUpload(ctx, opts...)
+	}
+	return client.FileUploadFunc(ctx, opts...)
+}
+// StreamingDataCaptureUpload forwards to StreamingDataCaptureUploadFunc. NOTE(review): the final return below calls StreamingDataCaptureUpload itself instead of StreamingDataCaptureUploadFunc — infinite recursion; fix in the source commit.
+func (client *DataSyncServiceClient) StreamingDataCaptureUpload(ctx context.Context,
+	opts ...grpc.CallOption,
+) (datapb.DataSyncService_StreamingDataCaptureUploadClient, error) {
+	if client.StreamingDataCaptureUploadFunc == nil {
+		return client.DataSyncServiceClient.StreamingDataCaptureUpload(ctx, opts...)
+	}
+	return client.StreamingDataCaptureUpload(ctx, opts...)
+} From 84b2bf5e7e772c76cf30ea5353ec0e50b2592ae3 Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Mon, 18 Nov 2024 21:11:21 -0500 Subject: [PATCH 11/25] trying to figure out streaming --- app/data_client.go | 93 ++++++++++++++++++++++++++++++++++++++--- app/data_client_test.go | 68 +++++++++++++++++++++++++++--- 2 files changed, 149 insertions(+), 12 deletions(-) diff --git a/app/data_client.go b/app/data_client.go index e84eb443b59..0663720cc07 100644 --- a/app/data_client.go +++ b/app/data_client.go @@ -4,6 +4,7 @@ package app import ( "context" "errors" + "fmt" "time" "go.mongodb.org/mongo-driver/bson" @@ -829,6 +830,7 @@ func uploadMetadataToProto(metadata UploadMetadata) *syncPb.UploadMetadata { ComponentName: metadata.ComponentName, MethodName: metadata.MethodName, Type: syncPb.DataType(metadata.Type), + FileName: metadata.FileName, MethodParameters: methodParams, FileExtension: metadata.FileExtension, Tags: metadata.Tags, @@ -882,6 +884,7 @@ func (d *DataClient) BinaryDataCaptureUpload( dataRequestTimes [2]time.Time, // Assuming two time values, [0] is timeRequested, [1] is timeReceived ) (string, error) { // Validate file extension + //**need to look into this! if fileExtension != "" && fileExtension[0] != '.' { fileExtension = "." + fileExtension } @@ -1001,10 +1004,88 @@ func (d *DataClient) FileUploadFromPath(ctx context.Context) error { // StreamingDataCaptureUpload uploads the streaming contents and metadata for streaming binary (image + file) data, // where the first packet must be the UploadMetadata. 
-func (d *DataClient) StreamingDataCaptureUpload(ctx context.Context) error { - // resp, err := d.dataSyncClient.StreamingDataCaptureUpload(ctx, &pb.StreamingDataCaptureUploadRequest{}) - // if err != nil { - // return err - // } - return nil +func (d *DataClient) StreamingDataCaptureUpload( + ctx context.Context, + data []byte, //data in bytes (so similar to binarydataCap)...the rest below are for dataCaptureUploadMetadata + partID string, //uploadmetadata + componentType string, //uploadmetadata + componentName string, //uploadmetadata + methodName string, //uploadmetadata + dataType DataType, //uploadmetadata + fileName string, //uploadmetadata + methodParameters map[string]interface{}, //uploadmetadata + fileExt string, //uploadmetadata + tags []string, //uploadmetadata + dataRequestTimes [2]time.Time, //sensorMetadata + //mimeType and annotations?? //sensorMetadata? +) (string, error) { + // Prepare UploadMetadata + metadata := UploadMetadata{ + PartID: partID, + ComponentType: componentType, + ComponentName: componentName, + MethodName: methodName, + Type: DataTypeBinarySensor, // assuming this is the correct type?? + MethodParameters: methodParameters, + Tags: tags, + } + //create uploadMetadata + uploadMetadatapb := uploadMetadataToProto(metadata) + + //create sensorMetadata + // Create SensorMetadata based on the provided times + var sensorMetadata SensorMetadata + if len(dataRequestTimes) == 2 { //can i have a better check here? like if dataRequestTimes != [2]time.Time{} + sensorMetadata = SensorMetadata{ + TimeRequested: dataRequestTimes[0], + TimeReceived: dataRequestTimes[1], + } + } + // create SensorData + sensorData := SensorData{ + Metadata: sensorMetadata, + SDStruct: nil, // assuming no struct is needed for binary data + SDBinary: data, // attach the binary data because our data is given in []bytes! 
+ } + sensorDataPb := sensorContentsToProto([]SensorData{sensorData}) + + //create DataCaptureUploadMetadata (aka create the metadata for the first request) + dataCaptureUploadMetadataPb := &syncPb.DataCaptureUploadMetadata{ + UploadMetadata: uploadMetadatapb, + SensorMetadata: sensorDataPb[0].Metadata, + } + //this method either uses dataCaptureUploadMetadata OR data...? + + //create the first request w/ metadata + metadataRequest := &syncPb.StreamingDataCaptureUploadRequest{ + UploadPacket: dataCaptureUploadMetadataPb, //StreamingDataCaptureUploadRequest_Metadata ...this should be metadata?? + } + + // establish the streaming client + stream, err := d.dataSyncClient.StreamingDataCaptureUpload(ctx) + if err != nil { + return "", fmt.Errorf("failed to create streaming client: %w", err) + } + defer stream.CloseSend() + + // send metadata request + if err := stream.Send(metadataRequest); err != nil { + return "", fmt.Errorf("failed to send metadata: %w", err) + } + + // send another request, this time the data request + dataRequest := &syncPb.StreamingDataCaptureUploadRequest{ + UploadPacket: data, //*StreamingDataCaptureUploadRequest_Data ....this should be binaryData from above + } + if err := stream.Send(dataRequest); err != nil { + return "", fmt.Errorf("failed to send data: %w", err) + } + // close the stream and receive the response + response, err := stream.Recv() + if err != nil { + return "", fmt.Errorf("failed to receive response: %w", err) + } + + // return the file ID + return response.GetFileId(), nil } diff --git a/app/data_client_test.go b/app/data_client_test.go index 23838cae778..a3db822eab3 100644 --- a/app/data_client_test.go +++ b/app/data_client_test.go @@ -40,9 +40,12 @@ const ( mongodbURI = "mongo_uri" hostName = "host_name" last = "last" + fileID = "file_id" ) var ( + binaryDataType = DataTypeBinarySensor + // tabularDataType = DataTypeTabularSensor locationIDs = []string{locationID} orgIDs = []string{organizationID} mimeTypes = 
[]string{mimeType} @@ -611,18 +614,71 @@ func TestDataSyncClient(t *testing.T) { grpcClient := createGrpcDataSyncClient() client := DataClient{dataSyncClient: grpcClient} - t.Run("DataCaptureUpload", func(t *testing.T) { + uploadMetadata := UploadMetadata{ + PartID: partID, + ComponentType: componentType, + ComponentName: componentName, + MethodName: method, + Type: DataTypeBinarySensor, + FileName: fileName, + MethodParameters: methodParameters, //or map[string]string?? + FileExtension: fileExt, + Tags: tags, + } + metadata := SensorMetadata{ + TimeRequested: startTime, + TimeReceived: endTime, + } + binarySensorData := SensorData{ + Metadata: metadata, + SDStruct: nil, + SDBinary: binaryDataByte, + } + + t.Run("BinaryDataCaptureUpload", func(t *testing.T) { grpcClient.DataCaptureUploadFunc = func(ctx context.Context, in *syncPb.DataCaptureUploadRequest, opts ...grpc.CallOption, ) (*syncPb.DataCaptureUploadResponse, error) { - //test.That(t, in._, test.ShouldResemble, toProto(something)) //toProto + // expectedBinaryData := binaryDataByte --> just a note for myself for now + methodParams, _ := protoutils.ConvertMapToProtoAny(methodParameters) + + test.That(t, in.Metadata.PartId, test.ShouldEqual, partID) + test.That(t, in.Metadata.ComponentType, test.ShouldEqual, componentType) + test.That(t, in.Metadata.ComponentName, test.ShouldEqual, componentName) + test.That(t, in.Metadata.MethodName, test.ShouldEqual, method) + test.That(t, in.Metadata.Type, test.ShouldEqual, binaryDataType) + test.That(t, in.Metadata.FileName, test.ShouldEqual, fileName) + test.That(t, in.Metadata.MethodParameters, test.ShouldResemble, methodParams) + test.That(t, in.Metadata.FileExtension, test.ShouldEqual, fileExt) + test.That(t, in.Metadata.Tags, test.ShouldResemble, tags) + + test.That(t, in.SensorContents[0].Metadata.TimeRequested, test.ShouldResemble, timestamppb.New(startTime)) + test.That(t, in.SensorContents[0].Metadata.TimeReceived, test.ShouldResemble, timestamppb.New(endTime)) + 
// Extract and validate SensorContents[0].Data + dataField, ok := in.SensorContents[0].Data.(*syncPb.SensorData_Binary) + test.That(t, ok, test.ShouldBeTrue) // Ensure the type is correct + test.That(t, dataField.Binary, test.ShouldResemble, binaryDataByte) + // test.That(t, in., test.ShouldResemble, toProto(something)) //toProto return &syncPb.DataCaptureUploadResponse{ - //fill all variables w prototype-types - + FileId: fileID, }, nil } - resp, _ := client.DataCaptureUpload(context.Background()) //not proto-types, regular types u expect to recieve in the function - //test.That(t, resp._, test.ShouldResemble, fromProto(something if needed)) //compare response with regular expected types + resp, _ := client.DataCaptureUpload(context.Background(), uploadMetadata, []SensorData{binarySensorData}) //not proto-types, regular types u expect to recieve in the function + test.That(t, resp, test.ShouldResemble, fileID) //compare response with regular expected types (fromProto if needed) }) + // t.Run("TabularDataCaptureUpload", func(t *testing.T) { + // grpcClient.DataCaptureUploadFunc = func(ctx context.Context, in *syncPb.DataCaptureUploadRequest, + // opts ...grpc.CallOption, + // ) (*syncPb.DataCaptureUploadResponse, error) { + // //test.That(t, in._, test.ShouldResemble, toProto(something)) //toProto + // return &syncPb.DataCaptureUploadResponse{ + // //fill all variables w prototype-types + + // }, nil + // } + // resp, _ := client.DataCaptureUpload(context.Background()) //not proto-types, regular types u expect to recieve in the function + // //test.That(t, resp._, test.ShouldResemble, fromProto(something if needed)) //compare response with regular expected types + // }) + } From 795491bab1179f05d36678c559fdb8047317b141 Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Tue, 19 Nov 2024 15:38:45 -0500 Subject: [PATCH 12/25] tabulardatacapture test --- app/data_client.go | 149 +++++++++------- app/data_client_test.go | 187 +++++++++++++++----- 
testutils/inject/datasync_service_client.go | 37 +++- 3 files changed, 258 insertions(+), 115 deletions(-) diff --git a/app/data_client.go b/app/data_client.go index 0663720cc07..c737f610bf5 100644 --- a/app/data_client.go +++ b/app/data_client.go @@ -871,6 +871,14 @@ func sensorContentsToProto(sensorContents []SensorData) []*syncPb.SensorData { return protoSensorContents } +// Helper function to format the file extension. +func formatFileExtension(fileExt string) string { + if len(fileExt) > 0 && fileExt[0] != '.' { + return "." + fileExt + } + return fileExt +} + func (d *DataClient) BinaryDataCaptureUpload( ctx context.Context, binaryData []byte, @@ -878,16 +886,15 @@ func (d *DataClient) BinaryDataCaptureUpload( componentType string, componentName string, methodName string, - fileExtension string, + fileName string, //not in python methodParameters map[string]interface{}, - tags []string, + fileExtension string, + tags []string, //this is the last thing apart of uploadmetadata dataRequestTimes [2]time.Time, // Assuming two time values, [0] is timeRequested, [1] is timeReceived ) (string, error) { // Validate file extension //**need to look into this! - if fileExtension != "" && fileExtension[0] != '.' { - fileExtension = "." + fileExtension - } + fileExtension = formatFileExtension(fileExtension) // Create SensorMetadata based on the provided times var sensorMetadata SensorMetadata if len(dataRequestTimes) == 2 { //can i have a better check here? like if dataRequestTimes != [2]time.Time{} @@ -909,7 +916,9 @@ func (d *DataClient) BinaryDataCaptureUpload( ComponentName: componentName, MethodName: methodName, Type: DataTypeBinarySensor, // assuming this is the correct type?? 
+ FileName: fileName, MethodParameters: methodParameters, + FileExtension: fileExtension, Tags: tags, } response, err := d.DataCaptureUpload(ctx, metadata, []SensorData{sensorData}) @@ -926,11 +935,13 @@ func (d *DataClient) tabularDataCaptureUpload( componentType string, componentName string, methodName string, - dataRequestTimes [][2]time.Time, // Assuming two time values, [0] is timeRequested, [1] is timeReceived - // fileExtension string, + fileName string, methodParameters map[string]interface{}, + fileExtension string, tags []string, + dataRequestTimes [][2]time.Time, // Assuming two time values, [0] is timeRequested, [1] is timeReceived ) (string, error) { + fileExtension = formatFileExtension(fileExtension) if len(dataRequestTimes) != len(tabularData) { errors.New("dataRequestTimes and tabularData lengths must be equal") } @@ -959,7 +970,9 @@ func (d *DataClient) tabularDataCaptureUpload( ComponentName: componentName, MethodName: methodName, Type: DataTypeTabularSensor, // assuming this is the correct type?? + FileName: fileName, MethodParameters: methodParameters, + FileExtension: fileExtension, Tags: tags, } response, err := d.DataCaptureUpload(ctx, metadata, sensorContents) @@ -1002,90 +1015,92 @@ func (d *DataClient) FileUploadFromPath(ctx context.Context) error { return nil } -// StreamingDataCaptureUpload uploads the streaming contents and metadata for streaming binary (image + file) data, -// where the first packet must be the UploadMetadata. +// StreamingDataCaptureUpload uploads metadata and streaming binary data in chunks. 
func (d *DataClient) StreamingDataCaptureUpload( ctx context.Context, - data []byte, //data in bytes (so similar to binarydataCap)...the rest below are for dataCaptureUploadMetadata - partID string, //uploadmetadata - componentType string, //uploadmetadata - componentName string, //uploadmetadata - methodName string, //uploadmetadata - dataType DataType, //uploadmetadata - fileName string, //uploadmetadata - methodParameters map[string]interface{}, //uploadmetadata - fileExt string, //uploadmetadata - tags []string, //uploadmetadata - dataRequestTimes [2]time.Time, //sensorMetadata - //mimeType and annotations?? //sensorMetadata? + data []byte, + partID string, + fileExt string, + componentType string, + componentName string, + methodName string, + methodParameters map[string]interface{}, + dataRequestTimes [2]time.Time, + tags []string, ) (string, error) { - // Prepare UploadMetadata - metadata := UploadMetadata{ - PartID: partID, + UploadChunkSize := 64 * 1024 + // create metadata for the upload. + methodParams, _ := protoutils.ConvertMapToProtoAny(methodParameters) + uploadMetadataPb := &syncPb.UploadMetadata{ + PartId: partID, ComponentType: componentType, ComponentName: componentName, MethodName: methodName, - Type: DataTypeBinarySensor, // assuming this is the correct type?? - MethodParameters: methodParameters, + FileExtension: formatFileExtension(fileExt), + Type: syncPb.DataType_DATA_TYPE_BINARY_SENSOR, + MethodParameters: methodParams, Tags: tags, } - //create uploadMetadata - uploadMetadatapb := uploadMetadataToProto(metadata) - //create sensorMetadata - // Create SensorMetadata based on the provided times - var sensorMetadata SensorMetadata - if len(dataRequestTimes) == 2 { //can i have a better check here? like if dataRequestTimes != [2]time.Time{} - sensorMetadata = SensorMetadata{ - TimeRequested: dataRequestTimes[0], - TimeReceived: dataRequestTimes[1], + // handle data request times. 
+ var sensorMetadataPb *syncPb.SensorMetadata + if len(dataRequestTimes) == 2 { + sensorMetadataPb = &syncPb.SensorMetadata{ + TimeRequested: timestamppb.New(dataRequestTimes[0]), + TimeReceived: timestamppb.New(dataRequestTimes[1]), } } - // create SensorData - sensorData := SensorData{ - Metadata: sensorMetadata, - SDStruct: nil, // assuming no struct is needed for binary data - SDBinary: data, // attach the binary data because our data is given in []bytes! - } - sensorDataPb := sensorContentsToProto([]SensorData{sensorData}) - - //create DataCaptureUploadMetadata (aka create the metadata for the first request) - dataCaptureUploadMetadataPb := &syncPb.DataCaptureUploadMetadata{ - UploadMetadata: uploadMetadatapb, - SensorMetadata: sensorDataPb[0].Metadata, - } - //this method either uses dataCaptureUploadMetadata OR data...? - //create the first request w/ metadata - metadataRequest := &syncPb.StreamingDataCaptureUploadRequest{ - UploadPacket: dataCaptureUploadMetadataPb, //StreamingDataCaptureUploadRequest_Metadata ...this should be metadata?? + // create the DataCaptureUploadMetadata. + metadata := &syncPb.DataCaptureUploadMetadata{ + UploadMetadata: uploadMetadataPb, + SensorMetadata: sensorMetadataPb, } - // establish the streaming client + // establish a streaming connection. stream, err := d.dataSyncClient.StreamingDataCaptureUpload(ctx) if err != nil { - return "", fmt.Errorf("failed to create streaming client: %w", err) + return "", fmt.Errorf("failed to establish streaming connection: %w", err) } - defer stream.CloseSend() - // send metadata request - if err := stream.Send(metadataRequest); err != nil { + // send the metadata as the first packet. 
+ metaReq := &syncPb.StreamingDataCaptureUploadRequest{ + UploadPacket: &syncPb.StreamingDataCaptureUploadRequest_Metadata{ + Metadata: metadata, + }, + } + if err := stream.Send(metaReq); err != nil { return "", fmt.Errorf("failed to send metadata: %w", err) } - // send another request, this time the data request - dataRequest := &syncPb.StreamingDataCaptureUploadRequest{ - UploadPacket: data, //*StreamingDataCaptureUploadRequest_Data ....this should be binaryData from above - } - if err := stream.Send(dataRequest); err != nil { - return "", fmt.Errorf("failed to send data: %w", err) + // send the binary data in chunks. + for start := 0; start < len(data); start += UploadChunkSize { + end := start + UploadChunkSize + if end > len(data) { + end = len(data) + } + + chunk := data[start:end] + dataReq := &syncPb.StreamingDataCaptureUploadRequest{ + UploadPacket: &syncPb.StreamingDataCaptureUploadRequest_Data{ + Data: chunk, + }, + } + + if err := stream.Send(dataReq); err != nil { + return "", fmt.Errorf("failed to send data chunk: %w", err) + } } - // close the stream and receive the response - response, err := stream.Recv() + + // close the stream and get the response. + resp, err := stream.CloseAndRecv() if err != nil { return "", fmt.Errorf("failed to receive response: %w", err) } - // return the file ID - return response.GetFileId(), nil + // return the file ID from the response. 
+ if resp == nil || resp.FileId == "" { + return "", fmt.Errorf("response is empty or invalid") + } + return resp.FileId, nil } diff --git a/app/data_client_test.go b/app/data_client_test.go index a3db822eab3..9b0f6b0bb92 100644 --- a/app/data_client_test.go +++ b/app/data_client_test.go @@ -44,8 +44,8 @@ const ( ) var ( - binaryDataType = DataTypeBinarySensor - // tabularDataType = DataTypeTabularSensor + binaryDataType = DataTypeBinarySensor + tabularDataType = DataTypeTabularSensor locationIDs = []string{locationID} orgIDs = []string{organizationID} mimeTypes = []string{mimeType} @@ -54,6 +54,7 @@ var ( tags = []string{tag} startTime = time.Now().UTC().Round(time.Millisecond) endTime = time.Now().UTC().Round(time.Millisecond) + dataRequestTimes = [2]time.Time{startTime, endTime} count = uint64(5) limit = uint64(5) countOnly = true @@ -61,6 +62,27 @@ var ( data = map[string]interface{}{ "key": "value", } + tabularMetadata = CaptureMetadata{ + OrganizationID: organizationID, + LocationID: locationID, + RobotName: robotName, + RobotID: robotID, + PartName: partName, + PartID: partID, + ComponentType: componentType, + ComponentName: componentName, + MethodName: method, + MethodParameters: methodParameters, + Tags: tags, + MimeType: mimeType, + } + tabularData = TabularData{ + Data: data, + MetadataIndex: 0, + Metadata: tabularMetadata, + TimeRequested: startTime, + TimeReceived: endTime, + } binaryID = BinaryID{ FileID: "file1", OrganizationID: organizationID, @@ -186,6 +208,10 @@ func createGrpcClient() *inject.DataServiceClient { func createGrpcDataSyncClient() *inject.DataSyncServiceClient { return &inject.DataSyncServiceClient{} } + +// func createMockStreamingDataCaptureUploadClient() *inject.MockStreamingDataCaptureUploadClient { +// return &inject.MockStreamingDataCaptureUploadClient{} +// } func TestDataClient(t *testing.T) { grpcClient := createGrpcClient() client := DataClient{client: grpcClient} @@ -216,21 +242,6 @@ func TestDataClient(t *testing.T) { 
DatasetID: datasetID, } - tabularMetadata := CaptureMetadata{ - OrganizationID: organizationID, - LocationID: locationID, - RobotName: robotName, - RobotID: robotID, - PartName: partName, - PartID: partID, - ComponentType: componentType, - ComponentName: componentName, - MethodName: method, - MethodParameters: methodParameters, - Tags: tags, - MimeType: mimeType, - } - binaryMetadata := BinaryMetadata{ ID: binaryMetaID, CaptureMetadata: tabularMetadata, @@ -256,13 +267,6 @@ func TestDataClient(t *testing.T) { } t.Run("TabularDataByFilter", func(t *testing.T) { - tabularData := TabularData{ - Data: data, - MetadataIndex: 0, - Metadata: tabularMetadata, - TimeRequested: startTime, - TimeReceived: endTime, - } dataStruct, _ := utils.StructToStructPb(data) tabularDataPb := &pb.TabularData{ Data: dataStruct, @@ -614,17 +618,8 @@ func TestDataSyncClient(t *testing.T) { grpcClient := createGrpcDataSyncClient() client := DataClient{dataSyncClient: grpcClient} - uploadMetadata := UploadMetadata{ - PartID: partID, - ComponentType: componentType, - ComponentName: componentName, - MethodName: method, - Type: DataTypeBinarySensor, - FileName: fileName, - MethodParameters: methodParameters, //or map[string]string?? 
- FileExtension: fileExt, - Tags: tags, - } + // mockStream := createMockStreamingDataCaptureUploadClient() + metadata := SensorMetadata{ TimeRequested: startTime, TimeReceived: endTime, @@ -635,7 +630,24 @@ func TestDataSyncClient(t *testing.T) { SDBinary: binaryDataByte, } + tabularSensorData := SensorData{ + Metadata: metadata, + SDStruct: tabularData.Data, + SDBinary: nil, + } + t.Run("BinaryDataCaptureUpload", func(t *testing.T) { + uploadMetadata := UploadMetadata{ + PartID: partID, + ComponentType: componentType, + ComponentName: componentName, + MethodName: method, + Type: DataTypeBinarySensor, + FileName: fileName, + MethodParameters: methodParameters, + FileExtension: fileExt, + Tags: tags, + } grpcClient.DataCaptureUploadFunc = func(ctx context.Context, in *syncPb.DataCaptureUploadRequest, opts ...grpc.CallOption, ) (*syncPb.DataCaptureUploadResponse, error) { @@ -667,18 +679,103 @@ func TestDataSyncClient(t *testing.T) { test.That(t, resp, test.ShouldResemble, fileID) //compare response with regular expected types (fromProto if needed) }) - // t.Run("TabularDataCaptureUpload", func(t *testing.T) { - // grpcClient.DataCaptureUploadFunc = func(ctx context.Context, in *syncPb.DataCaptureUploadRequest, - // opts ...grpc.CallOption, - // ) (*syncPb.DataCaptureUploadResponse, error) { - // //test.That(t, in._, test.ShouldResemble, toProto(something)) //toProto - // return &syncPb.DataCaptureUploadResponse{ - // //fill all variables w prototype-types + t.Run("TabularDataCaptureUpload", func(t *testing.T) { + uploadMetadata := UploadMetadata{ + PartID: partID, + ComponentType: componentType, + ComponentName: componentName, + MethodName: method, + Type: DataTypeTabularSensor, + FileName: fileName, + MethodParameters: methodParameters, + FileExtension: fileExt, + Tags: tags, + } + dataStruct, _ := utils.StructToStructPb(data) + tabularDataPb := &pb.TabularData{ + Data: dataStruct, + MetadataIndex: 0, + TimeRequested: timestamppb.New(startTime), + TimeReceived: 
timestamppb.New(endTime), + } + grpcClient.DataCaptureUploadFunc = func(ctx context.Context, in *syncPb.DataCaptureUploadRequest, + opts ...grpc.CallOption, + ) (*syncPb.DataCaptureUploadResponse, error) { + methodParams, _ := protoutils.ConvertMapToProtoAny(methodParameters) - // }, nil + test.That(t, in.Metadata.PartId, test.ShouldEqual, partID) + test.That(t, in.Metadata.ComponentType, test.ShouldEqual, componentType) + test.That(t, in.Metadata.ComponentName, test.ShouldEqual, componentName) + test.That(t, in.Metadata.MethodName, test.ShouldEqual, method) + test.That(t, in.Metadata.Type, test.ShouldEqual, tabularDataType) + test.That(t, in.Metadata.FileName, test.ShouldEqual, fileName) + test.That(t, in.Metadata.MethodParameters, test.ShouldResemble, methodParams) + test.That(t, in.Metadata.FileExtension, test.ShouldEqual, fileExt) + test.That(t, in.Metadata.Tags, test.ShouldResemble, tags) + + test.That(t, in.SensorContents[0].Metadata.TimeRequested, test.ShouldResemble, timestamppb.New(startTime)) + test.That(t, in.SensorContents[0].Metadata.TimeReceived, test.ShouldResemble, timestamppb.New(endTime)) + // Extract and validate SensorContents[0].Data + dataField, ok := in.SensorContents[0].Data.(*syncPb.SensorData_Struct) + test.That(t, ok, test.ShouldBeTrue) // Ensure the type is correct + test.That(t, dataField.Struct, test.ShouldResemble, tabularDataPb.Data) + return &syncPb.DataCaptureUploadResponse{ + FileId: fileID, + }, nil + } + resp, _ := client.DataCaptureUpload(context.Background(), uploadMetadata, []SensorData{tabularSensorData}) + test.That(t, resp, test.ShouldResemble, fileID) + }) + + // t.Run("StreamingDataCaptureUpload", func(t *testing.T) { + // // Mock implementation of the streaming client. 
+ // mockStream := &inject.MockStreamingDataCaptureUploadClient{ + // SendFunc: func(req *syncPb.StreamingDataCaptureUploadRequest) error { + // switch packet := req.UploadPacket.(type) { + // case *syncPb.StreamingDataCaptureUploadRequest_Metadata: + // // Validate metadata packet. + // meta := packet.Metadata + // test.That(t, meta.UploadMetadata.PartId, test.ShouldEqual, partID) + // test.That(t, meta.UploadMetadata.FileExtension, test.ShouldEqual, "."+fileExt) + // test.That(t, meta.UploadMetadata.ComponentType, test.ShouldEqual, componentType) + // test.That(t, meta.UploadMetadata.ComponentName, test.ShouldEqual, componentName) + // test.That(t, meta.UploadMetadata.MethodName, test.ShouldEqual, method) + // test.That(t, meta.UploadMetadata.Tags, test.ShouldResemble, tags) + // test.That(t, meta.SensorMetadata.TimeRequested, test.ShouldResemble, timestamppb.New(startTime)) + // test.That(t, meta.SensorMetadata.TimeReceived, test.ShouldResemble, timestamppb.New(endTime)) + + // case *syncPb.StreamingDataCaptureUploadRequest_Data: + // // Validate data chunks. + // var chunkIndex int + // UploadChunkSize := 64 * 1024 + // chunk := packet.Data + // expectedChunk := binaryDataByte[chunkIndex*UploadChunkSize : min((chunkIndex+1)*UploadChunkSize, len(data))] + // test.That(t, chunk, test.ShouldResemble, expectedChunk) + // chunkIndex++ + + // default: + // t.Errorf("unexpected packet type: %T", packet) + // } + // return nil + // }, + // CloseAndRecvFunc: func() (*syncPb.StreamingDataCaptureUploadResponse, error) { + // // Validate the final response. + // return &syncPb.StreamingDataCaptureUploadResponse{ + // FileId: fileID, + // }, nil + // }, + // } + + // // Replace the gRPC client with the mock. 
+ // grpcClient.StreamingDataCaptureUploadFunc = func(ctx context.Context, + // opts ...grpc.CallOption, + // ) (syncPb.DataSyncService_StreamingDataCaptureUploadClient, error) { + // return mockStream, nil // } - // resp, _ := client.DataCaptureUpload(context.Background()) //not proto-types, regular types u expect to recieve in the function - // //test.That(t, resp._, test.ShouldResemble, fromProto(something if needed)) //compare response with regular expected types + // // Call the function being tested. + // resp, err := client.StreamingDataCaptureUpload(context.Background(), binaryDataByte, partID, fileExt, componentType, componentName, method, methodParameters, dataRequestTimes, tags) + // test.That(t, err, test.ShouldBeNil) + // test.That(t, resp, test.ShouldEqual, fileID) // }) } diff --git a/testutils/inject/datasync_service_client.go b/testutils/inject/datasync_service_client.go index e61d23c1e95..d8f9add02f8 100644 --- a/testutils/inject/datasync_service_client.go +++ b/testutils/inject/datasync_service_client.go @@ -10,11 +10,11 @@ import ( // DataServiceClient represents a fake instance of a data service client. type DataSyncServiceClient struct { datapb.DataSyncServiceClient - DataCaptureUploadFunc func(ctx context.Context, in *datapb.DataCaptureUploadRequest, + DataCaptureUploadFunc func(ctx context.Context, in *datapb.DataCaptureUploadRequest, opts ...grpc.CallOption) (*datapb.DataCaptureUploadResponse, error) - FileUploadFunc func(ctx context.Context, + FileUploadFunc func(ctx context.Context, opts ...grpc.CallOption) (datapb.DataSyncService_FileUploadClient, error) - StreamingDataCaptureUploadFunc func (ctx context.Context, + StreamingDataCaptureUploadFunc func(ctx context.Context, opts ...grpc.CallOption) (datapb.DataSyncService_StreamingDataCaptureUploadClient, error) } @@ -27,6 +27,7 @@ func (client *DataSyncServiceClient) DataCaptureUpload(ctx context.Context, in * } return client.DataCaptureUploadFunc(ctx, in, opts...) 
} + // FileUpload uploads the contents and metadata for binary (image + file) data, // where the first packet must be the UploadMetadata. func (client *DataSyncServiceClient) FileUpload(ctx context.Context, @@ -37,6 +38,7 @@ func (client *DataSyncServiceClient) FileUpload(ctx context.Context, } return client.FileUploadFunc(ctx, opts...) } + // DataCaptureUpload uploads the contents and metadata for tabular data. func (client *DataSyncServiceClient) StreamingDataCaptureUpload(ctx context.Context, opts ...grpc.CallOption, @@ -46,3 +48,32 @@ func (client *DataSyncServiceClient) StreamingDataCaptureUpload(ctx context.Cont } return client.StreamingDataCaptureUpload(ctx, opts...) } + +type MockStreamingDataCaptureUploadClient struct { + SendFunc func(req *datapb.StreamingDataCaptureUploadRequest) error + CloseAndRecvFunc func() (*datapb.StreamingDataCaptureUploadResponse, error) +} + +func (m *MockStreamingDataCaptureUploadClient) Send(req *datapb.StreamingDataCaptureUploadRequest) error { + return m.SendFunc(req) +} + +func (m *MockStreamingDataCaptureUploadClient) CloseAndRecv() (*datapb.StreamingDataCaptureUploadResponse, error) { + return m.CloseAndRecvFunc() +} + +func (m *MockStreamingDataCaptureUploadClient) Context() context.Context { + return context.Background() +} + +// func (m *MockStreamingDataCaptureUploadClient) Header() (metadata.MD, error) { +// return nil, nil +// } + +// func (m *MockStreamingDataCaptureUploadClient) Trailer() metadata.MD { +// return nil +// } + +func (m *MockStreamingDataCaptureUploadClient) CloseSend() error { + return nil +} From b7c74345fdba18298622efc7d845da75d2071101 Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Tue, 19 Nov 2024 16:18:33 -0500 Subject: [PATCH 13/25] added a mock for the streaming test --- app/data_client_test.go | 100 ++++++++++---------- testutils/inject/datasync_service_client.go | 35 +++---- 2 files changed, 63 insertions(+), 72 deletions(-) diff --git a/app/data_client_test.go b/app/data_client_test.go 
index 9b0f6b0bb92..dd9995bdf40 100644 --- a/app/data_client_test.go +++ b/app/data_client_test.go @@ -727,55 +727,55 @@ func TestDataSyncClient(t *testing.T) { test.That(t, resp, test.ShouldResemble, fileID) }) - // t.Run("StreamingDataCaptureUpload", func(t *testing.T) { - // // Mock implementation of the streaming client. - // mockStream := &inject.MockStreamingDataCaptureUploadClient{ - // SendFunc: func(req *syncPb.StreamingDataCaptureUploadRequest) error { - // switch packet := req.UploadPacket.(type) { - // case *syncPb.StreamingDataCaptureUploadRequest_Metadata: - // // Validate metadata packet. - // meta := packet.Metadata - // test.That(t, meta.UploadMetadata.PartId, test.ShouldEqual, partID) - // test.That(t, meta.UploadMetadata.FileExtension, test.ShouldEqual, "."+fileExt) - // test.That(t, meta.UploadMetadata.ComponentType, test.ShouldEqual, componentType) - // test.That(t, meta.UploadMetadata.ComponentName, test.ShouldEqual, componentName) - // test.That(t, meta.UploadMetadata.MethodName, test.ShouldEqual, method) - // test.That(t, meta.UploadMetadata.Tags, test.ShouldResemble, tags) - // test.That(t, meta.SensorMetadata.TimeRequested, test.ShouldResemble, timestamppb.New(startTime)) - // test.That(t, meta.SensorMetadata.TimeReceived, test.ShouldResemble, timestamppb.New(endTime)) - - // case *syncPb.StreamingDataCaptureUploadRequest_Data: - // // Validate data chunks. - // var chunkIndex int - // UploadChunkSize := 64 * 1024 - // chunk := packet.Data - // expectedChunk := binaryDataByte[chunkIndex*UploadChunkSize : min((chunkIndex+1)*UploadChunkSize, len(data))] - // test.That(t, chunk, test.ShouldResemble, expectedChunk) - // chunkIndex++ - - // default: - // t.Errorf("unexpected packet type: %T", packet) - // } - // return nil - // }, - // CloseAndRecvFunc: func() (*syncPb.StreamingDataCaptureUploadResponse, error) { - // // Validate the final response. 
- // return &syncPb.StreamingDataCaptureUploadResponse{ - // FileId: fileID, - // }, nil - // }, - // } - - // // Replace the gRPC client with the mock. - // grpcClient.StreamingDataCaptureUploadFunc = func(ctx context.Context, - // opts ...grpc.CallOption, - // ) (syncPb.DataSyncService_StreamingDataCaptureUploadClient, error) { - // return mockStream, nil - // } - // // Call the function being tested. - // resp, err := client.StreamingDataCaptureUpload(context.Background(), binaryDataByte, partID, fileExt, componentType, componentName, method, methodParameters, dataRequestTimes, tags) - // test.That(t, err, test.ShouldBeNil) - // test.That(t, resp, test.ShouldEqual, fileID) - // }) + t.Run("StreamingDataCaptureUpload", func(t *testing.T) { + // Mock implementation of the streaming client. + mockStream := &inject.DataSyncService_StreamingDataCaptureUploadClient{ + SendFunc: func(req *syncPb.StreamingDataCaptureUploadRequest) error { + switch packet := req.UploadPacket.(type) { + case *syncPb.StreamingDataCaptureUploadRequest_Metadata: + // Validate metadata packet. + meta := packet.Metadata + test.That(t, meta.UploadMetadata.PartId, test.ShouldEqual, partID) + test.That(t, meta.UploadMetadata.FileExtension, test.ShouldEqual, "."+fileExt) + test.That(t, meta.UploadMetadata.ComponentType, test.ShouldEqual, componentType) + test.That(t, meta.UploadMetadata.ComponentName, test.ShouldEqual, componentName) + test.That(t, meta.UploadMetadata.MethodName, test.ShouldEqual, method) + test.That(t, meta.UploadMetadata.Tags, test.ShouldResemble, tags) + test.That(t, meta.SensorMetadata.TimeRequested, test.ShouldResemble, timestamppb.New(startTime)) + test.That(t, meta.SensorMetadata.TimeReceived, test.ShouldResemble, timestamppb.New(endTime)) + + case *syncPb.StreamingDataCaptureUploadRequest_Data: + // Validate data chunks. 
+ var chunkIndex int + UploadChunkSize := 64 * 1024 + chunk := packet.Data + expectedChunk := binaryDataByte[chunkIndex*UploadChunkSize : min((chunkIndex+1)*UploadChunkSize, len(data))] + test.That(t, chunk, test.ShouldResemble, expectedChunk) + chunkIndex++ + + default: + t.Errorf("unexpected packet type: %T", packet) + } + return nil + }, + CloseAndRecvFunc: func() (*syncPb.StreamingDataCaptureUploadResponse, error) { + // Validate the final response. + return &syncPb.StreamingDataCaptureUploadResponse{ + FileId: fileID, + }, nil + }, + } + + // Replace the gRPC client with the mock. + grpcClient.StreamingDataCaptureUploadFunc = func(ctx context.Context, + opts ...grpc.CallOption, + ) (syncPb.DataSyncService_StreamingDataCaptureUploadClient, error) { + return mockStream, nil + } + // Call the function being tested. + resp, err := client.StreamingDataCaptureUpload(context.Background(), binaryDataByte, partID, fileExt, componentType, componentName, method, methodParameters, dataRequestTimes, tags) + test.That(t, err, test.ShouldBeNil) + test.That(t, resp, test.ShouldEqual, fileID) + }) } diff --git a/testutils/inject/datasync_service_client.go b/testutils/inject/datasync_service_client.go index d8f9add02f8..47aca7c997c 100644 --- a/testutils/inject/datasync_service_client.go +++ b/testutils/inject/datasync_service_client.go @@ -49,31 +49,22 @@ func (client *DataSyncServiceClient) StreamingDataCaptureUpload(ctx context.Cont return client.StreamingDataCaptureUpload(ctx, opts...) 
} -type MockStreamingDataCaptureUploadClient struct { - SendFunc func(req *datapb.StreamingDataCaptureUploadRequest) error +type DataSyncService_StreamingDataCaptureUploadClient struct { + SendFunc func(*datapb.StreamingDataCaptureUploadRequest) error CloseAndRecvFunc func() (*datapb.StreamingDataCaptureUploadResponse, error) + grpc.ClientStream } -func (m *MockStreamingDataCaptureUploadClient) Send(req *datapb.StreamingDataCaptureUploadRequest) error { - return m.SendFunc(req) -} - -func (m *MockStreamingDataCaptureUploadClient) CloseAndRecv() (*datapb.StreamingDataCaptureUploadResponse, error) { - return m.CloseAndRecvFunc() -} - -func (m *MockStreamingDataCaptureUploadClient) Context() context.Context { - return context.Background() +func (c *DataSyncService_StreamingDataCaptureUploadClient) Send(req *datapb.StreamingDataCaptureUploadRequest) error { + if c.SendFunc != nil { + return c.SendFunc(req) + } + return nil } -// func (m *MockStreamingDataCaptureUploadClient) Header() (metadata.MD, error) { -// return nil, nil -// } - -// func (m *MockStreamingDataCaptureUploadClient) Trailer() metadata.MD { -// return nil -// } - -func (m *MockStreamingDataCaptureUploadClient) CloseSend() error { - return nil +func (c *DataSyncService_StreamingDataCaptureUploadClient) CloseAndRecv() (*datapb.StreamingDataCaptureUploadResponse, error) { + if c.CloseAndRecvFunc != nil { + return c.CloseAndRecvFunc() + } + return nil, nil } From 1d48113705ac82a34f14334294f33b6aa7e64380 Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Tue, 19 Nov 2024 17:11:42 -0500 Subject: [PATCH 14/25] changes --- app/data_client_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/data_client_test.go b/app/data_client_test.go index dd9995bdf40..33d052f0eaa 100644 --- a/app/data_client_test.go +++ b/app/data_client_test.go @@ -743,7 +743,7 @@ func TestDataSyncClient(t *testing.T) { test.That(t, meta.UploadMetadata.Tags, test.ShouldResemble, tags) test.That(t, 
meta.SensorMetadata.TimeRequested, test.ShouldResemble, timestamppb.New(startTime)) test.That(t, meta.SensorMetadata.TimeReceived, test.ShouldResemble, timestamppb.New(endTime)) - + case *syncPb.StreamingDataCaptureUploadRequest_Data: // Validate data chunks. var chunkIndex int From 24a735a42f3d01ac82453330fcd7fbb0017d4ecc Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Wed, 20 Nov 2024 15:33:39 -0500 Subject: [PATCH 15/25] more changes and streaming up --- app/data_client.go | 131 +++++++------------- app/data_client_test.go | 57 ++++++--- testutils/inject/datasync_service_client.go | 33 +++-- 3 files changed, 106 insertions(+), 115 deletions(-) diff --git a/app/data_client.go b/app/data_client.go index c737f610bf5..e03cc350ea2 100644 --- a/app/data_client.go +++ b/app/data_client.go @@ -3,7 +3,6 @@ package app import ( "context" - "errors" "fmt" "time" @@ -872,29 +871,32 @@ func sensorContentsToProto(sensorContents []SensorData) []*syncPb.SensorData { } // Helper function to format the file extension. +// +// func formatFileExtension(fileExt string) string { +// if len(fileExt) > 0 && fileExt[0] != '.' { +// return "." + fileExt +// } +// return fileExt +// } func formatFileExtension(fileExt string) string { - if len(fileExt) > 0 && fileExt[0] != '.' { - return "." + fileExt + if fileExt == "" { + return fileExt // Return as-is if empty } - return fileExt + if fileExt[0] == '.' { + return fileExt // Return as-is if already starts with a dot + } + return "." + fileExt // Prepend a dot otherwise } func (d *DataClient) BinaryDataCaptureUpload( ctx context.Context, + mdOptions *UploadMetadata, + // smdOptions *SensorMetadata, //this is just the dataRequest times....??? should they be passing in something else instead...? 
binaryData []byte, - partID string, - componentType string, - componentName string, - methodName string, - fileName string, //not in python - methodParameters map[string]interface{}, - fileExtension string, - tags []string, //this is the last thing apart of uploadmetadata dataRequestTimes [2]time.Time, // Assuming two time values, [0] is timeRequested, [1] is timeReceived ) (string, error) { // Validate file extension - //**need to look into this! - fileExtension = formatFileExtension(fileExtension) + mdOptions.FileExtension = formatFileExtension(mdOptions.FileExtension) // Create SensorMetadata based on the provided times var sensorMetadata SensorMetadata if len(dataRequestTimes) == 2 { //can i have a better check here? like if dataRequestTimes != [2]time.Time{} @@ -903,25 +905,15 @@ func (d *DataClient) BinaryDataCaptureUpload( TimeReceived: dataRequestTimes[1], } } + // Create SensorData sensorData := SensorData{ Metadata: sensorMetadata, SDStruct: nil, // Assuming no struct is needed for binary data SDBinary: binaryData, // Attach the binary data } - // Create UploadMetadata - metadata := UploadMetadata{ - PartID: partID, - ComponentType: componentType, - ComponentName: componentName, - MethodName: methodName, - Type: DataTypeBinarySensor, // assuming this is the correct type?? 
- FileName: fileName, - MethodParameters: methodParameters, - FileExtension: fileExtension, - Tags: tags, - } - response, err := d.DataCaptureUpload(ctx, metadata, []SensorData{sensorData}) + + response, err := d.DataCaptureUpload(ctx, *mdOptions, []SensorData{sensorData}) if err != nil { return "", err } @@ -930,20 +922,15 @@ func (d *DataClient) BinaryDataCaptureUpload( func (d *DataClient) tabularDataCaptureUpload( ctx context.Context, + mdOptions *UploadMetadata, + // smdOptions *SensorMetadata, tabularData []map[string]interface{}, - partID string, - componentType string, - componentName string, - methodName string, - fileName string, - methodParameters map[string]interface{}, - fileExtension string, - tags []string, dataRequestTimes [][2]time.Time, // Assuming two time values, [0] is timeRequested, [1] is timeReceived ) (string, error) { - fileExtension = formatFileExtension(fileExtension) + mdOptions.FileExtension = formatFileExtension(mdOptions.FileExtension) if len(dataRequestTimes) != len(tabularData) { - errors.New("dataRequestTimes and tabularData lengths must be equal") + // errors.New("dataRequestTimes and tabularData lengths must be equal") + return "", fmt.Errorf("dataRequestTimes and tabularData lengths must be equal") } var sensorContents []SensorData // Iterate through the tabular data @@ -963,19 +950,7 @@ func (d *DataClient) tabularDataCaptureUpload( sensorContents = append(sensorContents, sensorData) } - // Create UploadMetadata - metadata := UploadMetadata{ - PartID: partID, - ComponentType: componentType, - ComponentName: componentName, - MethodName: methodName, - Type: DataTypeTabularSensor, // assuming this is the correct type?? 
- FileName: fileName, - MethodParameters: methodParameters, - FileExtension: fileExtension, - Tags: tags, - } - response, err := d.DataCaptureUpload(ctx, metadata, sensorContents) + response, err := d.DataCaptureUpload(ctx, *mdOptions, sensorContents) if err != nil { return "", err } @@ -1016,53 +991,32 @@ func (d *DataClient) FileUploadFromPath(ctx context.Context) error { } // StreamingDataCaptureUpload uploads metadata and streaming binary data in chunks. +//pass in a pointer to a struct +//pointer to dataTime array -->alr a pointer +//zero value of all arraays is nil --> so we can just have it stay nil and be okay?? +//strings ,arrays, maps we can just assume they r passing nil if its not filled and pass that too --> pass that along + func (d *DataClient) StreamingDataCaptureUpload( ctx context.Context, - data []byte, - partID string, - fileExt string, - componentType string, - componentName string, - methodName string, - methodParameters map[string]interface{}, - dataRequestTimes [2]time.Time, - tags []string, + mdOptions *UploadMetadata, + sdOptions *SensorData, ) (string, error) { - UploadChunkSize := 64 * 1024 + UploadChunkSize := 64 * 1024 //64 KB in bytes // create metadata for the upload. - methodParams, _ := protoutils.ConvertMapToProtoAny(methodParameters) - uploadMetadataPb := &syncPb.UploadMetadata{ - PartId: partID, - ComponentType: componentType, - ComponentName: componentName, - MethodName: methodName, - FileExtension: formatFileExtension(fileExt), - Type: syncPb.DataType_DATA_TYPE_BINARY_SENSOR, - MethodParameters: methodParams, - Tags: tags, - } - - // handle data request times. 
- var sensorMetadataPb *syncPb.SensorMetadata - if len(dataRequestTimes) == 2 { - sensorMetadataPb = &syncPb.SensorMetadata{ - TimeRequested: timestamppb.New(dataRequestTimes[0]), - TimeReceived: timestamppb.New(dataRequestTimes[1]), - } - } - + uploadMetadataPb := uploadMetadataToProto(*mdOptions) //dereference the pointer to pass the value instead + uploadMetadataPb.Type = syncPb.DataType_DATA_TYPE_BINARY_SENSOR + // handle data request times with SensorMetadata. + sensorMetadataPb := sensorMetadataToProto(sdOptions.Metadata) // create the DataCaptureUploadMetadata. metadata := &syncPb.DataCaptureUploadMetadata{ UploadMetadata: uploadMetadataPb, SensorMetadata: sensorMetadataPb, } - // establish a streaming connection. stream, err := d.dataSyncClient.StreamingDataCaptureUpload(ctx) if err != nil { return "", fmt.Errorf("failed to establish streaming connection: %w", err) } - // send the metadata as the first packet. metaReq := &syncPb.StreamingDataCaptureUploadRequest{ UploadPacket: &syncPb.StreamingDataCaptureUploadRequest_Metadata{ Metadata: metadata, }, } if err := stream.Send(metaReq); err != nil { @@ -1074,13 +1028,16 @@ func (d *DataClient) StreamingDataCaptureUpload( } // send the binary data in chunks. 
- for start := 0; start < len(data); start += UploadChunkSize { + for start := 0; start < len(sdOptions.SDBinary); start += UploadChunkSize { + //loop thry the data array starting at index 0, in each iteration start index increases by UploadChunkSize + //the loop continues until start reaches or exceeds the length of the data array end := start + UploadChunkSize - if end > len(data) { - end = len(data) + //this calculates the end index for the chunk, it is simply the start index plys the upload chunk size + if end > len(sdOptions.SDBinary) { + end = len(sdOptions.SDBinary) } - chunk := data[start:end] + chunk := sdOptions.SDBinary[start:end] dataReq := &syncPb.StreamingDataCaptureUploadRequest{ UploadPacket: &syncPb.StreamingDataCaptureUploadRequest_Data{ Data: chunk, diff --git a/app/data_client_test.go b/app/data_client_test.go index 33d052f0eaa..2aa6024bee6 100644 --- a/app/data_client_test.go +++ b/app/data_client_test.go @@ -2,6 +2,7 @@ package app import ( "context" + "fmt" "testing" "time" @@ -34,7 +35,7 @@ const ( bboxLabel = "bbox_label" tag = "tag" fileName = "file_name" - fileExt = "file_ext.ext" + fileExt = ".ext" datasetID = "dataset_id" binaryMetaID = "binary_id" mongodbURI = "mongo_uri" @@ -630,11 +631,11 @@ func TestDataSyncClient(t *testing.T) { SDBinary: binaryDataByte, } - tabularSensorData := SensorData{ - Metadata: metadata, - SDStruct: tabularData.Data, - SDBinary: nil, - } + // tabularSensorData := SensorData{ + // Metadata: metadata, + // SDStruct: tabularData.Data, + // SDBinary: nil, + // } t.Run("BinaryDataCaptureUpload", func(t *testing.T) { uploadMetadata := UploadMetadata{ @@ -675,8 +676,9 @@ func TestDataSyncClient(t *testing.T) { FileId: fileID, }, nil } - resp, _ := client.DataCaptureUpload(context.Background(), uploadMetadata, []SensorData{binarySensorData}) //not proto-types, regular types u expect to recieve in the function - test.That(t, resp, test.ShouldResemble, fileID) //compare response with regular expected types (fromProto 
if needed) + // resp, _ := client.DataCaptureUpload(context.Background(), uploadMetadata, []SensorData{binarySensorData}) //not proto-types, regular types u expect to recieve in the function + resp, _ := client.BinaryDataCaptureUpload(context.Background(), &uploadMetadata, binaryDataByte, dataRequestTimes) + test.That(t, resp, test.ShouldResemble, fileID) //compare response with regular expected types (fromProto if needed) }) t.Run("TabularDataCaptureUpload", func(t *testing.T) { @@ -723,36 +725,50 @@ func TestDataSyncClient(t *testing.T) { FileId: fileID, }, nil } - resp, _ := client.DataCaptureUpload(context.Background(), uploadMetadata, []SensorData{tabularSensorData}) + // Convert `tabularDataPb.Data` to the expected input format for `tabularDataCaptureUpload` + tabularData := []map[string]interface{}{data} + // Provide corresponding request times + dataRequestTimes := [][2]time.Time{ + {startTime, endTime}, + } + + // resp, _ := client.DataCaptureUpload(context.Background(), uploadMetadata, []SensorData{tabularSensorData}) + resp, _ := client.tabularDataCaptureUpload(context.Background(), &uploadMetadata, tabularData, dataRequestTimes) test.That(t, resp, test.ShouldResemble, fileID) }) t.Run("StreamingDataCaptureUpload", func(t *testing.T) { + uploadMetadata := UploadMetadata{ + PartID: partID, + ComponentType: componentType, + ComponentName: componentName, + MethodName: method, + Type: DataTypeBinarySensor, + FileName: fileName, + MethodParameters: methodParameters, + FileExtension: fileExt, + Tags: tags, + } + // Mock implementation of the streaming client. mockStream := &inject.DataSyncService_StreamingDataCaptureUploadClient{ SendFunc: func(req *syncPb.StreamingDataCaptureUploadRequest) error { + fmt.Printf("Received packet type: %T\n", req.UploadPacket) switch packet := req.UploadPacket.(type) { case *syncPb.StreamingDataCaptureUploadRequest_Metadata: // Validate metadata packet. 
meta := packet.Metadata test.That(t, meta.UploadMetadata.PartId, test.ShouldEqual, partID) - test.That(t, meta.UploadMetadata.FileExtension, test.ShouldEqual, "."+fileExt) + test.That(t, meta.UploadMetadata.FileExtension, test.ShouldEqual, fileExt) test.That(t, meta.UploadMetadata.ComponentType, test.ShouldEqual, componentType) test.That(t, meta.UploadMetadata.ComponentName, test.ShouldEqual, componentName) test.That(t, meta.UploadMetadata.MethodName, test.ShouldEqual, method) test.That(t, meta.UploadMetadata.Tags, test.ShouldResemble, tags) test.That(t, meta.SensorMetadata.TimeRequested, test.ShouldResemble, timestamppb.New(startTime)) test.That(t, meta.SensorMetadata.TimeReceived, test.ShouldResemble, timestamppb.New(endTime)) - case *syncPb.StreamingDataCaptureUploadRequest_Data: - // Validate data chunks. - var chunkIndex int - UploadChunkSize := 64 * 1024 - chunk := packet.Data - expectedChunk := binaryDataByte[chunkIndex*UploadChunkSize : min((chunkIndex+1)*UploadChunkSize, len(data))] - test.That(t, chunk, test.ShouldResemble, expectedChunk) - chunkIndex++ - + // Validate data packet. + test.That(t, packet.Data, test.ShouldResemble, binaryDataByte) default: t.Errorf("unexpected packet type: %T", packet) } @@ -773,7 +789,8 @@ func TestDataSyncClient(t *testing.T) { return mockStream, nil } // Call the function being tested. 
- resp, err := client.StreamingDataCaptureUpload(context.Background(), binaryDataByte, partID, fileExt, componentType, componentName, method, methodParameters, dataRequestTimes, tags) + // resp, err := client.StreamingDataCaptureUpload(context.Background(), binaryDataByte, partID, fileExt, componentType, componentName, method, methodParameters, dataRequestTimes, tags) + resp, err := client.StreamingDataCaptureUpload(context.Background(), &uploadMetadata, &binarySensorData) test.That(t, err, test.ShouldBeNil) test.That(t, resp, test.ShouldEqual, fileID) }) diff --git a/testutils/inject/datasync_service_client.go b/testutils/inject/datasync_service_client.go index 47aca7c997c..3c43c032318 100644 --- a/testutils/inject/datasync_service_client.go +++ b/testutils/inject/datasync_service_client.go @@ -46,25 +46,42 @@ func (client *DataSyncServiceClient) StreamingDataCaptureUpload(ctx context.Cont if client.StreamingDataCaptureUploadFunc == nil { return client.DataSyncServiceClient.StreamingDataCaptureUpload(ctx, opts...) } - return client.StreamingDataCaptureUpload(ctx, opts...) + return client.StreamingDataCaptureUploadFunc(ctx, opts...) 
} type DataSyncService_StreamingDataCaptureUploadClient struct { + datapb.DataSyncService_StreamingDataCaptureUploadClient SendFunc func(*datapb.StreamingDataCaptureUploadRequest) error CloseAndRecvFunc func() (*datapb.StreamingDataCaptureUploadResponse, error) - grpc.ClientStream + // grpc.ClientStream } func (c *DataSyncService_StreamingDataCaptureUploadClient) Send(req *datapb.StreamingDataCaptureUploadRequest) error { - if c.SendFunc != nil { - return c.SendFunc(req) + if c.SendFunc == nil { + return c.DataSyncService_StreamingDataCaptureUploadClient.Send(req) } - return nil + //test that the data we send is equal to what we expect + return c.SendFunc(req) } func (c *DataSyncService_StreamingDataCaptureUploadClient) CloseAndRecv() (*datapb.StreamingDataCaptureUploadResponse, error) { - if c.CloseAndRecvFunc != nil { - return c.CloseAndRecvFunc() + if c.CloseAndRecvFunc == nil { + return c.DataSyncService_StreamingDataCaptureUploadClient.CloseAndRecv() } - return nil, nil + return c.CloseAndRecvFunc() + } + +// func (c *DataSyncService_StreamingDataCaptureUploadClient) Send(req *datapb.StreamingDataCaptureUploadRequest) error { +// if c.SendFunc != nil { +// return c.SendFunc(req) +// } +// return nil +// } + +// func (c *DataSyncService_StreamingDataCaptureUploadClient) CloseAndRecv() (*datapb.StreamingDataCaptureUploadResponse, error) { +// if c.CloseAndRecvFunc != nil { +// return c.CloseAndRecvFunc() +// } +// return nil, nil +// } From e048d3e7a94d6a58ff674b1135b3a654bf746861 Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Wed, 20 Nov 2024 15:47:50 -0500 Subject: [PATCH 16/25] clean ups --- app/data_client.go | 60 ++++++++----------- app/data_client_test.go | 65 +++++---------------- testutils/inject/datasync_service_client.go | 14 ----- 3 files changed, 37 insertions(+), 102 deletions(-) diff --git a/app/data_client.go b/app/data_client.go index e03cc350ea2..d5f25c83c7e 100644 --- a/app/data_client.go +++ b/app/data_client.go @@ -871,13 +871,6 @@ func 
sensorContentsToProto(sensorContents []SensorData) []*syncPb.SensorData { } // Helper function to format the file extension. -// -// func formatFileExtension(fileExt string) string { -// if len(fileExt) > 0 && fileExt[0] != '.' { -// return "." + fileExt -// } -// return fileExt -// } func formatFileExtension(fileExt string) string { if fileExt == "" { return fileExt // Return as-is if empty @@ -891,28 +884,25 @@ func formatFileExtension(fileExt string) string { func (d *DataClient) BinaryDataCaptureUpload( ctx context.Context, mdOptions *UploadMetadata, - // smdOptions *SensorMetadata, //this is just the dataRequest times....??? should they be passing in something else instead...? binaryData []byte, - dataRequestTimes [2]time.Time, // Assuming two time values, [0] is timeRequested, [1] is timeReceived + dataRequestTimes [2]time.Time, ) (string, error) { // Validate file extension mdOptions.FileExtension = formatFileExtension(mdOptions.FileExtension) // Create SensorMetadata based on the provided times var sensorMetadata SensorMetadata - if len(dataRequestTimes) == 2 { //can i have a better check here? 
like if dataRequestTimes != [2]time.Time{} + if len(dataRequestTimes) == 2 { sensorMetadata = SensorMetadata{ TimeRequested: dataRequestTimes[0], TimeReceived: dataRequestTimes[1], } } - // Create SensorData sensorData := SensorData{ Metadata: sensorMetadata, SDStruct: nil, // Assuming no struct is needed for binary data SDBinary: binaryData, // Attach the binary data } - response, err := d.DataCaptureUpload(ctx, *mdOptions, []SensorData{sensorData}) if err != nil { return "", err @@ -923,13 +913,11 @@ func (d *DataClient) BinaryDataCaptureUpload( func (d *DataClient) tabularDataCaptureUpload( ctx context.Context, mdOptions *UploadMetadata, - // smdOptions *SensorMetadata, tabularData []map[string]interface{}, - dataRequestTimes [][2]time.Time, // Assuming two time values, [0] is timeRequested, [1] is timeReceived + dataRequestTimes [][2]time.Time, ) (string, error) { mdOptions.FileExtension = formatFileExtension(mdOptions.FileExtension) if len(dataRequestTimes) != len(tabularData) { - // errors.New("dataRequestTimes and tabularData lengths must be equal") return "", fmt.Errorf("dataRequestTimes and tabularData lengths must be equal") } var sensorContents []SensorData @@ -949,7 +937,6 @@ func (d *DataClient) tabularDataCaptureUpload( } sensorContents = append(sensorContents, sensorData) } - response, err := d.DataCaptureUpload(ctx, *mdOptions, sensorContents) if err != nil { return "", err @@ -970,26 +957,6 @@ func (d *DataClient) DataCaptureUpload(ctx context.Context, metadata UploadMetad return resp.FileId, nil } -// FileUpload uploads the contents and metadata for binary (image + file) data, -// where the first packet must be the UploadMetadata. 
-func (d *DataClient) FileUpload(ctx context.Context) error { - // resp, err := d.dataSyncClient.FileUpload(ctx, &pb.FileUploadRequest{}) - // if err != nil { - // return err - // } - return nil -} - -// FileUpload uploads the contents and metadata for binary (image + file) data, -// where the first packet must be the UploadMetadata. -func (d *DataClient) FileUploadFromPath(ctx context.Context) error { - // resp, err := d.client.FileUpload(ctx, &pb.FileUploadRequest{}) - // if err != nil { - // return err - // } - return nil -} - // StreamingDataCaptureUpload uploads metadata and streaming binary data in chunks. //pass in a pointer to a struct //pointer to dataTime array -->alr a pointer @@ -1002,7 +969,6 @@ func (d *DataClient) StreamingDataCaptureUpload( sdOptions *SensorData, ) (string, error) { UploadChunkSize := 64 * 1024 //64 KB in bytes - // create metadata for the upload. uploadMetadataPb := uploadMetadataToProto(*mdOptions) //derefernce the pointer to pass the value instead uploadMetadataPb.Type = syncPb.DataType_DATA_TYPE_BINARY_SENSOR // handle data request times w sensormetadata. @@ -1061,3 +1027,23 @@ func (d *DataClient) StreamingDataCaptureUpload( } return resp.FileId, nil } + +// FileUpload uploads the contents and metadata for binary (image + file) data, +// where the first packet must be the UploadMetadata. +func (d *DataClient) FileUpload(ctx context.Context) error { + // resp, err := d.dataSyncClient.FileUpload(ctx, &pb.FileUploadRequest{}) + // if err != nil { + // return err + // } + return nil +} + +// FileUpload uploads the contents and metadata for binary (image + file) data, +// where the first packet must be the UploadMetadata. 
+func (d *DataClient) FileUploadFromPath(ctx context.Context) error { + // resp, err := d.client.FileUpload(ctx, &pb.FileUploadRequest{}) + // if err != nil { + // return err + // } + return nil +} \ No newline at end of file diff --git a/app/data_client_test.go b/app/data_client_test.go index 2aa6024bee6..4a51d47183f 100644 --- a/app/data_client_test.go +++ b/app/data_client_test.go @@ -210,9 +210,6 @@ func createGrpcDataSyncClient() *inject.DataSyncServiceClient { return &inject.DataSyncServiceClient{} } -// func createMockStreamingDataCaptureUploadClient() *inject.MockStreamingDataCaptureUploadClient { -// return &inject.MockStreamingDataCaptureUploadClient{} -// } func TestDataClient(t *testing.T) { grpcClient := createGrpcClient() client := DataClient{client: grpcClient} @@ -619,26 +616,7 @@ func TestDataSyncClient(t *testing.T) { grpcClient := createGrpcDataSyncClient() client := DataClient{dataSyncClient: grpcClient} - // mockStream := createMockStreamingDataCaptureUploadClient() - - metadata := SensorMetadata{ - TimeRequested: startTime, - TimeReceived: endTime, - } - binarySensorData := SensorData{ - Metadata: metadata, - SDStruct: nil, - SDBinary: binaryDataByte, - } - - // tabularSensorData := SensorData{ - // Metadata: metadata, - // SDStruct: tabularData.Data, - // SDBinary: nil, - // } - - t.Run("BinaryDataCaptureUpload", func(t *testing.T) { - uploadMetadata := UploadMetadata{ + uploadMetadata := UploadMetadata{ PartID: partID, ComponentType: componentType, ComponentName: componentName, @@ -649,10 +627,12 @@ func TestDataSyncClient(t *testing.T) { FileExtension: fileExt, Tags: tags, } + + t.Run("BinaryDataCaptureUpload", func(t *testing.T) { + uploadMetadata.Type = DataTypeBinarySensor grpcClient.DataCaptureUploadFunc = func(ctx context.Context, in *syncPb.DataCaptureUploadRequest, opts ...grpc.CallOption, ) (*syncPb.DataCaptureUploadResponse, error) { - // expectedBinaryData := binaryDataByte --> just a note for myself for now methodParams, _ := 
protoutils.ConvertMapToProtoAny(methodParameters) test.That(t, in.Metadata.PartId, test.ShouldEqual, partID) @@ -676,23 +656,12 @@ func TestDataSyncClient(t *testing.T) { FileId: fileID, }, nil } - // resp, _ := client.DataCaptureUpload(context.Background(), uploadMetadata, []SensorData{binarySensorData}) //not proto-types, regular types u expect to recieve in the function resp, _ := client.BinaryDataCaptureUpload(context.Background(), &uploadMetadata, binaryDataByte, dataRequestTimes) test.That(t, resp, test.ShouldResemble, fileID) //compare response with regular expected types (fromProto if needed) }) t.Run("TabularDataCaptureUpload", func(t *testing.T) { - uploadMetadata := UploadMetadata{ - PartID: partID, - ComponentType: componentType, - ComponentName: componentName, - MethodName: method, - Type: DataTypeTabularSensor, - FileName: fileName, - MethodParameters: methodParameters, - FileExtension: fileExt, - Tags: tags, - } + uploadMetadata.Type = DataTypeTabularSensor dataStruct, _ := utils.StructToStructPb(data) tabularDataPb := &pb.TabularData{ Data: dataStruct, @@ -731,25 +700,21 @@ func TestDataSyncClient(t *testing.T) { dataRequestTimes := [][2]time.Time{ {startTime, endTime}, } - - // resp, _ := client.DataCaptureUpload(context.Background(), uploadMetadata, []SensorData{tabularSensorData}) resp, _ := client.tabularDataCaptureUpload(context.Background(), &uploadMetadata, tabularData, dataRequestTimes) test.That(t, resp, test.ShouldResemble, fileID) }) t.Run("StreamingDataCaptureUpload", func(t *testing.T) { - uploadMetadata := UploadMetadata{ - PartID: partID, - ComponentType: componentType, - ComponentName: componentName, - MethodName: method, - Type: DataTypeBinarySensor, - FileName: fileName, - MethodParameters: methodParameters, - FileExtension: fileExt, - Tags: tags, + metadata := SensorMetadata{ + TimeRequested: startTime, + TimeReceived: endTime, } - + binarySensorData := SensorData{ + Metadata: metadata, + SDStruct: nil, + SDBinary: binaryDataByte, 
+ } + // Mock implementation of the streaming client. mockStream := &inject.DataSyncService_StreamingDataCaptureUploadClient{ SendFunc: func(req *syncPb.StreamingDataCaptureUploadRequest) error { @@ -781,7 +746,6 @@ func TestDataSyncClient(t *testing.T) { }, nil }, } - // Replace the gRPC client with the mock. grpcClient.StreamingDataCaptureUploadFunc = func(ctx context.Context, opts ...grpc.CallOption, @@ -789,7 +753,6 @@ func TestDataSyncClient(t *testing.T) { return mockStream, nil } // Call the function being tested. - // resp, err := client.StreamingDataCaptureUpload(context.Background(), binaryDataByte, partID, fileExt, componentType, componentName, method, methodParameters, dataRequestTimes, tags) resp, err := client.StreamingDataCaptureUpload(context.Background(), &uploadMetadata, &binarySensorData) test.That(t, err, test.ShouldBeNil) test.That(t, resp, test.ShouldEqual, fileID) diff --git a/testutils/inject/datasync_service_client.go b/testutils/inject/datasync_service_client.go index 3c43c032318..9fb747d2fd5 100644 --- a/testutils/inject/datasync_service_client.go +++ b/testutils/inject/datasync_service_client.go @@ -71,17 +71,3 @@ func (c *DataSyncService_StreamingDataCaptureUploadClient) CloseAndRecv() (*data return c.CloseAndRecvFunc() } - -// func (c *DataSyncService_StreamingDataCaptureUploadClient) Send(req *datapb.StreamingDataCaptureUploadRequest) error { -// if c.SendFunc != nil { -// return c.SendFunc(req) -// } -// return nil -// } - -// func (c *DataSyncService_StreamingDataCaptureUploadClient) CloseAndRecv() (*datapb.StreamingDataCaptureUploadResponse, error) { -// if c.CloseAndRecvFunc != nil { -// return c.CloseAndRecvFunc() -// } -// return nil, nil -// } From 58d343da5bdf1a63892be50a12b216a262860d66 Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Wed, 20 Nov 2024 15:55:33 -0500 Subject: [PATCH 17/25] remove small fixes --- app/data_client.go | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git 
a/app/data_client.go b/app/data_client.go index d5f25c83c7e..4e7e5aa76b8 100644 --- a/app/data_client.go +++ b/app/data_client.go @@ -193,7 +193,6 @@ type SensorMetadata struct { type SensorData struct { //this is what can be filled by either tabular or binary data!! Metadata SensorMetadata - //its one of, either binary or tabular ==> this needs help SDStruct map[string]interface{} //or should it be TabularData.data ?? SDBinary []byte } @@ -210,7 +209,7 @@ type MimeType int32 const ( MimeTypeUnspecified MimeType = iota - MimeTypeJPEG //can i name things this??? + MimeTypeJPEG MimeTypePNG MimeTypePCD ) @@ -222,7 +221,7 @@ type UploadMetadata struct { MethodName string Type DataType FileName string - MethodParameters map[string]interface{} //or map[string]string?? + MethodParameters map[string]interface{} FileExtension string Tags []string } @@ -873,12 +872,12 @@ func sensorContentsToProto(sensorContents []SensorData) []*syncPb.SensorData { // Helper function to format the file extension. func formatFileExtension(fileExt string) string { if fileExt == "" { - return fileExt // Return as-is if empty + return fileExt } if fileExt[0] == '.' { - return fileExt // Return as-is if already starts with a dot + return fileExt } - return "." + fileExt // Prepend a dot otherwise + return "." + fileExt } func (d *DataClient) BinaryDataCaptureUpload( @@ -968,7 +967,7 @@ func (d *DataClient) StreamingDataCaptureUpload( mdOptions *UploadMetadata, sdOptions *SensorData, ) (string, error) { - UploadChunkSize := 64 * 1024 //64 KB in bytes + UploadChunkSize := 64 * 1024 //64 KB in bytes uploadMetadataPb := uploadMetadataToProto(*mdOptions) //derefernce the pointer to pass the value instead uploadMetadataPb.Type = syncPb.DataType_DATA_TYPE_BINARY_SENSOR // handle data request times w sensormetadata. 
@@ -1046,4 +1045,4 @@ func (d *DataClient) FileUploadFromPath(ctx context.Context) error { // return err // } return nil -} \ No newline at end of file +} From f04f1b1cf7c6b1209083892b0afd050f59f1a62c Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Wed, 20 Nov 2024 16:59:35 -0500 Subject: [PATCH 18/25] change to optional structs for datasync alr done --- app/data_client.go | 130 +++++++++++++++++++++++++++++++++------- app/data_client_test.go | 66 +++++++++++++------- 2 files changed, 153 insertions(+), 43 deletions(-) diff --git a/app/data_client.go b/app/data_client.go index 4e7e5aa76b8..df18d7481fb 100644 --- a/app/data_client.go +++ b/app/data_client.go @@ -880,20 +880,30 @@ func formatFileExtension(fileExt string) string { return "." + fileExt } +type BinaryOptions struct { + Type DataType + FileName string + MethodParameters map[string]interface{} + Tags []string + DataRequestTimes [2]time.Time +} + func (d *DataClient) BinaryDataCaptureUpload( ctx context.Context, - mdOptions *UploadMetadata, binaryData []byte, - dataRequestTimes [2]time.Time, + partID string, + componentType string, + componentName string, + methodName string, + fileExtension string, + options *BinaryOptions, ) (string, error) { - // Validate file extension - mdOptions.FileExtension = formatFileExtension(mdOptions.FileExtension) // Create SensorMetadata based on the provided times var sensorMetadata SensorMetadata - if len(dataRequestTimes) == 2 { + if len(options.DataRequestTimes) == 2 { sensorMetadata = SensorMetadata{ - TimeRequested: dataRequestTimes[0], - TimeReceived: dataRequestTimes[1], + TimeRequested: options.DataRequestTimes[0], + TimeReceived: options.DataRequestTimes[1], } } // Create SensorData @@ -902,20 +912,44 @@ func (d *DataClient) BinaryDataCaptureUpload( SDStruct: nil, // Assuming no struct is needed for binary data SDBinary: binaryData, // Attach the binary data } - response, err := d.DataCaptureUpload(ctx, *mdOptions, []SensorData{sensorData}) + // Create 
UploadMetadata + metadata := UploadMetadata{ + PartID: partID, + ComponentType: componentType, + ComponentName: componentName, + MethodName: methodName, + Type: DataTypeBinarySensor, + FileName: options.FileName, + MethodParameters: options.MethodParameters, + FileExtension: formatFileExtension(fileExtension), // Validate file extension + Tags: options.Tags, + } + response, err := d.DataCaptureUpload(ctx, metadata, []SensorData{sensorData}) if err != nil { return "", err } return response, nil } +type TabularOptions struct { + Type DataType + FileName string + MethodParameters map[string]interface{} + FileExtension string + Tags []string +} + func (d *DataClient) tabularDataCaptureUpload( ctx context.Context, - mdOptions *UploadMetadata, tabularData []map[string]interface{}, - dataRequestTimes [][2]time.Time, + partID string, + componentType string, + componentName string, + methodName string, + dataRequestTimes [][2]time.Time, //part of sensorData + options *TabularOptions, ) (string, error) { - mdOptions.FileExtension = formatFileExtension(mdOptions.FileExtension) + // options.FileExtension = formatFileExtension(options.FileExtension) if len(dataRequestTimes) != len(tabularData) { return "", fmt.Errorf("dataRequestTimes and tabularData lengths must be equal") } @@ -936,7 +970,19 @@ func (d *DataClient) tabularDataCaptureUpload( } sensorContents = append(sensorContents, sensorData) } - response, err := d.DataCaptureUpload(ctx, *mdOptions, sensorContents) + // Create UploadMetadata + metadata := UploadMetadata{ + PartID: partID, + ComponentType: componentType, + ComponentName: componentName, + MethodName: methodName, + Type: DataTypeTabularSensor, // assuming this is the correct type?? 
+ FileName: options.FileName, + MethodParameters: options.MethodParameters, + FileExtension: formatFileExtension(options.FileExtension), + Tags: options.Tags, + } + response, err := d.DataCaptureUpload(ctx, metadata, sensorContents) if err != nil { return "", err } @@ -962,16 +1008,56 @@ func (d *DataClient) DataCaptureUpload(ctx context.Context, metadata UploadMetad //zero value of all arraays is nil --> so we can just have it stay nil and be okay?? //strings ,arrays, maps we can just assume they r passing nil if its not filled and pass that too --> pass that along +// func (d *DataClient) DoThing(ctx, nonOptional Foo, nonOptional2 Bar, optional DoThingOptionsStruct) ==> DO THIS +// func (d *DataClient) DoThing2(ctx, nonOptional Foo, nonOptional2 Bar, metadata MetadataOptions, md2 Metadata2Options) ==> DO NOT DO THIS! +/* + + */ + +// optional parameters for the StreamingDataCaptureUpload function +type StreamingOptions struct { + ComponentType string + ComponentName string + MethodName string + Type DataType + FileName string + MethodParameters map[string]interface{} + Tags []string + DataRequestTimes [2]time.Time +} + func (d *DataClient) StreamingDataCaptureUpload( ctx context.Context, - mdOptions *UploadMetadata, - sdOptions *SensorData, + data []byte, //data in bytes (so similar to binarydataCap)...the rest below are for dataCaptureUploadMetadata + partID string, //uploadmetadata + fileExt string, //uploadmetadata + options *StreamingOptions, ) (string, error) { - UploadChunkSize := 64 * 1024 //64 KB in bytes - uploadMetadataPb := uploadMetadataToProto(*mdOptions) //derefernce the pointer to pass the value instead - uploadMetadataPb.Type = syncPb.DataType_DATA_TYPE_BINARY_SENSOR + UploadChunkSize := 64 * 1024 //64 KB in bytes + // Prepare UploadMetadata + uploadMetadata := UploadMetadata{ + PartID: partID, + ComponentType: options.ComponentType, + ComponentName: options.ComponentName, + MethodName: options.MethodName, + Type: DataTypeBinarySensor, // assuming 
this is the correct type?? + FileName: options.FileName, + MethodParameters: options.MethodParameters, + FileExtension: fileExt, + Tags: options.Tags, + } + uploadMetadataPb := uploadMetadataToProto(uploadMetadata) //derefernce the pointer to pass the value instead + // uploadMetadataPb.Type = syncPb.DataType_DATA_TYPE_BINARY_SENSOR // handle data request times w sensormetadata. - sensorMetadataPb := sensorMetadataToProto(sdOptions.Metadata) + // Create SensorMetadata based on the provided times + var sensorMetadata SensorMetadata + if len(options.DataRequestTimes) == 2 { //can i have a better check here? like if dataRequestTimes != [2]time.Time{} + sensorMetadata = SensorMetadata{ + TimeRequested: options.DataRequestTimes[0], + TimeReceived: options.DataRequestTimes[1], + } + } + sensorMetadataPb := sensorMetadataToProto(sensorMetadata) // create the DataCaptureUploadMetadata. metadata := &syncPb.DataCaptureUploadMetadata{ UploadMetadata: uploadMetadataPb, @@ -993,16 +1079,16 @@ func (d *DataClient) StreamingDataCaptureUpload( } // send the binary data in chunks. 
- for start := 0; start < len(sdOptions.SDBinary); start += UploadChunkSize { + for start := 0; start < len(data); start += UploadChunkSize { //loop thry the data array starting at index 0, in each iteration start index increases by UploadChunkSize //the loop continues until start reaches or exceeds the length of the data array end := start + UploadChunkSize //this calculates the end index for the chunk, it is simply the start index plys the upload chunk size - if end > len(sdOptions.SDBinary) { - end = len(sdOptions.SDBinary) + if end > len(data) { + end = len(data) } - chunk := sdOptions.SDBinary[start:end] + chunk := data[start:end] dataReq := &syncPb.StreamingDataCaptureUploadRequest{ UploadPacket: &syncPb.StreamingDataCaptureUploadRequest_Data{ Data: chunk, diff --git a/app/data_client_test.go b/app/data_client_test.go index 4a51d47183f..394f51e7aee 100644 --- a/app/data_client_test.go +++ b/app/data_client_test.go @@ -617,19 +617,26 @@ func TestDataSyncClient(t *testing.T) { client := DataClient{dataSyncClient: grpcClient} uploadMetadata := UploadMetadata{ - PartID: partID, - ComponentType: componentType, - ComponentName: componentName, - MethodName: method, - Type: DataTypeBinarySensor, + PartID: partID, + ComponentType: componentType, + ComponentName: componentName, + MethodName: method, + Type: DataTypeBinarySensor, + FileName: fileName, + MethodParameters: methodParameters, + FileExtension: fileExt, + Tags: tags, + } + + t.Run("BinaryDataCaptureUpload", func(t *testing.T) { + uploadMetadata.Type = DataTypeBinarySensor + options := BinaryOptions{ + Type: binaryDataType, FileName: fileName, MethodParameters: methodParameters, - FileExtension: fileExt, Tags: tags, + DataRequestTimes: dataRequestTimes, } - - t.Run("BinaryDataCaptureUpload", func(t *testing.T) { - uploadMetadata.Type = DataTypeBinarySensor grpcClient.DataCaptureUploadFunc = func(ctx context.Context, in *syncPb.DataCaptureUploadRequest, opts ...grpc.CallOption, ) 
(*syncPb.DataCaptureUploadResponse, error) { @@ -656,7 +663,7 @@ func TestDataSyncClient(t *testing.T) { FileId: fileID, }, nil } - resp, _ := client.BinaryDataCaptureUpload(context.Background(), &uploadMetadata, binaryDataByte, dataRequestTimes) + resp, _ := client.BinaryDataCaptureUpload(context.Background(), binaryDataByte, partID, componentType, componentName, method, fileExt, &options) test.That(t, resp, test.ShouldResemble, fileID) //compare response with regular expected types (fromProto if needed) }) @@ -669,6 +676,13 @@ func TestDataSyncClient(t *testing.T) { TimeRequested: timestamppb.New(startTime), TimeReceived: timestamppb.New(endTime), } + options := TabularOptions{ + Type: binaryDataType, + FileName: fileName, + MethodParameters: methodParameters, + FileExtension: fileExt, + Tags: tags, + } grpcClient.DataCaptureUploadFunc = func(ctx context.Context, in *syncPb.DataCaptureUploadRequest, opts ...grpc.CallOption, ) (*syncPb.DataCaptureUploadResponse, error) { @@ -700,21 +714,31 @@ func TestDataSyncClient(t *testing.T) { dataRequestTimes := [][2]time.Time{ {startTime, endTime}, } - resp, _ := client.tabularDataCaptureUpload(context.Background(), &uploadMetadata, tabularData, dataRequestTimes) + resp, _ := client.tabularDataCaptureUpload(context.Background(), tabularData, partID, componentType, componentName, method, dataRequestTimes, &options) test.That(t, resp, test.ShouldResemble, fileID) }) t.Run("StreamingDataCaptureUpload", func(t *testing.T) { - metadata := SensorMetadata{ - TimeRequested: startTime, - TimeReceived: endTime, - } - binarySensorData := SensorData{ - Metadata: metadata, - SDStruct: nil, - SDBinary: binaryDataByte, + // metadata := SensorMetadata{ + // TimeRequested: startTime, + // TimeReceived: endTime, + // } + // binarySensorData := SensorData{ + // Metadata: metadata, + // SDStruct: nil, + // SDBinary: binaryDataByte, + // } + + options := StreamingOptions{ + ComponentType: componentType, + ComponentName: componentName, + 
MethodName: method, + Type: binaryDataType, + FileName: fileName, + MethodParameters: methodParameters, + Tags: tags, + DataRequestTimes: dataRequestTimes, } - // Mock implementation of the streaming client. mockStream := &inject.DataSyncService_StreamingDataCaptureUploadClient{ SendFunc: func(req *syncPb.StreamingDataCaptureUploadRequest) error { @@ -753,7 +777,7 @@ func TestDataSyncClient(t *testing.T) { return mockStream, nil } // Call the function being tested. - resp, err := client.StreamingDataCaptureUpload(context.Background(), &uploadMetadata, &binarySensorData) + resp, err := client.StreamingDataCaptureUpload(context.Background(), binaryDataByte, partID, fileExt, &options) test.That(t, err, test.ShouldBeNil) test.That(t, resp, test.ShouldEqual, fileID) }) From a6bd3d94dcda444de20b04b11b24a0476feef73a Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Wed, 20 Nov 2024 17:42:57 -0500 Subject: [PATCH 19/25] middle of working on filedata stuff --- app/data_client.go | 124 ++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 122 insertions(+), 2 deletions(-) diff --git a/app/data_client.go b/app/data_client.go index df18d7481fb..f142e5634de 100644 --- a/app/data_client.go +++ b/app/data_client.go @@ -225,6 +225,9 @@ type UploadMetadata struct { FileExtension string Tags []string } +type FileData struct { + Data []byte +} //:::::******NEW struct/variable ADDITIONS FOR DATASYNC END HERE!!!!**************** @@ -1115,7 +1118,7 @@ func (d *DataClient) StreamingDataCaptureUpload( // FileUpload uploads the contents and metadata for binary (image + file) data, // where the first packet must be the UploadMetadata. 
-func (d *DataClient) FileUpload(ctx context.Context) error { +func (d *DataClient) FileUploadByFileName(ctx context.Context) error { // resp, err := d.dataSyncClient.FileUpload(ctx, &pb.FileUploadRequest{}) // if err != nil { // return err @@ -1125,10 +1128,127 @@ func (d *DataClient) FileUpload(ctx context.Context) error { // FileUpload uploads the contents and metadata for binary (image + file) data, // where the first packet must be the UploadMetadata. -func (d *DataClient) FileUploadFromPath(ctx context.Context) error { +func (d *DataClient) FileUploadByPath(ctx context.Context) error { // resp, err := d.client.FileUpload(ctx, &pb.FileUploadRequest{}) // if err != nil { // return err // } return nil } + +// FileUpload uploads the contents and metadata for binary (image + file) data, +// where the first packet must be the UploadMetadata. +func (d *DataClient) FileUpload(ctx context.Context, metadata UploadMetadata, fileContents FileData) (string, error) { + + UploadChunkSize := 64 * 1024 //64 KB in bytes + // Prepare UploadMetadata + uploadMetadata := UploadMetadata{ + // PartID: partID, + // ComponentType: options.ComponentType, + // ComponentName: options.ComponentName, + // MethodName: options.MethodName, + // Type: DataTypeBinarySensor, // assuming this is the correct type?? + // FileName: options.FileName, + // MethodParameters: options.MethodParameters, + // FileExtension: fileExt, + // Tags: options.Tags, + } + uploadMetadataPb := uploadMetadataToProto(uploadMetadata) //derefernce the pointer to pass the value instead + + //prepare FileData file_contents + + // establish a streaming connection. + stream, err := d.dataSyncClient.FileUpload(ctx) + if err != nil { + return "", fmt.Errorf("failed to establish streaming connection: %w", err) + } + // send the metadata as the first packet. 
+ metaReq := &syncPb.FileUploadRequest{ + UploadPacket: &syncPb.FileUploadRequest_Metadata{ + Metadata: uploadMetadataPb, + }, + } + if err := stream.Send(metaReq); err != nil { + return "", fmt.Errorf("failed to send metadata: %w", err) + } + + // send the binary file data in chunks. + for start := 0; start < len(fileContents.Data); start += UploadChunkSize { + //loop thry the data array starting at index 0, in each iteration start index increases by UploadChunkSize + //the loop continues until start reaches or exceeds the length of the data array + end := start + UploadChunkSize + //this calculates the end index for the chunk, it is simply the start index plys the upload chunk size + if end > len(fileContents.Data) { + end = len(fileContents.Data) + } + + chunk := fileContents.Data[start:end] + dataReq := &syncPb.FileUploadRequest{ + UploadPacket: &syncPb.FileUploadRequest_FileContents{ + FileContents: chunk, + }, + } + + if err := stream.Send(dataReq); err != nil { + return "", fmt.Errorf("failed to send data chunk: %w", err) + } + } + + // close the stream and get the response. + resp, err := stream.CloseAndRecv() + if err != nil { + return "", fmt.Errorf("failed to receive response: %w", err) + } + + // return the file ID from the response. + if resp == nil || resp.FileId == "" { + return "", fmt.Errorf("response is empty or invalid") + } + return resp.FileId, nil + +} + +// fileUpload handles the streaming upload of metadata and file contents. 
+// func (d *DataClient) FileUpload( +// ctx context.Context, +// metadata *syncPb.UploadMetadata, +// fileContents *syncPb.FileData, +// ) (string, error) { +// // Establish a streaming connection +// stream, err := d.dataSyncClient.FileUpload(ctx) +// if err != nil { +// return "", fmt.Errorf("failed to establish streaming connection: %w", err) +// } + +// // Send the metadata as the first packet +// metaReq := &syncPb.FileUploadRequest{ +// Request: &syncPb.FileUploadRequest_Metadata{ +// Metadata: metadata, +// }, +// } +// if err := stream.Send(metaReq); err != nil { +// return "", fmt.Errorf("failed to send metadata: %w", err) +// } + +// // Send the file data as the second packet +// dataReq := &syncPb.FileUploadRequest{ +// Request: &syncPb.FileUploadRequest_FileContents{ +// FileContents: fileContents, +// }, +// } +// if err := stream.Send(dataReq); err != nil { +// return "", fmt.Errorf("failed to send file data: %w", err) +// } + +// // Close the stream and receive the response +// resp, err := stream.CloseAndRecv() +// if err != nil { +// return "", fmt.Errorf("failed to receive response: %w", err) +// } + +// // Validate and return the response +// if resp == nil || resp.FileId == "" { +// return "", fmt.Errorf("response is empty or invalid") +// } +// return resp.FileId, nil +// } From f666d4e95cfc45ad5798ae3a1df835698c2512d6 Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Thu, 21 Nov 2024 14:51:13 -0500 Subject: [PATCH 20/25] last func --- app/data_client.go | 215 ++++++++++---------- app/data_client_test.go | 118 +++++++++++ testutils/inject/datasync_service_client.go | 38 +++- 3 files changed, 258 insertions(+), 113 deletions(-) diff --git a/app/data_client.go b/app/data_client.go index f142e5634de..b14359caea2 100644 --- a/app/data_client.go +++ b/app/data_client.go @@ -4,6 +4,8 @@ package app import ( "context" "fmt" + "os" + "path/filepath" "time" "go.mongodb.org/mongo-driver/bson" @@ -1029,6 +1031,7 @@ type StreamingOptions struct { 
DataRequestTimes [2]time.Time } +// uploads the metadata and contents of streaming binary data func (d *DataClient) StreamingDataCaptureUpload( ctx context.Context, data []byte, //data in bytes (so similar to binarydataCap)...the rest below are for dataCaptureUploadMetadata @@ -1116,139 +1119,141 @@ func (d *DataClient) StreamingDataCaptureUpload( return resp.FileId, nil } -// FileUpload uploads the contents and metadata for binary (image + file) data, -// where the first packet must be the UploadMetadata. -func (d *DataClient) FileUploadByFileName(ctx context.Context) error { - // resp, err := d.dataSyncClient.FileUpload(ctx, &pb.FileUploadRequest{}) - // if err != nil { - // return err - // } - return nil +type FileUploadOptions struct { + ComponentType string + ComponentName string + MethodName string + FileName string + MethodParameters map[string]interface{} + FileExtension string + Tags []string } // FileUpload uploads the contents and metadata for binary (image + file) data, // where the first packet must be the UploadMetadata. -func (d *DataClient) FileUploadByPath(ctx context.Context) error { - // resp, err := d.client.FileUpload(ctx, &pb.FileUploadRequest{}) - // if err != nil { - // return err - // } - return nil +// check does this cover upload by file name and by path?? +func (d *DataClient) FileUploadFromBytes( + ctx context.Context, + partID string, + data []byte, //you either pass in enooded image bytes as data + opts *FileUploadOptions, +) (string, error) { + // Prepare metadata + methodParams, _ := protoutils.ConvertMapToProtoAny(opts.MethodParameters) + metadata := &syncPb.UploadMetadata{ + PartId: partID, + ComponentType: opts.ComponentType, + ComponentName: opts.ComponentName, + MethodName: opts.MethodName, + Type: syncPb.DataType_DATA_TYPE_FILE, //check this!!! 
+ MethodParameters: methodParams, + Tags: opts.Tags, + } + + // Handle filename and extension + if opts.FileName == "" { + // Use timestamp if no filename provided + metadata.FileName = time.Now().String() + } else { + metadata.FileName = opts.FileName + metadata.FileExtension = opts.FileExtension + } + return d.fileUploadStreamResp(metadata, data) + } -// FileUpload uploads the contents and metadata for binary (image + file) data, -// where the first packet must be the UploadMetadata. -func (d *DataClient) FileUpload(ctx context.Context, metadata UploadMetadata, fileContents FileData) (string, error) { +func (d *DataClient) FileUploadFromPath( + ctx context.Context, + partID string, + FilePath string, //or you pass in a filepath and then we create data from reading that file + opts *FileUploadOptions, +) (string, error) { + // Prepare metadata + methodParams, _ := protoutils.ConvertMapToProtoAny(opts.MethodParameters) + metadata := &syncPb.UploadMetadata{ + PartId: partID, + ComponentType: opts.ComponentType, + ComponentName: opts.ComponentName, + MethodName: opts.MethodName, + Type: syncPb.DataType_DATA_TYPE_FILE, + MethodParameters: methodParams, + Tags: opts.Tags, + } + //make data optional...and if data is passed in then you will use those bytes representing the file data to upload + //and - UploadChunkSize := 64 * 1024 //64 KB in bytes - // Prepare UploadMetadata - uploadMetadata := UploadMetadata{ - // PartID: partID, - // ComponentType: options.ComponentType, - // ComponentName: options.ComponentName, - // MethodName: options.MethodName, - // Type: DataTypeBinarySensor, // assuming this is the correct type?? 
- // FileName: options.FileName, - // MethodParameters: options.MethodParameters, - // FileExtension: fileExt, - // Tags: options.Tags, + // Handle filename and extension + if opts.FileName == "" { + if FilePath != "" { + // Extract from file path + metadata.FileName = filepath.Base(FilePath) + metadata.FileExtension = filepath.Ext(FilePath) + } else { + // Use timestamp if no filename provided + metadata.FileName = time.Now().String() + } + } else { + metadata.FileName = opts.FileName + metadata.FileExtension = opts.FileExtension } - uploadMetadataPb := uploadMetadataToProto(uploadMetadata) //derefernce the pointer to pass the value instead - //prepare FileData file_contents + var data []byte + // Prepare file data + if FilePath != "" { + // Read from file path if provided + fileData, err := os.ReadFile(FilePath) + if err != nil { + return "", err + } + data = fileData + } - // establish a streaming connection. - stream, err := d.dataSyncClient.FileUpload(ctx) + return d.fileUploadStreamResp(metadata, data) + +} +func (d *DataClient) fileUploadStreamResp(metadata *syncPb.UploadMetadata, data []byte) (string, error) { + // Create streaming client for upload + stream, err := d.dataSyncClient.FileUpload(context.Background()) if err != nil { - return "", fmt.Errorf("failed to establish streaming connection: %w", err) + return "", err } - // send the metadata as the first packet. - metaReq := &syncPb.FileUploadRequest{ + + // Send metadata + if err := stream.Send(&syncPb.FileUploadRequest{ UploadPacket: &syncPb.FileUploadRequest_Metadata{ - Metadata: uploadMetadataPb, + Metadata: metadata, }, - } - if err := stream.Send(metaReq); err != nil { - return "", fmt.Errorf("failed to send metadata: %w", err) + }); err != nil { + return "", err } - // send the binary file data in chunks. 
- for start := 0; start < len(fileContents.Data); start += UploadChunkSize { - //loop thry the data array starting at index 0, in each iteration start index increases by UploadChunkSize - //the loop continues until start reaches or exceeds the length of the data array - end := start + UploadChunkSize - //this calculates the end index for the chunk, it is simply the start index plys the upload chunk size - if end > len(fileContents.Data) { - end = len(fileContents.Data) + // Send file contents in chunks + const maxChunkSize = 2 * 1024 * 1024 // 2MB + for len(data) > 0 { + chunkSize := maxChunkSize + if len(data) < chunkSize { + chunkSize = len(data) } - chunk := fileContents.Data[start:end] - dataReq := &syncPb.FileUploadRequest{ + if err := stream.Send(&syncPb.FileUploadRequest{ UploadPacket: &syncPb.FileUploadRequest_FileContents{ - FileContents: chunk, + FileContents: &syncPb.FileData{ + Data: data[:chunkSize], + }, }, + }); err != nil { + return "", err } - if err := stream.Send(dataReq); err != nil { - return "", fmt.Errorf("failed to send data chunk: %w", err) - } + data = data[chunkSize:] } - // close the stream and get the response. + // Close stream and get response resp, err := stream.CloseAndRecv() if err != nil { - return "", fmt.Errorf("failed to receive response: %w", err) + return "", err } - // return the file ID from the response. - if resp == nil || resp.FileId == "" { - return "", fmt.Errorf("response is empty or invalid") - } return resp.FileId, nil } - -// fileUpload handles the streaming upload of metadata and file contents. 
-// func (d *DataClient) FileUpload( -// ctx context.Context, -// metadata *syncPb.UploadMetadata, -// fileContents *syncPb.FileData, -// ) (string, error) { -// // Establish a streaming connection -// stream, err := d.dataSyncClient.FileUpload(ctx) -// if err != nil { -// return "", fmt.Errorf("failed to establish streaming connection: %w", err) -// } - -// // Send the metadata as the first packet -// metaReq := &syncPb.FileUploadRequest{ -// Request: &syncPb.FileUploadRequest_Metadata{ -// Metadata: metadata, -// }, -// } -// if err := stream.Send(metaReq); err != nil { -// return "", fmt.Errorf("failed to send metadata: %w", err) -// } - -// // Send the file data as the second packet -// dataReq := &syncPb.FileUploadRequest{ -// Request: &syncPb.FileUploadRequest_FileContents{ -// FileContents: fileContents, -// }, -// } -// if err := stream.Send(dataReq); err != nil { -// return "", fmt.Errorf("failed to send file data: %w", err) -// } - -// // Close the stream and receive the response -// resp, err := stream.CloseAndRecv() -// if err != nil { -// return "", fmt.Errorf("failed to receive response: %w", err) -// } - -// // Validate and return the response -// if resp == nil || resp.FileId == "" { -// return "", fmt.Errorf("response is empty or invalid") -// } -// return resp.FileId, nil -// } diff --git a/app/data_client_test.go b/app/data_client_test.go index 394f51e7aee..99273469b96 100644 --- a/app/data_client_test.go +++ b/app/data_client_test.go @@ -3,6 +3,7 @@ package app import ( "context" "fmt" + "os" "testing" "time" @@ -781,5 +782,122 @@ func TestDataSyncClient(t *testing.T) { test.That(t, err, test.ShouldBeNil) test.That(t, resp, test.ShouldEqual, fileID) }) + t.Run("FileUploadFromBytes", func(t *testing.T) { + options := FileUploadOptions{ + ComponentType: componentType, + ComponentName: componentName, + MethodName: method, + FileName: fileName, + MethodParameters: methodParameters, + FileExtension: fileExt, + Tags: tags, + } + // Mock implementation 
of the streaming client. + mockStream := &inject.DataSyncService_FileUploadClient{ + SendFunc: func(req *syncPb.FileUploadRequest) error { + fmt.Printf("Received packet type: %T\n", req.UploadPacket) + switch packet := req.UploadPacket.(type) { + case *syncPb.FileUploadRequest_Metadata: + // Validate metadata packet. + methodParams, _ := protoutils.ConvertMapToProtoAny(methodParameters) + meta := packet.Metadata + test.That(t, meta.PartId, test.ShouldEqual, partID) + test.That(t, meta.ComponentType, test.ShouldEqual, componentType) + test.That(t, meta.ComponentName, test.ShouldEqual, componentName) + test.That(t, meta.MethodName, test.ShouldEqual, method) + test.That(t, meta.Type, test.ShouldEqual, DataTypeFile) + test.That(t, meta.FileName, test.ShouldEqual, fileName) + test.That(t, meta.MethodParameters, test.ShouldResemble, methodParams) + test.That(t, meta.FileExtension, test.ShouldEqual, fileExt) + test.That(t, meta.Tags, test.ShouldResemble, tags) + case *syncPb.FileUploadRequest_FileContents: + // Validate data packet. + test.That(t, packet.FileContents.Data, test.ShouldResemble, binaryDataByte) + default: + t.Errorf("unexpected packet type: %T", packet) + } + return nil + }, + CloseAndRecvFunc: func() (*syncPb.FileUploadResponse, error) { + // Validate the final response. + //this is either the file_id of the uploaded data, or the fileid of the new file + return &syncPb.FileUploadResponse{ + FileId: fileID, + }, nil + }, + } + // Replace the gRPC client with the mock. + grpcClient.FileUploadFunc = func(ctx context.Context, + opts ...grpc.CallOption, + ) (syncPb.DataSyncService_FileUploadClient, error) { + return mockStream, nil + } + // Call the function being tested. 
+ resp, err := client.FileUploadFromBytes(context.Background(), partID, binaryDataByte, &options) + test.That(t, err, test.ShouldBeNil) + test.That(t, resp, test.ShouldEqual, fileID) + }) + t.Run("FileUploadFromPath", func(t *testing.T) { + options := FileUploadOptions{ + ComponentType: componentType, + ComponentName: componentName, + MethodName: method, + FileName: fileName, + MethodParameters: methodParameters, + FileExtension: fileExt, + Tags: tags, + } + // Create a temporary file for testing + tempContent := []byte("test file content") + tempFile, err := os.CreateTemp("", "test-upload-*.txt") + test.That(t, err, test.ShouldBeNil) + defer os.Remove(tempFile.Name()) // Clean up after test + + // Mock implementation of the streaming client. + mockStream := &inject.DataSyncService_FileUploadClient{ + SendFunc: func(req *syncPb.FileUploadRequest) error { + fmt.Printf("Received packet type: %T\n", req.UploadPacket) + switch packet := req.UploadPacket.(type) { + case *syncPb.FileUploadRequest_Metadata: + // Validate metadata packet. + methodParams, _ := protoutils.ConvertMapToProtoAny(methodParameters) + meta := packet.Metadata + test.That(t, meta.PartId, test.ShouldEqual, partID) + test.That(t, meta.ComponentType, test.ShouldEqual, componentType) + test.That(t, meta.ComponentName, test.ShouldEqual, componentName) + test.That(t, meta.MethodName, test.ShouldEqual, method) + test.That(t, meta.Type, test.ShouldEqual, DataTypeFile) + test.That(t, meta.FileName, test.ShouldEqual, fileName) + test.That(t, meta.MethodParameters, test.ShouldResemble, methodParams) + test.That(t, meta.FileExtension, test.ShouldEqual, fileExt) + test.That(t, meta.Tags, test.ShouldResemble, tags) + case *syncPb.FileUploadRequest_FileContents: + // Validate data packet. 
+ test.That(t, packet.FileContents.Data, test.ShouldResemble, tempContent) + default: + t.Errorf("unexpected packet type: %T", packet) + } + return nil + }, + CloseAndRecvFunc: func() (*syncPb.FileUploadResponse, error) { + // Validate the final response. + //this is either the file_id of the uploaded data, or the fileid of the new file + return &syncPb.FileUploadResponse{ + FileId: fileID, + }, nil + }, + } + // Replace the gRPC client with the mock. + grpcClient.FileUploadFunc = func(ctx context.Context, + opts ...grpc.CallOption, + ) (syncPb.DataSyncService_FileUploadClient, error) { + return mockStream, nil + } + // Call the function being tested. + resp, err := client.FileUploadFromPath(context.Background(), partID, tempFile.Name(), &options) + test.That(t, err, test.ShouldBeNil) + test.That(t, resp, test.ShouldEqual, fileID) + + }) } diff --git a/testutils/inject/datasync_service_client.go b/testutils/inject/datasync_service_client.go index 9fb747d2fd5..035b3759e0d 100644 --- a/testutils/inject/datasync_service_client.go +++ b/testutils/inject/datasync_service_client.go @@ -56,18 +56,40 @@ type DataSyncService_StreamingDataCaptureUploadClient struct { // grpc.ClientStream } -func (c *DataSyncService_StreamingDataCaptureUploadClient) Send(req *datapb.StreamingDataCaptureUploadRequest) error { - if c.SendFunc == nil { - return c.DataSyncService_StreamingDataCaptureUploadClient.Send(req) +func (client *DataSyncService_StreamingDataCaptureUploadClient) Send(req *datapb.StreamingDataCaptureUploadRequest) error { + if client.SendFunc == nil { + return client.DataSyncService_StreamingDataCaptureUploadClient.Send(req) } //test that the data we send is equal to what we expect - return c.SendFunc(req) + return client.SendFunc(req) } -func (c *DataSyncService_StreamingDataCaptureUploadClient) CloseAndRecv() (*datapb.StreamingDataCaptureUploadResponse, error) { - if c.CloseAndRecvFunc == nil { - return c.DataSyncService_StreamingDataCaptureUploadClient.CloseAndRecv() 
+func (client *DataSyncService_StreamingDataCaptureUploadClient) CloseAndRecv() (*datapb.StreamingDataCaptureUploadResponse, error) { + if client.CloseAndRecvFunc == nil { + return client.DataSyncService_StreamingDataCaptureUploadClient.CloseAndRecv() } - return c.CloseAndRecvFunc() + return client.CloseAndRecvFunc() } + +type DataSyncService_FileUploadClient struct{ + datapb.DataSyncService_FileUploadClient + SendFunc func(*datapb.FileUploadRequest) error + CloseAndRecvFunc func() (*datapb.FileUploadResponse, error) + // grpc.ClientStream +} +func (client *DataSyncService_FileUploadClient) Send(req *datapb.FileUploadRequest) error { + if client.SendFunc == nil { + return client.DataSyncService_FileUploadClient.Send(req) + } + //test that the data we send is equal to what we expect + return client.SendFunc(req) +} + +func (client *DataSyncService_FileUploadClient) CloseAndRecv() (*datapb.FileUploadResponse, error) { + if client.CloseAndRecvFunc == nil { + return client.DataSyncService_FileUploadClient.CloseAndRecv() + } + return client.CloseAndRecvFunc() + +} \ No newline at end of file From 5d83b0ed7dea39dafdbfd100a9bc65264b8e144c Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Thu, 21 Nov 2024 15:46:54 -0500 Subject: [PATCH 21/25] finally done w functions --- app/data_client.go | 270 +++++++++++++++------------------------- app/data_client_test.go | 38 +----- 2 files changed, 105 insertions(+), 203 deletions(-) diff --git a/app/data_client.go b/app/data_client.go index b14359caea2..8b702e23979 100644 --- a/app/data_client.go +++ b/app/data_client.go @@ -27,6 +27,10 @@ type DataClient struct { dataSyncClient syncPb.DataSyncServiceClient } +const ( + UploadChunkSize = 64 * 1024 //64 KB +) + // Order specifies the order in which data is returned. 
type Order int32 @@ -183,23 +187,27 @@ type DatabaseConnReturn struct { HasDatabaseUser bool } -// :::::******NEW struct/variable ADDITIONS FOR DATASYNC START HERE!!!!**************** +// DataSyncClient structs + +// SensorMetadata contains the time the sensor data was requested and was received. type SensorMetadata struct { - // figure out if mimetype and annotations should be included or not TimeRequested time.Time TimeReceived time.Time - // MimeType MimeType - //annotations lives in the data client file...so maybe make a shared situation later on?? - // Annotations Annotations + MimeType MimeType + Annotations Annotations } + +// SensorData contains the contents and metadata for tabular data. type SensorData struct { - //this is what can be filled by either tabular or binary data!! Metadata SensorMetadata - SDStruct map[string]interface{} //or should it be TabularData.data ?? + SDStruct map[string]interface{} SDBinary []byte } + +// DataType specifies the type of data uploaded. type DataType int32 +// DataType constants define the possible DataType options. const ( DataTypeUnspecified DataType = iota DataTypeBinarySensor @@ -207,8 +215,10 @@ const ( DataTypeFile ) +// MimeType specifies the format of a file being uploaded. type MimeType int32 +// MimeType constants define the possible MimeType options const ( MimeTypeUnspecified MimeType = iota MimeTypeJPEG @@ -216,6 +226,7 @@ const ( MimeTypePCD ) +// UploadMetadata contains the metadata for binary (image + file) data. type UploadMetadata struct { PartID string ComponentType string @@ -227,11 +238,52 @@ type UploadMetadata struct { FileExtension string Tags []string } + +// FileData contains the contents of binary (image + file) data. 
type FileData struct { Data []byte } -//:::::******NEW struct/variable ADDITIONS FOR DATASYNC END HERE!!!!**************** +// BinaryOptions represents optional parameters for the BinaryDataCaptureUpload method +type BinaryOptions struct { + Type DataType + FileName string + MethodParameters map[string]interface{} + Tags []string + DataRequestTimes [2]time.Time +} + +// TabularOptions represents optional parameters for the TabularDataCaptureUpload method +type TabularOptions struct { + Type DataType + FileName string + MethodParameters map[string]interface{} + FileExtension string + Tags []string +} + +// StreamingOptions represents optional parameters for the StreamingDataCaptureUpload method +type StreamingOptions struct { + ComponentType string + ComponentName string + MethodName string + Type DataType + FileName string + MethodParameters map[string]interface{} + Tags []string + DataRequestTimes [2]time.Time +} + +// FileUploadOptions represents optional parameters for the FileUploadFromPath & FileUploadFromBytes methods +type FileUploadOptions struct { + ComponentType string + ComponentName string + MethodName string + FileName string + MethodParameters map[string]interface{} + FileExtension string + Tags []string +} // NewDataClient constructs a new DataClient using the connection passed in by the viamClient. 
func NewDataClient(conn rpc.ClientConn) *DataClient { @@ -299,27 +351,6 @@ func captureMetadataFromProto(proto *pb.CaptureMetadata) CaptureMetadata { } } -// func captureMetadataToProto(metadata CaptureMetadata) *pb.CaptureMetadata { -// methodParams, err := protoutils.ConvertMapToProtoAny(metadata.MethodParameters) -// if err != nil { -// return nil -// } -// return &pb.CaptureMetadata{ -// OrganizationId: metadata.OrganizationID, -// LocationId: metadata.LocationID, -// RobotName: metadata.RobotName, -// RobotId: metadata.RobotID, -// PartName: metadata.PartName, -// PartId: metadata.PartID, -// ComponentType: metadata.ComponentType, -// ComponentName: metadata.ComponentName, -// MethodName: metadata.MethodName, -// MethodParameters: methodParams, -// Tags: metadata.Tags, -// MimeType: metadata.MimeType, -// } -// } - func binaryDataFromProto(proto *pb.BinaryData) BinaryData { return BinaryData{ Binary: proto.Binary, @@ -818,10 +849,9 @@ func (d *DataClient) RemoveBinaryDataFromDatasetByIDs( return err } -// !!!!!!!!!! ******** ALL NEW ADDED FOR DATASYNC CLIENT +//DataSync Wrappers start here func uploadMetadataToProto(metadata UploadMetadata) *syncPb.UploadMetadata { - // methodParms, err := protoutils.ConvertStringMapToAnyPBMap(metadata.MethodParameters) methodParams, err := protoutils.ConvertMapToProtoAny(metadata.MethodParameters) if err != nil { @@ -840,7 +870,6 @@ func uploadMetadataToProto(metadata UploadMetadata) *syncPb.UploadMetadata { } } -// why doesnt this protoype have mime type and annotations with it?? func sensorMetadataToProto(metadata SensorMetadata) *syncPb.SensorMetadata { return &syncPb.SensorMetadata{ TimeRequested: timestamppb.New(metadata.TimeRequested), @@ -862,7 +891,7 @@ func sensorDataToProto(sensorData SensorData) *syncPb.SensorData { Struct: pbStruct, } } else { - return nil //should an error message be set instead?? 
+ return nil } return protoSensorData } @@ -874,7 +903,6 @@ func sensorContentsToProto(sensorContents []SensorData) []*syncPb.SensorData { return protoSensorContents } -// Helper function to format the file extension. func formatFileExtension(fileExt string) string { if fileExt == "" { return fileExt @@ -885,14 +913,7 @@ func formatFileExtension(fileExt string) string { return "." + fileExt } -type BinaryOptions struct { - Type DataType - FileName string - MethodParameters map[string]interface{} - Tags []string - DataRequestTimes [2]time.Time -} - +// BinaryDataCaptureUpload uploads the contents and metadata for binary data. func (d *DataClient) BinaryDataCaptureUpload( ctx context.Context, binaryData []byte, @@ -903,7 +924,6 @@ func (d *DataClient) BinaryDataCaptureUpload( fileExtension string, options *BinaryOptions, ) (string, error) { - // Create SensorMetadata based on the provided times var sensorMetadata SensorMetadata if len(options.DataRequestTimes) == 2 { sensorMetadata = SensorMetadata{ @@ -911,13 +931,11 @@ func (d *DataClient) BinaryDataCaptureUpload( TimeReceived: options.DataRequestTimes[1], } } - // Create SensorData sensorData := SensorData{ Metadata: sensorMetadata, - SDStruct: nil, // Assuming no struct is needed for binary data - SDBinary: binaryData, // Attach the binary data + SDStruct: nil, + SDBinary: binaryData, } - // Create UploadMetadata metadata := UploadMetadata{ PartID: partID, ComponentType: componentType, @@ -926,7 +944,7 @@ func (d *DataClient) BinaryDataCaptureUpload( Type: DataTypeBinarySensor, FileName: options.FileName, MethodParameters: options.MethodParameters, - FileExtension: formatFileExtension(fileExtension), // Validate file extension + FileExtension: formatFileExtension(fileExtension), Tags: options.Tags, } response, err := d.DataCaptureUpload(ctx, metadata, []SensorData{sensorData}) @@ -936,14 +954,7 @@ func (d *DataClient) BinaryDataCaptureUpload( return response, nil } -type TabularOptions struct { - Type DataType - 
FileName string - MethodParameters map[string]interface{} - FileExtension string - Tags []string -} - +// TabularDataCaptureUpload uploads the contents and metadata for tabular data. func (d *DataClient) tabularDataCaptureUpload( ctx context.Context, tabularData []map[string]interface{}, @@ -951,15 +962,13 @@ func (d *DataClient) tabularDataCaptureUpload( componentType string, componentName string, methodName string, - dataRequestTimes [][2]time.Time, //part of sensorData + dataRequestTimes [][2]time.Time, options *TabularOptions, ) (string, error) { - // options.FileExtension = formatFileExtension(options.FileExtension) if len(dataRequestTimes) != len(tabularData) { return "", fmt.Errorf("dataRequestTimes and tabularData lengths must be equal") } var sensorContents []SensorData - // Iterate through the tabular data for i, tabData := range tabularData { sensorMetadata := SensorMetadata{} dates := dataRequestTimes[i] @@ -967,7 +976,6 @@ func (d *DataClient) tabularDataCaptureUpload( sensorMetadata.TimeRequested = dates[0] sensorMetadata.TimeReceived = dates[1] } - // Create SensorData sensorData := SensorData{ Metadata: sensorMetadata, SDStruct: tabData, @@ -975,13 +983,12 @@ func (d *DataClient) tabularDataCaptureUpload( } sensorContents = append(sensorContents, sensorData) } - // Create UploadMetadata metadata := UploadMetadata{ PartID: partID, ComponentType: componentType, ComponentName: componentName, MethodName: methodName, - Type: DataTypeTabularSensor, // assuming this is the correct type?? + Type: DataTypeTabularSensor, FileName: options.FileName, MethodParameters: options.MethodParameters, FileExtension: formatFileExtension(options.FileExtension), @@ -998,7 +1005,7 @@ func (d *DataClient) tabularDataCaptureUpload( // and returns the file ID associated with the uploaded data and metadata. 
func (d *DataClient) DataCaptureUpload(ctx context.Context, metadata UploadMetadata, sensorContents []SensorData) (string, error) { resp, err := d.dataSyncClient.DataCaptureUpload(ctx, &syncPb.DataCaptureUploadRequest{ - Metadata: uploadMetadataToProto(metadata), //should be in proto form !! + Metadata: uploadMetadataToProto(metadata), SensorContents: sensorContentsToProto(sensorContents), }) if err != nil { @@ -1008,63 +1015,33 @@ func (d *DataClient) DataCaptureUpload(ctx context.Context, metadata UploadMetad } // StreamingDataCaptureUpload uploads metadata and streaming binary data in chunks. -//pass in a pointer to a struct -//pointer to dataTime array -->alr a pointer -//zero value of all arraays is nil --> so we can just have it stay nil and be okay?? -//strings ,arrays, maps we can just assume they r passing nil if its not filled and pass that too --> pass that along - -// func (d *DataClient) DoThing(ctx, nonOptional Foo, nonOptional2 Bar, optional DoThingOptionsStruct) ==> DO THIS -// func (d *DataClient) DoThing2(ctx, nonOptional Foo, nonOptional2 Bar, metadata MetadataOptions, md2 Metadata2Options) ==> DO NOT DO THIS! 
-/* - - */ - -// optional parameters for the StreamingDataCaptureUpload function -type StreamingOptions struct { - ComponentType string - ComponentName string - MethodName string - Type DataType - FileName string - MethodParameters map[string]interface{} - Tags []string - DataRequestTimes [2]time.Time -} - -// uploads the metadata and contents of streaming binary data func (d *DataClient) StreamingDataCaptureUpload( ctx context.Context, - data []byte, //data in bytes (so similar to binarydataCap)...the rest below are for dataCaptureUploadMetadata - partID string, //uploadmetadata - fileExt string, //uploadmetadata + data []byte, + partID string, + fileExt string, options *StreamingOptions, ) (string, error) { - UploadChunkSize := 64 * 1024 //64 KB in bytes - // Prepare UploadMetadata uploadMetadata := UploadMetadata{ PartID: partID, ComponentType: options.ComponentType, ComponentName: options.ComponentName, MethodName: options.MethodName, - Type: DataTypeBinarySensor, // assuming this is the correct type?? + Type: DataTypeBinarySensor, FileName: options.FileName, MethodParameters: options.MethodParameters, FileExtension: fileExt, Tags: options.Tags, } - uploadMetadataPb := uploadMetadataToProto(uploadMetadata) //derefernce the pointer to pass the value instead - // uploadMetadataPb.Type = syncPb.DataType_DATA_TYPE_BINARY_SENSOR - // handle data request times w sensormetadata. - // Create SensorMetadata based on the provided times + uploadMetadataPb := uploadMetadataToProto(uploadMetadata) var sensorMetadata SensorMetadata - if len(options.DataRequestTimes) == 2 { //can i have a better check here? like if dataRequestTimes != [2]time.Time{} + if len(options.DataRequestTimes) == 2 { sensorMetadata = SensorMetadata{ TimeRequested: options.DataRequestTimes[0], TimeReceived: options.DataRequestTimes[1], } } sensorMetadataPb := sensorMetadataToProto(sensorMetadata) - // create the DataCaptureUploadMetadata. 
metadata := &syncPb.DataCaptureUploadMetadata{ UploadMetadata: uploadMetadataPb, SensorMetadata: sensorMetadataPb, @@ -1083,76 +1060,48 @@ func (d *DataClient) StreamingDataCaptureUpload( if err := stream.Send(metaReq); err != nil { return "", fmt.Errorf("failed to send metadata: %w", err) } - // send the binary data in chunks. for start := 0; start < len(data); start += UploadChunkSize { - //loop thry the data array starting at index 0, in each iteration start index increases by UploadChunkSize - //the loop continues until start reaches or exceeds the length of the data array end := start + UploadChunkSize - //this calculates the end index for the chunk, it is simply the start index plys the upload chunk size if end > len(data) { end = len(data) } - - chunk := data[start:end] dataReq := &syncPb.StreamingDataCaptureUploadRequest{ UploadPacket: &syncPb.StreamingDataCaptureUploadRequest_Data{ - Data: chunk, + Data: data[start:end], }, } - if err := stream.Send(dataReq); err != nil { return "", fmt.Errorf("failed to send data chunk: %w", err) } } - // close the stream and get the response. resp, err := stream.CloseAndRecv() if err != nil { return "", fmt.Errorf("failed to receive response: %w", err) } - - // return the file ID from the response. - if resp == nil || resp.FileId == "" { - return "", fmt.Errorf("response is empty or invalid") - } return resp.FileId, nil } -type FileUploadOptions struct { - ComponentType string - ComponentName string - MethodName string - FileName string - MethodParameters map[string]interface{} - FileExtension string - Tags []string -} - -// FileUpload uploads the contents and metadata for binary (image + file) data, -// where the first packet must be the UploadMetadata. -// check does this cover upload by file name and by path?? 
+// FileUploadFromBytes uploads the contents and metadata for binary data such as encoded images or other data represented by bytes func (d *DataClient) FileUploadFromBytes( ctx context.Context, partID string, - data []byte, //you either pass in enooded image bytes as data + data []byte, opts *FileUploadOptions, ) (string, error) { - // Prepare metadata methodParams, _ := protoutils.ConvertMapToProtoAny(opts.MethodParameters) metadata := &syncPb.UploadMetadata{ PartId: partID, ComponentType: opts.ComponentType, ComponentName: opts.ComponentName, MethodName: opts.MethodName, - Type: syncPb.DataType_DATA_TYPE_FILE, //check this!!! + Type: syncPb.DataType_DATA_TYPE_FILE, MethodParameters: methodParams, Tags: opts.Tags, } - // Handle filename and extension if opts.FileName == "" { - // Use timestamp if no filename provided metadata.FileName = time.Now().String() } else { metadata.FileName = opts.FileName @@ -1162,13 +1111,13 @@ func (d *DataClient) FileUploadFromBytes( } +// FileUploadFromPath uploads the contents and metadata for binary data created from a filepath func (d *DataClient) FileUploadFromPath( ctx context.Context, partID string, - FilePath string, //or you pass in a filepath and then we create data from reading that file + FilePath string, opts *FileUploadOptions, ) (string, error) { - // Prepare metadata methodParams, _ := protoutils.ConvertMapToProtoAny(opts.MethodParameters) metadata := &syncPb.UploadMetadata{ PartId: partID, @@ -1179,81 +1128,66 @@ func (d *DataClient) FileUploadFromPath( MethodParameters: methodParams, Tags: opts.Tags, } - //make data optional...and if data is passed in then you will use those bytes representing the file data to upload - //and - - // Handle filename and extension if opts.FileName == "" { if FilePath != "" { - // Extract from file path metadata.FileName = filepath.Base(FilePath) metadata.FileExtension = filepath.Ext(FilePath) } else { - // Use timestamp if no filename provided metadata.FileName = time.Now().String() } 
} else { metadata.FileName = opts.FileName metadata.FileExtension = opts.FileExtension } - var data []byte - // Prepare file data + // Prepare file data from filepath if FilePath != "" { - // Read from file path if provided fileData, err := os.ReadFile(FilePath) if err != nil { return "", err } data = fileData } - return d.fileUploadStreamResp(metadata, data) } func (d *DataClient) fileUploadStreamResp(metadata *syncPb.UploadMetadata, data []byte) (string, error) { - // Create streaming client for upload + // establish a streaming connection. stream, err := d.dataSyncClient.FileUpload(context.Background()) if err != nil { - return "", err + return "", fmt.Errorf("failed to establish streaming connection: %w", err) } - - // Send metadata - if err := stream.Send(&syncPb.FileUploadRequest{ + // send the metadata as the first packet. + metaReq := &syncPb.FileUploadRequest{ UploadPacket: &syncPb.FileUploadRequest_Metadata{ Metadata: metadata, }, - }); err != nil { - return "", err } - - // Send file contents in chunks - const maxChunkSize = 2 * 1024 * 1024 // 2MB - for len(data) > 0 { - chunkSize := maxChunkSize - if len(data) < chunkSize { - chunkSize = len(data) + if err := stream.Send(metaReq); err != nil { + return "", fmt.Errorf("failed to send metadata: %w", err) + } + // send file contents in chunks + for start := 0; start < len(data); start += UploadChunkSize { + end := start + UploadChunkSize + if end > len(data) { + end = len(data) } - - if err := stream.Send(&syncPb.FileUploadRequest{ + dataReq := &syncPb.FileUploadRequest{ UploadPacket: &syncPb.FileUploadRequest_FileContents{ FileContents: &syncPb.FileData{ - Data: data[:chunkSize], + Data: data[start:end], }, }, - }); err != nil { - return "", err } - - data = data[chunkSize:] + if err := stream.Send(dataReq); err != nil { + return "", fmt.Errorf("failed to send file data chunk: %w", err) + } } - - // Close stream and get response + // close stream and get response resp, err := stream.CloseAndRecv() if err 
!= nil { - return "", err + return "", fmt.Errorf("failed to receive response: %w", err) } return resp.FileId, nil - } diff --git a/app/data_client_test.go b/app/data_client_test.go index 99273469b96..57c5157b99e 100644 --- a/app/data_client_test.go +++ b/app/data_client_test.go @@ -2,7 +2,6 @@ package app import ( "context" - "fmt" "os" "testing" "time" @@ -611,8 +610,6 @@ func TestDataClient(t *testing.T) { client.RemoveBinaryDataFromDatasetByIDs(context.Background(), binaryIDs, datasetID) }) } - -// ***********Added this below for new dataSync !!!****** func TestDataSyncClient(t *testing.T) { grpcClient := createGrpcDataSyncClient() client := DataClient{dataSyncClient: grpcClient} @@ -655,17 +652,15 @@ func TestDataSyncClient(t *testing.T) { test.That(t, in.SensorContents[0].Metadata.TimeRequested, test.ShouldResemble, timestamppb.New(startTime)) test.That(t, in.SensorContents[0].Metadata.TimeReceived, test.ShouldResemble, timestamppb.New(endTime)) - // Extract and validate SensorContents[0].Data dataField, ok := in.SensorContents[0].Data.(*syncPb.SensorData_Binary) - test.That(t, ok, test.ShouldBeTrue) // Ensure the type is correct + test.That(t, ok, test.ShouldBeTrue) test.That(t, dataField.Binary, test.ShouldResemble, binaryDataByte) - // test.That(t, in., test.ShouldResemble, toProto(something)) //toProto return &syncPb.DataCaptureUploadResponse{ FileId: fileID, }, nil } resp, _ := client.BinaryDataCaptureUpload(context.Background(), binaryDataByte, partID, componentType, componentName, method, fileExt, &options) - test.That(t, resp, test.ShouldResemble, fileID) //compare response with regular expected types (fromProto if needed) + test.That(t, resp, test.ShouldResemble, fileID) }) t.Run("TabularDataCaptureUpload", func(t *testing.T) { @@ -701,17 +696,14 @@ func TestDataSyncClient(t *testing.T) { test.That(t, in.SensorContents[0].Metadata.TimeRequested, test.ShouldResemble, timestamppb.New(startTime)) test.That(t, in.SensorContents[0].Metadata.TimeReceived, 
test.ShouldResemble, timestamppb.New(endTime)) - // Extract and validate SensorContents[0].Data dataField, ok := in.SensorContents[0].Data.(*syncPb.SensorData_Struct) - test.That(t, ok, test.ShouldBeTrue) // Ensure the type is correct + test.That(t, ok, test.ShouldBeTrue) test.That(t, dataField.Struct, test.ShouldResemble, tabularDataPb.Data) return &syncPb.DataCaptureUploadResponse{ FileId: fileID, }, nil } - // Convert `tabularDataPb.Data` to the expected input format for `tabularDataCaptureUpload` tabularData := []map[string]interface{}{data} - // Provide corresponding request times dataRequestTimes := [][2]time.Time{ {startTime, endTime}, } @@ -720,16 +712,6 @@ func TestDataSyncClient(t *testing.T) { }) t.Run("StreamingDataCaptureUpload", func(t *testing.T) { - // metadata := SensorMetadata{ - // TimeRequested: startTime, - // TimeReceived: endTime, - // } - // binarySensorData := SensorData{ - // Metadata: metadata, - // SDStruct: nil, - // SDBinary: binaryDataByte, - // } - options := StreamingOptions{ ComponentType: componentType, ComponentName: componentName, @@ -743,10 +725,8 @@ func TestDataSyncClient(t *testing.T) { // Mock implementation of the streaming client. mockStream := &inject.DataSyncService_StreamingDataCaptureUploadClient{ SendFunc: func(req *syncPb.StreamingDataCaptureUploadRequest) error { - fmt.Printf("Received packet type: %T\n", req.UploadPacket) switch packet := req.UploadPacket.(type) { case *syncPb.StreamingDataCaptureUploadRequest_Metadata: - // Validate metadata packet. 
meta := packet.Metadata test.That(t, meta.UploadMetadata.PartId, test.ShouldEqual, partID) test.That(t, meta.UploadMetadata.FileExtension, test.ShouldEqual, fileExt) @@ -757,7 +737,6 @@ func TestDataSyncClient(t *testing.T) { test.That(t, meta.SensorMetadata.TimeRequested, test.ShouldResemble, timestamppb.New(startTime)) test.That(t, meta.SensorMetadata.TimeReceived, test.ShouldResemble, timestamppb.New(endTime)) case *syncPb.StreamingDataCaptureUploadRequest_Data: - // Validate data packet. test.That(t, packet.Data, test.ShouldResemble, binaryDataByte) default: t.Errorf("unexpected packet type: %T", packet) @@ -765,19 +744,16 @@ func TestDataSyncClient(t *testing.T) { return nil }, CloseAndRecvFunc: func() (*syncPb.StreamingDataCaptureUploadResponse, error) { - // Validate the final response. return &syncPb.StreamingDataCaptureUploadResponse{ FileId: fileID, }, nil }, } - // Replace the gRPC client with the mock. grpcClient.StreamingDataCaptureUploadFunc = func(ctx context.Context, opts ...grpc.CallOption, ) (syncPb.DataSyncService_StreamingDataCaptureUploadClient, error) { return mockStream, nil } - // Call the function being tested. resp, err := client.StreamingDataCaptureUpload(context.Background(), binaryDataByte, partID, fileExt, &options) test.That(t, err, test.ShouldBeNil) test.That(t, resp, test.ShouldEqual, fileID) @@ -795,10 +771,8 @@ func TestDataSyncClient(t *testing.T) { // Mock implementation of the streaming client. mockStream := &inject.DataSyncService_FileUploadClient{ SendFunc: func(req *syncPb.FileUploadRequest) error { - fmt.Printf("Received packet type: %T\n", req.UploadPacket) switch packet := req.UploadPacket.(type) { case *syncPb.FileUploadRequest_Metadata: - // Validate metadata packet. 
methodParams, _ := protoutils.ConvertMapToProtoAny(methodParameters) meta := packet.Metadata test.That(t, meta.PartId, test.ShouldEqual, partID) @@ -811,7 +785,6 @@ func TestDataSyncClient(t *testing.T) { test.That(t, meta.FileExtension, test.ShouldEqual, fileExt) test.That(t, meta.Tags, test.ShouldResemble, tags) case *syncPb.FileUploadRequest_FileContents: - // Validate data packet. test.That(t, packet.FileContents.Data, test.ShouldResemble, binaryDataByte) default: t.Errorf("unexpected packet type: %T", packet) @@ -819,20 +792,16 @@ func TestDataSyncClient(t *testing.T) { return nil }, CloseAndRecvFunc: func() (*syncPb.FileUploadResponse, error) { - // Validate the final response. - //this is either the file_id of the uploaded data, or the fileid of the new file return &syncPb.FileUploadResponse{ FileId: fileID, }, nil }, } - // Replace the gRPC client with the mock. grpcClient.FileUploadFunc = func(ctx context.Context, opts ...grpc.CallOption, ) (syncPb.DataSyncService_FileUploadClient, error) { return mockStream, nil } - // Call the function being tested. resp, err := client.FileUploadFromBytes(context.Background(), partID, binaryDataByte, &options) test.That(t, err, test.ShouldBeNil) test.That(t, resp, test.ShouldEqual, fileID) @@ -857,7 +826,6 @@ func TestDataSyncClient(t *testing.T) { // Mock implementation of the streaming client. mockStream := &inject.DataSyncService_FileUploadClient{ SendFunc: func(req *syncPb.FileUploadRequest) error { - fmt.Printf("Received packet type: %T\n", req.UploadPacket) switch packet := req.UploadPacket.(type) { case *syncPb.FileUploadRequest_Metadata: // Validate metadata packet. 
From 3be13fbb0c16e6ea83e60d6e1b30201f1b5c4025 Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Thu, 21 Nov 2024 17:44:48 -0500 Subject: [PATCH 22/25] fixed lint --- app/data_client.go | 85 ++++++++++++--------- app/data_client_test.go | 29 ++++--- testutils/inject/datasync_service_client.go | 36 ++++----- 3 files changed, 80 insertions(+), 70 deletions(-) diff --git a/app/data_client.go b/app/data_client.go index 8b702e23979..78fd8baf4f2 100644 --- a/app/data_client.go +++ b/app/data_client.go @@ -3,6 +3,7 @@ package app import ( "context" + "errors" "fmt" "os" "path/filepath" @@ -12,7 +13,6 @@ import ( "go.mongodb.org/mongo-driver/bson/primitive" pb "go.viam.com/api/app/data/v1" syncPb "go.viam.com/api/app/datasync/v1" - "go.viam.com/utils/rpc" "google.golang.org/protobuf/types/known/anypb" "google.golang.org/protobuf/types/known/structpb" @@ -28,7 +28,7 @@ type DataClient struct { } const ( - UploadChunkSize = 64 * 1024 //64 KB + UploadChunkSize = 64 * 1024 // UploadChunkSize is 64 KB ) // Order specifies the order in which data is returned. @@ -218,7 +218,7 @@ const ( // MimeType specifies the format of a file being uploaded. type MimeType int32 -// MimeType constants define the possible MimeType options +// MimeType constants define the possible MimeType options. const ( MimeTypeUnspecified MimeType = iota MimeTypeJPEG @@ -244,7 +244,7 @@ type FileData struct { Data []byte } -// BinaryOptions represents optional parameters for the BinaryDataCaptureUpload method +// BinaryOptions represents optional parameters for the BinaryDataCaptureUpload method. type BinaryOptions struct { Type DataType FileName string @@ -253,7 +253,7 @@ type BinaryOptions struct { DataRequestTimes [2]time.Time } -// TabularOptions represents optional parameters for the TabularDataCaptureUpload method +// TabularOptions represents optional parameters for the TabularDataCaptureUpload method. 
type TabularOptions struct { Type DataType FileName string @@ -262,7 +262,7 @@ type TabularOptions struct { Tags []string } -// StreamingOptions represents optional parameters for the StreamingDataCaptureUpload method +// StreamingOptions represents optional parameters for the StreamingDataCaptureUpload method. type StreamingOptions struct { ComponentType string ComponentName string @@ -274,7 +274,7 @@ type StreamingOptions struct { DataRequestTimes [2]time.Time } -// FileUploadOptions represents optional parameters for the FileUploadFromPath & FileUploadFromBytes methods +// FileUploadOptions represents optional parameters for the FileUploadFromPath & FileUploadFromBytes methods. type FileUploadOptions struct { ComponentType string ComponentName string @@ -517,12 +517,12 @@ func (d *DataClient) TabularDataByFilter( } // TabularData contains tabular data and associated metadata dataArray := []TabularData{} - var metadata *pb.CaptureMetadata for _, data := range resp.Data { - if len(resp.Metadata) > 0 && int(data.MetadataIndex) < len(resp.Metadata) { + var metadata *pb.CaptureMetadata + switch { + case len(resp.Metadata) > 0 && int(data.MetadataIndex) < len(resp.Metadata): metadata = resp.Metadata[data.MetadataIndex] - } else { - // Use an empty CaptureMetadata as a fallback + default: metadata = &pb.CaptureMetadata{} } dataArray = append(dataArray, tabularDataFromProto(data, metadata)) @@ -849,11 +849,8 @@ func (d *DataClient) RemoveBinaryDataFromDatasetByIDs( return err } -//DataSync Wrappers start here - func uploadMetadataToProto(metadata UploadMetadata) *syncPb.UploadMetadata { methodParams, err := protoutils.ConvertMapToProtoAny(metadata.MethodParameters) - if err != nil { return nil } @@ -881,20 +878,25 @@ func sensorDataToProto(sensorData SensorData) *syncPb.SensorData { protoSensorData := &syncPb.SensorData{ Metadata: sensorMetadataToProto(sensorData.Metadata), } - if len(sensorData.SDBinary) > 0 { + switch { + case len(sensorData.SDBinary) > 0: 
protoSensorData.Data = &syncPb.SensorData_Binary{ Binary: sensorData.SDBinary, } - } else if sensorData.SDStruct != nil { - pbStruct, _ := structpb.NewStruct(sensorData.SDStruct) + case sensorData.SDStruct != nil: + pbStruct, err := structpb.NewStruct(sensorData.SDStruct) + if err != nil { + return nil + } protoSensorData.Data = &syncPb.SensorData_Struct{ Struct: pbStruct, } - } else { + default: return nil } return protoSensorData } + func sensorContentsToProto(sensorContents []SensorData) []*syncPb.SensorData { var protoSensorContents []*syncPb.SensorData for _, item := range sensorContents { @@ -966,7 +968,7 @@ func (d *DataClient) tabularDataCaptureUpload( options *TabularOptions, ) (string, error) { if len(dataRequestTimes) != len(tabularData) { - return "", fmt.Errorf("dataRequestTimes and tabularData lengths must be equal") + return "", errors.New("dataRequestTimes and tabularData lengths must be equal") } var sensorContents []SensorData for i, tabData := range tabularData { @@ -1049,7 +1051,7 @@ func (d *DataClient) StreamingDataCaptureUpload( // establish a streaming connection. stream, err := d.dataSyncClient.StreamingDataCaptureUpload(ctx) if err != nil { - return "", fmt.Errorf("failed to establish streaming connection: %w", err) + return "", err } // send the metadata as the first packet. metaReq := &syncPb.StreamingDataCaptureUploadRequest{ @@ -1058,8 +1060,9 @@ func (d *DataClient) StreamingDataCaptureUpload( }, } if err := stream.Send(metaReq); err != nil { - return "", fmt.Errorf("failed to send metadata: %w", err) + return "", err } + // send the binary data in chunks. for start := 0; start < len(data); start += UploadChunkSize { end := start + UploadChunkSize @@ -1072,25 +1075,28 @@ func (d *DataClient) StreamingDataCaptureUpload( }, } if err := stream.Send(dataReq); err != nil { - return "", fmt.Errorf("failed to send data chunk: %w", err) + return "", err } } // close the stream and get the response. 
resp, err := stream.CloseAndRecv() if err != nil { - return "", fmt.Errorf("failed to receive response: %w", err) + return "", err } return resp.FileId, nil } -// FileUploadFromBytes uploads the contents and metadata for binary data such as encoded images or other data represented by bytes +// FileUploadFromBytes uploads the contents and metadata for binary data such as encoded images or other data represented by bytes. func (d *DataClient) FileUploadFromBytes( ctx context.Context, partID string, data []byte, opts *FileUploadOptions, ) (string, error) { - methodParams, _ := protoutils.ConvertMapToProtoAny(opts.MethodParameters) + methodParams, err := protoutils.ConvertMapToProtoAny(opts.MethodParameters) + if err != nil { + return "", err + } metadata := &syncPb.UploadMetadata{ PartId: partID, ComponentType: opts.ComponentType, @@ -1108,17 +1114,19 @@ func (d *DataClient) FileUploadFromBytes( metadata.FileExtension = opts.FileExtension } return d.fileUploadStreamResp(metadata, data) - } -// FileUploadFromPath uploads the contents and metadata for binary data created from a filepath +// FileUploadFromPath uploads the contents and metadata for binary data created from a filepath. 
func (d *DataClient) FileUploadFromPath( ctx context.Context, partID string, - FilePath string, + filePath string, opts *FileUploadOptions, ) (string, error) { - methodParams, _ := protoutils.ConvertMapToProtoAny(opts.MethodParameters) + methodParams, err := protoutils.ConvertMapToProtoAny(opts.MethodParameters) + if err != nil { + return "", err + } metadata := &syncPb.UploadMetadata{ PartId: partID, ComponentType: opts.ComponentType, @@ -1129,9 +1137,9 @@ func (d *DataClient) FileUploadFromPath( Tags: opts.Tags, } if opts.FileName == "" { - if FilePath != "" { - metadata.FileName = filepath.Base(FilePath) - metadata.FileExtension = filepath.Ext(FilePath) + if filePath != "" { + metadata.FileName = filepath.Base(filePath) + metadata.FileExtension = filepath.Ext(filePath) } else { metadata.FileName = time.Now().String() } @@ -1141,21 +1149,22 @@ func (d *DataClient) FileUploadFromPath( } var data []byte // Prepare file data from filepath - if FilePath != "" { - fileData, err := os.ReadFile(FilePath) + if filePath != "" { + //nolint:gosec + fileData, err := os.ReadFile(filePath) if err != nil { return "", err } data = fileData } return d.fileUploadStreamResp(metadata, data) - } + func (d *DataClient) fileUploadStreamResp(metadata *syncPb.UploadMetadata, data []byte) (string, error) { // establish a streaming connection. stream, err := d.dataSyncClient.FileUpload(context.Background()) if err != nil { - return "", fmt.Errorf("failed to establish streaming connection: %w", err) + return "", err } // send the metadata as the first packet. 
metaReq := &syncPb.FileUploadRequest{ @@ -1180,13 +1189,13 @@ func (d *DataClient) fileUploadStreamResp(metadata *syncPb.UploadMetadata, data }, } if err := stream.Send(dataReq); err != nil { - return "", fmt.Errorf("failed to send file data chunk: %w", err) + return "", err } } // close stream and get response resp, err := stream.CloseAndRecv() if err != nil { - return "", fmt.Errorf("failed to receive response: %w", err) + return "", err } return resp.FileId, nil diff --git a/app/data_client_test.go b/app/data_client_test.go index 57c5157b99e..7ebb32bd46f 100644 --- a/app/data_client_test.go +++ b/app/data_client_test.go @@ -9,7 +9,6 @@ import ( "go.mongodb.org/mongo-driver/bson" pb "go.viam.com/api/app/data/v1" syncPb "go.viam.com/api/app/datasync/v1" - "go.viam.com/test" utils "go.viam.com/utils/protoutils" "google.golang.org/grpc" @@ -206,6 +205,7 @@ func dataRequestToProto(dataRequest DataRequest) *pb.DataRequest { func createGrpcClient() *inject.DataServiceClient { return &inject.DataServiceClient{} } + func createGrpcDataSyncClient() *inject.DataSyncServiceClient { return &inject.DataSyncServiceClient{} } @@ -610,6 +610,7 @@ func TestDataClient(t *testing.T) { client.RemoveBinaryDataFromDatasetByIDs(context.Background(), binaryIDs, datasetID) }) } + func TestDataSyncClient(t *testing.T) { grpcClient := createGrpcDataSyncClient() client := DataClient{dataSyncClient: grpcClient} @@ -659,7 +660,9 @@ func TestDataSyncClient(t *testing.T) { FileId: fileID, }, nil } - resp, _ := client.BinaryDataCaptureUpload(context.Background(), binaryDataByte, partID, componentType, componentName, method, fileExt, &options) + resp, _ := client.BinaryDataCaptureUpload(context.Background(), + binaryDataByte, partID, componentType, componentName, + method, fileExt, &options) test.That(t, resp, test.ShouldResemble, fileID) }) @@ -707,7 +710,9 @@ func TestDataSyncClient(t *testing.T) { dataRequestTimes := [][2]time.Time{ {startTime, endTime}, } - resp, _ := 
client.tabularDataCaptureUpload(context.Background(), tabularData, partID, componentType, componentName, method, dataRequestTimes, &options) + resp, _ := client.tabularDataCaptureUpload(context.Background(), + tabularData, partID, componentType, componentName, method, + dataRequestTimes, &options) test.That(t, resp, test.ShouldResemble, fileID) }) @@ -723,7 +728,7 @@ func TestDataSyncClient(t *testing.T) { DataRequestTimes: dataRequestTimes, } // Mock implementation of the streaming client. - mockStream := &inject.DataSyncService_StreamingDataCaptureUploadClient{ + mockStream := &inject.DataSyncServiceStreamingDataCaptureUploadClient{ SendFunc: func(req *syncPb.StreamingDataCaptureUploadRequest) error { switch packet := req.UploadPacket.(type) { case *syncPb.StreamingDataCaptureUploadRequest_Metadata: @@ -769,7 +774,8 @@ func TestDataSyncClient(t *testing.T) { Tags: tags, } // Mock implementation of the streaming client. - mockStream := &inject.DataSyncService_FileUploadClient{ + //nolint:dupl + mockStream := &inject.DataSyncServiceFileUploadClient{ SendFunc: func(req *syncPb.FileUploadRequest) error { switch packet := req.UploadPacket.(type) { case *syncPb.FileUploadRequest_Metadata: @@ -821,14 +827,13 @@ func TestDataSyncClient(t *testing.T) { tempContent := []byte("test file content") tempFile, err := os.CreateTemp("", "test-upload-*.txt") test.That(t, err, test.ShouldBeNil) - defer os.Remove(tempFile.Name()) // Clean up after test - + defer os.Remove(tempFile.Name()) // Mock implementation of the streaming client. - mockStream := &inject.DataSyncService_FileUploadClient{ + //nolint:dupl + mockStream := &inject.DataSyncServiceFileUploadClient{ SendFunc: func(req *syncPb.FileUploadRequest) error { switch packet := req.UploadPacket.(type) { case *syncPb.FileUploadRequest_Metadata: - // Validate metadata packet. 
methodParams, _ := protoutils.ConvertMapToProtoAny(methodParameters) meta := packet.Metadata test.That(t, meta.PartId, test.ShouldEqual, partID) @@ -841,7 +846,6 @@ func TestDataSyncClient(t *testing.T) { test.That(t, meta.FileExtension, test.ShouldEqual, fileExt) test.That(t, meta.Tags, test.ShouldResemble, tags) case *syncPb.FileUploadRequest_FileContents: - // Validate data packet. test.That(t, packet.FileContents.Data, test.ShouldResemble, tempContent) default: t.Errorf("unexpected packet type: %T", packet) @@ -849,23 +853,18 @@ func TestDataSyncClient(t *testing.T) { return nil }, CloseAndRecvFunc: func() (*syncPb.FileUploadResponse, error) { - // Validate the final response. - //this is either the file_id of the uploaded data, or the fileid of the new file return &syncPb.FileUploadResponse{ FileId: fileID, }, nil }, } - // Replace the gRPC client with the mock. grpcClient.FileUploadFunc = func(ctx context.Context, opts ...grpc.CallOption, ) (syncPb.DataSyncService_FileUploadClient, error) { return mockStream, nil } - // Call the function being tested. resp, err := client.FileUploadFromPath(context.Background(), partID, tempFile.Name(), &options) test.That(t, err, test.ShouldBeNil) test.That(t, resp, test.ShouldEqual, fileID) - }) } diff --git a/testutils/inject/datasync_service_client.go b/testutils/inject/datasync_service_client.go index 035b3759e0d..7fccd84fb2d 100644 --- a/testutils/inject/datasync_service_client.go +++ b/testutils/inject/datasync_service_client.go @@ -7,7 +7,7 @@ import ( "google.golang.org/grpc" ) -// DataServiceClient represents a fake instance of a data service client. +// DataSyncServiceClient represents a fake instance of a data sync service client. type DataSyncServiceClient struct { datapb.DataSyncServiceClient DataCaptureUploadFunc func(ctx context.Context, in *datapb.DataCaptureUploadRequest, @@ -39,7 +39,7 @@ func (client *DataSyncServiceClient) FileUpload(ctx context.Context, return client.FileUploadFunc(ctx, opts...) 
} -// DataCaptureUpload uploads the contents and metadata for tabular data. +// StreamingDataCaptureUpload uploads the contents and metadata for streaming binary data. func (client *DataSyncServiceClient) StreamingDataCaptureUpload(ctx context.Context, opts ...grpc.CallOption, ) (datapb.DataSyncService_StreamingDataCaptureUploadClient, error) { @@ -49,47 +49,49 @@ func (client *DataSyncServiceClient) StreamingDataCaptureUpload(ctx context.Cont return client.StreamingDataCaptureUploadFunc(ctx, opts...) } -type DataSyncService_StreamingDataCaptureUploadClient struct { +// DataSyncServiceStreamingDataCaptureUploadClient represents a fake instance of +// a StreamingDataCaptureUpload client. +type DataSyncServiceStreamingDataCaptureUploadClient struct { datapb.DataSyncService_StreamingDataCaptureUploadClient SendFunc func(*datapb.StreamingDataCaptureUploadRequest) error CloseAndRecvFunc func() (*datapb.StreamingDataCaptureUploadResponse, error) - // grpc.ClientStream } -func (client *DataSyncService_StreamingDataCaptureUploadClient) Send(req *datapb.StreamingDataCaptureUploadRequest) error { +// Send sends a StreamingDataCaptureUploadRequest using the mock or actual client. +func (client *DataSyncServiceStreamingDataCaptureUploadClient) Send(req *datapb.StreamingDataCaptureUploadRequest) error { if client.SendFunc == nil { return client.DataSyncService_StreamingDataCaptureUploadClient.Send(req) } - //test that the data we send is equal to what we expect return client.SendFunc(req) } -func (client *DataSyncService_StreamingDataCaptureUploadClient) CloseAndRecv() (*datapb.StreamingDataCaptureUploadResponse, error) { +// CloseAndRecv closes the stream and receives a StreamingDataCaptureUploadResponse using the mock or actual client. 
+func (client *DataSyncServiceStreamingDataCaptureUploadClient) CloseAndRecv() (*datapb.StreamingDataCaptureUploadResponse, error) { if client.CloseAndRecvFunc == nil { return client.DataSyncService_StreamingDataCaptureUploadClient.CloseAndRecv() } return client.CloseAndRecvFunc() - } -type DataSyncService_FileUploadClient struct{ +// DataSyncServiceFileUploadClient represents a fake instance of a FileUpload client. +type DataSyncServiceFileUploadClient struct { datapb.DataSyncService_FileUploadClient - SendFunc func(*datapb.FileUploadRequest) error - CloseAndRecvFunc func() (*datapb.FileUploadResponse, error) - // grpc.ClientStream + SendFunc func(*datapb.FileUploadRequest) error + CloseAndRecvFunc func() (*datapb.FileUploadResponse, error) } -func (client *DataSyncService_FileUploadClient) Send(req *datapb.FileUploadRequest) error { + +// Send sends a FileUploadRequest using the mock or actual client. +func (client *DataSyncServiceFileUploadClient) Send(req *datapb.FileUploadRequest) error { if client.SendFunc == nil { return client.DataSyncService_FileUploadClient.Send(req) } - //test that the data we send is equal to what we expect return client.SendFunc(req) } -func (client *DataSyncService_FileUploadClient) CloseAndRecv() (*datapb.FileUploadResponse, error) { +// CloseAndRecv closes the stream and receives a FileUploadResponse using the mock or actual client. 
+func (client *DataSyncServiceFileUploadClient) CloseAndRecv() (*datapb.FileUploadResponse, error) { if client.CloseAndRecvFunc == nil { return client.DataSyncService_FileUploadClient.CloseAndRecv() } return client.CloseAndRecvFunc() - -} \ No newline at end of file +} From 179a2616125d0b1e16e0f17c996e7e28b4f9e500 Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Thu, 21 Nov 2024 18:18:50 -0500 Subject: [PATCH 23/25] last minute mimetype and annotations --- app/data_client.go | 19 +++++++++++++++++++ app/data_client_test.go | 17 ----------------- go.mod | 2 +- 3 files changed, 20 insertions(+), 18 deletions(-) diff --git a/app/data_client.go b/app/data_client.go index 78fd8baf4f2..40206537003 100644 --- a/app/data_client.go +++ b/app/data_client.go @@ -867,10 +867,29 @@ func uploadMetadataToProto(metadata UploadMetadata) *syncPb.UploadMetadata { } } +func annotationsToProto(annotations Annotations) *pb.Annotations { + var protoBboxes []*pb.BoundingBox + for _, bbox := range annotations.Bboxes { + protoBboxes = append(protoBboxes, &pb.BoundingBox{ + Id: bbox.ID, + Label: bbox.Label, + XMinNormalized: bbox.XMinNormalized, + YMinNormalized: bbox.YMinNormalized, + XMaxNormalized: bbox.XMaxNormalized, + YMaxNormalized: bbox.YMaxNormalized, + }) + } + return &pb.Annotations{ + Bboxes: protoBboxes, + } +} + func sensorMetadataToProto(metadata SensorMetadata) *syncPb.SensorMetadata { return &syncPb.SensorMetadata{ TimeRequested: timestamppb.New(metadata.TimeRequested), TimeReceived: timestamppb.New(metadata.TimeReceived), + MimeType: syncPb.MimeType(metadata.MimeType), + Annotations: annotationsToProto(metadata.Annotations), } } diff --git a/app/data_client_test.go b/app/data_client_test.go index 7ebb32bd46f..9096635392c 100644 --- a/app/data_client_test.go +++ b/app/data_client_test.go @@ -134,23 +134,6 @@ var ( } ) -func annotationsToProto(annotations Annotations) *pb.Annotations { - var protoBboxes []*pb.BoundingBox - for _, bbox := range annotations.Bboxes { - 
protoBboxes = append(protoBboxes, &pb.BoundingBox{ - Id: bbox.ID, - Label: bbox.Label, - XMinNormalized: bbox.XMinNormalized, - YMinNormalized: bbox.YMinNormalized, - XMaxNormalized: bbox.XMaxNormalized, - YMaxNormalized: bbox.YMaxNormalized, - }) - } - return &pb.Annotations{ - Bboxes: protoBboxes, - } -} - func binaryDataToProto(binaryData BinaryData) *pb.BinaryData { return &pb.BinaryData{ Binary: binaryData.Binary, diff --git a/go.mod b/go.mod index 9949c4ef90c..8fadd9e024c 100644 --- a/go.mod +++ b/go.mod @@ -78,7 +78,7 @@ require ( go.uber.org/atomic v1.11.0 go.uber.org/multierr v1.11.0 go.uber.org/zap v1.27.0 - go.viam.com/api v0.1.357 + go.viam.com/api v0.1.366 go.viam.com/test v1.2.4 go.viam.com/utils v0.1.116 goji.io v2.0.2+incompatible From 4693eb24b8f0e865b0e9b2408546f6c795d11de9 Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Thu, 21 Nov 2024 18:22:55 -0500 Subject: [PATCH 24/25] forgot to include this --- go.sum | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/go.sum b/go.sum index e28191b5e46..20003c8d7cb 100644 --- a/go.sum +++ b/go.sum @@ -1531,8 +1531,8 @@ go.uber.org/zap v1.18.1/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI= go.uber.org/zap v1.23.0/go.mod h1:D+nX8jyLsMHMYrln8A0rJjFt/T/9/bGgIhAqxv5URuY= go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= -go.viam.com/api v0.1.357 h1:L9LBYbaH0imv/B+mVxqtSgClIl4flzjLV6LclfnD9Nc= -go.viam.com/api v0.1.357/go.mod h1:5lpVRxMsKFCaahqsnJfPGwJ9baoQ6PIKQu3lxvy6Wtw= +go.viam.com/api v0.1.366 h1:lUen0W04hwdFL95GoQkYaweZO5ySG40BnUl7HHVZE3o= +go.viam.com/api v0.1.366/go.mod h1:g5eipXHNm0rQmW7DWya6avKcmzoypLmxnMlAaIsE5Ls= go.viam.com/test v1.2.4 h1:JYgZhsuGAQ8sL9jWkziAXN9VJJiKbjoi9BsO33TW3ug= go.viam.com/test v1.2.4/go.mod h1:zI2xzosHdqXAJ/kFqcN+OIF78kQuTV2nIhGZ8EzvaJI= go.viam.com/utils v0.1.116 h1:hoCj3SsV8LZAOEP75TjMeX57axhravS8rNUYmhpTWtM= From 
3309075d5e1114b14b7c4656615004c910dd31fa Mon Sep 17 00:00:00 2001 From: Julie Krasnick Date: Fri, 22 Nov 2024 09:20:43 -0500 Subject: [PATCH 25/25] ran go mod tidy --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 8fadd9e024c..75e2f385ebb 100644 --- a/go.mod +++ b/go.mod @@ -193,7 +193,7 @@ require ( github.com/dnephin/pflag v1.0.7 // indirect github.com/docker/cli v25.0.4+incompatible // indirect github.com/docker/distribution v2.8.3+incompatible // indirect - github.com/docker/docker v25.0.4+incompatible // indirect + github.com/docker/docker v25.0.6+incompatible // indirect github.com/docker/docker-credential-helpers v0.8.1 // indirect github.com/docker/go-connections v0.5.0 // indirect github.com/dustin/go-humanize v1.0.1 // indirect diff --git a/go.sum b/go.sum index 20003c8d7cb..774b3790a87 100644 --- a/go.sum +++ b/go.sum @@ -347,8 +347,8 @@ github.com/docker/cli v25.0.4+incompatible h1:DatRkJ+nrFoYL2HZUzjM5Z5sAmcA5XGp+A github.com/docker/cli v25.0.4+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk= github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= -github.com/docker/docker v25.0.4+incompatible h1:XITZTrq+52tZyZxUOtFIahUf3aH367FLxJzt9vZeAF8= -github.com/docker/docker v25.0.4+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker v25.0.6+incompatible h1:5cPwbwriIcsua2REJe8HqQV+6WlWc1byg2QSXzBxBGg= +github.com/docker/docker v25.0.6+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/docker-credential-helpers v0.8.1 h1:j/eKUktUltBtMzKqmfLB0PAgqYyMHOp5vfsD1807oKo= github.com/docker/docker-credential-helpers v0.8.1/go.mod h1:P3ci7E3lwkZg6XiHdRKft1KckHiO9a2rNtyFbZ/ry9M= github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c=