From 583a8e608fcb22610b30a53260b74f419d68c0da Mon Sep 17 00:00:00 2001 From: Nick Sanford Date: Fri, 15 Nov 2024 12:01:27 -0500 Subject: [PATCH] [DATA-3338] fix capture all from camera stability --- components/arm/collectors.go | 25 +- components/arm/collectors_test.go | 13 +- components/board/collectors.go | 30 +- components/board/collectors_test.go | 14 +- components/camera/collectors.go | 78 +- components/camera/collectors_test.go | 63 +- components/encoder/collectors.go | 13 +- components/encoder/collectors_test.go | 7 +- components/gantry/collectors.go | 25 +- components/gantry/collectors_test.go | 13 +- components/motor/collectors.go | 25 +- components/motor/collectors_test.go | 23 +- components/movementsensor/collectors.go | 106 +-- components/movementsensor/collectors_test.go | 105 +-- components/powersensor/collectors.go | 56 +- components/powersensor/collectors_test.go | 31 +- components/sensor/collectors.go | 20 +- components/sensor/collectors_test.go | 7 +- components/servo/collectors.go | 13 +- components/servo/collectors_test.go | 7 +- data/capture_buffer.go | 88 ++- data/capture_buffer_test.go | 121 ++-- data/capture_file.go | 69 +- data/capture_file_test.go | 2 +- data/collector.go | 150 ++-- data/collector_test.go | 140 ++-- data/collector_types.go | 394 ++++++++++ data/collector_types_test.go | 425 +++++++++++ data/registry.go | 16 +- go.mod | 4 +- go.sum | 8 +- .../datamanager/builtin/builtin_sync_test.go | 3 +- .../datamanager/builtin/capture/capture.go | 3 +- .../builtin/sync/exponential_retry.go | 12 +- .../builtin/sync/upload_data_capture_file.go | 302 ++++++-- .../sync/upload_data_capture_file_test.go | 672 ++++++++++++++++++ services/slam/collectors.go | 26 +- services/slam/collectors_test.go | 22 +- services/vision/collectors.go | 102 +-- services/vision/collectors_test.go | 115 +-- testutils/file_utils.go | 77 +- 41 files changed, 2591 insertions(+), 834 deletions(-) create mode 100644 data/collector_types.go create mode 100644 
data/collector_types_test.go create mode 100644 services/datamanager/builtin/sync/upload_data_capture_file_test.go diff --git a/components/arm/collectors.go b/components/arm/collectors.go index 6dd9a60c618..9c1528cb6be 100644 --- a/components/arm/collectors.go +++ b/components/arm/collectors.go @@ -5,6 +5,7 @@ package arm import ( "context" "errors" + "time" v1 "go.viam.com/api/common/v1" pb "go.viam.com/api/component/arm/v1" @@ -39,18 +40,20 @@ func newEndPositionCollector(resource interface{}, params data.CollectorParams) return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult v, err := arm.EndPosition(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, endPosition.String(), err) + return res, data.FailedToReadErr(params.ComponentName, endPosition.String(), err) } o := v.Orientation().OrientationVectorDegrees() - return pb.GetEndPositionResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetEndPositionResponse{ Pose: &v1.Pose{ X: v.Point().X, Y: v.Point().Y, @@ -60,7 +63,7 @@ func newEndPositionCollector(resource interface{}, params data.CollectorParams) OZ: o.OZ, Theta: o.Theta, }, - }, nil + }) }) return data.NewCollector(cFunc, params) } @@ -73,21 +76,23 @@ func newJointPositionsCollector(resource interface{}, params data.CollectorParam return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult v, err := arm.JointPositions(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, jointPositions.String(), err) + return res, data.FailedToReadErr(params.ComponentName, jointPositions.String(), err) } jp, err := referenceframe.JointPositionsFromInputs(arm.ModelFrame(), v) if err != nil { - return nil, data.FailedToReadErr(params.ComponentName, jointPositions.String(), err) + return res, data.FailedToReadErr(params.ComponentName, jointPositions.String(), err) } - return pb.GetJointPositionsResponse{Positions: jp}, nil + return data.NewTabularCaptureResult(timeRequested, pb.GetJointPositionsResponse{Positions: jp}) }) return data.NewCollector(cFunc, params) } diff --git a/components/arm/collectors_test.go b/components/arm/collectors_test.go index acd016fbc02..2898308975c 100644 --- a/components/arm/collectors_test.go +++ b/components/arm/collectors_test.go @@ -31,12 +31,12 @@ func TestCollectors(t *testing.T) { tests := []struct { name string collector data.CollectorConstructor - expected *datasyncpb.SensorData + expected []*datasyncpb.SensorData }{ { name: "End position collector should write a pose", collector: arm.NewEndPositionCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "pose": map[string]any{ @@ -49,27 +49,28 @@ func TestCollectors(t *testing.T) { "z": 3, }, })}, - }, + }}, }, { name: "Joint positions collector should write a list of positions", collector: arm.NewJointPositionsCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "positions": map[string]any{ "values": []any{1.0, 2.0, 3.0}, }, })}, - }, + }}, }, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { start := 
time.Now() - buf := tu.NewMockBuffer() + buf := tu.NewMockBuffer(t) params := data.CollectorParams{ + DataType: data.CaptureTypeTabular, ComponentName: componentName, Interval: captureInterval, Logger: logging.NewTestLogger(t), diff --git a/components/board/collectors.go b/components/board/collectors.go index 64a77bfdaa0..6ac74ecad97 100644 --- a/components/board/collectors.go +++ b/components/board/collectors.go @@ -2,6 +2,7 @@ package board import ( "context" + "time" "github.com/pkg/errors" pb "go.viam.com/api/component/board/v1" @@ -39,10 +40,12 @@ func newAnalogCollector(resource interface{}, params data.CollectorParams) (data return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, arg map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, arg map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult var analogValue AnalogValue if _, ok := arg[analogReaderNameKey]; !ok { - return nil, data.FailedToReadErr(params.ComponentName, analogs.String(), + return res, data.FailedToReadErr(params.ComponentName, analogs.String(), errors.New("Must supply reader_name in additional_params for analog collector")) } if reader, err := board.AnalogByName(arg[analogReaderNameKey].String()); err == nil { @@ -51,17 +54,18 @@ func newAnalogCollector(resource interface{}, params data.CollectorParams) (data // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, analogs.String(), err) + return res, data.FailedToReadErr(params.ComponentName, analogs.String(), err) } } - return pb.ReadAnalogReaderResponse{ + + return data.NewTabularCaptureResult(timeRequested, pb.ReadAnalogReaderResponse{ Value: int32(analogValue.Value), MinRange: analogValue.Min, MaxRange: analogValue.Max, StepSize: analogValue.StepSize, - }, nil + }) }) return data.NewCollector(cFunc, params) } @@ -74,10 +78,12 @@ func newGPIOCollector(resource interface{}, params data.CollectorParams) (data.C return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, arg map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, arg map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult var value bool if _, ok := arg[gpioPinNameKey]; !ok { - return nil, data.FailedToReadErr(params.ComponentName, gpios.String(), + return res, data.FailedToReadErr(params.ComponentName, gpios.String(), errors.New("Must supply pin_name in additional params for gpio collector")) } if gpio, err := board.GPIOPinByName(arg[gpioPinNameKey].String()); err == nil { @@ -86,14 +92,14 @@ func newGPIOCollector(resource interface{}, params data.CollectorParams) (data.C // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, gpios.String(), err) + return res, data.FailedToReadErr(params.ComponentName, gpios.String(), err) } } - return pb.GetGPIOResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetGPIOResponse{ High: value, - }, nil + }) }) return data.NewCollector(cFunc, params) } diff --git a/components/board/collectors_test.go b/components/board/collectors_test.go index 200ea6cd1f7..7e9f8692315 100644 --- a/components/board/collectors_test.go +++ b/components/board/collectors_test.go @@ -30,11 +30,12 @@ func TestCollectors(t *testing.T) { name string params data.CollectorParams collector data.CollectorConstructor - expected *datasyncpb.SensorData + expected []*datasyncpb.SensorData }{ { name: "Board analog collector should write an analog response", params: data.CollectorParams{ + DataType: data.CaptureTypeTabular, ComponentName: componentName, Interval: captureInterval, Logger: logging.NewTestLogger(t), @@ -43,7 +44,7 @@ func TestCollectors(t *testing.T) { }, }, collector: board.NewAnalogCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "value": 1, @@ -51,11 +52,12 @@ func TestCollectors(t *testing.T) { "max_range": 10, "step_size": float64(float32(0.1)), })}, - }, + }}, }, { name: "Board gpio collector should write a gpio response", params: data.CollectorParams{ + DataType: data.CaptureTypeTabular, ComponentName: componentName, Interval: captureInterval, Logger: logging.NewTestLogger(t), @@ -64,19 +66,19 @@ func TestCollectors(t *testing.T) { }, }, collector: board.NewGPIOCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, 
map[string]any{ "high": true, })}, - }, + }}, }, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { start := time.Now() - buf := tu.NewMockBuffer() + buf := tu.NewMockBuffer(t) tc.params.Clock = clock.New() tc.params.Target = buf diff --git a/components/camera/collectors.go b/components/camera/collectors.go index 37663e7335f..de05b5048c8 100644 --- a/components/camera/collectors.go +++ b/components/camera/collectors.go @@ -3,10 +3,10 @@ package camera import ( "bytes" "context" + "time" "github.com/pkg/errors" "go.opencensus.io/trace" - pb "go.viam.com/api/component/camera/v1" "google.golang.org/protobuf/types/known/anypb" "google.golang.org/protobuf/types/known/wrapperspb" @@ -42,7 +42,9 @@ func newNextPointCloudCollector(resource interface{}, params data.CollectorParam return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult _, span := trace.StartSpan(ctx, "camera::data::collector::CaptureFunc::NextPointCloud") defer span.End() @@ -53,9 +55,9 @@ func newNextPointCloudCollector(resource interface{}, params data.CollectorParam // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, nextPointCloud.String(), err) + return res, data.FailedToReadErr(params.ComponentName, nextPointCloud.String(), err) } var buf bytes.Buffer @@ -64,10 +66,17 @@ func newNextPointCloudCollector(resource interface{}, params data.CollectorParam buf.Grow(headerSize + v.Size()*4*4) // 4 numbers per point, each 4 bytes err = pointcloud.ToPCD(v, &buf, pointcloud.PCDBinary) if err != nil { - return nil, errors.Errorf("failed to convert returned point cloud to PCD: %v", err) + return res, errors.Errorf("failed to convert returned point cloud to PCD: %v", err) } } - return buf.Bytes(), nil + ts := data.Timestamps{ + TimeRequested: timeRequested, + TimeReceived: time.Now(), + } + return data.NewBinaryCaptureResult(ts, []data.Binary{{ + Payload: buf.Bytes(), + MimeType: data.MimeTypeApplicationPcd, + }}), nil }) return data.NewCollector(cFunc, params) } @@ -88,7 +97,9 @@ func newReadImageCollector(resource interface{}, params data.CollectorParams) (d } } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult _, span := trace.StartSpan(ctx, "camera::data::collector::CaptureFunc::ReadImage") defer span.End() @@ -99,10 +110,10 @@ func newReadImageCollector(resource interface{}, params data.CollectorParams) (d // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, readImage.String(), err) + return res, data.FailedToReadErr(params.ComponentName, readImage.String(), err) } defer func() { if release != nil { @@ -112,14 +123,26 @@ func newReadImageCollector(resource interface{}, params data.CollectorParams) (d mimeStr := new(wrapperspb.StringValue) if err := mimeType.UnmarshalTo(mimeStr); err != nil { - return nil, err + return res, err } outBytes, err := rimage.EncodeImage(ctx, img, mimeStr.Value) if err != nil { - return nil, err + return res, err + } + + actualOutMIME, _ := utils.CheckLazyMIMEType(mimeStr.Value) + // NOTE (Nick S): This is the mime type that the user described in the config, not the actual + // mime type returned from the camera.GetImage API + mimeType := data.CameraFormatToMimeType(utils.MimeTypeToFormat[actualOutMIME]) + ts := data.Timestamps{ + TimeRequested: timeRequested, + TimeReceived: time.Now(), } - return outBytes, nil + return data.NewBinaryCaptureResult(ts, []data.Binary{{ + MimeType: mimeType, + Payload: outBytes, + }}), nil }) return data.NewCollector(cFunc, params) } @@ -129,37 +152,36 @@ func newGetImagesCollector(resource interface{}, params data.CollectorParams) (d if err != nil { return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + var res data.CaptureResult _, span := trace.StartSpan(ctx, "camera::data::collector::CaptureFunc::GetImages") defer span.End() - ctx = context.WithValue(ctx, data.FromDMContextKey{}, true) resImgs, resMetadata, err := camera.Images(ctx) if err != nil { if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, getImages.String(), err) + return res, 
data.FailedToReadErr(params.ComponentName, getImages.String(), err) } - var imgsConverted []*pb.Image + var binaries []data.Binary for _, img := range resImgs { format, imgBytes, err := encodeImageFromUnderlyingType(ctx, img.Image) if err != nil { - return nil, err + return res, err } - imgPb := &pb.Image{ - SourceName: img.SourceName, - Format: format, - Image: imgBytes, - } - imgsConverted = append(imgsConverted, imgPb) + binaries = append(binaries, data.Binary{ + Payload: imgBytes, + MimeType: data.CameraFormatToMimeType(format), + }) + } + ts := data.Timestamps{ + TimeRequested: resMetadata.CapturedAt, + TimeReceived: resMetadata.CapturedAt, } - return pb.GetImagesResponse{ - ResponseMetadata: resMetadata.AsProto(), - Images: imgsConverted, - }, nil + return data.NewBinaryCaptureResult(ts, binaries), nil }) return data.NewCollector(cFunc, params) } diff --git a/components/camera/collectors_test.go b/components/camera/collectors_test.go index 60af3460585..a917aa50295 100644 --- a/components/camera/collectors_test.go +++ b/components/camera/collectors_test.go @@ -12,12 +12,10 @@ import ( "github.com/benbjohnson/clock" datasyncpb "go.viam.com/api/app/datasync/v1" - camerapb "go.viam.com/api/component/camera/v1" "go.viam.com/test" "go.viam.com/utils/artifact" "google.golang.org/protobuf/reflect/protoreflect" "google.golang.org/protobuf/types/known/anypb" - "google.golang.org/protobuf/types/known/timestamppb" "google.golang.org/protobuf/types/known/wrapperspb" "go.viam.com/rdk/components/camera" @@ -78,10 +76,6 @@ func TestCollectors(t *testing.T) { test.That(t, err, test.ShouldBeNil) viamLogoJpeg, err := io.ReadAll(base64.NewDecoder(base64.StdEncoding, bytes.NewReader(viamLogoJpegB64))) test.That(t, err, test.ShouldBeNil) - viamLogoJpegAsInts := []any{} - for _, b := range viamLogoJpeg { - viamLogoJpegAsInts = append(viamLogoJpegAsInts, int(b)) - } img := rimage.NewLazyEncodedImage(viamLogoJpeg, utils.MimeTypeJPEG) // 32 x 32 image @@ -94,59 +88,50 @@ func 
TestCollectors(t *testing.T) { var pcdBuf bytes.Buffer test.That(t, pointcloud.ToPCD(pcd, &pcdBuf, pointcloud.PCDBinary), test.ShouldBeNil) - now := time.Now() - nowPB := timestamppb.New(now) - cam := newCamera(img, img, now, pcd) + cam := newCamera(img, img, pcd) tests := []struct { name string collector data.CollectorConstructor - expected *datasyncpb.SensorData + expected []*datasyncpb.SensorData camera camera.Camera }{ { name: "ReadImage returns a non nil binary response", collector: camera.NewReadImageCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Binary{Binary: viamLogoJpeg}, - }, + }}, camera: cam, }, { name: "NextPointCloud returns a non nil binary response", collector: camera.NewNextPointCloudCollector, - expected: &datasyncpb.SensorData{ - Metadata: &datasyncpb.SensorMetadata{}, - Data: &datasyncpb.SensorData_Binary{Binary: pcdBuf.Bytes()}, - }, + expected: []*datasyncpb.SensorData{{ + Metadata: &datasyncpb.SensorMetadata{ + MimeType: datasyncpb.MimeType_MIME_TYPE_APPLICATION_PCD, + }, + Data: &datasyncpb.SensorData_Binary{Binary: pcdBuf.Bytes()}, + }}, camera: cam, }, { name: "GetImages returns a non nil tabular response", collector: camera.NewGetImagesCollector, - expected: &datasyncpb.SensorData{ - Metadata: &datasyncpb.SensorMetadata{}, - Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ - "response_metadata": map[string]any{ - "captured_at": map[string]any{ - "seconds": nowPB.Seconds, - "nanos": nowPB.Nanos, - }, + expected: []*datasyncpb.SensorData{ + { + Metadata: &datasyncpb.SensorMetadata{ + MimeType: datasyncpb.MimeType_MIME_TYPE_IMAGE_JPEG, }, - "images": []any{ - map[string]any{ - "source_name": "left", - "format": int(camerapb.Format_FORMAT_JPEG), - "image": viamLogoJpegAsInts, - }, - map[string]any{ - "source_name": "right", - "format": int(camerapb.Format_FORMAT_JPEG), - "image": viamLogoJpegAsInts, - 
}, + Data: &datasyncpb.SensorData_Binary{Binary: viamLogoJpeg}, + }, + { + Metadata: &datasyncpb.SensorMetadata{ + MimeType: datasyncpb.MimeType_MIME_TYPE_IMAGE_JPEG, }, - })}, + Data: &datasyncpb.SensorData_Binary{Binary: viamLogoJpeg}, + }, }, camera: cam, }, @@ -155,8 +140,9 @@ func TestCollectors(t *testing.T) { for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { start := time.Now() - buf := tu.NewMockBuffer() + buf := tu.NewMockBuffer(t) params := data.CollectorParams{ + DataType: data.CaptureTypeBinary, ComponentName: serviceName, Interval: captureInterval, Logger: logging.NewTestLogger(t), @@ -181,7 +167,6 @@ func TestCollectors(t *testing.T) { func newCamera( left, right image.Image, - capturedAt time.Time, pcd pointcloud.PointCloud, ) camera.Camera { v := &inject.Camera{} @@ -200,7 +185,7 @@ func newCamera( {Image: left, SourceName: "left"}, {Image: right, SourceName: "right"}, }, - resource.ResponseMetadata{CapturedAt: capturedAt}, + resource.ResponseMetadata{CapturedAt: time.Now()}, nil } diff --git a/components/encoder/collectors.go b/components/encoder/collectors.go index 9ad643bc721..dc21d5653f0 100644 --- a/components/encoder/collectors.go +++ b/components/encoder/collectors.go @@ -3,6 +3,7 @@ package encoder import ( "context" "errors" + "time" pb "go.viam.com/api/component/encoder/v1" "google.golang.org/protobuf/types/known/anypb" @@ -31,20 +32,22 @@ func newTicksCountCollector(resource interface{}, params data.CollectorParams) ( return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult v, positionType, err := encoder.Position(ctx, PositionTypeUnspecified, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. 
The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, ticksCount.String(), err) + return res, data.FailedToReadErr(params.ComponentName, ticksCount.String(), err) } - return pb.GetPositionResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetPositionResponse{ Value: float32(v), PositionType: pb.PositionType(positionType), - }, nil + }) }) return data.NewCollector(cFunc, params) } diff --git a/components/encoder/collectors_test.go b/components/encoder/collectors_test.go index d24e719a860..346ba178f9b 100644 --- a/components/encoder/collectors_test.go +++ b/components/encoder/collectors_test.go @@ -23,8 +23,9 @@ const ( func TestCollectors(t *testing.T) { start := time.Now() - buf := tu.NewMockBuffer() + buf := tu.NewMockBuffer(t) params := data.CollectorParams{ + DataType: data.CaptureTypeTabular, ComponentName: "encoder", Interval: captureInterval, Logger: logging.NewTestLogger(t), @@ -41,13 +42,13 @@ func TestCollectors(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), time.Second) defer cancel() - tu.CheckMockBufferWrites(t, ctx, start, buf.Writes, &datasyncpb.SensorData{ + tu.CheckMockBufferWrites(t, ctx, start, buf.Writes, []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "value": 1.0, "position_type": int(pb.PositionType_POSITION_TYPE_TICKS_COUNT), })}, - }) + }}) buf.Close() } diff --git a/components/gantry/collectors.go b/components/gantry/collectors.go index 149f52f5a54..ff77f6945ab 100644 --- a/components/gantry/collectors.go +++ b/components/gantry/collectors.go @@ -3,6 +3,7 @@ package gantry import ( "context" "errors" + "time" pb "go.viam.com/api/component/gantry/v1" "google.golang.org/protobuf/types/known/anypb" @@ -35,19 
+36,21 @@ func newPositionCollector(resource interface{}, params data.CollectorParams) (da return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult v, err := gantry.Position(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, position.String(), err) + return res, data.FailedToReadErr(params.ComponentName, position.String(), err) } - return pb.GetPositionResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetPositionResponse{ PositionsMm: v, - }, nil + }) }) return data.NewCollector(cFunc, params) } @@ -60,19 +63,21 @@ func newLengthsCollector(resource interface{}, params data.CollectorParams) (dat return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult v, err := gantry.Lengths(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, lengths.String(), err) + return res, data.FailedToReadErr(params.ComponentName, lengths.String(), err) } - return pb.GetLengthsResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetLengthsResponse{ LengthsMm: v, - }, nil + }) }) return data.NewCollector(cFunc, params) } diff --git a/components/gantry/collectors_test.go b/components/gantry/collectors_test.go index 1080fe7abc1..18fd5f3ec9e 100644 --- a/components/gantry/collectors_test.go +++ b/components/gantry/collectors_test.go @@ -28,35 +28,36 @@ func TestCollectors(t *testing.T) { tests := []struct { name string collector data.CollectorConstructor - expected *datasyncpb.SensorData + expected []*datasyncpb.SensorData }{ { name: "Length collector should write a lengths response", collector: gantry.NewLengthsCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "lengths_mm": []any{1000, 2000, 3000}, })}, - }, + }}, }, { name: "Position collector should write a list of positions", collector: gantry.NewPositionCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "positions_mm": []any{1000, 2000, 3000}, })}, - }, + }}, }, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { start := time.Now() - buf := tu.NewMockBuffer() + buf := tu.NewMockBuffer(t) params := data.CollectorParams{ + DataType: data.CaptureTypeTabular, ComponentName: componentName, Interval: captureInterval, Logger: logging.NewTestLogger(t), diff --git a/components/motor/collectors.go b/components/motor/collectors.go index 1d328d7b36c..89401f87d56 100644 --- 
a/components/motor/collectors.go +++ b/components/motor/collectors.go @@ -3,6 +3,7 @@ package motor import ( "context" "errors" + "time" pb "go.viam.com/api/component/motor/v1" "google.golang.org/protobuf/types/known/anypb" @@ -35,19 +36,21 @@ func newPositionCollector(resource interface{}, params data.CollectorParams) (da return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult v, err := motor.Position(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, position.String(), err) + return res, data.FailedToReadErr(params.ComponentName, position.String(), err) } - return pb.GetPositionResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetPositionResponse{ Position: v, - }, nil + }) }) return data.NewCollector(cFunc, params) } @@ -60,20 +63,22 @@ func newIsPoweredCollector(resource interface{}, params data.CollectorParams) (d return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult v, powerPct, err := motor.IsPowered(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, isPowered.String(), err) + return res, data.FailedToReadErr(params.ComponentName, isPowered.String(), err) } - return pb.IsPoweredResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.IsPoweredResponse{ IsOn: v, PowerPct: powerPct, - }, nil + }) }) return data.NewCollector(cFunc, params) } diff --git a/components/motor/collectors_test.go b/components/motor/collectors_test.go index 8b5444d8909..15bd2de53af 100644 --- a/components/motor/collectors_test.go +++ b/components/motor/collectors_test.go @@ -25,27 +25,29 @@ func TestCollectors(t *testing.T) { tests := []struct { name string collector data.CollectorConstructor - expected *datasyncpb.SensorData + expected []*datasyncpb.SensorData }{ { name: "Motor position collector should write a position response", collector: motor.NewPositionCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "position": 1.0, })}, - }, + }}, }, { name: "Motor isPowered collector should write an isPowered response", collector: motor.NewIsPoweredCollector, - expected: &datasyncpb.SensorData{ - Metadata: &datasyncpb.SensorMetadata{}, - Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ - "is_on": false, - "power_pct": 0.5, - })}, + expected: []*datasyncpb.SensorData{ + { + Metadata: &datasyncpb.SensorMetadata{}, + Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ + "is_on": false, + "power_pct": 0.5, + })}, + }, }, }, } @@ -53,8 +55,9 @@ func TestCollectors(t *testing.T) { for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { start := time.Now() - buf := tu.NewMockBuffer() + buf := tu.NewMockBuffer(t) params := data.CollectorParams{ + DataType: 
data.CaptureTypeTabular, ComponentName: componentName, Interval: captureInterval, Logger: logging.NewTestLogger(t), diff --git a/components/movementsensor/collectors.go b/components/movementsensor/collectors.go index 2d985d26a3f..5d4e814d169 100644 --- a/components/movementsensor/collectors.go +++ b/components/movementsensor/collectors.go @@ -3,13 +3,13 @@ package movementsensor import ( "context" "errors" + "time" v1 "go.viam.com/api/common/v1" pb "go.viam.com/api/component/movementsensor/v1" "google.golang.org/protobuf/types/known/anypb" "go.viam.com/rdk/data" - "go.viam.com/rdk/protoutils" "go.viam.com/rdk/spatialmath" ) @@ -53,182 +53,185 @@ func assertMovementSensor(resource interface{}) (MovementSensor, error) { return ms, nil } -// newLinearVelocityCollector returns a collector to register a linear velocity method. If one is already registered -// with the same MethodMetadata it will panic. +//nolint:dupl func newLinearVelocityCollector(resource interface{}, params data.CollectorParams) (data.Collector, error) { ms, err := assertMovementSensor(resource) if err != nil { return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult vec, err := ms.LinearVelocity(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, position.String(), err) + return res, data.FailedToReadErr(params.ComponentName, position.String(), err) } - return pb.GetLinearVelocityResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetLinearVelocityResponse{ LinearVelocity: &v1.Vector3{ X: vec.X, Y: vec.Y, Z: vec.Z, }, - }, nil + }) }) return data.NewCollector(cFunc, params) } -// newPositionCollector returns a collector to register a position method. If one is already registered -// with the same MethodMetadata it will panic. func newPositionCollector(resource interface{}, params data.CollectorParams) (data.Collector, error) { ms, err := assertMovementSensor(resource) if err != nil { return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult pos, altitude, err := ms.Position(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, linearVelocity.String(), err) + return res, data.FailedToReadErr(params.ComponentName, linearVelocity.String(), err) } var lat, lng float64 if pos != nil { lat = pos.Lat() lng = pos.Lng() } - return pb.GetPositionResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetPositionResponse{ Coordinate: &v1.GeoPoint{ Latitude: lat, Longitude: lng, }, AltitudeM: float32(altitude), - }, nil + }) }) return data.NewCollector(cFunc, params) } -// newAngularVelocityCollector returns a collector to register an angular velocity method. If one is already registered -// with the same MethodMetadata it will panic. +//nolint:dupl func newAngularVelocityCollector(resource interface{}, params data.CollectorParams) (data.Collector, error) { ms, err := assertMovementSensor(resource) if err != nil { return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult vel, err := ms.AngularVelocity(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, angularVelocity.String(), err) + return res, data.FailedToReadErr(params.ComponentName, angularVelocity.String(), err) } - return pb.GetAngularVelocityResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetAngularVelocityResponse{ AngularVelocity: &v1.Vector3{ X: vel.X, Y: vel.Y, Z: vel.Z, }, - }, nil + }) }) return data.NewCollector(cFunc, params) } -// newCompassHeadingCollector returns a collector to register a compass heading method. If one is already registered -// with the same MethodMetadata it will panic. func newCompassHeadingCollector(resource interface{}, params data.CollectorParams) (data.Collector, error) { ms, err := assertMovementSensor(resource) if err != nil { return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult heading, err := ms.CompassHeading(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, compassHeading.String(), err) + return res, data.FailedToReadErr(params.ComponentName, compassHeading.String(), err) } - return pb.GetCompassHeadingResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetCompassHeadingResponse{ Value: heading, - }, nil + }) }) return data.NewCollector(cFunc, params) } -// newLinearAccelerationCollector returns a collector to register a linear acceleration method. 
If one is already registered -// with the same MethodMetadata it will panic. +//nolint:dupl func newLinearAccelerationCollector(resource interface{}, params data.CollectorParams) (data.Collector, error) { ms, err := assertMovementSensor(resource) if err != nil { return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult accel, err := ms.LinearAcceleration(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, linearAcceleration.String(), err) + return res, data.FailedToReadErr(params.ComponentName, linearAcceleration.String(), err) } - return pb.GetLinearAccelerationResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetLinearAccelerationResponse{ LinearAcceleration: &v1.Vector3{ X: accel.X, Y: accel.Y, Z: accel.Z, }, - }, nil + }) }) return data.NewCollector(cFunc, params) } -// newOrientationCollector returns a collector to register an orientation method. If one is already registered -// with the same MethodMetadata it will panic. 
func newOrientationCollector(resource interface{}, params data.CollectorParams) (data.Collector, error) { ms, err := assertMovementSensor(resource) if err != nil { return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult orient, err := ms.Orientation(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, orientation.String(), err) + return res, data.FailedToReadErr(params.ComponentName, orientation.String(), err) } var orientVector *spatialmath.OrientationVectorDegrees if orient != nil { orientVector = orient.OrientationVectorDegrees() } - return pb.GetOrientationResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetOrientationResponse{ Orientation: &v1.Orientation{ OX: orientVector.OX, OY: orientVector.OY, OZ: orientVector.OZ, Theta: orientVector.Theta, }, - }, nil + }) }) return data.NewCollector(cFunc, params) } @@ -241,23 +244,20 @@ func newReadingsCollector(resource interface{}, params data.CollectorParams) (da return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, arg map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, arg map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult values, err := ms.Readings(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. 
The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, readings.String(), err) - } - readings, err := protoutils.ReadingGoToProto(values) - if err != nil { - return nil, err + return res, data.FailedToReadErr(params.ComponentName, readings.String(), err) } - return v1.GetReadingsResponse{ - Readings: readings, - }, nil + + return data.NewTabularCaptureResultReadings(timeRequested, values) }) return data.NewCollector(cFunc, params) } diff --git a/components/movementsensor/collectors_test.go b/components/movementsensor/collectors_test.go index f87c21ba99d..9a504367396 100644 --- a/components/movementsensor/collectors_test.go +++ b/components/movementsensor/collectors_test.go @@ -36,12 +36,12 @@ func TestCollectors(t *testing.T) { tests := []struct { name string collector data.CollectorConstructor - expected *datasyncpb.SensorData + expected []*datasyncpb.SensorData }{ { name: "Movement sensor linear velocity collector should write a velocity response", collector: movementsensor.NewLinearVelocityCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "linear_velocity": map[string]any{ @@ -50,26 +50,28 @@ func TestCollectors(t *testing.T) { "z": 3.0, }, })}, - }, + }}, }, { name: "Movement sensor position collector should write a position response", collector: movementsensor.NewPositionCollector, - expected: &datasyncpb.SensorData{ - Metadata: &datasyncpb.SensorMetadata{}, - Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ - "coordinate": map[string]any{ - "latitude": 1.0, - "longitude": 2.0, - }, - "altitude_m": 3.0, - })}, + expected: []*datasyncpb.SensorData{ + { + Metadata: 
&datasyncpb.SensorMetadata{}, + Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ + "coordinate": map[string]any{ + "latitude": 1.0, + "longitude": 2.0, + }, + "altitude_m": 3.0, + })}, + }, }, }, { name: "Movement sensor angular velocity collector should write a velocity response", collector: movementsensor.NewAngularVelocityCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "angular_velocity": map[string]any{ @@ -78,58 +80,66 @@ func TestCollectors(t *testing.T) { "z": 3.0, }, })}, - }, + }}, }, { name: "Movement sensor compass heading collector should write a heading response", collector: movementsensor.NewCompassHeadingCollector, - expected: &datasyncpb.SensorData{ - Metadata: &datasyncpb.SensorMetadata{}, - Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ - "value": 1.0, - })}, + expected: []*datasyncpb.SensorData{ + { + Metadata: &datasyncpb.SensorMetadata{}, + Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ + "value": 1.0, + })}, + }, }, }, { name: "Movement sensor linear acceleration collector should write an acceleration response", collector: movementsensor.NewLinearAccelerationCollector, - expected: &datasyncpb.SensorData{ - Metadata: &datasyncpb.SensorMetadata{}, - Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ - "linear_acceleration": map[string]any{ - "x": 1.0, - "y": 2.0, - "z": 3.0, - }, - })}, + expected: []*datasyncpb.SensorData{ + { + Metadata: &datasyncpb.SensorMetadata{}, + Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ + "linear_acceleration": map[string]any{ + "x": 1.0, + "y": 2.0, + "z": 3.0, + }, + })}, + }, }, }, { name: "Movement sensor orientation collector should write an orientation response", collector: 
movementsensor.NewOrientationCollector, - expected: &datasyncpb.SensorData{ - Metadata: &datasyncpb.SensorMetadata{}, - Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ - "orientation": map[string]any{ - "o_x": 0, - "o_y": 0, - "o_z": 1, - "theta": 0, - }, - })}, + expected: []*datasyncpb.SensorData{ + { + Metadata: &datasyncpb.SensorMetadata{}, + Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ + "orientation": map[string]any{ + "o_x": 0, + "o_y": 0, + "o_z": 1, + "theta": 0, + }, + })}, + }, }, }, { name: "Movement sensor readings collector should write a readings response", collector: movementsensor.NewReadingsCollector, - expected: &datasyncpb.SensorData{ - Metadata: &datasyncpb.SensorMetadata{}, - Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ - "readings": map[string]any{ - "reading1": false, - "reading2": "test", - }, - })}, + expected: []*datasyncpb.SensorData{ + { + Metadata: &datasyncpb.SensorMetadata{}, + Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ + "readings": map[string]any{ + "reading1": false, + "reading2": "test", + }, + })}, + }, }, }, } @@ -137,8 +147,9 @@ func TestCollectors(t *testing.T) { for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { start := time.Now() - buf := tu.NewMockBuffer() + buf := tu.NewMockBuffer(t) params := data.CollectorParams{ + DataType: data.CaptureTypeTabular, ComponentName: componentName, Interval: captureInterval, Logger: logging.NewTestLogger(t), diff --git a/components/powersensor/collectors.go b/components/powersensor/collectors.go index 89dc0badefa..aefad9e8a3e 100644 --- a/components/powersensor/collectors.go +++ b/components/powersensor/collectors.go @@ -3,13 +3,12 @@ package powersensor import ( "context" "errors" + "time" - v1 "go.viam.com/api/common/v1" pb "go.viam.com/api/component/powersensor/v1" "google.golang.org/protobuf/types/known/anypb" 
"go.viam.com/rdk/data" - "go.viam.com/rdk/protoutils" ) type method int64 @@ -51,20 +50,23 @@ func newVoltageCollector(resource interface{}, params data.CollectorParams) (dat return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult volts, isAc, err := ps.Voltage(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, voltage.String(), err) + return res, data.FailedToReadErr(params.ComponentName, voltage.String(), err) } - return pb.GetVoltageResponse{ + + return data.NewTabularCaptureResult(timeRequested, pb.GetVoltageResponse{ Volts: volts, IsAc: isAc, - }, nil + }) }) return data.NewCollector(cFunc, params) } @@ -77,20 +79,22 @@ func newCurrentCollector(resource interface{}, params data.CollectorParams) (dat return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult curr, isAc, err := ps.Current(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, current.String(), err) + return res, data.FailedToReadErr(params.ComponentName, current.String(), err) } - return pb.GetCurrentResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetCurrentResponse{ Amperes: curr, IsAc: isAc, - }, nil + }) }) return data.NewCollector(cFunc, params) } @@ -103,19 +107,21 @@ func newPowerCollector(resource interface{}, params data.CollectorParams) (data. return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult pwr, err := ps.Power(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, power.String(), err) + return res, data.FailedToReadErr(params.ComponentName, power.String(), err) } - return pb.GetPowerResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetPowerResponse{ Watts: pwr, - }, nil + }) }) return data.NewCollector(cFunc, params) } @@ -128,23 +134,19 @@ func newReadingsCollector(resource interface{}, params data.CollectorParams) (da return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, arg map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, arg map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult values, err := ps.Readings(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, readings.String(), err) - } - readings, err := protoutils.ReadingGoToProto(values) - if err != nil { - return nil, err + return res, data.FailedToReadErr(params.ComponentName, readings.String(), err) } - return v1.GetReadingsResponse{ - Readings: readings, - }, nil + return data.NewTabularCaptureResultReadings(timeRequested, values) }) return data.NewCollector(cFunc, params) } diff --git a/components/powersensor/collectors_test.go b/components/powersensor/collectors_test.go index e27aa270527..c5f31f0fefe 100644 --- a/components/powersensor/collectors_test.go +++ b/components/powersensor/collectors_test.go @@ -27,44 +27,46 @@ func TestCollectors(t *testing.T) { tests := []struct { name string collector data.CollectorConstructor - expected *datasyncpb.SensorData + expected []*datasyncpb.SensorData }{ { name: "Power sensor voltage collector should write a voltage response", collector: powersensor.NewVoltageCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "volts": 1.0, "is_ac": false, })}, - }, + }}, }, { name: "Power sensor current collector should write a current response", collector: powersensor.NewCurrentCollector, - expected: &datasyncpb.SensorData{ - Metadata: &datasyncpb.SensorMetadata{}, - Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ - "amperes": 1.0, - "is_ac": false, - })}, + expected: []*datasyncpb.SensorData{ + { + Metadata: &datasyncpb.SensorMetadata{}, + Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ + "amperes": 1.0, + "is_ac": false, + })}, + }, }, }, { name: "Power sensor power collector should write a power response", collector: powersensor.NewPowerCollector, - expected: 
&datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "watts": 1.0, })}, - }, + }}, }, { name: "Power sensor readings collector should write a readings response", collector: powersensor.NewReadingsCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "readings": map[string]any{ @@ -72,15 +74,16 @@ func TestCollectors(t *testing.T) { "reading2": "test", }, })}, - }, + }}, }, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { start := time.Now() - buf := tu.NewMockBuffer() + buf := tu.NewMockBuffer(t) params := data.CollectorParams{ + DataType: data.CaptureTypeTabular, ComponentName: componentName, Interval: captureInterval, Logger: logging.NewTestLogger(t), diff --git a/components/sensor/collectors.go b/components/sensor/collectors.go index 07f34e6880a..29a9973db5a 100644 --- a/components/sensor/collectors.go +++ b/components/sensor/collectors.go @@ -3,12 +3,11 @@ package sensor import ( "context" "errors" + "time" - pb "go.viam.com/api/common/v1" "google.golang.org/protobuf/types/known/anypb" "go.viam.com/rdk/data" - "go.viam.com/rdk/protoutils" ) type method int64 @@ -32,23 +31,20 @@ func newReadingsCollector(resource interface{}, params data.CollectorParams) (da return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, arg map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, arg map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult values, err := sensorResource.Readings(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. 
The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, readings.String(), err) + return res, data.FailedToReadErr(params.ComponentName, readings.String(), err) } - readings, err := protoutils.ReadingGoToProto(values) - if err != nil { - return nil, err - } - return pb.GetReadingsResponse{ - Readings: readings, - }, nil + + return data.NewTabularCaptureResultReadings(timeRequested, values) }) return data.NewCollector(cFunc, params) } diff --git a/components/sensor/collectors_test.go b/components/sensor/collectors_test.go index 33b6c62217b..5f07fbdf882 100644 --- a/components/sensor/collectors_test.go +++ b/components/sensor/collectors_test.go @@ -24,8 +24,9 @@ var readingMap = map[string]any{"reading1": false, "reading2": "test"} func TestCollectors(t *testing.T) { start := time.Now() - buf := tu.NewMockBuffer() + buf := tu.NewMockBuffer(t) params := data.CollectorParams{ + DataType: data.CaptureTypeTabular, ComponentName: "sensor", Interval: captureInterval, Logger: logging.NewTestLogger(t), @@ -42,7 +43,7 @@ func TestCollectors(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), time.Second) defer cancel() - tu.CheckMockBufferWrites(t, ctx, start, buf.Writes, &datasyncpb.SensorData{ + tu.CheckMockBufferWrites(t, ctx, start, buf.Writes, []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "readings": map[string]any{ @@ -50,7 +51,7 @@ func TestCollectors(t *testing.T) { "reading2": "test", }, })}, - }) + }}) buf.Close() } diff --git a/components/servo/collectors.go b/components/servo/collectors.go index b073b122fde..9c8df3d54d2 100644 --- a/components/servo/collectors.go +++ b/components/servo/collectors.go @@ -3,6 +3,7 @@ package servo import ( "context" 
"errors" + "time" pb "go.viam.com/api/component/servo/v1" "google.golang.org/protobuf/types/known/anypb" @@ -31,19 +32,21 @@ func newPositionCollector(resource interface{}, params data.CollectorParams) (da return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult pos, err := servo.Position(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, position.String(), err) + return res, data.FailedToReadErr(params.ComponentName, position.String(), err) } - return pb.GetPositionResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetPositionResponse{ PositionDeg: pos, - }, nil + }) }) return data.NewCollector(cFunc, params) } diff --git a/components/servo/collectors_test.go b/components/servo/collectors_test.go index 0a1219d75ee..0f883fd6ff0 100644 --- a/components/servo/collectors_test.go +++ b/components/servo/collectors_test.go @@ -22,8 +22,9 @@ const ( func TestCollectors(t *testing.T) { start := time.Now() - buf := tu.NewMockBuffer() + buf := tu.NewMockBuffer(t) params := data.CollectorParams{ + DataType: data.CaptureTypeTabular, ComponentName: "servo", Interval: captureInterval, Logger: logging.NewTestLogger(t), @@ -40,12 +41,12 @@ func TestCollectors(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), time.Second) defer cancel() - tu.CheckMockBufferWrites(t, ctx, start, buf.Writes, &datasyncpb.SensorData{ + tu.CheckMockBufferWrites(t, ctx, start, buf.Writes, []*datasyncpb.SensorData{{ 
Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "position_deg": 1.0, })}, - }) + }}) buf.Close() } diff --git a/data/capture_buffer.go b/data/capture_buffer.go index 93edc43025e..5853bc91ad4 100644 --- a/data/capture_buffer.go +++ b/data/capture_buffer.go @@ -3,14 +3,14 @@ package data import ( "sync" + "github.com/pkg/errors" v1 "go.viam.com/api/app/datasync/v1" ) -const captureAllFromCamera = "CaptureAllFromCamera" - // CaptureBufferedWriter is a buffered, persistent queue of SensorData. type CaptureBufferedWriter interface { - Write(item *v1.SensorData) error + WriteBinary(items []*v1.SensorData) error + WriteTabular(items []*v1.SensorData) error Flush() error Path() string } @@ -33,27 +33,57 @@ func NewCaptureBuffer(dir string, md *v1.DataCaptureMetadata, maxCaptureFileSize } } -// Write writes item onto b. Binary sensor data is written to its own file. -// Tabular data is written to disk in maxCaptureFileSize sized files. Files that -// are still being written to are indicated with the extension -// InProgressFileExt. Files that have finished being written to are indicated by -// FileExt. -func (b *CaptureBuffer) Write(item *v1.SensorData) error { +var ( + // ErrInvalidBinarySensorData is returned from WriteBinary if the sensor data is the wrong type. + ErrInvalidBinarySensorData = errors.New("CaptureBuffer.WriteBinary called with non binary sensor data") + // ErrInvalidTabularSensorData is returned from WriteTabular if the sensor data is the wrong type. + ErrInvalidTabularSensorData = errors.New("CaptureBuffer.WriteTabular called with binary sensor data") +) + +// WriteBinary writes the items to their own file. +// Files that are still being written to are indicated with the extension +// '.prog'. +// Files that have finished being written to are indicated by +// '.capture'. 
+func (b *CaptureBuffer) WriteBinary(items []*v1.SensorData) error { b.lock.Lock() defer b.lock.Unlock() - if item.GetBinary() != nil { - binFile, err := NewCaptureFile(b.Directory, b.MetaData) - if err != nil { - return err + for _, item := range items { + if !IsBinary(item) { + return ErrInvalidBinarySensorData } + } + + binFile, err := NewCaptureFile(b.Directory, b.MetaData) + if err != nil { + return err + } + for _, item := range items { if err := binFile.WriteNext(item); err != nil { return err } - if err := binFile.Close(); err != nil { - return err + } + if err := binFile.Close(); err != nil { + return err + } + return nil +} + +// WriteTabular writes +// Tabular data to disk in maxCaptureFileSize sized files. +// Files that are still being written to are indicated with the extension +// '.prog'. +// Files that have finished being written to are indicated by +// '.capture'. +func (b *CaptureBuffer) WriteTabular(items []*v1.SensorData) error { + b.lock.Lock() + defer b.lock.Unlock() + + for _, item := range items { + if IsBinary(item) { + return ErrInvalidTabularSensorData } - return nil } if b.nextFile == nil { @@ -62,10 +92,7 @@ func (b *CaptureBuffer) Write(item *v1.SensorData) error { return err } b.nextFile = nextFile - // We want to special case on "CaptureAllFromCamera" because it is sensor data that contains images - // and their corresponding annotations. We want each image and its annotations to be stored in a - // separate file. 
- } else if b.nextFile.Size() > b.maxCaptureFileSize || b.MetaData.MethodName == captureAllFromCamera { + } else if b.nextFile.Size() > b.maxCaptureFileSize { if err := b.nextFile.Close(); err != nil { return err } @@ -76,7 +103,26 @@ func (b *CaptureBuffer) Write(item *v1.SensorData) error { b.nextFile = nextFile } - return b.nextFile.WriteNext(item) + for _, item := range items { + if err := b.nextFile.WriteNext(item); err != nil { + return err + } + } + + return nil +} + +// IsBinary returns true when the *v1.SensorData is of type binary. +func IsBinary(item *v1.SensorData) bool { + if item == nil { + return false + } + switch item.Data.(type) { + case *v1.SensorData_Binary: + return true + default: + return false + } } // Flush flushes all buffered data to disk and marks any in progress file as complete. diff --git a/data/capture_buffer_test.go b/data/capture_buffer_test.go index a89f85b0c60..5bfa5050311 100644 --- a/data/capture_buffer_test.go +++ b/data/capture_buffer_test.go @@ -1,7 +1,6 @@ package data import ( - "errors" "io" "os" "path/filepath" @@ -85,16 +84,19 @@ func TestCaptureQueue(t *testing.T) { tmpDir := t.TempDir() md := &v1.DataCaptureMetadata{Type: tc.dataType} sut := NewCaptureBuffer(tmpDir, md, int64(maxFileSize)) - var pushValue *v1.SensorData - if tc.dataType == v1.DataType_DATA_TYPE_BINARY_SENSOR { - pushValue = binarySensorData - } else { - pushValue = structSensorData - } for i := 0; i < tc.pushCount; i++ { - err := sut.Write(pushValue) - test.That(t, err, test.ShouldBeNil) + switch { + case tc.dataType == CaptureTypeBinary.ToProto(): + err := sut.WriteBinary([]*v1.SensorData{binarySensorData}) + test.That(t, err, test.ShouldBeNil) + case tc.dataType == CaptureTypeTabular.ToProto(): + err := sut.WriteTabular([]*v1.SensorData{structSensorData}) + test.That(t, err, test.ShouldBeNil) + default: + t.Error("unknown data type") + t.FailNow() + } } dcFiles, inProgressFiles := getCaptureFiles(tmpDir) @@ -221,7 +223,7 @@ func 
TestCaptureBufferReader(t *testing.T) { methodParams, err := rprotoutils.ConvertStringMapToAnyPBMap(tc.additionalParams) test.That(t, err, test.ShouldBeNil) - readImageCaptureMetadata := BuildCaptureMetadata( + readImageCaptureMetadata, _ := BuildCaptureMetadata( tc.resourceName.API, tc.resourceName.ShortName(), tc.methodName, @@ -248,7 +250,7 @@ func TestCaptureBufferReader(t *testing.T) { now := time.Now() timeRequested := timestamppb.New(now.UTC()) timeReceived := timestamppb.New(now.Add(time.Millisecond).UTC()) - msg := &v1.SensorData{ + msg := []*v1.SensorData{{ Metadata: &v1.SensorMetadata{ TimeRequested: timeRequested, TimeReceived: timeReceived, @@ -256,8 +258,8 @@ func TestCaptureBufferReader(t *testing.T) { Data: &v1.SensorData_Struct{ Struct: tc.readings[0], }, - } - test.That(t, b.Write(msg), test.ShouldBeNil) + }} + test.That(t, b.WriteTabular(msg), test.ShouldBeNil) test.That(t, b.Flush(), test.ShouldBeNil) dirEntries, err := os.ReadDir(b.Path()) test.That(t, err, test.ShouldBeNil) @@ -273,7 +275,7 @@ func TestCaptureBufferReader(t *testing.T) { sd, err := cf.ReadNext() test.That(t, err, test.ShouldBeNil) - test.That(t, sd, test.ShouldResemble, msg) + test.That(t, sd, test.ShouldResemble, msg[0]) _, err = cf.ReadNext() test.That(t, err, test.ShouldBeError, io.EOF) @@ -281,7 +283,7 @@ func TestCaptureBufferReader(t *testing.T) { now = time.Now() timeRequested = timestamppb.New(now.UTC()) timeReceived = timestamppb.New(now.Add(time.Millisecond).UTC()) - msg2 := &v1.SensorData{ + msg2 := []*v1.SensorData{{ Metadata: &v1.SensorMetadata{ TimeRequested: timeRequested, TimeReceived: timeReceived, @@ -289,13 +291,13 @@ func TestCaptureBufferReader(t *testing.T) { Data: &v1.SensorData_Struct{ Struct: tc.readings[1], }, - } - test.That(t, b.Write(msg2), test.ShouldBeNil) + }} + test.That(t, b.WriteTabular(msg2), test.ShouldBeNil) now = time.Now() timeRequested = timestamppb.New(now.UTC()) timeReceived = timestamppb.New(now.Add(time.Millisecond).UTC()) - msg3 := 
&v1.SensorData{ + msg3 := []*v1.SensorData{{ Metadata: &v1.SensorMetadata{ TimeRequested: timeRequested, TimeReceived: timeReceived, @@ -303,8 +305,8 @@ func TestCaptureBufferReader(t *testing.T) { Data: &v1.SensorData_Struct{ Struct: tc.readings[2], }, - } - test.That(t, b.Write(msg3), test.ShouldBeNil) + }} + test.That(t, b.WriteTabular(msg3), test.ShouldBeNil) dirEntries2, err := os.ReadDir(b.Path()) test.That(t, err, test.ShouldBeNil) @@ -341,11 +343,11 @@ func TestCaptureBufferReader(t *testing.T) { sd2, err := cf2.ReadNext() test.That(t, err, test.ShouldBeNil) - test.That(t, sd2, test.ShouldResemble, msg2) + test.That(t, sd2, test.ShouldResemble, msg2[0]) sd3, err := cf2.ReadNext() test.That(t, err, test.ShouldBeNil) - test.That(t, sd3, test.ShouldResemble, msg3) + test.That(t, sd3, test.ShouldResemble, msg3[0]) _, err = cf2.ReadNext() test.That(t, err, test.ShouldBeError, io.EOF) @@ -426,7 +428,7 @@ func TestCaptureBufferReader(t *testing.T) { methodParams, err := rprotoutils.ConvertStringMapToAnyPBMap(tc.additionalParams) test.That(t, err, test.ShouldBeNil) - readImageCaptureMetadata := BuildCaptureMetadata( + readImageCaptureMetadata, _ := BuildCaptureMetadata( tc.resourceName.API, tc.resourceName.ShortName(), tc.methodName, @@ -456,32 +458,17 @@ func TestCaptureBufferReader(t *testing.T) { test.That(t, err, test.ShouldBeNil) test.That(t, firstDirEntries, test.ShouldBeEmpty) - // writing empty sensor data returns an error - test.That(t, b.Write(nil), test.ShouldBeError, errors.New("proto: Marshal called with nil")) - // flushing after this error occures, behaves the same as if no write had occurred // current behavior is likely a bug test.That(t, b.Flush(), test.ShouldBeNil) firstDirEntries, err = os.ReadDir(b.Path()) test.That(t, err, test.ShouldBeNil) - test.That(t, len(firstDirEntries), test.ShouldEqual, 1) - test.That(t, filepath.Ext(firstDirEntries[0].Name()), test.ShouldResemble, CompletedCaptureFileExt) - f, err := os.Open(filepath.Join(b.Path(), 
firstDirEntries[0].Name())) - test.That(t, err, test.ShouldBeNil) - defer func() { utils.UncheckedError(f.Close()) }() - - cf, err := ReadCaptureFile(f) - test.That(t, err, test.ShouldBeNil) - test.That(t, cf.ReadMetadata(), test.ShouldResemble, readImageCaptureMetadata) - - sd, err := cf.ReadNext() - test.That(t, err, test.ShouldBeError, io.EOF) - test.That(t, sd, test.ShouldBeNil) + test.That(t, len(firstDirEntries), test.ShouldEqual, 0) now := time.Now() timeRequested := timestamppb.New(now.UTC()) timeReceived := timestamppb.New(now.Add(time.Millisecond).UTC()) - msg := &v1.SensorData{ + msg := []*v1.SensorData{{ Metadata: &v1.SensorMetadata{ TimeRequested: timeRequested, TimeReceived: timeReceived, @@ -489,19 +476,13 @@ func TestCaptureBufferReader(t *testing.T) { Data: &v1.SensorData_Binary{ Binary: []byte("this is fake binary data"), }, - } - test.That(t, b.Write(msg), test.ShouldBeNil) + }} + test.That(t, b.WriteBinary(msg), test.ShouldBeNil) test.That(t, b.Flush(), test.ShouldBeNil) secondDirEntries, err := os.ReadDir(b.Path()) test.That(t, err, test.ShouldBeNil) - test.That(t, len(secondDirEntries), test.ShouldEqual, 2) - var newFileName string - for _, de := range secondDirEntries { - if de.Name() != firstDirEntries[0].Name() { - newFileName = de.Name() - break - } - } + test.That(t, len(secondDirEntries), test.ShouldEqual, 1) + newFileName := secondDirEntries[0].Name() test.That(t, newFileName, test.ShouldNotBeEmpty) test.That(t, filepath.Ext(newFileName), test.ShouldResemble, CompletedCaptureFileExt) f2, err := os.Open(filepath.Join(b.Path(), newFileName)) @@ -514,14 +495,14 @@ func TestCaptureBufferReader(t *testing.T) { sd2, err := cf2.ReadNext() test.That(t, err, test.ShouldBeNil) - test.That(t, sd2, test.ShouldResemble, msg) + test.That(t, sd2, test.ShouldResemble, msg[0]) _, err = cf2.ReadNext() test.That(t, err, test.ShouldBeError, io.EOF) timeRequested = timestamppb.New(now.UTC()) timeReceived = timestamppb.New(now.Add(time.Millisecond).UTC()) - 
msg3 := &v1.SensorData{ + msg3 := []*v1.SensorData{{ Metadata: &v1.SensorMetadata{ TimeRequested: timeRequested, TimeReceived: timeReceived, @@ -529,13 +510,13 @@ func TestCaptureBufferReader(t *testing.T) { Data: &v1.SensorData_Binary{ Binary: []byte("msg2"), }, - } + }} - test.That(t, b.Write(msg3), test.ShouldBeNil) + test.That(t, b.WriteBinary(msg3), test.ShouldBeNil) timeRequested = timestamppb.New(now.UTC()) timeReceived = timestamppb.New(now.Add(time.Millisecond).UTC()) - msg4 := &v1.SensorData{ + msg4 := []*v1.SensorData{{ Metadata: &v1.SensorMetadata{ TimeRequested: timeRequested, TimeReceived: timeReceived, @@ -543,17 +524,17 @@ func TestCaptureBufferReader(t *testing.T) { Data: &v1.SensorData_Binary{ Binary: []byte("msg3"), }, - } + }} // Every binary data written becomes a new data capture file - test.That(t, b.Write(msg4), test.ShouldBeNil) + test.That(t, b.WriteBinary(msg4), test.ShouldBeNil) test.That(t, b.Flush(), test.ShouldBeNil) thirdDirEntries, err := os.ReadDir(b.Path()) test.That(t, err, test.ShouldBeNil) - test.That(t, len(thirdDirEntries), test.ShouldEqual, 4) + test.That(t, len(thirdDirEntries), test.ShouldEqual, 3) var newFileNames []string for _, de := range thirdDirEntries { - if de.Name() != firstDirEntries[0].Name() && de.Name() != newFileName { + if de.Name() != newFileName { newFileNames = append(newFileNames, de.Name()) } } @@ -568,7 +549,7 @@ func TestCaptureBufferReader(t *testing.T) { test.That(t, cf3.ReadMetadata(), test.ShouldResemble, readImageCaptureMetadata) sd3, err := cf3.ReadNext() test.That(t, err, test.ShouldBeNil) - test.That(t, sd3, test.ShouldResemble, msg3) + test.That(t, sd3, test.ShouldResemble, msg3[0]) _, err = cf3.ReadNext() test.That(t, err, test.ShouldBeError, io.EOF) @@ -581,7 +562,7 @@ func TestCaptureBufferReader(t *testing.T) { test.That(t, cf4.ReadMetadata(), test.ShouldResemble, readImageCaptureMetadata) sd4, err := cf4.ReadNext() test.That(t, err, test.ShouldBeNil) - test.That(t, sd4, 
test.ShouldResemble, msg4) + test.That(t, sd4, test.ShouldResemble, msg4[0]) _, err = cf4.ReadNext() test.That(t, err, test.ShouldBeError, io.EOF) }) @@ -597,7 +578,7 @@ func TestCaptureBufferReader(t *testing.T) { methodParams, err := rprotoutils.ConvertStringMapToAnyPBMap(additionalParams) test.That(t, err, test.ShouldBeNil) - readImageCaptureMetadata := BuildCaptureMetadata( + readImageCaptureMetadata, _ := BuildCaptureMetadata( name.API, name.ShortName(), method, @@ -625,7 +606,7 @@ func TestCaptureBufferReader(t *testing.T) { now := time.Now() timeRequested := timestamppb.New(now.UTC()) timeReceived := timestamppb.New(now.Add(time.Millisecond).UTC()) - msg := &v1.SensorData{ + msg := []*v1.SensorData{{ Metadata: &v1.SensorMetadata{ TimeRequested: timeRequested, TimeReceived: timeReceived, @@ -633,8 +614,8 @@ func TestCaptureBufferReader(t *testing.T) { Data: &v1.SensorData_Binary{ Binary: []byte("this is a fake image"), }, - } - test.That(t, b.Write(msg), test.ShouldBeNil) + }} + test.That(t, b.WriteBinary(msg), test.ShouldBeNil) test.That(t, b.Flush(), test.ShouldBeNil) dirEntries, err := os.ReadDir(b.Path()) test.That(t, err, test.ShouldBeNil) @@ -650,17 +631,17 @@ func TestCaptureBufferReader(t *testing.T) { sd2, err := cf2.ReadNext() test.That(t, err, test.ShouldBeNil) - test.That(t, sd2, test.ShouldResemble, msg) + test.That(t, sd2, test.ShouldResemble, msg[0]) _, err = cf2.ReadNext() test.That(t, err, test.ShouldBeError, io.EOF) }) } -//nolint func getCaptureFiles(dir string) (dcFiles, progFiles []string) { _ = filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { if err != nil { + //nolint:nilerr return nil } if info.IsDir() { @@ -676,3 +657,9 @@ func getCaptureFiles(dir string) (dcFiles, progFiles []string) { }) return dcFiles, progFiles } + +func TestIsBinary(t *testing.T) { + test.That(t, IsBinary(nil), test.ShouldBeFalse) + test.That(t, IsBinary(&v1.SensorData{Data: &v1.SensorData_Struct{}}), test.ShouldBeFalse) + test.That(t, 
IsBinary(&v1.SensorData{Data: &v1.SensorData_Binary{}}), test.ShouldBeTrue) +} diff --git a/data/capture_file.go b/data/capture_file.go index 47b118e9926..9e552794299 100644 --- a/data/capture_file.go +++ b/data/capture_file.go @@ -16,7 +16,6 @@ import ( "google.golang.org/protobuf/types/known/anypb" "go.viam.com/rdk/resource" - "go.viam.com/rdk/utils" ) // TODO Data-343: Reorganize this into a more standard interface/package, and add tests. @@ -30,9 +29,10 @@ const ( CompletedCaptureFileExt = ".capture" readImage = "ReadImage" // GetImages is used for getting simultaneous images from different imagers. - GetImages = "GetImages" - nextPointCloud = "NextPointCloud" - pointCloudMap = "PointCloudMap" + GetImages = "GetImages" + nextPointCloud = "NextPointCloud" + pointCloudMap = "PointCloudMap" + captureAllFromCamera = "CaptureAllFromCamera" // Non-exhaustive list of characters to strip from file paths, since not allowed // on certain file systems. filePathReservedChars = ":" @@ -210,23 +210,23 @@ func (f *CaptureFile) Delete() error { // BuildCaptureMetadata builds a DataCaptureMetadata object and returns error if // additionalParams fails to convert to anypb map. 
func BuildCaptureMetadata( - compAPI resource.API, - compName string, + api resource.API, + name string, method string, additionalParams map[string]string, methodParams map[string]*anypb.Any, tags []string, -) *v1.DataCaptureMetadata { - dataType := getDataType(method) +) (*v1.DataCaptureMetadata, CaptureType) { + dataType := GetDataType(method) return &v1.DataCaptureMetadata{ - ComponentType: compAPI.String(), - ComponentName: compName, + ComponentType: api.String(), + ComponentName: name, MethodName: method, - Type: dataType, + Type: dataType.ToProto(), MethodParameters: methodParams, - FileExtension: GetFileExt(dataType, method, additionalParams), + FileExtension: getFileExt(dataType, method, additionalParams), Tags: tags, - } + }, dataType } // IsDataCaptureFile returns whether or not f is a data capture file. @@ -240,49 +240,6 @@ func getFileTimestampName() string { return time.Now().Format(time.RFC3339Nano) } -// TODO DATA-246: Implement this in some more robust, programmatic way. -func getDataType(methodName string) v1.DataType { - switch methodName { - case nextPointCloud, readImage, pointCloudMap, GetImages: - return v1.DataType_DATA_TYPE_BINARY_SENSOR - default: - return v1.DataType_DATA_TYPE_TABULAR_SENSOR - } -} - -// GetFileExt gets the file extension for a capture file. -func GetFileExt(dataType v1.DataType, methodName string, parameters map[string]string) string { - defaultFileExt := "" - switch dataType { - case v1.DataType_DATA_TYPE_TABULAR_SENSOR: - return ".dat" - case v1.DataType_DATA_TYPE_FILE: - return defaultFileExt - case v1.DataType_DATA_TYPE_BINARY_SENSOR: - if methodName == nextPointCloud { - return ".pcd" - } - if methodName == readImage { - // TODO: Add explicit file extensions for all mime types. 
- switch parameters["mime_type"] { - case utils.MimeTypeJPEG: - return ".jpeg" - case utils.MimeTypePNG: - return ".png" - case utils.MimeTypePCD: - return ".pcd" - default: - return defaultFileExt - } - } - case v1.DataType_DATA_TYPE_UNSPECIFIED: - return defaultFileExt - default: - return defaultFileExt - } - return defaultFileExt -} - // SensorDataFromCaptureFilePath returns all readings in the file at filePath. // NOTE: (Nick S) At time of writing this is only used in tests. func SensorDataFromCaptureFilePath(filePath string) ([]*v1.SensorData, error) { diff --git a/data/capture_file_test.go b/data/capture_file_test.go index 3de5756a88c..1f220bb5c76 100644 --- a/data/capture_file_test.go +++ b/data/capture_file_test.go @@ -125,7 +125,7 @@ func TestBuildCaptureMetadata(t *testing.T) { methodParams, err := protoutils.ConvertStringMapToAnyPBMap(tc.additionalParams) test.That(t, err, test.ShouldEqual, nil) - actualMetadata := BuildCaptureMetadata( + actualMetadata, _ := BuildCaptureMetadata( resource.APINamespaceRDK.WithComponentType(tc.componentType), tc.componentName, tc.method, diff --git a/data/collector.go b/data/collector.go index 7070141147e..b90db70c2cf 100644 --- a/data/collector.go +++ b/data/collector.go @@ -5,7 +5,6 @@ package data import ( "context" "fmt" - "reflect" "sync" "time" @@ -14,15 +13,12 @@ import ( "go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/mongo" "go.opencensus.io/trace" - v1 "go.viam.com/api/app/datasync/v1" - pb "go.viam.com/api/common/v1" "go.viam.com/utils" "go.viam.com/utils/protoutils" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" "google.golang.org/protobuf/types/known/anypb" "google.golang.org/protobuf/types/known/structpb" - "google.golang.org/protobuf/types/known/timestamppb" "go.viam.com/rdk/logging" "go.viam.com/rdk/resource" @@ -31,9 +27,6 @@ import ( // The cutoff at which if interval < cutoff, a sleep based capture func is used instead of a ticker. 
var sleepCaptureCutoff = 2 * time.Millisecond -// CaptureFunc allows the creation of simple Capturers with anonymous functions. -type CaptureFunc func(ctx context.Context, params map[string]*anypb.Any) (interface{}, error) - // FromDMContextKey is used to check whether the context is from data management. // Deprecated: use a camera.Extra with camera.NewContext instead. type FromDMContextKey struct{} @@ -50,6 +43,17 @@ var ErrNoCaptureToStore = status.Error(codes.FailedPrecondition, "no capture fro // If an error is ongoing, the frequency (in seconds) with which to suppress identical error logs. const identicalErrorLogFrequencyHz = 2 +// TabularDataBson is a denormalized sensor reading that can be +// encoded into BSON. +type TabularDataBson struct { + TimeRequested time.Time `bson:"time_requested"` + TimeReceived time.Time `bson:"time_received"` + ComponentName string `bson:"component_name"` + ComponentType string `bson:"component_type"` + MethodName string `bson:"method_name"` + Data bson.M `bson:"data"` +} + // Collector collects data to some target. type Collector interface { Close() @@ -58,9 +62,9 @@ type Collector interface { } type collector struct { - clock clock.Clock - captureResults chan *v1.SensorData + clock clock.Clock + captureResults chan CaptureResult mongoCollection *mongo.Collection componentName string componentType string @@ -78,6 +82,7 @@ type collector struct { captureFunc CaptureFunc target CaptureBufferedWriter lastLoggedErrors map[string]int64 + dataType CaptureType } // Close closes the channels backing the Collector. 
It should always be called before disposing of a Collector to avoid @@ -178,10 +183,27 @@ func (c *collector) tickerBasedCapture(started chan struct{}) { } } +func (c *collector) validateReadingType(t CaptureType) error { + switch c.dataType { + case CaptureTypeTabular: + if t != CaptureTypeTabular { + return fmt.Errorf("expected result of type CaptureTypeTabular, instead got CaptureResultType: %d", t) + } + return nil + case CaptureTypeBinary: + if t != CaptureTypeBinary { + return fmt.Errorf("expected result of type CaptureTypeBinary,instead got CaptureResultType: %d", t) + } + return nil + case CaptureTypeUnspecified: + return fmt.Errorf("unknown collector data type: %d", c.dataType) + default: + return fmt.Errorf("unknown collector data type: %d", c.dataType) + } +} + func (c *collector) getAndPushNextReading() { - timeRequested := timestamppb.New(c.clock.Now().UTC()) - reading, err := c.captureFunc(c.cancelCtx, c.params) - timeReceived := timestamppb.New(c.clock.Now().UTC()) + result, err := c.captureFunc(c.cancelCtx, c.params) if c.cancelCtx.Err() != nil { return @@ -196,56 +218,22 @@ func (c *collector) getAndPushNextReading() { return } - var msg v1.SensorData - switch v := reading.(type) { - case []byte: - msg = v1.SensorData{ - Metadata: &v1.SensorMetadata{ - TimeRequested: timeRequested, - TimeReceived: timeReceived, - }, - Data: &v1.SensorData_Binary{ - Binary: v, - }, - } - default: - // If it's not bytes, it's a struct. - var pbReading *structpb.Struct - var err error - - if reflect.TypeOf(reading) == reflect.TypeOf(pb.GetReadingsResponse{}) { - // We special-case the GetReadingsResponse because it already contains - // structpb.Values in it, and the StructToStructPb logic does not handle - // that cleanly. 
- topLevelMap := make(map[string]*structpb.Value) - topLevelMap["readings"] = structpb.NewStructValue( - &structpb.Struct{Fields: reading.(pb.GetReadingsResponse).Readings}, - ) - pbReading = &structpb.Struct{Fields: topLevelMap} - } else { - pbReading, err = protoutils.StructToStructPbIgnoreOmitEmpty(reading) - if err != nil { - c.captureErrors <- errors.Wrap(err, "error while converting reading to structpb.Struct") - return - } - } + if err := c.validateReadingType(result.Type); err != nil { + c.captureErrors <- errors.Wrap(err, "capture result invalid type") + return + } - msg = v1.SensorData{ - Metadata: &v1.SensorMetadata{ - TimeRequested: timeRequested, - TimeReceived: timeReceived, - }, - Data: &v1.SensorData_Struct{ - Struct: pbReading, - }, - } + if err := result.Validate(); err != nil { + c.captureErrors <- errors.Wrap(err, "capture result failed validation") + return } select { - // If c.captureResults is full, c.captureResults <- a can block indefinitely. This additional select block allows cancel to + // If c.captureResults is full, c.captureResults <- a can block indefinitely. + // This additional select block allows cancel to // still work when this happens. 
case <-c.cancelCtx.Done(): - case c.captureResults <- &msg: + case c.captureResults <- result: } } @@ -267,9 +255,10 @@ func NewCollector(captureFunc CaptureFunc, params CollectorParams) (Collector, e componentName: params.ComponentName, componentType: params.ComponentType, methodName: params.MethodName, - captureResults: make(chan *v1.SensorData, params.QueueSize), mongoCollection: params.MongoCollection, + captureResults: make(chan CaptureResult, params.QueueSize), captureErrors: make(chan error, params.QueueSize), + dataType: params.DataType, interval: params.Interval, params: params.MethodParams, logger: params.Logger, @@ -292,8 +281,24 @@ func (c *collector) writeCaptureResults() { case <-c.cancelCtx.Done(): return case msg := <-c.captureResults: - if err := c.target.Write(msg); err != nil { - c.logger.Error(errors.Wrap(err, fmt.Sprintf("failed to write to collector %s", c.target.Path())).Error()) + proto := msg.ToProto() + + switch msg.Type { + case CaptureTypeTabular: + if err := c.target.WriteTabular(proto); err != nil { + c.logger.Error(errors.Wrap(err, fmt.Sprintf("failed to write tabular data to prog file %s", c.target.Path())).Error()) + return + } + case CaptureTypeBinary: + if err := c.target.WriteBinary(proto); err != nil { + c.logger.Error(errors.Wrap(err, fmt.Sprintf("failed to write binary data to prog file %s", c.target.Path())).Error()) + return + } + case CaptureTypeUnspecified: + c.logger.Error(fmt.Sprintf("collector returned invalid result type: %d", msg.Type)) + return + default: + c.logger.Error(fmt.Sprintf("collector returned invalid result type: %d", msg.Type)) return } @@ -302,34 +307,19 @@ func (c *collector) writeCaptureResults() { } } -// TabularData is a denormalized sensor reading. 
-type TabularData struct { - TimeRequested time.Time `bson:"time_requested"` - TimeReceived time.Time `bson:"time_received"` - ComponentName string `bson:"component_name"` - ComponentType string `bson:"component_type"` - MethodName string `bson:"method_name"` - Data bson.M `bson:"data"` -} - // maybeWriteToMongo will write to the mongoCollection // if it is non-nil and the msg is tabular data // logs errors on failure. -func (c *collector) maybeWriteToMongo(msg *v1.SensorData) { +func (c *collector) maybeWriteToMongo(msg CaptureResult) { if c.mongoCollection == nil { return } - // DATA-3338: - // currently vision.CaptureAllFromCamera and camera.GetImages are stored in .capture files as VERY LARGE - // tabular sensor data - // That is a mistake which we are rectifying but in the meantime we don't want data captured from those methods to be synced - // to mongo - if getDataType(c.methodName) == v1.DataType_DATA_TYPE_BINARY_SENSOR || c.methodName == captureAllFromCamera { + if msg.Type != CaptureTypeTabular { return } - s := msg.GetStruct() + s := msg.TabularData.Payload if s == nil { return } @@ -340,9 +330,9 @@ func (c *collector) maybeWriteToMongo(msg *v1.SensorData) { return } - td := TabularData{ - TimeRequested: msg.Metadata.TimeRequested.AsTime(), - TimeReceived: msg.Metadata.TimeReceived.AsTime(), + td := TabularDataBson{ + TimeRequested: msg.TimeRequested, + TimeReceived: msg.TimeReceived, ComponentName: c.componentName, ComponentType: c.componentType, MethodName: c.methodName, diff --git a/data/collector_test.go b/data/collector_test.go index 9037677cede..8dafcebe44e 100644 --- a/data/collector_test.go +++ b/data/collector_test.go @@ -21,14 +21,29 @@ import ( ) var ( - structCapturer = CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + dummyTime = time.Date(2024, time.January, 10, 23, 0, 0, 0, time.UTC) + structCapturer = CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (CaptureResult, error) { return 
dummyStructReading, nil }) - binaryCapturer = CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { - return dummyBytesReading, nil + binaryCapturer = CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (CaptureResult, error) { + return CaptureResult{ + Timestamps: Timestamps{ + TimeRequested: dummyTime, + TimeReceived: dummyTime.Add(time.Second), + }, + Type: CaptureTypeBinary, + Binaries: []Binary{{Payload: dummyBytesReading}}, + }, nil }) - dummyStructReading = structReading{} - dummyStructReadingProto = dummyStructReading.toProto() + dummyStructReading = CaptureResult{ + Timestamps: Timestamps{ + TimeRequested: dummyTime, + TimeReceived: dummyTime.Add(time.Second), + }, + Type: CaptureTypeTabular, + TabularData: TabularData{dummyStructReadingProto}, + } + dummyStructReadingProto = structReading{}.toProto() dummyBytesReading = []byte("I sure am bytes") queueSize = 250 bufferSize = 4096 @@ -44,13 +59,24 @@ func TestNewCollector(t *testing.T) { // If not missing parameters, should not return an error. c2, err2 := NewCollector(nil, CollectorParams{ + DataType: CaptureTypeTabular, ComponentName: "name", Logger: logging.NewTestLogger(t), Target: NewCaptureBuffer("dir", nil, 50), }) - test.That(t, c2, test.ShouldNotBeNil) test.That(t, err2, test.ShouldBeNil) + test.That(t, c2, test.ShouldNotBeNil) + + c3, err3 := NewCollector(nil, CollectorParams{ + DataType: CaptureTypeBinary, + ComponentName: "name", + Logger: logging.NewTestLogger(t), + Target: NewCaptureBuffer("dir", nil, 50), + }) + + test.That(t, err3, test.ShouldBeNil) + test.That(t, c3, test.ShouldNotBeNil) } // Test that the Collector correctly writes the SensorData on an interval. 
@@ -73,6 +99,7 @@ func TestSuccessfulWrite(t *testing.T) { interval time.Duration expectReadings int expFiles int + datatype CaptureType }{ { name: "Ticker based struct writer.", @@ -80,6 +107,7 @@ func TestSuccessfulWrite(t *testing.T) { interval: tickerInterval, expectReadings: 2, expFiles: 1, + datatype: CaptureTypeTabular, }, { name: "Sleep based struct writer.", @@ -87,6 +115,7 @@ func TestSuccessfulWrite(t *testing.T) { interval: sleepInterval, expectReadings: 2, expFiles: 1, + datatype: CaptureTypeTabular, }, { name: "Ticker based binary writer.", @@ -94,6 +123,7 @@ func TestSuccessfulWrite(t *testing.T) { interval: tickerInterval, expectReadings: 2, expFiles: 2, + datatype: CaptureTypeBinary, }, { name: "Sleep based binary writer.", @@ -101,6 +131,7 @@ func TestSuccessfulWrite(t *testing.T) { interval: sleepInterval, expectReadings: 2, expFiles: 2, + datatype: CaptureTypeBinary, }, } @@ -109,19 +140,13 @@ func TestSuccessfulWrite(t *testing.T) { ctx, cancel := context.WithDeadline(context.Background(), time.Now().Add(time.Second)) defer cancel() tmpDir := t.TempDir() - md := v1.DataCaptureMetadata{} - tgt := NewCaptureBuffer(tmpDir, &md, 50) - test.That(t, tgt, test.ShouldNotBeNil) - wrote := make(chan struct{}) - target := &signalingBuffer{ - bw: tgt, - wrote: wrote, - } + target := newSignalingBuffer(ctx, tmpDir) mockClock := clock.NewMock() params.Interval = tc.interval params.Target = target params.Clock = mockClock + params.DataType = tc.datatype c, err := NewCollector(tc.captureFunc, params) test.That(t, err, test.ShouldBeNil) c.Collect() @@ -136,10 +161,9 @@ func TestSuccessfulWrite(t *testing.T) { select { case <-ctx.Done(): t.Fatalf("timed out waiting for data to be written") - case <-wrote: + case <-target.wrote: } } - close(wrote) // If it's a sleep based collector, we need to move the clock forward one more time after calling Close. // Otherwise, it will stay asleep indefinitely and Close will block forever. 
@@ -158,7 +182,7 @@ func TestSuccessfulWrite(t *testing.T) { return default: time.Sleep(time.Millisecond * 1) - mockClock.Add(tc.interval) + mockClock.Add(params.Interval) } } }() @@ -184,18 +208,15 @@ func TestSuccessfulWrite(t *testing.T) { func TestClose(t *testing.T) { // Set up a collector. l := logging.NewTestLogger(t) + ctx, cancel := context.WithTimeout(context.Background(), time.Second) + defer cancel() tmpDir := t.TempDir() - md := v1.DataCaptureMetadata{} - buf := NewCaptureBuffer(tmpDir, &md, 50) - wrote := make(chan struct{}) - target := &signalingBuffer{ - bw: buf, - wrote: wrote, - } mockClock := clock.NewMock() + target := newSignalingBuffer(ctx, tmpDir) interval := time.Millisecond * 5 params := CollectorParams{ + DataType: CaptureTypeTabular, ComponentName: "testComponent", Interval: interval, MethodParams: map[string]*anypb.Any{"name": fakeVal}, @@ -205,17 +226,18 @@ func TestClose(t *testing.T) { Logger: l, Clock: mockClock, } - c, _ := NewCollector(structCapturer, params) + c, err := NewCollector(structCapturer, params) + test.That(t, err, test.ShouldBeNil) // Start collecting, and validate it is writing. c.Collect() mockClock.Add(interval) - ctx, cancel := context.WithTimeout(context.Background(), time.Millisecond*10) + ctx, cancel = context.WithTimeout(context.Background(), time.Millisecond*10) defer cancel() select { case <-ctx.Done(): t.Fatalf("timed out waiting for data to be written") - case <-wrote: + case <-target.wrote: } // Close and validate no additional writes occur even after an additional interval. 
@@ -225,7 +247,7 @@ func TestClose(t *testing.T) { defer cancel() select { case <-ctx.Done(): - case <-wrote: + case <-target.wrote: t.Fatalf("unexpected write after close") } } @@ -238,10 +260,11 @@ func TestCtxCancelledNotLoggedAfterClose(t *testing.T) { tmpDir := t.TempDir() target := NewCaptureBuffer(tmpDir, &v1.DataCaptureMetadata{}, 50) captured := make(chan struct{}) - errorCapturer := CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + errorCapturer := CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (CaptureResult, error) { + var res CaptureResult select { case <-ctx.Done(): - return nil, fmt.Errorf("arbitrary wrapping message: %w", ctx.Err()) + return res, fmt.Errorf("arbitrary wrapping message: %w", ctx.Err()) case captured <- struct{}{}: } return dummyStructReading, nil @@ -249,6 +272,7 @@ func TestCtxCancelledNotLoggedAfterClose(t *testing.T) { params := CollectorParams{ ComponentName: "testComponent", + DataType: CaptureTypeTabular, Interval: time.Millisecond, MethodParams: map[string]*anypb.Any{"name": fakeVal}, Target: target, @@ -256,7 +280,8 @@ func TestCtxCancelledNotLoggedAfterClose(t *testing.T) { BufferSize: bufferSize, Logger: logger, } - c, _ := NewCollector(errorCapturer, params) + c, err := NewCollector(errorCapturer, params) + test.That(t, err, test.ShouldBeNil) c.Collect() <-captured c.Close() @@ -274,20 +299,18 @@ func TestLogErrorsOnlyOnce(t *testing.T) { // Set up a collector. 
logger, logs := logging.NewObservedTestLogger(t) tmpDir := t.TempDir() - md := v1.DataCaptureMetadata{} - buf := NewCaptureBuffer(tmpDir, &md, 50) - wrote := make(chan struct{}) - errorCapturer := CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { - return nil, errors.New("I am an error") + errorCapturer := CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (CaptureResult, error) { + return CaptureResult{}, errors.New("I am an error") }) - target := &signalingBuffer{ - bw: buf, - wrote: wrote, - } - mockClock := clock.NewMock() + ctx, cancel := context.WithTimeout(context.Background(), time.Second) + defer cancel() + target := newSignalingBuffer(ctx, tmpDir) interval := time.Millisecond * 5 + mockClock := clock.NewMock() + params := CollectorParams{ + DataType: CaptureTypeTabular, ComponentName: "testComponent", Interval: interval, MethodParams: map[string]*anypb.Any{"name": fakeVal}, @@ -297,13 +320,14 @@ func TestLogErrorsOnlyOnce(t *testing.T) { Logger: logger, Clock: mockClock, } - c, _ := NewCollector(errorCapturer, params) + c, err := NewCollector(errorCapturer, params) + test.That(t, err, test.ShouldBeNil) // Start collecting, and validate it is writing. 
c.Collect() mockClock.Add(interval * 5) - close(wrote) + // close(wrote) test.That(t, logs.FilterLevelExact(zapcore.ErrorLevel).Len(), test.ShouldEqual, 1) mockClock.Add(3 * time.Second) test.That(t, logs.FilterLevelExact(zapcore.ErrorLevel).Len(), test.ShouldEqual, 2) @@ -324,10 +348,10 @@ func validateReadings(t *testing.T, act []*v1.SensorData, n int) { } } -//nolint func getAllFiles(dir string) []os.FileInfo { var files []os.FileInfo _ = filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { + //nolint:nilerr if err != nil { return nil } @@ -340,14 +364,36 @@ func getAllFiles(dir string) []os.FileInfo { return files } +func newSignalingBuffer(ctx context.Context, path string) *signalingBuffer { + md := v1.DataCaptureMetadata{} + return &signalingBuffer{ + ctx: ctx, + bw: NewCaptureBuffer(path, &md, 50), + wrote: make(chan struct{}), + } +} + type signalingBuffer struct { + ctx context.Context bw CaptureBufferedWriter wrote chan struct{} } -func (b *signalingBuffer) Write(data *v1.SensorData) error { - ret := b.bw.Write(data) - b.wrote <- struct{}{} +func (b *signalingBuffer) WriteBinary(items []*v1.SensorData) error { + ret := b.bw.WriteBinary(items) + select { + case b.wrote <- struct{}{}: + case <-b.ctx.Done(): + } + return ret +} + +func (b *signalingBuffer) WriteTabular(items []*v1.SensorData) error { + ret := b.bw.WriteTabular(items) + select { + case b.wrote <- struct{}{}: + case <-b.ctx.Done(): + } return ret } diff --git a/data/collector_types.go b/data/collector_types.go new file mode 100644 index 00000000000..7de961e6044 --- /dev/null +++ b/data/collector_types.go @@ -0,0 +1,394 @@ +package data + +import ( + "context" + "fmt" + "time" + + "github.com/pkg/errors" + dataPB "go.viam.com/api/app/data/v1" + datasyncPB "go.viam.com/api/app/datasync/v1" + camerapb "go.viam.com/api/component/camera/v1" + "go.viam.com/utils/protoutils" + "google.golang.org/protobuf/types/known/anypb" + "google.golang.org/protobuf/types/known/structpb" + 
"google.golang.org/protobuf/types/known/timestamppb" + + rprotoutils "go.viam.com/rdk/protoutils" + rutils "go.viam.com/rdk/utils" +) + +// CaptureFunc allows the creation of simple Capturers with anonymous functions. +type CaptureFunc func(ctx context.Context, params map[string]*anypb.Any) (CaptureResult, error) + +// CaptureResult is the result of a capture function. +type CaptureResult struct { + // Type represents the type of result (binary or tabular) + Type CaptureType + // Timestamps contain the time the data was requested and received + Timestamps + // TabularData contains the tabular data payload when Type == CaptureResultTypeTabular + TabularData TabularData + // Binaries contains binary data responses when Type == CaptureResultTypeBinary + Binaries []Binary +} + +// BEGIN CONSTRUCTORS + +// NewBinaryCaptureResult returns a binary capture result. +func NewBinaryCaptureResult(ts Timestamps, binaries []Binary) CaptureResult { + return CaptureResult{ + Timestamps: ts, + Type: CaptureTypeBinary, + Binaries: binaries, + } +} + +// NewTabularCaptureResultReadings returns a tabular readings result. +func NewTabularCaptureResultReadings(reqT time.Time, readings map[string]interface{}) (CaptureResult, error) { + var res CaptureResult + values, err := rprotoutils.ReadingGoToProto(readings) + if err != nil { + return res, err + } + + return CaptureResult{ + Timestamps: Timestamps{ + TimeRequested: reqT, + TimeReceived: time.Now(), + }, + Type: CaptureTypeTabular, + TabularData: TabularData{ + Payload: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "readings": structpb.NewStructValue(&structpb.Struct{Fields: values}), + }, + }, + }, + }, nil +} + +// NewTabularCaptureResult returns a tabular result. 
+func NewTabularCaptureResult(reqT time.Time, i interface{}) (CaptureResult, error) { + var res CaptureResult + readings, err := protoutils.StructToStructPbIgnoreOmitEmpty(i) + if err != nil { + return res, err + } + + return CaptureResult{ + Timestamps: Timestamps{ + TimeRequested: reqT, + TimeReceived: time.Now(), + }, + Type: CaptureTypeTabular, + TabularData: TabularData{ + Payload: readings, + }, + }, nil +} + +// END CONSTRUCTORS + +// ToProto converts a CaptureResult into a []*datasyncPB.SensorData{}. +func (cr *CaptureResult) ToProto() []*datasyncPB.SensorData { + ts := cr.Timestamps + if td := cr.TabularData.Payload; td != nil { + return []*datasyncPB.SensorData{{ + Metadata: &datasyncPB.SensorMetadata{ + TimeRequested: timestamppb.New(ts.TimeRequested.UTC()), + TimeReceived: timestamppb.New(ts.TimeReceived.UTC()), + }, + Data: &datasyncPB.SensorData_Struct{ + Struct: td, + }, + }} + } + + var sd []*datasyncPB.SensorData + for _, b := range cr.Binaries { + sd = append(sd, &datasyncPB.SensorData{ + Metadata: &datasyncPB.SensorMetadata{ + TimeRequested: timestamppb.New(ts.TimeRequested.UTC()), + TimeReceived: timestamppb.New(ts.TimeReceived.UTC()), + MimeType: b.MimeType.ToProto(), + Annotations: b.Annotations.ToProto(), + }, + Data: &datasyncPB.SensorData_Binary{ + Binary: b.Payload, + }, + }) + } + return sd +} + +// Validate returns an error if the *CaptureResult is invalid. 
+func (cr *CaptureResult) Validate() error {
+	var ts Timestamps
+	if cr.Timestamps.TimeRequested == ts.TimeRequested {
+		return errors.New("Timestamps.TimeRequested must be set")
+	}
+
+	if cr.Timestamps.TimeReceived == ts.TimeReceived {
+		return errors.New("Timestamps.TimeReceived must be set")
+	}
+
+	switch cr.Type {
+	case CaptureTypeTabular:
+		if len(cr.Binaries) > 0 {
+			return errors.New("tabular result can't contain binary data")
+		}
+		if cr.TabularData.Payload == nil {
+			return errors.New("tabular result must have non empty tabular data")
+		}
+		return nil
+	case CaptureTypeBinary:
+		if cr.TabularData.Payload != nil {
+			return errors.New("binary result can't contain tabular data")
+		}
+		if len(cr.Binaries) == 0 {
+			return errors.New("binary result must have non empty binary data")
+		}
+
+		for _, b := range cr.Binaries {
+			if len(b.Payload) == 0 {
+				return errors.New("binary results can't have empty binary payload")
+			}
+		}
+		return nil
+	case CaptureTypeUnspecified:
+		return fmt.Errorf("unknown CaptureResultType: %d", cr.Type)
+	default:
+		return fmt.Errorf("unknown CaptureResultType: %d", cr.Type)
+	}
+}
+
+// CaptureType represents captured tabular or binary data.
+type CaptureType int
+
+const (
+	// CaptureTypeUnspecified represents that the data type of the captured data was not specified.
+	CaptureTypeUnspecified CaptureType = iota
+	// CaptureTypeTabular represents that the data type of the captured data is tabular.
+	CaptureTypeTabular
+	// CaptureTypeBinary represents that the data type of the captured data is binary.
+	CaptureTypeBinary
+)
+
+// ToProto converts a CaptureType into a datasyncPB.DataType.
+func (dt CaptureType) ToProto() datasyncPB.DataType {
+	switch dt {
+	case CaptureTypeTabular:
+		return datasyncPB.DataType_DATA_TYPE_TABULAR_SENSOR
+	case CaptureTypeBinary:
+		return datasyncPB.DataType_DATA_TYPE_BINARY_SENSOR
+	case CaptureTypeUnspecified:
+		return datasyncPB.DataType_DATA_TYPE_UNSPECIFIED
+	default:
+		return datasyncPB.DataType_DATA_TYPE_UNSPECIFIED
+	}
+}
+
+// GetDataType returns the DataType of the method.
+func GetDataType(methodName string) CaptureType {
+	switch methodName {
+	case nextPointCloud, readImage, pointCloudMap, GetImages, captureAllFromCamera:
+		return CaptureTypeBinary
+	default:
+		return CaptureTypeTabular
+	}
+}
+
+// Timestamps are the timestamps the data was captured.
+type Timestamps struct {
+	// TimeRequested represents the time the request for the data was started
+	TimeRequested time.Time
+	// TimeReceived represents the time the response for the request for the data
+	// was received
+	TimeReceived time.Time
+}
+
+// MimeType represents the mime type of the sensor data.
+type MimeType int
+
+// This follows the mime types supported in
+// https://github.com/viamrobotics/api/pull/571/files#diff-b77927298d8d5d5228beeea47bd0860d9b322b4f3ef45e129bc238ec17704826R75
+const (
+	// MimeTypeUnspecified means that the mime type was not specified.
+	MimeTypeUnspecified MimeType = iota
+	// MimeTypeImageJpeg means that the mime type is jpeg.
+	MimeTypeImageJpeg
+	// MimeTypeImagePng means that the mime type is png.
+	MimeTypeImagePng
+	// MimeTypeApplicationPcd means that the mime type is pcd.
+	MimeTypeApplicationPcd
+)
+
+// ToProto converts MimeType to datasyncPB.
+func (mt MimeType) ToProto() datasyncPB.MimeType { + switch mt { + case MimeTypeUnspecified: + return datasyncPB.MimeType_MIME_TYPE_UNSPECIFIED + case MimeTypeImageJpeg: + return datasyncPB.MimeType_MIME_TYPE_IMAGE_JPEG + case MimeTypeImagePng: + return datasyncPB.MimeType_MIME_TYPE_IMAGE_PNG + case MimeTypeApplicationPcd: + return datasyncPB.MimeType_MIME_TYPE_APPLICATION_PCD + default: + return datasyncPB.MimeType_MIME_TYPE_UNSPECIFIED + } +} + +// MimeTypeFromProto converts a datasyncPB.MimeType to a data.MimeType. +func MimeTypeFromProto(mt datasyncPB.MimeType) MimeType { + switch mt { + case datasyncPB.MimeType_MIME_TYPE_UNSPECIFIED: + return MimeTypeUnspecified + case datasyncPB.MimeType_MIME_TYPE_IMAGE_JPEG: + return MimeTypeImageJpeg + case datasyncPB.MimeType_MIME_TYPE_IMAGE_PNG: + return MimeTypeImagePng + case datasyncPB.MimeType_MIME_TYPE_APPLICATION_PCD: + return MimeTypeApplicationPcd + default: + return MimeTypeUnspecified + } +} + +// CameraFormatToMimeType converts a camera camerapb.Format into a MimeType. +func CameraFormatToMimeType(f camerapb.Format) MimeType { + switch f { + case camerapb.Format_FORMAT_UNSPECIFIED: + return MimeTypeUnspecified + case camerapb.Format_FORMAT_JPEG: + return MimeTypeImageJpeg + case camerapb.Format_FORMAT_PNG: + return MimeTypeImagePng + case camerapb.Format_FORMAT_RAW_RGBA: + fallthrough + case camerapb.Format_FORMAT_RAW_DEPTH: + fallthrough + default: + return MimeTypeUnspecified + } +} + +// MimeTypeToCameraFormat converts a data.MimeType into a camerapb.Format. +func MimeTypeToCameraFormat(mt MimeType) camerapb.Format { + if mt == MimeTypeImageJpeg { + return camerapb.Format_FORMAT_JPEG + } + + if mt == MimeTypeImagePng { + return camerapb.Format_FORMAT_PNG + } + return camerapb.Format_FORMAT_UNSPECIFIED +} + +// Binary represents an element of a binary capture result response. 
+type Binary struct { + // Payload contains the binary payload + Payload []byte + // MimeType descibes the payload's MimeType + MimeType MimeType + // Annotations provide metadata about the Payload + Annotations Annotations +} + +// TabularData contains a tabular data payload. +type TabularData struct { + Payload *structpb.Struct +} + +// BoundingBox represents a labeled bounding box +// with an optional confidence interval between 0 and 1. +type BoundingBox struct { + Label string + Confidence *float64 + XMinNormalized float64 + YMinNormalized float64 + XMaxNormalized float64 + YMaxNormalized float64 +} + +// Classification represents a labeled classification +// with an optional confidence interval between 0 and 1. +type Classification struct { + Label string + Confidence *float64 +} + +// Annotations represents ML classifications. +type Annotations struct { + BoundingBoxes []BoundingBox + Classifications []Classification +} + +// Empty returns true when Annotations are empty. +func (mt Annotations) Empty() bool { + return len(mt.BoundingBoxes) == 0 && len(mt.Classifications) == 0 +} + +// ToProto converts Annotations to *dataPB.Annotations. +func (mt Annotations) ToProto() *dataPB.Annotations { + if mt.Empty() { + return nil + } + + var bboxes []*dataPB.BoundingBox + for _, bb := range mt.BoundingBoxes { + bboxes = append(bboxes, &dataPB.BoundingBox{ + Label: bb.Label, + Confidence: bb.Confidence, + XMinNormalized: bb.XMinNormalized, + XMaxNormalized: bb.XMaxNormalized, + YMinNormalized: bb.YMinNormalized, + YMaxNormalized: bb.YMaxNormalized, + }) + } + + var classifications []*dataPB.Classification + for _, c := range mt.Classifications { + classifications = append(classifications, &dataPB.Classification{ + Label: c.Label, + Confidence: c.Confidence, + }) + } + + return &dataPB.Annotations{ + Bboxes: bboxes, + Classifications: classifications, + } +} + +// getFileExt gets the file extension for a capture file. 
+func getFileExt(dataType CaptureType, methodName string, parameters map[string]string) string { + defaultFileExt := "" + switch dataType { + case CaptureTypeTabular: + return ".dat" + case CaptureTypeBinary: + if methodName == nextPointCloud { + return ".pcd" + } + if methodName == readImage { + // TODO: Add explicit file extensions for all mime types. + switch parameters["mime_type"] { + case rutils.MimeTypeJPEG: + return ".jpeg" + case rutils.MimeTypePNG: + return ".png" + case rutils.MimeTypePCD: + return ".pcd" + default: + return defaultFileExt + } + } + case CaptureTypeUnspecified: + return defaultFileExt + default: + return defaultFileExt + } + return defaultFileExt +} diff --git a/data/collector_types_test.go b/data/collector_types_test.go new file mode 100644 index 00000000000..f1d164c7f1e --- /dev/null +++ b/data/collector_types_test.go @@ -0,0 +1,425 @@ +package data + +import ( + "errors" + "testing" + "time" + + v1 "go.viam.com/api/app/data/v1" + datasyncPB "go.viam.com/api/app/datasync/v1" + commonPB "go.viam.com/api/common/v1" + armPB "go.viam.com/api/component/arm/v1" + cameraPB "go.viam.com/api/component/camera/v1" + "go.viam.com/test" + "google.golang.org/protobuf/types/known/structpb" + "google.golang.org/protobuf/types/known/timestamppb" + + tu "go.viam.com/rdk/testutils" + rutils "go.viam.com/rdk/utils" +) + +func TestNewBinaryCaptureResult(t *testing.T) { + timeRequested := time.Now() + timeReceived := time.Now() + ts := Timestamps{TimeRequested: timeRequested, TimeReceived: timeReceived} + type testCase struct { + input CaptureResult + output CaptureResult + validateErr error + } + confidence := 0.1 + emptyBinaries := []Binary{} + singleSimpleBinaries := []Binary{{Payload: []byte("hi there")}} + singleSimpleBinariesWithMimeType := []Binary{ + { + Payload: []byte("hi there"), + MimeType: MimeTypeImageJpeg, + }, + } + singleComplexBinaries := []Binary{ + { + Payload: []byte("hi there"), + MimeType: MimeTypeImageJpeg, + Annotations: 
Annotations{ + Classifications: []Classification{ + {Label: "no-confidence"}, + {Label: "confidence", Confidence: &confidence}, + }, + BoundingBoxes: []BoundingBox{ + { + Label: "no-confidence", + XMinNormalized: 1, + XMaxNormalized: 2, + YMinNormalized: 3, + YMaxNormalized: 4, + }, + { + Label: "confidence", + Confidence: &confidence, + XMinNormalized: 5, + XMaxNormalized: 6, + YMinNormalized: 7, + YMaxNormalized: 8, + }, + }, + }, + }, + { + Payload: []byte("hi too am here here"), + MimeType: MimeTypeImageJpeg, + Annotations: Annotations{ + Classifications: []Classification{ + {Label: "something completely different"}, + }, + }, + }, + } + + multipleComplexBinaries := []Binary{ + { + Payload: []byte("hi there"), + MimeType: MimeTypeImageJpeg, + Annotations: Annotations{ + Classifications: []Classification{ + {Label: "no-confidence"}, + {Label: "confidence", Confidence: &confidence}, + }, + BoundingBoxes: []BoundingBox{ + { + Label: "no-confidence", + XMinNormalized: 1, + XMaxNormalized: 2, + YMinNormalized: 3, + YMaxNormalized: 4, + }, + { + Label: "confidence", + Confidence: &confidence, + XMinNormalized: 5, + XMaxNormalized: 6, + YMinNormalized: 7, + YMaxNormalized: 8, + }, + }, + }, + }, + { + Payload: []byte("hi too am here here"), + MimeType: MimeTypeImageJpeg, + Annotations: Annotations{ + Classifications: []Classification{ + {Label: "something completely different"}, + }, + }, + }, + } + tcs := []testCase{ + { + input: NewBinaryCaptureResult(ts, nil), + output: CaptureResult{Type: CaptureTypeBinary, Timestamps: ts}, + validateErr: errors.New("binary result must have non empty binary data"), + }, + { + input: NewBinaryCaptureResult(ts, emptyBinaries), + output: CaptureResult{ + Type: CaptureTypeBinary, + Timestamps: ts, + Binaries: emptyBinaries, + }, + validateErr: errors.New("binary result must have non empty binary data"), + }, + { + input: NewBinaryCaptureResult(ts, singleSimpleBinaries), + output: CaptureResult{ + Type: CaptureTypeBinary, + Timestamps: 
ts, + Binaries: singleSimpleBinaries, + }, + }, + { + input: NewBinaryCaptureResult(ts, singleSimpleBinariesWithMimeType), + output: CaptureResult{ + Type: CaptureTypeBinary, + Timestamps: ts, + Binaries: singleSimpleBinariesWithMimeType, + }, + }, + { + input: NewBinaryCaptureResult(ts, singleComplexBinaries), + output: CaptureResult{ + Type: CaptureTypeBinary, + Timestamps: ts, + Binaries: singleComplexBinaries, + }, + }, + { + input: NewBinaryCaptureResult(ts, multipleComplexBinaries), + output: CaptureResult{ + Type: CaptureTypeBinary, + Timestamps: ts, + Binaries: multipleComplexBinaries, + }, + }, + } + for i, tc := range tcs { + t.Logf("index: %d", i) + + // confirm input resembles output + test.That(t, tc.input, test.ShouldResemble, tc.output) + + // confirm input conforms to validation expectations + if tc.validateErr != nil { + test.That(t, tc.input.Validate(), test.ShouldBeError, tc.validateErr) + continue + } + test.That(t, tc.input.Validate(), test.ShouldBeNil) + + // confirm input conforms to ToProto expectations + proto := tc.input.ToProto() + test.That(t, len(proto), test.ShouldEqual, len(tc.input.Binaries)) + for j := range tc.input.Binaries { + test.That(t, proto[j].Metadata, test.ShouldResemble, &datasyncPB.SensorMetadata{ + TimeRequested: timestamppb.New(timeRequested.UTC()), + TimeReceived: timestamppb.New(timeReceived.UTC()), + MimeType: tc.input.Binaries[j].MimeType.ToProto(), + Annotations: tc.input.Binaries[j].Annotations.ToProto(), + }) + + test.That(t, proto[j].Data, test.ShouldResemble, &datasyncPB.SensorData_Binary{ + Binary: tc.input.Binaries[j].Payload, + }) + } + } +} + +func TestNewTabularCaptureResultReadings(t *testing.T) { + now := time.Now() + type testCase struct { + input map[string]interface{} + output *structpb.Struct + err error + } + firstReading := map[string]any{ + "hi": 1, + "there": 1.2, + "friend": []any{ + map[string]any{ + "weird": "stuff", + "even": "stranger", + }, + 1, + true, + "20 mickey mouse", + []any{3.3, 
9.9}, + []byte{1, 2, 3}, + }, + } + tcs := []testCase{ + { + input: nil, + output: tu.ToStructPBStruct(t, map[string]any{"readings": map[string]any{}}), + }, + { + input: firstReading, + output: tu.ToStructPBStruct(t, map[string]any{"readings": firstReading}), + }, + { + input: map[string]any{"invalid_type": []float64{3.3, 9.9}}, + err: errors.New("proto: invalid type: []float64"), + }, + } + + for i, tc := range tcs { + t.Logf("index: %d", i) + res, err := NewTabularCaptureResultReadings(now, tc.input) + if tc.err != nil { + test.That(t, err, test.ShouldBeError, tc.err) + continue + } + + test.That(t, err, test.ShouldBeNil) + verifyStruct(t, res, now, tc.output) + } +} + +func TestNewTabularCaptureResult(t *testing.T) { + now := time.Now() + type testCase struct { + input any + output *structpb.Struct + err error + } + tcs := []testCase{ + { + input: nil, + err: errors.New("unable to convert interface to a form acceptable to structpb.NewStruct: no data passed in"), + }, + { + input: armPB.GetEndPositionResponse{Pose: &commonPB.Pose{X: 1, Y: 2, Z: 3, OX: 4, OY: 5, OZ: 6, Theta: 7}}, + output: tu.ToStructPBStruct(t, map[string]any{"pose": map[string]any{ + "x": 1, + "y": 2, + "z": 3, + "o_x": 4, + "o_y": 5, + "o_z": 6, + "theta": 7, + }}), + }, + { + input: &armPB.GetEndPositionResponse{Pose: &commonPB.Pose{X: 1, Y: 2, Z: 3, OX: 4, OY: 5, OZ: 6, Theta: 7}}, + output: tu.ToStructPBStruct(t, map[string]any{"pose": map[string]any{ + "x": 1, + "y": 2, + "z": 3, + "o_x": 4, + "o_y": 5, + "o_z": 6, + "theta": 7, + }}), + }, + } + + for i, tc := range tcs { + t.Logf("index: %d", i) + res, err := NewTabularCaptureResult(now, tc.input) + if tc.err != nil { + test.That(t, err, test.ShouldBeError, tc.err) + continue + } + test.That(t, err, test.ShouldBeNil) + verifyStruct(t, res, now, tc.output) + } +} + +func verifyStruct(t *testing.T, res CaptureResult, now time.Time, output *structpb.Struct) { + t.Helper() + test.That(t, res, test.ShouldNotBeNil) + + test.That(t, res.Type, 
test.ShouldEqual, CaptureTypeTabular) + test.That(t, res.TimeRequested, test.ShouldEqual, now) + test.That(t, res.TimeReceived, test.ShouldHappenAfter, now) + test.That(t, res.TimeReceived, test.ShouldHappenBefore, time.Now()) + test.That(t, res.Binaries, test.ShouldBeNil) + test.That(t, res.TabularData.Payload, test.ShouldNotBeNil) + test.That(t, res.TabularData.Payload, test.ShouldResemble, output) + + test.That(t, res.Validate(), test.ShouldBeNil) + + // confirm input conforms to ToProto expectations + for _, proto := range res.ToProto() { + test.That(t, proto.Metadata, test.ShouldResemble, &datasyncPB.SensorMetadata{ + TimeRequested: timestamppb.New(res.TimeRequested.UTC()), + TimeReceived: timestamppb.New(res.TimeReceived.UTC()), + }) + + test.That(t, proto.Data, test.ShouldResemble, &datasyncPB.SensorData_Struct{ + Struct: output, + }) + } +} + +func TestCaptureTypeToProto(t *testing.T) { + test.That(t, CaptureTypeBinary.ToProto(), test.ShouldEqual, datasyncPB.DataType_DATA_TYPE_BINARY_SENSOR) + test.That(t, CaptureTypeTabular.ToProto(), test.ShouldEqual, datasyncPB.DataType_DATA_TYPE_TABULAR_SENSOR) + test.That(t, CaptureTypeUnspecified.ToProto(), test.ShouldEqual, datasyncPB.DataType_DATA_TYPE_UNSPECIFIED) + invalidCaptureType := CaptureType(20) + test.That(t, invalidCaptureType.ToProto(), test.ShouldEqual, datasyncPB.DataType_DATA_TYPE_UNSPECIFIED) +} + +func TestMimeTypeToProto(t *testing.T) { + test.That(t, MimeTypeImageJpeg.ToProto(), test.ShouldEqual, datasyncPB.MimeType_MIME_TYPE_IMAGE_JPEG) + test.That(t, MimeTypeImagePng.ToProto(), test.ShouldEqual, datasyncPB.MimeType_MIME_TYPE_IMAGE_PNG) + test.That(t, MimeTypeApplicationPcd.ToProto(), test.ShouldEqual, datasyncPB.MimeType_MIME_TYPE_APPLICATION_PCD) + test.That(t, MimeTypeUnspecified.ToProto(), test.ShouldEqual, datasyncPB.MimeType_MIME_TYPE_UNSPECIFIED) +} + +func TestGetDataType(t *testing.T) { + test.That(t, GetDataType(nextPointCloud), test.ShouldEqual, CaptureTypeBinary) + test.That(t, 
GetDataType(readImage), test.ShouldEqual, CaptureTypeBinary) + test.That(t, GetDataType(pointCloudMap), test.ShouldEqual, CaptureTypeBinary) + test.That(t, GetDataType(GetImages), test.ShouldEqual, CaptureTypeBinary) + test.That(t, GetDataType("anything else"), test.ShouldEqual, CaptureTypeTabular) +} + +func TestMimeTypeFromProto(t *testing.T) { + test.That(t, MimeTypeFromProto(datasyncPB.MimeType_MIME_TYPE_IMAGE_JPEG), test.ShouldEqual, MimeTypeImageJpeg) + test.That(t, MimeTypeFromProto(datasyncPB.MimeType_MIME_TYPE_IMAGE_PNG), test.ShouldEqual, MimeTypeImagePng) + test.That(t, MimeTypeFromProto(datasyncPB.MimeType_MIME_TYPE_APPLICATION_PCD), test.ShouldEqual, MimeTypeApplicationPcd) + test.That(t, MimeTypeFromProto(datasyncPB.MimeType_MIME_TYPE_UNSPECIFIED), test.ShouldEqual, MimeTypeUnspecified) + test.That(t, MimeTypeFromProto(datasyncPB.MimeType(20)), test.ShouldEqual, MimeTypeUnspecified) +} + +func TestCameraFormatToMimeType(t *testing.T) { + test.That(t, CameraFormatToMimeType(cameraPB.Format_FORMAT_JPEG), test.ShouldEqual, MimeTypeImageJpeg) + test.That(t, CameraFormatToMimeType(cameraPB.Format_FORMAT_PNG), test.ShouldEqual, MimeTypeImagePng) + test.That(t, CameraFormatToMimeType(cameraPB.Format_FORMAT_RAW_RGBA), test.ShouldEqual, MimeTypeUnspecified) + test.That(t, CameraFormatToMimeType(cameraPB.Format_FORMAT_RAW_DEPTH), test.ShouldEqual, MimeTypeUnspecified) + test.That(t, CameraFormatToMimeType(cameraPB.Format_FORMAT_UNSPECIFIED), test.ShouldEqual, MimeTypeUnspecified) +} + +func TestAnnotationsToProto(t *testing.T) { + conf := 0.2 + + empty := Annotations{} + test.That(t, empty.ToProto() == nil, test.ShouldBeTrue) + + onlyBBoxes := Annotations{ + BoundingBoxes: []BoundingBox{ + {Label: "a", Confidence: &conf, XMinNormalized: 1, XMaxNormalized: 2, YMinNormalized: 3, YMaxNormalized: 4}, + {Label: "b", XMinNormalized: 5, XMaxNormalized: 6, YMinNormalized: 7, YMaxNormalized: 8}, + }, + } + test.That(t, onlyBBoxes.ToProto(), test.ShouldResemble, 
&v1.Annotations{ + Bboxes: []*v1.BoundingBox{ + {Label: "a", Confidence: &conf, XMinNormalized: 1, XMaxNormalized: 2, YMinNormalized: 3, YMaxNormalized: 4}, + {Label: "b", XMinNormalized: 5, XMaxNormalized: 6, YMinNormalized: 7, YMaxNormalized: 8}, + }, + }) + + onlyClassifications := Annotations{ + Classifications: []Classification{ + {Label: "c"}, + {Label: "d", Confidence: &conf}, + }, + } + test.That(t, onlyClassifications.ToProto(), test.ShouldResemble, &v1.Annotations{ + Classifications: []*v1.Classification{ + {Label: "c"}, + {Label: "d", Confidence: &conf}, + }, + }) + + both := Annotations{ + BoundingBoxes: []BoundingBox{ + {Label: "a", Confidence: &conf, XMinNormalized: 1, XMaxNormalized: 2, YMinNormalized: 3, YMaxNormalized: 4}, + {Label: "b", XMinNormalized: 5, XMaxNormalized: 6, YMinNormalized: 7, YMaxNormalized: 8}, + }, + Classifications: []Classification{ + {Label: "c"}, + {Label: "d", Confidence: &conf}, + }, + } + test.That(t, both.ToProto(), test.ShouldResemble, &v1.Annotations{ + Bboxes: []*v1.BoundingBox{ + {Label: "a", Confidence: &conf, XMinNormalized: 1, XMaxNormalized: 2, YMinNormalized: 3, YMaxNormalized: 4}, + {Label: "b", XMinNormalized: 5, XMaxNormalized: 6, YMinNormalized: 7, YMaxNormalized: 8}, + }, + Classifications: []*v1.Classification{ + {Label: "c"}, + {Label: "d", Confidence: &conf}, + }, + }) +} + +func TestGetFileExt(t *testing.T) { + test.That(t, getFileExt(CaptureTypeTabular, "anything", nil), test.ShouldResemble, ".dat") + test.That(t, getFileExt(CaptureTypeUnspecified, "anything", nil), test.ShouldResemble, "") + test.That(t, getFileExt(CaptureType(20), "anything", nil), test.ShouldResemble, "") + test.That(t, getFileExt(CaptureTypeBinary, "anything", nil), test.ShouldResemble, "") + test.That(t, getFileExt(CaptureTypeBinary, "NextPointCloud", nil), test.ShouldResemble, ".pcd") + test.That(t, getFileExt(CaptureTypeBinary, "ReadImage", nil), test.ShouldResemble, "") + test.That(t, getFileExt(CaptureTypeBinary, "ReadImage", 
map[string]string{"mime_type": rutils.MimeTypeJPEG}), test.ShouldResemble, ".jpeg") + test.That(t, getFileExt(CaptureTypeBinary, "ReadImage", map[string]string{"mime_type": rutils.MimeTypePNG}), test.ShouldResemble, ".png") + test.That(t, getFileExt(CaptureTypeBinary, "ReadImage", map[string]string{"mime_type": rutils.MimeTypePCD}), test.ShouldResemble, ".pcd") +} diff --git a/data/registry.go b/data/registry.go index d48997893ed..67a26295d4b 100644 --- a/data/registry.go +++ b/data/registry.go @@ -19,17 +19,18 @@ type CollectorConstructor func(resource interface{}, params CollectorParams) (Co // CollectorParams contain the parameters needed to construct a Collector. type CollectorParams struct { - MongoCollection *mongo.Collection + BufferSize int + Clock clock.Clock ComponentName string ComponentType string - MethodName string + DataType CaptureType Interval time.Duration + Logger logging.Logger + MethodName string MethodParams map[string]*anypb.Any - Target CaptureBufferedWriter + MongoCollection *mongo.Collection QueueSize int - BufferSize int - Logger logging.Logger - Clock clock.Clock + Target CaptureBufferedWriter } // Validate validates that p contains all required parameters. 
@@ -43,6 +44,9 @@ func (p CollectorParams) Validate() error { if p.ComponentName == "" { return errors.New("missing required parameter component name") } + if p.DataType != CaptureTypeBinary && p.DataType != CaptureTypeTabular { + return errors.New("invalid DataType") + } return nil } diff --git a/go.mod b/go.mod index 9949c4ef90c..c28d3f93099 100644 --- a/go.mod +++ b/go.mod @@ -78,7 +78,7 @@ require ( go.uber.org/atomic v1.11.0 go.uber.org/multierr v1.11.0 go.uber.org/zap v1.27.0 - go.viam.com/api v0.1.357 + go.viam.com/api v0.1.360 go.viam.com/test v1.2.4 go.viam.com/utils v0.1.116 goji.io v2.0.2+incompatible @@ -193,7 +193,7 @@ require ( github.com/dnephin/pflag v1.0.7 // indirect github.com/docker/cli v25.0.4+incompatible // indirect github.com/docker/distribution v2.8.3+incompatible // indirect - github.com/docker/docker v25.0.4+incompatible // indirect + github.com/docker/docker v25.0.6+incompatible // indirect github.com/docker/docker-credential-helpers v0.8.1 // indirect github.com/docker/go-connections v0.5.0 // indirect github.com/dustin/go-humanize v1.0.1 // indirect diff --git a/go.sum b/go.sum index e28191b5e46..aa27839de3c 100644 --- a/go.sum +++ b/go.sum @@ -347,8 +347,8 @@ github.com/docker/cli v25.0.4+incompatible h1:DatRkJ+nrFoYL2HZUzjM5Z5sAmcA5XGp+A github.com/docker/cli v25.0.4+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk= github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= -github.com/docker/docker v25.0.4+incompatible h1:XITZTrq+52tZyZxUOtFIahUf3aH367FLxJzt9vZeAF8= -github.com/docker/docker v25.0.4+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker v25.0.6+incompatible h1:5cPwbwriIcsua2REJe8HqQV+6WlWc1byg2QSXzBxBGg= +github.com/docker/docker v25.0.6+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= 
github.com/docker/docker-credential-helpers v0.8.1 h1:j/eKUktUltBtMzKqmfLB0PAgqYyMHOp5vfsD1807oKo= github.com/docker/docker-credential-helpers v0.8.1/go.mod h1:P3ci7E3lwkZg6XiHdRKft1KckHiO9a2rNtyFbZ/ry9M= github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c= @@ -1531,8 +1531,8 @@ go.uber.org/zap v1.18.1/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI= go.uber.org/zap v1.23.0/go.mod h1:D+nX8jyLsMHMYrln8A0rJjFt/T/9/bGgIhAqxv5URuY= go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= -go.viam.com/api v0.1.357 h1:L9LBYbaH0imv/B+mVxqtSgClIl4flzjLV6LclfnD9Nc= -go.viam.com/api v0.1.357/go.mod h1:5lpVRxMsKFCaahqsnJfPGwJ9baoQ6PIKQu3lxvy6Wtw= +go.viam.com/api v0.1.360 h1:jpcm7mxUy2RvmZGAjUpx6RP+/8341XdlRLSX/3mll0g= +go.viam.com/api v0.1.360/go.mod h1:g5eipXHNm0rQmW7DWya6avKcmzoypLmxnMlAaIsE5Ls= go.viam.com/test v1.2.4 h1:JYgZhsuGAQ8sL9jWkziAXN9VJJiKbjoi9BsO33TW3ug= go.viam.com/test v1.2.4/go.mod h1:zI2xzosHdqXAJ/kFqcN+OIF78kQuTV2nIhGZ8EzvaJI= go.viam.com/utils v0.1.116 h1:hoCj3SsV8LZAOEP75TjMeX57axhravS8rNUYmhpTWtM= diff --git a/services/datamanager/builtin/builtin_sync_test.go b/services/datamanager/builtin/builtin_sync_test.go index 844b8772095..83d1c4c6765 100644 --- a/services/datamanager/builtin/builtin_sync_test.go +++ b/services/datamanager/builtin/builtin_sync_test.go @@ -15,6 +15,7 @@ import ( "testing" "time" + "github.com/golang/geo/r3" v1 "go.viam.com/api/app/datasync/v1" "go.viam.com/test" "go.viam.com/utils/rpc" @@ -343,7 +344,7 @@ func TestDataCaptureUploadIntegration(t *testing.T) { ctx context.Context, extra map[string]interface{}, ) (spatialmath.Pose, error) { - return spatialmath.NewZeroPose(), nil + return spatialmath.NewPoseFromPoint(r3.Vector{X: 1, Y: 2, Z: 3}), nil }, }, }) diff --git a/services/datamanager/builtin/capture/capture.go b/services/datamanager/builtin/capture/capture.go index 75112cba583..47d10b700ea 
100644 --- a/services/datamanager/builtin/capture/capture.go +++ b/services/datamanager/builtin/capture/capture.go @@ -324,7 +324,7 @@ func (c *Capture) initializeOrUpdateCollector( return nil, errors.Wrapf(err, "failed to create target directory %s with 700 file permissions", targetDir) } // Build metadata. - captureMetadata := data.BuildCaptureMetadata( + captureMetadata, dataType := data.BuildCaptureMetadata( collectorConfig.Name.API, collectorConfig.Name.ShortName(), collectorConfig.Method, @@ -337,6 +337,7 @@ func (c *Capture) initializeOrUpdateCollector( bufferSize := defaultIfZeroVal(collectorConfig.CaptureBufferSize, defaultCaptureBufferSize) collector, err := collectorConstructor(res, data.CollectorParams{ MongoCollection: collection, + DataType: dataType, ComponentName: collectorConfig.Name.ShortName(), ComponentType: collectorConfig.Name.API.String(), MethodName: collectorConfig.Method, diff --git a/services/datamanager/builtin/sync/exponential_retry.go b/services/datamanager/builtin/sync/exponential_retry.go index 5db098d69c0..36b5bc6017d 100644 --- a/services/datamanager/builtin/sync/exponential_retry.go +++ b/services/datamanager/builtin/sync/exponential_retry.go @@ -168,6 +168,14 @@ func getNextWait(lastWait time.Duration, isOffline bool) time.Duration { // terminalError returns true if retrying will never succeed so that // the data gets moved to the corrupted data directory and false otherwise. 
func terminalError(err error) bool { - errStatus := status.Convert(err) - return errStatus.Code() == codes.InvalidArgument || errors.Is(err, proto.Error) + if status.Convert(err).Code() == codes.InvalidArgument || errors.Is(err, proto.Error) { + return true + } + + for _, e := range terminalCaptureFileErrs { + if errors.Is(err, e) { + return true + } + } + return false } diff --git a/services/datamanager/builtin/sync/upload_data_capture_file.go b/services/datamanager/builtin/sync/upload_data_capture_file.go index c913d5f478f..480c4829004 100644 --- a/services/datamanager/builtin/sync/upload_data_capture_file.go +++ b/services/datamanager/builtin/sync/upload_data_capture_file.go @@ -2,22 +2,32 @@ package sync import ( "context" - "fmt" "github.com/docker/go-units" "github.com/go-viper/mapstructure/v2" "github.com/pkg/errors" - v1 "go.viam.com/api/app/datasync/v1" - pb "go.viam.com/api/component/camera/v1" + datasyncPB "go.viam.com/api/app/datasync/v1" + cameraPB "go.viam.com/api/component/camera/v1" + "google.golang.org/protobuf/proto" "google.golang.org/protobuf/types/known/timestamppb" "go.viam.com/rdk/data" "go.viam.com/rdk/logging" ) -// MaxUnaryFileSize is the max number of bytes to send using the unary DataCaptureUpload, as opposed to the -// StreamingDataCaptureUpload. -var MaxUnaryFileSize = int64(units.MB) +var ( + // MaxUnaryFileSize is the max number of bytes to send using the unary DataCaptureUpload, as opposed to the + // StreamingDataCaptureUpload. 
+ MaxUnaryFileSize = int64(units.MB) + errMultipleReadingTypes = errors.New("sensor readings contain multiple types") + errSensorDataTypesDontMatchUploadMetadata = errors.New("sensor reaings types don't match upload metadata") + errInvalidCaptureFileType = errors.New("invalid capture file type") + terminalCaptureFileErrs = []error{ + errMultipleReadingTypes, + errSensorDataTypesDontMatchUploadMetadata, + errInvalidCaptureFileType, + } +) // uploadDataCaptureFile uploads the *data.CaptureFile to the cloud using the cloud connection. // returns context.Cancelled if ctx is cancelled before upload completes. @@ -41,23 +51,64 @@ func uploadDataCaptureFile(ctx context.Context, f *data.CaptureFile, conn cloudC return 0, nil } - if md.GetType() == v1.DataType_DATA_TYPE_BINARY_SENSOR && len(sensorData) > 1 { - return 0, fmt.Errorf("binary sensor data file with more than one sensor reading is not supported: %s", f.GetPath()) + sensorDataTypeSet := captureTypesInSensorData(sensorData) + if len(sensorDataTypeSet) != 1 { + return 0, errMultipleReadingTypes } - // camera.GetImages is a special case. 
For that API we make 2 binary data upload requests - if md.GetType() == v1.DataType_DATA_TYPE_BINARY_SENSOR && md.GetMethodName() == data.GetImages { - logger.Debugf("attemping to upload camera.GetImages data: %s", f.GetPath()) + _, isTabular := sensorDataTypeSet[data.CaptureTypeTabular] + if isLegacyGetImagesCaptureFile(md, isTabular) { + logger.Debugf("attemping to upload legacy camera.GetImages data: %s", f.GetPath()) + return uint64(f.Size()), legacyUploadGetImages(ctx, conn, md, sensorData[0], f.Size(), f.GetPath(), logger) + } - return uint64(f.Size()), uploadGetImages(ctx, conn, md, sensorData[0], f.Size(), f.GetPath(), logger) + if err := checkUploadMetadaTypeMatchesSensorDataType(md, sensorDataTypeSet); err != nil { + return 0, err } - metaData := uploadMetadata(conn.partID, md, md.GetFileExtension()) + metaData := uploadMetadata(conn.partID, md) return uint64(f.Size()), uploadSensorData(ctx, conn.client, metaData, sensorData, f.Size(), f.GetPath(), logger) } -func uploadMetadata(partID string, md *v1.DataCaptureMetadata, fileextension string) *v1.UploadMetadata { - return &v1.UploadMetadata{ +func checkUploadMetadaTypeMatchesSensorDataType(md *datasyncPB.DataCaptureMetadata, sensorDataTypeSet map[data.CaptureType]struct{}) error { + var captureType data.CaptureType + // get first element of single element set + for k := range sensorDataTypeSet { + captureType = k + break + } + + if captureType.ToProto() != md.GetType() { + return errSensorDataTypesDontMatchUploadMetadata + } + return nil +} + +func isLegacyGetImagesCaptureFile(md *datasyncPB.DataCaptureMetadata, isTabular bool) bool { + // camera.GetImages is a special case. 
For that API we make 2 binary data upload requests + // if the capture file proports to contain DataType_DATA_TYPE_BINARY_SENSOR from GetImages + // but the sensor data is not binary, then this is from a legacy GetImages capture file + return md.GetType() == datasyncPB.DataType_DATA_TYPE_BINARY_SENSOR && md.GetMethodName() == data.GetImages && isTabular +} + +func captureTypesInSensorData(sensorData []*datasyncPB.SensorData) map[data.CaptureType]struct{} { + set := map[data.CaptureType]struct{}{} + for _, sd := range sensorData { + if data.IsBinary(sd) { + if _, ok := set[data.CaptureTypeBinary]; !ok { + set[data.CaptureTypeBinary] = struct{}{} + } + } else { + if _, ok := set[data.CaptureTypeTabular]; !ok { + set[data.CaptureTypeTabular] = struct{}{} + } + } + } + return set +} + +func uploadMetadata(partID string, md *datasyncPB.DataCaptureMetadata) *datasyncPB.UploadMetadata { + return &datasyncPB.UploadMetadata{ PartId: partID, ComponentType: md.GetComponentType(), ComponentName: md.GetComponentName(), @@ -65,39 +116,40 @@ func uploadMetadata(partID string, md *v1.DataCaptureMetadata, fileextension str Type: md.GetType(), MethodParameters: md.GetMethodParameters(), Tags: md.GetTags(), - FileExtension: fileextension, + FileExtension: md.GetFileExtension(), } } -func uploadGetImages( +func legacyUploadGetImages( ctx context.Context, conn cloudConn, - md *v1.DataCaptureMetadata, - sd *v1.SensorData, + md *datasyncPB.DataCaptureMetadata, + sd *datasyncPB.SensorData, size int64, path string, logger logging.Logger, ) error { - var res pb.GetImagesResponse + var res cameraPB.GetImagesResponse if err := mapstructure.Decode(sd.GetStruct().AsMap(), &res); err != nil { return errors.Wrap(err, "failed to decode camera.GetImagesResponse") } timeRequested, timeReceived := getImagesTimestamps(&res, sd) for i, img := range res.Images { - newSensorData := []*v1.SensorData{ + newSensorData := []*datasyncPB.SensorData{ { - Metadata: &v1.SensorMetadata{ + Metadata: 
&datasyncPB.SensorMetadata{ TimeRequested: timeRequested, TimeReceived: timeReceived, }, - Data: &v1.SensorData_Binary{ + Data: &datasyncPB.SensorData_Binary{ Binary: img.GetImage(), }, }, } logger.Debugf("attempting to upload camera.GetImages response, index: %d", i) - metadata := uploadMetadata(conn.partID, md, getFileExtFromImageFormat(img.GetFormat())) + metadata := uploadMetadata(conn.partID, md) + metadata.FileExtension = getFileExtFromImageFormat(img.GetFormat()) // TODO: This is wrong as the size describes the size of the entire GetImages response, but we are only // uploading one of the 2 images in that response here. if err := uploadSensorData(ctx, conn.client, metadata, newSensorData, size, path, logger); err != nil { @@ -107,7 +159,10 @@ func uploadGetImages( return nil } -func getImagesTimestamps(res *pb.GetImagesResponse, sensorData *v1.SensorData) (*timestamppb.Timestamp, *timestamppb.Timestamp) { +func getImagesTimestamps( + res *cameraPB.GetImagesResponse, + sensorData *datasyncPB.SensorData, +) (*timestamppb.Timestamp, *timestamppb.Timestamp) { // If the GetImagesResponse metadata contains a capture timestamp, use that to // populate SensorMetadata. Otherwise, use the timestamps that the data management // system stored to track when a request was sent and response was received. @@ -125,55 +180,157 @@ func getImagesTimestamps(res *pb.GetImagesResponse, sensorData *v1.SensorData) ( func uploadSensorData( ctx context.Context, - client v1.DataSyncServiceClient, - uploadMD *v1.UploadMetadata, - sensorData []*v1.SensorData, + client datasyncPB.DataSyncServiceClient, + uploadMD *datasyncPB.UploadMetadata, + sensorData []*datasyncPB.SensorData, fileSize int64, path string, logger logging.Logger, ) error { - // If it's a large binary file, we need to upload it in chunks. 
- if uploadMD.GetType() == v1.DataType_DATA_TYPE_BINARY_SENSOR && fileSize > MaxUnaryFileSize { - logger.Debugf("attempting to upload large binary file using StreamingDataCaptureUpload, file: %s", path) - c, err := client.StreamingDataCaptureUpload(ctx) - if err != nil { - return errors.Wrap(err, "error creating StreamingDataCaptureUpload client") + captureFileType := uploadMD.GetType() + switch captureFileType { + case datasyncPB.DataType_DATA_TYPE_BINARY_SENSOR: + // If it's a large binary file, we need to upload it in chunks. + if uploadMD.GetType() == datasyncPB.DataType_DATA_TYPE_BINARY_SENSOR && fileSize > MaxUnaryFileSize { + return uploadMultipleLargeBinarySensorData(ctx, client, uploadMD, sensorData, path, logger) } + return uploadMultipleBinarySensorData(ctx, client, uploadMD, sensorData, path, logger) + case datasyncPB.DataType_DATA_TYPE_TABULAR_SENSOR: + // Otherwise use the unary endpoint + logger.Debugf("attempting to upload small binary file using DataCaptureUpload, file: %s", path) + _, err := client.DataCaptureUpload(ctx, &datasyncPB.DataCaptureUploadRequest{ + Metadata: uploadMD, + SensorContents: sensorData, + }) + return errors.Wrap(err, "DataCaptureUpload failed") + case datasyncPB.DataType_DATA_TYPE_FILE: + fallthrough + case datasyncPB.DataType_DATA_TYPE_UNSPECIFIED: + fallthrough + default: + logger.Errorf("%s: %s", errInvalidCaptureFileType.Error(), captureFileType) + return errInvalidCaptureFileType + } +} + +func uploadBinarySensorData( + ctx context.Context, + client datasyncPB.DataSyncServiceClient, + md *datasyncPB.UploadMetadata, + sd *datasyncPB.SensorData, +) error { + if md.FileExtension == "" { + md.FileExtension = getFileExtFromMimeType(sd.GetMetadata().GetMimeType()) + } + if _, err := client.DataCaptureUpload(ctx, &datasyncPB.DataCaptureUploadRequest{ + Metadata: md, + SensorContents: []*datasyncPB.SensorData{sd}, + }); err != nil { + return errors.Wrap(err, "DataCaptureUpload failed") + } - toUpload := sensorData[0] + return 
nil +} - // First send metadata. - streamMD := &v1.StreamingDataCaptureUploadRequest_Metadata{ - Metadata: &v1.DataCaptureUploadMetadata{ - UploadMetadata: uploadMD, - SensorMetadata: toUpload.GetMetadata(), - }, - } - if err := c.Send(&v1.StreamingDataCaptureUploadRequest{UploadPacket: streamMD}); err != nil { - return errors.Wrap(err, "StreamingDataCaptureUpload failed sending metadata") +func uploadMultipleBinarySensorData( + ctx context.Context, + client datasyncPB.DataSyncServiceClient, + uploadMD *datasyncPB.UploadMetadata, + sensorData []*datasyncPB.SensorData, + path string, + logger logging.Logger, +) error { + // this is the common case + if len(sensorData) == 1 { + logger.Debugf("attempting to upload small binary file using DataCaptureUpload, sensor data, file: %s", path) + return uploadBinarySensorData(ctx, client, uploadMD, sensorData[0]) + } + + // we only go down this path if the capture method returned multiple binary + // responses, which at time of writing, only includes camera.GetImages data. 
+ for i, sd := range sensorData { + // Otherwise use the unary endpoint + logger.Debugf("attempting to upload small binary file using DataCaptureUpload, sensor data index: %d, ext: %s, file: %s", i, path) + // we clone as the uploadMD may be changed for each sensor data + // and I'm not confident that it is safe to reuse grpc request structs + // between calls if the data in the request struct changes + clonedMD := proto.Clone(uploadMD).(*datasyncPB.UploadMetadata) + if err := uploadBinarySensorData(ctx, client, clonedMD, sd); err != nil { + return err } + } + return nil +} + +func uploadMultipleLargeBinarySensorData( + ctx context.Context, + client datasyncPB.DataSyncServiceClient, + uploadMD *datasyncPB.UploadMetadata, + sensorData []*datasyncPB.SensorData, + path string, + logger logging.Logger, +) error { + if len(sensorData) == 1 { + logger.Debugf("attempting to upload large binary file using StreamingDataCaptureUpload, sensor data file: %s", path) + return uploadLargeBinarySensorData(ctx, client, uploadMD, sensorData[0], path, logger) + } - // Then call the function to send the rest. 
- if err := sendStreamingDCRequests(ctx, c, toUpload.GetBinary(), path, logger); err != nil { - return errors.Wrap(err, "StreamingDataCaptureUpload failed to sync") + for i, sd := range sensorData { + logger.Debugf("attempting to upload large binary file using StreamingDataCaptureUpload, sensor data index: %d, file: %s", i, path) + // we clone as the uploadMD may be changed for each sensor data + // and I'm not confident that it is safe to reuse grpc request structs + // between calls if the data in the request struct changes + clonedMD := proto.Clone(uploadMD).(*datasyncPB.UploadMetadata) + if err := uploadLargeBinarySensorData(ctx, client, clonedMD, sd, path, logger); err != nil { + return err } + } + return nil +} + +func uploadLargeBinarySensorData( + ctx context.Context, + client datasyncPB.DataSyncServiceClient, + md *datasyncPB.UploadMetadata, + sd *datasyncPB.SensorData, + path string, + logger logging.Logger, +) error { + c, err := client.StreamingDataCaptureUpload(ctx) + if err != nil { + return errors.Wrap(err, "error creating StreamingDataCaptureUpload client") + } + // First send metadata. + smd := sd.GetMetadata() + // if the extension is not set, derive the extension from the sensor data metadata's mime type + if md.FileExtension == "" { + md.FileExtension = getFileExtFromMimeType(smd.MimeType) + } + streamMD := &datasyncPB.StreamingDataCaptureUploadRequest_Metadata{ + Metadata: &datasyncPB.DataCaptureUploadMetadata{ + UploadMetadata: md, + SensorMetadata: smd, + }, + } + if err := c.Send(&datasyncPB.StreamingDataCaptureUploadRequest{UploadPacket: streamMD}); err != nil { + return errors.Wrap(err, "StreamingDataCaptureUpload failed sending metadata") + } + + // Then call the function to send the rest. 
+ if err := sendStreamingDCRequests(ctx, c, sd.GetBinary(), path, logger); err != nil { + return errors.Wrap(err, "StreamingDataCaptureUpload failed to sync") + } - _, err = c.CloseAndRecv() + if _, err = c.CloseAndRecv(); err != nil { return errors.Wrap(err, "StreamingDataCaptureUpload CloseAndRecv failed") } - // Otherwise use the unary endpoint - logger.Debugf("attempting to upload small binary file using DataCaptureUpload, file: %s", path) - _, err := client.DataCaptureUpload(ctx, &v1.DataCaptureUploadRequest{ - Metadata: uploadMD, - SensorContents: sensorData, - }) - return errors.Wrap(err, "DataCaptureUpload failed") + return nil } func sendStreamingDCRequests( ctx context.Context, - stream v1.DataSyncService_StreamingDataCaptureUploadClient, + stream datasyncPB.DataSyncService_StreamingDataCaptureUploadClient, contents []byte, path string, logger logging.Logger, @@ -193,8 +350,8 @@ func sendStreamingDCRequests( chunk := contents[i:end] // Build request with contents. - uploadReq := &v1.StreamingDataCaptureUploadRequest{ - UploadPacket: &v1.StreamingDataCaptureUploadRequest_Data{ + uploadReq := &datasyncPB.StreamingDataCaptureUploadRequest{ + UploadPacket: &datasyncPB.StreamingDataCaptureUploadRequest_Data{ Data: chunk, }, } @@ -211,17 +368,32 @@ func sendStreamingDCRequests( return nil } -func getFileExtFromImageFormat(res pb.Format) string { - switch res { - case pb.Format_FORMAT_JPEG: +func getFileExtFromImageFormat(t cameraPB.Format) string { + switch t { + case cameraPB.Format_FORMAT_JPEG: return ".jpeg" - case pb.Format_FORMAT_PNG: + case cameraPB.Format_FORMAT_PNG: return ".png" - case pb.Format_FORMAT_RAW_DEPTH: + case cameraPB.Format_FORMAT_RAW_DEPTH: return ".dep" - case pb.Format_FORMAT_RAW_RGBA: + case cameraPB.Format_FORMAT_RAW_RGBA: return ".rgba" - case pb.Format_FORMAT_UNSPECIFIED: + case cameraPB.Format_FORMAT_UNSPECIFIED: + fallthrough + default: + return "" + } +} + +func getFileExtFromMimeType(t datasyncPB.MimeType) string { + switch t { + 
case datasyncPB.MimeType_MIME_TYPE_IMAGE_JPEG: + return ".jpeg" + case datasyncPB.MimeType_MIME_TYPE_IMAGE_PNG: + return ".png" + case datasyncPB.MimeType_MIME_TYPE_APPLICATION_PCD: + return ".pcd" + case datasyncPB.MimeType_MIME_TYPE_UNSPECIFIED: fallthrough default: return "" diff --git a/services/datamanager/builtin/sync/upload_data_capture_file_test.go b/services/datamanager/builtin/sync/upload_data_capture_file_test.go new file mode 100644 index 00000000000..a40518de77e --- /dev/null +++ b/services/datamanager/builtin/sync/upload_data_capture_file_test.go @@ -0,0 +1,672 @@ +package sync + +import ( + "context" + "os" + "slices" + "sort" + "strings" + "sync/atomic" + "testing" + "time" + + "github.com/docker/go-units" + v1 "go.viam.com/api/app/datasync/v1" + powersensorPB "go.viam.com/api/component/powersensor/v1" + "go.viam.com/test" + "google.golang.org/grpc" + + "go.viam.com/rdk/components/camera" + "go.viam.com/rdk/components/powersensor" + "go.viam.com/rdk/components/sensor" + "go.viam.com/rdk/data" + "go.viam.com/rdk/logging" + rprotoutils "go.viam.com/rdk/protoutils" + "go.viam.com/rdk/resource" + "go.viam.com/rdk/services/vision" + "go.viam.com/rdk/utils" +) + +func TestUploadDataCaptureFile(t *testing.T) { + type upload struct { + md *v1.UploadMetadata + sd []*v1.SensorData + } + type testCase struct { + testName string + api resource.API + name string + method string + tags []string + captureType data.CaptureType + captureResults data.CaptureResult + client MockDataSyncServiceClient + expectedUploads []upload + // expectedUploadMetadata []*v1.UploadMetadata + // expectedSensorData [][]*v1.SensorData + additionalParams map[string]string + unaryReqs chan *v1.DataCaptureUploadRequest + steamingReqs []chan *v1.StreamingDataCaptureUploadRequest + } + + testCtx, cancel := context.WithTimeout(context.Background(), time.Second*5) + defer cancel() + logger := logging.NewTestLogger(t) + + partID := "my-part-id" + + now := time.Now() + sensorReadingResult, err 
:= data.NewTabularCaptureResultReadings(now, map[string]interface{}{"a": 1}) + test.That(t, err, test.ShouldBeNil) + + tabularResult, err := data.NewTabularCaptureResult(now, &powersensorPB.GetPowerResponse{Watts: 0.5}) + test.That(t, err, test.ShouldBeNil) + + ts := data.Timestamps{TimeRequested: now, TimeReceived: now.Add(time.Second)} + smallBinaryResult := data.NewBinaryCaptureResult(ts, []data.Binary{ + {Payload: []byte("I'm a small binary result"), MimeType: data.MimeTypeImageJpeg}, + }) + test.That(t, err, test.ShouldBeNil) + + largeBinaryPayload := slices.Repeat([]byte{1, 2}, units.MB) + largeBinaryResult := data.NewBinaryCaptureResult(ts, []data.Binary{ + {Payload: largeBinaryPayload, MimeType: data.MimeTypeImagePng}, + }) + test.That(t, err, test.ShouldBeNil) + + smallGetImagesResult := data.NewBinaryCaptureResult(ts, []data.Binary{ + {Payload: []byte("I'm a small binary jpeg result"), MimeType: data.MimeTypeImageJpeg}, + {Payload: []byte("I'm a small binary png result"), MimeType: data.MimeTypeImagePng}, + }) + + largeGetImagesResult := data.NewBinaryCaptureResult(ts, []data.Binary{ + {Payload: largeBinaryPayload, MimeType: data.MimeTypeImageJpeg}, + {Payload: largeBinaryPayload, MimeType: data.MimeTypeImagePng}, + }) + conf := 0.888 + smallVisionCaptureAllFromCamera := data.NewBinaryCaptureResult(ts, []data.Binary{ + { + Payload: []byte("I'm a small binary jpeg result"), + MimeType: data.MimeTypeImageJpeg, + Annotations: data.Annotations{ + BoundingBoxes: []data.BoundingBox{ + { + Label: "a", + Confidence: &conf, + XMinNormalized: 1, + XMaxNormalized: 2, + YMinNormalized: 3, + YMaxNormalized: 4, + }, + { + Label: "b", + XMinNormalized: 5, + XMaxNormalized: 6, + YMinNormalized: 7, + YMaxNormalized: 8, + }, + }, + Classifications: []data.Classification{ + {Label: "a", Confidence: &conf}, + {Label: "b"}, + }, + }, + }, + }) + + largeVisionCaptureAllFromCamera := data.NewBinaryCaptureResult(ts, []data.Binary{ + { + Payload: largeBinaryPayload, + MimeType: 
data.MimeTypeImagePng, + Annotations: data.Annotations{ + BoundingBoxes: []data.BoundingBox{ + { + Label: "a", + Confidence: &conf, + XMinNormalized: 1, + XMaxNormalized: 2, + YMinNormalized: 3, + YMaxNormalized: 4, + }, + { + Label: "b", + XMinNormalized: 5, + XMaxNormalized: 6, + YMinNormalized: 7, + YMaxNormalized: 8, + }, + }, + Classifications: []data.Classification{ + {Label: "a", Confidence: &conf}, + {Label: "b"}, + }, + }, + }, + }) + + reqs0 := make(chan *v1.DataCaptureUploadRequest, 1) + reqs1 := make(chan *v1.DataCaptureUploadRequest, 1) + reqs2 := make(chan *v1.DataCaptureUploadRequest, 1) + reqs3 := make(chan *v1.StreamingDataCaptureUploadRequest, 100) + reqs4 := make(chan *v1.DataCaptureUploadRequest, 2) + largeGetImagesReqsIdx := atomic.Int64{} + largeGetImagesReqs := []chan *v1.StreamingDataCaptureUploadRequest{ + make(chan *v1.StreamingDataCaptureUploadRequest, 100), + make(chan *v1.StreamingDataCaptureUploadRequest, 100), + } + reqs5 := make(chan *v1.DataCaptureUploadRequest, 2) + largeVisionCaptureAllFromCameraIdx := atomic.Int64{} + largeVisionCaptureAllFromCameraReqs := []chan *v1.StreamingDataCaptureUploadRequest{ + make(chan *v1.StreamingDataCaptureUploadRequest, 100), + } + + tcs := []testCase{ + { + testName: "sensor readings", + captureResults: sensorReadingResult, + captureType: data.CaptureTypeTabular, + client: MockDataSyncServiceClient{ + T: t, + DataCaptureUploadFunc: func( + ctx context.Context, + in *v1.DataCaptureUploadRequest, + opts ...grpc.CallOption, + ) (*v1.DataCaptureUploadResponse, error) { + t.Log("called") + select { + case <-testCtx.Done(): + t.Error("timeout") + t.FailNow() + case reqs0 <- in: + } + return &v1.DataCaptureUploadResponse{}, nil + }, + }, + api: sensor.API, + name: "sensor-1", + method: "Readings", + tags: []string{}, + additionalParams: map[string]string{}, + expectedUploads: []upload{ + { + md: &v1.UploadMetadata{ + ComponentName: "sensor-1", + ComponentType: sensor.API.String(), + FileExtension: 
".dat", + MethodName: "Readings", + PartId: partID, + Type: v1.DataType_DATA_TYPE_TABULAR_SENSOR, + }, + sd: sensorReadingResult.ToProto(), + }, + }, + unaryReqs: reqs0, + }, + { + testName: "non readings tabular data", + captureResults: tabularResult, + captureType: data.CaptureTypeTabular, + client: MockDataSyncServiceClient{ + T: t, + DataCaptureUploadFunc: func( + ctx context.Context, + in *v1.DataCaptureUploadRequest, + opts ...grpc.CallOption, + ) (*v1.DataCaptureUploadResponse, error) { + t.Log("called") + select { + case <-testCtx.Done(): + t.Error("timeout") + t.FailNow() + case reqs1 <- in: + } + return &v1.DataCaptureUploadResponse{}, nil + }, + }, + api: powersensor.API, + name: "powersensor-1", + method: "Power", + tags: []string{"tag1", "tag2"}, + additionalParams: map[string]string{"some": "additional", "param": "things"}, + expectedUploads: []upload{ + { + md: &v1.UploadMetadata{ + ComponentName: "powersensor-1", + ComponentType: powersensor.API.String(), + FileExtension: ".dat", + MethodName: "Power", + PartId: partID, + Tags: []string{"tag1", "tag2"}, + Type: v1.DataType_DATA_TYPE_TABULAR_SENSOR, + }, + sd: tabularResult.ToProto(), + }, + }, + unaryReqs: reqs1, + }, + { + testName: "small binary data", + captureResults: smallBinaryResult, + captureType: data.CaptureTypeBinary, + client: MockDataSyncServiceClient{ + T: t, + DataCaptureUploadFunc: func( + ctx context.Context, + in *v1.DataCaptureUploadRequest, + opts ...grpc.CallOption, + ) (*v1.DataCaptureUploadResponse, error) { + t.Log("called") + select { + case <-testCtx.Done(): + t.Error("timeout") + t.FailNow() + case reqs2 <- in: + } + return &v1.DataCaptureUploadResponse{}, nil + }, + }, + api: camera.API, + name: "camera-1", + method: "ReadImage", + tags: []string{"tag1", "tag2"}, + additionalParams: map[string]string{"mime_type": utils.MimeTypeJPEG}, + expectedUploads: []upload{ + { + md: &v1.UploadMetadata{ + ComponentName: "camera-1", + ComponentType: camera.API.String(), + 
FileExtension: ".jpeg", + MethodName: "ReadImage", + PartId: partID, + Tags: []string{"tag1", "tag2"}, + Type: v1.DataType_DATA_TYPE_BINARY_SENSOR, + }, + sd: smallBinaryResult.ToProto(), + }, + }, + unaryReqs: reqs2, + }, + { + testName: "large binary data", + captureResults: largeBinaryResult, + captureType: data.CaptureTypeBinary, + client: MockDataSyncServiceClient{ + T: t, + StreamingDataCaptureUploadFunc: func( + ctx context.Context, + _ ...grpc.CallOption, + ) (v1.DataSyncService_StreamingDataCaptureUploadClient, error) { + mockStreamingClient := &ClientStreamingMock[ + *v1.StreamingDataCaptureUploadRequest, + *v1.StreamingDataCaptureUploadResponse, + ]{ + T: t, + SendFunc: func(in *v1.StreamingDataCaptureUploadRequest) error { + select { + case <-testCtx.Done(): + t.Error("timeout") + t.FailNow() + case reqs3 <- in: + } + return nil + }, + CloseAndRecvFunc: func() (*v1.StreamingDataCaptureUploadResponse, error) { + close(reqs3) + return &v1.StreamingDataCaptureUploadResponse{}, nil + }, + } + return mockStreamingClient, nil + }, + }, + api: camera.API, + name: "camera-1", + method: "ReadImage", + tags: []string{"tag1", "tag2"}, + additionalParams: map[string]string{"mime_type": utils.MimeTypePNG}, + expectedUploads: []upload{ + { + md: &v1.UploadMetadata{ + ComponentName: "camera-1", + ComponentType: camera.API.String(), + FileExtension: ".png", + MethodName: "ReadImage", + PartId: partID, + Tags: []string{"tag1", "tag2"}, + Type: v1.DataType_DATA_TYPE_BINARY_SENSOR, + }, + sd: largeBinaryResult.ToProto(), + }, + }, + steamingReqs: []chan *v1.StreamingDataCaptureUploadRequest{reqs3}, + }, + { + testName: "small camera.GetImages", + captureResults: smallGetImagesResult, + captureType: data.CaptureTypeBinary, + client: MockDataSyncServiceClient{ + T: t, + DataCaptureUploadFunc: func( + ctx context.Context, + in *v1.DataCaptureUploadRequest, + opts ...grpc.CallOption, + ) (*v1.DataCaptureUploadResponse, error) { + t.Log("called") + select { + case 
<-testCtx.Done(): + t.Error("timeout") + t.FailNow() + case reqs4 <- in: + } + return &v1.DataCaptureUploadResponse{}, nil + }, + }, + api: camera.API, + name: "camera-1", + method: "GetImages", + tags: []string{"tag1", "tag2"}, + expectedUploads: []upload{ + { + md: &v1.UploadMetadata{ + ComponentName: "camera-1", + ComponentType: camera.API.String(), + FileExtension: ".jpeg", + MethodName: "GetImages", + PartId: partID, + Tags: []string{"tag1", "tag2"}, + Type: v1.DataType_DATA_TYPE_BINARY_SENSOR, + }, + sd: []*v1.SensorData{smallGetImagesResult.ToProto()[0]}, + }, + { + md: &v1.UploadMetadata{ + ComponentName: "camera-1", + ComponentType: camera.API.String(), + FileExtension: ".png", + MethodName: "GetImages", + PartId: partID, + Tags: []string{"tag1", "tag2"}, + Type: v1.DataType_DATA_TYPE_BINARY_SENSOR, + }, + sd: []*v1.SensorData{smallGetImagesResult.ToProto()[1]}, + }, + }, + unaryReqs: reqs4, + }, + { + testName: "large camera.GetImages", + captureResults: largeGetImagesResult, + captureType: data.CaptureTypeBinary, + client: MockDataSyncServiceClient{ + T: t, + StreamingDataCaptureUploadFunc: func( + ctx context.Context, + _ ...grpc.CallOption, + ) (v1.DataSyncService_StreamingDataCaptureUploadClient, error) { + mockStreamingClient := &ClientStreamingMock[ + *v1.StreamingDataCaptureUploadRequest, + *v1.StreamingDataCaptureUploadResponse, + ]{ + T: t, + SendFunc: func(in *v1.StreamingDataCaptureUploadRequest) error { + idx := largeGetImagesReqsIdx.Load() + t.Logf("writing to index: %d", idx) + ch := largeGetImagesReqs[idx] + select { + case <-testCtx.Done(): + t.Error("timeout") + t.FailNow() + case ch <- in: + } + return nil + }, + CloseAndRecvFunc: func() (*v1.StreamingDataCaptureUploadResponse, error) { + close(largeGetImagesReqs[largeGetImagesReqsIdx.Add(1)-1]) + return &v1.StreamingDataCaptureUploadResponse{}, nil + }, + } + return mockStreamingClient, nil + }, + }, + api: camera.API, + name: "camera-1", + method: "GetImages", + tags: []string{"tag1", 
"tag2"}, + expectedUploads: []upload{ + { + md: &v1.UploadMetadata{ + ComponentName: "camera-1", + ComponentType: camera.API.String(), + FileExtension: ".jpeg", + MethodName: "GetImages", + PartId: partID, + Tags: []string{"tag1", "tag2"}, + Type: v1.DataType_DATA_TYPE_BINARY_SENSOR, + }, + sd: []*v1.SensorData{largeGetImagesResult.ToProto()[0]}, + }, + { + md: &v1.UploadMetadata{ + ComponentName: "camera-1", + ComponentType: camera.API.String(), + FileExtension: ".png", + MethodName: "GetImages", + PartId: partID, + Tags: []string{"tag1", "tag2"}, + Type: v1.DataType_DATA_TYPE_BINARY_SENSOR, + }, + sd: []*v1.SensorData{largeGetImagesResult.ToProto()[1]}, + }, + }, + steamingReqs: largeGetImagesReqs, + }, + { + testName: "small vision.CaptureAllFromCamera", + captureResults: smallVisionCaptureAllFromCamera, + captureType: data.CaptureTypeBinary, + client: MockDataSyncServiceClient{ + T: t, + DataCaptureUploadFunc: func( + ctx context.Context, + in *v1.DataCaptureUploadRequest, + opts ...grpc.CallOption, + ) (*v1.DataCaptureUploadResponse, error) { + t.Log("called") + select { + case <-testCtx.Done(): + t.Error("timeout") + t.FailNow() + case reqs5 <- in: + } + return &v1.DataCaptureUploadResponse{}, nil + }, + }, + api: vision.API, + name: "vision-1", + method: "CaptureAllFromCamera", + tags: []string{"tag1", "tag2"}, + expectedUploads: []upload{ + { + md: &v1.UploadMetadata{ + ComponentName: "vision-1", + ComponentType: vision.API.String(), + FileExtension: ".jpeg", + MethodName: "CaptureAllFromCamera", + PartId: partID, + Tags: []string{"tag1", "tag2"}, + Type: v1.DataType_DATA_TYPE_BINARY_SENSOR, + }, + sd: smallVisionCaptureAllFromCamera.ToProto(), + }, + }, + unaryReqs: reqs5, + }, + { + testName: "large vision.CaptureAllFromCamera", + captureResults: largeVisionCaptureAllFromCamera, + captureType: data.CaptureTypeBinary, + client: MockDataSyncServiceClient{ + T: t, + StreamingDataCaptureUploadFunc: func( + ctx context.Context, + _ ...grpc.CallOption, + ) 
(v1.DataSyncService_StreamingDataCaptureUploadClient, error) { + mockStreamingClient := &ClientStreamingMock[ + *v1.StreamingDataCaptureUploadRequest, + *v1.StreamingDataCaptureUploadResponse, + ]{ + T: t, + SendFunc: func(in *v1.StreamingDataCaptureUploadRequest) error { + idx := largeVisionCaptureAllFromCameraIdx.Load() + t.Logf("writing to index: %d", idx) + ch := largeVisionCaptureAllFromCameraReqs[idx] + select { + case <-testCtx.Done(): + t.Error("timeout") + t.FailNow() + case ch <- in: + } + return nil + }, + CloseAndRecvFunc: func() (*v1.StreamingDataCaptureUploadResponse, error) { + close(largeVisionCaptureAllFromCameraReqs[largeVisionCaptureAllFromCameraIdx.Add(1)-1]) + return &v1.StreamingDataCaptureUploadResponse{}, nil + }, + } + return mockStreamingClient, nil + }, + }, + api: vision.API, + name: "vision-1", + method: "CaptureAllFromCamera", + tags: []string{"tag1", "tag2"}, + expectedUploads: []upload{ + { + md: &v1.UploadMetadata{ + ComponentName: "vision-1", + ComponentType: vision.API.String(), + FileExtension: ".png", + MethodName: "CaptureAllFromCamera", + PartId: partID, + Tags: []string{"tag1", "tag2"}, + Type: v1.DataType_DATA_TYPE_BINARY_SENSOR, + }, + sd: largeVisionCaptureAllFromCamera.ToProto(), + }, + }, + steamingReqs: largeVisionCaptureAllFromCameraReqs, + }, + } + + tempDir := t.TempDir() + for _, tc := range tcs { + t.Run(tc.testName, func(t *testing.T) { + methodParams, err := rprotoutils.ConvertStringMapToAnyPBMap(tc.additionalParams) + test.That(t, err, test.ShouldBeNil) + md, ct := data.BuildCaptureMetadata(tc.api, tc.name, tc.method, tc.additionalParams, methodParams, tc.tags) + test.That(t, err, test.ShouldBeNil) + test.That(t, ct, test.ShouldEqual, tc.captureType) + w, err := data.NewCaptureFile(tempDir, md) + test.That(t, err, test.ShouldBeNil) + test.That(t, len(tc.expectedUploads), test.ShouldBeGreaterThan, 0) + for _, sd := range tc.captureResults.ToProto() { + test.That(t, w.WriteNext(sd), test.ShouldBeNil) + } + 
w.Flush() + w.Close() + + f, err := os.Open(strings.Replace(w.GetPath(), data.InProgressCaptureFileExt, data.CompletedCaptureFileExt, 1)) + test.That(t, err, test.ShouldBeNil) + + stat, err := f.Stat() + test.That(t, err, test.ShouldBeNil) + + test.That(t, data.IsDataCaptureFile(f), test.ShouldBeTrue) + cf, err := data.ReadCaptureFile(f) + test.That(t, err, test.ShouldBeNil) + cc := cloudConn{partID: partID, client: tc.client} + bytesUploaded, err := uploadDataCaptureFile(testCtx, cf, cc, logger) + test.That(t, err, test.ShouldBeNil) + test.That(t, bytesUploaded, test.ShouldEqual, stat.Size()) + if tc.unaryReqs != nil { + for i := 0; i < len(tc.expectedUploads); i++ { + t.Logf("unaryReqs: i: %d", i) + tc.expectedUploads[i].md.MethodParameters = methodParams + select { + case <-testCtx.Done(): + t.Error("timeout") + t.FailNow() + case req := <-tc.unaryReqs: + t.Logf("got req\n") + test.That(t, len(tc.expectedUploads[i].sd), test.ShouldEqual, 1) + test.That(t, req.Metadata.FileExtension, test.ShouldResemble, tc.expectedUploads[i].md.FileExtension) + test.That(t, req.Metadata, test.ShouldResemble, tc.expectedUploads[i].md) + compareSensorData(t, tc.captureType.ToProto(), req.SensorContents, tc.expectedUploads[i].sd) + } + } + } else { + test.That(t, len(tc.steamingReqs), test.ShouldEqual, len(tc.expectedUploads)) + for i := 0; i < len(tc.expectedUploads); i++ { + test.That(t, len(tc.expectedUploads[i].sd), test.ShouldEqual, 1) + md := tc.expectedUploads[i].md + sd := tc.expectedUploads[i].sd[0] + md.MethodParameters = methodParams + var gotHeader bool + var data []byte + for req := range tc.steamingReqs[i] { + if !gotHeader { + test.That(t, req.GetMetadata().UploadMetadata, test.ShouldResemble, md) + test.That(t, req.GetMetadata().SensorMetadata, test.ShouldResemble, sd.GetMetadata()) + gotHeader = true + continue + } + data = append(data, req.GetData()...) 
+ } + test.That(t, gotHeader, test.ShouldBeTrue) + test.That(t, data, test.ShouldResemble, sd.GetBinary()) + } + } + }) + } +} + +func compareSensorData(t *testing.T, dataType v1.DataType, act, exp []*v1.SensorData) { + t.Helper() + if len(act) == 0 && len(exp) == 0 { + return + } + + // Sort both by time requested. + sort.SliceStable(act, func(i, j int) bool { + diffRequested := act[j].GetMetadata().GetTimeRequested().AsTime().Sub(act[i].GetMetadata().GetTimeRequested().AsTime()) + switch { + case diffRequested > 0: + return true + case diffRequested == 0: + return act[j].GetMetadata().GetTimeReceived().AsTime().Sub(act[i].GetMetadata().GetTimeReceived().AsTime()) > 0 + default: + return false + } + }) + sort.SliceStable(exp, func(i, j int) bool { + diffRequested := exp[j].GetMetadata().GetTimeRequested().AsTime().Sub(exp[i].GetMetadata().GetTimeRequested().AsTime()) + switch { + case diffRequested > 0: + return true + case diffRequested == 0: + return exp[j].GetMetadata().GetTimeReceived().AsTime().Sub(exp[i].GetMetadata().GetTimeReceived().AsTime()) > 0 + default: + return false + } + }) + + test.That(t, len(act), test.ShouldEqual, len(exp)) + + for i := range act { + test.That(t, act[i].GetMetadata(), test.ShouldResemble, exp[i].GetMetadata()) + if dataType == v1.DataType_DATA_TYPE_TABULAR_SENSOR { + test.That(t, act[i].GetStruct(), test.ShouldResemble, exp[i].GetStruct()) + } else { + test.That(t, act[i].GetBinary(), test.ShouldResemble, exp[i].GetBinary()) + } + } +} diff --git a/services/slam/collectors.go b/services/slam/collectors.go index 185ea71eaf2..ea8323c98b8 100644 --- a/services/slam/collectors.go +++ b/services/slam/collectors.go @@ -2,6 +2,7 @@ package slam import ( "context" + "time" pb "go.viam.com/api/service/slam/v1" "google.golang.org/protobuf/types/known/anypb" @@ -33,12 +34,14 @@ func newPositionCollector(resource interface{}, params data.CollectorParams) (da return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, _ 
map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult pose, err := slam.Position(ctx) if err != nil { - return nil, data.FailedToReadErr(params.ComponentName, position.String(), err) + return res, data.FailedToReadErr(params.ComponentName, position.String(), err) } - return &pb.GetPositionResponse{Pose: spatialmath.PoseToProtobuf(pose)}, nil + return data.NewTabularCaptureResult(timeRequested, &pb.GetPositionResponse{Pose: spatialmath.PoseToProtobuf(pose)}) }) return data.NewCollector(cFunc, params) } @@ -49,19 +52,28 @@ func newPointCloudMapCollector(resource interface{}, params data.CollectorParams return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult // edited maps do not need to be captured because they should not be modified f, err := slam.PointCloudMap(ctx, false) if err != nil { - return nil, data.FailedToReadErr(params.ComponentName, pointCloudMap.String(), err) + return res, data.FailedToReadErr(params.ComponentName, pointCloudMap.String(), err) } pcd, err := HelperConcatenateChunksToFull(f) if err != nil { - return nil, data.FailedToReadErr(params.ComponentName, pointCloudMap.String(), err) + return res, data.FailedToReadErr(params.ComponentName, pointCloudMap.String(), err) } - return pcd, nil + ts := data.Timestamps{ + TimeRequested: timeRequested, + TimeReceived: time.Now(), + } + return data.NewBinaryCaptureResult(ts, []data.Binary{{ + Payload: pcd, + MimeType: data.MimeTypeApplicationPcd, + }}), nil }) return data.NewCollector(cFunc, params) } diff --git a/services/slam/collectors_test.go b/services/slam/collectors_test.go index b58c4ae56fb..5e572309751 100644 --- 
a/services/slam/collectors_test.go +++ b/services/slam/collectors_test.go @@ -34,13 +34,15 @@ func TestCollectors(t *testing.T) { tests := []struct { name string collector data.CollectorConstructor - expected *datasyncpb.SensorData + expected []*datasyncpb.SensorData + datatype data.CaptureType slam slam.Service }{ { name: "PositionCollector returns non-empty position responses", collector: slam.NewPositionCollector, - expected: &datasyncpb.SensorData{ + datatype: data.CaptureTypeTabular, + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "pose": map[string]any{ @@ -53,16 +55,19 @@ func TestCollectors(t *testing.T) { "z": 3, }, })}, - }, + }}, slam: newSlamService(pcdPath), }, { name: "PointCloudMapCollector returns non-empty pointcloud responses", collector: slam.NewPointCloudMapCollector, - expected: &datasyncpb.SensorData{ - Metadata: &datasyncpb.SensorMetadata{}, - Data: &datasyncpb.SensorData_Binary{Binary: pcd}, - }, + datatype: data.CaptureTypeBinary, + expected: []*datasyncpb.SensorData{{ + Metadata: &datasyncpb.SensorMetadata{ + MimeType: datasyncpb.MimeType_MIME_TYPE_APPLICATION_PCD, + }, + Data: &datasyncpb.SensorData_Binary{Binary: pcd}, + }}, slam: newSlamService(pcdPath), }, } @@ -70,8 +75,9 @@ func TestCollectors(t *testing.T) { for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { start := time.Now() - buf := tu.NewMockBuffer() + buf := tu.NewMockBuffer(t) params := data.CollectorParams{ + DataType: tc.datatype, ComponentName: serviceName, Interval: captureInterval, Logger: logging.NewTestLogger(t), diff --git a/services/vision/collectors.go b/services/vision/collectors.go index 3bb875e4e00..0bf9830d4e0 100644 --- a/services/vision/collectors.go +++ b/services/vision/collectors.go @@ -2,10 +2,9 @@ package vision import ( "context" + "time" "github.com/pkg/errors" - servicepb "go.viam.com/api/service/vision/v1" - 
"go.viam.com/utils/protoutils" "google.golang.org/protobuf/types/known/anypb" "google.golang.org/protobuf/types/known/wrapperspb" @@ -29,12 +28,6 @@ func (m method) String() string { return "Unknown" } -type extraFields struct { - Height int - Width int - MimeType string -} - type methodParamsDecoded struct { cameraName string mimeType string @@ -53,81 +46,90 @@ func newCaptureAllFromCameraCollector(resource interface{}, params data.Collecto } cameraName := decodedParams.cameraName - mimeType := decodedParams.mimeType minConfidenceScore := decodedParams.minConfidence - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult visCaptureOptions := viscapture.CaptureOptions{ ReturnImage: true, ReturnDetections: true, ReturnClassifications: true, - ReturnObject: true, } visCapture, err := vision.CaptureAllFromCamera(ctx, cameraName, visCaptureOptions, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a service. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, captureAllFromCamera.String(), err) + return res, data.FailedToReadErr(params.ComponentName, captureAllFromCamera.String(), err) + } + + if visCapture.Image == nil { + return res, errors.New("vision service didn't return an image") } protoImage, err := imageToProto(ctx, visCapture.Image, cameraName) if err != nil { - return nil, err + return res, err } - filteredDetections := []objectdetection.Detection{} - for _, elem := range visCapture.Detections { - if elem.Score() >= minConfidenceScore { - filteredDetections = append(filteredDetections, elem) - } + var width, height int + if visCapture.Image != nil { + width = visCapture.Image.Bounds().Dx() + height = visCapture.Image.Bounds().Dy() } - protoDetections := detsToProto(filteredDetections) - - filteredClassifications := classification.Classifications{} - for _, elem := range visCapture.Classifications { - if elem.Score() >= minConfidenceScore { - filteredClassifications = append(filteredClassifications, elem) + filteredBoundingBoxes := []data.BoundingBox{} + for _, d := range visCapture.Detections { + if score := d.Score(); score >= minConfidenceScore { + filteredBoundingBoxes = append(filteredBoundingBoxes, toDataBoundingBox(d, width, height)) } } - protoClassifications := clasToProto(filteredClassifications) - - protoObjects, err := segmentsToProto(cameraName, visCapture.Objects) - if err != nil { - return nil, err - } - - // We need this to pass in the height & width of an image in order to calculate - // the normalized coordinate values of any bounding boxes. We also need the - // mimeType to appropriately upload the image. 
- bounds := extraFields{} - - if visCapture.Image != nil { - bounds = extraFields{ - Height: visCapture.Image.Bounds().Dy(), - Width: visCapture.Image.Bounds().Dx(), - MimeType: mimeType, + filteredClassifications := []data.Classification{} + for _, c := range visCapture.Classifications { + if score := c.Score(); score >= minConfidenceScore { + filteredClassifications = append(filteredClassifications, toDataClassification(c)) } } - boundsPb, err := protoutils.StructToStructPb(bounds) - if err != nil { - return nil, err + ts := data.Timestamps{ + TimeRequested: timeRequested, + TimeReceived: time.Now(), } - - return &servicepb.CaptureAllFromCameraResponse{ - Image: protoImage, Detections: protoDetections, Classifications: protoClassifications, - Objects: protoObjects, Extra: boundsPb, - }, nil + return data.NewBinaryCaptureResult(ts, []data.Binary{{ + Payload: protoImage.Image, + MimeType: data.CameraFormatToMimeType(protoImage.Format), + Annotations: data.Annotations{ + BoundingBoxes: filteredBoundingBoxes, + Classifications: filteredClassifications, + }, + }}), nil }) return data.NewCollector(cFunc, params) } +func toDataClassification(c classification.Classification) data.Classification { + confidence := c.Score() + return data.Classification{Label: c.Label(), Confidence: &confidence} +} + +func toDataBoundingBox(d objectdetection.Detection, width, height int) data.BoundingBox { + confidence := d.Score() + bbox := d.BoundingBox() + return data.BoundingBox{ + Label: d.Label(), + Confidence: &confidence, + XMinNormalized: float64(bbox.Min.X) / float64(width), + XMaxNormalized: float64(bbox.Max.X) / float64(width), + YMinNormalized: float64(bbox.Min.Y) / float64(height), + YMaxNormalized: float64(bbox.Max.Y) / float64(height), + } +} + func additionalParamExtraction(methodParams map[string]*anypb.Any) (methodParamsDecoded, error) { cameraParam := methodParams["camera_name"] diff --git a/services/vision/collectors_test.go b/services/vision/collectors_test.go index 
f9cac5e0d35..671b67516ff 100644 --- a/services/vision/collectors_test.go +++ b/services/vision/collectors_test.go @@ -11,8 +11,8 @@ import ( "time" "github.com/benbjohnson/clock" + datapb "go.viam.com/api/app/data/v1" datasyncpb "go.viam.com/api/app/datasync/v1" - camerapb "go.viam.com/api/component/camera/v1" "go.viam.com/test" "google.golang.org/protobuf/reflect/protoreflect" "google.golang.org/protobuf/types/known/anypb" @@ -142,108 +142,54 @@ func TestCollectors(t *testing.T) { test.That(t, err, test.ShouldBeNil) viamLogoJpeg, err := io.ReadAll(base64.NewDecoder(base64.StdEncoding, bytes.NewReader(viamLogoJpegB64))) test.That(t, err, test.ShouldBeNil) - viamLogoJpegAsInts := []any{} - for _, b := range viamLogoJpeg { - viamLogoJpegAsInts = append(viamLogoJpegAsInts, int(b)) - } - img := rimage.NewLazyEncodedImage(viamLogoJpeg, utils.MimeTypeJPEG) // 32 x 32 image test.That(t, img.Bounds().Dx(), test.ShouldEqual, 32) test.That(t, img.Bounds().Dy(), test.ShouldEqual, 32) - + bboxConf := 0.95 + classConf := 0.85 tests := []struct { name string collector data.CollectorConstructor - expected *datasyncpb.SensorData + expected []*datasyncpb.SensorData vision visionservice.Service }{ { name: "CaptureAllFromCameraCollector returns non-empty CaptureAllFromCameraResp", collector: visionservice.NewCaptureAllFromCameraCollector, - expected: &datasyncpb.SensorData{ - Metadata: &datasyncpb.SensorMetadata{}, - Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ - "image": map[string]any{ - "source_name": "camera-1", - "format": int(camerapb.Format_FORMAT_JPEG), - "image": viamLogoJpegAsInts, - }, - "classifications": []any{ - map[string]any{ - "confidence": 0.85, - "class_name": "cat", - }, - }, - "detections": []any{ - map[string]any{ - "confidence": 0.95, - "class_name": "cat", - "x_min": 10, - "y_min": 20, - "x_max": 110, - "y_max": 120, - }, - }, - "objects": []any{}, - "extra": map[string]any{ - "fields": map[string]any{ - "Height": 
map[string]any{ - "Kind": map[string]any{ - "NumberValue": 32, - }, - }, - "Width": map[string]any{ - "Kind": map[string]any{ - "NumberValue": 32, - }, - }, - "MimeType": map[string]any{ - "Kind": map[string]any{ - "StringValue": utils.MimeTypeJPEG, - }, + expected: []*datasyncpb.SensorData{{ + Metadata: &datasyncpb.SensorMetadata{ + MimeType: datasyncpb.MimeType_MIME_TYPE_IMAGE_JPEG, + Annotations: &datapb.Annotations{ + Bboxes: []*datapb.BoundingBox{ + { + Label: "cat", + XMinNormalized: 0.3125, + YMinNormalized: 0.625, + XMaxNormalized: 3.4375, + YMaxNormalized: 3.75, + Confidence: &bboxConf, }, }, + Classifications: []*datapb.Classification{{ + Label: "cat", + Confidence: &classConf, + }}, }, - })}, - }, + }, + Data: &datasyncpb.SensorData_Binary{Binary: viamLogoJpeg}, + }}, vision: newVisionService(img), }, { name: "CaptureAllFromCameraCollector w/ Classifications & Detections < 0.5 returns empty CaptureAllFromCameraResp", collector: visionservice.NewCaptureAllFromCameraCollector, - expected: &datasyncpb.SensorData{ - Metadata: &datasyncpb.SensorMetadata{}, - Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ - "image": map[string]any{ - "source_name": "camera-1", - "format": 3, - "image": viamLogoJpegAsInts, - }, - "classifications": []any{}, - "detections": []any{}, - "objects": []any{}, - "extra": map[string]any{ - "fields": map[string]any{ - "Height": map[string]any{ - "Kind": map[string]any{ - "NumberValue": 32, - }, - }, - "Width": map[string]any{ - "Kind": map[string]any{ - "NumberValue": 32, - }, - }, - "MimeType": map[string]any{ - "Kind": map[string]any{ - "StringValue": utils.MimeTypeJPEG, - }, - }, - }, - }, - })}, - }, + expected: []*datasyncpb.SensorData{{ + Metadata: &datasyncpb.SensorMetadata{ + MimeType: datasyncpb.MimeType_MIME_TYPE_IMAGE_JPEG, + }, + Data: &datasyncpb.SensorData_Binary{Binary: viamLogoJpeg}, + }}, vision: newVisionService2(img), }, } @@ -251,8 +197,9 @@ func TestCollectors(t *testing.T) { for 
_, tc := range tests { t.Run(tc.name, func(t *testing.T) { start := time.Now() - buf := tu.NewMockBuffer() + buf := tu.NewMockBuffer(t) params := data.CollectorParams{ + DataType: data.CaptureTypeBinary, ComponentName: serviceName, Interval: captureInterval, Logger: logging.NewTestLogger(t), diff --git a/testutils/file_utils.go b/testutils/file_utils.go index ece95ec9d40..a160fa45141 100644 --- a/testutils/file_utils.go +++ b/testutils/file_utils.go @@ -103,20 +103,22 @@ func BuildTempModuleWithFirstRun(tb testing.TB, modDir string) string { // MockBuffer is a buffered writer that just appends data to an array to read // without needing a real file system for testing. type MockBuffer struct { + t *testing.T ctx context.Context cancel context.CancelFunc - Writes chan *v1.SensorData + Writes chan []*v1.SensorData } // NewMockBuffer returns a mock buffer. // This needs to be closed before the collector, otherwise the // collector's Close method will block. -func NewMockBuffer() *MockBuffer { +func NewMockBuffer(t *testing.T) *MockBuffer { c, cancel := context.WithCancel(context.Background()) return &MockBuffer{ + t: t, ctx: c, cancel: cancel, - Writes: make(chan *v1.SensorData, 1), + Writes: make(chan []*v1.SensorData, 1), } } @@ -147,29 +149,34 @@ func CheckMockBufferWrites( t *testing.T, ctx context.Context, start time.Time, - writes chan *v1.SensorData, - expected *v1.SensorData, + writes chan []*v1.SensorData, + expecteds []*v1.SensorData, ) { select { case <-ctx.Done(): t.Error("timeout") t.FailNow() - case write := <-writes: + case writes := <-writes: end := time.Now() - // nil out to make comparable - requestedAt := write.Metadata.TimeRequested.AsTime() - receivedAt := write.Metadata.TimeReceived.AsTime() - test.That(t, start, test.ShouldHappenOnOrBefore, requestedAt) - test.That(t, requestedAt, test.ShouldHappenOnOrBefore, receivedAt) - test.That(t, receivedAt, test.ShouldHappenOnOrBefore, end) - // nil out to make comparable - write.Metadata.TimeRequested = 
nil - write.Metadata.TimeReceived = nil - test.That(t, write.GetMetadata(), test.ShouldResemble, expected.GetMetadata()) - if isBinary(write) { - test.That(t, write.GetBinary(), test.ShouldResemble, expected.GetBinary()) - } else { - test.That(t, write.GetStruct(), test.ShouldResemble, expected.GetStruct()) + test.That(t, len(writes), test.ShouldEqual, len(expecteds)) + for i, expected := range expecteds { + write := writes[i] + requestedAt := write.Metadata.TimeRequested.AsTime() + receivedAt := write.Metadata.TimeReceived.AsTime() + test.That(t, start, test.ShouldHappenOnOrBefore, requestedAt) + test.That(t, requestedAt, test.ShouldHappenOnOrBefore, receivedAt) + test.That(t, receivedAt, test.ShouldHappenOnOrBefore, end) + test.That(t, len(expecteds), test.ShouldEqual, len(writes)) + // nil out to make comparable + // (otherwise ShouldResemble fails on the differing capture timestamps) + write.Metadata.TimeRequested = nil + write.Metadata.TimeReceived = nil + test.That(t, write.GetMetadata(), test.ShouldResemble, expected.GetMetadata()) + if isBinary(write) { + test.That(t, write.GetBinary(), test.ShouldResemble, expected.GetBinary()) + } else { + test.That(t, write.GetStruct(), test.ShouldResemble, expected.GetStruct()) + } } } } @@ -179,13 +186,37 @@ func (m *MockBuffer) Close() { m.cancel() } -// Write adds the item to the channel. -func (m *MockBuffer) Write(item *v1.SensorData) error { +// WriteBinary writes binary sensor data to the Writes channel. +func (m *MockBuffer) WriteBinary(items []*v1.SensorData) error { + if err := m.ctx.Err(); err != nil { + return err + } + for i, item := range items { + if !isBinary(item) { + m.t.Errorf("MockBuffer.WriteBinary called with non binary data. index: %d, items: %#v\n", i, items) + m.t.FailNow() + } + } + select { + case m.Writes <- items: + case <-m.ctx.Done(): + } + return nil +} + +// WriteTabular writes tabular sensor data to the Writes channel. 
+func (m *MockBuffer) WriteTabular(items []*v1.SensorData) error { if err := m.ctx.Err(); err != nil { return err } + for i, item := range items { + if isBinary(item) { + m.t.Errorf("MockBuffer.WriteTabular called with binary data. index: %d, items: %#v\n", i, items) + m.t.FailNow() + } + } select { - case m.Writes <- item: + case m.Writes <- items: case <-m.ctx.Done(): } return nil