From 842ea823f7768823f0ee49e8589b40a7178c88b4 Mon Sep 17 00:00:00 2001
From: Nick Sanford
Date: Fri, 15 Nov 2024 12:01:27 -0500
Subject: [PATCH] [DATA-3338] fix capture all from camera stability

---
 components/arm/collectors.go | 25 +-
 components/arm/collectors_test.go | 13 +-
 components/board/collectors.go | 30 +-
 components/board/collectors_test.go | 14 +-
 components/camera/collectors.go | 78 +-
 components/camera/collectors_test.go | 63 +-
 components/encoder/collectors.go | 13 +-
 components/encoder/collectors_test.go | 7 +-
 components/gantry/collectors.go | 25 +-
 components/gantry/collectors_test.go | 13 +-
 components/motor/collectors.go | 25 +-
 components/motor/collectors_test.go | 23 +-
 components/movementsensor/collectors.go | 106 +--
 components/movementsensor/collectors_test.go | 105 +--
 components/powersensor/collectors.go | 56 +-
 components/powersensor/collectors_test.go | 31 +-
 components/sensor/collectors.go | 20 +-
 components/sensor/collectors_test.go | 7 +-
 components/servo/collectors.go | 13 +-
 components/servo/collectors_test.go | 7 +-
 data/capture_buffer.go | 88 ++-
 data/capture_buffer_test.go | 121 ++--
 data/capture_file.go | 69 +-
 data/capture_file_test.go | 2 +-
 data/collector.go | 150 ++--
 data/collector_test.go | 140 ++--
 data/collector_types.go | 394 ++++++++++
 data/collector_types_test.go | 425 +++++++++++
 data/registry.go | 16 +-
 go.mod | 62 +-
 go.sum | 167 +++--
 .../datamanager/builtin/builtin_sync_test.go | 3 +-
 .../datamanager/builtin/capture/capture.go | 3 +-
 .../builtin/sync/exponential_retry.go | 12 +-
 .../builtin/sync/upload_data_capture_file.go | 302 ++++++--
 .../sync/upload_data_capture_file_test.go | 672 ++++++++++++++++++
 services/slam/collectors.go | 26 +-
 services/slam/collectors_test.go | 22 +-
 services/vision/collectors.go | 102 +--
 services/vision/collectors_test.go | 115 +--
 testutils/file_utils.go | 77 +-
 41 files changed, 2719 insertions(+), 923 deletions(-)
 create mode 100644 data/collector_types.go
 create mode 100644 data/collector_types_test.go
 create mode 100644 services/datamanager/builtin/sync/upload_data_capture_file_test.go

diff --git a/components/arm/collectors.go b/components/arm/collectors.go index 6dd9a60c618..9c1528cb6be 100644 --- a/components/arm/collectors.go +++ b/components/arm/collectors.go @@ -5,6 +5,7 @@ package arm import ( "context" "errors" + "time" v1 "go.viam.com/api/common/v1" pb "go.viam.com/api/component/arm/v1" @@ -39,18 +40,20 @@ func newEndPositionCollector(resource interface{}, params data.CollectorParams) return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult v, err := arm.EndPosition(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, endPosition.String(), err) + return res, data.FailedToReadErr(params.ComponentName, endPosition.String(), err) } o := v.Orientation().OrientationVectorDegrees() - return pb.GetEndPositionResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetEndPositionResponse{ Pose: &v1.Pose{ X: v.Point().X, Y: v.Point().Y, @@ -60,7 +63,7 @@ func newEndPositionCollector(resource interface{}, params data.CollectorParams) OZ: o.OZ, Theta: o.Theta, }, - }, nil + }) }) return data.NewCollector(cFunc, params) } @@ -73,21 +76,23 @@ func newJointPositionsCollector(resource interface{}, params data.CollectorParam return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult v, err := arm.JointPositions(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, jointPositions.String(), err) + return res, data.FailedToReadErr(params.ComponentName, jointPositions.String(), err) } jp, err := referenceframe.JointPositionsFromInputs(arm.ModelFrame(), v) if err != nil { - return nil, data.FailedToReadErr(params.ComponentName, jointPositions.String(), err) + return res, data.FailedToReadErr(params.ComponentName, jointPositions.String(), err) } - return pb.GetJointPositionsResponse{Positions: jp}, nil + return data.NewTabularCaptureResult(timeRequested, pb.GetJointPositionsResponse{Positions: jp}) }) return data.NewCollector(cFunc, params) } diff --git a/components/arm/collectors_test.go b/components/arm/collectors_test.go index acd016fbc02..2898308975c 100644 --- a/components/arm/collectors_test.go +++ b/components/arm/collectors_test.go @@ -31,12 +31,12 @@ func TestCollectors(t *testing.T) { tests := []struct { name string collector data.CollectorConstructor - expected *datasyncpb.SensorData + expected []*datasyncpb.SensorData }{ { name: "End position collector should write a pose", collector: arm.NewEndPositionCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "pose": map[string]any{ @@ -49,27 +49,28 @@ func TestCollectors(t *testing.T) { "z": 3, }, })}, - }, + }}, }, { name: "Joint positions collector should write a list of positions", collector: arm.NewJointPositionsCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "positions": map[string]any{ "values": []any{1.0, 2.0, 3.0}, }, })}, - }, + }}, }, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { start := time.Now() - buf := tu.NewMockBuffer() + buf := tu.NewMockBuffer(t) params := data.CollectorParams{ + DataType: data.CaptureTypeTabular, ComponentName: componentName, Interval: captureInterval, Logger: logging.NewTestLogger(t), diff --git 
a/components/board/collectors.go b/components/board/collectors.go index 64a77bfdaa0..6ac74ecad97 100644 --- a/components/board/collectors.go +++ b/components/board/collectors.go @@ -2,6 +2,7 @@ package board import ( "context" + "time" "github.com/pkg/errors" pb "go.viam.com/api/component/board/v1" @@ -39,10 +40,12 @@ func newAnalogCollector(resource interface{}, params data.CollectorParams) (data return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, arg map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, arg map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult var analogValue AnalogValue if _, ok := arg[analogReaderNameKey]; !ok { - return nil, data.FailedToReadErr(params.ComponentName, analogs.String(), + return res, data.FailedToReadErr(params.ComponentName, analogs.String(), errors.New("Must supply reader_name in additional_params for analog collector")) } if reader, err := board.AnalogByName(arg[analogReaderNameKey].String()); err == nil { @@ -51,17 +54,18 @@ func newAnalogCollector(resource interface{}, params data.CollectorParams) (data // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, analogs.String(), err) + return res, data.FailedToReadErr(params.ComponentName, analogs.String(), err) } } - return pb.ReadAnalogReaderResponse{ + + return data.NewTabularCaptureResult(timeRequested, pb.ReadAnalogReaderResponse{ Value: int32(analogValue.Value), MinRange: analogValue.Min, MaxRange: analogValue.Max, StepSize: analogValue.StepSize, - }, nil + }) }) return data.NewCollector(cFunc, params) } @@ -74,10 +78,12 @@ func newGPIOCollector(resource interface{}, params data.CollectorParams) (data.C return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, arg map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, arg map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult var value bool if _, ok := arg[gpioPinNameKey]; !ok { - return nil, data.FailedToReadErr(params.ComponentName, gpios.String(), + return res, data.FailedToReadErr(params.ComponentName, gpios.String(), errors.New("Must supply pin_name in additional params for gpio collector")) } if gpio, err := board.GPIOPinByName(arg[gpioPinNameKey].String()); err == nil { @@ -86,14 +92,14 @@ func newGPIOCollector(resource interface{}, params data.CollectorParams) (data.C // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, gpios.String(), err) + return res, data.FailedToReadErr(params.ComponentName, gpios.String(), err) } } - return pb.GetGPIOResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetGPIOResponse{ High: value, - }, nil + }) }) return data.NewCollector(cFunc, params) } diff --git a/components/board/collectors_test.go b/components/board/collectors_test.go index 200ea6cd1f7..7e9f8692315 100644 --- a/components/board/collectors_test.go +++ b/components/board/collectors_test.go @@ -30,11 +30,12 @@ func TestCollectors(t *testing.T) { name string params data.CollectorParams collector data.CollectorConstructor - expected *datasyncpb.SensorData + expected []*datasyncpb.SensorData }{ { name: "Board analog collector should write an analog response", params: data.CollectorParams{ + DataType: data.CaptureTypeTabular, ComponentName: componentName, Interval: captureInterval, Logger: logging.NewTestLogger(t), @@ -43,7 +44,7 @@ func TestCollectors(t *testing.T) { }, }, collector: board.NewAnalogCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "value": 1, @@ -51,11 +52,12 @@ func TestCollectors(t *testing.T) { "max_range": 10, "step_size": float64(float32(0.1)), })}, - }, + }}, }, { name: "Board gpio collector should write a gpio response", params: data.CollectorParams{ + DataType: data.CaptureTypeTabular, ComponentName: componentName, Interval: captureInterval, Logger: logging.NewTestLogger(t), @@ -64,19 +66,19 @@ func TestCollectors(t *testing.T) { }, }, collector: board.NewGPIOCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "high": true, })}, - }, + }}, }, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { start := time.Now() - buf := tu.NewMockBuffer() + buf := tu.NewMockBuffer(t) tc.params.Clock = clock.New() tc.params.Target = buf diff --git a/components/camera/collectors.go b/components/camera/collectors.go index 37663e7335f..de05b5048c8 100644 --- a/components/camera/collectors.go +++ b/components/camera/collectors.go @@ -3,10 +3,10 @@ package camera import ( "bytes" "context" + "time" "github.com/pkg/errors" "go.opencensus.io/trace" - pb "go.viam.com/api/component/camera/v1" "google.golang.org/protobuf/types/known/anypb" "google.golang.org/protobuf/types/known/wrapperspb" @@ -42,7 +42,9 @@ func newNextPointCloudCollector(resource interface{}, params data.CollectorParam return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult _, span := trace.StartSpan(ctx, "camera::data::collector::CaptureFunc::NextPointCloud") defer span.End() @@ -53,9 +55,9 @@ func newNextPointCloudCollector(resource interface{}, params data.CollectorParam // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, nextPointCloud.String(), err) + return res, data.FailedToReadErr(params.ComponentName, nextPointCloud.String(), err) } var buf bytes.Buffer @@ -64,10 +66,17 @@ func newNextPointCloudCollector(resource interface{}, params data.CollectorParam buf.Grow(headerSize + v.Size()*4*4) // 4 numbers per point, each 4 bytes err = pointcloud.ToPCD(v, &buf, pointcloud.PCDBinary) if err != nil { - return nil, errors.Errorf("failed to convert returned point cloud to PCD: %v", err) + return res, errors.Errorf("failed to convert returned point cloud to PCD: %v", err) } } - return buf.Bytes(), nil + ts := data.Timestamps{ + TimeRequested: timeRequested, + TimeReceived: time.Now(), + } + return data.NewBinaryCaptureResult(ts, []data.Binary{{ + Payload: buf.Bytes(), + MimeType: data.MimeTypeApplicationPcd, + }}), nil }) return data.NewCollector(cFunc, params) } @@ -88,7 +97,9 @@ func newReadImageCollector(resource interface{}, params data.CollectorParams) (d } } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult _, span := trace.StartSpan(ctx, "camera::data::collector::CaptureFunc::ReadImage") defer span.End() @@ -99,10 +110,10 @@ func newReadImageCollector(resource interface{}, params data.CollectorParams) (d // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, readImage.String(), err) + return res, data.FailedToReadErr(params.ComponentName, readImage.String(), err) } defer func() { if release != nil { @@ -112,14 +123,26 @@ func newReadImageCollector(resource interface{}, params data.CollectorParams) (d mimeStr := new(wrapperspb.StringValue) if err := mimeType.UnmarshalTo(mimeStr); err != nil { - return nil, err + return res, err } outBytes, err := rimage.EncodeImage(ctx, img, mimeStr.Value) if err != nil { - return nil, err + return res, err + } + + actualOutMIME, _ := utils.CheckLazyMIMEType(mimeStr.Value) + // NOTE (Nick S): This is the mime type that the user described in the config, not the actual + // mime type returned from the camera.GetImage API + mimeType := data.CameraFormatToMimeType(utils.MimeTypeToFormat[actualOutMIME]) + ts := data.Timestamps{ + TimeRequested: timeRequested, + TimeReceived: time.Now(), } - return outBytes, nil + return data.NewBinaryCaptureResult(ts, []data.Binary{{ + MimeType: mimeType, + Payload: outBytes, + }}), nil }) return data.NewCollector(cFunc, params) } @@ -129,37 +152,36 @@ func newGetImagesCollector(resource interface{}, params data.CollectorParams) (d if err != nil { return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + var res data.CaptureResult _, span := trace.StartSpan(ctx, "camera::data::collector::CaptureFunc::GetImages") defer span.End() - ctx = context.WithValue(ctx, data.FromDMContextKey{}, true) resImgs, resMetadata, err := camera.Images(ctx) if 
err != nil { if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, getImages.String(), err) + return res, data.FailedToReadErr(params.ComponentName, getImages.String(), err) } - var imgsConverted []*pb.Image + var binaries []data.Binary for _, img := range resImgs { format, imgBytes, err := encodeImageFromUnderlyingType(ctx, img.Image) if err != nil { - return nil, err + return res, err } - imgPb := &pb.Image{ - SourceName: img.SourceName, - Format: format, - Image: imgBytes, - } - imgsConverted = append(imgsConverted, imgPb) + binaries = append(binaries, data.Binary{ + Payload: imgBytes, + MimeType: data.CameraFormatToMimeType(format), + }) + } + ts := data.Timestamps{ + TimeRequested: resMetadata.CapturedAt, + TimeReceived: resMetadata.CapturedAt, } - return pb.GetImagesResponse{ - ResponseMetadata: resMetadata.AsProto(), - Images: imgsConverted, - }, nil + return data.NewBinaryCaptureResult(ts, binaries), nil }) return data.NewCollector(cFunc, params) } diff --git a/components/camera/collectors_test.go b/components/camera/collectors_test.go index 60af3460585..a917aa50295 100644 --- a/components/camera/collectors_test.go +++ b/components/camera/collectors_test.go @@ -12,12 +12,10 @@ import ( "github.com/benbjohnson/clock" datasyncpb "go.viam.com/api/app/datasync/v1" - camerapb "go.viam.com/api/component/camera/v1" "go.viam.com/test" "go.viam.com/utils/artifact" "google.golang.org/protobuf/reflect/protoreflect" "google.golang.org/protobuf/types/known/anypb" - "google.golang.org/protobuf/types/known/timestamppb" "google.golang.org/protobuf/types/known/wrapperspb" "go.viam.com/rdk/components/camera" @@ -78,10 +76,6 @@ func TestCollectors(t *testing.T) { test.That(t, err, test.ShouldBeNil) viamLogoJpeg, err := io.ReadAll(base64.NewDecoder(base64.StdEncoding, bytes.NewReader(viamLogoJpegB64))) test.That(t, err, test.ShouldBeNil) - viamLogoJpegAsInts := []any{} - for _, b := range viamLogoJpeg { - viamLogoJpegAsInts = append(viamLogoJpegAsInts, int(b)) - } img := rimage.NewLazyEncodedImage(viamLogoJpeg, utils.MimeTypeJPEG) // 32 x 32 image @@ -94,59 +88,50 @@ func TestCollectors(t *testing.T) { var pcdBuf bytes.Buffer test.That(t, pointcloud.ToPCD(pcd, &pcdBuf, pointcloud.PCDBinary), test.ShouldBeNil) - now := time.Now() - nowPB := timestamppb.New(now) - cam := newCamera(img, img, now, pcd) + cam := newCamera(img, img, pcd) tests := []struct { name string collector data.CollectorConstructor - expected *datasyncpb.SensorData + expected []*datasyncpb.SensorData camera camera.Camera }{ { name: "ReadImage returns a non nil binary response", collector: camera.NewReadImageCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Binary{Binary: viamLogoJpeg}, - }, + }}, camera: cam, }, { name: "NextPointCloud returns a non nil binary response", collector: camera.NewNextPointCloudCollector, - expected: &datasyncpb.SensorData{ - Metadata: &datasyncpb.SensorMetadata{}, - Data: &datasyncpb.SensorData_Binary{Binary: pcdBuf.Bytes()}, - }, + expected: []*datasyncpb.SensorData{{ + Metadata: &datasyncpb.SensorMetadata{ + MimeType: datasyncpb.MimeType_MIME_TYPE_APPLICATION_PCD, + }, + Data: &datasyncpb.SensorData_Binary{Binary: pcdBuf.Bytes()}, + }}, camera: cam, }, { name: "GetImages returns a non nil tabular response", collector: camera.NewGetImagesCollector, - expected: &datasyncpb.SensorData{ - Metadata: 
&datasyncpb.SensorMetadata{}, - Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ - "response_metadata": map[string]any{ - "captured_at": map[string]any{ - "seconds": nowPB.Seconds, - "nanos": nowPB.Nanos, - }, + expected: []*datasyncpb.SensorData{ + { + Metadata: &datasyncpb.SensorMetadata{ + MimeType: datasyncpb.MimeType_MIME_TYPE_IMAGE_JPEG, }, - "images": []any{ - map[string]any{ - "source_name": "left", - "format": int(camerapb.Format_FORMAT_JPEG), - "image": viamLogoJpegAsInts, - }, - map[string]any{ - "source_name": "right", - "format": int(camerapb.Format_FORMAT_JPEG), - "image": viamLogoJpegAsInts, - }, + Data: &datasyncpb.SensorData_Binary{Binary: viamLogoJpeg}, + }, + { + Metadata: &datasyncpb.SensorMetadata{ + MimeType: datasyncpb.MimeType_MIME_TYPE_IMAGE_JPEG, }, - })}, + Data: &datasyncpb.SensorData_Binary{Binary: viamLogoJpeg}, + }, }, camera: cam, }, @@ -155,8 +140,9 @@ func TestCollectors(t *testing.T) { for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { start := time.Now() - buf := tu.NewMockBuffer() + buf := tu.NewMockBuffer(t) params := data.CollectorParams{ + DataType: data.CaptureTypeBinary, ComponentName: serviceName, Interval: captureInterval, Logger: logging.NewTestLogger(t), @@ -181,7 +167,6 @@ func TestCollectors(t *testing.T) { func newCamera( left, right image.Image, - capturedAt time.Time, pcd pointcloud.PointCloud, ) camera.Camera { v := &inject.Camera{} @@ -200,7 +185,7 @@ func newCamera( {Image: left, SourceName: "left"}, {Image: right, SourceName: "right"}, }, - resource.ResponseMetadata{CapturedAt: capturedAt}, + resource.ResponseMetadata{CapturedAt: time.Now()}, nil } diff --git a/components/encoder/collectors.go b/components/encoder/collectors.go index 9ad643bc721..dc21d5653f0 100644 --- a/components/encoder/collectors.go +++ b/components/encoder/collectors.go @@ -3,6 +3,7 @@ package encoder import ( "context" "errors" + "time" pb "go.viam.com/api/component/encoder/v1" "google.golang.org/protobuf/types/known/anypb" @@ -31,20 +32,22 @@ func newTicksCountCollector(resource interface{}, params data.CollectorParams) ( return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult v, positionType, err := encoder.Position(ctx, PositionTypeUnspecified, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, ticksCount.String(), err) + return res, data.FailedToReadErr(params.ComponentName, ticksCount.String(), err) } - return pb.GetPositionResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetPositionResponse{ Value: float32(v), PositionType: pb.PositionType(positionType), - }, nil + }) }) return data.NewCollector(cFunc, params) } diff --git a/components/encoder/collectors_test.go b/components/encoder/collectors_test.go index d24e719a860..346ba178f9b 100644 --- a/components/encoder/collectors_test.go +++ b/components/encoder/collectors_test.go @@ -23,8 +23,9 @@ const ( func TestCollectors(t *testing.T) { start := time.Now() - buf := tu.NewMockBuffer() + buf := tu.NewMockBuffer(t) params := data.CollectorParams{ + DataType: data.CaptureTypeTabular, ComponentName: "encoder", Interval: captureInterval, Logger: logging.NewTestLogger(t), @@ -41,13 +42,13 @@ func TestCollectors(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), time.Second) defer cancel() - tu.CheckMockBufferWrites(t, ctx, start, buf.Writes, &datasyncpb.SensorData{ + tu.CheckMockBufferWrites(t, ctx, start, buf.Writes, []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "value": 1.0, "position_type": int(pb.PositionType_POSITION_TYPE_TICKS_COUNT), })}, - }) + }}) buf.Close() } diff --git a/components/gantry/collectors.go b/components/gantry/collectors.go index 149f52f5a54..ff77f6945ab 100644 --- a/components/gantry/collectors.go +++ b/components/gantry/collectors.go @@ -3,6 +3,7 @@ package gantry import ( "context" "errors" + "time" pb "go.viam.com/api/component/gantry/v1" "google.golang.org/protobuf/types/known/anypb" @@ -35,19 +36,21 @@ func newPositionCollector(resource interface{}, params data.CollectorParams) (da return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult v, err := gantry.Position(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, position.String(), err) + return res, data.FailedToReadErr(params.ComponentName, position.String(), err) } - return pb.GetPositionResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetPositionResponse{ PositionsMm: v, - }, nil + }) }) return data.NewCollector(cFunc, params) } @@ -60,19 +63,21 @@ func newLengthsCollector(resource interface{}, params data.CollectorParams) (dat return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult v, err := gantry.Lengths(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. 
The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, lengths.String(), err) + return res, data.FailedToReadErr(params.ComponentName, lengths.String(), err) } - return pb.GetLengthsResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetLengthsResponse{ LengthsMm: v, - }, nil + }) }) return data.NewCollector(cFunc, params) } diff --git a/components/gantry/collectors_test.go b/components/gantry/collectors_test.go index 1080fe7abc1..18fd5f3ec9e 100644 --- a/components/gantry/collectors_test.go +++ b/components/gantry/collectors_test.go @@ -28,35 +28,36 @@ func TestCollectors(t *testing.T) { tests := []struct { name string collector data.CollectorConstructor - expected *datasyncpb.SensorData + expected []*datasyncpb.SensorData }{ { name: "Length collector should write a lengths response", collector: gantry.NewLengthsCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "lengths_mm": []any{1000, 2000, 3000}, })}, - }, + }}, }, { name: "Position collector should write a list of positions", collector: gantry.NewPositionCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "positions_mm": []any{1000, 2000, 3000}, })}, - }, + }}, }, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { start := time.Now() - buf := tu.NewMockBuffer() + buf := tu.NewMockBuffer(t) params := data.CollectorParams{ + DataType: data.CaptureTypeTabular, ComponentName: componentName, Interval: captureInterval, Logger: logging.NewTestLogger(t), diff --git a/components/motor/collectors.go b/components/motor/collectors.go index 1d328d7b36c..89401f87d56 100644 --- a/components/motor/collectors.go +++ b/components/motor/collectors.go @@ -3,6 +3,7 @@ package motor import ( "context" "errors" + "time" pb "go.viam.com/api/component/motor/v1" "google.golang.org/protobuf/types/known/anypb" @@ -35,19 +36,21 @@ func newPositionCollector(resource interface{}, params data.CollectorParams) (da return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult v, err := motor.Position(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, position.String(), err) + return res, data.FailedToReadErr(params.ComponentName, position.String(), err) } - return pb.GetPositionResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetPositionResponse{ Position: v, - }, nil + }) }) return data.NewCollector(cFunc, params) } @@ -60,20 +63,22 @@ func newIsPoweredCollector(resource interface{}, params data.CollectorParams) (d return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult v, powerPct, err := motor.IsPowered(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, isPowered.String(), err) + return res, data.FailedToReadErr(params.ComponentName, isPowered.String(), err) } - return pb.IsPoweredResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.IsPoweredResponse{ IsOn: v, PowerPct: powerPct, - }, nil + }) }) return data.NewCollector(cFunc, params) } diff --git a/components/motor/collectors_test.go b/components/motor/collectors_test.go index 8b5444d8909..15bd2de53af 100644 --- a/components/motor/collectors_test.go +++ b/components/motor/collectors_test.go @@ -25,27 +25,29 @@ func TestCollectors(t *testing.T) { tests := []struct { name string collector data.CollectorConstructor - expected *datasyncpb.SensorData + expected []*datasyncpb.SensorData }{ { name: "Motor position collector should write a position response", collector: motor.NewPositionCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "position": 1.0, })}, - }, + }}, }, { name: "Motor isPowered collector should write an isPowered response", collector: motor.NewIsPoweredCollector, - expected: &datasyncpb.SensorData{ - Metadata: &datasyncpb.SensorMetadata{}, - Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ - "is_on": false, - "power_pct": 0.5, - })}, + expected: []*datasyncpb.SensorData{ + { + Metadata: &datasyncpb.SensorMetadata{}, + Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ + "is_on": false, + "power_pct": 0.5, + })}, + }, }, }, } @@ -53,8 +55,9 @@ func TestCollectors(t *testing.T) { for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { start := time.Now() - buf := tu.NewMockBuffer() + buf := tu.NewMockBuffer(t) params := data.CollectorParams{ + DataType: data.CaptureTypeTabular, ComponentName: componentName, Interval: captureInterval, Logger: logging.NewTestLogger(t), diff --git a/components/movementsensor/collectors.go b/components/movementsensor/collectors.go index 2d985d26a3f..5d4e814d169 100644 --- a/components/movementsensor/collectors.go +++ b/components/movementsensor/collectors.go @@ -3,13 +3,13 @@ package movementsensor import ( "context" "errors" + "time" v1 "go.viam.com/api/common/v1" pb 
"go.viam.com/api/component/movementsensor/v1" "google.golang.org/protobuf/types/known/anypb" "go.viam.com/rdk/data" - "go.viam.com/rdk/protoutils" "go.viam.com/rdk/spatialmath" ) @@ -53,182 +53,185 @@ func assertMovementSensor(resource interface{}) (MovementSensor, error) { return ms, nil } -// newLinearVelocityCollector returns a collector to register a linear velocity method. If one is already registered -// with the same MethodMetadata it will panic. +//nolint:dupl func newLinearVelocityCollector(resource interface{}, params data.CollectorParams) (data.Collector, error) { ms, err := assertMovementSensor(resource) if err != nil { return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult vec, err := ms.LinearVelocity(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, position.String(), err) + return res, data.FailedToReadErr(params.ComponentName, position.String(), err) } - return pb.GetLinearVelocityResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetLinearVelocityResponse{ LinearVelocity: &v1.Vector3{ X: vec.X, Y: vec.Y, Z: vec.Z, }, - }, nil + }) }) return data.NewCollector(cFunc, params) } -// newPositionCollector returns a collector to register a position method. If one is already registered -// with the same MethodMetadata it will panic. func newPositionCollector(resource interface{}, params data.CollectorParams) (data.Collector, error) { ms, err := assertMovementSensor(resource) if err != nil { return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult pos, altitude, err := ms.Position(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, linearVelocity.String(), err) + return res, data.FailedToReadErr(params.ComponentName, linearVelocity.String(), err) } var lat, lng float64 if pos != nil { lat = pos.Lat() lng = pos.Lng() } - return pb.GetPositionResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetPositionResponse{ Coordinate: &v1.GeoPoint{ Latitude: lat, Longitude: lng, }, AltitudeM: float32(altitude), - }, nil + }) }) return data.NewCollector(cFunc, params) } -// newAngularVelocityCollector returns a collector to register an angular velocity method. If one is already registered -// with the same MethodMetadata it will panic. 
+//nolint:dupl func newAngularVelocityCollector(resource interface{}, params data.CollectorParams) (data.Collector, error) { ms, err := assertMovementSensor(resource) if err != nil { return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult vel, err := ms.AngularVelocity(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, angularVelocity.String(), err) + return res, data.FailedToReadErr(params.ComponentName, angularVelocity.String(), err) } - return pb.GetAngularVelocityResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetAngularVelocityResponse{ AngularVelocity: &v1.Vector3{ X: vel.X, Y: vel.Y, Z: vel.Z, }, - }, nil + }) }) return data.NewCollector(cFunc, params) } -// newCompassHeadingCollector returns a collector to register a compass heading method. If one is already registered -// with the same MethodMetadata it will panic. func newCompassHeadingCollector(resource interface{}, params data.CollectorParams) (data.Collector, error) { ms, err := assertMovementSensor(resource) if err != nil { return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult heading, err := ms.CompassHeading(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, compassHeading.String(), err) + return res, data.FailedToReadErr(params.ComponentName, compassHeading.String(), err) } - return pb.GetCompassHeadingResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetCompassHeadingResponse{ Value: heading, - }, nil + }) }) return data.NewCollector(cFunc, params) } -// newLinearAccelerationCollector returns a collector to register a linear acceleration method. If one is already registered -// with the same MethodMetadata it will panic. +//nolint:dupl func newLinearAccelerationCollector(resource interface{}, params data.CollectorParams) (data.Collector, error) { ms, err := assertMovementSensor(resource) if err != nil { return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult accel, err := ms.LinearAcceleration(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, linearAcceleration.String(), err) + return res, data.FailedToReadErr(params.ComponentName, linearAcceleration.String(), err) } - return pb.GetLinearAccelerationResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetLinearAccelerationResponse{ LinearAcceleration: &v1.Vector3{ X: accel.X, Y: accel.Y, Z: accel.Z, }, - }, nil + }) }) return data.NewCollector(cFunc, params) } -// newOrientationCollector returns a collector to register an orientation method. If one is already registered -// with the same MethodMetadata it will panic. func newOrientationCollector(resource interface{}, params data.CollectorParams) (data.Collector, error) { ms, err := assertMovementSensor(resource) if err != nil { return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult orient, err := ms.Orientation(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, orientation.String(), err) + return res, data.FailedToReadErr(params.ComponentName, orientation.String(), err) } var orientVector *spatialmath.OrientationVectorDegrees if orient != nil { orientVector = orient.OrientationVectorDegrees() } - return pb.GetOrientationResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetOrientationResponse{ Orientation: &v1.Orientation{ OX: orientVector.OX, OY: orientVector.OY, OZ: orientVector.OZ, Theta: orientVector.Theta, }, - }, nil + }) }) return data.NewCollector(cFunc, params) } @@ -241,23 +244,20 @@ func newReadingsCollector(resource interface{}, params data.CollectorParams) (da return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, arg map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, arg map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult values, err := ms.Readings(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, readings.String(), err) - } - readings, err := protoutils.ReadingGoToProto(values) - if err != nil { - return nil, err + return res, data.FailedToReadErr(params.ComponentName, readings.String(), err) } - return v1.GetReadingsResponse{ - Readings: readings, - }, nil + + return data.NewTabularCaptureResultReadings(timeRequested, values) }) return data.NewCollector(cFunc, params) } diff --git a/components/movementsensor/collectors_test.go b/components/movementsensor/collectors_test.go index f87c21ba99d..9a504367396 100644 --- a/components/movementsensor/collectors_test.go +++ b/components/movementsensor/collectors_test.go @@ -36,12 +36,12 @@ func TestCollectors(t *testing.T) { tests := []struct { name string collector data.CollectorConstructor - expected *datasyncpb.SensorData + expected []*datasyncpb.SensorData }{ { name: "Movement sensor linear velocity collector should write a velocity response", collector: movementsensor.NewLinearVelocityCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "linear_velocity": map[string]any{ @@ -50,26 +50,28 @@ func TestCollectors(t *testing.T) { "z": 3.0, }, })}, - }, + }}, }, { name: "Movement sensor position collector should write a position response", collector: movementsensor.NewPositionCollector, - expected: &datasyncpb.SensorData{ - Metadata: &datasyncpb.SensorMetadata{}, - Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ - "coordinate": map[string]any{ - "latitude": 1.0, - "longitude": 2.0, - }, - "altitude_m": 3.0, - })}, + expected: []*datasyncpb.SensorData{ + { + Metadata: &datasyncpb.SensorMetadata{}, + Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ + "coordinate": map[string]any{ + "latitude": 1.0, + "longitude": 2.0, + }, + "altitude_m": 3.0, + })}, + }, }, }, { name: "Movement sensor angular velocity collector should write a velocity response", collector: movementsensor.NewAngularVelocityCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "angular_velocity": map[string]any{ @@ -78,58 +80,66 @@ func TestCollectors(t *testing.T) { "z": 3.0, }, })}, - }, + }}, }, { name: "Movement sensor compass heading collector should write a heading response", collector: movementsensor.NewCompassHeadingCollector, - expected: &datasyncpb.SensorData{ - Metadata: &datasyncpb.SensorMetadata{}, - Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ - "value": 1.0, - })}, + expected: []*datasyncpb.SensorData{ + { + Metadata: &datasyncpb.SensorMetadata{}, + Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ + "value": 1.0, + })}, + }, }, }, { name: "Movement sensor linear acceleration collector should write an acceleration response", collector: movementsensor.NewLinearAccelerationCollector, - expected: &datasyncpb.SensorData{ - Metadata: &datasyncpb.SensorMetadata{}, - Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ - "linear_acceleration": map[string]any{ - "x": 1.0, - "y": 2.0, - "z": 3.0, - }, - })}, + expected: []*datasyncpb.SensorData{ + { + Metadata: 
&datasyncpb.SensorMetadata{}, + Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ + "linear_acceleration": map[string]any{ + "x": 1.0, + "y": 2.0, + "z": 3.0, + }, + })}, + }, }, }, { name: "Movement sensor orientation collector should write an orientation response", collector: movementsensor.NewOrientationCollector, - expected: &datasyncpb.SensorData{ - Metadata: &datasyncpb.SensorMetadata{}, - Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ - "orientation": map[string]any{ - "o_x": 0, - "o_y": 0, - "o_z": 1, - "theta": 0, - }, - })}, + expected: []*datasyncpb.SensorData{ + { + Metadata: &datasyncpb.SensorMetadata{}, + Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ + "orientation": map[string]any{ + "o_x": 0, + "o_y": 0, + "o_z": 1, + "theta": 0, + }, + })}, + }, }, }, { name: "Movement sensor readings collector should write a readings response", collector: movementsensor.NewReadingsCollector, - expected: &datasyncpb.SensorData{ - Metadata: &datasyncpb.SensorMetadata{}, - Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ - "readings": map[string]any{ - "reading1": false, - "reading2": "test", - }, - })}, + expected: []*datasyncpb.SensorData{ + { + Metadata: &datasyncpb.SensorMetadata{}, + Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ + "readings": map[string]any{ + "reading1": false, + "reading2": "test", + }, + })}, + }, }, }, } @@ -137,8 +147,9 @@ func TestCollectors(t *testing.T) { for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { start := time.Now() - buf := tu.NewMockBuffer() + buf := tu.NewMockBuffer(t) params := data.CollectorParams{ + DataType: data.CaptureTypeTabular, ComponentName: componentName, Interval: captureInterval, Logger: logging.NewTestLogger(t), diff --git a/components/powersensor/collectors.go b/components/powersensor/collectors.go index 89dc0badefa..aefad9e8a3e 100644 --- a/components/powersensor/collectors.go +++ b/components/powersensor/collectors.go @@ -3,13 +3,12 @@ package powersensor import ( "context" "errors" + "time" - v1 "go.viam.com/api/common/v1" pb "go.viam.com/api/component/powersensor/v1" "google.golang.org/protobuf/types/known/anypb" "go.viam.com/rdk/data" - "go.viam.com/rdk/protoutils" ) type method int64 @@ -51,20 +50,23 @@ func newVoltageCollector(resource interface{}, params data.CollectorParams) (dat return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult volts, isAc, err := ps.Voltage(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, voltage.String(), err) + return res, data.FailedToReadErr(params.ComponentName, voltage.String(), err) } - return pb.GetVoltageResponse{ + + return data.NewTabularCaptureResult(timeRequested, pb.GetVoltageResponse{ Volts: volts, IsAc: isAc, - }, nil + }) }) return data.NewCollector(cFunc, params) } @@ -77,20 +79,22 @@ func newCurrentCollector(resource interface{}, params data.CollectorParams) (dat return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult curr, isAc, err := ps.Current(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, current.String(), err) + return res, data.FailedToReadErr(params.ComponentName, current.String(), err) } - return pb.GetCurrentResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetCurrentResponse{ Amperes: curr, IsAc: isAc, - }, nil + }) }) return data.NewCollector(cFunc, params) } @@ -103,19 +107,21 @@ func newPowerCollector(resource interface{}, params data.CollectorParams) (data. return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, extra map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult pwr, err := ps.Power(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, power.String(), err) + return res, data.FailedToReadErr(params.ComponentName, power.String(), err) } - return pb.GetPowerResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetPowerResponse{ Watts: pwr, - }, nil + }) }) return data.NewCollector(cFunc, params) } @@ -128,23 +134,19 @@ func newReadingsCollector(resource interface{}, params data.CollectorParams) (da return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, arg map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, arg map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult values, err := ps.Readings(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, readings.String(), err) - } - readings, err := protoutils.ReadingGoToProto(values) - if err != nil { - return nil, err + return res, data.FailedToReadErr(params.ComponentName, readings.String(), err) } - return v1.GetReadingsResponse{ - Readings: readings, - }, nil + return data.NewTabularCaptureResultReadings(timeRequested, values) }) return data.NewCollector(cFunc, params) } diff --git a/components/powersensor/collectors_test.go b/components/powersensor/collectors_test.go index e27aa270527..c5f31f0fefe 100644 --- a/components/powersensor/collectors_test.go +++ b/components/powersensor/collectors_test.go @@ -27,44 +27,46 @@ func TestCollectors(t *testing.T) { tests := []struct { name string collector data.CollectorConstructor - expected *datasyncpb.SensorData + expected []*datasyncpb.SensorData }{ { name: "Power sensor voltage collector should write a voltage response", collector: powersensor.NewVoltageCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "volts": 1.0, "is_ac": false, })}, - }, + }}, }, { name: "Power sensor current collector should write a current response", collector: powersensor.NewCurrentCollector, - expected: &datasyncpb.SensorData{ - Metadata: &datasyncpb.SensorMetadata{}, - Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ - "amperes": 1.0, - "is_ac": false, - })}, + expected: []*datasyncpb.SensorData{ + { + Metadata: &datasyncpb.SensorMetadata{}, + Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ + "amperes": 1.0, + "is_ac": false, + })}, + }, }, }, { name: "Power sensor power collector should write a power response", collector: powersensor.NewPowerCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "watts": 1.0, })}, - }, + }}, }, { name: "Power sensor readings collector should write a readings response", collector: powersensor.NewReadingsCollector, - expected: &datasyncpb.SensorData{ + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "readings": map[string]any{ @@ -72,15 +74,16 @@ func TestCollectors(t *testing.T) { "reading2": "test", }, })}, - }, + }}, }, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { start := time.Now() - buf := tu.NewMockBuffer() + buf := tu.NewMockBuffer(t) params := data.CollectorParams{ + DataType: data.CaptureTypeTabular, ComponentName: componentName, Interval: captureInterval, Logger: logging.NewTestLogger(t), diff --git a/components/sensor/collectors.go b/components/sensor/collectors.go index 07f34e6880a..29a9973db5a 100644 --- a/components/sensor/collectors.go +++ b/components/sensor/collectors.go @@ -3,12 +3,11 @@ package sensor import ( "context" "errors" + "time" - pb "go.viam.com/api/common/v1" "google.golang.org/protobuf/types/known/anypb" "go.viam.com/rdk/data" - "go.viam.com/rdk/protoutils" ) type method int64 @@ -32,23 +31,20 @@ func newReadingsCollector(resource interface{}, params data.CollectorParams) (da return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, arg 
map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, arg map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult values, err := sensorResource.Readings(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, readings.String(), err) + return res, data.FailedToReadErr(params.ComponentName, readings.String(), err) } - readings, err := protoutils.ReadingGoToProto(values) - if err != nil { - return nil, err - } - return pb.GetReadingsResponse{ - Readings: readings, - }, nil + + return data.NewTabularCaptureResultReadings(timeRequested, values) }) return data.NewCollector(cFunc, params) } diff --git a/components/sensor/collectors_test.go b/components/sensor/collectors_test.go index 33b6c62217b..5f07fbdf882 100644 --- a/components/sensor/collectors_test.go +++ b/components/sensor/collectors_test.go @@ -24,8 +24,9 @@ var readingMap = map[string]any{"reading1": false, "reading2": "test"} func TestCollectors(t *testing.T) { start := time.Now() - buf := tu.NewMockBuffer() + buf := tu.NewMockBuffer(t) params := data.CollectorParams{ + DataType: data.CaptureTypeTabular, ComponentName: "sensor", Interval: captureInterval, Logger: logging.NewTestLogger(t), @@ -42,7 +43,7 @@ func TestCollectors(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), time.Second) defer cancel() - tu.CheckMockBufferWrites(t, ctx, start, buf.Writes, &datasyncpb.SensorData{ + tu.CheckMockBufferWrites(t, ctx, start, buf.Writes, []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "readings": map[string]any{ @@ -50,7 +51,7 @@ func TestCollectors(t *testing.T) { "reading2": "test", }, })}, - }) + }}) buf.Close() } diff --git a/components/servo/collectors.go b/components/servo/collectors.go index b073b122fde..9c8df3d54d2 100644 --- a/components/servo/collectors.go +++ b/components/servo/collectors.go @@ -3,6 +3,7 @@ package servo import ( "context" "errors" + "time" pb "go.viam.com/api/component/servo/v1" "google.golang.org/protobuf/types/known/anypb" @@ -31,19 +32,21 @@ func newPositionCollector(resource interface{}, params data.CollectorParams) (da return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult pos, err := servo.Position(ctx, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a component. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, position.String(), err) + return res, data.FailedToReadErr(params.ComponentName, position.String(), err) } - return pb.GetPositionResponse{ + return data.NewTabularCaptureResult(timeRequested, pb.GetPositionResponse{ PositionDeg: pos, - }, nil + }) }) return data.NewCollector(cFunc, params) } diff --git a/components/servo/collectors_test.go b/components/servo/collectors_test.go index 0a1219d75ee..0f883fd6ff0 100644 --- a/components/servo/collectors_test.go +++ b/components/servo/collectors_test.go @@ -22,8 +22,9 @@ const ( func TestCollectors(t *testing.T) { start := time.Now() - buf := tu.NewMockBuffer() + buf := tu.NewMockBuffer(t) params := data.CollectorParams{ + DataType: data.CaptureTypeTabular, ComponentName: "servo", Interval: captureInterval, Logger: logging.NewTestLogger(t), @@ -40,12 +41,12 @@ func TestCollectors(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), time.Second) defer cancel() - tu.CheckMockBufferWrites(t, ctx, start, buf.Writes, &datasyncpb.SensorData{ + tu.CheckMockBufferWrites(t, ctx, start, buf.Writes, []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "position_deg": 1.0, })}, - }) + }}) buf.Close() } diff --git a/data/capture_buffer.go b/data/capture_buffer.go index 93edc43025e..5853bc91ad4 100644 --- a/data/capture_buffer.go +++ b/data/capture_buffer.go @@ -3,14 +3,14 @@ package data import ( "sync" + "github.com/pkg/errors" v1 "go.viam.com/api/app/datasync/v1" ) -const captureAllFromCamera = "CaptureAllFromCamera" - // CaptureBufferedWriter is a buffered, persistent queue of SensorData. type CaptureBufferedWriter interface { - Write(item *v1.SensorData) error + WriteBinary(items []*v1.SensorData) error + WriteTabular(items []*v1.SensorData) error Flush() error Path() string } @@ -33,27 +33,57 @@ func NewCaptureBuffer(dir string, md *v1.DataCaptureMetadata, maxCaptureFileSize } } -// Write writes item onto b. Binary sensor data is written to its own file. -// Tabular data is written to disk in maxCaptureFileSize sized files. Files that -// are still being written to are indicated with the extension -// InProgressFileExt. Files that have finished being written to are indicated by -// FileExt. -func (b *CaptureBuffer) Write(item *v1.SensorData) error { +var ( + // ErrInvalidBinarySensorData is returned from WriteBinary if the sensor data is the wrong type. + ErrInvalidBinarySensorData = errors.New("CaptureBuffer.WriteBinary called with non binary sensor data") + // ErrInvalidTabularSensorData is returned from WriteTabular if the sensor data is the wrong type. + ErrInvalidTabularSensorData = errors.New("CaptureBuffer.WriteTabular called with binary sensor data") +) + +// WriteBinary writes the items to their own file. +// Files that are still being written to are indicated with the extension +// '.prog'. +// Files that have finished being written to are indicated by +// '.capture'. 
+func (b *CaptureBuffer) WriteBinary(items []*v1.SensorData) error { b.lock.Lock() defer b.lock.Unlock() - if item.GetBinary() != nil { - binFile, err := NewCaptureFile(b.Directory, b.MetaData) - if err != nil { - return err + for _, item := range items { + if !IsBinary(item) { + return ErrInvalidBinarySensorData } + } + + binFile, err := NewCaptureFile(b.Directory, b.MetaData) + if err != nil { + return err + } + for _, item := range items { if err := binFile.WriteNext(item); err != nil { return err } - if err := binFile.Close(); err != nil { - return err + } + if err := binFile.Close(); err != nil { + return err + } + return nil +} + +// WriteTabular writes +// Tabular data to disk in maxCaptureFileSize sized files. +// Files that are still being written to are indicated with the extension +// '.prog'. +// Files that have finished being written to are indicated by +// '.capture'. +func (b *CaptureBuffer) WriteTabular(items []*v1.SensorData) error { + b.lock.Lock() + defer b.lock.Unlock() + + for _, item := range items { + if IsBinary(item) { + return ErrInvalidTabularSensorData } - return nil } if b.nextFile == nil { @@ -62,10 +92,7 @@ func (b *CaptureBuffer) Write(item *v1.SensorData) error { return err } b.nextFile = nextFile - // We want to special case on "CaptureAllFromCamera" because it is sensor data that contains images - // and their corresponding annotations. We want each image and its annotations to be stored in a - // separate file. - } else if b.nextFile.Size() > b.maxCaptureFileSize || b.MetaData.MethodName == captureAllFromCamera { + } else if b.nextFile.Size() > b.maxCaptureFileSize { if err := b.nextFile.Close(); err != nil { return err } @@ -76,7 +103,26 @@ func (b *CaptureBuffer) Write(item *v1.SensorData) error { b.nextFile = nextFile } - return b.nextFile.WriteNext(item) + for _, item := range items { + if err := b.nextFile.WriteNext(item); err != nil { + return err + } + } + + return nil +} + +// IsBinary returns true when the *v1.SensorData is of type binary. +func IsBinary(item *v1.SensorData) bool { + if item == nil { + return false + } + switch item.Data.(type) { + case *v1.SensorData_Binary: + return true + default: + return false + } } // Flush flushes all buffered data to disk and marks any in progress file as complete. 
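A minimal usage sketch of the split write path introduced above (illustrative only, not part of this patch; the writeBatch helper is hypothetical and assumes just the exported data-package API shown in this diff: CaptureBufferedWriter, IsBinary, WriteBinary, WriteTabular):

// Sketch: route a homogeneous batch of sensor data to the matching writer.
// WriteBinary starts a new '.capture' file per call; WriteTabular appends to
// maxCaptureFileSize-sized files. The buffer itself rejects mixed batches with
// ErrInvalidBinarySensorData / ErrInvalidTabularSensorData.
package example

import (
	datasyncpb "go.viam.com/api/app/datasync/v1"

	"go.viam.com/rdk/data"
)

func writeBatch(buf data.CaptureBufferedWriter, items []*datasyncpb.SensorData) error {
	if len(items) == 0 {
		return nil
	}
	// IsBinary inspects the payload type (SensorData_Binary vs SensorData_Struct).
	if data.IsBinary(items[0]) {
		return buf.WriteBinary(items)
	}
	return buf.WriteTabular(items)
}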
diff --git a/data/capture_buffer_test.go b/data/capture_buffer_test.go index a89f85b0c60..5bfa5050311 100644 --- a/data/capture_buffer_test.go +++ b/data/capture_buffer_test.go @@ -1,7 +1,6 @@ package data import ( - "errors" "io" "os" "path/filepath" @@ -85,16 +84,19 @@ func TestCaptureQueue(t *testing.T) { tmpDir := t.TempDir() md := &v1.DataCaptureMetadata{Type: tc.dataType} sut := NewCaptureBuffer(tmpDir, md, int64(maxFileSize)) - var pushValue *v1.SensorData - if tc.dataType == v1.DataType_DATA_TYPE_BINARY_SENSOR { - pushValue = binarySensorData - } else { - pushValue = structSensorData - } for i := 0; i < tc.pushCount; i++ { - err := sut.Write(pushValue) - test.That(t, err, test.ShouldBeNil) + switch { + case tc.dataType == CaptureTypeBinary.ToProto(): + err := sut.WriteBinary([]*v1.SensorData{binarySensorData}) + test.That(t, err, test.ShouldBeNil) + case tc.dataType == CaptureTypeTabular.ToProto(): + err := sut.WriteTabular([]*v1.SensorData{structSensorData}) + test.That(t, err, test.ShouldBeNil) + default: + t.Error("unknown data type") + t.FailNow() + } } dcFiles, inProgressFiles := getCaptureFiles(tmpDir) @@ -221,7 +223,7 @@ func TestCaptureBufferReader(t *testing.T) { methodParams, err := rprotoutils.ConvertStringMapToAnyPBMap(tc.additionalParams) test.That(t, err, test.ShouldBeNil) - readImageCaptureMetadata := BuildCaptureMetadata( + readImageCaptureMetadata, _ := BuildCaptureMetadata( tc.resourceName.API, tc.resourceName.ShortName(), tc.methodName, @@ -248,7 +250,7 @@ func TestCaptureBufferReader(t *testing.T) { now := time.Now() timeRequested := timestamppb.New(now.UTC()) timeReceived := timestamppb.New(now.Add(time.Millisecond).UTC()) - msg := &v1.SensorData{ + msg := []*v1.SensorData{{ Metadata: &v1.SensorMetadata{ TimeRequested: timeRequested, TimeReceived: timeReceived, @@ -256,8 +258,8 @@ func TestCaptureBufferReader(t *testing.T) { Data: &v1.SensorData_Struct{ Struct: tc.readings[0], }, - } - test.That(t, b.Write(msg), test.ShouldBeNil) + }} + test.That(t, b.WriteTabular(msg), test.ShouldBeNil) test.That(t, b.Flush(), test.ShouldBeNil) dirEntries, err := os.ReadDir(b.Path()) test.That(t, err, test.ShouldBeNil) @@ -273,7 +275,7 @@ func TestCaptureBufferReader(t *testing.T) { sd, err := cf.ReadNext() test.That(t, err, test.ShouldBeNil) - test.That(t, sd, test.ShouldResemble, msg) + test.That(t, sd, test.ShouldResemble, msg[0]) _, err = cf.ReadNext() test.That(t, err, test.ShouldBeError, io.EOF) @@ -281,7 +283,7 @@ func TestCaptureBufferReader(t *testing.T) { now = time.Now() timeRequested = timestamppb.New(now.UTC()) timeReceived = timestamppb.New(now.Add(time.Millisecond).UTC()) - msg2 := &v1.SensorData{ + msg2 := []*v1.SensorData{{ Metadata: &v1.SensorMetadata{ TimeRequested: timeRequested, TimeReceived: timeReceived, @@ -289,13 +291,13 @@ func TestCaptureBufferReader(t *testing.T) { Data: &v1.SensorData_Struct{ Struct: tc.readings[1], }, - } - test.That(t, b.Write(msg2), test.ShouldBeNil) + }} + test.That(t, b.WriteTabular(msg2), test.ShouldBeNil) now = time.Now() timeRequested = timestamppb.New(now.UTC()) timeReceived = timestamppb.New(now.Add(time.Millisecond).UTC()) - msg3 := &v1.SensorData{ + msg3 := []*v1.SensorData{{ Metadata: &v1.SensorMetadata{ TimeRequested: timeRequested, TimeReceived: timeReceived, @@ -303,8 +305,8 @@ func TestCaptureBufferReader(t *testing.T) { Data: &v1.SensorData_Struct{ Struct: tc.readings[2], }, - } - test.That(t, b.Write(msg3), test.ShouldBeNil) + }} + test.That(t, b.WriteTabular(msg3), test.ShouldBeNil) dirEntries2, err := 
os.ReadDir(b.Path()) test.That(t, err, test.ShouldBeNil) @@ -341,11 +343,11 @@ func TestCaptureBufferReader(t *testing.T) { sd2, err := cf2.ReadNext() test.That(t, err, test.ShouldBeNil) - test.That(t, sd2, test.ShouldResemble, msg2) + test.That(t, sd2, test.ShouldResemble, msg2[0]) sd3, err := cf2.ReadNext() test.That(t, err, test.ShouldBeNil) - test.That(t, sd3, test.ShouldResemble, msg3) + test.That(t, sd3, test.ShouldResemble, msg3[0]) _, err = cf2.ReadNext() test.That(t, err, test.ShouldBeError, io.EOF) @@ -426,7 +428,7 @@ func TestCaptureBufferReader(t *testing.T) { methodParams, err := rprotoutils.ConvertStringMapToAnyPBMap(tc.additionalParams) test.That(t, err, test.ShouldBeNil) - readImageCaptureMetadata := BuildCaptureMetadata( + readImageCaptureMetadata, _ := BuildCaptureMetadata( tc.resourceName.API, tc.resourceName.ShortName(), tc.methodName, @@ -456,32 +458,17 @@ func TestCaptureBufferReader(t *testing.T) { test.That(t, err, test.ShouldBeNil) test.That(t, firstDirEntries, test.ShouldBeEmpty) - // writing empty sensor data returns an error - test.That(t, b.Write(nil), test.ShouldBeError, errors.New("proto: Marshal called with nil")) - // flushing after this error occures, behaves the same as if no write had occurred // current behavior is likely a bug test.That(t, b.Flush(), test.ShouldBeNil) firstDirEntries, err = os.ReadDir(b.Path()) test.That(t, err, test.ShouldBeNil) - test.That(t, len(firstDirEntries), test.ShouldEqual, 1) - test.That(t, filepath.Ext(firstDirEntries[0].Name()), test.ShouldResemble, CompletedCaptureFileExt) - f, err := os.Open(filepath.Join(b.Path(), firstDirEntries[0].Name())) - test.That(t, err, test.ShouldBeNil) - defer func() { utils.UncheckedError(f.Close()) }() - - cf, err := ReadCaptureFile(f) - test.That(t, err, test.ShouldBeNil) - test.That(t, cf.ReadMetadata(), test.ShouldResemble, readImageCaptureMetadata) - - sd, err := cf.ReadNext() - test.That(t, err, test.ShouldBeError, io.EOF) - test.That(t, sd, test.ShouldBeNil) + test.That(t, len(firstDirEntries), test.ShouldEqual, 0) now := time.Now() timeRequested := timestamppb.New(now.UTC()) timeReceived := timestamppb.New(now.Add(time.Millisecond).UTC()) - msg := &v1.SensorData{ + msg := []*v1.SensorData{{ Metadata: &v1.SensorMetadata{ TimeRequested: timeRequested, TimeReceived: timeReceived, @@ -489,19 +476,13 @@ func TestCaptureBufferReader(t *testing.T) { Data: &v1.SensorData_Binary{ Binary: []byte("this is fake binary data"), }, - } - test.That(t, b.Write(msg), test.ShouldBeNil) + }} + test.That(t, b.WriteBinary(msg), test.ShouldBeNil) test.That(t, b.Flush(), test.ShouldBeNil) secondDirEntries, err := os.ReadDir(b.Path()) test.That(t, err, test.ShouldBeNil) - test.That(t, len(secondDirEntries), test.ShouldEqual, 2) - var newFileName string - for _, de := range secondDirEntries { - if de.Name() != firstDirEntries[0].Name() { - newFileName = de.Name() - break - } - } + test.That(t, len(secondDirEntries), test.ShouldEqual, 1) + newFileName := secondDirEntries[0].Name() test.That(t, newFileName, test.ShouldNotBeEmpty) test.That(t, filepath.Ext(newFileName), test.ShouldResemble, CompletedCaptureFileExt) f2, err := os.Open(filepath.Join(b.Path(), newFileName)) @@ -514,14 +495,14 @@ func TestCaptureBufferReader(t *testing.T) { sd2, err := cf2.ReadNext() test.That(t, err, test.ShouldBeNil) - test.That(t, sd2, test.ShouldResemble, msg) + test.That(t, sd2, test.ShouldResemble, msg[0]) _, err = cf2.ReadNext() test.That(t, err, test.ShouldBeError, io.EOF) timeRequested = timestamppb.New(now.UTC()) 
timeReceived = timestamppb.New(now.Add(time.Millisecond).UTC()) - msg3 := &v1.SensorData{ + msg3 := []*v1.SensorData{{ Metadata: &v1.SensorMetadata{ TimeRequested: timeRequested, TimeReceived: timeReceived, @@ -529,13 +510,13 @@ func TestCaptureBufferReader(t *testing.T) { Data: &v1.SensorData_Binary{ Binary: []byte("msg2"), }, - } + }} - test.That(t, b.Write(msg3), test.ShouldBeNil) + test.That(t, b.WriteBinary(msg3), test.ShouldBeNil) timeRequested = timestamppb.New(now.UTC()) timeReceived = timestamppb.New(now.Add(time.Millisecond).UTC()) - msg4 := &v1.SensorData{ + msg4 := []*v1.SensorData{{ Metadata: &v1.SensorMetadata{ TimeRequested: timeRequested, TimeReceived: timeReceived, @@ -543,17 +524,17 @@ func TestCaptureBufferReader(t *testing.T) { Data: &v1.SensorData_Binary{ Binary: []byte("msg3"), }, - } + }} // Every binary data written becomes a new data capture file - test.That(t, b.Write(msg4), test.ShouldBeNil) + test.That(t, b.WriteBinary(msg4), test.ShouldBeNil) test.That(t, b.Flush(), test.ShouldBeNil) thirdDirEntries, err := os.ReadDir(b.Path()) test.That(t, err, test.ShouldBeNil) - test.That(t, len(thirdDirEntries), test.ShouldEqual, 4) + test.That(t, len(thirdDirEntries), test.ShouldEqual, 3) var newFileNames []string for _, de := range thirdDirEntries { - if de.Name() != firstDirEntries[0].Name() && de.Name() != newFileName { + if de.Name() != newFileName { newFileNames = append(newFileNames, de.Name()) } } @@ -568,7 +549,7 @@ func TestCaptureBufferReader(t *testing.T) { test.That(t, cf3.ReadMetadata(), test.ShouldResemble, readImageCaptureMetadata) sd3, err := cf3.ReadNext() test.That(t, err, test.ShouldBeNil) - test.That(t, sd3, test.ShouldResemble, msg3) + test.That(t, sd3, test.ShouldResemble, msg3[0]) _, err = cf3.ReadNext() test.That(t, err, test.ShouldBeError, io.EOF) @@ -581,7 +562,7 @@ func TestCaptureBufferReader(t *testing.T) { test.That(t, cf4.ReadMetadata(), test.ShouldResemble, readImageCaptureMetadata) sd4, err := cf4.ReadNext() test.That(t, err, test.ShouldBeNil) - test.That(t, sd4, test.ShouldResemble, msg4) + test.That(t, sd4, test.ShouldResemble, msg4[0]) _, err = cf4.ReadNext() test.That(t, err, test.ShouldBeError, io.EOF) }) @@ -597,7 +578,7 @@ func TestCaptureBufferReader(t *testing.T) { methodParams, err := rprotoutils.ConvertStringMapToAnyPBMap(additionalParams) test.That(t, err, test.ShouldBeNil) - readImageCaptureMetadata := BuildCaptureMetadata( + readImageCaptureMetadata, _ := BuildCaptureMetadata( name.API, name.ShortName(), method, @@ -625,7 +606,7 @@ func TestCaptureBufferReader(t *testing.T) { now := time.Now() timeRequested := timestamppb.New(now.UTC()) timeReceived := timestamppb.New(now.Add(time.Millisecond).UTC()) - msg := &v1.SensorData{ + msg := []*v1.SensorData{{ Metadata: &v1.SensorMetadata{ TimeRequested: timeRequested, TimeReceived: timeReceived, @@ -633,8 +614,8 @@ func TestCaptureBufferReader(t *testing.T) { Data: &v1.SensorData_Binary{ Binary: []byte("this is a fake image"), }, - } - test.That(t, b.Write(msg), test.ShouldBeNil) + }} + test.That(t, b.WriteBinary(msg), test.ShouldBeNil) test.That(t, b.Flush(), test.ShouldBeNil) dirEntries, err := os.ReadDir(b.Path()) test.That(t, err, test.ShouldBeNil) @@ -650,17 +631,17 @@ func TestCaptureBufferReader(t *testing.T) { sd2, err := cf2.ReadNext() test.That(t, err, test.ShouldBeNil) - test.That(t, sd2, test.ShouldResemble, msg) + test.That(t, sd2, test.ShouldResemble, msg[0]) _, err = cf2.ReadNext() test.That(t, err, test.ShouldBeError, io.EOF) }) } -//nolint func getCaptureFiles(dir 
string) (dcFiles, progFiles []string) { _ = filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { if err != nil { + //nolint:nilerr return nil } if info.IsDir() { @@ -676,3 +657,9 @@ func getCaptureFiles(dir string) (dcFiles, progFiles []string) { }) return dcFiles, progFiles } + +func TestIsBinary(t *testing.T) { + test.That(t, IsBinary(nil), test.ShouldBeFalse) + test.That(t, IsBinary(&v1.SensorData{Data: &v1.SensorData_Struct{}}), test.ShouldBeFalse) + test.That(t, IsBinary(&v1.SensorData{Data: &v1.SensorData_Binary{}}), test.ShouldBeTrue) +} diff --git a/data/capture_file.go b/data/capture_file.go index 47b118e9926..9e552794299 100644 --- a/data/capture_file.go +++ b/data/capture_file.go @@ -16,7 +16,6 @@ import ( "google.golang.org/protobuf/types/known/anypb" "go.viam.com/rdk/resource" - "go.viam.com/rdk/utils" ) // TODO Data-343: Reorganize this into a more standard interface/package, and add tests. @@ -30,9 +29,10 @@ const ( CompletedCaptureFileExt = ".capture" readImage = "ReadImage" // GetImages is used for getting simultaneous images from different imagers. - GetImages = "GetImages" - nextPointCloud = "NextPointCloud" - pointCloudMap = "PointCloudMap" + GetImages = "GetImages" + nextPointCloud = "NextPointCloud" + pointCloudMap = "PointCloudMap" + captureAllFromCamera = "CaptureAllFromCamera" // Non-exhaustive list of characters to strip from file paths, since not allowed // on certain file systems. filePathReservedChars = ":" @@ -210,23 +210,23 @@ func (f *CaptureFile) Delete() error { // BuildCaptureMetadata builds a DataCaptureMetadata object and returns error if // additionalParams fails to convert to anypb map. func BuildCaptureMetadata( - compAPI resource.API, - compName string, + api resource.API, + name string, method string, additionalParams map[string]string, methodParams map[string]*anypb.Any, tags []string, -) *v1.DataCaptureMetadata { - dataType := getDataType(method) +) (*v1.DataCaptureMetadata, CaptureType) { + dataType := GetDataType(method) return &v1.DataCaptureMetadata{ - ComponentType: compAPI.String(), - ComponentName: compName, + ComponentType: api.String(), + ComponentName: name, MethodName: method, - Type: dataType, + Type: dataType.ToProto(), MethodParameters: methodParams, - FileExtension: GetFileExt(dataType, method, additionalParams), + FileExtension: getFileExt(dataType, method, additionalParams), Tags: tags, - } + }, dataType } // IsDataCaptureFile returns whether or not f is a data capture file. @@ -240,49 +240,6 @@ func getFileTimestampName() string { return time.Now().Format(time.RFC3339Nano) } -// TODO DATA-246: Implement this in some more robust, programmatic way. -func getDataType(methodName string) v1.DataType { - switch methodName { - case nextPointCloud, readImage, pointCloudMap, GetImages: - return v1.DataType_DATA_TYPE_BINARY_SENSOR - default: - return v1.DataType_DATA_TYPE_TABULAR_SENSOR - } -} - -// GetFileExt gets the file extension for a capture file. -func GetFileExt(dataType v1.DataType, methodName string, parameters map[string]string) string { - defaultFileExt := "" - switch dataType { - case v1.DataType_DATA_TYPE_TABULAR_SENSOR: - return ".dat" - case v1.DataType_DATA_TYPE_FILE: - return defaultFileExt - case v1.DataType_DATA_TYPE_BINARY_SENSOR: - if methodName == nextPointCloud { - return ".pcd" - } - if methodName == readImage { - // TODO: Add explicit file extensions for all mime types. 
- switch parameters["mime_type"] { - case utils.MimeTypeJPEG: - return ".jpeg" - case utils.MimeTypePNG: - return ".png" - case utils.MimeTypePCD: - return ".pcd" - default: - return defaultFileExt - } - } - case v1.DataType_DATA_TYPE_UNSPECIFIED: - return defaultFileExt - default: - return defaultFileExt - } - return defaultFileExt -} - // SensorDataFromCaptureFilePath returns all readings in the file at filePath. // NOTE: (Nick S) At time of writing this is only used in tests. func SensorDataFromCaptureFilePath(filePath string) ([]*v1.SensorData, error) { diff --git a/data/capture_file_test.go b/data/capture_file_test.go index 3de5756a88c..1f220bb5c76 100644 --- a/data/capture_file_test.go +++ b/data/capture_file_test.go @@ -125,7 +125,7 @@ func TestBuildCaptureMetadata(t *testing.T) { methodParams, err := protoutils.ConvertStringMapToAnyPBMap(tc.additionalParams) test.That(t, err, test.ShouldEqual, nil) - actualMetadata := BuildCaptureMetadata( + actualMetadata, _ := BuildCaptureMetadata( resource.APINamespaceRDK.WithComponentType(tc.componentType), tc.componentName, tc.method, diff --git a/data/collector.go b/data/collector.go index 7070141147e..b90db70c2cf 100644 --- a/data/collector.go +++ b/data/collector.go @@ -5,7 +5,6 @@ package data import ( "context" "fmt" - "reflect" "sync" "time" @@ -14,15 +13,12 @@ import ( "go.mongodb.org/mongo-driver/bson" "go.mongodb.org/mongo-driver/mongo" "go.opencensus.io/trace" - v1 "go.viam.com/api/app/datasync/v1" - pb "go.viam.com/api/common/v1" "go.viam.com/utils" "go.viam.com/utils/protoutils" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" "google.golang.org/protobuf/types/known/anypb" "google.golang.org/protobuf/types/known/structpb" - "google.golang.org/protobuf/types/known/timestamppb" "go.viam.com/rdk/logging" "go.viam.com/rdk/resource" @@ -31,9 +27,6 @@ import ( // The cutoff at which if interval < cutoff, a sleep based capture func is used instead of a ticker. var sleepCaptureCutoff = 2 * time.Millisecond -// CaptureFunc allows the creation of simple Capturers with anonymous functions. -type CaptureFunc func(ctx context.Context, params map[string]*anypb.Any) (interface{}, error) - // FromDMContextKey is used to check whether the context is from data management. // Deprecated: use a camera.Extra with camera.NewContext instead. type FromDMContextKey struct{} @@ -50,6 +43,17 @@ var ErrNoCaptureToStore = status.Error(codes.FailedPrecondition, "no capture fro // If an error is ongoing, the frequency (in seconds) with which to suppress identical error logs. const identicalErrorLogFrequencyHz = 2 +// TabularDataBson is a denormalized sensor reading that can be +// encoded into BSON. +type TabularDataBson struct { + TimeRequested time.Time `bson:"time_requested"` + TimeReceived time.Time `bson:"time_received"` + ComponentName string `bson:"component_name"` + ComponentType string `bson:"component_type"` + MethodName string `bson:"method_name"` + Data bson.M `bson:"data"` +} + // Collector collects data to some target. type Collector interface { Close() @@ -58,9 +62,9 @@ type Collector interface { } type collector struct { - clock clock.Clock - captureResults chan *v1.SensorData + clock clock.Clock + captureResults chan CaptureResult mongoCollection *mongo.Collection componentName string componentType string @@ -78,6 +82,7 @@ type collector struct { captureFunc CaptureFunc target CaptureBufferedWriter lastLoggedErrors map[string]int64 + dataType CaptureType } // Close closes the channels backing the Collector. 
It should always be called before disposing of a Collector to avoid @@ -178,10 +183,27 @@ func (c *collector) tickerBasedCapture(started chan struct{}) { } } +func (c *collector) validateReadingType(t CaptureType) error { + switch c.dataType { + case CaptureTypeTabular: + if t != CaptureTypeTabular { + return fmt.Errorf("expected result of type CaptureTypeTabular, instead got CaptureResultType: %d", t) + } + return nil + case CaptureTypeBinary: + if t != CaptureTypeBinary { + return fmt.Errorf("expected result of type CaptureTypeBinary, instead got CaptureResultType: %d", t) + } + return nil + case CaptureTypeUnspecified: + return fmt.Errorf("unknown collector data type: %d", c.dataType) + default: + return fmt.Errorf("unknown collector data type: %d", c.dataType) + } +} + func (c *collector) getAndPushNextReading() { - timeRequested := timestamppb.New(c.clock.Now().UTC()) - reading, err := c.captureFunc(c.cancelCtx, c.params) - timeReceived := timestamppb.New(c.clock.Now().UTC()) + result, err := c.captureFunc(c.cancelCtx, c.params) if c.cancelCtx.Err() != nil { return @@ -196,56 +218,22 @@ return } - var msg v1.SensorData - switch v := reading.(type) { - case []byte: - msg = v1.SensorData{ - Metadata: &v1.SensorMetadata{ - TimeRequested: timeRequested, - TimeReceived: timeReceived, - }, - Data: &v1.SensorData_Binary{ - Binary: v, - }, - } - default: - // If it's not bytes, it's a struct. - var pbReading *structpb.Struct - var err error - - if reflect.TypeOf(reading) == reflect.TypeOf(pb.GetReadingsResponse{}) { - // We special-case the GetReadingsResponse because it already contains - // structpb.Values in it, and the StructToStructPb logic does not handle - // that cleanly. - topLevelMap := make(map[string]*structpb.Value) - topLevelMap["readings"] = structpb.NewStructValue( - &structpb.Struct{Fields: reading.(pb.GetReadingsResponse).Readings}, - ) - pbReading = &structpb.Struct{Fields: topLevelMap} - } else { - pbReading, err = protoutils.StructToStructPbIgnoreOmitEmpty(reading) - if err != nil { - c.captureErrors <- errors.Wrap(err, "error while converting reading to structpb.Struct") - return - } - } + if err := c.validateReadingType(result.Type); err != nil { + c.captureErrors <- errors.Wrap(err, "capture result invalid type") + return + } - msg = v1.SensorData{ - Metadata: &v1.SensorMetadata{ - TimeRequested: timeRequested, - TimeReceived: timeReceived, - }, - Data: &v1.SensorData_Struct{ - Struct: pbReading, - }, - } + if err := result.Validate(); err != nil { + c.captureErrors <- errors.Wrap(err, "capture result failed validation") + return } select { - // If c.captureResults is full, c.captureResults <- a can block indefinitely. This additional select block allows cancel to + // If c.captureResults is full, c.captureResults <- a can block indefinitely. + // This additional select block allows cancel to + // still work when this happens.
case <-c.cancelCtx.Done(): - case c.captureResults <- &msg: + case c.captureResults <- result: } } @@ -267,9 +255,10 @@ func NewCollector(captureFunc CaptureFunc, params CollectorParams) (Collector, e componentName: params.ComponentName, componentType: params.ComponentType, methodName: params.MethodName, - captureResults: make(chan *v1.SensorData, params.QueueSize), mongoCollection: params.MongoCollection, + captureResults: make(chan CaptureResult, params.QueueSize), captureErrors: make(chan error, params.QueueSize), + dataType: params.DataType, interval: params.Interval, params: params.MethodParams, logger: params.Logger, @@ -292,8 +281,24 @@ func (c *collector) writeCaptureResults() { case <-c.cancelCtx.Done(): return case msg := <-c.captureResults: - if err := c.target.Write(msg); err != nil { - c.logger.Error(errors.Wrap(err, fmt.Sprintf("failed to write to collector %s", c.target.Path())).Error()) + proto := msg.ToProto() + + switch msg.Type { + case CaptureTypeTabular: + if err := c.target.WriteTabular(proto); err != nil { + c.logger.Error(errors.Wrap(err, fmt.Sprintf("failed to write tabular data to prog file %s", c.target.Path())).Error()) + return + } + case CaptureTypeBinary: + if err := c.target.WriteBinary(proto); err != nil { + c.logger.Error(errors.Wrap(err, fmt.Sprintf("failed to write binary data to prog file %s", c.target.Path())).Error()) + return + } + case CaptureTypeUnspecified: + c.logger.Error(fmt.Sprintf("collector returned invalid result type: %d", msg.Type)) + return + default: + c.logger.Error(fmt.Sprintf("collector returned invalid result type: %d", msg.Type)) return } @@ -302,34 +307,19 @@ func (c *collector) writeCaptureResults() { } } -// TabularData is a denormalized sensor reading. -type TabularData struct { - TimeRequested time.Time `bson:"time_requested"` - TimeReceived time.Time `bson:"time_received"` - ComponentName string `bson:"component_name"` - ComponentType string `bson:"component_type"` - MethodName string `bson:"method_name"` - Data bson.M `bson:"data"` -} - // maybeWriteToMongo will write to the mongoCollection // if it is non-nil and the msg is tabular data // logs errors on failure. 
-func (c *collector) maybeWriteToMongo(msg *v1.SensorData) { +func (c *collector) maybeWriteToMongo(msg CaptureResult) { if c.mongoCollection == nil { return } - // DATA-3338: - // currently vision.CaptureAllFromCamera and camera.GetImages are stored in .capture files as VERY LARGE - // tabular sensor data - // That is a mistake which we are rectifying but in the meantime we don't want data captured from those methods to be synced - // to mongo - if getDataType(c.methodName) == v1.DataType_DATA_TYPE_BINARY_SENSOR || c.methodName == captureAllFromCamera { + if msg.Type != CaptureTypeTabular { return } - s := msg.GetStruct() + s := msg.TabularData.Payload if s == nil { return } @@ -340,9 +330,9 @@ func (c *collector) maybeWriteToMongo(msg *v1.SensorData) { return } - td := TabularData{ - TimeRequested: msg.Metadata.TimeRequested.AsTime(), - TimeReceived: msg.Metadata.TimeReceived.AsTime(), + td := TabularDataBson{ + TimeRequested: msg.TimeRequested, + TimeReceived: msg.TimeReceived, ComponentName: c.componentName, ComponentType: c.componentType, MethodName: c.methodName, diff --git a/data/collector_test.go b/data/collector_test.go index 9037677cede..8dafcebe44e 100644 --- a/data/collector_test.go +++ b/data/collector_test.go @@ -21,14 +21,29 @@ import ( ) var ( - structCapturer = CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + dummyTime = time.Date(2024, time.January, 10, 23, 0, 0, 0, time.UTC) + structCapturer = CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (CaptureResult, error) { return dummyStructReading, nil }) - binaryCapturer = CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { - return dummyBytesReading, nil + binaryCapturer = CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (CaptureResult, error) { + return CaptureResult{ + Timestamps: Timestamps{ + TimeRequested: dummyTime, + TimeReceived: dummyTime.Add(time.Second), + }, + Type: CaptureTypeBinary, + Binaries: []Binary{{Payload: dummyBytesReading}}, + }, nil }) - dummyStructReading = structReading{} - dummyStructReadingProto = dummyStructReading.toProto() + dummyStructReading = CaptureResult{ + Timestamps: Timestamps{ + TimeRequested: dummyTime, + TimeReceived: dummyTime.Add(time.Second), + }, + Type: CaptureTypeTabular, + TabularData: TabularData{dummyStructReadingProto}, + } + dummyStructReadingProto = structReading{}.toProto() dummyBytesReading = []byte("I sure am bytes") queueSize = 250 bufferSize = 4096 @@ -44,13 +59,24 @@ func TestNewCollector(t *testing.T) { // If not missing parameters, should not return an error. c2, err2 := NewCollector(nil, CollectorParams{ + DataType: CaptureTypeTabular, ComponentName: "name", Logger: logging.NewTestLogger(t), Target: NewCaptureBuffer("dir", nil, 50), }) - test.That(t, c2, test.ShouldNotBeNil) test.That(t, err2, test.ShouldBeNil) + test.That(t, c2, test.ShouldNotBeNil) + + c3, err3 := NewCollector(nil, CollectorParams{ + DataType: CaptureTypeBinary, + ComponentName: "name", + Logger: logging.NewTestLogger(t), + Target: NewCaptureBuffer("dir", nil, 50), + }) + + test.That(t, err3, test.ShouldBeNil) + test.That(t, c3, test.ShouldNotBeNil) } // Test that the Collector correctly writes the SensorData on an interval. 
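A minimal sketch of a capture function under the new CaptureFunc signature exercised by these tests (illustrative only, not part of this patch; readSomething is a hypothetical stand-in for a component method such as Readings, while the constructor and error handling mirror the sensor collector above):

package example

import (
	"context"
	"time"

	"google.golang.org/protobuf/types/known/anypb"

	"go.viam.com/rdk/data"
)

// readSomething is a hypothetical stand-in for a component API call.
func readSomething(ctx context.Context) (map[string]interface{}, error) {
	return map[string]interface{}{"reading1": 42}, nil
}

func newExampleCaptureFunc() data.CaptureFunc {
	return func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) {
		timeRequested := time.Now()
		var res data.CaptureResult
		values, err := readSomething(ctx)
		if err != nil {
			// Returning data.ErrNoCaptureToStore unchanged lets the data manager
			// skip the reading instead of logging a capture failure.
			return res, err
		}
		// Wrap the readings map in a tabular CaptureResult stamped with
		// request/receive times.
		return data.NewTabularCaptureResultReadings(timeRequested, values)
	}
}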
@@ -73,6 +99,7 @@ func TestSuccessfulWrite(t *testing.T) { interval time.Duration expectReadings int expFiles int + datatype CaptureType }{ { name: "Ticker based struct writer.", @@ -80,6 +107,7 @@ func TestSuccessfulWrite(t *testing.T) { interval: tickerInterval, expectReadings: 2, expFiles: 1, + datatype: CaptureTypeTabular, }, { name: "Sleep based struct writer.", @@ -87,6 +115,7 @@ func TestSuccessfulWrite(t *testing.T) { interval: sleepInterval, expectReadings: 2, expFiles: 1, + datatype: CaptureTypeTabular, }, { name: "Ticker based binary writer.", @@ -94,6 +123,7 @@ func TestSuccessfulWrite(t *testing.T) { interval: tickerInterval, expectReadings: 2, expFiles: 2, + datatype: CaptureTypeBinary, }, { name: "Sleep based binary writer.", @@ -101,6 +131,7 @@ func TestSuccessfulWrite(t *testing.T) { interval: sleepInterval, expectReadings: 2, expFiles: 2, + datatype: CaptureTypeBinary, }, } @@ -109,19 +140,13 @@ func TestSuccessfulWrite(t *testing.T) { ctx, cancel := context.WithDeadline(context.Background(), time.Now().Add(time.Second)) defer cancel() tmpDir := t.TempDir() - md := v1.DataCaptureMetadata{} - tgt := NewCaptureBuffer(tmpDir, &md, 50) - test.That(t, tgt, test.ShouldNotBeNil) - wrote := make(chan struct{}) - target := &signalingBuffer{ - bw: tgt, - wrote: wrote, - } + target := newSignalingBuffer(ctx, tmpDir) mockClock := clock.NewMock() params.Interval = tc.interval params.Target = target params.Clock = mockClock + params.DataType = tc.datatype c, err := NewCollector(tc.captureFunc, params) test.That(t, err, test.ShouldBeNil) c.Collect() @@ -136,10 +161,9 @@ func TestSuccessfulWrite(t *testing.T) { select { case <-ctx.Done(): t.Fatalf("timed out waiting for data to be written") - case <-wrote: + case <-target.wrote: } } - close(wrote) // If it's a sleep based collector, we need to move the clock forward one more time after calling Close. // Otherwise, it will stay asleep indefinitely and Close will block forever. @@ -158,7 +182,7 @@ func TestSuccessfulWrite(t *testing.T) { return default: time.Sleep(time.Millisecond * 1) - mockClock.Add(tc.interval) + mockClock.Add(params.Interval) } } }() @@ -184,18 +208,15 @@ func TestSuccessfulWrite(t *testing.T) { func TestClose(t *testing.T) { // Set up a collector. l := logging.NewTestLogger(t) + ctx, cancel := context.WithTimeout(context.Background(), time.Second) + defer cancel() tmpDir := t.TempDir() - md := v1.DataCaptureMetadata{} - buf := NewCaptureBuffer(tmpDir, &md, 50) - wrote := make(chan struct{}) - target := &signalingBuffer{ - bw: buf, - wrote: wrote, - } mockClock := clock.NewMock() + target := newSignalingBuffer(ctx, tmpDir) interval := time.Millisecond * 5 params := CollectorParams{ + DataType: CaptureTypeTabular, ComponentName: "testComponent", Interval: interval, MethodParams: map[string]*anypb.Any{"name": fakeVal}, @@ -205,17 +226,18 @@ func TestClose(t *testing.T) { Logger: l, Clock: mockClock, } - c, _ := NewCollector(structCapturer, params) + c, err := NewCollector(structCapturer, params) + test.That(t, err, test.ShouldBeNil) // Start collecting, and validate it is writing. c.Collect() mockClock.Add(interval) - ctx, cancel := context.WithTimeout(context.Background(), time.Millisecond*10) + ctx, cancel = context.WithTimeout(context.Background(), time.Millisecond*10) defer cancel() select { case <-ctx.Done(): t.Fatalf("timed out waiting for data to be written") - case <-wrote: + case <-target.wrote: } // Close and validate no additional writes occur even after an additional interval. 
@@ -225,7 +247,7 @@ func TestClose(t *testing.T) { defer cancel() select { case <-ctx.Done(): - case <-wrote: + case <-target.wrote: t.Fatalf("unexpected write after close") } } @@ -238,10 +260,11 @@ func TestCtxCancelledNotLoggedAfterClose(t *testing.T) { tmpDir := t.TempDir() target := NewCaptureBuffer(tmpDir, &v1.DataCaptureMetadata{}, 50) captured := make(chan struct{}) - errorCapturer := CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + errorCapturer := CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (CaptureResult, error) { + var res CaptureResult select { case <-ctx.Done(): - return nil, fmt.Errorf("arbitrary wrapping message: %w", ctx.Err()) + return res, fmt.Errorf("arbitrary wrapping message: %w", ctx.Err()) case captured <- struct{}{}: } return dummyStructReading, nil @@ -249,6 +272,7 @@ func TestCtxCancelledNotLoggedAfterClose(t *testing.T) { params := CollectorParams{ ComponentName: "testComponent", + DataType: CaptureTypeTabular, Interval: time.Millisecond, MethodParams: map[string]*anypb.Any{"name": fakeVal}, Target: target, @@ -256,7 +280,8 @@ func TestCtxCancelledNotLoggedAfterClose(t *testing.T) { BufferSize: bufferSize, Logger: logger, } - c, _ := NewCollector(errorCapturer, params) + c, err := NewCollector(errorCapturer, params) + test.That(t, err, test.ShouldBeNil) c.Collect() <-captured c.Close() @@ -274,20 +299,18 @@ func TestLogErrorsOnlyOnce(t *testing.T) { // Set up a collector. logger, logs := logging.NewObservedTestLogger(t) tmpDir := t.TempDir() - md := v1.DataCaptureMetadata{} - buf := NewCaptureBuffer(tmpDir, &md, 50) - wrote := make(chan struct{}) - errorCapturer := CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { - return nil, errors.New("I am an error") + errorCapturer := CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (CaptureResult, error) { + return CaptureResult{}, errors.New("I am an error") }) - target := &signalingBuffer{ - bw: buf, - wrote: wrote, - } - mockClock := clock.NewMock() + ctx, cancel := context.WithTimeout(context.Background(), time.Second) + defer cancel() + target := newSignalingBuffer(ctx, tmpDir) interval := time.Millisecond * 5 + mockClock := clock.NewMock() + params := CollectorParams{ + DataType: CaptureTypeTabular, ComponentName: "testComponent", Interval: interval, MethodParams: map[string]*anypb.Any{"name": fakeVal}, @@ -297,13 +320,14 @@ func TestLogErrorsOnlyOnce(t *testing.T) { Logger: logger, Clock: mockClock, } - c, _ := NewCollector(errorCapturer, params) + c, err := NewCollector(errorCapturer, params) + test.That(t, err, test.ShouldBeNil) // Start collecting, and validate it is writing. 
c.Collect() mockClock.Add(interval * 5) - close(wrote) + // close(wrote) test.That(t, logs.FilterLevelExact(zapcore.ErrorLevel).Len(), test.ShouldEqual, 1) mockClock.Add(3 * time.Second) test.That(t, logs.FilterLevelExact(zapcore.ErrorLevel).Len(), test.ShouldEqual, 2) @@ -324,10 +348,10 @@ func validateReadings(t *testing.T, act []*v1.SensorData, n int) { } } -//nolint func getAllFiles(dir string) []os.FileInfo { var files []os.FileInfo _ = filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { + //nolint:nilerr if err != nil { return nil } @@ -340,14 +364,36 @@ func getAllFiles(dir string) []os.FileInfo { return files } +func newSignalingBuffer(ctx context.Context, path string) *signalingBuffer { + md := v1.DataCaptureMetadata{} + return &signalingBuffer{ + ctx: ctx, + bw: NewCaptureBuffer(path, &md, 50), + wrote: make(chan struct{}), + } +} + type signalingBuffer struct { + ctx context.Context bw CaptureBufferedWriter wrote chan struct{} } -func (b *signalingBuffer) Write(data *v1.SensorData) error { - ret := b.bw.Write(data) - b.wrote <- struct{}{} +func (b *signalingBuffer) WriteBinary(items []*v1.SensorData) error { + ret := b.bw.WriteBinary(items) + select { + case b.wrote <- struct{}{}: + case <-b.ctx.Done(): + } + return ret +} + +func (b *signalingBuffer) WriteTabular(items []*v1.SensorData) error { + ret := b.bw.WriteTabular(items) + select { + case b.wrote <- struct{}{}: + case <-b.ctx.Done(): + } return ret } diff --git a/data/collector_types.go b/data/collector_types.go new file mode 100644 index 00000000000..7de961e6044 --- /dev/null +++ b/data/collector_types.go @@ -0,0 +1,394 @@ +package data + +import ( + "context" + "fmt" + "time" + + "github.com/pkg/errors" + dataPB "go.viam.com/api/app/data/v1" + datasyncPB "go.viam.com/api/app/datasync/v1" + camerapb "go.viam.com/api/component/camera/v1" + "go.viam.com/utils/protoutils" + "google.golang.org/protobuf/types/known/anypb" + "google.golang.org/protobuf/types/known/structpb" + "google.golang.org/protobuf/types/known/timestamppb" + + rprotoutils "go.viam.com/rdk/protoutils" + rutils "go.viam.com/rdk/utils" +) + +// CaptureFunc allows the creation of simple Capturers with anonymous functions. +type CaptureFunc func(ctx context.Context, params map[string]*anypb.Any) (CaptureResult, error) + +// CaptureResult is the result of a capture function. +type CaptureResult struct { + // Type represents the type of result (binary or tabular) + Type CaptureType + // Timestamps contain the time the data was requested and received + Timestamps + // TabularData contains the tabular data payload when Type == CaptureTypeTabular + TabularData TabularData + // Binaries contains binary data responses when Type == CaptureTypeBinary + Binaries []Binary +} + +// BEGIN CONSTRUCTORS + +// NewBinaryCaptureResult returns a binary capture result. +func NewBinaryCaptureResult(ts Timestamps, binaries []Binary) CaptureResult { + return CaptureResult{ + Timestamps: ts, + Type: CaptureTypeBinary, + Binaries: binaries, + } +} + +// NewTabularCaptureResultReadings returns a tabular readings result.
+func NewTabularCaptureResultReadings(reqT time.Time, readings map[string]interface{}) (CaptureResult, error) { + var res CaptureResult + values, err := rprotoutils.ReadingGoToProto(readings) + if err != nil { + return res, err + } + + return CaptureResult{ + Timestamps: Timestamps{ + TimeRequested: reqT, + TimeReceived: time.Now(), + }, + Type: CaptureTypeTabular, + TabularData: TabularData{ + Payload: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + "readings": structpb.NewStructValue(&structpb.Struct{Fields: values}), + }, + }, + }, + }, nil +} + +// NewTabularCaptureResult returns a tabular result. +func NewTabularCaptureResult(reqT time.Time, i interface{}) (CaptureResult, error) { + var res CaptureResult + readings, err := protoutils.StructToStructPbIgnoreOmitEmpty(i) + if err != nil { + return res, err + } + + return CaptureResult{ + Timestamps: Timestamps{ + TimeRequested: reqT, + TimeReceived: time.Now(), + }, + Type: CaptureTypeTabular, + TabularData: TabularData{ + Payload: readings, + }, + }, nil +} + +// END CONSTRUCTORS + +// ToProto converts a CaptureResult into a []*datasyncPB.SensorData. +func (cr *CaptureResult) ToProto() []*datasyncPB.SensorData { + ts := cr.Timestamps + if td := cr.TabularData.Payload; td != nil { + return []*datasyncPB.SensorData{{ + Metadata: &datasyncPB.SensorMetadata{ + TimeRequested: timestamppb.New(ts.TimeRequested.UTC()), + TimeReceived: timestamppb.New(ts.TimeReceived.UTC()), + }, + Data: &datasyncPB.SensorData_Struct{ + Struct: td, + }, + }} + } + + var sd []*datasyncPB.SensorData + for _, b := range cr.Binaries { + sd = append(sd, &datasyncPB.SensorData{ + Metadata: &datasyncPB.SensorMetadata{ + TimeRequested: timestamppb.New(ts.TimeRequested.UTC()), + TimeReceived: timestamppb.New(ts.TimeReceived.UTC()), + MimeType: b.MimeType.ToProto(), + Annotations: b.Annotations.ToProto(), + }, + Data: &datasyncPB.SensorData_Binary{ + Binary: b.Payload, + }, + }) + } + return sd +} + +// Validate returns an error if the *CaptureResult is invalid. +func (cr *CaptureResult) Validate() error { + var ts Timestamps + if cr.Timestamps.TimeRequested == ts.TimeRequested { + return errors.New("Timestamps.TimeRequested must be set") + } + + if cr.Timestamps.TimeReceived == ts.TimeReceived { + return errors.New("Timestamps.TimeReceived must be set") + } + + switch cr.Type { + case CaptureTypeTabular: + if len(cr.Binaries) > 0 { + return errors.New("tabular result can't contain binary data") + } + if cr.TabularData.Payload == nil { + return errors.New("tabular result must have non empty tabular data") + } + return nil + case CaptureTypeBinary: + if cr.TabularData.Payload != nil { + return errors.New("binary result can't contain tabular data") + } + if len(cr.Binaries) == 0 { + return errors.New("binary result must have non empty binary data") + } + + for _, b := range cr.Binaries { + if len(b.Payload) == 0 { + return errors.New("binary results can't have empty binary payload") + } + } + return nil + case CaptureTypeUnspecified: + return fmt.Errorf("unknown CaptureResultType: %d", cr.Type) + default: + return fmt.Errorf("unknown CaptureResultType: %d", cr.Type) + } +} + +// CaptureType represents captured tabular or binary data. +type CaptureType int + +const ( + // CaptureTypeUnspecified represents that the data type of the captured data was not specified. + CaptureTypeUnspecified CaptureType = iota + // CaptureTypeTabular represents that the data type of the captured data is tabular.
+ CaptureTypeTabular + // CaptureTypeBinary represents that the data type of the captured data is binary. + CaptureTypeBinary +) + +// ToProto converts a CaptureType into a datasyncPB.DataType. +func (dt CaptureType) ToProto() datasyncPB.DataType { + switch dt { + case CaptureTypeTabular: + return datasyncPB.DataType_DATA_TYPE_TABULAR_SENSOR + case CaptureTypeBinary: + return datasyncPB.DataType_DATA_TYPE_BINARY_SENSOR + case CaptureTypeUnspecified: + return datasyncPB.DataType_DATA_TYPE_UNSPECIFIED + default: + return datasyncPB.DataType_DATA_TYPE_UNSPECIFIED + } +} + +// GetDataType returns the CaptureType of the method. +func GetDataType(methodName string) CaptureType { + switch methodName { + case nextPointCloud, readImage, pointCloudMap, GetImages, captureAllFromCamera: + return CaptureTypeBinary + default: + return CaptureTypeTabular + } +} + +// Timestamps are the timestamps at which the data was captured. +type Timestamps struct { + // TimeRequested represents the time the request for the data was started + TimeRequested time.Time + // TimeReceived represents the time the response for the request for the data + // was received + TimeReceived time.Time +} + +// MimeType represents the mime type of the sensor data. +type MimeType int + +// This follows the mime types supported in +// https://github.com/viamrobotics/api/pull/571/files#diff-b77927298d8d5d5228beeea47bd0860d9b322b4f3ef45e129bc238ec17704826R75 +const ( + // MimeTypeUnspecified means that the mime type was not specified. + MimeTypeUnspecified MimeType = iota + // MimeTypeImageJpeg means that the mime type is jpeg. + MimeTypeImageJpeg + // MimeTypeImagePng means that the mime type is png. + MimeTypeImagePng + // MimeTypeApplicationPcd means that the mime type is pcd. + MimeTypeApplicationPcd +) + +// ToProto converts MimeType to datasyncPB.MimeType. +func (mt MimeType) ToProto() datasyncPB.MimeType { + switch mt { + case MimeTypeUnspecified: + return datasyncPB.MimeType_MIME_TYPE_UNSPECIFIED + case MimeTypeImageJpeg: + return datasyncPB.MimeType_MIME_TYPE_IMAGE_JPEG + case MimeTypeImagePng: + return datasyncPB.MimeType_MIME_TYPE_IMAGE_PNG + case MimeTypeApplicationPcd: + return datasyncPB.MimeType_MIME_TYPE_APPLICATION_PCD + default: + return datasyncPB.MimeType_MIME_TYPE_UNSPECIFIED + } +} + +// MimeTypeFromProto converts a datasyncPB.MimeType to a data.MimeType. +func MimeTypeFromProto(mt datasyncPB.MimeType) MimeType { + switch mt { + case datasyncPB.MimeType_MIME_TYPE_UNSPECIFIED: + return MimeTypeUnspecified + case datasyncPB.MimeType_MIME_TYPE_IMAGE_JPEG: + return MimeTypeImageJpeg + case datasyncPB.MimeType_MIME_TYPE_IMAGE_PNG: + return MimeTypeImagePng + case datasyncPB.MimeType_MIME_TYPE_APPLICATION_PCD: + return MimeTypeApplicationPcd + default: + return MimeTypeUnspecified + } +} + +// CameraFormatToMimeType converts a camerapb.Format into a MimeType. +func CameraFormatToMimeType(f camerapb.Format) MimeType { + switch f { + case camerapb.Format_FORMAT_UNSPECIFIED: + return MimeTypeUnspecified + case camerapb.Format_FORMAT_JPEG: + return MimeTypeImageJpeg + case camerapb.Format_FORMAT_PNG: + return MimeTypeImagePng + case camerapb.Format_FORMAT_RAW_RGBA: + fallthrough + case camerapb.Format_FORMAT_RAW_DEPTH: + fallthrough + default: + return MimeTypeUnspecified + } +} + +// MimeTypeToCameraFormat converts a data.MimeType into a camerapb.Format.
+func MimeTypeToCameraFormat(mt MimeType) camerapb.Format { + if mt == MimeTypeImageJpeg { + return camerapb.Format_FORMAT_JPEG + } + + if mt == MimeTypeImagePng { + return camerapb.Format_FORMAT_PNG + } + return camerapb.Format_FORMAT_UNSPECIFIED +} + +// Binary represents an element of a binary capture result response. +type Binary struct { + // Payload contains the binary payload + Payload []byte + // MimeType describes the payload's MimeType + MimeType MimeType + // Annotations provide metadata about the Payload + Annotations Annotations +} + +// TabularData contains a tabular data payload. +type TabularData struct { + Payload *structpb.Struct +} + +// BoundingBox represents a labeled bounding box +// with an optional confidence interval between 0 and 1. +type BoundingBox struct { + Label string + Confidence *float64 + XMinNormalized float64 + YMinNormalized float64 + XMaxNormalized float64 + YMaxNormalized float64 +} + +// Classification represents a labeled classification +// with an optional confidence interval between 0 and 1. +type Classification struct { + Label string + Confidence *float64 +} + +// Annotations represents ML annotations, i.e. bounding boxes and classifications. +type Annotations struct { + BoundingBoxes []BoundingBox + Classifications []Classification +} + +// Empty returns true when Annotations are empty. +func (mt Annotations) Empty() bool { + return len(mt.BoundingBoxes) == 0 && len(mt.Classifications) == 0 +} + +// ToProto converts Annotations to *dataPB.Annotations. +func (mt Annotations) ToProto() *dataPB.Annotations { + if mt.Empty() { + return nil + } + + var bboxes []*dataPB.BoundingBox + for _, bb := range mt.BoundingBoxes { + bboxes = append(bboxes, &dataPB.BoundingBox{ + Label: bb.Label, + Confidence: bb.Confidence, + XMinNormalized: bb.XMinNormalized, + XMaxNormalized: bb.XMaxNormalized, + YMinNormalized: bb.YMinNormalized, + YMaxNormalized: bb.YMaxNormalized, + }) + } + + var classifications []*dataPB.Classification + for _, c := range mt.Classifications { + classifications = append(classifications, &dataPB.Classification{ + Label: c.Label, + Confidence: c.Confidence, + }) + } + + return &dataPB.Annotations{ + Bboxes: bboxes, + Classifications: classifications, + } +} + +// getFileExt gets the file extension for a capture file. +func getFileExt(dataType CaptureType, methodName string, parameters map[string]string) string { + defaultFileExt := "" + switch dataType { + case CaptureTypeTabular: + return ".dat" + case CaptureTypeBinary: + if methodName == nextPointCloud { + return ".pcd" + } + if methodName == readImage { + // TODO: Add explicit file extensions for all mime types.
+ switch parameters["mime_type"] { + case rutils.MimeTypeJPEG: + return ".jpeg" + case rutils.MimeTypePNG: + return ".png" + case rutils.MimeTypePCD: + return ".pcd" + default: + return defaultFileExt + } + } + case CaptureTypeUnspecified: + return defaultFileExt + default: + return defaultFileExt + } + return defaultFileExt +} diff --git a/data/collector_types_test.go b/data/collector_types_test.go new file mode 100644 index 00000000000..f1d164c7f1e --- /dev/null +++ b/data/collector_types_test.go @@ -0,0 +1,425 @@ +package data + +import ( + "errors" + "testing" + "time" + + v1 "go.viam.com/api/app/data/v1" + datasyncPB "go.viam.com/api/app/datasync/v1" + commonPB "go.viam.com/api/common/v1" + armPB "go.viam.com/api/component/arm/v1" + cameraPB "go.viam.com/api/component/camera/v1" + "go.viam.com/test" + "google.golang.org/protobuf/types/known/structpb" + "google.golang.org/protobuf/types/known/timestamppb" + + tu "go.viam.com/rdk/testutils" + rutils "go.viam.com/rdk/utils" +) + +func TestNewBinaryCaptureResult(t *testing.T) { + timeRequested := time.Now() + timeReceived := time.Now() + ts := Timestamps{TimeRequested: timeRequested, TimeReceived: timeReceived} + type testCase struct { + input CaptureResult + output CaptureResult + validateErr error + } + confidence := 0.1 + emptyBinaries := []Binary{} + singleSimpleBinaries := []Binary{{Payload: []byte("hi there")}} + singleSimpleBinariesWithMimeType := []Binary{ + { + Payload: []byte("hi there"), + MimeType: MimeTypeImageJpeg, + }, + } + singleComplexBinaries := []Binary{ + { + Payload: []byte("hi there"), + MimeType: MimeTypeImageJpeg, + Annotations: Annotations{ + Classifications: []Classification{ + {Label: "no-confidence"}, + {Label: "confidence", Confidence: &confidence}, + }, + BoundingBoxes: []BoundingBox{ + { + Label: "no-confidence", + XMinNormalized: 1, + XMaxNormalized: 2, + YMinNormalized: 3, + YMaxNormalized: 4, + }, + { + Label: "confidence", + Confidence: &confidence, + XMinNormalized: 5, + XMaxNormalized: 6, + YMinNormalized: 7, + YMaxNormalized: 8, + }, + }, + }, + }, + { + Payload: []byte("hi too am here here"), + MimeType: MimeTypeImageJpeg, + Annotations: Annotations{ + Classifications: []Classification{ + {Label: "something completely different"}, + }, + }, + }, + } + + multipleComplexBinaries := []Binary{ + { + Payload: []byte("hi there"), + MimeType: MimeTypeImageJpeg, + Annotations: Annotations{ + Classifications: []Classification{ + {Label: "no-confidence"}, + {Label: "confidence", Confidence: &confidence}, + }, + BoundingBoxes: []BoundingBox{ + { + Label: "no-confidence", + XMinNormalized: 1, + XMaxNormalized: 2, + YMinNormalized: 3, + YMaxNormalized: 4, + }, + { + Label: "confidence", + Confidence: &confidence, + XMinNormalized: 5, + XMaxNormalized: 6, + YMinNormalized: 7, + YMaxNormalized: 8, + }, + }, + }, + }, + { + Payload: []byte("hi too am here here"), + MimeType: MimeTypeImageJpeg, + Annotations: Annotations{ + Classifications: []Classification{ + {Label: "something completely different"}, + }, + }, + }, + } + tcs := []testCase{ + { + input: NewBinaryCaptureResult(ts, nil), + output: CaptureResult{Type: CaptureTypeBinary, Timestamps: ts}, + validateErr: errors.New("binary result must have non empty binary data"), + }, + { + input: NewBinaryCaptureResult(ts, emptyBinaries), + output: CaptureResult{ + Type: CaptureTypeBinary, + Timestamps: ts, + Binaries: emptyBinaries, + }, + validateErr: errors.New("binary result must have non empty binary data"), + }, + { + input: NewBinaryCaptureResult(ts, 
singleSimpleBinaries),
+            output: CaptureResult{
+                Type: CaptureTypeBinary,
+                Timestamps: ts,
+                Binaries: singleSimpleBinaries,
+            },
+        },
+        {
+            input: NewBinaryCaptureResult(ts, singleSimpleBinariesWithMimeType),
+            output: CaptureResult{
+                Type: CaptureTypeBinary,
+                Timestamps: ts,
+                Binaries: singleSimpleBinariesWithMimeType,
+            },
+        },
+        {
+            input: NewBinaryCaptureResult(ts, singleComplexBinaries),
+            output: CaptureResult{
+                Type: CaptureTypeBinary,
+                Timestamps: ts,
+                Binaries: singleComplexBinaries,
+            },
+        },
+        {
+            input: NewBinaryCaptureResult(ts, multipleComplexBinaries),
+            output: CaptureResult{
+                Type: CaptureTypeBinary,
+                Timestamps: ts,
+                Binaries: multipleComplexBinaries,
+            },
+        },
+    }
+    for i, tc := range tcs {
+        t.Logf("index: %d", i)
+
+        // confirm input resembles output
+        test.That(t, tc.input, test.ShouldResemble, tc.output)
+
+        // confirm input conforms to validation expectations
+        if tc.validateErr != nil {
+            test.That(t, tc.input.Validate(), test.ShouldBeError, tc.validateErr)
+            continue
+        }
+        test.That(t, tc.input.Validate(), test.ShouldBeNil)
+
+        // confirm input conforms to ToProto expectations
+        proto := tc.input.ToProto()
+        test.That(t, len(proto), test.ShouldEqual, len(tc.input.Binaries))
+        for j := range tc.input.Binaries {
+            test.That(t, proto[j].Metadata, test.ShouldResemble, &datasyncPB.SensorMetadata{
+                TimeRequested: timestamppb.New(timeRequested.UTC()),
+                TimeReceived: timestamppb.New(timeReceived.UTC()),
+                MimeType: tc.input.Binaries[j].MimeType.ToProto(),
+                Annotations: tc.input.Binaries[j].Annotations.ToProto(),
+            })
+
+            test.That(t, proto[j].Data, test.ShouldResemble, &datasyncPB.SensorData_Binary{
+                Binary: tc.input.Binaries[j].Payload,
+            })
+        }
+    }
+}
+
+func TestNewTabularCaptureResultReadings(t *testing.T) {
+    now := time.Now()
+    type testCase struct {
+        input map[string]interface{}
+        output *structpb.Struct
+        err error
+    }
+    firstReading := map[string]any{
+        "hi": 1,
+        "there": 1.2,
+        "friend": []any{
+            map[string]any{
+                "weird": "stuff",
+                "even": "stranger",
+            },
+            1,
+            true,
+            "20 mickey mouse",
+            []any{3.3, 9.9},
+            []byte{1, 2, 3},
+        },
+    }
+    tcs := []testCase{
+        {
+            input: nil,
+            output: tu.ToStructPBStruct(t, map[string]any{"readings": map[string]any{}}),
+        },
+        {
+            input: firstReading,
+            output: tu.ToStructPBStruct(t, map[string]any{"readings": firstReading}),
+        },
+        {
+            input: map[string]any{"invalid_type": []float64{3.3, 9.9}},
+            err: errors.New("proto: invalid type: []float64"),
+        },
+    }
+
+    for i, tc := range tcs {
+        t.Logf("index: %d", i)
+        res, err := NewTabularCaptureResultReadings(now, tc.input)
+        if tc.err != nil {
+            test.That(t, err, test.ShouldBeError, tc.err)
+            continue
+        }
+
+        test.That(t, err, test.ShouldBeNil)
+        verifyStruct(t, res, now, tc.output)
+    }
+}
+
+func TestNewTabularCaptureResult(t *testing.T) {
+    now := time.Now()
+    type testCase struct {
+        input any
+        output *structpb.Struct
+        err error
+    }
+    tcs := []testCase{
+        {
+            input: nil,
+            err: errors.New("unable to convert interface to a form acceptable to structpb.NewStruct: no data passed in"),
+        },
+        {
+            input: armPB.GetEndPositionResponse{Pose: &commonPB.Pose{X: 1, Y: 2, Z: 3, OX: 4, OY: 5, OZ: 6, Theta: 7}},
+            output: tu.ToStructPBStruct(t, map[string]any{"pose": map[string]any{
+                "x": 1,
+                "y": 2,
+                "z": 3,
+                "o_x": 4,
+                "o_y": 5,
+                "o_z": 6,
+                "theta": 7,
+            }}),
+        },
+        {
+            input: &armPB.GetEndPositionResponse{Pose: &commonPB.Pose{X: 1, Y: 2, Z: 3, OX: 4, OY: 5, OZ: 6, Theta: 7}},
+            output: tu.ToStructPBStruct(t, map[string]any{"pose": map[string]any{
+                "x": 1,
+                "y": 2,
+                "z": 3,
+                "o_x": 4,
+                "o_y": 5,
+                "o_z": 6,
+                "theta": 7,
+            }}),
+        },
+    }
+
+    for i, tc := range tcs {
+        t.Logf("index: %d", i)
+        res, err := NewTabularCaptureResult(now, tc.input)
+        if tc.err != nil {
+            test.That(t, err, test.ShouldBeError, tc.err)
+            continue
+        }
+        test.That(t, err, test.ShouldBeNil)
+        verifyStruct(t, res, now, tc.output)
+    }
+}
+
+func verifyStruct(t *testing.T, res CaptureResult, now time.Time, output *structpb.Struct) {
+    t.Helper()
+    test.That(t, res, test.ShouldNotBeNil)
+
+    test.That(t, res.Type, test.ShouldEqual, CaptureTypeTabular)
+    test.That(t, res.TimeRequested, test.ShouldEqual, now)
+    test.That(t, res.TimeReceived, test.ShouldHappenAfter, now)
+    test.That(t, res.TimeReceived, test.ShouldHappenBefore, time.Now())
+    test.That(t, res.Binaries, test.ShouldBeNil)
+    test.That(t, res.TabularData.Payload, test.ShouldNotBeNil)
+    test.That(t, res.TabularData.Payload, test.ShouldResemble, output)
+
+    test.That(t, res.Validate(), test.ShouldBeNil)
+
+    // confirm input conforms to ToProto expectations
+    for _, proto := range res.ToProto() {
+        test.That(t, proto.Metadata, test.ShouldResemble, &datasyncPB.SensorMetadata{
+            TimeRequested: timestamppb.New(res.TimeRequested.UTC()),
+            TimeReceived: timestamppb.New(res.TimeReceived.UTC()),
+        })
+
+        test.That(t, proto.Data, test.ShouldResemble, &datasyncPB.SensorData_Struct{
+            Struct: output,
+        })
+    }
+}
+
+func TestCaptureTypeToProto(t *testing.T) {
+    test.That(t, CaptureTypeBinary.ToProto(), test.ShouldEqual, datasyncPB.DataType_DATA_TYPE_BINARY_SENSOR)
+    test.That(t, CaptureTypeTabular.ToProto(), test.ShouldEqual, datasyncPB.DataType_DATA_TYPE_TABULAR_SENSOR)
+    test.That(t, CaptureTypeUnspecified.ToProto(), test.ShouldEqual, datasyncPB.DataType_DATA_TYPE_UNSPECIFIED)
+    invalidCaptureType := CaptureType(20)
+    test.That(t, invalidCaptureType.ToProto(), test.ShouldEqual, datasyncPB.DataType_DATA_TYPE_UNSPECIFIED)
+}
+
+func TestMimeTypeToProto(t *testing.T) {
+    test.That(t, MimeTypeImageJpeg.ToProto(), test.ShouldEqual, datasyncPB.MimeType_MIME_TYPE_IMAGE_JPEG)
+    test.That(t, MimeTypeImagePng.ToProto(), test.ShouldEqual, datasyncPB.MimeType_MIME_TYPE_IMAGE_PNG)
+    test.That(t, MimeTypeApplicationPcd.ToProto(), test.ShouldEqual, datasyncPB.MimeType_MIME_TYPE_APPLICATION_PCD)
+    test.That(t, MimeTypeUnspecified.ToProto(), test.ShouldEqual, datasyncPB.MimeType_MIME_TYPE_UNSPECIFIED)
+}
+
+func TestGetDataType(t *testing.T) {
+    test.That(t, GetDataType(nextPointCloud), test.ShouldEqual, CaptureTypeBinary)
+    test.That(t, GetDataType(readImage), test.ShouldEqual, CaptureTypeBinary)
+    test.That(t, GetDataType(pointCloudMap), test.ShouldEqual, CaptureTypeBinary)
+    test.That(t, GetDataType(GetImages), test.ShouldEqual, CaptureTypeBinary)
+    test.That(t, GetDataType("anything else"), test.ShouldEqual, CaptureTypeTabular)
+}
+
+func TestMimeTypeFromProto(t *testing.T) {
+    test.That(t, MimeTypeFromProto(datasyncPB.MimeType_MIME_TYPE_IMAGE_JPEG), test.ShouldEqual, MimeTypeImageJpeg)
+    test.That(t, MimeTypeFromProto(datasyncPB.MimeType_MIME_TYPE_IMAGE_PNG), test.ShouldEqual, MimeTypeImagePng)
+    test.That(t, MimeTypeFromProto(datasyncPB.MimeType_MIME_TYPE_APPLICATION_PCD), test.ShouldEqual, MimeTypeApplicationPcd)
+    test.That(t, MimeTypeFromProto(datasyncPB.MimeType_MIME_TYPE_UNSPECIFIED), test.ShouldEqual, MimeTypeUnspecified)
+    test.That(t, MimeTypeFromProto(datasyncPB.MimeType(20)), test.ShouldEqual, MimeTypeUnspecified)
+}
+
+func TestCameraFormatToMimeType(t *testing.T) {
+    test.That(t, CameraFormatToMimeType(cameraPB.Format_FORMAT_JPEG), test.ShouldEqual, MimeTypeImageJpeg)
+    test.That(t, CameraFormatToMimeType(cameraPB.Format_FORMAT_PNG), test.ShouldEqual, MimeTypeImagePng)
+    test.That(t, CameraFormatToMimeType(cameraPB.Format_FORMAT_RAW_RGBA), test.ShouldEqual, MimeTypeUnspecified)
+    test.That(t, CameraFormatToMimeType(cameraPB.Format_FORMAT_RAW_DEPTH), test.ShouldEqual, MimeTypeUnspecified)
+    test.That(t, CameraFormatToMimeType(cameraPB.Format_FORMAT_UNSPECIFIED), test.ShouldEqual, MimeTypeUnspecified)
+}
+
+func TestAnnotationsToProto(t *testing.T) {
+    conf := 0.2
+
+    empty := Annotations{}
+    test.That(t, empty.ToProto() == nil, test.ShouldBeTrue)
+
+    onlyBBoxes := Annotations{
+        BoundingBoxes: []BoundingBox{
+            {Label: "a", Confidence: &conf, XMinNormalized: 1, XMaxNormalized: 2, YMinNormalized: 3, YMaxNormalized: 4},
+            {Label: "b", XMinNormalized: 5, XMaxNormalized: 6, YMinNormalized: 7, YMaxNormalized: 8},
+        },
+    }
+    test.That(t, onlyBBoxes.ToProto(), test.ShouldResemble, &v1.Annotations{
+        Bboxes: []*v1.BoundingBox{
+            {Label: "a", Confidence: &conf, XMinNormalized: 1, XMaxNormalized: 2, YMinNormalized: 3, YMaxNormalized: 4},
+            {Label: "b", XMinNormalized: 5, XMaxNormalized: 6, YMinNormalized: 7, YMaxNormalized: 8},
+        },
+    })
+
+    onlyClassifications := Annotations{
+        Classifications: []Classification{
+            {Label: "c"},
+            {Label: "d", Confidence: &conf},
+        },
+    }
+    test.That(t, onlyClassifications.ToProto(), test.ShouldResemble, &v1.Annotations{
+        Classifications: []*v1.Classification{
+            {Label: "c"},
+            {Label: "d", Confidence: &conf},
+        },
+    })
+
+    both := Annotations{
+        BoundingBoxes: []BoundingBox{
+            {Label: "a", Confidence: &conf, XMinNormalized: 1, XMaxNormalized: 2, YMinNormalized: 3, YMaxNormalized: 4},
+            {Label: "b", XMinNormalized: 5, XMaxNormalized: 6, YMinNormalized: 7, YMaxNormalized: 8},
+        },
+        Classifications: []Classification{
+            {Label: "c"},
+            {Label: "d", Confidence: &conf},
+        },
+    }
+    test.That(t, both.ToProto(), test.ShouldResemble, &v1.Annotations{
+        Bboxes: []*v1.BoundingBox{
+            {Label: "a", Confidence: &conf, XMinNormalized: 1, XMaxNormalized: 2, YMinNormalized: 3, YMaxNormalized: 4},
+            {Label: "b", XMinNormalized: 5, XMaxNormalized: 6, YMinNormalized: 7, YMaxNormalized: 8},
+        },
+        Classifications: []*v1.Classification{
+            {Label: "c"},
+            {Label: "d", Confidence: &conf},
+        },
+    })
+}
+
+func TestGetFileExt(t *testing.T) {
+    test.That(t, getFileExt(CaptureTypeTabular, "anything", nil), test.ShouldResemble, ".dat")
+    test.That(t, getFileExt(CaptureTypeUnspecified, "anything", nil), test.ShouldResemble, "")
+    test.That(t, getFileExt(CaptureType(20), "anything", nil), test.ShouldResemble, "")
+    test.That(t, getFileExt(CaptureTypeBinary, "anything", nil), test.ShouldResemble, "")
+    test.That(t, getFileExt(CaptureTypeBinary, "NextPointCloud", nil), test.ShouldResemble, ".pcd")
+    test.That(t, getFileExt(CaptureTypeBinary, "ReadImage", nil), test.ShouldResemble, "")
+    test.That(t, getFileExt(CaptureTypeBinary, "ReadImage", map[string]string{"mime_type": rutils.MimeTypeJPEG}), test.ShouldResemble, ".jpeg")
+    test.That(t, getFileExt(CaptureTypeBinary, "ReadImage", map[string]string{"mime_type": rutils.MimeTypePNG}), test.ShouldResemble, ".png")
+    test.That(t, getFileExt(CaptureTypeBinary, "ReadImage", map[string]string{"mime_type": rutils.MimeTypePCD}), test.ShouldResemble, ".pcd")
+}
diff --git a/data/registry.go b/data/registry.go
index d48997893ed..67a26295d4b 100644
--- a/data/registry.go
+++ b/data/registry.go
@@ -19,17 +19,18 @@ type
CollectorConstructor func(resource interface{}, params CollectorParams) (Co // CollectorParams contain the parameters needed to construct a Collector. type CollectorParams struct { - MongoCollection *mongo.Collection + BufferSize int + Clock clock.Clock ComponentName string ComponentType string - MethodName string + DataType CaptureType Interval time.Duration + Logger logging.Logger + MethodName string MethodParams map[string]*anypb.Any - Target CaptureBufferedWriter + MongoCollection *mongo.Collection QueueSize int - BufferSize int - Logger logging.Logger - Clock clock.Clock + Target CaptureBufferedWriter } // Validate validates that p contains all required parameters. @@ -43,6 +44,9 @@ func (p CollectorParams) Validate() error { if p.ComponentName == "" { return errors.New("missing required parameter component name") } + if p.DataType != CaptureTypeBinary && p.DataType != CaptureTypeTabular { + return errors.New("invalid DataType") + } return nil } diff --git a/go.mod b/go.mod index 971e22b91f5..76127f97435 100644 --- a/go.mod +++ b/go.mod @@ -14,7 +14,7 @@ require ( github.com/bep/debounce v1.2.1 github.com/bluenviron/gortsplib/v4 v4.8.0 github.com/bluenviron/mediacommon v1.9.2 - github.com/bufbuild/buf v1.21.0 + github.com/bufbuild/buf v1.30.0 github.com/charmbracelet/huh v0.6.0 github.com/charmbracelet/huh/spinner v0.0.0-20240917123815-c9b2c9cdb7b6 github.com/creack/pty v1.1.19-0.20220421211855-0d412c9fbeb1 @@ -44,10 +44,10 @@ require ( github.com/google/go-cmp v0.6.0 github.com/google/uuid v1.6.0 github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 - github.com/grpc-ecosystem/grpc-gateway/v2 v2.15.2 + github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0 github.com/invopop/jsonschema v0.6.0 github.com/jedib0t/go-pretty/v6 v6.4.6 - github.com/jhump/protoreflect v1.15.1 + github.com/jhump/protoreflect v1.15.6 github.com/kellydunn/golang-geo v0.7.0 github.com/kylelemons/godebug v1.1.0 github.com/lestrrat-go/jwx v1.2.29 @@ -78,7 +78,7 @@ require ( go.uber.org/atomic v1.11.0 go.uber.org/multierr v1.11.0 go.uber.org/zap v1.27.0 - go.viam.com/api v0.1.357 + go.viam.com/api v0.1.360 go.viam.com/test v1.2.3 go.viam.com/utils v0.1.112 goji.io v2.0.2+incompatible @@ -96,7 +96,7 @@ require ( google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 google.golang.org/grpc v1.66.0 google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.2.0 - google.golang.org/protobuf v1.34.2 + google.golang.org/protobuf v1.35.1 gopkg.in/src-d/go-billy.v4 v4.3.2 gorgonia.org/tensor v0.9.24 gotest.tools/gotestsum v1.10.0 @@ -107,6 +107,7 @@ require ( require ( 4d63.com/gocheckcompilerdirectives v1.2.1 // indirect 4d63.com/gochecknoglobals v0.2.1 // indirect + buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.33.0-20240221180331-f05a6f4403ce.1 // indirect cel.dev/expr v0.15.0 // indirect cloud.google.com/go v0.115.1 // indirect cloud.google.com/go/auth v0.9.3 // indirect @@ -117,6 +118,8 @@ require ( cloud.google.com/go/monitoring v1.21.0 // indirect cloud.google.com/go/storage v1.43.0 // indirect cloud.google.com/go/trace v1.11.0 // indirect + connectrpc.com/connect v1.15.0 // indirect + connectrpc.com/otelconnect v0.7.0 // indirect contrib.go.opencensus.io/exporter/stackdriver v0.13.4 // indirect git.sr.ht/~sbinet/gg v0.3.1 // indirect github.com/4meepo/tagalign v1.3.4 // indirect @@ -138,6 +141,7 @@ require ( github.com/alexkohler/nakedret/v2 v2.0.4 // indirect github.com/alexkohler/prealloc v1.0.0 // indirect github.com/alingse/asasalint v0.0.11 // indirect + github.com/antlr4-go/antlr/v4 
v4.13.0 // indirect github.com/apache/arrow/go/arrow v0.0.0-20201229220542-30ce2eb5d4dc // indirect github.com/ashanbrown/forbidigo v1.6.0 // indirect github.com/ashanbrown/makezero v1.1.1 // indirect @@ -151,8 +155,9 @@ require ( github.com/bombsimon/wsl/v4 v4.4.1 // indirect github.com/breml/bidichk v0.2.7 // indirect github.com/breml/errchkjson v0.3.6 // indirect - github.com/bufbuild/connect-go v1.8.0 // indirect - github.com/bufbuild/protocompile v0.5.1 // indirect + github.com/bufbuild/protocompile v0.9.0 // indirect + github.com/bufbuild/protovalidate-go v0.6.0 // indirect + github.com/bufbuild/protoyaml-go v0.1.8 // indirect github.com/butuzov/ireturn v0.3.0 // indirect github.com/butuzov/mirror v1.2.0 // indirect github.com/campoy/embedmd v1.0.0 // indirect @@ -160,7 +165,7 @@ require ( github.com/catppuccin/go v0.2.0 // indirect github.com/ccojocar/zxcvbn-go v1.0.2 // indirect github.com/cenkalti/backoff v2.2.1+incompatible // indirect - github.com/cenkalti/backoff/v4 v4.2.1 // indirect + github.com/cenkalti/backoff/v4 v4.3.0 // indirect github.com/census-instrumentation/opencensus-proto v0.4.1 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/charithe/durationcheck v0.0.10 // indirect @@ -175,7 +180,7 @@ require ( github.com/chewxy/math32 v1.0.8 // indirect github.com/ckaznocha/intrange v0.2.0 // indirect github.com/cncf/xds/go v0.0.0-20240423153145-555b57ec207b // indirect - github.com/containerd/stargz-snapshotter/estargz v0.14.3 // indirect + github.com/containerd/stargz-snapshotter/estargz v0.15.1 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.4 // indirect github.com/curioswitch/go-reassign v0.2.0 // indirect github.com/daixiang0/gci v0.13.5 // indirect @@ -184,12 +189,13 @@ require ( github.com/denis-tingaikin/go-header v0.5.0 // indirect github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f // indirect github.com/dgottlieb/smarty-assertions v1.2.5 // indirect + github.com/distribution/reference v0.5.0 // indirect github.com/dnephin/pflag v1.0.7 // indirect - github.com/docker/cli v24.0.2+incompatible // indirect - github.com/docker/distribution v2.8.2+incompatible // indirect - github.com/docker/docker v24.0.2+incompatible // indirect - github.com/docker/docker-credential-helpers v0.7.0 // indirect - github.com/docker/go-connections v0.4.0 // indirect + github.com/docker/cli v25.0.4+incompatible // indirect + github.com/docker/distribution v2.8.3+incompatible // indirect + github.com/docker/docker v25.0.6+incompatible // indirect + github.com/docker/docker-credential-helpers v0.8.1 // indirect + github.com/docker/go-connections v0.5.0 // indirect github.com/dustin/go-humanize v1.0.1 // indirect github.com/edaniels/golog v0.0.0-20230215213219-28954395e8d0 // indirect github.com/edaniels/zeroconf v1.0.10 // indirect @@ -199,7 +205,7 @@ require ( github.com/ettle/strcase v0.2.0 // indirect github.com/fatih/camelcase v1.0.0 // indirect github.com/fatih/structtag v1.2.0 // indirect - github.com/felixge/fgprof v0.9.3 // indirect + github.com/felixge/fgprof v0.9.4 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect github.com/firefart/nonamedreturns v1.0.5 // indirect github.com/fzipp/gocyclo v0.6.0 // indirect @@ -207,7 +213,7 @@ require ( github.com/ghostiam/protogetter v0.3.6 // indirect github.com/gin-gonic/gin v1.9.1 // indirect github.com/go-audio/riff v1.0.0 // indirect - github.com/go-chi/chi/v5 v5.0.8 // indirect + github.com/go-chi/chi/v5 v5.0.12 // indirect github.com/go-critic/go-critic v0.11.4 // indirect 
github.com/go-fonts/liberation v0.3.0 // indirect github.com/go-latex/latex v0.0.0-20230307184459-12ec69307ad9 // indirect @@ -227,7 +233,6 @@ require ( github.com/gofrs/flock v0.12.1 // indirect github.com/gofrs/uuid/v5 v5.0.0 // indirect github.com/gogo/protobuf v1.3.2 // indirect - github.com/golang/glog v1.2.1 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/snappy v0.0.4 // indirect github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a // indirect @@ -238,8 +243,9 @@ require ( github.com/golangci/revgrep v0.5.3 // indirect github.com/golangci/unconvert v0.0.0-20240309020433-c5143eacb3ed // indirect github.com/gonuts/binary v0.2.0 // indirect + github.com/google/cel-go v0.20.1 // indirect github.com/google/flatbuffers v2.0.6+incompatible // indirect - github.com/google/go-containerregistry v0.15.2 // indirect + github.com/google/go-containerregistry v0.19.0 // indirect github.com/google/pprof v0.0.0-20240827171923-fa2c70bbbfe5 // indirect github.com/google/s2a-go v0.1.8 // indirect github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect @@ -259,7 +265,7 @@ require ( github.com/imdario/mergo v0.3.12 // indirect github.com/improbable-eng/grpc-web v0.15.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect - github.com/jdxcode/netrc v0.0.0-20221124155335-4616370d1a84 // indirect + github.com/jdx/go-netrc v1.0.0 // indirect github.com/jgautheron/goconst v1.7.1 // indirect github.com/jingyugao/rowserrcheck v1.1.1 // indirect github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af // indirect @@ -269,7 +275,7 @@ require ( github.com/karamaru-alpha/copyloopvar v1.1.0 // indirect github.com/kisielk/errcheck v1.7.0 // indirect github.com/kkHAIKE/contextcheck v1.1.5 // indirect - github.com/klauspost/compress v1.16.5 // indirect + github.com/klauspost/compress v1.17.7 // indirect github.com/klauspost/pgzip v1.2.6 // indirect github.com/kulti/thelper v0.6.3 // indirect github.com/kunwardeep/paralleltest v1.0.10 // indirect @@ -313,7 +319,7 @@ require ( github.com/nunnatsa/ginkgolinter v0.16.2 // indirect github.com/olekukonko/tablewriter v0.0.5 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect - github.com/opencontainers/image-spec v1.1.0-rc3 // indirect + github.com/opencontainers/image-spec v1.1.0 // indirect github.com/pelletier/go-toml v1.9.5 // indirect github.com/pelletier/go-toml/v2 v2.2.3 // indirect github.com/pierrec/lz4 v2.0.5+incompatible // indirect @@ -330,7 +336,7 @@ require ( github.com/pion/transport/v2 v2.2.10 // indirect github.com/pion/turn/v2 v2.1.6 // indirect github.com/pion/webrtc/v3 v3.2.36 // indirect - github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 // indirect + github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect github.com/pkg/profile v1.7.0 // indirect github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect @@ -369,12 +375,12 @@ require ( github.com/srikrsna/protoc-gen-gotag v0.6.2 // indirect github.com/ssgreg/nlreturn/v2 v2.2.1 // indirect github.com/stbenjam/no-sprintf-host-port v0.1.1 // indirect + github.com/stoewer/go-strcase v1.3.0 // indirect github.com/stretchr/objx v0.5.2 // indirect github.com/stretchr/testify v1.9.0 // indirect github.com/subosito/gotenv v1.4.1 // indirect github.com/tdakkota/asciicheck v0.2.0 // indirect github.com/tetafro/godot v1.4.17 // indirect - github.com/tetratelabs/wazero v1.2.0 // indirect 
github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966 // indirect github.com/timonwong/loggercheck v0.9.4 // indirect github.com/tomarrell/wrapcheck/v2 v2.9.0 // indirect @@ -383,7 +389,7 @@ require ( github.com/ultraware/funlen v0.1.0 // indirect github.com/ultraware/whitespace v0.1.1 // indirect github.com/uudashr/gocognit v1.1.3 // indirect - github.com/vbatts/tar-split v0.11.3 // indirect + github.com/vbatts/tar-split v0.11.5 // indirect github.com/wlynxg/anet v0.0.3 // indirect github.com/xdg-go/pbkdf2 v1.0.0 // indirect github.com/xdg-go/scram v1.1.2 // indirect @@ -401,10 +407,10 @@ require ( go-simpler.org/sloglint v0.7.2 // indirect go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.54.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0 // indirect - go.opentelemetry.io/otel v1.29.0 // indirect - go.opentelemetry.io/otel/metric v1.29.0 // indirect - go.opentelemetry.io/otel/sdk v1.28.0 // indirect - go.opentelemetry.io/otel/trace v1.29.0 // indirect + go.opentelemetry.io/otel v1.31.0 // indirect + go.opentelemetry.io/otel/metric v1.31.0 // indirect + go.opentelemetry.io/otel/sdk v1.31.0 // indirect + go.opentelemetry.io/otel/trace v1.31.0 // indirect go.uber.org/automaxprocs v1.5.3 // indirect go.uber.org/goleak v1.3.0 // indirect go4.org/unsafe/assume-no-moving-gc v0.0.0-20230525183740-e7c30c78aeb2 // indirect diff --git a/go.sum b/go.sum index 4c432a537fa..f513a3e4e15 100644 --- a/go.sum +++ b/go.sum @@ -3,6 +3,8 @@ 4d63.com/gochecknoglobals v0.0.0-20201008074935-acfc0b28355a/go.mod h1:wfdC5ZjKSPr7CybKEcgJhUOgeAQW1+7WcyK8OvUilfo= 4d63.com/gochecknoglobals v0.2.1 h1:1eiorGsgHOFOuoOiJDy2psSrQbRdIHrlge0IJIkUgDc= 4d63.com/gochecknoglobals v0.2.1/go.mod h1:KRE8wtJB3CXCsb1xy421JfTHIIbmT3U5ruxw2Qu8fSU= +buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.33.0-20240221180331-f05a6f4403ce.1 h1:0nWhrRcnkgw1kwJ7xibIO8bqfOA7pBzBjGCDBxIHch8= +buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.33.0-20240221180331-f05a6f4403ce.1/go.mod h1:Tgn5bgL220vkFOI0KPStlcClPeOJzAv4uT+V8JXGUnw= cel.dev/expr v0.15.0 h1:O1jzfJCQBfL5BFoYktaxwIhuttaQPsVWerH9/EEKx0w= cel.dev/expr v0.15.0/go.mod h1:TRSuuV7DlVCE/uwv5QbAiW/v8l5O8C4eEPHeu7gf7Sg= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= @@ -58,6 +60,10 @@ cloud.google.com/go/storage v1.43.0 h1:CcxnSohZwizt4LCzQHWvBf1/kvtHUn7gk9QERXPyX cloud.google.com/go/storage v1.43.0/go.mod h1:ajvxEa7WmZS1PxvKRq4bq0tFT3vMd502JwstCcYv0Q0= cloud.google.com/go/trace v1.11.0 h1:UHX6cOJm45Zw/KIbqHe4kII8PupLt/V5tscZUkeiJVI= cloud.google.com/go/trace v1.11.0/go.mod h1:Aiemdi52635dBR7o3zuc9lLjXo3BwGaChEjCa3tJNmM= +connectrpc.com/connect v1.15.0 h1:lFdeCbZrVVDydAqwr4xGV2y+ULn+0Z73s5JBj2LikWo= +connectrpc.com/connect v1.15.0/go.mod h1:bQmjpDY8xItMnttnurVgOkHUBMRT9cpsNi2O4AjKhmA= +connectrpc.com/otelconnect v0.7.0 h1:ZH55ZZtcJOTKWWLy3qmL4Pam4RzRWBJFOqTPyAqCXkY= +connectrpc.com/otelconnect v0.7.0/go.mod h1:Bt2ivBymHZHqxvo4HkJ0EwHuUzQN6k2l0oH+mp/8nwc= contrib.go.opencensus.io/exporter/stackdriver v0.13.4 h1:ksUxwH3OD5sxkjzEqGxNTl+Xjsmu3BnC/300MhSVTSc= contrib.go.opencensus.io/exporter/stackdriver v0.13.4/go.mod h1:aXENhDJ1Y4lIg4EUaVTwzvYETVNZk10Pu26tevFKLUc= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= @@ -78,7 +84,6 @@ github.com/Antonboom/testifylint v1.4.3/go.mod h1:+8Q9+AOLsz5ZiQiiYujJKs9mNz398+ github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 
h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0= github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/toml v1.2.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c h1:pxW6RcqyfI9/kWtOwnv/G+AzdKuy2ZrqINhenH4HyNs= github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= @@ -137,6 +142,8 @@ github.com/alingse/asasalint v0.0.11 h1:SFwnQXJ49Kx/1GghOFz1XGqHYKp21Kq1nHad/0WQ github.com/alingse/asasalint v0.0.11/go.mod h1:nCaoMhw7a9kSJObvQyVzNTPBDbNpdocqrSP7t/cW5+I= github.com/andybalholm/brotli v1.0.0/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/antlr4-go/antlr/v4 v4.13.0 h1:lxCg3LAv+EUK6t1i0y1V6/SLeUi0eKEKdhQAlS8TVTI= +github.com/antlr4-go/antlr/v4 v4.13.0/go.mod h1:pfChB/xh/Unjila75QW7+VU4TSnWnnk9UTnmpPaOR2g= github.com/apache/arrow/go/arrow v0.0.0-20201229220542-30ce2eb5d4dc h1:zvQ6w7KwtQWgMQiewOF9tFtundRMVZFSAksNV6ogzuY= github.com/apache/arrow/go/arrow v0.0.0-20201229220542-30ce2eb5d4dc/go.mod h1:c9sxoIT3YgLxH4UhLOCKaBlEojuMhVYpk4Ntv3opUTQ= github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= @@ -197,12 +204,14 @@ github.com/breml/bidichk v0.2.7 h1:dAkKQPLl/Qrk7hnP6P+E0xOodrq8Us7+U0o4UBOAlQY= github.com/breml/bidichk v0.2.7/go.mod h1:YodjipAGI9fGcYM7II6wFvGhdMYsC5pHDlGzqvEW3tQ= github.com/breml/errchkjson v0.3.6 h1:VLhVkqSBH96AvXEyclMR37rZslRrY2kcyq+31HCsVrA= github.com/breml/errchkjson v0.3.6/go.mod h1:jhSDoFheAF2RSDOlCfhHO9KqhZgAYLyvHe7bRCX8f/U= -github.com/bufbuild/buf v1.21.0 h1:fgmvmA5xDFbKYd9wtpExH6YtCcTUo4GDt+7yizSNqUE= -github.com/bufbuild/buf v1.21.0/go.mod h1:o7qgHprFF7rrwY9OEE3Jv+zVMqEjtYjETR+klgPcPoE= -github.com/bufbuild/connect-go v1.8.0 h1:srluNkFkZBfSfg9Qb6DrO+5nMaxix//h2ctrHZhMGKc= -github.com/bufbuild/connect-go v1.8.0/go.mod h1:GmMJYR6orFqD0Y6ZgX8pwQ8j9baizDrIQMm1/a6LnHk= -github.com/bufbuild/protocompile v0.5.1 h1:mixz5lJX4Hiz4FpqFREJHIXLfaLBntfaJv1h+/jS+Qg= -github.com/bufbuild/protocompile v0.5.1/go.mod h1:G5iLmavmF4NsYtpZFvE3B/zFch2GIY8+wjsYLR/lc40= +github.com/bufbuild/buf v1.30.0 h1:V/Gir+aVKukqI/w2Eqoiv4tqUs01KBWP9t3Hz/9/25I= +github.com/bufbuild/buf v1.30.0/go.mod h1:vfr2bN0OlblcfLHKJNMixj7WohlMlFX4yB4L3VZq7A8= +github.com/bufbuild/protocompile v0.9.0 h1:DI8qLG5PEO0Mu1Oj51YFPqtx6I3qYXUAhJVJ/IzAVl0= +github.com/bufbuild/protocompile v0.9.0/go.mod h1:s89m1O8CqSYpyE/YaSGtg1r1YFMF5nLTwh4vlj6O444= +github.com/bufbuild/protovalidate-go v0.6.0 h1:Jgs1kFuZ2LHvvdj8SpCLA1W/+pXS8QSM3F/E2l3InPY= +github.com/bufbuild/protovalidate-go v0.6.0/go.mod h1:1LamgoYHZ2NdIQH0XGczGTc6Z8YrTHjcJVmiBaar4t4= +github.com/bufbuild/protoyaml-go v0.1.8 h1:X9QDLfl9uEllh4gsXUGqPanZYCOKzd92uniRtW2OnAQ= +github.com/bufbuild/protoyaml-go v0.1.8/go.mod h1:R8vE2+l49bSiIExP4VJpxOXleHE+FDzZ6HVxr3cYunw= github.com/butuzov/ireturn v0.3.0 h1:hTjMqWw3y5JC3kpnC5vXmFJAWI/m31jaCYQqzkS6PL0= github.com/butuzov/ireturn v0.3.0/go.mod h1:A09nIiwiqzN/IoVo9ogpa0Hzi9fex1kd9PSD6edP5ZA= github.com/butuzov/mirror v1.2.0 h1:9YVK1qIjNspaqWutSv8gsge2e/Xpq1eqEkslEUHy5cs= @@ -221,8 +230,8 @@ github.com/ccojocar/zxcvbn-go v1.0.2/go.mod 
h1:g1qkXtUSvHP8lhHp5GrSmTz6uWALGRMQd github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= github.com/cenkalti/backoff/v4 v4.1.1/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= -github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM= -github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= +github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/census-instrumentation/opencensus-proto v0.4.1 h1:iKLQ0xPNFxR/2hzXZMrBo8f1j86j5WHzznCCQxV/b8g= github.com/census-instrumentation/opencensus-proto v0.4.1/go.mod h1:4T9NM4+4Vw91VeyqjLS6ao50K5bOcLKN6Q42XnYaRYw= @@ -259,9 +268,15 @@ github.com/chewxy/hm v1.0.0/go.mod h1:qg9YI4q6Fkj/whwHR1D+bOGeF7SniIP40VweVepLjg github.com/chewxy/math32 v1.0.0/go.mod h1:Miac6hA1ohdDUTagnvJy/q+aNnEk16qWUdb8ZVhvCN0= github.com/chewxy/math32 v1.0.8 h1:fU5E4Ec4Z+5RtRAi3TovSxUjQPkgRh+HbP7tKB2OFbM= github.com/chewxy/math32 v1.0.8/go.mod h1:dOB2rcuFrCn6UHrze36WSLVPKtzPMRAQvBvUwkSsLqs= +github.com/chromedp/cdproto v0.0.0-20230802225258-3cf4e6d46a89/go.mod h1:GKljq0VrfU4D5yc+2qA6OVr8pmO/MBbPEWqWQ/oqGEs= +github.com/chromedp/chromedp v0.9.2/go.mod h1:LkSXJKONWTCHAfQasKFUZI+mxqS4tZqhmtGzzhLsnLs= +github.com/chromedp/sysutil v1.0.0/go.mod h1:kgWmDdq8fTzXYcKIBqIYvRRTnYb9aNS9moAV0xufSww= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/logex v1.2.1/go.mod h1:JLbx6lG2kDbNRFnfkgvh4eRJRPX1QCoOIWomwysCBrQ= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/readline v1.5.1/go.mod h1:Eh+b79XXUwfKfcPLepksvw2tcLE/Ct21YObkaSkeBlk= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/chzyer/test v1.0.0/go.mod h1:2JlltgoNkt4TW/z9V/IzDdFaMTM2JPIi26O1pF38GC8= github.com/ckaznocha/intrange v0.2.0 h1:FykcZuJ8BD7oX93YbO1UY9oZtkRbp+1/kJcDjkefYLs= github.com/ckaznocha/intrange v0.2.0/go.mod h1:r5I7nUlAAG56xmkOpw4XVr16BXhwYTUdcuRFeevn1oE= github.com/clbanning/x2j v0.0.0-20191024224557-825249438eec/go.mod h1:jMjuTZXRI4dUb/I5gc9Hdhagfvm9+RyrPryS/auMzxE= @@ -276,8 +291,10 @@ github.com/cncf/xds/go v0.0.0-20240423153145-555b57ec207b h1:ga8SEFjZ60pxLcmhnTh github.com/cncf/xds/go v0.0.0-20240423153145-555b57ec207b/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI= -github.com/containerd/stargz-snapshotter/estargz v0.14.3 h1:OqlDCK3ZVUO6C3B/5FSkDwbkEETK84kQgEeFwDC+62k= -github.com/containerd/stargz-snapshotter/estargz v0.14.3/go.mod h1:KY//uOCIkSuNAHhJogcZtrNHdKrA99/FCCRjE3HD36o= +github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= +github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= +github.com/containerd/stargz-snapshotter/estargz v0.15.1 h1:eXJjw9RbkLFgioVaTG+G/ZW/0kEe2oEKCdS/ZxIyoCU= 
+github.com/containerd/stargz-snapshotter/estargz v0.15.1/go.mod h1:gr2RNwukQ/S9Nv33Lt6UC7xEx58C+LHRdoqbEKjz1Kk= github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= @@ -290,7 +307,6 @@ github.com/corona10/goimagehash v1.0.2 h1:pUfB0LnsJASMPGEZLj7tGY251vF+qLGqOgEP4r github.com/corona10/goimagehash v1.0.2/go.mod h1:/l9umBhvcHQXVtQO1V6Gp1yD20STawkhRnnX0D1bvVI= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= -github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/cpuguy83/go-md2man/v2 v2.0.4 h1:wfIWP927BUkWJb2NmU/kNDYIBTh/ziUX91+lVfRxZq4= github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= @@ -323,18 +339,20 @@ github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZm github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c= github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4= +github.com/distribution/reference v0.5.0 h1:/FUIFXtfc/x2gpa5/VGfiGLuOIdYa1t65IKK2OFGvA0= +github.com/distribution/reference v0.5.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= github.com/dnephin/pflag v1.0.7 h1:oxONGlWxhmUct0YzKTgrpQv9AUA1wtPBn7zuSjJqptk= github.com/dnephin/pflag v1.0.7/go.mod h1:uxE91IoWURlOiTUIA8Mq5ZZkAv3dPUfZNaT80Zm7OQE= -github.com/docker/cli v24.0.2+incompatible h1:QdqR7znue1mtkXIJ+ruQMGQhpw2JzMJLRXp6zpzF6tM= -github.com/docker/cli v24.0.2+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= -github.com/docker/distribution v2.8.2+incompatible h1:T3de5rq0dB1j30rp0sA2rER+m322EBzniBPB6ZIzuh8= -github.com/docker/distribution v2.8.2+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= -github.com/docker/docker v24.0.2+incompatible h1:eATx+oLz9WdNVkQrr0qjQ8HvRJ4bOOxfzEo8R+dA3cg= -github.com/docker/docker v24.0.2+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= -github.com/docker/docker-credential-helpers v0.7.0 h1:xtCHsjxogADNZcdv1pKUHXryefjlVRqWqIhk/uXJp0A= -github.com/docker/docker-credential-helpers v0.7.0/go.mod h1:rETQfLdHNT3foU5kuNkFR1R1V12OJRRO5lzt2D1b5X0= -github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ= -github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= +github.com/docker/cli v25.0.4+incompatible h1:DatRkJ+nrFoYL2HZUzjM5Z5sAmcA5XGp+AW0oEw2+cA= +github.com/docker/cli v25.0.4+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= +github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk= +github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= +github.com/docker/docker v25.0.6+incompatible h1:5cPwbwriIcsua2REJe8HqQV+6WlWc1byg2QSXzBxBGg= +github.com/docker/docker v25.0.6+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker-credential-helpers 
v0.8.1 h1:j/eKUktUltBtMzKqmfLB0PAgqYyMHOp5vfsD1807oKo= +github.com/docker/docker-credential-helpers v0.8.1/go.mod h1:P3ci7E3lwkZg6XiHdRKft1KckHiO9a2rNtyFbZ/ry9M= +github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c= +github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= @@ -385,8 +403,9 @@ github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4= github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI= github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4= github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94= -github.com/felixge/fgprof v0.9.3 h1:VvyZxILNuCiUCSXtPtYmmtGvb65nqXh2QFWc0Wpf2/g= github.com/felixge/fgprof v0.9.3/go.mod h1:RdbpDgzqYVh/T9fPELJyV7EYJuHB55UTEULNun8eiPw= +github.com/felixge/fgprof v0.9.4 h1:ocDNwMFlnA0NU0zSB3I52xkO4sFXk80VK9lXjLClu88= +github.com/felixge/fgprof v0.9.4/go.mod h1:yKl+ERSa++RYOs32d8K6WEXCB4uXdLls4ZaZPpayhMM= github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/firefart/nonamedreturns v1.0.5 h1:tM+Me2ZaXs8tfdDw3X6DOX++wMCOqzYUho6tUTYIdRA= @@ -427,8 +446,8 @@ github.com/go-audio/transforms v0.0.0-20180121090939-51830ccc35a5 h1:acgZxkn6oSJ github.com/go-audio/transforms v0.0.0-20180121090939-51830ccc35a5/go.mod h1:z9ahC4nc9/kxKfl1BnTZ/D2Cm5TbhjR2LeuUpepL9zI= github.com/go-audio/wav v1.1.0 h1:jQgLtbqBzY7G+BM8fXF7AHUk1uHUviWS4X39d5rsL2g= github.com/go-audio/wav v1.1.0/go.mod h1:mpe9qfwbScEbkd8uybLuIpTgHyrISw/OTuvjUW2iGtE= -github.com/go-chi/chi/v5 v5.0.8 h1:lD+NLqFcAi1ovnVZpsnObHGW4xb4J8lNmoYVfECH1Y0= -github.com/go-chi/chi/v5 v5.0.8/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= +github.com/go-chi/chi/v5 v5.0.12 h1:9euLV5sTrTNTRUU9POmDUvfxyj6LAABLUcEWO+JJb4s= +github.com/go-chi/chi/v5 v5.0.12/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= github.com/go-critic/go-critic v0.5.4/go.mod h1:cjB4YGw+n/+X8gREApej7150Uyy1Tg8If6F2XOAUXNE= github.com/go-critic/go-critic v0.11.4 h1:O7kGOCx0NDIni4czrkRIXTnit0mkyKOCePh3My6OyEU= github.com/go-critic/go-critic v0.11.4/go.mod h1:2QAdo4iuLik5S9YG0rT4wcZ8QxwHYkrr6/2MWAiv/vc= @@ -516,12 +535,15 @@ github.com/go-xmlfmt/xmlfmt v1.1.2 h1:Nea7b4icn8s57fTx1M5AI4qQT5HEM3rVUO8MuE6g80 github.com/go-xmlfmt/xmlfmt v1.1.2/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM= github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= -github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee h1:s+21KNqlpePfkah2I+gwHF8xmJWRjooY+5248k6m4A0= github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= -github.com/gobwas/pool v0.2.0 h1:QEmUOlnSjWtnpRGHF3SauEiOsy82Cup83Vf2LcMlnc8= +github.com/gobwas/httphead v0.1.0 h1:exrUm0f4YX0L7EBwZHuCF4GDp8aJfVeBrlLQrs6NqWU= +github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM= github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= -github.com/gobwas/ws v1.0.2 
h1:CoAavW/wd/kulfZmSIBt6p24n4j7tHgNVCjsfHVNUbo= +github.com/gobwas/pool v0.2.1 h1:xfeeEhW7pwmX8nuLVlqbzVc7udMDrwetjEv+TZIz1og= +github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM= +github.com/gobwas/ws v1.2.1 h1:F2aeBZrm2NDsc7vbovKrWSogd4wvfAxg0FQ89/iqOTk= +github.com/gobwas/ws v1.2.1/go.mod h1:hRKAFb8wOxFROYNsT1bqfWnhX+b5MFeJM9r2ZSwg/KY= github.com/goccy/go-graphviz v0.1.3 h1:Pkt8y4FBnBNI9tfSobpoN5qy1qMNqRXPQYvLhaSUasY= github.com/goccy/go-graphviz v0.1.3/go.mod h1:pMYpbAqJT10V8dzV1JN/g/wUlG/0imKPzn3ZsrchGCI= github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= @@ -545,8 +567,6 @@ github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGw github.com/golang/geo v0.0.0-20210211234256-740aa86cb551 h1:gtexQ/VGyN+VVFRXSFiguSNcXmS6rkKT+X7FdIrTtfo= github.com/golang/geo v0.0.0-20210211234256-740aa86cb551/go.mod h1:QZ0nwyI2jOfgRAoBvP+ab5aRr7c9x7lhGEJrKvBwjWI= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/glog v1.2.1 h1:OptwRhECazUx5ix5TTWC3EZhsZEHWcYWY4FQHTIubm4= -github.com/golang/glog v1.2.1/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= @@ -612,6 +632,8 @@ github.com/gonuts/binary v0.2.0 h1:caITwMWAoQWlL0RNvv2lTU/AHqAJlVuu6nZmNgfbKW4= github.com/gonuts/binary v0.2.0/go.mod h1:kM+CtBrCGDSKdv8WXTuCUsw+loiy8f/QEI8YCCC0M/E= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/cel-go v0.20.1 h1:nDx9r8S3L4pE61eDdt8igGj8rf5kjYR3ILxWIpWNi84= +github.com/google/cel-go v0.20.1/go.mod h1:kWcIzTsPX0zmQ+H3TirHstLLf9ep5QTsZBN9u4dOYLg= github.com/google/flatbuffers v1.11.0/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/flatbuffers v2.0.6+incompatible h1:XHFReMv7nFFusa+CEokzWbzaYocKXI6C7hdU5Kgh9Lw= github.com/google/flatbuffers v2.0.6+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= @@ -630,8 +652,8 @@ github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/go-containerregistry v0.15.2 h1:MMkSh+tjSdnmJZO7ljvEqV1DjfekB6VUEAZgy3a+TQE= -github.com/google/go-containerregistry v0.15.2/go.mod h1:wWK+LnOv4jXMM23IT/F1wdYftGWGr47Is8CG+pmHK1Q= +github.com/google/go-containerregistry v0.19.0 h1:uIsMRBV7m/HDkDxE/nXMnv1q+lOOSPlQ/ywc5JbB8Ic= +github.com/google/go-containerregistry v0.19.0/go.mod h1:u0qB2l7mvtWVR5kNcbFIhFY1hLbf8eeGapA+vbFDCtQ= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= 
@@ -646,6 +668,7 @@ github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hf github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20211214055906-6f57359322fd/go.mod h1:KgnwoLYCZ8IQu3XUZ8Nc/bM9CCZFOyjUNOSygVozoDg= +github.com/google/pprof v0.0.0-20240227163752-401108e1b7e7/go.mod h1:czg5+yv1E0ZGTi6S6vVK1mke0fV+FaUhNGcd6VRS9Ik= github.com/google/pprof v0.0.0-20240827171923-fa2c70bbbfe5 h1:5iH8iuqE5apketRbSFBy+X1V0o+l+8NF1avt4HWl7cA= github.com/google/pprof v0.0.0-20240827171923-fa2c70bbbfe5/go.mod h1:vavhavw2zAxS5dIdcRluK6cSGGPlZynqzFM8NdvU144= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= @@ -707,8 +730,8 @@ github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgf github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.15.2 h1:gDLXvp5S9izjldquuoAhDzccbskOL6tDC5jMSyx3zxE= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.15.2/go.mod h1:7pdNwVWBBHGiCxa9lAszqCJMbfTISJ7oMftp8+UGV08= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0 h1:bkypFPDjIYGfCYD5mRBvpqxfYX1YCS1PXdKYWi8FsN0= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0/go.mod h1:P+Lt/0by1T8bfcF3z737NnSbmxQAppXMRziHUxPOC8k= github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= github.com/hashicorp/consul/api v1.3.0/go.mod h1:MmDNSzIMUjNpY/mQ398R4bk2FnqQLoPndWW5VkKPlCE= github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= @@ -747,6 +770,7 @@ github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0 h1:i462o439Z github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0/go.mod h1:N0Wam8K1arqPXNWjMo21EXnBPOPp36vB07FNRdD2geA= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20210905161508-09a460cdf81d/go.mod h1:aYm2/VgdVmcIU8iMfdMvDMsRAQjcfZSKFby6HOFvi/w= +github.com/ianlancetaylor/demangle v0.0.0-20230524184225-eabc099b10ab/go.mod h1:gx7rwoVhcfuVKG5uya9Hs3Sxj7EIvldVofAWIUtGouw= github.com/imdario/mergo v0.3.12 h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU= github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= github.com/improbable-eng/grpc-web v0.15.0 h1:BN+7z6uNXZ1tQGcNAuaU1YjsLTApzkjt2tzCixLaUPQ= @@ -757,16 +781,16 @@ github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLf github.com/influxdata/influxdb1-client v0.0.0-20191209144304-8bf82d3c094d/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo= github.com/invopop/jsonschema v0.6.0 h1:8e+xY8ZEn8gDHUYylSlLHy22P+SLeIRIHv3nM3hCbmY= github.com/invopop/jsonschema v0.6.0/go.mod h1:O9uiLokuu0+MGFlyiaqtWxwqJm41/+8Nj0lD7A36YH0= -github.com/jdxcode/netrc v0.0.0-20221124155335-4616370d1a84 h1:2uT3aivO7NVpUPGcQX7RbHijHMyWix/yCnIrCWc+5co= -github.com/jdxcode/netrc v0.0.0-20221124155335-4616370d1a84/go.mod h1:Zi/ZFkEqFHTm7qkjyNJjaWH4LQA9LQhGJyF0lTYGpxw= +github.com/jdx/go-netrc v1.0.0 h1:QbLMLyCZGj0NA8glAhxUpf1zDg6cxnWgMBbjq40W0gQ= 
+github.com/jdx/go-netrc v1.0.0/go.mod h1:Gh9eFQJnoTNIRHXl2j5bJXA1u84hQWJWgGh569zF3v8= github.com/jedib0t/go-pretty/v6 v6.4.6 h1:v6aG9h6Uby3IusSSEjHaZNXpHFhzqMmjXcPq1Rjl9Jw= github.com/jedib0t/go-pretty/v6 v6.4.6/go.mod h1:Ndk3ase2CkQbXLLNf5QDHoYb6J9WtVfmHZu9n8rk2xs= github.com/jgautheron/goconst v1.4.0/go.mod h1:aAosetZ5zaeC/2EfMeRswtxUFBpe2Hr7HzkgX4fanO4= github.com/jgautheron/goconst v1.7.1 h1:VpdAG7Ca7yvvJk5n8dMwQhfEZJh95kl/Hl9S1OI5Jkk= github.com/jgautheron/goconst v1.7.1/go.mod h1:aAosetZ5zaeC/2EfMeRswtxUFBpe2Hr7HzkgX4fanO4= github.com/jhump/protoreflect v1.10.3/go.mod h1:7GcYQDdMU/O/BBrl/cX6PNHpXh6cenjd8pneu5yW7Tg= -github.com/jhump/protoreflect v1.15.1 h1:HUMERORf3I3ZdX05WaQ6MIpd/NJ434hTp5YiKgfCL6c= -github.com/jhump/protoreflect v1.15.1/go.mod h1:jD/2GMKKE6OqX8qTjhADU1e6DShO+gavG9e0Q693nKo= +github.com/jhump/protoreflect v1.15.6 h1:WMYJbw2Wo+KOWwZFvgY0jMoVHM6i4XIvRs2RcBj5VmI= +github.com/jhump/protoreflect v1.15.6/go.mod h1:jCHoyYQIJnaabEYnbGwyo9hUqfyUMTbJw/tAut5t97E= github.com/jingyugao/rowserrcheck v0.0.0-20210130005344-c6a0c12dd98d/go.mod h1:/EZlaYCnEX24i7qdVhT9du5JrtFWYRQr67bVgR7JJC8= github.com/jingyugao/rowserrcheck v1.1.1 h1:zibz55j/MJtLsjP1OF4bSdgXxwL1b+Vn7Tjzq7gFzUs= github.com/jingyugao/rowserrcheck v1.1.1/go.mod h1:4yvlZSDb3IyDTUZJUmpZfm2Hwok+Dtp+nu2qOq+er9c= @@ -783,6 +807,7 @@ github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhB github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= github.com/jonboulle/clockwork v0.3.0 h1:9BSCMi8C+0qdApAp4auwX0RkLGUjs956h0EkuQymUhg= github.com/jonboulle/clockwork v0.3.0/go.mod h1:Pkfl5aHPm1nk2H9h0bjmnJD/BcgbGXUBGnn1kMkgxc8= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= @@ -820,8 +845,8 @@ github.com/klauspost/compress v1.10.7/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYs github.com/klauspost/compress v1.11.0/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.11.7/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= -github.com/klauspost/compress v1.16.5 h1:IFV2oUNUzZaz+XyusxpLzpzS8Pt5rh0Z16For/djlyI= -github.com/klauspost/compress v1.16.5/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= +github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg= +github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= github.com/klauspost/cpuid v0.0.0-20180405133222-e7e905edc00e h1:+lIPJOWl+jSiJOc70QXJ07+2eg2Jy2EC7Mi11BWujeM= github.com/klauspost/cpuid v0.0.0-20180405133222-e7e905edc00e/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= github.com/klauspost/cpuid/v2 v2.2.4 h1:acbojRNwl3o09bUq+yDCtZFc1aiwaAAxtcn8YkZXnvk= @@ -859,6 +884,7 @@ github.com/ldez/gomoddirectives v0.2.4 h1:j3YjBIjEBbqZ0NKtBNzr8rtMHTOrLPeiwTkfUJ github.com/ldez/gomoddirectives v0.2.4/go.mod h1:oWu9i62VcQDYp9EQ0ONTfqLNh+mDLWWDO+SO0qSQw5g= github.com/ldez/tagliatelle v0.5.0 h1:epgfuYt9v0CG3fms0pEgIMNPuFf/LpPIfjk4kyqSioo= github.com/ldez/tagliatelle v0.5.0/go.mod h1:rj1HmWiL1MiKQuOONhd09iySTEkUuE/8+5jtPYz9xa4= +github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80/go.mod 
h1:imJHygn/1yfhB7XSJJKlFZKl/J+dCPAknuiaGOshXAs= github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= @@ -898,6 +924,7 @@ github.com/magefile/mage v1.10.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXq github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo= github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/maratori/testableexamples v1.0.0 h1:dU5alXRrD8WKSjOUnmJZuzdxWOEQ57+7s93SLMxb2vI= github.com/maratori/testableexamples v1.0.0/go.mod h1:4rhjL1n20TUTT4vdh3RDqSizKLyXp7K2u6HgraZCGzE= github.com/maratori/testpackage v1.0.1/go.mod h1:ddKdw+XG0Phzhx8BFDTKgpWP4i7MpApTE5fXSKAqwDU= @@ -1050,8 +1077,8 @@ github.com/onsi/gomega v1.34.2/go.mod h1:v1xfxRgk0KIsG+QOdm7p8UosrOzPYRo60fd3B/1 github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= -github.com/opencontainers/image-spec v1.1.0-rc3 h1:fzg1mXZFj8YdPeNkRXMg+zb88BFV0Ys52cJydRwBkb8= -github.com/opencontainers/image-spec v1.1.0-rc3/go.mod h1:X4pATf0uXsnn3g5aiGIsVnJBR4mxhKzfwmvK/B2NTm8= +github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug= +github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= github.com/opentracing-contrib/go-observer v0.0.0-20170622124052-a52f23424492/go.mod h1:Ngi6UdF0k5OKD5t5wlmGhe/EDKPoUM3BXZSSfIuJbis= github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74= github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= @@ -1060,6 +1087,7 @@ github.com/openzipkin-contrib/zipkin-go-opentracing v0.4.5/go.mod h1:/wsWhb9smxS github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw= github.com/openzipkin/zipkin-go v0.2.1/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= github.com/openzipkin/zipkin-go v0.2.2/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= +github.com/orisano/pixelmatch v0.0.0-20220722002657-fb0b55479cde/go.mod h1:nZgzbfBr3hhjoZnS66nKrHmduYNpc34ny7RK4z5/HM0= github.com/otiai10/copy v1.2.0/go.mod h1:rrF5dJ5F0t/EWSYODDu4j9/vEeYHMkc8jt0zJChqQWw= github.com/otiai10/copy v1.14.0 h1:dCI/t1iTdYGtkvCuBG2BgR6KZa83PTclw4U5n2wAllU= github.com/otiai10/copy v1.14.0/go.mod h1:ECfuL02W+/FkTWZWgQqXPWZgW9oeKCSQ5qVfSc4qc4w= @@ -1130,8 +1158,8 @@ github.com/pion/turn/v2 v2.1.6 h1:Xr2niVsiPTB0FPtt+yAWKFUkU1eotQbGgpTIld4x1Gc= github.com/pion/turn/v2 v2.1.6/go.mod h1:huEpByKKHix2/b9kmTAM3YoX6MKP+/D//0ClgUYR2fY= github.com/pion/webrtc/v3 v3.2.36 h1:RM/miAv0M4TrhhS7h2mcZXt44K68WmpVDkUOgz2l2l8= github.com/pion/webrtc/v3 v3.2.36/go.mod h1:wWQz1PuKNSNK4VrJJNpPN3vZmKEi4zA6i2ynaQOlxIU= -github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU= -github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= +github.com/pkg/browser 
v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= +github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= @@ -1219,8 +1247,8 @@ github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6So github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.6.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= -github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= +github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= +github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= github.com/rs/cors v1.11.1 h1:eU3gRzXLRK57F5rKMGMZURNdIG4EoAmX8k94r9wXWHA= github.com/rs/cors v1.11.1/go.mod h1:XyqrcTp5zjWr1wsJ8PIRZssZ8b/WMcMf71DJnit4EMU= @@ -1263,7 +1291,6 @@ github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6Mwd github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/sirupsen/logrus v1.8.0/go.mod h1:4GuYW9TZmE769R5STWrRakJc4UqQ3+QQ95fyz7ENv1A= -github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/sivchari/containedctx v1.0.3 h1:x+etemjbsh2fB5ewm5FeLNi5bUjK0V8n0RB+Wwfd0XE= @@ -1312,6 +1339,8 @@ github.com/ssgreg/nlreturn/v2 v2.2.1 h1:X4XDI7jstt3ySqGU86YGAURbxw3oTDPK9sPEi6YE github.com/ssgreg/nlreturn/v2 v2.2.1/go.mod h1:E/iiPB78hV7Szg2YfRgyIrk1AD6JVMTRkkxBiELzh2I= github.com/stbenjam/no-sprintf-host-port v0.1.1 h1:tYugd/yrm1O0dV+ThCbaKZh195Dfm07ysF0U6JQXczc= github.com/stbenjam/no-sprintf-host-port v0.1.1/go.mod h1:TLhvtIvONRzdmkFiio4O8LHsN9N74I+PhRquPsxpL0I= +github.com/stoewer/go-strcase v1.3.0 h1:g0eASXYtp+yvN9fK8sH94oCIk0fau9uV1/ZdJ0AVEzs= +github.com/stoewer/go-strcase v1.3.0/go.mod h1:fAH5hQ5pehh+j3nZfvwdk2RgEgQjAoM8wodgtPmh1xo= github.com/streadway/amqp v0.0.0-20190404075320-75d898a42a94/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= github.com/streadway/amqp v0.0.0-20190827072141-edfb9018d271/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= github.com/streadway/handy v0.0.0-20190108123426-d5acb3125c2a/go.mod h1:qNTQ5P5JnDBl6z3cMAg/SywNDC5ABu5ApDIw6lUbRmI= @@ -1351,8 +1380,6 @@ github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3/go.mod h1: github.com/tetafro/godot v1.4.4/go.mod h1:FVDd4JuKliW3UgjswZfJfHq4vAx0bD/Jd5brJjGeaz4= github.com/tetafro/godot v1.4.17 h1:pGzu+Ye7ZUEFx7LHU0dAKmCOXWsPjl7qA6iMGndsjPs= github.com/tetafro/godot v1.4.17/go.mod h1:2oVxTBSftRTh4+MVfUaUXR6bn2GDXCaMcOG4Dk3rfio= -github.com/tetratelabs/wazero v1.2.0 h1:I/8LMf4YkCZ3r2XaL9whhA0VMyAvF6QE+O7rco0DCeQ= -github.com/tetratelabs/wazero 
v1.2.0/go.mod h1:wYx2gNRg8/WihJfSDxA1TIL8H+GkfLYm+bIfbblu9VQ= github.com/timakin/bodyclose v0.0.0-20200424151742-cb6215831a94/go.mod h1:Qimiffbc6q9tBWlVV6x0P9sat/ao1xEkREYPPj9hphk= github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966 h1:quvGphlmUVU+nhpFa4gg4yJyTRJ13reZMDHrKwYw53M= github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966/go.mod h1:27bSVNWSBOHm+qRp1T9qzaIpsWEP6TbUnei/43HK+PQ= @@ -1385,7 +1412,6 @@ github.com/ultraware/whitespace v0.1.1 h1:bTPOGejYFulW3PkcrqkeQwOd6NKOOXvmGD9bo/ github.com/ultraware/whitespace v0.1.1/go.mod h1:XcP1RLD81eV4BW8UhQlpaR+SDc2givTvyI8a586WjW8= github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= -github.com/urfave/cli v1.22.12/go.mod h1:sSBEIC79qR6OvcmsD4U3KABeOTxDqQtdDnaFuUN30b8= github.com/urfave/cli/v2 v2.10.3 h1:oi571Fxz5aHugfBAJd5nkwSk3fzATXtMlpxdLylSCMo= github.com/urfave/cli/v2 v2.10.3/go.mod h1:f8iq5LtQ/bLxafbdBSLPPNsgaW0l/2fYYEHhAyPlwvo= github.com/uudashr/gocognit v1.0.1/go.mod h1:j44Ayx2KW4+oB6SWMv8KsmHzZrOInQav7D3cQMJ5JUM= @@ -1395,8 +1421,8 @@ github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyC github.com/valyala/fasthttp v1.16.0/go.mod h1:YOKImeEosDdBPnxc0gy7INqi3m1zK6A+xl6TwOBhHCA= github.com/valyala/quicktemplate v1.6.3/go.mod h1:fwPzK2fHuYEODzJ9pkw0ipCPNHZ2tD5KW4lOuSdPKzY= github.com/valyala/tcplisten v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV9WzVtRmSR+PDvWpU/qWl4Wa5LApYYX4ZtKbio= -github.com/vbatts/tar-split v0.11.3 h1:hLFqsOLQ1SsppQNTMpkpPXClLDfC2A3Zgy9OUU+RVck= -github.com/vbatts/tar-split v0.11.3/go.mod h1:9QlHN18E+fEH7RdG+QAJJcuya3rqT7eXSTY7wGrAokY= +github.com/vbatts/tar-split v0.11.5 h1:3bHCTIheBm1qFTcgh9oPu+nNBtX+XJIupG/vacinCts= +github.com/vbatts/tar-split v0.11.5/go.mod h1:yZbwRsSeGjusneWgA781EKej9HF8vme8okylkAeNKLk= github.com/viamrobotics/evdev v0.1.3 h1:mR4HFafvbc5Wx4Vp1AUJp6/aITfVx9AKyXWx+rWjpfc= github.com/viamrobotics/evdev v0.1.3/go.mod h1:N6nuZmPz7HEIpM7esNWwLxbYzqWqLSZkfI/1Sccckqk= github.com/viamrobotics/webrtc/v3 v3.99.10 h1:ykE14wm+HkqMD5Ozq4rvhzzfvnXAu14ak/HzA1OCzfY= @@ -1467,15 +1493,23 @@ go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.5 go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.54.0/go.mod h1:B9yO6b04uB80CzjedvewuqDhxJxi11s7/GtiGa8bAjI= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0 h1:TT4fX+nBOA/+LUkobKGW1ydGcn+G3vRw9+g5HwCphpk= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0/go.mod h1:L7UH0GbB0p47T4Rri3uHjbpCFYrVrwc1I25QhNPiGK8= -go.opentelemetry.io/otel v1.29.0 h1:PdomN/Al4q/lN6iBJEN3AwPvUiHPMlt93c8bqTG5Llw= -go.opentelemetry.io/otel v1.29.0/go.mod h1:N/WtXPs1CNCUEx+Agz5uouwCba+i+bJGFicT8SR4NP8= -go.opentelemetry.io/otel/metric v1.29.0 h1:vPf/HFWTNkPu1aYeIsc98l4ktOQaL6LeSoeV2g+8YLc= -go.opentelemetry.io/otel/metric v1.29.0/go.mod h1:auu/QWieFVWx+DmQOUMgj0F8LHWdgalxXqvp7BII/W8= -go.opentelemetry.io/otel/sdk v1.28.0 h1:b9d7hIry8yZsgtbmM0DKyPWMMUMlK9NEKuIG4aBqWyE= -go.opentelemetry.io/otel/sdk v1.28.0/go.mod h1:oYj7ClPUA7Iw3m+r7GeEjz0qckQRJK2B8zjcZEfu7Pg= -go.opentelemetry.io/otel/trace v1.29.0 h1:J/8ZNK4XgR7a21DZUAsbF8pZ5Jcw1VhACmnYt39JTi4= -go.opentelemetry.io/otel/trace v1.29.0/go.mod h1:eHl3w0sp3paPkYstJOmAimxhiFXPg+MMTlEh3nsQgWQ= +go.opentelemetry.io/otel v1.31.0 h1:NsJcKPIW0D0H3NgzPDHmo0WW6SptzPdqg/L1zsIm2hY= +go.opentelemetry.io/otel v1.31.0/go.mod h1:O0C14Yl9FgkjqcCZAsE053C13OaddMYr/hz6clDkEJE= 
+go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.31.0 h1:K0XaT3DwHAcV4nKLzcQvwAgSyisUghWoY20I7huthMk= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.31.0/go.mod h1:B5Ki776z/MBnVha1Nzwp5arlzBbE3+1jk+pGmaP5HME= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.24.0 h1:Xw8U6u2f8DK2XAkGRFV7BBLENgnTGX9i4rQRxJf+/vs= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.24.0/go.mod h1:6KW1Fm6R/s6Z3PGXwSJN2K4eT6wQB3vXX6CVnYX9NmM= +go.opentelemetry.io/otel/metric v1.31.0 h1:FSErL0ATQAmYHUIzSezZibnyVlft1ybhy4ozRPcF2fE= +go.opentelemetry.io/otel/metric v1.31.0/go.mod h1:C3dEloVbLuYoX41KpmAhOqNriGbA+qqH6PQ5E5mUfnY= +go.opentelemetry.io/otel/sdk v1.31.0 h1:xLY3abVHYZ5HSfOg3l2E5LUj2Cwva5Y7yGxnSW9H5Gk= +go.opentelemetry.io/otel/sdk v1.31.0/go.mod h1:TfRbMdhvxIIr/B2N2LQW2S5v9m3gOQ/08KsbbO5BPT0= +go.opentelemetry.io/otel/sdk/metric v1.19.0 h1:EJoTO5qysMsYCa+w4UghwFV/ptQgqSL/8Ni+hx+8i1k= +go.opentelemetry.io/otel/sdk/metric v1.19.0/go.mod h1:XjG0jQyFJrv2PbMvwND7LwCEhsJzCzV5210euduKcKY= +go.opentelemetry.io/otel/trace v1.31.0 h1:ffjsj1aRouKewfr85U2aGagJ46+MvodynlQ1HYdmJys= +go.opentelemetry.io/otel/trace v1.31.0/go.mod h1:TXZkRk7SM2ZQLtR6eoAWQFIHPvzQ06FJAsO1tJg480A= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= +go.opentelemetry.io/proto/otlp v1.3.1 h1:TrMUixzpM0yuc/znrFTP9MMRh8trP93mkCiDVeXrui0= +go.opentelemetry.io/proto/otlp v1.3.1/go.mod h1:0X1WI4de4ZsLrrJNLAQbFeLCm3T7yBkR0XqQ7niQU+8= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= @@ -1501,8 +1535,8 @@ go.uber.org/zap v1.18.1/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI= go.uber.org/zap v1.23.0/go.mod h1:D+nX8jyLsMHMYrln8A0rJjFt/T/9/bGgIhAqxv5URuY= go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= -go.viam.com/api v0.1.357 h1:L9LBYbaH0imv/B+mVxqtSgClIl4flzjLV6LclfnD9Nc= -go.viam.com/api v0.1.357/go.mod h1:5lpVRxMsKFCaahqsnJfPGwJ9baoQ6PIKQu3lxvy6Wtw= +go.viam.com/api v0.1.360 h1:jpcm7mxUy2RvmZGAjUpx6RP+/8341XdlRLSX/3mll0g= +go.viam.com/api v0.1.360/go.mod h1:g5eipXHNm0rQmW7DWya6avKcmzoypLmxnMlAaIsE5Ls= go.viam.com/test v1.2.3 h1:tT2QqthC2BL2tiloUC2T1AIwuLILyMRx8mmxunN+cT4= go.viam.com/test v1.2.3/go.mod h1:5pXMnEyvTygilOCaFtonnKNMqsCCBbe2ZXU8ZsJ2zjY= go.viam.com/utils v0.1.112 h1:yuVkNITUijdP/CMI3BaDozUMZwP4Ari57BvRQfORFK0= @@ -1740,7 +1774,6 @@ golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -1751,12 +1784,12 @@ golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBc 
golang.org/x/sys v0.0.0-20211105183446-c75c47738b0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220906165534-d0df966e6959/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -2032,14 +2065,14 @@ google.golang.org/protobuf v1.25.1-0.20200805231151-a709e31e5d12/go.mod h1:9JNX7 google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= -google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +google.golang.org/protobuf v1.35.1 h1:m3LfL6/Ca+fqnjnlqQXNpFPABW1UD7mjh8KO2mKFytA= +google.golang.org/protobuf v1.35.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20200902074654-038fdea0a05b/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= diff --git a/services/datamanager/builtin/builtin_sync_test.go b/services/datamanager/builtin/builtin_sync_test.go index 844b8772095..83d1c4c6765 100644 --- a/services/datamanager/builtin/builtin_sync_test.go +++ b/services/datamanager/builtin/builtin_sync_test.go @@ -15,6 +15,7 @@ import ( "testing" "time" + "github.com/golang/geo/r3" v1 "go.viam.com/api/app/datasync/v1" "go.viam.com/test" "go.viam.com/utils/rpc" @@ -343,7 +344,7 @@ func 
TestDataCaptureUploadIntegration(t *testing.T) { ctx context.Context, extra map[string]interface{}, ) (spatialmath.Pose, error) { - return spatialmath.NewZeroPose(), nil + return spatialmath.NewPoseFromPoint(r3.Vector{X: 1, Y: 2, Z: 3}), nil }, }, }) diff --git a/services/datamanager/builtin/capture/capture.go b/services/datamanager/builtin/capture/capture.go index 75112cba583..47d10b700ea 100644 --- a/services/datamanager/builtin/capture/capture.go +++ b/services/datamanager/builtin/capture/capture.go @@ -324,7 +324,7 @@ func (c *Capture) initializeOrUpdateCollector( return nil, errors.Wrapf(err, "failed to create target directory %s with 700 file permissions", targetDir) } // Build metadata. - captureMetadata := data.BuildCaptureMetadata( + captureMetadata, dataType := data.BuildCaptureMetadata( collectorConfig.Name.API, collectorConfig.Name.ShortName(), collectorConfig.Method, @@ -337,6 +337,7 @@ func (c *Capture) initializeOrUpdateCollector( bufferSize := defaultIfZeroVal(collectorConfig.CaptureBufferSize, defaultCaptureBufferSize) collector, err := collectorConstructor(res, data.CollectorParams{ MongoCollection: collection, + DataType: dataType, ComponentName: collectorConfig.Name.ShortName(), ComponentType: collectorConfig.Name.API.String(), MethodName: collectorConfig.Method, diff --git a/services/datamanager/builtin/sync/exponential_retry.go b/services/datamanager/builtin/sync/exponential_retry.go index 5db098d69c0..36b5bc6017d 100644 --- a/services/datamanager/builtin/sync/exponential_retry.go +++ b/services/datamanager/builtin/sync/exponential_retry.go @@ -168,6 +168,14 @@ func getNextWait(lastWait time.Duration, isOffline bool) time.Duration { // terminalError returns true if retrying will never succeed so that // the data gets moved to the corrupted data directory and false otherwise. func terminalError(err error) bool { - errStatus := status.Convert(err) - return errStatus.Code() == codes.InvalidArgument || errors.Is(err, proto.Error) + if status.Convert(err).Code() == codes.InvalidArgument || errors.Is(err, proto.Error) { + return true + } + + for _, e := range terminalCaptureFileErrs { + if errors.Is(err, e) { + return true + } + } + return false } diff --git a/services/datamanager/builtin/sync/upload_data_capture_file.go b/services/datamanager/builtin/sync/upload_data_capture_file.go index c913d5f478f..480c4829004 100644 --- a/services/datamanager/builtin/sync/upload_data_capture_file.go +++ b/services/datamanager/builtin/sync/upload_data_capture_file.go @@ -2,22 +2,32 @@ package sync import ( "context" - "fmt" "github.com/docker/go-units" "github.com/go-viper/mapstructure/v2" "github.com/pkg/errors" - v1 "go.viam.com/api/app/datasync/v1" - pb "go.viam.com/api/component/camera/v1" + datasyncPB "go.viam.com/api/app/datasync/v1" + cameraPB "go.viam.com/api/component/camera/v1" + "google.golang.org/protobuf/proto" "google.golang.org/protobuf/types/known/timestamppb" "go.viam.com/rdk/data" "go.viam.com/rdk/logging" ) -// MaxUnaryFileSize is the max number of bytes to send using the unary DataCaptureUpload, as opposed to the -// StreamingDataCaptureUpload. -var MaxUnaryFileSize = int64(units.MB) +var ( + // MaxUnaryFileSize is the max number of bytes to send using the unary DataCaptureUpload, as opposed to the + // StreamingDataCaptureUpload. 
+ MaxUnaryFileSize = int64(units.MB) + errMultipleReadingTypes = errors.New("sensor readings contain multiple types") + errSensorDataTypesDontMatchUploadMetadata = errors.New("sensor readings types don't match upload metadata") + errInvalidCaptureFileType = errors.New("invalid capture file type") + terminalCaptureFileErrs = []error{ + errMultipleReadingTypes, + errSensorDataTypesDontMatchUploadMetadata, + errInvalidCaptureFileType, + } +) // uploadDataCaptureFile uploads the *data.CaptureFile to the cloud using the cloud connection. // returns context.Cancelled if ctx is cancelled before upload completes. @@ -41,23 +51,64 @@ func uploadDataCaptureFile(ctx context.Context, f *data.CaptureFile, conn cloudC return 0, nil } - if md.GetType() == v1.DataType_DATA_TYPE_BINARY_SENSOR && len(sensorData) > 1 { - return 0, fmt.Errorf("binary sensor data file with more than one sensor reading is not supported: %s", f.GetPath()) + sensorDataTypeSet := captureTypesInSensorData(sensorData) + if len(sensorDataTypeSet) != 1 { + return 0, errMultipleReadingTypes } - // camera.GetImages is a special case. For that API we make 2 binary data upload requests - if md.GetType() == v1.DataType_DATA_TYPE_BINARY_SENSOR && md.GetMethodName() == data.GetImages { - logger.Debugf("attemping to upload camera.GetImages data: %s", f.GetPath()) + _, isTabular := sensorDataTypeSet[data.CaptureTypeTabular] + if isLegacyGetImagesCaptureFile(md, isTabular) { + logger.Debugf("attempting to upload legacy camera.GetImages data: %s", f.GetPath()) + return uint64(f.Size()), legacyUploadGetImages(ctx, conn, md, sensorData[0], f.Size(), f.GetPath(), logger) + } - return uint64(f.Size()), uploadGetImages(ctx, conn, md, sensorData[0], f.Size(), f.GetPath(), logger) + if err := checkUploadMetadataTypeMatchesSensorDataType(md, sensorDataTypeSet); err != nil { + return 0, err } - metaData := uploadMetadata(conn.partID, md, md.GetFileExtension()) + metaData := uploadMetadata(conn.partID, md) return uint64(f.Size()), uploadSensorData(ctx, conn.client, metaData, sensorData, f.Size(), f.GetPath(), logger) } -func uploadMetadata(partID string, md *v1.DataCaptureMetadata, fileextension string) *v1.UploadMetadata { - return &v1.UploadMetadata{ +func checkUploadMetadataTypeMatchesSensorDataType(md *datasyncPB.DataCaptureMetadata, sensorDataTypeSet map[data.CaptureType]struct{}) error { + var captureType data.CaptureType + // get first element of single element set + for k := range sensorDataTypeSet { + captureType = k + break + } + + if captureType.ToProto() != md.GetType() { + return errSensorDataTypesDontMatchUploadMetadata + } + return nil +} + +func isLegacyGetImagesCaptureFile(md *datasyncPB.DataCaptureMetadata, isTabular bool) bool { + // camera.GetImages is a special case.
For that API we make 2 binary data upload requests + // if the capture file proports to contain DataType_DATA_TYPE_BINARY_SENSOR from GetImages + // but the sensor data is not binary, then this is from a legacy GetImages capture file + return md.GetType() == datasyncPB.DataType_DATA_TYPE_BINARY_SENSOR && md.GetMethodName() == data.GetImages && isTabular +} + +func captureTypesInSensorData(sensorData []*datasyncPB.SensorData) map[data.CaptureType]struct{} { + set := map[data.CaptureType]struct{}{} + for _, sd := range sensorData { + if data.IsBinary(sd) { + if _, ok := set[data.CaptureTypeBinary]; !ok { + set[data.CaptureTypeBinary] = struct{}{} + } + } else { + if _, ok := set[data.CaptureTypeTabular]; !ok { + set[data.CaptureTypeTabular] = struct{}{} + } + } + } + return set +} + +func uploadMetadata(partID string, md *datasyncPB.DataCaptureMetadata) *datasyncPB.UploadMetadata { + return &datasyncPB.UploadMetadata{ PartId: partID, ComponentType: md.GetComponentType(), ComponentName: md.GetComponentName(), @@ -65,39 +116,40 @@ func uploadMetadata(partID string, md *v1.DataCaptureMetadata, fileextension str Type: md.GetType(), MethodParameters: md.GetMethodParameters(), Tags: md.GetTags(), - FileExtension: fileextension, + FileExtension: md.GetFileExtension(), } } -func uploadGetImages( +func legacyUploadGetImages( ctx context.Context, conn cloudConn, - md *v1.DataCaptureMetadata, - sd *v1.SensorData, + md *datasyncPB.DataCaptureMetadata, + sd *datasyncPB.SensorData, size int64, path string, logger logging.Logger, ) error { - var res pb.GetImagesResponse + var res cameraPB.GetImagesResponse if err := mapstructure.Decode(sd.GetStruct().AsMap(), &res); err != nil { return errors.Wrap(err, "failed to decode camera.GetImagesResponse") } timeRequested, timeReceived := getImagesTimestamps(&res, sd) for i, img := range res.Images { - newSensorData := []*v1.SensorData{ + newSensorData := []*datasyncPB.SensorData{ { - Metadata: &v1.SensorMetadata{ + Metadata: &datasyncPB.SensorMetadata{ TimeRequested: timeRequested, TimeReceived: timeReceived, }, - Data: &v1.SensorData_Binary{ + Data: &datasyncPB.SensorData_Binary{ Binary: img.GetImage(), }, }, } logger.Debugf("attempting to upload camera.GetImages response, index: %d", i) - metadata := uploadMetadata(conn.partID, md, getFileExtFromImageFormat(img.GetFormat())) + metadata := uploadMetadata(conn.partID, md) + metadata.FileExtension = getFileExtFromImageFormat(img.GetFormat()) // TODO: This is wrong as the size describes the size of the entire GetImages response, but we are only // uploading one of the 2 images in that response here. if err := uploadSensorData(ctx, conn.client, metadata, newSensorData, size, path, logger); err != nil { @@ -107,7 +159,10 @@ func uploadGetImages( return nil } -func getImagesTimestamps(res *pb.GetImagesResponse, sensorData *v1.SensorData) (*timestamppb.Timestamp, *timestamppb.Timestamp) { +func getImagesTimestamps( + res *cameraPB.GetImagesResponse, + sensorData *datasyncPB.SensorData, +) (*timestamppb.Timestamp, *timestamppb.Timestamp) { // If the GetImagesResponse metadata contains a capture timestamp, use that to // populate SensorMetadata. Otherwise, use the timestamps that the data management // system stored to track when a request was sent and response was received. 
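[Editor's note] The next hunk rewrites uploadSensorData so the upload path is chosen from the capture type recorded in the upload metadata plus the file size, rather than ad hoc binary checks. A minimal sketch of the resulting dispatch rule, for orientation only; the helper name chooseUploadRPC and its signature are hypothetical and not part of this patch:

    // Illustrative sketch (not part of this patch): mirrors the dispatch implemented
    // by uploadSensorData in the hunk below.
    package main

    import (
    	"errors"
    	"fmt"

    	"github.com/docker/go-units"
    	datasyncPB "go.viam.com/api/app/datasync/v1"
    )

    // chooseUploadRPC names the DataSyncService RPC a capture file of the given
    // type and size is synced with.
    func chooseUploadRPC(t datasyncPB.DataType, fileSize, maxUnary int64) (string, error) {
    	switch t {
    	case datasyncPB.DataType_DATA_TYPE_TABULAR_SENSOR:
    		// Tabular readings always go up in a single unary DataCaptureUpload call.
    		return "DataCaptureUpload", nil
    	case datasyncPB.DataType_DATA_TYPE_BINARY_SENSOR:
    		if fileSize > maxUnary {
    			// Oversized binary files are chunked over StreamingDataCaptureUpload,
    			// one stream per SensorData entry in the file.
    			return "StreamingDataCaptureUpload", nil
    		}
    		// Small binary files use one unary DataCaptureUpload call per SensorData entry.
    		return "DataCaptureUpload", nil
    	default:
    		// DATA_TYPE_FILE and DATA_TYPE_UNSPECIFIED are treated as terminal errors,
    		// so the retry loop moves the file to the corrupted-data directory.
    		return "", errors.New("invalid capture file type")
    	}
    }

    func main() {
    	rpc, _ := chooseUploadRPC(datasyncPB.DataType_DATA_TYPE_BINARY_SENSOR, 2*int64(units.MB), int64(units.MB))
    	fmt.Println(rpc) // StreamingDataCaptureUpload
    }
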
@@ -125,55 +180,157 @@ func getImagesTimestamps(res *pb.GetImagesResponse, sensorData *v1.SensorData) ( func uploadSensorData( ctx context.Context, - client v1.DataSyncServiceClient, - uploadMD *v1.UploadMetadata, - sensorData []*v1.SensorData, + client datasyncPB.DataSyncServiceClient, + uploadMD *datasyncPB.UploadMetadata, + sensorData []*datasyncPB.SensorData, fileSize int64, path string, logger logging.Logger, ) error { - // If it's a large binary file, we need to upload it in chunks. - if uploadMD.GetType() == v1.DataType_DATA_TYPE_BINARY_SENSOR && fileSize > MaxUnaryFileSize { - logger.Debugf("attempting to upload large binary file using StreamingDataCaptureUpload, file: %s", path) - c, err := client.StreamingDataCaptureUpload(ctx) - if err != nil { - return errors.Wrap(err, "error creating StreamingDataCaptureUpload client") + captureFileType := uploadMD.GetType() + switch captureFileType { + case datasyncPB.DataType_DATA_TYPE_BINARY_SENSOR: + // If it's a large binary file, we need to upload it in chunks. + if uploadMD.GetType() == datasyncPB.DataType_DATA_TYPE_BINARY_SENSOR && fileSize > MaxUnaryFileSize { + return uploadMultipleLargeBinarySensorData(ctx, client, uploadMD, sensorData, path, logger) } + return uploadMultipleBinarySensorData(ctx, client, uploadMD, sensorData, path, logger) + case datasyncPB.DataType_DATA_TYPE_TABULAR_SENSOR: + // Otherwise use the unary endpoint + logger.Debugf("attempting to upload small binary file using DataCaptureUpload, file: %s", path) + _, err := client.DataCaptureUpload(ctx, &datasyncPB.DataCaptureUploadRequest{ + Metadata: uploadMD, + SensorContents: sensorData, + }) + return errors.Wrap(err, "DataCaptureUpload failed") + case datasyncPB.DataType_DATA_TYPE_FILE: + fallthrough + case datasyncPB.DataType_DATA_TYPE_UNSPECIFIED: + fallthrough + default: + logger.Errorf("%s: %s", errInvalidCaptureFileType.Error(), captureFileType) + return errInvalidCaptureFileType + } +} + +func uploadBinarySensorData( + ctx context.Context, + client datasyncPB.DataSyncServiceClient, + md *datasyncPB.UploadMetadata, + sd *datasyncPB.SensorData, +) error { + if md.FileExtension == "" { + md.FileExtension = getFileExtFromMimeType(sd.GetMetadata().GetMimeType()) + } + if _, err := client.DataCaptureUpload(ctx, &datasyncPB.DataCaptureUploadRequest{ + Metadata: md, + SensorContents: []*datasyncPB.SensorData{sd}, + }); err != nil { + return errors.Wrap(err, "DataCaptureUpload failed") + } - toUpload := sensorData[0] + return nil +} - // First send metadata. - streamMD := &v1.StreamingDataCaptureUploadRequest_Metadata{ - Metadata: &v1.DataCaptureUploadMetadata{ - UploadMetadata: uploadMD, - SensorMetadata: toUpload.GetMetadata(), - }, - } - if err := c.Send(&v1.StreamingDataCaptureUploadRequest{UploadPacket: streamMD}); err != nil { - return errors.Wrap(err, "StreamingDataCaptureUpload failed sending metadata") +func uploadMultipleBinarySensorData( + ctx context.Context, + client datasyncPB.DataSyncServiceClient, + uploadMD *datasyncPB.UploadMetadata, + sensorData []*datasyncPB.SensorData, + path string, + logger logging.Logger, +) error { + // this is the common case + if len(sensorData) == 1 { + logger.Debugf("attempting to upload small binary file using DataCaptureUpload, sensor data, file: %s", path) + return uploadBinarySensorData(ctx, client, uploadMD, sensorData[0]) + } + + // we only go down this path if the capture method returned multiple binary + // responses, which at time of writing, only includes camera.GetImages data. 
+ for i, sd := range sensorData { + // Otherwise use the unary endpoint + logger.Debugf("attempting to upload small binary file using DataCaptureUpload, sensor data index: %d, ext: %s, file: %s", i, path) + // we clone as the uploadMD may be changed for each sensor data + // and I'm not confident that it is safe to reuse grpc request structs + // between calls if the data in the request struct changes + clonedMD := proto.Clone(uploadMD).(*datasyncPB.UploadMetadata) + if err := uploadBinarySensorData(ctx, client, clonedMD, sd); err != nil { + return err } + } + return nil +} + +func uploadMultipleLargeBinarySensorData( + ctx context.Context, + client datasyncPB.DataSyncServiceClient, + uploadMD *datasyncPB.UploadMetadata, + sensorData []*datasyncPB.SensorData, + path string, + logger logging.Logger, +) error { + if len(sensorData) == 1 { + logger.Debugf("attempting to upload large binary file using StreamingDataCaptureUpload, sensor data file: %s", path) + return uploadLargeBinarySensorData(ctx, client, uploadMD, sensorData[0], path, logger) + } - // Then call the function to send the rest. - if err := sendStreamingDCRequests(ctx, c, toUpload.GetBinary(), path, logger); err != nil { - return errors.Wrap(err, "StreamingDataCaptureUpload failed to sync") + for i, sd := range sensorData { + logger.Debugf("attempting to upload large binary file using StreamingDataCaptureUpload, sensor data index: %d, file: %s", i, path) + // we clone as the uploadMD may be changed for each sensor data + // and I'm not confident that it is safe to reuse grpc request structs + // between calls if the data in the request struct changes + clonedMD := proto.Clone(uploadMD).(*datasyncPB.UploadMetadata) + if err := uploadLargeBinarySensorData(ctx, client, clonedMD, sd, path, logger); err != nil { + return err } + } + return nil +} + +func uploadLargeBinarySensorData( + ctx context.Context, + client datasyncPB.DataSyncServiceClient, + md *datasyncPB.UploadMetadata, + sd *datasyncPB.SensorData, + path string, + logger logging.Logger, +) error { + c, err := client.StreamingDataCaptureUpload(ctx) + if err != nil { + return errors.Wrap(err, "error creating StreamingDataCaptureUpload client") + } + // First send metadata. + smd := sd.GetMetadata() + // if the extension is not set, derive the extension from the sensor data metadata's mime type + if md.FileExtension == "" { + md.FileExtension = getFileExtFromMimeType(smd.MimeType) + } + streamMD := &datasyncPB.StreamingDataCaptureUploadRequest_Metadata{ + Metadata: &datasyncPB.DataCaptureUploadMetadata{ + UploadMetadata: md, + SensorMetadata: smd, + }, + } + if err := c.Send(&datasyncPB.StreamingDataCaptureUploadRequest{UploadPacket: streamMD}); err != nil { + return errors.Wrap(err, "StreamingDataCaptureUpload failed sending metadata") + } + + // Then call the function to send the rest. 
+ if err := sendStreamingDCRequests(ctx, c, sd.GetBinary(), path, logger); err != nil { + return errors.Wrap(err, "StreamingDataCaptureUpload failed to sync") + } - _, err = c.CloseAndRecv() + if _, err = c.CloseAndRecv(); err != nil { return errors.Wrap(err, "StreamingDataCaptureUpload CloseAndRecv failed") } - // Otherwise use the unary endpoint - logger.Debugf("attempting to upload small binary file using DataCaptureUpload, file: %s", path) - _, err := client.DataCaptureUpload(ctx, &v1.DataCaptureUploadRequest{ - Metadata: uploadMD, - SensorContents: sensorData, - }) - return errors.Wrap(err, "DataCaptureUpload failed") + return nil } func sendStreamingDCRequests( ctx context.Context, - stream v1.DataSyncService_StreamingDataCaptureUploadClient, + stream datasyncPB.DataSyncService_StreamingDataCaptureUploadClient, contents []byte, path string, logger logging.Logger, @@ -193,8 +350,8 @@ func sendStreamingDCRequests( chunk := contents[i:end] // Build request with contents. - uploadReq := &v1.StreamingDataCaptureUploadRequest{ - UploadPacket: &v1.StreamingDataCaptureUploadRequest_Data{ + uploadReq := &datasyncPB.StreamingDataCaptureUploadRequest{ + UploadPacket: &datasyncPB.StreamingDataCaptureUploadRequest_Data{ Data: chunk, }, } @@ -211,17 +368,32 @@ func sendStreamingDCRequests( return nil } -func getFileExtFromImageFormat(res pb.Format) string { - switch res { - case pb.Format_FORMAT_JPEG: +func getFileExtFromImageFormat(t cameraPB.Format) string { + switch t { + case cameraPB.Format_FORMAT_JPEG: return ".jpeg" - case pb.Format_FORMAT_PNG: + case cameraPB.Format_FORMAT_PNG: return ".png" - case pb.Format_FORMAT_RAW_DEPTH: + case cameraPB.Format_FORMAT_RAW_DEPTH: return ".dep" - case pb.Format_FORMAT_RAW_RGBA: + case cameraPB.Format_FORMAT_RAW_RGBA: return ".rgba" - case pb.Format_FORMAT_UNSPECIFIED: + case cameraPB.Format_FORMAT_UNSPECIFIED: + fallthrough + default: + return "" + } +} + +func getFileExtFromMimeType(t datasyncPB.MimeType) string { + switch t { + case datasyncPB.MimeType_MIME_TYPE_IMAGE_JPEG: + return ".jpeg" + case datasyncPB.MimeType_MIME_TYPE_IMAGE_PNG: + return ".png" + case datasyncPB.MimeType_MIME_TYPE_APPLICATION_PCD: + return ".pcd" + case datasyncPB.MimeType_MIME_TYPE_UNSPECIFIED: fallthrough default: return "" diff --git a/services/datamanager/builtin/sync/upload_data_capture_file_test.go b/services/datamanager/builtin/sync/upload_data_capture_file_test.go new file mode 100644 index 00000000000..a40518de77e --- /dev/null +++ b/services/datamanager/builtin/sync/upload_data_capture_file_test.go @@ -0,0 +1,672 @@ +package sync + +import ( + "context" + "os" + "slices" + "sort" + "strings" + "sync/atomic" + "testing" + "time" + + "github.com/docker/go-units" + v1 "go.viam.com/api/app/datasync/v1" + powersensorPB "go.viam.com/api/component/powersensor/v1" + "go.viam.com/test" + "google.golang.org/grpc" + + "go.viam.com/rdk/components/camera" + "go.viam.com/rdk/components/powersensor" + "go.viam.com/rdk/components/sensor" + "go.viam.com/rdk/data" + "go.viam.com/rdk/logging" + rprotoutils "go.viam.com/rdk/protoutils" + "go.viam.com/rdk/resource" + "go.viam.com/rdk/services/vision" + "go.viam.com/rdk/utils" +) + +func TestUploadDataCaptureFile(t *testing.T) { + type upload struct { + md *v1.UploadMetadata + sd []*v1.SensorData + } + type testCase struct { + testName string + api resource.API + name string + method string + tags []string + captureType data.CaptureType + captureResults data.CaptureResult + client MockDataSyncServiceClient + expectedUploads []upload + // 
expectedUploadMetadata []*v1.UploadMetadata + // expectedSensorData [][]*v1.SensorData + additionalParams map[string]string + unaryReqs chan *v1.DataCaptureUploadRequest + steamingReqs []chan *v1.StreamingDataCaptureUploadRequest + } + + testCtx, cancel := context.WithTimeout(context.Background(), time.Second*5) + defer cancel() + logger := logging.NewTestLogger(t) + + partID := "my-part-id" + + now := time.Now() + sensorReadingResult, err := data.NewTabularCaptureResultReadings(now, map[string]interface{}{"a": 1}) + test.That(t, err, test.ShouldBeNil) + + tabularResult, err := data.NewTabularCaptureResult(now, &powersensorPB.GetPowerResponse{Watts: 0.5}) + test.That(t, err, test.ShouldBeNil) + + ts := data.Timestamps{TimeRequested: now, TimeReceived: now.Add(time.Second)} + smallBinaryResult := data.NewBinaryCaptureResult(ts, []data.Binary{ + {Payload: []byte("I'm a small binary result"), MimeType: data.MimeTypeImageJpeg}, + }) + test.That(t, err, test.ShouldBeNil) + + largeBinaryPayload := slices.Repeat([]byte{1, 2}, units.MB) + largeBinaryResult := data.NewBinaryCaptureResult(ts, []data.Binary{ + {Payload: largeBinaryPayload, MimeType: data.MimeTypeImagePng}, + }) + test.That(t, err, test.ShouldBeNil) + + smallGetImagesResult := data.NewBinaryCaptureResult(ts, []data.Binary{ + {Payload: []byte("I'm a small binary jpeg result"), MimeType: data.MimeTypeImageJpeg}, + {Payload: []byte("I'm a small binary png result"), MimeType: data.MimeTypeImagePng}, + }) + + largeGetImagesResult := data.NewBinaryCaptureResult(ts, []data.Binary{ + {Payload: largeBinaryPayload, MimeType: data.MimeTypeImageJpeg}, + {Payload: largeBinaryPayload, MimeType: data.MimeTypeImagePng}, + }) + conf := 0.888 + smallVisionCaptureAllFromCamera := data.NewBinaryCaptureResult(ts, []data.Binary{ + { + Payload: []byte("I'm a small binary jpeg result"), + MimeType: data.MimeTypeImageJpeg, + Annotations: data.Annotations{ + BoundingBoxes: []data.BoundingBox{ + { + Label: "a", + Confidence: &conf, + XMinNormalized: 1, + XMaxNormalized: 2, + YMinNormalized: 3, + YMaxNormalized: 4, + }, + { + Label: "b", + XMinNormalized: 5, + XMaxNormalized: 6, + YMinNormalized: 7, + YMaxNormalized: 8, + }, + }, + Classifications: []data.Classification{ + {Label: "a", Confidence: &conf}, + {Label: "b"}, + }, + }, + }, + }) + + largeVisionCaptureAllFromCamera := data.NewBinaryCaptureResult(ts, []data.Binary{ + { + Payload: largeBinaryPayload, + MimeType: data.MimeTypeImagePng, + Annotations: data.Annotations{ + BoundingBoxes: []data.BoundingBox{ + { + Label: "a", + Confidence: &conf, + XMinNormalized: 1, + XMaxNormalized: 2, + YMinNormalized: 3, + YMaxNormalized: 4, + }, + { + Label: "b", + XMinNormalized: 5, + XMaxNormalized: 6, + YMinNormalized: 7, + YMaxNormalized: 8, + }, + }, + Classifications: []data.Classification{ + {Label: "a", Confidence: &conf}, + {Label: "b"}, + }, + }, + }, + }) + + reqs0 := make(chan *v1.DataCaptureUploadRequest, 1) + reqs1 := make(chan *v1.DataCaptureUploadRequest, 1) + reqs2 := make(chan *v1.DataCaptureUploadRequest, 1) + reqs3 := make(chan *v1.StreamingDataCaptureUploadRequest, 100) + reqs4 := make(chan *v1.DataCaptureUploadRequest, 2) + largeGetImagesReqsIdx := atomic.Int64{} + largeGetImagesReqs := []chan *v1.StreamingDataCaptureUploadRequest{ + make(chan *v1.StreamingDataCaptureUploadRequest, 100), + make(chan *v1.StreamingDataCaptureUploadRequest, 100), + } + reqs5 := make(chan *v1.DataCaptureUploadRequest, 2) + largeVisionCaptureAllFromCameraIdx := atomic.Int64{} + largeVisionCaptureAllFromCameraReqs := 
[]chan *v1.StreamingDataCaptureUploadRequest{ + make(chan *v1.StreamingDataCaptureUploadRequest, 100), + } + + tcs := []testCase{ + { + testName: "sensor readings", + captureResults: sensorReadingResult, + captureType: data.CaptureTypeTabular, + client: MockDataSyncServiceClient{ + T: t, + DataCaptureUploadFunc: func( + ctx context.Context, + in *v1.DataCaptureUploadRequest, + opts ...grpc.CallOption, + ) (*v1.DataCaptureUploadResponse, error) { + t.Log("called") + select { + case <-testCtx.Done(): + t.Error("timeout") + t.FailNow() + case reqs0 <- in: + } + return &v1.DataCaptureUploadResponse{}, nil + }, + }, + api: sensor.API, + name: "sensor-1", + method: "Readings", + tags: []string{}, + additionalParams: map[string]string{}, + expectedUploads: []upload{ + { + md: &v1.UploadMetadata{ + ComponentName: "sensor-1", + ComponentType: sensor.API.String(), + FileExtension: ".dat", + MethodName: "Readings", + PartId: partID, + Type: v1.DataType_DATA_TYPE_TABULAR_SENSOR, + }, + sd: sensorReadingResult.ToProto(), + }, + }, + unaryReqs: reqs0, + }, + { + testName: "non readings tabular data", + captureResults: tabularResult, + captureType: data.CaptureTypeTabular, + client: MockDataSyncServiceClient{ + T: t, + DataCaptureUploadFunc: func( + ctx context.Context, + in *v1.DataCaptureUploadRequest, + opts ...grpc.CallOption, + ) (*v1.DataCaptureUploadResponse, error) { + t.Log("called") + select { + case <-testCtx.Done(): + t.Error("timeout") + t.FailNow() + case reqs1 <- in: + } + return &v1.DataCaptureUploadResponse{}, nil + }, + }, + api: powersensor.API, + name: "powersensor-1", + method: "Power", + tags: []string{"tag1", "tag2"}, + additionalParams: map[string]string{"some": "additional", "param": "things"}, + expectedUploads: []upload{ + { + md: &v1.UploadMetadata{ + ComponentName: "powersensor-1", + ComponentType: powersensor.API.String(), + FileExtension: ".dat", + MethodName: "Power", + PartId: partID, + Tags: []string{"tag1", "tag2"}, + Type: v1.DataType_DATA_TYPE_TABULAR_SENSOR, + }, + sd: tabularResult.ToProto(), + }, + }, + unaryReqs: reqs1, + }, + { + testName: "small binary data", + captureResults: smallBinaryResult, + captureType: data.CaptureTypeBinary, + client: MockDataSyncServiceClient{ + T: t, + DataCaptureUploadFunc: func( + ctx context.Context, + in *v1.DataCaptureUploadRequest, + opts ...grpc.CallOption, + ) (*v1.DataCaptureUploadResponse, error) { + t.Log("called") + select { + case <-testCtx.Done(): + t.Error("timeout") + t.FailNow() + case reqs2 <- in: + } + return &v1.DataCaptureUploadResponse{}, nil + }, + }, + api: camera.API, + name: "camera-1", + method: "ReadImage", + tags: []string{"tag1", "tag2"}, + additionalParams: map[string]string{"mime_type": utils.MimeTypeJPEG}, + expectedUploads: []upload{ + { + md: &v1.UploadMetadata{ + ComponentName: "camera-1", + ComponentType: camera.API.String(), + FileExtension: ".jpeg", + MethodName: "ReadImage", + PartId: partID, + Tags: []string{"tag1", "tag2"}, + Type: v1.DataType_DATA_TYPE_BINARY_SENSOR, + }, + sd: smallBinaryResult.ToProto(), + }, + }, + unaryReqs: reqs2, + }, + { + testName: "large binary data", + captureResults: largeBinaryResult, + captureType: data.CaptureTypeBinary, + client: MockDataSyncServiceClient{ + T: t, + StreamingDataCaptureUploadFunc: func( + ctx context.Context, + _ ...grpc.CallOption, + ) (v1.DataSyncService_StreamingDataCaptureUploadClient, error) { + mockStreamingClient := &ClientStreamingMock[ + *v1.StreamingDataCaptureUploadRequest, + *v1.StreamingDataCaptureUploadResponse, + ]{ + T: t, + 
SendFunc: func(in *v1.StreamingDataCaptureUploadRequest) error { + select { + case <-testCtx.Done(): + t.Error("timeout") + t.FailNow() + case reqs3 <- in: + } + return nil + }, + CloseAndRecvFunc: func() (*v1.StreamingDataCaptureUploadResponse, error) { + close(reqs3) + return &v1.StreamingDataCaptureUploadResponse{}, nil + }, + } + return mockStreamingClient, nil + }, + }, + api: camera.API, + name: "camera-1", + method: "ReadImage", + tags: []string{"tag1", "tag2"}, + additionalParams: map[string]string{"mime_type": utils.MimeTypePNG}, + expectedUploads: []upload{ + { + md: &v1.UploadMetadata{ + ComponentName: "camera-1", + ComponentType: camera.API.String(), + FileExtension: ".png", + MethodName: "ReadImage", + PartId: partID, + Tags: []string{"tag1", "tag2"}, + Type: v1.DataType_DATA_TYPE_BINARY_SENSOR, + }, + sd: largeBinaryResult.ToProto(), + }, + }, + steamingReqs: []chan *v1.StreamingDataCaptureUploadRequest{reqs3}, + }, + { + testName: "small camera.GetImages", + captureResults: smallGetImagesResult, + captureType: data.CaptureTypeBinary, + client: MockDataSyncServiceClient{ + T: t, + DataCaptureUploadFunc: func( + ctx context.Context, + in *v1.DataCaptureUploadRequest, + opts ...grpc.CallOption, + ) (*v1.DataCaptureUploadResponse, error) { + t.Log("called") + select { + case <-testCtx.Done(): + t.Error("timeout") + t.FailNow() + case reqs4 <- in: + } + return &v1.DataCaptureUploadResponse{}, nil + }, + }, + api: camera.API, + name: "camera-1", + method: "GetImages", + tags: []string{"tag1", "tag2"}, + expectedUploads: []upload{ + { + md: &v1.UploadMetadata{ + ComponentName: "camera-1", + ComponentType: camera.API.String(), + FileExtension: ".jpeg", + MethodName: "GetImages", + PartId: partID, + Tags: []string{"tag1", "tag2"}, + Type: v1.DataType_DATA_TYPE_BINARY_SENSOR, + }, + sd: []*v1.SensorData{smallGetImagesResult.ToProto()[0]}, + }, + { + md: &v1.UploadMetadata{ + ComponentName: "camera-1", + ComponentType: camera.API.String(), + FileExtension: ".png", + MethodName: "GetImages", + PartId: partID, + Tags: []string{"tag1", "tag2"}, + Type: v1.DataType_DATA_TYPE_BINARY_SENSOR, + }, + sd: []*v1.SensorData{smallGetImagesResult.ToProto()[1]}, + }, + }, + unaryReqs: reqs4, + }, + { + testName: "large camera.GetImages", + captureResults: largeGetImagesResult, + captureType: data.CaptureTypeBinary, + client: MockDataSyncServiceClient{ + T: t, + StreamingDataCaptureUploadFunc: func( + ctx context.Context, + _ ...grpc.CallOption, + ) (v1.DataSyncService_StreamingDataCaptureUploadClient, error) { + mockStreamingClient := &ClientStreamingMock[ + *v1.StreamingDataCaptureUploadRequest, + *v1.StreamingDataCaptureUploadResponse, + ]{ + T: t, + SendFunc: func(in *v1.StreamingDataCaptureUploadRequest) error { + idx := largeGetImagesReqsIdx.Load() + t.Logf("writing to index: %d", idx) + ch := largeGetImagesReqs[idx] + select { + case <-testCtx.Done(): + t.Error("timeout") + t.FailNow() + case ch <- in: + } + return nil + }, + CloseAndRecvFunc: func() (*v1.StreamingDataCaptureUploadResponse, error) { + close(largeGetImagesReqs[largeGetImagesReqsIdx.Add(1)-1]) + return &v1.StreamingDataCaptureUploadResponse{}, nil + }, + } + return mockStreamingClient, nil + }, + }, + api: camera.API, + name: "camera-1", + method: "GetImages", + tags: []string{"tag1", "tag2"}, + expectedUploads: []upload{ + { + md: &v1.UploadMetadata{ + ComponentName: "camera-1", + ComponentType: camera.API.String(), + FileExtension: ".jpeg", + MethodName: "GetImages", + PartId: partID, + Tags: []string{"tag1", "tag2"}, + Type: 
v1.DataType_DATA_TYPE_BINARY_SENSOR, + }, + sd: []*v1.SensorData{largeGetImagesResult.ToProto()[0]}, + }, + { + md: &v1.UploadMetadata{ + ComponentName: "camera-1", + ComponentType: camera.API.String(), + FileExtension: ".png", + MethodName: "GetImages", + PartId: partID, + Tags: []string{"tag1", "tag2"}, + Type: v1.DataType_DATA_TYPE_BINARY_SENSOR, + }, + sd: []*v1.SensorData{largeGetImagesResult.ToProto()[1]}, + }, + }, + steamingReqs: largeGetImagesReqs, + }, + { + testName: "small vision.CaptureAllFromCamera", + captureResults: smallVisionCaptureAllFromCamera, + captureType: data.CaptureTypeBinary, + client: MockDataSyncServiceClient{ + T: t, + DataCaptureUploadFunc: func( + ctx context.Context, + in *v1.DataCaptureUploadRequest, + opts ...grpc.CallOption, + ) (*v1.DataCaptureUploadResponse, error) { + t.Log("called") + select { + case <-testCtx.Done(): + t.Error("timeout") + t.FailNow() + case reqs5 <- in: + } + return &v1.DataCaptureUploadResponse{}, nil + }, + }, + api: vision.API, + name: "vision-1", + method: "CaptureAllFromCamera", + tags: []string{"tag1", "tag2"}, + expectedUploads: []upload{ + { + md: &v1.UploadMetadata{ + ComponentName: "vision-1", + ComponentType: vision.API.String(), + FileExtension: ".jpeg", + MethodName: "CaptureAllFromCamera", + PartId: partID, + Tags: []string{"tag1", "tag2"}, + Type: v1.DataType_DATA_TYPE_BINARY_SENSOR, + }, + sd: smallVisionCaptureAllFromCamera.ToProto(), + }, + }, + unaryReqs: reqs5, + }, + { + testName: "large vision.CaptureAllFromCamera", + captureResults: largeVisionCaptureAllFromCamera, + captureType: data.CaptureTypeBinary, + client: MockDataSyncServiceClient{ + T: t, + StreamingDataCaptureUploadFunc: func( + ctx context.Context, + _ ...grpc.CallOption, + ) (v1.DataSyncService_StreamingDataCaptureUploadClient, error) { + mockStreamingClient := &ClientStreamingMock[ + *v1.StreamingDataCaptureUploadRequest, + *v1.StreamingDataCaptureUploadResponse, + ]{ + T: t, + SendFunc: func(in *v1.StreamingDataCaptureUploadRequest) error { + idx := largeVisionCaptureAllFromCameraIdx.Load() + t.Logf("writing to index: %d", idx) + ch := largeVisionCaptureAllFromCameraReqs[idx] + select { + case <-testCtx.Done(): + t.Error("timeout") + t.FailNow() + case ch <- in: + } + return nil + }, + CloseAndRecvFunc: func() (*v1.StreamingDataCaptureUploadResponse, error) { + close(largeVisionCaptureAllFromCameraReqs[largeVisionCaptureAllFromCameraIdx.Add(1)-1]) + return &v1.StreamingDataCaptureUploadResponse{}, nil + }, + } + return mockStreamingClient, nil + }, + }, + api: vision.API, + name: "vision-1", + method: "CaptureAllFromCamera", + tags: []string{"tag1", "tag2"}, + expectedUploads: []upload{ + { + md: &v1.UploadMetadata{ + ComponentName: "vision-1", + ComponentType: vision.API.String(), + FileExtension: ".png", + MethodName: "CaptureAllFromCamera", + PartId: partID, + Tags: []string{"tag1", "tag2"}, + Type: v1.DataType_DATA_TYPE_BINARY_SENSOR, + }, + sd: largeVisionCaptureAllFromCamera.ToProto(), + }, + }, + steamingReqs: largeVisionCaptureAllFromCameraReqs, + }, + } + + tempDir := t.TempDir() + for _, tc := range tcs { + t.Run(tc.testName, func(t *testing.T) { + methodParams, err := rprotoutils.ConvertStringMapToAnyPBMap(tc.additionalParams) + test.That(t, err, test.ShouldBeNil) + md, ct := data.BuildCaptureMetadata(tc.api, tc.name, tc.method, tc.additionalParams, methodParams, tc.tags) + test.That(t, err, test.ShouldBeNil) + test.That(t, ct, test.ShouldEqual, tc.captureType) + w, err := data.NewCaptureFile(tempDir, md) + test.That(t, err, 
test.ShouldBeNil) + test.That(t, len(tc.expectedUploads), test.ShouldBeGreaterThan, 0) + for _, sd := range tc.captureResults.ToProto() { + test.That(t, w.WriteNext(sd), test.ShouldBeNil) + } + w.Flush() + w.Close() + + f, err := os.Open(strings.Replace(w.GetPath(), data.InProgressCaptureFileExt, data.CompletedCaptureFileExt, 1)) + test.That(t, err, test.ShouldBeNil) + + stat, err := f.Stat() + test.That(t, err, test.ShouldBeNil) + + test.That(t, data.IsDataCaptureFile(f), test.ShouldBeTrue) + cf, err := data.ReadCaptureFile(f) + test.That(t, err, test.ShouldBeNil) + cc := cloudConn{partID: partID, client: tc.client} + bytesUploaded, err := uploadDataCaptureFile(testCtx, cf, cc, logger) + test.That(t, err, test.ShouldBeNil) + test.That(t, bytesUploaded, test.ShouldEqual, stat.Size()) + if tc.unaryReqs != nil { + for i := 0; i < len(tc.expectedUploads); i++ { + t.Logf("unaryReqs: i: %d", i) + tc.expectedUploads[i].md.MethodParameters = methodParams + select { + case <-testCtx.Done(): + t.Error("timeout") + t.FailNow() + case req := <-tc.unaryReqs: + t.Logf("got req\n") + test.That(t, len(tc.expectedUploads[i].sd), test.ShouldEqual, 1) + test.That(t, req.Metadata.FileExtension, test.ShouldResemble, tc.expectedUploads[i].md.FileExtension) + test.That(t, req.Metadata, test.ShouldResemble, tc.expectedUploads[i].md) + compareSensorData(t, tc.captureType.ToProto(), req.SensorContents, tc.expectedUploads[i].sd) + } + } + } else { + test.That(t, len(tc.steamingReqs), test.ShouldEqual, len(tc.expectedUploads)) + for i := 0; i < len(tc.expectedUploads); i++ { + test.That(t, len(tc.expectedUploads[i].sd), test.ShouldEqual, 1) + md := tc.expectedUploads[i].md + sd := tc.expectedUploads[i].sd[0] + md.MethodParameters = methodParams + var gotHeader bool + var data []byte + for req := range tc.steamingReqs[i] { + if !gotHeader { + test.That(t, req.GetMetadata().UploadMetadata, test.ShouldResemble, md) + test.That(t, req.GetMetadata().SensorMetadata, test.ShouldResemble, sd.GetMetadata()) + gotHeader = true + continue + } + data = append(data, req.GetData()...) + } + test.That(t, gotHeader, test.ShouldBeTrue) + test.That(t, data, test.ShouldResemble, sd.GetBinary()) + } + } + }) + } +} + +func compareSensorData(t *testing.T, dataType v1.DataType, act, exp []*v1.SensorData) { + t.Helper() + if len(act) == 0 && len(exp) == 0 { + return + } + + // Sort both by time requested. 
+ sort.SliceStable(act, func(i, j int) bool { + diffRequested := act[j].GetMetadata().GetTimeRequested().AsTime().Sub(act[i].GetMetadata().GetTimeRequested().AsTime()) + switch { + case diffRequested > 0: + return true + case diffRequested == 0: + return act[j].GetMetadata().GetTimeReceived().AsTime().Sub(act[i].GetMetadata().GetTimeReceived().AsTime()) > 0 + default: + return false + } + }) + sort.SliceStable(exp, func(i, j int) bool { + diffRequested := exp[j].GetMetadata().GetTimeRequested().AsTime().Sub(exp[i].GetMetadata().GetTimeRequested().AsTime()) + switch { + case diffRequested > 0: + return true + case diffRequested == 0: + return exp[j].GetMetadata().GetTimeReceived().AsTime().Sub(exp[i].GetMetadata().GetTimeReceived().AsTime()) > 0 + default: + return false + } + }) + + test.That(t, len(act), test.ShouldEqual, len(exp)) + + for i := range act { + test.That(t, act[i].GetMetadata(), test.ShouldResemble, exp[i].GetMetadata()) + if dataType == v1.DataType_DATA_TYPE_TABULAR_SENSOR { + test.That(t, act[i].GetStruct(), test.ShouldResemble, exp[i].GetStruct()) + } else { + test.That(t, act[i].GetBinary(), test.ShouldResemble, exp[i].GetBinary()) + } + } +} diff --git a/services/slam/collectors.go b/services/slam/collectors.go index 185ea71eaf2..ea8323c98b8 100644 --- a/services/slam/collectors.go +++ b/services/slam/collectors.go @@ -2,6 +2,7 @@ package slam import ( "context" + "time" pb "go.viam.com/api/service/slam/v1" "google.golang.org/protobuf/types/known/anypb" @@ -33,12 +34,14 @@ func newPositionCollector(resource interface{}, params data.CollectorParams) (da return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult pose, err := slam.Position(ctx) if err != nil { - return nil, data.FailedToReadErr(params.ComponentName, position.String(), err) + return res, data.FailedToReadErr(params.ComponentName, position.String(), err) } - return &pb.GetPositionResponse{Pose: spatialmath.PoseToProtobuf(pose)}, nil + return data.NewTabularCaptureResult(timeRequested, &pb.GetPositionResponse{Pose: spatialmath.PoseToProtobuf(pose)}) }) return data.NewCollector(cFunc, params) } @@ -49,19 +52,28 @@ func newPointCloudMapCollector(resource interface{}, params data.CollectorParams return nil, err } - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult // edited maps do not need to be captured because they should not be modified f, err := slam.PointCloudMap(ctx, false) if err != nil { - return nil, data.FailedToReadErr(params.ComponentName, pointCloudMap.String(), err) + return res, data.FailedToReadErr(params.ComponentName, pointCloudMap.String(), err) } pcd, err := HelperConcatenateChunksToFull(f) if err != nil { - return nil, data.FailedToReadErr(params.ComponentName, pointCloudMap.String(), err) + return res, data.FailedToReadErr(params.ComponentName, pointCloudMap.String(), err) } - return pcd, nil + ts := data.Timestamps{ + TimeRequested: timeRequested, + TimeReceived: time.Now(), + } + return data.NewBinaryCaptureResult(ts, []data.Binary{{ + Payload: pcd, + MimeType: data.MimeTypeApplicationPcd, + }}), nil }) return data.NewCollector(cFunc, params) } diff 
--git a/services/slam/collectors_test.go b/services/slam/collectors_test.go index b58c4ae56fb..5e572309751 100644 --- a/services/slam/collectors_test.go +++ b/services/slam/collectors_test.go @@ -34,13 +34,15 @@ func TestCollectors(t *testing.T) { tests := []struct { name string collector data.CollectorConstructor - expected *datasyncpb.SensorData + expected []*datasyncpb.SensorData + datatype data.CaptureType slam slam.Service }{ { name: "PositionCollector returns non-empty position responses", collector: slam.NewPositionCollector, - expected: &datasyncpb.SensorData{ + datatype: data.CaptureTypeTabular, + expected: []*datasyncpb.SensorData{{ Metadata: &datasyncpb.SensorMetadata{}, Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ "pose": map[string]any{ @@ -53,16 +55,19 @@ func TestCollectors(t *testing.T) { "z": 3, }, })}, - }, + }}, slam: newSlamService(pcdPath), }, { name: "PointCloudMapCollector returns non-empty pointcloud responses", collector: slam.NewPointCloudMapCollector, - expected: &datasyncpb.SensorData{ - Metadata: &datasyncpb.SensorMetadata{}, - Data: &datasyncpb.SensorData_Binary{Binary: pcd}, - }, + datatype: data.CaptureTypeBinary, + expected: []*datasyncpb.SensorData{{ + Metadata: &datasyncpb.SensorMetadata{ + MimeType: datasyncpb.MimeType_MIME_TYPE_APPLICATION_PCD, + }, + Data: &datasyncpb.SensorData_Binary{Binary: pcd}, + }}, slam: newSlamService(pcdPath), }, } @@ -70,8 +75,9 @@ func TestCollectors(t *testing.T) { for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { start := time.Now() - buf := tu.NewMockBuffer() + buf := tu.NewMockBuffer(t) params := data.CollectorParams{ + DataType: tc.datatype, ComponentName: serviceName, Interval: captureInterval, Logger: logging.NewTestLogger(t), diff --git a/services/vision/collectors.go b/services/vision/collectors.go index 3bb875e4e00..0bf9830d4e0 100644 --- a/services/vision/collectors.go +++ b/services/vision/collectors.go @@ -2,10 +2,9 @@ package vision import ( "context" + "time" "github.com/pkg/errors" - servicepb "go.viam.com/api/service/vision/v1" - "go.viam.com/utils/protoutils" "google.golang.org/protobuf/types/known/anypb" "google.golang.org/protobuf/types/known/wrapperspb" @@ -29,12 +28,6 @@ func (m method) String() string { return "Unknown" } -type extraFields struct { - Height int - Width int - MimeType string -} - type methodParamsDecoded struct { cameraName string mimeType string @@ -53,81 +46,90 @@ func newCaptureAllFromCameraCollector(resource interface{}, params data.Collecto } cameraName := decodedParams.cameraName - mimeType := decodedParams.mimeType minConfidenceScore := decodedParams.minConfidence - cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (interface{}, error) { + cFunc := data.CaptureFunc(func(ctx context.Context, _ map[string]*anypb.Any) (data.CaptureResult, error) { + timeRequested := time.Now() + var res data.CaptureResult visCaptureOptions := viscapture.CaptureOptions{ ReturnImage: true, ReturnDetections: true, ReturnClassifications: true, - ReturnObject: true, } visCapture, err := vision.CaptureAllFromCamera(ctx, cameraName, visCaptureOptions, data.FromDMExtraMap) if err != nil { // A modular filter component can be created to filter the readings from a service. The error ErrNoCaptureToStore // is used in the datamanager to exclude readings from being captured and stored. 
if errors.Is(err, data.ErrNoCaptureToStore) { - return nil, err + return res, err } - return nil, data.FailedToReadErr(params.ComponentName, captureAllFromCamera.String(), err) + return res, data.FailedToReadErr(params.ComponentName, captureAllFromCamera.String(), err) + } + + if visCapture.Image == nil { + return res, errors.New("vision service didn't return an image") } protoImage, err := imageToProto(ctx, visCapture.Image, cameraName) if err != nil { - return nil, err + return res, err } - filteredDetections := []objectdetection.Detection{} - for _, elem := range visCapture.Detections { - if elem.Score() >= minConfidenceScore { - filteredDetections = append(filteredDetections, elem) - } + var width, height int + if visCapture.Image != nil { + width = visCapture.Image.Bounds().Dx() + height = visCapture.Image.Bounds().Dy() } - protoDetections := detsToProto(filteredDetections) - - filteredClassifications := classification.Classifications{} - for _, elem := range visCapture.Classifications { - if elem.Score() >= minConfidenceScore { - filteredClassifications = append(filteredClassifications, elem) + filteredBoundingBoxes := []data.BoundingBox{} + for _, d := range visCapture.Detections { + if score := d.Score(); score >= minConfidenceScore { + filteredBoundingBoxes = append(filteredBoundingBoxes, toDataBoundingBox(d, width, height)) } } - protoClassifications := clasToProto(filteredClassifications) - - protoObjects, err := segmentsToProto(cameraName, visCapture.Objects) - if err != nil { - return nil, err - } - - // We need this to pass in the height & width of an image in order to calculate - // the normalized coordinate values of any bounding boxes. We also need the - // mimeType to appropriately upload the image. - bounds := extraFields{} - - if visCapture.Image != nil { - bounds = extraFields{ - Height: visCapture.Image.Bounds().Dy(), - Width: visCapture.Image.Bounds().Dx(), - MimeType: mimeType, + filteredClassifications := []data.Classification{} + for _, c := range visCapture.Classifications { + if score := c.Score(); score >= minConfidenceScore { + filteredClassifications = append(filteredClassifications, toDataClassification(c)) } } - boundsPb, err := protoutils.StructToStructPb(bounds) - if err != nil { - return nil, err + ts := data.Timestamps{ + TimeRequested: timeRequested, + TimeReceived: time.Now(), } - - return &servicepb.CaptureAllFromCameraResponse{ - Image: protoImage, Detections: protoDetections, Classifications: protoClassifications, - Objects: protoObjects, Extra: boundsPb, - }, nil + return data.NewBinaryCaptureResult(ts, []data.Binary{{ + Payload: protoImage.Image, + MimeType: data.CameraFormatToMimeType(protoImage.Format), + Annotations: data.Annotations{ + BoundingBoxes: filteredBoundingBoxes, + Classifications: filteredClassifications, + }, + }}), nil }) return data.NewCollector(cFunc, params) } +func toDataClassification(c classification.Classification) data.Classification { + confidence := c.Score() + return data.Classification{Label: c.Label(), Confidence: &confidence} +} + +func toDataBoundingBox(d objectdetection.Detection, width, height int) data.BoundingBox { + confidence := d.Score() + bbox := d.BoundingBox() + return data.BoundingBox{ + Label: d.Label(), + Confidence: &confidence, + XMinNormalized: float64(bbox.Min.X) / float64(width), + XMaxNormalized: float64(bbox.Max.X) / float64(width), + YMinNormalized: float64(bbox.Min.Y) / float64(height), + YMaxNormalized: float64(bbox.Max.Y) / float64(height), + } +} + func 
additionalParamExtraction(methodParams map[string]*anypb.Any) (methodParamsDecoded, error) { cameraParam := methodParams["camera_name"] diff --git a/services/vision/collectors_test.go b/services/vision/collectors_test.go index f9cac5e0d35..671b67516ff 100644 --- a/services/vision/collectors_test.go +++ b/services/vision/collectors_test.go @@ -11,8 +11,8 @@ import ( "time" "github.com/benbjohnson/clock" + datapb "go.viam.com/api/app/data/v1" datasyncpb "go.viam.com/api/app/datasync/v1" - camerapb "go.viam.com/api/component/camera/v1" "go.viam.com/test" "google.golang.org/protobuf/reflect/protoreflect" "google.golang.org/protobuf/types/known/anypb" @@ -142,108 +142,54 @@ func TestCollectors(t *testing.T) { test.That(t, err, test.ShouldBeNil) viamLogoJpeg, err := io.ReadAll(base64.NewDecoder(base64.StdEncoding, bytes.NewReader(viamLogoJpegB64))) test.That(t, err, test.ShouldBeNil) - viamLogoJpegAsInts := []any{} - for _, b := range viamLogoJpeg { - viamLogoJpegAsInts = append(viamLogoJpegAsInts, int(b)) - } - img := rimage.NewLazyEncodedImage(viamLogoJpeg, utils.MimeTypeJPEG) // 32 x 32 image test.That(t, img.Bounds().Dx(), test.ShouldEqual, 32) test.That(t, img.Bounds().Dy(), test.ShouldEqual, 32) - + bboxConf := 0.95 + classConf := 0.85 tests := []struct { name string collector data.CollectorConstructor - expected *datasyncpb.SensorData + expected []*datasyncpb.SensorData vision visionservice.Service }{ { name: "CaptureAllFromCameraCollector returns non-empty CaptureAllFromCameraResp", collector: visionservice.NewCaptureAllFromCameraCollector, - expected: &datasyncpb.SensorData{ - Metadata: &datasyncpb.SensorMetadata{}, - Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ - "image": map[string]any{ - "source_name": "camera-1", - "format": int(camerapb.Format_FORMAT_JPEG), - "image": viamLogoJpegAsInts, - }, - "classifications": []any{ - map[string]any{ - "confidence": 0.85, - "class_name": "cat", - }, - }, - "detections": []any{ - map[string]any{ - "confidence": 0.95, - "class_name": "cat", - "x_min": 10, - "y_min": 20, - "x_max": 110, - "y_max": 120, - }, - }, - "objects": []any{}, - "extra": map[string]any{ - "fields": map[string]any{ - "Height": map[string]any{ - "Kind": map[string]any{ - "NumberValue": 32, - }, - }, - "Width": map[string]any{ - "Kind": map[string]any{ - "NumberValue": 32, - }, - }, - "MimeType": map[string]any{ - "Kind": map[string]any{ - "StringValue": utils.MimeTypeJPEG, - }, + expected: []*datasyncpb.SensorData{{ + Metadata: &datasyncpb.SensorMetadata{ + MimeType: datasyncpb.MimeType_MIME_TYPE_IMAGE_JPEG, + Annotations: &datapb.Annotations{ + Bboxes: []*datapb.BoundingBox{ + { + Label: "cat", + XMinNormalized: 0.3125, + YMinNormalized: 0.625, + XMaxNormalized: 3.4375, + YMaxNormalized: 3.75, + Confidence: &bboxConf, }, }, + Classifications: []*datapb.Classification{{ + Label: "cat", + Confidence: &classConf, + }}, }, - })}, - }, + }, + Data: &datasyncpb.SensorData_Binary{Binary: viamLogoJpeg}, + }}, vision: newVisionService(img), }, { name: "CaptureAllFromCameraCollector w/ Classifications & Detections < 0.5 returns empty CaptureAllFromCameraResp", collector: visionservice.NewCaptureAllFromCameraCollector, - expected: &datasyncpb.SensorData{ - Metadata: &datasyncpb.SensorMetadata{}, - Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{ - "image": map[string]any{ - "source_name": "camera-1", - "format": 3, - "image": viamLogoJpegAsInts, - }, - "classifications": []any{}, - "detections": []any{}, - 
"objects": []any{}, - "extra": map[string]any{ - "fields": map[string]any{ - "Height": map[string]any{ - "Kind": map[string]any{ - "NumberValue": 32, - }, - }, - "Width": map[string]any{ - "Kind": map[string]any{ - "NumberValue": 32, - }, - }, - "MimeType": map[string]any{ - "Kind": map[string]any{ - "StringValue": utils.MimeTypeJPEG, - }, - }, - }, - }, - })}, - }, + expected: []*datasyncpb.SensorData{{ + Metadata: &datasyncpb.SensorMetadata{ + MimeType: datasyncpb.MimeType_MIME_TYPE_IMAGE_JPEG, + }, + Data: &datasyncpb.SensorData_Binary{Binary: viamLogoJpeg}, + }}, vision: newVisionService2(img), }, } @@ -251,8 +197,9 @@ func TestCollectors(t *testing.T) { for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { start := time.Now() - buf := tu.NewMockBuffer() + buf := tu.NewMockBuffer(t) params := data.CollectorParams{ + DataType: data.CaptureTypeBinary, ComponentName: serviceName, Interval: captureInterval, Logger: logging.NewTestLogger(t), diff --git a/testutils/file_utils.go b/testutils/file_utils.go index ece95ec9d40..a160fa45141 100644 --- a/testutils/file_utils.go +++ b/testutils/file_utils.go @@ -103,20 +103,22 @@ func BuildTempModuleWithFirstRun(tb testing.TB, modDir string) string { // MockBuffer is a buffered writer that just appends data to an array to read // without needing a real file system for testing. type MockBuffer struct { + t *testing.T ctx context.Context cancel context.CancelFunc - Writes chan *v1.SensorData + Writes chan []*v1.SensorData } // NewMockBuffer returns a mock buffer. // This needs to be closed before the collector, otherwise the // collector's Close method will block. -func NewMockBuffer() *MockBuffer { +func NewMockBuffer(t *testing.T) *MockBuffer { c, cancel := context.WithCancel(context.Background()) return &MockBuffer{ + t: t, ctx: c, cancel: cancel, - Writes: make(chan *v1.SensorData, 1), + Writes: make(chan []*v1.SensorData, 1), } } @@ -147,29 +149,34 @@ func CheckMockBufferWrites( t *testing.T, ctx context.Context, start time.Time, - writes chan *v1.SensorData, - expected *v1.SensorData, + writes chan []*v1.SensorData, + expecteds []*v1.SensorData, ) { select { case <-ctx.Done(): t.Error("timeout") t.FailNow() - case write := <-writes: + case writes := <-writes: end := time.Now() - // nil out to make comparable - requestedAt := write.Metadata.TimeRequested.AsTime() - receivedAt := write.Metadata.TimeReceived.AsTime() - test.That(t, start, test.ShouldHappenOnOrBefore, requestedAt) - test.That(t, requestedAt, test.ShouldHappenOnOrBefore, receivedAt) - test.That(t, receivedAt, test.ShouldHappenOnOrBefore, end) - // nil out to make comparable - write.Metadata.TimeRequested = nil - write.Metadata.TimeReceived = nil - test.That(t, write.GetMetadata(), test.ShouldResemble, expected.GetMetadata()) - if isBinary(write) { - test.That(t, write.GetBinary(), test.ShouldResemble, expected.GetBinary()) - } else { - test.That(t, write.GetStruct(), test.ShouldResemble, expected.GetStruct()) + test.That(t, len(writes), test.ShouldEqual, len(expecteds)) + for i, expected := range expecteds { + write := writes[i] + requestedAt := write.Metadata.TimeRequested.AsTime() + receivedAt := write.Metadata.TimeReceived.AsTime() + test.That(t, start, test.ShouldHappenOnOrBefore, requestedAt) + test.That(t, requestedAt, test.ShouldHappenOnOrBefore, receivedAt) + test.That(t, receivedAt, test.ShouldHappenOnOrBefore, end) + test.That(t, len(expecteds), test.ShouldEqual, len(writes)) + // nil out to make comparable + // nil out to make comparable + 
write.Metadata.TimeRequested = nil + write.Metadata.TimeReceived = nil + test.That(t, write.GetMetadata(), test.ShouldResemble, expected.GetMetadata()) + if isBinary(write) { + test.That(t, write.GetBinary(), test.ShouldResemble, expected.GetBinary()) + } else { + test.That(t, write.GetStruct(), test.ShouldResemble, expected.GetStruct()) + } } } } @@ -179,13 +186,37 @@ func (m *MockBuffer) Close() { m.cancel() } -// Write adds the item to the channel. -func (m *MockBuffer) Write(item *v1.SensorData) error { +// WriteBinary writes binary sensor data. +func (m *MockBuffer) WriteBinary(items []*v1.SensorData) error { + if err := m.ctx.Err(); err != nil { + return err + } + for i, item := range items { + if !isBinary(item) { + m.t.Errorf("MockBuffer.WriteBinary called with non binary data. index: %d, items: %#v\n", i, items) + m.t.FailNow() + } + } + select { + case m.Writes <- items: + case <-m.ctx.Done(): + } + return nil +} + +// WriteTabular writes tabular sensor data to the Writes channel. +func (m *MockBuffer) WriteTabular(items []*v1.SensorData) error { if err := m.ctx.Err(); err != nil { return err } + for i, item := range items { + if isBinary(item) { + m.t.Errorf("MockBuffer.WriteTabular called with binary data. index: %d, items: %#v\n", i, items) + m.t.FailNow() + } + } select { - case m.Writes <- item: + case m.Writes <- items: case <-m.ctx.Done(): } return nil
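[Editor's note] The reworked MockBuffer takes a slice of SensorData per write and fails the test if the payload kind does not match the method used (WriteBinary vs. WriteTabular). A minimal usage sketch, assuming the testutils package is imported as tu (go.viam.com/rdk/testutils) and datasyncpb aliases go.viam.com/api/app/datasync/v1, as in the collector tests above; the test name is hypothetical:

    package sketch_test

    import (
    	"context"
    	"testing"
    	"time"

    	datasyncpb "go.viam.com/api/app/datasync/v1"
    	"go.viam.com/test"
    	"google.golang.org/protobuf/types/known/timestamppb"

    	tu "go.viam.com/rdk/testutils"
    )

    func TestMockBufferSketch(t *testing.T) {
    	buf := tu.NewMockBuffer(t)
    	defer buf.Close()

    	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
    	defer cancel()

    	start := time.Now()
    	// Each write carries a slice of readings; WriteTabular fails the test on binary payloads.
    	err := buf.WriteTabular([]*datasyncpb.SensorData{{
    		Metadata: &datasyncpb.SensorMetadata{
    			TimeRequested: timestamppb.New(start),
    			TimeReceived:  timestamppb.New(time.Now()),
    		},
    		Data: &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{"a": 1.0})},
    	}})
    	test.That(t, err, test.ShouldBeNil)

    	// CheckMockBufferWrites pops one buffered write, checks that its timestamps fall
    	// between start and now, nils them out, and compares the rest against the expected readings.
    	tu.CheckMockBufferWrites(t, ctx, start, buf.Writes, []*datasyncpb.SensorData{{
    		Metadata: &datasyncpb.SensorMetadata{},
    		Data:     &datasyncpb.SensorData_Struct{Struct: tu.ToStructPBStruct(t, map[string]any{"a": 1.0})},
    	}})
    }
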