From c8c9e779ab9d8af5f42688c4d7f3284914822477 Mon Sep 17 00:00:00 2001
From: amquake
Date: Sun, 15 Oct 2023 10:44:47 -0700
Subject: [PATCH] [photon-core] 2D Detection data accuracy (#896)

Use calibration data for 2d target info when available (principal point, FOV)
Correct perspective distortion in 2d yaw/pitch info
---
 .../vision/frame/FrameStaticProperties.java   | 55 ++++---
 .../vision/target/TargetCalculations.java     | 31 +++-
 .../vision/target/TrackedTarget.java          | 55 ++++---
 .../vision/target/TargetCalculationsTest.java | 147 +++++++++++++-----
 .../org/photonvision/VisionSystemSimTest.java |   8 +-
 5 files changed, 200 insertions(+), 96 deletions(-)

diff --git a/photon-core/src/main/java/org/photonvision/vision/frame/FrameStaticProperties.java b/photon-core/src/main/java/org/photonvision/vision/frame/FrameStaticProperties.java
index f03db3a60e..cedda04d22 100644
--- a/photon-core/src/main/java/org/photonvision/vision/frame/FrameStaticProperties.java
+++ b/photon-core/src/main/java/org/photonvision/vision/frame/FrameStaticProperties.java
@@ -39,7 +39,7 @@ public class FrameStaticProperties {
      * Instantiates a new Frame static properties.
      *
      * @param mode The Video Mode of the camera.
-     * @param fov The fov of the image.
+     * @param fov The FOV (Field Of Vision) of the image in degrees.
      */
     public FrameStaticProperties(VideoMode mode, double fov, CameraCalibrationCoefficients cal) {
         this(mode != null ? mode.width : 1, mode != null ? mode.height : 1, fov, cal);
@@ -48,9 +48,9 @@ public FrameStaticProperties(VideoMode mode, double fov, CameraCalibrationCoeffi
     /**
      * Instantiates a new Frame static properties.
      *
-     * @param imageWidth The width of the image.
-     * @param imageHeight The width of the image.
-     * @param fov The fov of the image.
+     * @param imageWidth The width of the image in pixels.
+     * @param imageHeight The width of the image in pixels.
+     * @param fov The FOV (Field Of Vision) of the image in degrees.
     */
     public FrameStaticProperties(
             int imageWidth, int imageHeight, double fov, CameraCalibrationCoefficients cal) {
@@ -61,30 +61,47 @@ public FrameStaticProperties(
         imageArea = this.imageWidth * this.imageHeight;
 
-        // Todo -- if we have calibration, use it's center point?
-        centerX = ((double) this.imageWidth / 2) - 0.5;
-        centerY = ((double) this.imageHeight / 2) - 0.5;
-        centerPoint = new Point(centerX, centerY);
-
-        // TODO if we have calibration use it here instead
         // pinhole model calculations
-        DoubleCouple horizVertViews =
-                calculateHorizontalVerticalFoV(this.fov, this.imageWidth, this.imageHeight);
+        if (cameraCalibration != null && cameraCalibration.getCameraIntrinsicsMat() != null) {
+            // Use calibration data
+            var camIntrinsics = cameraCalibration.getCameraIntrinsicsMat();
+            centerX = camIntrinsics.get(0, 2)[0];
+            centerY = camIntrinsics.get(1, 2)[0];
+            centerPoint = new Point(centerX, centerY);
+            horizontalFocalLength = camIntrinsics.get(0, 0)[0];
+            verticalFocalLength = camIntrinsics.get(1, 1)[0];
+        } else {
+            // No calibration data. Calculate from user provided diagonal FOV
+            centerX = (this.imageWidth / 2.0) - 0.5;
+            centerY = (this.imageHeight / 2.0) - 0.5;
+            centerPoint = new Point(centerX, centerY);
 
-        horizontalFocalLength = this.imageWidth / (2 * Math.tan(horizVertViews.getFirst() / 2));
-        verticalFocalLength = this.imageHeight / (2 * Math.tan(horizVertViews.getSecond() / 2));
+            DoubleCouple horizVertViews =
+                    calculateHorizontalVerticalFoV(this.fov, this.imageWidth, this.imageHeight);
+            double horizFOV = Math.toRadians(horizVertViews.getFirst());
+            double vertFOV = Math.toRadians(horizVertViews.getSecond());
+            horizontalFocalLength = (this.imageWidth / 2.0) / Math.tan(horizFOV / 2.0);
+            verticalFocalLength = (this.imageHeight / 2.0) / Math.tan(vertFOV / 2.0);
+        }
     }
 
+    /**
+     * Calculates the horizontal and vertical FOV components from a given diagonal FOV and image size.
+     *
+     * @param diagonalFoV Diagonal FOV in degrees
+     * @param imageWidth Image width in pixels
+     * @param imageHeight Image height in pixels
+     * @return Horizontal and vertical FOV in degrees
+     */
     public static DoubleCouple calculateHorizontalVerticalFoV(
             double diagonalFoV, int imageWidth, int imageHeight) {
-        double diagonalView = Math.toRadians(diagonalFoV);
+        diagonalFoV = Math.toRadians(diagonalFoV);
         double diagonalAspect = Math.hypot(imageWidth, imageHeight);
         double horizontalView =
-                Math.atan(Math.tan(diagonalView / 2) * (imageWidth / diagonalAspect)) * 2;
-        double verticalView =
-                Math.atan(Math.tan(diagonalView / 2) * (imageHeight / diagonalAspect)) * 2;
+                Math.atan(Math.tan(diagonalFoV / 2) * (imageWidth / diagonalAspect)) * 2;
+        double verticalView = Math.atan(Math.tan(diagonalFoV / 2) * (imageHeight / diagonalAspect)) * 2;
 
-        return new DoubleCouple(horizontalView, verticalView);
+        return new DoubleCouple(Math.toDegrees(horizontalView), Math.toDegrees(verticalView));
     }
 }
diff --git a/photon-core/src/main/java/org/photonvision/vision/target/TargetCalculations.java b/photon-core/src/main/java/org/photonvision/vision/target/TargetCalculations.java
index 60b278aae4..5e64237443 100644
--- a/photon-core/src/main/java/org/photonvision/vision/target/TargetCalculations.java
+++ b/photon-core/src/main/java/org/photonvision/vision/target/TargetCalculations.java
@@ -23,14 +23,29 @@
 import org.photonvision.vision.opencv.DualOffsetValues;
 
 public class TargetCalculations {
-    public static double calculateYaw(
-            double offsetCenterX, double targetCenterX, double horizontalFocalLength) {
-        return Math.toDegrees(Math.atan((offsetCenterX - targetCenterX) / horizontalFocalLength));
-    }
-
-    public static double calculatePitch(
-            double offsetCenterY, double targetCenterY, double verticalFocalLength) {
-        return -Math.toDegrees(Math.atan((offsetCenterY - targetCenterY) / verticalFocalLength));
+    /**
+     * Calculates the yaw and pitch of a point in the image. Yaw and pitch must be calculated together
+     * to account for perspective distortion. Yaw is positive right, and pitch is positive up.
+     *
+     * @param offsetCenterX The X value of the offset principal point (cx) in pixels
+     * @param targetCenterX The X value of the target's center point in pixels
+     * @param horizontalFocalLength The horizontal focal length (fx) in pixels
+     * @param offsetCenterY The Y value of the offset principal point (cy) in pixels
+     * @param targetCenterY The Y value of the target's center point in pixels
+     * @param verticalFocalLength The vertical focal length (fy) in pixels
+     * @return The yaw and pitch from the principal axis to the target center, in degrees.
+     */
+    public static DoubleCouple calculateYawPitch(
+            double offsetCenterX,
+            double targetCenterX,
+            double horizontalFocalLength,
+            double offsetCenterY,
+            double targetCenterY,
+            double verticalFocalLength) {
+        double yaw = Math.atan((targetCenterX - offsetCenterX) / horizontalFocalLength);
+        double pitch =
+                Math.atan((offsetCenterY - targetCenterY) / (verticalFocalLength / Math.cos(yaw)));
+        return new DoubleCouple(Math.toDegrees(yaw), Math.toDegrees(pitch));
     }
 
     public static double calculateSkew(boolean isLandscape, RotatedRect minAreaRect) {
diff --git a/photon-core/src/main/java/org/photonvision/vision/target/TrackedTarget.java b/photon-core/src/main/java/org/photonvision/vision/target/TrackedTarget.java
index 72c5cdf600..922ec5e6ec 100644
--- a/photon-core/src/main/java/org/photonvision/vision/target/TrackedTarget.java
+++ b/photon-core/src/main/java/org/photonvision/vision/target/TrackedTarget.java
@@ -73,13 +73,16 @@ public TrackedTarget(
             TargetCalculationParameters params) {
         m_targetOffsetPoint = new Point(tagDetection.getCenterX(), tagDetection.getCenterY());
         m_robotOffsetPoint = new Point();
-
-        m_pitch =
-                TargetCalculations.calculatePitch(
-                        tagDetection.getCenterY(), params.cameraCenterPoint.y, params.verticalFocalLength);
-        m_yaw =
-                TargetCalculations.calculateYaw(
-                        tagDetection.getCenterX(), params.cameraCenterPoint.x, params.horizontalFocalLength);
+        var yawPitch =
+                TargetCalculations.calculateYawPitch(
+                        params.cameraCenterPoint.x,
+                        tagDetection.getCenterX(),
+                        params.horizontalFocalLength,
+                        params.cameraCenterPoint.y,
+                        tagDetection.getCenterY(),
+                        params.verticalFocalLength);
+        m_yaw = yawPitch.getFirst();
+        m_pitch = yawPitch.getSecond();
 
         var bestPose = new Transform3d();
         var altPose = new Transform3d();
@@ -138,13 +141,16 @@ public TrackedTarget(
     public TrackedTarget(ArucoDetectionResult result, TargetCalculationParameters params) {
         m_targetOffsetPoint = new Point(result.getCenterX(), result.getCenterY());
         m_robotOffsetPoint = new Point();
-
-        m_pitch =
-                TargetCalculations.calculatePitch(
-                        result.getCenterY(), params.cameraCenterPoint.y, params.verticalFocalLength);
-        m_yaw =
-                TargetCalculations.calculateYaw(
-                        result.getCenterX(), params.cameraCenterPoint.x, params.horizontalFocalLength);
+        var yawPitch =
+                TargetCalculations.calculateYawPitch(
+                        params.cameraCenterPoint.x,
+                        result.getCenterX(),
+                        params.horizontalFocalLength,
+                        params.cameraCenterPoint.y,
+                        result.getCenterY(),
+                        params.verticalFocalLength);
+        m_yaw = yawPitch.getFirst();
+        m_pitch = yawPitch.getSecond();
 
         double[] xCorners = result.getxCorners();
         double[] yCorners = result.getyCorners();
@@ -246,7 +252,7 @@ public MatOfPoint2f getApproximateBoundingPolygon() {
     }
 
     public void calculateValues(TargetCalculationParameters params) {
-        // this MUST happen in this exact order!
+        // this MUST happen in this exact order! (TODO: document why)
         m_targetOffsetPoint =
                 TargetCalculations.calculateTargetOffsetPoint(
                         params.isLandscape, params.targetOffsetPointEdge, getMinAreaRect());
@@ -257,13 +263,18 @@ public void calculateValues(TargetCalculationParameters params) {
                         params.dualOffsetValues,
                         params.robotOffsetPointMode);
 
-        // order of this stuff doesn't matter though
-        m_pitch =
-                TargetCalculations.calculatePitch(
-                        m_targetOffsetPoint.y, m_robotOffsetPoint.y, params.verticalFocalLength);
-        m_yaw =
-                TargetCalculations.calculateYaw(
-                        m_targetOffsetPoint.x, m_robotOffsetPoint.x, params.horizontalFocalLength);
+        // order of this stuff doesnt matter though
+        var yawPitch =
+                TargetCalculations.calculateYawPitch(
+                        m_robotOffsetPoint.x,
+                        m_targetOffsetPoint.x,
+                        params.horizontalFocalLength,
+                        m_robotOffsetPoint.y,
+                        m_targetOffsetPoint.y,
+                        params.verticalFocalLength);
+        m_yaw = yawPitch.getFirst();
+        m_pitch = yawPitch.getSecond();
+
         m_area = m_mainContour.getMinAreaRect().size.area() / params.imageArea * 100;
         m_skew = TargetCalculations.calculateSkew(params.isLandscape, getMinAreaRect());
diff --git a/photon-core/src/test/java/org/photonvision/vision/target/TargetCalculationsTest.java b/photon-core/src/test/java/org/photonvision/vision/target/TargetCalculationsTest.java
index 0dd36d90e9..7b23d3a600 100644
--- a/photon-core/src/test/java/org/photonvision/vision/target/TargetCalculationsTest.java
+++ b/photon-core/src/test/java/org/photonvision/vision/target/TargetCalculationsTest.java
@@ -17,14 +17,19 @@ package org.photonvision.vision.target;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
+import edu.wpi.first.math.geometry.Rotation3d;
+import edu.wpi.first.math.geometry.Translation3d;
+import java.util.stream.Stream;
 import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
-import org.opencv.core.MatOfPoint2f;
-import org.opencv.core.Point;
-import org.opencv.core.RotatedRect;
-import org.opencv.core.Size;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
+import org.opencv.calib3d.Calib3d;
+import org.opencv.core.*;
 import org.opencv.imgproc.Imgproc;
 import org.photonvision.common.util.TestUtils;
 import org.photonvision.common.util.numbers.DoubleCouple;
@@ -33,9 +38,9 @@
 
 public class TargetCalculationsTest {
-    private static final Size imageSize = new Size(800, 600);
-    private static final Point imageCenterPoint =
-            new Point(imageSize.width / 2, imageSize.height / 2);
+    private static Size imageSize = new Size(800, 600);
+    private static Point imageCenterPoint =
+            new Point(imageSize.width / 2.0 - 0.5, imageSize.height / 2.0 - 0.5);
     private static final double diagFOV = Math.toRadians(70.0);
     private static final FrameStaticProperties props =
@@ -52,43 +57,104 @@ public class TargetCalculationsTest {
             props.verticalFocalLength,
             imageSize.width * imageSize.height);
 
-    @BeforeEach
-    public void Init() {
+    @BeforeAll
+    public static void setup() {
         TestUtils.loadLibraries();
     }
 
     @Test
-    public void yawTest() {
-        var targetPixelOffsetX = 100;
-        var targetCenterPoint = new Point(imageCenterPoint.x + targetPixelOffsetX, imageCenterPoint.y);
-
-        var trueYaw =
-                Math.atan((imageCenterPoint.x - targetCenterPoint.x) / params.horizontalFocalLength);
-
-        var yaw =
-                TargetCalculations.calculateYaw(
-                        imageCenterPoint.x, targetCenterPoint.x, params.horizontalFocalLength);
-
-        assertEquals(Math.toDegrees(trueYaw), yaw, 0.025, "Yaw not as expected");
+    public void testYawPitchBehavior() {
+        double targetPixelOffsetX = 100;
+        double targetPixelOffsetY = 100;
+        var targetCenterPoint =
+                new Point(imageCenterPoint.x + targetPixelOffsetX, imageCenterPoint.y + targetPixelOffsetY);
+
+        var targetYawPitch =
+                TargetCalculations.calculateYawPitch(
+                        imageCenterPoint.x,
+                        targetCenterPoint.x,
+                        params.horizontalFocalLength,
+                        imageCenterPoint.y,
+                        targetCenterPoint.y,
+                        params.verticalFocalLength);
+
+        assertTrue(targetYawPitch.getFirst() > 0, "Yaw is not positive right");
+        assertTrue(targetYawPitch.getSecond() < 0, "Pitch is not positive up");
+
+        var fovs =
+                FrameStaticProperties.calculateHorizontalVerticalFoV(
+                        diagFOV, (int) imageSize.width, (int) imageSize.height);
+        var maxYaw =
+                TargetCalculations.calculateYawPitch(
+                        imageCenterPoint.x,
+                        2 * imageCenterPoint.x,
+                        params.horizontalFocalLength,
+                        imageCenterPoint.y,
+                        imageCenterPoint.y,
+                        params.verticalFocalLength);
+        assertEquals(fovs.getFirst() / 2.0, maxYaw.getFirst(), 0.025, "Horizontal FOV check failed");
+        var maxPitch =
+                TargetCalculations.calculateYawPitch(
+                        imageCenterPoint.x,
+                        imageCenterPoint.x,
+                        params.horizontalFocalLength,
+                        imageCenterPoint.y,
+                        0,
+                        params.verticalFocalLength);
+        assertEquals(fovs.getSecond() / 2.0, maxPitch.getSecond(), 0.025, "Vertical FOV check failed");
     }
 
-    @Test
-    public void pitchTest() {
-        var targetPixelOffsetY = 100;
-        var targetCenterPoint = new Point(imageCenterPoint.x, imageCenterPoint.y + targetPixelOffsetY);
-
-        var truePitch =
-                Math.atan((imageCenterPoint.y - targetCenterPoint.y) / params.verticalFocalLength);
-
-        var pitch =
-                TargetCalculations.calculatePitch(
-                        imageCenterPoint.y, targetCenterPoint.y, params.verticalFocalLength);
+    private static Stream<Arguments> testYawPitchCalcArgs() {
+        return Stream.of(
+                // (yaw, pitch) in degrees
+                Arguments.of(0, 0),
+                Arguments.of(10, 0),
+                Arguments.of(0, 10),
+                Arguments.of(10, 10),
+                Arguments.of(-10, -10),
+                Arguments.of(30, 45),
+                Arguments.of(-45, -20));
+    }
 
-        assertEquals(Math.toDegrees(truePitch) * -1, pitch, 0.025, "Pitch not as expected");
+    private static double[] testCameraMatrix = {240, 0, 320, 0, 240, 320, 0, 0, 1};
+
+    @ParameterizedTest
+    @MethodSource("testYawPitchCalcArgs")
+    public void testYawPitchCalc(double yawDeg, double pitchDeg) {
+        Mat testCameraMat = new Mat(3, 3, CvType.CV_64F);
+        testCameraMat.put(0, 0, testCameraMatrix);
+        // Since we create this translation using the given yaw/pitch, we should see the same angles
+        // calculated
+        var targetTrl =
+                new Translation3d(1, new Rotation3d(0, Math.toRadians(pitchDeg), Math.toRadians(yawDeg)));
+        // NWU to EDN
+        var objectPoints =
+                new MatOfPoint3f(new Point3(-targetTrl.getY(), -targetTrl.getZ(), targetTrl.getX()));
+        var imagePoints = new MatOfPoint2f();
+        // Project translation into camera image
+        Calib3d.projectPoints(
+                objectPoints,
+                new MatOfDouble(0, 0, 0),
+                new MatOfDouble(0, 0, 0),
+                testCameraMat,
+                new MatOfDouble(0, 0, 0, 0, 0),
+                imagePoints);
+        var point = imagePoints.toArray()[0];
+        // Test if the target yaw/pitch calculation matches what the target was created with
+        var yawPitch =
+                TargetCalculations.calculateYawPitch(
+                        point.x,
+                        testCameraMatrix[2],
+                        testCameraMatrix[0],
+                        point.y,
+                        testCameraMatrix[5],
+                        testCameraMatrix[4]);
+        assertEquals(yawDeg, yawPitch.getFirst(), 1e-3, "Yaw calculation incorrect");
+        assertEquals(pitchDeg, yawPitch.getSecond(), 1e-3, "Pitch calculation incorrect");
     }
 
     @Test
-    public void targetOffsetTest() {
+    public void testTargetOffset() {
         Point center = new Point(0, 0);
         Size rectSize = new Size(10, 5);
         double angle = 30;
@@ -105,11 +171,6 @@ public void targetOffsetTest() {
         assertEquals(2.17, result.y, 0.1, "Target offset Y not as expected");
     }
 
-    public static void main(String[] args) {
-        TestUtils.loadLibraries();
-        new TargetCalculationsTest().targetOffsetTest();
-    }
-
     @Test
     public void testSkewCalculation() {
         // Setup
@@ -191,14 +252,14 @@ public void testSkewCalculation() {
     public void testCameraFOVCalculation() {
         final DoubleCouple glowormHorizVert =
                 FrameStaticProperties.calculateHorizontalVerticalFoV(74.8, 640, 480);
-        var gwHorizDeg = Math.toDegrees(glowormHorizVert.getFirst());
-        var gwVertDeg = Math.toDegrees(glowormHorizVert.getSecond());
+        var gwHorizDeg = glowormHorizVert.getFirst();
+        var gwVertDeg = glowormHorizVert.getSecond();
         assertEquals(62.7, gwHorizDeg, .3);
         assertEquals(49, gwVertDeg, .3);
     }
 
     @Test
-    public void robotOffsetDualTest() {
+    public void testDualOffsetCrosshair() {
         final DualOffsetValues dualOffsetValues =
                 new DualOffsetValues(
                         new Point(400, 150), 10,
diff --git a/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java b/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java
index 5ab7d00ce2..c53e006717 100644
--- a/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java
+++ b/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java
@@ -297,12 +297,12 @@ public void testYawAngles(double testYaw) {
         var res = camera.getLatestResult();
         assertTrue(res.hasTargets());
         var tgt = res.getBestTarget();
-        assertEquals(tgt.getYaw(), testYaw, kRotDeltaDeg);
+        assertEquals(testYaw, tgt.getYaw(), kRotDeltaDeg);
     }
 
     @ParameterizedTest
     @ValueSource(doubles = {-10, -5, -0, -1, -2, 5, 7, 10.23, 20.21, -19.999})
-    public void testCameraPitch(double testPitch) {
+    public void testPitchAngles(double testPitch) {
         final var targetPose =
                 new Pose3d(new Translation3d(15.98, 0, 0), new Rotation3d(0, 0, 3 * Math.PI / 4));
         final var robotPose = new Pose2d(new Translation2d(10, 0), new Rotation2d(0));
@@ -331,7 +331,7 @@ public void testCameraPitch(double testPitch) {
         assertEquals(testPitch, tgt.getPitch(), kRotDeltaDeg);
     }
 
-    private static Stream<Arguments> distCalCParamProvider() {
+    private static Stream<Arguments> testDistanceCalcArgs() {
        // Arbitrary and fairly random assortment of distances, camera pitches, and heights
         return Stream.of(
                 Arguments.of(5, -15.98, 0),
@@ -354,7 +354,7 @@ private static Stream<Arguments> distCalCParamProvider() {
     }
 
     @ParameterizedTest
-    @MethodSource("distCalCParamProvider")
+    @MethodSource("testDistanceCalcArgs")
     public void testDistanceCalc(double testDist, double testPitch, double testHeight) {
         // Assume dist along ground and tgt height the same. Iterate over other parameters.
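
Reviewer note (not part of the patch): the standalone sketch below replays the 2D math this diff introduces, namely the diagonal-FOV fallback for the principal point and focal lengths plus the perspective-corrected yaw/pitch. The 640x480 resolution, 74.8 degree diagonal FOV, sample target pixel, and class name are assumptions chosen only for illustration.

// YawPitchSketch.java -- illustrative only; mirrors the math added in this patch.
public class YawPitchSketch {
    public static void main(String[] args) {
        int width = 640;
        int height = 480;
        double diagFov = Math.toRadians(74.8);

        // Diagonal FOV -> horizontal/vertical FOV (as in calculateHorizontalVerticalFoV)
        double diagAspect = Math.hypot(width, height);
        double horizFov = 2 * Math.atan(Math.tan(diagFov / 2) * (width / diagAspect));
        double vertFov = 2 * Math.atan(Math.tan(diagFov / 2) * (height / diagAspect));

        // Pinhole focal lengths and principal point used when no calibration is available
        double fx = (width / 2.0) / Math.tan(horizFov / 2.0);
        double fy = (height / 2.0) / Math.tan(vertFov / 2.0);
        double cx = (width / 2.0) - 0.5;
        double cy = (height / 2.0) - 0.5;

        // Perspective-corrected yaw/pitch for an arbitrary target pixel (as in calculateYawPitch)
        double targetX = 500;
        double targetY = 100;
        double yaw = Math.atan((targetX - cx) / fx);
        double pitch = Math.atan((cy - targetY) / (fy / Math.cos(yaw)));
        double naivePitch = Math.atan((cy - targetY) / fy);

        System.out.printf(
                "yaw=%.2f deg, pitch=%.2f deg (naive pitch=%.2f deg)%n",
                Math.toDegrees(yaw), Math.toDegrees(pitch), Math.toDegrees(naivePitch));
    }
}

Running it prints a corrected pitch that is smaller than the naive atan((cy - targetY) / fy) value for off-axis targets, which is the perspective effect the new calculateYawPitch accounts for by dividing fy by cos(yaw).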