Skip to content

Commit

Permalink
use calibration data in 2d, fix yaw/pitch calculation
Browse files Browse the repository at this point in the history
  • Loading branch information
amquake committed Aug 19, 2023
1 parent 8397b43 commit 3ffbc87
Show file tree
Hide file tree
Showing 4 changed files with 93 additions and 79 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ public class FrameStaticProperties {
* Instantiates a new Frame static properties.
*
* @param mode The Video Mode of the camera.
* @param fov The fov of the image.
* @param fov The FOV (Field of View) of the image in degrees.
*/
public FrameStaticProperties(VideoMode mode, double fov, CameraCalibrationCoefficients cal) {
this(mode != null ? mode.width : 1, mode != null ? mode.height : 1, fov, cal);
Expand All @@ -48,9 +48,9 @@ public FrameStaticProperties(VideoMode mode, double fov, CameraCalibrationCoeffi
/**
* Instantiates a new Frame static properties.
*
* @param imageWidth The width of the image.
* @param imageHeight The width of the image.
* @param fov The fov of the image.
* @param imageWidth The width of the image in pixels.
* @param imageHeight The height of the image in pixels.
* @param fov The FOV (Field of View) of the image in degrees.
*/
public FrameStaticProperties(
int imageWidth, int imageHeight, double fov, CameraCalibrationCoefficients cal) {
Expand All @@ -61,30 +61,47 @@ public FrameStaticProperties(

imageArea = this.imageWidth * this.imageHeight;

// Todo -- if we have calibration, use it's center point?
centerX = ((double) this.imageWidth / 2) - 0.5;
centerY = ((double) this.imageHeight / 2) - 0.5;
centerPoint = new Point(centerX, centerY);

// TODO if we have calibration use it here instead
// pinhole model calculations
DoubleCouple horizVertViews =
calculateHorizontalVerticalFoV(this.fov, this.imageWidth, this.imageHeight);
if(cameraCalibration != null) { // Use calibration data
var camIntrinsics = cameraCalibration.getCameraIntrinsicsMat();
centerX = camIntrinsics.get(0, 2)[0];
centerY = camIntrinsics.get(1, 2)[0];
centerPoint = new Point(centerX, centerY);
horizontalFocalLength = camIntrinsics.get(0, 0)[0];
verticalFocalLength = camIntrinsics.get(1, 1)[0];
}
else { // No calibration data. Calculate from user provided diagonal FOV
centerX = (this.imageWidth / 2.0) - 0.5;
centerY = (this.imageHeight / 2.0) - 0.5;
centerPoint = new Point(centerX, centerY);

horizontalFocalLength = this.imageWidth / (2 * Math.tan(horizVertViews.getFirst() / 2));
verticalFocalLength = this.imageHeight / (2 * Math.tan(horizVertViews.getSecond() / 2));
DoubleCouple horizVertViews =
calculateHorizontalVerticalFoV(this.fov, this.imageWidth, this.imageHeight);
double horizFOV = Math.toRadians(horizVertViews.getFirst());
double vertFOV = Math.toRadians(horizVertViews.getSecond());
horizontalFocalLength = (this.imageWidth / 2.0) / Math.tan(horizFOV / 2.0);
verticalFocalLength = (this.imageHeight / 2.0) / Math.tan(vertFOV / 2.0);
}
}

/**
* Calculates the horizontal and vertical FOV components from a given diagonal FOV and image size.
*
* @param diagonalFoV Diagonal FOV in degrees
* @param imageWidth Image width in pixels
* @param imageHeight Image height in pixels
* @return Horizontal and vertical FOV in degrees
*/
public static DoubleCouple calculateHorizontalVerticalFoV(
double diagonalFoV, int imageWidth, int imageHeight) {
double diagonalView = Math.toRadians(diagonalFoV);
diagonalFoV = Math.toRadians(diagonalFoV);
double diagonalAspect = Math.hypot(imageWidth, imageHeight);

double horizontalView =
Math.atan(Math.tan(diagonalView / 2) * (imageWidth / diagonalAspect)) * 2;
Math.atan(Math.tan(diagonalFoV / 2) * (imageWidth / diagonalAspect)) * 2;
double verticalView =
Math.atan(Math.tan(diagonalView / 2) * (imageHeight / diagonalAspect)) * 2;
Math.atan(Math.tan(diagonalFoV / 2) * (imageHeight / diagonalAspect)) * 2;

return new DoubleCouple(horizontalView, verticalView);
return new DoubleCouple(Math.toDegrees(horizontalView), Math.toDegrees(verticalView));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -23,14 +23,24 @@
import org.photonvision.vision.opencv.DualOffsetValues;

public class TargetCalculations {
public static double calculateYaw(
double offsetCenterX, double targetCenterX, double horizontalFocalLength) {
return Math.toDegrees(Math.atan((offsetCenterX - targetCenterX) / horizontalFocalLength));
}

public static double calculatePitch(
/**
* Calculates the yaw and pitch of a point in the image. Yaw and pitch must be calculated together
* to account for perspective distortion. Yaw is positive right, and pitch is positive up.
*
* @param offsetCenterX The X value of the offset principal point (cx) in pixels
* @param targetCenterX The X value of the target's center point in pixels
* @param horizontalFocalLength The horizontal focal length (fx) in pixels
* @param offsetCenterY The Y value of the offset principal point (cy) in pixels
* @param targetCenterY The Y value of the target's center point in pixels
* @param verticalFocalLength The vertical focal length (fy) in pixels
* @return The yaw and pitch from the principal axis to the target center, in degrees.
*/
public static DoubleCouple calculateYawPitch(
double offsetCenterX, double targetCenterX, double horizontalFocalLength,
double offsetCenterY, double targetCenterY, double verticalFocalLength) {
return -Math.toDegrees(Math.atan((offsetCenterY - targetCenterY) / verticalFocalLength));
double yaw = Math.atan((targetCenterX - offsetCenterX) / horizontalFocalLength);
double pitch = Math.atan((offsetCenterY - targetCenterY) / (verticalFocalLength / Math.cos(yaw)));
return new DoubleCouple(Math.toDegrees(yaw), Math.toDegrees(pitch));
}

@SuppressWarnings("DuplicatedCode")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,15 +73,11 @@ public TrackedTarget(
AprilTagDetection tagDetection,
AprilTagPoseEstimate tagPose,
TargetCalculationParameters params) {
m_targetOffsetPoint = new Point(tagDetection.getCenterX(), tagDetection.getCenterY());
m_robotOffsetPoint = new Point();

m_pitch =
TargetCalculations.calculatePitch(
tagDetection.getCenterY(), params.cameraCenterPoint.y, params.verticalFocalLength);
m_yaw =
TargetCalculations.calculateYaw(
tagDetection.getCenterX(), params.cameraCenterPoint.x, params.horizontalFocalLength);
var yawPitch = TargetCalculations.calculateYawPitch(
params.cameraCenterPoint.x, tagDetection.getCenterX(), params.horizontalFocalLength,
params.cameraCenterPoint.y, tagDetection.getCenterY(), params.verticalFocalLength);
m_yaw = yawPitch.getFirst();
m_pitch = yawPitch.getSecond();
var bestPose = new Transform3d();
var altPose = new Transform3d();

Expand Down Expand Up @@ -140,15 +136,11 @@ public TrackedTarget(
}

public TrackedTarget(ArucoDetectionResult result, TargetCalculationParameters params) {
m_targetOffsetPoint = new Point(result.getCenterX(), result.getCenterY());
m_robotOffsetPoint = new Point();

m_pitch =
TargetCalculations.calculatePitch(
result.getCenterY(), params.cameraCenterPoint.y, params.verticalFocalLength);
m_yaw =
TargetCalculations.calculateYaw(
result.getCenterX(), params.cameraCenterPoint.x, params.horizontalFocalLength);
var yawPitch = TargetCalculations.calculateYawPitch(
params.cameraCenterPoint.x, result.getCenterX(), params.horizontalFocalLength,
params.cameraCenterPoint.y, result.getCenterY(), params.verticalFocalLength);
m_yaw = yawPitch.getFirst();
m_pitch = yawPitch.getSecond();

double[] xCorners = result.getxCorners();
double[] yCorners = result.getyCorners();
Expand Down Expand Up @@ -263,12 +255,12 @@ public void calculateValues(TargetCalculationParameters params) {
params.robotOffsetPointMode);

// order of this stuff doesnt matter though
m_pitch =
TargetCalculations.calculatePitch(
m_targetOffsetPoint.y, m_robotOffsetPoint.y, params.verticalFocalLength);
m_yaw =
TargetCalculations.calculateYaw(
m_targetOffsetPoint.x, m_robotOffsetPoint.x, params.horizontalFocalLength);
var yawPitch = TargetCalculations.calculateYawPitch(
m_robotOffsetPoint.x, m_targetOffsetPoint.x, params.horizontalFocalLength,
m_robotOffsetPoint.y, m_targetOffsetPoint.y, params.verticalFocalLength);
m_yaw = yawPitch.getFirst();
m_pitch = yawPitch.getSecond();

m_area = m_mainContour.getMinAreaRect().size.area() / params.imageArea * 100;

m_skew = TargetCalculations.calculateSkew(params.isLandscape, getMinAreaRect());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
package org.photonvision.vision.target;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
Expand All @@ -32,7 +33,7 @@
public class TargetCalculationsTest {

private static Size imageSize = new Size(800, 600);
private static Point imageCenterPoint = new Point(imageSize.width / 2, imageSize.height / 2);
private static Point imageCenterPoint = new Point(imageSize.width / 2.0 - 0.5, imageSize.height / 2.0 - 0.5);
private static final double diagFOV = Math.toRadians(70.0);

private static final FrameStaticProperties props =
Expand All @@ -55,33 +56,27 @@ public void Init() {
}

@Test
public void yawTest() {
var targetPixelOffsetX = 100;
var targetCenterPoint = new Point(imageCenterPoint.x + targetPixelOffsetX, imageCenterPoint.y);

var trueYaw =
Math.atan((imageCenterPoint.x - targetCenterPoint.x) / params.horizontalFocalLength);

var yaw =
TargetCalculations.calculateYaw(
imageCenterPoint.x, targetCenterPoint.x, params.horizontalFocalLength);

assertEquals(Math.toDegrees(trueYaw), yaw, 0.025, "Yaw not as expected");
}

@Test
public void pitchTest() {
var targetPixelOffsetY = 100;
var targetCenterPoint = new Point(imageCenterPoint.x, imageCenterPoint.y + targetPixelOffsetY);

var truePitch =
Math.atan((imageCenterPoint.y - targetCenterPoint.y) / params.verticalFocalLength);

var pitch =
TargetCalculations.calculatePitch(
imageCenterPoint.y, targetCenterPoint.y, params.verticalFocalLength);

assertEquals(Math.toDegrees(truePitch) * -1, pitch, 0.025, "Pitch not as expected");
public void yawPitchTest() {
double targetPixelOffsetX = 100;
double targetPixelOffsetY = 100;
var targetCenterPoint = new Point(imageCenterPoint.x + targetPixelOffsetX, imageCenterPoint.y + targetPixelOffsetY);

var targetYawPitch = TargetCalculations.calculateYawPitch(
imageCenterPoint.x, targetCenterPoint.x, params.horizontalFocalLength,
imageCenterPoint.y, targetCenterPoint.y, params.verticalFocalLength);

assertTrue(targetYawPitch.getFirst() > 0, "Yaw is not positive right");
assertTrue(targetYawPitch.getSecond() < 0, "Pitch is not positive up");

var fovs = FrameStaticProperties.calculateHorizontalVerticalFoV(diagFOV, (int)imageSize.width, (int)imageSize.height);
var maxYaw = TargetCalculations.calculateYawPitch(
imageCenterPoint.x, 2*imageCenterPoint.x, params.horizontalFocalLength,
imageCenterPoint.y, imageCenterPoint.y, params.verticalFocalLength);
assertEquals(fovs.getFirst() / 2.0, maxYaw.getFirst(), 0.025, "Horizontal FOV check failed");
var maxPitch = TargetCalculations.calculateYawPitch(
imageCenterPoint.x, imageCenterPoint.x, params.horizontalFocalLength,
imageCenterPoint.y, 0, params.verticalFocalLength);
assertEquals(fovs.getSecond() / 2.0, maxPitch.getSecond(), 0.025, "Vertical FOV check failed");
}

@Test
Expand Down Expand Up @@ -188,8 +183,8 @@ public void testSkewCalculation() {
public void testCameraFOVCalculation() {
    // calculateHorizontalVerticalFoV now returns degrees directly, so no
    // Math.toDegrees conversion is applied to its outputs.
    final DoubleCouple glowormHorizVert =
            FrameStaticProperties.calculateHorizontalVerticalFoV(74.8, 640, 480);
    var gwHorizDeg = glowormHorizVert.getFirst();
    var gwVertDeg = glowormHorizVert.getSecond();
    // Expected values for a 74.8 degree diagonal FOV on a 640x480 (4:3) image.
    assertEquals(62.7, gwHorizDeg, .3);
    assertEquals(49, gwVertDeg, .3);
}
Expand Down

0 comments on commit 3ffbc87

Please sign in to comment.