[photon-core] 2D Detection data accuracy #896

Merged · 7 commits · Oct 15, 2023
Changes from 5 commits

@@ -39,7 +39,7 @@ public class FrameStaticProperties {
* Instantiates a new Frame static properties.
*
* @param mode The Video Mode of the camera.
* @param fov The fov of the image.
* @param fov The FOV (Field of View) of the image in degrees.
*/
public FrameStaticProperties(VideoMode mode, double fov, CameraCalibrationCoefficients cal) {
this(mode != null ? mode.width : 1, mode != null ? mode.height : 1, fov, cal);
@@ -48,9 +48,9 @@ public FrameStaticProperties(VideoMode mode, double fov, CameraCalibrationCoeffi
/**
* Instantiates a new Frame static properties.
*
* @param imageWidth The width of the image.
* @param imageHeight The width of the image.
* @param fov The fov of the image.
* @param imageWidth The width of the image in pixels.
* @param imageHeight The height of the image in pixels.
* @param fov The FOV (Field of View) of the image in degrees.
*/
public FrameStaticProperties(
int imageWidth, int imageHeight, double fov, CameraCalibrationCoefficients cal) {
@@ -61,30 +61,47 @@ public FrameStaticProperties(

imageArea = this.imageWidth * this.imageHeight;

// Todo -- if we have calibration, use it's center point?
centerX = ((double) this.imageWidth / 2) - 0.5;
centerY = ((double) this.imageHeight / 2) - 0.5;
centerPoint = new Point(centerX, centerY);

// TODO if we have calibration use it here instead
// pinhole model calculations
DoubleCouple horizVertViews =
calculateHorizontalVerticalFoV(this.fov, this.imageWidth, this.imageHeight);
if (cameraCalibration != null && cameraCalibration.getCameraIntrinsicsMat() != null) {
// Use calibration data
var camIntrinsics = cameraCalibration.getCameraIntrinsicsMat();
centerX = camIntrinsics.get(0, 2)[0];
centerY = camIntrinsics.get(1, 2)[0];
centerPoint = new Point(centerX, centerY);
horizontalFocalLength = camIntrinsics.get(0, 0)[0];
verticalFocalLength = camIntrinsics.get(1, 1)[0];
} else {
// No calibration data. Calculate from user provided diagonal FOV
centerX = (this.imageWidth / 2.0) - 0.5;
centerY = (this.imageHeight / 2.0) - 0.5;
centerPoint = new Point(centerX, centerY);

horizontalFocalLength = this.imageWidth / (2 * Math.tan(horizVertViews.getFirst() / 2));
verticalFocalLength = this.imageHeight / (2 * Math.tan(horizVertViews.getSecond() / 2));
DoubleCouple horizVertViews =
calculateHorizontalVerticalFoV(this.fov, this.imageWidth, this.imageHeight);
double horizFOV = Math.toRadians(horizVertViews.getFirst());
double vertFOV = Math.toRadians(horizVertViews.getSecond());
horizontalFocalLength = (this.imageWidth / 2.0) / Math.tan(horizFOV / 2.0);
verticalFocalLength = (this.imageHeight / 2.0) / Math.tan(vertFOV / 2.0);
}
}

/**
* Calculates the horizontal and vertical FOV components from a given diagonal FOV and image size.
*
* @param diagonalFoV Diagonal FOV in degrees
* @param imageWidth Image width in pixels
* @param imageHeight Image height in pixels
* @return Horizontal and vertical FOV in degrees
*/
public static DoubleCouple calculateHorizontalVerticalFoV(
double diagonalFoV, int imageWidth, int imageHeight) {
double diagonalView = Math.toRadians(diagonalFoV);
diagonalFoV = Math.toRadians(diagonalFoV);
double diagonalAspect = Math.hypot(imageWidth, imageHeight);

double horizontalView =
Math.atan(Math.tan(diagonalView / 2) * (imageWidth / diagonalAspect)) * 2;
double verticalView =
Math.atan(Math.tan(diagonalView / 2) * (imageHeight / diagonalAspect)) * 2;
Math.atan(Math.tan(diagonalFoV / 2) * (imageWidth / diagonalAspect)) * 2;
double verticalView = Math.atan(Math.tan(diagonalFoV / 2) * (imageHeight / diagonalAspect)) * 2;

return new DoubleCouple(horizontalView, verticalView);
return new DoubleCouple(Math.toDegrees(horizontalView), Math.toDegrees(verticalView));
}
}
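
As context for the change above, here is a minimal standalone sketch (not part of this PR) of the pinhole math the new uncalibrated branch relies on: splitting the user-supplied diagonal FOV into horizontal and vertical components, then deriving per-axis focal lengths. The 70-degree diagonal FOV and 640x480 resolution below are illustrative assumptions.

public class PinholeFovSketch {
    public static void main(String[] args) {
        int width = 640, height = 480;          // assumed resolution
        double diagFovDeg = 70.0;               // assumed diagonal FOV, degrees

        // Split the diagonal FOV across the image diagonal.
        double diag = Math.toRadians(diagFovDeg);
        double diagPx = Math.hypot(width, height);
        double horizFov = 2 * Math.atan(Math.tan(diag / 2) * (width / diagPx));
        double vertFov = 2 * Math.atan(Math.tan(diag / 2) * (height / diagPx));

        // Pinhole model: fx = (w/2) / tan(HFOV/2), fy = (h/2) / tan(VFOV/2).
        double fx = (width / 2.0) / Math.tan(horizFov / 2.0);
        double fy = (height / 2.0) / Math.tan(vertFov / 2.0);

        System.out.printf("HFOV=%.1f deg, VFOV=%.1f deg, fx=%.1f px, fy=%.1f px%n",
                Math.toDegrees(horizFov), Math.toDegrees(vertFov), fx, fy);
    }
}

When calibration data is present, the PR instead reads cx, cy, fx, and fy directly from the camera intrinsics matrix, so the pinhole estimate above becomes the fallback path.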
@@ -23,14 +23,29 @@
import org.photonvision.vision.opencv.DualOffsetValues;

public class TargetCalculations {
public static double calculateYaw(
double offsetCenterX, double targetCenterX, double horizontalFocalLength) {
return Math.toDegrees(Math.atan((offsetCenterX - targetCenterX) / horizontalFocalLength));
}

public static double calculatePitch(
double offsetCenterY, double targetCenterY, double verticalFocalLength) {
return -Math.toDegrees(Math.atan((offsetCenterY - targetCenterY) / verticalFocalLength));
/**
* Calculates the yaw and pitch of a point in the image. Yaw and pitch must be calculated together
* to account for perspective distortion. Yaw is positive right, and pitch is positive up.
*
* @param offsetCenterX The X value of the offset principal point (cx) in pixels
* @param targetCenterX The X value of the target's center point in pixels
* @param horizontalFocalLength The horizontal focal length (fx) in pixels
* @param offsetCenterY The Y value of the offset principal point (cy) in pixels
* @param targetCenterY The Y value of the target's center point in pixels
* @param verticalFocalLength The vertical focal length (fy) in pixels
* @return The yaw and pitch from the principal axis to the target center, in degrees.
*/
public static DoubleCouple calculateYawPitch(
double offsetCenterX,
double targetCenterX,
double horizontalFocalLength,
double offsetCenterY,
double targetCenterY,
double verticalFocalLength) {
double yaw = Math.atan((targetCenterX - offsetCenterX) / horizontalFocalLength);
double pitch =
Math.atan((offsetCenterY - targetCenterY) / (verticalFocalLength / Math.cos(yaw)));
return new DoubleCouple(Math.toDegrees(yaw), Math.toDegrees(pitch));
}

@SuppressWarnings("DuplicatedCode")
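
For illustration only, a self-contained sketch of the perspective-corrected yaw/pitch formula introduced above, outside the PhotonVision codebase; the focal lengths and principal point used here are assumptions, not values from the PR.

public class YawPitchSketch {
    // Mirrors the formula in calculateYawPitch: yaw is positive right, pitch is positive up,
    // and the vertical focal length is divided by cos(yaw) so pitch stays correct for
    // targets that sit off-axis horizontally.
    static double[] yawPitchDegrees(double cx, double x, double fx,
                                    double cy, double y, double fy) {
        double yaw = Math.atan((x - cx) / fx);
        double pitch = Math.atan((cy - y) / (fy / Math.cos(yaw)));
        return new double[] {Math.toDegrees(yaw), Math.toDegrees(pitch)};
    }

    public static void main(String[] args) {
        // Assumed camera: fx = fy = 600 px, principal point at (319.5, 239.5).
        double[] result = yawPitchDegrees(319.5, 419.5, 600.0, 239.5, 139.5, 600.0);
        System.out.printf("yaw=%.2f deg, pitch=%.2f deg%n", result[0], result[1]);
    }
}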
@@ -75,13 +75,16 @@ public TrackedTarget(
TargetCalculationParameters params) {
m_targetOffsetPoint = new Point(tagDetection.getCenterX(), tagDetection.getCenterY());
m_robotOffsetPoint = new Point();

m_pitch =
TargetCalculations.calculatePitch(
tagDetection.getCenterY(), params.cameraCenterPoint.y, params.verticalFocalLength);
m_yaw =
TargetCalculations.calculateYaw(
tagDetection.getCenterX(), params.cameraCenterPoint.x, params.horizontalFocalLength);
var yawPitch =
TargetCalculations.calculateYawPitch(
params.cameraCenterPoint.x,
tagDetection.getCenterX(),
params.horizontalFocalLength,
params.cameraCenterPoint.y,
tagDetection.getCenterY(),
params.verticalFocalLength);
m_yaw = yawPitch.getFirst();
m_pitch = yawPitch.getSecond();
var bestPose = new Transform3d();
var altPose = new Transform3d();

@@ -142,13 +145,16 @@ public TrackedTarget(
public TrackedTarget(ArucoDetectionResult result, TargetCalculationParameters params) {
m_targetOffsetPoint = new Point(result.getCenterX(), result.getCenterY());
m_robotOffsetPoint = new Point();

m_pitch =
TargetCalculations.calculatePitch(
result.getCenterY(), params.cameraCenterPoint.y, params.verticalFocalLength);
m_yaw =
TargetCalculations.calculateYaw(
result.getCenterX(), params.cameraCenterPoint.x, params.horizontalFocalLength);
var yawPitch =
TargetCalculations.calculateYawPitch(
params.cameraCenterPoint.x,
result.getCenterX(),
params.horizontalFocalLength,
params.cameraCenterPoint.y,
result.getCenterY(),
params.verticalFocalLength);
m_yaw = yawPitch.getFirst();
m_pitch = yawPitch.getSecond();

double[] xCorners = result.getxCorners();
double[] yCorners = result.getyCorners();
@@ -263,12 +269,17 @@ public void calculateValues(TargetCalculationParameters params) {
params.robotOffsetPointMode);

// order of this stuff doesnt matter though
m_pitch =
TargetCalculations.calculatePitch(
m_targetOffsetPoint.y, m_robotOffsetPoint.y, params.verticalFocalLength);
m_yaw =
TargetCalculations.calculateYaw(
m_targetOffsetPoint.x, m_robotOffsetPoint.x, params.horizontalFocalLength);
var yawPitch =
TargetCalculations.calculateYawPitch(
m_robotOffsetPoint.x,
m_targetOffsetPoint.x,
params.horizontalFocalLength,
m_robotOffsetPoint.y,
m_targetOffsetPoint.y,
params.verticalFocalLength);
m_yaw = yawPitch.getFirst();
m_pitch = yawPitch.getSecond();

m_area = m_mainContour.getMinAreaRect().size.area() / params.imageArea * 100;

m_skew = TargetCalculations.calculateSkew(params.isLandscape, getMinAreaRect());
@@ -17,12 +17,19 @@
package org.photonvision.vision.target;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

import edu.wpi.first.math.geometry.Rotation3d;
import edu.wpi.first.math.geometry.Translation3d;
import java.util.stream.Stream;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.opencv.calib3d.Calib3d;
import org.opencv.core.*;
import org.opencv.core.Point;
import org.opencv.imgproc.Imgproc;
import org.photonvision.common.util.TestUtils;
import org.photonvision.common.util.numbers.DoubleCouple;
@@ -32,7 +39,8 @@
public class TargetCalculationsTest {

private static Size imageSize = new Size(800, 600);
private static Point imageCenterPoint = new Point(imageSize.width / 2, imageSize.height / 2);
private static Point imageCenterPoint =
new Point(imageSize.width / 2.0 - 0.5, imageSize.height / 2.0 - 0.5);
private static final double diagFOV = Math.toRadians(70.0);

private static final FrameStaticProperties props =
@@ -49,43 +57,104 @@ public class TargetCalculationsTest {
props.verticalFocalLength,
imageSize.width * imageSize.height);

@BeforeEach
public void Init() {
@BeforeAll
public static void setup() {
TestUtils.loadLibraries();
}

@Test
public void yawTest() {
var targetPixelOffsetX = 100;
var targetCenterPoint = new Point(imageCenterPoint.x + targetPixelOffsetX, imageCenterPoint.y);

var trueYaw =
Math.atan((imageCenterPoint.x - targetCenterPoint.x) / params.horizontalFocalLength);

var yaw =
TargetCalculations.calculateYaw(
imageCenterPoint.x, targetCenterPoint.x, params.horizontalFocalLength);

assertEquals(Math.toDegrees(trueYaw), yaw, 0.025, "Yaw not as expected");
public void testYawPitchBehavior() {
double targetPixelOffsetX = 100;
double targetPixelOffsetY = 100;
var targetCenterPoint =
new Point(imageCenterPoint.x + targetPixelOffsetX, imageCenterPoint.y + targetPixelOffsetY);

var targetYawPitch =
TargetCalculations.calculateYawPitch(
imageCenterPoint.x,
targetCenterPoint.x,
params.horizontalFocalLength,
imageCenterPoint.y,
targetCenterPoint.y,
params.verticalFocalLength);

assertTrue(targetYawPitch.getFirst() > 0, "Yaw is not positive right");
assertTrue(targetYawPitch.getSecond() < 0, "Pitch is not positive up");

var fovs =
FrameStaticProperties.calculateHorizontalVerticalFoV(
diagFOV, (int) imageSize.width, (int) imageSize.height);
var maxYaw =
TargetCalculations.calculateYawPitch(
imageCenterPoint.x,
2 * imageCenterPoint.x,
params.horizontalFocalLength,
imageCenterPoint.y,
imageCenterPoint.y,
params.verticalFocalLength);
assertEquals(fovs.getFirst() / 2.0, maxYaw.getFirst(), 0.025, "Horizontal FOV check failed");
var maxPitch =
TargetCalculations.calculateYawPitch(
imageCenterPoint.x,
imageCenterPoint.x,
params.horizontalFocalLength,
imageCenterPoint.y,
0,
params.verticalFocalLength);
assertEquals(fovs.getSecond() / 2.0, maxPitch.getSecond(), 0.025, "Vertical FOV check failed");
}

@Test
public void pitchTest() {
var targetPixelOffsetY = 100;
var targetCenterPoint = new Point(imageCenterPoint.x, imageCenterPoint.y + targetPixelOffsetY);

var truePitch =
Math.atan((imageCenterPoint.y - targetCenterPoint.y) / params.verticalFocalLength);

var pitch =
TargetCalculations.calculatePitch(
imageCenterPoint.y, targetCenterPoint.y, params.verticalFocalLength);
private static Stream<Arguments> testYawPitchCalcArgs() {
return Stream.of(
// (yaw, pitch) in degrees
Arguments.of(0, 0),
Arguments.of(10, 0),
Arguments.of(0, 10),
Arguments.of(10, 10),
Arguments.of(-10, -10),
Arguments.of(30, 45),
Arguments.of(-45, -20));
}

assertEquals(Math.toDegrees(truePitch) * -1, pitch, 0.025, "Pitch not as expected");
private static double[] testCameraMatrix = {240, 0, 320, 0, 240, 320, 0, 0, 1};

@ParameterizedTest
@MethodSource("testYawPitchCalcArgs")
public void testYawPitchCalc(double yawDeg, double pitchDeg) {
Mat testCameraMat = new Mat(3, 3, CvType.CV_64F);
testCameraMat.put(0, 0, testCameraMatrix);
// Since we create this translation using the given yaw/pitch, we should see the same angles
// calculated
var targetTrl =
new Translation3d(1, new Rotation3d(0, Math.toRadians(pitchDeg), Math.toRadians(yawDeg)));
// NWU to EDN
var objectPoints =
new MatOfPoint3f(new Point3(-targetTrl.getY(), -targetTrl.getZ(), targetTrl.getX()));
var imagePoints = new MatOfPoint2f();
// Project translation into camera image
Calib3d.projectPoints(
objectPoints,
new MatOfDouble(0, 0, 0),
new MatOfDouble(0, 0, 0),
testCameraMat,
new MatOfDouble(0, 0, 0, 0, 0),
imagePoints);
var point = imagePoints.toArray()[0];
// Test if the target yaw/pitch calculation matches what the target was created with
var yawPitch =
TargetCalculations.calculateYawPitch(
point.x,
testCameraMatrix[2],
testCameraMatrix[0],
point.y,
testCameraMatrix[5],
testCameraMatrix[4]);
assertEquals(yawDeg, yawPitch.getFirst(), 1e-3, "Yaw calculation incorrect");
assertEquals(pitchDeg, yawPitch.getSecond(), 1e-3, "Pitch calculation incorrect");
}

@Test
public void targetOffsetTest() {
public void testTargetOffset() {
Point center = new Point(0, 0);
Size rectSize = new Size(10, 5);
double angle = 30;
@@ -102,11 +171,6 @@ public void targetOffsetTest() {
assertEquals(2.17, result.y, 0.1, "Target offset Y not as expected");
}

public static void main(String[] args) {
TestUtils.loadLibraries();
new TargetCalculationsTest().targetOffsetTest();
}

@Test
public void testSkewCalculation() {
// Setup
@@ -188,14 +252,14 @@ public void testSkewCalculation() {
public void testCameraFOVCalculation() {
final DoubleCouple glowormHorizVert =
FrameStaticProperties.calculateHorizontalVerticalFoV(74.8, 640, 480);
var gwHorizDeg = Math.toDegrees(glowormHorizVert.getFirst());
var gwVertDeg = Math.toDegrees(glowormHorizVert.getSecond());
var gwHorizDeg = glowormHorizVert.getFirst();
var gwVertDeg = glowormHorizVert.getSecond();
assertEquals(62.7, gwHorizDeg, .3);
assertEquals(49, gwVertDeg, .3);
}

@Test
public void robotOffsetDualTest() {
public void testDualOffsetCrosshair() {
final DualOffsetValues dualOffsetValues =
new DualOffsetValues(
new Point(400, 150), 10,