From f1cadc1e1eca3d7c4fa3e486fe56a2bff3665abb Mon Sep 17 00:00:00 2001 From: amquake Date: Sun, 18 Jun 2023 15:54:12 -0700 Subject: [PATCH 1/3] [WIP] Simulation Overhaul (#742) ### What does this do? - Deprecates previous sim classes - Has a `CameraProperties` class for describing a camera's basic/calibration info, and performance values for simulation. Calibration values can be loaded from the `config.json` in the settings exported by photonvision. - `OpenCVHelp` provides convenience functions for using opencv methods with wpilib/photonvision classes, mainly to project 3d points to a camera's 2d image and perform solvePnP with the above camera calibration info. - `TargetModel`s describe the 3d shape of a target, both for projecting into the camera's 2d image and use in solvePnP. - `PhotonCameraSim` uses camera properties to simulate how 3d targets would appear in its view, and has simulated noise, latency, and FPS. For apriltags, the best/alternate camera-to-target transform is also estimated with solvePnP. - `VideoSimUtil` has helper functions for drawing apriltags to a simulated raw and processed MJPEG stream for each camera using the projected tag corners. - `VisionSystemSim` stores `VisionTargetSim`s and `PhotonCameraSim`s, and is periodically updated with the robot's simulated pose. When updating, camera sims are automatically processed and published with their visible targets from their respective poses with proper latency. ### What's still not working? 
- Mac Arm builds are broken - More examples - Update website/docs --- .gitignore | 1 + photon-lib/build.gradle | 12 +- .../java/org/photonvision/PhotonCamera.java | 21 +- .../photonvision/estimation/OpenCVHelp.java | 2 +- .../estimation/RotTrlTransform3d.java | 4 +- .../estimation/VisionEstimation.java | 57 +- .../simulation/PhotonCameraSim.java | 464 ++++++++++++++ .../simulation/SimCameraProperties.java | 601 ++++++++++++++++++ .../{ => simulation}/SimPhotonCamera.java | 8 +- .../{ => simulation}/SimVisionSystem.java | 6 +- .../{ => simulation}/SimVisionTarget.java | 6 +- .../photonvision/simulation/VideoSimUtil.java | 337 ++++++++++ .../simulation/VisionSystemSim.java | 377 +++++++++++ .../simulation/VisionTargetSim.java | 94 +++ .../images/apriltags/tag16_05_00000.png | Bin 0 -> 99 bytes .../images/apriltags/tag16_05_00001.png | Bin 0 -> 97 bytes .../images/apriltags/tag16_05_00002.png | Bin 0 -> 99 bytes .../images/apriltags/tag16_05_00003.png | Bin 0 -> 97 bytes .../images/apriltags/tag16_05_00004.png | Bin 0 -> 97 bytes .../images/apriltags/tag16_05_00005.png | Bin 0 -> 100 bytes .../images/apriltags/tag16_05_00006.png | Bin 0 -> 101 bytes .../images/apriltags/tag16_05_00007.png | Bin 0 -> 98 bytes .../images/apriltags/tag16_05_00008.png | Bin 0 -> 97 bytes .../images/apriltags/tag16_05_00009.png | Bin 0 -> 100 bytes .../images/apriltags/tag16_05_00010.png | Bin 0 -> 99 bytes .../images/apriltags/tag16_05_00011.png | Bin 0 -> 98 bytes .../images/apriltags/tag16_05_00012.png | Bin 0 -> 95 bytes .../images/apriltags/tag16_05_00013.png | Bin 0 -> 99 bytes .../images/apriltags/tag16_05_00014.png | Bin 0 -> 102 bytes .../images/apriltags/tag16_05_00015.png | Bin 0 -> 97 bytes .../images/apriltags/tag16_05_00016.png | Bin 0 -> 97 bytes .../images/apriltags/tag16_05_00017.png | Bin 0 -> 101 bytes .../images/apriltags/tag16_05_00018.png | Bin 0 -> 99 bytes .../images/apriltags/tag16_05_00019.png | Bin 0 -> 99 bytes .../images/apriltags/tag16_05_00020.png | Bin 0 -> 
98 bytes .../images/apriltags/tag16_05_00021.png | Bin 0 -> 100 bytes .../images/apriltags/tag16_05_00022.png | Bin 0 -> 101 bytes .../images/apriltags/tag16_05_00023.png | Bin 0 -> 97 bytes .../images/apriltags/tag16_05_00024.png | Bin 0 -> 97 bytes .../images/apriltags/tag16_05_00025.png | Bin 0 -> 99 bytes .../images/apriltags/tag16_05_00026.png | Bin 0 -> 99 bytes .../images/apriltags/tag16_05_00027.png | Bin 0 -> 98 bytes .../images/apriltags/tag16_05_00028.png | Bin 0 -> 95 bytes .../images/apriltags/tag16_05_00029.png | Bin 0 -> 97 bytes .../java/org/photonvision/OpenCVTest.java | 229 +++++++ ...stemTest.java => VisionSystemSimTest.java} | 357 ++++++----- .../targeting/PhotonTrackedTarget.java | 8 +- .../apriltagExample/gradlew | 0 .../apriltagExample/simgui-ds.json | 17 +- .../apriltagExample/simgui.json | 33 +- .../src/main/java/frc/robot/Drivetrain.java | 21 +- .../java/frc/robot/PhotonCameraWrapper.java | 4 +- .../src/main/java/frc/robot/Robot.java | 7 + .../java/frc/robot/sim/DrivetrainSim.java | 4 +- .../src/main/java/frc/robot/Constants.java | 2 +- .../main/java/frc/robot/DrivetrainSim.java | 2 +- 56 files changed, 2473 insertions(+), 201 deletions(-) create mode 100644 photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java create mode 100644 photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java rename photon-lib/src/main/java/org/photonvision/{ => simulation}/SimPhotonCamera.java (97%) rename photon-lib/src/main/java/org/photonvision/{ => simulation}/SimVisionSystem.java (99%) rename photon-lib/src/main/java/org/photonvision/{ => simulation}/SimVisionTarget.java (95%) create mode 100644 photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java create mode 100644 photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java create mode 100644 photon-lib/src/main/java/org/photonvision/simulation/VisionTargetSim.java create mode 100644 
photon-lib/src/main/resources/images/apriltags/tag16_05_00000.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00001.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00002.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00003.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00004.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00005.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00006.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00007.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00008.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00009.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00010.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00011.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00012.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00013.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00014.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00015.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00016.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00017.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00018.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00019.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00020.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00021.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00022.png create mode 100644 
photon-lib/src/main/resources/images/apriltags/tag16_05_00023.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00024.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00025.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00026.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00027.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00028.png create mode 100644 photon-lib/src/main/resources/images/apriltags/tag16_05_00029.png create mode 100644 photon-lib/src/test/java/org/photonvision/OpenCVTest.java rename photon-lib/src/test/java/org/photonvision/{SimVisionSystemTest.java => VisionSystemSimTest.java} (53%) mode change 100755 => 100644 photonlib-cpp-examples/apriltagExample/gradlew diff --git a/.gitignore b/.gitignore index 4c59f92df6..ca5a93289b 100644 --- a/.gitignore +++ b/.gitignore @@ -145,6 +145,7 @@ build/spotlessJava build/* build photon-lib/src/main/java/org/photonvision/PhotonVersion.java +photon-lib/bin/main/images/* /photonlib-java-examples/bin/ photon-lib/src/generate/native/include/PhotonVersion.h .gitattributes diff --git a/photon-lib/build.gradle b/photon-lib/build.gradle index 741117401b..a14f09c398 100644 --- a/photon-lib/build.gradle +++ b/photon-lib/build.gradle @@ -34,7 +34,10 @@ nativeUtils.platformConfigs.linuxathena.cppCompiler.args.add("-Wno-deprecated-an dependencies { implementation project(":photon-targeting") + // WPILib deps implementation wpilibTools.deps.wpilibJava("wpiutil") + implementation wpilibTools.deps.wpilibJava("cameraserver") + implementation wpilibTools.deps.wpilibJava("cscore") implementation wpilibTools.deps.wpilibJava("wpimath") implementation wpilibTools.deps.wpilibJava("wpinet") implementation wpilibTools.deps.wpilibJava("hal") @@ -42,12 +45,18 @@ dependencies { implementation wpilibTools.deps.wpilibJava("wpilibj") implementation wpilibTools.deps.wpilibJava("apriltag") - 
testImplementation wpilibTools.deps.wpilibJava("cscore") + // Jackson + implementation "com.fasterxml.jackson.core:jackson-annotations:2.12.4" + implementation "com.fasterxml.jackson.core:jackson-core:2.12.4" + implementation "com.fasterxml.jackson.core:jackson-databind:2.12.4" + implementation "edu.wpi.first.thirdparty.frc2023.opencv:opencv-java:$opencvVersion" implementation "edu.wpi.first.thirdparty.frc2023.opencv:opencv-jni:$opencvVersion:$jniPlatform" + implementation "org.ejml:ejml-simple:0.41" // Junit + testImplementation wpilibTools.deps.wpilibJava("cscore") testImplementation("org.junit.jupiter:junit-jupiter-api:5.8.2") testImplementation("org.junit.jupiter:junit-jupiter-params:5.8.2") testRuntimeOnly("org.junit.jupiter:junit-jupiter-engine:5.8.2") @@ -157,6 +166,7 @@ def testNativeTasks = wpilibTools.createExtractionTasks { testNativeTasks.addToSourceSetResources(sourceSets.test) +testNativeConfig.dependencies.add wpilibTools.deps.cscore() testNativeConfig.dependencies.add wpilibTools.deps.wpilib("ntcore") testNativeConfig.dependencies.add wpilibTools.deps.wpilib("wpinet") testNativeConfig.dependencies.add wpilibTools.deps.wpilib("hal") diff --git a/photon-lib/src/main/java/org/photonvision/PhotonCamera.java b/photon-lib/src/main/java/org/photonvision/PhotonCamera.java index dc0052912d..b9978a8021 100644 --- a/photon-lib/src/main/java/org/photonvision/PhotonCamera.java +++ b/photon-lib/src/main/java/org/photonvision/PhotonCamera.java @@ -51,10 +51,10 @@ import org.photonvision.targeting.PhotonPipelineResult; /** Represents a camera that is connected to PhotonVision. 
*/ -public class PhotonCamera { - static final String kTableName = "photonvision"; +public class PhotonCamera implements AutoCloseable { + public static final String kTableName = "photonvision"; - protected final NetworkTable cameraTable; + private final NetworkTable cameraTable; RawSubscriber rawBytesEntry; BooleanPublisher driverModePublisher; BooleanSubscriber driverModeSubscriber; @@ -73,6 +73,7 @@ public class PhotonCamera { private DoubleArraySubscriber cameraIntrinsicsSubscriber; private DoubleArraySubscriber cameraDistortionSubscriber; + @Override public void close() { rawBytesEntry.close(); driverModePublisher.close(); @@ -151,9 +152,7 @@ public PhotonCamera(NetworkTableInstance instance, String cameraName) { m_topicNameSubscriber = new MultiSubscriber( - instance, - new String[] {"/photonvision/"}, - new PubSubOption[] {PubSubOption.topicsOnly(true)}); + instance, new String[] {"/photonvision/"}, PubSubOption.topicsOnly(true)); } /** @@ -186,7 +185,7 @@ public PhotonPipelineResult getLatestResult() { ret.createFromPacket(packet); // Set the timestamp of the result. - // getLatestChange returns in microseconds so we divide by 1e6 to convert to seconds. + // getLatestChange returns in microseconds, so we divide by 1e6 to convert to seconds. ret.setTimestampSeconds((rawBytesEntry.getLastChange() / 1e6) - ret.getLatencyMillis() / 1e3); // Return result. @@ -334,6 +333,14 @@ public Optional> getDistCoeffs() { } else return Optional.empty(); } + /** + * Gets the NetworkTable representing this camera's subtable. You probably don't ever need to call + * this. 
+ */ + public final NetworkTable getCameraTable() { + return cameraTable; + } + private void verifyVersion() { if (!VERSION_CHECK_ENABLED) return; diff --git a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java index acd9a71c27..b834ad4459 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/OpenCVHelp.java @@ -285,7 +285,7 @@ public static List projectPoints( /** * Undistort 2d image points using a given camera's intrinsics and distortion. * - *

2d image points from projectPoints(CameraProperties, Pose3d, List) projectPoints} will + *

2d image points from projectPoints(CameraProperties, Pose3d, List) projectPoints will * naturally be distorted, so this operation is important if the image points need to be directly * used (e.g. 2d yaw/pitch). * diff --git a/photon-lib/src/main/java/org/photonvision/estimation/RotTrlTransform3d.java b/photon-lib/src/main/java/org/photonvision/estimation/RotTrlTransform3d.java index 549614de87..41c3e36d97 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/RotTrlTransform3d.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/RotTrlTransform3d.java @@ -106,7 +106,7 @@ public Translation3d apply(Translation3d trl) { ; public List applyTrls(List trls) { - return trls.stream().map(t -> apply(t)).collect(Collectors.toList()); + return trls.stream().map(this::apply).collect(Collectors.toList()); } public Pose3d apply(Pose3d pose) { @@ -114,6 +114,6 @@ public Pose3d apply(Pose3d pose) { } public List applyPoses(List poses) { - return poses.stream().map(p -> apply(p)).collect(Collectors.toList()); + return poses.stream().map(this::apply).collect(Collectors.toList()); } } diff --git a/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java b/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java index bc3a735e9d..9cdc1811fb 100644 --- a/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java +++ b/photon-lib/src/main/java/org/photonvision/estimation/VisionEstimation.java @@ -25,20 +25,34 @@ package org.photonvision.estimation; import edu.wpi.first.apriltag.AprilTag; +import edu.wpi.first.apriltag.AprilTagFieldLayout; import edu.wpi.first.math.Matrix; import edu.wpi.first.math.geometry.Pose3d; import edu.wpi.first.math.geometry.Transform3d; import edu.wpi.first.math.geometry.Translation3d; import edu.wpi.first.math.numbers.*; -import edu.wpi.first.math.util.Units; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; import java.util.ArrayList; import java.util.List; +import 
java.util.Objects; +import java.util.stream.Collectors; +import org.photonvision.targeting.PhotonTrackedTarget; import org.photonvision.targeting.TargetCorner; public class VisionEstimation { - public static final TargetModel kTagModel = - new TargetModel(Units.inchesToMeters(6), Units.inchesToMeters(6)); + /** Get the visible {@link AprilTag}s according the tag layout using the visible tag IDs. */ + public static List getVisibleLayoutTags( + List visTags, AprilTagFieldLayout tagLayout) { + return visTags.stream() + .map( + t -> { + int id = t.getFiducialId(); + var maybePose = tagLayout.getTagPose(id); + return maybePose.map(pose3d -> new AprilTag(id, pose3d)).orElse(null); + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + } /** * Performs solvePNP using 3d-2d point correspondences to estimate the field-to-camera @@ -47,29 +61,37 @@ public class VisionEstimation { *

Note: The returned transformation is from the field origin to the camera pose! * (Unless you only feed this one tag??) * - * @param cameraMatrix the camera intrinsics matrix in standard opencv form - * @param distCoeffs the camera distortion matrix in standard opencv form - * @param corners The visible tag corners in the 2d image - * @param knownTags The known tag field poses corresponding to the visible tag IDs + * @param cameraMatrix The camera intrinsics matrix in standard opencv form + * @param distCoeffs The camera distortion matrix in standard opencv form + * @param visTags The visible tags reported by PV + * @param tagLayout The known tag layout on the field * @return The transformation that maps the field origin to the camera pose */ @Deprecated public static PNPResults estimateCamPosePNP( Matrix cameraMatrix, Matrix distCoeffs, - List corners, - List knownTags) { - if (knownTags == null - || corners == null - || corners.size() != knownTags.size() * 4 - || knownTags.size() == 0) { + List visTags, + AprilTagFieldLayout tagLayout) { + if (tagLayout == null + || visTags == null + || tagLayout.getTags().size() == 0 + || visTags.size() == 0) { return new PNPResults(); } + + var corners = new ArrayList(); + for (var tag : visTags) corners.addAll(tag.getDetectedCorners()); + var knownTags = getVisibleLayoutTags(visTags, tagLayout); + if (knownTags.size() == 0 || corners.size() == 0 || corners.size() % 4 != 0) { + return new PNPResults(); + } + // single-tag pnp - if (corners.size() == 4) { + if (visTags.size() == 1) { var camToTag = OpenCVHelp.solvePNP_SQUARE( - cameraMatrix, distCoeffs, kTagModel.getFieldVertices(knownTags.get(0).pose), corners); + cameraMatrix, distCoeffs, TargetModel.kTag16h5.vertices, corners); var bestPose = knownTags.get(0).pose.transformBy(camToTag.best.inverse()); var altPose = new Pose3d(); if (camToTag.ambiguity != 0) @@ -99,9 +121,8 @@ public static PNPResults estimateCamPosePNP( // multi-tag pnp else { var objectTrls = new ArrayList(); 
- for (var tag : knownTags) objectTrls.addAll(kTagModel.getFieldVertices(tag.pose)); + for (var tag : knownTags) objectTrls.addAll(TargetModel.kTag16h5.getFieldVertices(tag.pose)); var camToOrigin = OpenCVHelp.solvePNP_SQPNP(cameraMatrix, distCoeffs, objectTrls, corners); - // var camToOrigin = OpenCVHelp.solveTagsPNPRansac(prop, objectTrls, corners); return new PNPResults( camToOrigin.best.inverse(), camToOrigin.alt.inverse(), @@ -135,7 +156,7 @@ public static PNPResults estimateCamPoseSqpnp( return new PNPResults(); } var objectTrls = new ArrayList(); - for (var tag : knownTags) objectTrls.addAll(kTagModel.getFieldVertices(tag.pose)); + for (var tag : knownTags) objectTrls.addAll(TargetModel.kTag16h5.vertices); var camToOrigin = OpenCVHelp.solvePNP_SQPNP(cameraMatrix, distCoeffs, objectTrls, corners); // var camToOrigin = OpenCVHelp.solveTagsPNPRansac(prop, objectTrls, corners); return new PNPResults( diff --git a/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java new file mode 100644 index 0000000000..710d7f49bf --- /dev/null +++ b/photon-lib/src/main/java/org/photonvision/simulation/PhotonCameraSim.java @@ -0,0 +1,464 @@ +/* + * MIT License + * + * Copyright (c) PhotonVision + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package org.photonvision.simulation; + +import edu.wpi.first.cameraserver.CameraServer; +import edu.wpi.first.cscore.CvSource; +import edu.wpi.first.cscore.VideoMode.PixelFormat; +import edu.wpi.first.cscore.VideoSource.ConnectionStrategy; +import edu.wpi.first.math.MathUtil; +import edu.wpi.first.math.Pair; +import edu.wpi.first.math.geometry.Pose3d; +import edu.wpi.first.util.RuntimeLoader; +import edu.wpi.first.util.WPIUtilJNI; +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; +import org.opencv.core.Core; +import org.opencv.core.CvType; +import org.opencv.core.Mat; +import org.opencv.core.Point; +import org.opencv.core.Size; +import org.opencv.imgproc.Imgproc; +import org.photonvision.PhotonCamera; +import org.photonvision.PhotonTargetSortMode; +import org.photonvision.common.dataflow.structures.Packet; +import org.photonvision.common.networktables.NTTopicSet; +import org.photonvision.estimation.CameraTargetRelation; +import org.photonvision.estimation.OpenCVHelp; +import org.photonvision.estimation.PNPResults; +import org.photonvision.targeting.PhotonPipelineResult; +import org.photonvision.targeting.PhotonTrackedTarget; +import org.photonvision.targeting.TargetCorner; + +/** + * A handle for simulating {@link PhotonCamera} values. Processing simulated targets through this + * class will change the associated PhotonCamera's results. 
+ */ +@SuppressWarnings("unused") +public class PhotonCameraSim implements AutoCloseable { + private final PhotonCamera cam; + + NTTopicSet ts = new NTTopicSet(); + private long heartbeatCounter = 0; + + /** This simulated camera's {@link SimCameraProperties} */ + public final SimCameraProperties prop; + + private long nextNTEntryTime = WPIUtilJNI.now(); + + private double maxSightRangeMeters = Double.MAX_VALUE; + private static final double kDefaultMinAreaPx = 90; + private double minTargetAreaPercent; + private PhotonTargetSortMode sortMode = PhotonTargetSortMode.Largest; + + // video stream simulation + private final CvSource videoSimRaw; + private final Mat videoSimFrameRaw = new Mat(); + private boolean videoSimRawEnabled = true; + private final CvSource videoSimProcessed; + private final Mat videoSimFrameProcessed = new Mat(); + private boolean videoSimProcEnabled = true; + + static { + try { + var loader = + new RuntimeLoader<>( + Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); + loader.loadLibrary(); + } catch (Exception e) { + throw new RuntimeException("Failed to load native libraries!", e); + } + } + + @Override + public void close() { + videoSimRaw.close(); + videoSimFrameRaw.release(); + videoSimProcessed.close(); + videoSimFrameProcessed.release(); + } + + /** + * Constructs a handle for simulating {@link PhotonCamera} values. Processing simulated targets + * through this class will change the associated PhotonCamera's results. + * + *

This constructor's camera has a 90 deg FOV with no simulated lag! + * + *

By default, the minimum target area is 100 pixels and there is no maximum sight range. + * + * @param camera The camera to be simulated + */ + public PhotonCameraSim(PhotonCamera camera) { + this(camera, SimCameraProperties.PERFECT_90DEG()); + } + + /** + * Constructs a handle for simulating {@link PhotonCamera} values. Processing simulated targets + * through this class will change the associated PhotonCamera's results. + * + *

By default, the minimum target area is 100 pixels and there is no maximum sight range. + * + * @param camera The camera to be simulated + * @param prop Properties of this camera such as FOV and FPS + */ + public PhotonCameraSim(PhotonCamera camera, SimCameraProperties prop) { + this.cam = camera; + this.prop = prop; + setMinTargetAreaPixels(kDefaultMinAreaPx); + + videoSimRaw = + CameraServer.putVideo(camera.getName() + "-raw", prop.getResWidth(), prop.getResHeight()); + videoSimRaw.setPixelFormat(PixelFormat.kGray); + videoSimProcessed = + CameraServer.putVideo( + camera.getName() + "-processed", prop.getResWidth(), prop.getResHeight()); + + ts.removeEntries(); + ts.subTable = camera.getCameraTable(); + ts.updateEntries(); + } + + /** + * Constructs a handle for simulating {@link PhotonCamera} values. Processing simulated targets + * through this class will change the associated PhotonCamera's results. + * + * @param camera The camera to be simulated + * @param prop Properties of this camera such as FOV and FPS + * @param minTargetAreaPercent The minimum percentage(0 - 100) a detected target must take up of + * the camera's image to be processed. Match this with your contour filtering settings in the + * PhotonVision GUI. + * @param maxSightRangeMeters Maximum distance at which the target is illuminated to your camera. + * Note that minimum target area of the image is separate from this. 
+ */ + public PhotonCameraSim( + PhotonCamera camera, + SimCameraProperties prop, + double minTargetAreaPercent, + double maxSightRangeMeters) { + this(camera, prop); + this.minTargetAreaPercent = minTargetAreaPercent; + this.maxSightRangeMeters = maxSightRangeMeters; + } + + public PhotonCamera getCamera() { + return cam; + } + + public double getMinTargetAreaPercent() { + return minTargetAreaPercent; + } + + public double getMinTargetAreaPixels() { + return minTargetAreaPercent / 100.0 * prop.getResArea(); + } + + public double getMaxSightRangeMeters() { + return maxSightRangeMeters; + } + + public PhotonTargetSortMode getTargetSortMode() { + return sortMode; + } + + public CvSource getVideoSimRaw() { + return videoSimRaw; + } + + public Mat getVideoSimFrameRaw() { + return videoSimFrameRaw; + } + + /** + * Determines if this target's pose should be visible to the camera without considering its + * projected image points. Does not account for image area. + * + * @param camPose Camera's 3d pose + * @param target Vision target containing pose and shape + * @return If this vision target can be seen before image projection. + */ + public boolean canSeeTargetPose(Pose3d camPose, VisionTargetSim target) { + var rel = new CameraTargetRelation(camPose, target.getPose()); + return ( + // target translation is outside of camera's FOV + (Math.abs(rel.camToTargYaw.getDegrees()) < prop.getHorizFOV().getDegrees() / 2) + && (Math.abs(rel.camToTargPitch.getDegrees()) < prop.getVertFOV().getDegrees() / 2) + && (!target.getModel().isPlanar + || Math.abs(rel.targToCamAngle.getDegrees()) + < 90) // camera is behind planar target and it should be occluded + && (rel.camToTarg.getTranslation().getNorm() <= maxSightRangeMeters)); // target is too far + } + + /** + * Determines if all target corners are inside the camera's image. 
+ * + * @param corners The corners of the target as image points(x,y) + */ + public boolean canSeeCorners(List corners) { + // corner is outside of resolution + for (var corner : corners) { + if (MathUtil.clamp(corner.x, 0, prop.getResWidth()) != corner.x + || MathUtil.clamp(corner.y, 0, prop.getResHeight()) != corner.y) { + return false; + } + } + return true; + } + + /** + * Determine if this camera should process a new frame based on performance metrics and the time + * since the last update. This returns an Optional which is either empty if no update should occur + * or a Long of the timestamp in microseconds of when the frame which should be received by NT. If + * a timestamp is returned, the last frame update time becomes that timestamp. + * + * @return Optional long which is empty while blocked or the NT entry timestamp in microseconds if + * ready + */ + public Optional consumeNextEntryTime() { + // check if this camera is ready for another frame update + long now = WPIUtilJNI.now(); + long timestamp = -1; + int iter = 0; + // prepare next latest update + while (now >= nextNTEntryTime) { + timestamp = nextNTEntryTime; + long frameTime = (long) (prop.estMsUntilNextFrame() * 1e3); + nextNTEntryTime += frameTime; + + // if frame time is very small, avoid blocking + if (iter++ > 50) { + timestamp = now; + nextNTEntryTime = now + frameTime; + break; + } + } + // return the timestamp of the latest update + if (timestamp >= 0) return Optional.of(timestamp); + // or this camera isn't ready to process yet + else return Optional.empty(); + } + + /** + * The minimum percentage(0 - 100) a detected target must take up of the camera's image to be + * processed. + */ + public void setMinTargetAreaPercent(double areaPercent) { + this.minTargetAreaPercent = areaPercent; + } + + /** + * The minimum number of pixels a detected target must take up in the camera's image to be + * processed. 
+ */ + public void setMinTargetAreaPixels(double areaPx) { + this.minTargetAreaPercent = areaPx / prop.getResArea() * 100; + } + + /** + * Maximum distance at which the target is illuminated to your camera. Note that minimum target + * area of the image is separate from this. + */ + public void setMaxSightRange(double rangeMeters) { + this.maxSightRangeMeters = rangeMeters; + } + + /** Defines the order the targets are sorted in the pipeline result. */ + public void setTargetSortMode(PhotonTargetSortMode sortMode) { + if (sortMode != null) this.sortMode = sortMode; + } + + /** Sets whether the raw video stream simulation is enabled. */ + public void enableRawStream(boolean enabled) { + videoSimRawEnabled = enabled; + } + + /** Sets whether the processed video stream simulation is enabled. */ + public void enableProcessedStream(boolean enabled) { + videoSimProcEnabled = enabled; + } + + public PhotonPipelineResult process( + double latencyMillis, Pose3d cameraPose, List targets) { + // sort targets by distance to camera + targets = new ArrayList<>(targets); + targets.sort( + (t1, t2) -> { + double dist1 = t1.getPose().getTranslation().getDistance(cameraPose.getTranslation()); + double dist2 = t2.getPose().getTranslation().getDistance(cameraPose.getTranslation()); + if (dist1 == dist2) return 0; + return dist1 < dist2 ? 
1 : -1; + }); + // all targets visible (in FOV) + var visibleTags = new ArrayList>>(); + // all targets actually detectable to the camera + var detectableTgts = new ArrayList(); + + // reset our frame + VideoSimUtil.updateVideoProp(videoSimRaw, prop); + VideoSimUtil.updateVideoProp(videoSimProcessed, prop); + Size videoFrameSize = new Size(prop.getResWidth(), prop.getResHeight()); + Mat.zeros(videoFrameSize, CvType.CV_8UC1).assignTo(videoSimFrameRaw); + + for (var tgt : targets) { + // pose isn't visible, skip to next + if (!canSeeTargetPose(cameraPose, tgt)) continue; + + // find target's 3d corner points + // TODO: Handle spherical targets + var fieldCorners = tgt.getFieldVertices(); + + // project 3d target points into 2d image points + var targetCorners = + OpenCVHelp.projectPoints( + prop.getIntrinsics(), prop.getDistCoeffs(), cameraPose, fieldCorners); + // save visible tags for stream simulation + if (tgt.fiducialID >= 0) { + visibleTags.add(new Pair<>(tgt.fiducialID, targetCorners)); + } + // estimate pixel noise + var noisyTargetCorners = prop.estPixelNoise(targetCorners); + // find the (naive) 2d yaw/pitch + var centerPt = OpenCVHelp.getMinAreaRect(noisyTargetCorners).center; + var centerRot = prop.getPixelRot(new TargetCorner(centerPt.x, centerPt.y)); + // find contour area + double areaPercent = prop.getContourAreaPercent(noisyTargetCorners); + + // projected target can't be detected, skip to next + if (!(canSeeCorners(noisyTargetCorners) && areaPercent >= minTargetAreaPercent)) continue; + + var pnpSim = new PNPResults(); + if (tgt.fiducialID >= 0 && tgt.getFieldVertices().size() == 4) { // single AprilTag solvePNP + pnpSim = + OpenCVHelp.solvePNP_SQUARE( + prop.getIntrinsics(), + prop.getDistCoeffs(), + tgt.getModel().vertices, + noisyTargetCorners); + centerRot = + prop.getPixelRot( + OpenCVHelp.projectPoints( + prop.getIntrinsics(), + prop.getDistCoeffs(), + new Pose3d(), + List.of(pnpSim.best.getTranslation())) + .get(0)); + } + + Point[] 
minAreaRectPts = new Point[noisyTargetCorners.size()]; + OpenCVHelp.getMinAreaRect(noisyTargetCorners).points(minAreaRectPts); + + detectableTgts.add( + new PhotonTrackedTarget( + Math.toDegrees(centerRot.getZ()), + -Math.toDegrees(centerRot.getY()), + areaPercent, + Math.toDegrees(centerRot.getX()), + tgt.fiducialID, + pnpSim.best, + pnpSim.alt, + pnpSim.ambiguity, + List.of(OpenCVHelp.pointsToTargetCorners(minAreaRectPts)), + noisyTargetCorners)); + } + // render visible tags to raw video frame + if (videoSimRawEnabled) { + for (var tag : visibleTags) { + VideoSimUtil.warp16h5TagImage( + tag.getFirst(), OpenCVHelp.targetCornersToMat(tag.getSecond()), videoSimFrameRaw, true); + } + videoSimRaw.putFrame(videoSimFrameRaw); + } else videoSimRaw.setConnectionStrategy(ConnectionStrategy.kForceClose); + // draw/annotate tag detection outline on processed view + if (videoSimProcEnabled) { + Imgproc.cvtColor(videoSimFrameRaw, videoSimFrameProcessed, Imgproc.COLOR_GRAY2BGR); + for (var tgt : detectableTgts) { + if (tgt.getFiducialId() >= 0) { + VideoSimUtil.drawTagDetection( + tgt.getFiducialId(), + OpenCVHelp.targetCornersToMat(tgt.getDetectedCorners()), + videoSimFrameProcessed); + } + } + videoSimProcessed.putFrame(videoSimFrameProcessed); + } else videoSimProcessed.setConnectionStrategy(ConnectionStrategy.kForceClose); + + // put this simulated data to NT + if (sortMode != null) { + detectableTgts.sort(sortMode.getComparator()); + } + return new PhotonPipelineResult(latencyMillis, detectableTgts); + } + + /** + * Simulate one processed frame of vision data, putting one result to NT at current timestamp. + * Image capture time is assumed be (current time - latency). + * + * @param result The pipeline result to submit + */ + public void submitProcessedFrame(PhotonPipelineResult result) { + submitProcessedFrame(result, WPIUtilJNI.now()); + } + + /** + * Simulate one processed frame of vision data, putting one result to NT. 
Image capture timestamp + * overrides {@link PhotonPipelineResult#getTimestampSeconds() getTimestampSeconds()} for more + * precise latency simulation. + * + * @param result The pipeline result to submit + * @param receiveTimestamp The (sim) timestamp when this result was read by NT in microseconds + */ + public void submitProcessedFrame(PhotonPipelineResult result, long receiveTimestamp) { + ts.latencyMillisEntry.set(result.getLatencyMillis(), receiveTimestamp); + + var newPacket = new Packet(result.getPacketSize()); + result.populatePacket(newPacket); + ts.rawBytesEntry.set(newPacket.getData(), receiveTimestamp); + + boolean hasTargets = result.hasTargets(); + ts.hasTargetEntry.set(hasTargets, receiveTimestamp); + if (!hasTargets) { + ts.targetPitchEntry.set(0.0, receiveTimestamp); + ts.targetYawEntry.set(0.0, receiveTimestamp); + ts.targetAreaEntry.set(0.0, receiveTimestamp); + ts.targetPoseEntry.set(new double[] {0.0, 0.0, 0.0}, receiveTimestamp); + ts.targetSkewEntry.set(0.0, receiveTimestamp); + } else { + var bestTarget = result.getBestTarget(); + + ts.targetPitchEntry.set(bestTarget.getPitch(), receiveTimestamp); + ts.targetYawEntry.set(bestTarget.getYaw(), receiveTimestamp); + ts.targetAreaEntry.set(bestTarget.getArea(), receiveTimestamp); + ts.targetSkewEntry.set(bestTarget.getSkew(), receiveTimestamp); + + var transform = bestTarget.getBestCameraToTarget(); + double[] poseData = { + transform.getX(), transform.getY(), transform.getRotation().toRotation2d().getDegrees() + }; + ts.targetPoseEntry.set(poseData, receiveTimestamp); + } + + ts.heartbeatPublisher.set(heartbeatCounter++, receiveTimestamp); + } +} diff --git a/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java b/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java new file mode 100644 index 0000000000..336c7d5029 --- /dev/null +++ b/photon-lib/src/main/java/org/photonvision/simulation/SimCameraProperties.java @@ -0,0 +1,601 @@ +/* + * MIT 
License + * + * Copyright (c) PhotonVision + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package org.photonvision.simulation; + +import com.fasterxml.jackson.databind.ObjectMapper; +import edu.wpi.first.math.Matrix; +import edu.wpi.first.math.Nat; +import edu.wpi.first.math.VecBuilder; +import edu.wpi.first.math.Vector; +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Rotation3d; +import edu.wpi.first.math.numbers.*; +import edu.wpi.first.wpilibj.DriverStation; +import java.io.IOException; +import java.nio.file.Path; +import java.util.List; +import java.util.Random; +import java.util.stream.Collectors; +import org.photonvision.estimation.OpenCVHelp; +import org.photonvision.targeting.TargetCorner; + +/** + * Calibration and performance values for this camera. + * + *

The resolution will affect the accuracy of projected (3d to 2d) target corners and similarly + * the severity of image noise on estimation (2d to 3d).

The camera intrinsics and distortion coefficients describe the results of calibration, and how + * to map between 3d field points and 2d image points. + * + *

The performance values (framerate/exposure time, latency) determine how often results should + * be updated and with how much latency in simulation. High exposure time causes motion blur which + * can inhibit target detection while moving. Note that latency estimation does not account for + * network latency and the latency reported will always be perfect. + */ +public class SimCameraProperties { + private final Random rand = new Random(); + // calibration + private int resWidth; + private int resHeight; + private Matrix camIntrinsics; + private Matrix distCoeffs; + private double avgErrorPx; + private double errorStdDevPx; + // performance + private double frameSpeedMs = 0; + private double exposureTimeMs = 0; + private double avgLatencyMs = 0; + private double latencyStdDevMs = 0; + + /** Default constructor which is the same as {@link #PERFECT_90DEG} */ + public SimCameraProperties() { + setCalibration(960, 720, Rotation2d.fromDegrees(90)); + } + + /** + * Reads camera properties from a photonvision config.json file. This is only the + * resolution, camera intrinsics, distortion coefficients, and average/std. dev. pixel error. + * Other camera properties must be set. + * + * @param path Path to the config.json file + * @param width The width of the desired resolution in the JSON + * @param height The height of the desired resolution in the JSON + * @throws IOException If reading the JSON fails, either from an invalid path or a missing/invalid + * calibrated resolution. + */ + public SimCameraProperties(String path, int width, int height) throws IOException { + this(Path.of(path), width, height); + } + + /** + * Reads camera properties from a photonvision config.json file. This is only the + * resolution, camera intrinsics, distortion coefficients, and average/std. dev. pixel error. + * Other camera properties must be set. 
+ * + * @param path Path to the config.json file + * @param width The width of the desired resolution in the JSON + * @param height The height of the desired resolution in the JSON + * @throws IOException If reading the JSON fails, either from an invalid path or a missing/invalid + * calibrated resolution. + */ + public SimCameraProperties(Path path, int width, int height) throws IOException { + var mapper = new ObjectMapper(); + var json = mapper.readTree(path.toFile()); + json = json.get("calibrations"); + boolean success = false; + try { + for (int i = 0; i < json.size() && !success; i++) { + // check if this calibration entry is our desired resolution + var calib = json.get(i); + int jsonWidth = calib.get("resolution").get("width").asInt(); + int jsonHeight = calib.get("resolution").get("height").asInt(); + if (jsonWidth != width || jsonHeight != height) continue; + // get the relevant calibration values + var jsonIntrinsicsNode = calib.get("cameraIntrinsics").get("data"); + double[] jsonIntrinsics = new double[jsonIntrinsicsNode.size()]; + for (int j = 0; j < jsonIntrinsicsNode.size(); j++) { + jsonIntrinsics[j] = jsonIntrinsicsNode.get(j).asDouble(); + } + var jsonDistortNode = calib.get("cameraExtrinsics").get("data"); + double[] jsonDistortion = new double[jsonDistortNode.size()]; + for (int j = 0; j < jsonDistortNode.size(); j++) { + jsonDistortion[j] = jsonDistortNode.get(j).asDouble(); + } + var jsonViewErrors = calib.get("perViewErrors"); + double jsonAvgError = 0; + for (int j = 0; j < jsonViewErrors.size(); j++) { + jsonAvgError += jsonViewErrors.get(j).asDouble(); + } + jsonAvgError /= jsonViewErrors.size(); + double jsonErrorStdDev = calib.get("standardDeviation").asDouble(); + // assign the read JSON values to this CameraProperties + setCalibration( + jsonWidth, + jsonHeight, + Matrix.mat(Nat.N3(), Nat.N3()).fill(jsonIntrinsics), + Matrix.mat(Nat.N5(), Nat.N1()).fill(jsonDistortion)); + setCalibError(jsonAvgError, jsonErrorStdDev); + success = 
true; + } + } catch (Exception e) { + throw new IOException("Invalid calibration JSON"); + } + if (!success) throw new IOException("Requested resolution not found in calibration"); + } + + public void setRandomSeed(long seed) { + rand.setSeed(seed); + } + + public void setCalibration(int resWidth, int resHeight, Rotation2d fovDiag) { + double s = Math.sqrt(resWidth * resWidth + resHeight * resHeight); + var fovWidth = new Rotation2d(fovDiag.getRadians() * (resWidth / s)); + var fovHeight = new Rotation2d(fovDiag.getRadians() * (resHeight / s)); + + double maxFovDeg = Math.max(fovWidth.getDegrees(), fovHeight.getDegrees()); + if (maxFovDeg > 179) { + double scale = 179.0 / maxFovDeg; + fovWidth = new Rotation2d(fovWidth.getRadians() * scale); + fovHeight = new Rotation2d(fovHeight.getRadians() * scale); + DriverStation.reportError( + "Requested FOV width/height too large! Scaling below 180 degrees...", false); + } + + // assume no distortion + var distCoeff = VecBuilder.fill(0, 0, 0, 0, 0); + + // assume centered principal point (pixels) + double cx = resWidth / 2.0; + double cy = resHeight / 2.0; + + // use given fov to determine focal point (pixels) + double fx = cx / Math.tan(fovWidth.getRadians() / 2.0); + double fy = cy / Math.tan(fovHeight.getRadians() / 2.0); + + // create camera intrinsics matrix + var camIntrinsics = Matrix.mat(Nat.N3(), Nat.N3()).fill(fx, 0, cx, 0, fy, cy, 0, 0, 1); + setCalibration(resWidth, resHeight, camIntrinsics, distCoeff); + } + + public void setCalibration( + int resWidth, int resHeight, Matrix camIntrinsics, Matrix distCoeffs) { + this.resWidth = resWidth; + this.resHeight = resHeight; + this.camIntrinsics = camIntrinsics; + this.distCoeffs = distCoeffs; + } + + public void setCameraIntrinsics(Matrix camIntrinsics) { + this.camIntrinsics = camIntrinsics; + } + + public void setDistortionCoeffs(Matrix distCoeffs) { + this.distCoeffs = distCoeffs; + } + + public void setCalibError(double avgErrorPx, double errorStdDevPx) { + 
this.avgErrorPx = avgErrorPx; + this.errorStdDevPx = errorStdDevPx; + } + + /** + * @param fps The average frames per second the camera should process at. Exposure time limits + * FPS if set! + */ + public void setFPS(double fps) { + frameSpeedMs = Math.max(1000.0 / fps, exposureTimeMs); + } + + /** + * @param exposureTimeMs The amount of time the "shutter" is open for one frame. Affects motion + * blur. Frame speed(from FPS) is limited to this! + */ + public void setExposureTimeMs(double exposureTimeMs) { + this.exposureTimeMs = exposureTimeMs; + frameSpeedMs = Math.max(frameSpeedMs, exposureTimeMs); + } + + /** + * @param avgLatencyMs The average latency (from image capture to data published) in milliseconds + * a frame should have + */ + public void setAvgLatencyMs(double avgLatencyMs) { + this.avgLatencyMs = avgLatencyMs; + } + + /** + * @param latencyStdDevMs The standard deviation in milliseconds of the latency + */ + public void setLatencyStdDevMs(double latencyStdDevMs) { + this.latencyStdDevMs = latencyStdDevMs; + } + + public int getResWidth() { + return resWidth; + } + + public int getResHeight() { + return resHeight; + } + + public int getResArea() { + return resWidth * resHeight; + } + + public Matrix getIntrinsics() { + return camIntrinsics.copy(); + } + + public Vector getDistCoeffs() { + return new Vector<>(distCoeffs); + } + + public double getFPS() { + return 1000.0 / frameSpeedMs; + } + + public double getFrameSpeedMs() { + return frameSpeedMs; + } + + public double getExposureTimeMs() { + return exposureTimeMs; + } + + public double getAvgLatencyMs() { + return avgLatencyMs; + } + + public double getLatencyStdDevMs() { + return latencyStdDevMs; + } + + public SimCameraProperties copy() { + var newProp = new SimCameraProperties(); + newProp.setCalibration(resWidth, resHeight, camIntrinsics, distCoeffs); + newProp.setCalibError(avgErrorPx, errorStdDevPx); + newProp.setFPS(getFPS()); + newProp.setExposureTimeMs(exposureTimeMs); + 
newProp.setAvgLatencyMs(avgLatencyMs); + newProp.setLatencyStdDevMs(latencyStdDevMs); + return newProp; + } + + public List undistort(List points) { + return OpenCVHelp.undistortPoints(camIntrinsics, distCoeffs, points); + } + + /** + * The percentage(0 - 100) of this camera's resolution the contour takes up in pixels of the + * image. + * + * @param corners Corners of the contour + */ + public double getContourAreaPercent(List corners) { + return OpenCVHelp.getContourAreaPx(corners) / getResArea() * 100; + } + + /** The yaw from the principal point of this camera to the pixel x value. Positive values left. */ + public Rotation2d getPixelYaw(double pixelX) { + double fx = camIntrinsics.get(0, 0); + // account for principal point not being centered + double cx = camIntrinsics.get(0, 2); + double xOffset = cx - pixelX; + return new Rotation2d(fx, xOffset); + } + + /** + * The pitch from the principal point of this camera to the pixel y value. Pitch is positive down. + * + *

Note that this angle is naively computed and may be incorrect. See {@link + * #getCorrectedPixelRot(TargetCorner)}. + */ + public Rotation2d getPixelPitch(double pixelY) { + double fy = camIntrinsics.get(1, 1); + // account for principal point not being centered + double cy = camIntrinsics.get(1, 2); + double yOffset = cy - pixelY; + return new Rotation2d(fy, -yOffset); + } + + /** + * Finds the yaw and pitch to the given image point. Yaw is positive left, and pitch is positive + * down. + * + *

Note that pitch is naively computed and may be incorrect. See {@link + * #getCorrectedPixelRot(TargetCorner)}. + */ + public Rotation3d getPixelRot(TargetCorner point) { + return new Rotation3d( + 0, getPixelPitch(point.y).getRadians(), getPixelYaw(point.x).getRadians()); + } + + /** + * Gives the yaw and pitch of the line intersecting the camera lens and the given pixel + * coordinates on the sensor. Yaw is positive left, and pitch positive down. + * + *

The pitch traditionally calculated from pixel offsets does not correctly account for non-zero + * values of yaw because of perspective distortion (not to be confused with lens distortion)-- for + * example, the pitch angle is naively calculated as: + * + *

pitch = arctan(pixel y offset / focal length y)
+ * + * However, using focal length as a side of the associated right triangle is not correct when the + * pixel x value is not 0, because the distance from this pixel (projected on the x-axis) to the + * camera lens increases. Projecting a line back out of the camera with these naive angles will + * not intersect the 3d point that was originally projected into this 2d pixel. Instead, this + * length should be: + * + *
focal length y ⟶ (focal length y / cos(arctan(pixel x offset / focal length x)))
+ * + * @return Rotation3d with yaw and pitch of the line projected out of the camera from the given + * pixel (roll is zero). + */ + public Rotation3d getCorrectedPixelRot(TargetCorner point) { + double fx = camIntrinsics.get(0, 0); + double cx = camIntrinsics.get(0, 2); + double xOffset = cx - point.x; + + double fy = camIntrinsics.get(1, 1); + double cy = camIntrinsics.get(1, 2); + double yOffset = cy - point.y; + + // calculate yaw normally + var yaw = new Rotation2d(fx, xOffset); + // correct pitch based on yaw + var pitch = new Rotation2d(fy / Math.cos(Math.atan(xOffset / fx)), -yOffset); + + return new Rotation3d(0, pitch.getRadians(), yaw.getRadians()); + } + + public Rotation2d getHorizFOV() { + // sum of FOV left and right principal point + var left = getPixelYaw(0); + var right = getPixelYaw(resWidth); + return left.minus(right); + } + + public Rotation2d getVertFOV() { + // sum of FOV above and below principal point + var above = getPixelPitch(0); + var below = getPixelPitch(resHeight); + return below.minus(above); + } + + public Rotation2d getDiagFOV() { + return new Rotation2d(Math.hypot(getHorizFOV().getRadians(), getVertFOV().getRadians())); + } + + /** Width:height */ + public double getAspectRatio() { + return (double) resWidth / resHeight; + } + + /** + * Returns these pixel points as fractions of a 1x1 square image. This means the camera's aspect + * ratio and resolution will be used, and the points' x and y may not reach all portions(e.g. a + * wide aspect ratio means some of the top and bottom of the square image is unreachable). + * + * @param points Pixel points on this camera's image + * @return Points mapped to an image of 1x1 resolution + */ + public List getPixelFraction(List points) { + double resLarge = getAspectRatio() > 1 ? 
resWidth : resHeight; + + return points.stream() + .map( + p -> { + // offset to account for aspect ratio + return new TargetCorner( + (p.x + (resLarge - resWidth) / 2.0) / resLarge, + (p.y + (resLarge - resHeight) / 2.0) / resLarge); + }) + .collect(Collectors.toList()); + } + + /** Returns these points after applying this camera's estimated noise. */ + public List estPixelNoise(List points) { + if (avgErrorPx == 0 && errorStdDevPx == 0) return points; + + return points.stream() + .map( + p -> { + // error pixels in random direction + double error = avgErrorPx + rand.nextGaussian() * errorStdDevPx; + double errorAngle = rand.nextDouble() * 2 * Math.PI - Math.PI; + return new TargetCorner( + p.x + error * Math.cos(errorAngle), p.y + error * Math.sin(errorAngle)); + }) + .collect(Collectors.toList()); + } + + /** + * @return Noisy estimation of a frame's processing latency in milliseconds + */ + public double estLatencyMs() { + return Math.max(avgLatencyMs + rand.nextGaussian() * latencyStdDevMs, 0); + } + + /** + * @return Estimate how long until the next frame should be processed in milliseconds + */ + public double estMsUntilNextFrame() { + // exceptional processing latency blocks the next frame + return frameSpeedMs + Math.max(0, estLatencyMs() - frameSpeedMs); + } + + // pre-calibrated example cameras + + /** 960x720 resolution, 90 degree FOV, "perfect" lagless camera */ + public static SimCameraProperties PERFECT_90DEG() { + return new SimCameraProperties(); + } + + public static SimCameraProperties PI4_LIFECAM_320_240() { + var prop = new SimCameraProperties(); + prop.setCalibration( + 320, + 240, + Matrix.mat(Nat.N3(), Nat.N3()) + .fill( // intrinsic + 328.2733242048587, + 0.0, + 164.8190261141906, + 0.0, + 318.0609794305216, + 123.8633838438093, + 0.0, + 0.0, + 1.0), + VecBuilder.fill( // distort + 0.09957946553445934, + -0.9166265114485799, + 0.0019519890627236526, + -0.0036071725380870333, + 1.5627234622420942)); + prop.setCalibError(0.21, 0.0124); + 
prop.setFPS(30); + prop.setAvgLatencyMs(30); + prop.setLatencyStdDevMs(10); + return prop; + } + + public static SimCameraProperties PI4_LIFECAM_640_480() { + var prop = new SimCameraProperties(); + prop.setCalibration( + 640, + 480, + Matrix.mat(Nat.N3(), Nat.N3()) + .fill( // intrinsic + 669.1428078983059, + 0.0, + 322.53377249329213, + 0.0, + 646.9843137061716, + 241.26567383784163, + 0.0, + 0.0, + 1.0), + VecBuilder.fill( // distort + 0.12788470750464645, + -1.2350335805796528, + 0.0024990767286192732, + -0.0026958287600230705, + 2.2951386729115537)); + prop.setCalibError(0.26, 0.046); + prop.setFPS(15); + prop.setAvgLatencyMs(65); + prop.setLatencyStdDevMs(15); + return prop; + } + + public static SimCameraProperties LL2_640_480() { + var prop = new SimCameraProperties(); + prop.setCalibration( + 640, + 480, + Matrix.mat(Nat.N3(), Nat.N3()) + .fill( // intrinsic + 511.22843367007755, + 0.0, + 323.62049380211096, + 0.0, + 514.5452336723849, + 261.8827920543568, + 0.0, + 0.0, + 1.0), + VecBuilder.fill( // distort + 0.1917469998873756, + -0.5142936883324216, + 0.012461562046896614, + 0.0014084973492408186, + 0.35160648971214437)); + prop.setCalibError(0.25, 0.05); + prop.setFPS(15); + prop.setAvgLatencyMs(35); + prop.setLatencyStdDevMs(8); + return prop; + } + + public static SimCameraProperties LL2_960_720() { + var prop = new SimCameraProperties(); + prop.setCalibration( + 960, + 720, + Matrix.mat(Nat.N3(), Nat.N3()) + .fill( // intrinsic + 769.6873145148892, + 0.0, + 486.1096609458122, + 0.0, + 773.8164483705323, + 384.66071662358354, + 0.0, + 0.0, + 1.0), + VecBuilder.fill( // distort + 0.189462064814501, + -0.49903003669627627, + 0.007468423590519429, + 0.002496885298683693, + 0.3443122090208624)); + prop.setCalibError(0.35, 0.10); + prop.setFPS(10); + prop.setAvgLatencyMs(50); + prop.setLatencyStdDevMs(15); + return prop; + } + + public static SimCameraProperties LL2_1280_720() { + var prop = new SimCameraProperties(); + prop.setCalibration( + 1280, + 720, 
+ Matrix.mat(Nat.N3(), Nat.N3()) + .fill( // intrinsic + 1011.3749416937393, + 0.0, + 645.4955139388737, + 0.0, + 1008.5391755084075, + 508.32877656020196, + 0.0, + 0.0, + 1.0), + VecBuilder.fill( // distort + 0.13730101577061535, + -0.2904345656989261, + 8.32475714507539E-4, + -3.694397782014239E-4, + 0.09487962227027584)); + prop.setCalibError(0.37, 0.06); + prop.setFPS(7); + prop.setAvgLatencyMs(60); + prop.setLatencyStdDevMs(20); + return prop; + } +} diff --git a/photon-lib/src/main/java/org/photonvision/SimPhotonCamera.java b/photon-lib/src/main/java/org/photonvision/simulation/SimPhotonCamera.java similarity index 97% rename from photon-lib/src/main/java/org/photonvision/SimPhotonCamera.java rename to photon-lib/src/main/java/org/photonvision/simulation/SimPhotonCamera.java index a0ce8f37b2..9f301486a6 100644 --- a/photon-lib/src/main/java/org/photonvision/SimPhotonCamera.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/SimPhotonCamera.java @@ -22,7 +22,7 @@ * SOFTWARE. 
*/ -package org.photonvision; +package org.photonvision.simulation; import edu.wpi.first.math.Matrix; import edu.wpi.first.math.numbers.N1; @@ -31,11 +31,17 @@ import edu.wpi.first.networktables.NetworkTableInstance; import java.util.Arrays; import java.util.List; +import org.photonvision.PhotonCamera; +import org.photonvision.PhotonTargetSortMode; import org.photonvision.common.dataflow.structures.Packet; import org.photonvision.common.networktables.NTTopicSet; import org.photonvision.targeting.PhotonPipelineResult; import org.photonvision.targeting.PhotonTrackedTarget; +/** + * @deprecated Use {@link PhotonCameraSim} instead + */ +@Deprecated @SuppressWarnings("unused") public class SimPhotonCamera { NTTopicSet ts = new NTTopicSet(); diff --git a/photon-lib/src/main/java/org/photonvision/SimVisionSystem.java b/photon-lib/src/main/java/org/photonvision/simulation/SimVisionSystem.java similarity index 99% rename from photon-lib/src/main/java/org/photonvision/SimVisionSystem.java rename to photon-lib/src/main/java/org/photonvision/simulation/SimVisionSystem.java index 4ae091475e..52756f4727 100644 --- a/photon-lib/src/main/java/org/photonvision/SimVisionSystem.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/SimVisionSystem.java @@ -22,7 +22,7 @@ * SOFTWARE. 
*/ -package org.photonvision; +package org.photonvision.simulation; import edu.wpi.first.apriltag.AprilTag; import edu.wpi.first.apriltag.AprilTagFieldLayout; @@ -41,6 +41,10 @@ import org.photonvision.targeting.PhotonTrackedTarget; import org.photonvision.targeting.TargetCorner; +/** + * @deprecated Use {@link VisionSystemSim} instead + */ +@Deprecated public class SimVisionSystem { SimPhotonCamera cam; diff --git a/photon-lib/src/main/java/org/photonvision/SimVisionTarget.java b/photon-lib/src/main/java/org/photonvision/simulation/SimVisionTarget.java similarity index 95% rename from photon-lib/src/main/java/org/photonvision/SimVisionTarget.java rename to photon-lib/src/main/java/org/photonvision/simulation/SimVisionTarget.java index 2b5c2ceab7..0d66154d9f 100644 --- a/photon-lib/src/main/java/org/photonvision/SimVisionTarget.java +++ b/photon-lib/src/main/java/org/photonvision/simulation/SimVisionTarget.java @@ -22,10 +22,14 @@ * SOFTWARE. */ -package org.photonvision; +package org.photonvision.simulation; import edu.wpi.first.math.geometry.Pose3d; +/** + * @deprecated Use {@link VisionTargetSim} instead + */ +@Deprecated public class SimVisionTarget { Pose3d targetPose; double targetWidthMeters; diff --git a/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java b/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java new file mode 100644 index 0000000000..cec341b4d8 --- /dev/null +++ b/photon-lib/src/main/java/org/photonvision/simulation/VideoSimUtil.java @@ -0,0 +1,337 @@ +/* + * MIT License + * + * Copyright (c) PhotonVision + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * 
furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package org.photonvision.simulation; + +import edu.wpi.first.cscore.CvSource; +import edu.wpi.first.util.RuntimeLoader; +import java.awt.image.BufferedImage; +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.imageio.ImageIO; +import org.opencv.core.Core; +import org.opencv.core.CvType; +import org.opencv.core.Mat; +import org.opencv.core.MatOfPoint; +import org.opencv.core.MatOfPoint2f; +import org.opencv.core.Point; +import org.opencv.core.Rect; +import org.opencv.core.Scalar; +import org.opencv.core.Size; +import org.opencv.imgcodecs.Imgcodecs; +import org.opencv.imgproc.Imgproc; +import org.photonvision.estimation.OpenCVHelp; + +public class VideoSimUtil { + public static final String kLocalTagImagesPath = "./src/main/resources/images/apriltags/"; + public static final String kResourceTagImagesPath = "/images/apriltags/"; + public static final String kTag16h5ImageName = "tag16_05_00000"; + public static final int kNumTags16h5 = 30; + + // All 16h5 tag images + private static final Map kTag16h5Images = new HashMap<>(); + // Points corresponding to marker(black square) corners of 8x8 16h5 tag images + public static final Point[] kTag16h5MarkerPts; + + static { + try { + var loader = + new 
RuntimeLoader<>( + Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); + loader.loadLibrary(); + } catch (Exception e) { + throw new RuntimeException("Failed to load native libraries!", e); + } + + // create Mats of 8x8 apriltag images + for (int i = 0; i < VideoSimUtil.kNumTags16h5; i++) { + Mat tagImage = VideoSimUtil.get16h5TagImage(i); + kTag16h5Images.put(i, tagImage); + } + + kTag16h5MarkerPts = get16h5MarkerPts(); + } + + /** Updates the properties of this CvSource video stream with the given camera properties. */ + public static void updateVideoProp(CvSource video, SimCameraProperties prop) { + video.setResolution(prop.getResWidth(), prop.getResHeight()); + video.setFPS((int) prop.getFPS()); + } + + /** + * Gets the points representing the corners of this image. Because image pixels are accessed + * through a Mat, the point (0,0) actually represents the center of the top-left pixel and not the + * actual top-left corner. + * + * @param size Size of image + */ + public static Point[] getImageCorners(Size size) { + return new Point[] { + new Point(-0.5, -0.5), + new Point(size.width - 0.5, -0.5), + new Point(size.width - 0.5, size.height - 0.5), + new Point(-0.5, size.height - 0.5) + }; + } + + /** + * Gets the 8x8 (grayscale) image of a specific 16h5 AprilTag. + * + * @param id The fiducial id of the desired tag + */ + public static Mat get16h5TagImage(int id) { + String name = kTag16h5ImageName; + String idString = String.valueOf(id); + name = name.substring(0, name.length() - idString.length()) + idString; + + var resource = VideoSimUtil.class.getResource(kResourceTagImagesPath + name + ".png"); + // local IDE tests + String path = kLocalTagImagesPath + name + ".png"; + // gradle tests + if (resource != null) { + path = resource.getPath(); + + // TODO why did we have this previously? 
+ // if (path.startsWith("/")) path = path.substring(1); + } + Mat result = new Mat(); + if (!path.startsWith("file")) result = Imgcodecs.imread(path, Imgcodecs.IMREAD_GRAYSCALE); + // reading jar file + if (result.empty()) { + BufferedImage buf; + try { + buf = ImageIO.read(resource); + } catch (IOException e) { + System.err.println("Couldn't read tag image!"); + return result; + } + + result = new Mat(buf.getHeight(), buf.getWidth(), CvType.CV_8UC1); + + byte[] px = new byte[1]; + for (int y = 0; y < result.height(); y++) { + for (int x = 0; x < result.width(); x++) { + px[0] = (byte) (buf.getRGB(x, y) & 0xFF); + result.put(y, x, px); + } + } + } + return result; + } + + /** Gets the points representing the marker(black square) corners. */ + public static Point[] get16h5MarkerPts() { + return get16h5MarkerPts(1); + } + + /** + * Gets the points representing the marker(black square) corners. + * + * @param scale The scale of the tag image (8*scale x 8*scale image) + */ + public static Point[] get16h5MarkerPts(int scale) { + var roi16h5 = new Rect(new Point(1, 1), new Size(6, 6)); + roi16h5.x *= scale; + roi16h5.y *= scale; + roi16h5.width *= scale; + roi16h5.height *= scale; + var pts = getImageCorners(roi16h5.size()); + for (int i = 0; i < pts.length; i++) { + var pt = pts[i]; + pts[i] = new Point(roi16h5.tl().x + pt.x, roi16h5.tl().y + pt.y); + } + return pts; + } + + /** + * Warps the image of a specific 16h5 AprilTag onto the destination image at the given points. + * + * @param tagId The id of the specific tag to warp onto the destination image + * @param dstPoints Points(4) in destination image where the tag marker(black square) corners + * should be warped onto. + * @param destination The destination image to place the warped tag image onto. + * @param antialiasing If antialiasing should be performed by automatically + * supersampling/interpolating the warped image. 
This should be used if better stream quality + * is desired or target detection is being done on the stream, but can hurt performance. + * @see OpenCVHelp#targetCornersToMat(org.photonvision.targeting.TargetCorner...) + */ + public static void warp16h5TagImage( + int tagId, MatOfPoint2f dstPoints, Mat destination, boolean antialiasing) { + Mat tagImage = kTag16h5Images.get(tagId); + if (tagImage == null || tagImage.empty()) return; + var tagPoints = new MatOfPoint2f(kTag16h5MarkerPts); + // points of tag image corners + var tagImageCorners = new MatOfPoint2f(getImageCorners(tagImage.size())); + // the rectangle describing the rectangle-of-interest(ROI) + var boundingRect = Imgproc.boundingRect(dstPoints); + // find the perspective transform from the tag image to the warped destination points + Mat perspecTrf = Imgproc.getPerspectiveTransform(tagPoints, dstPoints); + // check extreme image corners after transform to check if we need to expand bounding rect + var extremeCorners = new MatOfPoint2f(); + Core.perspectiveTransform(tagImageCorners, extremeCorners, perspecTrf); + // dilate ROI to fit full tag + boundingRect = Imgproc.boundingRect(extremeCorners); + + // adjust interpolation strategy based on size of warped tag compared to tag image + var warpedContourArea = Imgproc.contourArea(extremeCorners); + double warpedTagUpscale = Math.sqrt(warpedContourArea) / Math.sqrt(tagImage.size().area()); + int warpStrategy = Imgproc.INTER_NEAREST; + // automatically determine the best supersampling of warped image and scale of tag image + /* + (warpPerspective does not properly resample, so this is used to avoid aliasing in the + warped image. Supersampling is used when the warped tag is small, but is very slow + when the warped tag is large-- scaling the tag image up and using linear interpolation + instead can be performant while still effectively antialiasing. Some combination of these + two can be used in between those extremes.) 
+ + TODO: Simplify magic numbers to one or two variables, or use a more proper approach? + */ + int supersampling = 6; + supersampling = (int) Math.ceil(supersampling / warpedTagUpscale); + supersampling = Math.max(Math.min(supersampling, 8), 1); + + Mat scaledTagImage = new Mat(); + if (warpedTagUpscale > 2.0) { + warpStrategy = Imgproc.INTER_LINEAR; + int scaleFactor = (int) (warpedTagUpscale / 3.0) + 2; + scaleFactor = Math.max(Math.min(scaleFactor, 40), 1); + scaleFactor *= supersampling; + Imgproc.resize( + tagImage, scaledTagImage, new Size(), scaleFactor, scaleFactor, Imgproc.INTER_NEAREST); + tagPoints.fromArray(get16h5MarkerPts(scaleFactor)); + } else tagImage.assignTo(scaledTagImage); + + // constrain the bounding rect inside of the destination image + boundingRect.x -= 1; + boundingRect.y -= 1; + boundingRect.width += 2; + boundingRect.height += 2; + if (boundingRect.x < 0) { + boundingRect.width += boundingRect.x; + boundingRect.x = 0; + } + if (boundingRect.y < 0) { + boundingRect.height += boundingRect.y; + boundingRect.y = 0; + } + boundingRect.width = Math.min(destination.width() - boundingRect.x, boundingRect.width); + boundingRect.height = Math.min(destination.height() - boundingRect.y, boundingRect.height); + if (boundingRect.width <= 0 || boundingRect.height <= 0) return; + + // upscale if supersampling + Mat scaledDstPts = new Mat(); + if (supersampling > 1) { + Core.multiply(dstPoints, new Scalar(supersampling, supersampling), scaledDstPts); + boundingRect.x *= supersampling; + boundingRect.y *= supersampling; + boundingRect.width *= supersampling; + boundingRect.height *= supersampling; + } else dstPoints.assignTo(scaledDstPts); + + // update transform relative to expanded, scaled bounding rect + Core.subtract(scaledDstPts, new Scalar(boundingRect.tl().x, boundingRect.tl().y), scaledDstPts); + perspecTrf = Imgproc.getPerspectiveTransform(tagPoints, scaledDstPts); + + // warp (scaled) tag image onto (scaled) ROI image representing the portion 
of + // the destination image encapsulated by boundingRect + Mat tempROI = new Mat(); + Imgproc.warpPerspective(scaledTagImage, tempROI, perspecTrf, boundingRect.size(), warpStrategy); + + // downscale ROI with interpolation if supersampling + if (supersampling > 1) { + boundingRect.x /= supersampling; + boundingRect.y /= supersampling; + boundingRect.width /= supersampling; + boundingRect.height /= supersampling; + Imgproc.resize(tempROI, tempROI, boundingRect.size(), 0, 0, Imgproc.INTER_AREA); + } + + // we want to copy ONLY the transformed tag to the result image, not the entire bounding rect + // using a mask only copies the source pixels which have an associated non-zero value in the + // mask + Mat tempMask = Mat.zeros(tempROI.size(), CvType.CV_8UC1); + Core.subtract( + extremeCorners, new Scalar(boundingRect.tl().x, boundingRect.tl().y), extremeCorners); + Point tempCenter = new Point(); + tempCenter.x = + Arrays.stream(extremeCorners.toArray()).mapToDouble(p -> p.x).average().getAsDouble(); + tempCenter.y = + Arrays.stream(extremeCorners.toArray()).mapToDouble(p -> p.y).average().getAsDouble(); + // dilate tag corners + Arrays.stream(extremeCorners.toArray()) + .forEach( + p -> { + double xdiff = p.x - tempCenter.x; + double ydiff = p.y - tempCenter.y; + xdiff += 1 * Math.signum(xdiff); + ydiff += 1 * Math.signum(ydiff); + new Point(tempCenter.x + xdiff, tempCenter.y + ydiff); + }); + // (make inside of tag completely white in mask) + Imgproc.fillConvexPoly(tempMask, new MatOfPoint(extremeCorners.toArray()), new Scalar(255)); + + // copy transformed tag onto result image + tempROI.copyTo(destination.submat(boundingRect), tempMask); + } + + /** + * Draws a contour around the given points and text of the id onto the destination image. + * + * @param id Fiducial ID number to draw + * @param dstPoints Points representing the four corners of the tag marker(black square) in the + * destination image. + * @param destination The destination image to draw onto. 
The image should be in the BGR color + * space. + */ + public static void drawTagDetection(int id, MatOfPoint2f dstPoints, Mat destination) { + var dstPointsd = new MatOfPoint(dstPoints.toArray()); + double scaleX = destination.width() / 640.0; + double scaleY = destination.height() / 480.0; + double minScale = Math.min(scaleX, scaleY); + int thickness = (int) (1 * minScale); + // for(var pt : dstPoints.toArray()) { + // Imgproc.circle(destination, pt, 4, new Scalar(255), 1, Imgproc.LINE_AA); + // } + // Imgproc.rectangle(destination, extremeRect, new Scalar(255), 1, Imgproc.LINE_AA); + // Imgproc.rectangle(destination, Imgproc.boundingRect(dstPoints), new Scalar(255), 1, + // Imgproc.LINE_AA); + Imgproc.polylines( + destination, List.of(dstPointsd), true, new Scalar(0, 0, 255), thickness, Imgproc.LINE_AA); + var textPt = Imgproc.boundingRect(dstPoints).tl(); + textPt.x -= 10.0 * scaleX; + textPt.y -= 12.0 * scaleY; + Imgproc.putText( + destination, + String.valueOf(id), + textPt, + Imgproc.FONT_HERSHEY_PLAIN, + 1.5 * minScale, + new Scalar(0, 0, 255), + thickness, + Imgproc.LINE_AA); + } +} diff --git a/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java b/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java new file mode 100644 index 0000000000..6a1c36cf3d --- /dev/null +++ b/photon-lib/src/main/java/org/photonvision/simulation/VisionSystemSim.java @@ -0,0 +1,377 @@ +/* + * MIT License + * + * Copyright (c) PhotonVision + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this 
permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package org.photonvision.simulation; + +import edu.wpi.first.apriltag.AprilTag; +import edu.wpi.first.apriltag.AprilTagFieldLayout; +import edu.wpi.first.math.geometry.Pose2d; +import edu.wpi.first.math.geometry.Pose3d; +import edu.wpi.first.math.geometry.Transform3d; +import edu.wpi.first.math.interpolation.TimeInterpolatableBuffer; +import edu.wpi.first.wpilibj.Timer; +import edu.wpi.first.wpilibj.smartdashboard.Field2d; +import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; +import org.photonvision.PhotonCamera; +import org.photonvision.estimation.TargetModel; + +/** + * A simulated vision system involving a camera(s) and coprocessor(s) mounted on a mobile robot + * running PhotonVision, detecting targets placed on the field. {@link VisionTargetSim}s added to + * this class will be detected by the {@link PhotonCameraSim}s added to this class. This class + * should be updated periodically with the robot's current pose in order to publish the simulated + * camera target info. 
+ */ +public class VisionSystemSim { + private final Map camSimMap = new HashMap<>(); + private static final double kBufferLengthSeconds = 1.5; + // save robot-to-camera for each camera over time (Pose3d is easily interpolatable) + private final Map> camTrfMap = new HashMap<>(); + + // interpolate drivetrain with twists + private final TimeInterpolatableBuffer robotPoseBuffer = + TimeInterpolatableBuffer.createBuffer(kBufferLengthSeconds); + + private final Map> targetSets = new HashMap<>(); + + private final Field2d dbgField; + + /** + * A simulated vision system involving a camera(s) and coprocessor(s) mounted on a mobile robot + * running PhotonVision, detecting targets placed on the field. {@link VisionTargetSim}s added to + * this class will be detected by the {@link PhotonCameraSim}s added to this class. This class + * should be updated periodically with the robot's current pose in order to publish the simulated + * camera target info. + * + * @param visionSystemName The specific identifier for this vision system in NetworkTables. + */ + public VisionSystemSim(String visionSystemName) { + dbgField = new Field2d(); + String tableName = "VisionSystemSim-" + visionSystemName; + SmartDashboard.putData(tableName + "/Sim Field", dbgField); + } + + /** Get one of the simulated cameras. */ + public Optional getCameraSim(String name) { + return Optional.ofNullable(camSimMap.get(name)); + } + + /** Get all the simulated cameras. */ + public Collection getCameraSims() { + return camSimMap.values(); + } + + /** + * Adds a simulated camera to this vision system with a specified robot-to-camera transformation. + * The vision targets registered with this vision system simulation will be observed by the + * simulated {@link PhotonCamera}. 
+ * + * @param cameraSim The camera simulation + * @param robotToCamera The transform from the robot pose to the camera pose + */ + public void addCamera(PhotonCameraSim cameraSim, Transform3d robotToCamera) { + var existing = camSimMap.putIfAbsent(cameraSim.getCamera().getName(), cameraSim); + if (existing == null) { + camTrfMap.put(cameraSim, TimeInterpolatableBuffer.createBuffer(kBufferLengthSeconds)); + camTrfMap + .get(cameraSim) + .addSample(Timer.getFPGATimestamp(), new Pose3d().plus(robotToCamera)); + } + } + + /** Remove all simulated cameras from this vision system. */ + public void clearCameras() { + camSimMap.clear(); + camTrfMap.clear(); + } + + /** + * Remove a simulated camera from this vision system. + * + * @return If the camera was present and removed + */ + public boolean removeCamera(PhotonCameraSim cameraSim) { + boolean success = camSimMap.remove(cameraSim.getCamera().getName()) != null; + camTrfMap.remove(cameraSim); + return success; + } + + /** + * Get a simulated camera's position relative to the robot. If the requested camera is invalid, an + * empty optional is returned. + * + * @return The transform of this cameraSim, or an empty optional if it is invalid + */ + public Optional getRobotToCamera(PhotonCameraSim cameraSim) { + return getRobotToCamera(cameraSim, Timer.getFPGATimestamp()); + } + + /** + * Get a simulated camera's position relative to the robot. If the requested camera is invalid, an + * empty optional is returned. 
+ * + * @param cameraSim Specific camera to get the robot-to-camera transform of + * @param timeSeconds Timestamp in seconds of when the transform should be observed + * @return The transform of this cameraSim, or an empty optional if it is invalid + */ + public Optional getRobotToCamera(PhotonCameraSim cameraSim, double timeSeconds) { + var trfBuffer = camTrfMap.get(cameraSim); + if (trfBuffer == null) return Optional.empty(); + var sample = trfBuffer.getSample(timeSeconds); + if (sample.isEmpty()) return Optional.empty(); + return Optional.of(new Transform3d(new Pose3d(), sample.orElse(new Pose3d()))); + } + + /** + * Adjust a camera's position relative to the robot. Use this if your camera is on a gimbal or + * turret or some other mobile platform. + * + * @param cameraSim The simulated camera to change the relative position of + * @param robotToCamera New transform from the robot to the camera + * @return If the cameraSim was valid and transform was adjusted + */ + public boolean adjustCamera(PhotonCameraSim cameraSim, Transform3d robotToCamera) { + var trfBuffer = camTrfMap.get(cameraSim); + if (trfBuffer == null) return false; + trfBuffer.addSample(Timer.getFPGATimestamp(), new Pose3d().plus(robotToCamera)); + return true; + } + + /** Reset the previous transforms for all cameras to their current transform. */ + public void resetCameraTransforms() { + for (var cam : camTrfMap.keySet()) resetCameraTransforms(cam); + } + + /** + * Reset the transform history for this camera to just the current transform. 
+ * + * @return If the cameraSim was valid and transforms were reset + */ + public boolean resetCameraTransforms(PhotonCameraSim cameraSim) { + double now = Timer.getFPGATimestamp(); + var trfBuffer = camTrfMap.get(cameraSim); + if (trfBuffer == null) return false; + var lastTrf = new Transform3d(new Pose3d(), trfBuffer.getSample(now).orElse(new Pose3d())); + trfBuffer.clear(); + adjustCamera(cameraSim, lastTrf); + return true; + } + + public Set getVisionTargets() { + var all = new HashSet(); + for (var entry : targetSets.entrySet()) { + all.addAll(entry.getValue()); + } + return all; + } + + public Set getVisionTargets(String type) { + return targetSets.get(type); + } + + /** + * Adds targets on the field which your vision system is designed to detect. The {@link + * PhotonCamera}s simulated from this system will report the location of the camera relative to + * the subset of these targets which are visible from the given camera position. + * + *

By default these are added under the type "targets". + * + * @param targets Targets to add to the simulated field + */ + public void addVisionTargets(VisionTargetSim... targets) { + addVisionTargets("targets", targets); + } + + /** + * Adds targets on the field which your vision system is designed to detect. The {@link + * PhotonCamera}s simulated from this system will report the location of the camera relative to + * the subset of these targets which are visible from the given camera position. + * + *

The AprilTags from this layout will be added as vision targets under the type "apriltags". + * The poses added preserve the tag layout's current alliance origin. + * + * @param tagLayout The field tag layout to get Apriltag poses and IDs from + */ + public void addVisionTargets(AprilTagFieldLayout tagLayout) { + for (AprilTag tag : tagLayout.getTags()) { + addVisionTargets( + "apriltags", + new VisionTargetSim( + tagLayout.getTagPose(tag.ID).get(), // preserve alliance rotation + TargetModel.kTag16h5, + tag.ID)); + } + } + + /** + * Adds targets on the field which your vision system is designed to detect. The {@link + * PhotonCamera}s simulated from this system will report the location of the camera relative to + * the subset of these targets which are visible from the given camera position. + * + * @param type Type of target (e.g. "cargo"). + * @param targets Targets to add to the simulated field + */ + public void addVisionTargets(String type, VisionTargetSim... targets) { + targetSets.computeIfAbsent(type, k -> new HashSet<>()); + for (var tgt : targets) { + targetSets.get(type).add(tgt); + } + } + + public void clearVisionTargets() { + targetSets.clear(); + } + + public Set removeVisionTargets(String type) { + return targetSets.remove(type); + } + + public Set removeVisionTargets(VisionTargetSim... targets) { + var removeList = List.of(targets); + var removedSet = new HashSet(); + for (var entry : targetSets.entrySet()) { + entry + .getValue() + .removeIf( + t -> { + if (removeList.contains(t)) { + removedSet.add(t); + return true; + } else return false; + }); + } + return removedSet; + } + + /** Get the latest robot pose in meters saved by the vision system. */ + public Pose3d getRobotPose() { + return getRobotPose(Timer.getFPGATimestamp()); + } + + /** + * Get the robot pose in meters saved by the vision system at this timestamp. 
+ * + * @param timestamp Timestamp of the desired robot pose + */ + public Pose3d getRobotPose(double timestamp) { + return robotPoseBuffer.getSample(timestamp).orElse(new Pose3d()); + } + + /** Clears all previous robot poses and sets robotPose at current time. */ + public void resetRobotPose(Pose2d robotPose) { + resetRobotPose(new Pose3d(robotPose)); + } + + /** Clears all previous robot poses and sets robotPose at current time. */ + public void resetRobotPose(Pose3d robotPose) { + robotPoseBuffer.clear(); + robotPoseBuffer.addSample(Timer.getFPGATimestamp(), robotPose); + } + + public Field2d getDebugField() { + return dbgField; + } + + /** + * Periodic update. Ensure this is called repeatedly-- camera performance is used to automatically + * determine if a new frame should be submitted. + * + * @param robotPoseMeters The current robot pose in meters + */ + public void update(Pose2d robotPoseMeters) { + update(new Pose3d(robotPoseMeters)); + } + + /** + * Periodic update. Ensure this is called repeatedly-- camera performance is used to automatically + * determine if a new frame should be submitted. 
+ * + * @param robotPoseMeters The current robot pose in meters + */ + public void update(Pose3d robotPoseMeters) { + var targetTypes = targetSets.entrySet(); + // update vision targets on field + targetTypes.forEach( + entry -> + dbgField + .getObject(entry.getKey()) + .setPoses( + entry.getValue().stream() + .map(t -> t.getPose().toPose2d()) + .collect(Collectors.toList()))); + + if (robotPoseMeters == null) return; + + // save "real" robot poses over time + double now = Timer.getFPGATimestamp(); + robotPoseBuffer.addSample(now, robotPoseMeters); + dbgField.setRobotPose(robotPoseMeters.toPose2d()); + + var allTargets = new ArrayList(); + targetTypes.forEach((entry) -> allTargets.addAll(entry.getValue())); + var visibleTargets = new ArrayList(); + var cameraPose2ds = new ArrayList(); + // process each camera + for (var camSim : camSimMap.values()) { + // check if this camera is ready to process and get latency + var optTimestamp = camSim.consumeNextEntryTime(); + if (optTimestamp.isEmpty()) continue; + // when this result "was" read by NT + long timestampNT = optTimestamp.get(); + // this result's processing latency in milliseconds + double latencyMillis = camSim.prop.estLatencyMs(); + // the image capture timestamp in seconds of this result + double timestampCapture = timestampNT / 1e6 - latencyMillis / 1e3; + + // use camera pose from the image capture timestamp + Pose3d lateRobotPose = getRobotPose(timestampCapture); + Pose3d lateCameraPose = lateRobotPose.plus(getRobotToCamera(camSim).get()); + cameraPose2ds.add(lateCameraPose.toPose2d()); + + // process a PhotonPipelineResult with visible targets + var camResult = camSim.process(latencyMillis, lateCameraPose, allTargets); + // publish this info to NT at estimated timestamp of receive + camSim.submitProcessedFrame(camResult, timestampNT); + // display debug results + for (var target : camResult.getTargets()) { + visibleTargets.add(lateCameraPose.transformBy(target.getBestCameraToTarget())); + } + } + if 
(visibleTargets.size() != 0) { + dbgField + .getObject("visibleTargetPoses") + .setPoses(visibleTargets.stream().map(Pose3d::toPose2d).collect(Collectors.toList())); + } + if (cameraPose2ds.size() != 0) dbgField.getObject("cameras").setPoses(cameraPose2ds); + } +} diff --git a/photon-lib/src/main/java/org/photonvision/simulation/VisionTargetSim.java b/photon-lib/src/main/java/org/photonvision/simulation/VisionTargetSim.java new file mode 100644 index 0000000000..88d19756c3 --- /dev/null +++ b/photon-lib/src/main/java/org/photonvision/simulation/VisionTargetSim.java @@ -0,0 +1,94 @@ +/* + * MIT License + * + * Copyright (c) PhotonVision + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package org.photonvision.simulation; + +import edu.wpi.first.math.geometry.Pose3d; +import edu.wpi.first.math.geometry.Translation3d; +import java.util.List; +import org.photonvision.estimation.TargetModel; + +/** Describes a vision target located somewhere on the field that your vision system can detect. */ +public class VisionTargetSim { + private Pose3d pose; + private TargetModel model; + + public final int fiducialID; + + /** + * Describes a vision target located somewhere on the field that your vision system can detect. + * + * @param pose Pose3d of the tag in field-relative coordinates + * @param model TargetModel which describes the shape of the target + */ + public VisionTargetSim(Pose3d pose, TargetModel model) { + this.pose = pose; + this.model = model; + this.fiducialID = -1; + } + + /** + * Describes a fiducial tag located somewhere on the field that your vision system can detect. + * + * @param pose Pose3d of the tag in field-relative coordinates + * @param model TargetModel which describes the shape of the target(tag) + * @param id The ID of this fiducial tag + */ + public VisionTargetSim(Pose3d pose, TargetModel model, int id) { + this.pose = pose; + this.model = model; + this.fiducialID = id; + } + + public void setPose(Pose3d pose) { + this.pose = pose; + } + + public void setModel(TargetModel model) { + this.model = model; + } + + public Pose3d getPose() { + return pose; + } + + public TargetModel getModel() { + return model; + } + + /** This target's vertices offset from its field pose. 
*/ + public List getFieldVertices() { + return model.getFieldVertices(pose); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj instanceof VisionTargetSim) { + var o = (VisionTargetSim) obj; + return pose.equals(o.pose) && model.equals(o.model); + } + return false; + } +} diff --git a/photon-lib/src/main/resources/images/apriltags/tag16_05_00000.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00000.png new file mode 100644 index 0000000000000000000000000000000000000000..f6f22a9c9782ddb1b16b7833b0ff6cba3ca662a2 GIT binary patch literal 99 zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFqTAnVBAr*1SKl~XC8aUb5)<~>o wO0n}}H85PmHNn?0u)!xew1M}q=mTbkYKs*SuD9D~0Ch2Vy85}Sb4q9e02tC4MF0Q* literal 0 HcmV?d00001 diff --git a/photon-lib/src/main/resources/images/apriltags/tag16_05_00001.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00001.png new file mode 100644 index 0000000000000000000000000000000000000000..9b66b7d59b034d5245135dc3009bcbab41661ee7 GIT binary patch literal 97 zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFq8lEnWAr*1SKl~XC8aUb5)<~>o uO0nx=N@)mYj1%6Vc_^Tv?;+O*Mut0|R)=n?ZgK+ZVeoYIb6Mw<&;$S&HX3yR literal 0 HcmV?d00001 diff --git a/photon-lib/src/main/resources/images/apriltags/tag16_05_00002.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00002.png new file mode 100644 index 0000000000000000000000000000000000000000..856d4f163ffd2252c1d622335c341dfe461f0cde GIT binary patch literal 99 zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFqTAnVBAr*1SKl~XC8aUb5)<~>o w+ToYOwL$Y(K*PCqZHMKCA_<2JSbs1u6a=n_SgD|A1k}ag>FVdQ&MBb@08$Sc9RL6T literal 0 HcmV?d00001 diff --git a/photon-lib/src/main/resources/images/apriltags/tag16_05_00003.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00003.png new file mode 100644 index 0000000000000000000000000000000000000000..df0cc4ab3cadc90fd1d330bcfb8fad9232cc4832 GIT binary patch literal 97 zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFq8lEnWAr*1SKl~XC8aUb5)<`%r 
u?n;`&6_AnBR3W-R+ifG0>|xOd%nV7nt3xZgV!ii@3vc;pZMT3=F<&)&-@N@H__UVeoYIb6Mw<&;$VYQW)m| literal 0 HcmV?d00001 diff --git a/photon-lib/src/main/resources/images/apriltags/tag16_05_00005.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00005.png new file mode 100644 index 0000000000000000000000000000000000000000..13574f8d5e736635083decd39fb05995f7d8c17d GIT binary patch literal 100 zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFq+MX_sAr*1SKl~XC8aUb5)<`%r x8XX8|;4=_W&_0yKYSgR5_Bi2i0qYM2hIP%`JAr*1SKl~XC8aUb5)=0QA v8XZ{0Fk5L2S3y+c8irQ4>#bK=Y8V(cc&-cj$h^4+sENVT)z4*}Q$iB}EEpP6 literal 0 HcmV?d00001 diff --git a/photon-lib/src/main/resources/images/apriltags/tag16_05_00008.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00008.png new file mode 100644 index 0000000000000000000000000000000000000000..30827b6f94a7f4bcdab8788a5f41ff6f5517e48c GIT binary patch literal 97 zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFq8lEnWAr*1SKl~XC8aUb5)=0QA uS{+!;u$r@gu|z6XU^!Ec#5x9f1_ra=TYTj7uQCJmFnGH9xvXU|L>xoGkCOyr(heaPSGkiX~IyCf)Lmp5YgQu&X%Q~loCIDfr8%6*C literal 0 HcmV?d00001 diff --git a/photon-lib/src/main/resources/images/apriltags/tag16_05_00010.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00010.png new file mode 100644 index 0000000000000000000000000000000000000000..71beaa9626d727a1f4fc58be25bddeddf3ac2f90 GIT binary patch literal 99 zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFqTAnVBAr*1SKl~XC8aUb5)<~>o w+TmBi`hv?)B;jy`Zv!I_>kWe#Mmq+EH9wX{|7WN<4b;Wp>FVdQ&MBb@04kvwsQ>@~ literal 0 HcmV?d00001 diff --git a/photon-lib/src/main/resources/images/apriltags/tag16_05_00011.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00011.png new file mode 100644 index 0000000000000000000000000000000000000000..06e0c94a3f6b6de56ccaf636498d93959dc192bc GIT binary patch literal 98 zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFqnw~C>Ar*1SKl~XC8aUb5)=0QA vS{(>z;9acYu>7NzgJ5&vhQu9AH4F^jzN`yc@^^O#P!ofvtDnm{r-UW|Jy9EO literal 0 HcmV?d00001 diff --git 
a/photon-lib/src/main/resources/images/apriltags/tag16_05_00012.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00012.png new file mode 100644 index 0000000000000000000000000000000000000000..11a9a0016c750ac6717642aa703f49e5907d88b4 GIT binary patch literal 95 zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFqYMw5RAr*1SKl~XC8aUb5*32+r sHDH*2U>U=D&Kn|$8V<(`Sbs1u)JQLi+x9mdKI;Vst0Nm;sNB{r; literal 0 HcmV?d00001 diff --git a/photon-lib/src/main/resources/images/apriltags/tag16_05_00013.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00013.png new file mode 100644 index 0000000000000000000000000000000000000000..7443d8030fc3e9850a6d022ed0c3437ae8ad2ac1 GIT binary patch literal 99 zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFqTAnVBAr*1SKl~XC8aUb5)<`%r w?n+w2mB6r?DaAEHRKYtbw4v`I*9S(1wMwf)ZQHJ919dTYy85}Sb4q9e03hfYb^rhX literal 0 HcmV?d00001 diff --git a/photon-lib/src/main/resources/images/apriltags/tag16_05_00014.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00014.png new file mode 100644 index 0000000000000000000000000000000000000000..bad5efedc9d2b1a667f92954bb7caed157ac716c GIT binary patch literal 102 zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFqx}GkMAr*1SKl~XC8aUb5)=0QA ynzc%BT@YQU;Sk0md^n&XPc)2GqU)fF90Nm#<%$R%#` literal 0 HcmV?d00001 diff --git a/photon-lib/src/main/resources/images/apriltags/tag16_05_00016.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00016.png new file mode 100644 index 0000000000000000000000000000000000000000..cf5fa68c27db46d3839c47a8331c6625c76b6c18 GIT binary patch literal 97 zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFq8lEnWAr*1SKl~XC8aUb5*32+r uJ;4=tS)m3{4}+(xpUXO@geCwn%Nn)- literal 0 HcmV?d00001 diff --git a/photon-lib/src/main/resources/images/apriltags/tag16_05_00017.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00017.png new file mode 100644 index 0000000000000000000000000000000000000000..13ed6b2cc7c71188b4f2de31774fdb076f9d83e8 GIT binary patch literal 101 
zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFqI-V|$Ar*1SKl~XC8aUb5)=0QA yZgLVaED=s%aAq{>4Q1SA8X>wMc?VMs14CW>>d-gQKMaBT7(8A5T-G@yGywo6=otb4 literal 0 HcmV?d00001 diff --git a/photon-lib/src/main/resources/images/apriltags/tag16_05_00018.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00018.png new file mode 100644 index 0000000000000000000000000000000000000000..27ccc3324f39344fb86583b6384484deb758c38b GIT binary patch literal 99 zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFqTAnVBAr*1SKl~XC8aUb5)=0QA wTJn+a literal 0 HcmV?d00001 diff --git a/photon-lib/src/main/resources/images/apriltags/tag16_05_00019.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00019.png new file mode 100644 index 0000000000000000000000000000000000000000..97a521c47c38e423ea2355c2d641a23fac6cd52d GIT binary patch literal 99 zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFqTAnVBAr*1SKl~XC8aUb5)<`%r v?n+w2b>U|L>j@zXQ3Ylm)*A*fjCKqR2N_m}x_LB$j9~C|^>bP0l+XkK3fC8j literal 0 HcmV?d00001 diff --git a/photon-lib/src/main/resources/images/apriltags/tag16_05_00020.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00020.png new file mode 100644 index 0000000000000000000000000000000000000000..f7c1f0d4ed4d731ba91d224df6ca7acdf476c3c9 GIT binary patch literal 98 zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFqnw~C>Ar*1SKl~XC8aUb5)=0QA r8XZ{0;H<94YS0i0X3HKHeZb6+(!MTe<<0+^Kurvuu6{1-oD!M<^rIN; literal 0 HcmV?d00001 diff --git a/photon-lib/src/main/resources/images/apriltags/tag16_05_00021.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00021.png new file mode 100644 index 0000000000000000000000000000000000000000..aade7b33205b3575d882d7b9896d9aea991a448d GIT binary patch literal 100 zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFq+MX_sAr*1SKl~XC8aUb5)<`%r x_VhX})^1?rVKtbwfK$R`6H^IKcmqEJ!*P{$L9hD`!~?Z4c)I$ztaD0e0st5&7`p%f literal 0 HcmV?d00001 diff --git a/photon-lib/src/main/resources/images/apriltags/tag16_05_00022.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00022.png new 
file mode 100644 index 0000000000000000000000000000000000000000..ae9c0be5e84397b2f449e7a465c60c425b75b5ff GIT binary patch literal 101 zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFqI-V|$Ar*1SKl~XC8aUb5)=0QA y#<2S^O=prg5YVtjc(v#ak;Kr3g%UoG7#SMd)&;SDOj`@o$KdJe=d#Wzp$Py?(;74Y literal 0 HcmV?d00001 diff --git a/photon-lib/src/main/resources/images/apriltags/tag16_05_00023.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00023.png new file mode 100644 index 0000000000000000000000000000000000000000..d4122e60012e71592c85ab99c8aadc2642e3d7e9 GIT binary patch literal 97 zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFq8lEnWAr*1SKl~XC8aUb5)<`U8 unjz7}Byk|1L(gLhXM||N;R4nl3=FF|Rz&Q5xoay>4}+(xpUXO@geCw2L>bBe literal 0 HcmV?d00001 diff --git a/photon-lib/src/main/resources/images/apriltags/tag16_05_00024.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00024.png new file mode 100644 index 0000000000000000000000000000000000000000..46f9c981211d577c9169742c45dda4fc393f29ff GIT binary patch literal 97 zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFq8lEnWAr*1SKl~XC8aUb5)=0QA u8XZ{4;L0?cNrbb8YeQ5c?_tpg%nbK5H~OD#@cahU!{F)a=d#Wzp$P!q&KRoz literal 0 HcmV?d00001 diff --git a/photon-lib/src/main/resources/images/apriltags/tag16_05_00025.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00025.png new file mode 100644 index 0000000000000000000000000000000000000000..29d3bf8aa7ff109241f66a7117bbf780d5916023 GIT binary patch literal 99 zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFqTAnVBAr*1SKl~XC8aUb5)<`%r w8XZ{05GmNkR3f#Ssl-)-YeV7=rWytY<0~8ezh7Iw0;r3@)78&qol`;+02z51$p8QV literal 0 HcmV?d00001 diff --git a/photon-lib/src/main/resources/images/apriltags/tag16_05_00026.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00026.png new file mode 100644 index 0000000000000000000000000000000000000000..4e86541296974b698c8c805e5d96075ad8889d90 GIT binary patch literal 99 zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFqTAnVBAr*1SKl~XC8aUb5)=0QA 
wTJ?r9?otwAl@NNtb%C*sDMw-*gFFMnbA>HF+m?Ra2h_#j>FVdQ&MBb@0QFKBk^lez literal 0 HcmV?d00001 diff --git a/photon-lib/src/main/resources/images/apriltags/tag16_05_00027.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00027.png new file mode 100644 index 0000000000000000000000000000000000000000..a14c4a733195646f1747ea9b417537dfa0fee09c GIT binary patch literal 98 zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFqnw~C>Ar*1SKl~XC8aUb5)=0QA v#<2S^#WJ?Bn6XCqI)yi^n_JnixD?{an^LB{Ts5(BT*4 literal 0 HcmV?d00001 diff --git a/photon-lib/src/main/resources/images/apriltags/tag16_05_00028.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00028.png new file mode 100644 index 0000000000000000000000000000000000000000..5adc09082b6a27c4f4f1d84bbe2a84e43f207262 GIT binary patch literal 95 zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFqYMw5RAr*1SKl~XC8aUb5*32kj sJ;C72xQQi9_(CQR>kWe#Mmq+ELmyT|7+jlN0o1|Z>FVdQ&MBb@0K#V&F8}}l literal 0 HcmV?d00001 diff --git a/photon-lib/src/main/resources/images/apriltags/tag16_05_00029.png b/photon-lib/src/main/resources/images/apriltags/tag16_05_00029.png new file mode 100644 index 0000000000000000000000000000000000000000..9639d1cec31b01bee373794883e26f3483512bb1 GIT binary patch literal 97 zcmeAS@N?(olHy`uVBq!ia0vp^93afW1|*O0@9PFq8lEnWAr*1SKl~XC8aUb5)=0QA uTJ?r98XX8|Skyg%GeR`sZ~^NN28QG%8~qPP&Xxq~VeoYIb6Mw<&;$VbPZ=5j literal 0 HcmV?d00001 diff --git a/photon-lib/src/test/java/org/photonvision/OpenCVTest.java b/photon-lib/src/test/java/org/photonvision/OpenCVTest.java new file mode 100644 index 0000000000..594439e847 --- /dev/null +++ b/photon-lib/src/test/java/org/photonvision/OpenCVTest.java @@ -0,0 +1,229 @@ +/* + * MIT License + * + * Copyright (c) PhotonVision + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, 
publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package org.photonvision; + +import static org.junit.jupiter.api.Assertions.*; + +import edu.wpi.first.apriltag.jni.AprilTagJNI; +import edu.wpi.first.cscore.CameraServerCvJNI; +import edu.wpi.first.cscore.CameraServerJNI; +import edu.wpi.first.hal.JNIWrapper; +import edu.wpi.first.math.MathUtil; +import edu.wpi.first.math.geometry.Pose3d; +import edu.wpi.first.math.geometry.Rotation2d; +import edu.wpi.first.math.geometry.Rotation3d; +import edu.wpi.first.math.geometry.Transform3d; +import edu.wpi.first.math.geometry.Translation3d; +import edu.wpi.first.net.WPINetJNI; +import edu.wpi.first.networktables.NetworkTableInstance; +import edu.wpi.first.networktables.NetworkTablesJNI; +import edu.wpi.first.util.CombinedRuntimeLoader; +import edu.wpi.first.util.RuntimeLoader; +import edu.wpi.first.util.WPIUtilJNI; +import java.util.List; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.opencv.core.Core; +import org.photonvision.estimation.CameraTargetRelation; +import org.photonvision.estimation.OpenCVHelp; +import org.photonvision.estimation.TargetModel; +import org.photonvision.simulation.SimCameraProperties; +import 
org.photonvision.simulation.VisionSystemSim; +import org.photonvision.simulation.VisionTargetSim; + +public class OpenCVTest { + private static final double kTrlDelta = 0.005; + private static final double kRotDeltaDeg = 0.25; + + public static void assertSame(Translation3d trl1, Translation3d trl2) { + assertEquals(0, trl1.getX() - trl2.getX(), kTrlDelta, "Trl X Diff"); + assertEquals(0, trl1.getY() - trl2.getY(), kTrlDelta, "Trl Y Diff"); + assertEquals(0, trl1.getZ() - trl2.getZ(), kTrlDelta, "Trl Z Diff"); + } + + public static void assertSame(Rotation3d rot1, Rotation3d rot2) { + assertEquals(0, MathUtil.angleModulus(rot1.getX() - rot2.getX()), kRotDeltaDeg, "Rot X Diff"); + assertEquals(0, MathUtil.angleModulus(rot1.getY() - rot2.getY()), kRotDeltaDeg, "Rot Y Diff"); + assertEquals(0, MathUtil.angleModulus(rot1.getZ() - rot2.getZ()), kRotDeltaDeg, "Rot Z Diff"); + assertEquals( + 0, MathUtil.angleModulus(rot1.getAngle() - rot2.getAngle()), kRotDeltaDeg, "Rot W Diff"); + } + + public static void assertSame(Pose3d pose1, Pose3d pose2) { + assertSame(pose1.getTranslation(), pose2.getTranslation()); + assertSame(pose1.getRotation(), pose2.getRotation()); + } + + public static void assertSame(Transform3d trf1, Transform3d trf2) { + assertSame(trf1.getTranslation(), trf2.getTranslation()); + assertSame(trf1.getRotation(), trf2.getRotation()); + } + + private static final SimCameraProperties prop = new SimCameraProperties(); + + @BeforeAll + public static void setUp() { + JNIWrapper.Helper.setExtractOnStaticLoad(false); + WPIUtilJNI.Helper.setExtractOnStaticLoad(false); + NetworkTablesJNI.Helper.setExtractOnStaticLoad(false); + WPINetJNI.Helper.setExtractOnStaticLoad(false); + CameraServerJNI.Helper.setExtractOnStaticLoad(false); + CameraServerCvJNI.Helper.setExtractOnStaticLoad(false); + AprilTagJNI.Helper.setExtractOnStaticLoad(false); + + try { + CombinedRuntimeLoader.loadLibraries( + VisionSystemSim.class, + "wpiutiljni", + "ntcorejni", + "wpinetjni", + 
"wpiHaljni", + "cscorejni", + "cscorejnicvstatic"); + + var loader = + new RuntimeLoader<>( + Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); + loader.loadLibrary(); + } catch (Exception e) { + e.printStackTrace(); + } + + // NT live for debug purposes + NetworkTableInstance.getDefault().startServer(); + + // No version check for testing + PhotonCamera.setVersionCheckEnabled(false); + } + + @Test + public void testTrlConvert() { + var trl = new Translation3d(0.75, -0.4, 0.1); + var tvec = OpenCVHelp.translationToTvec(trl); + var result = OpenCVHelp.tvecToTranslation(tvec); + tvec.release(); + assertSame(trl, result); + } + + @Test + public void testRotConvert() { + var rot = new Rotation3d(0.5, 1, -1); + var rvec = OpenCVHelp.rotationToRvec(rot); + var result = OpenCVHelp.rvecToRotation(rvec); + rvec.release(); + var diff = result.minus(rot); + assertSame(new Rotation3d(), diff); + } + + @Test + public void testProjection() { + var target = + new VisionTargetSim( + new Pose3d(1, 0, 0, new Rotation3d(0, 0, Math.PI)), TargetModel.kTag16h5, 0); + var cameraPose = new Pose3d(0, 0, 0, new Rotation3d()); + var targetCorners = + OpenCVHelp.projectPoints( + prop.getIntrinsics(), prop.getDistCoeffs(), cameraPose, target.getFieldVertices()); + // find circulation (counter/clockwise-ness) + double circulation = 0; + for (int i = 0; i < targetCorners.size(); i++) { + double xDiff = targetCorners.get((i + 1) % 4).x - targetCorners.get(i).x; + double ySum = targetCorners.get((i + 1) % 4).y + targetCorners.get(i).y; + circulation += xDiff * ySum; + } + assertTrue(circulation > 0, "2d fiducial points aren't counter-clockwise"); + // undo projection distortion + targetCorners = prop.undistort(targetCorners); + var avgCenterRot1 = prop.getPixelRot(OpenCVHelp.averageCorner(targetCorners)); + cameraPose = + cameraPose.plus(new Transform3d(new Translation3d(), new Rotation3d(0, 0.25, 0.25))); + targetCorners = + OpenCVHelp.projectPoints( + 
prop.getIntrinsics(), prop.getDistCoeffs(), cameraPose, target.getFieldVertices()); + var avgCenterRot2 = prop.getPixelRot(OpenCVHelp.averageCorner(targetCorners)); + var yaw2d = new Rotation2d(avgCenterRot2.getZ()); + var pitch2d = new Rotation2d(avgCenterRot2.getY()); + var yawDiff = yaw2d.minus(new Rotation2d(avgCenterRot1.getZ())); + var pitchDiff = pitch2d.minus(new Rotation2d(avgCenterRot1.getY())); + assertTrue(yawDiff.getRadians() < 0, "2d points don't follow yaw"); + assertTrue(pitchDiff.getRadians() < 0, "2d points don't follow pitch"); + var actualRelation = new CameraTargetRelation(cameraPose, target.getPose()); + assertEquals( + actualRelation.camToTargPitch.getDegrees(), + pitchDiff.getDegrees() * Math.cos(yaw2d.getRadians()), // adjust for perpsective distortion + kRotDeltaDeg, + "2d pitch doesn't match 3d"); + assertEquals( + actualRelation.camToTargYaw.getDegrees(), + yawDiff.getDegrees(), + kRotDeltaDeg, + "2d yaw doesn't match 3d"); + } + + @Test + public void testSolvePNP_SQUARE() { + var target = + new VisionTargetSim( + new Pose3d(5, 0.5, 1, new Rotation3d(0, 0, Math.PI)), TargetModel.kTag16h5, 0); + var cameraPose = new Pose3d(0, 0, 0, new Rotation3d()); + var actualRelation = new CameraTargetRelation(cameraPose, target.getPose()); + var targetCorners = + OpenCVHelp.projectPoints( + prop.getIntrinsics(), prop.getDistCoeffs(), cameraPose, target.getFieldVertices()); + var pnpSim = + OpenCVHelp.solvePNP_SQUARE( + prop.getIntrinsics(), prop.getDistCoeffs(), target.getModel().vertices, targetCorners); + var estRelation = new CameraTargetRelation(cameraPose, cameraPose.plus(pnpSim.best)); + assertSame(actualRelation.camToTarg, estRelation.camToTarg); + } + + @Test + public void testSolvePNP_SQPNP() { + var target = + new VisionTargetSim( + new Pose3d(5, 0.5, 1, new Rotation3d(0, 0, Math.PI)), + new TargetModel( + List.of( + new Translation3d(0, 0, 0), + new Translation3d(1, 0, 0), + new Translation3d(0, 1, 0), + new Translation3d(0, 0, 1), + new 
Translation3d(0.125, 0.25, 0.5), + new Translation3d(0, 0, -1), + new Translation3d(0, -1, 0), + new Translation3d(-1, 0, 0))), + 0); + var cameraPose = new Pose3d(0, 0, 0, new Rotation3d()); + var actualRelation = new CameraTargetRelation(cameraPose, target.getPose()); + var targetCorners = + OpenCVHelp.projectPoints( + prop.getIntrinsics(), prop.getDistCoeffs(), cameraPose, target.getFieldVertices()); + var pnpSim = + OpenCVHelp.solvePNP_SQPNP( + prop.getIntrinsics(), prop.getDistCoeffs(), target.getModel().vertices, targetCorners); + var estRelation = new CameraTargetRelation(cameraPose, cameraPose.plus(pnpSim.best)); + assertSame(actualRelation.camToTarg, estRelation.camToTarg); + } +} diff --git a/photon-lib/src/test/java/org/photonvision/SimVisionSystemTest.java b/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java similarity index 53% rename from photon-lib/src/test/java/org/photonvision/SimVisionSystemTest.java rename to photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java index 524e8eff5a..f4d5806e43 100644 --- a/photon-lib/src/test/java/org/photonvision/SimVisionSystemTest.java +++ b/photon-lib/src/test/java/org/photonvision/VisionSystemSimTest.java @@ -28,6 +28,9 @@ import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; +import edu.wpi.first.apriltag.jni.AprilTagJNI; +import edu.wpi.first.cscore.CameraServerCvJNI; +import edu.wpi.first.cscore.CameraServerJNI; import edu.wpi.first.hal.JNIWrapper; import edu.wpi.first.math.geometry.Pose2d; import edu.wpi.first.math.geometry.Pose3d; @@ -41,6 +44,7 @@ import edu.wpi.first.networktables.NetworkTableInstance; import edu.wpi.first.networktables.NetworkTablesJNI; import edu.wpi.first.util.CombinedRuntimeLoader; +import edu.wpi.first.util.RuntimeLoader; import edu.wpi.first.util.WPIUtilJNI; import java.util.List; import java.util.stream.Stream; @@ -52,18 +56,26 @@ import org.junit.jupiter.params.provider.Arguments; import 
org.junit.jupiter.params.provider.MethodSource; import org.junit.jupiter.params.provider.ValueSource; +import org.opencv.core.Core; +import org.photonvision.estimation.TargetModel; +import org.photonvision.simulation.PhotonCameraSim; +import org.photonvision.simulation.VisionSystemSim; +import org.photonvision.simulation.VisionTargetSim; import org.photonvision.targeting.PhotonTrackedTarget; -class SimVisionSystemTest { +class VisionSystemSimTest { + private static final double kTrlDelta = 0.005; + private static final double kRotDeltaDeg = 0.25; + @Test public void testEmpty() { Assertions.assertDoesNotThrow( () -> { - var sysUnderTest = - new SimVisionSystem("Test", 80.0, new Transform3d(), 99999, 320, 240, 0); - sysUnderTest.addSimVisionTarget(new SimVisionTarget(new Pose3d(), 1.0, 1.0, 42)); + var sysUnderTest = new VisionSystemSim("Test"); + sysUnderTest.addVisionTargets( + new VisionTargetSim(new Pose3d(), new TargetModel(1.0, 1.0))); for (int loopIdx = 0; loopIdx < 100; loopIdx++) { - sysUnderTest.processFrame(new Pose2d()); + sysUnderTest.update(new Pose2d()); } }); } @@ -74,12 +86,25 @@ public static void setUp() { WPIUtilJNI.Helper.setExtractOnStaticLoad(false); NetworkTablesJNI.Helper.setExtractOnStaticLoad(false); WPINetJNI.Helper.setExtractOnStaticLoad(false); + CameraServerJNI.Helper.setExtractOnStaticLoad(false); + CameraServerCvJNI.Helper.setExtractOnStaticLoad(false); + AprilTagJNI.Helper.setExtractOnStaticLoad(false); try { CombinedRuntimeLoader.loadLibraries( - SimVisionSystem.class, "wpiutiljni", "ntcorejni", "wpinetjni", "wpiHaljni"); + VisionSystemSim.class, + "wpiutiljni", + "ntcorejni", + "wpinetjni", + "wpiHaljni", + "cscorejni", + "cscorejnicvstatic"); + + var loader = + new RuntimeLoader<>( + Core.NATIVE_LIBRARY_NAME, RuntimeLoader.getDefaultExtractionRoot(), Core.class); + loader.loadLibrary(); } catch (Exception e) { - // TODO Auto-generated catch block e.printStackTrace(); } @@ -114,66 +139,74 @@ public static void shutDown() {} public 
void testVisibilityCupidShuffle() { final var targetPose = new Pose3d(new Translation3d(15.98, 0, 2), new Rotation3d(0, 0, Math.PI)); - var sysUnderTest = new SimVisionSystem("Test", 80.0, new Transform3d(), 99999, 640, 480, 0); - sysUnderTest.addSimVisionTarget(new SimVisionTarget(targetPose, 1.0, 3.0, 3)); + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(1.0, 3.0), 3)); // To the right, to the right var robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(-70)); - sysUnderTest.processFrame(robotPose); - assertFalse(sysUnderTest.cam.getLatestResult().hasTargets()); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); // To the right, to the right robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(-95)); - sysUnderTest.processFrame(robotPose); - assertFalse(sysUnderTest.cam.getLatestResult().hasTargets()); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); // To the left, to the left robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(90)); - sysUnderTest.processFrame(robotPose); - assertFalse(sysUnderTest.cam.getLatestResult().hasTargets()); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); // To the left, to the left robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(65)); - sysUnderTest.processFrame(robotPose); - assertFalse(sysUnderTest.cam.getLatestResult().hasTargets()); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); // now kick, now kick robotPose = new Pose2d(new Translation2d(2, 0), Rotation2d.fromDegrees(5)); - 
sysUnderTest.processFrame(robotPose); - assertTrue(sysUnderTest.cam.getLatestResult().hasTargets()); + visionSysSim.update(robotPose); + assertTrue(camera.getLatestResult().hasTargets()); // now kick, now kick robotPose = new Pose2d(new Translation2d(2, 0), Rotation2d.fromDegrees(-5)); - sysUnderTest.processFrame(robotPose); - assertTrue(sysUnderTest.cam.getLatestResult().hasTargets()); + visionSysSim.update(robotPose); + assertTrue(camera.getLatestResult().hasTargets()); // now walk it by yourself robotPose = new Pose2d(new Translation2d(2, 0), Rotation2d.fromDegrees(-179)); - sysUnderTest.processFrame(robotPose); - assertFalse(sysUnderTest.cam.getLatestResult().hasTargets()); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); // now walk it by yourself - sysUnderTest.moveCamera(new Transform3d(new Translation3d(), new Rotation3d(0, 0, Math.PI))); - sysUnderTest.processFrame(robotPose); - assertTrue(sysUnderTest.cam.getLatestResult().hasTargets()); + visionSysSim.adjustCamera( + cameraSim, new Transform3d(new Translation3d(), new Rotation3d(0, 0, Math.PI))); + visionSysSim.update(robotPose); + assertTrue(camera.getLatestResult().hasTargets()); } @Test public void testNotVisibleVert1() { final var targetPose = new Pose3d(new Translation3d(15.98, 0, 1), new Rotation3d(0, 0, Math.PI)); - var sysUnderTest = new SimVisionSystem("Test", 80.0, new Transform3d(), 99999, 640, 480, 0); - sysUnderTest.addSimVisionTarget(new SimVisionTarget(targetPose, 1.0, 3.0, 3)); + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(1.0, 3.0), 3)); var robotPose = new Pose2d(new Translation2d(5, 0), Rotation2d.fromDegrees(5)); - 
sysUnderTest.processFrame(robotPose); - assertTrue(sysUnderTest.cam.getLatestResult().hasTargets()); + visionSysSim.update(robotPose); + assertTrue(camera.getLatestResult().hasTargets()); - sysUnderTest.moveCamera( - new Transform3d( - new Translation3d(0, 0, 5000), new Rotation3d(0, 0, Math.PI))); // vooop selfie stick - sysUnderTest.processFrame(robotPose); - assertFalse(sysUnderTest.cam.getLatestResult().hasTargets()); + visionSysSim.adjustCamera( // vooop selfie stick + cameraSim, new Transform3d(new Translation3d(0, 0, 5000), new Rotation3d(0, 0, Math.PI))); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); } @Test @@ -181,50 +214,65 @@ public void testNotVisibleVert2() { final var targetPose = new Pose3d(new Translation3d(15.98, 0, 2), new Rotation3d(0, 0, Math.PI)); var robotToCamera = - new Transform3d(new Translation3d(0, 0, 1), new Rotation3d(0, Math.PI / 4, 0)); - var sysUnderTest = new SimVisionSystem("Test", 80.0, robotToCamera, 99999, 1234, 1234, 0); - sysUnderTest.addSimVisionTarget(new SimVisionTarget(targetPose, 3.0, 0.5, 1736)); - - var robotPose = new Pose2d(new Translation2d(14.98, 0), Rotation2d.fromDegrees(5)); - sysUnderTest.processFrame(robotPose); - assertTrue(sysUnderTest.cam.getLatestResult().hasTargets()); + new Transform3d(new Translation3d(0, 0, 1), new Rotation3d(0, -Math.PI / 4, 0)); + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, robotToCamera); + cameraSim.prop.setCalibration(1234, 1234, Rotation2d.fromDegrees(80)); + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(1.0, 0.5), 1736)); + + var robotPose = new Pose2d(new Translation2d(13.98, 0), Rotation2d.fromDegrees(5)); + visionSysSim.update(robotPose); + assertTrue(camera.getLatestResult().hasTargets()); // Pitched back camera should mean target goes out of view below the robot as 
distance increases robotPose = new Pose2d(new Translation2d(0, 0), Rotation2d.fromDegrees(5)); - sysUnderTest.processFrame(robotPose); - assertFalse(sysUnderTest.cam.getLatestResult().hasTargets()); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); } @Test public void testNotVisibleTgtSize() { final var targetPose = new Pose3d(new Translation3d(15.98, 0, 1), new Rotation3d(0, 0, Math.PI)); - var sysUnderTest = new SimVisionSystem("Test", 80.0, new Transform3d(), 99999, 640, 480, 20.0); - sysUnderTest.addSimVisionTarget(new SimVisionTarget(targetPose, 0.1, 0.025, 24)); + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); + cameraSim.setMinTargetAreaPixels(20.0); + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(0.1, 0.025), 24)); var robotPose = new Pose2d(new Translation2d(12, 0), Rotation2d.fromDegrees(5)); - sysUnderTest.processFrame(robotPose); - assertTrue(sysUnderTest.cam.getLatestResult().hasTargets()); + visionSysSim.update(robotPose); + assertTrue(camera.getLatestResult().hasTargets()); robotPose = new Pose2d(new Translation2d(0, 0), Rotation2d.fromDegrees(5)); - sysUnderTest.processFrame(robotPose); - assertFalse(sysUnderTest.cam.getLatestResult().hasTargets()); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); } @Test public void testNotVisibleTooFarForLEDs() { final var targetPose = new Pose3d(new Translation3d(15.98, 0, 1), new Rotation3d(0, 0, Math.PI)); - var sysUnderTest = new SimVisionSystem("Test", 80.0, new Transform3d(), 10, 640, 480, 1.0); - sysUnderTest.addSimVisionTarget(new SimVisionTarget(targetPose, 1.0, 0.25, 78)); + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = 
new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); + cameraSim.setMaxSightRange(10); + cameraSim.setMinTargetAreaPixels(1.0); + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(1.0, 0.25), 78)); var robotPose = new Pose2d(new Translation2d(10, 0), Rotation2d.fromDegrees(5)); - sysUnderTest.processFrame(robotPose); - assertTrue(sysUnderTest.cam.getLatestResult().hasTargets()); + visionSysSim.update(robotPose); + assertTrue(camera.getLatestResult().hasTargets()); robotPose = new Pose2d(new Translation2d(0, 0), Rotation2d.fromDegrees(5)); - sysUnderTest.processFrame(robotPose); - assertFalse(sysUnderTest.cam.getLatestResult().hasTargets()); + visionSysSim.update(robotPose); + assertFalse(camera.getLatestResult().hasTargets()); } @ParameterizedTest @@ -232,34 +280,43 @@ public void testNotVisibleTooFarForLEDs() { public void testYawAngles(double testYaw) { final var targetPose = new Pose3d(new Translation3d(15.98, 0, 1), new Rotation3d(0, 0, 3 * Math.PI / 4)); - var sysUnderTest = new SimVisionSystem("Test", 80.0, new Transform3d(), 99999, 640, 480, 0); - sysUnderTest.addSimVisionTarget(new SimVisionTarget(targetPose, 0.5, 0.5, 3)); + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); + cameraSim.setMinTargetAreaPixels(0.0); + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(0.5, 0.5), 3)); var robotPose = new Pose2d(new Translation2d(10, 0), Rotation2d.fromDegrees(-1.0 * testYaw)); - sysUnderTest.processFrame(robotPose); - var res = sysUnderTest.cam.getLatestResult(); + visionSysSim.update(robotPose); + var res = camera.getLatestResult(); assertTrue(res.hasTargets()); var tgt = res.getBestTarget(); 
- assertEquals(tgt.getYaw(), testYaw, 0.0001); + assertEquals(tgt.getYaw(), testYaw, kRotDeltaDeg); } @ParameterizedTest @ValueSource(doubles = {-10, -5, -0, -1, -2, 5, 7, 10.23, 20.21, -19.999}) public void testCameraPitch(double testPitch) { - testPitch = testPitch * -1; - final var targetPose = new Pose3d(new Translation3d(15.98, 0, 0), new Rotation3d(0, 0, 3 * Math.PI / 4)); final var robotPose = new Pose2d(new Translation2d(10, 0), new Rotation2d(0)); - var sysUnderTest = new SimVisionSystem("Test", 120.0, new Transform3d(), 99999, 640, 480, 0); - sysUnderTest.addSimVisionTarget(new SimVisionTarget(targetPose, 0.5, 0.5, 23)); + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(120)); + cameraSim.setMinTargetAreaPixels(0.0); + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(0.5, 0.5), 23)); // Transform is now robot -> camera - sysUnderTest.moveCamera( + visionSysSim.adjustCamera( + cameraSim, new Transform3d( new Translation3d(), new Rotation3d(0, Units.degreesToRadians(testPitch), 0))); - sysUnderTest.processFrame(robotPose); - var res = sysUnderTest.cam.getLatestResult(); + visionSysSim.update(robotPose); + var res = camera.getLatestResult(); assertTrue(res.hasTargets()); var tgt = res.getBestTarget(); @@ -267,29 +324,29 @@ public void testCameraPitch(double testPitch) { // the // lower half of the image // which should produce negative pitch. 
- assertEquals(testPitch * -1, tgt.getPitch(), 0.0001); + assertEquals(testPitch, tgt.getPitch(), kRotDeltaDeg); } private static Stream distCalCParamProvider() { // Arbitrary and fairly random assortment of distances, camera pitches, and heights return Stream.of( - Arguments.of(5, 15.98, 0), - Arguments.of(6, 15.98, 1), - Arguments.of(10, 15.98, 0), - Arguments.of(15, 15.98, 2), - Arguments.of(19.95, 15.98, 0), - Arguments.of(20, 15.98, 0), - Arguments.of(5, 42, 1), - Arguments.of(6, 42, 0), - Arguments.of(10, 42, 2), - Arguments.of(15, 42, 0.5), - Arguments.of(19.42, 15.98, 0), - Arguments.of(20, 42, 0), - Arguments.of(5, 55, 2), - Arguments.of(6, 55, 0), - Arguments.of(10, 54, 2.2), - Arguments.of(15, 53, 0), - Arguments.of(19.52, 15.98, 1.1)); + Arguments.of(5, -15.98, 0), + Arguments.of(6, -15.98, 1), + Arguments.of(10, -15.98, 0), + Arguments.of(15, -15.98, 2), + Arguments.of(19.95, -15.98, 0), + Arguments.of(20, -15.98, 0), + Arguments.of(5, -42, 1), + Arguments.of(6, -42, 0), + Arguments.of(10, -42, 2), + Arguments.of(15, -42, 0.5), + Arguments.of(19.42, -15.98, 0), + Arguments.of(20, -42, 0), + Arguments.of(5, -35, 2), + Arguments.of(6, -35, 0), + Arguments.of(10, -34, 3.2), + Arguments.of(15, -33, 0), + Arguments.of(19.52, -15.98, 1.1)); } @ParameterizedTest @@ -306,29 +363,32 @@ public void testDistanceCalc(double testDist, double testPitch, double testHeigh new Translation3d(0, 0, Units.feetToMeters(testHeight)), new Rotation3d(0, Units.degreesToRadians(testPitch), 0)); - var sysUnderTest = - new SimVisionSystem( - "absurdlylongnamewhichshouldneveractuallyhappenbuteehwelltestitanywaysohowsyourdaygoingihopegoodhaveagreatrestofyourlife!", - 160.0, - robotToCamera, - 99999, - 640, - 480, - 0); - sysUnderTest.addSimVisionTarget(new SimVisionTarget(targetPose, 0.5, 0.5, 0)); - - sysUnderTest.processFrame(robotPose); - var res = sysUnderTest.cam.getLatestResult(); + var visionSysSim = + new VisionSystemSim( + 
"absurdlylongnamewhichshouldneveractuallyhappenbuteehwelltestitanywaysohowsyourdaygoingihopegoodhaveagreatrestofyourlife!"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(160)); + cameraSim.setMinTargetAreaPixels(0.0); + visionSysSim.adjustCamera(cameraSim, robotToCamera); + // note that non-fiducial targets have different center point calculation and will + // return slightly inaccurate yaw/pitch values + visionSysSim.addVisionTargets(new VisionTargetSim(targetPose, new TargetModel(0.5, 0.5), 0)); + + visionSysSim.update(robotPose); + var res = camera.getLatestResult(); assertTrue(res.hasTargets()); var tgt = res.getBestTarget(); - assertEquals(tgt.getYaw(), 0.0, 0.0001); + assertEquals(0.0, tgt.getYaw(), kRotDeltaDeg); + double distMeas = PhotonUtils.calculateDistanceToTargetMeters( robotToCamera.getZ(), targetPose.getZ(), - Units.degreesToRadians(testPitch), + Units.degreesToRadians(-testPitch), Units.degreesToRadians(tgt.getPitch())); - assertEquals(Units.feetToMeters(testDist), distMeas, 0.001); + assertEquals(Units.feetToMeters(testDist), distMeas, kTrlDelta); } @Test @@ -339,92 +399,87 @@ public void testMultipleTargets() { new Pose3d(new Translation3d(15.98, 0, 0), new Rotation3d(0, 0, Math.PI)); final var targetPoseR = new Pose3d(new Translation3d(15.98, -2, 0), new Rotation3d(0, 0, Math.PI)); - var sysUnderTest = new SimVisionSystem("Test", 160.0, new Transform3d(), 99999, 640, 480, 20.0); - sysUnderTest.addSimVisionTarget( - new SimVisionTarget( + var visionSysSim = new VisionSystemSim("Test"); + var camera = new PhotonCamera("camera"); + var cameraSim = new PhotonCameraSim(camera); + visionSysSim.addCamera(cameraSim, new Transform3d()); + cameraSim.prop.setCalibration(640, 480, Rotation2d.fromDegrees(80)); + cameraSim.setMinTargetAreaPixels(20.0); + + visionSysSim.addVisionTargets( + new 
VisionTargetSim( targetPoseL.transformBy( new Transform3d(new Translation3d(0, 0, 0.00), new Rotation3d())), - 0.25, - 0.25, + TargetModel.kTag16h5, 1)); - sysUnderTest.addSimVisionTarget( - new SimVisionTarget( + visionSysSim.addVisionTargets( + new VisionTargetSim( targetPoseC.transformBy( new Transform3d(new Translation3d(0, 0, 0.00), new Rotation3d())), - 0.25, - 0.25, + TargetModel.kTag16h5, 2)); - sysUnderTest.addSimVisionTarget( - new SimVisionTarget( + visionSysSim.addVisionTargets( + new VisionTargetSim( targetPoseR.transformBy( new Transform3d(new Translation3d(0, 0, 0.00), new Rotation3d())), - 0.25, - 0.25, + TargetModel.kTag16h5, 3)); - sysUnderTest.addSimVisionTarget( - new SimVisionTarget( + visionSysSim.addVisionTargets( + new VisionTargetSim( targetPoseL.transformBy( new Transform3d(new Translation3d(0, 0, 1.00), new Rotation3d())), - 0.25, - 0.25, + TargetModel.kTag16h5, 4)); - sysUnderTest.addSimVisionTarget( - new SimVisionTarget( + visionSysSim.addVisionTargets( + new VisionTargetSim( targetPoseC.transformBy( new Transform3d(new Translation3d(0, 0, 1.00), new Rotation3d())), - 0.25, - 0.25, + TargetModel.kTag16h5, 5)); - sysUnderTest.addSimVisionTarget( - new SimVisionTarget( - targetPoseR.transformBy( + visionSysSim.addVisionTargets( + new VisionTargetSim( + targetPoseL.transformBy( new Transform3d(new Translation3d(0, 0, 1.00), new Rotation3d())), - 0.25, - 0.25, + TargetModel.kTag16h5, 6)); - sysUnderTest.addSimVisionTarget( - new SimVisionTarget( + visionSysSim.addVisionTargets( + new VisionTargetSim( targetPoseL.transformBy( new Transform3d(new Translation3d(0, 0, 0.50), new Rotation3d())), - 0.25, - 0.25, + TargetModel.kTag16h5, 7)); - sysUnderTest.addSimVisionTarget( - new SimVisionTarget( + visionSysSim.addVisionTargets( + new VisionTargetSim( targetPoseC.transformBy( new Transform3d(new Translation3d(0, 0, 0.50), new Rotation3d())), - 0.25, - 0.25, + TargetModel.kTag16h5, 8)); - sysUnderTest.addSimVisionTarget( - new SimVisionTarget( + 
visionSysSim.addVisionTargets( + new VisionTargetSim( targetPoseL.transformBy( new Transform3d(new Translation3d(0, 0, 0.75), new Rotation3d())), - 0.25, - 0.25, + TargetModel.kTag16h5, 9)); - sysUnderTest.addSimVisionTarget( - new SimVisionTarget( + visionSysSim.addVisionTargets( + new VisionTargetSim( targetPoseR.transformBy( new Transform3d(new Translation3d(0, 0, 0.75), new Rotation3d())), - 0.25, - 0.25, + TargetModel.kTag16h5, 10)); - sysUnderTest.addSimVisionTarget( - new SimVisionTarget( + visionSysSim.addVisionTargets( + new VisionTargetSim( targetPoseL.transformBy( new Transform3d(new Translation3d(0, 0, 0.25), new Rotation3d())), - 0.25, - 0.25, + TargetModel.kTag16h5, 11)); var robotPose = new Pose2d(new Translation2d(6.0, 0), Rotation2d.fromDegrees(0.25)); - sysUnderTest.processFrame(robotPose); - var res = sysUnderTest.cam.getLatestResult(); + visionSysSim.update(robotPose); + var res = camera.getLatestResult(); assertTrue(res.hasTargets()); List tgtList; tgtList = res.getTargets(); - assertEquals(tgtList.size(), 11); + assertEquals(11, tgtList.size()); } } diff --git a/photon-targeting/src/main/java/org/photonvision/targeting/PhotonTrackedTarget.java b/photon-targeting/src/main/java/org/photonvision/targeting/PhotonTrackedTarget.java index 908ca24e60..a15fca87d5 100644 --- a/photon-targeting/src/main/java/org/photonvision/targeting/PhotonTrackedTarget.java +++ b/photon-targeting/src/main/java/org/photonvision/targeting/PhotonTrackedTarget.java @@ -118,14 +118,14 @@ public List getMinAreaRectCorners() { * Return a list of the n corners in image space (origin top left, x right, y down), in no * particular order, detected for this target. * - *

For fiducials, the order is known and is always counter-clock wise around the tag, like so + *

For fiducials, the order is known and is always counter-clock wise around the tag, like so: * - *

spotless:off - * -> +X 3 ----- 2 + *

​
+     * ⟶ +X  3 ----- 2
      * |      |       |
      * V      |       |
      * +Y     0 ----- 1
-     * spotless:on
+     * 
*/ public List getDetectedCorners() { return detectedCorners; diff --git a/photonlib-cpp-examples/apriltagExample/gradlew b/photonlib-cpp-examples/apriltagExample/gradlew old mode 100755 new mode 100644 diff --git a/photonlib-java-examples/apriltagExample/simgui-ds.json b/photonlib-java-examples/apriltagExample/simgui-ds.json index 69b1a3cbcd..25e298be7f 100644 --- a/photonlib-java-examples/apriltagExample/simgui-ds.json +++ b/photonlib-java-examples/apriltagExample/simgui-ds.json @@ -1,11 +1,13 @@ { + "Keyboard 0 Settings": { + "window": { + "visible": true + } + }, "keyboardJoysticks": [ { "axisConfig": [ - { - "decKey": 65, - "incKey": 68 - }, + {}, { "decKey": 87, "incKey": 83 @@ -15,9 +17,14 @@ "decayRate": 0.0, "incKey": 82, "keyRate": 0.009999999776482582 + }, + {}, + { + "decKey": 65, + "incKey": 68 } ], - "axisCount": 3, + "axisCount": 5, "buttonCount": 4, "buttonKeys": [ 90, diff --git a/photonlib-java-examples/apriltagExample/simgui.json b/photonlib-java-examples/apriltagExample/simgui.json index ec51dae2b0..5c06de417f 100644 --- a/photonlib-java-examples/apriltagExample/simgui.json +++ b/photonlib-java-examples/apriltagExample/simgui.json @@ -1,20 +1,46 @@ { + "HALProvider": { + "Other Devices": { + "AnalogGyro[0]": { + "header": { + "open": true + } + } + } + }, "NTProvider": { "types": { "/FMSInfo": "FMSInfo", - "/SmartDashboard/Field": "Field2d" + "/LiveWindow/Ungrouped/AnalogGyro[0]": "Gyro", + "/LiveWindow/Ungrouped/MotorControllerGroup[1]": "Motor Controller", + "/LiveWindow/Ungrouped/MotorControllerGroup[2]": "Motor Controller", + "/LiveWindow/Ungrouped/PIDController[1]": "PIDController", + "/LiveWindow/Ungrouped/PIDController[2]": "PIDController", + "/SmartDashboard/Field": "Field2d", + "/SmartDashboard/VisionSystemSim-Test/Sim Field": "Field2d", + "/SmartDashboard/VisionSystemSim-YOUR CAMERA NAME/Sim Field": "Field2d" }, "windows": { "/SmartDashboard/Field": { "window": { "visible": true } + }, + "/SmartDashboard/VisionSystemSim-Test/Sim 
Field": { + "window": { + "visible": true + } + }, + "/SmartDashboard/VisionSystemSim-YOUR CAMERA NAME/Sim Field": { + "window": { + "visible": true + } } } }, "NetworkTables": { "transitory": { - "Shuffleboard": { + "LiveWindow": { "open": true }, "photonvision": { @@ -24,6 +50,9 @@ "USB_Camera": { "open": true }, + "YOUR CAMERA NAME": { + "open": true + }, "open": true, "testCamera": { "open": true diff --git a/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Drivetrain.java b/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Drivetrain.java index b470437ee7..f33d9f0510 100644 --- a/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Drivetrain.java +++ b/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Drivetrain.java @@ -38,6 +38,7 @@ import edu.wpi.first.math.system.plant.LinearSystemId; import edu.wpi.first.wpilibj.AnalogGyro; import edu.wpi.first.wpilibj.Encoder; +import edu.wpi.first.wpilibj.RobotBase; import edu.wpi.first.wpilibj.RobotController; import edu.wpi.first.wpilibj.motorcontrol.MotorController; import edu.wpi.first.wpilibj.motorcontrol.MotorControllerGroup; @@ -50,6 +51,9 @@ import frc.robot.Constants.DriveTrainConstants; import java.util.Optional; import org.photonvision.EstimatedRobotPose; +import org.photonvision.simulation.PhotonCameraSim; +import org.photonvision.simulation.SimCameraProperties; +import org.photonvision.simulation.VisionSystemSim; /** Represents a differential drive style drivetrain. 
*/ public class Drivetrain { @@ -94,7 +98,7 @@ public class Drivetrain { private final EncoderSim m_rightEncoderSim = new EncoderSim(m_rightEncoder); private final Field2d m_fieldSim = new Field2d(); private final LinearSystem m_drivetrainSystem = - LinearSystemId.identifyDrivetrainSystem(1.98, 0.2, 1.5, 0.3); + LinearSystemId.identifyDrivetrainSystem(1.98, 0.2, 1.5, 0.8); private final DifferentialDrivetrainSim m_drivetrainSimulator = new DifferentialDrivetrainSim( m_drivetrainSystem, @@ -104,6 +108,9 @@ public class Drivetrain { DriveTrainConstants.kWheelRadius, null); + // Simulated PhotonCamera -- only used in sim! + VisionSystemSim m_visionSystemSim; + /** * Constructs a differential drive object. Sets the encoder distance per pulse and resets the * gyro. @@ -128,6 +135,15 @@ public Drivetrain() { m_rightEncoder.reset(); SmartDashboard.putData("Field", m_fieldSim); + + // Only simulate our PhotonCamera in simulation + if (RobotBase.isSimulation()) { + m_visionSystemSim = new VisionSystemSim(Constants.VisionConstants.cameraName); + var cameraSim = + new PhotonCameraSim(pcw.photonCamera, SimCameraProperties.PI4_LIFECAM_640_480()); + m_visionSystemSim.addCamera(cameraSim, Constants.VisionConstants.robotToCam); + m_visionSystemSim.addVisionTargets(pcw.photonPoseEstimator.getFieldTags()); + } } /** @@ -174,6 +190,9 @@ public void simulationPeriodic() { m_rightEncoderSim.setDistance(m_drivetrainSimulator.getRightPositionMeters()); m_rightEncoderSim.setRate(m_drivetrainSimulator.getRightVelocityMetersPerSecond()); m_gyroSim.setAngle(-m_drivetrainSimulator.getHeading().getDegrees()); + + // Update results from vision as well + m_visionSystemSim.update(m_drivetrainSimulator.getPose()); } /** Updates the field-relative position. 
*/ diff --git a/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/PhotonCameraWrapper.java b/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/PhotonCameraWrapper.java index a29737a0ff..d46957886c 100644 --- a/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/PhotonCameraWrapper.java +++ b/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/PhotonCameraWrapper.java @@ -37,8 +37,8 @@ import org.photonvision.PhotonPoseEstimator.PoseStrategy; public class PhotonCameraWrapper { - private PhotonCamera photonCamera; - private PhotonPoseEstimator photonPoseEstimator; + PhotonCamera photonCamera; + PhotonPoseEstimator photonPoseEstimator; public PhotonCameraWrapper() { // Change the name of your camera here to whatever it is in the PhotonVision UI. diff --git a/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Robot.java b/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Robot.java index 8f93274573..78f0f50ab9 100644 --- a/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Robot.java +++ b/photonlib-java-examples/apriltagExample/src/main/java/frc/robot/Robot.java @@ -42,6 +42,9 @@ public class Robot extends TimedRobot { public void robotInit() { if (Robot.isSimulation()) { NetworkTableInstance instance = NetworkTableInstance.getDefault(); + + // We have to have Photon running and set to NT server mode for it to connect + // to our computer instead of to a roboRIO. instance.stopServer(); // set the NT server if simulating this code. // "localhost" for photon on desktop, or "photonvision.local" / "[ip-address]" for coprocessor @@ -51,6 +54,10 @@ public void robotInit() { m_controller = new XboxController(0); m_drive = new Drivetrain(); + + // Flush NetworkTables every loop. This ensures that robot pose and other values + // are sent during every iteration. This only applies to local NT connections! 
+ setNetworkTablesFlushEnabled(true); } @Override diff --git a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/DrivetrainSim.java b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/DrivetrainSim.java index 6675b89ca9..2686a202ba 100644 --- a/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/DrivetrainSim.java +++ b/photonlib-java-examples/simaimandrange/src/main/java/frc/robot/sim/DrivetrainSim.java @@ -41,8 +41,8 @@ import edu.wpi.first.wpilibj.smartdashboard.Field2d; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; import frc.robot.Robot; -import org.photonvision.SimVisionSystem; -import org.photonvision.SimVisionTarget; +import org.photonvision.simulation.SimVisionSystem; +import org.photonvision.simulation.SimVisionTarget; /** * Implementation of a simulation of robot physics, sensors, motor controllers Includes a Simulated diff --git a/photonlib-java-examples/simposeest/src/main/java/frc/robot/Constants.java b/photonlib-java-examples/simposeest/src/main/java/frc/robot/Constants.java index d3d7fdf7ae..e5e8ca9caa 100644 --- a/photonlib-java-examples/simposeest/src/main/java/frc/robot/Constants.java +++ b/photonlib-java-examples/simposeest/src/main/java/frc/robot/Constants.java @@ -30,7 +30,7 @@ import edu.wpi.first.math.geometry.Translation3d; import edu.wpi.first.math.kinematics.DifferentialDriveKinematics; import edu.wpi.first.math.util.Units; -import org.photonvision.SimVisionTarget; +import org.photonvision.simulation.SimVisionTarget; /** * Holding class for all physical constants that must be used throughout the codebase. 
These values diff --git a/photonlib-java-examples/simposeest/src/main/java/frc/robot/DrivetrainSim.java b/photonlib-java-examples/simposeest/src/main/java/frc/robot/DrivetrainSim.java index 2eeaaa106b..7d1eee2d47 100644 --- a/photonlib-java-examples/simposeest/src/main/java/frc/robot/DrivetrainSim.java +++ b/photonlib-java-examples/simposeest/src/main/java/frc/robot/DrivetrainSim.java @@ -38,7 +38,7 @@ import edu.wpi.first.wpilibj.simulation.DifferentialDrivetrainSim; import edu.wpi.first.wpilibj.simulation.EncoderSim; import edu.wpi.first.wpilibj.simulation.PWMSim; -import org.photonvision.SimVisionSystem; +import org.photonvision.simulation.SimVisionSystem; /** * Implementation of a simulation of robot physics, sensors, motor controllers Includes a Simulated From 7593c5ed056d4606dfdcba36c5326a06a52b53a6 Mon Sep 17 00:00:00 2001 From: amquake Date: Wed, 21 Jun 2023 08:38:30 -0700 Subject: [PATCH 2/3] Windows EOL spotless fix (#875) Fixes spotless on windows flagging every line for being LF instead of CRLF. Developers may need to reclone to fix their local history. 
--- .gitattributes | 37 +++ .styleguide | 1 + .../.wpilib/wpilib_preferences.json | 12 +- .../aimandrange/WPILib-License.md | 48 ++-- .../aimandrange/build.gradle | 232 +++++++++--------- .../aimandrange/settings.gradle | 60 ++--- .../.wpilib/wpilib_preferences.json | 12 +- .../aimattarget/WPILib-License.md | 48 ++-- .../aimattarget/build.gradle | 232 +++++++++--------- .../aimattarget/settings.gradle | 60 ++--- .../.wpilib/wpilib_preferences.json | 12 +- .../apriltagExample/WPILib-License.md | 48 ++-- .../apriltagExample/build.gradle | 232 +++++++++--------- .../apriltagExample/settings.gradle | 60 ++--- .../.wpilib/wpilib_preferences.json | 12 +- .../getinrange/WPILib-License.md | 48 ++-- .../getinrange/build.gradle | 232 +++++++++--------- .../getinrange/settings.gradle | 60 ++--- 18 files changed, 742 insertions(+), 704 deletions(-) create mode 100644 .gitattributes diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000000..6ff3f8d48f --- /dev/null +++ b/.gitattributes @@ -0,0 +1,37 @@ +# Set default behavior to automatically normalize line endings (LF on check-in). +* text=auto + +# Force batch scripts to always use CRLF line endings so that if a repo is accessed +# in Windows via a file share from Linux, the scripts will work. +*.{cmd,[cC][mM][dD]} text eol=crlf +*.{bat,[bB][aA][tT]} text eol=crlf +*.{ics,[iI][cC][sS]} text eol=crlf + +# Force bash scripts to always use LF line endings so that if a repo is accessed +# in Unix via a file share from Windows, the scripts will work. +*.sh text eol=lf + +# Ensure Spotless does not try to use CRLF line endings on Windows in the local repo. +*.gradle text eol=lf +*.java text eol=lf +*.json text eol=lf +*.md text eol=lf +*.xml text eol=lf +*.h text eol=lf +*.hpp text eol=lf +*.inc text eol=lf +*.inl text eol=lf +*.cpp text eol=lf + +# Frontend Files +*.js text eol=lf +*.vue text eol=lf + +# Denote all files that are truly binary and should not be modified. 
+*.png binary +*.jpg binary +*.jpeg binary +*.gif binary +*.so binary +*.dll binary +*.webp binary diff --git a/.styleguide b/.styleguide index 5b6275ca1f..13ce2512df 100644 --- a/.styleguide +++ b/.styleguide @@ -17,6 +17,7 @@ modifiableFileExclude { \.so$ \.dll$ \.webp$ + gradlew } includeProject { diff --git a/photonlib-cpp-examples/aimandrange/.wpilib/wpilib_preferences.json b/photonlib-cpp-examples/aimandrange/.wpilib/wpilib_preferences.json index cf67d2d09f..a6e62683de 100644 --- a/photonlib-cpp-examples/aimandrange/.wpilib/wpilib_preferences.json +++ b/photonlib-cpp-examples/aimandrange/.wpilib/wpilib_preferences.json @@ -1,6 +1,6 @@ -{ - "enableCppIntellisense": true, - "currentLanguage": "cpp", - "projectYear": "2023", - "teamNumber": 5 -} +{ + "enableCppIntellisense": true, + "currentLanguage": "cpp", + "projectYear": "2023", + "teamNumber": 5 +} diff --git a/photonlib-cpp-examples/aimandrange/WPILib-License.md b/photonlib-cpp-examples/aimandrange/WPILib-License.md index ba35a02c5c..3d5a824cad 100644 --- a/photonlib-cpp-examples/aimandrange/WPILib-License.md +++ b/photonlib-cpp-examples/aimandrange/WPILib-License.md @@ -1,24 +1,24 @@ -Copyright (c) 2009-2021 FIRST and other WPILib contributors -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of FIRST, WPILib, nor the names of other WPILib - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY FIRST AND OTHER WPILIB CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY NONINFRINGEMENT AND FITNESS FOR A PARTICULAR -PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL FIRST OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +Copyright (c) 2009-2021 FIRST and other WPILib contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of FIRST, WPILib, nor the names of other WPILib + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY FIRST AND OTHER WPILIB CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY NONINFRINGEMENT AND FITNESS FOR A PARTICULAR +PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL FIRST OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/photonlib-cpp-examples/aimandrange/build.gradle b/photonlib-cpp-examples/aimandrange/build.gradle index 05cfb9f914..67e4323b4c 100644 --- a/photonlib-cpp-examples/aimandrange/build.gradle +++ b/photonlib-cpp-examples/aimandrange/build.gradle @@ -1,116 +1,116 @@ -plugins { - id "cpp" - id "google-test-test-suite" - id "edu.wpi.first.GradleRIO" version "2023.4.2" - - id "com.dorongold.task-tree" version "2.1.0" -} - -repositories { - mavenLocal() - jcenter() -} - -apply from: "${rootDir}/../shared/examples_common.gradle" - -// Define my targets (RoboRIO) and artifacts (deployable files) -// This is added by GradleRIO's backing project DeployUtils. -deploy { - targets { - roborio(getTargetTypeClass('RoboRIO')) { - // Team number is loaded either from the .wpilib/wpilib_preferences.json - // or from command line. If not found an exception will be thrown. - // You can use getTeamOrDefault(team) instead of getTeamNumber if you - // want to store a team number in this file. 
- team = project.frc.getTeamOrDefault(5940) - debug = project.frc.getDebugOrDefault(false) - - artifacts { - // First part is artifact name, 2nd is artifact type - // getTargetTypeClass is a shortcut to get the class type using a string - - frcCpp(getArtifactTypeClass('FRCNativeArtifact')) { - } - - // Static files artifact - frcStaticFileDeploy(getArtifactTypeClass('FileTreeArtifact')) { - files = project.fileTree('src/main/deploy') - directory = '/home/lvuser/deploy' - } - } - } - } -} - -def deployArtifact = deploy.targets.roborio.artifacts.frcCpp - -// Set this to true to enable desktop support. -def includeDesktopSupport = true - -// Set to true to run simulation in debug mode -wpi.cpp.debugSimulation = false - -// Default enable simgui -wpi.sim.addGui().defaultEnabled = true -// Enable DS but not by default -wpi.sim.addDriverstation() - -model { - components { - frcUserProgram(NativeExecutableSpec) { - // We don't need to build for roborio -- if we do, we need to install - // a roborio toolchain every time we build in CI - // Ideally, we'd be able to set the roborio toolchain as optional, but - // I can't figure out how to set that environment variable from build.gradle - // (see https://github.com/wpilibsuite/native-utils/blob/2917c69fb5094e36d499c465f047dab81c68446c/ToolchainPlugin/src/main/java/edu/wpi/first/toolchain/ToolchainGraphBuildService.java#L71) - // for now, commented out - - // targetPlatform wpi.platforms.roborio - - if (includeDesktopSupport) { - targetPlatform wpi.platforms.desktop - } - - sources.cpp { - source { - srcDir 'src/main/cpp' - include '**/*.cpp', '**/*.cc' - } - exportedHeaders { - srcDir 'src/main/include' - } - } - - // Set deploy task to deploy this component - deployArtifact.component = it - - // Enable run tasks for this component - wpi.cpp.enableExternalTasks(it) - - // Enable simulation for this component - wpi.sim.enable(it) - // Defining my dependencies. In this case, WPILib (+ friends), and vendor libraries. 
- wpi.cpp.vendor.cpp(it) - wpi.cpp.deps.wpilib(it) - } - } - testSuites { - frcUserProgramTest(GoogleTestTestSuiteSpec) { - testing $.components.frcUserProgram - - sources.cpp { - source { - srcDir 'src/test/cpp' - include '**/*.cpp' - } - } - - // Enable run tasks for this component - wpi.cpp.enableExternalTasks(it) - - wpi.cpp.vendor.cpp(it) - wpi.cpp.deps.wpilib(it) - wpi.cpp.deps.googleTest(it) - } - } -} +plugins { + id "cpp" + id "google-test-test-suite" + id "edu.wpi.first.GradleRIO" version "2023.4.2" + + id "com.dorongold.task-tree" version "2.1.0" +} + +repositories { + mavenLocal() + jcenter() +} + +apply from: "${rootDir}/../shared/examples_common.gradle" + +// Define my targets (RoboRIO) and artifacts (deployable files) +// This is added by GradleRIO's backing project DeployUtils. +deploy { + targets { + roborio(getTargetTypeClass('RoboRIO')) { + // Team number is loaded either from the .wpilib/wpilib_preferences.json + // or from command line. If not found an exception will be thrown. + // You can use getTeamOrDefault(team) instead of getTeamNumber if you + // want to store a team number in this file. + team = project.frc.getTeamOrDefault(5940) + debug = project.frc.getDebugOrDefault(false) + + artifacts { + // First part is artifact name, 2nd is artifact type + // getTargetTypeClass is a shortcut to get the class type using a string + + frcCpp(getArtifactTypeClass('FRCNativeArtifact')) { + } + + // Static files artifact + frcStaticFileDeploy(getArtifactTypeClass('FileTreeArtifact')) { + files = project.fileTree('src/main/deploy') + directory = '/home/lvuser/deploy' + } + } + } + } +} + +def deployArtifact = deploy.targets.roborio.artifacts.frcCpp + +// Set this to true to enable desktop support. 
+def includeDesktopSupport = true + +// Set to true to run simulation in debug mode +wpi.cpp.debugSimulation = false + +// Default enable simgui +wpi.sim.addGui().defaultEnabled = true +// Enable DS but not by default +wpi.sim.addDriverstation() + +model { + components { + frcUserProgram(NativeExecutableSpec) { + // We don't need to build for roborio -- if we do, we need to install + // a roborio toolchain every time we build in CI + // Ideally, we'd be able to set the roborio toolchain as optional, but + // I can't figure out how to set that environment variable from build.gradle + // (see https://github.com/wpilibsuite/native-utils/blob/2917c69fb5094e36d499c465f047dab81c68446c/ToolchainPlugin/src/main/java/edu/wpi/first/toolchain/ToolchainGraphBuildService.java#L71) + // for now, commented out + + // targetPlatform wpi.platforms.roborio + + if (includeDesktopSupport) { + targetPlatform wpi.platforms.desktop + } + + sources.cpp { + source { + srcDir 'src/main/cpp' + include '**/*.cpp', '**/*.cc' + } + exportedHeaders { + srcDir 'src/main/include' + } + } + + // Set deploy task to deploy this component + deployArtifact.component = it + + // Enable run tasks for this component + wpi.cpp.enableExternalTasks(it) + + // Enable simulation for this component + wpi.sim.enable(it) + // Defining my dependencies. In this case, WPILib (+ friends), and vendor libraries. 
+ wpi.cpp.vendor.cpp(it) + wpi.cpp.deps.wpilib(it) + } + } + testSuites { + frcUserProgramTest(GoogleTestTestSuiteSpec) { + testing $.components.frcUserProgram + + sources.cpp { + source { + srcDir 'src/test/cpp' + include '**/*.cpp' + } + } + + // Enable run tasks for this component + wpi.cpp.enableExternalTasks(it) + + wpi.cpp.vendor.cpp(it) + wpi.cpp.deps.wpilib(it) + wpi.cpp.deps.googleTest(it) + } + } +} diff --git a/photonlib-cpp-examples/aimandrange/settings.gradle b/photonlib-cpp-examples/aimandrange/settings.gradle index bf0580f4a5..e387260182 100644 --- a/photonlib-cpp-examples/aimandrange/settings.gradle +++ b/photonlib-cpp-examples/aimandrange/settings.gradle @@ -1,30 +1,30 @@ -import org.gradle.internal.os.OperatingSystem - -rootProject.name = 'aimandrange' - -pluginManagement { - repositories { - mavenLocal() - jcenter() - gradlePluginPortal() - String frcYear = '2023' - File frcHome - if (OperatingSystem.current().isWindows()) { - String publicFolder = System.getenv('PUBLIC') - if (publicFolder == null) { - publicFolder = "C:\\Users\\Public" - } - def homeRoot = new File(publicFolder, "wpilib") - frcHome = new File(homeRoot, frcYear) - } else { - def userFolder = System.getProperty("user.home") - def homeRoot = new File(userFolder, "wpilib") - frcHome = new File(homeRoot, frcYear) - } - def frcHomeMaven = new File(frcHome, 'maven') - maven { - name 'frcHome' - url frcHomeMaven - } - } -} +import org.gradle.internal.os.OperatingSystem + +rootProject.name = 'aimandrange' + +pluginManagement { + repositories { + mavenLocal() + jcenter() + gradlePluginPortal() + String frcYear = '2023' + File frcHome + if (OperatingSystem.current().isWindows()) { + String publicFolder = System.getenv('PUBLIC') + if (publicFolder == null) { + publicFolder = "C:\\Users\\Public" + } + def homeRoot = new File(publicFolder, "wpilib") + frcHome = new File(homeRoot, frcYear) + } else { + def userFolder = System.getProperty("user.home") + def homeRoot = new File(userFolder, 
"wpilib") + frcHome = new File(homeRoot, frcYear) + } + def frcHomeMaven = new File(frcHome, 'maven') + maven { + name 'frcHome' + url frcHomeMaven + } + } +} diff --git a/photonlib-cpp-examples/aimattarget/.wpilib/wpilib_preferences.json b/photonlib-cpp-examples/aimattarget/.wpilib/wpilib_preferences.json index cf67d2d09f..a6e62683de 100644 --- a/photonlib-cpp-examples/aimattarget/.wpilib/wpilib_preferences.json +++ b/photonlib-cpp-examples/aimattarget/.wpilib/wpilib_preferences.json @@ -1,6 +1,6 @@ -{ - "enableCppIntellisense": true, - "currentLanguage": "cpp", - "projectYear": "2023", - "teamNumber": 5 -} +{ + "enableCppIntellisense": true, + "currentLanguage": "cpp", + "projectYear": "2023", + "teamNumber": 5 +} diff --git a/photonlib-cpp-examples/aimattarget/WPILib-License.md b/photonlib-cpp-examples/aimattarget/WPILib-License.md index ba35a02c5c..3d5a824cad 100644 --- a/photonlib-cpp-examples/aimattarget/WPILib-License.md +++ b/photonlib-cpp-examples/aimattarget/WPILib-License.md @@ -1,24 +1,24 @@ -Copyright (c) 2009-2021 FIRST and other WPILib contributors -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of FIRST, WPILib, nor the names of other WPILib - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY FIRST AND OTHER WPILIB CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY NONINFRINGEMENT AND FITNESS FOR A PARTICULAR -PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL FIRST OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +Copyright (c) 2009-2021 FIRST and other WPILib contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of FIRST, WPILib, nor the names of other WPILib + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY FIRST AND OTHER WPILIB CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY NONINFRINGEMENT AND FITNESS FOR A PARTICULAR +PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL FIRST OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/photonlib-cpp-examples/aimattarget/build.gradle b/photonlib-cpp-examples/aimattarget/build.gradle index 05cfb9f914..67e4323b4c 100644 --- a/photonlib-cpp-examples/aimattarget/build.gradle +++ b/photonlib-cpp-examples/aimattarget/build.gradle @@ -1,116 +1,116 @@ -plugins { - id "cpp" - id "google-test-test-suite" - id "edu.wpi.first.GradleRIO" version "2023.4.2" - - id "com.dorongold.task-tree" version "2.1.0" -} - -repositories { - mavenLocal() - jcenter() -} - -apply from: "${rootDir}/../shared/examples_common.gradle" - -// Define my targets (RoboRIO) and artifacts (deployable files) -// This is added by GradleRIO's backing project DeployUtils. -deploy { - targets { - roborio(getTargetTypeClass('RoboRIO')) { - // Team number is loaded either from the .wpilib/wpilib_preferences.json - // or from command line. If not found an exception will be thrown. - // You can use getTeamOrDefault(team) instead of getTeamNumber if you - // want to store a team number in this file. 
- team = project.frc.getTeamOrDefault(5940) - debug = project.frc.getDebugOrDefault(false) - - artifacts { - // First part is artifact name, 2nd is artifact type - // getTargetTypeClass is a shortcut to get the class type using a string - - frcCpp(getArtifactTypeClass('FRCNativeArtifact')) { - } - - // Static files artifact - frcStaticFileDeploy(getArtifactTypeClass('FileTreeArtifact')) { - files = project.fileTree('src/main/deploy') - directory = '/home/lvuser/deploy' - } - } - } - } -} - -def deployArtifact = deploy.targets.roborio.artifacts.frcCpp - -// Set this to true to enable desktop support. -def includeDesktopSupport = true - -// Set to true to run simulation in debug mode -wpi.cpp.debugSimulation = false - -// Default enable simgui -wpi.sim.addGui().defaultEnabled = true -// Enable DS but not by default -wpi.sim.addDriverstation() - -model { - components { - frcUserProgram(NativeExecutableSpec) { - // We don't need to build for roborio -- if we do, we need to install - // a roborio toolchain every time we build in CI - // Ideally, we'd be able to set the roborio toolchain as optional, but - // I can't figure out how to set that environment variable from build.gradle - // (see https://github.com/wpilibsuite/native-utils/blob/2917c69fb5094e36d499c465f047dab81c68446c/ToolchainPlugin/src/main/java/edu/wpi/first/toolchain/ToolchainGraphBuildService.java#L71) - // for now, commented out - - // targetPlatform wpi.platforms.roborio - - if (includeDesktopSupport) { - targetPlatform wpi.platforms.desktop - } - - sources.cpp { - source { - srcDir 'src/main/cpp' - include '**/*.cpp', '**/*.cc' - } - exportedHeaders { - srcDir 'src/main/include' - } - } - - // Set deploy task to deploy this component - deployArtifact.component = it - - // Enable run tasks for this component - wpi.cpp.enableExternalTasks(it) - - // Enable simulation for this component - wpi.sim.enable(it) - // Defining my dependencies. In this case, WPILib (+ friends), and vendor libraries. 
- wpi.cpp.vendor.cpp(it) - wpi.cpp.deps.wpilib(it) - } - } - testSuites { - frcUserProgramTest(GoogleTestTestSuiteSpec) { - testing $.components.frcUserProgram - - sources.cpp { - source { - srcDir 'src/test/cpp' - include '**/*.cpp' - } - } - - // Enable run tasks for this component - wpi.cpp.enableExternalTasks(it) - - wpi.cpp.vendor.cpp(it) - wpi.cpp.deps.wpilib(it) - wpi.cpp.deps.googleTest(it) - } - } -} +plugins { + id "cpp" + id "google-test-test-suite" + id "edu.wpi.first.GradleRIO" version "2023.4.2" + + id "com.dorongold.task-tree" version "2.1.0" +} + +repositories { + mavenLocal() + jcenter() +} + +apply from: "${rootDir}/../shared/examples_common.gradle" + +// Define my targets (RoboRIO) and artifacts (deployable files) +// This is added by GradleRIO's backing project DeployUtils. +deploy { + targets { + roborio(getTargetTypeClass('RoboRIO')) { + // Team number is loaded either from the .wpilib/wpilib_preferences.json + // or from command line. If not found an exception will be thrown. + // You can use getTeamOrDefault(team) instead of getTeamNumber if you + // want to store a team number in this file. + team = project.frc.getTeamOrDefault(5940) + debug = project.frc.getDebugOrDefault(false) + + artifacts { + // First part is artifact name, 2nd is artifact type + // getTargetTypeClass is a shortcut to get the class type using a string + + frcCpp(getArtifactTypeClass('FRCNativeArtifact')) { + } + + // Static files artifact + frcStaticFileDeploy(getArtifactTypeClass('FileTreeArtifact')) { + files = project.fileTree('src/main/deploy') + directory = '/home/lvuser/deploy' + } + } + } + } +} + +def deployArtifact = deploy.targets.roborio.artifacts.frcCpp + +// Set this to true to enable desktop support. 
+def includeDesktopSupport = true + +// Set to true to run simulation in debug mode +wpi.cpp.debugSimulation = false + +// Default enable simgui +wpi.sim.addGui().defaultEnabled = true +// Enable DS but not by default +wpi.sim.addDriverstation() + +model { + components { + frcUserProgram(NativeExecutableSpec) { + // We don't need to build for roborio -- if we do, we need to install + // a roborio toolchain every time we build in CI + // Ideally, we'd be able to set the roborio toolchain as optional, but + // I can't figure out how to set that environment variable from build.gradle + // (see https://github.com/wpilibsuite/native-utils/blob/2917c69fb5094e36d499c465f047dab81c68446c/ToolchainPlugin/src/main/java/edu/wpi/first/toolchain/ToolchainGraphBuildService.java#L71) + // for now, commented out + + // targetPlatform wpi.platforms.roborio + + if (includeDesktopSupport) { + targetPlatform wpi.platforms.desktop + } + + sources.cpp { + source { + srcDir 'src/main/cpp' + include '**/*.cpp', '**/*.cc' + } + exportedHeaders { + srcDir 'src/main/include' + } + } + + // Set deploy task to deploy this component + deployArtifact.component = it + + // Enable run tasks for this component + wpi.cpp.enableExternalTasks(it) + + // Enable simulation for this component + wpi.sim.enable(it) + // Defining my dependencies. In this case, WPILib (+ friends), and vendor libraries. 
+ wpi.cpp.vendor.cpp(it) + wpi.cpp.deps.wpilib(it) + } + } + testSuites { + frcUserProgramTest(GoogleTestTestSuiteSpec) { + testing $.components.frcUserProgram + + sources.cpp { + source { + srcDir 'src/test/cpp' + include '**/*.cpp' + } + } + + // Enable run tasks for this component + wpi.cpp.enableExternalTasks(it) + + wpi.cpp.vendor.cpp(it) + wpi.cpp.deps.wpilib(it) + wpi.cpp.deps.googleTest(it) + } + } +} diff --git a/photonlib-cpp-examples/aimattarget/settings.gradle b/photonlib-cpp-examples/aimattarget/settings.gradle index adeb0f45c4..2f39cd03ad 100644 --- a/photonlib-cpp-examples/aimattarget/settings.gradle +++ b/photonlib-cpp-examples/aimattarget/settings.gradle @@ -1,30 +1,30 @@ -import org.gradle.internal.os.OperatingSystem - -rootProject.name = 'aimattarget' - -pluginManagement { - repositories { - mavenLocal() - jcenter() - gradlePluginPortal() - String frcYear = '2023' - File frcHome - if (OperatingSystem.current().isWindows()) { - String publicFolder = System.getenv('PUBLIC') - if (publicFolder == null) { - publicFolder = "C:\\Users\\Public" - } - def homeRoot = new File(publicFolder, "wpilib") - frcHome = new File(homeRoot, frcYear) - } else { - def userFolder = System.getProperty("user.home") - def homeRoot = new File(userFolder, "wpilib") - frcHome = new File(homeRoot, frcYear) - } - def frcHomeMaven = new File(frcHome, 'maven') - maven { - name 'frcHome' - url frcHomeMaven - } - } -} +import org.gradle.internal.os.OperatingSystem + +rootProject.name = 'aimattarget' + +pluginManagement { + repositories { + mavenLocal() + jcenter() + gradlePluginPortal() + String frcYear = '2023' + File frcHome + if (OperatingSystem.current().isWindows()) { + String publicFolder = System.getenv('PUBLIC') + if (publicFolder == null) { + publicFolder = "C:\\Users\\Public" + } + def homeRoot = new File(publicFolder, "wpilib") + frcHome = new File(homeRoot, frcYear) + } else { + def userFolder = System.getProperty("user.home") + def homeRoot = new File(userFolder, 
"wpilib") + frcHome = new File(homeRoot, frcYear) + } + def frcHomeMaven = new File(frcHome, 'maven') + maven { + name 'frcHome' + url frcHomeMaven + } + } +} diff --git a/photonlib-cpp-examples/apriltagExample/.wpilib/wpilib_preferences.json b/photonlib-cpp-examples/apriltagExample/.wpilib/wpilib_preferences.json index cf67d2d09f..a6e62683de 100644 --- a/photonlib-cpp-examples/apriltagExample/.wpilib/wpilib_preferences.json +++ b/photonlib-cpp-examples/apriltagExample/.wpilib/wpilib_preferences.json @@ -1,6 +1,6 @@ -{ - "enableCppIntellisense": true, - "currentLanguage": "cpp", - "projectYear": "2023", - "teamNumber": 5 -} +{ + "enableCppIntellisense": true, + "currentLanguage": "cpp", + "projectYear": "2023", + "teamNumber": 5 +} diff --git a/photonlib-cpp-examples/apriltagExample/WPILib-License.md b/photonlib-cpp-examples/apriltagExample/WPILib-License.md index ba35a02c5c..3d5a824cad 100644 --- a/photonlib-cpp-examples/apriltagExample/WPILib-License.md +++ b/photonlib-cpp-examples/apriltagExample/WPILib-License.md @@ -1,24 +1,24 @@ -Copyright (c) 2009-2021 FIRST and other WPILib contributors -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of FIRST, WPILib, nor the names of other WPILib - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY FIRST AND OTHER WPILIB CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY NONINFRINGEMENT AND FITNESS FOR A PARTICULAR -PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL FIRST OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +Copyright (c) 2009-2021 FIRST and other WPILib contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of FIRST, WPILib, nor the names of other WPILib + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY FIRST AND OTHER WPILIB CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY NONINFRINGEMENT AND FITNESS FOR A PARTICULAR +PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL FIRST OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/photonlib-cpp-examples/apriltagExample/build.gradle b/photonlib-cpp-examples/apriltagExample/build.gradle index 05cfb9f914..67e4323b4c 100644 --- a/photonlib-cpp-examples/apriltagExample/build.gradle +++ b/photonlib-cpp-examples/apriltagExample/build.gradle @@ -1,116 +1,116 @@ -plugins { - id "cpp" - id "google-test-test-suite" - id "edu.wpi.first.GradleRIO" version "2023.4.2" - - id "com.dorongold.task-tree" version "2.1.0" -} - -repositories { - mavenLocal() - jcenter() -} - -apply from: "${rootDir}/../shared/examples_common.gradle" - -// Define my targets (RoboRIO) and artifacts (deployable files) -// This is added by GradleRIO's backing project DeployUtils. -deploy { - targets { - roborio(getTargetTypeClass('RoboRIO')) { - // Team number is loaded either from the .wpilib/wpilib_preferences.json - // or from command line. If not found an exception will be thrown. - // You can use getTeamOrDefault(team) instead of getTeamNumber if you - // want to store a team number in this file. 
- team = project.frc.getTeamOrDefault(5940) - debug = project.frc.getDebugOrDefault(false) - - artifacts { - // First part is artifact name, 2nd is artifact type - // getTargetTypeClass is a shortcut to get the class type using a string - - frcCpp(getArtifactTypeClass('FRCNativeArtifact')) { - } - - // Static files artifact - frcStaticFileDeploy(getArtifactTypeClass('FileTreeArtifact')) { - files = project.fileTree('src/main/deploy') - directory = '/home/lvuser/deploy' - } - } - } - } -} - -def deployArtifact = deploy.targets.roborio.artifacts.frcCpp - -// Set this to true to enable desktop support. -def includeDesktopSupport = true - -// Set to true to run simulation in debug mode -wpi.cpp.debugSimulation = false - -// Default enable simgui -wpi.sim.addGui().defaultEnabled = true -// Enable DS but not by default -wpi.sim.addDriverstation() - -model { - components { - frcUserProgram(NativeExecutableSpec) { - // We don't need to build for roborio -- if we do, we need to install - // a roborio toolchain every time we build in CI - // Ideally, we'd be able to set the roborio toolchain as optional, but - // I can't figure out how to set that environment variable from build.gradle - // (see https://github.com/wpilibsuite/native-utils/blob/2917c69fb5094e36d499c465f047dab81c68446c/ToolchainPlugin/src/main/java/edu/wpi/first/toolchain/ToolchainGraphBuildService.java#L71) - // for now, commented out - - // targetPlatform wpi.platforms.roborio - - if (includeDesktopSupport) { - targetPlatform wpi.platforms.desktop - } - - sources.cpp { - source { - srcDir 'src/main/cpp' - include '**/*.cpp', '**/*.cc' - } - exportedHeaders { - srcDir 'src/main/include' - } - } - - // Set deploy task to deploy this component - deployArtifact.component = it - - // Enable run tasks for this component - wpi.cpp.enableExternalTasks(it) - - // Enable simulation for this component - wpi.sim.enable(it) - // Defining my dependencies. In this case, WPILib (+ friends), and vendor libraries. 
- wpi.cpp.vendor.cpp(it) - wpi.cpp.deps.wpilib(it) - } - } - testSuites { - frcUserProgramTest(GoogleTestTestSuiteSpec) { - testing $.components.frcUserProgram - - sources.cpp { - source { - srcDir 'src/test/cpp' - include '**/*.cpp' - } - } - - // Enable run tasks for this component - wpi.cpp.enableExternalTasks(it) - - wpi.cpp.vendor.cpp(it) - wpi.cpp.deps.wpilib(it) - wpi.cpp.deps.googleTest(it) - } - } -} +plugins { + id "cpp" + id "google-test-test-suite" + id "edu.wpi.first.GradleRIO" version "2023.4.2" + + id "com.dorongold.task-tree" version "2.1.0" +} + +repositories { + mavenLocal() + jcenter() +} + +apply from: "${rootDir}/../shared/examples_common.gradle" + +// Define my targets (RoboRIO) and artifacts (deployable files) +// This is added by GradleRIO's backing project DeployUtils. +deploy { + targets { + roborio(getTargetTypeClass('RoboRIO')) { + // Team number is loaded either from the .wpilib/wpilib_preferences.json + // or from command line. If not found an exception will be thrown. + // You can use getTeamOrDefault(team) instead of getTeamNumber if you + // want to store a team number in this file. + team = project.frc.getTeamOrDefault(5940) + debug = project.frc.getDebugOrDefault(false) + + artifacts { + // First part is artifact name, 2nd is artifact type + // getTargetTypeClass is a shortcut to get the class type using a string + + frcCpp(getArtifactTypeClass('FRCNativeArtifact')) { + } + + // Static files artifact + frcStaticFileDeploy(getArtifactTypeClass('FileTreeArtifact')) { + files = project.fileTree('src/main/deploy') + directory = '/home/lvuser/deploy' + } + } + } + } +} + +def deployArtifact = deploy.targets.roborio.artifacts.frcCpp + +// Set this to true to enable desktop support. 
+def includeDesktopSupport = true + +// Set to true to run simulation in debug mode +wpi.cpp.debugSimulation = false + +// Default enable simgui +wpi.sim.addGui().defaultEnabled = true +// Enable DS but not by default +wpi.sim.addDriverstation() + +model { + components { + frcUserProgram(NativeExecutableSpec) { + // We don't need to build for roborio -- if we do, we need to install + // a roborio toolchain every time we build in CI + // Ideally, we'd be able to set the roborio toolchain as optional, but + // I can't figure out how to set that environment variable from build.gradle + // (see https://github.com/wpilibsuite/native-utils/blob/2917c69fb5094e36d499c465f047dab81c68446c/ToolchainPlugin/src/main/java/edu/wpi/first/toolchain/ToolchainGraphBuildService.java#L71) + // for now, commented out + + // targetPlatform wpi.platforms.roborio + + if (includeDesktopSupport) { + targetPlatform wpi.platforms.desktop + } + + sources.cpp { + source { + srcDir 'src/main/cpp' + include '**/*.cpp', '**/*.cc' + } + exportedHeaders { + srcDir 'src/main/include' + } + } + + // Set deploy task to deploy this component + deployArtifact.component = it + + // Enable run tasks for this component + wpi.cpp.enableExternalTasks(it) + + // Enable simulation for this component + wpi.sim.enable(it) + // Defining my dependencies. In this case, WPILib (+ friends), and vendor libraries. 
+ wpi.cpp.vendor.cpp(it) + wpi.cpp.deps.wpilib(it) + } + } + testSuites { + frcUserProgramTest(GoogleTestTestSuiteSpec) { + testing $.components.frcUserProgram + + sources.cpp { + source { + srcDir 'src/test/cpp' + include '**/*.cpp' + } + } + + // Enable run tasks for this component + wpi.cpp.enableExternalTasks(it) + + wpi.cpp.vendor.cpp(it) + wpi.cpp.deps.wpilib(it) + wpi.cpp.deps.googleTest(it) + } + } +} diff --git a/photonlib-cpp-examples/apriltagExample/settings.gradle b/photonlib-cpp-examples/apriltagExample/settings.gradle index adeb0f45c4..2f39cd03ad 100644 --- a/photonlib-cpp-examples/apriltagExample/settings.gradle +++ b/photonlib-cpp-examples/apriltagExample/settings.gradle @@ -1,30 +1,30 @@ -import org.gradle.internal.os.OperatingSystem - -rootProject.name = 'aimattarget' - -pluginManagement { - repositories { - mavenLocal() - jcenter() - gradlePluginPortal() - String frcYear = '2023' - File frcHome - if (OperatingSystem.current().isWindows()) { - String publicFolder = System.getenv('PUBLIC') - if (publicFolder == null) { - publicFolder = "C:\\Users\\Public" - } - def homeRoot = new File(publicFolder, "wpilib") - frcHome = new File(homeRoot, frcYear) - } else { - def userFolder = System.getProperty("user.home") - def homeRoot = new File(userFolder, "wpilib") - frcHome = new File(homeRoot, frcYear) - } - def frcHomeMaven = new File(frcHome, 'maven') - maven { - name 'frcHome' - url frcHomeMaven - } - } -} +import org.gradle.internal.os.OperatingSystem + +rootProject.name = 'aimattarget' + +pluginManagement { + repositories { + mavenLocal() + jcenter() + gradlePluginPortal() + String frcYear = '2023' + File frcHome + if (OperatingSystem.current().isWindows()) { + String publicFolder = System.getenv('PUBLIC') + if (publicFolder == null) { + publicFolder = "C:\\Users\\Public" + } + def homeRoot = new File(publicFolder, "wpilib") + frcHome = new File(homeRoot, frcYear) + } else { + def userFolder = System.getProperty("user.home") + def homeRoot = new 
File(userFolder, "wpilib") + frcHome = new File(homeRoot, frcYear) + } + def frcHomeMaven = new File(frcHome, 'maven') + maven { + name 'frcHome' + url frcHomeMaven + } + } +} diff --git a/photonlib-cpp-examples/getinrange/.wpilib/wpilib_preferences.json b/photonlib-cpp-examples/getinrange/.wpilib/wpilib_preferences.json index cf67d2d09f..a6e62683de 100644 --- a/photonlib-cpp-examples/getinrange/.wpilib/wpilib_preferences.json +++ b/photonlib-cpp-examples/getinrange/.wpilib/wpilib_preferences.json @@ -1,6 +1,6 @@ -{ - "enableCppIntellisense": true, - "currentLanguage": "cpp", - "projectYear": "2023", - "teamNumber": 5 -} +{ + "enableCppIntellisense": true, + "currentLanguage": "cpp", + "projectYear": "2023", + "teamNumber": 5 +} diff --git a/photonlib-cpp-examples/getinrange/WPILib-License.md b/photonlib-cpp-examples/getinrange/WPILib-License.md index ba35a02c5c..3d5a824cad 100644 --- a/photonlib-cpp-examples/getinrange/WPILib-License.md +++ b/photonlib-cpp-examples/getinrange/WPILib-License.md @@ -1,24 +1,24 @@ -Copyright (c) 2009-2021 FIRST and other WPILib contributors -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of FIRST, WPILib, nor the names of other WPILib - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY FIRST AND OTHER WPILIB CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY NONINFRINGEMENT AND FITNESS FOR A PARTICULAR -PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL FIRST OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +Copyright (c) 2009-2021 FIRST and other WPILib contributors +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of FIRST, WPILib, nor the names of other WPILib + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY FIRST AND OTHER WPILIB CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY NONINFRINGEMENT AND FITNESS FOR A PARTICULAR +PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL FIRST OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/photonlib-cpp-examples/getinrange/build.gradle b/photonlib-cpp-examples/getinrange/build.gradle index 05cfb9f914..67e4323b4c 100644 --- a/photonlib-cpp-examples/getinrange/build.gradle +++ b/photonlib-cpp-examples/getinrange/build.gradle @@ -1,116 +1,116 @@ -plugins { - id "cpp" - id "google-test-test-suite" - id "edu.wpi.first.GradleRIO" version "2023.4.2" - - id "com.dorongold.task-tree" version "2.1.0" -} - -repositories { - mavenLocal() - jcenter() -} - -apply from: "${rootDir}/../shared/examples_common.gradle" - -// Define my targets (RoboRIO) and artifacts (deployable files) -// This is added by GradleRIO's backing project DeployUtils. -deploy { - targets { - roborio(getTargetTypeClass('RoboRIO')) { - // Team number is loaded either from the .wpilib/wpilib_preferences.json - // or from command line. If not found an exception will be thrown. - // You can use getTeamOrDefault(team) instead of getTeamNumber if you - // want to store a team number in this file. 
- team = project.frc.getTeamOrDefault(5940) - debug = project.frc.getDebugOrDefault(false) - - artifacts { - // First part is artifact name, 2nd is artifact type - // getTargetTypeClass is a shortcut to get the class type using a string - - frcCpp(getArtifactTypeClass('FRCNativeArtifact')) { - } - - // Static files artifact - frcStaticFileDeploy(getArtifactTypeClass('FileTreeArtifact')) { - files = project.fileTree('src/main/deploy') - directory = '/home/lvuser/deploy' - } - } - } - } -} - -def deployArtifact = deploy.targets.roborio.artifacts.frcCpp - -// Set this to true to enable desktop support. -def includeDesktopSupport = true - -// Set to true to run simulation in debug mode -wpi.cpp.debugSimulation = false - -// Default enable simgui -wpi.sim.addGui().defaultEnabled = true -// Enable DS but not by default -wpi.sim.addDriverstation() - -model { - components { - frcUserProgram(NativeExecutableSpec) { - // We don't need to build for roborio -- if we do, we need to install - // a roborio toolchain every time we build in CI - // Ideally, we'd be able to set the roborio toolchain as optional, but - // I can't figure out how to set that environment variable from build.gradle - // (see https://github.com/wpilibsuite/native-utils/blob/2917c69fb5094e36d499c465f047dab81c68446c/ToolchainPlugin/src/main/java/edu/wpi/first/toolchain/ToolchainGraphBuildService.java#L71) - // for now, commented out - - // targetPlatform wpi.platforms.roborio - - if (includeDesktopSupport) { - targetPlatform wpi.platforms.desktop - } - - sources.cpp { - source { - srcDir 'src/main/cpp' - include '**/*.cpp', '**/*.cc' - } - exportedHeaders { - srcDir 'src/main/include' - } - } - - // Set deploy task to deploy this component - deployArtifact.component = it - - // Enable run tasks for this component - wpi.cpp.enableExternalTasks(it) - - // Enable simulation for this component - wpi.sim.enable(it) - // Defining my dependencies. In this case, WPILib (+ friends), and vendor libraries. 
- wpi.cpp.vendor.cpp(it) - wpi.cpp.deps.wpilib(it) - } - } - testSuites { - frcUserProgramTest(GoogleTestTestSuiteSpec) { - testing $.components.frcUserProgram - - sources.cpp { - source { - srcDir 'src/test/cpp' - include '**/*.cpp' - } - } - - // Enable run tasks for this component - wpi.cpp.enableExternalTasks(it) - - wpi.cpp.vendor.cpp(it) - wpi.cpp.deps.wpilib(it) - wpi.cpp.deps.googleTest(it) - } - } -} +plugins { + id "cpp" + id "google-test-test-suite" + id "edu.wpi.first.GradleRIO" version "2023.4.2" + + id "com.dorongold.task-tree" version "2.1.0" +} + +repositories { + mavenLocal() + jcenter() +} + +apply from: "${rootDir}/../shared/examples_common.gradle" + +// Define my targets (RoboRIO) and artifacts (deployable files) +// This is added by GradleRIO's backing project DeployUtils. +deploy { + targets { + roborio(getTargetTypeClass('RoboRIO')) { + // Team number is loaded either from the .wpilib/wpilib_preferences.json + // or from command line. If not found an exception will be thrown. + // You can use getTeamOrDefault(team) instead of getTeamNumber if you + // want to store a team number in this file. + team = project.frc.getTeamOrDefault(5940) + debug = project.frc.getDebugOrDefault(false) + + artifacts { + // First part is artifact name, 2nd is artifact type + // getTargetTypeClass is a shortcut to get the class type using a string + + frcCpp(getArtifactTypeClass('FRCNativeArtifact')) { + } + + // Static files artifact + frcStaticFileDeploy(getArtifactTypeClass('FileTreeArtifact')) { + files = project.fileTree('src/main/deploy') + directory = '/home/lvuser/deploy' + } + } + } + } +} + +def deployArtifact = deploy.targets.roborio.artifacts.frcCpp + +// Set this to true to enable desktop support. 
+def includeDesktopSupport = true + +// Set to true to run simulation in debug mode +wpi.cpp.debugSimulation = false + +// Default enable simgui +wpi.sim.addGui().defaultEnabled = true +// Enable DS but not by default +wpi.sim.addDriverstation() + +model { + components { + frcUserProgram(NativeExecutableSpec) { + // We don't need to build for roborio -- if we do, we need to install + // a roborio toolchain every time we build in CI + // Ideally, we'd be able to set the roborio toolchain as optional, but + // I can't figure out how to set that environment variable from build.gradle + // (see https://github.com/wpilibsuite/native-utils/blob/2917c69fb5094e36d499c465f047dab81c68446c/ToolchainPlugin/src/main/java/edu/wpi/first/toolchain/ToolchainGraphBuildService.java#L71) + // for now, commented out + + // targetPlatform wpi.platforms.roborio + + if (includeDesktopSupport) { + targetPlatform wpi.platforms.desktop + } + + sources.cpp { + source { + srcDir 'src/main/cpp' + include '**/*.cpp', '**/*.cc' + } + exportedHeaders { + srcDir 'src/main/include' + } + } + + // Set deploy task to deploy this component + deployArtifact.component = it + + // Enable run tasks for this component + wpi.cpp.enableExternalTasks(it) + + // Enable simulation for this component + wpi.sim.enable(it) + // Defining my dependencies. In this case, WPILib (+ friends), and vendor libraries. 
+ wpi.cpp.vendor.cpp(it) + wpi.cpp.deps.wpilib(it) + } + } + testSuites { + frcUserProgramTest(GoogleTestTestSuiteSpec) { + testing $.components.frcUserProgram + + sources.cpp { + source { + srcDir 'src/test/cpp' + include '**/*.cpp' + } + } + + // Enable run tasks for this component + wpi.cpp.enableExternalTasks(it) + + wpi.cpp.vendor.cpp(it) + wpi.cpp.deps.wpilib(it) + wpi.cpp.deps.googleTest(it) + } + } +} diff --git a/photonlib-cpp-examples/getinrange/settings.gradle b/photonlib-cpp-examples/getinrange/settings.gradle index bf1225d1d3..8301f6020c 100644 --- a/photonlib-cpp-examples/getinrange/settings.gradle +++ b/photonlib-cpp-examples/getinrange/settings.gradle @@ -1,30 +1,30 @@ -import org.gradle.internal.os.OperatingSystem - -rootProject.name = 'getinrange' - -pluginManagement { - repositories { - mavenLocal() - jcenter() - gradlePluginPortal() - String frcYear = '2023' - File frcHome - if (OperatingSystem.current().isWindows()) { - String publicFolder = System.getenv('PUBLIC') - if (publicFolder == null) { - publicFolder = "C:\\Users\\Public" - } - def homeRoot = new File(publicFolder, "wpilib") - frcHome = new File(homeRoot, frcYear) - } else { - def userFolder = System.getProperty("user.home") - def homeRoot = new File(userFolder, "wpilib") - frcHome = new File(homeRoot, frcYear) - } - def frcHomeMaven = new File(frcHome, 'maven') - maven { - name 'frcHome' - url frcHomeMaven - } - } -} +import org.gradle.internal.os.OperatingSystem + +rootProject.name = 'getinrange' + +pluginManagement { + repositories { + mavenLocal() + jcenter() + gradlePluginPortal() + String frcYear = '2023' + File frcHome + if (OperatingSystem.current().isWindows()) { + String publicFolder = System.getenv('PUBLIC') + if (publicFolder == null) { + publicFolder = "C:\\Users\\Public" + } + def homeRoot = new File(publicFolder, "wpilib") + frcHome = new File(homeRoot, frcYear) + } else { + def userFolder = System.getProperty("user.home") + def homeRoot = new File(userFolder, "wpilib") 
+ frcHome = new File(homeRoot, frcYear) + } + def frcHomeMaven = new File(frcHome, 'maven') + maven { + name 'frcHome' + url frcHomeMaven + } + } +} From 715ef62c85b631b68f446c4bc37c28bfa57c5ec6 Mon Sep 17 00:00:00 2001 From: Sriman Achanta <68172138+srimanachanta@users.noreply.github.com> Date: Sun, 25 Jun 2023 21:07:27 -0400 Subject: [PATCH 3/3] Update backend to provide more useful info to frontend (#866) --- photon-client/src/App.vue | 2 +- .../pipeline/CameraAndPipelineSelect.vue | 56 +- photon-client/src/views/CamerasView.vue | 99 +-- photon-client/src/views/LogsView.vue | 2 +- .../src/views/PipelineViews/OutputTab.vue | 2 +- .../src/views/SettingsViews/DeviceControl.vue | 292 ++++++--- .../src/views/SettingsViews/Networking.vue | 47 +- .../src/views/SettingsViews/Stats.vue | 19 - .../common/configuration/ConfigManager.java | 18 +- .../common/configuration/ConfigProvider.java | 8 +- .../configuration/LegacyConfigProvider.java | 21 +- .../common/configuration/NetworkConfig.java | 9 - .../configuration/SqlConfigProvider.java | 36 +- .../common/util/file/FileUtils.java | 35 +- .../src/main/java/org/photonvision/Main.java | 2 +- .../photonvision/server/RequestHandler.java | 611 +++++++++++------- .../java/org/photonvision/server/Server.java | 64 +- 17 files changed, 838 insertions(+), 485 deletions(-) diff --git a/photon-client/src/App.vue b/photon-client/src/App.vue index 164253f09f..053635c21d 100644 --- a/photon-client/src/App.vue +++ b/photon-client/src/App.vue @@ -345,7 +345,7 @@ export default { this.previouslySelectedIndices = null; }, switchToSettingsTab() { - this.axios.post('http://' + this.$address + '/api/sendMetrics', {}) + this.axios.post('http://' + this.$address + '/api/utils/publishMetrics') } } }; diff --git a/photon-client/src/components/pipeline/CameraAndPipelineSelect.vue b/photon-client/src/components/pipeline/CameraAndPipelineSelect.vue index 7fd3a1ccd9..66c7404ec8 100644 --- 
a/photon-client/src/components/pipeline/CameraAndPipelineSelect.vue +++ b/photon-client/src/components/pipeline/CameraAndPipelineSelect.vue @@ -1,5 +1,13 @@ @@ -431,17 +432,17 @@ export default { }, data() { return { - snack: false, + calibrationDialog: false, calibrationInProgress: false, calibrationFailed: false, filteredVideomodeIndex: 0, settingsValid: true, unfilteredStreamDivisors: [1, 2, 4], - uploadSnackData: { + snackbar: { color: "success", text: "", }, - uploadSnack: false, + snack: false, } }, computed: { @@ -617,43 +618,34 @@ export default { }; this.axios - .post("http://" + this.$address + "/api/calibration/import", data, { + .post("http://" + this.$address + "/api/calibration/importFromCalibDB", data, { headers: { "Content-Type": "text/plain" }, }) - .then(() => { - this.uploadSnackData = { - color: "success", - text: - "Calibration imported successfully!", - }; - this.uploadSnack = true; + .then((response) => { + this.snackbar = { + color: response.status === 200 ? "success" : "error", + text: response.data.text || response.data + } + this.snack = true; }) .catch((err) => { - if (err.response) { - this.uploadSnackData = { + if (err.request) { + this.snackbar = { color: "error", - text: - "Error while uploading calibration file! Could not process provided file.", - }; - } else if (err.request) { - this.uploadSnackData = { - color: "error", - text: - "Error while uploading calibration file! No respond to upload attempt.", + text: "Error while uploading calibration file! 
The backend didn't respond to the upload attempt.", }; } else { - this.uploadSnackData = { + this.snackbar = { color: "error", text: "Error while uploading calibration file!", }; } - this.uploadSnack = true; + this.snack = true; }); - }) }, closeDialog() { - this.snack = false; + this.calibrationDialog = false; this.calibrationInProgress = false; this.calibrationFailed = false; }, @@ -747,15 +739,33 @@ export default { doc.save(`calibrationTarget-${config.type}.pdf`) }, sendCameraSettings() { - this.axios.post("http://" + this.$address + "/api/settings/camera", { - "settings": this.cameraSettings, - "index": this.$store.state.currentCameraIndex - }).then(response => { - if (response.status === 200) { - this.$store.state.saveBar = true; + this.axios.post("http://" + this.$address + "/api/settings/camera", {"settings": this.cameraSettings, "index": this.$store.state.currentCameraIndex}) + .then(response => { + this.snackbar = { + color: "success", + text: response.data.text || response.data + } + this.snack = true; + }) + .catch(error => { + if(error.response) { + this.snackbar = { + color: "error", + text: error.response.data.text || error.response.data } - } - ) + } else if(error.request) { + this.snackbar = { + color: "error", + text: "Error while trying to process the request! 
The backend didn't respond.", + }; + } else { + this.snackbar = { + color: "error", + text: "An error occurred while trying to process the request.", + }; + } + this.snack = true; + }) }, isCalibrated(resolution) { return this.$store.getters.currentCameraSettings.calibrations @@ -782,16 +792,13 @@ export default { sendCalibrationFinish() { console.log("finishing calibration for index " + this.$store.getters.currentCameraIndex); - this.snack = true; + this.calibrationDialog = true; this.calibrationInProgress = true; - this.axios.post("http://" + this.$address + "/api/settings/endCalibration", {idx: this.$store.getters.currentCameraIndex}) - .then((response) => { - if (response.status === 200) { - this.calibrationInProgress = false; - } else { - this.calibrationFailed = true; - } + this.axios.post("http://" + this.$address + "/api/calibration/end", {index: this.$store.getters.currentCameraIndex}) + .then(() => { + // End calibration will always return a 200 code on success + this.calibrationInProgress = false; } ).catch(() => { this.calibrationFailed = true; diff --git a/photon-client/src/views/LogsView.vue b/photon-client/src/views/LogsView.vue index fbb9c86d53..ca05a334b2 100644 --- a/photon-client/src/views/LogsView.vue +++ b/photon-client/src/views/LogsView.vue @@ -23,7 +23,7 @@ diff --git a/photon-client/src/views/PipelineViews/OutputTab.vue b/photon-client/src/views/PipelineViews/OutputTab.vue index 66906ccc2b..7866b832b9 100644 --- a/photon-client/src/views/PipelineViews/OutputTab.vue +++ b/photon-client/src/views/PipelineViews/OutputTab.vue @@ -43,7 +43,7 @@ /> diff --git a/photon-client/src/views/SettingsViews/DeviceControl.vue b/photon-client/src/views/SettingsViews/DeviceControl.vue index ef6c772ee8..ea4f7054d8 100644 --- a/photon-client/src/views/SettingsViews/DeviceControl.vue +++ b/photon-client/src/views/SettingsViews/DeviceControl.vue @@ -54,7 +54,7 @@ > mdi-import @@ -62,7 +62,6 @@ Import Settings - - - {{ snackbar.text }} - - - + + Import Settings + 
+ Upload and apply previously saved or exported PhotonVision settings to this device + + + + + + + + + + mdi-import + + Import Settings + + + + + + @@ -156,18 +196,28 @@ +